finally fully working image uploads

parent 72ecc7b266
commit 8a96fbb4d0

.env.sample (10 changed lines)
@@ -58,7 +58,7 @@ LND_SOCKET=sn_lnd:10009
 NOSTR_PRIVATE_KEY=5f30b7e7714360f51f2be2e30c1d93b7fdf67366e730658e85777dfcc4e4245f

 # imgproxy
-NEXT_PUBLIC_IMGPROXY_URL=http://imgproxy:3001
+NEXT_PUBLIC_IMGPROXY_URL=http://localhost:3001
 IMGPROXY_KEY=9c273e803fd5d444bf8883f8c3000de57bee7995222370cab7f2d218dd9a4bbff6ca11cbf902e61eeef4358616f231da51e183aee6841e3a797a5c9a9530ba67
 IMGPROXY_SALT=47b802be2c9250a66b998f411fc63912ab0bc1c6b47d99b8d37c61019d1312a984b98745eac83db9791b01bb8c93ecbc9b2ef9f2981d66061c7d0a4528ff6465

@@ -74,8 +74,8 @@ IMGPROXY_DOWNLOAD_TIMEOUT=9
 # IMGPROXY_ENABLE_DEBUG_HEADERS=true

 NEXT_PUBLIC_AWS_UPLOAD_BUCKET=uploads
-NEXT_PUBLIC_MEDIA_DOMAIN=s3:4566
-NEXT_PUBLIC_MEDIA_URL=http://s3:4566/uploads
+NEXT_PUBLIC_MEDIA_DOMAIN=localhost:4566
+NEXT_PUBLIC_MEDIA_URL=http://localhost:4566/uploads

 # search
 OPENSEARCH_URL=http://opensearch:9200
@@ -91,6 +91,10 @@ DATABASE_URL="postgresql://sn:password@db:5432/stackernews?schema=public"
 # FOR DOCKER ONLY #
 ###################

+# containers can't use localhost, so we need to use the container name
+IMGPROXY_URL_DOCKER=http://imgproxy:8080
+MEDIA_URL_DOCKER=http://s3:4566/uploads
+
 # postgres container stuff
 POSTGRES_PASSWORD=password
 POSTGRES_USER=sn
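The environment split above is what the rest of the commit builds on: NEXT_PUBLIC_* values must be reachable from the browser (localhost), while the worker and imgproxy containers have to use the compose service names via the new *_DOCKER variables. A minimal sketch of how server-side code can resolve a media base URL from these variables, mirroring the precedence the MEDIA_URL constant gets later in this commit (the logging line is illustrative only):

    // resolve the media base URL: docker-internal value first, then the
    // browser-reachable one, then the legacy domain fallback
    const mediaUrl =
      process.env.MEDIA_URL_DOCKER ||                    // http://s3:4566/uploads inside docker
      process.env.NEXT_PUBLIC_MEDIA_URL ||               // http://localhost:4566/uploads in dev
      `https://${process.env.NEXT_PUBLIC_MEDIA_DOMAIN}`  // old media domain env var
    console.log('media base url:', mediaUrl)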
@@ -8,11 +8,17 @@ AWS.config.update({
   region: bucketRegion
 })

+const config = {
+  apiVersion: '2006-03-01',
+  endpoint: process.env.NODE_ENV === 'development' ? `${MEDIA_URL}` : undefined,
+  s3ForcePathStyle: process.env.NODE_ENV === 'development'
+}
+
 export function createPresignedPost ({ key, type, size }) {
   const s3 = new AWS.S3({
-    apiVersion: '2006-03-01',
-    endpoint: process.env.NODE_ENV === 'development' ? `${MEDIA_URL}/${Bucket}` : undefined,
-    s3ForcePathStyle: process.env.NODE_ENV === 'development'
+    ...config,
+    // in development, we need to be able to call this from localhost
+    endpoint: process.env.NODE_ENV === 'development' ? `${process.env.NEXT_PUBLIC_MEDIA_URL}` : undefined
   })
   return new Promise((resolve, reject) => {
     s3.createPresignedPost({
@@ -30,7 +36,7 @@ export function createPresignedPost ({ key, type, size }) {
 }

 export async function deleteObjects (keys) {
-  const s3 = new AWS.S3({ apiVersion: '2006-03-01' })
+  const s3 = new AWS.S3(config)
   // max 1000 keys per request
   // see https://docs.aws.amazon.com/cli/latest/reference/s3api/delete-objects.html
   const batchSize = 1000
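For context, a sketch (not part of this diff) of how a client might consume what createPresignedPost returns, assuming the usual aws-sdk v2 response shape of { url, fields }; the helper name and error handling here are made up:

    // upload a file using a presigned POST; runs in the browser or Node 18+
    async function uploadWithPresignedPost ({ url, fields }, file) {
      const form = new FormData()
      // the signed policy fields must be appended before the file entry itself
      Object.entries(fields).forEach(([k, v]) => form.append(k, v))
      form.append('file', file)
      const res = await fetch(url, { method: 'POST', body: form })
      if (!res.ok) throw new Error(`upload failed with status ${res.status}`)
    }

The shared config also keeps s3ForcePathStyle on in development, so the SDK addresses objects as http://localhost:4566/uploads/<key> (path style) instead of a virtual-hosted bucket subdomain, which is awkward to resolve against a local localstack endpoint.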
@@ -68,6 +68,10 @@ function ImageProxy ({ src, srcSet: { dimensions, ...srcSetObj } = {}, onClick,
     if (!srcSetObj) return undefined
     // srcSetObj shape: { [widthDescriptor]: <imgproxyUrl>, ... }
     return Object.entries(srcSetObj).reduce((acc, [wDescriptor, url], i, arr) => {
+      // backwards compatibility: we used to replace image urls with imgproxy urls rather than just storing paths
+      if (!url.startsWith('http')) {
+        url = `${process.env.NEXT_PUBLIC_IMGPROXY_URL}${url}`
+      }
       return acc + `${url} ${wDescriptor}` + (i < arr.length - 1 ? ', ' : '')
     }, '')
   }, [srcSetObj])
@@ -77,6 +81,9 @@ function ImageProxy ({ src, srcSet: { dimensions, ...srcSetObj } = {}, onClick,
   const bestResSrc = useMemo(() => {
     if (!srcSetObj) return src
     return Object.entries(srcSetObj).reduce((acc, [wDescriptor, url]) => {
+      if (!url.startsWith('http')) {
+        url = `${process.env.NEXT_PUBLIC_IMGPROXY_URL}${url}`
+      }
       const w = Number(wDescriptor.replace(/w$/, ''))
       return w > acc.w ? { w, url } : acc
     }, { w: 0, url: undefined }).url
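To illustrate the backwards-compatibility branch above: srcset entries are now stored as bare imgproxy paths and prefixed with NEXT_PUBLIC_IMGPROXY_URL at render time, while old entries that are already full URLs pass through untouched. A small sketch with hypothetical paths and the dev base URL from .env.sample:

    // hypothetical stored entries: bare signed imgproxy paths keyed by width descriptor
    const srcSetObj = {
      '640w': '/sig640/rs:fit:640:480/aHR0cDovL2ltZw',
      '1280w': '/sig1280/rs:fit:1280:960/aHR0cDovL2ltZw'
    }
    const base = 'http://localhost:3001' // assumed NEXT_PUBLIC_IMGPROXY_URL
    const srcSet = Object.entries(srcSetObj)
      .map(([w, path]) => `${path.startsWith('http') ? path : base + path} ${w}`)
      .join(', ')
    console.log(srcSet)
    // http://localhost:3001/sig640/rs:fit:640:480/aHR0cDovL2ltZw 640w, http://localhost:3001/sig1280/rs:fit:1280:960/aHR0cDovL2ltZw 1280w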
@@ -100,6 +100,8 @@ services:
       - .env.development
     ports:
       - "3001:8080"
+    expose:
+      - "8080"
     labels:
       - "CONNECT=localhost:3001"
   s3:
@@ -118,6 +120,8 @@ services:
       - DEBUG=1
     ports:
       - "4566:4566"
+    expose:
+      - "4566"
     volumes:
       - 's3:/var/lib/localstack'
       - './docker/s3/init-s3.sh:/etc/localstack/init/ready.d/init-s3.sh'
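The expose entries added here advertise the container-internal ports (8080 on imgproxy, 4566 on localstack) to the other services on the compose network, which the app and worker now reach as imgproxy:8080 and s3:4566 via IMGPROXY_URL_DOCKER and MEDIA_URL_DOCKER, while the existing ports mappings keep 3001 and 4566 published on the host for the browser.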
@@ -10,8 +10,8 @@ export const BOOST_MIN = BOOST_MULT * 5
 export const UPLOAD_SIZE_MAX = 25 * 1024 * 1024
 export const UPLOAD_SIZE_MAX_AVATAR = 5 * 1024 * 1024
 export const IMAGE_PIXELS_MAX = 35000000
-// backwards compatible with old media domain env var
-export const MEDIA_URL = process.env.NEXT_PUBLIC_MEDIA_URL || `https://${process.env.NEXT_PUBLIC_MEDIA_DOMAIN}`
+// backwards compatible with old media domain env var and precedence for docker url if set
+export const MEDIA_URL = process.env.MEDIA_URL_DOCKER || process.env.NEXT_PUBLIC_MEDIA_URL || `https://${process.env.NEXT_PUBLIC_MEDIA_DOMAIN}`
 export const AWS_S3_URL_REGEXP = new RegExp(`${MEDIA_URL}/([0-9]+)`, 'g')
 export const UPLOAD_TYPES_ALLOW = [
   'image/gif',
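A quick sketch (not from the repo) of how AWS_S3_URL_REGEXP can be applied, assuming MEDIA_URL resolves to the development value from .env.sample; the numeric capture group is the key that follows the media base URL:

    const MEDIA_URL = 'http://localhost:4566/uploads' // assumed dev value
    const AWS_S3_URL_REGEXP = new RegExp(`${MEDIA_URL}/([0-9]+)`, 'g')
    const text = 'see http://localhost:4566/uploads/101 and http://localhost:4566/uploads/102'
    const ids = [...text.matchAll(AWS_S3_URL_REGEXP)].map(m => m[1])
    console.log(ids) // [ '101', '102' ]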
@@ -10,7 +10,7 @@ if (!imgProxyEnabled) {
   console.warn('IMGPROXY_* env vars not set, imgproxy calls are no-ops now')
 }

-const IMGPROXY_URL = process.env.NEXT_PUBLIC_IMGPROXY_URL
+const IMGPROXY_URL = process.env.IMGPROXY_URL_DOCKER || process.env.NEXT_PUBLIC_IMGPROXY_URL
 const IMGPROXY_SALT = process.env.IMGPROXY_SALT
 const IMGPROXY_KEY = process.env.IMGPROXY_KEY

@@ -89,6 +89,12 @@ export const createImgproxyUrls = async (id, text, { models, forceFetch }) => {
   const imgproxyUrls = {}
   for (let url of urls) {
     if (!url) continue
+    let fetchUrl = url
+    if (process.env.MEDIA_URL_DOCKER) {
+      console.log('[imgproxy] id:', id, '-- replacing media url:', url)
+      fetchUrl = url.replace(process.env.NEXT_PUBLIC_MEDIA_URL, process.env.MEDIA_URL_DOCKER)
+      console.log('[imgproxy] id:', id, '-- with:', fetchUrl)
+    }

     console.log('[imgproxy] id:', id, '-- processing url:', url)
     if (url.startsWith(IMGPROXY_URL)) {
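The fetchUrl indirection above appears to exist because this code runs inside a container: posts store URLs built from the browser-reachable NEXT_PUBLIC_MEDIA_URL (localhost:4566), which is not resolvable from inside docker, so the worker swaps in MEDIA_URL_DOCKER (s3:4566) for fetching while keeping the original url as the key it returns.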
@@ -97,17 +103,17 @@ export const createImgproxyUrls = async (id, text, { models, forceFetch }) => {
       url = decodeOriginalUrl(url)
       console.log('[imgproxy] id:', id, '-- original url:', url)
     }
-    if (!(await isImageURL(url, { forceFetch }))) {
+    if (!(await isImageURL(fetchUrl, { forceFetch }))) {
       console.log('[imgproxy] id:', id, '-- not image url:', url)
       continue
     }
     imgproxyUrls[url] = {
-      dimensions: await getDimensions(url)
+      dimensions: await getDimensions(fetchUrl)
     }
     for (const res of resolutions) {
       const [w, h] = res.split('x')
       const processingOptions = `/rs:fit:${w}:${h}`
-      imgproxyUrls[url][`${w}w`] = createImgproxyUrl({ url, options: processingOptions })
+      imgproxyUrls[url][`${w}w`] = createImgproxyPath({ url: fetchUrl, options: processingOptions })
     }
   }
   return imgproxyUrls
@@ -115,17 +121,17 @@ export const createImgproxyUrls = async (id, text, { models, forceFetch }) => {

 const getDimensions = async (url) => {
   const options = '/d:1'
-  const imgproxyUrl = createImgproxyUrl({ url, options, pathname: 'info' })
+  const imgproxyUrl = new URL(createImgproxyPath({ url, options, pathname: '/info' }), IMGPROXY_URL).toString()
   const res = await fetch(imgproxyUrl)
   const { width, height } = await res.json()
   return { width, height }
 }

-const createImgproxyUrl = ({ url, pathname = '', options }) => {
+const createImgproxyPath = ({ url, pathname = '/', options }) => {
   const b64Url = Buffer.from(url, 'utf-8').toString('base64url')
   const target = path.join(options, b64Url)
   const signature = sign(target)
-  return new URL(path.join(pathname, signature, target), IMGPROXY_URL).toString()
+  return path.join(pathname, signature, target)
 }

 async function fetchWithTimeout (resource, { timeout = 1000, ...options } = {}) {
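createImgproxyPath now returns only the signed path (pathname, signature, processing options, base64url-encoded source URL), and callers join it onto a base themselves: getDimensions uses IMGPROXY_URL here, and the image component uses NEXT_PUBLIC_IMGPROXY_URL in the browser. A sketch of imgproxy's documented signing scheme, which the sign() helper used above presumably implements; treating IMGPROXY_KEY and IMGPROXY_SALT as hex strings matches the values in .env.sample:

    import crypto from 'node:crypto'

    // imgproxy signatures are HMAC-SHA256 over salt + path, keyed with the
    // binary key, and base64url-encoded
    const hexDecode = (hex) => Buffer.from(hex, 'hex')

    export function sign (target) {
      const hmac = crypto.createHmac('sha256', hexDecode(process.env.IMGPROXY_KEY))
      hmac.update(hexDecode(process.env.IMGPROXY_SALT))
      hmac.update(target) // e.g. '/rs:fit:640:480/' + base64url(source url)
      return hmac.digest('base64url')
    }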