Images v2 (#513)

ekzyis 2023-10-02 01:03:52 +02:00 committed by GitHub
parent 1f0a4e0729
commit b2b38d8924
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
20 changed files with 747 additions and 222 deletions

View File

@ -71,8 +71,10 @@ OPENSEARCH_PASSWORD=
# imgproxy options
IMGPROXY_ENABLE_WEBP_DETECTION=1
IMGPROXY_MAX_ANIMATION_FRAMES=100
IMGPROXY_MAX_SRC_RESOLUTION=200
IMGPROXY_ENABLE_AVIF_DETECTION=1
IMGPROXY_MAX_ANIMATION_FRAMES=2000
IMGPROXY_MAX_SRC_RESOLUTION=50
IMGPROXY_MAX_ANIMATION_FRAME_RESOLUTION=200
IMGPROXY_READ_TIMEOUT=10
IMGPROXY_WRITE_TIMEOUT=10
IMGPROXY_DOWNLOAD_TIMEOUT=9
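
The key and salt are hex-encoded (the signing code later in this diff decodes them with hexDecode before computing the HMAC). A minimal sketch for generating a compatible pair, assuming only node:crypto; the 64-byte length here is an arbitrary choice, not something this commit prescribes:

const { randomBytes } = require('node:crypto')

// print hex-encoded values suitable for the env vars above
console.log('IMGPROXY_KEY=' + randomBytes(64).toString('hex'))
console.log('IMGPROXY_SALT=' + randomBytes(64).toString('hex'))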

View File

@ -1,69 +0,0 @@
import { createHmac } from 'node:crypto'
import { extractUrls } from '../../../lib/md'
const imgProxyEnabled = process.env.NODE_ENV === 'production' ||
(process.env.NEXT_PUBLIC_IMGPROXY_URL && process.env.IMGPROXY_SALT && process.env.IMGPROXY_KEY)
if (!imgProxyEnabled) {
console.warn('IMGPROXY_* env vars not set, imgproxy calls are no-ops now')
}
const IMGPROXY_URL = process.env.NEXT_PUBLIC_IMGPROXY_URL
const IMGPROXY_SALT = process.env.IMGPROXY_SALT
const IMGPROXY_KEY = process.env.IMGPROXY_KEY
const hexDecode = (hex) => Buffer.from(hex, 'hex')
const sign = (target) => {
// https://github.com/imgproxy/imgproxy/blob/master/examples/signature.js
const hmac = createHmac('sha256', hexDecode(IMGPROXY_KEY))
hmac.update(hexDecode(IMGPROXY_SALT))
hmac.update(target)
return hmac.digest('base64url')
}
const createImageProxyUrl = url => {
const processingOptions = '/rs:fit:600:500:0/g:no'
const b64Url = Buffer.from(url, 'utf-8').toString('base64url')
const target = `${processingOptions}/${b64Url}`
const signature = sign(target)
return `${IMGPROXY_URL}${signature}${target}`
}
async function fetchWithTimeout (resource, { timeout = 1000, ...options } = {}) {
const controller = new AbortController()
const id = setTimeout(() => controller.abort(), timeout)
const response = await fetch(resource, {
...options,
signal: controller.signal
})
clearTimeout(id)
return response
}
const isImageURL = async url => {
// https://stackoverflow.com/a/68118683
try {
const res = await fetchWithTimeout(url, { method: 'HEAD' })
const buf = await res.blob()
return buf.type.startsWith('image/')
} catch (err) {
console.log(url, err)
return false
}
}
export const proxyImages = async text => {
if (!imgProxyEnabled) return text
const urls = extractUrls(text)
for (const url of urls) {
if (url.startsWith(IMGPROXY_URL)) continue
if (!(await isImageURL(url))) continue
const proxyUrl = createImageProxyUrl(url)
text = text.replaceAll(url, proxyUrl)
}
return text
}

View File

@ -14,7 +14,6 @@ import { parse } from 'tldts'
import uu from 'url-unshort'
import { advSchema, amountSchema, bountySchema, commentSchema, discussionSchema, jobSchema, linkSchema, pollSchema, ssValidate } from '../../lib/validate'
import { sendUserNotification } from '../webPush'
import { proxyImages } from './imgproxy'
import { defaultCommentSort } from '../../lib/item'
import { notifyItemParents, notifyUserSubscribers, notifyZapped } from '../../lib/push-notifications'
@ -1019,13 +1018,9 @@ export const updateItem = async (parent, { sub: subName, forward, options, ...it
throw new GraphQLError('item can no longer be edited', { extensions: { code: 'BAD_INPUT' } })
}
if (item.text) {
item.text = await proxyImages(item.text)
}
if (item.url && typeof item.maxBid === 'undefined') {
item.url = ensureProtocol(item.url)
item.url = removeTracking(item.url)
item.url = await proxyImages(item.url)
}
// only update item with the boost delta ... this is a bit of hack given the way
// boost used to work
@ -1063,13 +1058,9 @@ export const createItem = async (parent, { forward, options, ...item }, { me, mo
item.userId = me ? Number(me.id) : ANON_USER_ID
const fwdUsers = await getForwardUsers(models, forward)
if (item.text) {
item.text = await proxyImages(item.text)
}
if (item.url && typeof item.maxBid === 'undefined') {
item.url = ensureProtocol(item.url)
item.url = removeTracking(item.url)
item.url = await proxyImages(item.url)
}
const enforceFee = me ? undefined : (item.parentId ? ANON_COMMENT_FEE : (ANON_POST_FEE + (item.boost || 0)))
@ -1113,7 +1104,7 @@ export const SELECT =
"Item"."subName", "Item".status, "Item"."uploadId", "Item"."pollCost", "Item".boost, "Item".msats,
"Item".ncomments, "Item"."commentMsats", "Item"."lastCommentAt", "Item"."weightedVotes",
"Item"."weightedDownVotes", "Item".freebie, "Item"."otsHash", "Item"."bountyPaidTo",
ltree2text("Item"."path") AS "path", "Item"."weightedComments"`
ltree2text("Item"."path") AS "path", "Item"."weightedComments", "Item"."imgproxyUrls"`
async function topOrderByWeightedSats (me, models) {
return `ORDER BY ${await orderByNumerator(me, models)} DESC NULLS LAST, "Item".id DESC`

View File

@ -114,6 +114,7 @@ export default gql`
otsHash: String
parentOtsHash: String
forwards: [ItemForward]
imgproxyUrls: JSONObject
}
input ItemForwardInput {

View File

@ -208,7 +208,7 @@ export default function Comment ({
)
: (
<div className={styles.text}>
<Text topLevel={topLevel} nofollow={item.sats + item.boost < NOFOLLOW_LIMIT}>
<Text topLevel={topLevel} nofollow={item.sats + item.boost < NOFOLLOW_LIMIT} imgproxyUrls={item.imgproxyUrls}>
{truncate ? truncateString(item.text) : item.searchText || item.text}
</Text>
</div>

View File

@ -167,7 +167,7 @@ export function MarkdownInput ({ label, topLevel, groupClassName, onChange, setH
: (
<div className='form-group'>
<div className={`${styles.text} form-control`}>
<Text topLevel={topLevel} noFragments fetchOnlyImgProxy={false}>{meta.value}</Text>
<Text topLevel={topLevel} noFragments>{meta.value}</Text>
</div>
</div>
)}

components/image.js (new file, 221 lines)

File diff suppressed because one or more lines are too long
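
Since this file's diff is suppressed, here is a hedged sketch of how ZoomableImage might turn the srcSet object from `imgproxyUrls` into an `<img srcSet>` attribute. Only the exported names (useImgUrlCache, IMG_CACHE_STATES, ZoomableImage, decodeOriginalUrl) and the srcSet shape are visible in this commit, so the conversion logic below is an assumption:

// sketch only: convert { '640w': url, ... } into a srcSet attribute string
function toSrcSetAttr (srcSet) {
  // undefined: not processed by the worker yet; null: processed but not an image
  if (!srcSet) return undefined
  return Object.entries(srcSet).map(([w, url]) => `${url} ${w}`).join(', ')
}
// toSrcSetAttr({ '640w': 'https://p/abc', '960w': 'https://p/def' })
// => 'https://p/abc 640w, https://p/def 960w'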

View File

@ -165,7 +165,7 @@ function TopLevelItem ({ item, noReply, ...props }) {
}
function ItemText ({ item }) {
return <Text topLevel nofollow={item.sats + item.boost < NOFOLLOW_LIMIT}>{item.searchText || item.text}</Text>
return <Text topLevel nofollow={item.sats + item.boost < NOFOLLOW_LIMIT} imgproxyUrls={item.imgproxyUrls}>{item.searchText || item.text}</Text>
}
export default function ItemFull ({ item, bio, rank, ...props }) {

View File

@ -1,6 +1,8 @@
import { createContext, useCallback, useContext, useMemo, useState } from 'react'
import { createContext, useCallback, useContext, useEffect, useMemo, useState } from 'react'
import Modal from 'react-bootstrap/Modal'
import BackArrow from '../svgs/arrow-left-line.svg'
import { useRouter } from 'next/router'
import ActionDropdown from './action-dropdown'
export const ShowModalContext = createContext(() => null)
@ -37,19 +39,38 @@ export default function useModal () {
const onClose = useCallback(() => {
setModalContent(null)
setModalStack([])
}, [])
modalOptions?.onClose?.()
}, [modalOptions?.onClose])
const router = useRouter()
useEffect(() => {
router.events.on('routeChangeStart', onClose)
return () => router.events.off('routeChangeStart', onClose)
}, [router, onClose])
const modal = useMemo(() => {
if (modalContent === null) {
return null
}
const className = modalOptions?.fullScreen ? 'fullscreen' : ''
return (
<Modal onHide={modalOptions?.keepOpen ? null : onClose} show={!!modalContent}>
<Modal
onHide={modalOptions?.keepOpen ? null : onClose} show={!!modalContent}
className={className}
dialogClassName={className}
contentClassName={className}
>
<div className='d-flex flex-row'>
{modalOptions?.overflow &&
<div className={'modal-btn modal-overflow ' + className}>
<ActionDropdown>
{modalOptions.overflow}
</ActionDropdown>
</div>}
{modalStack.length > 0 ? <div className='modal-btn modal-back' onClick={onBack}><BackArrow width={18} height={18} className='fill-white' /></div> : null}
<div className='modal-btn modal-close' onClick={onClose}>X</div>
<div className={'modal-btn modal-close ' + className} onClick={onClose}>X</div>
</div>
<Modal.Body>
<Modal.Body className={className}>
{modalContent}
</Modal.Body>
</Modal>
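
A hedged usage sketch of the new modal options; the `useShowModal` consumer hook is assumed here, since only the context, provider, and option handling are visible in this diff:

// sketch: open a fullscreen modal; onClose now also fires on route change
const showModal = useShowModal() // assumed hook reading ShowModalContext
showModal(onClose => (
  <img src={url} style={{ maxWidth: '100%' }} onClick={onClose} />
), { fullScreen: true, onClose: () => console.log('modal closed') })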

View File

@ -8,16 +8,14 @@ import sub from '../lib/remark-sub'
import remarkDirective from 'remark-directive'
import { visit } from 'unist-util-visit'
import reactStringReplace from 'react-string-replace'
import React, { useRef, useEffect, useState, memo } from 'react'
import React, { useState, memo } from 'react'
import GithubSlugger from 'github-slugger'
import LinkIcon from '../svgs/link.svg'
import Thumb from '../svgs/thumb-up-fill.svg'
import { toString } from 'mdast-util-to-string'
import copy from 'clipboard-copy'
import { IMGPROXY_URL_REGEXP, IMG_URL_REGEXP } from '../lib/url'
import { extractUrls } from '../lib/md'
import FileMissing from '../svgs/file-warning-line.svg'
import { useMe } from './me'
import { useImgUrlCache, IMG_CACHE_STATES, ZoomableImage, decodeOriginalUrl } from './image'
import { IMGPROXY_URL_REGEXP } from '../lib/url'
function searchHighlighter () {
return (tree) => {
@ -36,15 +34,6 @@ function searchHighlighter () {
}
}
function decodeOriginalUrl (imgProxyUrl) {
const parts = imgProxyUrl.split('/')
// base64url is not a known encoding in browsers
// so we need to replace the invalid chars
const b64Url = parts[parts.length - 1].replace(/-/g, '+').replace(/_/g, '/')
const originalUrl = Buffer.from(b64Url, 'base64').toString('utf-8')
return originalUrl
}
function Heading ({ h, slugger, noFragments, topLevel, children, node, ...props }) {
const [copied, setCopied] = useState(false)
const [id] = useState(noFragments ? undefined : slugger.slug(toString(node).replace(/[^\w\-\s]+/gi, '')))
@ -73,54 +62,14 @@ function Heading ({ h, slugger, noFragments, topLevel, children, node, ...props
)
}
const CACHE_STATES = {
IS_LOADING: 'IS_LOADING',
IS_LOADED: 'IS_LOADED',
IS_ERROR: 'IS_ERROR'
}
// this is one of the slowest components to render
export default memo(function Text ({ topLevel, noFragments, nofollow, fetchOnlyImgProxy, children }) {
export default memo(function Text ({ topLevel, noFragments, nofollow, imgproxyUrls, children }) {
// all the reactStringReplace calls are to facilitate search highlighting
const slugger = new GithubSlugger()
fetchOnlyImgProxy ??= true
const HeadingWrapper = (props) => Heading({ topLevel, slugger, noFragments, ...props })
const imgCache = useRef({})
const [urlCache, setUrlCache] = useState({})
useEffect(() => {
const imgRegexp = fetchOnlyImgProxy ? IMGPROXY_URL_REGEXP : IMG_URL_REGEXP
const urls = extractUrls(children)
urls.forEach((url) => {
if (imgRegexp.test(url)) {
setUrlCache((prev) => ({ ...prev, [url]: CACHE_STATES.IS_LOADED }))
} else if (!fetchOnlyImgProxy) {
const img = new window.Image()
imgCache.current[url] = img
setUrlCache((prev) => ({ ...prev, [url]: CACHE_STATES.IS_LOADING }))
const callback = (state) => {
setUrlCache((prev) => ({ ...prev, [url]: state }))
delete imgCache.current[url]
}
img.onload = () => callback(CACHE_STATES.IS_LOADED)
img.onerror = () => callback(CACHE_STATES.IS_ERROR)
img.src = url
}
})
return () => {
Object.values(imgCache.current).forEach((img) => {
img.onload = null
img.onerror = null
img.src = ''
})
}
}, [children])
const imgUrlCache = useImgUrlCache(children, imgproxyUrls)
return (
<div className={styles.text}>
@ -159,8 +108,12 @@ export default memo(function Text ({ topLevel, noFragments, nofollow, fetchOnlyI
return <>{children}</>
}
if (urlCache[href] === CACHE_STATES.IS_LOADED) {
return <ZoomableImage topLevel={topLevel} useClickToLoad={fetchOnlyImgProxy} {...props} src={href} />
if (imgUrlCache[href] === IMG_CACHE_STATES.LOADED) {
const url = IMGPROXY_URL_REGEXP.test(href) ? decodeOriginalUrl(href) : href
// if `srcSet` is undefined, the image was not processed by the worker yet
// if `srcSet` is null, the image was processed but this specific url was not detected as an image by the worker
const srcSet = imgproxyUrls ? (imgproxyUrls[url] || null) : undefined
return <ZoomableImage topLevel={topLevel} srcSet={srcSet} {...props} src={href} />
}
// map: fix any highlighted links
@ -183,8 +136,12 @@ export default memo(function Text ({ topLevel, noFragments, nofollow, fetchOnlyI
</a>
)
},
img: ({ node, ...props }) => {
return <ZoomableImage topLevel={topLevel} useClickToLoad={fetchOnlyImgProxy} {...props} />
img: ({ node, src, ...props }) => {
const url = IMGPROXY_URL_REGEXP.test(src) ? decodeOriginalUrl(src) : src
// if `srcSet` is undefined, the image was not processed by the worker yet
// if `srcSet` is null, the image was processed but this specific url was not detected as an image by the worker
const srcSet = imgproxyUrls ? (imgproxyUrls[url] || null) : undefined
return <ZoomableImage topLevel={topLevel} srcSet={srcSet} src={src} {...props} />
}
}}
remarkPlugins={[gfm, mention, sub, remarkDirective, searchHighlighter]}
@ -194,76 +151,3 @@ export default memo(function Text ({ topLevel, noFragments, nofollow, fetchOnlyI
</div>
)
})
function ClickToLoad ({ children }) {
const [clicked, setClicked] = useState(false)
return clicked ? children : <div className='m-1 fst-italic pointer text-muted' onClick={() => setClicked(true)}>click to load image</div>
}
export function ZoomableImage ({ src, topLevel, useClickToLoad, ...props }) {
const me = useMe()
const [err, setErr] = useState()
const [imgSrc, setImgSrc] = useState(src)
const [isImgProxy, setIsImgProxy] = useState(IMGPROXY_URL_REGEXP.test(src))
const defaultMediaStyle = {
maxHeight: topLevel ? '75vh' : '25vh',
cursor: 'zoom-in'
}
useClickToLoad ??= true
// if image changes we need to update state
const [mediaStyle, setMediaStyle] = useState(defaultMediaStyle)
useEffect(() => {
setMediaStyle(defaultMediaStyle)
setErr(null)
}, [src])
if (!src) return null
if (err) {
if (!isImgProxy) {
return (
<span className='d-flex align-items-baseline text-warning-emphasis fw-bold pb-1'>
<FileMissing width={18} height={18} className='fill-warning me-1 align-self-center' />
image error
</span>
)
}
try {
const originalUrl = decodeOriginalUrl(src)
setImgSrc(originalUrl)
setErr(null)
} catch (err) {
console.error(err)
setErr(err)
}
// always set to false since imgproxy returned error
setIsImgProxy(false)
}
const img = (
<img
className={topLevel ? styles.topLevel : undefined}
style={mediaStyle}
src={imgSrc}
onClick={() => {
if (mediaStyle.cursor === 'zoom-in') {
setMediaStyle({
width: '100%',
cursor: 'zoom-out'
})
} else {
setMediaStyle(defaultMediaStyle)
}
}}
onError={() => setErr(true)}
{...props}
/>
)
return (
(!me || !me.clickToLoadImg || isImgProxy || !useClickToLoad)
? img
: <ClickToLoad>{img}</ClickToLoad>
)
}

View File

@ -30,6 +30,7 @@ export const COMMENT_FIELDS = gql`
mine
otsHash
ncomments
imgproxyUrls
}
`

View File

@ -45,6 +45,7 @@ export const ITEM_FIELDS = gql`
status
uploadId
mine
imgproxyUrls
}`
export const ITEM_FULL_FIELDS = gql`

View File

@ -0,0 +1,175 @@
-- AlterTable
ALTER TABLE "Item" ADD COLUMN "imgproxyUrls" JSONB;
-- schedule imgproxy job
CREATE OR REPLACE FUNCTION create_item(
jitem JSONB, forward JSONB, poll_options JSONB, spam_within INTERVAL)
RETURNS "Item"
LANGUAGE plpgsql
AS $$
DECLARE
user_msats BIGINT;
cost_msats BIGINT;
freebie BOOLEAN;
item "Item";
med_votes FLOAT;
select_clause TEXT;
BEGIN
PERFORM ASSERT_SERIALIZED();
-- access fields with appropriate types
item := jsonb_populate_record(NULL::"Item", jitem);
SELECT msats INTO user_msats FROM users WHERE id = item."userId";
IF item."maxBid" IS NOT NULL THEN
cost_msats := 1000000;
ELSE
cost_msats := 1000 * POWER(10, item_spam(item."parentId", item."userId", spam_within));
END IF;
-- it's only a freebie if it's a 1 sat cost, they have < 1 sat, and boost = 0
freebie := (cost_msats <= 1000) AND (user_msats < 1000) AND (item.boost = 0);
IF NOT freebie AND cost_msats > user_msats THEN
RAISE EXCEPTION 'SN_INSUFFICIENT_FUNDS';
END IF;
-- get this user's median item score
SELECT COALESCE(
percentile_cont(0.5) WITHIN GROUP(
ORDER BY "weightedVotes" - "weightedDownVotes"), 0)
INTO med_votes FROM "Item" WHERE "userId" = item."userId";
-- if their median votes are positive, start at 0
-- if the median votes are negative, start their post with that many down votes
-- basically: if their median post is bad, presume this post is too
-- addendum: if they're an anon poster, always start at 0
IF med_votes >= 0 OR item."userId" = 27 THEN
med_votes := 0;
ELSE
med_votes := ABS(med_votes);
END IF;
-- there's no great way to set default column values when using json_populate_record
-- so we need to only select fields with non-null values that way when func input
-- does not include a value, the default value is used instead of null
SELECT string_agg(quote_ident(key), ',') INTO select_clause
FROM jsonb_object_keys(jsonb_strip_nulls(jitem)) k(key);
-- insert the item
EXECUTE format($fmt$
INSERT INTO "Item" (%s, "weightedDownVotes")
SELECT %1$s, %L
FROM jsonb_populate_record(NULL::"Item", %L) RETURNING *
$fmt$, select_clause, med_votes, jitem) INTO item;
INSERT INTO "ItemForward" ("itemId", "userId", "pct")
SELECT item.id, "userId", "pct" FROM jsonb_populate_recordset(NULL::"ItemForward", forward);
-- Automatically subscribe forward recipients to the new post
INSERT INTO "ThreadSubscription" ("itemId", "userId")
SELECT item.id, "userId" FROM jsonb_populate_recordset(NULL::"ItemForward", forward);
INSERT INTO "PollOption" ("itemId", "option")
SELECT item.id, "option" FROM jsonb_array_elements_text(poll_options) o("option");
IF NOT freebie THEN
UPDATE users SET msats = msats - cost_msats WHERE id = item."userId";
INSERT INTO "ItemAct" (msats, "itemId", "userId", act)
VALUES (cost_msats, item.id, item."userId", 'FEE');
END IF;
-- if this item has boost
IF item.boost > 0 THEN
PERFORM item_act(item.id, item."userId", 'BOOST', item.boost);
END IF;
-- if this is a job
IF item."maxBid" IS NOT NULL THEN
PERFORM run_auction(item.id);
END IF;
-- if this is a bio
IF item.bio THEN
UPDATE users SET "bioId" = item.id WHERE id = item."userId";
END IF;
-- schedule imgproxy job
INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter)
VALUES ('imgproxy', jsonb_build_object('id', item.id), 21, true, now() + interval '5 seconds');
RETURN item;
END;
$$;
-- schedule imgproxy job
CREATE OR REPLACE FUNCTION update_item(
jitem JSONB, forward JSONB, poll_options JSONB)
RETURNS "Item"
LANGUAGE plpgsql
AS $$
DECLARE
user_msats INTEGER;
item "Item";
select_clause TEXT;
BEGIN
PERFORM ASSERT_SERIALIZED();
item := jsonb_populate_record(NULL::"Item", jitem);
IF item.boost > 0 THEN
UPDATE "Item" SET boost = boost + item.boost WHERE id = item.id;
PERFORM item_act(item.id, item."userId", 'BOOST', item.boost);
END IF;
IF item.status IS NOT NULL THEN
UPDATE "Item" SET "statusUpdatedAt" = now_utc()
WHERE id = item.id AND status <> item.status;
END IF;
SELECT string_agg(quote_ident(key), ',') INTO select_clause
FROM jsonb_object_keys(jsonb_strip_nulls(jitem)) k(key)
WHERE key <> 'boost';
EXECUTE format($fmt$
UPDATE "Item" SET (%s) = (
SELECT %1$s
FROM jsonb_populate_record(NULL::"Item", %L)
) WHERE id = %L RETURNING *
$fmt$, select_clause, jitem, item.id) INTO item;
-- Delete any old thread subs if the user is no longer a fwd recipient
DELETE FROM "ThreadSubscription"
WHERE "itemId" = item.id
-- they aren't in the new forward list
AND NOT EXISTS (SELECT 1 FROM jsonb_populate_recordset(NULL::"ItemForward", forward) as nf WHERE "ThreadSubscription"."userId" = nf."userId")
-- and they are in the old forward list
AND EXISTS (SELECT 1 FROM "ItemForward" WHERE "ItemForward"."itemId" = item.id AND "ItemForward"."userId" = "ThreadSubscription"."userId" );
-- Automatically subscribe any new forward recipients to the post
INSERT INTO "ThreadSubscription" ("itemId", "userId")
SELECT item.id, "userId" FROM jsonb_populate_recordset(NULL::"ItemForward", forward)
EXCEPT
SELECT item.id, "userId" FROM "ItemForward" WHERE "itemId" = item.id;
-- Delete all old forward entries, to recreate in next command
DELETE FROM "ItemForward" WHERE "itemId" = item.id;
INSERT INTO "ItemForward" ("itemId", "userId", "pct")
SELECT item.id, "userId", "pct" FROM jsonb_populate_recordset(NULL::"ItemForward", forward);
INSERT INTO "PollOption" ("itemId", "option")
SELECT item.id, "option" FROM jsonb_array_elements_text(poll_options) o("option");
-- if this is a job
IF item."maxBid" IS NOT NULL THEN
PERFORM run_auction(item.id);
END IF;
-- schedule imgproxy job
INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter)
VALUES ('imgproxy', jsonb_build_object('id', item.id), 21, true, now() + interval '5 seconds');
RETURN item;
END;
$$;
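
The raw INSERT into pgboss.job mirrors what pg-boss would enqueue from JavaScript; a hedged equivalent using the pg-boss API (the `boss` instance comes from worker/index.js; option names per pg-boss):

// sketch: schedule the same job via pg-boss instead of raw SQL
await boss.send('imgproxy', { id: item.id }, {
  retryLimit: 21, // matches retrylimit in the migration
  retryBackoff: true, // matches retrybackoff
  startAfter: 5 // seconds; matches now() + interval '5 seconds'
})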

View File

@ -264,6 +264,7 @@ model Item {
deletedAt DateTime?
otsFile Bytes?
otsHash String?
imgproxyUrls Json?
bounty Int?
rootId Int?
bountyPaidTo Int[]

Binary file not shown (new image, 9.0 KiB)

Binary file not shown (new image, 6.3 KiB)

scripts/imgproxy.js (new file, 71 lines)
View File

@ -0,0 +1,71 @@
const { PrismaClient, Prisma } = require('@prisma/client')
const prisma = new PrismaClient()
const imgProxyEnabled = process.env.NODE_ENV === 'production' ||
(process.env.NEXT_PUBLIC_IMGPROXY_URL && process.env.IMGPROXY_SALT && process.env.IMGPROXY_KEY)
if (!imgProxyEnabled) {
console.warn('IMGPROXY_* env vars must be set')
process.exit(1)
}
// queue size determines how many items can be processed at the same time.
// this is roughly equivalent to how many requests should be in flight.
// if the queue is too large, we might run out of memory and too many requests may fail due to timeouts.
const MAX_QUEUE = 1000
async function main () {
console.log('starting imgproxy job', Prisma.AnyNull)
const { createImgproxyUrls } = await import('../worker/imgproxy.js')
let cursor = 1
try {
while (true) {
const r = await prisma.item.findMany({
take: MAX_QUEUE,
skip: 1, // Skip the cursor
cursor: {
id: cursor
},
where: {
imgproxyUrls: {
equals: Prisma.AnyNull
}
},
orderBy: {
id: 'asc'
}
})
if (r.length === 0) {
break
}
cursor = r[r.length - 1].id
for (const { id, ...item } of r) {
const isJob = typeof item.maxBid !== 'undefined'
let imgproxyUrls = {}
try {
if (item.text) {
imgproxyUrls = await createImgproxyUrls(id, item.text, {})
}
if (item.url && !isJob) {
imgproxyUrls = { ...imgproxyUrls, ...(await createImgproxyUrls(id, item.url, {})) }
}
} catch (err) {
console.log('[imgproxy] error:', err)
// rethrow for retry
throw err
}
console.log('[imgproxy] updating item', id, 'with urls', imgproxyUrls)
// persist the generated urls (assumed to mirror worker/imgproxy.js, which does the same)
await prisma.item.update({ where: { id }, data: { imgproxyUrls } })
}
}
} catch (err) {
console.error(err)
}
}
main()
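
Usage is not documented in this commit; presumably the backfill runs directly under Node with the same env vars as the worker (hypothetical invocation):

// node scripts/imgproxy.js
// processes items in batches of MAX_QUEUE where imgproxyUrls is still null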

View File

@ -193,6 +193,35 @@ $btn-close-bg: none;
margin-bottom: 0 !important;
}
.modal.fullscreen {
padding-right: 0;
background-color: color-mix(in srgb, var(--theme-inputBg) 66%, transparent);
}
.modal-dialog.fullscreen {
margin: 0;
max-width: 100vw;
max-height: 100vh;
max-height: 100svh;
background-color: transparent;
}
.modal-content.fullscreen {
border: none;
background-color: transparent;
}
.modal-body.fullscreen {
width: 100%;
height: calc(100svh - 6rem);
padding: 0;
background-color: transparent;
}
.modal-close.fullscreen {
padding: 1.25rem;
}
.modal-overflow.fullscreen {
padding: 1.25rem;
margin-top: -10px;
}
.modal-btn {
cursor: pointer;
display: flex;

worker/imgproxy.js (new file, 194 lines)
View File

@ -0,0 +1,194 @@
import { createHmac } from 'node:crypto'
import { extractUrls } from '../lib/md.js'
const imgProxyEnabled = process.env.NODE_ENV === 'production' ||
(process.env.NEXT_PUBLIC_IMGPROXY_URL && process.env.IMGPROXY_SALT && process.env.IMGPROXY_KEY)
if (!imgProxyEnabled) {
console.warn('IMGPROXY_* env vars not set, imgproxy calls are no-ops now')
}
const IMGPROXY_URL = process.env.NEXT_PUBLIC_IMGPROXY_URL
const IMGPROXY_SALT = process.env.IMGPROXY_SALT
const IMGPROXY_KEY = process.env.IMGPROXY_KEY
const cache = new Map()
// based on heuristics. see https://stacker.news/items/266838
const imageUrlMatchers = [
u => u.host === 'i.postimg.cc',
u => u.host === 'pbs.twimg.com',
u => u.host === 'i.ibb.co',
u => u.host === 'nostr.build' || u.host === 'cdn.nostr.build',
u => u.host === 'www.zapread.com' && u.pathname.startsWith('/i'),
u => u.host === 'i.imgflip.com',
u => u.host === 'i.redd.it',
u => u.host === 'media.tenor.com',
u => u.host === 'i.imgur.com'
]
const exclude = [
u => u.protocol === 'mailto:',
u => u.host.endsWith('.onion') || u.host.endsWith('.b32.i2p') || u.host.endsWith('.loki'),
u => ['twitter.com', 'x.com', 'nitter.it', 'nitter.at'].some(h => h === u.host),
u => u.host === 'stacker.news',
u => u.host === 'news.ycombinator.com',
u => u.host === 'www.youtube.com' || u.host === 'youtu.be',
u => u.host === 'github.com'
]
function matchUrl (matchers, url) {
try {
return matchers.some(matcher => matcher(new URL(url)))
} catch (err) {
console.log(url, err)
return false
}
}
function decodeOriginalUrl (imgproxyUrl) {
const parts = imgproxyUrl.split('/')
const b64Url = parts[parts.length - 1]
const originalUrl = Buffer.from(b64Url, 'base64url').toString('utf-8')
return originalUrl
}
export function imgproxy ({ models }) {
return async function ({ data: { id, forceFetch = false } }) {
if (!imgProxyEnabled) return
console.log('running imgproxy job', id)
const item = await models.item.findUnique({ where: { id } })
const isJob = typeof item.maxBid !== 'undefined'
let imgproxyUrls = {}
try {
if (item.text) {
imgproxyUrls = await createImgproxyUrls(id, item.text, { forceFetch })
}
if (item.url && !isJob) {
imgproxyUrls = { ...imgproxyUrls, ...(await createImgproxyUrls(id, item.url, { forceFetch })) }
}
} catch (err) {
console.log('[imgproxy] error:', err)
// rethrow for retry
throw err
}
console.log('[imgproxy] updating item', id, 'with urls', imgproxyUrls)
await models.item.update({ where: { id }, data: { imgproxyUrls } })
}
}
export const createImgproxyUrls = async (id, text, { forceFetch }) => {
const urls = extractUrls(text)
console.log('[imgproxy] id:', id, '-- extracted urls:', urls)
// resolutions that we target:
// - nHD: 640x 360
// - qHD: 960x 540
// - HD: 1280x 720
// - HD+: 1600x 900
// - FHD: 1920x1080
// - QHD: 2560x1440
// reference:
// - https://en.wikipedia.org/wiki/Graphics_display_resolution#High-definition_(HD_and_derivatives)
// - https://www.browserstack.com/guide/ideal-screen-sizes-for-responsive-design
const resolutions = ['640x360', '960x540', '1280x720', '1600x900', '1920x1080', '2560x1440']
const imgproxyUrls = {}
for (let url of urls) {
if (!url) continue
console.log('[imgproxy] id:', id, '-- processing url:', url)
if (url.startsWith(IMGPROXY_URL)) {
console.log('[imgproxy] id:', id, '-- proxy url, decoding original url:', url)
// backwards compatibility: we used to replace image urls with imgproxy urls
url = decodeOriginalUrl(url)
console.log('[imgproxy] id:', id, '-- original url:', url)
}
if (!(await isImageURL(url, { forceFetch }))) {
console.log('[imgproxy] id:', id, '-- not image url:', url)
continue
}
imgproxyUrls[url] = {}
for (const res of resolutions) {
const [w, h] = res.split('x')
const processingOptions = `/rs:fill:${w}:${h}`
imgproxyUrls[url][`${w}w`] = createImgproxyUrl(url, processingOptions)
}
}
return imgproxyUrls
}
const createImgproxyUrl = (url, processingOptions) => {
const b64Url = Buffer.from(url, 'utf-8').toString('base64url')
const target = `${processingOptions}/${b64Url}`
const signature = sign(target)
return `${IMGPROXY_URL}${signature}${target}`
}
async function fetchWithTimeout (resource, { timeout = 1000, ...options } = {}) {
const controller = new AbortController()
const id = setTimeout(() => controller.abort(), timeout)
const response = await fetch(resource, {
...options,
signal: controller.signal
})
clearTimeout(id)
return response
}
const isImageURL = async (url, { forceFetch }) => {
if (cache.has(url)) return cache.get(url)
if (!forceFetch && matchUrl(imageUrlMatchers, url)) {
return true
}
if (!forceFetch && matchUrl(exclude, url)) {
return false
}
let isImage
// first run HEAD with small timeout
try {
// https://stackoverflow.com/a/68118683
const res = await fetchWithTimeout(url, { timeout: 1000, method: 'HEAD' })
const buf = await res.blob()
isImage = buf.type.startsWith('image/')
} catch (err) {
console.log(url, err)
}
// For HEAD requests, positives are most likely true positives.
// However, negatives may be false negatives
if (isImage) {
cache.set(url, true)
return true
}
// if not known yet, run GET request with longer timeout
try {
const res = await fetchWithTimeout(url, { timeout: 10000 })
const buf = await res.blob()
isImage = buf.type.startsWith('image/')
} catch (err) {
console.log(url, err)
}
cache.set(url, isImage)
return isImage
}
const hexDecode = (hex) => Buffer.from(hex, 'hex')
const sign = (target) => {
// https://github.com/imgproxy/imgproxy/blob/master/examples/signature.js
const hmac = createHmac('sha256', hexDecode(IMGPROXY_KEY))
hmac.update(hexDecode(IMGPROXY_SALT))
hmac.update(target)
return hmac.digest('base64url')
}
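
// worked example of the final URL layout produced by createImgproxyUrl (host and values hypothetical):
//   ${IMGPROXY_URL}<base64url(HMAC-SHA256(key, salt + target))><target>
// where target = '/rs:fill:640:360/' + base64url(original url)
// imgproxy recomputes the HMAC over the target path and rejects requests whose signature does not match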

View File

@ -14,6 +14,7 @@ import { nip57 } from './nostr.js'
import fetch from 'cross-fetch'
import { authenticatedLndGrpc } from 'ln-service'
import { views, rankViews } from './views.js'
import { imgproxy } from './imgproxy.js'
const { loadEnvConfig } = nextEnv
const { ApolloClient, HttpLink, InMemoryCache } = apolloClient
@ -66,6 +67,7 @@ async function work () {
await boss.work('nip57', nip57(args))
await boss.work('views', views(args))
await boss.work('rankViews', rankViews(args))
await boss.work('imgproxy', imgproxy(args))
console.log('working jobs')
}