Merge branch 'master' into 266-zaps-without-account

Keyan 2023-08-10 12:40:30 -05:00 committed by GitHub
commit cbfd6998a6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 91 additions and 48 deletions

View File

@@ -55,6 +55,9 @@ IMGPROXY_KEY=
IMGPROXY_SALT=
IMGPROXY_ENABLE_WEBP_DETECTION=1
IMGPROXY_MAX_ANIMATION_FRAMES=100
+ IMGPROXY_MAX_SRC_RESOLUTION=200
+ # IMGPROXY_DEVELOPMENT_ERRORS_MODE=1
+ # IMGPROXY_ENABLE_DEBUG_HEADERS=true
# prisma db url
DATABASE_URL="postgresql://sn:password@db:5432/stackernews?schema=public"

View File

@@ -5,15 +5,6 @@ import serialize from './serial'
export default {
Query: {
expectedRewards: async (parent, args, { models }) => {
- // get the last reward time, then get all contributions to rewards since then
- const lastReward = await models.earn.findFirst({
- orderBy: {
- createdAt: 'desc'
- }
- })
- if (!lastReward) return { total: 0, sources: [] }
const [result] = await models.$queryRaw`
SELECT coalesce(FLOOR(sum(sats)), 0) as total, json_build_array(
json_build_object('name', 'donations', 'value', coalesce(FLOOR(sum(sats) FILTER(WHERE type = 'DONATION')), 0)),
@@ -24,15 +15,15 @@ export default {
FROM (
(SELECT ("ItemAct".msats - COALESCE("ReferralAct".msats, 0)) / 1000.0 as sats, act::text as type
FROM "ItemAct"
- LEFT JOIN "ReferralAct" ON "ItemAct".id = "ReferralAct"."itemActId"
- WHERE "ItemAct".created_at > ${lastReward.createdAt} AND "ItemAct".act <> 'TIP')
+ LEFT JOIN "ReferralAct" ON "ReferralAct"."itemActId" = "ItemAct".id
+ WHERE date_trunc('day', "ItemAct".created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago') = date_trunc('day', now() AT TIME ZONE 'America/Chicago') AND "ItemAct".act <> 'TIP')
UNION ALL
(SELECT sats::FLOAT, 'DONATION' as type
FROM "Donation"
- WHERE created_at > ${lastReward.createdAt})
+ WHERE date_trunc('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago') = date_trunc('day', now() AT TIME ZONE 'America/Chicago'))
) subquery`
- return result
+ return result || { total: 0, sources: [] }
}
},
Mutation: {
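
The rewritten `expectedRewards` query buckets activity by calendar day in the America/Chicago timezone rather than measuring everything since the last `Earn` row, which is why the `lastReward` lookup above could be dropped. A minimal sketch of the day-bucketing predicate in isolation, assuming a Prisma client `models` and UTC-stored timestamps as in the query (the count column is illustrative, not part of the resolver):

// count today's ItemActs in the America/Chicago calendar day,
// independent of the database server's timezone
const [{ n }] = await models.$queryRaw`
  SELECT count(*)::int as n
  FROM "ItemAct"
  WHERE date_trunc('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago')
    = date_trunc('day', now() AT TIME ZONE 'America/Chicago')`

Because the predicate repeats the same `date_trunc` expression on the column side, the expression indexes added later in this commit can serve it.
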

View File

@@ -219,7 +219,7 @@ function FormGroup ({ className, label, children }) {
function InputInner ({
prepend, append, hint, showValid, onChange, overrideValue,
- innerRef, noForm, clear, onKeyDown, ...props
+ innerRef, noForm, clear, onKeyDown, debounce, ...props
}) {
const [field, meta, helpers] = noForm ? [{}, {}, {}] : useField(props)
const formik = noForm ? null : useFormikContext()
@@ -245,6 +245,18 @@ function InputInner ({
const invalid = (!formik || formik.submitCount > 0) && meta.touched && meta.error
+ const debounceRef = useRef(-1)
+ useEffect(() => {
+ if (debounceRef.current !== -1) {
+ clearTimeout(debounceRef.current)
+ }
+ if (!noForm && !isNaN(debounce) && debounce > 0) {
+ debounceRef.current = setTimeout(() => formik.validateForm(), debounce)
+ }
+ return () => clearTimeout(debounceRef.current)
+ }, [noForm, formik, field.value])
return (
<>
<InputGroup hasValidation>
@@ -446,13 +458,14 @@ export function Checkbox ({ children, label, groupClassName, hiddenLabel, extra,
const StorageKeyPrefixContext = createContext()
export function Form ({
- initial, schema, onSubmit, children, initialError, validateImmediately, storageKeyPrefix, ...props
+ initial, schema, onSubmit, children, initialError, validateImmediately, storageKeyPrefix, validateOnChange = true, ...props
}) {
const [error, setError] = useState(initialError)
return (
<Formik
initialValues={initial}
+ validateOnChange={validateOnChange}
validationSchema={schema}
initialTouched={validateImmediately && initial}
validateOnBlur={false}
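
Taken together, the two changes above let a consumer turn off Formik's per-keystroke validation (`validateOnChange={false}` on `Form`) and instead schedule a debounced `formik.validateForm()` from the input (`debounce` on the input component). A hypothetical usage sketch, with stand-in `nymSchema` and `saveName` that are not part of this diff:

// validation runs once the user pauses for 500ms instead of on every keystroke
<Form
  initial={{ name: '' }}
  schema={nymSchema}
  validateImmediately
  validateOnChange={false}
  onSubmit={async ({ name }) => saveName(name)}
>
  <Input name='name' debounce={500} showValid />
</Form>
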

View File

@@ -111,6 +111,7 @@ function NymEdit ({ user, setEditting }) {
name: user.name
}}
validateImmediately
+ validateOnChange={false}
onSubmit={async ({ name }) => {
if (name === user.name) {
setEditting(false)
@@ -137,6 +138,7 @@ function NymEdit ({ user, setEditting }) {
autoFocus
groupClassName={styles.usernameForm}
showValid
+ debounce={500}
/>
<SubmitButton variant='link' onClick={() => setEditting(true)}>save</SubmitButton>
</div>

View File

@@ -0,0 +1,13 @@
+ -- CreateIndex
+ CREATE INDEX "ReferralAct_referrerId_idx" ON "ReferralAct"("referrerId");
+ -- CreateIndex
+ CREATE INDEX "ReferralAct_itemActId_idx" ON "ReferralAct"("itemActId");
+ -- This is an empty migration.
+ CREATE INDEX IF NOT EXISTS "ItemAct.created_at_day_index"
+ ON "ItemAct"(date_trunc('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago'));
+ CREATE INDEX IF NOT EXISTS "Donation.created_at_day_index"
+ ON "Donation"(date_trunc('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago'));

View File

@@ -350,6 +350,9 @@ model ReferralAct {
msats BigInt
itemAct ItemAct @relation(fields: [itemActId], references: [id], onDelete: Cascade)
referrer User @relation(fields: [referrerId], references: [id], onDelete: Cascade)
+ @@index([referrerId])
+ @@index([itemActId])
}

model ItemAct {

View File

@@ -3,41 +3,45 @@ const serialize = require('../api/resolvers/serial')
// const ITEM_EACH_REWARD = 3.0
// const UPVOTE_EACH_REWARD = 6.0
const TOP_PERCENTILE = 21
+ const TOTAL_UPPER_BOUND_MSATS = 1000000000
+ const REDUCE_REWARDS = [616, 6030, 946, 4502]
function earn ({ models }) {
return async function ({ name }) {
console.log('running', name)
// compute how much sn earned today
- const [{ sum: actSum }] = await models.$queryRaw`
- SELECT coalesce(sum("ItemAct".msats - coalesce("ReferralAct".msats, 0)), 0) as sum
- FROM "ItemAct"
- JOIN "Item" ON "ItemAct"."itemId" = "Item".id
- LEFT JOIN "ReferralAct" ON "ItemAct".id = "ReferralAct"."itemActId"
- WHERE "ItemAct".act <> 'TIP'
- AND "ItemAct".created_at > now_utc() - INTERVAL '1 day'`
- const [{ sum: donatedSum }] = await models.$queryRaw`
- SELECT coalesce(sum(sats), 0) as sum
- FROM "Donation"
- WHERE created_at > now_utc() - INTERVAL '1 day'`
- // XXX prisma returns wonky types from raw queries ... so be extra
- // careful with them
- const sum = Number(actSum) + (Number(donatedSum) * 1000)
- if (sum <= 0) {
+ const [{ sum: sumDecimal }] = await models.$queryRaw`
+ SELECT coalesce(sum(msats), 0) as sum
+ FROM (
+ (SELECT ("ItemAct".msats - COALESCE("ReferralAct".msats, 0)) as msats
+ FROM "ItemAct"
+ LEFT JOIN "ReferralAct" ON "ReferralAct"."itemActId" = "ItemAct".id
+ WHERE date_trunc('day', "ItemAct".created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago') = date_trunc('day', (now() - interval '1 day') AT TIME ZONE 'America/Chicago') AND "ItemAct".act <> 'TIP')
+ UNION ALL
+ (SELECT sats * 1000 as msats
+ FROM "Donation"
+ WHERE date_trunc('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago') = date_trunc('day', (now() - interval '1 day') AT TIME ZONE 'America/Chicago'))
+ ) subquery`
+ // XXX prisma will return a Decimal (https://mikemcl.github.io/decimal.js)
+ // because sum of a BIGINT returns a NUMERIC type (https://www.postgresql.org/docs/13/functions-aggregate.html)
+ // and Decimal is what prisma maps it to https://www.prisma.io/docs/concepts/components/prisma-client/raw-database-access#raw-query-type-mapping
+ // so check it before coercing to Number
+ if (!sumDecimal || sumDecimal.lessThanOrEqualTo(0)) {
console.log('done', name, 'no sats to award today')
return
}
- // extra sanity check on rewards ... if it's more than 1m sats, we
- // probably have a bug somewhere
- if (sum > 1000000000) {
- console.log('done', name, 'error: too many sats to award today')
+ // extra sanity check on rewards ... if it's more than upper bound, we
+ // probably have a bug somewhere or we've grown A LOT
+ if (sumDecimal.greaterThan(TOTAL_UPPER_BOUND_MSATS)) {
+ console.log('done', name, 'error: too many sats to award today', sumDecimal)
return
}
+ const sum = Number(sumDecimal)
console.log(name, 'giving away', sum, 'msats')
/*
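
Because `sum()` over a `BIGINT` column yields `NUMERIC`, Prisma surfaces the raw-query result as a decimal.js `Decimal`, which is why the guard above uses `lessThanOrEqualTo`/`greaterThan` before coercing with `Number()`. A small standalone sketch of that pattern (decimal.js is the library Prisma maps to; the literal value is made up):

const Decimal = require('decimal.js')

const sumDecimal = new Decimal('123456789') // what a NUMERIC sum maps to
if (sumDecimal.lessThanOrEqualTo(0)) {
  console.log('nothing to award')
} else if (sumDecimal.greaterThan(1000000000)) {
  console.log('suspiciously large, bailing out') // mirrors TOTAL_UPPER_BOUND_MSATS
} else {
  const sum = Number(sumDecimal) // safe to coerce once it's bounded
  console.log('giving away', sum, 'msats')
}
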
@@ -54,7 +58,8 @@ function earn ({ models }) {
*/
// get earners { userId, id, type, rank, proportion }
- const earners = await models.$queryRawUnsafe(`
+ const earners = await models.$queryRaw`
+ -- get top 21% of posts and comments
WITH item_ratios AS (
SELECT *,
CASE WHEN "parentId" IS NULL THEN 'POST' ELSE 'COMMENT' END as type,
@@ -70,20 +75,32 @@ function earn ({ models }) {
) x
WHERE x.percentile <= ${TOP_PERCENTILE}
),
- upvoters AS (
- SELECT "ItemAct"."userId", item_ratios.id, item_ratios.ratio, item_ratios."parentId",
- sum("ItemAct".msats) as tipped, min("ItemAct".created_at) as acted_at
- FROM item_ratios
- JOIN "ItemAct" on "ItemAct"."itemId" = item_ratios.id
- WHERE act = 'TIP'
- GROUP BY "ItemAct"."userId", item_ratios.id, item_ratios.ratio, item_ratios."parentId"
+ -- get top upvoters of top posts and comments
+ upvoter_islands AS (
+ SELECT "ItemAct"."userId", item_ratios.id, item_ratios.ratio, item_ratios."parentId",
+ "ItemAct".msats as tipped, "ItemAct".created_at as acted_at,
+ ROW_NUMBER() OVER (partition by item_ratios.id order by "ItemAct".created_at asc)
+ - ROW_NUMBER() OVER (partition by item_ratios.id, "ItemAct"."userId" order by "ItemAct".created_at asc) AS island
+ FROM item_ratios
+ JOIN "ItemAct" on "ItemAct"."itemId" = item_ratios.id
+ WHERE act = 'TIP'
),
+ -- isolate contiguous upzaps from the same user on the same item so that when we take the log
+ -- of the upzaps it accounts for successive zaps and does not disproportionately reward them
+ upvoters AS (
+ SELECT "userId", id, ratio, "parentId", GREATEST(log(sum(tipped) / 1000), 0) as tipped, min(acted_at) as acted_at
+ FROM upvoter_islands
+ GROUP BY "userId", id, ratio, "parentId", island
+ ),
+ -- the relative contribution of each upvoter to the post/comment
+ -- early multiplier: 10/ln(early_rank + e)
+ -- we also weight by trust in a step wise fashion
upvoter_ratios AS (
- SELECT "userId", sum(early_multiplier*tipped_ratio*ratio*users.trust) as upvoter_ratio,
+ SELECT "userId", sum(early_multiplier*tipped_ratio*ratio*CASE WHEN users.id = ANY (${REDUCE_REWARDS}) THEN 0.2 ELSE CEIL(users.trust*2)+1 END) as upvoter_ratio,
"parentId" IS NULL as "isPost", CASE WHEN "parentId" IS NULL THEN 'TIP_POST' ELSE 'TIP_COMMENT' END as type
FROM (
SELECT *,
- 1/(ROW_NUMBER() OVER (partition by id order by acted_at asc)) AS early_multiplier,
+ 10.0/LN(ROW_NUMBER() OVER (partition by id order by acted_at asc) + EXP(1.0)) AS early_multiplier,
tipped::float/(sum(tipped) OVER (partition by id)) tipped_ratio
FROM upvoters
) u
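
The new `upvoter_islands` CTE is a gaps-and-islands construction: subtracting a per-user `ROW_NUMBER()` from a per-item `ROW_NUMBER()` yields a value that stays constant across a run of consecutive zaps by the same user on the same item, so grouping on `island` collapses each run before `GREATEST(log(sum(tipped) / 1000), 0)` is applied. A self-contained illustration of the trick against a throwaway `VALUES` list (assuming the same `models` Prisma client; the data is made up):

// rows 1, 2 and 4 are zaps by user 'a'; the row_number difference tags the
// first two (contiguous) and the last one (after 'b' interrupts) as
// different islands, so they aggregate separately
const islands = await models.$queryRaw`
  SELECT "userId", island, sum(msats) as msats
  FROM (
    SELECT "userId", msats,
      ROW_NUMBER() OVER (ORDER BY created_at)
      - ROW_NUMBER() OVER (PARTITION BY "userId" ORDER BY created_at) AS island
    FROM (VALUES ('a', 10, 1), ('a', 20, 2), ('b', 5, 3), ('a', 40, 4))
      AS zaps("userId", msats, created_at)
  ) runs
  GROUP BY "userId", island
  ORDER BY "userId", island`

Each run is summed first and only then log-scaled, so splitting one zap into many back-to-back zaps earns the same weight as a single zap of the same total.
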
@@ -93,7 +110,8 @@ function earn ({ models }) {
SELECT "userId", NULL as id, type, ROW_NUMBER() OVER (PARTITION BY "isPost" ORDER BY upvoter_ratio DESC) as rank,
upvoter_ratio/(sum(upvoter_ratio) OVER (PARTITION BY "isPost"))/2 as proportion
FROM upvoter_ratios
- WHERE upvoter_ratio > 0`)
+ WHERE upvoter_ratio > 0
+ ORDER BY "isPost", rank ASC`
// in order to group earnings for users we use the same createdAt time for
// all earnings
@@ -113,7 +131,7 @@ function earn ({ models }) {
return
}
- console.log('stacker', earner.userId, 'earned', earnings)
+ console.log('stacker', earner.userId, 'earned', earnings, 'proportion', earner.proportion, 'rank', earner.rank, 'type', earner.type)
if (earnings > 0) {
await serialize(models,