Compare commits


No commits in common. "a669ec832b859e73c94fc60b488e284c92735f47" and "0d93c92e30d0a7b9bede5666bd5f65ed2c87b92f" have entirely different histories.

37 changed files with 584 additions and 1392 deletions

View File

@@ -5,7 +5,7 @@
</p> </p>
- Stacker News is trying to fix online communities with economics - Stacker News makes internet communities that pay you Bitcoin
- What You See is What We Ship (look ma, I invented an initialism) - What You See is What We Ship (look ma, I invented an initialism)
- 100% FOSS - 100% FOSS
- We pay bitcoin for PRs, issues, documentation, code reviews and more - We pay bitcoin for PRs, issues, documentation, code reviews and more

View File

@@ -1,6 +1,5 @@
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants' import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { msatsToSats, satsToMsats } from '@/lib/format' import { msatsToSats, satsToMsats } from '@/lib/format'
import { Prisma } from '@prisma/client'
export const anonable = false export const anonable = false
@@ -49,9 +48,9 @@ export async function onPaid ({ invoice, actId }, { tx }) {
let itemAct let itemAct
if (invoice) { if (invoice) {
await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID' } }) await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID' } })
itemAct = await tx.itemAct.findFirst({ where: { invoiceId: invoice.id }, include: { item: true } }) itemAct = await tx.itemAct.findFirst({ where: { invoiceId: invoice.id } })
} else if (actId) { } else if (actId) {
itemAct = await tx.itemAct.findUnique({ where: { id: actId }, include: { item: true } }) itemAct = await tx.itemAct.findUnique({ where: { id: actId } })
} else { } else {
throw new Error('No invoice or actId') throw new Error('No invoice or actId')
} }
@@ -61,34 +60,19 @@ export async function onPaid ({ invoice, actId }, { tx }) {
// denormalize downzaps // denormalize downzaps
await tx.$executeRaw` await tx.$executeRaw`
WITH territory AS ( WITH zapper AS (
SELECT COALESCE(r."subName", i."subName", 'meta')::TEXT as "subName" SELECT trust FROM users WHERE id = ${itemAct.userId}::INTEGER
FROM "Item" i ), zap AS (
LEFT JOIN "Item" r ON r.id = i."rootId" INSERT INTO "ItemUserAgg" ("userId", "itemId", "downZapSats")
WHERE i.id = ${itemAct.itemId}::INTEGER VALUES (${itemAct.userId}::INTEGER, ${itemAct.itemId}::INTEGER, ${sats}::INTEGER)
), zapper AS ( ON CONFLICT ("itemId", "userId") DO UPDATE
SELECT SET "downZapSats" = "ItemUserAgg"."downZapSats" + ${sats}::INTEGER, updated_at = now()
COALESCE(${itemAct.item.parentId RETURNING LOG("downZapSats" / GREATEST("downZapSats" - ${sats}::INTEGER, 1)::FLOAT) AS log_sats
? Prisma.sql`"zapCommentTrust"` )
: Prisma.sql`"zapPostTrust"`}, 0) as "zapTrust", UPDATE "Item"
COALESCE(${itemAct.item.parentId SET "weightedDownVotes" = "weightedDownVotes" + (zapper.trust * zap.log_sats)
? Prisma.sql`"subZapCommentTrust"` FROM zap, zapper
: Prisma.sql`"subZapPostTrust"`}, 0) as "subZapTrust" WHERE "Item".id = ${itemAct.itemId}::INTEGER`
FROM territory
LEFT JOIN "UserSubTrust" ust ON ust."subName" = territory."subName"
AND ust."userId" = ${itemAct.userId}::INTEGER
), zap AS (
INSERT INTO "ItemUserAgg" ("userId", "itemId", "downZapSats")
VALUES (${itemAct.userId}::INTEGER, ${itemAct.itemId}::INTEGER, ${sats}::INTEGER)
ON CONFLICT ("itemId", "userId") DO UPDATE
SET "downZapSats" = "ItemUserAgg"."downZapSats" + ${sats}::INTEGER, updated_at = now()
RETURNING LOG("downZapSats" / GREATEST("downZapSats" - ${sats}::INTEGER, 1)::FLOAT) AS log_sats
)
UPDATE "Item"
SET "weightedDownVotes" = "weightedDownVotes" + zapper."zapTrust" * zap.log_sats,
"subWeightedDownVotes" = "subWeightedDownVotes" + zapper."subZapTrust" * zap.log_sats
FROM zap, zapper
WHERE "Item".id = ${itemAct.itemId}::INTEGER`
} }
export async function onFail ({ invoice }, { tx }) { export async function onFail ({ invoice }, { tx }) {
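
Note on the hunk above: the right-hand side drops the per-territory trust CTE and weights down zaps by a single users.trust column, so the arithmetic reduces to a per-user trust multiplier on a log-scaled sats delta. A minimal JavaScript sketch of that formula (names are illustrative; the real work stays in the SQL above):

// contribution of a down zap of `sats`, given the post-update "downZapSats" total
function downZapWeight (trust, downZapSatsAfter, sats) {
  // mirrors LOG("downZapSats" / GREATEST("downZapSats" - sats, 1)) — Postgres LOG is base 10
  const logSats = Math.log10(downZapSatsAfter / Math.max(downZapSatsAfter - sats, 1))
  return trust * logSats
}

// e.g. a zapper with trust 0.5 down zapping 10 sats on a fresh item adds 0.5 * log10(10) = 0.5
console.log(downZapWeight(0.5, 10, 10))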

View File

@@ -252,18 +252,15 @@ export async function onPaid ({ invoice, id }, context) {
JOIN users ON "Item"."userId" = users.id JOIN users ON "Item"."userId" = users.id
WHERE "Item".id = ${item.id}::INTEGER WHERE "Item".id = ${item.id}::INTEGER
), ancestors AS ( ), ancestors AS (
SELECT "Item".*
FROM "Item", comment
WHERE "Item".path @> comment.path AND "Item".id <> comment.id
ORDER BY "Item".id
), updated_ancestors AS (
UPDATE "Item" UPDATE "Item"
SET ncomments = "Item".ncomments + 1, SET ncomments = "Item".ncomments + 1,
"lastCommentAt" = GREATEST("Item"."lastCommentAt", comment.created_at), "lastCommentAt" = GREATEST("Item"."lastCommentAt", comment.created_at),
"weightedComments" = "Item"."weightedComments" +
CASE WHEN comment."userId" = "Item"."userId" THEN 0 ELSE comment.trust END,
"nDirectComments" = "Item"."nDirectComments" + "nDirectComments" = "Item"."nDirectComments" +
CASE WHEN comment."parentId" = "Item".id THEN 1 ELSE 0 END CASE WHEN comment."parentId" = "Item".id THEN 1 ELSE 0 END
FROM comment, ancestors FROM comment
WHERE "Item".id = ancestors.id WHERE "Item".path @> comment.path AND "Item".id <> comment.id
RETURNING "Item".* RETURNING "Item".*
) )
INSERT INTO "Reply" (created_at, updated_at, "ancestorId", "ancestorUserId", "itemId", "userId", level) INSERT INTO "Reply" (created_at, updated_at, "ancestorId", "ancestorUserId", "itemId", "userId", level)

View File

@@ -1,27 +0,0 @@
import { USER_ID } from '@/lib/constants'
export const GLOBAL_SEEDS = [USER_ID.k00b, USER_ID.ek]
export function initialTrust ({ name, userId }) {
const results = GLOBAL_SEEDS.map(id => ({
subName: name,
userId: id,
zapPostTrust: 1,
subZapPostTrust: 1,
zapCommentTrust: 1,
subZapCommentTrust: 1
}))
if (!GLOBAL_SEEDS.includes(userId)) {
results.push({
subName: name,
userId,
zapPostTrust: 0,
subZapPostTrust: 1,
zapCommentTrust: 0,
subZapCommentTrust: 1
})
}
return results
}

View File

@@ -1,7 +1,6 @@
import { PAID_ACTION_PAYMENT_METHODS, TERRITORY_PERIOD_COST } from '@/lib/constants' import { PAID_ACTION_PAYMENT_METHODS, TERRITORY_PERIOD_COST } from '@/lib/constants'
import { satsToMsats } from '@/lib/format' import { satsToMsats } from '@/lib/format'
import { nextBilling } from '@/lib/territory' import { nextBilling } from '@/lib/territory'
import { initialTrust } from './lib/territory'
export const anonable = false export const anonable = false
@@ -21,7 +20,7 @@ export async function perform ({ invoiceId, ...data }, { me, cost, tx }) {
const billedLastAt = new Date() const billedLastAt = new Date()
const billPaidUntil = nextBilling(billedLastAt, billingType) const billPaidUntil = nextBilling(billedLastAt, billingType)
const sub = await tx.sub.create({ return await tx.sub.create({
data: { data: {
...data, ...data,
billedLastAt, billedLastAt,
@@ -43,12 +42,6 @@ export async function perform ({ invoiceId, ...data }, { me, cost, tx }) {
} }
} }
}) })
await tx.userSubTrust.createMany({
data: initialTrust({ name: sub.name, userId: sub.userId })
})
return sub
} }
export async function describe ({ name }) { export async function describe ({ name }) {

View File

@@ -1,7 +1,6 @@
import { PAID_ACTION_PAYMENT_METHODS, TERRITORY_PERIOD_COST } from '@/lib/constants' import { PAID_ACTION_PAYMENT_METHODS, TERRITORY_PERIOD_COST } from '@/lib/constants'
import { satsToMsats } from '@/lib/format' import { satsToMsats } from '@/lib/format'
import { nextBilling } from '@/lib/territory' import { nextBilling } from '@/lib/territory'
import { initialTrust } from './lib/territory'
export const anonable = false export const anonable = false
@@ -66,7 +65,7 @@ export async function perform ({ name, invoiceId, ...data }, { me, cost, tx }) {
} }
}) })
const updatedSub = await tx.sub.update({ return await tx.sub.update({
data, data,
// optimistic concurrency control // optimistic concurrency control
// make sure none of the relevant fields have changed since we fetched the sub // make sure none of the relevant fields have changed since we fetched the sub
@@ -77,12 +76,6 @@ export async function perform ({ name, invoiceId, ...data }, { me, cost, tx }) {
} }
} }
}) })
await tx.userSubTrust.createMany({
data: initialTrust({ name: updatedSub.name, userId: updatedSub.userId })
})
return updatedSub
} }
export async function describe ({ name }, context) { export async function describe ({ name }, context) {

View File

@@ -2,7 +2,6 @@ import { PAID_ACTION_PAYMENT_METHODS, USER_ID } from '@/lib/constants'
import { msatsToSats, satsToMsats } from '@/lib/format' import { msatsToSats, satsToMsats } from '@/lib/format'
import { notifyZapped } from '@/lib/webPush' import { notifyZapped } from '@/lib/webPush'
import { getInvoiceableWallets } from '@/wallets/server' import { getInvoiceableWallets } from '@/wallets/server'
import { Prisma } from '@prisma/client'
export const anonable = true export const anonable = true
@@ -150,22 +149,8 @@ export async function onPaid ({ invoice, actIds }, { tx }) {
// perform denomormalized aggregates: weighted votes, upvotes, msats, lastZapAt // perform denomormalized aggregates: weighted votes, upvotes, msats, lastZapAt
// NOTE: for the rows that might be updated by a concurrent zap, we use UPDATE for implicit locking // NOTE: for the rows that might be updated by a concurrent zap, we use UPDATE for implicit locking
await tx.$queryRaw` await tx.$queryRaw`
WITH territory AS ( WITH zapper AS (
SELECT COALESCE(r."subName", i."subName", 'meta')::TEXT as "subName" SELECT trust FROM users WHERE id = ${itemAct.userId}::INTEGER
FROM "Item" i
LEFT JOIN "Item" r ON r.id = i."rootId"
WHERE i.id = ${itemAct.itemId}::INTEGER
), zapper AS (
SELECT
COALESCE(${itemAct.item.parentId
? Prisma.sql`"zapCommentTrust"`
: Prisma.sql`"zapPostTrust"`}, 0) as "zapTrust",
COALESCE(${itemAct.item.parentId
? Prisma.sql`"subZapCommentTrust"`
: Prisma.sql`"subZapPostTrust"`}, 0) as "subZapTrust"
FROM territory
LEFT JOIN "UserSubTrust" ust ON ust."subName" = territory."subName"
AND ust."userId" = ${itemAct.userId}::INTEGER
), zap AS ( ), zap AS (
INSERT INTO "ItemUserAgg" ("userId", "itemId", "zapSats") INSERT INTO "ItemUserAgg" ("userId", "itemId", "zapSats")
VALUES (${itemAct.userId}::INTEGER, ${itemAct.itemId}::INTEGER, ${sats}::INTEGER) VALUES (${itemAct.userId}::INTEGER, ${itemAct.itemId}::INTEGER, ${sats}::INTEGER)
@@ -173,30 +158,17 @@ export async function onPaid ({ invoice, actIds }, { tx }) {
SET "zapSats" = "ItemUserAgg"."zapSats" + ${sats}::INTEGER, updated_at = now() SET "zapSats" = "ItemUserAgg"."zapSats" + ${sats}::INTEGER, updated_at = now()
RETURNING ("zapSats" = ${sats}::INTEGER)::INTEGER as first_vote, RETURNING ("zapSats" = ${sats}::INTEGER)::INTEGER as first_vote,
LOG("zapSats" / GREATEST("zapSats" - ${sats}::INTEGER, 1)::FLOAT) AS log_sats LOG("zapSats" / GREATEST("zapSats" - ${sats}::INTEGER, 1)::FLOAT) AS log_sats
), item_zapped AS (
UPDATE "Item"
SET
"weightedVotes" = "weightedVotes" + zapper."zapTrust" * zap.log_sats,
"subWeightedVotes" = "subWeightedVotes" + zapper."subZapTrust" * zap.log_sats,
upvotes = upvotes + zap.first_vote,
msats = "Item".msats + ${msats}::BIGINT,
mcredits = "Item".mcredits + ${invoice?.invoiceForward ? 0n : msats}::BIGINT,
"lastZapAt" = now()
FROM zap, zapper
WHERE "Item".id = ${itemAct.itemId}::INTEGER
RETURNING "Item".*, zapper."zapTrust" * zap.log_sats as "weightedVote"
), ancestors AS (
SELECT "Item".*
FROM "Item", item_zapped
WHERE "Item".path @> item_zapped.path AND "Item".id <> item_zapped.id
ORDER BY "Item".id
) )
UPDATE "Item" UPDATE "Item"
SET "weightedComments" = "Item"."weightedComments" + item_zapped."weightedVote", SET
"commentMsats" = "Item"."commentMsats" + ${msats}::BIGINT, "weightedVotes" = "weightedVotes" + (zapper.trust * zap.log_sats),
"commentMcredits" = "Item"."commentMcredits" + ${invoice?.invoiceForward ? 0n : msats}::BIGINT upvotes = upvotes + zap.first_vote,
FROM item_zapped, ancestors msats = "Item".msats + ${msats}::BIGINT,
WHERE "Item".id = ancestors.id` mcredits = "Item".mcredits + ${invoice?.invoiceForward ? 0n : msats}::BIGINT,
"lastZapAt" = now()
FROM zap, zapper
WHERE "Item".id = ${itemAct.itemId}::INTEGER
RETURNING "Item".*`
// record potential bounty payment // record potential bounty payment
// NOTE: we are at least guaranteed that we see the update "ItemUserAgg" from our tx so we can trust // NOTE: we are at least guaranteed that we see the update "ItemUserAgg" from our tx so we can trust
@@ -216,6 +188,17 @@ export async function onPaid ({ invoice, actIds }, { tx }) {
SET "bountyPaidTo" = array_remove(array_append(array_remove("bountyPaidTo", bounty.target), bounty.target), NULL) SET "bountyPaidTo" = array_remove(array_append(array_remove("bountyPaidTo", bounty.target), bounty.target), NULL)
FROM bounty FROM bounty
WHERE "Item".id = bounty.id AND bounty.paid` WHERE "Item".id = bounty.id AND bounty.paid`
// update commentMsats on ancestors
await tx.$executeRaw`
WITH zapped AS (
SELECT * FROM "Item" WHERE id = ${itemAct.itemId}::INTEGER
)
UPDATE "Item"
SET "commentMsats" = "Item"."commentMsats" + ${msats}::BIGINT,
"commentMcredits" = "Item"."commentMcredits" + ${invoice?.invoiceForward ? 0n : msats}::BIGINT
FROM zapped
WHERE "Item".path @> zapped.path AND "Item".id <> zapped.id`
} }
export async function nonCriticalSideEffects ({ invoice, actIds }, { models }) { export async function nonCriticalSideEffects ({ invoice, actIds }, { models }) {
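
Note on the hunks above: the zap path on the right-hand side applies the same log-scaled, trust-weighted increment, plus a few extra aggregates (first-vote detection, msats vs. credits). A rough, self-contained JavaScript restatement of that arithmetic (illustrative only; the SQL above remains the source of truth):

// aggregates derived for a single zap of `sats`, given the post-update "zapSats" total
function zapAggregates ({ trust, zapSatsAfter, sats, msats, invoiceForward }) {
  const firstVote = zapSatsAfter === sats ? 1 : 0 // ("zapSats" = sats)::INTEGER as first_vote
  const logSats = Math.log10(zapSatsAfter / Math.max(zapSatsAfter - sats, 1))
  return {
    weightedVotesDelta: trust * logSats,
    upvotesDelta: firstVote,
    msatsDelta: msats,
    mcreditsDelta: invoiceForward ? 0n : msats // forwarded zaps don't add credits
  }
}

console.log(zapAggregates({ trust: 0.9, zapSatsAfter: 21, sats: 21, msats: 21000n, invoiceForward: false }))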

View File

@@ -39,12 +39,16 @@ function commentsOrderByClause (me, models, sort) {
COALESCE("Item"."invoicePaidAt", "Item".created_at) DESC, "Item".id DESC` COALESCE("Item"."invoicePaidAt", "Item".created_at) DESC, "Item".id DESC`
} }
if (sort === 'hot') { if (me && sort === 'hot') {
return `ORDER BY ${sharedSorts}, return `ORDER BY ${sharedSorts},
"hotScore" DESC NULLS LAST, "personal_hot_score" DESC NULLS LAST,
"Item".msats DESC, "Item".id DESC` "Item".msats DESC, ("Item".cost > 0) DESC, "Item".id DESC`
} else { } else {
return `ORDER BY ${sharedSorts}, "Item"."weightedVotes" - "Item"."weightedDownVotes" DESC NULLS LAST, "Item".msats DESC, "Item".id DESC` if (sort === 'top') {
return `ORDER BY ${sharedSorts}, ${orderByNumerator({ models, commentScaler: 0 })} DESC NULLS LAST, "Item".msats DESC, ("Item".cost > 0) DESC, "Item".id DESC`
} else {
return `ORDER BY ${sharedSorts}, ${orderByNumerator({ models, commentScaler: 0, considerBoost: true })}/POWER(GREATEST(3, EXTRACT(EPOCH FROM (now_utc() - "Item".created_at))/3600), 1.3) DESC NULLS LAST, "Item".msats DESC, ("Item".cost > 0) DESC, "Item".id DESC`
}
} }
} }
@@ -134,14 +138,14 @@ export async function getAd (parent, { sub, subArr = [], showNsfw = false }, { m
}, ...subArr))?.[0] || null }, ...subArr))?.[0] || null
} }
const orderByClause = (by, me, models, type, sub) => { const orderByClause = (by, me, models, type) => {
switch (by) { switch (by) {
case 'comments': case 'comments':
return 'ORDER BY "Item".ncomments DESC' return 'ORDER BY "Item".ncomments DESC'
case 'sats': case 'sats':
return 'ORDER BY "Item".msats DESC' return 'ORDER BY "Item".msats DESC'
case 'zaprank': case 'zaprank':
return topOrderByWeightedSats(me, models, sub) return topOrderByWeightedSats(me, models)
case 'boost': case 'boost':
return 'ORDER BY "Item".boost DESC' return 'ORDER BY "Item".boost DESC'
case 'random': case 'random':
@@ -151,8 +155,22 @@ const orderByClause = (by, me, models, type, sub) => {
} }
} }
export function joinHotScoreView (me, models) { export function orderByNumerator ({ models, commentScaler = 0.5, considerBoost = false }) {
return ' JOIN hot_score_view g ON g.id = "Item".id ' return `((CASE WHEN "Item"."weightedVotes" - "Item"."weightedDownVotes" > 0 THEN
GREATEST("Item"."weightedVotes" - "Item"."weightedDownVotes", POWER("Item"."weightedVotes" - "Item"."weightedDownVotes", 1.2))
ELSE
"Item"."weightedVotes" - "Item"."weightedDownVotes"
END + "Item"."weightedComments"*${commentScaler}) + ${considerBoost ? `("Item".boost / ${BOOST_MULT})` : 0})`
}
export function joinZapRankPersonalView (me, models) {
let join = ` JOIN zap_rank_personal_view g ON g.id = "Item".id AND g."viewerId" = ${GLOBAL_SEED} `
if (me) {
join += ` LEFT JOIN zap_rank_personal_view l ON l.id = g.id AND l."viewerId" = ${me.id} `
}
return join
} }
// this grabs all the stuff we need to display the item list and only // this grabs all the stuff we need to display the item list and only
@@ -457,10 +475,10 @@ export default {
await filterClause(me, models, type), await filterClause(me, models, type),
by === 'boost' && '"Item".boost > 0', by === 'boost' && '"Item".boost > 0',
muteClause(me))} muteClause(me))}
${orderByClause(by || 'zaprank', me, models, type, sub)} ${orderByClause(by || 'zaprank', me, models, type)}
OFFSET $3 OFFSET $3
LIMIT $4`, LIMIT $4`,
orderBy: orderByClause(by || 'zaprank', me, models, type, sub) orderBy: orderByClause(by || 'zaprank', me, models, type)
}, ...whenRange(when, from, to || decodedCursor.time), decodedCursor.offset, limit, ...subArr) }, ...whenRange(when, from, to || decodedCursor.time), decodedCursor.offset, limit, ...subArr)
break break
case 'random': case 'random':
@@ -553,10 +571,10 @@ export default {
me, me,
models, models,
query: ` query: `
${SELECT}, g.hot_score AS "hotScore", g.sub_hot_score AS "subHotScore" ${SELECT}, ${me ? 'GREATEST(g.tf_hot_score, l.tf_hot_score)' : 'g.tf_hot_score'} AS rank
FROM "Item" FROM "Item"
LEFT JOIN "Sub" ON "Sub"."name" = "Item"."subName" LEFT JOIN "Sub" ON "Sub"."name" = "Item"."subName"
${joinHotScoreView(me, models)} ${joinZapRankPersonalView(me, models)}
${whereClause( ${whereClause(
// in home (sub undefined), filter out global pinned items since we inject them later // in home (sub undefined), filter out global pinned items since we inject them later
sub ? '"Item"."pinId" IS NULL' : 'NOT ("Item"."pinId" IS NOT NULL AND "Item"."subName" IS NULL)', sub ? '"Item"."pinId" IS NULL' : 'NOT ("Item"."pinId" IS NOT NULL AND "Item"."subName" IS NULL)',
@@ -569,11 +587,40 @@ export default {
await filterClause(me, models, type), await filterClause(me, models, type),
subClause(sub, 3, 'Item', me, showNsfw), subClause(sub, 3, 'Item', me, showNsfw),
muteClause(me))} muteClause(me))}
ORDER BY ${sub ? '"subHotScore"' : '"hotScore"'} DESC, "Item".msats DESC, "Item".id DESC ORDER BY rank DESC
OFFSET $1 OFFSET $1
LIMIT $2`, LIMIT $2`,
orderBy: `ORDER BY ${sub ? '"subHotScore"' : '"hotScore"'} DESC, "Item".msats DESC, "Item".id DESC` orderBy: 'ORDER BY rank DESC'
}, decodedCursor.offset, limit, ...subArr) }, decodedCursor.offset, limit, ...subArr)
// XXX this is mostly for subs that are really empty
if (items.length < limit) {
items = await itemQueryWithMeta({
me,
models,
query: `
${SELECT}
FROM "Item"
LEFT JOIN "Sub" ON "Sub"."name" = "Item"."subName"
${whereClause(
subClause(sub, 3, 'Item', me, showNsfw),
muteClause(me),
// in home (sub undefined), filter out global pinned items since we inject them later
sub ? '"Item"."pinId" IS NULL' : 'NOT ("Item"."pinId" IS NOT NULL AND "Item"."subName" IS NULL)',
'"Item"."deletedAt" IS NULL',
'"Item"."parentId" IS NULL',
'"Item".bio = false',
ad ? `"Item".id <> ${ad.id}` : '',
activeOrMine(me),
await filterClause(me, models, type))}
ORDER BY ${orderByNumerator({ models, considerBoost: true })}/POWER(GREATEST(3, EXTRACT(EPOCH FROM (now_utc() - "Item".created_at))/3600), 1.3) DESC NULLS LAST,
"Item".msats DESC, ("Item".cost > 0) DESC, "Item".id DESC
OFFSET $1
LIMIT $2`,
orderBy: `ORDER BY ${orderByNumerator({ models, considerBoost: true })}/POWER(GREATEST(3, EXTRACT(EPOCH FROM (now_utc() - "Item".created_at))/3600), 1.3) DESC NULLS LAST,
"Item".msats DESC, ("Item".cost > 0) DESC, "Item".id DESC`
}, decodedCursor.offset, limit, ...subArr)
}
break break
} }
break break
@@ -1527,9 +1574,6 @@ export const SELECT =
`SELECT "Item".*, "Item".created_at as "createdAt", "Item".updated_at as "updatedAt", `SELECT "Item".*, "Item".created_at as "createdAt", "Item".updated_at as "updatedAt",
ltree2text("Item"."path") AS "path"` ltree2text("Item"."path") AS "path"`
function topOrderByWeightedSats (me, models, sub) { function topOrderByWeightedSats (me, models) {
if (sub) { return `ORDER BY ${orderByNumerator({ models })} DESC NULLS LAST, "Item".id DESC`
return 'ORDER BY "Item"."subWeightedVotes" - "Item"."subWeightedDownVotes" DESC, "Item".msats DESC, "Item".id DESC'
}
return 'ORDER BY "Item"."weightedVotes" - "Item"."weightedDownVotes" DESC, "Item".msats DESC, "Item".id DESC'
} }
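
Note on the hunks above: on the right-hand side, the comment 'hot' sort and the low-volume fallback query both divide orderByNumerator by a time penalty of POWER(GREATEST(3, age in hours), 1.3). A compact JavaScript sketch of that score (commentScaler is a parameter of orderByNumerator; the repo's BOOST_MULT constant is not shown in this diff, so the boost term is omitted here):

function hotScore ({ weightedVotes, weightedDownVotes, weightedComments }, ageHours, commentScaler = 0.5) {
  const net = weightedVotes - weightedDownVotes
  // positive totals get a mild super-linear bump: GREATEST(net, POWER(net, 1.2))
  const base = net > 0 ? Math.max(net, Math.pow(net, 1.2)) : net
  const numerator = base + weightedComments * commentScaler
  // age decay with a 3 hour floor and a 1.3 exponent
  return numerator / Math.pow(Math.max(3, ageHours), 1.3)
}

// e.g. an hour-old post with net weighted votes of 4 and no weighted comments
console.log(hotScore({ weightedVotes: 5, weightedDownVotes: 1, weightedComments: 0 }, 1))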

View File

@@ -174,6 +174,7 @@ export default {
search: async (parent, { q, cursor, sort, what, when, from: whenFrom, to: whenTo }, { me, models, search }) => { search: async (parent, { q, cursor, sort, what, when, from: whenFrom, to: whenTo }, { me, models, search }) => {
const decodedCursor = decodeCursor(cursor) const decodedCursor = decodeCursor(cursor)
let sitems = null let sitems = null
let termQueries = []
// short circuit: return empty result if either: // short circuit: return empty result if either:
// 1. no query provided, or // 1. no query provided, or
@@ -185,116 +186,56 @@ export default {
} }
} }
// build query in parts: const whatArr = []
// filters: determine the universe of potential search candidates
// termQueries: queries related to the actual search terms
// functions: rank modifiers to boost by recency or popularity
const filters = []
const termQueries = []
const functions = []
// filters for item types
switch (what) { switch (what) {
case 'posts': // posts only case 'posts':
filters.push({ bool: { must_not: { exists: { field: 'parentId' } } } }) whatArr.push({ bool: { must_not: { exists: { field: 'parentId' } } } })
break break
case 'comments': // comments only case 'comments':
filters.push({ bool: { must: { exists: { field: 'parentId' } } } }) whatArr.push({ bool: { must: { exists: { field: 'parentId' } } } })
break break
case 'bookmarks': case 'bookmarks':
if (me?.id) { if (me?.id) {
filters.push({ match: { bookmarkedBy: me?.id } }) whatArr.push({ match: { bookmarkedBy: me?.id } })
} }
break break
default: default:
break break
} }
// filter for active posts
filters.push(
me
? {
bool: {
should: [
{ match: { status: 'ACTIVE' } },
{ match: { status: 'NOSATS' } },
{ match: { userId: me.id } }
]
}
}
: {
bool: {
should: [
{ match: { status: 'ACTIVE' } },
{ match: { status: 'NOSATS' } }
]
}
}
)
// filter for time range
const whenRange = when === 'custom'
? {
gte: whenFrom,
lte: new Date(Math.min(new Date(Number(whenTo)), decodedCursor.time))
}
: {
lte: decodedCursor.time,
gte: whenToFrom(when)
}
filters.push({ range: { createdAt: whenRange } })
// filter for non negative wvotes
filters.push({ range: { wvotes: { gte: 0 } } })
// decompose the search terms
const { query: _query, quotes, nym, url, territory } = queryParts(q) const { query: _query, quotes, nym, url, territory } = queryParts(q)
const query = _query let query = _query
const isUrlSearch = url && query.length === 0 // exclusively searching for an url
// if search contains a url term, modify the query text
if (url) { if (url) {
const uri = url.slice(4) const isFQDN = url.startsWith('url:www.')
let uriObj const domain = isFQDN ? url.slice(8) : url.slice(4)
try { const fqdn = `www.${domain}`
uriObj = new URL(uri) query = (isUrlSearch) ? `${domain} ${fqdn}` : `${query.trim()} ${domain}`
} catch {
try {
uriObj = new URL(`https://${uri}`)
} catch {}
}
if (uriObj) {
termQueries.push({
wildcard: { url: `*${uriObj?.hostname ?? uri}${uriObj?.pathname ?? ''}*` }
})
termQueries.push({
match: { text: `${uriObj?.hostname ?? uri}${uriObj?.pathname ?? ''}` }
})
}
} }
// if nym, items must contain nym
if (nym) { if (nym) {
filters.push({ wildcard: { 'user.name': `*${nym.slice(1).toLowerCase()}*` } }) whatArr.push({ wildcard: { 'user.name': `*${nym.slice(1).toLowerCase()}*` } })
} }
// if territory, item must be from territory
if (territory) { if (territory) {
filters.push({ match: { 'sub.name': territory.slice(1) } }) whatArr.push({ match: { 'sub.name': territory.slice(1) } })
} }
// if quoted phrases, items must contain entire phrase termQueries.push({
// all terms are matched in fields
multi_match: {
query,
type: 'best_fields',
fields: ['title^100', 'text'],
minimum_should_match: (isUrlSearch) ? 1 : '100%',
boost: 1000
}
})
for (const quote of quotes) { for (const quote of quotes) {
termQueries.push({ whatArr.push({
multi_match: {
query: quote,
type: 'phrase',
fields: ['title', 'text']
}
})
// force the search to include the quoted phrase
filters.push({
multi_match: { multi_match: {
query: quote, query: quote,
type: 'phrase', type: 'phrase',
@@ -303,104 +244,84 @@ export default {
}) })
} }
// functions for boosting search rank by recency or popularity // if we search for an exact string only, everything must match
// so score purely on sort field
let boostMode = query ? 'multiply' : 'replace'
let sortField
let sortMod = 'log1p'
switch (sort) { switch (sort) {
case 'comments': case 'comments':
functions.push({ sortField = 'ncomments'
field_value_factor: { sortMod = 'square'
field: 'ncomments',
modifier: 'log1p'
}
})
break break
case 'sats': case 'sats':
functions.push({ sortField = 'sats'
field_value_factor: {
field: 'sats',
modifier: 'log1p'
}
})
break break
case 'recent': case 'recent':
functions.push({ sortField = 'createdAt'
gauss: { sortMod = 'square'
createdAt: { boostMode = 'replace'
origin: 'now',
scale: '7d',
decay: 0.5
}
}
})
break
case 'zaprank':
functions.push({
field_value_factor: {
field: 'wvotes',
modifier: 'log1p'
}
})
break break
default: default:
sortField = 'wvotes'
sortMod = 'none'
break break
} }
let osQuery = { const functions = [
function_score: { {
query: { field_value_factor: {
bool: { field: sortField,
filter: filters, modifier: sortMod,
should: termQueries, factor: 1.2
minimum_should_match: termQueries.length > 0 ? 1 : 0 }
}
},
functions,
score_mode: 'multiply',
boost_mode: 'multiply'
} }
]
if (sort === 'recent' && !isUrlSearch) {
// prioritize exact matches
termQueries.push({
multi_match: {
query,
type: 'phrase',
fields: ['title^100', 'text'],
boost: 1000
}
})
} else {
// allow fuzzy matching with partial matches
termQueries.push({
multi_match: {
query,
type: 'most_fields',
fields: ['title^100', 'text'],
fuzziness: 'AUTO',
prefix_length: 3,
minimum_should_match: (isUrlSearch) ? 1 : '60%'
}
})
functions.push({
// small bias toward posts with comments
field_value_factor: {
field: 'ncomments',
modifier: 'ln1p',
factor: 1
}
},
{
// small bias toward recent posts
field_value_factor: {
field: 'createdAt',
modifier: 'log1p',
factor: 1
}
})
} }
// query for search terms
if (query.length) { if (query.length) {
// keyword based subquery, to be used on its own or in conjunction with a neural // if we have a model id and we aren't sort by recent, use neural search
// search if (process.env.OPENSEARCH_MODEL_ID && sort !== 'recent') {
const subquery = [ termQueries = {
{
multi_match: {
query,
type: 'best_fields',
fields: ['title^10', 'text'],
fuzziness: 'AUTO',
minimum_should_match: 1
}
},
// all match matches higher
{
multi_match: {
query,
type: 'best_fields',
fields: ['title^10', 'text'],
minimum_should_match: '100%',
boost: 100
}
},
// phrase match matches higher
{
multi_match: {
query,
type: 'phrase',
fields: ['title^10', 'text'],
boost: 1000
}
}
]
osQuery.function_score.query.bool.should = [...termQueries, ...subquery]
osQuery.function_score.query.bool.minimum_should_match = 1
// use hybrid neural search if model id is available, otherwise use only
// keyword search
if (process.env.OPENSEARCH_MODEL_ID) {
osQuery = {
hybrid: { hybrid: {
queries: [ queries: [
{ {
@@ -424,18 +345,32 @@ export default {
} }
} }
} }
], ]
filter: filters,
minimum_should_match: 1
} }
}, },
osQuery {
bool: {
should: termQueries
}
}
] ]
} }
} }
} }
} else {
termQueries = []
} }
const whenRange = when === 'custom'
? {
gte: whenFrom,
lte: new Date(Math.min(new Date(Number(whenTo)), decodedCursor.time))
}
: {
lte: decodedCursor.time,
gte: whenToFrom(when)
}
try { try {
sitems = await search.search({ sitems = await search.search({
index: process.env.OPENSEARCH_INDEX, index: process.env.OPENSEARCH_INDEX,
@@ -449,7 +384,45 @@ export default {
}, },
from: decodedCursor.offset, from: decodedCursor.offset,
body: { body: {
query: osQuery, query: {
function_score: {
query: {
bool: {
must: termQueries,
filter: [
...whatArr,
me
? {
bool: {
should: [
{ match: { status: 'ACTIVE' } },
{ match: { status: 'NOSATS' } },
{ match: { userId: me.id } }
]
}
}
: {
bool: {
should: [
{ match: { status: 'ACTIVE' } },
{ match: { status: 'NOSATS' } }
]
}
},
{
range:
{
createdAt: whenRange
}
},
{ range: { wvotes: { gte: 0 } } }
]
}
},
functions,
boost_mode: boostMode
}
},
highlight: { highlight: {
fields: { fields: {
title: { number_of_fragments: 0, pre_tags: ['***'], post_tags: ['***'] }, title: { number_of_fragments: 0, pre_tags: ['***'], post_tags: ['***'] },
@@ -485,7 +458,7 @@ export default {
${SELECT}, rank ${SELECT}, rank
FROM "Item" FROM "Item"
JOIN r ON "Item".id = r.id`, JOIN r ON "Item".id = r.id`,
orderBy: 'ORDER BY rank ASC, msats DESC' orderBy: 'ORDER BY rank ASC'
})).map((item, i) => { })).map((item, i) => {
const e = sitems.body.hits.hits[i] const e = sitems.body.hits.hits[i]
item.searchTitle = (e.highlight?.title && e.highlight.title[0]) || item.title item.searchTitle = (e.highlight?.title && e.highlight.title[0]) || item.title
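
Note on the hunks above: after this change, the OpenSearch request is a single function_score query — the term queries go in must, the what/when/status constraints go in filter, and one field_value_factor function (chosen by the sort) scales or replaces the score. A trimmed, runnable sketch of that request body with sample values (the defaults shown — field wvotes, modifier 'none', factor 1.2, fields title^100 and text — come from the hunks above):

const termQueries = [{
  multi_match: {
    query: 'bitcoin',
    type: 'best_fields',
    fields: ['title^100', 'text'],
    minimum_should_match: '100%',
    boost: 1000
  }
}]
const body = {
  query: {
    function_score: {
      query: {
        bool: {
          must: termQueries,
          filter: [
            { bool: { should: [{ match: { status: 'ACTIVE' } }, { match: { status: 'NOSATS' } }] } },
            { range: { createdAt: { lte: new Date().toISOString() } } },
            { range: { wvotes: { gte: 0 } } }
          ]
        }
      },
      functions: [{ field_value_factor: { field: 'wvotes', modifier: 'none', factor: 1.2 } }],
      boost_mode: 'multiply' // 'replace' when sorting by recent or when there is no text query
    }
  }
}
console.log(JSON.stringify(body, null, 2))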

View File

@@ -189,5 +189,3 @@ Scroogey-SN,pr,#1948,#1849,medium,urgent,,,750k,Scroogey@coinos.io,2025-03-10
felipebueno,issue,#1947,#1945,good-first-issue,,,,2k,felipebueno@blink.sv,2025-03-10 felipebueno,issue,#1947,#1945,good-first-issue,,,,2k,felipebueno@blink.sv,2025-03-10
ed-kung,pr,#1952,#1951,easy,,,,100k,simplestacker@getalby.com,2025-03-10 ed-kung,pr,#1952,#1951,easy,,,,100k,simplestacker@getalby.com,2025-03-10
ed-kung,issue,#1952,#1951,easy,,,,10k,simplestacker@getalby.com,2025-03-10 ed-kung,issue,#1952,#1951,easy,,,,10k,simplestacker@getalby.com,2025-03-10
Scroogey-SN,pr,#1973,#1959,good-first-issue,,,,20k,Scroogey@coinos.io,???
benthecarman,issue,#1953,#1950,good-first-issue,,,,2k,???,???

(awards.csv columns: name, type, pr id, issue ids, difficulty, priority, changes requested, notes, amount, receive method, date paid)

View File

@@ -8,31 +8,40 @@ import { useQuery } from '@apollo/client'
import { UserListRow } from '@/components/user-list' import { UserListRow } from '@/components/user-list'
import Link from 'next/link' import Link from 'next/link'
import AddIcon from '@/svgs/add-fill.svg' import AddIcon from '@/svgs/add-fill.svg'
import { MultiAuthErrorBanner } from '@/components/banners'
const AccountContext = createContext() const AccountContext = createContext()
const CHECK_ERRORS_INTERVAL_MS = 5_000
const b64Decode = str => Buffer.from(str, 'base64').toString('utf-8') const b64Decode = str => Buffer.from(str, 'base64').toString('utf-8')
const b64Encode = obj => Buffer.from(JSON.stringify(obj)).toString('base64')
const maybeSecureCookie = cookie => { const maybeSecureCookie = cookie => {
return window.location.protocol === 'https:' ? cookie + '; Secure' : cookie return window.location.protocol === 'https:' ? cookie + '; Secure' : cookie
} }
export const AccountProvider = ({ children }) => { export const AccountProvider = ({ children }) => {
const { me } = useMe()
const [accounts, setAccounts] = useState([]) const [accounts, setAccounts] = useState([])
const [meAnon, setMeAnon] = useState(true) const [meAnon, setMeAnon] = useState(true)
const [errors, setErrors] = useState([])
const updateAccountsFromCookie = useCallback(() => { const updateAccountsFromCookie = useCallback(() => {
const { multi_auth: multiAuthCookie } = cookie.parse(document.cookie) try {
const accounts = multiAuthCookie const { multi_auth: multiAuthCookie } = cookie.parse(document.cookie)
? JSON.parse(b64Decode(multiAuthCookie)) const accounts = multiAuthCookie
: [] ? JSON.parse(b64Decode(multiAuthCookie))
setAccounts(accounts) : me ? [{ id: Number(me.id), name: me.name, photoId: me.photoId }] : []
setAccounts(accounts)
// required for backwards compatibility: sync cookie with accounts if no multi auth cookie exists
// this is the case for sessions that existed before we deployed account switching
if (!multiAuthCookie && !!me) {
document.cookie = maybeSecureCookie(`multi_auth=${b64Encode(accounts)}; Path=/`)
}
} catch (err) {
console.error('error parsing cookies:', err)
}
}, []) }, [])
useEffect(updateAccountsFromCookie, [])
const addAccount = useCallback(user => { const addAccount = useCallback(user => {
setAccounts(accounts => [...accounts, user]) setAccounts(accounts => [...accounts, user])
}, []) }, [])
@@ -41,7 +50,7 @@ export const AccountProvider = ({ children }) => {
setAccounts(accounts => accounts.filter(({ id }) => id !== userId)) setAccounts(accounts => accounts.filter(({ id }) => id !== userId))
}, []) }, [])
const nextAccount = useCallback(async () => { const multiAuthSignout = useCallback(async () => {
const { status } = await fetch('/api/next-account', { credentials: 'include' }) const { status } = await fetch('/api/next-account', { credentials: 'include' })
// if status is 302, this means the server was able to switch us to the next available account // if status is 302, this means the server was able to switch us to the next available account
// and the current account was simply removed from the list of available accounts including the corresponding JWT. // and the current account was simply removed from the list of available accounts including the corresponding JWT.
@@ -50,43 +59,15 @@ export const AccountProvider = ({ children }) => {
return switchSuccess return switchSuccess
}, [updateAccountsFromCookie]) }, [updateAccountsFromCookie])
const checkErrors = useCallback(() => {
const {
multi_auth: multiAuthCookie,
'multi_auth.user-id': multiAuthUserIdCookie
} = cookie.parse(document.cookie)
const errors = []
if (!multiAuthCookie) errors.push('multi_auth cookie not found')
if (!multiAuthUserIdCookie) errors.push('multi_auth.user-id cookie not found')
setErrors(errors)
}, [])
useEffect(() => { useEffect(() => {
if (SSR) return if (SSR) return
updateAccountsFromCookie()
const { 'multi_auth.user-id': multiAuthUserIdCookie } = cookie.parse(document.cookie) const { 'multi_auth.user-id': multiAuthUserIdCookie } = cookie.parse(document.cookie)
setMeAnon(multiAuthUserIdCookie === 'anonymous') setMeAnon(multiAuthUserIdCookie === 'anonymous')
}, [])
const interval = setInterval(checkErrors, CHECK_ERRORS_INTERVAL_MS)
return () => clearInterval(interval)
}, [updateAccountsFromCookie, checkErrors])
const value = useMemo( const value = useMemo(
() => ({ () => ({ accounts, addAccount, removeAccount, meAnon, setMeAnon, multiAuthSignout }),
accounts, [accounts, addAccount, removeAccount, meAnon, setMeAnon, multiAuthSignout])
addAccount,
removeAccount,
meAnon,
setMeAnon,
nextAccount,
multiAuthErrors: errors
}),
[accounts, addAccount, removeAccount, meAnon, setMeAnon, nextAccount, errors])
return <AccountContext.Provider value={value}>{children}</AccountContext.Provider> return <AccountContext.Provider value={value}>{children}</AccountContext.Provider>
} }
@@ -148,23 +129,9 @@ const AccountListRow = ({ account, ...props }) => {
} }
export default function SwitchAccountList () { export default function SwitchAccountList () {
const { accounts, multiAuthErrors } = useAccounts() const { accounts } = useAccounts()
const router = useRouter() const router = useRouter()
const hasError = multiAuthErrors.length > 0
if (hasError) {
return (
<>
<div className='my-2'>
<div className='d-flex flex-column flex-wrap mt-2 mb-3'>
<MultiAuthErrorBanner errors={multiAuthErrors} />
</div>
</div>
</>
)
}
// can't show hat since the streak is not included in the JWT payload // can't show hat since the streak is not included in the JWT payload
return ( return (
<> <>

View File

@@ -6,7 +6,6 @@ import { useMutation } from '@apollo/client'
import { WELCOME_BANNER_MUTATION } from '@/fragments/users' import { WELCOME_BANNER_MUTATION } from '@/fragments/users'
import { useToast } from '@/components/toast' import { useToast } from '@/components/toast'
import Link from 'next/link' import Link from 'next/link'
import AccordianItem from '@/components/accordian-item'
export function WelcomeBanner ({ Banner }) { export function WelcomeBanner ({ Banner }) {
const { me } = useMe() const { me } = useMe()
@@ -124,24 +123,3 @@ export function AuthBanner () {
</Alert> </Alert>
) )
} }
export function MultiAuthErrorBanner ({ errors }) {
return (
<Alert className={styles.banner} key='info' variant='danger'>
<div className='fw-bold mb-3'>Account switching is currently unavailable</div>
<AccordianItem
className='my-3'
header='We have detected the following issues:'
headerColor='var(--bs-danger-text-emphasis)'
body={
<ul>
{errors.map((err, i) => (
<li key={i}>{err}</li>
))}
</ul>
}
/>
<div className='mt-3'>To resolve these issues, please sign out and sign in again.</div>
</Alert>
)
}

View File

@@ -1,7 +1,7 @@
import { signIn } from 'next-auth/react' import { signIn } from 'next-auth/react'
import styles from './login.module.css' import styles from './login.module.css'
import { Form, Input, SubmitButton } from '@/components/form' import { Form, Input, SubmitButton } from '@/components/form'
import { useState, useEffect } from 'react' import { useState } from 'react'
import Alert from 'react-bootstrap/Alert' import Alert from 'react-bootstrap/Alert'
import { useRouter } from 'next/router' import { useRouter } from 'next/router'
import { LightningAuthWithExplainer } from './lightning-auth' import { LightningAuthWithExplainer } from './lightning-auth'
@@ -42,10 +42,10 @@ const authErrorMessages = {
OAuthCallback: 'Error handling OAuth response. Try again or choose a different method.', OAuthCallback: 'Error handling OAuth response. Try again or choose a different method.',
OAuthCreateAccount: 'Could not create OAuth account. Try again or choose a different method.', OAuthCreateAccount: 'Could not create OAuth account. Try again or choose a different method.',
EmailCreateAccount: 'Could not create Email account. Try again or choose a different method.', EmailCreateAccount: 'Could not create Email account. Try again or choose a different method.',
Callback: 'Could not authenticate. Try again or choose a different method.', Callback: 'Try again or choose a different method.',
OAuthAccountNotLinked: 'This auth method is linked to another account. To link to this account first unlink the other account.', OAuthAccountNotLinked: 'This auth method is linked to another account. To link to this account first unlink the other account.',
EmailSignin: 'Failed to send email. Make sure you entered your email address correctly.', EmailSignin: 'Failed to send email. Make sure you entered your email address correctly.',
CredentialsSignin: 'Could not authenticate. Try again or choose a different method.', CredentialsSignin: 'Auth failed. Try again or choose a different method.',
default: 'Auth failed. Try again or choose a different method.' default: 'Auth failed. Try again or choose a different method.'
} }
@@ -53,23 +53,10 @@ export function authErrorMessage (error) {
return error && (authErrorMessages[error] ?? authErrorMessages.default) return error && (authErrorMessages[error] ?? authErrorMessages.default)
} }
export default function Login ({ providers, callbackUrl, multiAuth, error, text, Header, Footer, signin }) { export default function Login ({ providers, callbackUrl, multiAuth, error, text, Header, Footer }) {
const [errorMessage, setErrorMessage] = useState(authErrorMessage(error)) const [errorMessage, setErrorMessage] = useState(authErrorMessage(error))
const router = useRouter() const router = useRouter()
// signup/signin awareness cookie
useEffect(() => {
const cookieOptions = [
`signin=${!!signin}`,
'path=/',
'max-age=' + (signin ? 60 * 60 * 24 : 0), // 24 hours if signin is true, expire the cookie otherwise
'SameSite=Lax',
process.env.NODE_ENV === 'production' ? 'Secure' : ''
].filter(Boolean).join(';')
document.cookie = cookieOptions
}, [signin])
if (router.query.type === 'lightning') { if (router.query.type === 'lightning') {
return <LightningAuthWithExplainer callbackUrl={callbackUrl} text={text} multiAuth={multiAuth} /> return <LightningAuthWithExplainer callbackUrl={callbackUrl} text={text} multiAuth={multiAuth} />
} }
@@ -125,7 +112,6 @@ export default function Login ({ providers, callbackUrl, multiAuth, error, text,
default: default:
return ( return (
<OverlayTrigger <OverlayTrigger
key={provider.id}
placement='bottom' placement='bottom'
overlay={multiAuth ? <Tooltip>not available for account switching yet</Tooltip> : <></>} overlay={multiAuth ? <Tooltip>not available for account switching yet</Tooltip> : <></>}
trigger={['hover', 'focus']} trigger={['hover', 'focus']}
@@ -133,6 +119,7 @@ export default function Login ({ providers, callbackUrl, multiAuth, error, text,
<div className='w-100'> <div className='w-100'>
<LoginButton <LoginButton
className={`mt-2 ${styles.providerButton}`} className={`mt-2 ${styles.providerButton}`}
key={provider.id}
type={provider.id.toLowerCase()} type={provider.id.toLowerCase()}
onClick={() => signIn(provider.id, { callbackUrl, multiAuth })} onClick={() => signIn(provider.id, { callbackUrl, multiAuth })}
text={`${text || 'Login'} with`} text={`${text || 'Login'} with`}

View File

@@ -223,9 +223,6 @@ export function MeDropdown ({ me, dropNavKey }) {
) )
} }
// this is the width of the 'switch account' button if no width is given
const SWITCH_ACCOUNT_BUTTON_WIDTH = '162px'
export function SignUpButton ({ className = 'py-0', width }) { export function SignUpButton ({ className = 'py-0', width }) {
const router = useRouter() const router = useRouter()
const handleLogin = useCallback(async pathname => await router.push({ const handleLogin = useCallback(async pathname => await router.push({
@@ -236,8 +233,7 @@ export function SignUpButton ({ className = 'py-0', width }) {
return ( return (
<Button <Button
className={classNames('align-items-center ps-2 pe-3', className)} className={classNames('align-items-center ps-2 pe-3', className)}
// 161px is the width of the 'switch account' button style={{ borderWidth: '2px', width: width || '150px' }}
style={{ borderWidth: '2px', width: width || SWITCH_ACCOUNT_BUTTON_WIDTH }}
id='signup' id='signup'
onClick={() => handleLogin('/signup')} onClick={() => handleLogin('/signup')}
> >
@@ -261,7 +257,7 @@ export default function LoginButton () {
<Button <Button
className='align-items-center px-3 py-1' className='align-items-center px-3 py-1'
id='login' id='login'
style={{ borderWidth: '2px', width: SWITCH_ACCOUNT_BUTTON_WIDTH }} style={{ borderWidth: '2px', width: '150px' }}
variant='outline-grey-darkmode' variant='outline-grey-darkmode'
onClick={() => handleLogin('/login')} onClick={() => handleLogin('/login')}
> >
@@ -273,7 +269,7 @@ export default function LoginButton () {
function LogoutObstacle ({ onClose }) { function LogoutObstacle ({ onClose }) {
const { registration: swRegistration, togglePushSubscription } = useServiceWorker() const { registration: swRegistration, togglePushSubscription } = useServiceWorker()
const { removeLocalWallets } = useWallets() const { removeLocalWallets } = useWallets()
const { nextAccount } = useAccounts() const { multiAuthSignout } = useAccounts()
const router = useRouter() const router = useRouter()
return ( return (
@@ -289,9 +285,9 @@ function LogoutObstacle ({ onClose }) {
</Button> </Button>
<Button <Button
onClick={async () => { onClick={async () => {
const next = await nextAccount() const switchSuccess = await multiAuthSignout()
// only signout if we did not find a next account // only signout if multiAuth did not find a next available account
if (next) { if (switchSuccess) {
onClose() onClose()
// reload whatever page we're on to avoid any bugs // reload whatever page we're on to avoid any bugs
router.reload() router.reload()
@@ -348,7 +344,7 @@ function SwitchAccountButton ({ handleClose }) {
<Button <Button
className='align-items-center px-3 py-1' className='align-items-center px-3 py-1'
variant='outline-grey-darkmode' variant='outline-grey-darkmode'
style={{ borderWidth: '2px', width: SWITCH_ACCOUNT_BUTTON_WIDTH }} style={{ borderWidth: '2px', width: '150px' }}
onClick={() => { onClick={() => {
// login buttons rendered in offcanvas aren't wrapped inside <Dropdown> // login buttons rendered in offcanvas aren't wrapped inside <Dropdown>
// so we manually close the offcanvas in that case by passing down handleClose here // so we manually close the offcanvas in that case by passing down handleClose here

View File

@@ -72,10 +72,7 @@ export default function PullToRefresh ({ children, className }) {
onTouchMove={handleTouchMove} onTouchMove={handleTouchMove}
onTouchEnd={handleTouchEnd} onTouchEnd={handleTouchEnd}
> >
<p <p className={`${styles.pullMessage}`} style={{ top: `${Math.max(-20, Math.min(-20 + pullDistance / 2, 5))}px` }}>
className={`${styles.pullMessage}`}
style={{ opacity: pullDistance > 0 ? 1 : 0, top: `${Math.max(-20, Math.min(-20 + pullDistance / 2, 5))}px` }}
>
{pullMessage} {pullMessage}
</p> </p>
{children} {children}

View File

@@ -36,7 +36,7 @@ export default function Search ({ sub }) {
} }
if (values.what === '' || values.what === 'all') delete values.what if (values.what === '' || values.what === 'all') delete values.what
if (values.sort === '' || values.sort === 'relevance') delete values.sort if (values.sort === '' || values.sort === 'zaprank') delete values.sort
if (values.when === '' || values.when === 'forever') delete values.when if (values.when === '' || values.when === 'forever') delete values.when
if (values.when !== 'custom') { delete values.from; delete values.to } if (values.when !== 'custom') { delete values.from; delete values.to }
if (values.from && !values.to) return if (values.from && !values.to) return
@@ -50,7 +50,7 @@
const filter = sub !== 'jobs' const filter = sub !== 'jobs'
const what = router.pathname.startsWith('/stackers') ? 'stackers' : router.query.what || 'all' const what = router.pathname.startsWith('/stackers') ? 'stackers' : router.query.what || 'all'
const sort = router.query.sort || 'relevance' const sort = router.query.sort || 'zaprank'
const when = router.query.when || 'forever' const when = router.query.when || 'forever'
const whatItemOptions = useMemo(() => (['all', 'posts', 'comments', me ? 'bookmarks' : undefined, 'stackers'].filter(item => !!item)), [me]) const whatItemOptions = useMemo(() => (['all', 'posts', 'comments', me ? 'bookmarks' : undefined, 'stackers'].filter(item => !!item)), [me])
@@ -100,7 +100,7 @@
name='sort' name='sort'
size='sm' size='sm'
overrideValue={sort} overrideValue={sort}
items={['relevance', 'zaprank', 'recent', 'comments', 'sats']} items={['zaprank', 'recent', 'comments', 'sats']}
/> />
for for
<Select <Select

View File

@@ -94,19 +94,8 @@ export default function SubSelect ({ prependSubs, sub, onChange, size, appendSub
} }
} else { } else {
// we're currently on the home sub // we're currently on the home sub
// if in /top/cowboys, /top/territories, or /top/stackers // are we in a sub aware route?
// and a territory is selected, go to /~sub/top/posts/day if (router.pathname.startsWith('/~')) {
if (router.pathname.startsWith('/~/top/cowboys')) {
router.push(sub ? `/~${sub}/top/posts/day` : '/top/cowboys')
return
} else if (router.pathname.startsWith('/~/top/stackers')) {
router.push(sub ? `/~${sub}/top/posts/day` : 'top/stackers/day')
return
} else if (router.pathname.startsWith('/~/top/territories')) {
router.push(sub ? `/~${sub}/top/posts/day` : '/top/territories/day')
return
} else if (router.pathname.startsWith('/~')) {
// are we in a sub aware route?
// if we are, go to the same path but in the sub // if we are, go to the same path but in the sub
asPath = `/~${sub}` + router.asPath asPath = `/~${sub}` + router.asPath
} else { } else {

View File

@@ -4,7 +4,7 @@ import Image from 'react-bootstrap/Image'
import Link from 'next/link' import Link from 'next/link'
import { useRouter } from 'next/router' import { useRouter } from 'next/router'
import Nav from 'react-bootstrap/Nav' import Nav from 'react-bootstrap/Nav'
import { useState, useEffect } from 'react' import { useState } from 'react'
import { Form, Input, SubmitButton } from './form' import { Form, Input, SubmitButton } from './form'
import { gql, useApolloClient, useMutation } from '@apollo/client' import { gql, useApolloClient, useMutation } from '@apollo/client'
import styles from './user-header.module.css' import styles from './user-header.module.css'
@@ -199,14 +199,8 @@ export function NymActionDropdown ({ user, className = 'ms-2' }) {
} }
function HeaderNym ({ user, isMe }) { function HeaderNym ({ user, isMe }) {
const router = useRouter()
const [editting, setEditting] = useState(false) const [editting, setEditting] = useState(false)
// if route changes, reset editting state
useEffect(() => {
setEditting(false)
}, [router.asPath])
return editting return editting
? <NymEdit user={user} setEditting={setEditting} /> ? <NymEdit user={user} setEditting={setEditting} />
: <NymView user={user} isMe={isMe} setEditting={setEditting} /> : <NymView user={user} isMe={isMe} setEditting={setEditting} />

View File

@@ -62,11 +62,7 @@ function DeleteWalletLogsObstacle ({ wallet, setLogs, onClose }) {
const { deleteLogs } = useWalletLogManager(setLogs) const { deleteLogs } = useWalletLogManager(setLogs)
const toaster = useToast() const toaster = useToast()
let prompt = 'Do you really want to delete all wallet logs?' const prompt = `Do you really want to delete all ${wallet ? '' : 'wallet'} logs ${wallet ? 'of this wallet' : ''}?`
if (wallet) {
prompt = 'Do you really want to delete all logs of this wallet?'
}
return ( return (
<div className='text-center'> <div className='text-center'>
{prompt} {prompt}

View File

@@ -163,7 +163,7 @@ services:
- "CONNECT=localhost:4566" - "CONNECT=localhost:4566"
cpu_shares: "${CPU_SHARES_LOW}" cpu_shares: "${CPU_SHARES_LOW}"
opensearch: opensearch:
image: opensearchproject/opensearch:2.17.0 image: opensearchproject/opensearch:2.12.0
container_name: opensearch container_name: opensearch
profiles: profiles:
- search - search
@@ -203,7 +203,7 @@
' '
cpu_shares: "${CPU_SHARES_LOW}" cpu_shares: "${CPU_SHARES_LOW}"
os-dashboard: os-dashboard:
image: opensearchproject/opensearch-dashboards:2.17.0 image: opensearchproject/opensearch-dashboards:2.12.0
container_name: os-dashboard container_name: os-dashboard
restart: unless-stopped restart: unless-stopped
profiles: profiles:

View File

@@ -1,161 +0,0 @@
import { datePivot } from '@/lib/time'
import * as cookie from 'cookie'
import { NodeNextRequest } from 'next/dist/server/base-http/node'
import { encode as encodeJWT, decode as decodeJWT } from 'next-auth/jwt'
const b64Encode = obj => Buffer.from(JSON.stringify(obj)).toString('base64')
const b64Decode = s => JSON.parse(Buffer.from(s, 'base64'))
const userJwtRegexp = /^multi_auth\.\d+$/
const HTTPS = process.env.NODE_ENV === 'production'
const SESSION_COOKIE_NAME = HTTPS ? '__Secure-next-auth.session-token' : 'next-auth.session-token'
const cookieOptions = (args) => ({
path: '/',
secure: process.env.NODE_ENV === 'production',
// httpOnly cookies by default
httpOnly: true,
sameSite: 'lax',
// default expiration for next-auth JWTs is in 30 days
expires: datePivot(new Date(), { days: 30 }),
...args
})
export function setMultiAuthCookies (req, res, { id, jwt, name, photoId }) {
const httpOnlyOptions = cookieOptions()
const jsOptions = { ...httpOnlyOptions, httpOnly: false }
// add JWT to **httpOnly** cookie
res.appendHeader('Set-Cookie', cookie.serialize(`multi_auth.${id}`, jwt, httpOnlyOptions))
// switch to user we just added
res.appendHeader('Set-Cookie', cookie.serialize('multi_auth.user-id', id, jsOptions))
let newMultiAuth = [{ id, name, photoId }]
if (req.cookies.multi_auth) {
const oldMultiAuth = b64Decode(req.cookies.multi_auth)
// make sure we don't add duplicates
if (oldMultiAuth.some(({ id: id_ }) => id_ === id)) return
newMultiAuth = [...oldMultiAuth, ...newMultiAuth]
}
res.appendHeader('Set-Cookie', cookie.serialize('multi_auth', b64Encode(newMultiAuth), jsOptions))
}
export function switchSessionCookie (request) {
// switch next-auth session cookie with multi_auth cookie if cookie pointer present
// is there a cookie pointer?
const cookiePointerName = 'multi_auth.user-id'
const hasCookiePointer = !!request.cookies[cookiePointerName]
// is there a session?
const hasSession = !!request.cookies[SESSION_COOKIE_NAME]
if (!hasCookiePointer || !hasSession) {
// no session or no cookie pointer. do nothing.
return request
}
const userId = request.cookies[cookiePointerName]
if (userId === 'anonymous') {
// user switched to anon. only delete session cookie.
delete request.cookies[SESSION_COOKIE_NAME]
return request
}
const userJWT = request.cookies[`multi_auth.${userId}`]
if (!userJWT) {
// no JWT for account switching found
return request
}
if (userJWT) {
// use JWT found in cookie pointed to by cookie pointer
request.cookies[SESSION_COOKIE_NAME] = userJWT
return request
}
return request
}
export function checkMultiAuthCookies (req, res) {
if (!req.cookies.multi_auth || !req.cookies['multi_auth.user-id']) {
return false
}
const accounts = b64Decode(req.cookies.multi_auth)
for (const account of accounts) {
if (!req.cookies[`multi_auth.${account.id}`]) {
return false
}
}
return true
}
export function resetMultiAuthCookies (req, res) {
const httpOnlyOptions = cookieOptions({ expires: 0, maxAge: 0 })
const jsOptions = { ...httpOnlyOptions, httpOnly: false }
if ('multi_auth' in req.cookies) res.appendHeader('Set-Cookie', cookie.serialize('multi_auth', '', jsOptions))
if ('multi_auth.user-id' in req.cookies) res.appendHeader('Set-Cookie', cookie.serialize('multi_auth.user-id', '', jsOptions))
for (const key of Object.keys(req.cookies)) {
// reset all user JWTs
if (userJwtRegexp.test(key)) {
res.appendHeader('Set-Cookie', cookie.serialize(key, '', httpOnlyOptions))
}
}
}
export async function refreshMultiAuthCookies (req, res) {
const httpOnlyOptions = cookieOptions()
const jsOptions = { ...httpOnlyOptions, httpOnly: false }
const refreshCookie = (name) => {
res.appendHeader('Set-Cookie', cookie.serialize(name, req.cookies[name], jsOptions))
}
const refreshToken = async (token) => {
const secret = process.env.NEXTAUTH_SECRET
return await encodeJWT({
token: await decodeJWT({ token, secret }),
secret
})
}
const isAnon = req.cookies['multi_auth.user-id'] === 'anonymous'
for (const [key, value] of Object.entries(req.cookies)) {
// only refresh session cookie manually if we switched to anon since else it's already handled by next-auth
if (key === SESSION_COOKIE_NAME && !isAnon) continue
if (!key.startsWith('multi_auth') && key !== SESSION_COOKIE_NAME) continue
if (userJwtRegexp.test(key) || key === SESSION_COOKIE_NAME) {
const oldToken = value
const newToken = await refreshToken(oldToken)
res.appendHeader('Set-Cookie', cookie.serialize(key, newToken, httpOnlyOptions))
continue
}
refreshCookie(key)
}
}
export async function multiAuthMiddleware (req, res) {
if (!req.cookies) {
// required to properly access parsed cookies via req.cookies and not unparsed via req.headers.cookie
req = new NodeNextRequest(req)
}
const ok = checkMultiAuthCookies(req, res)
if (!ok) {
resetMultiAuthCookies(req, res)
return switchSessionCookie(req)
}
await refreshMultiAuthCookies(req, res)
return switchSessionCookie(req)
}
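
Note on the file above: both sides of this diff keep the same cookie layout for account switching — multi_auth holds a base64-encoded JSON array of { id, name, photoId }, multi_auth.user-id points at the active account, and each multi_auth.<id> cookie holds that account's JWT (httpOnly). A standalone JavaScript sketch of the decode step performed above (the cookie values here are fabricated):

const b64Encode = obj => Buffer.from(JSON.stringify(obj)).toString('base64')
const b64Decode = s => JSON.parse(Buffer.from(s, 'base64'))

// stand-in for req.cookies after parsing
const cookies = {
  multi_auth: b64Encode([{ id: 616, name: 'k00b', photoId: null }]),
  'multi_auth.user-id': '616'
}
const accounts = cookies.multi_auth ? b64Decode(cookies.multi_auth) : []
const activeId = cookies['multi_auth.user-id'] // 'anonymous' means: drop the session cookie
console.log(accounts, activeId)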

View File

@@ -23,11 +23,11 @@ export function timeSince (timeStamp) {
} }
export function datePivot (date, export function datePivot (date,
{ years = 0, months = 0, weeks = 0, days = 0, hours = 0, minutes = 0, seconds = 0, milliseconds = 0 }) { { years = 0, months = 0, days = 0, hours = 0, minutes = 0, seconds = 0, milliseconds = 0 }) {
return new Date( return new Date(
date.getFullYear() + years, date.getFullYear() + years,
date.getMonth() + months, date.getMonth() + months,
date.getDate() + days + weeks * 7, date.getDate() + days,
date.getHours() + hours, date.getHours() + hours,
date.getMinutes() + minutes, date.getMinutes() + minutes,
date.getSeconds() + seconds, date.getSeconds() + seconds,
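
Note on the hunk above: datePivot shifts each date component by the given offsets; the right-hand side drops the weeks option (previously folded into days as weeks * 7), so callers would express weeks as days instead. A quick usage illustration, assuming the '@/lib/time' import path used elsewhere in this diff:

import { datePivot } from '@/lib/time'

// one-month pivot, as used for the multi_auth JWT cookie expiry later in this diff
const expiresAt = datePivot(new Date(), { months: 1 })
// what a one-week pivot looks like without the removed weeks option
const nextWeek = datePivot(new Date(), { days: 7 })
console.log(expiresAt, nextWeek)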

View File

@@ -194,21 +194,6 @@ module.exports = withPlausibleProxy()({
source: '/top/cowboys/:when', source: '/top/cowboys/:when',
destination: '/top/cowboys', destination: '/top/cowboys',
permanent: true permanent: true
},
{
source: '/~:sub/top/cowboys',
destination: '/top/cowboys',
permanent: true
},
{
source: '/~:sub/top/stackers/:when*',
destination: '/top/stackers/:when*',
permanent: true
},
{
source: '/~:sub/top/territories/:when*',
destination: '/top/territories/:when*',
permanent: true
} }
] ]
}, },

View File

@ -8,13 +8,13 @@ import prisma from '@/api/models'
import nodemailer from 'nodemailer' import nodemailer from 'nodemailer'
import { PrismaAdapter } from '@auth/prisma-adapter' import { PrismaAdapter } from '@auth/prisma-adapter'
import { getToken, encode as encodeJWT } from 'next-auth/jwt' import { getToken, encode as encodeJWT } from 'next-auth/jwt'
import { datePivot } from '@/lib/time'
import { schnorr } from '@noble/curves/secp256k1' import { schnorr } from '@noble/curves/secp256k1'
import { notifyReferral } from '@/lib/webPush' import { notifyReferral } from '@/lib/webPush'
import { hashEmail } from '@/lib/crypto' import { hashEmail } from '@/lib/crypto'
import { multiAuthMiddleware, setMultiAuthCookies } from '@/lib/auth'
import { BECH32_CHARSET } from '@/lib/constants'
import { NodeNextRequest } from 'next/dist/server/base-http/node'
import * as cookie from 'cookie' import * as cookie from 'cookie'
import { multiAuthMiddleware } from '@/pages/api/graphql'
import { BECH32_CHARSET } from '@/lib/constants'
/** /**
* Stores userIds in user table * Stores userIds in user table
@ -94,8 +94,6 @@ function getCallbacks (req, res) {
*/ */
async jwt ({ token, user, account, profile, isNewUser }) { async jwt ({ token, user, account, profile, isNewUser }) {
if (user) { if (user) {
// reset signin cookie if any
res.appendHeader('Set-Cookie', cookie.serialize('signin', '', { path: '/', expires: 0, maxAge: 0 }))
// token won't have an id on it for new logins, we add it // token won't have an id on it for new logins, we add it
// note: token is what's kept in the jwt // note: token is what's kept in the jwt
token.id = Number(user.id) token.id = Number(user.id)
@ -126,8 +124,8 @@ function getCallbacks (req, res) {
token.sub = Number(token.id) token.sub = Number(token.id)
} }
// add multi_auth cookie for user that just logged in
if (user && req && res) { if (user && req && res) {
// add multi_auth cookie for user that just logged in
const secret = process.env.NEXTAUTH_SECRET const secret = process.env.NEXTAUTH_SECRET
const jwt = await encodeJWT({ token, secret }) const jwt = await encodeJWT({ token, secret })
const me = await prisma.user.findUnique({ where: { id: token.id } }) const me = await prisma.user.findUnique({ where: { id: token.id } })
@ -146,6 +144,37 @@ function getCallbacks (req, res) {
} }
} }
function setMultiAuthCookies (req, res, { id, jwt, name, photoId }) {
const b64Encode = obj => Buffer.from(JSON.stringify(obj)).toString('base64')
const b64Decode = s => JSON.parse(Buffer.from(s, 'base64'))
// default expiration for next-auth JWTs is in 1 month
const expiresAt = datePivot(new Date(), { months: 1 })
const secure = process.env.NODE_ENV === 'production'
const cookieOptions = {
path: '/',
httpOnly: true,
secure,
sameSite: 'lax',
expires: expiresAt
}
// add JWT to **httpOnly** cookie
res.appendHeader('Set-Cookie', cookie.serialize(`multi_auth.${id}`, jwt, cookieOptions))
// switch to user we just added
res.appendHeader('Set-Cookie', cookie.serialize('multi_auth.user-id', id, { ...cookieOptions, httpOnly: false }))
let newMultiAuth = [{ id, name, photoId }]
if (req.cookies.multi_auth) {
const oldMultiAuth = b64Decode(req.cookies.multi_auth)
// make sure we don't add duplicates
if (oldMultiAuth.some(({ id: id_ }) => id_ === id)) return
newMultiAuth = [...oldMultiAuth, ...newMultiAuth]
}
res.appendHeader('Set-Cookie', cookie.serialize('multi_auth', b64Encode(newMultiAuth), { ...cookieOptions, httpOnly: false }))
}
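
For orientation, a sketch of the cookies setMultiAuthCookies leaves behind and how the client-readable account list decodes; the ids, names, and values are illustrative, not taken from this diff.

// multi_auth.<id>     JWT for that account                                   (httpOnly)
// multi_auth.user-id  id of the currently selected account, or 'anonymous'   (readable by JS)
// multi_auth          base64-encoded JSON list of accounts                   (readable by JS)
const accounts = JSON.parse(Buffer.from(req.cookies.multi_auth, 'base64'))
// e.g. [{ id: 1001, name: 'alice', photoId: null }, { id: 1002, name: 'bob', photoId: 7 }]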
async function pubkeyAuth (credentials, req, res, pubkeyColumnName) { async function pubkeyAuth (credentials, req, res, pubkeyColumnName) {
const { k1, pubkey } = credentials const { k1, pubkey } = credentials
@ -165,7 +194,7 @@ async function pubkeyAuth (credentials, req, res, pubkeyColumnName) {
let user = await prisma.user.findUnique({ where: { [pubkeyColumnName]: pubkey } }) let user = await prisma.user.findUnique({ where: { [pubkeyColumnName]: pubkey } })
// make following code aware of cookie pointer for account switching // make following code aware of cookie pointer for account switching
req = await multiAuthMiddleware(req, res) req = multiAuthMiddleware(req)
// token will be undefined if we're not logged in at all or if we switched to anon // token will be undefined if we're not logged in at all or if we switched to anon
const token = await getToken({ req }) const token = await getToken({ req })
if (!user) { if (!user) {
@ -176,8 +205,7 @@ async function pubkeyAuth (credentials, req, res, pubkeyColumnName) {
if (token?.id && !multiAuth) { if (token?.id && !multiAuth) {
user = await prisma.user.update({ where: { id: token.id }, data: { [pubkeyColumnName]: pubkey } }) user = await prisma.user.update({ where: { id: token.id }, data: { [pubkeyColumnName]: pubkey } })
} else { } else {
// create a new user only if we're trying to sign up // we're not logged in: create new user with that pubkey
if (new NodeNextRequest(req).cookies.signin) return null
user = await prisma.user.create({ data: { name: pubkey.slice(0, 10), [pubkeyColumnName]: pubkey } }) user = await prisma.user.create({ data: { name: pubkey.slice(0, 10), [pubkeyColumnName]: pubkey } })
} }
} }
@ -286,7 +314,6 @@ export const getAuthOptions = (req, res) => ({
adapter: { adapter: {
...PrismaAdapter(prisma), ...PrismaAdapter(prisma),
createUser: data => { createUser: data => {
if (req.cookies.signin) return null
// replace email with email hash in new user payload // replace email with email hash in new user payload
if (data.email) { if (data.email) {
const { email } = data const { email } = data
@ -727,7 +754,7 @@ const newUserHtml = ({ url, token, site, email }) => {
<tbody> <tbody>
<tr> <tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;"> <td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div style="font-family:Helvetica, Arial, sans-serif;font-size:14px;line-height:20px;text-align:left;color:#000000;">Stacker News is like Reddit or Hacker News, but it <b>pays you Bitcoin</b>. Instead of giving posts or comments "upvotes," Stacker News users (aka stackers) send you small amounts of Bitcoin called sats.</div> <div style="font-family:Helvetica, Arial, sans-serif;font-size:14px;line-height:20px;text-align:left;color:#000000;">Stacker News is like Reddit or Hacker News, but it <b>pays you Bitcoin</b>. Instead of giving posts or comments upvotes, Stacker News users (aka stackers) send you small amounts of Bitcoin called sats.</div>
</td> </td>
</tr> </tr>
<tr> <tr>
@ -742,7 +769,7 @@ const newUserHtml = ({ url, token, site, email }) => {
</tr> </tr>
<tr> <tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;"> <td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div style="font-family:Helvetica, Arial, sans-serif;font-size:14px;line-height:20px;text-align:left;color:#000000;">If you're not sure what to share, <a href="${dailyUrl}"><b><i>click here to introduce yourself to the community</i></b></a> with a comment on the daily discussion thread.</div> <div style="font-family:Helvetica, Arial, sans-serif;font-size:14px;line-height:20px;text-align:left;color:#000000;">If youre not sure what to share, <a href="${dailyUrl}"><b><i>click here to introduce yourself to the community</i></b></a> with a comment on the daily discussion thread.</div>
</td> </td>
</tr> </tr>
<tr> <tr>
@ -752,7 +779,7 @@ const newUserHtml = ({ url, token, site, email }) => {
</tr> </tr>
<tr> <tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;"> <td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div style="font-family:Helvetica, Arial, sans-serif;font-size:14px;line-height:20px;text-align:left;color:#000000;">If anything isn't clear, comment on the FAQ post and we'll answer your question.</div> <div style="font-family:Helvetica, Arial, sans-serif;font-size:14px;line-height:20px;text-align:left;color:#000000;">If anything isnt clear, comment on the FAQ post and well answer your question.</div>
</td> </td>
</tr> </tr>
<tr> <tr>

View File

@ -11,7 +11,7 @@ import {
ApolloServerPluginLandingPageLocalDefault, ApolloServerPluginLandingPageLocalDefault,
ApolloServerPluginLandingPageProductionDefault ApolloServerPluginLandingPageProductionDefault
} from '@apollo/server/plugin/landingPage/default' } from '@apollo/server/plugin/landingPage/default'
import { multiAuthMiddleware } from '@/lib/auth' import { NodeNextRequest } from 'next/dist/server/base-http/node'
const apolloServer = new ApolloServer({ const apolloServer = new ApolloServer({
typeDefs, typeDefs,
@ -68,7 +68,7 @@ export default startServerAndCreateNextHandler(apolloServer, {
session = { user: { ...sessionFields, apiKey: true } } session = { user: { ...sessionFields, apiKey: true } }
} }
} else { } else {
req = await multiAuthMiddleware(req, res) req = multiAuthMiddleware(req)
session = await getServerSession(req, res, getAuthOptions(req)) session = await getServerSession(req, res, getAuthOptions(req))
} }
return { return {
@ -82,3 +82,49 @@ export default startServerAndCreateNextHandler(apolloServer, {
} }
} }
}) })
export function multiAuthMiddleware (request) {
// switch next-auth session cookie with multi_auth cookie if cookie pointer present
if (!request.cookies) {
// required to properly access parsed cookies via request.cookies
// and not unparsed via request.headers.cookie
request = new NodeNextRequest(request)
}
// is there a cookie pointer?
const cookiePointerName = 'multi_auth.user-id'
const hasCookiePointer = !!request.cookies[cookiePointerName]
const secure = process.env.NODE_ENV === 'production'
// is there a session?
const sessionCookieName = secure ? '__Secure-next-auth.session-token' : 'next-auth.session-token'
const hasSession = !!request.cookies[sessionCookieName]
if (!hasCookiePointer || !hasSession) {
// no session or no cookie pointer. do nothing.
return request
}
const userId = request.cookies[cookiePointerName]
if (userId === 'anonymous') {
// user switched to anon. only delete session cookie.
delete request.cookies[sessionCookieName]
return request
}
const userJWT = request.cookies[`multi_auth.${userId}`]
if (!userJWT) {
// no JWT for account switching found
return request
}
if (userJWT) {
// use JWT found in cookie pointed to by cookie pointer
request.cookies[sessionCookieName] = userJWT
return request
}
return request
}

View File

@ -81,7 +81,6 @@ export default function LoginPage (props) {
<Login <Login
Footer={() => <LoginFooter callbackUrl={props.callbackUrl} />} Footer={() => <LoginFooter callbackUrl={props.callbackUrl} />}
Header={() => <LoginHeader />} Header={() => <LoginHeader />}
signin
{...props} {...props}
/> />
</StaticLayout> </StaticLayout>

View File

@ -131,6 +131,8 @@ function Enabled ({ setVaultKey, clearVault }) {
placeholder='' placeholder=''
required required
autoFocus autoFocus
as='textarea'
rows={3}
qr qr
/> />
<div className='mt-3'> <div className='mt-3'>

View File

@ -1,109 +0,0 @@
-- AlterTable
ALTER TABLE "Item" ADD COLUMN "subWeightedDownVotes" FLOAT NOT NULL DEFAULT 0,
ADD COLUMN "subWeightedVotes" FLOAT NOT NULL DEFAULT 0;
CREATE INDEX "Item.sumSubVotes_index" ON "Item"(("subWeightedVotes" - "subWeightedDownVotes"));
-- CreateTable
CREATE TABLE "UserSubTrust" (
"subName" CITEXT NOT NULL,
"userId" INTEGER NOT NULL,
"zapPostTrust" DOUBLE PRECISION NOT NULL DEFAULT 0,
"subZapPostTrust" DOUBLE PRECISION NOT NULL DEFAULT 0,
"zapCommentTrust" DOUBLE PRECISION NOT NULL DEFAULT 0,
"subZapCommentTrust" DOUBLE PRECISION NOT NULL DEFAULT 0,
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "UserSubTrust_pkey" PRIMARY KEY ("userId","subName")
);
-- AddForeignKey
ALTER TABLE "UserSubTrust" ADD CONSTRAINT "UserSubTrust_userId_fkey" FOREIGN KEY ("userId") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "UserSubTrust" ADD CONSTRAINT "UserSubTrust_subName_fkey" FOREIGN KEY ("subName") REFERENCES "Sub"("name") ON DELETE CASCADE ON UPDATE CASCADE;
-- UserSubTrust is NOT populated, so this is a no-op ... but it's useful to have it written out for migrating manually on deployment
UPDATE "Item"
SET "subWeightedVotes" = subquery."subWeightedVotes",
"subWeightedDownVotes" = subquery."subWeightedDownVotes",
"weightedVotes" = subquery."weightedVotes",
"weightedDownVotes" = subquery."weightedDownVotes"
FROM (
WITH sub_votes AS (
SELECT "ItemAct"."itemId",
CASE WHEN (SUM("ItemAct"."msats") FILTER (WHERE "ItemAct"."act" IN ('TIP', 'FEE')))::FLOAT > 0
THEN COALESCE(
LOG(
(SUM("ItemAct"."msats") FILTER (WHERE "ItemAct"."act" IN ('TIP', 'FEE')))::FLOAT / 1000
) * CASE
WHEN "Item"."parentId" IS NULL
THEN "UserSubTrust"."subZapPostTrust"
ELSE "UserSubTrust"."subZapCommentTrust"
END, 0)
ELSE 0
END AS "subWeightedVotes",
CASE WHEN (SUM("ItemAct"."msats") FILTER (WHERE "ItemAct"."act" IN ('TIP', 'FEE')))::FLOAT > 0
THEN COALESCE(
LOG(
(SUM("ItemAct"."msats") FILTER (WHERE "ItemAct"."act" IN ('TIP', 'FEE')))::FLOAT / 1000
) * CASE
WHEN "Item"."parentId" IS NULL
THEN "UserSubTrust"."zapPostTrust"
ELSE "UserSubTrust"."zapCommentTrust"
END, 0)
ELSE 0
END AS "weightedVotes",
CASE WHEN (SUM("ItemAct"."msats") FILTER (WHERE "ItemAct"."act" IN ('DONT_LIKE_THIS')))::FLOAT > 0
THEN COALESCE(
LOG(
(SUM("ItemAct"."msats") FILTER (WHERE "ItemAct"."act" IN ('DONT_LIKE_THIS')))::FLOAT / 1000
) * CASE
WHEN "Item"."parentId" IS NULL
THEN "UserSubTrust"."subZapPostTrust"
ELSE "UserSubTrust"."subZapCommentTrust"
END, 0)
ELSE 0
END AS "subWeightedDownVotes",
CASE WHEN (SUM("ItemAct"."msats") FILTER (WHERE "ItemAct"."act" IN ('DONT_LIKE_THIS')))::FLOAT > 0
THEN COALESCE(
LOG(
(SUM("ItemAct"."msats") FILTER (WHERE "ItemAct"."act" IN ('DONT_LIKE_THIS')))::FLOAT / 1000
) * CASE
WHEN "Item"."parentId" IS NULL
THEN "UserSubTrust"."zapPostTrust"
ELSE "UserSubTrust"."zapCommentTrust"
END, 0)
ELSE 0
END AS "weightedDownVotes"
FROM "ItemAct"
JOIN "UserSubTrust" ON "ItemAct"."userId" = "UserSubTrust"."userId"
JOIN "Item" ON "Item".id = "ItemAct"."itemId"
AND "UserSubTrust"."subName" = "Item"."subName"
AND "Item"."userId" <> "ItemAct"."userId"
WHERE "ItemAct".act IN ('TIP', 'FEE', 'DONT_LIKE_THIS')
GROUP BY "ItemAct"."itemId", "ItemAct"."userId", "UserSubTrust"."subZapPostTrust",
"UserSubTrust"."subZapCommentTrust", "UserSubTrust"."zapPostTrust", "UserSubTrust"."zapCommentTrust",
"Item"."parentId"
)
SELECT "itemId", SUM("subWeightedVotes") AS "subWeightedVotes", SUM("subWeightedDownVotes") AS "subWeightedDownVotes",
SUM("weightedVotes") AS "weightedVotes", SUM("weightedDownVotes") AS "weightedDownVotes"
FROM sub_votes
GROUP BY "itemId"
) AS subquery
WHERE "Item".id = subquery."itemId";
CREATE MATERIALIZED VIEW IF NOT EXISTS hot_score_view AS
SELECT id,
("Item"."weightedVotes" - "Item"."weightedDownVotes" + ("Item"."weightedComments"*0.25) + ("Item".boost / 5000))
/ POWER(GREATEST(3, EXTRACT(EPOCH FROM (now() - "Item".created_at))/3600), 1.1) AS hot_score,
("Item"."subWeightedVotes" - "Item"."subWeightedDownVotes" + ("Item"."weightedComments"*0.25) + ("Item".boost / 5000))
/ POWER(GREATEST(3, EXTRACT(EPOCH FROM (now() - "Item".created_at))/3600), 1.1) AS sub_hot_score
FROM "Item"
WHERE "Item"."weightedVotes" > 0 OR "Item"."weightedDownVotes" > 0 OR "Item"."subWeightedVotes" > 0
OR "Item"."subWeightedDownVotes" > 0 OR "Item"."weightedComments" > 0 OR "Item".boost > 0;
CREATE UNIQUE INDEX IF NOT EXISTS hot_score_view_id_idx ON hot_score_view(id);
CREATE INDEX IF NOT EXISTS hot_score_view_hot_score_idx ON hot_score_view(hot_score DESC NULLS LAST);
CREATE INDEX IF NOT EXISTS hot_score_view_sub_hot_score_idx ON hot_score_view(sub_hot_score DESC NULLS LAST);
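
A short sketch of how hot_score_view is kept fresh, mirroring the rankViews worker at the end of this compare; createPrisma and the connection params are taken from that worker.

const models = createPrisma({ connectionParams: { connection_limit: 1 } })
try {
  // CONCURRENTLY requires the unique index created above (hot_score_view_id_idx)
  await models.$queryRawUnsafe('REFRESH MATERIALIZED VIEW CONCURRENTLY hot_score_view')
} finally {
  await models.$disconnect()
}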

View File

@ -1,224 +0,0 @@
-- add limit and offset
CREATE OR REPLACE FUNCTION item_comments_zaprank_with_me_limited(
_item_id int, _global_seed int, _me_id int, _limit int, _offset int, _grandchild_limit int,
_level int, _where text, _order_by text)
RETURNS jsonb
LANGUAGE plpgsql VOLATILE PARALLEL SAFE AS
$$
DECLARE
result jsonb;
BEGIN
IF _level < 1 THEN
RETURN '[]'::jsonb;
END IF;
EXECUTE 'CREATE TEMP TABLE IF NOT EXISTS t_item ON COMMIT DROP AS '
|| 'WITH RECURSIVE base AS ( '
|| ' (SELECT "Item".*, 1 as level, ROW_NUMBER() OVER () as rn '
|| ' FROM "Item" '
|| ' WHERE "Item"."parentId" = $1 '
|| _order_by || ' '
|| ' LIMIT $4 '
|| ' OFFSET $5) '
|| ' UNION ALL '
|| ' (SELECT "Item".*, b.level + 1, ROW_NUMBER() OVER (PARTITION BY "Item"."parentId" ' || _order_by || ') as rn '
|| ' FROM "Item" '
|| ' JOIN base b ON "Item"."parentId" = b.id '
|| ' LEFT JOIN hot_score_view g ON g.id = "Item".id '
|| ' WHERE b.level < $7 AND (b.level = 1 OR b.rn <= $6)) '
|| ') '
|| 'SELECT "Item".*, '
|| ' "Item".created_at at time zone ''UTC'' AS "createdAt", '
|| ' "Item".updated_at at time zone ''UTC'' AS "updatedAt", '
|| ' "Item"."invoicePaidAt" at time zone ''UTC'' AS "invoicePaidAtUTC", '
|| ' to_jsonb(users.*) || jsonb_build_object(''meMute'', "Mute"."mutedId" IS NOT NULL) AS user, '
|| ' COALESCE("ItemAct"."meMsats", 0) AS "meMsats", '
|| ' COALESCE("ItemAct"."mePendingMsats", 0) as "mePendingMsats", '
|| ' COALESCE("ItemAct"."meDontLikeMsats", 0) AS "meDontLikeMsats", '
|| ' COALESCE("ItemAct"."meMcredits", 0) AS "meMcredits", '
|| ' COALESCE("ItemAct"."mePendingMcredits", 0) as "mePendingMcredits", '
|| ' "Bookmark"."itemId" IS NOT NULL AS "meBookmark", '
|| ' "ThreadSubscription"."itemId" IS NOT NULL AS "meSubscription", '
|| ' g.hot_score AS "hotScore", g.sub_hot_score AS "subHotScore" '
|| 'FROM base "Item" '
|| 'JOIN users ON users.id = "Item"."userId" '
|| ' LEFT JOIN "Mute" ON "Mute"."muterId" = $3 AND "Mute"."mutedId" = "Item"."userId" '
|| ' LEFT JOIN "Bookmark" ON "Bookmark"."userId" = $3 AND "Bookmark"."itemId" = "Item".id '
|| ' LEFT JOIN "ThreadSubscription" ON "ThreadSubscription"."userId" = $3 AND "ThreadSubscription"."itemId" = "Item".id '
|| ' LEFT JOIN hot_score_view g ON g.id = "Item".id '
|| 'LEFT JOIN LATERAL ( '
|| ' SELECT "itemId", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS DISTINCT FROM ''FAILED'' AND "InvoiceForward".id IS NOT NULL AND (act = ''FEE'' OR act = ''TIP'')) AS "meMsats", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS DISTINCT FROM ''FAILED'' AND "InvoiceForward".id IS NULL AND (act = ''FEE'' OR act = ''TIP'')) AS "meMcredits", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS NOT DISTINCT FROM ''PENDING'' AND "InvoiceForward".id IS NOT NULL AND (act = ''FEE'' OR act = ''TIP'')) AS "mePendingMsats", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS NOT DISTINCT FROM ''PENDING'' AND "InvoiceForward".id IS NULL AND (act = ''FEE'' OR act = ''TIP'')) AS "mePendingMcredits", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS DISTINCT FROM ''FAILED'' AND act = ''DONT_LIKE_THIS'') AS "meDontLikeMsats" '
|| ' FROM "ItemAct" '
|| ' LEFT JOIN "Invoice" ON "Invoice".id = "ItemAct"."invoiceId" '
|| ' LEFT JOIN "InvoiceForward" ON "InvoiceForward"."invoiceId" = "Invoice"."id" '
|| ' WHERE "ItemAct"."userId" = $3 '
|| ' AND "ItemAct"."itemId" = "Item".id '
|| ' GROUP BY "ItemAct"."itemId" '
|| ') "ItemAct" ON true '
|| 'WHERE ("Item".level = 1 OR "Item".rn <= $6 - "Item".level + 2) ' || _where || ' '
USING _item_id, _global_seed, _me_id, _limit, _offset, _grandchild_limit, _level, _where, _order_by;
EXECUTE ''
|| 'SELECT COALESCE(jsonb_agg(sub), ''[]''::jsonb) AS comments '
|| 'FROM ( '
|| ' SELECT "Item".*, item_comments_zaprank_with_me_limited("Item".id, $2, $3, $4, $5, $6, $7 - 1, $8, $9) AS comments '
|| ' FROM t_item "Item" '
|| ' WHERE "Item"."parentId" = $1 '
|| _order_by
|| ' ) sub'
INTO result USING _item_id, _global_seed, _me_id, _limit, _offset, _grandchild_limit, _level, _where, _order_by;
RETURN result;
END
$$;
-- add cowboy credits
CREATE OR REPLACE FUNCTION item_comments_zaprank_with_me(_item_id int, _global_seed int, _me_id int, _level int, _where text, _order_by text)
RETURNS jsonb
LANGUAGE plpgsql VOLATILE PARALLEL SAFE AS
$$
DECLARE
result jsonb;
BEGIN
IF _level < 1 THEN
RETURN '[]'::jsonb;
END IF;
EXECUTE 'CREATE TEMP TABLE IF NOT EXISTS t_item ON COMMIT DROP AS'
|| ' SELECT "Item".*, "Item".created_at at time zone ''UTC'' AS "createdAt", "Item".updated_at at time zone ''UTC'' AS "updatedAt", '
|| ' "Item"."invoicePaidAt" at time zone ''UTC'' AS "invoicePaidAtUTC", to_jsonb(users.*) || jsonb_build_object(''meMute'', "Mute"."mutedId" IS NOT NULL) AS user, '
|| ' COALESCE("ItemAct"."meMsats", 0) AS "meMsats", COALESCE("ItemAct"."mePendingMsats", 0) as "mePendingMsats", COALESCE("ItemAct"."meDontLikeMsats", 0) AS "meDontLikeMsats", '
|| ' COALESCE("ItemAct"."meMcredits", 0) AS "meMcredits", COALESCE("ItemAct"."mePendingMcredits", 0) as "mePendingMcredits", '
|| ' "Bookmark"."itemId" IS NOT NULL AS "meBookmark", "ThreadSubscription"."itemId" IS NOT NULL AS "meSubscription", '
|| ' g.hot_score AS "hotScore", g.sub_hot_score AS "subHotScore" '
|| ' FROM "Item" '
|| ' JOIN users ON users.id = "Item"."userId" '
|| ' LEFT JOIN "Mute" ON "Mute"."muterId" = $5 AND "Mute"."mutedId" = "Item"."userId"'
|| ' LEFT JOIN "Bookmark" ON "Bookmark"."userId" = $5 AND "Bookmark"."itemId" = "Item".id '
|| ' LEFT JOIN "ThreadSubscription" ON "ThreadSubscription"."userId" = $5 AND "ThreadSubscription"."itemId" = "Item".id '
|| ' LEFT JOIN LATERAL ( '
|| ' SELECT "itemId", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS DISTINCT FROM ''FAILED'' AND "InvoiceForward".id IS NOT NULL AND (act = ''FEE'' OR act = ''TIP'')) AS "meMsats", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS DISTINCT FROM ''FAILED'' AND "InvoiceForward".id IS NULL AND (act = ''FEE'' OR act = ''TIP'')) AS "meMcredits", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS NOT DISTINCT FROM ''PENDING'' AND "InvoiceForward".id IS NOT NULL AND (act = ''FEE'' OR act = ''TIP'')) AS "mePendingMsats", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS NOT DISTINCT FROM ''PENDING'' AND "InvoiceForward".id IS NULL AND (act = ''FEE'' OR act = ''TIP'')) AS "mePendingMcredits", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS DISTINCT FROM ''FAILED'' AND act = ''DONT_LIKE_THIS'') AS "meDontLikeMsats" '
|| ' FROM "ItemAct" '
|| ' LEFT JOIN "Invoice" ON "Invoice".id = "ItemAct"."invoiceId" '
|| ' LEFT JOIN "InvoiceForward" ON "InvoiceForward"."invoiceId" = "Invoice"."id" '
|| ' WHERE "ItemAct"."userId" = $5 '
|| ' AND "ItemAct"."itemId" = "Item".id '
|| ' GROUP BY "ItemAct"."itemId" '
|| ' ) "ItemAct" ON true '
|| ' LEFT JOIN hot_score_view g ON g.id = "Item".id '
|| ' WHERE "Item".path <@ (SELECT path FROM "Item" WHERE id = $1) ' || _where || ' '
USING _item_id, _level, _where, _order_by, _me_id, _global_seed;
EXECUTE ''
|| 'SELECT COALESCE(jsonb_agg(sub), ''[]''::jsonb) AS comments '
|| 'FROM ( '
|| ' SELECT "Item".*, item_comments_zaprank_with_me("Item".id, $6, $5, $2 - 1, $3, $4) AS comments '
|| ' FROM t_item "Item" '
|| ' WHERE "Item"."parentId" = $1 '
|| _order_by
|| ' ) sub'
INTO result USING _item_id, _level, _where, _order_by, _me_id, _global_seed;
RETURN result;
END
$$;
CREATE OR REPLACE FUNCTION item_comments(_item_id int, _level int, _where text, _order_by text)
RETURNS jsonb
LANGUAGE plpgsql VOLATILE PARALLEL SAFE AS
$$
DECLARE
result jsonb;
BEGIN
IF _level < 1 THEN
RETURN '[]'::jsonb;
END IF;
EXECUTE 'CREATE TEMP TABLE IF NOT EXISTS t_item ON COMMIT DROP AS'
|| ' SELECT "Item".*, "Item".created_at at time zone ''UTC'' AS "createdAt", "Item".updated_at at time zone ''UTC'' AS "updatedAt", '
|| ' "Item"."invoicePaidAt" at time zone ''UTC'' AS "invoicePaidAtUTC", '
|| ' to_jsonb(users.*) as user, '
|| ' g.hot_score AS "hotScore", g.sub_hot_score AS "subHotScore" '
|| ' FROM "Item" '
|| ' JOIN users ON users.id = "Item"."userId" '
|| ' LEFT JOIN hot_score_view g ON g.id = "Item".id '
|| ' WHERE "Item".path <@ (SELECT path FROM "Item" WHERE id = $1) ' || _where
USING _item_id, _level, _where, _order_by;
EXECUTE ''
|| 'SELECT COALESCE(jsonb_agg(sub), ''[]''::jsonb) AS comments '
|| 'FROM ( '
|| ' SELECT "Item".*, item_comments("Item".id, $2 - 1, $3, $4) AS comments '
|| ' FROM t_item "Item"'
|| ' WHERE "Item"."parentId" = $1 '
|| _order_by
|| ' ) sub'
INTO result USING _item_id, _level, _where, _order_by;
RETURN result;
END
$$;
-- add limit and offset
CREATE OR REPLACE FUNCTION item_comments_limited(
_item_id int, _limit int, _offset int, _grandchild_limit int,
_level int, _where text, _order_by text)
RETURNS jsonb
LANGUAGE plpgsql VOLATILE PARALLEL SAFE AS
$$
DECLARE
result jsonb;
BEGIN
IF _level < 1 THEN
RETURN '[]'::jsonb;
END IF;
EXECUTE 'CREATE TEMP TABLE IF NOT EXISTS t_item ON COMMIT DROP AS '
|| 'WITH RECURSIVE base AS ( '
|| ' (SELECT "Item".*, 1 as level, ROW_NUMBER() OVER () as rn '
|| ' FROM "Item" '
|| ' WHERE "Item"."parentId" = $1 '
|| _order_by || ' '
|| ' LIMIT $2 '
|| ' OFFSET $3) '
|| ' UNION ALL '
|| ' (SELECT "Item".*, b.level + 1, ROW_NUMBER() OVER (PARTITION BY "Item"."parentId" ' || _order_by || ') '
|| ' FROM "Item" '
|| ' JOIN base b ON "Item"."parentId" = b.id '
|| ' LEFT JOIN hot_score_view g ON g.id = "Item".id '
|| ' WHERE b.level < $5 AND (b.level = 1 OR b.rn <= $4)) '
|| ') '
|| 'SELECT "Item".*, "Item".created_at at time zone ''UTC'' AS "createdAt", "Item".updated_at at time zone ''UTC'' AS "updatedAt", '
|| ' "Item"."invoicePaidAt" at time zone ''UTC'' AS "invoicePaidAtUTC", '
|| ' to_jsonb(users.*) as user, '
|| ' g.hot_score AS "hotScore", g.sub_hot_score AS "subHotScore" '
|| 'FROM base "Item" '
|| 'JOIN users ON users.id = "Item"."userId" '
|| 'LEFT JOIN hot_score_view g ON g.id = "Item".id '
|| 'WHERE ("Item".level = 1 OR "Item".rn <= $4 - "Item".level + 2) ' || _where
USING _item_id, _limit, _offset, _grandchild_limit, _level, _where, _order_by;
EXECUTE ''
|| 'SELECT COALESCE(jsonb_agg(sub), ''[]''::jsonb) AS comments '
|| 'FROM ( '
|| ' SELECT "Item".*, item_comments_limited("Item".id, $2, $3, $4, $5 - 1, $6, $7) AS comments '
|| ' FROM t_item "Item" '
|| ' WHERE "Item"."parentId" = $1 '
|| _order_by
|| ' ) sub'
INTO result USING _item_id, _limit, _offset, _grandchild_limit, _level, _where, _order_by;
RETURN result;
END
$$;
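
A hedged sketch of how a recursive comment function like item_comments_limited is typically invoked from the application; the itemId variable and the WHERE/ORDER BY fragments are illustrative only, matching the shape the function expects (the fragments are appended verbatim to the generated SQL).

const [{ comments }] = await models.$queryRawUnsafe(
  'SELECT item_comments_limited($1, $2, $3, $4, $5, $6, $7) AS comments',
  itemId,                              // _item_id
  50, 0,                               // _limit, _offset for top-level comments
  10,                                  // _grandchild_limit
  3,                                   // _level: recursion depth
  ' AND "Item"."deletedAt" IS NULL ',  // _where fragment (illustrative)
  ' ORDER BY "Item".created_at DESC '  // _order_by fragment (illustrative)
)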

View File

@ -1,2 +0,0 @@
CREATE INDEX IF NOT EXISTS hot_score_view_hot_score_no_nulls_idx ON hot_score_view(hot_score DESC);
CREATE INDEX IF NOT EXISTS hot_score_view_sub_hot_score_no_nulls_idx ON hot_score_view(sub_hot_score DESC);

View File

@ -1,132 +0,0 @@
-- add limit and offset
CREATE OR REPLACE FUNCTION item_comments_zaprank_with_me_limited(
_item_id int, _global_seed int, _me_id int, _limit int, _offset int, _grandchild_limit int,
_level int, _where text, _order_by text)
RETURNS jsonb
LANGUAGE plpgsql VOLATILE PARALLEL SAFE AS
$$
DECLARE
result jsonb;
BEGIN
IF _level < 1 THEN
RETURN '[]'::jsonb;
END IF;
EXECUTE 'CREATE TEMP TABLE IF NOT EXISTS t_item ON COMMIT DROP AS '
|| 'WITH RECURSIVE base AS ( '
|| ' (SELECT "Item".*, 1 as level, ROW_NUMBER() OVER () as rn '
|| ' FROM "Item" '
|| ' LEFT JOIN hot_score_view g(id, "hotScore", "subHotScore") ON g.id = "Item".id '
|| ' WHERE "Item"."parentId" = $1 '
|| _order_by || ' '
|| ' LIMIT $4 '
|| ' OFFSET $5) '
|| ' UNION ALL '
|| ' (SELECT "Item".*, b.level + 1, ROW_NUMBER() OVER (PARTITION BY "Item"."parentId" ' || _order_by || ') as rn '
|| ' FROM "Item" '
|| ' JOIN base b ON "Item"."parentId" = b.id '
|| ' LEFT JOIN hot_score_view g(id, "hotScore", "subHotScore") ON g.id = "Item".id '
|| ' WHERE b.level < $7 AND (b.level = 1 OR b.rn <= $6)) '
|| ') '
|| 'SELECT "Item".*, '
|| ' "Item".created_at at time zone ''UTC'' AS "createdAt", '
|| ' "Item".updated_at at time zone ''UTC'' AS "updatedAt", '
|| ' "Item"."invoicePaidAt" at time zone ''UTC'' AS "invoicePaidAtUTC", '
|| ' to_jsonb(users.*) || jsonb_build_object(''meMute'', "Mute"."mutedId" IS NOT NULL) AS user, '
|| ' COALESCE("ItemAct"."meMsats", 0) AS "meMsats", '
|| ' COALESCE("ItemAct"."mePendingMsats", 0) as "mePendingMsats", '
|| ' COALESCE("ItemAct"."meDontLikeMsats", 0) AS "meDontLikeMsats", '
|| ' COALESCE("ItemAct"."meMcredits", 0) AS "meMcredits", '
|| ' COALESCE("ItemAct"."mePendingMcredits", 0) as "mePendingMcredits", '
|| ' "Bookmark"."itemId" IS NOT NULL AS "meBookmark", '
|| ' "ThreadSubscription"."itemId" IS NOT NULL AS "meSubscription", '
|| ' g.hot_score AS "hotScore", g.sub_hot_score AS "subHotScore" '
|| 'FROM base "Item" '
|| 'JOIN users ON users.id = "Item"."userId" '
|| ' LEFT JOIN "Mute" ON "Mute"."muterId" = $3 AND "Mute"."mutedId" = "Item"."userId" '
|| ' LEFT JOIN "Bookmark" ON "Bookmark"."userId" = $3 AND "Bookmark"."itemId" = "Item".id '
|| ' LEFT JOIN "ThreadSubscription" ON "ThreadSubscription"."userId" = $3 AND "ThreadSubscription"."itemId" = "Item".id '
|| ' LEFT JOIN hot_score_view g ON g.id = "Item".id '
|| 'LEFT JOIN LATERAL ( '
|| ' SELECT "itemId", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS DISTINCT FROM ''FAILED'' AND "InvoiceForward".id IS NOT NULL AND (act = ''FEE'' OR act = ''TIP'')) AS "meMsats", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS DISTINCT FROM ''FAILED'' AND "InvoiceForward".id IS NULL AND (act = ''FEE'' OR act = ''TIP'')) AS "meMcredits", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS NOT DISTINCT FROM ''PENDING'' AND "InvoiceForward".id IS NOT NULL AND (act = ''FEE'' OR act = ''TIP'')) AS "mePendingMsats", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS NOT DISTINCT FROM ''PENDING'' AND "InvoiceForward".id IS NULL AND (act = ''FEE'' OR act = ''TIP'')) AS "mePendingMcredits", '
|| ' sum("ItemAct".msats) FILTER (WHERE "invoiceActionState" IS DISTINCT FROM ''FAILED'' AND act = ''DONT_LIKE_THIS'') AS "meDontLikeMsats" '
|| ' FROM "ItemAct" '
|| ' LEFT JOIN "Invoice" ON "Invoice".id = "ItemAct"."invoiceId" '
|| ' LEFT JOIN "InvoiceForward" ON "InvoiceForward"."invoiceId" = "Invoice"."id" '
|| ' WHERE "ItemAct"."userId" = $3 '
|| ' AND "ItemAct"."itemId" = "Item".id '
|| ' GROUP BY "ItemAct"."itemId" '
|| ') "ItemAct" ON true '
|| 'WHERE ("Item".level = 1 OR "Item".rn <= $6 - "Item".level + 2) ' || _where || ' '
USING _item_id, _global_seed, _me_id, _limit, _offset, _grandchild_limit, _level, _where, _order_by;
EXECUTE ''
|| 'SELECT COALESCE(jsonb_agg(sub), ''[]''::jsonb) AS comments '
|| 'FROM ( '
|| ' SELECT "Item".*, item_comments_zaprank_with_me_limited("Item".id, $2, $3, $4, $5, $6, $7 - 1, $8, $9) AS comments '
|| ' FROM t_item "Item" '
|| ' WHERE "Item"."parentId" = $1 '
|| _order_by
|| ' ) sub'
INTO result USING _item_id, _global_seed, _me_id, _limit, _offset, _grandchild_limit, _level, _where, _order_by;
RETURN result;
END
$$;
CREATE OR REPLACE FUNCTION item_comments_limited(
_item_id int, _limit int, _offset int, _grandchild_limit int,
_level int, _where text, _order_by text)
RETURNS jsonb
LANGUAGE plpgsql VOLATILE PARALLEL SAFE AS
$$
DECLARE
result jsonb;
BEGIN
IF _level < 1 THEN
RETURN '[]'::jsonb;
END IF;
EXECUTE 'CREATE TEMP TABLE IF NOT EXISTS t_item ON COMMIT DROP AS '
|| 'WITH RECURSIVE base AS ( '
|| ' (SELECT "Item".*, 1 as level, ROW_NUMBER() OVER () as rn '
|| ' FROM "Item" '
|| ' LEFT JOIN hot_score_view g(id, "hotScore", "subHotScore") ON g.id = "Item".id '
|| ' WHERE "Item"."parentId" = $1 '
|| _order_by || ' '
|| ' LIMIT $2 '
|| ' OFFSET $3) '
|| ' UNION ALL '
|| ' (SELECT "Item".*, b.level + 1, ROW_NUMBER() OVER (PARTITION BY "Item"."parentId" ' || _order_by || ') '
|| ' FROM "Item" '
|| ' JOIN base b ON "Item"."parentId" = b.id '
|| ' LEFT JOIN hot_score_view g(id, "hotScore", "subHotScore") ON g.id = "Item".id '
|| ' WHERE b.level < $5 AND (b.level = 1 OR b.rn <= $4)) '
|| ') '
|| 'SELECT "Item".*, "Item".created_at at time zone ''UTC'' AS "createdAt", "Item".updated_at at time zone ''UTC'' AS "updatedAt", '
|| ' "Item"."invoicePaidAt" at time zone ''UTC'' AS "invoicePaidAtUTC", '
|| ' to_jsonb(users.*) as user, '
|| ' g.hot_score AS "hotScore", g.sub_hot_score AS "subHotScore" '
|| 'FROM base "Item" '
|| 'JOIN users ON users.id = "Item"."userId" '
|| 'LEFT JOIN hot_score_view g ON g.id = "Item".id '
|| 'WHERE ("Item".level = 1 OR "Item".rn <= $4 - "Item".level + 2) ' || _where
USING _item_id, _limit, _offset, _grandchild_limit, _level, _where, _order_by;
EXECUTE ''
|| 'SELECT COALESCE(jsonb_agg(sub), ''[]''::jsonb) AS comments '
|| 'FROM ( '
|| ' SELECT "Item".*, item_comments_limited("Item".id, $2, $3, $4, $5 - 1, $6, $7) AS comments '
|| ' FROM t_item "Item" '
|| ' WHERE "Item"."parentId" = $1 '
|| _order_by
|| ' ) sub'
INTO result USING _item_id, _limit, _offset, _grandchild_limit, _level, _where, _order_by;
RETURN result;
END
$$;

View File

@ -148,7 +148,6 @@ model User {
directReceive Boolean @default(true) directReceive Boolean @default(true)
DirectPaymentReceived DirectPayment[] @relation("DirectPaymentReceived") DirectPaymentReceived DirectPayment[] @relation("DirectPaymentReceived")
DirectPaymentSent DirectPayment[] @relation("DirectPaymentSent") DirectPaymentSent DirectPayment[] @relation("DirectPaymentSent")
UserSubTrust UserSubTrust[]
@@index([photoId]) @@index([photoId])
@@index([createdAt], map: "users.created_at_index") @@index([createdAt], map: "users.created_at_index")
@ -185,21 +184,6 @@ model OneDayReferral {
@@index([type, typeId]) @@index([type, typeId])
} }
model UserSubTrust {
subName String @db.Citext
userId Int
zapPostTrust Float @default(0)
subZapPostTrust Float @default(0)
zapCommentTrust Float @default(0)
subZapCommentTrust Float @default(0)
createdAt DateTime @default(now()) @map("created_at")
updatedAt DateTime @default(now()) @updatedAt @map("updated_at")
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
sub Sub @relation(fields: [subName], references: [name], onDelete: Cascade)
@@id([userId, subName])
}
enum WalletType { enum WalletType {
LIGHTNING_ADDRESS LIGHTNING_ADDRESS
LND LND
@ -514,87 +498,85 @@ model Message {
/// This model contains an expression index which requires additional setup for migrations. Visit https://pris.ly/d/expression-indexes for more info. /// This model contains an expression index which requires additional setup for migrations. Visit https://pris.ly/d/expression-indexes for more info.
model Item { model Item {
id Int @id @default(autoincrement()) id Int @id @default(autoincrement())
createdAt DateTime @default(now()) @map("created_at") createdAt DateTime @default(now()) @map("created_at")
updatedAt DateTime @default(now()) @updatedAt @map("updated_at") updatedAt DateTime @default(now()) @updatedAt @map("updated_at")
title String? title String?
text String? text String?
url String? url String?
userId Int userId Int
parentId Int? parentId Int?
path Unsupported("ltree")? path Unsupported("ltree")?
pinId Int? pinId Int?
latitude Float? latitude Float?
location String? location String?
longitude Float? longitude Float?
maxBid Int? maxBid Int?
maxSalary Int? maxSalary Int?
minSalary Int? minSalary Int?
remote Boolean? remote Boolean?
subName String? @db.Citext subName String? @db.Citext
statusUpdatedAt DateTime? statusUpdatedAt DateTime?
status Status @default(ACTIVE) status Status @default(ACTIVE)
company String? company String?
weightedVotes Float @default(0) weightedVotes Float @default(0)
subWeightedVotes Float @default(0) boost Int @default(0)
boost Int @default(0) oldBoost Int @default(0)
oldBoost Int @default(0) pollCost Int?
pollCost Int? paidImgLink Boolean @default(false)
paidImgLink Boolean @default(false) commentMsats BigInt @default(0)
commentMsats BigInt @default(0) commentMcredits BigInt @default(0)
commentMcredits BigInt @default(0) lastCommentAt DateTime?
lastCommentAt DateTime? lastZapAt DateTime?
lastZapAt DateTime? ncomments Int @default(0)
ncomments Int @default(0) nDirectComments Int @default(0)
nDirectComments Int @default(0) msats BigInt @default(0)
msats BigInt @default(0) mcredits BigInt @default(0)
mcredits BigInt @default(0) cost Int @default(0)
cost Int @default(0) weightedDownVotes Float @default(0)
weightedDownVotes Float @default(0) bio Boolean @default(false)
subWeightedDownVotes Float @default(0) freebie Boolean @default(false)
bio Boolean @default(false) deletedAt DateTime?
freebie Boolean @default(false) otsFile Bytes?
deletedAt DateTime? otsHash String?
otsFile Bytes? imgproxyUrls Json?
otsHash String? bounty Int?
imgproxyUrls Json? noteId String? @unique(map: "Item.noteId_unique")
bounty Int? rootId Int?
noteId String? @unique(map: "Item.noteId_unique") bountyPaidTo Int[]
rootId Int? upvotes Int @default(0)
bountyPaidTo Int[] weightedComments Float @default(0)
upvotes Int @default(0) Bookmark Bookmark[]
weightedComments Float @default(0) parent Item? @relation("ParentChildren", fields: [parentId], references: [id])
Bookmark Bookmark[] children Item[] @relation("ParentChildren")
parent Item? @relation("ParentChildren", fields: [parentId], references: [id]) pin Pin? @relation(fields: [pinId], references: [id])
children Item[] @relation("ParentChildren") root Item? @relation("RootDescendant", fields: [rootId], references: [id])
pin Pin? @relation(fields: [pinId], references: [id]) descendants Item[] @relation("RootDescendant")
root Item? @relation("RootDescendant", fields: [rootId], references: [id]) sub Sub? @relation(fields: [subName], references: [name], onDelete: Cascade, onUpdate: Cascade)
descendants Item[] @relation("RootDescendant") user User @relation("UserItems", fields: [userId], references: [id], onDelete: Cascade)
sub Sub? @relation(fields: [subName], references: [name], onDelete: Cascade, onUpdate: Cascade) itemActs ItemAct[]
user User @relation("UserItems", fields: [userId], references: [id], onDelete: Cascade) mentions Mention[]
itemActs ItemAct[] itemReferrers ItemMention[] @relation("referrer")
mentions Mention[] itemReferees ItemMention[] @relation("referee")
itemReferrers ItemMention[] @relation("referrer") pollOptions PollOption[]
itemReferees ItemMention[] @relation("referee") PollVote PollVote[]
pollOptions PollOption[] threadSubscriptions ThreadSubscription[]
PollVote PollVote[] User User[]
threadSubscriptions ThreadSubscription[] itemForwards ItemForward[]
User User[] itemUploads ItemUpload[]
itemForwards ItemForward[] uploadId Int?
itemUploads ItemUpload[] invoiceId Int?
uploadId Int? invoiceActionState InvoiceActionState?
invoiceId Int? invoicePaidAt DateTime?
invoiceActionState InvoiceActionState? outlawed Boolean @default(false)
invoicePaidAt DateTime? apiKey Boolean @default(false)
outlawed Boolean @default(false) pollExpiresAt DateTime?
apiKey Boolean @default(false) Ancestors Reply[] @relation("AncestorReplyItem")
pollExpiresAt DateTime? Replies Reply[]
Ancestors Reply[] @relation("AncestorReplyItem") Reminder Reminder[]
Replies Reply[] invoice Invoice? @relation(fields: [invoiceId], references: [id], onDelete: SetNull)
Reminder Reminder[] PollBlindVote PollBlindVote[]
invoice Invoice? @relation(fields: [invoiceId], references: [id], onDelete: SetNull) ItemUserAgg ItemUserAgg[]
PollBlindVote PollBlindVote[]
ItemUserAgg ItemUserAgg[]
@@index([uploadId]) @@index([uploadId])
@@index([lastZapAt]) @@index([lastZapAt])
@ -778,7 +760,6 @@ model Sub {
MuteSub MuteSub[] MuteSub MuteSub[]
SubSubscription SubSubscription[] SubSubscription SubSubscription[]
TerritoryTransfer TerritoryTransfer[] TerritoryTransfer TerritoryTransfer[]
UserSubTrust UserSubTrust[]
@@index([parentName]) @@index([parentName])
@@index([createdAt]) @@index([createdAt])
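
A hedged sketch of how the left-hand UserSubTrust model is written and read through the Prisma client, following the deleteMany/createMany pattern used by the trust worker later in this compare; userId, subName, and results are placeholders.

await models.$transaction([
  // replace a territory's trust rows wholesale
  models.userSubTrust.deleteMany({ where: { subName } }),
  models.userSubTrust.createMany({ data: results })
])
// read a single user's trust in a territory via the compound primary key
const trust = await models.userSubTrust.findUnique({
  where: { userId_subName: { userId, subName } }
})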

View File

@ -33,7 +33,7 @@ export async function createInvoice (
out: false out: false
}) })
let hostname = url.replace(/^https?:\/\//, '').replace(/\/+$/, '') let hostname = url.replace(/^https?:\/\//, '')
const agent = getAgent({ hostname }) const agent = getAgent({ hostname })
if (process.env.NODE_ENV !== 'production' && hostname.startsWith('localhost:')) { if (process.env.NODE_ENV !== 'production' && hostname.startsWith('localhost:')) {

View File

@ -1,6 +1,5 @@
import { fetchWithTimeout } from '@/lib/fetch' import { fetchWithTimeout } from '@/lib/fetch'
import { msatsToSats } from '@/lib/format' import { msatsToSats } from '@/lib/format'
import { getAgent } from '@/lib/proxy'
import { assertContentTypeJson, assertResponseOk } from '@/lib/url' import { assertContentTypeJson, assertResponseOk } from '@/lib/url'
export * from '@/wallets/phoenixd' export * from '@/wallets/phoenixd'
@ -28,13 +27,9 @@ export async function createInvoice (
body.append('description', description) body.append('description', description)
body.append('amountSat', msatsToSats(msats)) body.append('amountSat', msatsToSats(msats))
const hostname = url.replace(/^https?:\/\//, '').replace(/\/+$/, '') const res = await fetchWithTimeout(url + path, {
const agent = getAgent({ hostname })
const res = await fetchWithTimeout(`${agent.protocol}//${hostname}${path}`, {
method: 'POST', method: 'POST',
headers, headers,
agent,
body, body,
signal signal
}) })

View File

@ -14,7 +14,6 @@ export async function expireBoost ({ data: { id }, models }) {
FROM "ItemAct" FROM "ItemAct"
WHERE act = 'BOOST' WHERE act = 'BOOST'
AND "itemId" = ${Number(id)}::INTEGER AND "itemId" = ${Number(id)}::INTEGER
AND ("invoiceActionState" IS NULL OR "invoiceActionState" = 'PAID')
) )
UPDATE "Item" UPDATE "Item"
SET boost = COALESCE(boost.cur_msats, 0) / 1000, "oldBoost" = COALESCE(boost.old_msats, 0) / 1000 SET boost = COALESCE(boost.cur_msats, 0) / 1000, "oldBoost" = COALESCE(boost.old_msats, 0) / 1000

View File

@ -1,89 +1,38 @@
import * as math from 'mathjs' import * as math from 'mathjs'
import { USER_ID } from '@/lib/constants' import { USER_ID, SN_ADMIN_IDS } from '@/lib/constants'
import { Prisma } from '@prisma/client'
import { initialTrust, GLOBAL_SEEDS } from '@/api/paidAction/lib/territory'
const MAX_DEPTH = 40 export async function trust ({ boss, models }) {
try {
console.time('trust')
console.timeLog('trust', 'getting graph')
const graph = await getGraph(models)
console.timeLog('trust', 'computing trust')
const [vGlobal, mPersonal] = await trustGivenGraph(graph)
console.timeLog('trust', 'storing trust')
await storeTrust(models, graph, vGlobal, mPersonal)
} finally {
console.timeEnd('trust')
}
}
const MAX_DEPTH = 10
const MAX_TRUST = 1 const MAX_TRUST = 1
const MIN_SUCCESS = 0 const MIN_SUCCESS = 1
// https://en.wikipedia.org/wiki/Normal_distribution#Quantile_function // https://en.wikipedia.org/wiki/Normal_distribution#Quantile_function
const Z_CONFIDENCE = 6.109410204869 // 99.9999999% confidence const Z_CONFIDENCE = 6.109410204869 // 99.9999999% confidence
const SEED_WEIGHT = 0.83 const GLOBAL_ROOT = 616
const SEED_WEIGHT = 1.0
const AGAINST_MSAT_MIN = 1000 const AGAINST_MSAT_MIN = 1000
const MSAT_MIN = 1001 // 1001 is the minimum for a tip to be counted in trust const MSAT_MIN = 20001 // 20001 is the minimum for a tip to be counted in trust
const INDEPENDENCE_THRESHOLD = 50 // how many zappers are needed to consider a sub independent const SIG_DIFF = 0.1 // need to differ by at least 10 percent
const IRRELEVANT_CUMULATIVE_TRUST = 0.001 // if a user has less than this amount of cumulative trust, they are irrelevant
// for each subName, we'll need to get two graphs
// one for comments and one for posts
// then we'll need to do two trust calculations on each graph
// one with global seeds and one with subName seeds
export async function trust ({ boss, models }) {
console.time('trust')
const territories = await models.sub.findMany({
where: {
status: 'ACTIVE'
}
})
for (const territory of territories) {
const seeds = GLOBAL_SEEDS.includes(territory.userId) ? GLOBAL_SEEDS : GLOBAL_SEEDS.concat(territory.userId)
try {
console.timeLog('trust', `getting post graph for ${territory.name}`)
const postGraph = await getGraph(models, territory.name, true, seeds)
console.timeLog('trust', `getting comment graph for ${territory.name}`)
const commentGraph = await getGraph(models, territory.name, false, seeds)
console.timeLog('trust', `computing global post trust for ${territory.name}`)
const vGlobalPost = await trustGivenGraph(postGraph)
console.timeLog('trust', `computing global comment trust for ${territory.name}`)
const vGlobalComment = await trustGivenGraph(commentGraph)
console.timeLog('trust', `computing sub post trust for ${territory.name}`)
const vSubPost = await trustGivenGraph(postGraph, postGraph.length > INDEPENDENCE_THRESHOLD ? [territory.userId] : seeds)
console.timeLog('trust', `computing sub comment trust for ${territory.name}`)
const vSubComment = await trustGivenGraph(commentGraph, commentGraph.length > INDEPENDENCE_THRESHOLD ? [territory.userId] : seeds)
console.timeLog('trust', `storing trust for ${territory.name}`)
let results = reduceVectors(territory.name, {
zapPostTrust: {
graph: postGraph,
vector: vGlobalPost
},
subZapPostTrust: {
graph: postGraph,
vector: vSubPost
},
zapCommentTrust: {
graph: commentGraph,
vector: vGlobalComment
},
subZapCommentTrust: {
graph: commentGraph,
vector: vSubComment
}
})
if (results.length === 0) {
console.timeLog('trust', `no results for ${territory.name} - adding seeds`)
results = initialTrust({ name: territory.name, userId: territory.userId })
}
await storeTrust(models, territory.name, results)
} catch (e) {
console.error(`error computing trust for ${territory.name}:`, e)
} finally {
console.timeLog('trust', `finished computing trust for ${territory.name}`)
}
}
console.timeEnd('trust')
}
/* /*
Given a graph and start, this function returns an object where Given a graph and start, this function returns an object where
the keys are the node id and their value is the trust of that node the keys are the node id and their value is the trust of that node
*/ */
// I'm going to need to send subName, and multiply by a vector instead of a matrix function trustGivenGraph (graph) {
function trustGivenGraph (graph, seeds = GLOBAL_SEEDS) {
console.timeLog('trust', `creating matrix of size ${graph.length} x ${graph.length}`)
// empty matrix of proper size nstackers x nstackers // empty matrix of proper size nstackers x nstackers
const mat = math.zeros(graph.length, graph.length, 'sparse') let mat = math.zeros(graph.length, graph.length, 'sparse')
// create a map of user id to position in matrix // create a map of user id to position in matrix
const posByUserId = {} const posByUserId = {}
@ -105,57 +54,54 @@ function trustGivenGraph (graph, seeds = GLOBAL_SEEDS) {
// perform random walk over trust matrix // perform random walk over trust matrix
// the resulting matrix columns represent the trust a user (col) has for each other user (rows) // the resulting matrix columns represent the trust a user (col) has for each other user (rows)
const matT = math.transpose(mat) // XXX this scales N^3 and mathjs is slow
const vTrust = math.zeros(graph.length) let matT = math.transpose(mat)
for (const seed of seeds) { const original = matT.clone()
vTrust.set([posByUserId[seed], 0], 1.0 / seeds.length)
}
let result = vTrust.clone()
console.timeLog('trust', 'matrix multiply')
for (let i = 0; i < MAX_DEPTH; i++) { for (let i = 0; i < MAX_DEPTH; i++) {
result = math.multiply(matT, result) console.timeLog('trust', `matrix multiply ${i}`)
result = math.add(math.multiply(1 - SEED_WEIGHT, result), math.multiply(SEED_WEIGHT, vTrust)) matT = math.multiply(original, matT)
matT = math.add(math.multiply(1 - SEED_WEIGHT, matT), math.multiply(SEED_WEIGHT, original))
} }
result = math.squeeze(result)
console.timeLog('trust', 'transforming result') console.timeLog('trust', 'transforming result')
const seedIdxs = seeds.map(id => posByUserId[id]) const seedIdxs = SN_ADMIN_IDS.map(id => posByUserId[id])
const filterZeroAndSeed = (val, idx) => { const isOutlier = (fromIdx, idx) => [...seedIdxs, fromIdx].includes(idx)
return val !== 0 && !seedIdxs.includes(idx[0]) const sqapply = (mat, fn) => {
} let idx = 0
const filterSeed = (val, idx) => { return math.squeeze(math.apply(mat, 1, d => {
return !seedIdxs.includes(idx[0]) const filtered = math.filter(d, (val, fidx) => {
} return val !== 0 && !isOutlier(idx, fidx[0])
const sqapply = (vec, filterFn, fn) => { })
// if the vector is smaller than the seeds, don't filter idx++
const filtered = vec.size()[0] > seeds.length ? math.filter(vec, filterFn) : vec if (filtered.length === 0) return 0
if (filtered.size()[0] === 0) return 0 return fn(filtered)
return fn(filtered) }))
} }
console.timeLog('trust', 'normalizing') console.timeLog('trust', 'normalizing')
console.timeLog('trust', 'stats') console.timeLog('trust', 'stats')
const std = sqapply(result, filterZeroAndSeed, math.std) // math.squeeze(math.std(mat, 1)) mat = math.transpose(matT)
const mean = sqapply(result, filterZeroAndSeed, math.mean) // math.squeeze(math.mean(mat, 1)) const std = sqapply(mat, math.std) // math.squeeze(math.std(mat, 1))
console.timeLog('trust', 'std', std) const mean = sqapply(mat, math.mean) // math.squeeze(math.mean(mat, 1))
console.timeLog('trust', 'mean', mean) const zscore = math.map(mat, (val, idx) => {
const zscore = math.map(result, (val) => { const zstd = math.subset(std, math.index(idx[0], 0))
if (std === 0) return 0 const zmean = math.subset(mean, math.index(idx[0], 0))
return (val - mean) / std return zstd ? (val - zmean) / zstd : 0
}) })
console.timeLog('trust', 'minmax') console.timeLog('trust', 'minmax')
const min = sqapply(zscore, filterSeed, math.min) // math.squeeze(math.min(zscore, 1)) const min = sqapply(zscore, math.min) // math.squeeze(math.min(zscore, 1))
const max = sqapply(zscore, filterSeed, math.max) // math.squeeze(math.max(zscore, 1)) const max = sqapply(zscore, math.max) // math.squeeze(math.max(zscore, 1))
console.timeLog('trust', 'min', min) const mPersonal = math.map(zscore, (val, idx) => {
console.timeLog('trust', 'max', max) const zmin = math.subset(min, math.index(idx[0], 0))
const normalized = math.map(zscore, (val) => { const zmax = math.subset(max, math.index(idx[0], 0))
const zrange = max - min const zrange = zmax - zmin
if (val > max) return MAX_TRUST if (val > zmax) return MAX_TRUST
return zrange ? (val - min) / zrange : 0 return zrange ? (val - zmin) / zrange : 0
}) })
const vGlobal = math.squeeze(math.row(mPersonal, posByUserId[GLOBAL_ROOT]))
return normalized return [vGlobal, mPersonal]
} }
/* /*
@ -165,31 +111,23 @@ function trustGivenGraph (graph, seeds = GLOBAL_SEEDS) {
... ...
] ]
*/ */
// I'm going to want to send subName to this function async function getGraph (models) {
// and whether it's for comments or posts
async function getGraph (models, subName, postTrust = true, seeds = GLOBAL_SEEDS) {
return await models.$queryRaw` return await models.$queryRaw`
SELECT id, json_agg(json_build_object( SELECT id, json_agg(json_build_object(
'node', oid, 'node', oid,
'trust', CASE WHEN total_trust > 0 THEN trust / total_trust::float ELSE 0 END)) AS hops 'trust', CASE WHEN total_trust > 0 THEN trust / total_trust::float ELSE 0 END)) AS hops
FROM ( FROM (
WITH user_votes AS ( WITH user_votes AS (
SELECT "ItemAct"."userId" AS user_id, users.name AS name, "ItemAct"."itemId" AS item_id, max("ItemAct".created_at) AS act_at, SELECT "ItemAct"."userId" AS user_id, users.name AS name, "ItemAct"."itemId" AS item_id, min("ItemAct".created_at) AS act_at,
users.created_at AS user_at, "ItemAct".act = 'DONT_LIKE_THIS' AS against, users.created_at AS user_at, "ItemAct".act = 'DONT_LIKE_THIS' AS against,
count(*) OVER (partition by "ItemAct"."userId") AS user_vote_count, count(*) OVER (partition by "ItemAct"."userId") AS user_vote_count,
sum("ItemAct".msats) as user_msats sum("ItemAct".msats) as user_msats
FROM "ItemAct" FROM "ItemAct"
JOIN "Item" ON "Item".id = "ItemAct"."itemId" AND "ItemAct".act IN ('FEE', 'TIP', 'DONT_LIKE_THIS') JOIN "Item" ON "Item".id = "ItemAct"."itemId" AND "ItemAct".act IN ('FEE', 'TIP', 'DONT_LIKE_THIS')
AND NOT "Item".bio AND "Item"."userId" <> "ItemAct"."userId" AND "Item"."parentId" IS NULL AND NOT "Item".bio AND "Item"."userId" <> "ItemAct"."userId"
AND ${postTrust
? Prisma.sql`"Item"."parentId" IS NULL AND "Item"."subName" = ${subName}::TEXT`
: Prisma.sql`
"Item"."parentId" IS NOT NULL
JOIN "Item" root ON "Item"."rootId" = root.id AND root."subName" = ${subName}::TEXT`
}
JOIN users ON "ItemAct"."userId" = users.id AND users.id <> ${USER_ID.anon} JOIN users ON "ItemAct"."userId" = users.id AND users.id <> ${USER_ID.anon}
WHERE ("ItemAct"."invoiceActionState" IS NULL OR "ItemAct"."invoiceActionState" = 'PAID') WHERE "ItemAct"."invoiceActionState" IS NULL OR "ItemAct"."invoiceActionState" = 'PAID'
GROUP BY user_id, users.name, item_id, user_at, against GROUP BY user_id, name, item_id, user_at, against
HAVING CASE WHEN HAVING CASE WHEN
"ItemAct".act = 'DONT_LIKE_THIS' THEN sum("ItemAct".msats) > ${AGAINST_MSAT_MIN} "ItemAct".act = 'DONT_LIKE_THIS' THEN sum("ItemAct".msats) > ${AGAINST_MSAT_MIN}
ELSE sum("ItemAct".msats) > ${MSAT_MIN} END ELSE sum("ItemAct".msats) > ${MSAT_MIN} END
@ -198,7 +136,7 @@ async function getGraph (models, subName, postTrust = true, seeds = GLOBAL_SEEDS
SELECT a.user_id AS a_id, b.user_id AS b_id, SELECT a.user_id AS a_id, b.user_id AS b_id,
sum(CASE WHEN b.user_msats > a.user_msats THEN a.user_msats / b.user_msats::FLOAT ELSE b.user_msats / a.user_msats::FLOAT END) FILTER(WHERE a.act_at > b.act_at AND a.against = b.against) AS before, sum(CASE WHEN b.user_msats > a.user_msats THEN a.user_msats / b.user_msats::FLOAT ELSE b.user_msats / a.user_msats::FLOAT END) FILTER(WHERE a.act_at > b.act_at AND a.against = b.against) AS before,
sum(CASE WHEN b.user_msats > a.user_msats THEN a.user_msats / b.user_msats::FLOAT ELSE b.user_msats / a.user_msats::FLOAT END) FILTER(WHERE b.act_at > a.act_at AND a.against = b.against) AS after, sum(CASE WHEN b.user_msats > a.user_msats THEN a.user_msats / b.user_msats::FLOAT ELSE b.user_msats / a.user_msats::FLOAT END) FILTER(WHERE b.act_at > a.act_at AND a.against = b.against) AS after,
count(*) FILTER(WHERE a.against <> b.against) AS disagree, sum(log(1 + a.user_msats / 10000::float) + log(1 + b.user_msats / 10000::float)) FILTER(WHERE a.against <> b.against) AS disagree,
b.user_vote_count AS b_total, a.user_vote_count AS a_total b.user_vote_count AS b_total, a.user_vote_count AS a_total
FROM user_votes a FROM user_votes a
JOIN user_votes b ON a.item_id = b.item_id JOIN user_votes b ON a.item_id = b.item_id
@ -211,9 +149,14 @@ async function getGraph (models, subName, postTrust = true, seeds = GLOBAL_SEEDS
confidence(before - disagree, b_total - after, ${Z_CONFIDENCE}) confidence(before - disagree, b_total - after, ${Z_CONFIDENCE})
ELSE 0 END AS trust ELSE 0 END AS trust
FROM user_pair FROM user_pair
WHERE NOT (b_id = ANY (${SN_ADMIN_IDS}))
UNION ALL UNION ALL
SELECT seed_id AS id, seed_id AS oid, 0 AS trust SELECT a_id AS id, seed_id AS oid, ${MAX_TRUST}::numeric as trust
FROM unnest(${seeds}::int[]) seed_id FROM user_pair, unnest(${SN_ADMIN_IDS}::int[]) seed_id
GROUP BY a_id, a_total, seed_id
UNION ALL
SELECT a_id AS id, a_id AS oid, ${MAX_TRUST}::float as trust
FROM user_pair
) )
SELECT id, oid, trust, sum(trust) OVER (PARTITION BY id) AS total_trust SELECT id, oid, trust, sum(trust) OVER (PARTITION BY id) AS total_trust
FROM trust_pairs FROM trust_pairs
@ -222,45 +165,46 @@ async function getGraph (models, subName, postTrust = true, seeds = GLOBAL_SEEDS
ORDER BY id ASC` ORDER BY id ASC`
} }
function reduceVectors (subName, fieldGraphVectors) { async function storeTrust (models, graph, vGlobal, mPersonal) {
function reduceVector (field, graph, vector, result = {}) { // convert nodeTrust into table literal string
vector.forEach((val, [idx]) => { let globalValues = ''
if (isNaN(val) || val <= 0) return let personalValues = ''
result[graph[idx].id] = { vGlobal.forEach((val, [idx]) => {
...result[graph[idx].id], if (isNaN(val)) return
subName, if (globalValues) globalValues += ','
userId: graph[idx].id, globalValues += `(${graph[idx].id}, ${val}::FLOAT)`
[field]: val if (personalValues) personalValues += ','
} personalValues += `(${GLOBAL_ROOT}, ${graph[idx].id}, ${val}::FLOAT)`
}) })
return result
}
let result = {} math.forEach(mPersonal, (val, [fromIdx, toIdx]) => {
for (const field in fieldGraphVectors) { const globalVal = vGlobal.get([toIdx, 0])
result = reduceVector(field, fieldGraphVectors[field].graph, fieldGraphVectors[field].vector, result) if (isNaN(val) || val - globalVal <= SIG_DIFF) return
} if (personalValues) personalValues += ','
personalValues += `(${graph[fromIdx].id}, ${graph[toIdx].id}, ${val}::FLOAT)`
})
// return only the users with trust > 0
return Object.values(result).filter(s =>
Object.keys(fieldGraphVectors).reduce(
(acc, key) => acc + (s[key] ?? 0),
0
) > IRRELEVANT_CUMULATIVE_TRUST
)
}
async function storeTrust (models, subName, results) {
console.timeLog('trust', `storing trust for ${subName} with ${results.length} users`)
// update the trust of each user in graph // update the trust of each user in graph
await models.$transaction([ await models.$transaction([
models.userSubTrust.deleteMany({ models.$executeRaw`UPDATE users SET trust = 0`,
where: { models.$executeRawUnsafe(
subName `UPDATE users
} SET trust = g.trust
}), FROM (values ${globalValues}) g(id, trust)
models.userSubTrust.createMany({ WHERE users.id = g.id`),
data: results models.$executeRawUnsafe(
}) `INSERT INTO "Arc" ("fromId", "toId", "zapTrust")
SELECT id, oid, trust
FROM (values ${personalValues}) g(id, oid, trust)
ON CONFLICT ("fromId", "toId") DO UPDATE SET "zapTrust" = EXCLUDED."zapTrust"`
),
// select all arcs that don't exist in personalValues and delete them
models.$executeRawUnsafe(
`DELETE FROM "Arc"
WHERE ("fromId", "toId") NOT IN (
SELECT id, oid
FROM (values ${personalValues}) g(id, oid, trust)
)`
)
]) ])
} }
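
At its core, the left-hand trustGivenGraph runs a damped power iteration: v ← (1 - SEED_WEIGHT) · Mᵀ v + SEED_WEIGHT · v_seed, repeated MAX_DEPTH times, with the seed mass split evenly across the territory's seeds. A compact sketch of that recurrence; the function name is illustrative and the constants are the left-hand values above.

import * as math from 'mathjs'

function dampedWalk (matT, vSeed, { seedWeight = 0.83, depth = 40 } = {}) {
  // vSeed puts 1 / seeds.length at each seed's row and zero elsewhere
  let v = vSeed.clone()
  for (let i = 0; i < depth; i++) {
    v = math.add(
      math.multiply(1 - seedWeight, math.multiply(matT, v)),
      math.multiply(seedWeight, vSeed)
    )
  }
  return v
}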

View File

@ -32,7 +32,7 @@ export async function rankViews () {
const models = createPrisma({ connectionParams: { connection_limit: 1 } }) const models = createPrisma({ connectionParams: { connection_limit: 1 } })
try { try {
for (const view of ['hot_score_view']) { for (const view of ['zap_rank_personal_view']) {
await models.$queryRawUnsafe(`REFRESH MATERIALIZED VIEW CONCURRENTLY ${view}`) await models.$queryRawUnsafe(`REFRESH MATERIALIZED VIEW CONCURRENTLY ${view}`)
} }
} finally { } finally {