territories
This commit is contained in:
parent
82a1b450f5
commit
10203f565c
|
@ -81,7 +81,8 @@ export default {
|
|||
json_build_object('name', 'boost', 'value', floor(avg(boost))),
|
||||
json_build_object('name', 'fees', 'value', floor(avg(fees))),
|
||||
json_build_object('name', 'zaps', 'value', floor(avg(tips))),
|
||||
json_build_object('name', 'donation', 'value', floor(avg(donations)))
|
||||
json_build_object('name', 'donation', 'value', floor(avg(donations))),
|
||||
json_build_object('name', 'territories', 'value', floor(avg(territories)))
|
||||
) AS data
|
||||
FROM spender_growth_days
|
||||
WHERE ${viewIntervalClause(range, 'spender_growth_days')}
|
||||
|
@ -97,7 +98,8 @@ export default {
|
|||
json_build_object('name', 'boost', 'value', count(DISTINCT "userId") FILTER (WHERE act = 'BOOST')),
|
||||
json_build_object('name', 'fees', 'value', count(DISTINCT "userId") FILTER (WHERE act = 'FEE')),
|
||||
json_build_object('name', 'zaps', 'value', count(DISTINCT "userId") FILTER (WHERE act = 'TIP')),
|
||||
json_build_object('name', 'donation', 'value', count(DISTINCT "userId") FILTER (WHERE act = 'DONATION'))
|
||||
json_build_object('name', 'donation', 'value', count(DISTINCT "userId") FILTER (WHERE act = 'DONATION')),
|
||||
json_build_object('name', 'territories', 'value', count(DISTINCT "userId") FILTER (WHERE act = 'TERRITORY'))
|
||||
) AS data
|
||||
FROM times
|
||||
LEFT JOIN
|
||||
|
@ -107,7 +109,12 @@ export default {
|
|||
UNION ALL
|
||||
(SELECT created_at, "userId", 'DONATION' as act
|
||||
FROM "Donation"
|
||||
WHERE ${intervalClause(range, 'Donation')})) u ON time = date_trunc('${timeUnitForRange(range)}', u.created_at)
|
||||
WHERE ${intervalClause(range, 'Donation')})
|
||||
UNION ALL
|
||||
(SELECT created_at, "userId", 'TERRITORY' as act
|
||||
FROM "SubAct"
|
||||
WHERE type = 'BILLING' AND ${intervalClause(range, 'SubAct')})
|
||||
) u ON time = date_trunc('${timeUnitForRange(range)}', u.created_at)
|
||||
GROUP BY time
|
||||
ORDER BY time ASC`, ...range)
|
||||
},
|
||||
|
@ -151,7 +158,8 @@ export default {
|
|||
json_build_object('name', 'boost', 'value', sum(boost)),
|
||||
json_build_object('name', 'fees', 'value', sum(fees)),
|
||||
json_build_object('name', 'zaps', 'value', sum(tips)),
|
||||
json_build_object('name', 'donations', 'value', sum(donations))
|
||||
json_build_object('name', 'donations', 'value', sum(donations)),
|
||||
json_build_object('name', 'territories', 'value', sum(territories))
|
||||
) AS data
|
||||
FROM spending_growth_days
|
||||
WHERE ${viewIntervalClause(range, 'spending_growth_days')}
|
||||
|
@ -164,9 +172,10 @@ export default {
|
|||
SELECT time, json_build_array(
|
||||
json_build_object('name', 'jobs', 'value', coalesce(floor(sum(CASE WHEN act = 'STREAM' THEN msats ELSE 0 END)/1000),0)),
|
||||
json_build_object('name', 'boost', 'value', coalesce(floor(sum(CASE WHEN act = 'BOOST' THEN msats ELSE 0 END)/1000),0)),
|
||||
json_build_object('name', 'fees', 'value', coalesce(floor(sum(CASE WHEN act NOT IN ('BOOST', 'TIP', 'STREAM', 'DONATION') THEN msats ELSE 0 END)/1000),0)),
|
||||
json_build_object('name', 'fees', 'value', coalesce(floor(sum(CASE WHEN act NOT IN ('BOOST', 'TIP', 'STREAM', 'DONATION', 'REVENUE') THEN msats ELSE 0 END)/1000),0)),
|
||||
json_build_object('name', 'zaps', 'value', coalesce(floor(sum(CASE WHEN act = 'TIP' THEN msats ELSE 0 END)/1000),0)),
|
||||
json_build_object('name', 'donations', 'value', coalesce(floor(sum(CASE WHEN act = 'DONATION' THEN msats ELSE 0 END)/1000),0))
|
||||
json_build_object('name', 'donations', 'value', coalesce(floor(sum(CASE WHEN act = 'DONATION' THEN msats ELSE 0 END)/1000),0)),
|
||||
json_build_object('name', 'territories', 'value', coalesce(floor(sum(CASE WHEN act = 'REVENUE' THEN msats ELSE 0 END)/1000),0))
|
||||
) AS data
|
||||
FROM times
|
||||
LEFT JOIN
|
||||
|
@ -176,7 +185,12 @@ export default {
|
|||
UNION ALL
|
||||
(SELECT created_at, sats * 1000 as msats, 'DONATION' as act
|
||||
FROM "Donation"
|
||||
WHERE ${intervalClause(range, 'Donation')})) u ON time = date_trunc('${timeUnitForRange(range)}', u.created_at)
|
||||
WHERE ${intervalClause(range, 'Donation')})
|
||||
UNION ALL
|
||||
(SELECT created_at, msats, 'REVENUE' as act
|
||||
FROM "SubAct"
|
||||
WHERE type = 'BILLING' AND ${intervalClause(range, 'SubAct')})
|
||||
) u ON time = date_trunc('${timeUnitForRange(range)}', u.created_at)
|
||||
GROUP BY time
|
||||
ORDER BY time ASC`, ...range)
|
||||
},
|
||||
|
@ -190,7 +204,8 @@ export default {
|
|||
json_build_object('name', 'posts', 'value', floor(avg(posts))),
|
||||
json_build_object('name', 'comments', 'value', floor(floor(avg(comments)))),
|
||||
json_build_object('name', 'rewards', 'value', floor(avg(rewards))),
|
||||
json_build_object('name', 'referrals', 'value', floor(avg(referrals)))
|
||||
json_build_object('name', 'referrals', 'value', floor(avg(referrals))),
|
||||
json_build_object('name', 'territories', 'value', floor(avg(territories)))
|
||||
) AS data
|
||||
FROM stackers_growth_days
|
||||
WHERE ${viewIntervalClause(range, 'stackers_growth_days')}
|
||||
|
@ -205,7 +220,8 @@ export default {
|
|||
json_build_object('name', 'posts', 'value', count(distinct user_id) FILTER (WHERE type = 'POST')),
|
||||
json_build_object('name', 'comments', 'value', count(distinct user_id) FILTER (WHERE type = 'COMMENT')),
|
||||
json_build_object('name', 'rewards', 'value', count(distinct user_id) FILTER (WHERE type = 'EARN')),
|
||||
json_build_object('name', 'referrals', 'value', count(distinct user_id) FILTER (WHERE type = 'REFERRAL'))
|
||||
json_build_object('name', 'referrals', 'value', count(distinct user_id) FILTER (WHERE type = 'REFERRAL')),
|
||||
json_build_object('name', 'territories', 'value', count(distinct user_id) FILTER (WHERE type = 'REVENUE'))
|
||||
) AS data
|
||||
FROM times
|
||||
LEFT JOIN
|
||||
|
@ -217,6 +233,10 @@ export default {
|
|||
(SELECT created_at, "userId" as user_id, 'EARN' as type
|
||||
FROM "Earn"
|
||||
WHERE ${intervalClause(range, 'Earn')})
|
||||
UNION ALL
|
||||
(SELECT created_at, "userId" as user_id, 'REVENUE' as type
|
||||
FROM "SubAct"
|
||||
WHERE type = 'REVENUE' AND ${intervalClause(range, 'SubAct')})
|
||||
UNION ALL
|
||||
(SELECT created_at, "referrerId" as user_id, 'REFERRAL' as type
|
||||
FROM "ReferralAct"
|
||||
|
@ -233,7 +253,8 @@ export default {
|
|||
json_build_object('name', 'rewards', 'value', sum(rewards)),
|
||||
json_build_object('name', 'posts', 'value', sum(posts)),
|
||||
json_build_object('name', 'comments', 'value', sum(comments)),
|
||||
json_build_object('name', 'referrals', 'value', sum(referrals))
|
||||
json_build_object('name', 'referrals', 'value', sum(referrals)),
|
||||
json_build_object('name', 'territories', 'value', sum(territories))
|
||||
) AS data
|
||||
FROM stacking_growth_days
|
||||
WHERE ${viewIntervalClause(range, 'stacking_growth_days')}
|
||||
|
@ -247,23 +268,28 @@ export default {
|
|||
json_build_object('name', 'rewards', 'value', coalesce(floor(sum(airdrop)/1000),0)),
|
||||
json_build_object('name', 'posts', 'value', coalesce(floor(sum(post)/1000),0)),
|
||||
json_build_object('name', 'comments', 'value', coalesce(floor(sum(comment)/1000),0)),
|
||||
json_build_object('name', 'referrals', 'value', coalesce(floor(sum(referral)/1000),0))
|
||||
json_build_object('name', 'referrals', 'value', coalesce(floor(sum(referral)/1000),0)),
|
||||
json_build_object('name', 'territories', 'value', coalesce(floor(sum(revenue)/1000),0))
|
||||
) AS data
|
||||
FROM times
|
||||
LEFT JOIN
|
||||
((SELECT "ItemAct".created_at, 0 as airdrop,
|
||||
CASE WHEN "Item"."parentId" IS NULL THEN 0 ELSE "ItemAct".msats END as comment,
|
||||
CASE WHEN "Item"."parentId" IS NULL THEN "ItemAct".msats ELSE 0 END as post,
|
||||
0 as referral
|
||||
0 as referral, 0 as revenue
|
||||
FROM "ItemAct"
|
||||
JOIN "Item" on "ItemAct"."itemId" = "Item".id
|
||||
WHERE ${intervalClause(range, 'ItemAct')} AND "ItemAct".act = 'TIP')
|
||||
UNION ALL
|
||||
(SELECT created_at, 0 as airdrop, 0 as post, 0 as comment, msats as referral
|
||||
(SELECT created_at, 0 as airdrop, 0 as post, 0 as comment, msats as referral, 0 as revenue
|
||||
FROM "ReferralAct"
|
||||
WHERE ${intervalClause(range, 'ReferralAct')})
|
||||
UNION ALL
|
||||
(SELECT created_at, msats as airdrop, 0 as post, 0 as comment, 0 as referral
|
||||
(SELECT created_at, 0 as airdrop, 0 as post, 0 as comment, 0 as referral, msats as revenue
|
||||
FROM "SubAct"
|
||||
WHERE type = 'REVENUE' AND ${intervalClause(range, 'SubAct')})
|
||||
UNION ALL
|
||||
(SELECT created_at, msats as airdrop, 0 as post, 0 as comment, 0 as referral, 0 as revenue
|
||||
FROM "Earn"
|
||||
WHERE ${intervalClause(range, 'Earn')})) u ON time = date_trunc('${timeUnitForRange(range)}', u.created_at)
|
||||
GROUP BY time
|
||||
|
|
|
@ -466,23 +466,21 @@ export default {
|
|||
orderBy: 'ORDER BY rank DESC'
|
||||
}, decodedCursor.offset, limit, ...subArr)
|
||||
|
||||
// XXX this is just for migration purposes ... can remove after initial deployment
|
||||
// and views have been populated
|
||||
if (items.length === 0) {
|
||||
// XXX this is just for subs that are really empty
|
||||
if (decodedCursor.offset === 0 && items.length < limit) {
|
||||
items = await itemQueryWithMeta({
|
||||
me,
|
||||
models,
|
||||
query: `
|
||||
${SELECT}, rank
|
||||
${SELECT}
|
||||
FROM "Item"
|
||||
JOIN zap_rank_tender_view ON "Item".id = zap_rank_tender_view.id
|
||||
${whereClause(
|
||||
subClause(sub, 3, 'Item', true),
|
||||
muteClause(me))}
|
||||
ORDER BY rank ASC
|
||||
ORDER BY ${orderByNumerator(models, 0)}/POWER(GREATEST(3, EXTRACT(EPOCH FROM (now_utc() - "Item".created_at))/3600), 1.3) DESC NULLS LAST, "Item".msats DESC, ("Item".freebie IS FALSE) DESC, "Item".id DESC
|
||||
OFFSET $1
|
||||
LIMIT $2`,
|
||||
orderBy: 'ORDER BY rank ASC'
|
||||
orderBy: `ORDER BY ${orderByNumerator(models, 0)}/POWER(GREATEST(3, EXTRACT(EPOCH FROM (now_utc() - "Item".created_at))/3600), 1.3) DESC NULLS LAST, "Item".msats DESC, ("Item".freebie IS FALSE) DESC, "Item".id DESC`
|
||||
}, decodedCursor.offset, limit, ...subArr)
|
||||
}
|
||||
|
||||
|
|
|
@ -229,6 +229,15 @@ export default {
|
|||
AND created_at <= $2
|
||||
GROUP BY "userId", created_at`
|
||||
)
|
||||
queries.push(
|
||||
`SELECT min(id)::text, created_at AS "sortTime", FLOOR(sum(msats) / 1000) as "earnedSats",
|
||||
'Revenue' AS type
|
||||
FROM "SubAct"
|
||||
WHERE "userId" = $1
|
||||
AND type = 'REVENUE'
|
||||
AND created_at <= $2
|
||||
GROUP BY "userId", "subName", created_at`
|
||||
)
|
||||
}
|
||||
|
||||
if (meFull.noteCowboyHat) {
|
||||
|
@ -330,6 +339,17 @@ export default {
|
|||
JobChanged: {
|
||||
item: async (n, args, { models, me }) => getItem(n, { id: n.id }, { models, me })
|
||||
},
|
||||
Revenue: {
|
||||
subName: async (n, args, { models }) => {
|
||||
const subAct = await models.subAct.findUnique({
|
||||
where: {
|
||||
id: Number(n.id)
|
||||
}
|
||||
})
|
||||
|
||||
return subAct.subName
|
||||
}
|
||||
},
|
||||
Streak: {
|
||||
days: async (n, args, { models }) => {
|
||||
const res = await models.$queryRaw`
|
||||
|
|
|
@ -15,7 +15,13 @@ export default async function serialize (models, ...calls) {
|
|||
return calls.length > 1 ? result : result[0]
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
if (error.message.includes('SN_INSUFFICIENT_FUNDS')) {
|
||||
// two cases where we get insufficient funds:
|
||||
// 1. plpgsql function raises
|
||||
// 2. constraint violation via a prisma call
|
||||
// XXX prisma does not provide a way to distinguish these cases so we
|
||||
// have to check the error message
|
||||
if (error.message.includes('SN_INSUFFICIENT_FUNDS') ||
|
||||
error.message.includes('\\"users\\" violates check constraint \\"msats_positive\\"')) {
|
||||
bail(new GraphQLError('insufficient funds', { extensions: { code: 'BAD_INPUT' } }))
|
||||
}
|
||||
if (error.message.includes('SN_NOT_SERIALIZABLE')) {
|
||||
|
@ -65,24 +71,28 @@ export async function serializeInvoicable (query, { models, lnd, hash, hmac, me,
|
|||
throw new Error('you must be logged in or pay')
|
||||
}
|
||||
|
||||
let trx = [query]
|
||||
let trx = Array.isArray(query) ? query : [query]
|
||||
|
||||
let invoice
|
||||
if (hash) {
|
||||
invoice = await checkInvoice(models, hash, hmac, enforceFee)
|
||||
trx = [
|
||||
models.$queryRaw`UPDATE users SET msats = msats + ${invoice.msatsReceived} WHERE id = ${invoice.user.id}`,
|
||||
query,
|
||||
...trx,
|
||||
models.invoice.update({ where: { hash: invoice.hash }, data: { confirmedAt: new Date() } })
|
||||
]
|
||||
}
|
||||
|
||||
const results = await serialize(models, ...trx)
|
||||
const result = trx.length > 1 ? results[1][0] : results[0]
|
||||
let results = await serialize(models, ...trx)
|
||||
|
||||
if (invoice?.isHeld) await settleHodlInvoice({ secret: invoice.preimage, lnd })
|
||||
if (hash) {
|
||||
if (invoice?.isHeld) await settleHodlInvoice({ secret: invoice.preimage, lnd })
|
||||
results = results.slice(1, -1)
|
||||
}
|
||||
|
||||
return result
|
||||
// if there is only one result, return it directly, else the array
|
||||
results = results.flat(2)
|
||||
return results.length > 1 ? results : results[0]
|
||||
}
|
||||
|
||||
export async function checkInvoice (models, hash, hmac, fee) {
|
||||
|
|
|
@ -1,3 +1,65 @@
|
|||
import { GraphQLError } from 'graphql'
|
||||
import serialize, { serializeInvoicable } from './serial'
|
||||
import { TERRITORY_COST_MONTHLY, TERRITORY_COST_ONCE, TERRITORY_COST_YEARLY } from '../../lib/constants'
|
||||
import { datePivot } from '../../lib/time'
|
||||
import { ssValidate, territorySchema } from '../../lib/validate'
|
||||
|
||||
export function paySubQueries (sub, models) {
|
||||
let billingAt = datePivot(sub.billedLastAt, { months: 1 })
|
||||
let billAt = datePivot(sub.billedLastAt, { months: 2 })
|
||||
if (sub.billingType === 'ONCE') {
|
||||
return []
|
||||
} else if (sub.billingType === 'YEARLY') {
|
||||
billingAt = datePivot(sub.billedLastAt, { years: 1 })
|
||||
billAt = datePivot(sub.billedLastAt, { years: 2 })
|
||||
}
|
||||
|
||||
const cost = BigInt(sub.billingCost) * BigInt(1000)
|
||||
|
||||
return [
|
||||
models.user.update({
|
||||
where: {
|
||||
id: sub.userId
|
||||
},
|
||||
data: {
|
||||
msats: {
|
||||
decrement: cost
|
||||
}
|
||||
}
|
||||
}),
|
||||
// update 'em
|
||||
models.sub.update({
|
||||
where: {
|
||||
name: sub.name
|
||||
},
|
||||
data: {
|
||||
billedLastAt: billingAt,
|
||||
status: 'ACTIVE'
|
||||
}
|
||||
}),
|
||||
// record 'em
|
||||
models.subAct.create({
|
||||
data: {
|
||||
userId: sub.userId,
|
||||
subName: sub.name,
|
||||
msats: cost,
|
||||
type: 'BILLING'
|
||||
}
|
||||
}),
|
||||
models.$executeRaw`
|
||||
DELETE FROM pgboss.job
|
||||
WHERE name = 'territoryBilling'
|
||||
AND data->>'subName' = ${sub.name}
|
||||
AND completedon IS NULL`,
|
||||
// schedule 'em
|
||||
models.$queryRaw`
|
||||
INSERT INTO pgboss.job (name, data, startafter) VALUES ('territoryBilling',
|
||||
${JSON.stringify({
|
||||
subName: sub.name
|
||||
})}::JSONB, ${billAt})`
|
||||
]
|
||||
}
|
||||
|
||||
export default {
|
||||
Query: {
|
||||
sub: async (parent, { name }, { models, me }) => {
|
||||
|
@ -20,6 +82,18 @@ export default {
|
|||
}
|
||||
})
|
||||
},
|
||||
subs: async (parent, args, { models }) => {
|
||||
return await models.sub.findMany({
|
||||
where: {
|
||||
status: {
|
||||
not: 'STOPPED'
|
||||
}
|
||||
},
|
||||
orderBy: {
|
||||
name: 'asc'
|
||||
}
|
||||
})
|
||||
},
|
||||
subLatestPost: async (parent, { name }, { models, me }) => {
|
||||
const latest = await models.item.findFirst({
|
||||
where: {
|
||||
|
@ -32,5 +106,164 @@ export default {
|
|||
|
||||
return latest?.createdAt
|
||||
}
|
||||
},
|
||||
Mutation: {
|
||||
upsertSub: async (parent, { hash, hmac, ...data }, { me, models, lnd }) => {
|
||||
if (!me) {
|
||||
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
|
||||
}
|
||||
|
||||
// XXX this is because we did the wrong thing and used the subName as a primary key
|
||||
const existing = await models.sub.findUnique({
|
||||
where: {
|
||||
name: data.name,
|
||||
userId: me.id
|
||||
}
|
||||
})
|
||||
|
||||
await ssValidate(territorySchema, data, { models, me })
|
||||
|
||||
if (existing) {
|
||||
return await updateSub(parent, data, { me, models, lnd, hash, hmac })
|
||||
} else {
|
||||
return await createSub(parent, data, { me, models, lnd, hash, hmac })
|
||||
}
|
||||
},
|
||||
paySub: async (parent, { name, hash, hmac }, { me, models, lnd }) => {
|
||||
// check that they own the sub
|
||||
const sub = await models.sub.findUnique({
|
||||
where: {
|
||||
name
|
||||
}
|
||||
})
|
||||
|
||||
if (!sub) {
|
||||
throw new GraphQLError('sub not found', { extensions: { code: 'BAD_INPUT' } })
|
||||
}
|
||||
|
||||
if (sub.userId !== me.id) {
|
||||
throw new GraphQLError('you do not own this sub', { extensions: { code: 'BAD_INPUT' } })
|
||||
}
|
||||
|
||||
if (sub.status === 'ACTIVE') {
|
||||
return sub
|
||||
}
|
||||
|
||||
const queries = paySubQueries(sub, models)
|
||||
if (queries.length === 0) {
|
||||
return sub
|
||||
}
|
||||
|
||||
const results = await serializeInvoicable(
|
||||
queries,
|
||||
{ models, lnd, hash, hmac, me, enforceFee: sub.billingCost })
|
||||
return results[1]
|
||||
}
|
||||
},
|
||||
Sub: {
|
||||
user: async (sub, args, { models }) => {
|
||||
if (sub.user) {
|
||||
return sub.user
|
||||
}
|
||||
return await models.user.findUnique({ where: { id: sub.userId } })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function createSub (parent, data, { me, models, lnd, hash, hmac }) {
|
||||
const { billingType } = data
|
||||
let billingCost = TERRITORY_COST_MONTHLY
|
||||
let billAt = datePivot(new Date(), { months: 1 })
|
||||
|
||||
if (billingType === 'ONCE') {
|
||||
billingCost = TERRITORY_COST_ONCE
|
||||
billAt = null
|
||||
} else if (billingType === 'YEARLY') {
|
||||
billingCost = TERRITORY_COST_YEARLY
|
||||
billAt = datePivot(new Date(), { years: 1 })
|
||||
}
|
||||
|
||||
const cost = BigInt(1000) * BigInt(billingCost)
|
||||
|
||||
try {
|
||||
const results = await serializeInvoicable([
|
||||
// bill 'em
|
||||
models.user.update({
|
||||
where: {
|
||||
id: me.id
|
||||
},
|
||||
data: {
|
||||
msats: {
|
||||
decrement: cost
|
||||
}
|
||||
}
|
||||
}),
|
||||
// create 'em
|
||||
models.sub.create({
|
||||
data: {
|
||||
...data,
|
||||
billingCost,
|
||||
rankingType: 'WOT',
|
||||
userId: me.id
|
||||
}
|
||||
}),
|
||||
// record 'em
|
||||
models.subAct.create({
|
||||
data: {
|
||||
userId: me.id,
|
||||
subName: data.name,
|
||||
msats: cost,
|
||||
type: 'BILLING'
|
||||
}
|
||||
}),
|
||||
// schedule 'em
|
||||
...(billAt
|
||||
? [models.$queryRaw`
|
||||
INSERT INTO pgboss.job (name, data, startafter) VALUES ('territoryBilling',
|
||||
${JSON.stringify({
|
||||
subName: data.name
|
||||
})}::JSONB, ${billAt})`]
|
||||
: [])
|
||||
], { models, lnd, hash, hmac, me, enforceFee: billingCost })
|
||||
|
||||
return results[1]
|
||||
} catch (error) {
|
||||
if (error.code === 'P2002') {
|
||||
throw new GraphQLError('name taken', { extensions: { code: 'BAD_INPUT' } })
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
async function updateSub (parent, { name, ...data }, { me, models, lnd, hash, hmac }) {
|
||||
const { billingType } = data
|
||||
|
||||
let billingCost = TERRITORY_COST_MONTHLY
|
||||
if (billingType === 'ONCE') {
|
||||
billingCost = TERRITORY_COST_ONCE
|
||||
} else if (billingType === 'YEARLY') {
|
||||
billingCost = TERRITORY_COST_YEARLY
|
||||
}
|
||||
|
||||
try {
|
||||
const results = await serialize(models,
|
||||
// update 'em
|
||||
models.sub.update({
|
||||
data: {
|
||||
...data,
|
||||
billingCost,
|
||||
billingType
|
||||
},
|
||||
where: {
|
||||
name
|
||||
}
|
||||
}))
|
||||
|
||||
return results[0]
|
||||
} catch (error) {
|
||||
if (error.code === 'P2002') {
|
||||
throw new GraphQLError('name taken', { extensions: { code: 'BAD_INPUT' } })
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
|
|
@ -106,101 +106,108 @@ export default {
|
|||
|
||||
if (include.has('invoice')) {
|
||||
queries.push(
|
||||
`(SELECT ('invoice' || id) as id, id as "factId", bolt11, created_at as "createdAt",
|
||||
COALESCE("msatsReceived", "msatsRequested") as msats, NULL as "msatsFee",
|
||||
CASE WHEN "confirmedAt" IS NOT NULL THEN 'CONFIRMED'
|
||||
WHEN "expiresAt" <= $2 THEN 'EXPIRED'
|
||||
WHEN cancelled THEN 'CANCELLED'
|
||||
ELSE 'PENDING' END as status,
|
||||
"desc" as description,
|
||||
comment as "invoiceComment",
|
||||
"lud18Data" as "invoicePayerData",
|
||||
'invoice' as type
|
||||
FROM "Invoice"
|
||||
WHERE "userId" = $1
|
||||
AND created_at <= $2)`)
|
||||
`(SELECT
|
||||
id, created_at as "createdAt", COALESCE("msatsReceived", "msatsRequested") as msats,
|
||||
'invoice' as type,
|
||||
jsonb_build_object(
|
||||
'bolt11', bolt11,
|
||||
'status', CASE WHEN "confirmedAt" IS NOT NULL THEN 'CONFIRMED'
|
||||
WHEN "expiresAt" <= $2 THEN 'EXPIRED'
|
||||
WHEN cancelled THEN 'CANCELLED'
|
||||
ELSE 'PENDING' END,
|
||||
'description', "desc",
|
||||
'invoiceComment', comment,
|
||||
'invoicePayerData', "lud18Data") as other
|
||||
FROM "Invoice"
|
||||
WHERE "userId" = $1
|
||||
AND created_at <= $2)`
|
||||
)
|
||||
}
|
||||
|
||||
if (include.has('withdrawal')) {
|
||||
queries.push(
|
||||
`(SELECT ('withdrawal' || id) as id, id as "factId", bolt11, created_at as "createdAt",
|
||||
CASE WHEN status = 'CONFIRMED' THEN "msatsPaid"
|
||||
ELSE "msatsPaying" END as msats,
|
||||
CASE WHEN status = 'CONFIRMED' THEN "msatsFeePaid"
|
||||
ELSE "msatsFeePaying" END as "msatsFee",
|
||||
COALESCE(status::text, 'PENDING') as status,
|
||||
NULL as description,
|
||||
NULL as "invoiceComment",
|
||||
NULL as "invoicePayerData",
|
||||
'withdrawal' as type
|
||||
FROM "Withdrawl"
|
||||
WHERE "userId" = $1
|
||||
AND created_at <= $2)`)
|
||||
`(SELECT
|
||||
id, created_at as "createdAt",
|
||||
COALESCE("msatsPaid", "msatsPaying") as msats,
|
||||
'withdrawal' as type,
|
||||
jsonb_build_object(
|
||||
'status', COALESCE(status::text, 'PENDING'),
|
||||
'msatsFee', COALESCE("msatsFeePaid", "msatsFeePaying")) as other
|
||||
FROM "Withdrawl"
|
||||
WHERE "userId" = $1
|
||||
AND created_at <= $2)`
|
||||
)
|
||||
}
|
||||
|
||||
if (include.has('stacked')) {
|
||||
// query1 - get all sats stacked as OP or as a forward
|
||||
queries.push(
|
||||
`(SELECT
|
||||
('stacked' || "Item".id) AS id,
|
||||
"Item".id AS "factId",
|
||||
NULL AS bolt11,
|
||||
MAX("ItemAct".created_at) AS "createdAt",
|
||||
FLOOR(
|
||||
SUM("ItemAct".msats)
|
||||
* (CASE WHEN "Item"."userId" = $1 THEN
|
||||
COALESCE(1 - ((SELECT SUM(pct) FROM "ItemForward" WHERE "itemId" = "Item".id) / 100.0), 1)
|
||||
ELSE
|
||||
(SELECT pct FROM "ItemForward" WHERE "itemId" = "Item".id AND "userId" = $1) / 100.0
|
||||
END)
|
||||
) AS "msats",
|
||||
0 AS "msatsFee",
|
||||
NULL AS status,
|
||||
NULL as description,
|
||||
NULL as "invoiceComment",
|
||||
NULL as "invoicePayerData",
|
||||
'stacked' AS type
|
||||
FROM "ItemAct"
|
||||
JOIN "Item" ON "ItemAct"."itemId" = "Item".id
|
||||
-- only join to with item forward for items where we aren't the OP
|
||||
LEFT JOIN "ItemForward" ON "ItemForward"."itemId" = "Item".id AND "Item"."userId" <> $1
|
||||
WHERE "ItemAct".act = 'TIP'
|
||||
AND ("Item"."userId" = $1 OR "ItemForward"."userId" = $1)
|
||||
AND "ItemAct".created_at <= $2
|
||||
GROUP BY "Item".id)`
|
||||
"Item".id,
|
||||
MAX("ItemAct".created_at) AS "createdAt",
|
||||
FLOOR(
|
||||
SUM("ItemAct".msats)
|
||||
* (CASE WHEN "Item"."userId" = $1 THEN
|
||||
COALESCE(1 - ((SELECT SUM(pct) FROM "ItemForward" WHERE "itemId" = "Item".id) / 100.0), 1)
|
||||
ELSE
|
||||
(SELECT pct FROM "ItemForward" WHERE "itemId" = "Item".id AND "userId" = $1) / 100.0
|
||||
END)
|
||||
) AS msats,
|
||||
'stacked' AS type, NULL::JSONB AS other
|
||||
FROM "ItemAct"
|
||||
JOIN "Item" ON "ItemAct"."itemId" = "Item".id
|
||||
-- only join to with item forward for items where we aren't the OP
|
||||
LEFT JOIN "ItemForward" ON "ItemForward"."itemId" = "Item".id AND "Item"."userId" <> $1
|
||||
WHERE "ItemAct".act = 'TIP'
|
||||
AND ("Item"."userId" = $1 OR "ItemForward"."userId" = $1)
|
||||
AND "ItemAct".created_at <= $2
|
||||
GROUP BY "Item".id)`
|
||||
)
|
||||
queries.push(
|
||||
`(SELECT ('earn' || min("Earn".id)) as id, min("Earn".id) as "factId", NULL as bolt11,
|
||||
created_at as "createdAt", sum(msats),
|
||||
0 as "msatsFee", NULL as status, NULL as description, NULL as "invoiceComment", NULL as "invoicePayerData", 'earn' as type
|
||||
`(SELECT
|
||||
min("Earn".id) as id, created_at as "createdAt",
|
||||
sum(msats) as msats, 'earn' as type, NULL::JSONB AS other
|
||||
FROM "Earn"
|
||||
WHERE "Earn"."userId" = $1 AND "Earn".created_at <= $2
|
||||
GROUP BY "userId", created_at)`)
|
||||
GROUP BY "userId", created_at)`
|
||||
)
|
||||
queries.push(
|
||||
`(SELECT ('referral' || "ReferralAct".id) as id, "ReferralAct".id as "factId", NULL as bolt11,
|
||||
created_at as "createdAt", msats,
|
||||
0 as "msatsFee", NULL as status, NULL as description, NULL as "invoiceComment", NULL as "invoicePayerData", 'referral' as type
|
||||
`(SELECT id, created_at as "createdAt", msats, 'referral' as type, NULL::JSONB AS other
|
||||
FROM "ReferralAct"
|
||||
WHERE "ReferralAct"."referrerId" = $1 AND "ReferralAct".created_at <= $2)`)
|
||||
WHERE "ReferralAct"."referrerId" = $1 AND "ReferralAct".created_at <= $2)`
|
||||
)
|
||||
queries.push(
|
||||
`(SELECT id, created_at as "createdAt", msats, 'revenue' as type,
|
||||
jsonb_build_object('subName', "SubAct"."subName") as other
|
||||
FROM "SubAct"
|
||||
WHERE "userId" = $1 AND type = 'REVENUE'
|
||||
AND created_at <= $2)`
|
||||
)
|
||||
}
|
||||
|
||||
if (include.has('spent')) {
|
||||
queries.push(
|
||||
`(SELECT ('spent' || "Item".id) as id, "Item".id as "factId", NULL as bolt11,
|
||||
MAX("ItemAct".created_at) as "createdAt", sum("ItemAct".msats) as msats,
|
||||
0 as "msatsFee", NULL as status, NULL as description, NULL as "invoiceComment", NULL as "invoicePayerData", 'spent' as type
|
||||
FROM "ItemAct"
|
||||
JOIN "Item" on "ItemAct"."itemId" = "Item".id
|
||||
WHERE "ItemAct"."userId" = $1
|
||||
AND "ItemAct".created_at <= $2
|
||||
GROUP BY "Item".id)`)
|
||||
`(SELECT "Item".id, MAX("ItemAct".created_at) as "createdAt", sum("ItemAct".msats) as msats,
|
||||
'spent' as type, NULL::JSONB AS other
|
||||
FROM "ItemAct"
|
||||
JOIN "Item" on "ItemAct"."itemId" = "Item".id
|
||||
WHERE "ItemAct"."userId" = $1
|
||||
AND "ItemAct".created_at <= $2
|
||||
GROUP BY "Item".id)`
|
||||
)
|
||||
queries.push(
|
||||
`(SELECT ('donation' || "Donation".id) as id, "Donation".id as "factId", NULL as bolt11,
|
||||
created_at as "createdAt", sats * 1000 as msats,
|
||||
0 as "msatsFee", NULL as status, NULL as description, NULL as "invoiceComment", NULL as "invoicePayerData", 'donation' as type
|
||||
`(SELECT id, created_at as "createdAt", sats * 1000 as msats,'donation' as type, NULL::JSONB AS other
|
||||
FROM "Donation"
|
||||
WHERE "userId" = $1
|
||||
AND created_at <= $2)`)
|
||||
AND created_at <= $2)`
|
||||
)
|
||||
queries.push(
|
||||
`(SELECT id, created_at as "createdAt", msats, 'billing' as type,
|
||||
jsonb_build_object('subName', "SubAct"."subName") as other
|
||||
FROM "SubAct"
|
||||
WHERE "userId" = $1 AND type = 'BILLING'
|
||||
AND created_at <= $2)`
|
||||
)
|
||||
}
|
||||
|
||||
if (queries.length === 0) {
|
||||
|
@ -211,12 +218,15 @@ export default {
|
|||
}
|
||||
|
||||
let history = await models.$queryRawUnsafe(`
|
||||
${queries.join(' UNION ALL ')}
|
||||
ORDER BY "createdAt" DESC
|
||||
OFFSET $3
|
||||
LIMIT ${LIMIT}`, me.id, decodedCursor.time, decodedCursor.offset)
|
||||
${queries.join(' UNION ALL ')}
|
||||
ORDER BY "createdAt" DESC
|
||||
OFFSET $3
|
||||
LIMIT ${LIMIT}`,
|
||||
me.id, decodedCursor.time, decodedCursor.offset)
|
||||
|
||||
history = history.map(f => {
|
||||
f = { ...f, ...f.other }
|
||||
|
||||
if (f.bolt11) {
|
||||
const inv = lnpr.decode(f.bolt11)
|
||||
if (inv) {
|
||||
|
@ -230,14 +240,14 @@ export default {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
switch (f.type) {
|
||||
case 'withdrawal':
|
||||
f.msats = (-1 * Number(f.msats)) - Number(f.msatsFee)
|
||||
break
|
||||
case 'spent':
|
||||
f.msats *= -1
|
||||
break
|
||||
case 'donation':
|
||||
case 'billing':
|
||||
f.msats *= -1
|
||||
break
|
||||
default:
|
||||
|
@ -281,6 +291,8 @@ export default {
|
|||
expires_at: expiresAt
|
||||
})
|
||||
|
||||
console.log('invoice', balanceLimit)
|
||||
|
||||
const [inv] = await serialize(models,
|
||||
models.$queryRaw`SELECT * FROM create_invoice(${invoice.id}, ${invoice.request},
|
||||
${expiresAt}::timestamp, ${amount * 1000}, ${user.id}::INTEGER, ${description}, NULL, NULL,
|
||||
|
@ -411,12 +423,11 @@ export default {
|
|||
const [item] = await models.$queryRawUnsafe(`
|
||||
${SELECT}
|
||||
FROM "Item"
|
||||
WHERE id = $1`, Number(fact.factId))
|
||||
WHERE id = $1`, Number(fact.id))
|
||||
|
||||
return item
|
||||
},
|
||||
sats: fact => msatsToSatsDecimal(fact.msats),
|
||||
satsFee: fact => msatsToSatsDecimal(fact.msatsFee)
|
||||
sats: fact => msatsToSatsDecimal(fact.msats)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -105,7 +105,7 @@ export function getGetServerSideProps (
|
|||
console.error(e)
|
||||
}
|
||||
|
||||
if (error || !data || (notFound && notFound(data, vars))) {
|
||||
if (error || !data || (notFound && notFound(data, vars, me))) {
|
||||
return {
|
||||
notFound: true
|
||||
}
|
||||
|
|
|
@ -77,6 +77,13 @@ export default gql`
|
|||
sources: EarnSources
|
||||
}
|
||||
|
||||
type Revenue {
|
||||
id: ID!
|
||||
earnedSats: Int!
|
||||
sortTime: Date!
|
||||
subName: String!
|
||||
}
|
||||
|
||||
type InvoicePaid {
|
||||
id: ID!
|
||||
earnedSats: Int!
|
||||
|
@ -91,7 +98,7 @@ export default gql`
|
|||
|
||||
union Notification = Reply | Votification | Mention
|
||||
| Invitification | Earn | JobChanged | InvoicePaid | Referral
|
||||
| Streak | FollowActivity | ForwardedVotification
|
||||
| Streak | FollowActivity | ForwardedVotification | Revenue
|
||||
|
||||
type Notifications {
|
||||
lastChecked: Date
|
||||
|
|
|
@ -4,14 +4,28 @@ export default gql`
|
|||
extend type Query {
|
||||
sub(name: String): Sub
|
||||
subLatestPost(name: String!): String
|
||||
subs: [Sub!]!
|
||||
}
|
||||
|
||||
extend type Mutation {
|
||||
upsertSub(name: String!, desc: String, baseCost: Int!,
|
||||
postTypes: [String!]!, billingType: String!, hash: String, hmac: String): Sub
|
||||
paySub(name: String!, hash: String, hmac: String): Sub
|
||||
}
|
||||
|
||||
type Sub {
|
||||
name: String!
|
||||
name: ID!
|
||||
createdAt: Date!
|
||||
userId: Int!
|
||||
user: User!
|
||||
desc: String
|
||||
updatedAt: Date!
|
||||
postTypes: [String!]!
|
||||
billingCost: Int!
|
||||
billingType: String!
|
||||
rankingType: String!
|
||||
billedLastAt: Date!
|
||||
baseCost: Int!
|
||||
status: String!
|
||||
}
|
||||
`
|
||||
|
|
|
@ -49,17 +49,16 @@ export default gql`
|
|||
|
||||
type Fact {
|
||||
id: ID!
|
||||
factId: ID!
|
||||
bolt11: String
|
||||
createdAt: Date!
|
||||
sats: Float!
|
||||
satsFee: Float
|
||||
status: String
|
||||
type: String!
|
||||
bolt11: String
|
||||
status: String
|
||||
description: String
|
||||
item: Item
|
||||
invoiceComment: String
|
||||
invoicePayerData: JSONObject
|
||||
subName: String
|
||||
}
|
||||
|
||||
type History {
|
||||
|
|
|
@ -31,3 +31,16 @@ export default function AccordianItem ({ header, body, headerColor = 'var(--them
|
|||
</Accordion>
|
||||
)
|
||||
}
|
||||
|
||||
export function AccordianCard ({ header, children, show }) {
|
||||
return (
|
||||
<Accordion defaultActiveKey={show ? '0' : undefined}>
|
||||
<Accordion.Item eventKey='0'>
|
||||
<Accordion.Header>{header}</Accordion.Header>
|
||||
<Accordion.Body>
|
||||
{children}
|
||||
</Accordion.Body>
|
||||
</Accordion.Item>
|
||||
</Accordion>
|
||||
)
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@ import Countdown from './countdown'
|
|||
import AdvPostForm, { AdvPostInitial } from './adv-post-form'
|
||||
import InputGroup from 'react-bootstrap/InputGroup'
|
||||
import { bountySchema } from '../lib/validate'
|
||||
import { SubSelectInitial } from './sub-select-form'
|
||||
import { SubSelectInitial } from './sub-select'
|
||||
import { useCallback } from 'react'
|
||||
import { normalizeForwards, toastDeleteScheduled } from '../lib/form'
|
||||
import { MAX_TITLE_LENGTH } from '../lib/constants'
|
||||
|
|
|
@ -11,3 +11,23 @@ export default function SimpleCountdown ({ className, onComplete, date }) {
|
|||
</span>
|
||||
)
|
||||
}
|
||||
|
||||
export function LongCountdown ({ className, onComplete, date }) {
|
||||
return (
|
||||
<span className={className}>
|
||||
<Countdown
|
||||
date={date}
|
||||
renderer={props => {
|
||||
return (
|
||||
<span suppressHydrationWarning>
|
||||
{props.formatted.days && `${props.formatted.days} days `}
|
||||
{props.formatted.minutes && `${props.formatted.minutes} minutes `}
|
||||
{props.formatted.seconds && `${props.formatted.seconds} seconds `}
|
||||
</span>
|
||||
)
|
||||
}}
|
||||
onComplete={onComplete}
|
||||
/>
|
||||
</span>
|
||||
)
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@ import { ITEM_FIELDS } from '../fragments/items'
|
|||
import AccordianItem from './accordian-item'
|
||||
import Item from './item'
|
||||
import { discussionSchema } from '../lib/validate'
|
||||
import { SubSelectInitial } from './sub-select-form'
|
||||
import { SubSelectInitial } from './sub-select'
|
||||
import { useCallback } from 'react'
|
||||
import { normalizeForwards, toastDeleteScheduled } from '../lib/form'
|
||||
import { MAX_TITLE_LENGTH } from '../lib/constants'
|
||||
|
|
|
@ -4,7 +4,7 @@ import ActionTooltip from './action-tooltip'
|
|||
import Info from './info'
|
||||
import styles from './fee-button.module.css'
|
||||
import { gql, useQuery } from '@apollo/client'
|
||||
import { FREEBIE_BASE_COST_THRESHOLD, SSR } from '../lib/constants'
|
||||
import { SSR } from '../lib/constants'
|
||||
import { numWithUnits } from '../lib/format'
|
||||
import { useMe } from './me'
|
||||
import AnonIcon from '../svgs/spy-fill.svg'
|
||||
|
@ -95,7 +95,8 @@ export function FeeButtonProvider ({ baseLineItems = {}, useRemoteLineItems = ()
|
|||
}
|
||||
|
||||
export function useFeeButton () {
|
||||
return useContext(FeeButtonContext)
|
||||
const context = useContext(FeeButtonContext)
|
||||
return context
|
||||
}
|
||||
|
||||
function FreebieDialog () {
|
||||
|
@ -114,9 +115,7 @@ export default function FeeButton ({ ChildButton = SubmitButton, variant, text,
|
|||
const me = useMe()
|
||||
const { lines, total, disabled: ctxDisabled } = useFeeButton()
|
||||
// freebies: there's only a base cost, it's less than 10, and we have less than 10 sats
|
||||
const free = total === lines.baseCost?.modifier(0) &&
|
||||
total <= FREEBIE_BASE_COST_THRESHOLD &&
|
||||
me?.privates?.sats < FREEBIE_BASE_COST_THRESHOLD
|
||||
const free = total === lines.baseCost?.modifier(0) && me?.privates?.sats < total
|
||||
const feeText = free
|
||||
? 'free'
|
||||
: total > 1
|
||||
|
|
|
@ -696,11 +696,14 @@ export function VariableInput ({ label, groupClassName, name, hint, max, min, re
|
|||
)
|
||||
}
|
||||
|
||||
export function Checkbox ({ children, label, groupClassName, hiddenLabel, extra, handleChange, inline, disabled, ...props }) {
|
||||
export function Checkbox ({
|
||||
children, label, groupClassName, type = 'checkbox',
|
||||
hiddenLabel, extra, handleChange, inline, disabled, ...props
|
||||
}) {
|
||||
// React treats radios and checkbox inputs differently other input types, select, and textarea.
|
||||
// Formik does this too! When you specify `type` to useField(), it will
|
||||
// return the correct bag of props for you
|
||||
const [field,, helpers] = useField({ ...props, type: 'checkbox' })
|
||||
const [field, meta, helpers] = useField({ ...props, type })
|
||||
return (
|
||||
<FormGroup className={groupClassName}>
|
||||
{hiddenLabel && <BootstrapForm.Label className='invisible'>{label}</BootstrapForm.Label>}
|
||||
|
@ -709,7 +712,8 @@ export function Checkbox ({ children, label, groupClassName, hiddenLabel, extra,
|
|||
inline={inline}
|
||||
>
|
||||
<BootstrapForm.Check.Input
|
||||
{...field} {...props} disabled={disabled} type='checkbox' onChange={(e) => {
|
||||
isInvalid={meta.touched && meta.error}
|
||||
{...field} {...props} disabled={disabled} type={type} onChange={(e) => {
|
||||
field.onChange(e)
|
||||
handleChange && handleChange(e.target.checked, helpers.setValue)
|
||||
}}
|
||||
|
@ -726,6 +730,19 @@ export function Checkbox ({ children, label, groupClassName, hiddenLabel, extra,
|
|||
)
|
||||
}
|
||||
|
||||
export function CheckboxGroup ({ label, groupClassName, children, ...props }) {
|
||||
const [, meta] = useField(props)
|
||||
return (
|
||||
<FormGroup label={label} className={groupClassName}>
|
||||
{children}
|
||||
{/* force the feedback to display with d-block */}
|
||||
<BootstrapForm.Control.Feedback className='d-block' type='invalid'>
|
||||
{meta.touched && meta.error}
|
||||
</BootstrapForm.Control.Feedback>
|
||||
</FormGroup>
|
||||
)
|
||||
}
|
||||
|
||||
const StorageKeyPrefixContext = createContext()
|
||||
|
||||
export function Form ({
|
||||
|
@ -832,7 +849,17 @@ export function Select ({ label, items, groupClassName, onChange, noForm, overri
|
|||
}}
|
||||
isInvalid={invalid}
|
||||
>
|
||||
{items?.map(item => <option key={item}>{item}</option>)}
|
||||
{items.map(item => {
|
||||
if (item && typeof item === 'object') {
|
||||
return (
|
||||
<optgroup key={item.label} label={item.label}>
|
||||
{item.items.map(item => <option key={item}>{item}</option>)}
|
||||
</optgroup>
|
||||
)
|
||||
} else {
|
||||
return <option key={item}>{item}</option>
|
||||
}
|
||||
})}
|
||||
</BootstrapForm.Select>
|
||||
<BootstrapForm.Control.Feedback type='invalid'>
|
||||
{meta.touched && meta.error}
|
||||
|
|
|
@ -16,10 +16,9 @@ import { abbrNum } from '../lib/format'
|
|||
import NoteIcon from '../svgs/notification-4-fill.svg'
|
||||
import { useQuery } from '@apollo/client'
|
||||
import LightningIcon from '../svgs/bolt.svg'
|
||||
import { Select } from './form'
|
||||
import SearchIcon from '../svgs/search-line.svg'
|
||||
import BackArrow from '../svgs/arrow-left-line.svg'
|
||||
import { SSR, SUBS } from '../lib/constants'
|
||||
import { SSR } from '../lib/constants'
|
||||
import { useLightning } from './lightning'
|
||||
import { HAS_NOTIFICATIONS } from '../fragments/notifications'
|
||||
import AnonIcon from '../svgs/spy-fill.svg'
|
||||
|
@ -27,6 +26,7 @@ import Hat from './hat'
|
|||
import HiddenWalletSummary from './hidden-wallet-summary'
|
||||
import { clearNotifications } from '../lib/badge'
|
||||
import { useServiceWorker } from './serviceworker'
|
||||
import SubSelect from './sub-select'
|
||||
|
||||
function WalletSummary ({ me }) {
|
||||
if (!me) return null
|
||||
|
@ -213,57 +213,17 @@ function LurkerCorner ({ path }) {
|
|||
</div>
|
||||
}
|
||||
|
||||
const PREPEND_SUBS = ['home']
|
||||
const APPEND_SUBS = [{ label: '--------', items: ['create'] }]
|
||||
function NavItems ({ className, sub, prefix }) {
|
||||
const router = useRouter()
|
||||
sub ||= 'home'
|
||||
|
||||
return (
|
||||
<>
|
||||
<Nav.Item className={className}>
|
||||
<Select
|
||||
<SubSelect
|
||||
sub={sub} prependSubs={PREPEND_SUBS} appendSubs={APPEND_SUBS} noForm
|
||||
groupClassName='mb-0'
|
||||
onChange={(_, e) => {
|
||||
const sub = e.target.value === 'home' ? undefined : e.target.value
|
||||
let asPath
|
||||
// are we currently in a sub (ie not home)
|
||||
if (router.query.sub) {
|
||||
// are we going to a sub or home?
|
||||
const subReplace = sub ? `/~${sub}` : ''
|
||||
|
||||
// if we are going to a sub, replace the current sub with the new one
|
||||
asPath = router.asPath.replace(`/~${router.query.sub}`, subReplace)
|
||||
// if we're going to home, just go there directly
|
||||
if (asPath === '') {
|
||||
router.push('/')
|
||||
return
|
||||
}
|
||||
} else {
|
||||
// we're currently on the home sub
|
||||
// are we in a sub aware route?
|
||||
if (router.pathname.startsWith('/~')) {
|
||||
// if we are, go to the same path but in the sub
|
||||
asPath = `/~${sub}` + router.asPath
|
||||
} else {
|
||||
// otherwise, just go to the sub
|
||||
router.push(sub ? `/~${sub}` : '/')
|
||||
return
|
||||
}
|
||||
}
|
||||
const query = {
|
||||
...router.query,
|
||||
sub
|
||||
}
|
||||
delete query.nodata
|
||||
router.push({
|
||||
pathname: router.pathname,
|
||||
query
|
||||
}, asPath)
|
||||
}}
|
||||
name='sub'
|
||||
size='sm'
|
||||
value={sub}
|
||||
noForm
|
||||
items={['home', ...SUBS]}
|
||||
/>
|
||||
</Nav.Item>
|
||||
<Nav.Item className={className}>
|
||||
|
|
|
@ -9,9 +9,9 @@ import Item from './item'
|
|||
import AccordianItem from './accordian-item'
|
||||
import { linkSchema } from '../lib/validate'
|
||||
import Moon from '../svgs/moon-fill.svg'
|
||||
import { SubSelectInitial } from './sub-select-form'
|
||||
import { normalizeForwards, toastDeleteScheduled } from '../lib/form'
|
||||
import { useToast } from './toast'
|
||||
import { SubSelectInitial } from './sub-select'
|
||||
import { MAX_TITLE_LENGTH } from '../lib/constants'
|
||||
import { useMe } from './me'
|
||||
import { ItemButtonBar } from './post'
|
||||
|
|
|
@ -25,6 +25,7 @@ import { nostrZapDetails } from '../lib/nostr'
|
|||
import Text from './text'
|
||||
import NostrIcon from '../svgs/nostr.svg'
|
||||
import { numWithUnits } from '../lib/format'
|
||||
import BountyIcon from '../svgs/bounty-bag.svg'
|
||||
|
||||
function Notification ({ n, fresh }) {
|
||||
const type = n.__typename
|
||||
|
@ -33,6 +34,7 @@ function Notification ({ n, fresh }) {
|
|||
<NotificationLayout nid={nid(n)} {...defaultOnClick(n)} fresh={fresh}>
|
||||
{
|
||||
(type === 'Earn' && <EarnNotification n={n} />) ||
|
||||
(type === 'Revenue' && <RevenueNotification n={n} />) ||
|
||||
(type === 'Invitification' && <Invitification n={n} />) ||
|
||||
(type === 'InvoicePaid' && (n.invoice.nostr ? <NostrZap n={n} /> : <InvoicePaid n={n} />)) ||
|
||||
(type === 'Referral' && <Referral n={n} />) ||
|
||||
|
@ -83,6 +85,7 @@ const defaultOnClick = n => {
|
|||
href += dayMonthYear(new Date(n.sortTime))
|
||||
return { href }
|
||||
}
|
||||
if (type === 'Revenue') return { href: `/~${n.subName}` }
|
||||
if (type === 'Invitification') return { href: '/invites' }
|
||||
if (type === 'InvoicePaid') return { href: `/invoices/${n.invoice.id}` }
|
||||
if (type === 'Referral') return { href: '/referrals/month' }
|
||||
|
@ -135,10 +138,10 @@ function Streak ({ n }) {
|
|||
}
|
||||
|
||||
return (
|
||||
<div className='d-flex fw-bold ms-2 py-1'>
|
||||
<div className='d-flex ms-2 py-1'>
|
||||
<div style={{ fontSize: '2rem' }}>{n.days ? <BaldIcon className='fill-grey' height={40} width={40} /> : <CowboyHatIcon className='fill-grey' height={40} width={40} />}</div>
|
||||
<div className='ms-1 p-1'>
|
||||
you {n.days ? 'lost your' : 'found a'} cowboy hat
|
||||
<span className='fw-bold'>you {n.days ? 'lost your' : 'found a'} cowboy hat</span>
|
||||
<div><small style={{ lineHeight: '140%', display: 'inline-block' }}>{blurb(n)}</small></div>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -171,6 +174,22 @@ function EarnNotification ({ n }) {
|
|||
)
|
||||
}
|
||||
|
||||
function RevenueNotification ({ n }) {
|
||||
return (
|
||||
<div className='d-flex ms-2 py-1'>
|
||||
<BountyIcon className='align-self-center fill-success mx-1' width={24} height={24} style={{ flex: '0 0 24px' }} />
|
||||
<div className='ms-2 pb-1'>
|
||||
<div className='fw-bold text-success'>
|
||||
you stacked {numWithUnits(n.earnedSats, { abbreviate: false })} in territory revenue<small className='text-muted ms-1 fw-normal' suppressHydrationWarning>{timeSince(new Date(n.sortTime))}</small>
|
||||
</div>
|
||||
<div style={{ lineHeight: '140%' }}>
|
||||
As the founder of territory <Link href={`/~${n.subName}`}>~{n.subName}</Link>, you receive 50% of the revenue it generates and the other 50% go to <Link href='/rewards'>rewards</Link>.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
function Invitification ({ n }) {
|
||||
return (
|
||||
<>
|
||||
|
|
|
@ -5,7 +5,7 @@ import Countdown from './countdown'
|
|||
import AdvPostForm, { AdvPostInitial } from './adv-post-form'
|
||||
import { MAX_POLL_CHOICE_LENGTH, MAX_POLL_NUM_CHOICES, MAX_TITLE_LENGTH } from '../lib/constants'
|
||||
import { pollSchema } from '../lib/validate'
|
||||
import { SubSelectInitial } from './sub-select-form'
|
||||
import { SubSelectInitial } from './sub-select'
|
||||
import { useCallback } from 'react'
|
||||
import { normalizeForwards, toastDeleteScheduled } from '../lib/form'
|
||||
import { useMe } from './me'
|
||||
|
|
|
@ -9,7 +9,7 @@ import { DiscussionForm } from './discussion-form'
|
|||
import { LinkForm } from './link-form'
|
||||
import { PollForm } from './poll-form'
|
||||
import { BountyForm } from './bounty-form'
|
||||
import SubSelect from './sub-select-form'
|
||||
import SubSelect, { SubInfo } from './sub-select'
|
||||
import { useCallback, useState } from 'react'
|
||||
import FeeButton, { FeeButtonProvider, postCommentBaseLineItems, postCommentUseRemoteLineItems } from './fee-button'
|
||||
import Delete from './delete'
|
||||
|
@ -29,33 +29,89 @@ export function PostForm ({ type, sub, children }) {
|
|||
}, [me, setErrorMessage])
|
||||
|
||||
if (!type) {
|
||||
let postButtons = []
|
||||
let morePostButtons = []
|
||||
|
||||
if (sub) {
|
||||
if (sub?.postTypes?.includes('LINK')) {
|
||||
postButtons.push(
|
||||
<Link key='LINK' href={prefix + '/post?type=link'}>
|
||||
<Button variant='secondary'>link</Button>
|
||||
</Link>
|
||||
)
|
||||
}
|
||||
|
||||
if (sub?.postTypes?.includes('DISCUSSION')) {
|
||||
postButtons.push(
|
||||
<Link key='DISCUSSION' href={prefix + '/post?type=discussion'}>
|
||||
<Button variant='secondary'>discussion</Button>
|
||||
</Link>
|
||||
)
|
||||
}
|
||||
|
||||
if (sub?.postTypes?.includes('POLL')) {
|
||||
const array = postButtons.length < 2 ? postButtons : morePostButtons
|
||||
array.push(
|
||||
<Link key='POLL' href={prefix + '/post?type=poll'}>
|
||||
<Button variant={postButtons.length < 2 ? 'secondary' : 'info'}>poll</Button>
|
||||
</Link>
|
||||
)
|
||||
}
|
||||
|
||||
if (sub?.postTypes?.includes('BOUNTY')) {
|
||||
const array = postButtons.length < 2 ? postButtons : morePostButtons
|
||||
array.push(
|
||||
<Link key='BOUNTY' href={prefix + '/post?type=bounty'}>
|
||||
<Button onClick={checkSession} variant={postButtons.length < 2 ? 'secondary' : 'info'}>bounty</Button>
|
||||
</Link>
|
||||
)
|
||||
}
|
||||
} else {
|
||||
postButtons = [
|
||||
<Link key='LINK' href={prefix + '/post?type=link'}>
|
||||
<Button variant='secondary'>link</Button>
|
||||
</Link>,
|
||||
<Link key='DISCUSSION' href={prefix + '/post?type=discussion'}>
|
||||
<Button variant='secondary'>discussion</Button>
|
||||
</Link>
|
||||
]
|
||||
morePostButtons = [
|
||||
<Link key='POLL' href={prefix + '/post?type=poll'}>
|
||||
<Button variant='info'>poll</Button>
|
||||
</Link>,
|
||||
<Link key='BOUNTY' href={prefix + '/post?type=bounty'}>
|
||||
<Button onClick={checkSession} variant='info'>bounty</Button>
|
||||
</Link>
|
||||
]
|
||||
}
|
||||
|
||||
postButtons = postButtons.reduce((acc, cur) => {
|
||||
if (acc.length) acc.push(<span key='OR-post-buttons' className='mx-3 fw-bold text-muted'>or</span>)
|
||||
acc.push(cur)
|
||||
return acc
|
||||
}, [])
|
||||
|
||||
morePostButtons = morePostButtons.reduce((acc, cur) => {
|
||||
if (acc.length) acc.push(<span key='OR-more-post-buttons' className='mx-3 fw-bold text-muted'>or</span>)
|
||||
acc.push(cur)
|
||||
return acc
|
||||
}, [])
|
||||
|
||||
return (
|
||||
<div className='position-relative align-items-center'>
|
||||
{errorMessage &&
|
||||
<Alert className='position-absolute' style={{ top: '-6rem' }} variant='danger' onClose={() => setErrorMessage(undefined)} dismissible>
|
||||
{errorMessage}
|
||||
</Alert>}
|
||||
<SubSelect noForm sub={sub?.name} />
|
||||
<Link href={prefix + '/post?type=link'}>
|
||||
<Button variant='secondary'>link</Button>
|
||||
</Link>
|
||||
<span className='mx-3 fw-bold text-muted'>or</span>
|
||||
<Link href={prefix + '/post?type=discussion'}>
|
||||
<Button variant='secondary'>discussion</Button>
|
||||
</Link>
|
||||
<SubSelect prependSubs={['pick territory']} className='w-auto d-flex' noForm sub={sub?.name} />
|
||||
{postButtons}
|
||||
<div className='d-flex mt-4'>
|
||||
<AccordianItem
|
||||
headerColor='#6c757d'
|
||||
header={<div className='fw-bold text-muted'>more types</div>}
|
||||
body={
|
||||
<div className='align-items-center'>
|
||||
<Link href={prefix + '/post?type=poll'}>
|
||||
<Button variant='info'>poll</Button>
|
||||
</Link>
|
||||
<span className='mx-3 fw-bold text-muted'>or</span>
|
||||
<Link href={prefix + '/post?type=bounty'}>
|
||||
<Button onClick={checkSession} variant='info'>bounty</Button>
|
||||
</Link>
|
||||
{morePostButtons}
|
||||
<div className='mt-3 d-flex justify-content-center'>
|
||||
<Link href='/~jobs/post'>
|
||||
<Button onClick={checkSession} variant='info'>job</Button>
|
||||
|
@ -101,7 +157,19 @@ export default function Post ({ sub }) {
|
|||
return (
|
||||
<>
|
||||
<PostForm type={type} sub={sub}>
|
||||
{sub?.name !== 'jobs' && <SubSelect label='sub' />}
|
||||
{sub?.name !== 'jobs' &&
|
||||
<SubSelect
|
||||
sub={sub?.name}
|
||||
prependSubs={sub?.name ? undefined : ['pick territory']}
|
||||
filterSubs={s => s.postTypes?.includes(type.toUpperCase())}
|
||||
className='w-auto d-flex'
|
||||
label={
|
||||
<span className='d-flex align-items-center'>
|
||||
territory
|
||||
<SubInfo />
|
||||
</span>
|
||||
}
|
||||
/>}
|
||||
</PostForm>
|
||||
</>
|
||||
)
|
||||
|
|
|
@ -1,12 +1,17 @@
|
|||
import { ITEM_TYPES } from '../lib/constants'
|
||||
import { ITEM_TYPES, ITEM_TYPES_UNIVERSAL } from '../lib/constants'
|
||||
import { Select } from './form'
|
||||
import { useRouter } from 'next/router'
|
||||
|
||||
export default function RecentHeader ({ type, sub }) {
|
||||
const router = useRouter()
|
||||
const prefix = sub ? `/~${sub}` : ''
|
||||
|
||||
const items = ITEM_TYPES(sub)
|
||||
const prefix = sub ? `/~${sub.name}` : ''
|
||||
|
||||
const items = sub
|
||||
? ITEM_TYPES_UNIVERSAL.concat(sub.postTypes.map(p =>
|
||||
['LINK', 'DISCUSSION', 'POLL', 'JOB'].includes(p) ? `${p.toLowerCase()}s` : 'bounties'
|
||||
))
|
||||
: ITEM_TYPES
|
||||
|
||||
type ||= router.query.type || type || 'posts'
|
||||
return (
|
||||
|
|
|
@ -1,68 +0,0 @@
|
|||
import { useRouter } from 'next/router'
|
||||
import { Select } from './form'
|
||||
import Info from './info'
|
||||
import { SUBS, SUBS_NO_JOBS } from '../lib/constants'
|
||||
|
||||
export function SubSelectInitial ({ sub }) {
|
||||
const router = useRouter()
|
||||
sub = sub || router.query.sub || 'pick sub'
|
||||
|
||||
return {
|
||||
sub
|
||||
}
|
||||
}
|
||||
|
||||
export default function SubSelect ({ label, sub, setSub, item, ...props }) {
|
||||
const router = useRouter()
|
||||
|
||||
const SubInfo = () => (
|
||||
<Info>
|
||||
<div>
|
||||
<div className='fw-bold'>The sub your post will go in ...</div>
|
||||
<ul>
|
||||
<li>If it's bitcoin related, put it in the bitcoin sub.</li>
|
||||
<li>If it's nostr related, put it in the nostr sub.</li>
|
||||
<li>If it's tech related, put it in the tech sub.</li>
|
||||
<li>If it's stacker news related, put it in the meta sub.</li>
|
||||
<li>If it's a job, put it in the jobs sub.</li>
|
||||
</ul>
|
||||
</div>
|
||||
</Info>
|
||||
)
|
||||
|
||||
sub ||= router.query.sub || 'pick sub'
|
||||
const extraProps = props.noForm
|
||||
? {
|
||||
value: sub,
|
||||
items: ['pick sub', ...SUBS]
|
||||
}
|
||||
: {
|
||||
overrideValue: sub,
|
||||
items: item ? SUBS_NO_JOBS : ['pick sub', ...SUBS_NO_JOBS]
|
||||
}
|
||||
|
||||
return (
|
||||
<Select
|
||||
className='w-auto d-flex'
|
||||
onChange={(formik, e) => {
|
||||
if (!item) {
|
||||
router.push({
|
||||
pathname: e.target.value === 'pick sub' ? '/post' : `/~${e.target.value}/post`,
|
||||
query: router.query?.type ? { type: router.query.type } : undefined
|
||||
})
|
||||
} else {
|
||||
setSub(e.target.value)
|
||||
}
|
||||
}}
|
||||
name='sub'
|
||||
size='sm'
|
||||
{...extraProps}
|
||||
label={label &&
|
||||
<>
|
||||
{label}
|
||||
<SubInfo />
|
||||
</>}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
|
@ -0,0 +1,118 @@
|
|||
import { useRouter } from 'next/router'
|
||||
import { Select } from './form'
|
||||
import Info from './info'
|
||||
import { SSR } from '../lib/constants'
|
||||
import { SUBS } from '../fragments/subs'
|
||||
import { useQuery } from '@apollo/client'
|
||||
import { useEffect, useState } from 'react'
|
||||
import styles from './sub-select.module.css'
|
||||
|
||||
export function SubSelectInitial ({ sub }) {
|
||||
const router = useRouter()
|
||||
sub = sub || router.query.sub || 'pick territory'
|
||||
|
||||
return {
|
||||
sub
|
||||
}
|
||||
}
|
||||
|
||||
export function useSubs ({ prependSubs = [], sub, filterSubs = () => true, appendSubs = [] }) {
|
||||
const { data } = useQuery(SUBS, SSR
|
||||
? {}
|
||||
: {
|
||||
pollInterval: 300000,
|
||||
nextFetchPolicy: 'cache-and-network'
|
||||
})
|
||||
|
||||
const [subs, setSubs] = useState([
|
||||
...prependSubs.filter(s => s !== sub),
|
||||
sub,
|
||||
...appendSubs.filter(s => s !== sub)])
|
||||
useEffect(() => {
|
||||
if (!data) return
|
||||
setSubs([...prependSubs, ...data.subs.filter(filterSubs).map(s => s.name), ...appendSubs])
|
||||
}, [data])
|
||||
|
||||
return subs
|
||||
}
|
||||
|
||||
export const SubInfo = () => (
|
||||
<Info>
|
||||
<div>
|
||||
<div className='fw-bold'>The territory your post will go in ...</div>
|
||||
<ul>
|
||||
<li>If it's bitcoin related, put it in the bitcoin territory.</li>
|
||||
<li>If it's nostr related, put it in the nostr territory.</li>
|
||||
<li>If it's tech related, put it in the tech territory.</li>
|
||||
<li>If it's stacker news related, put it in the meta territory.</li>
|
||||
<li>If it's a job, put it in the jobs territory.</li>
|
||||
<li>etc...</li>
|
||||
</ul>
|
||||
</div>
|
||||
</Info>
|
||||
)
|
||||
|
||||
export default function SubSelect ({ prependSubs, sub, onChange, appendSubs, filterSubs, className, ...props }) {
|
||||
const router = useRouter()
|
||||
const subs = useSubs({ prependSubs, sub, filterSubs, appendSubs })
|
||||
const valueProps = props.noForm
|
||||
? {
|
||||
value: sub
|
||||
}
|
||||
: {
|
||||
overrideValue: sub
|
||||
}
|
||||
|
||||
return (
|
||||
<Select
|
||||
onChange={onChange || ((_, e) => {
|
||||
const sub = ['home', 'pick territory'].includes(e.target.value) ? undefined : e.target.value
|
||||
if (sub === 'create') {
|
||||
router.push('/territory')
|
||||
return
|
||||
}
|
||||
|
||||
let asPath
|
||||
// are we currently in a sub (ie not home)
|
||||
if (router.query.sub) {
|
||||
// are we going to a sub or home?
|
||||
const subReplace = sub ? `/~${sub}` : ''
|
||||
|
||||
// if we are going to a sub, replace the current sub with the new one
|
||||
asPath = router.asPath.replace(`/~${router.query.sub}`, subReplace)
|
||||
// if we're going to home, just go there directly
|
||||
if (asPath === '') {
|
||||
router.push('/')
|
||||
return
|
||||
}
|
||||
} else {
|
||||
// we're currently on the home sub
|
||||
// are we in a sub aware route?
|
||||
if (router.pathname.startsWith('/~')) {
|
||||
// if we are, go to the same path but in the sub
|
||||
asPath = `/~${sub}` + router.asPath
|
||||
} else {
|
||||
// otherwise, just go to the sub
|
||||
router.push(sub ? `/~${sub}` : '/')
|
||||
return
|
||||
}
|
||||
}
|
||||
const query = {
|
||||
...router.query,
|
||||
sub
|
||||
}
|
||||
delete query.nodata
|
||||
router.push({
|
||||
pathname: router.pathname,
|
||||
query
|
||||
}, asPath)
|
||||
})}
|
||||
name='sub'
|
||||
size='sm'
|
||||
{...valueProps}
|
||||
{...props}
|
||||
className={`${className} ${styles.subSelect}`}
|
||||
items={subs}
|
||||
/>
|
||||
)
|
||||
}
|
|
@ -0,0 +1,6 @@
|
|||
.subSelect {
|
||||
width: 100px !important;
|
||||
overflow: hidden;
|
||||
white-space: nowrap;
|
||||
text-overflow: ellipsis !important;
|
||||
}
|
|
@@ -0,0 +1,183 @@
import { Col, InputGroup, Row } from 'react-bootstrap'
import { Checkbox, CheckboxGroup, Form, Input, MarkdownInput } from './form'
import FeeButton, { FeeButtonProvider } from './fee-button'
import { gql, useApolloClient, useMutation } from '@apollo/client'
import { useCallback, useState } from 'react'
import { useRouter } from 'next/router'
import { MAX_TERRITORY_DESC_LENGTH, POST_TYPES, TERRITORY_BILLING_OPTIONS } from '../lib/constants'
import { territorySchema } from '../lib/validate'
import { useMe } from './me'

export default function TerritoryForm ({ sub }) {
  const router = useRouter()
  const client = useApolloClient()
  const me = useMe()
  const [upsertSub] = useMutation(
    gql`
      mutation upsertSub($name: String!, $desc: String, $baseCost: Int!,
        $postTypes: [String!]!, $billingType: String!, $hash: String, $hmac: String) {
        upsertSub(name: $name, desc: $desc, baseCost: $baseCost,
          postTypes: $postTypes, billingType: $billingType, hash: $hash, hmac: $hmac) {
          name
        }
      }`
  )

  const onSubmit = useCallback(
    async ({ ...variables }) => {
      const { error } = await upsertSub({
        variables
      })

      if (error) {
        throw new Error(error.toString())
      }

      // modify graphql cache to include new sub
      client.cache.modify({
        fields: {
          subs (existing = []) {
            console.log('existing', existing, variables.name)
            return [
              ...existing,
              { __typename: 'Sub', name: variables.name }]
          }
        }
      })

      await router.push(`/~${variables.name}`)
    }, [client, upsertSub, router]
  )

  const [billing, setBilling] = useState('monthly')

  return (
    <FeeButtonProvider baseLineItems={sub ? undefined : { territory: TERRITORY_BILLING_OPTIONS('first')[billing] }}>
      <Form
        initial={{
          name: sub?.name || '',
          desc: sub?.desc || '',
          baseCost: sub?.baseCost || 10,
          postTypes: sub?.postTypes || POST_TYPES,
          billingType: sub?.billingType || 'MONTHLY'
        }}
        schema={territorySchema({ client, me })}
        invoiceable
        onSubmit={onSubmit}
        className='mb-5'
        storageKeyPrefix={sub ? undefined : 'territory'}
      >
        {sub?.name
          ? <Input
              label={<>name <small className='text-muted ms-2'>read only</small></>}
              name='name'
              readOnly
              prepend={<InputGroup.Text className='text-monospace'>~</InputGroup.Text>}
              className='text-muted'
            />
          : <Input
              label='name'
              name='name'
              required
              autoFocus
              clear
              maxLength={32}
              prepend={<InputGroup.Text className='text-monospace'>~</InputGroup.Text>}
            />}
        <MarkdownInput
          label='description'
          name='desc'
          maxLength={MAX_TERRITORY_DESC_LENGTH}
          required
          minRows={3}
        />
        <Input
          label='post cost'
          name='baseCost'
          type='number'
          required
          append={<InputGroup.Text className='text-monospace'>sats</InputGroup.Text>}
        />
        <CheckboxGroup label='post types' name='postTypes'>
          <Row>
            <Col xs={4} sm='auto'>
              <Checkbox
                inline
                label='links'
                value='LINK'
                name='postTypes'
                groupClassName='ms-1 mb-0'
              />
            </Col>
            <Col xs={4} sm='auto'>
              <Checkbox
                inline
                label='discussions'
                value='DISCUSSION'
                name='postTypes'
                groupClassName='ms-1 mb-0'
              />
            </Col>
            <Col xs={4} sm='auto'>
              <Checkbox
                inline
                label='bounties'
                value='BOUNTY'
                name='postTypes'
                groupClassName='ms-1 mb-0'
              />
            </Col>
            <Col xs={4} sm='auto'>
              <Checkbox
                inline
                label='polls'
                value='POLL'
                name='postTypes'
                groupClassName='ms-1 mb-0'
              />
            </Col>
          </Row>
        </CheckboxGroup>
        <CheckboxGroup label={sub ? <>billing <small className='text-muted ms-2'>read only</small></> : 'billing'} name='billing'>
          {(!sub?.billingType || sub.billingType === 'MONTHLY') &&
            <Checkbox
              type='radio'
              label='100k sats/month'
              value='MONTHLY'
              name='billingType'
              readOnly={!!sub}
              handleChange={checked => checked && setBilling('monthly')}
              groupClassName='ms-1 mb-0'
            />}
          {(!sub?.billingType || sub.billingType === 'YEARLY') &&
            <Checkbox
              type='radio'
              label='1m sats/year'
              value='YEARLY'
              name='billingType'
              readOnly={!!sub}
              handleChange={checked => checked && setBilling('yearly')}
              groupClassName='ms-1 mb-0'
            />}
          {(!sub?.billingType || sub.billingType === 'ONCE') &&
            <Checkbox
              type='radio'
              label='3m sats once'
              value='ONCE'
              name='billingType'
              readOnly={!!sub}
              handleChange={checked => checked && setBilling('once')}
              groupClassName='ms-1 mb-0'
            />}
        </CheckboxGroup>
        <div className='mt-3 d-flex justify-content-end'>
          <FeeButton
            text={sub ? 'save' : 'found it'}
            variant='secondary'
            disabled={sub?.status === 'STOPPED'}
          />
        </div>
      </Form>
    </FeeButtonProvider>
  )
}
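// A hypothetical example (not part of this commit) of the variables the form above
// passes to upsertSub when founding a territory; the name and description are made up,
// and hash/hmac only appear when the submission is paid through an invoice.
//
//   upsertSub({
//     variables: {
//       name: 'pics',
//       desc: 'a territory for photos',
//       baseCost: 10,
//       postTypes: ['LINK', 'DISCUSSION'],
//       billingType: 'MONTHLY'
//     }
//   })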
@@ -0,0 +1,77 @@
import { Alert } from 'react-bootstrap'
import { useMe } from './me'
import FeeButton, { FeeButtonProvider } from './fee-button'
import { TERRITORY_BILLING_OPTIONS, TERRITORY_GRACE_DAYS } from '../lib/constants'
import { Form } from './form'
import { datePivot } from '../lib/time'
import { LongCountdown } from './countdown'
import { useCallback } from 'react'
import { useApolloClient, useMutation } from '@apollo/client'
import { SUB_PAY } from '../fragments/subs'

export default function TerritoryPaymentDue ({ sub }) {
  const me = useMe()
  const client = useApolloClient()
  const [paySub] = useMutation(SUB_PAY)

  const dueDate = datePivot(
    new Date(sub.billedLastAt),
    sub.billingType === 'MONTHLY'
      ? { months: 1, days: TERRITORY_GRACE_DAYS }
      : { years: 1, days: TERRITORY_GRACE_DAYS })

  const onSubmit = useCallback(
    async ({ ...variables }) => {
      const { error } = await paySub({
        variables
      })

      if (error) {
        throw new Error(error.toString())
      }
    }, [client, paySub])

  if (!sub || sub.userId !== Number(me?.id) || sub.status === 'ACTIVE') return null

  return (
    <Alert key='danger' variant='danger'>
      {sub.status === 'STOPPED'
        ? (
          <>
            <Alert.Heading>
              Your ~{sub.name} territory has been archived!
            </Alert.Heading>
            <div>
              Make a payment to reactivate it.
            </div>
          </>)
        : (
          <>
            <Alert.Heading>
              Your ~{sub.name} territory payment is due!
            </Alert.Heading>
            <div>
              Your territory will be archived in <LongCountdown date={dueDate} /> otherwise.
            </div>
          </>
          )}

      <FeeButtonProvider baseLineItems={{ territory: TERRITORY_BILLING_OPTIONS('one')[sub.billingType.toLowerCase()] }}>
        <Form
          invoiceable
          initial={{
            name: sub.name
          }}
          onSubmit={onSubmit}
        >
          <div className='d-flex justify-content-end'>
            <FeeButton
              text='pay'
              variant='success'
            />
          </div>
        </Form>
      </FeeButtonProvider>
    </Alert>
  )
}
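// A hypothetical walkthrough (not part of this commit) of the due-date math above,
// assuming datePivot adds the given parts to the date and TERRITORY_GRACE_DAYS is 5:
//
//   datePivot(new Date('2023-11-01T00:00:00Z'), { months: 1, days: 5 })
//   // -> 2023-12-06, i.e. one month after the last bill plus a five day grace period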
@@ -32,6 +32,12 @@ export const NOTIFICATIONS = gql`
          text
        }
      }
      ... on Revenue {
        id
        sortTime
        earnedSats
        subName
      }
      ... on ForwardedVotification {
        id
        sortTime
@@ -7,7 +7,27 @@ export const SUB_FIELDS = gql`
    name
    postTypes
    rankingType
    billingType
    billingCost
    billedLastAt
    baseCost
    userId
    desc
    status
  }`

export const SUB_FULL_FIELDS = gql`
  ${SUB_FIELDS}

  fragment SubFullFields on Sub {
    ...SubFields
    user {
      name
      id
      optional {
        streak
      }
    }
  }`

export const SUB = gql`
@@ -19,14 +39,32 @@ export const SUB = gql`
    }
  }`

export const SUB_ITEMS = gql`
export const SUB_FULL = gql`
  ${SUB_FULL_FIELDS}

  query Sub($sub: String) {
    sub(name: $sub) {
      ...SubFullFields
    }
  }`

export const SUBS = gql`
  ${SUB_FIELDS}

  query Subs {
    subs {
      ...SubFields
    }
  }`

export const SUB_ITEMS = gql`
  ${SUB_FULL_FIELDS}
  ${ITEM_FIELDS}
  ${COMMENTS_ITEM_EXT_FIELDS}

  query SubItems($sub: String, $sort: String, $cursor: String, $type: String, $name: String, $when: String, $from: String, $to: String, $by: String, $limit: Limit, $includeComments: Boolean = false) {
    sub(name: $sub) {
      ...SubFields
      ...SubFullFields
    }

    items(sub: $sub, sort: $sort, cursor: $cursor, type: $type, name: $name, when: $when, from: $from, to: $to, by: $by, limit: $limit) {

@@ -62,3 +100,11 @@ export const SUB_SEARCH = gql`
  }
}
`

export const SUB_PAY = gql`
  ${SUB_FULL_FIELDS}
  mutation paySub($name: String!, $hash: String, $hmac: String) {
    paySub(name: $name, hash: $hash, hmac: $hmac) {
      ...SubFullFields
    }
  }`
@@ -40,17 +40,16 @@ export const WALLET_HISTORY = gql`
    walletHistory(cursor: $cursor, inc: $inc) {
      facts {
        id
        factId
        bolt11
        type
        createdAt
        sats
        satsFee
        status
        type
        description
        invoiceComment
        invoicePayerData
        subName
        item {
          ...ItemFullFields
        }
@@ -33,6 +33,9 @@ function getClient (uri) {
    cache: new InMemoryCache({
      freezeResults: true,
      typePolicies: {
        Sub: {
          keyFields: ['name']
        },
        User: {
          // https://www.apollographql.com/docs/react/caching/cache-field-behavior/#merging-non-normalized-objects
          fields: {
@@ -1,7 +1,7 @@
// XXX this is temporary until we have so many subs they have
// to be loaded from the server
export const SUBS = ['bitcoin', 'nostr', 'tech', 'meta', 'jobs']
export const SUBS_NO_JOBS = SUBS.filter(s => s !== 'jobs')
export const DEFAULT_SUBS = ['bitcoin', 'nostr', 'tech', 'meta', 'jobs']
export const DEFAULT_SUBS_NO_JOBS = DEFAULT_SUBS.filter(s => s !== 'jobs')

export const NOFOLLOW_LIMIT = 1000
export const BOOST_MULT = 5000
@@ -17,10 +17,14 @@ export const UPLOAD_TYPES_ALLOW = [
  'image/jpeg',
  'image/webp'
]
export const POST_TYPES = ['LINK', 'DISCUSSION', 'BOUNTY', 'POLL']
export const TERRITORY_BILLING_TYPES = ['MONTHLY', 'YEARLY', 'ONCE']
export const TERRITORY_GRACE_DAYS = 5
export const COMMENT_DEPTH_LIMIT = 8
export const MAX_TITLE_LENGTH = 80
export const MAX_POST_TEXT_LENGTH = 100000 // 100k
export const MAX_COMMENT_TEXT_LENGTH = 10000 // 10k
export const MAX_TERRITORY_DESC_LENGTH = 140
export const MAX_POLL_CHOICE_LENGTH = 40
export const ITEM_SPAM_INTERVAL = '10m'
export const ANON_ITEM_SPAM_INTERVAL = '0'
@@ -38,17 +42,9 @@ export const COMMENT_TYPE_QUERY = ['comments', 'freebies', 'outlawed', 'borderla
export const USER_SORTS = ['stacked', 'spent', 'comments', 'posts', 'referrals']
export const ITEM_SORTS = ['zaprank', 'comments', 'sats']
export const WHENS = ['day', 'week', 'month', 'year', 'forever', 'custom']
export const ITEM_TYPES = context => {
  const items = ['all', 'posts', 'comments', 'bounties', 'links', 'discussions', 'polls']
  if (!context) {
    items.push('bios', 'jobs')
  }
  items.push('freebies')
  if (context === 'user') {
    items.push('jobs', 'bookmarks')
  }
  return items
}
export const ITEM_TYPES_USER = ['all', 'posts', 'comments', 'bounties', 'links', 'discussions', 'polls', 'freebies', 'jobs', 'bookmarks']
export const ITEM_TYPES = ['all', 'posts', 'comments', 'bounties', 'links', 'discussions', 'polls', 'freebies', 'bios', 'jobs']
export const ITEM_TYPES_UNIVERSAL = ['all', 'posts', 'comments', 'freebies']
export const OLD_ITEM_DAYS = 3
export const ANON_USER_ID = 27
export const DELETE_USER_ID = 106
@@ -63,6 +59,28 @@ export const GLOBAL_SEED = 616
export const FREEBIE_BASE_COST_THRESHOLD = 10
export const USER_IDS_BALANCE_NO_LIMIT = [...SN_USER_IDS, AD_USER_ID]

export const TERRITORY_COST_MONTHLY = 100000
export const TERRITORY_COST_YEARLY = 1000000
export const TERRITORY_COST_ONCE = 3000000

export const TERRITORY_BILLING_OPTIONS = (labelPrefix) => ({
  monthly: {
    term: '+ 100k',
    label: `${labelPrefix} month`,
    modifier: cost => cost + TERRITORY_COST_MONTHLY
  },
  yearly: {
    term: '+ 1m',
    label: `${labelPrefix} year`,
    modifier: cost => cost + TERRITORY_COST_YEARLY
  },
  once: {
    term: '+ 3m',
    label: 'one time',
    modifier: cost => cost + TERRITORY_COST_ONCE
  }
})

export const FOUND_BLURBS = [
  'The harsh frontier is no place for the unprepared. This hat will protect you from the sun, dust, and other elements Mother Nature throws your way.',
  'A cowboy is nothing without a cowboy hat. Take good care of it, and it will protect you from the sun, dust, and other elements on your journey.',
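// A hypothetical example (not part of this commit) of how a billing option's
// `modifier` composes with a running fee total in the fee button:
//
//   const { monthly } = TERRITORY_BILLING_OPTIONS('first')
//   monthly.modifier(0)  // -> 100000 (the founding cost by itself)
//   monthly.modifier(10) // -> 100010 (founding cost on top of a 10 sat base cost)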
lib/validate.js
@@ -1,10 +1,16 @@
import { string, ValidationError, number, object, array, addMethod, boolean } from 'yup'
import { BOOST_MIN, MAX_POLL_CHOICE_LENGTH, MAX_TITLE_LENGTH, MAX_POLL_NUM_CHOICES, MIN_POLL_NUM_CHOICES, SUBS_NO_JOBS, MAX_FORWARDS, BOOST_MULT, MAX_COMMENT_TEXT_LENGTH, MAX_POST_TEXT_LENGTH } from './constants'
import {
  BOOST_MIN, MAX_POLL_CHOICE_LENGTH, MAX_TITLE_LENGTH, MAX_POLL_NUM_CHOICES,
  MIN_POLL_NUM_CHOICES, MAX_FORWARDS, BOOST_MULT, MAX_TERRITORY_DESC_LENGTH, POST_TYPES,
  TERRITORY_BILLING_TYPES, MAX_COMMENT_TEXT_LENGTH, MAX_POST_TEXT_LENGTH
} from './constants'
import { URL_REGEXP, WS_REGEXP } from './url'
import { SUPPORTED_CURRENCIES } from './currency'
import { NOSTR_MAX_RELAY_NUM, NOSTR_PUBKEY_BECH32, NOSTR_PUBKEY_HEX } from './nostr'
import * as pkg from '../fragments/users'
const { NAME_QUERY } = pkg
import * as usersFragments from '../fragments/users'
import * as subsFragments from '../fragments/subs'
const { SUB } = subsFragments
const { NAME_QUERY } = usersFragments

export async function ssValidate (schema, data, args) {
  try {

@@ -46,6 +52,10 @@ const textValidator = (max) => string().trim().max(
  max,
  ({ max, value }) => `-${Math.abs(max - value.length)} characters remaining`
)
const nameValidator = string()
  .required('required')
  .matches(/^[\w_]+$/, 'only letters, numbers, and _')
  .max(32, 'too long')

const intValidator = number().typeError('must be a number').integer('must be whole')

@@ -64,6 +74,55 @@ async function usernameExists (name, { client, models }) {
  return !!user
}

async function subExists (name, { client, models, me }) {
  if (!client && !models) {
    throw new Error('cannot check for territory')
  }

  let sub
  // apollo client
  if (client) {
    const { data } = await client.query({ query: SUB, variables: { sub: name } })
    sub = data?.sub
  } else {
    sub = await models.sub.findUnique({ where: { name } })
  }

  return !!sub && sub.userId !== Number(me?.id)
}

async function subActive (name, { client, models, me }) {
  if (!client && !models) {
    throw new Error('cannot check if territory is active')
  }

  let sub
  // apollo client
  if (client) {
    const { data } = await client.query({ query: SUB, variables: { sub: name } })
    sub = data?.sub
  } else {
    sub = await models.sub.findUnique({ where: { name } })
  }

  return sub && sub.status !== 'STOPPED'
}

async function subHasPostType (name, type, { client, models }) {
  if (!client && !models) {
    throw new Error('cannot check for territory')
  }
  // apollo client
  if (client) {
    const { data } = await client.query({ query: SUB, variables: { name } })
    return !!(data?.sub?.postTypes?.includes(type))
  }

  // prisma client
  const sub = await models.sub.findUnique({ where: { name } })
  return !!(sub?.postTypes?.includes(type))
}

export function advPostSchemaMembers ({ me, existingBoost = 0, ...args }) {
  const boostMin = existingBoost || BOOST_MIN
  return {
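// A hypothetical example (not part of this commit) of what these helpers return,
// assuming a sub named 'bitcoin' owned by user 616 and a stopped sub named 'ghosttown':
//
//   await subExists('bitcoin', { models, me: { id: 21 } })  // -> true: exists and isn't mine
//   await subExists('bitcoin', { models, me: { id: 616 } }) // -> false: it exists but I own it
//   await subActive('ghosttown', { models })                // -> false: status === 'STOPPED'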
@@ -108,9 +167,26 @@ export function advPostSchemaMembers ({ me, existingBoost = 0, ...args }) {
    }
  }

export function subSelectSchemaMembers () {
export function subSelectSchemaMembers (args) {
  // for subSelectSchemaMembers we want to filter out me
  // because we want to allow the user to select their own territory
  const { me, ...filteredArgs } = args
  return {
    sub: string().required('required').oneOf(SUBS_NO_JOBS, 'required')
    sub: string().required('required').test({
      name: 'sub',
      test: async sub => {
        if (!sub || !sub.length) return false
        return await subExists(sub, filteredArgs)
      },
      message: 'pick valid territory'
    }).test({
      name: 'sub',
      test: async sub => {
        if (!sub || !sub.length) return false
        return await subActive(sub, filteredArgs)
      },
      message: 'territory is not active'
    })
  }
}
// for testing advPostSchemaMembers in isolation
@ -128,7 +204,11 @@ export function bountySchema (args) {
|
|||
.min(1000, 'must be at least 1000')
|
||||
.max(1000000, 'must be at most 1m'),
|
||||
...advPostSchemaMembers(args),
|
||||
...subSelectSchemaMembers()
|
||||
...subSelectSchemaMembers(args)
|
||||
}).test({
|
||||
name: 'post-type-supported',
|
||||
test: ({ sub }) => subHasPostType(sub, 'BOUNTY', args),
|
||||
message: 'territory does not support bounties'
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -137,7 +217,11 @@ export function discussionSchema (args) {
|
|||
title: titleValidator,
|
||||
text: textValidator(MAX_POST_TEXT_LENGTH),
|
||||
...advPostSchemaMembers(args),
|
||||
...subSelectSchemaMembers()
|
||||
...subSelectSchemaMembers(args)
|
||||
}).test({
|
||||
name: 'post-type-supported',
|
||||
test: ({ sub }) => subHasPostType(sub, 'DISCUSSION', args),
|
||||
message: 'territory does not support discussions'
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -147,7 +231,11 @@ export function linkSchema (args) {
|
|||
text: textValidator(MAX_POST_TEXT_LENGTH),
|
||||
url: string().matches(URL_REGEXP, 'invalid url').required('required'),
|
||||
...advPostSchemaMembers(args),
|
||||
...subSelectSchemaMembers()
|
||||
...subSelectSchemaMembers(args)
|
||||
}).test({
|
||||
name: 'post-type-supported',
|
||||
test: ({ sub }) => subHasPostType(sub, 'LINK', args),
|
||||
message: 'territory does not support links'
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -169,16 +257,40 @@ export function pollSchema ({ numExistingChoices = 0, ...args }) {
|
|||
test: arr => arr.length >= MIN_POLL_NUM_CHOICES - numExistingChoices
|
||||
}),
|
||||
...advPostSchemaMembers(args),
|
||||
...subSelectSchemaMembers()
|
||||
...subSelectSchemaMembers(args)
|
||||
}).test({
|
||||
name: 'post-type-supported',
|
||||
test: ({ sub }) => subHasPostType(sub, 'POLL', args),
|
||||
message: 'territory does not support polls'
|
||||
})
|
||||
}
|
||||
|
||||
export function territorySchema (args) {
  return object({
    name: nameValidator
      .test({
        name: 'name',
        test: async name => {
          if (!name || !name.length) return false
          return !(await subExists(name, args))
        },
        message: 'taken'
      }),
    desc: string().required('required').trim().max(
      MAX_TERRITORY_DESC_LENGTH,
      ({ max, value }) => `-${Math.abs(max - value.length)} characters remaining`
    ),
    baseCost: intValidator
      .min(1, 'must be at least 1')
      .max(100000, 'must be at most 100k'),
    postTypes: array().of(string().oneOf(POST_TYPES)).min(1, 'must support at least one post type'),
    billingType: string().required('required').oneOf(TERRITORY_BILLING_TYPES, 'required')
  })
}

export function userSchema (args) {
  return object({
    name: string()
      .required('required')
      .matches(/^[\w_]+$/, 'only letters, numbers, and _')
      .max(32, 'too long')
    name: nameValidator
      .test({
        name: 'name',
        test: async name => {
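// A hypothetical example (not part of this commit) of a payload that passes
// territorySchema above, assuming the name 'pics' isn't already taken:
//
//   await territorySchema({ models }).validate({
//     name: 'pics',
//     desc: 'a territory for photos',
//     baseCost: 10,
//     postTypes: ['LINK', 'DISCUSSION'],
//     billingType: 'MONTHLY'
//   })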
@ -3,7 +3,7 @@ import Items from '../../components/items'
|
|||
import { useRouter } from 'next/router'
|
||||
import { USER, USER_WITH_ITEMS } from '../../fragments/users'
|
||||
import { useQuery } from '@apollo/client'
|
||||
import { COMMENT_TYPE_QUERY, ITEM_SORTS, ITEM_TYPES, WHENS } from '../../lib/constants'
|
||||
import { COMMENT_TYPE_QUERY, ITEM_SORTS, ITEM_TYPES_USER, WHENS } from '../../lib/constants'
|
||||
import PageLoading from '../../components/page-loading'
|
||||
import { UserLayout } from '.'
|
||||
import { Form, Select, DatePicker } from '../../components/form'
|
||||
|
@ -45,7 +45,7 @@ function UserItemsHeader ({ type, name }) {
|
|||
const router = useRouter()
|
||||
async function select (values) {
|
||||
let { type, ...query } = values
|
||||
if (!type || type === 'all' || !ITEM_TYPES('user').includes(type)) type = 'all'
|
||||
if (!type || type === 'all' || !ITEM_TYPES_USER.includes(type)) type = 'all'
|
||||
if (!query.by || query.by === 'recent' || !ITEM_SORTS.includes(query.by)) delete query.by
|
||||
if (!query.when || query.when === 'forever' || !WHENS.includes(query.when) || query.when === 'forever') delete query.when
|
||||
if (query.when !== 'custom') { delete query.from; delete query.to }
|
||||
|
@ -74,7 +74,7 @@ function UserItemsHeader ({ type, name }) {
|
|||
name='type'
|
||||
size='sm'
|
||||
overrideValue={type}
|
||||
items={ITEM_TYPES('user')}
|
||||
items={ITEM_TYPES_USER}
|
||||
onChange={(formik, e) => select({ ...formik?.values, type: e.target.value })}
|
||||
/>
|
||||
by
|
||||
|
|
|
@ -6,12 +6,12 @@ import { CenterLayout } from '../../../components/layout'
|
|||
import JobForm from '../../../components/job-form'
|
||||
import { PollForm } from '../../../components/poll-form'
|
||||
import { BountyForm } from '../../../components/bounty-form'
|
||||
import SubSelect from '../../../components/sub-select-form'
|
||||
import { useState } from 'react'
|
||||
import { useQuery } from '@apollo/client'
|
||||
import { useRouter } from 'next/router'
|
||||
import PageLoading from '../../../components/page-loading'
|
||||
import { FeeButtonProvider } from '../../../components/fee-button'
|
||||
import SubSelect from '../../../components/sub-select'
|
||||
|
||||
export const getServerSideProps = getGetServerSideProps({
|
||||
query: ITEM,
|
||||
|
@ -30,14 +30,19 @@ export default function PostEdit ({ ssrData }) {
|
|||
const editThreshold = new Date(item.createdAt).getTime() + 10 * 60000
|
||||
|
||||
let FormType = DiscussionForm
|
||||
let itemType = 'DISCUSSION'
|
||||
if (item.isJob) {
|
||||
FormType = JobForm
|
||||
itemType = 'JOB'
|
||||
} else if (item.url) {
|
||||
FormType = LinkForm
|
||||
itemType = 'LINK'
|
||||
} else if (item.pollCost) {
|
||||
FormType = PollForm
|
||||
itemType = 'POLL'
|
||||
} else if (item.bounty) {
|
||||
FormType = BountyForm
|
||||
itemType = 'BOUNTY'
|
||||
}
|
||||
|
||||
const existingBoostLineItem = item.boost
|
||||
|
@ -54,7 +59,14 @@ export default function PostEdit ({ ssrData }) {
|
|||
<CenterLayout sub={sub}>
|
||||
<FeeButtonProvider baseLineItems={existingBoostLineItem}>
|
||||
<FormType item={item} editThreshold={editThreshold}>
|
||||
{!item.isJob && <SubSelect label='sub' item={item} setSub={setSub} sub={sub} />}
|
||||
{!item.isJob &&
|
||||
<SubSelect
|
||||
className='w-auto d-flex'
|
||||
label='territory'
|
||||
filterSubs={s => s.name !== 'jobs' && s.postTypes?.includes(itemType)}
|
||||
onChange={(_, e) => setSub(e.target.value)}
|
||||
sub={sub}
|
||||
/>}
|
||||
</FormType>
|
||||
</FeeButtonProvider>
|
||||
</CenterLayout>
|
||||
|
|
|
@ -88,7 +88,7 @@ function Detail ({ fact }) {
|
|||
if (fact.type === 'earn') {
|
||||
return (
|
||||
<Link href={`/rewards/${new Date(fact.createdAt).toISOString().slice(0, 10)}`} className='px-3 text-reset' style={{ lineHeight: '140%' }}>
|
||||
SN distributes the sats it earns back to its best stackers daily. These sats come from <Link href='/~jobs'>jobs</Link>, boosts, posting fees, and donations.
|
||||
SN distributes the sats it earns back to its best stackers daily. These sats come from jobs, boosts, posting fees, and donations.
|
||||
</Link>
|
||||
)
|
||||
}
|
||||
|
@ -107,6 +107,18 @@ function Detail ({ fact }) {
|
|||
)
|
||||
}
|
||||
|
||||
if (fact.type === 'billing') {
|
||||
return (
|
||||
<div className='px-3'>billing for <Link href={`/~${fact.subName}`}>~{fact.subName}</Link></div>
|
||||
)
|
||||
}
|
||||
|
||||
if (fact.type === 'revenue') {
|
||||
return (
|
||||
<div className='px-3'>revenue for <Link href={`/~${fact.subName}`}>~{fact.subName}</Link></div>
|
||||
)
|
||||
}
|
||||
|
||||
if (!fact.item) {
|
||||
let zap
|
||||
try {
|
||||
|
@ -114,7 +126,7 @@ function Detail ({ fact }) {
|
|||
} catch { }
|
||||
return (
|
||||
<div className='px-3'>
|
||||
<Link className={satusClass(fact.status)} href={`/${fact.type}s/${fact.factId}`}>
|
||||
<Link className={satusClass(fact.status)} href={`/${fact.type}s/${fact.id}`}>
|
||||
{(!fact.bolt11 && <span className='d-block text-muted fw-bold fst-italic'>invoice deleted</span>) ||
|
||||
(zap && <span className='d-block'>nostr zap{zap.content && `: ${zap.content}`}</span>) ||
|
||||
(fact.description && <span className='d-block'>{fact.description}</span>)}
|
||||
|
@ -217,7 +229,7 @@ export default function Satistics ({ ssrData }) {
|
|||
<div className={[styles.type, styles.head].join(' ')}>type</div>
|
||||
<div className={[styles.detail, styles.head].join(' ')}>detail</div>
|
||||
<div className={[styles.sats, styles.head].join(' ')}>sats</div>
|
||||
{facts.map(f => <Fact key={f.id} fact={f} />)}
|
||||
{facts.map(f => <Fact key={f.type + f.id} fact={f} />)}
|
||||
</div>
|
||||
</div>
|
||||
<MoreFooter cursor={cursor} count={facts?.length} fetchMore={fetchMore} Skeleton={PageLoading} />
|
||||
|
|
|
@@ -0,0 +1,14 @@
import { getGetServerSideProps } from '../api/ssrApollo'
import { CenterLayout } from '../components/layout'
import TerritoryForm from '../components/territory-form'

export const getServerSideProps = getGetServerSideProps({})

export default function TerritoryPage () {
  return (
    <CenterLayout>
      <h1 className='mt-5'>break new ground</h1>
      <TerritoryForm />
    </CenterLayout>
  )
}
@@ -0,0 +1,30 @@
import { SUB } from '../../fragments/subs'
import { getGetServerSideProps } from '../../api/ssrApollo'
import { CenterLayout } from '../../components/layout'
import TerritoryForm from '../../components/territory-form'
import PageLoading from '../../components/page-loading'
import { useQuery } from '@apollo/client'
import { useRouter } from 'next/router'
import TerritoryPaymentDue from '../../components/territory-payment-due'

export const getServerSideProps = getGetServerSideProps({
  query: SUB,
  notFound: (data, vars, me) => !data.sub || Number(data.sub.userId) !== Number(me?.id),
  authRequired: true
})

export default function TerritoryPage ({ ssrData }) {
  const router = useRouter()
  const { data } = useQuery(SUB, { variables: { sub: router.query.sub } })
  if (!data && !ssrData) return <PageLoading />

  const { sub } = data || ssrData

  return (
    <CenterLayout sub={sub?.name}>
      <TerritoryPaymentDue sub={sub} />
      <h1 className='mt-5'>edit territory</h1>
      <TerritoryForm sub={sub} />
    </CenterLayout>
  )
}
@ -2,9 +2,21 @@ import { useRouter } from 'next/router'
|
|||
import { getGetServerSideProps } from '../../api/ssrApollo'
|
||||
import Items from '../../components/items'
|
||||
import Layout from '../../components/layout'
|
||||
import { SUB_ITEMS } from '../../fragments/subs'
|
||||
import { SUB_FULL, SUB_ITEMS } from '../../fragments/subs'
|
||||
import Snl from '../../components/snl'
|
||||
import WelcomeBanner from '../../components/banners'
|
||||
import { AccordianCard } from '../../components/accordian-item'
|
||||
import Text from '../../components/text'
|
||||
import { useMe } from '../../components/me'
|
||||
import Gear from '../../svgs/settings-5-fill.svg'
|
||||
import Link from 'next/link'
|
||||
import { useQuery } from '@apollo/client'
|
||||
import PageLoading from '../../components/page-loading'
|
||||
import CardFooter from 'react-bootstrap/CardFooter'
|
||||
import Hat from '../../components/hat'
|
||||
import styles from '../../components/item.module.css'
|
||||
import TerritoryPaymentDue from '../../components/territory-payment-due'
|
||||
import Badge from 'react-bootstrap/Badge'
|
||||
|
||||
export const getServerSideProps = getGetServerSideProps({
|
||||
query: SUB_ITEMS,
|
||||
|
@ -13,12 +25,49 @@ export const getServerSideProps = getGetServerSideProps({
|
|||
|
||||
export default function Sub ({ ssrData }) {
|
||||
const router = useRouter()
|
||||
const me = useMe()
|
||||
const variables = { ...router.query }
|
||||
const { data } = useQuery(SUB_FULL, { variables })
|
||||
|
||||
if (!data && !ssrData) return <PageLoading />
|
||||
const { sub } = data || ssrData
|
||||
|
||||
return (
|
||||
<Layout sub={variables.sub}>
|
||||
<Snl />
|
||||
<WelcomeBanner />
|
||||
{sub
|
||||
? (
|
||||
<>
|
||||
<TerritoryPaymentDue sub={sub} />
|
||||
<div className='mb-3 d-flex'>
|
||||
<div className='flex-grow-1'>
|
||||
<AccordianCard
|
||||
header={<small className='text-muted fw-bold'>territory details{sub.status === 'STOPPED' && <Badge className='ms-2' bg='danger'>archived</Badge>}</small>}
|
||||
>
|
||||
<div className='py-2'>
|
||||
<Text>{sub.desc}</Text>
|
||||
</div>
|
||||
<CardFooter className={`py-1 ${styles.other}`}>
|
||||
<div className='text-muted'>
|
||||
<span>founded by</span>
|
||||
<span> </span>
|
||||
<Link href={`/${sub.user.name}`}>
|
||||
@{sub.user.name}<span> </span><Hat className='fill-grey' user={sub.user} height={12} width={12} />
|
||||
</Link>
|
||||
</div>
|
||||
</CardFooter>
|
||||
</AccordianCard>
|
||||
</div>
|
||||
{Number(sub.userId) === Number(me?.id) &&
|
||||
<Link href={`/~${sub.name}/edit`} className='d-flex align-items-center flex-shrink-1 ps-2'>
|
||||
<Gear className='fill-grey' width={22} height={22} />
|
||||
</Link>}
|
||||
</div>
|
||||
</>)
|
||||
: (
|
||||
<>
|
||||
<Snl />
|
||||
<WelcomeBanner />
|
||||
</>)}
|
||||
<Items ssrData={ssrData} variables={variables} />
|
||||
</Layout>
|
||||
)
|
||||
|
|
|
@ -3,8 +3,10 @@ import Items from '../../../components/items'
|
|||
import { getGetServerSideProps } from '../../../api/ssrApollo'
|
||||
import RecentHeader from '../../../components/recent-header'
|
||||
import { useRouter } from 'next/router'
|
||||
import { SUB_ITEMS } from '../../../fragments/subs'
|
||||
import { SUB_FULL, SUB_ITEMS } from '../../../fragments/subs'
|
||||
import { COMMENT_TYPE_QUERY } from '../../../lib/constants'
|
||||
import { useQuery } from '@apollo/client'
|
||||
import PageLoading from '../../../components/page-loading'
|
||||
|
||||
const staticVariables = { sort: 'recent' }
|
||||
const variablesFunc = vars =>
|
||||
|
@ -18,10 +20,14 @@ export const getServerSideProps = getGetServerSideProps({
|
|||
export default function Index ({ ssrData }) {
|
||||
const router = useRouter()
|
||||
const variables = variablesFunc(router.query)
|
||||
const { data } = useQuery(SUB_FULL, { variables })
|
||||
|
||||
if (!data && !ssrData) return <PageLoading />
|
||||
const { sub } = data || ssrData
|
||||
|
||||
return (
|
||||
<Layout sub={variables.sub}>
|
||||
<RecentHeader sub={variables.sub} />
|
||||
<Layout sub={sub?.name}>
|
||||
<RecentHeader sub={sub} />
|
||||
<Items
|
||||
ssrData={ssrData}
|
||||
query={SUB_ITEMS}
|
||||
|
|
|
@ -0,0 +1,285 @@
|
|||
/*
|
||||
Warnings:
|
||||
|
||||
- Added the required column `billingCost` to the `Sub` table without a default value. This is not possible if the table is not empty.
|
||||
- Added the required column `billingType` to the `Sub` table without a default value. This is not possible if the table is not empty.
|
||||
- Added the required column `userId` to the `Sub` table without a default value. This is not possible if the table is not empty.
|
||||
|
||||
*/
|
||||
-- CreateEnum
|
||||
CREATE TYPE "BillingType" AS ENUM ('MONTHLY', 'YEARLY', 'ONCE');
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "SubActType" AS ENUM ('BILLING', 'REVENUE');
|
||||
|
||||
-- AlterEnum
|
||||
ALTER TYPE "Status" ADD VALUE 'GRACE';
|
||||
|
||||
-- add new columns giving old columns default special default values
|
||||
ALTER TABLE "Sub"
|
||||
ADD COLUMN "billedLastAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
ADD COLUMN "billingCost" INTEGER NOT NULL DEFAULT 0,
|
||||
ADD COLUMN "billingType" "BillingType" NOT NULL DEFAULT 'ONCE',
|
||||
ADD COLUMN "parentName" CITEXT,
|
||||
ADD COLUMN "path" ltree,
|
||||
ADD COLUMN "status" "Status" NOT NULL DEFAULT 'ACTIVE',
|
||||
ADD COLUMN "userId" INTEGER NOT NULL DEFAULT 616,
|
||||
ADD COLUMN "rewardsPct" INTEGER NOT NULL DEFAULT 100;
|
||||
|
||||
-- set the default values for the new columns from this point forward
|
||||
ALTER TABLE "Sub"
|
||||
ALTER COLUMN "billingCost" DROP DEFAULT,
|
||||
ALTER COLUMN "billingType" DROP DEFAULT,
|
||||
ALTER COLUMN "userId" DROP DEFAULT,
|
||||
ALTER COLUMN "rewardsPct" SET DEFAULT 50;
|
||||
|
||||
-- constrain percent to be between 0 and 100
|
||||
ALTER TABLE "Sub" ADD CONSTRAINT "rewardsPct" CHECK ("rewardsPct" >= 0 AND "rewardsPct" <= 100) NOT VALID;
|
||||
|
||||
-- we plan to structure subs as a tree
|
||||
UPDATE "Sub" SET "path" = LOWER(name)::ltree;
|
||||
|
||||
-- assign subs to appropriate people
|
||||
-- UPDATE "Sub" SET "userId" = 6030 WHERE name = 'tech';
|
||||
-- UPDATE "Sub" SET "userId" = 4502 WHERE name = 'meta';
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "SubAct" (
|
||||
"id" SERIAL NOT NULL,
|
||||
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"userId" INTEGER NOT NULL,
|
||||
"subName" CITEXT NOT NULL,
|
||||
"msats" BIGINT NOT NULL,
|
||||
"type" "SubActType" NOT NULL,
|
||||
|
||||
CONSTRAINT "SubAct_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "SubAct_userId_idx" ON "SubAct"("userId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "SubAct_userId_type_idx" ON "SubAct"("userId", "type");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "SubAct_type_idx" ON "SubAct"("type");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "SubAct_created_at_idx" ON "SubAct"("created_at");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "SubAct_created_at_type_idx" ON "SubAct"("created_at", "type");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "SubAct_userId_created_at_type_idx" ON "SubAct"("userId", "created_at", "type");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "Sub_parentName_idx" ON "Sub"("parentName");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "Sub_created_at_idx" ON "Sub"("created_at");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "Sub_userId_idx" ON "Sub"("userId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "Sub_path_idx" ON "Sub" USING GIST ("path" gist_ltree_ops(siglen=2024));
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "Sub" ADD CONSTRAINT "Sub_parentName_fkey" FOREIGN KEY ("parentName") REFERENCES "Sub"("name") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "Sub" ADD CONSTRAINT "Sub_userId_fkey" FOREIGN KEY ("userId") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "SubAct" ADD CONSTRAINT "SubAct_userId_fkey" FOREIGN KEY ("userId") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "SubAct" ADD CONSTRAINT "SubAct_subName_fkey" FOREIGN KEY ("subName") REFERENCES "Sub"("name") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
CREATE OR REPLACE FUNCTION update_sub_path() RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
npath ltree;
|
||||
BEGIN
|
||||
IF NEW."parentName" IS NULL THEN
|
||||
SELECT LOWER(NEW.name)::ltree INTO npath;
|
||||
NEW."path" = npath;
|
||||
ELSEIF TG_OP = 'INSERT' OR OLD."parentName" IS NULL OR OLD."parentName" != NEW."parentName" THEN
|
||||
SELECT "path" || LOWER(NEW.name)::text
|
||||
FROM "Sub"
|
||||
WHERE name = NEW."parentName"
|
||||
INTO npath;
|
||||
|
||||
IF npath IS NULL THEN
|
||||
RAISE EXCEPTION 'Invalid parent name %', NEW."parentName";
|
||||
END IF;
|
||||
NEW."path" = npath;
|
||||
END IF;
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TRIGGER sub_path_tgr
|
||||
BEFORE INSERT OR UPDATE ON "Sub"
|
||||
FOR EACH ROW EXECUTE PROCEDURE update_sub_path();
|
||||
|
||||
-- fix balance limit check
|
||||
CREATE OR REPLACE FUNCTION create_invoice(hash TEXT, bolt11 TEXT, expires_at timestamp(3) without time zone,
|
||||
msats_req BIGINT, user_id INTEGER, idesc TEXT, comment TEXT, lud18_data JSONB, inv_limit INTEGER, balance_limit_msats BIGINT)
|
||||
RETURNS "Invoice"
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
DECLARE
|
||||
invoice "Invoice";
|
||||
inv_limit_reached BOOLEAN;
|
||||
balance_limit_reached BOOLEAN;
|
||||
inv_pending_msats BIGINT;
|
||||
BEGIN
|
||||
PERFORM ASSERT_SERIALIZED();
|
||||
|
||||
-- prevent too many pending invoices
|
||||
SELECT inv_limit > 0 AND count(*) >= inv_limit, COALESCE(sum("msatsRequested"), 0) INTO inv_limit_reached, inv_pending_msats
|
||||
FROM "Invoice"
|
||||
WHERE "userId" = user_id AND "expiresAt" > now_utc() AND "confirmedAt" IS NULL AND cancelled = false;
|
||||
|
||||
IF inv_limit_reached THEN
|
||||
RAISE EXCEPTION 'SN_INV_PENDING_LIMIT';
|
||||
END IF;
|
||||
|
||||
-- prevent pending invoices + msats from exceeding the limit
|
||||
SELECT balance_limit_msats > 0 AND inv_pending_msats+msats_req+msats > balance_limit_msats INTO balance_limit_reached
|
||||
FROM users
|
||||
WHERE id = user_id;
|
||||
|
||||
IF balance_limit_reached THEN
|
||||
RAISE EXCEPTION 'SN_INV_EXCEED_BALANCE';
|
||||
END IF;
|
||||
|
||||
-- we good, proceed frens
|
||||
INSERT INTO "Invoice" (hash, bolt11, "expiresAt", "msatsRequested", "userId", created_at, updated_at, "desc", comment, "lud18Data")
|
||||
VALUES (hash, bolt11, expires_at, msats_req, user_id, now_utc(), now_utc(), idesc, comment, lud18_data) RETURNING * INTO invoice;
|
||||
|
||||
INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter)
|
||||
VALUES ('checkInvoice', jsonb_build_object('hash', hash), 21, true, now() + interval '10 seconds');
|
||||
|
||||
RETURN invoice;
|
||||
END;
|
||||
$$;
|
||||
|
||||
|
||||
-- get spenders
|
||||
DROP MATERIALIZED VIEW IF EXISTS spender_growth_days;
|
||||
CREATE MATERIALIZED VIEW IF NOT EXISTS spender_growth_days AS
|
||||
SELECT day, count(DISTINCT "userId") as any,
|
||||
count(DISTINCT "userId") FILTER (WHERE act = 'STREAM') as jobs,
|
||||
count(DISTINCT "userId") FILTER (WHERE act = 'BOOST') as boost,
|
||||
count(DISTINCT "userId") FILTER (WHERE act = 'FEE') as fees,
|
||||
count(DISTINCT "userId") FILTER (WHERE act = 'TIP') as tips,
|
||||
count(DISTINCT "userId") FILTER (WHERE act = 'DONATION') as donations,
|
||||
count(DISTINCT "userId") FILTER (WHERE act = 'TERRITORY') as territories
|
||||
FROM days
|
||||
LEFT JOIN
|
||||
((SELECT "ItemAct".created_at, "userId", act::text as act
|
||||
FROM "ItemAct")
|
||||
UNION ALL
|
||||
(SELECT created_at, "userId", 'DONATION' as act
|
||||
FROM "Donation")
|
||||
UNION ALL
|
||||
(SELECT created_at, "userId", 'TERRITORY' as act
|
||||
FROM "SubAct"
|
||||
WHERE type = 'BILLING')
|
||||
) u ON day = date_trunc('day', timezone('America/Chicago', u.created_at at time zone 'UTC'))
|
||||
GROUP BY day
|
||||
ORDER BY day ASC;
|
||||
|
||||
-- get spending
|
||||
DROP MATERIALIZED VIEW IF EXISTS spending_growth_days;
|
||||
CREATE MATERIALIZED VIEW IF NOT EXISTS spending_growth_days AS
|
||||
SELECT day, coalesce(floor(sum(msats) FILTER (WHERE act = 'STREAM')/1000), 0) as jobs,
|
||||
coalesce(floor(sum(msats) FILTER (WHERE act = 'BOOST')/1000), 0) as boost,
|
||||
coalesce(floor(sum(msats) FILTER (WHERE act NOT IN ('BOOST', 'TIP', 'STREAM', 'DONATION', 'TERRITORY'))/1000), 0) as fees,
|
||||
coalesce(floor(sum(msats) FILTER (WHERE act = 'TIP')/1000), 0) as tips,
|
||||
coalesce(floor(sum(msats) FILTER (WHERE act = 'DONATION')/1000), 0) as donations,
|
||||
coalesce(floor(sum(msats) FILTER (WHERE act = 'TERRITORY')/1000), 0) as territories
|
||||
FROM days
|
||||
LEFT JOIN
|
||||
((SELECT "ItemAct".created_at, msats, act::text as act
|
||||
FROM "ItemAct")
|
||||
UNION ALL
|
||||
(SELECT created_at, sats * 1000 as msats, 'DONATION' as act
|
||||
FROM "Donation")
|
||||
UNION ALL
|
||||
(SELECT created_at, msats, 'TERRITORY' as act
|
||||
FROM "SubAct"
|
||||
WHERE type = 'BILLING')
|
||||
) u ON day = date_trunc('day', timezone('America/Chicago', u.created_at at time zone 'UTC'))
|
||||
GROUP BY day
|
||||
ORDER BY day ASC;
|
||||
|
||||
-- get stackers
|
||||
DROP MATERIALIZED VIEW IF EXISTS stackers_growth_days;
|
||||
CREATE MATERIALIZED VIEW IF NOT EXISTS stackers_growth_days AS
|
||||
SELECT day, count(distinct user_id) as any,
|
||||
count(distinct user_id) FILTER (WHERE type = 'POST') as posts,
|
||||
count(distinct user_id) FILTER (WHERE type = 'COMMENT') as comments,
|
||||
count(distinct user_id) FILTER (WHERE type = 'EARN') as rewards,
|
||||
count(distinct user_id) FILTER (WHERE type = 'REFERRAL') as referrals,
|
||||
count(distinct user_id) FILTER (WHERE type = 'REVENUE') as territories
|
||||
FROM days
|
||||
LEFT JOIN
|
||||
((SELECT "ItemAct".created_at, "Item"."userId" as user_id, CASE WHEN "Item"."parentId" IS NULL THEN 'POST' ELSE 'COMMENT' END as type
|
||||
FROM "ItemAct"
|
||||
JOIN "Item" on "ItemAct"."itemId" = "Item".id
|
||||
WHERE "ItemAct".act = 'TIP')
|
||||
UNION ALL
|
||||
(SELECT created_at, "userId" as user_id, 'EARN' as type
|
||||
FROM "Earn")
|
||||
UNION ALL
|
||||
(SELECT created_at, "referrerId" as user_id, 'REFERRAL' as type
|
||||
FROM "ReferralAct")
|
||||
UNION ALL
|
||||
(SELECT created_at, "userId" as user_id, 'REVENUE' as type
|
||||
FROM "SubAct"
|
||||
WHERE type = 'REVENUE')
|
||||
) u ON day = date_trunc('day', timezone('America/Chicago', u.created_at at time zone 'UTC'))
|
||||
GROUP BY day
|
||||
ORDER BY day ASC;
|
||||
|
||||
-- get stacking
|
||||
DROP MATERIALIZED VIEW IF EXISTS stacking_growth_days;
|
||||
CREATE MATERIALIZED VIEW IF NOT EXISTS stacking_growth_days AS
|
||||
SELECT day, coalesce(floor(sum(airdrop)/1000),0) as rewards,
|
||||
coalesce(floor(sum(post)/1000),0) as posts,
|
||||
coalesce(floor(sum(comment)/1000),0) as comments,
|
||||
coalesce(floor(sum(referral)/1000),0) as referrals,
|
||||
coalesce(floor(sum(revenue)/1000),0) as territories
|
||||
FROM days
|
||||
LEFT JOIN
|
||||
((SELECT "ItemAct".created_at, 0 as airdrop,
|
||||
CASE WHEN "Item"."parentId" IS NULL THEN 0 ELSE "ItemAct".msats END as comment,
|
||||
CASE WHEN "Item"."parentId" IS NULL THEN "ItemAct".msats ELSE 0 END as post,
|
||||
0 as referral, 0 as revenue
|
||||
FROM "ItemAct"
|
||||
JOIN "Item" on "ItemAct"."itemId" = "Item".id
|
||||
WHERE "ItemAct".act = 'TIP')
|
||||
UNION ALL
|
||||
(SELECT created_at, 0 as airdrop, 0 as post, 0 as comment, msats as referral, 0 as revenue
|
||||
FROM "ReferralAct")
|
||||
UNION ALL
|
||||
(SELECT created_at, msats as airdrop, 0 as post, 0 as comment, 0 as referral, 0 as revenue
|
||||
FROM "Earn")
|
||||
UNION ALL
|
||||
(SELECT created_at, 0 as airdrop, 0 as post, 0 as comment, 0 as referral, msats as revenue
|
||||
FROM "SubAct"
|
||||
WHERE type = 'REVENUE')
|
||||
) u ON day = date_trunc('day', timezone('America/Chicago', u.created_at at time zone 'UTC'))
|
||||
GROUP BY day
|
||||
ORDER BY day ASC;
|
||||
|
||||
-- indices for the other materialized view so we can refresh concurrently
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS spender_growth_days_idx ON spender_growth_days(day);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS spending_growth_days_idx ON spending_growth_days(day);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS stackers_growth_days_idx ON stackers_growth_days(day);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS stacking_growth_days_idx ON stacking_growth_days(day);
|
||||
|
|
@ -100,6 +100,8 @@ model User {
|
|||
muteds Mute[] @relation("muted")
|
||||
ArcOut Arc[] @relation("fromUser")
|
||||
ArcIn Arc[] @relation("toUser")
|
||||
Sub Sub[]
|
||||
SubAct SubAct[]
|
||||
|
||||
@@index([photoId])
|
||||
@@index([createdAt], map: "users.created_at_index")
|
||||
|
@ -381,16 +383,66 @@ model PollVote {
|
|||
@@index([userId], map: "PollVote.userId_index")
|
||||
}
|
||||
|
||||
enum BillingType {
|
||||
MONTHLY
|
||||
YEARLY
|
||||
ONCE
|
||||
}
|
||||
|
||||
enum RankingType {
|
||||
WOT
|
||||
RECENT
|
||||
AUCTION
|
||||
}
|
||||
|
||||
model Sub {
|
||||
name String @id @db.Citext
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
updatedAt DateTime @default(now()) @updatedAt @map("updated_at")
|
||||
name String @id @db.Citext
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
updatedAt DateTime @default(now()) @updatedAt @map("updated_at")
|
||||
userId Int
|
||||
parentName String? @db.Citext
|
||||
path Unsupported("ltree")?
|
||||
|
||||
postTypes PostType[]
|
||||
rankingType RankingType
|
||||
baseCost Int @default(1)
|
||||
baseCost Int @default(1)
|
||||
rewardsPct Int @default(50)
|
||||
desc String?
|
||||
status Status @default(ACTIVE)
|
||||
billingType BillingType
|
||||
billingCost Int
|
||||
billedLastAt DateTime @default(now())
|
||||
|
||||
parent Sub? @relation("ParentChildren", fields: [parentName], references: [name])
|
||||
children Sub[] @relation("ParentChildren")
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
Item Item[]
|
||||
Subscription Subscription[]
|
||||
SubAct SubAct[]
|
||||
|
||||
@@index([parentName])
|
||||
@@index([createdAt])
|
||||
@@index([userId])
|
||||
@@index([path], type: Gist)
|
||||
}
|
||||
|
||||
model SubAct {
|
||||
id Int @id @default(autoincrement())
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
updatedAt DateTime @default(now()) @updatedAt @map("updated_at")
|
||||
userId Int
|
||||
subName String @db.Citext
|
||||
msats BigInt
|
||||
type SubActType
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
sub Sub @relation(fields: [subName], references: [name], onDelete: Cascade)
|
||||
|
||||
@@index([userId])
|
||||
@@index([userId, type])
|
||||
@@index([type])
|
||||
@@index([createdAt])
|
||||
@@index([createdAt, type])
|
||||
@@index([userId, createdAt, type])
|
||||
}
|
||||
|
||||
model Subscription {
|
||||
|
@ -630,10 +682,16 @@ enum EarnType {
|
|||
TIP_POST
|
||||
}
|
||||
|
||||
enum SubActType {
|
||||
BILLING
|
||||
REVENUE
|
||||
}
|
||||
|
||||
enum Status {
|
||||
ACTIVE
|
||||
STOPPED
|
||||
NOSATS
|
||||
GRACE
|
||||
}
|
||||
|
||||
enum PostType {
|
||||
|
@ -644,12 +702,6 @@ enum PostType {
|
|||
BOUNTY
|
||||
}
|
||||
|
||||
enum RankingType {
|
||||
WOT
|
||||
RECENT
|
||||
AUCTION
|
||||
}
|
||||
|
||||
enum ItemActType {
|
||||
VOTE
|
||||
BOOST
|
||||
|
|
|
@ -58,7 +58,7 @@ $close-color: inherit;
|
|||
$alert-border-radius: #{33% 2%} / #{11% 74%};
|
||||
$link-color: #007cbe;
|
||||
$link-decoration: none;
|
||||
$font-size-base: .9rem;
|
||||
$font-size-base: .93rem;
|
||||
$enable-responsive-font-sizes: true;
|
||||
$link-hover-decoration: none;
|
||||
$dropdown-border-color: #ced4da;
|
||||
|
@ -95,6 +95,41 @@ $grid-gutter-width: 2rem;
|
|||
$toast-spacing: .5rem;
|
||||
$btn-close-bg: none;
|
||||
|
||||
$accordion-padding-y: 0rem;
|
||||
$accordion-padding-x: .75rem;
|
||||
// $accordion-color: var(--#{$prefix}body-color) !default;
|
||||
// $accordion-bg: var(--#{$prefix}body-bg) !default;
|
||||
// $accordion-border-width: var(--#{$prefix}border-width) !default;
|
||||
// $accordion-border-color: var(--#{$prefix}border-color) !default;
|
||||
// $accordion-border-radius: var(--#{$prefix}border-radius) !default;
|
||||
// $accordion-inner-border-radius: subtract($accordion-border-radius, $accordion-border-width) !default;
|
||||
|
||||
// $accordion-body-padding-y: $accordion-padding-y !default;
|
||||
// $accordion-body-padding-x: $accordion-padding-x !default;
|
||||
|
||||
// $accordion-button-padding-y: $accordion-padding-y !default;
|
||||
// $accordion-button-padding-x: $accordion-padding-x !default;
|
||||
// $accordion-button-color: var(--#{$prefix}body-color) !default;
|
||||
// $accordion-button-bg: var(--#{$prefix}accordion-bg) !default;
|
||||
// $accordion-transition: $btn-transition, border-radius .15s ease !default;
|
||||
|
||||
// $accordion-button-focus-border-color: $input-focus-border-color !default;
|
||||
// $accordion-button-focus-box-shadow: $btn-focus-box-shadow !default;
|
||||
|
||||
$accordion-icon-width: 2rem;
|
||||
$accordion-icon-color: $form-select-indicator-color;
|
||||
$accordion-icon-active-color: $form-select-indicator-color;
|
||||
$accordion-icon-color-dark: $form-select-indicator-color;
|
||||
$accordion-icon-active-color-dark: $form-select-indicator-color;
|
||||
$accordion-button-active-bg: var(--theme-clickToContextColor);
|
||||
$accordion-button-active-color: var(--bs-body-color);
|
||||
|
||||
$accordion-button-icon: url("data:image/svg+xml, %3Csvg fill='#{$form-select-indicator-color}' xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'%3E%3Cpath d='M12 15.0006L7.75732 10.758L9.17154 9.34375L12 12.1722L14.8284 9.34375L16.2426 10.758L12 15.0006Z'%3E%3C/path%3E%3C/svg%3E%0A");
|
||||
$accordion-button-active-icon: $accordion-button-icon;
|
||||
$accordion-button-icon-dark: $accordion-button-icon;
|
||||
$accordion-button-active-icon-dark: $accordion-button-icon;
|
||||
|
||||
|
||||
:root, [data-bs-theme=light] {
|
||||
--theme-navLink: rgba(0, 0, 0, 0.55);
|
||||
--theme-navLinkFocus: rgba(0, 0, 0, 0.7);
|
||||
|
@ -117,7 +152,7 @@ $btn-close-bg: none;
|
|||
[data-bs-theme=dark] {
|
||||
color-scheme: dark;
|
||||
--theme-inputBg: #121211;
|
||||
--theme-inputDisabledBg: #121211;
|
||||
--theme-inputDisabledBg: #2d2d2d;
|
||||
--theme-navLink: rgba(255, 255, 255, 0.55);
|
||||
--theme-navLinkFocus: rgba(255, 255, 255, 0.75);
|
||||
--theme-navLinkActive: rgba(255, 255, 255, 0.9);
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M2.13225 13.6308C1.95018 12.5349 1.95619 11.434 2.13313 10.3695C3.23434 10.3963 4.22471 9.86798 4.60963 8.93871C4.99454 8.00944 4.66782 6.93557 3.87024 6.17581C4.49783 5.29798 5.27202 4.51528 6.17568 3.86911C6.93548 4.66716 8.00968 4.99416 8.9392 4.60914C9.86872 4.22412 10.3971 3.23332 10.37 2.13176C11.4659 1.94969 12.5668 1.9557 13.6313 2.13265C13.6045 3.23385 14.1329 4.22422 15.0621 4.60914C15.9914 4.99406 17.0653 4.66733 17.825 3.86975C18.7029 4.49734 19.4856 5.27153 20.1317 6.1752C19.3337 6.93499 19.0067 8.00919 19.3917 8.93871C19.7767 9.86823 20.7675 10.3966 21.8691 10.3695C22.0511 11.4654 22.0451 12.5663 21.8682 13.6308C20.767 13.6041 19.7766 14.1324 19.3917 15.0616C19.0068 15.9909 19.3335 17.0648 20.1311 17.8245C19.5035 18.7024 18.7293 19.4851 17.8256 20.1312C17.0658 19.3332 15.9916 19.0062 15.0621 19.3912C14.1326 19.7762 13.6043 20.767 13.6313 21.8686C12.5354 22.0507 11.4345 22.0447 10.37 21.8677C10.3968 20.7665 9.86847 19.7761 8.9392 19.3912C8.00993 19.0063 6.93605 19.333 6.1763 20.1306C5.29847 19.503 4.51577 18.7288 3.8696 17.8252C4.66765 17.0654 4.99465 15.9912 4.60963 15.0616C4.22461 14.1321 3.23381 13.6038 2.13225 13.6308ZM12.0007 15.0002C13.6575 15.0002 15.0007 13.657 15.0007 12.0002C15.0007 10.3433 13.6575 9.00018 12.0007 9.00018C10.3438 9.00018 9.00066 10.3433 9.00066 12.0002C9.00066 13.657 10.3438 15.0002 12.0007 15.0002Z"></path></svg>
|
After Width: | Height: | Size: 1.4 KiB |
|
@ -1,28 +1,23 @@
|
|||
import serialize from '../api/resolvers/serial.js'
|
||||
|
||||
export function auction ({ models }) {
|
||||
return async function ({ name }) {
|
||||
console.log('running', name)
|
||||
// get all items we need to check
|
||||
const items = await models.item.findMany(
|
||||
{
|
||||
where: {
|
||||
maxBid: {
|
||||
not: null
|
||||
},
|
||||
status: {
|
||||
not: 'STOPPED'
|
||||
}
|
||||
export async function auction ({ models }) {
|
||||
// get all items we need to check
|
||||
const items = await models.item.findMany(
|
||||
{
|
||||
where: {
|
||||
maxBid: {
|
||||
not: null
|
||||
},
|
||||
status: {
|
||||
not: 'STOPPED'
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
// for each item, run serialized auction function
|
||||
items.forEach(async item => {
|
||||
await serialize(models,
|
||||
models.$executeRaw`SELECT run_auction(${item.id}::INTEGER)`)
|
||||
})
|
||||
|
||||
console.log('done', name)
|
||||
}
|
||||
// for each item, run serialized auction function
|
||||
items.forEach(async item => {
|
||||
await serialize(models,
|
||||
models.$executeRaw`SELECT run_auction(${item.id}::INTEGER)`)
|
||||
})
|
||||
}
|
||||
|
|
|
@ -1,27 +1,23 @@
|
|||
import { deleteObjects } from '../api/s3'
|
||||
import { ANON_USER_ID } from '../lib/constants'
|
||||
|
||||
export function deleteUnusedImages ({ models }) {
|
||||
return async function ({ name }) {
|
||||
console.log('running', name)
|
||||
export async function deleteUnusedImages ({ models }) {
|
||||
// delete all images in database and S3 which weren't paid in the last 24 hours
|
||||
const unpaidImages = await models.$queryRaw`
|
||||
SELECT id
|
||||
FROM "Upload"
|
||||
WHERE (paid = 'f'
|
||||
OR (
|
||||
-- for non-textarea images, they are free and paid is null
|
||||
paid IS NULL
|
||||
-- if the image is not used by a user or item (eg jobs), delete it
|
||||
AND NOT EXISTS (SELECT * FROM users WHERE "photoId" = "Upload".id)
|
||||
AND NOT EXISTS (SELECT * FROM "Item" WHERE "uploadId" = "Upload".id)
|
||||
))
|
||||
AND created_at < date_trunc('hour', now() - CASE WHEN "userId" = ${ANON_USER_ID} THEN interval '1 hour' ELSE interval '24 hours' END)`
|
||||
|
||||
// delete all images in database and S3 which weren't paid in the last 24 hours
|
||||
const unpaidImages = await models.$queryRaw`
|
||||
SELECT id
|
||||
FROM "Upload"
|
||||
WHERE (paid = 'f'
|
||||
OR (
|
||||
-- for non-textarea images, they are free and paid is null
|
||||
paid IS NULL
|
||||
-- if the image is not used by a user or item (eg jobs), delete it
|
||||
AND NOT EXISTS (SELECT * FROM users WHERE "photoId" = "Upload".id)
|
||||
AND NOT EXISTS (SELECT * FROM "Item" WHERE "uploadId" = "Upload".id)
|
||||
))
|
||||
AND created_at < date_trunc('hour', now() - CASE WHEN "userId" = ${ANON_USER_ID} THEN interval '1 hour' ELSE interval '24 hours' END)`
|
||||
|
||||
const s3Keys = unpaidImages.map(({ id }) => id)
|
||||
console.log('deleting images:', s3Keys)
|
||||
await deleteObjects(s3Keys)
|
||||
await models.upload.deleteMany({ where: { id: { in: s3Keys } } })
|
||||
}
|
||||
const s3Keys = unpaidImages.map(({ id }) => id)
|
||||
console.log('deleting images:', s3Keys)
|
||||
await deleteObjects(s3Keys)
|
||||
await models.upload.deleteMany({ where: { id: { in: s3Keys } } })
|
||||
}
|
||||
|
|
153
worker/earn.js
153
worker/earn.js
|
@ -8,16 +8,19 @@ const UPVOTE_EACH_REWARD = 4.0
|
|||
const TOP_PERCENTILE = 33
|
||||
const TOTAL_UPPER_BOUND_MSATS = 1000000000
|
||||
|
||||
export function earn ({ models }) {
|
||||
return async function ({ name }) {
|
||||
console.log('running', name)
|
||||
export async function earn ({ name, models }) {
|
||||
// rewards are calculated sitewide still
|
||||
// however for user gen subs currently only 50% of their fees go to rewards
|
||||
// the other 50% goes to the founder of the sub
|
||||
|
||||
// compute how much sn earned today
|
||||
const [{ sum: sumDecimal }] = await models.$queryRaw`
|
||||
// compute how much sn earned today
|
||||
const [{ sum: sumDecimal }] = await models.$queryRaw`
|
||||
SELECT coalesce(sum(msats), 0) as sum
|
||||
FROM (
|
||||
(SELECT ("ItemAct".msats - COALESCE("ReferralAct".msats, 0)) as msats
|
||||
(SELECT ("ItemAct".msats - COALESCE("ReferralAct".msats, 0)) * COALESCE("Sub"."rewardsPct", 100) * 0.01 as msats
|
||||
FROM "ItemAct"
|
||||
JOIN "Item" ON "Item"."id" = "ItemAct"."itemId"
|
||||
LEFT JOIN "Sub" ON "Sub"."name" = "Item"."subName"
|
||||
LEFT JOIN "ReferralAct" ON "ReferralAct"."itemActId" = "ItemAct".id
|
||||
WHERE date_trunc('day', "ItemAct".created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago') = date_trunc('day', (now() - interval '1 day') AT TIME ZONE 'America/Chicago')
|
||||
AND "ItemAct".act <> 'TIP')
|
||||
|
@ -37,32 +40,32 @@ export function earn ({ models }) {
|
|||
HAVING COUNT("ItemForward".id) = 0)
|
||||
) subquery`
|
||||
|
||||
// XXX prisma will return a Decimal (https://mikemcl.github.io/decimal.js)
|
||||
// because sum of a BIGINT returns a NUMERIC type (https://www.postgresql.org/docs/13/functions-aggregate.html)
|
||||
// and Decimal is what prisma maps it to https://www.prisma.io/docs/concepts/components/prisma-client/raw-database-access#raw-query-type-mapping
|
||||
// so check it before coercing to Number
|
||||
if (!sumDecimal || sumDecimal.lessThanOrEqualTo(0)) {
|
||||
console.log('done', name, 'no sats to award today')
|
||||
return
|
||||
}
|
||||
// XXX prisma will return a Decimal (https://mikemcl.github.io/decimal.js)
|
||||
// because sum of a BIGINT returns a NUMERIC type (https://www.postgresql.org/docs/13/functions-aggregate.html)
|
||||
// and Decimal is what prisma maps it to https://www.prisma.io/docs/concepts/components/prisma-client/raw-database-access#raw-query-type-mapping
|
||||
// so check it before coercing to Number
|
||||
if (!sumDecimal || sumDecimal.lessThanOrEqualTo(0)) {
|
||||
console.log('done', name, 'no sats to award today')
|
||||
return
|
||||
}
|
||||
|
||||
// extra sanity check on rewards ... if it's more than upper bound, we
|
||||
// probably have a bug somewhere or we've grown A LOT
|
||||
if (sumDecimal.greaterThan(TOTAL_UPPER_BOUND_MSATS)) {
|
||||
console.log('done', name, 'error: too many sats to award today', sumDecimal)
|
||||
return
|
||||
}
|
||||
// extra sanity check on rewards ... if it's more than upper bound, we
|
||||
// probably have a bug somewhere or we've grown A LOT
|
||||
if (sumDecimal.greaterThan(TOTAL_UPPER_BOUND_MSATS)) {
|
||||
console.log('done', name, 'error: too many sats to award today', sumDecimal)
|
||||
return
|
||||
}
|
||||
|
||||
const sum = Number(sumDecimal)
|
||||
const heads = Math.random() < 0.5
|
||||
// if this category is selected, double its proportion
|
||||
// if it isn't selected, zero its proportion
|
||||
const itemRewardMult = heads ? 0 : 2.0
|
||||
const upvoteRewardMult = heads ? 2.0 : 0
|
||||
const sum = Number(sumDecimal)
|
||||
const heads = Math.random() < 0.5
|
||||
// if this category is selected, double its proportion
|
||||
// if it isn't selected, zero its proportion
|
||||
const itemRewardMult = heads ? 0 : 2.0
|
||||
const upvoteRewardMult = heads ? 2.0 : 0
|
||||
|
||||
console.log(name, 'giving away', sum, 'msats', 'rewarding', heads ? 'items' : 'upvotes')
|
||||
console.log(name, 'giving away', sum, 'msats', 'rewarding', heads ? 'items' : 'upvotes')
|
||||
|
||||
/*
|
||||
/*
|
||||
How earnings (used to) work:
|
||||
1/3: top 21% posts over last 36 hours, scored on a relative basis
|
||||
1/3: top 21% comments over last 36 hours, scored on a relative basis
|
||||
|
@ -75,8 +78,8 @@ export function earn ({ models }) {
|
|||
Now: 100% of earnings go to either top 33% of comments/posts or top 33% of upvoters
|
||||
*/
|
||||
|
||||
// get earners { userId, id, type, rank, proportion }
|
||||
const earners = await models.$queryRaw`
|
||||
// get earners { userId, id, type, rank, proportion }
|
||||
const earners = await models.$queryRaw`
|
||||
-- get top 21% of posts and comments
|
||||
WITH item_ratios AS (
|
||||
SELECT *,
|
||||
|
@ -137,42 +140,72 @@ export function earn ({ models }) {
|
|||
FROM proportions
|
||||
WHERE proportion > 0.000001`
|
||||
|
||||
// in order to group earnings for users we use the same createdAt time for
|
||||
// all earnings
|
||||
const now = new Date(new Date().getTime())
|
||||
// in order to group earnings for users we use the same createdAt time for
|
||||
// all earnings
|
||||
const now = new Date(new Date().getTime())
|
||||
|
||||
// this is just a sanity check because it seems like a good idea
|
||||
let total = 0
|
||||
// this is just a sanity check because it seems like a good idea
|
||||
let total = 0
|
||||
|
||||
const notifications = {}
|
||||
for (const earner of earners) {
|
||||
const earnings = Math.floor(parseFloat(earner.proportion) * sum)
|
||||
total += earnings
|
||||
if (total > sum) {
|
||||
console.log(name, 'total exceeds sum', total, '>', sum)
|
||||
return
|
||||
}
|
||||
|
||||
console.log('stacker', earner.userId, 'earned', earnings, 'proportion', earner.proportion, 'rank', earner.rank, 'type', earner.type)
|
||||
|
||||
if (earnings > 0) {
|
||||
await serialize(models,
|
||||
models.$executeRaw`SELECT earn(${earner.userId}::INTEGER, ${earnings},
|
||||
${now}::timestamp without time zone, ${earner.type}::"EarnType", ${earner.id}::INTEGER, ${earner.rank}::INTEGER)`)
|
||||
notifications[earner.userId] = {
|
||||
...notifications[earner.userId],
|
||||
total: earnings + (notifications[earner.userId]?.total || 0),
|
||||
[earner.type]: { msats: earnings, rank: earner.rank }
|
||||
}
|
||||
}
|
||||
const notifications = {}
|
||||
for (const earner of earners) {
|
||||
const earnings = Math.floor(parseFloat(earner.proportion) * sum)
|
||||
total += earnings
|
||||
if (total > sum) {
|
||||
console.log(name, 'total exceeds sum', total, '>', sum)
|
||||
return
|
||||
}
|
||||
|
||||
Promise.allSettled(Object.entries(notifications).map(([userId, earnings]) =>
|
||||
sendUserNotification(parseInt(userId, 10), buildUserNotification(earnings))
|
||||
)).catch(console.error)
|
||||
console.log('stacker', earner.userId, 'earned', earnings, 'proportion', earner.proportion, 'rank', earner.rank, 'type', earner.type)
|
||||
|
||||
console.log('done', name)
|
||||
if (earnings > 0) {
|
||||
await serialize(models,
|
||||
models.$executeRaw`SELECT earn(${earner.userId}::INTEGER, ${earnings},
|
||||
${now}::timestamp without time zone, ${earner.type}::"EarnType", ${earner.id}::INTEGER, ${earner.rank}::INTEGER)`)
|
||||
notifications[earner.userId] = {
|
||||
...notifications[earner.userId],
|
||||
total: earnings + (notifications[earner.userId]?.total || 0),
|
||||
[earner.type]: { msats: earnings, rank: earner.rank }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await territoryRevenue({ models })
|
||||
|
||||
Promise.allSettled(Object.entries(notifications).map(([userId, earnings]) =>
|
||||
sendUserNotification(parseInt(userId, 10), buildUserNotification(earnings))
|
||||
)).catch(console.error)
|
||||
}
|
||||
|
||||
async function territoryRevenue ({ models }) {
|
||||
await serialize(models,
|
||||
models.$executeRaw`
|
||||
WITH revenue AS (
|
||||
SELECT coalesce(sum(msats), 0) as revenue, "subName", "userId"
|
||||
FROM (
|
||||
SELECT ("ItemAct".msats - COALESCE("ReferralAct".msats, 0)) * (1 - (COALESCE("Sub"."rewardsPct", 100) * 0.01)) as msats,
|
||||
"Sub"."name" as "subName", "Sub"."userId" as "userId"
|
||||
FROM "ItemAct"
|
||||
JOIN "Item" ON "Item"."id" = "ItemAct"."itemId"
|
||||
JOIN "Sub" ON "Sub"."name" = "Item"."subName"
|
||||
LEFT JOIN "ReferralAct" ON "ReferralAct"."itemActId" = "ItemAct".id
|
||||
WHERE date_trunc('day', "ItemAct".created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago') = date_trunc('day', (now() - interval '1 day') AT TIME ZONE 'America/Chicago')
|
||||
AND "ItemAct".act <> 'TIP'
|
||||
AND "Sub".status <> 'STOPPED'
|
||||
) subquery
|
||||
GROUP BY "subName", "userId"
|
||||
),
|
||||
"SubActResult" AS (
|
||||
INSERT INTO "SubAct" (msats, "subName", "userId", type)
|
||||
SELECT revenue, "subName", "userId", 'REVENUE'
|
||||
FROM revenue
|
||||
WHERE revenue > 1000
|
||||
RETURNING *
|
||||
)
|
||||
UPDATE users SET msats = users.msats + "SubActResult".msats
|
||||
FROM "SubActResult"
|
||||
WHERE users.id = "SubActResult"."userId"`
|
||||
)
|
||||
}
|
||||
|
||||
function buildUserNotification (earnings) {
|
||||
|
|
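With territories, only the rewardsPct share of each fee feeds the sitewide reward pool, and territoryRevenue credits the remainder to the territory founder. A minimal sketch of that split, assuming msats is already net of any ReferralAct amount and rounding down to whole msats; splitFee is a hypothetical helper:

// mirror the rewardsPct math used by the earn and territoryRevenue queries above
function splitFee (msats, rewardsPct = 100) {
  const toRewards = Math.floor(msats * rewardsPct * 0.01)
  const toFounder = msats - toRewards
  return { toRewards, toFounder }
}

// e.g. a 1000 msat fee in a territory with rewardsPct = 70
// puts 700 msats into rewards and 300 msats toward the founder
splitFee(1000, 70) // { toRewards: 700, toFounder: 300 }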
|
@ -1,13 +1,5 @@
|
|||
import { deleteItemByAuthor } from '../lib/item.js'
|
||||
|
||||
export function deleteItem ({ models }) {
|
||||
return async function ({ data: eventData }) {
|
||||
console.log('deleteItem', eventData)
|
||||
const { id } = eventData
|
||||
try {
|
||||
await deleteItemByAuthor({ models, id })
|
||||
} catch (err) {
|
||||
console.error('failed to delete item', err)
|
||||
}
|
||||
}
|
||||
export async function deleteItem ({ data: { id }, models }) {
|
||||
await deleteItemByAuthor({ models, id })
|
||||
}
|
||||
|
|
|
@ -53,32 +53,22 @@ function decodeOriginalUrl (imgproxyUrl) {
|
|||
return originalUrl
|
||||
}
|
||||
|
||||
export function imgproxy ({ models }) {
|
||||
return async function ({ data: { id, forceFetch = false } }) {
|
||||
if (!imgProxyEnabled) return
|
||||
export async function imgproxy ({ data: { id, forceFetch = false }, models }) {
|
||||
if (!imgProxyEnabled) return
|
||||
|
||||
console.log('running imgproxy job', id)
|
||||
const item = await models.item.findUnique({ where: { id } })
|
||||
|
||||
const item = await models.item.findUnique({ where: { id } })
|
||||
|
||||
let imgproxyUrls = {}
|
||||
try {
|
||||
if (item.text) {
|
||||
imgproxyUrls = await createImgproxyUrls(id, item.text, { models, forceFetch })
|
||||
}
|
||||
if (item.url && !isJob(item)) {
|
||||
imgproxyUrls = { ...imgproxyUrls, ...(await createImgproxyUrls(id, item.url, { models, forceFetch })) }
|
||||
}
|
||||
} catch (err) {
|
||||
console.log('[imgproxy] error:', err)
|
||||
// rethrow for retry
|
||||
throw err
|
||||
}
|
||||
|
||||
console.log('[imgproxy] updating item', id, 'with urls', imgproxyUrls)
|
||||
|
||||
await models.item.update({ where: { id }, data: { imgproxyUrls } })
|
||||
let imgproxyUrls = {}
|
||||
if (item.text) {
|
||||
imgproxyUrls = await createImgproxyUrls(id, item.text, { models, forceFetch })
|
||||
}
|
||||
if (item.url && !isJob(item)) {
|
||||
imgproxyUrls = { ...imgproxyUrls, ...(await createImgproxyUrls(id, item.url, { models, forceFetch })) }
|
||||
}
|
||||
|
||||
console.log('[imgproxy] updating item', id, 'with urls', imgproxyUrls)
|
||||
|
||||
await models.item.update({ where: { id }, data: { imgproxyUrls } })
|
||||
}
|
||||
|
||||
export const createImgproxyUrls = async (id, text, { models, forceFetch }) => {
|
||||
|
|
|
@ -17,6 +17,7 @@ import { views, rankViews } from './views.js'
|
|||
import { imgproxy } from './imgproxy.js'
|
||||
import { deleteItem } from './ephemeralItems.js'
|
||||
import { deleteUnusedImages } from './deleteUnusedImages.js'
|
||||
import { territoryBilling } from './territory.js'
|
||||
|
||||
const { loadEnvConfig } = nextEnv
|
||||
const { ApolloClient, HttpLink, InMemoryCache } = apolloClient
|
||||
|
@ -54,25 +55,39 @@ async function work () {
|
|||
|
||||
boss.on('error', error => console.error(error))
|
||||
|
||||
function jobWrapper (fn) {
|
||||
return async function (job) {
|
||||
console.log(`running ${job.name} with args`, job.data)
|
||||
try {
|
||||
await fn({ ...job, ...args })
|
||||
} catch (error) {
|
||||
console.error(`error running ${job.name}`, error)
|
||||
throw error
|
||||
}
|
||||
console.log(`finished ${job.name}`)
|
||||
}
|
||||
}
|
||||
|
||||
await boss.start()
|
||||
await boss.work('checkInvoice', checkInvoice(args))
|
||||
await boss.work('checkWithdrawal', checkWithdrawal(args))
|
||||
await boss.work('autoDropBolt11s', autoDropBolt11s(args))
|
||||
await boss.work('repin-*', repin(args))
|
||||
await boss.work('trust', trust(args))
|
||||
await boss.work('timestampItem', timestampItem(args))
|
||||
await boss.work('indexItem', indexItem(args))
|
||||
await boss.work('indexAllItems', indexAllItems(args))
|
||||
await boss.work('auction', auction(args))
|
||||
await boss.work('earn', earn(args))
|
||||
await boss.work('streak', computeStreaks(args))
|
||||
await boss.work('checkStreak', checkStreak(args))
|
||||
await boss.work('nip57', nip57(args))
|
||||
await boss.work('views', views(args))
|
||||
await boss.work('rankViews', rankViews(args))
|
||||
await boss.work('imgproxy', imgproxy(args))
|
||||
await boss.work('deleteItem', deleteItem(args))
|
||||
await boss.work('deleteUnusedImages', deleteUnusedImages(args))
|
||||
await boss.work('checkInvoice', jobWrapper(checkInvoice))
|
||||
await boss.work('checkWithdrawal', jobWrapper(checkWithdrawal))
|
||||
await boss.work('autoDropBolt11s', jobWrapper(autoDropBolt11s))
|
||||
await boss.work('repin-*', jobWrapper(repin))
|
||||
await boss.work('trust', jobWrapper(trust))
|
||||
await boss.work('timestampItem', jobWrapper(timestampItem))
|
||||
await boss.work('indexItem', jobWrapper(indexItem))
|
||||
await boss.work('indexAllItems', jobWrapper(indexAllItems))
|
||||
await boss.work('auction', jobWrapper(auction))
|
||||
await boss.work('earn', jobWrapper(earn))
|
||||
await boss.work('streak', jobWrapper(computeStreaks))
|
||||
await boss.work('checkStreak', jobWrapper(checkStreak))
|
||||
await boss.work('nip57', jobWrapper(nip57))
|
||||
await boss.work('views', jobWrapper(views))
|
||||
await boss.work('rankViews', jobWrapper(rankViews))
|
||||
await boss.work('imgproxy', jobWrapper(imgproxy))
|
||||
await boss.work('deleteItem', jobWrapper(deleteItem))
|
||||
await boss.work('deleteUnusedImages', jobWrapper(deleteUnusedImages))
|
||||
await boss.work('territoryBilling', jobWrapper(territoryBilling))
|
||||
|
||||
console.log('working jobs')
|
||||
}
|
||||
|
|
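With jobWrapper in place, each worker module exports a plain async handler and receives the pg-boss job fields spread together with the shared args, instead of being a factory that closes over them. A minimal sketch of the new calling convention, assuming args carries { models, apollo, lnd, boss } as above; exampleJob is a hypothetical handler:

// the handler receives { ...job, ...args }, so the job's name/data and the shared deps arrive in one object
export async function exampleJob ({ name, data, models, boss }) {
  console.log('running', name, 'with', data)
}

// registered the same way as the real workers:
// await boss.work('exampleJob', jobWrapper(exampleJob))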
141
worker/nostr.js
|
@ -3,84 +3,79 @@ import { Relay, signId, calculateId, getPublicKey } from 'nostr'
|
|||
|
||||
const nostrOptions = { startAfter: 5, retryLimit: 21, retryBackoff: true }
|
||||
|
||||
export function nip57 ({ boss, lnd, models }) {
|
||||
return async function ({ data: { hash } }) {
|
||||
console.log('running nip57')
|
||||
|
||||
let inv, lnInv
|
||||
try {
|
||||
lnInv = await getInvoice({ id: hash, lnd })
|
||||
inv = await models.invoice.findUnique({
|
||||
where: {
|
||||
hash
|
||||
}
|
||||
})
|
||||
} catch (err) {
|
||||
console.log(err)
|
||||
// on lnd related errors, we manually retry so we don't exponentially backoff
|
||||
await boss.send('nip57', { hash }, nostrOptions)
|
||||
return
|
||||
}
|
||||
|
||||
// check if invoice still exists since HODL invoices get deleted after usage
|
||||
if (!inv) return
|
||||
|
||||
try {
|
||||
// if parsing fails it's not a zap
|
||||
console.log('zapping', inv.desc)
|
||||
const desc = JSON.parse(inv.desc)
|
||||
const ptag = desc.tags.filter(t => t?.length >= 2 && t[0] === 'p')[0]
|
||||
const etag = desc.tags.filter(t => t?.length >= 2 && t[0] === 'e')[0]
|
||||
const atag = desc.tags.filter(t => t?.length >= 2 && t[0] === 'a')[0]
|
||||
const relays = desc.tags.find(t => t?.length >= 2 && t[0] === 'relays').slice(1)
|
||||
|
||||
const tags = [ptag]
|
||||
if (etag) tags.push(etag)
|
||||
if (atag) tags.push(atag)
|
||||
tags.push(['bolt11', lnInv.request])
|
||||
tags.push(['description', inv.desc])
|
||||
tags.push(['preimage', lnInv.secret])
|
||||
|
||||
const e = {
|
||||
kind: 9735,
|
||||
pubkey: getPublicKey(process.env.NOSTR_PRIVATE_KEY),
|
||||
created_at: Math.floor(new Date(lnInv.confirmed_at).getTime() / 1000),
|
||||
content: '',
|
||||
tags
|
||||
export async function nip57 ({ data: { hash }, boss, lnd, models }) {
|
||||
let inv, lnInv
|
||||
try {
|
||||
lnInv = await getInvoice({ id: hash, lnd })
|
||||
inv = await models.invoice.findUnique({
|
||||
where: {
|
||||
hash
|
||||
}
|
||||
e.id = await calculateId(e)
|
||||
e.sig = await signId(process.env.NOSTR_PRIVATE_KEY, e.id)
|
||||
})
|
||||
} catch (err) {
|
||||
console.log(err)
|
||||
// on lnd related errors, we manually retry so we don't exponentially backoff
|
||||
await boss.send('nip57', { hash }, nostrOptions)
|
||||
return
|
||||
}
|
||||
|
||||
console.log('zap note', e, relays)
|
||||
await Promise.allSettled(
|
||||
relays.map(r => new Promise((resolve, reject) => {
|
||||
const timeout = 1000
|
||||
const relay = Relay(r)
|
||||
// check if invoice still exists since HODL invoices get deleted after usage
|
||||
if (!inv) return
|
||||
|
||||
function timedout () {
|
||||
relay.close()
|
||||
console.log('failed to send to', r)
|
||||
reject(new Error('relay timeout'))
|
||||
}
|
||||
try {
|
||||
// if parsing fails it's not a zap
|
||||
console.log('zapping', inv.desc)
|
||||
const desc = JSON.parse(inv.desc)
|
||||
const ptag = desc.tags.filter(t => t?.length >= 2 && t[0] === 'p')[0]
|
||||
const etag = desc.tags.filter(t => t?.length >= 2 && t[0] === 'e')[0]
|
||||
const atag = desc.tags.filter(t => t?.length >= 2 && t[0] === 'a')[0]
|
||||
const relays = desc.tags.find(t => t?.length >= 2 && t[0] === 'relays').slice(1)
|
||||
|
||||
let timer = setTimeout(timedout, timeout)
|
||||
const tags = [ptag]
|
||||
if (etag) tags.push(etag)
|
||||
if (atag) tags.push(atag)
|
||||
tags.push(['bolt11', lnInv.request])
|
||||
tags.push(['description', inv.desc])
|
||||
tags.push(['preimage', lnInv.secret])
|
||||
|
||||
relay.on('open', () => {
|
||||
clearTimeout(timer)
|
||||
timer = setTimeout(timedout, timeout)
|
||||
relay.send(['EVENT', e])
|
||||
})
|
||||
|
||||
relay.on('ok', () => {
|
||||
clearTimeout(timer)
|
||||
relay.close()
|
||||
console.log('sent zap to', r)
|
||||
resolve()
|
||||
})
|
||||
})))
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
const e = {
|
||||
kind: 9735,
|
||||
pubkey: getPublicKey(process.env.NOSTR_PRIVATE_KEY),
|
||||
created_at: Math.floor(new Date(lnInv.confirmed_at).getTime() / 1000),
|
||||
content: '',
|
||||
tags
|
||||
}
|
||||
console.log('done running nip57')
|
||||
e.id = await calculateId(e)
|
||||
e.sig = await signId(process.env.NOSTR_PRIVATE_KEY, e.id)
|
||||
|
||||
console.log('zap note', e, relays)
|
||||
await Promise.allSettled(
|
||||
relays.map(r => new Promise((resolve, reject) => {
|
||||
const timeout = 1000
|
||||
const relay = Relay(r)
|
||||
|
||||
function timedout () {
|
||||
relay.close()
|
||||
console.log('failed to send to', r)
|
||||
reject(new Error('relay timeout'))
|
||||
}
|
||||
|
||||
let timer = setTimeout(timedout, timeout)
|
||||
|
||||
relay.on('open', () => {
|
||||
clearTimeout(timer)
|
||||
timer = setTimeout(timedout, timeout)
|
||||
relay.send(['EVENT', e])
|
||||
})
|
||||
|
||||
relay.on('ok', () => {
|
||||
clearTimeout(timer)
|
||||
relay.close()
|
||||
console.log('sent zap to', r)
|
||||
resolve()
|
||||
})
|
||||
})))
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,39 +12,33 @@ const ITEM_OTS_FIELDS = gql`
|
|||
url
|
||||
}`
|
||||
|
||||
export function timestampItem ({ apollo, models }) {
|
||||
return async function ({ data: { id } }) {
|
||||
console.log('timestamping item', id)
|
||||
|
||||
const { data: { item: { parentId, parentOtsHash, title, text, url } } } = await apollo.query({
|
||||
query: gql`
|
||||
export async function timestampItem ({ data: { id }, apollo, models }) {
|
||||
const { data: { item: { parentId, parentOtsHash, title, text, url } } } = await apollo.query({
|
||||
query: gql`
|
||||
${ITEM_OTS_FIELDS}
|
||||
query Item {
|
||||
item(id: ${id}) {
|
||||
...ItemOTSFields
|
||||
}
|
||||
}`
|
||||
})
|
||||
})
|
||||
|
||||
if (parentId && !parentOtsHash) {
|
||||
console.log('no parent hash available ... skipping')
|
||||
return
|
||||
}
|
||||
|
||||
// SHA256 hash item using a canonical serialization format { parentHash, title, text, url }
|
||||
const itemString = stringifyCanon({ parentHash: parentOtsHash, title, text, url })
|
||||
const otsHash = createHash('sha256').update(itemString).digest()
|
||||
const detached = Ots.DetachedTimestampFile.fromHash(new Ots.Ops.OpSHA256(), otsHash)
|
||||
|
||||
// timestamp it
|
||||
await Ots.stamp(detached)
|
||||
|
||||
// get proof
|
||||
const otsFile = Buffer.from(detached.serializeToBytes())
|
||||
|
||||
// store in item
|
||||
await models.item.update({ where: { id }, data: { otsHash: otsHash.toString('hex'), otsFile } })
|
||||
|
||||
console.log('done timestamping item', id)
|
||||
if (parentId && !parentOtsHash) {
|
||||
console.log('no parent hash available ... skipping')
|
||||
return
|
||||
}
|
||||
|
||||
// SHA256 hash item using a canonical serialization format { parentHash, title, text, url }
|
||||
const itemString = stringifyCanon({ parentHash: parentOtsHash, title, text, url })
|
||||
const otsHash = createHash('sha256').update(itemString).digest()
|
||||
const detached = Ots.DetachedTimestampFile.fromHash(new Ots.Ops.OpSHA256(), otsHash)
|
||||
|
||||
// timestamp it
|
||||
await Ots.stamp(detached)
|
||||
|
||||
// get proof
|
||||
const otsFile = Buffer.from(detached.serializeToBytes())
|
||||
|
||||
// store in item
|
||||
await models.item.update({ where: { id }, data: { otsHash: otsHash.toString('hex'), otsFile } })
|
||||
}
|
||||
|
|
|
@ -1,42 +1,38 @@
|
|||
export function repin ({ models }) {
|
||||
return async function ({ name }) {
|
||||
console.log('doing', name)
|
||||
|
||||
// get the id
|
||||
const id = name.slice('repin-'.length)
|
||||
if (id.length === 0 || isNaN(id)) {
|
||||
console.log('repin id not found in', name)
|
||||
return
|
||||
}
|
||||
|
||||
// get the latest item with this id
|
||||
const pinId = Number(id)
|
||||
const current = await models.item.findFirst(
|
||||
{
|
||||
where: {
|
||||
pinId
|
||||
},
|
||||
orderBy: {
|
||||
createdAt: 'desc'
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
if (!current) {
|
||||
console.log('could not find existing item for', name)
|
||||
return
|
||||
}
|
||||
|
||||
// create a new item with matching 1) title, text, and url and 2) setting pinId
|
||||
await models.item.create({
|
||||
data: {
|
||||
title: current.title,
|
||||
text: current.text,
|
||||
url: current.url,
|
||||
userId: current.userId,
|
||||
subName: current.subName,
|
||||
pinId
|
||||
}
|
||||
})
|
||||
export async function repin ({ name, models }) {
|
||||
// get the id
|
||||
const id = name.slice('repin-'.length)
|
||||
if (id.length === 0 || isNaN(id)) {
|
||||
console.log('repin id not found in', name)
|
||||
return
|
||||
}
|
||||
|
||||
// get the latest item with this id
|
||||
const pinId = Number(id)
|
||||
const current = await models.item.findFirst(
|
||||
{
|
||||
where: {
|
||||
pinId
|
||||
},
|
||||
orderBy: {
|
||||
createdAt: 'desc'
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
if (!current) {
|
||||
console.log('could not find existing item for pin', name)
|
||||
return
|
||||
}
|
||||
|
||||
// create a new item with matching 1) title, text, and url and 2) setting pinId
|
||||
await models.item.create({
|
||||
data: {
|
||||
title: current.title,
|
||||
text: current.text,
|
||||
url: current.url,
|
||||
userId: current.userId,
|
||||
subName: current.subName,
|
||||
pinId
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
|
@ -37,7 +37,6 @@ const ITEM_SEARCH_FIELDS = gql`
|
|||
|
||||
async function _indexItem (item, { models }) {
|
||||
console.log('indexing item', item.id)
|
||||
|
||||
// HACK: modify the title for jobs so that company/location are searchable
|
||||
// and highlighted without further modification
|
||||
const itemcp = { ...item }
|
||||
|
@ -78,38 +77,33 @@ async function _indexItem (item, { models }) {
|
|||
console.log(e)
|
||||
throw e
|
||||
}
|
||||
console.log('done indexing item', item.id)
|
||||
}
|
||||
|
||||
export function indexItem ({ apollo, models }) {
|
||||
return async function ({ data: { id } }) {
|
||||
console.log('indexing item, fetching ...', id)
|
||||
// 1. grab item from database
|
||||
// could use apollo to avoid duping logic
|
||||
// when grabbing sats and user name, etc
|
||||
const { data: { item } } = await apollo.query({
|
||||
query: gql`
|
||||
export async function indexItem ({ data: { id }, apollo, models }) {
|
||||
// 1. grab item from database
|
||||
// could use apollo to avoid duping logic
|
||||
// when grabbing sats and user name, etc
|
||||
const { data: { item } } = await apollo.query({
|
||||
query: gql`
|
||||
${ITEM_SEARCH_FIELDS}
|
||||
query Item {
|
||||
item(id: ${id}) {
|
||||
...ItemSearchFields
|
||||
}
|
||||
}`
|
||||
})
|
||||
})
|
||||
|
||||
// 2. index it with external version based on updatedAt
|
||||
await _indexItem(item, { models })
|
||||
}
|
||||
// 2. index it with external version based on updatedAt
|
||||
await _indexItem(item, { models })
|
||||
}
|
||||
|
||||
export function indexAllItems ({ apollo, models }) {
|
||||
return async function () {
|
||||
// cursor over all items in the Item table
|
||||
let items = []; let cursor = null
|
||||
do {
|
||||
// query for items
|
||||
({ data: { items: { items, cursor } } } = await apollo.query({
|
||||
query: gql`
|
||||
export async function indexAllItems ({ apollo, models }) {
|
||||
// cursor over all items in the Item table
|
||||
let items = []; let cursor = null
|
||||
do {
|
||||
// query for items
|
||||
({ data: { items: { items, cursor } } } = await apollo.query({
|
||||
query: gql`
|
||||
${ITEM_SEARCH_FIELDS}
|
||||
query AllItems($cursor: String) {
|
||||
items(cursor: $cursor, sort: "recent", limit: 1000, type: "all") {
|
||||
|
@ -119,16 +113,15 @@ export function indexAllItems ({ apollo, models }) {
|
|||
cursor
|
||||
}
|
||||
}`,
|
||||
variables: { cursor }
|
||||
}))
|
||||
variables: { cursor }
|
||||
}))
|
||||
|
||||
// for all items, index them
|
||||
try {
|
||||
items.forEach(i => _indexItem(i, { models }))
|
||||
} catch (e) {
|
||||
// ignore errors
|
||||
console.log(e)
|
||||
}
|
||||
} while (cursor)
|
||||
}
|
||||
// for all items, index them
|
||||
try {
|
||||
items.forEach(i => _indexItem(i, { models }))
|
||||
} catch (e) {
|
||||
// ignore errors
|
||||
console.log(e)
|
||||
}
|
||||
} while (cursor)
|
||||
}
|
||||
|
|
227
worker/streak.js
|
@ -3,136 +3,123 @@ import { FOUND_BLURBS, LOST_BLURBS } from '../lib/constants'
|
|||
|
||||
const STREAK_THRESHOLD = 100
|
||||
|
||||
export function computeStreaks ({ models }) {
|
||||
return async function () {
|
||||
console.log('computing streaks')
|
||||
export async function computeStreaks ({ models }) {
|
||||
// get all eligible users in the last day
|
||||
// if the user doesn't have an active streak, add one
|
||||
// if they have an active streak but didn't maintain it, end it
|
||||
const endingStreaks = await models.$queryRaw`
|
||||
WITH day_streaks (id) AS (
|
||||
SELECT "userId"
|
||||
FROM
|
||||
((SELECT "userId", floor(sum("ItemAct".msats)/1000) as sats_spent
|
||||
FROM "ItemAct"
|
||||
WHERE (created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago')::date >= (now() AT TIME ZONE 'America/Chicago' - interval '1 day')::date
|
||||
GROUP BY "userId")
|
||||
UNION ALL
|
||||
(SELECT "userId", sats as sats_spent
|
||||
FROM "Donation"
|
||||
WHERE (created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago')::date >= (now() AT TIME ZONE 'America/Chicago' - interval '1 day')::date
|
||||
)) spending
|
||||
GROUP BY "userId"
|
||||
HAVING sum(sats_spent) >= 100
|
||||
), existing_streaks (id, started_at) AS (
|
||||
SELECT "userId", "startedAt"
|
||||
FROM "Streak"
|
||||
WHERE "Streak"."endedAt" IS NULL
|
||||
), new_streaks (id) AS (
|
||||
SELECT day_streaks.id
|
||||
FROM day_streaks
|
||||
LEFT JOIN existing_streaks ON existing_streaks.id = day_streaks.id
|
||||
WHERE existing_streaks.id IS NULL
|
||||
), ending_streaks (id) AS (
|
||||
SELECT existing_streaks.id
|
||||
FROM existing_streaks
|
||||
LEFT JOIN day_streaks ON existing_streaks.id = day_streaks.id
|
||||
WHERE day_streaks.id IS NULL
|
||||
), extending_streaks (id, started_at) AS (
|
||||
SELECT existing_streaks.id, existing_streaks.started_at
|
||||
FROM existing_streaks
|
||||
JOIN day_streaks ON existing_streaks.id = day_streaks.id
|
||||
),
|
||||
-- a bunch of mutations
|
||||
streak_insert AS (
|
||||
INSERT INTO "Streak" ("userId", "startedAt", created_at, updated_at)
|
||||
SELECT id, (now() AT TIME ZONE 'America/Chicago' - interval '1 day')::date, now_utc(), now_utc()
|
||||
FROM new_streaks
|
||||
), user_update_new_streaks AS (
|
||||
UPDATE users SET streak = 1 FROM new_streaks WHERE new_streaks.id = users.id
|
||||
), user_update_end_streaks AS (
|
||||
UPDATE users SET streak = NULL FROM ending_streaks WHERE ending_streaks.id = users.id
|
||||
), user_update_extend_streaks AS (
|
||||
UPDATE users
|
||||
SET streak = (now() AT TIME ZONE 'America/Chicago')::date - extending_streaks.started_at
|
||||
FROM extending_streaks WHERE extending_streaks.id = users.id
|
||||
)
|
||||
UPDATE "Streak"
|
||||
SET "endedAt" = (now() AT TIME ZONE 'America/Chicago' - interval '1 day')::date, updated_at = now_utc()
|
||||
FROM ending_streaks
|
||||
WHERE ending_streaks.id = "Streak"."userId" AND "endedAt" IS NULL
|
||||
RETURNING "Streak".id, ending_streaks."id" AS "userId"`
|
||||
|
||||
// get all eligible users in the last day
|
||||
// if the user doesn't have an active streak, add one
|
||||
// if they have an active streak but didn't maintain it, end it
|
||||
const endingStreaks = await models.$queryRaw`
|
||||
WITH day_streaks (id) AS (
|
||||
Promise.allSettled(
|
||||
endingStreaks.map(({ id, userId }) => {
|
||||
const index = id % LOST_BLURBS.length
|
||||
const blurb = LOST_BLURBS[index]
|
||||
return sendUserNotification(userId, {
|
||||
title: 'you lost your cowboy hat',
|
||||
body: blurb,
|
||||
tag: 'STREAK-LOST'
|
||||
}).catch(console.error)
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
export async function checkStreak ({ data: { id }, models }) {
|
||||
// if user is actively streaking skip
|
||||
let streak = await models.streak.findFirst({
|
||||
where: {
|
||||
userId: Number(id),
|
||||
endedAt: null
|
||||
}
|
||||
})
|
||||
|
||||
if (streak) {
|
||||
return
|
||||
}
|
||||
|
||||
[streak] = await models.$queryRaw`
|
||||
WITH streak_started (id) AS (
|
||||
SELECT "userId"
|
||||
FROM
|
||||
((SELECT "userId", floor(sum("ItemAct".msats)/1000) as sats_spent
|
||||
FROM "ItemAct"
|
||||
WHERE (created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago')::date >= (now() AT TIME ZONE 'America/Chicago' - interval '1 day')::date
|
||||
WHERE (created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago')::date >= (now() AT TIME ZONE 'America/Chicago')::date
|
||||
AND "userId" = ${Number(id)}
|
||||
GROUP BY "userId")
|
||||
UNION ALL
|
||||
(SELECT "userId", sats as sats_spent
|
||||
FROM "Donation"
|
||||
WHERE (created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago')::date >= (now() AT TIME ZONE 'America/Chicago' - interval '1 day')::date
|
||||
WHERE (created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago')::date >= (now() AT TIME ZONE 'America/Chicago')::date
|
||||
AND "userId" = ${Number(id)}
|
||||
)) spending
|
||||
GROUP BY "userId"
|
||||
HAVING sum(sats_spent) >= 100
|
||||
), existing_streaks (id, started_at) AS (
|
||||
SELECT "userId", "startedAt"
|
||||
FROM "Streak"
|
||||
WHERE "Streak"."endedAt" IS NULL
|
||||
), new_streaks (id) AS (
|
||||
SELECT day_streaks.id
|
||||
FROM day_streaks
|
||||
LEFT JOIN existing_streaks ON existing_streaks.id = day_streaks.id
|
||||
WHERE existing_streaks.id IS NULL
|
||||
), ending_streaks (id) AS (
|
||||
SELECT existing_streaks.id
|
||||
FROM existing_streaks
|
||||
LEFT JOIN day_streaks ON existing_streaks.id = day_streaks.id
|
||||
WHERE day_streaks.id IS NULL
|
||||
), extending_streaks (id, started_at) AS (
|
||||
SELECT existing_streaks.id, existing_streaks.started_at
|
||||
FROM existing_streaks
|
||||
JOIN day_streaks ON existing_streaks.id = day_streaks.id
|
||||
),
|
||||
-- a bunch of mutations
|
||||
streak_insert AS (
|
||||
INSERT INTO "Streak" ("userId", "startedAt", created_at, updated_at)
|
||||
SELECT id, (now() AT TIME ZONE 'America/Chicago' - interval '1 day')::date, now_utc(), now_utc()
|
||||
FROM new_streaks
|
||||
), user_update_new_streaks AS (
|
||||
UPDATE users SET streak = 1 FROM new_streaks WHERE new_streaks.id = users.id
|
||||
), user_update_end_streaks AS (
|
||||
UPDATE users SET streak = NULL FROM ending_streaks WHERE ending_streaks.id = users.id
|
||||
), user_update_extend_streaks AS (
|
||||
UPDATE users
|
||||
SET streak = (now() AT TIME ZONE 'America/Chicago')::date - extending_streaks.started_at
|
||||
FROM extending_streaks WHERE extending_streaks.id = users.id
|
||||
)
|
||||
UPDATE "Streak"
|
||||
SET "endedAt" = (now() AT TIME ZONE 'America/Chicago' - interval '1 day')::date, updated_at = now_utc()
|
||||
FROM ending_streaks
|
||||
WHERE ending_streaks.id = "Streak"."userId" AND "endedAt" IS NULL
|
||||
RETURNING "Streak".id, ending_streaks."id" AS "userId"`
|
||||
|
||||
Promise.allSettled(
|
||||
endingStreaks.map(({ id, userId }) => {
|
||||
const index = id % LOST_BLURBS.length
|
||||
const blurb = LOST_BLURBS[index]
|
||||
return sendUserNotification(userId, {
|
||||
title: 'you lost your cowboy hat',
|
||||
body: blurb,
|
||||
tag: 'STREAK-LOST'
|
||||
}).catch(console.error)
|
||||
})
|
||||
GROUP BY "userId"
|
||||
HAVING sum(sats_spent) >= ${STREAK_THRESHOLD}
|
||||
), user_start_streak AS (
|
||||
UPDATE users SET streak = 0 FROM streak_started WHERE streak_started.id = users.id
|
||||
)
|
||||
INSERT INTO "Streak" ("userId", "startedAt", created_at, updated_at)
|
||||
SELECT id, (now() AT TIME ZONE 'America/Chicago')::date, now_utc(), now_utc()
|
||||
FROM streak_started
|
||||
RETURNING "Streak".id`
|
||||
|
||||
console.log('done computing streaks')
|
||||
}
|
||||
}
|
||||
|
||||
export function checkStreak ({ models }) {
|
||||
return async function ({ data: { id } }) {
|
||||
console.log('checking streak', id)
|
||||
|
||||
// if user is actively streaking skip
|
||||
let streak = await models.streak.findFirst({
|
||||
where: {
|
||||
userId: Number(id),
|
||||
endedAt: null
|
||||
}
|
||||
})
|
||||
|
||||
if (streak) {
|
||||
console.log('done checking streak', id)
|
||||
return
|
||||
}
|
||||
|
||||
[streak] = await models.$queryRaw`
|
||||
WITH streak_started (id) AS (
|
||||
SELECT "userId"
|
||||
FROM
|
||||
((SELECT "userId", floor(sum("ItemAct".msats)/1000) as sats_spent
|
||||
FROM "ItemAct"
|
||||
WHERE (created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago')::date >= (now() AT TIME ZONE 'America/Chicago')::date
|
||||
AND "userId" = ${Number(id)}
|
||||
GROUP BY "userId")
|
||||
UNION ALL
|
||||
(SELECT "userId", sats as sats_spent
|
||||
FROM "Donation"
|
||||
WHERE (created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago')::date >= (now() AT TIME ZONE 'America/Chicago')::date
|
||||
AND "userId" = ${Number(id)}
|
||||
)) spending
|
||||
GROUP BY "userId"
|
||||
HAVING sum(sats_spent) >= ${STREAK_THRESHOLD}
|
||||
), user_start_streak AS (
|
||||
UPDATE users SET streak = 0 FROM streak_started WHERE streak_started.id = users.id
|
||||
)
|
||||
INSERT INTO "Streak" ("userId", "startedAt", created_at, updated_at)
|
||||
SELECT id, (now() AT TIME ZONE 'America/Chicago')::date, now_utc(), now_utc()
|
||||
FROM streak_started
|
||||
RETURNING "Streak".id`
|
||||
|
||||
console.log('done checking streak', id)
|
||||
|
||||
if (!streak) return
|
||||
|
||||
// new streak started for user
|
||||
const index = streak.id % FOUND_BLURBS.length
|
||||
const blurb = FOUND_BLURBS[index]
|
||||
sendUserNotification(id, {
|
||||
title: 'you found a cowboy hat',
|
||||
body: blurb,
|
||||
tag: 'STREAK-FOUND'
|
||||
}).catch(console.error)
|
||||
}
|
||||
if (!streak) return
|
||||
|
||||
// new streak started for user
|
||||
const index = streak.id % FOUND_BLURBS.length
|
||||
const blurb = FOUND_BLURBS[index]
|
||||
sendUserNotification(id, {
|
||||
title: 'you found a cowboy hat',
|
||||
body: blurb,
|
||||
tag: 'STREAK-FOUND'
|
||||
}).catch(console.error)
|
||||
}
|
||||
|
|
|
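Both streak queries gate on the same daily spend: ItemAct msats floored to sats plus Donation sats must reach STREAK_THRESHOLD for the America/Chicago day. A minimal sketch of that check, assuming the per-day totals were already fetched; qualifiesForStreak is a hypothetical helper:

const STREAK_THRESHOLD = 100 // sats, matching worker/streak.js

function qualifiesForStreak (itemActMsats, donationSats) {
  const satsSpent = Math.floor(itemActMsats / 1000) + donationSats
  return satsSpent >= STREAK_THRESHOLD
}

// 99500 msats of zaps plus a 1 sat donation just clears the bar
qualifiesForStreak(99500, 1) // true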
@ -0,0 +1,30 @@
|
|||
import serialize from '../api/resolvers/serial'
|
||||
import { paySubQueries } from '../api/resolvers/sub'
|
||||
import { TERRITORY_GRACE_DAYS } from '../lib/constants'
|
||||
import { datePivot } from '../lib/time'
|
||||
|
||||
export async function territoryBilling ({ data: { subName }, boss, models }) {
|
||||
const sub = await models.sub.findUnique({
|
||||
where: {
|
||||
name: subName
|
||||
}
|
||||
})
|
||||
|
||||
try {
|
||||
const queries = paySubQueries(sub, models)
|
||||
await serialize(models, ...queries)
|
||||
} catch (e) {
|
||||
console.error(e)
|
||||
|
||||
await models.sub.update({
|
||||
where: {
|
||||
name: subName
|
||||
},
|
||||
data: {
|
||||
status: sub.billedLastAt >= datePivot(new Date(), { days: -TERRITORY_GRACE_DAYS }) ? 'GRACE' : 'STOPPED'
|
||||
}
|
||||
})
|
||||
// retry billing in one day
|
||||
await boss.send('territoryBilling', { subName }, { startAfter: datePivot(new Date(), { days: 1 }) })
|
||||
}
|
||||
}
|
|
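territoryBilling only downgrades a territory when payment fails: if the last successful billing is still inside the grace window the sub moves to GRACE, otherwise STOPPED, and the job re-queues itself a day later. A minimal sketch of the status decision, assuming billedLastAt is a Date and the same constants as above; failedBillingStatus is a hypothetical helper:

import { datePivot } from '../lib/time'
import { TERRITORY_GRACE_DAYS } from '../lib/constants'

// status for a sub whose billing attempt just failed
function failedBillingStatus (billedLastAt) {
  const graceCutoff = datePivot(new Date(), { days: -TERRITORY_GRACE_DAYS })
  return billedLastAt >= graceCutoff ? 'GRACE' : 'STOPPED'
}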
@ -1,22 +1,17 @@
|
|||
import * as math from 'mathjs'
|
||||
import { ANON_USER_ID, SN_USER_IDS } from '../lib/constants.js'
|
||||
|
||||
export function trust ({ boss, models }) {
|
||||
return async function () {
|
||||
try {
|
||||
console.time('trust')
|
||||
console.timeLog('trust', 'getting graph')
|
||||
const graph = await getGraph(models)
|
||||
console.timeLog('trust', 'computing trust')
|
||||
const [vGlobal, mPersonal] = await trustGivenGraph(graph)
|
||||
console.timeLog('trust', 'storing trust')
|
||||
await storeTrust(models, graph, vGlobal, mPersonal)
|
||||
} catch (e) {
|
||||
console.error(e)
|
||||
throw e
|
||||
} finally {
|
||||
console.timeEnd('trust')
|
||||
}
|
||||
export async function trust ({ boss, models }) {
|
||||
try {
|
||||
console.time('trust')
|
||||
console.timeLog('trust', 'getting graph')
|
||||
const graph = await getGraph(models)
|
||||
console.timeLog('trust', 'computing trust')
|
||||
const [vGlobal, mPersonal] = await trustGivenGraph(graph)
|
||||
console.timeLog('trust', 'storing trust')
|
||||
await storeTrust(models, graph, vGlobal, mPersonal)
|
||||
} finally {
|
||||
console.timeEnd('trust')
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,27 +1,15 @@
|
|||
// this is intended to be run everyday after midnight CT
|
||||
export function views ({ models }) {
|
||||
return async function () {
|
||||
console.log('refreshing stats views')
|
||||
|
||||
for (const view of ['reg_growth_days', 'spender_growth_days', 'item_growth_days',
|
||||
'spending_growth_days', 'stackers_growth_days', 'stacking_growth_days',
|
||||
'user_stats_days']) {
|
||||
await models.$queryRawUnsafe(`REFRESH MATERIALIZED VIEW CONCURRENTLY ${view}`)
|
||||
}
|
||||
|
||||
console.log('done refreshing stats views')
|
||||
export async function views ({ models }) {
|
||||
for (const view of ['reg_growth_days', 'spender_growth_days', 'item_growth_days',
|
||||
'spending_growth_days', 'stackers_growth_days', 'stacking_growth_days',
|
||||
'user_stats_days']) {
|
||||
await models.$queryRawUnsafe(`REFRESH MATERIALIZED VIEW CONCURRENTLY ${view}`)
|
||||
}
|
||||
}
|
||||
|
||||
// this should be run regularly ... like, every 5 minutes
|
||||
export function rankViews ({ models }) {
|
||||
return async function () {
|
||||
console.log('refreshing rank views')
|
||||
|
||||
for (const view of ['zap_rank_personal_view']) {
|
||||
await models.$queryRawUnsafe(`REFRESH MATERIALIZED VIEW CONCURRENTLY ${view}`)
|
||||
}
|
||||
|
||||
console.log('done refreshing rank views')
|
||||
export async function rankViews ({ models }) {
|
||||
for (const view of ['zap_rank_personal_view']) {
|
||||
await models.$queryRawUnsafe(`REFRESH MATERIALIZED VIEW CONCURRENTLY ${view}`)
|
||||
}
|
||||
}
|
||||
|
|
222
worker/wallet.js
|
@ -8,131 +8,119 @@ import { INVOICE_RETENTION_DAYS } from '../lib/constants'
|
|||
const walletOptions = { startAfter: 5, retryLimit: 21, retryBackoff: true }
|
||||
|
||||
// TODO this should all be done via websockets
|
||||
export function checkInvoice ({ boss, models, lnd }) {
|
||||
return async function ({ data: { hash, isHeldSet } }) {
|
||||
let inv
|
||||
try {
|
||||
inv = await getInvoice({ id: hash, lnd })
|
||||
} catch (err) {
|
||||
console.log(err, hash)
|
||||
export async function checkInvoice ({ data: { hash, isHeldSet }, boss, models, lnd }) {
|
||||
let inv
|
||||
try {
|
||||
inv = await getInvoice({ id: hash, lnd })
|
||||
} catch (err) {
|
||||
console.log(err, hash)
|
||||
// on lnd related errors, we manually retry so we don't exponentially backoff
|
||||
await boss.send('checkInvoice', { hash }, walletOptions)
|
||||
return
|
||||
}
|
||||
console.log(inv)
|
||||
|
||||
// check if invoice still exists since HODL invoices get deleted after usage
|
||||
const dbInv = await models.invoice.findUnique({ where: { hash } })
|
||||
if (!dbInv) return
|
||||
|
||||
const expired = new Date(inv.expires_at) <= new Date()
|
||||
|
||||
if (inv.is_confirmed && !inv.is_held) {
|
||||
// never mark hodl invoices as confirmed here because
|
||||
// we manually confirm them when we settle them
|
||||
await serialize(models,
|
||||
models.$executeRaw`SELECT confirm_invoice(${inv.id}, ${Number(inv.received_mtokens)})`)
|
||||
sendUserNotification(dbInv.userId, {
|
||||
title: `${numWithUnits(msatsToSats(inv.received_mtokens), { abbreviate: false })} were deposited in your account`,
|
||||
body: dbInv.comment || undefined,
|
||||
tag: 'DEPOSIT',
|
||||
data: { sats: msatsToSats(inv.received_mtokens) }
|
||||
}).catch(console.error)
|
||||
return boss.send('nip57', { hash })
|
||||
}
|
||||
|
||||
if (inv.is_canceled) {
|
||||
return serialize(models,
|
||||
models.invoice.update({
|
||||
where: {
|
||||
hash: inv.id
|
||||
},
|
||||
data: {
|
||||
cancelled: true
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
if (inv.is_held && !isHeldSet) {
|
||||
// this is basically confirm_invoice without setting confirmed_at since it's not settled yet
|
||||
// and without setting the user balance since that's done inside the same tx as the HODL invoice action.
|
||||
await serialize(models,
|
||||
models.invoice.update({ where: { hash }, data: { msatsReceived: Number(inv.received_mtokens), isHeld: true } }))
|
||||
// remember that we already executed this if clause
|
||||
// (even though the query above is idempotent but imo, this makes the flow more clear)
|
||||
isHeldSet = true
|
||||
}
|
||||
|
||||
if (!expired) {
|
||||
// recheck in 5 seconds if the invoice is younger than 5 minutes
|
||||
// otherwise recheck in 60 seconds
|
||||
const startAfter = new Date(inv.created_at) > datePivot(new Date(), { minutes: -5 }) ? 5 : 60
|
||||
await boss.send('checkInvoice', { hash, isHeldSet }, { ...walletOptions, startAfter })
|
||||
}
|
||||
|
||||
if (expired && inv.is_held) {
|
||||
await cancelHodlInvoice({ id: hash, lnd })
|
||||
}
|
||||
}
|
||||
|
||||
export async function checkWithdrawal ({ data: { id, hash }, boss, models, lnd }) {
|
||||
let wdrwl
|
||||
let notFound = false
|
||||
try {
|
||||
wdrwl = await getPayment({ id: hash, lnd })
|
||||
} catch (err) {
|
||||
console.log(err)
|
||||
if (err[1] === 'SentPaymentNotFound') {
|
||||
notFound = true
|
||||
} else {
|
||||
// on lnd related errors, we manually retry so we don't exponentially backoff
|
||||
await boss.send('checkInvoice', { hash }, walletOptions)
|
||||
await boss.send('checkWithdrawal', { id, hash }, walletOptions)
|
||||
return
|
||||
}
|
||||
console.log(inv)
|
||||
|
||||
// check if invoice still exists since HODL invoices get deleted after usage
|
||||
const dbInv = await models.invoice.findUnique({ where: { hash } })
|
||||
if (!dbInv) return
|
||||
|
||||
const expired = new Date(inv.expires_at) <= new Date()
|
||||
|
||||
if (inv.is_confirmed && !inv.is_held) {
|
||||
// never mark hodl invoices as confirmed here because
|
||||
// we manually confirm them when we settle them
|
||||
await serialize(models,
|
||||
models.$executeRaw`SELECT confirm_invoice(${inv.id}, ${Number(inv.received_mtokens)})`)
|
||||
sendUserNotification(dbInv.userId, {
|
||||
title: `${numWithUnits(msatsToSats(inv.received_mtokens), { abbreviate: false })} were deposited in your account`,
|
||||
body: dbInv.comment || undefined,
|
||||
tag: 'DEPOSIT',
|
||||
data: { sats: msatsToSats(inv.received_mtokens) }
|
||||
}).catch(console.error)
|
||||
return boss.send('nip57', { hash })
|
||||
}
|
||||
|
||||
if (inv.is_canceled) {
|
||||
return serialize(models,
|
||||
models.invoice.update({
|
||||
where: {
|
||||
hash: inv.id
|
||||
},
|
||||
data: {
|
||||
cancelled: true
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
if (inv.is_held && !isHeldSet) {
|
||||
// this is basically confirm_invoice without setting confirmed_at since it's not settled yet
|
||||
// and without setting the user balance since that's done inside the same tx as the HODL invoice action.
|
||||
await serialize(models,
|
||||
models.invoice.update({ where: { hash }, data: { msatsReceived: Number(inv.received_mtokens), isHeld: true } }))
|
||||
// remember that we already executed this if clause
|
||||
// (even though the query above is idempotent but imo, this makes the flow more clear)
|
||||
isHeldSet = true
|
||||
}
|
||||
|
||||
if (!expired) {
|
||||
// recheck in 5 seconds if the invoice is younger than 5 minutes
|
||||
// otherwise recheck in 60 seconds
|
||||
const startAfter = new Date(inv.created_at) > datePivot(new Date(), { minutes: -5 }) ? 5 : 60
|
||||
await boss.send('checkInvoice', { hash, isHeldSet }, { ...walletOptions, startAfter })
|
||||
}
|
||||
|
||||
if (expired && inv.is_held) {
|
||||
await cancelHodlInvoice({ id: hash, lnd })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function checkWithdrawal ({ boss, models, lnd }) {
|
||||
return async function ({ data: { id, hash } }) {
|
||||
let wdrwl
|
||||
let notFound = false
|
||||
try {
|
||||
wdrwl = await getPayment({ id: hash, lnd })
|
||||
} catch (err) {
|
||||
console.log(err)
|
||||
if (err[1] === 'SentPaymentNotFound') {
|
||||
notFound = true
|
||||
} else {
|
||||
// on lnd related errors, we manually retry so we don't exponentially backoff
|
||||
await boss.send('checkWithdrawal', { id, hash }, walletOptions)
|
||||
return
|
||||
}
|
||||
}
|
||||
console.log(wdrwl)
|
||||
|
||||
if (wdrwl?.is_confirmed) {
|
||||
const fee = Number(wdrwl.payment.fee_mtokens)
|
||||
const paid = Number(wdrwl.payment.mtokens) - fee
|
||||
await serialize(models, models.$executeRaw`
|
||||
if (wdrwl?.is_confirmed) {
|
||||
const fee = Number(wdrwl.payment.fee_mtokens)
|
||||
const paid = Number(wdrwl.payment.mtokens) - fee
|
||||
await serialize(models, models.$executeRaw`
|
||||
SELECT confirm_withdrawl(${id}::INTEGER, ${paid}, ${fee})`)
|
||||
} else if (wdrwl?.is_failed || notFound) {
|
||||
let status = 'UNKNOWN_FAILURE'
|
||||
if (wdrwl?.failed.is_insufficient_balance) {
|
||||
status = 'INSUFFICIENT_BALANCE'
|
||||
} else if (wdrwl?.failed.is_invalid_payment) {
|
||||
status = 'INVALID_PAYMENT'
|
||||
} else if (wdrwl?.failed.is_pathfinding_timeout) {
|
||||
status = 'PATHFINDING_TIMEOUT'
|
||||
} else if (wdrwl?.failed.is_route_not_found) {
|
||||
status = 'ROUTE_NOT_FOUND'
|
||||
}
|
||||
await serialize(models, models.$executeRaw`
|
||||
SELECT reverse_withdrawl(${id}::INTEGER, ${status}::"WithdrawlStatus")`)
|
||||
} else {
|
||||
// we need to requeue to check again in 5 seconds
|
||||
const startAfter = new Date(wdrwl.created_at) > datePivot(new Date(), { minutes: -5 }) ? 5 : 60
|
||||
await boss.send('checkWithdrawal', { id, hash }, { ...walletOptions, startAfter })
|
||||
} else if (wdrwl?.is_failed || notFound) {
|
||||
let status = 'UNKNOWN_FAILURE'
|
||||
if (wdrwl?.failed.is_insufficient_balance) {
|
||||
status = 'INSUFFICIENT_BALANCE'
|
||||
} else if (wdrwl?.failed.is_invalid_payment) {
|
||||
status = 'INVALID_PAYMENT'
|
||||
} else if (wdrwl?.failed.is_pathfinding_timeout) {
|
||||
status = 'PATHFINDING_TIMEOUT'
|
||||
} else if (wdrwl?.failed.is_route_not_found) {
|
||||
status = 'ROUTE_NOT_FOUND'
|
||||
}
|
||||
await serialize(models, models.$executeRaw`
|
||||
SELECT reverse_withdrawl(${id}::INTEGER, ${status}::"WithdrawlStatus")`)
|
||||
} else {
|
||||
// we need to requeue to check again in 5 seconds
|
||||
const startAfter = new Date(wdrwl.created_at) > datePivot(new Date(), { minutes: -5 }) ? 5 : 60
|
||||
await boss.send('checkWithdrawal', { id, hash }, { ...walletOptions, startAfter })
|
||||
}
|
||||
}
|
||||
|
||||
export function autoDropBolt11s ({ models }) {
|
||||
return async function () {
|
||||
console.log('deleting invoices')
|
||||
try {
|
||||
await serialize(models, models.$executeRaw`
|
||||
UPDATE "Withdrawl"
|
||||
SET hash = NULL, bolt11 = NULL
|
||||
WHERE "userId" IN (SELECT id FROM users WHERE "autoDropBolt11s")
|
||||
AND now() > created_at + interval '${INVOICE_RETENTION_DAYS} days'
|
||||
AND hash IS NOT NULL;`
|
||||
)
|
||||
} catch (err) {
|
||||
console.log(err)
|
||||
}
|
||||
}
|
||||
export async function autoDropBolt11s ({ models }) {
|
||||
await serialize(models, models.$executeRaw`
|
||||
UPDATE "Withdrawl"
|
||||
SET hash = NULL, bolt11 = NULL
|
||||
WHERE "userId" IN (SELECT id FROM users WHERE "autoDropBolt11s")
|
||||
AND now() > created_at + interval '${INVOICE_RETENTION_DAYS} days'
|
||||
AND hash IS NOT NULL;`
|
||||
)
|
||||
}
|
||||
|
|
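checkInvoice and checkWithdrawal poll lnd instead of waiting on it: an unexpired invoice is rechecked every 5 seconds for its first 5 minutes, then every 60 seconds. A minimal sketch of that cadence, assuming created_at is the lnd creation timestamp; recheckDelay is a hypothetical helper:

import { datePivot } from '../lib/time'

// pg-boss startAfter delay (seconds) until the next recheck
function recheckDelay (createdAt) {
  return new Date(createdAt) > datePivot(new Date(), { minutes: -5 }) ? 5 : 60
}

// await boss.send('checkInvoice', { hash, isHeldSet }, { ...walletOptions, startAfter: recheckDelay(inv.created_at) })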