Compare commits
27 Commits
a9a566a79f...29b3f6008e
Author | SHA1 | Date |
---|---|---|
k00b | 29b3f6008e | |
k00b | adcb80782b | |
k00b | 449568e3a2 | |
k00b | 8c0cafa3ec | |
k00b | 67498fbc87 | |
k00b | f8d88d18f8 | |
k00b | 651053fd71 | |
k00b | ebe513b5ca | |
Keyan | fec7c92fd9 | |
k00b | 4532e00085 | |
Keyan | 6b1f3ba8ef | |
k00b | a916533826 | |
k00b | 070b350211 | |
k00b | 154c0e0a4a | |
k00b | 177e0f6bb0 | |
k00b | 153455983e | |
Keyan | 5543a0755a | |
k00b | 00bcd8c992 | |
Keyan | a01590e321 | |
k00b | dff452f00f | |
Keyan | f4382ad73e | |
Keyan | 5f1d3dbde4 | |
k00b | 65a7ef10d0 | |
k00b | 5fab3abb82 | |
toyota-corolla0 | c400a6c1c6 | |
Keyan | 4ce395889d | |
toyota-corolla0 | 56809d6389 | |
@@ -11,7 +11,7 @@ jobs:
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: "18.17.0"
node-version: "18.20.4"

- name: Install
run: npm install

@@ -11,7 +11,7 @@ jobs:
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: "18.17.0"
node-version: "18.20.4"

- name: Install
run: npm install

@@ -1,6 +1,6 @@
# syntax=docker/dockerfile:1

FROM node:18.17.0-bullseye
FROM node:18.20.4-bullseye

ENV NODE_ENV=development

@@ -429,7 +429,7 @@ GITHUB_SECRET=<Client secret>
## Enabling web push notifications

To enable Web Push locally, you will need to set the `VAPID_*` env vars. `VAPID_MAILTO` needs to be an email address using the `mailto:` scheme. For `NEXT_PUBLIC_VAPID_KEY` and `VAPID_PRIVKEY`, you can run `npx web-push generate-vapid-keys`.
To enable Web Push locally, you will need to set the `VAPID_*` env vars. `VAPID_MAILTO` needs to be an email address using the `mailto:` scheme. For `NEXT_PUBLIC_VAPID_PUBKEY` and `VAPID_PRIVKEY`, you can run `npx web-push generate-vapid-keys`.

<br>
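For reference (not part of the diff above): the same `web-push` package that `npx web-push generate-vapid-keys` runs also exposes key generation programmatically. A minimal sketch, assuming the standard `web-push` API; the mailto address is a placeholder:

```js
// sketch: generate the VAPID keys that the README's env vars expect
import webpush from 'web-push'

const { publicKey, privateKey } = webpush.generateVAPIDKeys()

// copy these into your environment (variable names taken from the README above)
console.log(`NEXT_PUBLIC_VAPID_PUBKEY=${publicKey}`)
console.log(`VAPID_PRIVKEY=${privateKey}`)
console.log('VAPID_MAILTO=mailto:you@example.com') // placeholder address
```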
@@ -1,14 +1,15 @@
import { cachedFetcher } from '@/lib/fetch'
import { toPositiveNumber } from '@/lib/validate'
import lndService from 'ln-service'
import { authenticatedLndGrpc, getIdentity, getHeight, getWalletInfo, getNode } from 'ln-service'

const { lnd } = lndService.authenticatedLndGrpc({
const { lnd } = authenticatedLndGrpc({
cert: process.env.LND_CERT,
macaroon: process.env.LND_MACAROON,
socket: process.env.LND_SOCKET
})

// Check LND GRPC connection
lndService.getWalletInfo({ lnd }, (err, result) => {
getWalletInfo({ lnd }, (err, result) => {
if (err) {
console.error('LND GRPC connection error')
return

@@ -80,16 +81,75 @@ export function getPaymentFailureStatus (withdrawal) {
}

if (withdrawal?.failed.is_insufficient_balance) {
return 'INSUFFICIENT_BALANCE'
return {
status: 'INSUFFICIENT_BALANCE',
message: 'you didn\'t have enough sats'
}
} else if (withdrawal?.failed.is_invalid_payment) {
return 'INVALID_PAYMENT'
return {
status: 'INVALID_PAYMENT',
message: 'invalid payment'
}
} else if (withdrawal?.failed.is_pathfinding_timeout) {
return 'PATHFINDING_TIMEOUT'
return {
status: 'PATHFINDING_TIMEOUT',
message: 'no route found'
}
} else if (withdrawal?.failed.is_route_not_found) {
return 'ROUTE_NOT_FOUND'
return {
status: 'ROUTE_NOT_FOUND',
message: 'no route found'
}
}

return 'UNKNOWN_FAILURE'
return {
status: 'UNKNOWN_FAILURE',
message: 'unknown failure'
}
}

export const getBlockHeight = cachedFetcher(async function fetchBlockHeight ({ lnd, ...args }) {
try {
const { current_block_height: height } = await getHeight({ lnd, ...args })
return height
} catch (err) {
throw new Error(`Unable to fetch block height: ${err.message}`)
}
}, {
maxSize: 1,
cacheExpiry: 60 * 1000, // 1 minute
forceRefreshThreshold: 5 * 60 * 1000, // 5 minutes
keyGenerator: () => 'getHeight'
})

export const getOurPubkey = cachedFetcher(async function fetchOurPubkey ({ lnd, ...args }) {
try {
const { identity } = await getIdentity({ lnd, ...args })
return identity.public_key
} catch (err) {
throw new Error(`Unable to fetch identity: ${err.message}`)
}
}, {
maxSize: 1,
cacheExpiry: 0, // never expire
forceRefreshThreshold: 0, // never force refresh
keyGenerator: () => 'getOurPubkey'
})

export const getNodeSockets = cachedFetcher(async function fetchNodeSockets ({ lnd, ...args }) {
try {
return (await getNode({ lnd, is_omitting_channels: true, ...args }))?.sockets
} catch (err) {
throw new Error(`Unable to fetch node info: ${err.message}`)
}
}, {
maxSize: 100,
cacheExpiry: 1000 * 60 * 60 * 24, // 1 day
forceRefreshThreshold: 1000 * 60 * 60 * 24 * 7, // 1 week
keyGenerator: (args) => {
const { public_key: publicKey } = args
return publicKey
}
})

export default lnd
@@ -154,7 +154,11 @@ All functions have the following signature: `function(args: Object, context: Obj
- it can optionally store in the invoice with the `invoiceId` the `actionId` to be able to link the action with the invoice regardless of retries
- `onPaid`: called when the action is paid
- if the action does not support optimism, this function is optional
- this function should be used to mark the rows created in `perform` as `PAID` and perform any other side effects of the action (like notifications or denormalizations)
- this function should be used to mark the rows created in `perform` as `PAID` and perform critical side effects of the action (like denormalizations)
- `nonCriticalSideEffects`: called after the action is paid to run any side effects whose failure does not affect the action's execution
- this function is always optional
- it's passed the result of the action (or the action's paid invoice) and the current context
- this is where things like push notifications should be handled
- `onFail`: called when the action fails
- if the action does not support optimism, this function is optional
- this function should be used to mark the rows created in `perform` as `FAILED`
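To make the interface documented above concrete, here is a minimal, hypothetical paid-action module following those function names; the `thing` model, its fields, and the `PENDING` status are illustrative placeholders, not the real schema:

```js
// hypothetical paid-action sketch — model and field names are placeholders
export const anonable = false

export async function getCost (args, context) {
  return 1000n // cost in msats
}

export async function perform (args, { me, tx }) {
  // create rows inside the payment transaction; assumed PENDING until paid
  return await tx.thing.create({ data: { ...args, userId: me.id, status: 'PENDING' } })
}

export async function onPaid ({ invoice }, { tx }) {
  // critical side effects only (e.g. denormalizations), still inside the transaction
  await tx.thing.updateMany({ where: { invoiceId: invoice.id }, data: { status: 'PAID' } })
}

export async function nonCriticalSideEffects ({ invoice }, { models }) {
  // runs after the transaction commits; a failure here does not fail the action
  // e.g. push notifications
}

export async function onFail ({ invoice }, { tx }) {
  await tx.thing.updateMany({ where: { invoiceId: invoice.id }, data: { status: 'FAILED' } })
}
```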
@@ -38,7 +38,7 @@ export async function retry ({ invoiceId, newInvoiceId }, { tx, cost }) {
return { id, sats: msatsToSats(cost), act: 'BOOST', path }
}

export async function onPaid ({ invoice, actId }, { models, tx }) {
export async function onPaid ({ invoice, actId }, { tx }) {
let itemAct
if (invoice) {
await tx.itemAct.updateMany({

@@ -99,7 +99,7 @@ async function performFeeCreditAction (actionType, args, context) {
const { me, models, cost } = context
const action = paidActions[actionType]

return await models.$transaction(async tx => {
const result = await models.$transaction(async tx => {
context.tx = tx

await tx.user.update({

@@ -121,6 +121,11 @@ async function performFeeCreditAction (actionType, args, context) {
paymentMethod: 'FEE_CREDIT'
}
}, { isolationLevel: Prisma.TransactionIsolationLevel.ReadCommitted })

// run non critical side effects in the background
// after the transaction has been committed
action.nonCriticalSideEffects?.(result.result, context).catch(console.error)
return result
}

async function performOptimisticAction (actionType, args, context) {

@@ -309,7 +314,7 @@ async function createDbInvoice (actionType, args, context,
const invoiceData = {
hash: servedInvoice.id,
msatsRequested: BigInt(servedInvoice.mtokens),
preimage: optimistic ? undefined : preimage,
preimage,
bolt11: servedBolt11,
userId: me?.id ?? USER_ID.anon,
actionType,
@@ -154,15 +154,13 @@ export async function retry ({ invoiceId, newInvoiceId }, { tx }) {
}

export async function onPaid ({ invoice, id }, context) {
const { models, tx } = context
const { tx } = context
let item

if (invoice) {
item = await tx.item.findFirst({
where: { invoiceId: invoice.id },
include: {
mentions: true,
itemReferrers: { include: { refereeItem: true } },
user: true
}
})

@@ -173,8 +171,6 @@ export async function onPaid ({ invoice, id }, context) {
item = await tx.item.findUnique({
where: { id },
include: {
mentions: true,
itemReferrers: { include: { refereeItem: true } },
user: true,
itemUploads: { include: { upload: true } }
}

@@ -224,17 +220,30 @@ export async function onPaid ({ invoice, id }, context) {
SELECT comment.created_at, comment.updated_at, ancestors.id, ancestors."userId",
comment.id, comment."userId", nlevel(comment.path) - nlevel(ancestors.path)
FROM ancestors, comment`
}
}

export async function nonCriticalSideEffects ({ invoice, id }, { models }) {
const item = await models.item.findFirst({
where: invoice ? { invoiceId: invoice.id } : { id: parseInt(id) },
include: {
mentions: true,
itemReferrers: { include: { refereeItem: true } },
user: true
}
})

if (item.parentId) {
notifyItemParents({ item, models }).catch(console.error)
}

for (const { userId } of item.mentions) {
notifyMention({ models, item, userId }).catch(console.error)
}
for (const { refereeItem } of item.itemReferrers) {
notifyItemMention({ models, referrerItem: item, refereeItem }).catch(console.error)
}
notifyUserSubscribers({ models: tx, item }).catch(console.error)

notifyUserSubscribers({ models, item }).catch(console.error)
notifyTerritorySubscribers({ models, item }).catch(console.error)
}
@@ -8,7 +8,7 @@ export const anonable = true
export const supportsPessimism = true
export const supportsOptimism = false

export async function getCost ({ id, boost = 0, uploadIds }, { me, models }) {
export async function getCost ({ id, boost = 0, uploadIds, bio }, { me, models }) {
// the only reason updating items costs anything is when it has new uploads
// or more boost
const old = await models.item.findUnique({ where: { id: parseInt(id) } })

@@ -24,7 +24,7 @@ export async function getCost ({ id, boost = 0, uploadIds }, { me, models }) {

export async function perform (args, context) {
const { id, boost = 0, uploadIds = [], options: pollOptions = [], forwardUsers: itemForwards = [], ...data } = args
const { tx, me, models } = context
const { tx, me } = context
const old = await tx.item.findUnique({
where: { id: parseInt(id) },
include: {

@@ -63,12 +63,8 @@ export async function perform (args, context) {

// we put boost in the where clause because we don't want to update the boost
// if it has changed concurrently
const item = await tx.item.update({
await tx.item.update({
where: { id: parseInt(id), boost: old.boost },
include: {
mentions: true,
itemReferrers: { include: { refereeItem: true } }
},
data: {
...data,
boost: {

@@ -151,6 +147,21 @@ export async function perform (args, context) {

await performBotBehavior(args, context)

// ltree is unsupported in Prisma, so we have to query it manually (FUCK!)
return (await tx.$queryRaw`
SELECT *, ltree2text(path) AS path, created_at AS "createdAt", updated_at AS "updatedAt"
FROM "Item" WHERE id = ${parseInt(id)}::INTEGER`
)[0]
}

export async function nonCriticalSideEffects ({ invoice, id }, { models }) {
const item = await models.item.findFirst({
where: invoice ? { invoiceId: invoice.id } : { id: parseInt(id) },
include: {
mentions: true,
itemReferrers: { include: { refereeItem: true } }
}
})
// compare timestamps to only notify if mention or item referral was just created to avoid duplicates on edits
for (const { userId, createdAt } of item.mentions) {
if (item.updatedAt.getTime() !== createdAt.getTime()) continue

@@ -160,12 +171,6 @@ export async function perform (args, context) {
if (item.updatedAt.getTime() !== createdAt.getTime()) continue
notifyItemMention({ models, referrerItem: item, refereeItem }).catch(console.error)
}

// ltree is unsupported in Prisma, so we have to query it manually (FUCK!)
return (await tx.$queryRaw`
SELECT *, ltree2text(path) AS path, created_at AS "createdAt", updated_at AS "updatedAt"
FROM "Item" WHERE id = ${parseInt(id)}::INTEGER`
)[0]
}

export async function describe ({ id, parentId }, context) {
@@ -64,7 +64,7 @@ export async function retry ({ invoiceId, newInvoiceId }, { tx, cost }) {
return { id, sats: msatsToSats(cost), act: 'TIP', path }
}

export async function onPaid ({ invoice, actIds }, { models, tx }) {
export async function onPaid ({ invoice, actIds }, { tx }) {
let acts
if (invoice) {
await tx.itemAct.updateMany({

@@ -114,7 +114,7 @@ export async function onPaid ({ invoice, actIds }, { models, tx }) {

// perform denomormalized aggregates: weighted votes, upvotes, msats, lastZapAt
// NOTE: for the rows that might be updated by a concurrent zap, we use UPDATE for implicit locking
const [item] = await tx.$queryRaw`
await tx.$queryRaw`
WITH zapper AS (
SELECT trust FROM users WHERE id = ${itemAct.userId}::INTEGER
), zap AS (

@@ -163,8 +163,14 @@ export async function onPaid ({ invoice, actIds }, { models, tx }) {
SET "commentMsats" = "Item"."commentMsats" + ${msats}::BIGINT
FROM zapped
WHERE "Item".path @> zapped.path AND "Item".id <> zapped.id`
}

notifyZapped({ models, item }).catch(console.error)
export async function nonCriticalSideEffects ({ invoice, actIds }, { models }) {
const itemAct = await models.itemAct.findFirst({
where: invoice ? { invoiceId: invoice.id } : { id: { in: actIds } },
include: { item: true }
})
notifyZapped({ models, item: itemAct.item }).catch(console.error)
}

export async function onFail ({ invoice }, { tx }) {
@@ -1,39 +1,27 @@
import lndService from 'ln-service'
import lnd from '@/api/lnd'
import { isServiceEnabled } from '@/lib/sndev'
import { cachedFetcher } from '@/lib/fetch'
import { getHeight } from 'ln-service'

const cache = new Map()
const expiresIn = 1000 * 30 // 30 seconds in milliseconds

async function fetchBlockHeight () {
let blockHeight = 0
if (!isServiceEnabled('payments')) return blockHeight
const getBlockHeight = cachedFetcher(async function fetchBlockHeight ({ lnd }) {
try {
const height = await lndService.getHeight({ lnd })
blockHeight = height.current_block_height
cache.set('block', { height: blockHeight, createdAt: Date.now() })
const { current_block_height: height } = await getHeight({ lnd })
return height
} catch (err) {
console.error('fetchBlockHeight', err)
console.error('getBlockHeight', err)
return 0
}
return blockHeight
}

async function getBlockHeight () {
if (cache.has('block')) {
const { height, createdAt } = cache.get('block')
const expired = createdAt + expiresIn < Date.now()
if (expired) fetchBlockHeight().catch(console.error) // update cache
return height // serve stale block height (this on the SSR critical path)
} else {
fetchBlockHeight().catch(console.error)
}
return 0
}
}, {
maxSize: 1,
cacheExpiry: 60 * 1000, // 1 minute
forceRefreshThreshold: 0,
keyGenerator: () => 'getBlockHeight'
})

export default {
Query: {
blockHeight: async (parent, opts, ctx) => {
return await getBlockHeight()
blockHeight: async (parent, opts, { lnd }) => {
if (!isServiceEnabled('payments')) return 0
return await getBlockHeight({ lnd }) || 0
}
}
}
@@ -1,36 +1,26 @@
const cache = new Map()
const expiresIn = 1000 * 30 // 30 seconds in milliseconds
import { cachedFetcher } from '@/lib/fetch'

async function fetchChainFeeRate () {
const getChainFeeRate = cachedFetcher(async function fetchChainFeeRate () {
const url = 'https://mempool.space/api/v1/fees/recommended'
const chainFee = await fetch(url)
.then((res) => res.json())
.then((body) => body.hourFee)
.catch((err) => {
console.error('fetchChainFee', err)
return 0
})

cache.set('fee', { fee: chainFee, createdAt: Date.now() })
return chainFee
}

async function getChainFeeRate () {
if (cache.has('fee')) {
const { fee, createdAt } = cache.get('fee')
const expired = createdAt + expiresIn < Date.now()
if (expired) fetchChainFeeRate().catch(console.error) // update cache
return fee
} else {
fetchChainFeeRate().catch(console.error)
try {
const res = await fetch(url)
const body = await res.json()
return body.hourFee
} catch (err) {
console.error('fetchChainFee', err)
return 0
}
return 0
}
}, {
maxSize: 1,
cacheExpiry: 60 * 1000, // 1 minute
forceRefreshThreshold: 0, // never force refresh
keyGenerator: () => 'getChainFeeRate'
})

export default {
Query: {
chainFee: async (parent, opts, ctx) => {
return await getChainFeeRate()
return await getChainFeeRate() || 0
}
}
}
@@ -19,7 +19,6 @@ import chainFee from './chainFee'
import { GraphQLScalarType, Kind } from 'graphql'
import { createIntScalar } from 'graphql-scalar'
import paidAction from './paidAction'
import vault from './vault'

const date = new GraphQLScalarType({
name: 'Date',

@@ -56,4 +55,4 @@ const limit = createIntScalar({

export default [user, item, message, wallet, lnurl, notifications, invite, sub,
upload, search, growth, rewards, referrals, price, admin, blockHeight, chainFee,
{ JSONObject }, { Date: date }, { Limit: limit }, paidAction, vault]
{ JSONObject }, { Date: date }, { Limit: limit }, paidAction]
@@ -685,11 +685,7 @@ export default {

return await models.item.count({ where }) + 1
},
boostPosition: async (parent, { id, sub, boost }, { models, me }) => {
if (boost <= 0) {
throw new GqlInputError('boost must be greater than 0')
}

boostPosition: async (parent, { id, sub, boost = 0 }, { models, me }) => {
const where = {
boost: { gte: boost },
status: 'ACTIVE',

@@ -701,9 +697,29 @@ export default {
where.id = { not: Number(id) }
}

const homeAgg = await models.item.aggregate({
_count: { id: true },
_max: { boost: true },
where
})

let subAgg
if (sub) {
subAgg = await models.item.aggregate({
_count: { id: true },
_max: { boost: true },
where: {
...where,
subName: sub
}
})
}

return {
home: await models.item.count({ where }) === 0,
sub: sub ? await models.item.count({ where: { ...where, subName: sub } }) === 0 : false
home: homeAgg._count.id === 0 && boost >= BOOST_MULT,
sub: subAgg?._count.id === 0 && boost >= BOOST_MULT,
homeMaxBoost: homeAgg._max.boost || 0,
subMaxBoost: subAgg?._max.boost || 0
}
}
},
@@ -1,36 +1,27 @@
const cache = new Map()
const expiresIn = 30000 // in milliseconds
import { SUPPORTED_CURRENCIES } from '@/lib/currency'
import { cachedFetcher } from '@/lib/fetch'

async function fetchPrice (fiat) {
const getPrice = cachedFetcher(async function fetchPrice (fiat = 'USD') {
const url = `https://api.coinbase.com/v2/prices/BTC-${fiat}/spot`
const price = await fetch(url)
.then((res) => res.json())
.then((body) => parseFloat(body.data.amount))
.catch((err) => {
console.error(err)
return -1
})
cache.set(fiat, { price, createdAt: Date.now() })
return price
}

async function getPrice (fiat) {
fiat ??= 'USD'
if (cache.has(fiat)) {
const { price, createdAt } = cache.get(fiat)
const expired = createdAt + expiresIn < Date.now()
if (expired) fetchPrice(fiat).catch(console.error) // update cache
return price // serve stale price (this on the SSR critical path)
} else {
fetchPrice(fiat).catch(console.error)
try {
const res = await fetch(url)
const body = await res.json()
return parseFloat(body.data.amount)
} catch (err) {
console.error(err)
return -1
}
return null
}
}, {
maxSize: SUPPORTED_CURRENCIES.length,
cacheExpiry: 60 * 1000, // 1 minute
forceRefreshThreshold: 0, // never force refresh
keyGenerator: (fiat = 'USD') => fiat
})

export default {
Query: {
price: async (parent, { fiatCurrency }, ctx) => {
return await getPrice(fiatCurrency)
return await getPrice(fiatCurrency) || -1
}
}
}
@@ -49,10 +49,7 @@ export default {
}

const upload = await models.upload.create({ data: { ...fileParams } })

const extension = type.split('/')[1]
const key = `${upload.id}.${extension}`
return createPresignedPost({ key, type, size })
return createPresignedPost({ key: String(upload.id), type, size })
}
}
}
@@ -691,22 +691,20 @@ export default {

return Number(photoId)
},
upsertBio: async (parent, { bio }, { me, models }) => {
upsertBio: async (parent, { text }, { me, models, lnd }) => {
if (!me) {
throw new GqlAuthenticationError()
}

await ssValidate(bioSchema, { bio })
await ssValidate(bioSchema, { text })

const user = await models.user.findUnique({ where: { id: me.id } })

if (user.bioId) {
await updateItem(parent, { id: user.bioId, text: bio, title: `@${user.name}'s bio` }, { me, models })
return await updateItem(parent, { id: user.bioId, bio: true, text, title: `@${user.name}'s bio` }, { me, models, lnd })
} else {
await createItem(parent, { bio: true, text: bio, title: `@${user.name}'s bio` }, { me, models })
return await createItem(parent, { bio: true, text, title: `@${user.name}'s bio` }, { me, models, lnd })
}

return await models.user.findUnique({ where: { id: me.id } })
},
generateApiKey: async (parent, { id }, { models, me }) => {
if (!me) {
@@ -1,115 +0,0 @@
import { E_VAULT_KEY_EXISTS, GqlAuthenticationError, GqlInputError } from '@/lib/error'

export default {
Query: {
getVaultEntry: async (parent, { key }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
}
if (!key) {
throw new GqlInputError('must have key')
}
const k = await models.vault.findUnique({
where: {
userId_key: {
key,
userId: me.id
}
}
})
return k
}
},

Mutation: {
setVaultKeyHash: async (parent, { hash }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
}
if (!hash) {
throw new GqlInputError('hash required')
}
const { vaultKeyHash: oldKeyHash } = await models.user.findUnique({ where: { id: me.id } })
if (oldKeyHash) {
if (oldKeyHash !== hash) {
throw new GqlInputError('vault key already set', E_VAULT_KEY_EXISTS)
} else {
return true
}
} else {
await models.user.update({
where: { id: me.id },
data: { vaultKeyHash: hash }
})
}
return true
},
setVaultEntry: async (parent, { key, value, skipIfSet }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
}
if (!key) {
throw new GqlInputError('must have key')
}
if (!value) {
throw new GqlInputError('must have value')
}
if (skipIfSet) {
const existing = await models.vault.findUnique({
where: {
userId_key: {
userId: me.id,
key
}
}
})
if (existing) {
return false
}
}
await models.vault.upsert({
where: {
userId_key: {
userId: me.id,
key
}
},
update: {
value
},
create: {
key,
value,
userId: me.id
}
})
return true
},
unsetVaultEntry: async (parent, { key }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
}
if (!key) {
throw new GqlInputError('must have key')
}
await models.vault.deleteMany({
where: {
userId: me.id,
key
}
})
return true
},
clearVault: async (parent, args, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
}
await models.user.update({
where: { id: me.id },
data: { vaultKeyHash: '' }
})
await models.vault.deleteMany({ where: { userId: me.id } })
return true
}
}
}
@@ -1,10 +1,17 @@
import { createHodlInvoice, createInvoice, decodePaymentRequest, payViaPaymentRequest, getInvoice as getInvoiceFromLnd, getNode, deletePayment, getPayment, getIdentity } from 'ln-service'
import {
createHodlInvoice, createInvoice, payViaPaymentRequest,
getInvoice as getInvoiceFromLnd, deletePayment, getPayment,
parsePaymentRequest
} from 'ln-service'
import crypto, { timingSafeEqual } from 'crypto'
import serialize from './serial'
import { decodeCursor, LIMIT, nextCursorEncoded } from '@/lib/cursor'
import { SELECT, itemQueryWithMeta } from './item'
import { msatsToSats, msatsToSatsDecimal } from '@/lib/format'
import { ANON_BALANCE_LIMIT_MSATS, ANON_INV_PENDING_LIMIT, USER_ID, BALANCE_LIMIT_MSATS, INVOICE_RETENTION_DAYS, INV_PENDING_LIMIT, USER_IDS_BALANCE_NO_LIMIT, LND_PATHFINDING_TIMEOUT_MS } from '@/lib/constants'
import {
ANON_BALANCE_LIMIT_MSATS, ANON_INV_PENDING_LIMIT, USER_ID, BALANCE_LIMIT_MSATS,
INVOICE_RETENTION_DAYS, INV_PENDING_LIMIT, USER_IDS_BALANCE_NO_LIMIT, LND_PATHFINDING_TIMEOUT_MS
} from '@/lib/constants'
import { amountSchema, ssValidate, withdrawlSchema, lnAddrSchema, walletValidate } from '@/lib/validate'
import { datePivot } from '@/lib/time'
import assertGofacYourself from './ofac'

@@ -15,6 +22,7 @@ import walletDefs from 'wallets/server'
import { generateResolverName, generateTypeDefName } from '@/lib/wallet'
import { lnAddrOptions } from '@/lib/lnurl'
import { GqlAuthenticationError, GqlAuthorizationError, GqlInputError } from '@/lib/error'
import { getNodeSockets, getOurPubkey } from '../lnd'

function injectResolvers (resolvers) {
console.group('injected GraphQL resolvers:')

@@ -74,7 +82,7 @@ export async function getInvoice (parent, { id }, { me, models, lnd }) {

try {
if (inv.confirmedAt) {
inv.confirmedPreimage = (await getInvoiceFromLnd({ id: inv.hash, lnd })).secret
inv.confirmedPreimage = inv.preimage ?? (await getInvoiceFromLnd({ id: inv.hash, lnd })).secret
}
} catch (err) {
console.error('error fetching invoice from LND', err)

@@ -343,20 +351,58 @@ const resolvers = {
facts: history
}
},
walletLogs: async (parent, args, { me, models }) => {
walletLogs: async (parent, { type, from, to, cursor }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
}

return await models.walletLog.findMany({
where: {
userId: me.id
},
orderBy: [
{ createdAt: 'desc' },
{ id: 'desc' }
]
})
// we cursoring with the wallet logs on the client
// if we have from, don't use cursor
// regardless, store the state of the cursor for the next call

const decodedCursor = cursor ? decodeCursor(cursor) : { offset: 0, time: to ?? new Date() }

let logs = []
let nextCursor
if (from) {
logs = await models.walletLog.findMany({
where: {
userId: me.id,
wallet: type ?? undefined,
createdAt: {
gte: from ? new Date(Number(from)) : undefined,
lte: to ? new Date(Number(to)) : undefined
}
},
orderBy: [
{ createdAt: 'desc' },
{ id: 'desc' }
]
})
nextCursor = nextCursorEncoded(decodedCursor, logs.length)
} else {
logs = await models.walletLog.findMany({
where: {
userId: me.id,
wallet: type ?? undefined,
createdAt: {
lte: decodedCursor.time
}
},
orderBy: [
{ createdAt: 'desc' },
{ id: 'desc' }
],
take: LIMIT,
skip: decodedCursor.offset
})
nextCursor = logs.length === LIMIT ? nextCursorEncoded(decodedCursor, logs.length) : null
}

return {
cursor: nextCursor,
entries: logs
}
}
},
Wallet: {

@@ -399,7 +445,7 @@ const resolvers = {
})

const [inv] = await serialize(
models.$queryRaw`SELECT * FROM create_invoice(${invoice.id}, ${hodlInvoice ? invoice.secret : null}::TEXT, ${invoice.request},
models.$queryRaw`SELECT * FROM create_invoice(${invoice.id}, ${invoice.secret}::TEXT, ${invoice.request},
${expiresAt}::timestamp, ${amount * 1000}, ${user.id}::INTEGER, ${description}, NULL, NULL,
${invLimit}::INTEGER, ${balanceLimit})`,
{ models }

@@ -498,7 +544,7 @@ const resolvers = {
preimage: async (withdrawl, args, { lnd }) => {
try {
if (withdrawl.status === 'CONFIRMED') {
return (await getPayment({ id: withdrawl.hash, lnd })).payment.secret
return withdrawl.preimage ?? (await getPayment({ id: withdrawl.hash, lnd })).payment.secret
}
} catch (err) {
console.error('error fetching payment from LND', err)

@@ -677,23 +723,23 @@ export async function createWithdrawal (parent, { invoice, maxFee }, { me, model
invoice = invoice.replace(/^lightning:/, '')

// decode invoice to get amount
let decoded, node
let decoded, sockets
try {
decoded = await decodePaymentRequest({ lnd, request: invoice })
decoded = await parsePaymentRequest({ request: invoice })
} catch (error) {
console.log(error)
throw new GqlInputError('could not decode invoice')
}

try {
node = await getNode({ lnd, public_key: decoded.destination, is_omitting_channels: true })
sockets = await getNodeSockets({ lnd, public_key: decoded.destination })
} catch (error) {
// likely not found if it's an unannounced channel, e.g. phoenix
console.log(error)
}

if (node) {
for (const { socket } of node.sockets) {
if (sockets) {
for (const { socket } of sockets) {
const ip = socket.split(':')[0]
await assertGofacYourself({ models, headers, ip })
}

@@ -703,6 +749,10 @@ export async function createWithdrawal (parent, { invoice, maxFee }, { me, model
throw new GqlInputError('your invoice must specify an amount')
}

if (decoded.mtokens > Number.MAX_SAFE_INTEGER) {
throw new GqlInputError('your invoice amount is too large')
}

const msatsFee = Number(maxFee) * 1000

const user = await models.user.findUnique({ where: { id: me.id } })

@@ -784,8 +834,8 @@ export async function fetchLnAddrInvoice (

// decode invoice
try {
const decoded = await decodePaymentRequest({ lnd, request: res.pr })
const ourPubkey = (await getIdentity({ lnd })).public_key
const decoded = await parsePaymentRequest({ request: res.pr })
const ourPubkey = await getOurPubkey({ lnd })
if (autoWithdraw && decoded.destination === ourPubkey && process.env.NODE_ENV === 'production') {
// unset lnaddr so we don't trigger another withdrawal with same destination
await models.wallet.deleteMany({
@@ -40,13 +40,11 @@ export default async function getSSRApolloClient ({ req, res, me = null }) {
watchQuery: {
fetchPolicy: 'no-cache',
nextFetchPolicy: 'no-cache',
canonizeResults: true,
ssr: true
},
query: {
fetchPolicy: 'no-cache',
nextFetchPolicy: 'no-cache',
canonizeResults: true,
ssr: true
}
}
@@ -18,7 +18,6 @@ import admin from './admin'
import blockHeight from './blockHeight'
import chainFee from './chainFee'
import paidAction from './paidAction'
import vault from './vault'

const common = gql`
type Query {

@@ -39,4 +38,4 @@ const common = gql`
`

export default [common, user, item, itemForward, message, wallet, lnurl, notifications, invite,
sub, upload, growth, rewards, referrals, price, admin, blockHeight, chainFee, paidAction, vault]
sub, upload, growth, rewards, referrals, price, admin, blockHeight, chainFee, paidAction]
@@ -16,6 +16,8 @@ export default gql`
type BoostPositions {
home: Boolean!
sub: Boolean!
homeMaxBoost: Int!
subMaxBoost: Int!
}

type TitleUnshorted {
@@ -33,7 +33,7 @@ export default gql`
setName(name: String!): String
setSettings(settings: SettingsInput!): User
setPhoto(photoId: ID!): Int!
upsertBio(bio: String!): User!
upsertBio(text: String!): ItemPaidAction!
setWalkthrough(tipPopover: Boolean, upvotePopover: Boolean): Boolean
unlinkAuth(authType: String!): AuthMethods!
linkUnverifiedEmail(email: String!): Boolean

@@ -182,7 +182,6 @@ export default gql`
withdrawMaxFeeDefault: Int!
autoWithdrawThreshold: Int
autoWithdrawMaxFeePercent: Float
vaultKeyHash: String
}

type UserOptional {
@@ -1,22 +0,0 @@
import { gql } from 'graphql-tag'

export default gql`
type Vault {
id: ID!
key: String!
value: String!
createdAt: Date!
updatedAt: Date!
}

extend type Query {
getVaultEntry(key: String!): Vault
}

extend type Mutation {
setVaultEntry(key: String!, value: String!, skipIfSet: Boolean): Boolean
unsetVaultEntry(key: String!): Boolean
clearVault: Boolean
setVaultKeyHash(hash: String!): String
}
`
@@ -66,7 +66,7 @@ const typeDefs = `
wallets: [Wallet!]!
wallet(id: ID!): Wallet
walletByType(type: String!): Wallet
walletLogs: [WalletLog]!
walletLogs(type: String, from: String, to: String, cursor: String): WalletLog!
}

extend type Mutation {

@@ -154,6 +154,11 @@ const typeDefs = `
}

type WalletLog {
entries: [WalletLogEntry!]!
cursor: String
}

type WalletLogEntry {
id: ID!
createdAt: Date!
wallet: ID!
@@ -129,3 +129,9 @@ SatsAllDay,issue,#1368,#1331,medium,,,,25k,weareallsatoshi@getalby.com,2024-09-1
benalleng,helpfulness,#1368,#1170,medium,,,did a lot of it in #1175,25k,BenAllenG@stacker.news,2024-09-16
humble-GOAT,issue,#1412,#1407,good-first-issue,,,,2k,humble_GOAT@stacker.news,2024-09-18
felipebueno,issue,#1425,#986,medium,,,,25k,felipebueno@getalby.com,2024-09-26
riccardobl,pr,#1373,#1304,hard,high,,,2m,bolt11,2024-10-01
tsmith123,pr,#1428,#1397,easy,,1,superceded,90k,stickymarch60@walletofsatoshi.com,2024-10-02
toyota-corolla0,pr,#1449,,good-first-issue,,,,20k,toyota_corolla0@stacker.news,2024-10-02
toyota-corolla0,pr,#1455,#1437,good-first-issue,,,,20k,toyota_corolla0@stacker.news,2024-10-02
SouthKoreaLN,issue,#1436,,easy,,,,10k,south_korea_ln@stacker.news,2024-10-02
TonyGiorgio,issue,#1462,,easy,urgent,,,30k,TonyGiorgio@stacker.news,2024-10-07
@@ -1,18 +1,20 @@
import { useState, useEffect, useMemo } from 'react'
import { useState, useEffect, useMemo, useCallback } from 'react'
import AccordianItem from './accordian-item'
import { Input, InputUserSuggest, VariableInput, Checkbox } from './form'
import InputGroup from 'react-bootstrap/InputGroup'
import { BOOST_MIN, BOOST_MULT, MAX_FORWARDS } from '@/lib/constants'
import { BOOST_MIN, BOOST_MULT, MAX_FORWARDS, SSR } from '@/lib/constants'
import { DEFAULT_CROSSPOSTING_RELAYS } from '@/lib/nostr'
import Info from './info'
import { numWithUnits } from '@/lib/format'
import { abbrNum, numWithUnits } from '@/lib/format'
import styles from './adv-post-form.module.css'
import { useMe } from './me'
import { useFeeButton } from './fee-button'
import { useRouter } from 'next/router'
import { useFormikContext } from 'formik'
import { gql, useLazyQuery } from '@apollo/client'
import { gql, useQuery } from '@apollo/client'
import useDebounceCallback from './use-debounce-callback'
import { Button } from 'react-bootstrap'
import classNames from 'classnames'

const EMPTY_FORWARD = { nym: '', pct: '' }

@@ -85,56 +87,96 @@ export function BoostInput ({ onChange, ...props }) {
)
}

const BoostMaxes = ({ subName, homeMax, subMax, boost, updateBoost }) => {
return (
<div className='d-flex flex-row mb-2'>
<Button
className={classNames(styles.boostMax, 'me-2', homeMax + BOOST_MULT <= (boost || 0) && 'invisible')}
size='sm'
onClick={() => updateBoost(homeMax + BOOST_MULT)}
>
{abbrNum(homeMax + BOOST_MULT)} <small>top of homepage</small>
</Button>
{subName &&
<Button
className={classNames(styles.boostMax, subMax + BOOST_MULT <= (boost || 0) && 'invisible')}
size='sm'
onClick={() => updateBoost(subMax + BOOST_MULT)}
>
{abbrNum(subMax + BOOST_MULT)} <small>top of ~{subName}</small>
</Button>}
</div>
)
}

// act means we are adding to existing boost
export function BoostItemInput ({ item, sub, act = false, ...props }) {
const [boost, setBoost] = useState(Number(item?.boost) + (act ? BOOST_MULT : 0))
// act adds boost to existing boost
const existingBoost = act ? Number(item?.boost || 0) : 0
const [boost, setBoost] = useState(act ? 0 : Number(item?.boost || 0))

const [getBoostPosition, { data }] = useLazyQuery(gql`
const { data, previousData, refetch } = useQuery(gql`
query BoostPosition($sub: String, $id: ID, $boost: Int) {
boostPosition(sub: $sub, id: $id, boost: $boost) {
home
sub
homeMaxBoost
subMaxBoost
}
}`,
{ fetchPolicy: 'cache-and-network' })
{
variables: { sub: item?.subName || sub?.name, boost: existingBoost + boost, id: item?.id },
fetchPolicy: 'cache-and-network',
skip: !!item?.parentId || SSR
})

const getPositionDebounce = useDebounceCallback((...args) => getBoostPosition(...args), 1000, [getBoostPosition])
const getPositionDebounce = useDebounceCallback((...args) => refetch(...args), 1000, [refetch])
const updateBoost = useCallback((boost) => {
const boostToUse = Number(boost || 0)
setBoost(boostToUse)
getPositionDebounce({ sub: item?.subName || sub?.name, boost: Number(existingBoost + boostToUse), id: item?.id })
}, [getPositionDebounce, item?.id, item?.subName, sub?.name, existingBoost])

useEffect(() => {
if (boost >= 0 && !item?.parentId) {
getPositionDebounce({ variables: { sub: item?.subName || sub?.name, boost: Number(boost), id: item?.id } })
}
}, [boost, item?.id, !item?.parentId, item?.subName || sub?.name])
const dat = data || previousData

const boostMessage = useMemo(() => {
if (!item?.parentId) {
if (data?.boostPosition?.home || data?.boostPosition?.sub) {
if (!item?.parentId && boost >= BOOST_MULT) {
if (dat?.boostPosition?.home || dat?.boostPosition?.sub || boost > dat?.boostPosition?.homeMaxBoost || boost > dat?.boostPosition?.subMaxBoost) {
const boostPinning = []
if (data?.boostPosition?.home) {
if (dat?.boostPosition?.home || boost > dat?.boostPosition?.homeMaxBoost) {
boostPinning.push('homepage')
}
if (data?.boostPosition?.sub) {
if ((item?.subName || sub?.name) && (dat?.boostPosition?.sub || boost > dat?.boostPosition?.subMaxBoost)) {
boostPinning.push(`~${item?.subName || sub?.name}`)
}
return `pins to the top of ${boostPinning.join(' and ')}`
}
}
if (boost >= 0 && boost % BOOST_MULT === 0) {
return `${act ? 'brings to' : 'equivalent to'} ${numWithUnits(boost / BOOST_MULT, { unitPlural: 'zapvotes', unitSingular: 'zapvote' })}`
}
return 'ranks posts higher based on the amount'
}, [boost, data?.boostPosition?.home, data?.boostPosition?.sub, item?.subName, sub?.name])
}, [boost, dat?.boostPosition?.home, dat?.boostPosition?.sub, item?.subName, sub?.name])

return (
<BoostInput
hint={<span className='text-muted'>{boostMessage}</span>}
onChange={(_, e) => {
if (e.target.value >= 0) {
setBoost(Number(e.target.value) + (act ? Number(item?.boost) : 0))
}
}}
{...props}
/>
<>
<BoostInput
hint={<span className='text-muted'>{boostMessage}</span>}
onChange={(_, e) => {
if (e.target.value >= 0) {
updateBoost(Number(e.target.value))
}
}}
overrideValue={boost}
{...props}
groupClassName='mb-1'
/>
{!item?.parentId &&
<BoostMaxes
subName={item?.subName || sub?.name}
homeMax={(dat?.boostPosition?.homeMaxBoost || 0) - existingBoost}
subMax={(dat?.boostPosition?.subMaxBoost || 0) - existingBoost}
boost={existingBoost + boost}
updateBoost={updateBoost}
/>}
</>
)
}
@@ -9,4 +9,11 @@
display: flex;
flex: 0 1 fit-content;
height: fit-content;
}

.boostMax small {
font-weight: 400;
margin-left: 0.25rem;
margin-right: 0.25rem;
opacity: 0.5;
}
@@ -4,6 +4,6 @@ import Button from 'react-bootstrap/Button'
export default function CancelButton ({ onClick }) {
const router = useRouter()
return (
<Button className='me-2 text-muted nav-link fw-bold' variant='link' onClick={onClick || (() => router.back())}>cancel</Button>
<Button className='me-4 text-muted nav-link fw-bold' variant='link' onClick={onClick || (() => router.back())}>cancel</Button>
)
}
@@ -1,236 +0,0 @@
import { useCallback, useEffect, useState } from 'react'
import { useMe } from './me'
import { useShowModal } from './modal'
import { useVaultConfigurator, useVaultMigration } from './use-vault'
import { Button, InputGroup } from 'react-bootstrap'
import { Form, Input, PasswordInput, SubmitButton } from './form'
import bip39Words from '@/lib/bip39-words'
import Info from './info'
import CancelButton from './cancel-button'
import * as yup from 'yup'
import { deviceSyncSchema } from '@/lib/validate'
import RefreshIcon from '@/svgs/refresh-line.svg'

export default function DeviceSync () {
const { me } = useMe()
const [value, setVaultKey, clearVault, disconnectVault] = useVaultConfigurator()
const showModal = useShowModal()

const enabled = !!me?.privates?.vaultKeyHash
const connected = !!value?.key

const migrate = useVaultMigration()

const manage = useCallback(async () => {
if (enabled && connected) {
showModal((onClose) => (
<div>
<h2>Device sync is enabled!</h2>
<p>
Sensitive data (like wallet credentials) is now securely synced between all connected devices.
</p>
<p className='text-muted text-sm'>
Disconnect to prevent this device from syncing data or to reset your passphrase.
</p>
<div className='d-flex justify-content-between'>
<div className='d-flex align-items-center ms-auto gap-2'>
<Button className='me-2 text-muted nav-link fw-bold' variant='link' onClick={onClose}>close</Button>
<Button
variant='primary'
onClick={() => {
disconnectVault()
onClose()
}}
>disconnect
</Button>
</div>
</div>
</div>
))
} else {
showModal((onClose) => (
<ConnectForm onClose={onClose} onConnect={onConnect} enabled={enabled} />
))
}
}, [migrate, enabled, connected, value])

const reset = useCallback(async () => {
const schema = yup.object().shape({
confirm: yup.string()
.oneOf(['yes'], 'you must confirm by typing "yes"')
.required('required')
})
showModal((onClose) => (
<div>
<h2>Reset device sync</h2>
<p>
This will delete all encrypted data on the server and disconnect all devices.
</p>
<p>
You will need to enter a new passphrase on this and all other devices to sync data again.
</p>
<Form
className='mt-3'
initial={{ confirm: '' }}
schema={schema}
onSubmit={async values => {
await clearVault()
onClose()
}}
>
<Input
label='This action cannot be undone. Type `yes` to confirm.'
name='confirm'
placeholder=''
required
autoFocus
autoComplete='off'
/>
<div className='d-flex justify-content-between'>
<div className='d-flex align-items-center ms-auto'>
<CancelButton onClick={onClose} />
<SubmitButton variant='danger'>
continue
</SubmitButton>
</div>
</div>
</Form>
</div>
))
}, [])

const onConnect = useCallback(async (values, formik) => {
if (values.passphrase) {
try {
await setVaultKey(values.passphrase)
await migrate()
} catch (e) {
formik?.setErrors({ passphrase: e.message })
throw e
}
}
}, [setVaultKey, migrate])

return (
<>
<div className='form-label mt-3'>device sync</div>
<div className='mt-2 d-flex align-items-center'>
<div>
<Button
variant='secondary'
onClick={manage}
>
{enabled ? (connected ? 'Manage ' : 'Connect to ') : 'Enable '}
device sync
</Button>
</div>
<Info>
<p>
Device sync uses end-to-end encryption to securely synchronize your data across devices.
</p>
<p className='text-muted text-sm'>
Your sensitive data remains private and inaccessible to our servers while being synced across all your connected devices using only a passphrase.
</p>
</Info>
</div>
{enabled && !connected && (
<div className='mt-2 d-flex align-items-center'>
<div>
<Button
variant='danger'
onClick={reset}
>
Reset device sync data
</Button>
</div>
<Info>
<p>
If you have lost your passphrase or wish to erase all encrypted data from the server, you can reset the device sync data and start over.
</p>
<p className='text-muted text-sm'>
This action cannot be undone.
</p>
</Info>
</div>
)}
</>
)
}

const generatePassphrase = (n = 12) => {
const rand = new Uint32Array(n)
window.crypto.getRandomValues(rand)
return Array.from(rand).map(i => bip39Words[i % bip39Words.length]).join(' ')
}

function ConnectForm ({ onClose, onConnect, enabled }) {
const [passphrase, setPassphrase] = useState(!enabled ? generatePassphrase : '')

useEffect(() => {
const scannedPassphrase = window.localStorage.getItem('qr:passphrase')
if (scannedPassphrase) {
setPassphrase(scannedPassphrase)
window.localStorage.removeItem('qr:passphrase')
}
})

const newPassphrase = useCallback(() => {
setPassphrase(() => generatePassphrase(12))
}, [])

return (
<div>
<h2>{!enabled ? 'Enable device sync' : 'Input your passphrase'}</h2>
<p>
{!enabled
? 'Enable secure sync of sensitive data (like wallet credentials) between your devices. You’ll need to enter this passphrase on each device you want to connect.'
: 'Enter the passphrase from device sync to access your encrypted sensitive data (like wallet credentials) on the server.'}
</p>
<Form
schema={enabled ? undefined : deviceSyncSchema}
initial={{ passphrase }}
enableReinitialize
onSubmit={async (values, formik) => {
try {
await onConnect(values, formik)
onClose()
} catch {}
}}
>
<PasswordInput
label='passphrase'
name='passphrase'
placeholder=''
required
autoFocus
as='textarea'
rows={3}
readOnly={!enabled}
copy={!enabled}
append={
!enabled && (
<InputGroup.Text style={{ cursor: 'pointer', userSelect: 'none' }} onClick={newPassphrase}>
<RefreshIcon width={16} height={16} />
</InputGroup.Text>
)
}
/>
<p className='text-muted text-sm'>
{
!enabled
? 'This passphrase is stored only on your device and cannot be shown again.'
: 'If you have forgotten your passphrase, you can reset and start over.'
}
</p>
<div className='mt-3'>
<div className='d-flex justify-content-between'>
<div className='d-flex align-items-center ms-auto gap-2'>
<CancelButton onClick={onClose} />
<SubmitButton variant='primary'>{enabled ? 'connect' : 'enable'}</SubmitButton>
</div>
</div>
</div>
</Form>
</div>
)
}
@ -33,12 +33,6 @@ import EyeClose from '@/svgs/eye-close-line.svg'
|
|||
import Info from './info'
|
||||
import { useMe } from './me'
|
||||
import classNames from 'classnames'
|
||||
import Clipboard from '@/svgs/clipboard-line.svg'
|
||||
import QrIcon from '@/svgs/qr-code-line.svg'
|
||||
import QrScanIcon from '@/svgs/qr-scan-line.svg'
|
||||
import { useShowModal } from './modal'
|
||||
import QRCode from 'qrcode.react'
|
||||
import { QrScanner } from '@yudiel/react-qr-scanner'
|
||||
|
||||
export class SessionRequiredError extends Error {
|
||||
constructor () {
|
||||
|
@ -75,41 +69,31 @@ export function SubmitButton ({
|
|||
)
|
||||
}
|
||||
|
||||
function CopyButton ({ value, icon, ...props }) {
|
||||
export function CopyInput (props) {
|
||||
const toaster = useToast()
|
||||
const [copied, setCopied] = useState(false)
|
||||
|
||||
const handleClick = useCallback(async () => {
|
||||
const handleClick = async () => {
|
||||
try {
|
||||
await copy(value)
|
||||
await copy(props.placeholder)
|
||||
toaster.success('copied')
|
||||
setCopied(true)
|
||||
setTimeout(() => setCopied(false), 1500)
|
||||
} catch (err) {
|
||||
toaster.danger('failed to copy')
|
||||
}
|
||||
}, [toaster, value])
|
||||
|
||||
if (icon) {
|
||||
return (
|
||||
<InputGroup.Text style={{ cursor: 'pointer' }} onClick={handleClick}>
|
||||
<Clipboard height={20} width={20} />
|
||||
</InputGroup.Text>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<Button className={styles.appendButton} {...props} onClick={handleClick}>
|
||||
{copied ? <Thumb width={18} height={18} /> : 'copy'}
|
||||
</Button>
|
||||
)
|
||||
}
|
||||
|
||||
export function CopyInput (props) {
|
||||
return (
|
||||
<Input
|
||||
onClick={handleClick}
|
||||
append={
|
||||
<CopyButton value={props.placeholder} size={props.size} />
|
||||
<Button
|
||||
className={styles.appendButton}
|
||||
size={props.size}
|
||||
onClick={handleClick}
|
||||
>{copied ? <Thumb width={18} height={18} /> : 'copy'}
|
||||
</Button>
|
||||
}
|
||||
{...props}
|
||||
/>
|
||||
|
@ -727,11 +711,10 @@ export function InputUserSuggest ({
|
|||
)
|
||||
}
|
||||
|
||||
export function Input ({ label, groupClassName, under, ...props }) {
|
||||
export function Input ({ label, groupClassName, ...props }) {
|
||||
return (
|
||||
<FormGroup label={label} className={groupClassName}>
|
||||
<InputInner {...props} />
|
||||
{under}
|
||||
</FormGroup>
|
||||
)
|
||||
}
|
||||
|
@ -1087,121 +1070,24 @@ function PasswordHider ({ onClick, showPass }) {
|
|||
>
|
||||
{!showPass
|
||||
? <Eye
|
||||
fill='var(--bs-body-color)' height={16} width={16}
|
||||
fill='var(--bs-body-color)' height={20} width={20}
|
||||
/>
|
||||
: <EyeClose
|
||||
fill='var(--bs-body-color)' height={16} width={16}
|
||||
fill='var(--bs-body-color)' height={20} width={20}
|
||||
/>}
|
||||
</InputGroup.Text>
|
||||
)
|
||||
}
|
||||
|
||||
function QrPassword ({ value }) {
|
||||
const showModal = useShowModal()
|
||||
const toaster = useToast()
|
||||
|
||||
const showQr = useCallback(() => {
|
||||
showModal(close => (
|
||||
<div className={styles.qr}>
|
||||
<p>You can import this passphrase into another device by scanning this QR code</p>
|
||||
<QRCode value={value} renderAs='svg' />
|
||||
</div>
|
||||
))
|
||||
}, [toaster, value, showModal])
|
||||
|
||||
return (
|
||||
<>
|
||||
<InputGroup.Text
|
||||
style={{ cursor: 'pointer' }}
|
||||
onClick={showQr}
|
||||
>
|
||||
<QrIcon height={16} width={16} />
|
||||
</InputGroup.Text>
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
function PasswordScanner ({ onDecode }) {
|
||||
const showModal = useShowModal()
|
||||
const toaster = useToast()
|
||||
const ref = useRef(false)
|
||||
|
||||
return (
|
||||
<InputGroup.Text
|
||||
style={{ cursor: 'pointer' }}
|
||||
onClick={() => {
|
||||
showModal(onClose => {
|
||||
return (
|
||||
<QrScanner
|
||||
onDecode={(decoded) => {
|
||||
onDecode(decoded)
|
||||
|
||||
// avoid accidentally calling onClose multiple times
|
||||
if (ref?.current) return
|
||||
ref.current = true
|
||||
|
||||
onClose({ back: 1 })
|
||||
}}
|
||||
onError={(error) => {
|
||||
if (error instanceof DOMException) return
|
||||
toaster.danger('qr scan error:', error.message || error.toString?.())
|
||||
onClose({ back: 1 })
|
||||
}}
|
||||
/>
|
||||
)
|
||||
})
|
||||
}}
|
||||
>
|
||||
<QrScanIcon
|
||||
height={20} width={20} fill='var(--bs-body-color)'
|
||||
/>
|
||||
</InputGroup.Text>
|
||||
)
|
||||
}
|
||||
|
||||
export function PasswordInput ({ newPass, qr, copy, readOnly, append, ...props }) {
|
||||
export function PasswordInput ({ newPass, ...props }) {
|
||||
const [showPass, setShowPass] = useState(false)
|
||||
const [field] = useField(props)
|
||||
|
||||
const Append = useMemo(() => {
|
||||
return (
|
||||
<>
|
||||
<PasswordHider showPass={showPass} onClick={() => setShowPass(!showPass)} />
|
||||
{copy && (
|
||||
<CopyButton icon value={field?.value} />
|
||||
)}
|
||||
{qr && (readOnly
|
||||
? <QrPassword value={field?.value} />
|
||||
: <PasswordScanner
|
||||
onDecode={decoded => {
|
||||
// Formik helpers don't seem to work in another modal.
|
||||
// I assume it's because we unmount the Formik component
|
||||
// when replace it with another modal.
|
||||
window.localStorage.setItem('qr:passphrase', decoded)
|
||||
}}
|
||||
/>)}
|
||||
{append}
|
||||
</>
|
||||
)
|
||||
}, [showPass, copy, field?.value, qr, readOnly, append])
|
||||
|
||||
const maskedValue = !showPass && props.as === 'textarea' ? field?.value?.replace(/./g, '•') : field?.value
|
||||
|
||||
return (
|
||||
<ClientInput
|
||||
{...props}
|
||||
className={styles.passwordInput}
|
||||
type={showPass ? 'text' : 'password'}
|
||||
autoComplete={newPass ? 'new-password' : 'current-password'}
|
||||
readOnly={readOnly}
|
||||
append={props.as === 'textarea' ? undefined : Append}
|
||||
value={maskedValue}
|
||||
under={props.as === 'textarea'
|
||||
? (
|
||||
<div className='mt-2 d-flex justify-content-end' style={{ gap: '8px' }}>
|
||||
{Append}
|
||||
</div>)
|
||||
: undefined}
|
||||
append={<PasswordHider showPass={showPass} onClick={() => setShowPass(!showPass)} />}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
|
|
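The form.js changes above extract a reusable `CopyButton` with two render modes (an icon-only variant and the append button) and have `CopyInput` copy `props.placeholder` on click. A minimal usage sketch; the component and its extra props here are hypothetical, only `CopyInput` and its placeholder-as-value convention come from the diff:

```jsx
// hypothetical consumer; assumes CopyInput is exported from the refactored form module above
import { CopyInput } from '@/components/form'

export function ReferralLink ({ url }) {
  return (
    // CopyInput copies props.placeholder when its CopyButton append is clicked
    <CopyInput type='text' placeholder={url} readOnly size='sm' />
  )
}
```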
@ -2,10 +2,6 @@
  border-top-left-radius: 0;
}

textarea.passwordInput {
  resize: none;
}

.markdownInput textarea {
  margin-top: -1px;
  font-size: 94%;

@ -73,16 +69,4 @@ textarea.passwordInput {
  0% {
    opacity: 42%;
  }
}

div.qr {
  display: grid;
}

div.qr>svg {
  justify-self: center;
  width: 100%;
  height: auto;
  padding: 1rem;
  background-color: white;
}
}
@ -70,19 +70,18 @@ function BoostForm ({ step, onSubmit, children, item, oValue, inputRef, act = 'B
        name='amount'
        type='number'
        innerRef={inputRef}
        overrideValue={oValue}
        sub={item.sub}
        step={step}
        required
        autoFocus
        item={item}
      />
      {children}
      <div className='d-flex mt-3'>
        <SubmitButton variant='success' className='ms-auto mt-1 px-4' value={act}>
          boost
        </SubmitButton>
      </div>
      {children}
    </Form>
  )
}

@ -147,7 +146,7 @@ export default function ItemAct ({ onClose, item, act = 'TIP', step, children, a
  }, [me, actor, !!wallet, act, item.id, onClose, abortSignal, strike])

  return act === 'BOOST'
    ? <BoostForm step={step} onSubmit={onSubmit} item={item} oValue={oValue} inputRef={inputRef} act={act}>{children}</BoostForm>
    ? <BoostForm step={step} onSubmit={onSubmit} item={item} inputRef={inputRef} act={act}>{children}</BoostForm>
    : (
      <Form
        initial={{

@ -171,12 +170,12 @@ export default function ItemAct ({ onClose, item, act = 'TIP', step, children, a
        <div>
          <Tips setOValue={setOValue} />
        </div>
        {children}
        <div className='d-flex mt-3'>
          <SubmitButton variant={act === 'DONT_LIKE_THIS' ? 'danger' : 'success'} className='ms-auto mt-1 px-4' value={act}>
            {act === 'DONT_LIKE_THIS' ? 'downzap' : 'zap'}
          </SubmitButton>
        </div>
        {children}
      </Form>)
}
@ -53,13 +53,21 @@ function BioItem ({ item, handleClick }) {
function ItemEmbed ({ url, imgproxyUrls }) {
  const provider = parseEmbedUrl(url)
  if (provider) {
    return <Embed src={url} {...provider} topLevel />
    return (
      <div className='mt-3'>
        <Embed src={url} {...provider} topLevel />
      </div>
    )
  }

  if (imgproxyUrls) {
    const src = IMGPROXY_URL_REGEXP.test(url) ? decodeProxyUrl(url) : url
    const srcSet = imgproxyUrls?.[url]
    return <MediaOrLink src={src} srcSet={srcSet} topLevel linkFallback={false} />
    return (
      <div className='mt-3'>
        <MediaOrLink src={src} srcSet={srcSet} topLevel linkFallback={false} />
      </div>
    )
  }

  return null
@ -37,7 +37,7 @@ export default function ItemInfo ({
  const editThreshold = new Date(item.invoice?.confirmedAt ?? item.createdAt).getTime() + 10 * 60000
  const { me } = useMe()
  const router = useRouter()
  const [canEdit, setCanEdit] = useState(item.mine && (Date.now() < editThreshold))
  const [canEdit, setCanEdit] = useState(item.mine && !item.bio && (Date.now() < editThreshold))
  const [hasNewComments, setHasNewComments] = useState(false)
  const root = useRoot()
  const sub = item?.sub || root?.sub

@ -49,7 +49,7 @@ export default function ItemInfo ({
  }, [item])

  useEffect(() => {
    const authorEdit = item.mine
    const authorEdit = item.mine && !item.bio
    const invParams = window.localStorage.getItem(`item:${item.id}:hash:hmac`)
    const hmacEdit = !!invParams && !me && Number(item.user.id) === USER_ID.anon
    setCanEdit((authorEdit || hmacEdit) && (Date.now() < editThreshold))

@ -107,7 +107,7 @@ export default function Item ({
        {item.position && (pinnable || !item.subName)
          ? <Pin width={24} height={24} className={styles.pin} />
          : item.mine || item.meForward
            ? <Boost item={item} className={styles.upvote} />
            ? <Boost item={item} className={classNames(styles.upvote, item.bio && 'invisible')} />
            : item.meDontLikeSats > item.meSats
              ? <DownZap width={24} height={24} className={styles.dontLike} item={item} />
              : Number(item.user?.id) === USER_ID.ad
@ -45,20 +45,13 @@ export default function useModal () {
  }, [getCurrentContent, forceUpdate])

  // this is called on every navigation due to below useEffect
  const onClose = useCallback((options) => {
    if (options?.back) {
      for (let i = 0; i < options.back; i++) {
        onBack()
      }
      return
    }

  const onClose = useCallback(() => {
    while (modalStack.current.length) {
      getCurrentContent()?.options?.onClose?.()
      modalStack.current.pop()
    }
    forceUpdate()
  }, [onBack])
  }, [])

  const router = useRouter()
  useEffect(() => {

@ -97,7 +90,7 @@ export default function useModal () {
          {overflow}
        </ActionDropdown>
      </div>}
      {modalStack.current.length > 1 ? <div className='modal-btn modal-back' onClick={onBack}><BackArrow width={18} height={18} /></div> : null}
      {modalStack.current.length > 1 ? <div className='modal-btn modal-back' onClick={onBack}><BackArrow width={18} height={18} className='fill-white' /></div> : null}
      <div className={'modal-btn modal-close ' + className} onClick={onClose}>X</div>
    </div>
    <Modal.Body className={className}>
@ -25,7 +25,6 @@ import { useHasNewNotes } from '../use-has-new-notes'
import { useWallets } from 'wallets'
import SwitchAccountList, { useAccounts } from '@/components/account'
import { useShowModal } from '@/components/modal'
import { unsetLocalKey as resetVaultKey } from '@/components/use-vault'

export function Brand ({ className }) {
  return (

@ -261,7 +260,6 @@ function LogoutObstacle ({ onClose }) {
  const { registration: swRegistration, togglePushSubscription } = useServiceWorker()
  const wallets = useWallets()
  const { multiAuthSignout } = useAccounts()
  const { me } = useMe()

  return (
    <div className='d-flex m-auto flex-column w-fit-content'>

@ -290,7 +288,6 @@ function LogoutObstacle ({ onClose }) {
        }

        await wallets.resetClient().catch(console.error)
        await resetVaultKey(me?.id)

        await signOut({ callbackUrl: '/' })
      }}
@ -67,19 +67,25 @@ export const useInvoice = () => {
    if (error) {
      throw error
    }
    const { hash, cancelled, actionError } = data.invoice

    const { hash, cancelled, actionError, actionState } = data.invoice

    if (cancelled || actionError) {
      throw new InvoiceCanceledError(hash, actionError)
    }

    // write to cache if paid
    if (actionState === 'PAID') {
      client.writeQuery({ query: INVOICE, variables: { id }, data: { invoice: data.invoice } })
    }

    return that(data.invoice)
  }, [client])

  const waitController = useMemo(() => {
    const controller = new AbortController()
    const signal = controller.signal
    controller.wait = async ({ id }, waitFor = inv => (inv.satsReceived > 0)) => {
    controller.wait = async ({ id }, waitFor = inv => inv?.actionState === 'PAID') => {
      return await new Promise((resolve, reject) => {
        const interval = setInterval(async () => {
          try {

@ -138,11 +144,7 @@ export const useWalletPayment = () => {
    return await new Promise((resolve, reject) => {
      // can't use await here since we might pay JIT invoices and sendPaymentAsync is not supported yet.
      // see https://www.webln.guide/building-lightning-apps/webln-reference/webln.sendpaymentasync
      wallet.sendPayment(bolt11)
        // JIT invoice payments will never resolve here
        // since they only get resolved after settlement which can't happen here
        .then(resolve)
        .catch(reject)
      wallet.sendPayment(bolt11).catch(reject)
      invoice.waitUntilPaid({ id }, waitFor)
        .then(resolve)
        .catch(reject)
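The payment hook changes above switch the default wait predicate from `satsReceived > 0` to `actionState === 'PAID'` and let callers pass their own `waitFor`. A minimal sketch of a caller overriding it; the hook name, import path, and the `'HELD'` state are assumptions for illustration, only `waitUntilPaid`'s `({ id }, waitFor)` shape comes from the diff:

```js
import { useCallback } from 'react'
import { useInvoice } from '@/components/payment' // path assumed

// hypothetical hook built on useInvoice().waitUntilPaid as changed above
function useWaitForSettlement () {
  const invoice = useInvoice()
  return useCallback(async (id) => {
    // polls the invoice until the predicate returns true (default is actionState === 'PAID')
    return await invoice.waitUntilPaid(
      { id },
      inv => inv?.actionState === 'PAID' || inv?.actionState === 'HELD' // 'HELD' is an assumed state name
    )
  }, [invoice])
}
```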
@ -18,6 +18,8 @@ import classNames from 'classnames'
import { CarouselProvider, useCarousel } from './carousel'
import rehypeSN from '@/lib/rehype-sn'
import Embed from './embed'
import remarkMath from 'remark-math'
import rehypeMathjax from 'rehype-mathjax'

const rehypeSNStyled = () => rehypeSN({
  stylers: [{

@ -31,8 +33,8 @@ const rehypeSNStyled = () => rehypeSN({
  }]
})

const remarkPlugins = [gfm]
const rehypePlugins = [rehypeSNStyled]
const remarkPlugins = [gfm, remarkMath]
const rehypePlugins = [rehypeSNStyled, rehypeMathjax]

export function SearchText ({ text }) {
  return (
@ -216,13 +216,13 @@
  width: auto;
  max-height: inherit;
  height: 100%;
  max-width: 100%;
  aspect-ratio: var(--aspect-ratio);
}

.mediaContainer img {
  cursor: zoom-in;
  min-width: 30%;
  max-width: 100%;
  object-position: left top;
}
@ -0,0 +1,292 @@
|
|||
import { useState, useEffect, useCallback, useRef } from 'react'
|
||||
|
||||
function useIndexedDB (dbName, storeName, version = 1, indices = []) {
|
||||
const [db, setDb] = useState(null)
|
||||
const [error, setError] = useState(null)
|
||||
const [notSupported, setNotSupported] = useState(false)
|
||||
const operationQueue = useRef([])
|
||||
|
||||
const handleError = useCallback((error) => {
|
||||
console.error('IndexedDB error:', error)
|
||||
setError(error)
|
||||
}, [])
|
||||
|
||||
const processQueue = useCallback((db) => {
|
||||
if (!db) return
|
||||
|
||||
try {
|
||||
// try to run a noop to see if the db is ready
|
||||
db.transaction(storeName)
|
||||
while (operationQueue.current.length > 0) {
|
||||
const operation = operationQueue.current.shift()
|
||||
operation(db)
|
||||
}
|
||||
} catch (error) {
|
||||
handleError(error)
|
||||
}
|
||||
}, [storeName, handleError])
|
||||
|
||||
useEffect(() => {
|
||||
let isMounted = true
|
||||
let request
|
||||
try {
|
||||
if (!window.indexedDB) {
|
||||
console.log('IndexedDB is not supported')
|
||||
setNotSupported(true)
|
||||
return
|
||||
}
|
||||
|
||||
request = window.indexedDB.open(dbName, version)
|
||||
|
||||
request.onerror = (event) => {
|
||||
handleError(new Error('Error opening database'))
|
||||
}
|
||||
|
||||
request.onsuccess = (event) => {
|
||||
if (isMounted) {
|
||||
const database = event.target.result
|
||||
database.onversionchange = () => {
|
||||
database.close()
|
||||
setDb(null)
|
||||
handleError(new Error('Database is outdated, please reload the page'))
|
||||
}
|
||||
setDb(database)
|
||||
processQueue(database)
|
||||
}
|
||||
}
|
||||
|
||||
request.onupgradeneeded = (event) => {
|
||||
const database = event.target.result
|
||||
try {
|
||||
const store = database.createObjectStore(storeName, { keyPath: 'id', autoIncrement: true })
|
||||
|
||||
indices.forEach(index => {
|
||||
store.createIndex(index.name, index.keyPath, index.options)
|
||||
})
|
||||
} catch (error) {
|
||||
handleError(new Error('Error upgrading database: ' + error.message))
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
handleError(new Error('Error opening database: ' + error.message))
|
||||
}
|
||||
|
||||
return () => {
|
||||
isMounted = false
|
||||
if (db) {
|
||||
db.close()
|
||||
}
|
||||
}
|
||||
}, [dbName, storeName, version, indices, handleError, processQueue])
|
||||
|
||||
const queueOperation = useCallback((operation) => {
|
||||
if (notSupported) {
|
||||
return Promise.reject(new Error('IndexedDB is not supported'))
|
||||
}
|
||||
if (error) {
|
||||
return Promise.reject(new Error('Database error: ' + error.message))
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const wrappedOperation = (db) => {
|
||||
try {
|
||||
const result = operation(db)
|
||||
resolve(result)
|
||||
} catch (error) {
|
||||
reject(error)
|
||||
}
|
||||
}
|
||||
|
||||
operationQueue.current.push(wrappedOperation)
|
||||
processQueue(db)
|
||||
})
|
||||
}, [processQueue, db, notSupported, error])
|
||||
|
||||
const add = useCallback((value) => {
|
||||
return queueOperation((db) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction(storeName, 'readwrite')
|
||||
const store = transaction.objectStore(storeName)
|
||||
const request = store.add(value)
|
||||
|
||||
request.onerror = () => reject(new Error('Error adding data'))
|
||||
request.onsuccess = () => resolve(request.result)
|
||||
})
|
||||
})
|
||||
}, [queueOperation, storeName])
|
||||
|
||||
const get = useCallback((key) => {
|
||||
return queueOperation((db) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction(storeName, 'readonly')
|
||||
const store = transaction.objectStore(storeName)
|
||||
const request = store.get(key)
|
||||
|
||||
request.onerror = () => reject(new Error('Error getting data'))
|
||||
request.onsuccess = () => resolve(request.result ? request.result : undefined)
|
||||
})
|
||||
})
|
||||
}, [queueOperation, storeName])
|
||||
|
||||
const getAll = useCallback(() => {
|
||||
return queueOperation((db) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction(storeName, 'readonly')
|
||||
const store = transaction.objectStore(storeName)
|
||||
const request = store.getAll()
|
||||
|
||||
request.onerror = () => reject(new Error('Error getting all data'))
|
||||
request.onsuccess = () => resolve(request.result)
|
||||
})
|
||||
})
|
||||
}, [queueOperation, storeName])
|
||||
|
||||
const update = useCallback((key, value) => {
|
||||
return queueOperation((db) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction(storeName, 'readwrite')
|
||||
const store = transaction.objectStore(storeName)
|
||||
const request = store.get(key)
|
||||
|
||||
request.onerror = () => reject(new Error('Error updating data'))
|
||||
request.onsuccess = () => {
|
||||
const updatedValue = { ...request.result, ...value }
|
||||
const updateRequest = store.put(updatedValue)
|
||||
updateRequest.onerror = () => reject(new Error('Error updating data'))
|
||||
updateRequest.onsuccess = () => resolve(updateRequest.result)
|
||||
}
|
||||
})
|
||||
})
|
||||
}, [queueOperation, storeName])
|
||||
|
||||
const remove = useCallback((key) => {
|
||||
return queueOperation((db) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction(storeName, 'readwrite')
|
||||
const store = transaction.objectStore(storeName)
|
||||
const request = store.delete(key)
|
||||
|
||||
request.onerror = () => reject(new Error('Error removing data'))
|
||||
request.onsuccess = () => resolve()
|
||||
})
|
||||
})
|
||||
}, [queueOperation, storeName])
|
||||
|
||||
const clear = useCallback((indexName = null, query = null) => {
|
||||
return queueOperation((db) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction(storeName, 'readwrite')
|
||||
const store = transaction.objectStore(storeName)
|
||||
|
||||
if (!query) {
|
||||
// Clear all data if no query is provided
|
||||
const request = store.clear()
|
||||
request.onerror = () => reject(new Error('Error clearing all data'))
|
||||
request.onsuccess = () => resolve()
|
||||
} else {
|
||||
// Clear data based on the query
|
||||
const index = indexName ? store.index(indexName) : store
|
||||
const request = index.openCursor(query)
|
||||
let deletedCount = 0
|
||||
|
||||
request.onerror = () => reject(new Error('Error clearing data based on query'))
|
||||
request.onsuccess = (event) => {
|
||||
const cursor = event.target.result
|
||||
if (cursor) {
|
||||
const deleteRequest = cursor.delete()
|
||||
deleteRequest.onerror = () => reject(new Error('Error deleting item'))
|
||||
deleteRequest.onsuccess = () => {
|
||||
deletedCount++
|
||||
cursor.continue()
|
||||
}
|
||||
} else {
|
||||
resolve(deletedCount)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
}, [queueOperation, storeName])
|
||||
|
||||
const getByIndex = useCallback((indexName, key) => {
|
||||
return queueOperation((db) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction(storeName, 'readonly')
|
||||
const store = transaction.objectStore(storeName)
|
||||
const index = store.index(indexName)
|
||||
const request = index.get(key)
|
||||
|
||||
request.onerror = () => reject(new Error('Error getting data by index'))
|
||||
request.onsuccess = () => resolve(request.result)
|
||||
})
|
||||
})
|
||||
}, [queueOperation, storeName])
|
||||
|
||||
const getAllByIndex = useCallback((indexName, query, direction = 'next', limit = Infinity) => {
|
||||
return queueOperation((db) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction(storeName, 'readonly')
|
||||
const store = transaction.objectStore(storeName)
|
||||
const index = store.index(indexName)
|
||||
const request = index.openCursor(query, direction)
|
||||
const results = []
|
||||
|
||||
request.onerror = () => reject(new Error('Error getting data by index'))
|
||||
request.onsuccess = (event) => {
|
||||
const cursor = event.target.result
|
||||
if (cursor && results.length < limit) {
|
||||
results.push(cursor.value)
|
||||
cursor.continue()
|
||||
} else {
|
||||
resolve(results)
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
}, [queueOperation, storeName])
|
||||
|
||||
const getPage = useCallback((page = 1, pageSize = 10, indexName = null, query = null, direction = 'next') => {
|
||||
return queueOperation((db) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction(storeName, 'readonly')
|
||||
const store = transaction.objectStore(storeName)
|
||||
const target = indexName ? store.index(indexName) : store
|
||||
const request = target.openCursor(query, direction)
|
||||
const results = []
|
||||
let skipped = 0
|
||||
let hasMore = false
|
||||
|
||||
request.onerror = () => reject(new Error('Error getting page'))
|
||||
request.onsuccess = (event) => {
|
||||
const cursor = event.target.result
|
||||
if (cursor) {
|
||||
if (skipped < (page - 1) * pageSize) {
|
||||
skipped++
|
||||
cursor.continue()
|
||||
} else if (results.length < pageSize) {
|
||||
results.push(cursor.value)
|
||||
cursor.continue()
|
||||
} else {
|
||||
hasMore = true
|
||||
}
|
||||
}
|
||||
if (hasMore || !cursor) {
|
||||
const countRequest = target.count()
|
||||
countRequest.onsuccess = () => {
|
||||
resolve({
|
||||
data: results,
|
||||
total: countRequest.result,
|
||||
hasMore
|
||||
})
|
||||
}
|
||||
countRequest.onerror = () => reject(new Error('Error counting items'))
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
}, [queueOperation, storeName])
|
||||
|
||||
return { add, get, getAll, update, remove, clear, getByIndex, getAllByIndex, getPage, error, notSupported }
|
||||
}
|
||||
|
||||
export default useIndexedDB
|
|
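A hedged usage sketch of the `useIndexedDB` hook added above; the database, store, and index names here are illustrative, only the hook's signature (`dbName, storeName, version, indices`) and its return values (`add`, `getPage`, `error`, `notSupported`) come from the new file:

```js
import useIndexedDB from '@/components/use-indexeddb' // path assumed from the new file's location

// defined at module scope so the indices array stays stable across renders
const INDICES = [{ name: 'ts', keyPath: 'ts' }]

function useDrafts () {
  const { add, getPage, notSupported, error } = useIndexedDB('app:drafts', 'drafts', 1, INDICES)

  const saveDraft = async (text) => {
    if (notSupported || error) return // e.g. fall back to localStorage
    await add({ text, ts: Date.now() })
  }

  const latestDrafts = async () => {
    // newest first: page 1, 10 per page, iterating the 'ts' index in 'prev' direction
    const { data, hasMore } = await getPage(1, 10, 'ts', null, 'prev')
    return { drafts: data, hasMore }
  }

  return { saveDraft, latestDrafts }
}
```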
@ -85,7 +85,7 @@ export function usePaidMutation (mutation,
      // onCompleted is called before the invoice is paid for optimistic updates
      ourOnCompleted?.(data)
      // don't wait to pay the invoice
      waitForPayment(invoice, { persistOnNavigate }).then(() => {
      waitForPayment(invoice, { persistOnNavigate, waitFor }).then(() => {
        onPaid?.(client.cache, { data })
      }).catch(e => {
        console.error('usePaidMutation: failed to pay invoice', e)

@ -178,7 +178,8 @@ export const paidActionCacheMods = {
      id: `Invoice:${invoice.id}`,
      fields: {
        actionState: () => 'PAID',
        confirmedAt: () => new Date().toISOString()
        confirmedAt: () => new Date().toISOString(),
        satsReceived: () => invoice.satsRequested
      }
    })
  }
@ -1,487 +0,0 @@
|
|||
import { useCallback, useState, useEffect } from 'react'
|
||||
import { useMe } from '@/components/me'
|
||||
import { useMutation, useQuery } from '@apollo/client'
|
||||
import { GET_ENTRY, SET_ENTRY, UNSET_ENTRY, CLEAR_VAULT, SET_VAULT_KEY_HASH } from '@/fragments/vault'
|
||||
import { E_VAULT_KEY_EXISTS } from '@/lib/error'
|
||||
import { SSR } from '@/lib/constants'
|
||||
import { useToast } from '@/components/toast'
|
||||
|
||||
const USE_INDEXEDDB = true
|
||||
|
||||
export function useVaultConfigurator () {
|
||||
const { me } = useMe()
|
||||
const [setVaultKeyHash] = useMutation(SET_VAULT_KEY_HASH)
|
||||
const toaster = useToast()
|
||||
|
||||
// vault key stored locally
|
||||
const [vaultKey, innerSetVaultKey] = useState(null)
|
||||
|
||||
useEffect(() => {
|
||||
if (!me) return
|
||||
(async () => {
|
||||
let localVaultKey = await getLocalKey(me.id)
|
||||
|
||||
if (!me.privates.vaultKeyHash || localVaultKey?.hash !== me.privates.vaultKeyHash) {
|
||||
// We can tell that another device has reset the vault if the values
|
||||
// on the server are encrypted with a different key or no key exists anymore.
|
||||
// In that case, our local key is no longer valid and our device needs to be connected
|
||||
// to the vault again by entering the correct passphrase.
|
||||
console.log('vault key hash mismatch, clearing local key', localVaultKey, me.privates.vaultKeyHash)
|
||||
localVaultKey = null
|
||||
await unsetLocalKey(me.id)
|
||||
}
|
||||
|
||||
innerSetVaultKey(localVaultKey)
|
||||
})()
|
||||
}, [me?.privates?.vaultKeyHash])
|
||||
|
||||
// clear vault: remove everything and reset the key
|
||||
const [clearVault] = useMutation(CLEAR_VAULT, {
|
||||
onCompleted: async () => {
|
||||
await unsetLocalKey(me.id)
|
||||
innerSetVaultKey(null)
|
||||
}
|
||||
})
|
||||
|
||||
// initialize the vault and set a vault key
|
||||
const setVaultKey = useCallback(async (passphrase) => {
|
||||
const vaultKey = await deriveKey(me.id, passphrase)
|
||||
await setVaultKeyHash({
|
||||
variables: { hash: vaultKey.hash },
|
||||
onError: (error) => {
|
||||
const errorCode = error.graphQLErrors[0]?.extensions?.code
|
||||
if (errorCode === E_VAULT_KEY_EXISTS) {
|
||||
throw new Error('wrong passphrase')
|
||||
}
|
||||
toaster.danger(error.graphQLErrors[0].message)
|
||||
}
|
||||
})
|
||||
innerSetVaultKey(vaultKey)
|
||||
await setLocalKey(me.id, vaultKey)
|
||||
}, [setVaultKeyHash])
|
||||
|
||||
// disconnect the user from the vault (will not clear or reset the passphrase, use clearVault for that)
|
||||
const disconnectVault = useCallback(async () => {
|
||||
await unsetLocalKey(me.id)
|
||||
innerSetVaultKey(null)
|
||||
}, [innerSetVaultKey])
|
||||
|
||||
return [vaultKey, setVaultKey, clearVault, disconnectVault]
|
||||
}
|
||||
|
||||
export function useVaultMigration () {
|
||||
const { me } = useMe()
|
||||
const [setVaultEntry] = useMutation(SET_ENTRY)
|
||||
|
||||
// migrate local storage to vault
|
||||
const migrate = useCallback(async () => {
|
||||
const vaultKey = await getLocalKey(me.id)
|
||||
if (!vaultKey) throw new Error('vault key not found')
|
||||
|
||||
let migratedCount = 0
|
||||
|
||||
for (const migratableKey of retrieveMigratableKeys(me.id)) {
|
||||
try {
|
||||
const value = JSON.parse(window.localStorage.getItem(migratableKey.localStorageKey))
|
||||
if (!value) throw new Error('no value found in local storage')
|
||||
|
||||
const encrypted = await encryptJSON(vaultKey, value)
|
||||
|
||||
const { data } = await setVaultEntry({ variables: { key: migratableKey.vaultStorageKey, value: encrypted, skipIfSet: true } })
|
||||
if (data?.setVaultEntry) {
|
||||
window.localStorage.removeItem(migratableKey.localStorageKey)
|
||||
migratedCount++
|
||||
console.log('migrated to vault:', migratableKey)
|
||||
} else {
|
||||
throw new Error('could not set vault entry')
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('failed migrate to vault:', migratableKey, e)
|
||||
}
|
||||
}
|
||||
|
||||
return migratedCount
|
||||
}, [me?.id])
|
||||
|
||||
return migrate
|
||||
}
|
||||
|
||||
// used to get and set values in the vault
|
||||
export default function useVault (vaultStorageKey, defaultValue, options = { localOnly: false }) {
|
||||
const { me } = useMe()
|
||||
const localOnly = options.localOnly || !me
|
||||
|
||||
// This is the key that we will use in local storage whereas vaultStorageKey is the key that we
|
||||
// will use on the server ("the vault").
|
||||
const localStorageKey = getLocalStorageKey(vaultStorageKey, me?.id, localOnly)
|
||||
|
||||
const [setVaultValue] = useMutation(SET_ENTRY)
|
||||
const [value, innerSetValue] = useState(undefined)
|
||||
const [clearVaultValue] = useMutation(UNSET_ENTRY)
|
||||
const { data: vaultData, refetch: refetchVaultValue } = useQuery(GET_ENTRY, {
|
||||
variables: { key: vaultStorageKey },
|
||||
// fetchPolicy only applies to first execution on mount so we also need to
|
||||
// set nextFetchPolicy to make sure we don't serve stale values from cache
|
||||
nextFetchPolicy: 'no-cache',
|
||||
fetchPolicy: 'no-cache'
|
||||
})
|
||||
|
||||
useEffect(() => {
|
||||
(async () => {
|
||||
if (localOnly) {
|
||||
innerSetValue((await getLocalStorage(localStorageKey)) || defaultValue)
|
||||
return
|
||||
}
|
||||
|
||||
const localVaultKey = await getLocalKey(me?.id)
|
||||
|
||||
if (!me.privates.vaultKeyHash || localVaultKey?.hash !== me.privates.vaultKeyHash) {
|
||||
// no or different vault setup on server
|
||||
// use unencrypted local storage
|
||||
await unsetLocalKey(me.id)
|
||||
innerSetValue((await getLocalStorage(localStorageKey)) || defaultValue)
|
||||
return
|
||||
}
|
||||
|
||||
// if vault key hash is set on the server, vault entry exists and vault key is set on the device
|
||||
// decrypt and use the value from the server
|
||||
const encrypted = vaultData?.getVaultEntry?.value
|
||||
if (encrypted) {
|
||||
try {
|
||||
const decrypted = await decryptJSON(localVaultKey, encrypted)
|
||||
// console.log('decrypted value from vault:', storageKey, encrypted, decrypted)
|
||||
innerSetValue(decrypted)
|
||||
// remove local storage value if it exists
|
||||
await unsetLocalStorage(localStorageKey)
|
||||
return
|
||||
} catch (e) {
|
||||
console.error('cannot read vault data:', vaultStorageKey, e)
|
||||
}
|
||||
}
|
||||
|
||||
// fallback to local storage
|
||||
innerSetValue((await getLocalStorage(localStorageKey)) || defaultValue)
|
||||
})()
|
||||
}, [vaultData, me?.privates?.vaultKeyHash, localOnly])
|
||||
|
||||
const setValue = useCallback(async (newValue) => {
|
||||
const vaultKey = await getLocalKey(me?.id)
|
||||
|
||||
const useVault = vaultKey && vaultKey.hash === me.privates.vaultKeyHash
|
||||
|
||||
if (useVault && !localOnly) {
|
||||
const encryptedValue = await encryptJSON(vaultKey, newValue)
|
||||
await setVaultValue({ variables: { key: vaultStorageKey, value: encryptedValue } })
|
||||
console.log('stored encrypted value in vault:', vaultStorageKey, encryptedValue)
|
||||
// clear local storage (we get rid of stored unencrypted data as soon as it can be stored on the vault)
|
||||
await unsetLocalStorage(localStorageKey)
|
||||
} else {
|
||||
console.log('stored value in local storage:', localStorageKey, newValue)
|
||||
// otherwise use local storage
|
||||
await setLocalStorage(localStorageKey, newValue)
|
||||
}
|
||||
// refresh in-memory value
|
||||
innerSetValue(newValue)
|
||||
}, [me?.privates?.vaultKeyHash, localStorageKey, vaultStorageKey, localOnly])
|
||||
|
||||
const clearValue = useCallback(async ({ onlyFromLocalStorage }) => {
|
||||
// unset a value
|
||||
// clear server
|
||||
if (!localOnly && !onlyFromLocalStorage) {
|
||||
await clearVaultValue({ variables: { key: vaultStorageKey } })
|
||||
await refetchVaultValue()
|
||||
}
|
||||
// clear local storage
|
||||
await unsetLocalStorage(localStorageKey)
|
||||
// clear in-memory value
|
||||
innerSetValue(undefined)
|
||||
}, [vaultStorageKey, localStorageKey, localOnly])
|
||||
|
||||
return [value, setValue, clearValue, refetchVaultValue]
|
||||
}
|
||||
|
||||
function retrieveMigratableKeys (userId) {
|
||||
// get all the local storage keys that can be migrated
|
||||
const out = []
|
||||
|
||||
for (const key of Object.keys(window.localStorage)) {
|
||||
if (key.includes(':local-only:')) continue
|
||||
if (!key.endsWith(`:${userId}`)) continue
|
||||
|
||||
if (key.startsWith('vault:')) {
|
||||
out.push({
|
||||
vaultStorageKey: key.substring('vault:'.length, key.length - `:${userId}`.length),
|
||||
localStorageKey: key
|
||||
})
|
||||
}
|
||||
|
||||
// required for backwards compatibility with keys that were stored before we had the vault
|
||||
if (key.startsWith('wallet:')) {
|
||||
out.push({
|
||||
vaultStorageKey: key.substring(0, key.length - `:${userId}`.length),
|
||||
localStorageKey: key
|
||||
})
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
async function getLocalStorageBackend (useIndexedDb) {
|
||||
if (SSR) return null
|
||||
if (USE_INDEXEDDB && useIndexedDb && window.indexedDB && !window.snVaultIDB) {
|
||||
try {
|
||||
const storage = await new Promise((resolve, reject) => {
|
||||
const db = window.indexedDB.open('sn-vault', 1)
|
||||
db.onupgradeneeded = (event) => {
|
||||
const db = event.target.result
|
||||
db.createObjectStore('vault', { keyPath: 'key' })
|
||||
}
|
||||
db.onsuccess = () => {
|
||||
if (!db?.result?.transaction) reject(new Error('unsupported implementation'))
|
||||
else resolve(db.result)
|
||||
}
|
||||
db.onerror = reject
|
||||
})
|
||||
window.snVaultIDB = storage
|
||||
} catch (e) {
|
||||
console.error('could not use indexedDB:', e)
|
||||
}
|
||||
}
|
||||
|
||||
const isIDB = useIndexedDb && !!window.snVaultIDB
|
||||
|
||||
return {
|
||||
isIDB,
|
||||
set: async (key, value) => {
|
||||
if (isIDB) {
|
||||
const tx = window.snVaultIDB.transaction(['vault'], 'readwrite')
|
||||
const objectStore = tx.objectStore('vault')
|
||||
objectStore.add({ key, value })
|
||||
await new Promise((resolve, reject) => {
|
||||
tx.oncomplete = resolve
|
||||
tx.onerror = reject
|
||||
})
|
||||
} else {
|
||||
window.localStorage.setItem(key, JSON.stringify(value))
|
||||
}
|
||||
},
|
||||
get: async (key) => {
|
||||
if (isIDB) {
|
||||
const tx = window.snVaultIDB.transaction(['vault'], 'readonly')
|
||||
const objectStore = tx.objectStore('vault')
|
||||
const request = objectStore.get(key)
|
||||
return await new Promise((resolve, reject) => {
|
||||
request.onsuccess = () => resolve(request.result?.value)
|
||||
request.onerror = reject
|
||||
})
|
||||
} else {
|
||||
const v = window.localStorage.getItem(key)
|
||||
return v ? JSON.parse(v) : null
|
||||
}
|
||||
},
|
||||
clear: async (key) => {
|
||||
if (isIDB) {
|
||||
const tx = window.snVaultIDB.transaction(['vault'], 'readwrite')
|
||||
const objectStore = tx.objectStore('vault')
|
||||
objectStore.delete(key)
|
||||
await new Promise((resolve, reject) => {
|
||||
tx.oncomplete = resolve
|
||||
tx.onerror = reject
|
||||
})
|
||||
} else {
|
||||
window.localStorage.removeItem(key)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getLocalStorageKey (key, userId, localOnly) {
|
||||
if (!userId) userId = 'anon'
|
||||
// We prefix localStorageKey with 'vault:' so we know which
|
||||
// keys we need to migrate to the vault when device sync is enabled.
|
||||
let localStorageKey = `vault:${key}`
|
||||
|
||||
// wallets like WebLN don't make sense to share across devices since they rely on a browser extension.
|
||||
// We check for this ':local-only:' tag during migration to skip any keys that contain it.
|
||||
if (localOnly) {
|
||||
localStorageKey = `vault:local-only:${key}`
|
||||
}
|
||||
|
||||
// always scope to user to avoid messing with wallets of other users on same device that might exist
|
||||
return `${localStorageKey}:${userId}`
|
||||
}
|
||||
|
||||
async function setLocalKey (userId, localKey) {
|
||||
if (SSR) return
|
||||
if (!userId) userId = 'anon'
|
||||
const storage = await getLocalStorageBackend(true)
|
||||
const k = `vault-key:local-only:${userId}`
|
||||
const { key, hash } = localKey
|
||||
|
||||
const rawKey = await window.crypto.subtle.exportKey('raw', key)
|
||||
if (storage.isIDB) {
|
||||
let nonExtractableKey
|
||||
// if IDB, we ensure the key is non extractable
|
||||
if (localKey.extractable) {
|
||||
nonExtractableKey = await window.crypto.subtle.importKey(
|
||||
'raw',
|
||||
rawKey,
|
||||
{ name: 'AES-GCM' },
|
||||
false,
|
||||
['encrypt', 'decrypt']
|
||||
)
|
||||
} else {
|
||||
nonExtractableKey = localKey.key
|
||||
}
|
||||
// and we store it
|
||||
return await storage.set(k, { key: nonExtractableKey, hash, extractable: false })
|
||||
} else {
|
||||
// if non IDB we need to serialize the key to store it
|
||||
const keyHex = toHex(rawKey)
|
||||
return await storage.set(k, { key: keyHex, hash, extractable: true })
|
||||
}
|
||||
}
|
||||
|
||||
async function getLocalKey (userId) {
|
||||
if (SSR) return null
|
||||
if (!userId) userId = 'anon'
|
||||
const storage = await getLocalStorageBackend(true)
|
||||
const key = await storage.get(`vault-key:local-only:${userId}`)
|
||||
if (!key) return null
|
||||
if (!storage.isIDB) {
|
||||
// if non IDB we need to deserialize the key
|
||||
const rawKey = fromHex(key.key)
|
||||
const keyMaterial = await window.crypto.subtle.importKey(
|
||||
'raw',
|
||||
rawKey,
|
||||
{ name: 'AES-GCM' },
|
||||
false,
|
||||
['encrypt', 'decrypt']
|
||||
)
|
||||
key.key = keyMaterial
|
||||
key.extractable = true
|
||||
}
|
||||
return key
|
||||
}
|
||||
|
||||
export async function unsetLocalKey (userId) {
|
||||
if (SSR) return
|
||||
if (!userId) userId = 'anon'
|
||||
const storage = await getLocalStorageBackend(true)
|
||||
return await storage.clear(`vault-key:local-only:${userId}`)
|
||||
}
|
||||
|
||||
async function setLocalStorage (key, value) {
|
||||
if (SSR) return
|
||||
const storage = await getLocalStorageBackend(false)
|
||||
await storage.set(key, value)
|
||||
}
|
||||
|
||||
async function getLocalStorage (key) {
|
||||
if (SSR) return null
|
||||
const storage = await getLocalStorageBackend(false)
|
||||
let v = await storage.get(key)
|
||||
|
||||
// ensure backwards compatible with wallet keys that we used before we had the vault
|
||||
if (!v) {
|
||||
const oldKey = key.replace(/vault:(local-only:)?/, '')
|
||||
v = await storage.get(oldKey)
|
||||
}
|
||||
|
||||
return v
|
||||
}
|
||||
|
||||
async function unsetLocalStorage (key) {
|
||||
if (SSR) return
|
||||
const storage = await getLocalStorageBackend(false)
|
||||
await storage.clear(key)
|
||||
}
|
||||
|
||||
function toHex (buffer) {
|
||||
const byteArray = new Uint8Array(buffer)
|
||||
const hexString = Array.from(byteArray, byte => byte.toString(16).padStart(2, '0')).join('')
|
||||
return hexString
|
||||
}
|
||||
|
||||
function fromHex (hex) {
|
||||
const byteArray = new Uint8Array(hex.match(/.{1,2}/g).map(byte => parseInt(byte, 16)))
|
||||
return byteArray.buffer
|
||||
}
|
||||
|
||||
async function deriveKey (userId, passphrase) {
|
||||
const enc = new TextEncoder()
|
||||
const keyMaterial = await window.crypto.subtle.importKey(
|
||||
'raw',
|
||||
enc.encode(passphrase),
|
||||
{ name: 'PBKDF2' },
|
||||
false,
|
||||
['deriveKey']
|
||||
)
|
||||
const key = await window.crypto.subtle.deriveKey(
|
||||
{
|
||||
name: 'PBKDF2',
|
||||
salt: enc.encode(`stacker${userId}`),
|
||||
// 600,000 iterations is recommended by OWASP
|
||||
// see https://cheatsheetseries.owasp.org/cheatsheets/Password_Storage_Cheat_Sheet.html#pbkdf2
|
||||
iterations: 600_000,
|
||||
hash: 'SHA-256'
|
||||
},
|
||||
keyMaterial,
|
||||
{ name: 'AES-GCM', length: 256 },
|
||||
true,
|
||||
['encrypt', 'decrypt']
|
||||
)
|
||||
const rawKey = await window.crypto.subtle.exportKey('raw', key)
|
||||
const rawHash = await window.crypto.subtle.digest('SHA-256', rawKey)
|
||||
return {
|
||||
key,
|
||||
hash: toHex(rawHash),
|
||||
extractable: true
|
||||
}
|
||||
}
|
||||
|
||||
async function encryptJSON (localKey, jsonData) {
|
||||
const { key } = localKey
|
||||
|
||||
// random IVs are _really_ important in GCM: reusing the IV once can lead to catastrophic failure
|
||||
// see https://crypto.stackexchange.com/questions/26790/how-bad-it-is-using-the-same-iv-twice-with-aes-gcm
|
||||
const iv = window.crypto.getRandomValues(new Uint8Array(12))
|
||||
|
||||
const encoded = new TextEncoder().encode(JSON.stringify(jsonData))
|
||||
|
||||
const encrypted = await window.crypto.subtle.encrypt(
|
||||
{
|
||||
name: 'AES-GCM',
|
||||
iv
|
||||
},
|
||||
key,
|
||||
encoded
|
||||
)
|
||||
|
||||
return JSON.stringify({
|
||||
iv: toHex(iv.buffer),
|
||||
data: toHex(encrypted)
|
||||
})
|
||||
}
|
||||
|
||||
async function decryptJSON (localKey, encryptedData) {
|
||||
const { key } = localKey
|
||||
|
||||
let { iv, data } = JSON.parse(encryptedData)
|
||||
|
||||
iv = fromHex(iv)
|
||||
data = fromHex(data)
|
||||
|
||||
const decrypted = await window.crypto.subtle.decrypt(
|
||||
{
|
||||
name: 'AES-GCM',
|
||||
iv
|
||||
},
|
||||
key,
|
||||
data
|
||||
)
|
||||
|
||||
const decoded = new TextDecoder().decode(decrypted)
|
||||
|
||||
return JSON.parse(decoded)
|
||||
}
|
|
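The removed `use-vault` module above derives the vault key from a passphrase with PBKDF2 (600,000 iterations, SHA-256, a `stacker${userId}` salt) and encrypts entries with AES-GCM under a fresh 12-byte IV. A standalone sketch of that round trip, using only the WebCrypto calls that appear in the removed code; the function names are illustrative:

```js
// minimal sketch of the derive, encrypt, decrypt flow mirrored from the removed vault helpers
async function deriveVaultKey (userId, passphrase) {
  const enc = new TextEncoder()
  const keyMaterial = await window.crypto.subtle.importKey(
    'raw', enc.encode(passphrase), { name: 'PBKDF2' }, false, ['deriveKey'])
  return await window.crypto.subtle.deriveKey(
    { name: 'PBKDF2', salt: enc.encode(`stacker${userId}`), iterations: 600_000, hash: 'SHA-256' },
    keyMaterial, { name: 'AES-GCM', length: 256 }, true, ['encrypt', 'decrypt'])
}

async function roundTrip (key, value) {
  // a fresh random IV per encryption is essential for AES-GCM
  const iv = window.crypto.getRandomValues(new Uint8Array(12))
  const encoded = new TextEncoder().encode(JSON.stringify(value))
  const ciphertext = await window.crypto.subtle.encrypt({ name: 'AES-GCM', iv }, key, encoded)
  const plaintext = await window.crypto.subtle.decrypt({ name: 'AES-GCM', iv }, key, ciphertext)
  return JSON.parse(new TextDecoder().decode(plaintext))
}
```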
@ -1,18 +1,22 @@
|
|||
import LogMessage from './log-message'
|
||||
import { createContext, useCallback, useContext, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import styles from '@/styles/log.module.css'
|
||||
import { Button } from 'react-bootstrap'
|
||||
import { useToast } from './toast'
|
||||
import { useShowModal } from './modal'
|
||||
import { WALLET_LOGS } from '@/fragments/wallet'
|
||||
import { getWalletByType } from 'wallets'
|
||||
import { gql, useMutation, useQuery } from '@apollo/client'
|
||||
import { gql, useLazyQuery, useMutation } from '@apollo/client'
|
||||
import { useMe } from './me'
|
||||
import useIndexedDB from './use-indexeddb'
|
||||
import { SSR } from '@/lib/constants'
|
||||
|
||||
export function WalletLogs ({ wallet, embedded }) {
|
||||
const logs = useWalletLogs(wallet)
|
||||
const { logs, setLogs, hasMore, loadMore, loadLogs, loading } = useWalletLogs(wallet)
|
||||
useEffect(() => {
|
||||
loadLogs()
|
||||
}, [loadLogs])
|
||||
|
||||
const tableRef = useRef()
|
||||
const showModal = useShowModal()
|
||||
|
||||
return (
|
||||
|
@ -21,13 +25,12 @@ export function WalletLogs ({ wallet, embedded }) {
|
|||
<span
|
||||
style={{ cursor: 'pointer' }}
|
||||
className='text-muted fw-bold nav-link ms-auto' onClick={() => {
|
||||
showModal(onClose => <DeleteWalletLogsObstacle wallet={wallet} onClose={onClose} />)
|
||||
showModal(onClose => <DeleteWalletLogsObstacle wallet={wallet} setLogs={setLogs} onClose={onClose} />)
|
||||
}}
|
||||
>clear logs
|
||||
</span>
|
||||
</div>
|
||||
<div ref={tableRef} className={`${styles.logTable} ${embedded ? styles.embedded : ''}`}>
|
||||
{logs.length === 0 && <div className='w-100 text-center'>empty</div>}
|
||||
<div className={`${styles.logTable} ${embedded ? styles.embedded : ''}`}>
|
||||
<table>
|
||||
<tbody>
|
||||
{logs.map((log, i) => (
|
||||
|
@ -39,15 +42,20 @@ export function WalletLogs ({ wallet, embedded }) {
|
|||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
<div className='w-100 text-center'>------ start of logs ------</div>
|
||||
{loading
|
||||
? <div className='w-100 text-center'>loading...</div>
|
||||
: logs.length === 0 && <div className='w-100 text-center'>empty</div>}
|
||||
{hasMore
|
||||
? <Button onClick={loadMore} size='sm' className='mt-3'>Load More</Button>
|
||||
: <div className='w-100 text-center'>------ start of logs ------</div>}
|
||||
</div>
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
function DeleteWalletLogsObstacle ({ wallet, onClose }) {
|
||||
function DeleteWalletLogsObstacle ({ wallet, setLogs, onClose }) {
|
||||
const { deleteLogs } = useWalletLogger(wallet, setLogs)
|
||||
const toaster = useToast()
|
||||
const { deleteLogs } = useWalletLogger(wallet)
|
||||
|
||||
const prompt = `Do you really want to delete all ${wallet ? '' : 'wallet'} logs ${wallet ? 'of this wallet' : ''}?`
|
||||
return (
|
||||
|
@ -60,7 +68,7 @@ function DeleteWalletLogsObstacle ({ wallet, onClose }) {
|
|||
onClick={
|
||||
async () => {
|
||||
try {
|
||||
await deleteLogs()
|
||||
await deleteLogs(wallet)
|
||||
onClose()
|
||||
toaster.success('deleted wallet logs')
|
||||
} catch (err) {
|
||||
|
@ -76,72 +84,35 @@ function DeleteWalletLogsObstacle ({ wallet, onClose }) {
|
|||
)
|
||||
}
|
||||
|
||||
const WalletLoggerContext = createContext()
|
||||
const WalletLogsContext = createContext()
|
||||
const INDICES = [
|
||||
{ name: 'ts', keyPath: 'ts' },
|
||||
{ name: 'wallet_ts', keyPath: ['wallet', 'ts'] }
|
||||
]
|
||||
|
||||
const initIndexedDB = async (dbName, storeName) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!window.indexedDB) {
|
||||
return reject(new Error('IndexedDB not supported'))
|
||||
}
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API/Using_IndexedDB
|
||||
const request = window.indexedDB.open(dbName, 1)
|
||||
|
||||
let db
|
||||
request.onupgradeneeded = () => {
|
||||
// this only runs if version was changed during open
|
||||
db = request.result
|
||||
if (!db.objectStoreNames.contains(storeName)) {
|
||||
const objectStore = db.createObjectStore(storeName, { autoIncrement: true })
|
||||
objectStore.createIndex('ts', 'ts')
|
||||
objectStore.createIndex('wallet_ts', ['wallet', 'ts'])
|
||||
}
|
||||
}
|
||||
|
||||
request.onsuccess = () => {
|
||||
// this gets called after onupgradeneeded finished
|
||||
db = request.result
|
||||
resolve(db)
|
||||
}
|
||||
|
||||
request.onerror = () => {
|
||||
reject(new Error('failed to open IndexedDB'))
|
||||
}
|
||||
})
|
||||
function useWalletLogDB () {
|
||||
const { me } = useMe()
|
||||
const dbName = `app:storage${me ? `:${me.id}` : ''}`
|
||||
const idbStoreName = 'wallet_logs'
|
||||
const { add, getPage, clear, error, notSupported } = useIndexedDB(dbName, idbStoreName, 1, INDICES)
|
||||
return { add, getPage, clear, error, notSupported }
|
||||
}
|
||||
|
||||
export const WalletLoggerProvider = ({ children }) => {
|
||||
const { me } = useMe()
|
||||
const [logs, setLogs] = useState([])
|
||||
let dbName = 'app:storage'
|
||||
if (me) {
|
||||
dbName = `${dbName}:${me.id}`
|
||||
}
|
||||
const idbStoreName = 'wallet_logs'
|
||||
const idb = useRef()
|
||||
const logQueue = useRef([])
|
||||
export function useWalletLogger (wallet, setLogs) {
|
||||
const { add, clear, notSupported } = useWalletLogDB()
|
||||
|
||||
useQuery(WALLET_LOGS, {
|
||||
fetchPolicy: 'network-only',
|
||||
// required to trigger onCompleted on refetches
|
||||
notifyOnNetworkStatusChange: true,
|
||||
onCompleted: ({ walletLogs }) => {
|
||||
setLogs((prevLogs) => {
|
||||
const existingIds = prevLogs.map(({ id }) => id)
|
||||
const logs = walletLogs
|
||||
.filter(({ id }) => !existingIds.includes(id))
|
||||
.map(({ createdAt, wallet: walletType, ...log }) => {
|
||||
return {
|
||||
ts: +new Date(createdAt),
|
||||
wallet: tag(getWalletByType(walletType)),
|
||||
...log
|
||||
}
|
||||
})
|
||||
return [...prevLogs, ...logs].sort((a, b) => b.ts - a.ts)
|
||||
})
|
||||
const appendLog = useCallback(async (wallet, level, message) => {
|
||||
const log = { wallet: tag(wallet), level, message, ts: +new Date() }
|
||||
try {
|
||||
if (notSupported) {
|
||||
console.log('cannot persist wallet log: indexeddb not supported')
|
||||
} else {
|
||||
await add(log)
|
||||
}
|
||||
setLogs?.(prevLogs => [log, ...prevLogs])
|
||||
} catch (error) {
|
||||
console.error('Failed to append wallet log:', error)
|
||||
}
|
||||
})
|
||||
}, [add, notSupported])
|
||||
|
||||
const [deleteServerWalletLogs] = useMutation(
|
||||
gql`
|
||||
|
@ -151,97 +122,29 @@ export const WalletLoggerProvider = ({ children }) => {
|
|||
`,
|
||||
{
|
||||
onCompleted: (_, { variables: { wallet: walletType } }) => {
|
||||
setLogs((logs) => {
|
||||
return logs.filter(l => walletType ? l.wallet !== getWalletByType(walletType).name : false)
|
||||
})
|
||||
setLogs?.(logs => logs.filter(l => walletType ? l.wallet !== getWalletByType(walletType).name : false))
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
const saveLog = useCallback((log) => {
|
||||
if (!idb.current) {
|
||||
// IDB may not be ready yet
|
||||
return logQueue.current.push(log)
|
||||
}
|
||||
const tx = idb.current.transaction(idbStoreName, 'readwrite')
|
||||
const request = tx.objectStore(idbStoreName).add(log)
|
||||
request.onerror = () => console.error('failed to save log:', log)
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
initIndexedDB(dbName, idbStoreName)
|
||||
.then(db => {
|
||||
idb.current = db
|
||||
|
||||
// load all logs from IDB
|
||||
const tx = idb.current.transaction(idbStoreName, 'readonly')
|
||||
const store = tx.objectStore(idbStoreName)
|
||||
const index = store.index('ts')
|
||||
const request = index.getAll()
|
||||
request.onsuccess = () => {
|
||||
let logs = request.result
|
||||
setLogs((prevLogs) => {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
// in dev mode, useEffect runs twice, so we filter out duplicates here
|
||||
const existingIds = prevLogs.map(({ id }) => id)
|
||||
logs = logs.filter(({ id }) => !existingIds.includes(id))
|
||||
}
|
||||
// sort oldest first to keep same order as logs are appended
|
||||
return [...prevLogs, ...logs].sort((a, b) => b.ts - a.ts)
|
||||
})
|
||||
}
|
||||
|
||||
// flush queued logs to IDB
|
||||
logQueue.current.forEach(q => {
|
||||
const isLog = !!q.wallet
|
||||
if (isLog) saveLog(q)
|
||||
})
|
||||
|
||||
logQueue.current = []
|
||||
})
|
||||
.catch(console.error)
|
||||
return () => idb.current?.close()
|
||||
}, [])
|
||||
|
||||
const appendLog = useCallback((wallet, level, message) => {
|
||||
const log = { wallet: tag(wallet), level, message, ts: +new Date() }
|
||||
saveLog(log)
|
||||
setLogs((prevLogs) => [log, ...prevLogs])
|
||||
}, [saveLog])
|
||||
|
||||
const deleteLogs = useCallback(async (wallet, options) => {
|
||||
if ((!wallet || wallet.walletType) && !options?.clientOnly) {
|
||||
await deleteServerWalletLogs({ variables: { wallet: wallet?.walletType } })
|
||||
}
|
||||
if (!wallet || wallet.sendPayment) {
|
||||
const tx = idb.current.transaction(idbStoreName, 'readwrite')
|
||||
const objectStore = tx.objectStore(idbStoreName)
|
||||
const idx = objectStore.index('wallet_ts')
|
||||
const request = wallet ? idx.openCursor(window.IDBKeyRange.bound([tag(wallet), -Infinity], [tag(wallet), Infinity])) : idx.openCursor()
|
||||
request.onsuccess = function (event) {
|
||||
const cursor = event.target.result
|
||||
if (cursor) {
|
||||
cursor.delete()
|
||||
cursor.continue()
|
||||
try {
|
||||
const walletTag = wallet ? tag(wallet) : null
|
||||
if (notSupported) {
|
||||
console.log('cannot clear wallet logs: indexeddb not supported')
|
||||
} else {
|
||||
// finished
|
||||
setLogs((logs) => logs.filter(l => wallet ? l.wallet !== tag(wallet) : false))
|
||||
await clear('wallet_ts', walletTag ? window.IDBKeyRange.bound([walletTag, 0], [walletTag, Infinity]) : null)
|
||||
}
|
||||
setLogs?.(logs => logs.filter(l => wallet ? l.wallet !== tag(wallet) : false))
|
||||
} catch (e) {
|
||||
console.error('failed to delete logs', e)
|
||||
}
|
||||
}
|
||||
}, [me, setLogs])
|
||||
|
||||
return (
|
||||
<WalletLogsContext.Provider value={logs}>
|
||||
<WalletLoggerContext.Provider value={{ appendLog, deleteLogs }}>
|
||||
{children}
|
||||
</WalletLoggerContext.Provider>
|
||||
</WalletLogsContext.Provider>
|
||||
)
|
||||
}
|
||||
|
||||
export function useWalletLogger (wallet) {
|
||||
const { appendLog, deleteLogs: innerDeleteLogs } = useContext(WalletLoggerContext)
|
||||
}, [clear, deleteServerWalletLogs, setLogs, notSupported])
|
||||
|
||||
const log = useCallback(level => message => {
|
||||
if (!wallet) {
|
||||
|
@ -249,9 +152,6 @@ export function useWalletLogger (wallet) {
|
|||
return
|
||||
}
|
||||
|
||||
// TODO:
|
||||
// also send this to us if diagnostics was enabled,
|
||||
// very similar to how the service worker logger works.
|
||||
appendLog(wallet, level, message)
|
||||
console[level !== 'error' ? 'info' : 'error'](`[${tag(wallet)}]`, message)
|
||||
}, [appendLog, wallet])
|
||||
|
@ -262,10 +162,6 @@ export function useWalletLogger (wallet) {
|
|||
error: (...message) => log('error')(message.join(' '))
|
||||
}), [log, wallet?.name])
|
||||
|
||||
const deleteLogs = useCallback((options) => {
|
||||
return innerDeleteLogs(wallet, options)
|
||||
}, [innerDeleteLogs, wallet])
|
||||
|
||||
return { logger, deleteLogs }
|
||||
}
|
||||
|
||||
|
@ -273,7 +169,83 @@ function tag (wallet) {
|
|||
return wallet?.shortName || wallet?.name
|
||||
}
|
||||
|
||||
export function useWalletLogs (wallet) {
|
||||
const logs = useContext(WalletLogsContext)
|
||||
return logs.filter(l => !wallet || l.wallet === tag(wallet))
|
||||
export function useWalletLogs (wallet, initialPage = 1, logsPerPage = 10) {
|
||||
const [logs, setLogs] = useState([])
|
||||
const [page, setPage] = useState(initialPage)
|
||||
const [hasMore, setHasMore] = useState(true)
|
||||
const [total, setTotal] = useState(0)
|
||||
const [cursor, setCursor] = useState(null)
|
||||
const [loading, setLoading] = useState(true)
|
||||
|
||||
const { getPage, error, notSupported } = useWalletLogDB()
|
||||
const [getWalletLogs] = useLazyQuery(WALLET_LOGS, SSR ? {} : { fetchPolicy: 'cache-and-network' })
|
||||
|
||||
const loadLogsPage = useCallback(async (page, pageSize, wallet) => {
|
||||
try {
|
||||
let result = { data: [], hasMore: false }
|
||||
if (notSupported) {
|
||||
console.log('cannot get client wallet logs: indexeddb not supported')
|
||||
} else {
|
||||
const indexName = wallet ? 'wallet_ts' : 'ts'
|
||||
const query = wallet ? window.IDBKeyRange.bound([tag(wallet), -Infinity], [tag(wallet), Infinity]) : null
|
||||
|
||||
result = await getPage(page, pageSize, indexName, query, 'prev')
|
||||
// no walletType means we're using the local IDB
|
||||
if (wallet && !wallet.walletType) {
|
||||
return result
|
||||
}
|
||||
}
|
||||
const { data } = await getWalletLogs({
|
||||
variables: {
|
||||
type: wallet?.walletType,
|
||||
// if it client logs has more, page based on it's range
|
||||
from: result?.data[result.data.length - 1]?.ts && result.hasMore ? String(result.data[result.data.length - 1].ts) : null,
|
||||
// if we have a cursor (this isn't the first page), page based on it's range
|
||||
to: result?.data[0]?.ts && cursor ? String(result.data[0].ts) : null,
|
||||
cursor
|
||||
}
|
||||
})
|
||||
|
||||
const newLogs = data.walletLogs.entries.map(({ createdAt, wallet: walletType, ...log }) => ({
|
||||
ts: +new Date(createdAt),
|
||||
wallet: tag(getWalletByType(walletType)),
|
||||
...log
|
||||
}))
|
||||
const combinedLogs = Array.from(new Set([...result.data, ...newLogs].map(JSON.stringify))).map(JSON.parse).sort((a, b) => b.ts - a.ts)
|
||||
|
||||
setCursor(data.walletLogs.cursor)
|
||||
return { ...result, data: combinedLogs, hasMore: result.hasMore || !!data.walletLogs.cursor }
|
||||
} catch (error) {
|
||||
console.error('Error loading logs from IndexedDB:', error)
|
||||
return { data: [], total: 0, hasMore: false }
|
||||
}
|
||||
}, [getPage, setCursor, cursor, notSupported])
|
||||
|
||||
if (error) {
|
||||
console.error('IndexedDB error:', error)
|
||||
}
|
||||
|
||||
const loadMore = useCallback(async () => {
|
||||
if (hasMore) {
|
||||
setLoading(true)
|
||||
const result = await loadLogsPage(page, logsPerPage, wallet)
|
||||
setLogs(prevLogs => [...prevLogs, ...result.data])
|
||||
setHasMore(result.hasMore)
|
||||
setTotal(result.total)
|
||||
setPage(prevPage => prevPage + 1)
|
||||
setLoading(false)
|
||||
}
|
||||
}, [loadLogsPage, page, logsPerPage, wallet, hasMore])
|
||||
|
||||
const loadLogs = useCallback(async () => {
|
||||
setLoading(true)
|
||||
const result = await loadLogsPage(1, logsPerPage, wallet)
|
||||
setLogs(result.data)
|
||||
setHasMore(result.hasMore)
|
||||
setTotal(result.total)
|
||||
setPage(1)
|
||||
setLoading(false)
|
||||
}, [wallet, loadLogsPage])
|
||||
|
||||
return { logs, hasMore, total, loadMore, loadLogs, setLogs, loading }
|
||||
}
|
||||
|
|
|
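A hedged sketch of consuming the paginated `useWalletLogs` hook above; the component and markup are illustrative, only the hook's arguments and return shape (`logs`, `hasMore`, `loadMore`, `loadLogs`, `loading`) come from the diff:

```jsx
import { useEffect } from 'react'
import { useWalletLogs } from '@/components/wallet-logger' // path assumed

function WalletLogPreview ({ wallet }) {
  const { logs, hasMore, loadMore, loadLogs, loading } = useWalletLogs(wallet, 1, 10)

  // load the first page on mount, as WalletLogs does above
  useEffect(() => { loadLogs() }, [loadLogs])

  if (loading) return <div>loading...</div>
  return (
    <>
      <ul>
        {logs.map((log, i) => <li key={i}>[{log.wallet}] {log.level}: {log.message}</li>)}
      </ul>
      {hasMore && <button onClick={loadMore}>load more</button>}
    </>
  )
}
```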
@ -518,8 +518,7 @@ services:
      CONNECT: "localhost:8025"
    cpu_shares: "${CPU_SHARES_LOW}"
  nwc_send:
    build:
      context: ./docker/nwc
    image: ghcr.io/benthecarman/nostr-wallet-connect-lnd:master
    container_name: nwc_send
    profiles:
      - wallets

@ -529,17 +528,18 @@ services:
        condition: service_healthy
        restart: true
    volumes:
      - ./docker/lnd/stacker:/root/.lnd
      - ./docker/lnd/stacker:/app/.lnd
      - nwc_send:/app
    environment:
      - RUST_LOG=info
    entrypoint:
      - 'nostr-wallet-connect-lnd'
      - './nostr-wallet-connect-lnd'
      - '--relay'
      - 'wss://relay.primal.net'
      - '--admin-macaroon-file'
      - '/root/.lnd/regtest/admin.macaroon'
      - '--macaroon-file'
      - '/app/.lnd/regtest/admin.macaroon'
      - '--cert-file'
      - '/root/.lnd/tls.cert'
      - '/app/.lnd/tls.cert'
      - '--lnd-host'
      - 'stacker_lnd'
      - '--lnd-port'

@ -549,11 +549,10 @@ services:
      - '--daily-limit'
      - '0'
      - '--keys-file'
      - 'admin-keys.json'
      - 'keys-file.json'
    cpu_shares: "${CPU_SHARES_LOW}"
  nwc_recv:
    build:
      context: ./docker/nwc
    image: ghcr.io/benthecarman/nostr-wallet-connect-lnd:master
    container_name: nwc_recv
    profiles:
      - wallets

@ -563,17 +562,18 @@ services:
        condition: service_healthy
        restart: true
    volumes:
      - ./docker/lnd/stacker:/root/.lnd
      - ./docker/lnd/stacker:/app/.lnd
      - nwc_recv:/app
    environment:
      - RUST_LOG=info
    entrypoint:
      - 'nostr-wallet-connect-lnd'
      - './nostr-wallet-connect-lnd'
      - '--relay'
      - 'wss://relay.primal.net'
      - '--invoice-macaroon-file'
      - '/root/.lnd/regtest/invoice.macaroon'
      - '/app/.lnd/regtest/invoice.macaroon'
      - '--cert-file'
      - '/root/.lnd/tls.cert'
      - '/app/.lnd/tls.cert'
      - '--lnd-host'
      - 'stacker_lnd'
      - '--lnd-port'

@ -583,7 +583,7 @@ services:
      - '--daily-limit'
      - '0'
      - '--keys-file'
      - 'invoice-keys.json'
      - 'keys-file.json'
    cpu_shares: "${CPU_SHARES_LOW}"
  lnbits:
    image: lnbits/lnbits:0.12.5

@ -614,3 +614,5 @@ volumes:
  stacker_lnd:
  stacker_cln:
  s3:
  nwc_send:
  nwc_recv:
@@ -1,6 +1,6 @@
# https://github.com/JonasProgrammer/docker-postgres-ip4r
FROM postgres:13.2
FROM postgres:16.3

RUN apt-get update \
  && apt-get install --no-install-recommends -y postgresql-13-ip4r \
  && apt-get install --no-install-recommends -y postgresql-16-ip4r \
  && rm -rf /var/lib/apt/lists/*
@@ -1,16 +0,0 @@
FROM rust:1.78

ARG KEY_FILE

RUN wget https://github.com/ekzyis/nostr-wallet-connect-lnd/archive/a02939c350191f8a6750a72d2456fbdf567e5848.zip \
  && unzip a02939c350191f8a6750a72d2456fbdf567e5848.zip

WORKDIR nostr-wallet-connect-lnd-a02939c350191f8a6750a72d2456fbdf567e5848

RUN apt-get update -y \
  && apt-get install -y cmake \
  && apt-get clean \
  && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
RUN cargo build --release && cargo install --path .

COPY . .
@@ -1,5 +0,0 @@
{
  "server_key": "ea7b559d5b49e6d4a22f57cc84a15fd3b87742ff91a85bb871242e09e6d0b0d7",
  "user_key": "c8f7fcb4707863ba1cc1b32c8871585ddb1eb7a555925cd2818a6caf4a21fb90",
  "sent_info": true
}
@@ -1,5 +0,0 @@
{
  "server_key": "86e7b8a53c22677066d882618f28f8e1f39e4676114c0ae019e9d86518177e49",
  "user_key": "87e73293804edb089e0be8bf01ab2f6f219591f91998479851a7a2d1daf1a617",
  "sent_info": true
}
@@ -205,6 +205,16 @@ export const POLL_VOTE = gql`
    }
  }`

export const UPSERT_BIO = gql`
  ${ITEM_PAID_ACTION_FIELDS}
  ${PAID_ACTION}
  mutation upsertBio($text: String!) {
    upsertBio(text: $text) {
      ...ItemPaidActionFields
      ...PaidActionFields
    }
  }`

export const CREATE_COMMENT = gql`
  ${ITEM_PAID_ACTION_FIELDS}
  ${PAID_ACTION}
@@ -55,7 +55,6 @@ export const ME = gql`
      autoWithdrawMaxFeePercent
      autoWithdrawThreshold
      disableFreebies
      vaultKeyHash
    }
    optional {
      isContributor
@@ -391,9 +390,3 @@ export const USER_STATS = gql`
      }
    }
  }`

export const SET_VAULT_KEY_HASH = gql`
  mutation setVaultKeyHash($hash: String!) {
    setVaultKeyHash(hash: $hash)
  }
`
@@ -1,33 +0,0 @@
import { gql } from '@apollo/client'

export const GET_ENTRY = gql`
  query GetVaultEntry($key: String!) {
    getVaultEntry(key: $key) {
      value
    }
  }
`

export const SET_ENTRY = gql`
  mutation SetVaultEntry($key: String!, $value: String!, $skipIfSet: Boolean) {
    setVaultEntry(key: $key, value: $value, skipIfSet: $skipIfSet)
  }
`

export const UNSET_ENTRY = gql`
  mutation UnsetVaultEntry($key: String!) {
    unsetVaultEntry(key: $key)
  }
`

export const CLEAR_VAULT = gql`
  mutation ClearVault {
    clearVault
  }
`

export const SET_VAULT_KEY_HASH = gql`
  mutation SetVaultKeyHash($hash: String!) {
    setVaultKeyHash(hash: $hash)
  }
`
@@ -16,10 +16,10 @@ export const INVOICE_FIELDS = gql`
    isHeld
    comment
    lud18Data
    confirmedPreimage
    actionState
    actionType
    actionError
    confirmedPreimage
  }`

export const INVOICE_FULL = gql`
@@ -197,13 +197,16 @@ export const WALLETS = gql`
`

export const WALLET_LOGS = gql`
  query WalletLogs {
    walletLogs {
      id
      createdAt
      wallet
      level
      message
  query WalletLogs($type: String, $from: String, $to: String, $cursor: String) {
    walletLogs(type: $type, from: $from, to: $to, cursor: $cursor) {
      cursor
      entries {
        id
        createdAt
        wallet
        level
        message
      }
    }
  }
`
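The reworked `WALLET_LOGS` query is now cursor-paginated. Below is a sketch of walking every page imperatively with Apollo Client, assuming the server returns an empty cursor once the last page is reached (which matches how the client hook earlier in this diff treats `cursor`); the helper name and arguments are illustrative.

```js
import { WALLET_LOGS } from '@/fragments/wallet'

// hypothetical helper: collect all wallet log entries for a given type/date range
export async function fetchAllWalletLogs (client, { type, from, to } = {}) {
  const entries = []
  let cursor = null
  do {
    const { data } = await client.query({
      query: WALLET_LOGS,
      variables: { type, from, to, cursor },
      fetchPolicy: 'no-cache' // don't let the normalized cache merge pages for us
    })
    entries.push(...data.walletLogs.entries)
    cursor = data.walletLogs.cursor // falsy once there are no more pages
  } while (cursor)
  return entries
}
```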
@@ -300,14 +300,12 @@ function getClient (uri) {
        initialFetchPolicy: defaultFetchPolicy,
        fetchPolicy: defaultFetchPolicy,
        nextFetchPolicy: defaultNextFetchPolicy,
        canonizeResults: true,
        ssr: SSR
      },
      query: {
        initialFetchPolicy: defaultFetchPolicy,
        fetchPolicy: defaultFetchPolicy,
        nextFetchPolicy: defaultNextFetchPolicy,
        canonizeResults: true,
        ssr: SSR
      }
    }
@@ -3,7 +3,6 @@ import { GraphQLError } from 'graphql'
export const E_FORBIDDEN = 'E_FORBIDDEN'
export const E_UNAUTHENTICATED = 'E_UNAUTHENTICATED'
export const E_BAD_INPUT = 'E_BAD_INPUT'
export const E_VAULT_KEY_EXISTS = 'E_VAULT_KEY_EXISTS'

export class GqlAuthorizationError extends GraphQLError {
  constructor (message) {
@@ -18,7 +17,7 @@ export class GqlAuthenticationError extends GraphQLError {
}

export class GqlInputError extends GraphQLError {
  constructor (message, code) {
    super(message, { extensions: { code: code || E_BAD_INPUT } })
  constructor (message) {
    super(message, { extensions: { code: E_BAD_INPUT } })
  }
}
@ -0,0 +1,180 @@
|
|||
export async function fetchWithTimeout (resource, { timeout = 1000, ...options } = {}) {
|
||||
const controller = new AbortController()
|
||||
const id = setTimeout(() => controller.abort(), timeout)
|
||||
|
||||
const response = await fetch(resource, {
|
||||
...options,
|
||||
signal: controller.signal
|
||||
})
|
||||
clearTimeout(id)
|
||||
|
||||
return response
|
||||
}
|
||||
|
||||
class LRUCache {
|
||||
constructor (maxSize = 100) {
|
||||
this.maxSize = maxSize
|
||||
this.cache = new Map()
|
||||
}
|
||||
|
||||
get (key) {
|
||||
if (!this.cache.has(key)) return undefined
|
||||
const value = this.cache.get(key)
|
||||
// refresh the entry
|
||||
this.cache.delete(key)
|
||||
this.cache.set(key, value)
|
||||
return value
|
||||
}
|
||||
|
||||
set (key, value) {
|
||||
if (this.cache.has(key)) this.cache.delete(key)
|
||||
else if (this.cache.size >= this.maxSize) {
|
||||
// Remove the least recently used item
|
||||
this.cache.delete(this.cache.keys().next().value)
|
||||
}
|
||||
this.cache.set(key, value)
|
||||
}
|
||||
}
|
||||
|
||||
function createDebugLogger (name, cache, debug) {
|
||||
const noop = () => {}
|
||||
|
||||
if (!debug) {
|
||||
return {
|
||||
log: noop,
|
||||
errorLog: noop,
|
||||
startPeriodicLogging: noop,
|
||||
stopPeriodicLogging: noop,
|
||||
incrementTotalFetches: noop,
|
||||
incrementCacheHits: noop,
|
||||
incrementCacheMisses: noop,
|
||||
incrementBackgroundRefreshes: noop
|
||||
}
|
||||
}
|
||||
|
||||
let totalFetches = 0
|
||||
let cacheMisses = 0
|
||||
let cacheHits = 0
|
||||
let backgroundRefreshes = 0
|
||||
let intervalId = null
|
||||
|
||||
const log = (message) => console.log(`[CACHE:${name}] ${message}`)
|
||||
const errorLog = (message, error) => console.error(`[CACHE:${name}] ${message}`, error)
|
||||
|
||||
function estimateCacheSize () {
|
||||
let size = 0
|
||||
for (const [key, value] of cache.cache) {
|
||||
size += key.length * 2
|
||||
size += JSON.stringify(value).length * 2
|
||||
}
|
||||
return size
|
||||
}
|
||||
|
||||
function startPeriodicLogging () {
|
||||
if (intervalId) return // Prevent multiple intervals
|
||||
intervalId = setInterval(() => {
|
||||
const cacheSize = cache.cache.size
|
||||
const memorySizeBytes = estimateCacheSize()
|
||||
log(`Stats: total=${totalFetches}, hits=${cacheHits}, misses=${cacheMisses}, backgroundRefreshes=${backgroundRefreshes}, cacheSize=${cacheSize}, memoryFootprint=${memorySizeBytes} bytes`)
|
||||
}, 60000)
|
||||
}
|
||||
|
||||
function stopPeriodicLogging () {
|
||||
if (intervalId) {
|
||||
clearInterval(intervalId)
|
||||
intervalId = null
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
log,
|
||||
errorLog,
|
||||
startPeriodicLogging,
|
||||
stopPeriodicLogging,
|
||||
incrementTotalFetches: () => totalFetches++,
|
||||
incrementCacheHits: () => cacheHits++,
|
||||
incrementCacheMisses: () => cacheMisses++,
|
||||
incrementBackgroundRefreshes: () => backgroundRefreshes++
|
||||
}
|
||||
}
|
||||
|
||||
export function cachedFetcher (fetcher, { maxSize = 100, cacheExpiry, forceRefreshThreshold, keyGenerator, debug = false }) {
|
||||
const cache = new LRUCache(maxSize)
|
||||
const name = fetcher.name || fetcher.toString().slice(0, 20).replace(/\s+/g, '_')
|
||||
const logger = createDebugLogger(name, cache, debug)
|
||||
|
||||
logger.log(`initializing with maxSize=${maxSize}, cacheExpiry=${cacheExpiry}, forceRefreshThreshold=${forceRefreshThreshold}`)
|
||||
logger.startPeriodicLogging()
|
||||
|
||||
if (!keyGenerator) {
|
||||
throw new Error('keyGenerator is required')
|
||||
}
|
||||
|
||||
const cachedFetch = async function (...args) {
|
||||
const key = keyGenerator(...args)
|
||||
const now = Date.now()
|
||||
logger.incrementTotalFetches()
|
||||
|
||||
async function fetchAndCache () {
|
||||
logger.log(`Fetching data for key: ${key}`)
|
||||
const result = await fetcher(...args)
|
||||
cache.set(key, { data: result, createdAt: now })
|
||||
logger.log(`Data fetched and cached for key: ${key}`)
|
||||
return result
|
||||
}
|
||||
|
||||
const cached = cache.get(key)
|
||||
|
||||
if (cached) {
|
||||
const age = now - cached.createdAt
|
||||
|
||||
if (cacheExpiry === 0 || age < cacheExpiry) {
|
||||
logger.incrementCacheHits()
|
||||
logger.log(`Cache hit for key: ${key}, age: ${age}ms`)
|
||||
return cached.data
|
||||
} else if (forceRefreshThreshold === 0 || age < forceRefreshThreshold) {
|
||||
if (cached.pendingPromise) {
|
||||
logger.log(`Already background refreshing key: ${key}`)
|
||||
return cached.data
|
||||
}
|
||||
|
||||
logger.incrementBackgroundRefreshes()
|
||||
logger.log(`Background refresh for key: ${key}, age: ${age}ms`)
|
||||
cached.pendingPromise = fetchAndCache().catch(error => {
|
||||
logger.errorLog(`Background refresh failed for key: ${key}`, error)
|
||||
return cached.data
|
||||
}).finally(() => {
|
||||
logger.log(`Background refresh completed for key: ${key}`)
|
||||
delete cached.pendingPromise
|
||||
})
|
||||
return cached.data
|
||||
}
|
||||
|
||||
if (cached.pendingPromise) {
|
||||
logger.log(`Waiting for pending force refresh for key: ${key}`)
|
||||
return await cached.pendingPromise
|
||||
}
|
||||
}
|
||||
|
||||
logger.incrementCacheMisses()
|
||||
logger.log(`Cache miss for key: ${key}`)
|
||||
const entry = { createdAt: now, pendingPromise: fetchAndCache() }
|
||||
cache.set(key, entry)
|
||||
try {
|
||||
entry.data = await entry.pendingPromise
|
||||
return entry.data
|
||||
} catch (error) {
|
||||
logger.errorLog(`Error fetching data for key: ${key}`, error)
|
||||
cache.delete(key)
|
||||
throw error
|
||||
} finally {
|
||||
logger.log(`Fetch completed for key: ${key}`)
|
||||
delete entry.pendingPromise
|
||||
}
|
||||
}
|
||||
|
||||
// Attach the stopPeriodicLogging method to the returned function
|
||||
cachedFetch.stopPeriodicLogging = logger.stopPeriodicLogging
|
||||
|
||||
return cachedFetch
|
||||
}
|
|
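The new `lib/fetch.js` above adds `fetchWithTimeout` and a `cachedFetcher` factory built on a small LRU cache. As a usage sketch (the price fetcher, URL, and cache timings are made up; only the option names and behavior come from the diff):

```js
import { cachedFetcher } from '@/lib/fetch'

// hypothetical fetcher: any async function works, and its arguments are
// mapped to a cache key by the required keyGenerator option
async function fetchPrice (fiat) {
  const res = await fetch(`https://example.com/price/${fiat}`)
  return (await res.json()).amount
}

const getCachedPrice = cachedFetcher(fetchPrice, {
  maxSize: 10, // LRU evicts the least recently used key beyond 10 entries
  cacheExpiry: 60 * 1000, // entries younger than this are served from cache
  forceRefreshThreshold: 5 * 60 * 1000, // entries older than this block on a fresh fetch
  keyGenerator: fiat => `price:${fiat}`,
  debug: false
})

// first call misses and fetches; calls within a minute hit the cache; calls
// between 1 and 5 minutes return the stale value while refreshing in the background
getCachedPrice('USD').then(console.log)
```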
@@ -800,7 +800,7 @@ export const phoenixdSchema = object().shape({
}, ['primaryPassword', 'secondaryPassword'])

export const bioSchema = object({
  bio: string().required('required').trim()
  text: string().required('required').trim()
})

export const inviteSchema = object({
@@ -840,23 +840,3 @@ export const toNumber = (x, min = Number.MIN_SAFE_INTEGER, max = Number.MAX_SAFE
}

export const toPositiveNumber = (x) => toNumber(x, 0)

export const deviceSyncSchema = object().shape({
  passphrase: string().required('required')
    .test(async (value, context) => {
      const words = value ? value.trim().split(/[\s]+/) : []
      for (const w of words) {
        try {
          await string().oneOf(bip39Words).validate(w)
        } catch {
          return context.createError({ message: `'${w}' is not a valid pairing phrase word` })
        }
      }

      if (words.length < 12) {
        return context.createError({ message: 'needs at least 12 words' })
      }

      return true
    })
})
File diff suppressed because it is too large

package.json
@ -6,16 +6,16 @@
|
|||
"dev": "NODE_OPTIONS='--trace-warnings' next dev",
|
||||
"build": "next build",
|
||||
"migrate": "prisma migrate deploy",
|
||||
"start": "NODE_OPTIONS='--trace-warnings' next start -p $PORT --keepAliveTimeout 120000",
|
||||
"start": "NODE_OPTIONS='--trace-warnings --max-old-space-size=4096' next start -p $PORT --keepAliveTimeout 120000",
|
||||
"lint": "standard",
|
||||
"test": "NODE_OPTIONS='--experimental-vm-modules' jest",
|
||||
"worker": "tsx --tsconfig jsconfig.json --trace-warnings worker/index.js",
|
||||
"worker:dev": "tsx --tsconfig jsconfig.json --trace-warnings --watch worker/index.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/client": "^3.9.7",
|
||||
"@apollo/server": "^4.9.4",
|
||||
"@as-integrations/next": "^2.0.2",
|
||||
"@apollo/client": "^3.11.8",
|
||||
"@apollo/server": "^4.11.0",
|
||||
"@as-integrations/next": "^3.1.0",
|
||||
"@auth/prisma-adapter": "^1.0.3",
|
||||
"@graphql-tools/schema": "^10.0.0",
|
||||
"@lightninglabs/lnc-web": "^0.3.2-alpha",
|
||||
|
@ -43,7 +43,7 @@
|
|||
"formik": "^2.4.5",
|
||||
"github-slugger": "^2.0.0",
|
||||
"google-protobuf": "^3.21.2",
|
||||
"graphql": "^16.8.1",
|
||||
"graphql": "^16.9.0",
|
||||
"graphql-scalar": "^0.1.0",
|
||||
"graphql-tag": "^2.12.6",
|
||||
"graphql-type-json": "^0.3.2",
|
||||
|
@ -56,9 +56,9 @@
|
|||
"mdast-util-gfm": "^3.0.0",
|
||||
"mdast-util-to-string": "^4.0.0",
|
||||
"micromark-extension-gfm": "^3.0.0",
|
||||
"next": "^13.5.4",
|
||||
"next": "^13.5.7",
|
||||
"next-auth": "^4.23.2",
|
||||
"next-plausible": "^3.11.1",
|
||||
"next-plausible": "^3.12.2",
|
||||
"next-seo": "^6.1.0",
|
||||
"node-s3-url-encode": "^0.0.4",
|
||||
"nodemailer": "^6.9.6",
|
||||
|
@ -85,7 +85,9 @@
|
|||
"react-twitter-embed": "^4.0.4",
|
||||
"react-youtube": "^10.1.0",
|
||||
"recharts": "^2.9.0",
|
||||
"rehype-mathjax": "^6.0.0",
|
||||
"remark-gfm": "^4.0.0",
|
||||
"remark-math": "^6.0.0",
|
||||
"remove-markdown": "^0.5.0",
|
||||
"sass": "^1.69.3",
|
||||
"serviceworker-storage": "^0.1.0",
|
||||
|
@ -108,7 +110,7 @@
|
|||
"yup": "^1.3.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": "18.17.0"
|
||||
"node": "18.20.4"
|
||||
},
|
||||
"standard": {
|
||||
"plugins": [
|
||||
|
@ -129,4 +131,4 @@
|
|||
"@/(.*)": "<rootDir>/$1"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import Layout from '@/components/layout'
|
||||
import { gql, useMutation, useQuery } from '@apollo/client'
|
||||
import { useQuery } from '@apollo/client'
|
||||
import UserHeader from '@/components/user-header'
|
||||
import Button from 'react-bootstrap/Button'
|
||||
import styles from '@/styles/user.module.css'
|
||||
|
@ -8,62 +8,56 @@ import ItemFull from '@/components/item-full'
|
|||
import { Form, MarkdownInput } from '@/components/form'
|
||||
import { useMe } from '@/components/me'
|
||||
import { USER_FULL } from '@/fragments/users'
|
||||
import { ITEM_FIELDS } from '@/fragments/items'
|
||||
import { getGetServerSideProps } from '@/api/ssrApollo'
|
||||
import { FeeButtonProvider } from '@/components/fee-button'
|
||||
import { bioSchema } from '@/lib/validate'
|
||||
import { useRouter } from 'next/router'
|
||||
import PageLoading from '@/components/page-loading'
|
||||
import { ItemButtonBar } from '@/components/post'
|
||||
import useItemSubmit from '@/components/use-item-submit'
|
||||
import { UPSERT_BIO } from '@/fragments/paidAction'
|
||||
|
||||
export const getServerSideProps = getGetServerSideProps({
|
||||
query: USER_FULL,
|
||||
notFound: data => !data.user
|
||||
})
|
||||
|
||||
export function BioForm ({ handleDone, bio }) {
|
||||
const [upsertBio] = useMutation(
|
||||
gql`
|
||||
${ITEM_FIELDS}
|
||||
mutation upsertBio($bio: String!) {
|
||||
upsertBio(bio: $bio) {
|
||||
id
|
||||
bio {
|
||||
...ItemFields
|
||||
text
|
||||
}
|
||||
}
|
||||
}`, {
|
||||
update (cache, { data: { upsertBio } }) {
|
||||
export function BioForm ({ handleDone, bio, me }) {
|
||||
const onSubmit = useItemSubmit(UPSERT_BIO, {
|
||||
navigateOnSubmit: false,
|
||||
paidMutationOptions: {
|
||||
update (cache, { data: { upsertBio: { result, invoice } } }) {
|
||||
if (!result) return
|
||||
|
||||
cache.modify({
|
||||
id: `User:${upsertBio.id}`,
|
||||
id: `User:${me.id}`,
|
||||
fields: {
|
||||
bio () {
|
||||
return upsertBio.bio
|
||||
return result.text
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
},
|
||||
onSuccessfulSubmit: (data, { resetForm }) => {
|
||||
handleDone?.()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return (
|
||||
<div className={styles.createFormContainer}>
|
||||
<FeeButtonProvider>
|
||||
<Form
|
||||
initial={{
|
||||
bio: bio?.text || ''
|
||||
text: bio?.text || ''
|
||||
}}
|
||||
schema={bioSchema}
|
||||
onSubmit={async values => {
|
||||
const { error } = await upsertBio({ variables: values })
|
||||
if (error) throw error
|
||||
handleDone?.()
|
||||
}}
|
||||
onSubmit={onSubmit}
|
||||
storageKeyPrefix={`bio-${me.id}`}
|
||||
>
|
||||
<MarkdownInput
|
||||
topLevel
|
||||
name='bio'
|
||||
name='text'
|
||||
minRows={6}
|
||||
/>
|
||||
<ItemButtonBar createText='save' onCancel={handleDone} />
|
||||
|
@ -100,14 +94,14 @@ export default function User ({ ssrData }) {
|
|||
? (edit
|
||||
? (
|
||||
<div className={styles.create}>
|
||||
<BioForm bio={user.bio} handleDone={() => setEdit(false)} />
|
||||
<BioForm bio={user.bio} me={me} handleDone={() => setEdit(false)} />
|
||||
</div>)
|
||||
: <ItemFull item={user.bio} bio handleClick={setEdit} />
|
||||
)
|
||||
: (mine &&
|
||||
<div className={styles.create}>
|
||||
{create
|
||||
? <BioForm handleDone={() => setCreate(false)} />
|
||||
? <BioForm me={me} handleDone={() => setCreate(false)} />
|
||||
: (
|
||||
mine &&
|
||||
<div className='text-center'>
|
||||
|
|
|
@ -17,7 +17,6 @@ import { SSR } from '@/lib/constants'
|
|||
import NProgress from 'nprogress'
|
||||
import 'nprogress/nprogress.css'
|
||||
import { LoggerProvider } from '@/components/logger'
|
||||
import { WalletLoggerProvider } from '@/components/wallet-logger'
|
||||
import { ChainFeeProvider } from '@/components/chain-fee.js'
|
||||
import dynamic from 'next/dynamic'
|
||||
import { HasNewNotesProvider } from '@/components/use-has-new-notes'
|
||||
|
@ -107,30 +106,28 @@ export default function MyApp ({ Component, pageProps: { ...props } }) {
|
|||
<MeProvider me={me}>
|
||||
<HasNewNotesProvider>
|
||||
<LoggerProvider>
|
||||
<WalletLoggerProvider>
|
||||
<WebLnProvider>
|
||||
<ServiceWorkerProvider>
|
||||
<AccountProvider>
|
||||
<PriceProvider price={price}>
|
||||
<LightningProvider>
|
||||
<ToastProvider>
|
||||
<ShowModalProvider>
|
||||
<BlockHeightProvider blockHeight={blockHeight}>
|
||||
<ChainFeeProvider chainFee={chainFee}>
|
||||
<ErrorBoundary>
|
||||
<Component ssrData={ssrData} {...otherProps} />
|
||||
{!router?.query?.disablePrompt && <PWAPrompt copyBody='This website has app functionality. Add it to your home screen to use it in fullscreen and receive notifications. In Safari:' promptOnVisit={2} />}
|
||||
</ErrorBoundary>
|
||||
</ChainFeeProvider>
|
||||
</BlockHeightProvider>
|
||||
</ShowModalProvider>
|
||||
</ToastProvider>
|
||||
</LightningProvider>
|
||||
</PriceProvider>
|
||||
</AccountProvider>
|
||||
</ServiceWorkerProvider>
|
||||
</WebLnProvider>
|
||||
</WalletLoggerProvider>
|
||||
<WebLnProvider>
|
||||
<ServiceWorkerProvider>
|
||||
<AccountProvider>
|
||||
<PriceProvider price={price}>
|
||||
<LightningProvider>
|
||||
<ToastProvider>
|
||||
<ShowModalProvider>
|
||||
<BlockHeightProvider blockHeight={blockHeight}>
|
||||
<ChainFeeProvider chainFee={chainFee}>
|
||||
<ErrorBoundary>
|
||||
<Component ssrData={ssrData} {...otherProps} />
|
||||
{!router?.query?.disablePrompt && <PWAPrompt copyBody='This website has app functionality. Add it to your home screen to use it in fullscreen and receive notifications. In Safari:' promptOnVisit={2} />}
|
||||
</ErrorBoundary>
|
||||
</ChainFeeProvider>
|
||||
</BlockHeightProvider>
|
||||
</ShowModalProvider>
|
||||
</ToastProvider>
|
||||
</LightningProvider>
|
||||
</PriceProvider>
|
||||
</AccountProvider>
|
||||
</ServiceWorkerProvider>
|
||||
</WebLnProvider>
|
||||
</LoggerProvider>
|
||||
</HasNewNotesProvider>
|
||||
</MeProvider>
|
||||
|
|
|
@@ -47,6 +47,7 @@ class MyDocument extends Document {
            }}
          />
          <meta name='apple-mobile-web-app-capable' content='yes' />
          <meta name='mobile-web-app-capable' content='yes' />
          <meta name='theme-color' content='#121214' />
          <link rel='apple-touch-icon' href='/icons/icon_x192.png' />
          <Script id='dark-mode-js' strategy='beforeInteractive'>
@@ -81,7 +81,7 @@ export default async ({ query: { username, amount, nostr, comment, payerdata: pa
  })

  await serialize(
    models.$queryRaw`SELECT * FROM create_invoice(${invoice.id}, NULL, ${invoice.request},
    models.$queryRaw`SELECT * FROM create_invoice(${invoice.id}, ${invoice.secret}::TEXT, ${invoice.request},
      ${expiresAt}::timestamp, ${Number(amount)}, ${user.id}::INTEGER, ${noteStr || description},
      ${comment || null}, ${parsedPayerData || null}::JSONB, ${INV_PENDING_LIMIT}::INTEGER,
      ${USER_IDS_BALANCE_NO_LIMIT.includes(Number(user.id)) ? 0 : BALANCE_LIMIT_MSATS})`,
@@ -1,15 +1,15 @@
import lnd from '@/api/lnd'
import { getInvoice } from 'ln-service'
import models from '@/api/models'

export default async ({ query: { hash } }, res) => {
  try {
    const inv = await getInvoice({ id: hash, lnd })
    const settled = inv.is_confirmed
    return res.status(200).json({ status: 'OK', settled, preimage: settled ? inv.secret : null, pr: inv.request })
  } catch (err) {
    if (err[1] === 'UnexpectedLookupInvoiceErr') {
    const inv = await models.invoice.findUnique({ where: { hash } })
    if (!inv) {
      return res.status(404).json({ status: 'ERROR', reason: 'not found' })
    }
    const settled = inv.confirmedAt
    return res.status(200).json({ status: 'OK', settled: !!settled, preimage: settled ? inv.preimage : null, pr: inv.bolt11 })
  } catch (err) {
    console.log('error', err)
    return res.status(500).json({ status: 'ERROR', reason: 'internal server error' })
  }
}
@@ -31,7 +31,6 @@ import { OverlayTrigger, Tooltip } from 'react-bootstrap'
import { useField } from 'formik'
import styles from './settings.module.css'
import { AuthBanner } from '@/components/banners'
import DeviceSync from '@/components/device-sync'

export const getServerSideProps = getGetServerSideProps({ query: SETTINGS, authRequired: true })

@@ -607,7 +606,6 @@ export default function Settings ({ ssrData }) {
            <div className='form-label'>saturday newsletter</div>
            <Button href='https://mail.stacker.news/subscription/form' target='_blank'>(re)subscribe</Button>
            {settings?.authMethods && <AuthMethods methods={settings.authMethods} apiKeyEnabled={settings.apiKeyEnabled} />}
            <DeviceSync />
          </div>
        </div>
    </Layout>
@@ -1,39 +0,0 @@
-- AlterTable
ALTER TABLE "users" ADD COLUMN "vaultKeyHash" TEXT NOT NULL DEFAULT '';

-- CreateTable
CREATE TABLE "Vault" (
    "id" SERIAL NOT NULL,
    "key" VARCHAR(64) NOT NULL,
    "value" TEXT NOT NULL,
    "userId" INTEGER NOT NULL,
    "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updated_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "Vault_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "Vault.userId_index" ON "Vault"("userId");

-- CreateIndex
CREATE UNIQUE INDEX "Vault_userId_key_key" ON "Vault"("userId", "key");

-- AddForeignKey
ALTER TABLE "Vault" ADD CONSTRAINT "Vault_userId_fkey" FOREIGN KEY ("userId") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- avoid spam
CREATE OR REPLACE FUNCTION enforce_vault_limit()
RETURNS TRIGGER AS $$
BEGIN
    IF (SELECT COUNT(*) FROM "Vault" WHERE "userId" = NEW."userId") >= 100 THEN
        RAISE EXCEPTION 'vault limit of 100 entries per user reached';
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER enforce_vault_limit_trigger
BEFORE INSERT ON "Vault"
FOR EACH ROW
EXECUTE FUNCTION enforce_vault_limit();
@@ -0,0 +1,44 @@
CREATE OR REPLACE FUNCTION create_invoice(hash TEXT, preimage TEXT, bolt11 TEXT, expires_at timestamp(3) without time zone,
  msats_req BIGINT, user_id INTEGER, idesc TEXT, comment TEXT, lud18_data JSONB, inv_limit INTEGER, balance_limit_msats BIGINT)
RETURNS "Invoice"
LANGUAGE plpgsql
AS $$
DECLARE
  invoice "Invoice";
  inv_limit_reached BOOLEAN;
  balance_limit_reached BOOLEAN;
  inv_pending_msats BIGINT;
  wdwl_pending_msats BIGINT;
BEGIN
  PERFORM ASSERT_SERIALIZED();

  -- prevent too many pending invoices
  SELECT inv_limit > 0 AND count(*) >= inv_limit, COALESCE(sum("msatsRequested"), 0) INTO inv_limit_reached, inv_pending_msats
  FROM "Invoice"
  WHERE "userId" = user_id AND "expiresAt" > now_utc() AND "confirmedAt" IS NULL AND cancelled = false;

  IF inv_limit_reached THEN
    RAISE EXCEPTION 'SN_INV_PENDING_LIMIT';
  END IF;

  -- account for pending withdrawals
  SELECT COALESCE(sum("msatsPaying"), 0) + COALESCE(sum("msatsFeePaying"), 0) INTO wdwl_pending_msats
  FROM "Withdrawl"
  WHERE "userId" = user_id AND status IS NULL;

  -- prevent pending invoices + msats from exceeding the limit
  SELECT balance_limit_msats > 0 AND inv_pending_msats+wdwl_pending_msats+msats_req+msats > balance_limit_msats INTO balance_limit_reached
  FROM users
  WHERE id = user_id;

  IF balance_limit_reached THEN
    RAISE EXCEPTION 'SN_INV_EXCEED_BALANCE';
  END IF;

  -- we good, proceed frens
  INSERT INTO "Invoice" (hash, preimage, bolt11, "expiresAt", "msatsRequested", "userId", created_at, updated_at, "desc", comment, "lud18Data")
  VALUES (hash, preimage, bolt11, expires_at, msats_req, user_id, now_utc(), now_utc(), idesc, comment, lud18_data) RETURNING * INTO invoice;

  RETURN invoice;
END;
$$;
@@ -134,8 +134,6 @@ model User {
  ItemUserAgg ItemUserAgg[]
  oneDayReferrals OneDayReferral[] @relation("OneDayReferral_referrer")
  oneDayReferrees OneDayReferral[] @relation("OneDayReferral_referrees")
  vaultKeyHash String @default("")
  vaultEntries Vault[] @relation("VaultEntries")

  @@index([photoId])
  @@index([createdAt], map: "users.created_at_index")
@@ -1101,19 +1099,6 @@ model Reminder {
  @@index([userId, remindAt], map: "Reminder.userId_reminderAt_index")
}

model Vault {
  id Int @id @default(autoincrement())
  key String @db.VarChar(64)
  value String @db.Text
  userId Int
  user User @relation(fields: [userId], references: [id], onDelete: Cascade, name: "VaultEntries")
  createdAt DateTime @default(now()) @map("created_at")
  updatedAt DateTime @default(now()) @updatedAt @map("updated_at")

  @@unique([userId, key])
  @@index([userId], map: "Vault.userId_index")
}

enum EarnType {
  POST
  COMMENT
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor"><path d="M7 4V2H17V4H20.0066C20.5552 4 21 4.44495 21 4.9934V21.0066C21 21.5552 20.5551 22 20.0066 22H3.9934C3.44476 22 3 21.5551 3 21.0066V4.9934C3 4.44476 3.44495 4 3.9934 4H7ZM7 6H5V20H19V6H17V8H7V6ZM9 4V6H15V4H9Z"></path></svg>
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor"><path d="M16 17V16H13V13H16V15H18V17H17V19H15V21H13V18H15V17H16ZM21 21H17V19H19V17H21V21ZM3 3H11V11H3V3ZM5 5V9H9V5H5ZM13 3H21V11H13V3ZM15 5V9H19V5H15ZM3 13H11V21H3V13ZM5 15V19H9V15H5ZM18 13H21V15H18V13ZM6 6H8V8H6V6ZM6 16H8V18H6V16ZM16 6H18V8H16V6Z"></path></svg>
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor"><path d="M21 16V21H3V16H5V19H19V16H21ZM3 11H21V13H3V11ZM21 8H19V5H5V8H3V3H21V8Z"></path></svg>
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor"><path d="M5.46257 4.43262C7.21556 2.91688 9.5007 2 12 2C17.5228 2 22 6.47715 22 12C22 14.1361 21.3302 16.1158 20.1892 17.7406L17 12H20C20 7.58172 16.4183 4 12 4C9.84982 4 7.89777 4.84827 6.46023 6.22842L5.46257 4.43262ZM18.5374 19.5674C16.7844 21.0831 14.4993 22 12 22C6.47715 22 2 17.5228 2 12C2 9.86386 2.66979 7.88416 3.8108 6.25944L7 12H4C4 16.4183 7.58172 20 12 20C14.1502 20 16.1022 19.1517 17.5398 17.7716L18.5374 19.5674Z"></path></svg>
@@ -57,10 +57,6 @@ This acts as an ID for this wallet on the client. It therefore must be unique ac

Since `name` will also be used in [wallet logs](https://stacker.news/wallet/logs), you can specify a shorter name here which will be used in logs instead.

- `perDevice?: boolean`

  This is an optional value. Set this to true if your wallet needs to be configured per device and should thus not be synced across devices.

- `fields: WalletField[]`

  Wallet fields define what this wallet requires for configuration and thus are used to construct the forms like the one you can see at [/settings/wallets/lnbits](https://stacker.news/settings/walletslnbits). A minimal definition following this shape is sketched after this section.
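For orientation, a client wallet definition following the fields documented above might look like the sketch below. The wallet name, the exact `WalletField` properties, and the validation logic are illustrative assumptions rather than part of this diff; the `name`, `fields`, and `fieldValidation` exports mirror what the webln wallet further down exports.

```js
// wallets/examplewallet/index.js (hypothetical)
export const name = 'examplewallet' // unique ID on the client, also used in wallet logs

// drives the form at /settings/wallets/<name>; the field object shape here is a guess
export const fields = [
  { name: 'url', label: 'api url', type: 'text' },
  { name: 'apiKey', label: 'api key', type: 'password' }
]

// return an object of per-field error messages, empty when the config is valid
export const fieldValidation = ({ url, apiKey }) => {
  const errors = {}
  if (!url) errors.url = 'required'
  if (!apiKey) errors.apiKey = 'required'
  return errors
}
```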
wallets/index.js
@ -1,7 +1,8 @@
|
|||
import { useCallback } from 'react'
|
||||
import { useMe } from '@/components/me'
|
||||
import useVault from '@/components/use-vault'
|
||||
import useClientConfig from '@/components/use-local-state'
|
||||
import { useWalletLogger } from '@/components/wallet-logger'
|
||||
import { SSR } from '@/lib/constants'
|
||||
import { bolt11Tags } from '@/lib/bolt11'
|
||||
|
||||
import walletDefs from 'wallets/client'
|
||||
|
@ -21,44 +22,28 @@ export const Status = {
|
|||
}
|
||||
|
||||
export function useWallet (name) {
|
||||
if (!name) {
|
||||
const defaultWallet = walletDefs
|
||||
.filter(def => !!def.sendPayment && !!def.name)
|
||||
.map(def => {
|
||||
const w = useWallet(def.name)
|
||||
return w
|
||||
})
|
||||
.filter((wallet) => {
|
||||
return wallet?.enabled
|
||||
})
|
||||
.sort(walletPrioritySort)[0]
|
||||
return defaultWallet
|
||||
}
|
||||
|
||||
const { me } = useMe()
|
||||
const showModal = useShowModal()
|
||||
const toaster = useToast()
|
||||
const [disableFreebies] = useMutation(gql`mutation { disableFreebies }`)
|
||||
|
||||
const wallet = getWalletByName(name)
|
||||
const wallet = name ? getWalletByName(name) : getEnabledWallet(me)
|
||||
const { logger, deleteLogs } = useWalletLogger(wallet)
|
||||
|
||||
const [config, saveConfig, clearConfig] = useConfig(wallet)
|
||||
const hasConfig = wallet?.fields.length > 0
|
||||
const _isConfigured = isConfigured({ ...wallet, config })
|
||||
|
||||
const enablePayments = useCallback((updatedConfig) => {
|
||||
// config might have been updated in the same render we call this function
|
||||
// so we allow to pass in the updated config to not overwrite it a stale one
|
||||
saveConfig({ ...(updatedConfig || config), enabled: true }, { skipTests: true })
|
||||
const enablePayments = useCallback(() => {
|
||||
enableWallet(name, me)
|
||||
logger.ok('payments enabled')
|
||||
disableFreebies().catch(console.error)
|
||||
}, [config, logger])
|
||||
}, [name, me, logger])
|
||||
|
||||
const disablePayments = useCallback((updatedConfig) => {
|
||||
saveConfig({ ...(updatedConfig || config), enabled: false }, { skipTests: true })
|
||||
const disablePayments = useCallback(() => {
|
||||
disableWallet(name, me)
|
||||
logger.info('payments disabled')
|
||||
}, [config, logger])
|
||||
}, [name, me, logger])
|
||||
|
||||
const status = config?.enabled ? Status.Enabled : Status.Initialized
|
||||
const enabled = status === Status.Enabled
|
||||
|
@ -80,7 +65,7 @@ export function useWallet (name) {
|
|||
const setPriority = useCallback(async (priority) => {
|
||||
if (_isConfigured && priority !== config.priority) {
|
||||
try {
|
||||
await saveConfig({ ...config, priority }, { logger, skipTests: true })
|
||||
await saveConfig({ ...config, priority }, { logger, priorityOnly: true })
|
||||
} catch (err) {
|
||||
toaster.danger(`failed to change priority of ${wallet.name} wallet: ${err.message}`)
|
||||
}
|
||||
|
@ -100,7 +85,7 @@ export function useWallet (name) {
|
|||
logger.error(message)
|
||||
throw err
|
||||
}
|
||||
}, [clearConfig, logger])
|
||||
}, [clearConfig, logger, disablePayments])
|
||||
|
||||
const deleteLogs_ = useCallback(async (options) => {
|
||||
// first argument is to override the wallet
|
||||
|
@ -173,9 +158,8 @@ function extractServerConfig (fields, config) {
|
|||
function useConfig (wallet) {
|
||||
const { me } = useMe()
|
||||
|
||||
const storageKey = `wallet:${wallet.name}`
|
||||
|
||||
const [clientConfig, setClientConfig, clearClientConfig] = useVault(storageKey, {}, { localOnly: wallet.perDevice })
|
||||
const storageKey = getStorageKey(wallet?.name, me)
|
||||
const [clientConfig, setClientConfig, clearClientConfig] = useClientConfig(storageKey, {})
|
||||
|
||||
const [serverConfig, setServerConfig, clearServerConfig] = useServerConfig(wallet)
|
||||
|
||||
|
@ -197,7 +181,7 @@ function useConfig (wallet) {
|
|||
config.priority ||= priority
|
||||
}
|
||||
|
||||
const saveConfig = useCallback(async (newConfig, { logger, skipTests } = {}) => {
|
||||
const saveConfig = useCallback(async (newConfig, { logger, priorityOnly }) => {
|
||||
// NOTE:
|
||||
// verifying the client/server configuration before saving it
|
||||
// prevents unsetting just one configuration if both are set.
|
||||
|
@ -219,7 +203,7 @@ function useConfig (wallet) {
|
|||
}
|
||||
|
||||
if (valid) {
|
||||
if (skipTests) {
|
||||
if (priorityOnly) {
|
||||
setClientConfig(newClientConfig)
|
||||
} else {
|
||||
try {
|
||||
|
@ -234,12 +218,9 @@ function useConfig (wallet) {
|
|||
}
|
||||
|
||||
setClientConfig(newClientConfig)
|
||||
|
||||
logger.ok(wallet.isConfigured ? 'payment details updated' : 'wallet attached for payments')
|
||||
|
||||
// we only call enable / disable for the side effects
|
||||
if (newConfig.enabled) wallet.enablePayments(newClientConfig)
|
||||
else wallet.disablePayments(newClientConfig)
|
||||
if (newConfig.enabled) wallet.enablePayments()
|
||||
else wallet.disablePayments()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -257,17 +238,17 @@ function useConfig (wallet) {
|
|||
valid = false
|
||||
}
|
||||
|
||||
if (valid) await setServerConfig(newServerConfig, { priorityOnly: skipTests })
|
||||
if (valid) await setServerConfig(newServerConfig, { priorityOnly })
|
||||
}
|
||||
}, [hasClientConfig, hasServerConfig, setClientConfig, setServerConfig, wallet])
|
||||
|
||||
const clearConfig = useCallback(async ({ logger, clientOnly, ...options }) => {
|
||||
const clearConfig = useCallback(async ({ logger, clientOnly }) => {
|
||||
if (hasClientConfig) {
|
||||
clearClientConfig(options)
|
||||
wallet.disablePayments({})
|
||||
clearClientConfig()
|
||||
wallet.disablePayments()
|
||||
logger.ok('wallet detached for payments')
|
||||
}
|
||||
if (hasServerConfig && !clientOnly) await clearServerConfig(options)
|
||||
if (hasServerConfig && !clientOnly) await clearServerConfig()
|
||||
}, [hasClientConfig, hasServerConfig, clearClientConfig, clearServerConfig, wallet])
|
||||
|
||||
return [config, saveConfig, clearConfig]
|
||||
|
@ -389,6 +370,20 @@ export function getWalletByType (type) {
|
|||
return walletDefs.find(def => def.walletType === type)
|
||||
}
|
||||
|
||||
export function getEnabledWallet (me) {
|
||||
return walletDefs
|
||||
.filter(def => !!def.sendPayment)
|
||||
.map(def => {
|
||||
// populate definition with properties from useWallet that are required for sorting
|
||||
const key = getStorageKey(def.name, me)
|
||||
const config = SSR ? null : JSON.parse(window?.localStorage.getItem(key))
|
||||
const priority = config?.priority
|
||||
return { ...def, config, priority }
|
||||
})
|
||||
.filter(({ config }) => config?.enabled)
|
||||
.sort(walletPrioritySort)[0]
|
||||
}
|
||||
|
||||
export function walletPrioritySort (w1, w2) {
|
||||
const delta = w1.priority - w2.priority
|
||||
// delta is NaN if either priority is undefined
|
||||
|
@ -414,7 +409,7 @@ export function useWallets () {
|
|||
const resetClient = useCallback(async (wallet) => {
|
||||
for (const w of wallets) {
|
||||
if (w.canSend) {
|
||||
await w.delete({ clientOnly: true, onlyFromLocalStorage: true })
|
||||
await w.delete({ clientOnly: true })
|
||||
}
|
||||
await w.deleteLogs({ clientOnly: true })
|
||||
}
|
||||
|
@ -422,3 +417,29 @@ export function useWallets () {
|
|||
|
||||
return { wallets, resetClient }
|
||||
}
|
||||
|
||||
function getStorageKey (name, me) {
|
||||
let storageKey = `wallet:${name}`
|
||||
|
||||
// WebLN has no credentials we need to scope to users
|
||||
// so we can use the same storage key for all users
|
||||
if (me && name !== 'webln') {
|
||||
storageKey = `${storageKey}:${me.id}`
|
||||
}
|
||||
|
||||
return storageKey
|
||||
}
|
||||
|
||||
function enableWallet (name, me) {
|
||||
const key = getStorageKey(name, me)
|
||||
const config = JSON.parse(window.localStorage.getItem(key)) || {}
|
||||
config.enabled = true
|
||||
window.localStorage.setItem(key, JSON.stringify(config))
|
||||
}
|
||||
|
||||
function disableWallet (name, me) {
|
||||
const key = getStorageKey(name, me)
|
||||
const config = JSON.parse(window.localStorage.getItem(key)) || {}
|
||||
config.enabled = false
|
||||
window.localStorage.setItem(key, JSON.stringify(config))
|
||||
}
|
||||
|
|
|
@@ -2,12 +2,14 @@ Use these NWC strings to attach the wallet

* sending:

run the following command:
```
nostr+walletconnect://b7dcc7aca6e27ec2bc2374eef1a3ce1f975b76ea8ebc806fcbb9e4d359ced47e?relay=wss%3A%2F%2Frelay.primal.net&secret=c8f7fcb4707863ba1cc1b32c8871585ddb1eb7a555925cd2818a6caf4a21fb90
sndev logs --since 0 nwc_send | awk '/nostr\+walletconnect/{print $3; exit}'
```

- receiving:

run the following command:
```
nostr+walletconnect://ed77e8af26fee9d179443505ad7d11d5a535e1767eb3058b01673c3f56f08191?relay=wss%3A%2F%2Frelay.primal.net&secret=87e73293804edb089e0be8bf01ab2f6f219591f91998479851a7a2d1daf1a617
sndev logs --since 0 nwc_recv | awk '/nostr\+walletconnect/{print $3; exit}'
```
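Either command prints a connection string shaped like the hardcoded ones removed above; the pubkey and secret are now generated per container, so the values below are placeholders only:

```
nostr+walletconnect://<wallet-pubkey>?relay=wss%3A%2F%2Frelay.primal.net&secret=<hex-secret>
```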
@@ -3,8 +3,6 @@ import { useWallet } from 'wallets'

export const name = 'webln'

export const perDevice = true

export const fields = []

export const fieldValidation = ({ enabled }) => {
@@ -37,8 +35,6 @@ export default function WebLnProvider ({ children }) {
    wallet.disablePayments()
  }

  if (!window.webln) onDisable()

  window.addEventListener('webln:enabled', onEnable)
  // event is not fired by Alby browser extension but added here for sake of completeness
  window.addEventListener('webln:disabled', onDisable)
@@ -1,5 +1,5 @@
import { createHodlInvoice, getHeight, parsePaymentRequest } from 'ln-service'
import { estimateRouteFee } from '../api/lnd'
import { createHodlInvoice, parsePaymentRequest } from 'ln-service'
import { estimateRouteFee, getBlockHeight } from '../api/lnd'
import { toPositiveNumber } from '@/lib/validate'

const MIN_OUTGOING_MSATS = BigInt(900) // the minimum msats we'll allow for the outgoing invoice
@@ -44,7 +44,7 @@ export default async function wrapInvoice (bolt11, { msats, description, descrip
  if (outgoingMsat < MIN_OUTGOING_MSATS) {
    throw new Error(`Invoice amount is too low: ${outgoingMsat}`)
  }
  if (inv.mtokens > MAX_OUTGOING_MSATS) {
  if (outgoingMsat > MAX_OUTGOING_MSATS) {
    throw new Error(`Invoice amount is too high: ${outgoingMsat}`)
  }
} else {
@@ -131,7 +131,7 @@ export default async function wrapInvoice (bolt11, { msats, description, descrip
    timeout: FEE_ESTIMATE_TIMEOUT_SECS
  })

  const { current_block_height: blockHeight } = await getHeight({ lnd })
  const blockHeight = await getBlockHeight({ lnd })
  /*
    we want the incoming invoice to have MIN_SETTLEMENT_CLTV_DELTA higher final cltv delta than
    the expected ctlv_delta of the outgoing invoice's entire route
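`getBlockHeight` replaces the direct `getHeight` call here, but its implementation is not part of this excerpt. Given that `api/lnd` now imports `cachedFetcher`, it is plausibly a cached wrapper along the lines of the sketch below; the cache timings and key are guesses, and only the `getHeight` call shape comes from the removed line above.

```js
import { getHeight } from 'ln-service'
import { cachedFetcher } from '@/lib/fetch'

// hypothetical sketch: cache the chain tip so hot paths like invoice wrapping
// don't ask LND for the block height on every call
export const getBlockHeight = cachedFetcher(async function fetchBlockHeight ({ lnd, ...args }) {
  const { current_block_height: height } = await getHeight({ lnd, ...args })
  return height
}, {
  maxSize: 1,
  cacheExpiry: 60 * 1000, // guessed expiry
  forceRefreshThreshold: 5 * 60 * 1000, // guessed threshold
  keyGenerator: () => 'getHeight' // single global key
})
```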
@@ -1,6 +1,6 @@
# syntax=docker/dockerfile:1

FROM node:18.17.0-bullseye
FROM node:18.20.4-bullseye

ENV NODE_ENV=development

@ -3,6 +3,7 @@ import { extractUrls } from '@/lib/md.js'
|
|||
import { isJob } from '@/lib/item.js'
|
||||
import path from 'node:path'
|
||||
import { decodeProxyUrl } from '@/lib/url'
|
||||
import { fetchWithTimeout } from '@/lib/fetch'
|
||||
|
||||
const imgProxyEnabled = process.env.NODE_ENV === 'production' ||
|
||||
(process.env.NEXT_PUBLIC_IMGPROXY_URL && process.env.IMGPROXY_SALT && process.env.IMGPROXY_KEY)
|
||||
|
@ -133,19 +134,6 @@ const createImgproxyPath = ({ url, pathname = '/', options }) => {
|
|||
return path.join(pathname, signature, target)
|
||||
}
|
||||
|
||||
async function fetchWithTimeout (resource, { timeout = 1000, ...options } = {}) {
|
||||
const controller = new AbortController()
|
||||
const id = setTimeout(() => controller.abort(), timeout)
|
||||
|
||||
const response = await fetch(resource, {
|
||||
...options,
|
||||
signal: controller.signal
|
||||
})
|
||||
clearTimeout(id)
|
||||
|
||||
return response
|
||||
}
|
||||
|
||||
const isMediaURL = async (url, { forceFetch }) => {
|
||||
if (cache.has(url)) return cache.get(url)
|
||||
|
||||
|
|
|
@ -1,18 +1,17 @@
|
|||
import { getInvoice } from 'ln-service'
|
||||
import { signId, calculateId, getPublicKey } from 'nostr'
|
||||
import { Relay } from '@/lib/nostr'
|
||||
|
||||
const nostrOptions = { startAfter: 5, retryLimit: 21, retryBackoff: true }
|
||||
|
||||
export async function nip57 ({ data: { hash }, boss, lnd, models }) {
|
||||
let inv, lnInv
|
||||
let inv
|
||||
try {
|
||||
lnInv = await getInvoice({ id: hash, lnd })
|
||||
inv = await models.invoice.findUnique({
|
||||
where: {
|
||||
hash
|
||||
}
|
||||
})
|
||||
if (!inv || !inv.confirmedAt) return
|
||||
} catch (err) {
|
||||
console.log(err)
|
||||
// on lnd related errors, we manually retry which so we don't exponentially backoff
|
||||
|
@ -35,14 +34,14 @@ export async function nip57 ({ data: { hash }, boss, lnd, models }) {
|
|||
const tags = [ptag]
|
||||
if (etag) tags.push(etag)
|
||||
if (atag) tags.push(atag)
|
||||
tags.push(['bolt11', lnInv.request])
|
||||
tags.push(['bolt11', inv.bolt11])
|
||||
tags.push(['description', inv.desc])
|
||||
tags.push(['preimage', lnInv.secret])
|
||||
tags.push(['preimage', inv.preimage])
|
||||
|
||||
const e = {
|
||||
kind: 9735,
|
||||
pubkey: getPublicKey(process.env.NOSTR_PRIVATE_KEY),
|
||||
created_at: Math.floor(new Date(lnInv.confirmed_at).getTime() / 1000),
|
||||
created_at: Math.floor(new Date(inv.confirmedAt).getTime() / 1000),
|
||||
content: '',
|
||||
tags
|
||||
}
|
||||
|
|
|
@ -14,7 +14,7 @@ import { MIN_SETTLEMENT_CLTV_DELTA } from 'wallets/wrap'
|
|||
// aggressive finalization retry options
|
||||
const FINALIZE_OPTIONS = { retryLimit: 2 ** 31 - 1, retryBackoff: false, retryDelay: 5, priority: 1000 }
|
||||
|
||||
async function transitionInvoice (jobName, { invoiceId, fromState, toState, transition }, { models, lnd, boss }) {
|
||||
async function transitionInvoice (jobName, { invoiceId, fromState, toState, transition, invoice }, { models, lnd, boss }) {
|
||||
console.group(`${jobName}: transitioning invoice ${invoiceId} from ${fromState} to ${toState}`)
|
||||
|
||||
try {
|
||||
|
@ -30,7 +30,7 @@ async function transitionInvoice (jobName, { invoiceId, fromState, toState, tran
|
|||
fromState = [fromState]
|
||||
}
|
||||
|
||||
const lndInvoice = await getInvoice({ id: currentDbInvoice.hash, lnd })
|
||||
const lndInvoice = invoice ?? await getInvoice({ id: currentDbInvoice.hash, lnd })
|
||||
|
||||
const transitionedInvoice = await models.$transaction(async tx => {
|
||||
const include = {
|
||||
|
@ -133,8 +133,8 @@ async function performPessimisticAction ({ lndInvoice, dbInvoice, tx, models, ln
|
|||
}
|
||||
}
|
||||
|
||||
export async function paidActionPaid ({ data: { invoiceId }, models, lnd, boss }) {
|
||||
return await transitionInvoice('paidActionPaid', {
|
||||
export async function paidActionPaid ({ data: { invoiceId, ...args }, models, lnd, boss }) {
|
||||
const transitionedInvoice = await transitionInvoice('paidActionPaid', {
|
||||
invoiceId,
|
||||
fromState: ['HELD', 'PENDING', 'FORWARDED'],
|
||||
toState: 'PAID',
|
||||
|
@ -153,12 +153,21 @@ export async function paidActionPaid ({ data: { invoiceId }, models, lnd, boss }
|
|||
confirmedIndex: lndInvoice.confirmed_index,
|
||||
msatsReceived: BigInt(lndInvoice.received_mtokens)
|
||||
}
|
||||
}
|
||||
},
|
||||
...args
|
||||
}, { models, lnd, boss })
|
||||
|
||||
if (transitionedInvoice) {
|
||||
// run non critical side effects in the background
|
||||
// after the transaction has been committed
|
||||
paidActions[transitionedInvoice.actionType]
|
||||
.nonCriticalSideEffects?.({ invoice: transitionedInvoice }, { models, lnd })
|
||||
.catch(console.error)
|
||||
}
|
||||
}
|
||||
|
||||
// this performs forward creating the outgoing payment
|
||||
export async function paidActionForwarding ({ data: { invoiceId }, models, lnd, boss }) {
|
||||
export async function paidActionForwarding ({ data: { invoiceId, ...args }, models, lnd, boss }) {
|
||||
const transitionedInvoice = await transitionInvoice('paidActionForwarding', {
|
||||
invoiceId,
|
||||
fromState: 'PENDING_HELD',
|
||||
|
@ -213,7 +222,8 @@ export async function paidActionForwarding ({ data: { invoiceId }, models, lnd,
|
|||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
...args
|
||||
}, { models, lnd, boss })
|
||||
|
||||
// only pay if we successfully transitioned which can only happen once
|
||||
|
@ -238,7 +248,7 @@ export async function paidActionForwarding ({ data: { invoiceId }, models, lnd,
|
|||
}
|
||||
|
||||
// this finalizes the forward by settling the incoming invoice after the outgoing payment is confirmed
|
||||
export async function paidActionForwarded ({ data: { invoiceId }, models, lnd, boss }) {
|
||||
export async function paidActionForwarded ({ data: { invoiceId, withdrawal, ...args }, models, lnd, boss }) {
|
||||
return await transitionInvoice('paidActionForwarded', {
|
||||
invoiceId,
|
||||
fromState: 'FORWARDING',
|
||||
|
@ -249,7 +259,7 @@ export async function paidActionForwarded ({ data: { invoiceId }, models, lnd, b
|
|||
}
|
||||
|
||||
const { hash, msatsPaying } = dbInvoice.invoiceForward.withdrawl
|
||||
const { payment, is_confirmed: isConfirmed } = await getPayment({ id: hash, lnd })
|
||||
const { payment, is_confirmed: isConfirmed } = withdrawal ?? await getPayment({ id: hash, lnd })
|
||||
if (!isConfirmed) {
|
||||
throw new Error('payment is not confirmed')
|
||||
}
|
||||
|
@ -258,6 +268,7 @@ export async function paidActionForwarded ({ data: { invoiceId }, models, lnd, b
|
|||
await settleHodlInvoice({ secret: payment.secret, lnd })
|
||||
|
||||
return {
|
||||
preimage: payment.secret,
|
||||
invoiceForward: {
|
||||
update: {
|
||||
withdrawl: {
|
||||
|
@ -271,12 +282,13 @@ export async function paidActionForwarded ({ data: { invoiceId }, models, lnd, b
|
|||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
...args
|
||||
}, { models, lnd, boss })
|
||||
}
|
||||
|
||||
// when the pending forward fails, we need to cancel the incoming invoice
|
||||
export async function paidActionFailedForward ({ data: { invoiceId }, models, lnd, boss }) {
|
||||
export async function paidActionFailedForward ({ data: { invoiceId, withdrawal: pWithdrawal, ...args }, models, lnd, boss }) {
|
||||
return await transitionInvoice('paidActionFailedForward', {
|
||||
invoiceId,
|
||||
fromState: 'FORWARDING',
|
||||
|
@ -289,7 +301,7 @@ export async function paidActionFailedForward ({ data: { invoiceId }, models, ln
|
|||
let withdrawal
|
||||
let notSent = false
|
||||
try {
|
||||
withdrawal = await getPayment({ id: dbInvoice.invoiceForward.withdrawl.hash, lnd })
|
||||
withdrawal = pWithdrawal ?? await getPayment({ id: dbInvoice.invoiceForward.withdrawl.hash, lnd })
|
||||
} catch (err) {
|
||||
if (err[1] === 'SentPaymentNotFound' &&
|
||||
dbInvoice.invoiceForward.withdrawl.createdAt < datePivot(new Date(), { milliseconds: -LND_PATHFINDING_TIMEOUT_MS * 2 })) {
|
||||
|
@ -313,17 +325,18 @@ export async function paidActionFailedForward ({ data: { invoiceId }, models, ln
|
|||
update: {
|
||||
withdrawl: {
|
||||
update: {
|
||||
status: getPaymentFailureStatus(withdrawal)
|
||||
status: getPaymentFailureStatus(withdrawal).status
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
...args
|
||||
}, { models, lnd, boss })
|
||||
}
|
||||
|
||||
export async function paidActionHeld ({ data: { invoiceId }, models, lnd, boss }) {
|
||||
export async function paidActionHeld ({ data: { invoiceId, ...args }, models, lnd, boss }) {
|
||||
return await transitionInvoice('paidActionHeld', {
|
||||
invoiceId,
|
||||
fromState: 'PENDING_HELD',
|
||||
|
@ -355,11 +368,12 @@ export async function paidActionHeld ({ data: { invoiceId }, models, lnd, boss }
|
|||
isHeld: true,
|
||||
msatsReceived: BigInt(lndInvoice.received_mtokens)
|
||||
}
|
||||
}
|
||||
},
|
||||
...args
|
||||
}, { models, lnd, boss })
|
||||
}
|
||||
|
||||
export async function paidActionCanceling ({ data: { invoiceId }, models, lnd, boss }) {
|
||||
export async function paidActionCanceling ({ data: { invoiceId, ...args }, models, lnd, boss }) {
|
||||
return await transitionInvoice('paidActionCanceling', {
|
||||
invoiceId,
|
||||
fromState: ['HELD', 'PENDING', 'PENDING_HELD', 'FAILED_FORWARD'],
|
||||
|
@ -370,11 +384,12 @@ export async function paidActionCanceling ({ data: { invoiceId }, models, lnd, b
|
|||
}
|
||||
|
||||
await cancelHodlInvoice({ id: dbInvoice.hash, lnd })
|
||||
}
|
||||
},
|
||||
...args
|
||||
}, { models, lnd, boss })
|
||||
}
|
||||
|
||||
export async function paidActionFailed ({ data: { invoiceId }, models, lnd, boss }) {
|
||||
export async function paidActionFailed ({ data: { invoiceId, ...args }, models, lnd, boss }) {
|
||||
return await transitionInvoice('paidActionFailed', {
|
||||
invoiceId,
|
||||
// any of these states can transition to FAILED
|
||||
|
@ -390,6 +405,7 @@ export async function paidActionFailed ({ data: { invoiceId }, models, lnd, boss
|
|||
return {
|
||||
cancelled: true
|
||||
}
|
||||
}
|
||||
},
|
||||
...args
|
||||
}, { models, lnd, boss })
|
||||
}
|
||||
|
|
|
@ -15,6 +15,7 @@ import {
|
|||
paidActionForwarding,
|
||||
paidActionCanceling
|
||||
} from './paidAction.js'
|
||||
import { getPaymentFailureStatus } from '@/api/lnd/index.js'
|
||||
|
||||
export async function subscribeToWallet (args) {
|
||||
await subscribeToDeposits(args)
|
||||
|
@ -67,10 +68,11 @@ async function subscribeToDeposits (args) {
|
|||
try {
|
||||
logEvent('invoice_updated', inv)
|
||||
if (inv.secret) {
|
||||
await checkInvoice({ data: { hash: inv.id }, ...args })
|
||||
// subscribeToInvoices only returns when added or settled
|
||||
await checkInvoice({ data: { hash: inv.id, invoice: inv }, ...args })
|
||||
} else {
|
||||
// this is a HODL invoice. We need to use SubscribeToInvoice which has is_held transitions
|
||||
// https://api.lightning.community/api/lnd/invoices/subscribe-single-invoice
|
||||
// and is_canceled transitions https://api.lightning.community/api/lnd/invoices/subscribe-single-invoice
|
||||
// SubscribeToInvoices is only for invoice creation and settlement transitions
|
||||
// https://api.lightning.community/api/lnd/lightning/subscribe-invoices
|
||||
subscribeToHodlInvoice({ hash: inv.id, ...args })
|
||||
|
@ -97,7 +99,7 @@ function subscribeToHodlInvoice (args) {
|
|||
sub.on('invoice_updated', async (inv) => {
|
||||
logEvent('hodl_invoice_updated', inv)
|
||||
try {
|
||||
await checkInvoice({ data: { hash: inv.id }, ...args })
|
||||
await checkInvoice({ data: { hash: inv.id, invoice: inv }, ...args })
|
||||
// after settle or confirm we can stop listening for updates
|
||||
if (inv.is_confirmed || inv.is_canceled) {
|
||||
resolve()
|
||||
|
@ -112,8 +114,10 @@ function subscribeToHodlInvoice (args) {
|
|||
})
|
||||
}
|
||||
|
||||
export async function checkInvoice ({ data: { hash }, boss, models, lnd }) {
|
||||
const inv = await getInvoice({ id: hash, lnd })
|
||||
// if we already have the invoice from a subscription event or previous call,
|
||||
// we can skip a getInvoice call
|
||||
export async function checkInvoice ({ data: { hash, invoice }, boss, models, lnd }) {
|
||||
const inv = invoice ?? await getInvoice({ id: hash, lnd })
|
||||
|
||||
// invoice could be created by LND but wasn't inserted into the database yet
|
||||
// this is expected and the function will be called again with the updates
|
||||
|
@ -134,7 +138,7 @@ export async function checkInvoice ({ data: { hash }, boss, models, lnd }) {
|
|||
|
||||
if (inv.is_confirmed) {
|
||||
if (dbInv.actionType) {
|
||||
return await paidActionPaid({ data: { invoiceId: dbInv.id }, models, lnd, boss })
|
||||
return await paidActionPaid({ data: { invoiceId: dbInv.id, invoice: inv }, models, lnd, boss })
|
||||
}
|
||||
|
||||
// NOTE: confirm invoice prevents double confirmations (idempotent)
|
||||
|
@ -146,8 +150,6 @@ export async function checkInvoice ({ data: { hash }, boss, models, lnd }) {
|
|||
models.invoice.update({ where: { hash }, data: { confirmedIndex: inv.confirmed_index } })
|
||||
], { models })
|
||||
|
||||
// don't send notifications for JIT invoices
|
||||
if (dbInv.preimage) return
|
||||
if (code === 0) {
|
||||
notifyDeposit(dbInv.userId, { comment: dbInv.comment, ...inv })
|
||||
}
|
||||
|
@ -160,11 +162,11 @@ export async function checkInvoice ({ data: { hash }, boss, models, lnd }) {
|
|||
if (dbInv.invoiceForward) {
|
||||
if (dbInv.invoiceForward.withdrawl) {
|
||||
// transitions when held are dependent on the withdrawl status
|
||||
return await checkWithdrawal({ data: { hash: dbInv.invoiceForward.withdrawl.hash }, models, lnd, boss })
|
||||
return await checkWithdrawal({ data: { hash: dbInv.invoiceForward.withdrawl.hash, invoice: inv }, models, lnd, boss })
|
||||
}
|
||||
return await paidActionForwarding({ data: { invoiceId: dbInv.id }, models, lnd, boss })
|
||||
return await paidActionForwarding({ data: { invoiceId: dbInv.id, invoice: inv }, models, lnd, boss })
|
||||
}
|
||||
return await paidActionHeld({ data: { invoiceId: dbInv.id }, models, lnd, boss })
|
||||
return await paidActionHeld({ data: { invoiceId: dbInv.id, invoice: inv }, models, lnd, boss })
|
||||
}
|
||||
// First query makes sure that after payment, JIT invoices are settled
|
||||
// within 60 seconds or they will be canceled to minimize risk of
|
||||
|
@@ -190,7 +192,7 @@ export async function checkInvoice ({ data: { hash }, boss, models, lnd }) {
 
   if (inv.is_canceled) {
     if (dbInv.actionType) {
-      return await paidActionFailed({ data: { invoiceId: dbInv.id }, models, lnd, boss })
+      return await paidActionFailed({ data: { invoiceId: dbInv.id, invoice: inv }, models, lnd, boss })
     }
 
     return await serialize(
@@ -216,7 +218,9 @@ async function subscribeToWithdrawals (args) {
   sub.on('confirmed', async (payment) => {
     logEvent('confirmed', payment)
     try {
-      await checkWithdrawal({ data: { hash: payment.id }, ...args })
+      // see https://github.com/alexbosworth/lightning/blob/ddf1f214ebddf62e9e19fd32a57fbeeba713340d/lnd_methods/offchain/subscribe_to_payments.js
+      const withdrawal = { payment, is_confirmed: true }
+      await checkWithdrawal({ data: { hash: payment.id, withdrawal }, ...args })
     } catch (error) {
       logEventError('confirmed', error)
     }
@@ -225,7 +229,9 @@ async function subscribeToWithdrawals (args) {
   sub.on('failed', async (payment) => {
     logEvent('failed', payment)
     try {
-      await checkWithdrawal({ data: { hash: payment.id }, ...args })
+      // see https://github.com/alexbosworth/lightning/blob/ddf1f214ebddf62e9e19fd32a57fbeeba713340d/lnd_methods/offchain/subscribe_to_payments.js
+      const withdrawal = { failed: payment, is_failed: true }
+      await checkWithdrawal({ data: { hash: payment.id, withdrawal }, ...args })
     } catch (error) {
       logEventError('failed', error)
     }
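
Outside the diff, a rough sketch of what both handlers above do, with a hypothetical toWithdrawal helper and simplified payloads. The payment subscription emits a bare payment object, so it is wrapped into the shape checkWithdrawal otherwise gets back from getPayment.

// hypothetical helper for illustration; the real handlers inline this wrapping
function toWithdrawal (eventName, payment) {
  if (eventName === 'confirmed') {
    // settled payments sit under `payment` with is_confirmed set
    return { payment, is_confirmed: true }
  }
  // failed payments carry their failure details under `failed` with is_failed set
  return { failed: payment, is_failed: true }
}

console.log(toWithdrawal('confirmed', { id: 'abc', mtokens: '21000' }))
// { payment: { id: 'abc', mtokens: '21000' }, is_confirmed: true }
console.log(toWithdrawal('failed', { id: 'def', is_route_not_found: true }))
// { failed: { id: 'def', is_route_not_found: true }, is_failed: true }
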
@@ -238,7 +244,9 @@ async function subscribeToWithdrawals (args) {
   await checkPendingWithdrawals(args)
 }
 
-export async function checkWithdrawal ({ data: { hash }, boss, models, lnd }) {
+// if we already have the payment from a subscription event or previous call,
+// we can skip a getPayment call
+export async function checkWithdrawal ({ data: { hash, withdrawal, invoice }, boss, models, lnd }) {
   // get the withdrawl if pending or it's an invoiceForward
   const dbWdrwl = await models.withdrawl.findFirst({
     where: {
@@ -265,7 +273,7 @@ export async function checkWithdrawal ({ data: { hash }, boss, models, lnd }) {
   let wdrwl
   let notSent = false
   try {
-    wdrwl = await getPayment({ id: hash, lnd })
+    wdrwl = withdrawal ?? await getPayment({ id: hash, lnd })
   } catch (err) {
     if (err[1] === 'SentPaymentNotFound' &&
       dbWdrwl.createdAt < datePivot(new Date(), { milliseconds: -LND_PATHFINDING_TIMEOUT_MS * 2 })) {
@@ -278,15 +286,20 @@ export async function checkWithdrawal ({ data: { hash }, boss, models, lnd }) {
 
   if (wdrwl?.is_confirmed) {
     if (dbWdrwl.invoiceForward.length > 0) {
-      return await paidActionForwarded({ data: { invoiceId: dbWdrwl.invoiceForward[0].invoice.id }, models, lnd, boss })
+      return await paidActionForwarded({ data: { invoiceId: dbWdrwl.invoiceForward[0].invoice.id, withdrawal: wdrwl, invoice }, models, lnd, boss })
     }
 
     const fee = Number(wdrwl.payment.fee_mtokens)
     const paid = Number(wdrwl.payment.mtokens) - fee
-    const [{ confirm_withdrawl: code }] = await serialize(
+    const [{ confirm_withdrawl: code }] = await serialize([
       models.$queryRaw`SELECT confirm_withdrawl(${dbWdrwl.id}::INTEGER, ${paid}, ${fee})`,
-      { models }
-    )
+      models.withdrawl.update({
+        where: { id: dbWdrwl.id },
+        data: {
+          preimage: wdrwl.payment.secret
+        }
+      })
+    ], { models })
     if (code === 0) {
       notifyWithdrawal(dbWdrwl.userId, wdrwl)
       if (dbWdrwl.wallet) {
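
Not part of the diff: serialize is the project's own transaction helper, and the array form above appears to run its statements in one database transaction so the withdrawal cannot be confirmed without its preimage being stored. A loose sketch of that idea with Prisma, assuming that behavior rather than describing the helper's actual implementation:

// sketch only: assumes the array form behaves roughly like a single Prisma transaction
async function serializeSketch (statements, { models }) {
  const queries = Array.isArray(statements) ? statements : [statements]
  // $transaction commits every statement together or rolls them all back
  return await models.$transaction(queries)
}

If that assumption holds, the confirm_withdrawl call and the preimage write either both commit or neither does.
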
@@ -299,23 +312,10 @@ export async function checkWithdrawal ({ data: { hash }, boss, models, lnd }) {
     }
   } else if (wdrwl?.is_failed || notSent) {
     if (dbWdrwl.invoiceForward.length > 0) {
-      return await paidActionFailedForward({ data: { invoiceId: dbWdrwl.invoiceForward[0].invoice.id }, models, lnd, boss })
+      return await paidActionFailedForward({ data: { invoiceId: dbWdrwl.invoiceForward[0].invoice.id, withdrawal: wdrwl, invoice }, models, lnd, boss })
     }
 
-    let status = 'UNKNOWN_FAILURE'; let message = 'unknown failure'
-    if (wdrwl?.failed.is_insufficient_balance) {
-      status = 'INSUFFICIENT_BALANCE'
-      message = "you didn't have enough sats"
-    } else if (wdrwl?.failed.is_invalid_payment) {
-      status = 'INVALID_PAYMENT'
-      message = 'invalid payment'
-    } else if (wdrwl?.failed.is_pathfinding_timeout) {
-      status = 'PATHFINDING_TIMEOUT'
-      message = 'no route found'
-    } else if (wdrwl?.failed.is_route_not_found) {
-      status = 'ROUTE_NOT_FOUND'
-      message = 'no route found'
-    }
+    const { status, message } = getPaymentFailureStatus(wdrwl)
 
     const [{ reverse_withdrawl: code }] = await serialize(
       models.$queryRaw`
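
Outside the diff: the inline status/message chain is replaced by the shared getPaymentFailureStatus helper from the LND library module. A hypothetical spot-check, assuming the helper keeps the same statuses and messages as the chain it replaces and is imported from wherever it is defined:

// assumed behavior, mirroring the inline chain removed above;
// assumes getPaymentFailureStatus is imported from its module
const { status, message } = getPaymentFailureStatus({ failed: { is_pathfinding_timeout: true } })
console.log(status, message) // expected: 'PATHFINDING_TIMEOUT' 'no route found'
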
@@ -393,12 +393,11 @@ export async function finalizeHodlInvoice ({ data: { hash }, models, lnd, boss,
 
   // if this is an actionType we need to cancel conditionally
   if (dbInv.actionType) {
-    await paidActionCanceling({ data: { invoiceId: dbInv.id }, models, lnd, boss })
-    await checkInvoice({ data: { hash }, models, lnd, ...args })
-    return
+    await paidActionCanceling({ data: { invoiceId: dbInv.id, invoice: inv }, models, lnd, boss })
+  } else {
+    await cancelHodlInvoice({ id: hash, lnd })
   }
 
-  await cancelHodlInvoice({ id: hash, lnd })
   // sync LND invoice status with invoice status in database
   await checkInvoice({ data: { hash }, models, lnd, ...args })
 }
|
|
Loading…
Reference in New Issue