Compare commits


1 Commits

Author SHA1 Message Date
ekzyis
a8450be083 wip: refactor WebLN providers in frontend 2024-06-17 15:27:40 +02:00
494 changed files with 12006 additions and 30944 deletions

View File

@ -1,8 +1,3 @@
PRISMA_SLOW_LOGS_MS=
GRAPHQL_SLOW_LOGS_MS=
NODE_ENV=development
COMPOSE_PROFILES='minimal,images,search,payments,wallets,email,capture'
############################################################################
# OPTIONAL SECRETS #
# put these in .env.local, and don't commit them to git #
@ -29,8 +24,8 @@ SLACK_BOT_TOKEN=
SLACK_CHANNEL_ID=
# lnurl ... you'll need a tunnel to localhost:3000 for these
LNAUTH_URL=http://localhost:3000/api/lnauth
LNWITH_URL=http://localhost:3000/api/lnwith
LNAUTH_URL=
LNWITH_URL=
########################################
# SNDEV STUFF WE PRESET #
@ -78,7 +73,6 @@ IMGPROXY_MAX_ANIMATION_FRAME_RESOLUTION=200
IMGPROXY_READ_TIMEOUT=10
IMGPROXY_WRITE_TIMEOUT=10
IMGPROXY_DOWNLOAD_TIMEOUT=9
IMGPROXY_ENABLE_VIDEO_THUMBNAILS=1
# IMGPROXY_DEVELOPMENT_ERRORS_MODE=1
# IMGPROXY_ENABLE_DEBUG_HEADERS=true
@ -95,9 +89,6 @@ OPENSEARCH_MODEL_ID=
# prisma db url
DATABASE_URL="postgresql://sn:password@db:5432/stackernews?schema=public"
DB_APP_CONNECTION_LIMIT=2
DB_WORKER_CONNECTION_LIMIT=2
DB_TRANSACTION_TIMEOUT=5000
# polling intervals
NEXT_PUBLIC_FAST_POLL_INTERVAL=1000
@ -116,6 +107,8 @@ POSTGRES_DB=stackernews
# opensearch container stuff
OPENSEARCH_INITIAL_ADMIN_PASSWORD=mVchg1T5oA9wudUh
plugins.security.disabled=true
discovery.type=single-node
DISABLE_SECURITY_DASHBOARDS_PLUGIN=true
# bitcoind container stuff
@ -126,42 +119,27 @@ RPC_PORT=18443
P2P_PORT=18444
ZMQ_BLOCK_PORT=28334
ZMQ_TX_PORT=28335
ZMQ_HASHBLOCK_PORT=29000
# sn_lnd container stuff
SN_LND_REST_PORT=8080
SN_LND_GRPC_PORT=10009
SN_LND_P2P_PORT=9735
# sn lnd container stuff
LND_REST_PORT=8080
LND_GRPC_PORT=10009
LND_P2P_PORT=9735
# docker exec -u lnd sn_lnd lncli newaddress p2wkh --unused
SN_LND_ADDR=bcrt1q7q06n5st4vqq3lssn0rtkrn2qqypghv9xg2xnl
SN_LND_PUBKEY=02cb2e2d5a6c5b17fa67b1a883e2973c82e328fb9bd08b2b156a9e23820c87a490
# sn_lndk stuff
SN_LNDK_GRPC_PORT=10012
LND_ADDR=bcrt1q7q06n5st4vqq3lssn0rtkrn2qqypghv9xg2xnl
LND_PUBKEY=02cb2e2d5a6c5b17fa67b1a883e2973c82e328fb9bd08b2b156a9e23820c87a490
# lnd container stuff
LND_REST_PORT=8081
LND_GRPC_PORT=10010
# docker exec -u lnd lnd lncli newaddress p2wkh --unused
LND_ADDR=bcrt1qfqau4ug9e6rtrvxrgclg58e0r93wshucumm9vu
LND_PUBKEY=028093ae52e011d45b3e67f2e0f2cb6c3a1d7f88d2920d408f3ac6db3a56dc4b35
# stacker lnd container stuff
STACKER_LND_REST_PORT=8081
STACKER_LND_GRPC_PORT=10010
# docker exec -u lnd stacker_lnd lncli newaddress p2wkh --unused
STACKER_LND_ADDR=bcrt1qfqau4ug9e6rtrvxrgclg58e0r93wshucumm9vu
STACKER_LND_PUBKEY=028093ae52e011d45b3e67f2e0f2cb6c3a1d7f88d2920d408f3ac6db3a56dc4b35
# cln container stuff
CLN_REST_PORT=9092
# docker exec -u clightning cln lightning-cli newaddr bech32
CLN_ADDR=bcrt1q02sqd74l4pxedy24fg0qtjz4y2jq7x4lxlgzrx
CLN_PUBKEY=03ca7acec181dbf5e427c682c4261a46a0dd9ea5f35d97acb094e399f727835b90
# sndev cli eclair getnewaddress
# sndev cli eclair getinfo
ECLAIR_ADDR="bcrt1qdus2yml69wsax3unz8pts9h979lc3s4tw0tpf6"
ECLAIR_PUBKEY="02268c74cc07837041131474881f97d497706b89a29f939555da6d094b65bd5af0"
# router lnd container stuff
ROUTER_LND_REST_PORT=8082
ROUTER_LND_GRPC_PORT=10011
# docker exec -u lnd router_lnd lncli newaddress p2wkh --unused
ROUTER_LND_ADDR=bcrt1qfkmwfpwgn6wt0dd36s79x04swz8vleyafsdpdr
ROUTER_LND_PUBKEY=02750991fbf62e57631888bc469fae69c5e658bd1d245d8ab95ed883517caa33c3
# stacker cln container stuff
STACKER_CLN_REST_PORT=9092
# docker exec -u clightning stacker_cln lightning-cli newaddr bech32
STACKER_CLN_ADDR=bcrt1q02sqd74l4pxedy24fg0qtjz4y2jq7x4lxlgzrx
STACKER_CLN_PUBKEY=03ca7acec181dbf5e427c682c4261a46a0dd9ea5f35d97acb094e399f727835b90
LNCLI_NETWORK=regtest
@ -171,16 +149,8 @@ AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
PERSISTENCE=1
SKIP_SSL_CERT_DOWNLOAD=1
# tor proxy
TOR_PROXY=http://tor:7050/
grpc_proxy=http://tor:7050/
# tor
TOR_PROXY=http://127.0.0.1:7050/
# lnbits
LNBITS_WEB_PORT=5001
# CPU shares for each category
CPU_SHARES_IMPORTANT=1024
CPU_SHARES_MODERATE=512
CPU_SHARES_LOW=256
NEXT_TELEMETRY_DISABLED=1
LNBITS_WEB_PORT=5001

View File

@ -16,10 +16,4 @@ NEXT_PUBLIC_NORMAL_POLL_INTERVAL=30000
NEXT_PUBLIC_LONG_POLL_INTERVAL=60000
NEXT_PUBLIC_EXTRA_LONG_POLL_INTERVAL=300000
NEXT_PUBLIC_URL=https://stacker.news
TOR_PROXY=http://127.0.0.1:7050/
PRISMA_SLOW_LOGS_MS=50
GRAPHQL_SLOW_LOGS_MS=50
DB_APP_CONNECTION_LIMIT=4
DB_WORKER_CONNECTION_LIMIT=2
DB_TRANSACTION_TIMEOUT=10000
NEXT_TELEMETRY_DISABLED=1
TOR_PROXY=http://127.0.0.1:7050/

35
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
View File

@ -0,0 +1,35 @@
---
name: Bug report
about: Report a problem
title: ''
labels: bug
assignees: ''
---
*Note: this template is meant to help you report the bug so that we can fix it faster, i.e. not all of these sections are required*
**Description**
A clear and concise description of what the bug is.
**Steps to Reproduce**
A clear and concise way we might be able to reproduce the bug.
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Logs**
If applicable, add your browser's console logs.
**Environment:**
If you only experience the issue on certain devices or browsers, provide that info.
- Device: [e.g. iPhone6]
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.

View File

@ -1,65 +0,0 @@
name: 🐞 Bug report
description: Create a bug report to help us fix it
title: "bug report"
labels: [bug]
body:
- type: checkboxes
attributes:
label: Is there an existing issue for this?
description: Please search to see if an issue already exists for the bug you encountered.
options:
- label: I have searched the existing issues
required: true
- type: textarea
attributes:
label: Describe the bug
description: A clear and concise description of what the bug is. Include images if relevant.
placeholder: I accidentally deleted the internet. Here's my story ...
validations:
required: true
- type: textarea
attributes:
label: Screenshots
description: |
Add screenshots to help explain your problem. You can also add a video here.
Tip: You can attach images or video files by clicking this area to highlight it and then dragging files in.
validations:
required: false
- type: textarea
attributes:
label: Steps To Reproduce
description: Steps to reproduce the bug.
placeholder: |
1. Go to '...'
2. Click on '...'
3. Scroll to '...'
4. See error
validations:
required: true
- type: textarea
attributes:
label: Expected behavior
description: A clear and concise description of what you expected to happen
validations:
required: true
- type: textarea
attributes:
label: Logs
description: If applicable, add your browser's console logs here
- type: textarea
attributes:
label: Device information
placeholder: |
- OS: [e.g. Windows]
- Browser: [e.g. chrome, safari, firefox]
- Browser Version: [e.g. 22]
validations:
required: false
- type: textarea
attributes:
label: Additional context
description: |
Do you have links to discussions about this on SN or other references?
validations:
required: false

View File

@ -1,5 +0,0 @@
blank_issues_enabled: true
contact_links:
- name: Questions
url: https://stacker.news/~meta
about: If you simply have a question, you can ask it in ~meta or the saloon.

View File

@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest a feature
title: ''
labels: feature
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

View File

@ -1,32 +0,0 @@
name: ✨ Feature request
description: Request a feature you'd like to see in SN!
title: "feature request"
labels: [feature]
body:
- type: markdown
attributes:
value: |
We're always looking for suggestions on how we could improve SN!
- type: textarea
attributes:
label: Describe the problem you're trying to solve
description: |
Is your feature request related to a problem? Add a clear and concise description of what the problem is.
validations:
required: true
- type: textarea
attributes:
label: Describe the solution you'd like
description: A clear and concise description of what you want to happen.
validations:
required: true
- type: textarea
attributes:
label: Describe alternatives you've considered
description: |
A clear and concise description of any alternative solutions or features you have considered.
- type: textarea
attributes:
label: Additional context
description: |
Add any other additional context or screenshots about the feature request here.

View File

@ -1,22 +1,45 @@
## Description
_A clear and concise description of what you changed and why._
<!--
A clear and concise description of what you changed and why.
Don't forget to mention which tickets this closes (if any).
Use the following syntax to close them automatically on merge: closes #<number>
-->
## Screenshots
<!--
If your changes are user facing, please add screenshots of the new UI.
You can also create a video to showcase your changes (useful to show UX).
-->
## Additional Context
_Was anything unclear during your work on this PR? Anything we should definitely take a closer look at?_
<!--
You can mention here anything that you think is relevant for this PR. Some examples:
* You encountered something that you didn't understand while working on this PR
* You were not sure about something you did but did not find a better way
* You initially had a different approach but went with this one for some reason
-->
## Checklist
**Are your changes backwards compatible? Please answer below:**
<!-- put your answer about backwards compatibility here -->
**On a scale of 1-10 how well and how have you QA'd this change and any features it might affect? Please answer below:**
<!--
If your PR is not ready for review yet, please mark your PR as a draft.
If changes were requested, request a new review when you incorporated the feedback.
-->
**Did you QA this? Could we deploy this straight to production? Please answer below:**
<!-- put your answer about QA here -->
**For frontend changes: Tested on mobile, light and dark mode? Please answer below:**
**For frontend changes: Tested on mobile? Please answer below:**
<!-- put your answer about mobile QA here -->
**Did you introduce any new environment variables? If so, call them out explicitly here:**
<!-- put your answer about env vars here -->

View File

@ -1,8 +1,8 @@
name: Lint Check
name: Eslint Check
on: [pull_request]
jobs:
lint-run:
eslint-run:
runs-on: ubuntu-latest
steps:
- name: Checkout
@ -11,7 +11,7 @@ jobs:
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: "18.20.4"
node-version: "18.17.0"
- name: Install
run: npm install

View File

@ -11,7 +11,7 @@ jobs:
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: "18.20.4"
node-version: "18.17.0"
- name: Install
run: npm install

7
.gitignore vendored
View File

@ -20,6 +20,7 @@ node_modules/
.DS_Store
*.pem
/*.sql
lnbits/
# debug
npm-debug.log*
@ -56,9 +57,3 @@ docker-compose.*.yml
# nostr wallet connect
scripts/nwc-keys.json
# lnbits
docker/lnbits/data
# lndk
!docker/lndk/tls-*.pem

View File

@ -1,6 +1,6 @@
# syntax=docker/dockerfile:1
FROM node:18.20.4-bullseye
FROM node:18.17.0-bullseye
ENV NODE_ENV=development

View File

@ -30,8 +30,7 @@ Go to [localhost:3000](http://localhost:3000).
- Clone the repo
- ssh: `git clone git@github.com:stackernews/stacker.news.git`
- https: `git clone https://github.com/stackernews/stacker.news.git`
- Install [docker](https://docs.docker.com/compose/install/)
- Please make sure that at least 10 GB of free space is available, otherwise you may encounter issues while setting up the development environment.
- Install [docker](https://docs.docker.com/get-docker/)
<br>
@ -64,55 +63,64 @@ USAGE
$ sndev help [COMMAND]
COMMANDS
help show help
help show help
env:
start start env
stop stop env
restart restart env
status status of env
logs logs from env
delete delete env
start start env
stop stop env
restart restart env
status status of env
logs logs from env
delete delete env
sn:
login login as a nym
set_balance set the balance of a nym
login login as a nym
lightning:
fund pay a bolt11 for funding
withdraw create a bolt11 for withdrawal
lnd:
fund pay a bolt11 for funding
withdraw create a bolt11 for withdrawal
cln:
cln_fund pay a bolt11 for funding with CLN
cln_withdraw create a bolt11 for withdrawal with CLN
db:
psql open psql on db
prisma run prisma commands
psql open psql on db
prisma run prisma commands
dev:
pr fetch and checkout a pr
lint run linters
test run tests
pr fetch and checkout a pr
lint run linters
other:
cli service cli passthrough
open open service GUI in browser
onion service onion address
cert service tls cert
compose docker compose passthrough
compose docker compose passthrough
sn_lndcli lncli passthrough on sn_lnd
stacker_lndcli lncli passthrough on stacker_lnd
stacker_clncli lightning-cli passthrough on stacker_cln
```
### Modifying services
#### Running specific services
By default all services will be run. If you want to exclude specific services from running, set `COMPOSE_PROFILES` in a `.env.local` file to one or more of `minimal,images,search,payments,wallets,email,capture`. To run only the minimal services necessary, without things like payments, put this in `.env.local`:
By default all services will be run. If you want to exclude specific services from running, set `COMPOSE_PROFILES` to use one or more of `minimal|images|search|payments|email|capture`. To run only the minimal services, without images, search, or payments:
```.env
COMPOSE_PROFILES=minimal
```sh
$ COMPOSE_PROFILES=minimal ./sndev start
```
Or, as I would recommend:
```sh
$ export COMPOSE_PROFILES=minimal
$ ./sndev start
```
To run with images and payments services:
```.env
COMPOSE_PROFILES=images,payments
```sh
$ COMPOSE_PROFILES=images,payments ./sndev start
```
#### Merging compose files
@ -225,7 +233,6 @@ _Due to Rule 3, make sure that you mark your PR as a draft when you create it an
| tag | multiplier |
| ----------------- | ---------- |
| `priority:low` | 0.5 |
| `priority:medium` | 1.5 |
| `priority:high` | 2 |
| `priority:urgent` | 3 |
@ -363,11 +370,9 @@ You can connect to the local database via `./sndev psql`. [psql](https://www.pos
<br>
## Running cli on local lightning nodes
## Running lncli on the local lnd nodes
You can run `lncli` on the local lnd nodes via `./sndev cli lnd` and `./sndev cli sn_lnd`. The node for your local SN instance is `sn_lnd` and the node serving as any external node, like a stacker's node or external wallet, is `lnd`.
You can run `lightning-cli` on the local cln node via `./sndev cli cln` which serves as an external node or wallet.
You can run `lncli` on the local lnd nodes via `./sndev sn_lncli` and `./sndev stacker_lncli`. The node for your local SN instance is `sn_lnd` and the node serving as any external node, like a stacker's node or external wallet, is `stacker_lnd`.
<br>
@ -426,7 +431,7 @@ GITHUB_SECRET=<Client secret>
## Enabling web push notifications
To enable Web Push locally, you will need to set the `VAPID_*` env vars. `VAPID_MAILTO` needs to be an email address using the `mailto:` scheme. For `NEXT_PUBLIC_VAPID_PUBKEY` and `VAPID_PRIVKEY`, you can run `npx web-push generate-vapid-keys`.
To enable Web Push locally, you will need to set the `VAPID_*` env vars. `VAPID_MAILTO` needs to be an email address using the `mailto:` scheme. For `NEXT_PUBLIC_VAPID_KEY` and `VAPID_PRIVKEY`, you can run `npx web-push generate-vapid-keys`.
<br>
@ -454,9 +459,7 @@ In addition, we run other critical services the above services interact with lik
## Wallet transaction safety
To ensure stackers' balances are kept sane, some wallet updates are run in [serializable transactions](https://www.postgresql.org/docs/current/transaction-iso.html#XACT-SERIALIZABLE) at the database level. Because early versions of prisma had relatively poor support for transactions, most wallet-touching code is written in [plpgsql](https://www.postgresql.org/docs/current/plpgsql.html) stored procedures and can be found in the `prisma/migrations` folder.
*UPDATE*: Most wallet updates are now run in [read committed](https://www.postgresql.org/docs/current/transaction-iso.html#XACT-READ-COMMITTED) transactions. See `api/paidAction/README.md` for more information.
To ensure stackers' balances are kept sane, all wallet updates are run in [serializable transactions](https://www.postgresql.org/docs/current/transaction-iso.html#XACT-SERIALIZABLE) at the database level. Because early versions of prisma had relatively poor support for transactions, most wallet-touching code is written in [plpgsql](https://www.postgresql.org/docs/current/plpgsql.html) stored procedures and can be found in the `prisma/migrations` folder.
<br>

View File

@ -1,20 +1,13 @@
import { cachedFetcher } from '@/lib/fetch'
import { toPositiveNumber } from '@/lib/format'
import { authenticatedLndGrpc } from '@/lib/lnd'
import { getIdentity, getHeight, getWalletInfo, getNode, getPayment } from 'ln-service'
import { datePivot } from '@/lib/time'
import { LND_PATHFINDING_TIMEOUT_MS } from '@/lib/constants'
import lndService from 'ln-service'
const lnd = global.lnd || authenticatedLndGrpc({
const { lnd } = lndService.authenticatedLndGrpc({
cert: process.env.LND_CERT,
macaroon: process.env.LND_MACAROON,
socket: process.env.LND_SOCKET
}).lnd
if (process.env.NODE_ENV === 'development') global.lnd = lnd
})
// Check LND GRPC connection
getWalletInfo({ lnd }, (err, result) => {
lndService.getWalletInfo({ lnd }, (err, result) => {
if (err) {
console.error('LND GRPC connection error')
return
@ -22,158 +15,4 @@ getWalletInfo({ lnd }, (err, result) => {
console.log('LND GRPC connection successful')
})
export async function estimateRouteFee ({ lnd, destination, tokens, mtokens, request, timeout }) {
return await new Promise((resolve, reject) => {
const params = {}
if (request) {
params.payment_request = request
} else {
params.dest = Buffer.from(destination, 'hex')
params.amt_sat = tokens ? toPositiveNumber(tokens) : toPositiveNumber(BigInt(mtokens) / BigInt(1e3))
}
lnd.router.estimateRouteFee({
...params,
timeout
}, (err, res) => {
if (err) {
if (res?.failure_reason) {
reject(new Error(`Unable to estimate route: ${res.failure_reason}`))
} else {
reject(err)
}
return
}
if (res.routing_fee_msat < 0 || res.time_lock_delay <= 0) {
reject(new Error('Unable to estimate route, excessive values: ' + JSON.stringify(res)))
return
}
resolve({
routingFeeMsat: toPositiveNumber(res.routing_fee_msat),
timeLockDelay: toPositiveNumber(res.time_lock_delay)
})
})
})
}
// created_height is the accepted_height, timeout is the expiry height
// ln-service remaps the `htlcs` field of lookupInvoice to `payments` and
// see: https://github.com/alexbosworth/lightning/blob/master/lnd_responses/htlc_as_payment.js
// and: https://lightning.engineering/api-docs/api/lnd/lightning/lookup-invoice/index.html#lnrpcinvoicehtlc
export function hodlInvoiceCltvDetails (inv) {
if (!inv.payments) {
throw new Error('No payments found')
}
if (!inv.is_held) {
throw new Error('Invoice is not held')
}
const acceptHeight = inv.payments.reduce((max, htlc) => {
const createdHeight = toPositiveNumber(htlc.created_height)
return createdHeight > max ? createdHeight : max
}, 0)
const expiryHeight = inv.payments.reduce((min, htlc) => {
const timeout = toPositiveNumber(htlc.timeout)
return timeout < min ? timeout : min
}, Number.MAX_SAFE_INTEGER)
return {
expiryHeight: toPositiveNumber(expiryHeight),
acceptHeight: toPositiveNumber(acceptHeight)
}
}
export function getPaymentFailureStatus (withdrawal) {
if (withdrawal && !withdrawal.is_failed) {
throw new Error('withdrawal is not failed')
}
if (withdrawal?.failed?.is_insufficient_balance) {
return {
status: 'INSUFFICIENT_BALANCE',
message: 'you didn\'t have enough sats'
}
} else if (withdrawal?.failed?.is_invalid_payment) {
return {
status: 'INVALID_PAYMENT',
message: 'invalid payment'
}
} else if (withdrawal?.failed?.is_pathfinding_timeout) {
return {
status: 'PATHFINDING_TIMEOUT',
message: 'no route found'
}
} else if (withdrawal?.failed?.is_route_not_found) {
return {
status: 'ROUTE_NOT_FOUND',
message: 'no route found'
}
}
return {
status: 'UNKNOWN_FAILURE',
message: 'unknown failure'
}
}
export const getBlockHeight = cachedFetcher(async function fetchBlockHeight ({ lnd, ...args }) {
try {
const { current_block_height: height } = await getHeight({ lnd, ...args })
return height
} catch (err) {
throw new Error(`Unable to fetch block height: ${err.message}`)
}
}, {
maxSize: 1,
cacheExpiry: 60 * 1000, // 1 minute
forceRefreshThreshold: 5 * 60 * 1000, // 5 minutes
keyGenerator: () => 'getHeight'
})
export const getOurPubkey = cachedFetcher(async function fetchOurPubkey ({ lnd, ...args }) {
try {
const identity = await getIdentity({ lnd, ...args })
return identity.public_key
} catch (err) {
throw new Error(`Unable to fetch identity: ${err.message}`)
}
}, {
maxSize: 1,
cacheExpiry: 0, // never expire
forceRefreshThreshold: 0, // never force refresh
keyGenerator: () => 'getOurPubkey'
})
export const getNodeSockets = cachedFetcher(async function fetchNodeSockets ({ lnd, ...args }) {
try {
return (await getNode({ lnd, is_omitting_channels: true, ...args }))?.sockets
} catch (err) {
throw new Error(`Unable to fetch node info: ${err.message}`)
}
}, {
maxSize: 100,
cacheExpiry: 1000 * 60 * 60 * 24, // 1 day
forceRefreshThreshold: 1000 * 60 * 60 * 24 * 7, // 1 week
keyGenerator: (args) => {
const { public_key: publicKey } = args
return publicKey
}
})
export async function getPaymentOrNotSent ({ id, lnd, createdAt }) {
try {
return await getPayment({ id, lnd })
} catch (err) {
if (err[1] === 'SentPaymentNotFound' &&
createdAt < datePivot(new Date(), { milliseconds: -LND_PATHFINDING_TIMEOUT_MS * 2 })) {
// if the payment is older than 2x timeout, but not found in LND, we can assume it errored before lnd stored it
return { notSent: true, is_failed: true }
} else {
throw err
}
}
}
export default lnd

View File

@ -1,12 +1,18 @@
import createPrisma from '@/lib/create-prisma'
import { PrismaClient } from '@prisma/client'
const prisma = global.prisma || (() => {
console.log('initing prisma')
return createPrisma({
connectionParams: {
connection_limit: process.env.DB_APP_CONNECTION_LIMIT
const prisma = new PrismaClient({
log: [{ level: 'query', emit: 'event' }, 'warn', 'error']
})
prisma.$on('query', (e) => {
if (e.duration > 50) {
console.log('Query: ' + e.query)
console.log('Params: ' + e.params)
console.log('Duration: ' + e.duration + 'ms')
}
})
return prisma
})()
if (process.env.NODE_ENV === 'development') global.prisma = prisma

3
api/package.json Normal file
View File

@ -0,0 +1,3 @@
{
"type": "module"
}

View File

@ -1,371 +0,0 @@
# Paid Actions
Paid actions are actions that require payment to perform. Given that we support several payment flows, some of which require more than one round of communication with either LND or the client, and several paid actions, we use this plugin-like interface to make it easy to add new paid actions.
<details>
<summary>internals</summary>
All paid action progress, regardless of flow, is managed using a state machine that's transitioned by the invoice progress and payment progress (in the case of p2p paid action). Below is the full state machine for paid actions:
```mermaid
stateDiagram-v2
[*] --> PENDING
PENDING --> PAID
PENDING --> CANCELING
PENDING --> FAILED
PAID --> [*]
CANCELING --> FAILED
FAILED --> RETRYING
FAILED --> [*]
RETRYING --> [*]
[*] --> PENDING_HELD
PENDING_HELD --> HELD
PENDING_HELD --> FORWARDING
PENDING_HELD --> CANCELING
PENDING_HELD --> FAILED
HELD --> PAID
HELD --> CANCELING
HELD --> FAILED
FORWARDING --> FORWARDED
FORWARDING --> FAILED_FORWARD
FORWARDED --> PAID
FAILED_FORWARD --> CANCELING
FAILED_FORWARD --> FAILED
```
</details>
## Payment Flows
There are three payment flows:
### Fee credits
The stacker has enough fee credits to pay for the action. This is the simplest flow and is similar to a normal request.
### Optimistic
The optimistic flow is useful for actions that require immediate feedback to the client, but don't require the action to be immediately visible to everyone else.
For paid actions that support it, if the stacker doesn't have enough fee credits, we store the action in a `PENDING` state on the server, which is visible only to the stacker, then return a payment request to the client. The client then pays the invoice however and whenever they wish, and the server monitors payment progress. If the payment succeeds, the action is fully executed, becoming visible to everyone, and is marked as `PAID`. Otherwise, the action is marked as `FAILED`, the client is notified that the payment failed, and the payment can be retried.
<details>
<summary>Internals</summary>
Internally, optimistic flows make use of a state machine that's transitioned by the invoice payment progress.
```mermaid
stateDiagram-v2
[*] --> PENDING
PENDING --> PAID
PENDING --> CANCELING
PENDING --> FAILED
PAID --> [*]
CANCELING --> FAILED
FAILED --> RETRYING
FAILED --> [*]
RETRYING --> [*]
```
</details>
### Pessimistic
For paid actions that don't support the optimistic flow (or when the stacker is `@anon`), if the client doesn't have enough fee credits, we return a payment request to the client without performing the action, storing only the action's arguments. After the client pays the invoice, the server performs the action with the original arguments. Pessimistic actions require the payment to complete before the action becomes visible to the stacker and everyone else.
Internally, pessimistic flows use hold invoices. If the action doesn't succeed, the payment is cancelled and it's as if the payment never happened (i.e. it's a lightning-native refund mechanism).
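A minimal sketch of that refund mechanism using `ln-service`'s hold invoice calls (`createHodlInvoice`, `settleHodlInvoice`, `cancelHodlInvoice`); the wiring around the preimage and where the action actually runs is assumed for illustration, since the real flow is driven by the state machine below:
```js
import { randomBytes, createHash } from 'crypto'
import { createHodlInvoice, settleHodlInvoice, cancelHodlInvoice } from 'ln-service'

async function pessimisticSketch ({ lnd, tokens, performAction }) {
  const secret = randomBytes(32) // preimage we keep until the action succeeds
  const id = createHash('sha256').update(secret).digest('hex') // payment hash

  const { request } = await createHodlInvoice({ lnd, id, tokens, description: 'SN: pessimistic action' })
  // ... the client pays `request`; once the HTLC is accepted, the invoice is "held" ...

  try {
    await performAction() // perform the action with its stored arguments
    await settleHodlInvoice({ lnd, secret: secret.toString('hex') }) // claim the sender's payment
  } catch (err) {
    await cancelHodlInvoice({ lnd, id }) // the HTLC is released and the sender keeps their sats
    throw err
  }
}
```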
<details>
<summary>Internals</summary>
Internally, pessimistic flows make use of a state machine that's transitioned by the invoice payment progress much like optimistic flows, but with extra steps.
```mermaid
stateDiagram-v2
PAID --> [*]
CANCELING --> FAILED
FAILED --> [*]
[*] --> PENDING_HELD
PENDING_HELD --> HELD
PENDING_HELD --> CANCELING
PENDING_HELD --> FAILED
HELD --> PAID
HELD --> CANCELING
HELD --> FAILED
```
</details>
### Table of existing paid actions and their supported flows
| action | fee credits | optimistic | pessimistic | anonable | qr payable | p2p wrapped | side effects | reward sats | p2p direct |
| ----------------- | ----------- | ---------- | ----------- | -------- | ---------- | ----------- | ------------ | ----------- | ---------- |
| zaps | x | x | x | x | x | x | x | | |
| posts | x | x | x | x | x | | x | x | |
| comments | x | x | x | x | x | | x | x | |
| downzaps | x | x | | | x | | x | x | |
| poll votes | x | x | | | x | | | x | |
| territory actions | x | | x | | x | | | x | |
| donations | x | | x | x | x | | | x | |
| update posts | x | | x | | x | | x | x | |
| update comments | x | | x | | x | | x | x | |
| receive | | x | | | x | x | x | | x |
| buy fee credits | | | x | | x | | | x | |
| invite gift | x | | | | | | x | x | |
## Non-custodial zaps (i.e. p2p wrapped payments)
Zaps, and possibly other future actions, can be performed peer to peer and non-custodially. This means that the payment is made directly from the client to the recipient, without the server taking custody of the funds. Currently, in order to trigger this behavior, the recipient must have a receiving wallet attached and the sender must have insufficient funds in their custodial wallet to perform the requested zap.
This works by requesting an invoice from the recipient's wallet and reusing the payment hash in a hold invoice paid to SN (to collect the sybil fee) which we serve to the sender. When the sender pays this wrapped invoice, we forward our own money to the recipient, who then reveals the preimage to us, allowing us to settle the wrapped invoice and claim the sender's funds. This effectively does what a lightning node does when forwarding a payment but allows us to do it at the application layer.
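Concretely, the wrapping described above is handled by `createWrappedInvoice` from `@/wallets/server`; the excerpt below is essentially the call made in `performP2PAction` later in this diff (the comments are interpretive):
```js
import { createWrappedInvoice } from '@/wallets/server'

// request an invoice from the recipient's attached wallet and wrap it in a
// hold invoice paid to SN, keeping `feePercent` as the sybil fee
const { invoice, wrappedInvoice, wallet, maxFee } = await createWrappedInvoice(userId, {
  msats: cost,
  feePercent: sybilFeePercent, // e.g. 30n for 30%
  description,
  expiry: INVOICE_EXPIRE_SECS
}, { models, me, lnd })
// `wrappedInvoice` is served to the sender; `invoice` is the recipient's invoice
// we forward to once the sender's payment is accepted
```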
<details>
<summary>Internals</summary>
Internally, p2p wrapped payments make use of the same paid action state machine but it's transitioned by both the incoming invoice payment progress *and* the outgoing invoice payment progress.
```mermaid
stateDiagram-v2
PAID --> [*]
CANCELING --> FAILED
FAILED --> RETRYING
FAILED --> [*]
RETRYING --> [*]
[*] --> PENDING_HELD
PENDING_HELD --> FORWARDING
PENDING_HELD --> CANCELING
PENDING_HELD --> FAILED
FORWARDING --> FORWARDED
FORWARDING --> FAILED_FORWARD
FORWARDED --> PAID
FAILED_FORWARD --> CANCELING
FAILED_FORWARD --> FAILED
```
</details>
## Paid Action Interface
Each paid action is implemented in its own file in the `paidAction` directory. Each file exports a module with the following properties:
### Boolean flags
- `anonable`: can be performed anonymously
### Payment methods
- `paymentMethods`: an array of payment methods that the action supports ordered from most preferred to least preferred
- P2P: a p2p payment made directly from the client to the recipient
- after wrapping the invoice, anonymous users will follow a PESSIMISTIC flow to pay the invoice and logged in users will follow an OPTIMISTIC flow
- FEE_CREDIT: a payment made from the user's fee credit balance
- OPTIMISTIC: an optimistic payment flow
- PESSIMISTIC: a pessimistic payment flow
### Functions
All functions have the following signature: `function(args: Object, context: Object): Promise`
- `getCost`: returns the cost of the action in msats as a `BigInt`
- `perform`: performs the action
- returns: an object with the result of the action as defined in the `graphql` schema
- if the action supports optimism and an `invoiceId` is provided, the action should be performed optimistically
- any action data that needs to be hidden while it's pending should be stored in its rows with a `PENDING` state along with its `invoiceId`
- it can optionally store in the invoice with the `invoiceId` the `actionId` to be able to link the action with the invoice regardless of retries
- `onPaid`: called when the action is paid
- if the action does not support optimism, this function is optional
- this function should be used to mark the rows created in `perform` as `PAID` and perform critical side effects of the action (like denormalizations)
- `nonCriticalSideEffects`: called after the action is paid to run any side effects whose failure does not affect the action's execution
- this function is always optional
- it's passed the result of the action (or the action's paid invoice) and the current context
- this is where things like push notifications should be handled
- `onFail`: called when the action fails
- if the action does not support optimism, this function is optional
- this function should be used to mark the rows created in `perform` as `FAILED`
- `retry`: called when the action is retried with any new invoice information
- return: an object with the result of the action as defined in the `graphql` schema (same as `perform`)
- this function is called when an optimistic action is retried
- it's passed the original `invoiceId` and the `newInvoiceId`
- this function should update the rows created in `perform` to reference the `newInvoiceId` and re-mark them as `PENDING`
- `getInvoiceablePeer`: returns the userId of the peer that's capable of generating an invoice so they can be paid for the action
- this is only used for p2p wrapped zaps currently
- `describe`: returns a description as a string of the action
- for actions that require generating an invoice, and for stackers that don't hide invoice descriptions, this is used in the invoice description
- `getSybilFeePercent` (required if `getInvoiceablePeer` is implemented): returns the action sybil fee percent as a `BigInt` (e.g. `30n` for 30%)
#### Function arguments
`args` contains the arguments for the action as defined in the `graphql` schema. If the action is optimistic or pessimistic, `args` will contain an `invoiceId` field which can be stored alongside the paid action's data. If this is a call to `retry`, `args` will contain the original `invoiceId` and `newInvoiceId` fields.
`context` contains the following fields:
- `me`: the user performing the action (undefined if anonymous)
- `cost`: the cost of the action in msats as a `BigInt`
- `sybilFeePercent`: the sybil fee percent as a `BigInt` (e.g. `30n` for 30%)
- `tx`: the current transaction (for anything that needs to be done atomically with the payment)
- `models`: the current prisma client (for anything that doesn't need to be done atomically with the payment)
- `lnd`: the current lnd client
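Putting the interface above together, a minimal hypothetical paid action module might look like the sketch below. It is loosely modeled on the `donate` and `boost` actions that appear later in this diff; the `exampleAction` table and the exact fields stored are assumptions for illustration.
```js
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'

export const anonable = false

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC
]

export async function getCost ({ sats }) {
  // cost of the action in msats as a BigInt
  return satsToMsats(sats)
}

export async function perform ({ invoiceId, sats }, { me, cost, tx }) {
  // when performed optimistically, store the row as PENDING and reference the invoice
  const invoiceData = invoiceId ? { invoiceId, invoiceActionState: 'PENDING' } : {}
  return await tx.exampleAction.create({ data: { msats: cost, userId: me.id, ...invoiceData } })
}

export async function onPaid ({ invoice }, { tx }) {
  // mark the rows created in `perform` as PAID and run critical side effects here
  if (invoice) {
    await tx.exampleAction.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID' } })
  }
}

export async function onFail ({ invoice }, { tx }) {
  await tx.exampleAction.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
}

export async function describe ({ sats }) {
  return `SN: example action of ${sats} sats`
}
```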
## Recording Cowboy Credits
To avoid adding sats and credits together everywhere to show an aggregate sat value, in most cases we denormalize a `sats` field that carries the "sats value", the combined sats + credits of something, and a `credits` field that carries only the earned `credits`. For example, the `Item` table has an `msats` field that carries the sum of the `mcredits` and `msats` earned and an `mcredits` field that carries only the `mcredits` earned. So, the sats value an item earned is `item.msats` BUT the real sats earned is `item.msats - item.mcredits`.
The ONLY exception to this is the `users` table, where we store a stacker's reward sats and credits balances separately.
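For example, a small sketch of that arithmetic using `msatsToSats` from `@/lib/format` (`item` here is a hypothetical row from the `Item` table):
```js
import { msatsToSats } from '@/lib/format'

// `item.msats` carries the sats *value* (sats + credits), `item.mcredits` only the credits
const satsValue = msatsToSats(item.msats) // what we display as the item's sats
const realSats = msatsToSats(item.msats - item.mcredits) // sats actually earned
```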
## `IMPORTANT: transaction isolation`
We use a `read committed` isolation level for actions. This means paid actions need to be mindful of concurrency issues. Specifically, reading data from the database and then writing it back in `read committed` is a common source of consistency bugs (aka serialization anomalies).
### This is a big deal
1. If you read from the database and intend to use that data to write to the database, and it's possible that a concurrent transaction could change the data you've read (it usually is), you need to be prepared to handle that.
2. This applies to **ALL**, and I really mean **ALL**, data you read, regardless of how you read it within the `read committed` transaction:
- independent statements
- `WITH` queries (CTEs) in the same statement
- subqueries in the same statement
### How to handle it
1. take row level locks on the rows you read, using something like a `SELECT ... FOR UPDATE` statement
- NOTE: this does not protect against missing concurrent inserts. It only prevents concurrent updates to the rows you've already read.
- read about row level locks available in postgres: https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-ROWS
2. check that the data you read is still valid before writing it back to the database i.e. optimistic concurrency control
- NOTE: this does not protect against missing concurrent inserts. It only prevents concurrent updates to the rows you've already read.
3. avoid having to read data from one row to modify the data of another row altogether
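A sketch of option 1 above in the app's prisma idiom (the `Item` row and fields locked here are only an illustration, and note it still does not protect against concurrent inserts):
```js
import { Prisma } from '@prisma/client'

// `models` is the prisma client; `itemId` and `sats` come from the action's args
async function addBoostWithRowLock (models, { itemId, sats }) {
  return await models.$transaction(async tx => {
    // lock the row we're about to read-then-write so concurrent updates serialize behind us
    const [item] = await tx.$queryRaw`
      SELECT id, boost FROM "Item" WHERE id = ${itemId}::INTEGER FOR UPDATE`

    return await tx.item.update({
      where: { id: item.id },
      data: { boost: item.boost + sats }
    })
  }, { isolationLevel: Prisma.TransactionIsolationLevel.ReadCommitted })
}
```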
### Example
Let's say you are aggregating total sats for an item from a table `zaps` and updating the total sats for that item in another table `item_zaps`. Two 100 sat zaps are requested for the same item at the same time in two concurrent transactions. The total sats for the item should be 200, but because of the way `read committed` works, the following statements lead to a total sats of 100:
*the statements here are listed in the order they are executed, but each transaction is happening concurrently*
#### Incorrect
```sql
-- transaction 1
BEGIN;
INSERT INTO zaps (item_id, sats) VALUES (1, 100);
SELECT sum(sats) INTO total_sats FROM zaps WHERE item_id = 1;
-- total_sats is 100
-- transaction 2
BEGIN;
INSERT INTO zaps (item_id, sats) VALUES (1, 100);
SELECT sum(sats) INTO total_sats FROM zaps WHERE item_id = 1;
-- total_sats is still 100, because transaction 1 hasn't committed yet
-- transaction 1
UPDATE item_zaps SET sats = total_sats WHERE item_id = 1;
-- sets sats to 100
-- transaction 2
UPDATE item_zaps SET sats = total_sats WHERE item_id = 1;
-- sets sats to 100
COMMIT;
-- transaction 1
COMMIT;
-- item_zaps.sats is 100, but we would expect it to be 200
```
Note that row level locks wouldn't help in this case, because we can't lock the rows that the transactions don't know to exist yet.
#### Subqueries are still incorrect
```sql
-- transaction 1
BEGIN;
INSERT INTO zaps (item_id, sats) VALUES (1, 100);
UPDATE item_zaps SET sats = (SELECT sum(sats) INTO total_sats FROM zaps WHERE item_id = 1) WHERE item_id = 1;
-- item_zaps.sats is 100
-- transaction 2
BEGIN;
INSERT INTO zaps (item_id, sats) VALUES (1, 100);
UPDATE item_zaps SET sats = (SELECT sum(sats) INTO total_sats FROM zaps WHERE item_id = 1) WHERE item_id = 1;
-- item_zaps.sats is still 100, because transaction 1 hasn't committed yet
-- transaction 1
COMMIT;
-- transaction 2
COMMIT;
-- item_zaps.sats is 100, but we would expect it to be 200
```
Note that while transaction 2's `UPDATE` statement will block until transaction 1 commits, its subquery is computed before it blocks and is not re-evaluated after the block.
#### Correct
```sql
-- transaction 1
BEGIN;
INSERT INTO zaps (item_id, sats) VALUES (1, 100);
-- transaction 2
BEGIN;
INSERT INTO zaps (item_id, sats) VALUES (1, 100);
-- transaction 1
UPDATE item_zaps SET sats = sats + 100 WHERE item_id = 1;
-- transaction 2
UPDATE item_zaps SET sats = sats + 100 WHERE item_id = 1;
COMMIT;
-- transaction 1
COMMIT;
-- item_zaps.sats is 200
```
The above works because `UPDATE` takes a lock on the rows it's updating, so transaction 2 will block until transaction 1 commits, and once transaction 2 is unblocked, it will re-evaluate the `sats` value of the row it's updating.
#### More resources
- https://stackoverflow.com/questions/61781595/postgres-read-commited-doesnt-re-read-updated-row?noredirect=1#comment109279507_61781595
- https://www.cybertec-postgresql.com/en/transaction-anomalies-with-select-for-update/
From the [postgres docs](https://www.postgresql.org/docs/current/transaction-iso.html#XACT-READ-COMMITTED):
> UPDATE, DELETE, SELECT FOR UPDATE, and SELECT FOR SHARE commands behave the same as SELECT in terms of searching for target rows: they will only find target rows that were committed as of the command start time. However, such a target row might have already been updated (or deleted or locked) by another concurrent transaction by the time it is found. In this case, the would-be updater will wait for the first updating transaction to commit or roll back (if it is still in progress). If the first updater rolls back, then its effects are negated and the second updater can proceed with updating the originally found row. If the first updater commits, the second updater will ignore the row if the first updater deleted it, otherwise it will attempt to apply its operation to the updated version of the row. The search condition of the command (the WHERE clause) is re-evaluated to see if the updated version of the row still matches the search condition. If so, the second updater proceeds with its operation using the updated version of the row. In the case of SELECT FOR UPDATE and SELECT FOR SHARE, this means it is the updated version of the row that is locked and returned to the client.
From the [postgres source docs](https://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/backend/executor/README#l350):
> It is also possible that there are relations in the query that are not to be locked (they are neither the UPDATE/DELETE/MERGE target nor specified to be locked in SELECT FOR UPDATE/SHARE). When re-running the test query ***we want to use the same rows*** from these relations that were joined to the locked rows.
## `IMPORTANT: deadlocks`
Deadlocks can occur when two transactions are waiting for each other to release locks. This can happen when two transactions lock rows in different orders, whether explicitly or implicitly.
If both transactions lock the rows in the same order, the deadlock is avoided.
### Incorrect
```sql
-- transaction 1
BEGIN;
UPDATE users set msats = msats + 1 WHERE id = 1;
-- transaction 2
BEGIN;
UPDATE users set msats = msats + 1 WHERE id = 2;
-- transaction 1 (blocks here until transaction 2 commits)
UPDATE users set msats = msats + 1 WHERE id = 2;
-- transaction 2 (blocks here until transaction 1 commits)
UPDATE users set msats = msats + 1 WHERE id = 1;
-- deadlock occurs because neither transaction can proceed to here
```
In practice, this most often occurs when selecting multiple rows for update in different orders. Recently, we had a deadlock when splitting zaps to multiple users. The solution was to select the rows for update in the same order.
### Incorrect
```sql
WITH forwardees AS (
SELECT "userId", (($1::BIGINT * pct) / 100)::BIGINT AS msats
FROM "ItemForward"
WHERE "itemId" = $2::INTEGER
)
UPDATE users
SET
msats = users.msats + forwardees.msats,
"stackedMsats" = users."stackedMsats" + forwardees.msats
FROM forwardees
WHERE users.id = forwardees."userId";
```
If forwardees are selected in a different order in two concurrent transactions, e.g. (1,2) in tx 1 and (2,1) in tx 2, a deadlock can occur. To avoid this, always select rows for update in the same order.
### Correct
We fixed the deadlock by selecting the forwardees in the same order in these transactions.
```sql
WITH forwardees AS (
SELECT "userId", (($1::BIGINT * pct) / 100)::BIGINT AS msats
FROM "ItemForward"
WHERE "itemId" = $2::INTEGER
ORDER BY "userId" ASC
)
UPDATE users
SET
msats = users.msats + forwardees.msats,
"stackedMsats" = users."stackedMsats" + forwardees.msats
FROM forwardees
WHERE users.id = forwardees."userId";
```
### More resources
- https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-DEADLOCKS

View File

@ -1,82 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { msatsToSats, satsToMsats } from '@/lib/format'
export const anonable = false
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC
]
export async function getCost ({ sats }) {
return satsToMsats(sats)
}
export async function perform ({ invoiceId, sats, id: itemId, ...args }, { me, cost, tx }) {
itemId = parseInt(itemId)
let invoiceData = {}
if (invoiceId) {
invoiceData = { invoiceId, invoiceActionState: 'PENDING' }
// store a reference to the item in the invoice
await tx.invoice.update({
where: { id: invoiceId },
data: { actionId: itemId }
})
}
const act = await tx.itemAct.create({ data: { msats: cost, itemId, userId: me.id, act: 'BOOST', ...invoiceData } })
const [{ path }] = await tx.$queryRaw`
SELECT ltree2text(path) as path FROM "Item" WHERE id = ${itemId}::INTEGER`
return { id: itemId, sats, act: 'BOOST', path, actId: act.id }
}
export async function retry ({ invoiceId, newInvoiceId }, { tx, cost }) {
await tx.itemAct.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
const [{ id, path }] = await tx.$queryRaw`
SELECT "Item".id, ltree2text(path) as path
FROM "Item"
JOIN "ItemAct" ON "Item".id = "ItemAct"."itemId"
WHERE "ItemAct"."invoiceId" = ${newInvoiceId}::INTEGER`
return { id, sats: msatsToSats(cost), act: 'BOOST', path }
}
export async function onPaid ({ invoice, actId }, { tx }) {
let itemAct
if (invoice) {
await tx.itemAct.updateMany({
where: { invoiceId: invoice.id },
data: {
invoiceActionState: 'PAID'
}
})
itemAct = await tx.itemAct.findFirst({ where: { invoiceId: invoice.id } })
} else if (actId) {
itemAct = await tx.itemAct.findFirst({ where: { id: actId } })
} else {
throw new Error('No invoice or actId')
}
// increment boost on item
await tx.item.update({
where: { id: itemAct.itemId },
data: {
boost: { increment: msatsToSats(itemAct.msats) }
}
})
await tx.$executeRaw`
INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter, keepuntil)
VALUES ('expireBoost', jsonb_build_object('id', ${itemAct.itemId}::INTEGER), 21, true,
now() + interval '30 days', now() + interval '40 days')`
}
export async function onFail ({ invoice }, { tx }) {
await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
}
export async function describe ({ id: itemId, sats }, { actionId, cost }) {
return `SN: boost ${sats ?? msatsToSats(cost)} sats to #${itemId ?? actionId}`
}

View File

@ -1,32 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
export const anonable = false
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]
export async function getCost ({ credits }) {
return satsToMsats(credits)
}
export async function perform ({ credits }, { me, cost, tx }) {
await tx.user.update({
where: { id: me.id },
data: {
mcredits: {
increment: cost
}
}
})
return {
credits
}
}
export async function describe () {
return 'SN: buy fee credits'
}

View File

@ -1,29 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, USER_ID } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
export const anonable = true
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]
export async function getCost ({ sats }) {
return satsToMsats(sats)
}
export async function perform ({ sats }, { me, tx }) {
await tx.donation.create({
data: {
sats,
userId: me?.id ?? USER_ID.anon
}
})
return { sats }
}
export async function describe (args, context) {
return 'SN: donate to rewards pool'
}

View File

@ -1,84 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { msatsToSats, satsToMsats } from '@/lib/format'
export const anonable = false
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC
]
export async function getCost ({ sats }) {
return satsToMsats(sats)
}
export async function perform ({ invoiceId, sats, id: itemId }, { me, cost, tx }) {
itemId = parseInt(itemId)
let invoiceData = {}
if (invoiceId) {
invoiceData = { invoiceId, invoiceActionState: 'PENDING' }
// store a reference to the item in the invoice
await tx.invoice.update({
where: { id: invoiceId },
data: { actionId: itemId }
})
}
const itemAct = await tx.itemAct.create({
data: { msats: cost, itemId, userId: me.id, act: 'DONT_LIKE_THIS', ...invoiceData }
})
const [{ path }] = await tx.$queryRaw`SELECT ltree2text(path) as path FROM "Item" WHERE id = ${itemId}::INTEGER`
return { id: itemId, sats, act: 'DONT_LIKE_THIS', path, actId: itemAct.id }
}
export async function retry ({ invoiceId, newInvoiceId }, { tx, cost }) {
await tx.itemAct.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
const [{ id, path }] = await tx.$queryRaw`
SELECT "Item".id, ltree2text(path) as path
FROM "Item"
JOIN "ItemAct" ON "Item".id = "ItemAct"."itemId"
WHERE "ItemAct"."invoiceId" = ${newInvoiceId}::INTEGER`
return { id, sats: msatsToSats(cost), act: 'DONT_LIKE_THIS', path }
}
export async function onPaid ({ invoice, actId }, { tx }) {
let itemAct
if (invoice) {
await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID' } })
itemAct = await tx.itemAct.findFirst({ where: { invoiceId: invoice.id } })
} else if (actId) {
itemAct = await tx.itemAct.findUnique({ where: { id: actId } })
} else {
throw new Error('No invoice or actId')
}
const msats = BigInt(itemAct.msats)
const sats = msatsToSats(msats)
// denormalize downzaps
await tx.$executeRaw`
WITH zapper AS (
SELECT trust FROM users WHERE id = ${itemAct.userId}::INTEGER
), zap AS (
INSERT INTO "ItemUserAgg" ("userId", "itemId", "downZapSats")
VALUES (${itemAct.userId}::INTEGER, ${itemAct.itemId}::INTEGER, ${sats}::INTEGER)
ON CONFLICT ("itemId", "userId") DO UPDATE
SET "downZapSats" = "ItemUserAgg"."downZapSats" + ${sats}::INTEGER, updated_at = now()
RETURNING LOG("downZapSats" / GREATEST("downZapSats" - ${sats}::INTEGER, 1)::FLOAT) AS log_sats
)
UPDATE "Item"
SET "weightedDownVotes" = "weightedDownVotes" + (zapper.trust * zap.log_sats)
FROM zap, zapper
WHERE "Item".id = ${itemAct.itemId}::INTEGER`
}
export async function onFail ({ invoice }, { tx }) {
await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
}
export async function describe ({ id: itemId, sats }, { cost, actionId }) {
return `SN: downzap of ${sats ?? msatsToSats(cost)} sats to #${itemId ?? actionId}`
}

View File

@ -1,486 +0,0 @@
import { createHodlInvoice, createInvoice, parsePaymentRequest } from 'ln-service'
import { datePivot } from '@/lib/time'
import { PAID_ACTION_PAYMENT_METHODS, USER_ID } from '@/lib/constants'
import { createHmac } from '@/api/resolvers/wallet'
import { Prisma } from '@prisma/client'
import { createWrappedInvoice, createInvoice as createUserInvoice } from '@/wallets/server'
import { assertBelowMaxPendingInvoices, assertBelowMaxPendingDirectPayments } from './lib/assert'
import * as ITEM_CREATE from './itemCreate'
import * as ITEM_UPDATE from './itemUpdate'
import * as ZAP from './zap'
import * as DOWN_ZAP from './downZap'
import * as POLL_VOTE from './pollVote'
import * as TERRITORY_CREATE from './territoryCreate'
import * as TERRITORY_UPDATE from './territoryUpdate'
import * as TERRITORY_BILLING from './territoryBilling'
import * as TERRITORY_UNARCHIVE from './territoryUnarchive'
import * as DONATE from './donate'
import * as BOOST from './boost'
import * as RECEIVE from './receive'
import * as BUY_CREDITS from './buyCredits'
import * as INVITE_GIFT from './inviteGift'
export const paidActions = {
ITEM_CREATE,
ITEM_UPDATE,
ZAP,
DOWN_ZAP,
BOOST,
POLL_VOTE,
TERRITORY_CREATE,
TERRITORY_UPDATE,
TERRITORY_BILLING,
TERRITORY_UNARCHIVE,
DONATE,
RECEIVE,
BUY_CREDITS,
INVITE_GIFT
}
export default async function performPaidAction (actionType, args, incomingContext) {
try {
const { me, models, forcePaymentMethod } = incomingContext
const paidAction = paidActions[actionType]
console.group('performPaidAction', actionType, args)
if (!paidAction) {
throw new Error(`Invalid action type ${actionType}`)
}
if (!me && !paidAction.anonable) {
throw new Error('You must be logged in to perform this action')
}
// treat context as immutable
const contextWithMe = {
...incomingContext,
me: me ? await models.user.findUnique({ where: { id: parseInt(me.id) } }) : undefined
}
const context = {
...contextWithMe,
cost: await paidAction.getCost(args, contextWithMe),
sybilFeePercent: await paidAction.getSybilFeePercent?.(args, contextWithMe)
}
// special case for zero cost actions
if (context.cost === 0n) {
console.log('performing zero cost action')
return await performNoInvoiceAction(actionType, args, { ...context, paymentMethod: 'ZERO_COST' })
}
for (const paymentMethod of paidAction.paymentMethods) {
console.log(`considering payment method ${paymentMethod}`)
const contextWithPaymentMethod = { ...context, paymentMethod }
if (forcePaymentMethod &&
paymentMethod !== forcePaymentMethod) {
console.log('skipping payment method', paymentMethod, 'because forcePaymentMethod is set to', forcePaymentMethod)
continue
}
// payment methods that anonymous users can use
if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.P2P) {
try {
return await performP2PAction(actionType, args, contextWithPaymentMethod)
} catch (e) {
if (e instanceof NonInvoiceablePeerError) {
console.log('peer cannot be invoiced, skipping')
continue
}
console.error(`${paymentMethod} action failed`, e)
throw e
}
} else if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC) {
return await beginPessimisticAction(actionType, args, contextWithPaymentMethod)
}
// additional payment methods that logged in users can use
if (me) {
if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT ||
paymentMethod === PAID_ACTION_PAYMENT_METHODS.REWARD_SATS) {
try {
return await performNoInvoiceAction(actionType, args, contextWithPaymentMethod)
} catch (e) {
// if we fail with fee credits or reward sats, but not because of insufficient funds, bail
console.error(`${paymentMethod} action failed`, e)
if (!e.message.includes('\\"users\\" violates check constraint \\"msats_positive\\"') &&
!e.message.includes('\\"users\\" violates check constraint \\"mcredits_positive\\"')) {
throw e
}
}
} else if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC) {
return await performOptimisticAction(actionType, args, contextWithPaymentMethod)
} else if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.DIRECT) {
try {
return await performDirectAction(actionType, args, contextWithPaymentMethod)
} catch (e) {
if (e instanceof NonInvoiceablePeerError) {
console.log('peer cannot be invoiced, skipping')
continue
}
console.error(`${paymentMethod} action failed`, e)
throw e
}
}
}
}
throw new Error('No working payment method found')
} catch (e) {
console.error('performPaidAction failed', e)
throw e
} finally {
console.groupEnd()
}
}
async function performNoInvoiceAction (actionType, args, incomingContext) {
const { me, models, cost, paymentMethod } = incomingContext
const action = paidActions[actionType]
const result = await models.$transaction(async tx => {
const context = { ...incomingContext, tx }
if (paymentMethod === 'FEE_CREDIT') {
await tx.user.update({
where: {
id: me?.id ?? USER_ID.anon
},
data: { mcredits: { decrement: cost } }
})
} else if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.REWARD_SATS) {
await tx.user.update({
where: {
id: me?.id ?? USER_ID.anon
},
data: { msats: { decrement: cost } }
})
}
const result = await action.perform(args, context)
await action.onPaid?.(result, context)
return {
result,
paymentMethod
}
}, { isolationLevel: Prisma.TransactionIsolationLevel.ReadCommitted })
// run non critical side effects in the background
// after the transaction has been committed
action.nonCriticalSideEffects?.(result.result, incomingContext).catch(console.error)
return result
}
async function performOptimisticAction (actionType, args, incomingContext) {
const { models, invoiceArgs: incomingInvoiceArgs } = incomingContext
const action = paidActions[actionType]
const optimisticContext = { ...incomingContext, optimistic: true }
const invoiceArgs = incomingInvoiceArgs ?? await createSNInvoice(actionType, args, optimisticContext)
return await models.$transaction(async tx => {
const context = { ...optimisticContext, tx, invoiceArgs }
const invoice = await createDbInvoice(actionType, args, context)
return {
invoice,
result: await action.perform?.({ invoiceId: invoice.id, ...args }, context),
paymentMethod: 'OPTIMISTIC'
}
}, { isolationLevel: Prisma.TransactionIsolationLevel.ReadCommitted })
}
async function beginPessimisticAction (actionType, args, context) {
const action = paidActions[actionType]
if (!action.paymentMethods.includes(PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC)) {
throw new Error(`This action ${actionType} does not support pessimistic invoicing`)
}
// just create the invoice and complete action when it's paid
const invoiceArgs = context.invoiceArgs ?? await createSNInvoice(actionType, args, context)
return {
invoice: await createDbInvoice(actionType, args, { ...context, invoiceArgs }),
paymentMethod: 'PESSIMISTIC'
}
}
async function performP2PAction (actionType, args, incomingContext) {
// if the action has an invoiceable peer, we'll create a peer invoice
// wrap it, and return the wrapped invoice
const { cost, sybilFeePercent, models, lnd, me } = incomingContext
if (!sybilFeePercent) {
throw new Error('sybil fee percent is not set for an invoiceable peer action')
}
const userId = await paidActions[actionType]?.getInvoiceablePeer?.(args, incomingContext)
if (!userId) {
throw new NonInvoiceablePeerError()
}
let context
try {
await assertBelowMaxPendingInvoices(incomingContext)
const description = await paidActions[actionType].describe(args, incomingContext)
const { invoice, wrappedInvoice, wallet, maxFee } = await createWrappedInvoice(userId, {
msats: cost,
feePercent: sybilFeePercent,
description,
expiry: INVOICE_EXPIRE_SECS
}, { models, me, lnd })
context = {
...incomingContext,
invoiceArgs: {
bolt11: invoice,
wrappedBolt11: wrappedInvoice,
wallet,
maxFee
}
}
} catch (e) {
console.error('failed to create wrapped invoice', e)
throw new NonInvoiceablePeerError()
}
return me
? await performOptimisticAction(actionType, args, context)
: await beginPessimisticAction(actionType, args, context)
}
// we don't need to use the module for perform-ing outside actions
// because we can't track the state of outside invoices we aren't paid/paying
async function performDirectAction (actionType, args, incomingContext) {
const { models, lnd, cost } = incomingContext
const { comment, lud18Data, noteStr, description: actionDescription } = args
const userId = await paidActions[actionType]?.getInvoiceablePeer?.(args, incomingContext)
if (!userId) {
throw new NonInvoiceablePeerError()
}
let invoiceObject
try {
await assertBelowMaxPendingDirectPayments(userId, incomingContext)
const description = actionDescription ?? await paidActions[actionType].describe(args, incomingContext)
invoiceObject = await createUserInvoice(userId, {
msats: cost,
description,
expiry: INVOICE_EXPIRE_SECS
}, { models, lnd })
} catch (e) {
console.error('failed to create outside invoice', e)
throw new NonInvoiceablePeerError()
}
const { invoice, wallet } = invoiceObject
const hash = parsePaymentRequest({ request: invoice }).id
const payment = await models.directPayment.create({
data: {
comment,
lud18Data,
desc: noteStr,
bolt11: invoice,
msats: cost,
hash,
walletId: wallet.id,
receiverId: userId
}
})
return {
invoice: payment,
paymentMethod: 'DIRECT'
}
}
export async function retryPaidAction (actionType, args, incomingContext) {
const { models, me } = incomingContext
const { invoice: failedInvoice } = args
console.log('retryPaidAction', actionType, args)
const action = paidActions[actionType]
if (!action) {
throw new Error(`retryPaidAction - invalid action type ${actionType}`)
}
if (!me) {
throw new Error(`retryPaidAction - must be logged in ${actionType}`)
}
if (!failedInvoice) {
throw new Error(`retryPaidAction - missing invoice ${actionType}`)
}
const { msatsRequested, actionId, actionArgs, actionOptimistic } = failedInvoice
const retryContext = {
...incomingContext,
optimistic: actionOptimistic,
me: await models.user.findUnique({ where: { id: parseInt(me.id) } }),
cost: BigInt(msatsRequested),
actionId,
predecessorId: failedInvoice.id
}
let invoiceArgs
const invoiceForward = await models.invoiceForward.findUnique({
where: {
invoiceId: failedInvoice.id
},
include: {
wallet: true
}
})
if (invoiceForward) {
// this is a wrapped invoice, we need to retry it with receiver fallbacks
try {
const { userId } = invoiceForward.wallet
// this will return an invoice from the first receiver wallet that didn't fail yet and throw if none is available
const { invoice: bolt11, wrappedInvoice: wrappedBolt11, wallet, maxFee } = await createWrappedInvoice(userId, {
msats: failedInvoice.msatsRequested,
feePercent: await action.getSybilFeePercent?.(actionArgs, retryContext),
description: await action.describe?.(actionArgs, retryContext),
expiry: INVOICE_EXPIRE_SECS
}, retryContext)
invoiceArgs = { bolt11, wrappedBolt11, wallet, maxFee }
} catch (err) {
console.log('failed to retry wrapped invoice, falling back to SN:', err)
}
}
invoiceArgs ??= await createSNInvoice(actionType, actionArgs, retryContext)
return await models.$transaction(async tx => {
const context = { ...retryContext, tx, invoiceArgs }
// update the old invoice to RETRYING, so that it's not confused with FAILED
await tx.invoice.update({
where: {
id: failedInvoice.id,
actionState: 'FAILED'
},
data: {
actionState: 'RETRYING'
}
})
// create a new invoice
const invoice = await createDbInvoice(actionType, actionArgs, context)
return {
result: await action.retry?.({ invoiceId: failedInvoice.id, newInvoiceId: invoice.id }, context),
invoice,
paymentMethod: actionOptimistic ? 'OPTIMISTIC' : 'PESSIMISTIC'
}
}, { isolationLevel: Prisma.TransactionIsolationLevel.ReadCommitted })
}
const INVOICE_EXPIRE_SECS = 600
export class NonInvoiceablePeerError extends Error {
constructor () {
super('non invoiceable peer')
this.name = 'NonInvoiceablePeerError'
}
}
// we separate invoice creation into two functions
// because if lnd is slow, it'll time out the interactive tx
async function createSNInvoice (actionType, args, context) {
const { me, lnd, cost, optimistic } = context
const action = paidActions[actionType]
const createLNDInvoice = optimistic ? createInvoice : createHodlInvoice
await assertBelowMaxPendingInvoices(context)
if (cost < 1000n) {
// sanity check
throw new Error('The cost of the action must be at least 1 sat')
}
const expiresAt = datePivot(new Date(), { seconds: INVOICE_EXPIRE_SECS })
const invoice = await createLNDInvoice({
description: me?.hideInvoiceDesc ? undefined : await action.describe(args, context),
lnd,
mtokens: String(cost),
expires_at: expiresAt
})
return { bolt11: invoice.request, preimage: invoice.secret }
}
async function createDbInvoice (actionType, args, context) {
const { me, models, tx, cost, optimistic, actionId, invoiceArgs, predecessorId } = context
const { bolt11, wrappedBolt11, preimage, wallet, maxFee } = invoiceArgs
const db = tx ?? models
if (cost < 1000n) {
// sanity check
throw new Error('The cost of the action must be at least 1 sat')
}
const servedBolt11 = wrappedBolt11 ?? bolt11
const servedInvoice = parsePaymentRequest({ request: servedBolt11 })
const expiresAt = new Date(servedInvoice.expires_at)
const invoiceData = {
hash: servedInvoice.id,
msatsRequested: BigInt(servedInvoice.mtokens),
preimage,
bolt11: servedBolt11,
userId: me?.id ?? USER_ID.anon,
actionType,
actionState: wrappedBolt11 ? 'PENDING_HELD' : optimistic ? 'PENDING' : 'PENDING_HELD',
actionOptimistic: optimistic,
actionArgs: args,
expiresAt,
actionId,
predecessorId
}
let invoice
if (wrappedBolt11) {
invoice = (await db.invoiceForward.create({
include: { invoice: true },
data: {
bolt11,
maxFeeMsats: maxFee,
invoice: {
create: invoiceData
},
wallet: {
connect: {
id: wallet.id
}
}
}
})).invoice
} else {
invoice = await db.invoice.create({ data: invoiceData })
}
// insert a job to check the invoice after it's set to expire
await db.$executeRaw`
INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter, keepuntil, priority)
VALUES ('checkInvoice',
jsonb_build_object('hash', ${invoice.hash}::TEXT), 21, true,
${expiresAt}::TIMESTAMP WITH TIME ZONE,
${expiresAt}::TIMESTAMP WITH TIME ZONE + interval '10m', 100)`
// the HMAC is only returned during invoice creation
// this makes sure that only the person who created this invoice
// has access to the HMAC
invoice.hmac = createHmac(invoice.hash)
return invoice
}
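For orientation, a minimal sketch of the module shape this dispatcher drives, inferred from the hooks it calls above (`paymentMethods`, `perform`, `describe`, and the optional `onPaid`, `retry`, `nonCriticalSideEffects`) and from the action modules that follow. The `donate` action and its `tx.donation` table are hypothetical, purely illustrative:

// hypothetical api/paidAction/donate.js -- a sketch, not part of this diff
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'

export const anonable = false
export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
  PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]

// cost in msats; the dispatcher checks it against the user's balance or invoices it
export async function getCost ({ sats }) {
  return satsToMsats(sats)
}

// runs inside the same transaction as the payment bookkeeping (context.tx)
export async function perform ({ sats }, { me, tx }) {
  return await tx.donation.create({ data: { sats, userId: me.id } })
}

export async function describe ({ sats }) {
  return `SN: donate ${sats} sats`
}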

View File

@ -1,60 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
import { notifyInvite } from '@/lib/webPush'
export const anonable = false
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
PAID_ACTION_PAYMENT_METHODS.REWARD_SATS
]
export async function getCost ({ id }, { models, me }) {
const invite = await models.invite.findUnique({ where: { id, userId: me.id, revoked: false } })
if (!invite) {
throw new Error('invite not found')
}
return satsToMsats(invite.gift)
}
export async function perform ({ id, userId }, { me, cost, tx }) {
const invite = await tx.invite.findUnique({
where: { id, userId: me.id, revoked: false }
})
if (invite.limit && invite.giftedCount >= invite.limit) {
throw new Error('invite limit reached')
}
// check that the user was created in the last hour
// and did not already redeem an invite
await tx.user.update({
where: {
id: userId,
inviteId: null,
createdAt: {
gt: new Date(Date.now() - 1000 * 60 * 60)
}
},
data: {
mcredits: {
increment: cost
},
inviteId: id,
referrerId: me.id
}
})
return await tx.invite.update({
where: { id, userId: me.id, revoked: false, ...(invite.limit ? { giftedCount: { lt: invite.limit } } : {}) },
data: {
giftedCount: {
increment: 1
}
}
})
}
export async function nonCriticalSideEffects (_, { me }) {
notifyInvite(me.id)
}
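As a quick worked example of the gift flow above (hypothetical numbers): an invite with `gift = 1000` makes `getCost` return `satsToMsats(1000) = 1,000,000` msats; `perform` then increments the redeeming user's `mcredits` by that same cost and bumps `giftedCount`, so the inviter pays 1000 sats (from fee credits or reward sats) and the invitee receives 1000 sats worth of credits.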

View File

@ -1,282 +0,0 @@
import { ANON_ITEM_SPAM_INTERVAL, ITEM_SPAM_INTERVAL, PAID_ACTION_PAYMENT_METHODS, USER_ID } from '@/lib/constants'
import { notifyItemMention, notifyItemParents, notifyMention, notifyTerritorySubscribers, notifyUserSubscribers } from '@/lib/webPush'
import { getItemMentions, getMentions, performBotBehavior } from './lib/item'
import { msatsToSats, satsToMsats } from '@/lib/format'
import { GqlInputError } from '@/lib/error'
export const anonable = true
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC,
PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]
export async function getCost ({ subName, parentId, uploadIds, boost = 0, bio }, { models, me }) {
const sub = (parentId || bio) ? null : await models.sub.findUnique({ where: { name: subName } })
const baseCost = sub ? satsToMsats(sub.baseCost) : 1000n
// cost = baseCost * 10^num_items_in_10m * 100 (anon) or 1 (user) + upload fees + boost
const [{ cost }] = await models.$queryRaw`
SELECT ${baseCost}::INTEGER
* POWER(10, item_spam(${parseInt(parentId)}::INTEGER, ${me?.id ?? USER_ID.anon}::INTEGER,
${me?.id && !bio ? ITEM_SPAM_INTERVAL : ANON_ITEM_SPAM_INTERVAL}::INTERVAL))
* ${me ? 1 : 100}::INTEGER
+ (SELECT "nUnpaid" * "uploadFeesMsats"
FROM upload_fees(${me?.id || USER_ID.anon}::INTEGER, ${uploadIds}::INTEGER[]))
+ ${satsToMsats(boost)}::INTEGER as cost`
// freebie if: it's a comment or bio, cost is at most baseCost, the user is logged in,
// both their sats and credits are below the cost, and they haven't disabled freebies
const freebie = (parentId || bio) && cost <= baseCost && !!me &&
me?.msats < cost && !me?.disableFreebies && me?.mcredits < cost
return freebie ? BigInt(0) : BigInt(cost)
}
export async function perform (args, context) {
const { invoiceId, parentId, uploadIds = [], forwardUsers = [], options: pollOptions = [], boost = 0, ...data } = args
const { tx, me, cost } = context
const boostMsats = satsToMsats(boost)
const deletedUploads = []
for (const uploadId of uploadIds) {
if (!await tx.upload.findUnique({ where: { id: uploadId } })) {
deletedUploads.push(uploadId)
}
}
if (deletedUploads.length > 0) {
throw new Error(`upload(s) ${deletedUploads.join(', ')} are expired, consider reuploading.`)
}
let invoiceData = {}
if (invoiceId) {
invoiceData = { invoiceId, invoiceActionState: 'PENDING' }
await tx.upload.updateMany({
where: { id: { in: uploadIds } },
data: invoiceData
})
}
const itemActs = []
if (boostMsats > 0) {
itemActs.push({
msats: boostMsats, act: 'BOOST', userId: data.userId, ...invoiceData
})
}
if (cost > 0) {
itemActs.push({
msats: cost - boostMsats, act: 'FEE', userId: data.userId, ...invoiceData
})
data.cost = msatsToSats(cost - boostMsats)
}
const mentions = await getMentions(args, context)
const itemMentions = await getItemMentions(args, context)
// if the author's items have a negative median net vote, seed this item's weighted downvotes with it
if (me) {
const [row] = await tx.$queryRaw`SELECT
COALESCE(percentile_cont(0.5) WITHIN GROUP(
ORDER BY "weightedVotes" - "weightedDownVotes"), 0)
AS median FROM "Item" WHERE "userId" = ${me.id}::INTEGER`
if (row?.median < 0) {
data.weightedDownVotes = -row.median
}
}
const itemData = {
parentId: parentId ? parseInt(parentId) : null,
...data,
...invoiceData,
boost,
threadSubscriptions: {
createMany: {
data: [
{ userId: data.userId },
...forwardUsers.map(({ userId }) => ({ userId }))
]
}
},
itemForwards: {
createMany: {
data: forwardUsers
}
},
pollOptions: {
createMany: {
data: pollOptions.map(option => ({ option }))
}
},
itemUploads: {
create: uploadIds.map(id => ({ uploadId: id }))
},
itemActs: {
createMany: {
data: itemActs
}
},
mentions: {
createMany: {
data: mentions
}
},
itemReferrers: {
create: itemMentions
}
}
let item
if (data.bio && me) {
item = (await tx.user.update({
where: { id: data.userId },
include: { bio: true },
data: {
bio: {
create: itemData
}
}
})).bio
} else {
try {
item = await tx.item.create({ data: itemData })
} catch (err) {
if (err.message.includes('violates exclusion constraint \\"Item_unique_time_constraint\\"')) {
const message = `you already submitted this ${itemData.title ? 'post' : 'comment'}`
throw new GqlInputError(message)
}
throw err
}
}
// store a reference to the item in the invoice
if (invoiceId) {
await tx.invoice.update({
where: { id: invoiceId },
data: { actionId: item.id }
})
}
await performBotBehavior(item, context)
// ltree is unsupported in Prisma, so we have to query it manually (FUCK!)
return (await tx.$queryRaw`
SELECT *, ltree2text(path) AS path, created_at AS "createdAt", updated_at AS "updatedAt"
FROM "Item" WHERE id = ${item.id}::INTEGER`
)[0]
}
export async function retry ({ invoiceId, newInvoiceId }, { tx }) {
await tx.itemAct.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
await tx.item.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
await tx.upload.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
return (await tx.$queryRaw`
SELECT *, ltree2text(path) AS path, created_at AS "createdAt", updated_at AS "updatedAt"
FROM "Item" WHERE "invoiceId" = ${newInvoiceId}::INTEGER`
)[0]
}
export async function onPaid ({ invoice, id }, context) {
const { tx } = context
let item
if (invoice) {
item = await tx.item.findFirst({
where: { invoiceId: invoice.id },
include: {
user: true
}
})
await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID' } })
await tx.item.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID', invoicePaidAt: new Date() } })
await tx.upload.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID', paid: true } })
} else if (id) {
item = await tx.item.findUnique({
where: { id },
include: {
user: true,
itemUploads: { include: { upload: true } }
}
})
await tx.upload.updateMany({
where: { id: { in: item.itemUploads.map(({ uploadId }) => uploadId) } },
data: {
paid: true
}
})
} else {
throw new Error('No item found')
}
await tx.$executeRaw`INSERT INTO pgboss.job (name, data, startafter, priority)
VALUES ('timestampItem', jsonb_build_object('id', ${item.id}::INTEGER), now() + interval '10 minutes', -2)`
await tx.$executeRaw`
INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter)
VALUES ('imgproxy', jsonb_build_object('id', ${item.id}::INTEGER), 21, true, now() + interval '5 seconds')`
if (item.boost > 0) {
await tx.$executeRaw`
INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter, keepuntil)
VALUES ('expireBoost', jsonb_build_object('id', ${item.id}::INTEGER), 21, true,
now() + interval '30 days', now() + interval '40 days')`
}
if (item.parentId) {
// denormalize ncomments, lastCommentAt, and "weightedComments" for ancestors, and insert into reply table
await tx.$executeRaw`
WITH comment AS (
SELECT "Item".*, users.trust
FROM "Item"
JOIN users ON "Item"."userId" = users.id
WHERE "Item".id = ${item.id}::INTEGER
), ancestors AS (
UPDATE "Item"
SET ncomments = "Item".ncomments + 1,
"lastCommentAt" = GREATEST("Item"."lastCommentAt", comment.created_at),
"weightedComments" = "Item"."weightedComments" +
CASE WHEN comment."userId" = "Item"."userId" THEN 0 ELSE comment.trust END
FROM comment
WHERE "Item".path @> comment.path AND "Item".id <> comment.id
RETURNING "Item".*
)
INSERT INTO "Reply" (created_at, updated_at, "ancestorId", "ancestorUserId", "itemId", "userId", level)
SELECT comment.created_at, comment.updated_at, ancestors.id, ancestors."userId",
comment.id, comment."userId", nlevel(comment.path) - nlevel(ancestors.path)
FROM ancestors, comment`
}
}
export async function nonCriticalSideEffects ({ invoice, id }, { models }) {
const item = await models.item.findFirst({
where: invoice ? { invoiceId: invoice.id } : { id: parseInt(id) },
include: {
mentions: true,
itemReferrers: { include: { refereeItem: true } },
user: true
}
})
if (item.parentId) {
notifyItemParents({ item, models }).catch(console.error)
}
for (const { userId } of item.mentions) {
notifyMention({ models, item, userId }).catch(console.error)
}
for (const { refereeItem } of item.itemReferrers) {
notifyItemMention({ models, referrerItem: item, refereeItem }).catch(console.error)
}
notifyUserSubscribers({ models, item }).catch(console.error)
notifyTerritorySubscribers({ models, item }).catch(console.error)
}
export async function onFail ({ invoice }, { tx }) {
await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
await tx.item.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
await tx.upload.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
}
export async function describe ({ parentId }, context) {
return `SN: create ${parentId ? `reply to #${parentId}` : 'item'}`
}
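A worked example of the cost formula in `getCost` above (hypothetical numbers, and assuming `item_spam` counts the poster's recent items as the comment suggests): an anon top-level post in a territory with a 1 sat `baseCost` and one prior item inside the spam interval costs 1000 msats * 10^1 * 100 = 1,000,000 msats (1000 sats), plus unpaid upload fees and `satsToMsats(boost)`; a logged-in stacker posting their first item in the interval pays 1000 msats * 10^0 * 1 = 1000 msats (1 sat).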

View File

@ -1,182 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, USER_ID } from '@/lib/constants'
import { uploadFees } from '../resolvers/upload'
import { getItemMentions, getMentions, performBotBehavior } from './lib/item'
import { notifyItemMention, notifyMention } from '@/lib/webPush'
import { satsToMsats } from '@/lib/format'
export const anonable = true
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]
export async function getCost ({ id, boost = 0, uploadIds, bio }, { me, models }) {
// the only reason updating items costs anything is when it has new uploads
// or more boost
const old = await models.item.findUnique({ where: { id: parseInt(id) } })
const { totalFeesMsats } = await uploadFees(uploadIds, { models, me })
const cost = BigInt(totalFeesMsats) + satsToMsats(boost - old.boost)
if (cost > 0 && old.invoiceActionState && old.invoiceActionState !== 'PAID') {
throw new Error('creation invoice not paid')
}
return cost
}
export async function perform (args, context) {
const { id, boost = 0, uploadIds = [], options: pollOptions = [], forwardUsers: itemForwards = [], ...data } = args
const { tx, me } = context
const old = await tx.item.findUnique({
where: { id: parseInt(id) },
include: {
threadSubscriptions: true,
mentions: true,
itemForwards: true,
itemReferrers: true,
itemUploads: true
}
})
const newBoost = boost - old.boost
const itemActs = []
if (newBoost > 0) {
const boostMsats = satsToMsats(newBoost)
itemActs.push({
msats: boostMsats, act: 'BOOST', userId: me?.id || USER_ID.anon
})
}
// createMany is the set difference of the new - old
// deleteMany is the set difference of the old - new
// updateMany is the intersection of the old and new
const difference = (a = [], b = [], key = 'userId') => a.filter(x => !b.find(y => y[key] === x[key]))
const intersectionMerge = (a = [], b = [], key) => a.filter(x => b.find(y => y.userId === x.userId))
.map(x => ({ [key]: x[key], ...b.find(y => y.userId === x.userId) }))
const mentions = await getMentions(args, context)
const itemMentions = await getItemMentions(args, context)
const itemUploads = uploadIds.map(id => ({ uploadId: id }))
await tx.upload.updateMany({
where: { id: { in: uploadIds } },
data: { paid: true }
})
// we put boost in the where clause because we don't want to update the boost
// if it has changed concurrently
await tx.item.update({
where: { id: parseInt(id), boost: old.boost },
data: {
...data,
boost: {
increment: newBoost
},
pollOptions: {
createMany: {
data: pollOptions?.map(option => ({ option }))
}
},
itemUploads: {
create: difference(itemUploads, old.itemUploads, 'uploadId').map(({ uploadId }) => ({ uploadId })),
deleteMany: {
uploadId: {
in: difference(old.itemUploads, itemUploads, 'uploadId').map(({ uploadId }) => uploadId)
}
}
},
itemActs: {
createMany: {
data: itemActs
}
},
itemForwards: {
deleteMany: {
userId: {
in: difference(old.itemForwards, itemForwards).map(({ userId }) => userId)
}
},
createMany: {
data: difference(itemForwards, old.itemForwards)
},
update: intersectionMerge(old.itemForwards, itemForwards, 'id').map(({ id, ...data }) => ({
where: { id },
data
}))
},
threadSubscriptions: {
deleteMany: {
userId: {
in: difference(old.itemForwards, itemForwards).map(({ userId }) => userId)
}
},
createMany: {
data: difference(itemForwards, old.itemForwards).map(({ userId }) => ({ userId }))
}
},
mentions: {
deleteMany: {
userId: {
in: difference(old.mentions, mentions).map(({ userId }) => userId)
}
},
createMany: {
data: difference(mentions, old.mentions)
}
},
itemReferrers: {
deleteMany: {
refereeId: {
in: difference(old.itemReferrers, itemMentions, 'refereeId').map(({ refereeId }) => refereeId)
}
},
create: difference(itemMentions, old.itemReferrers, 'refereeId')
}
}
})
await tx.$executeRaw`
INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter, keepuntil)
VALUES ('imgproxy', jsonb_build_object('id', ${id}::INTEGER), 21, true,
now() + interval '5 seconds', now() + interval '1 day')`
if (newBoost > 0) {
await tx.$executeRaw`
INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter, keepuntil)
VALUES ('expireBoost', jsonb_build_object('id', ${id}::INTEGER), 21, true,
now() + interval '30 days', now() + interval '40 days')`
}
await performBotBehavior(args, context)
// ltree is unsupported in Prisma, so we have to query it manually (FUCK!)
return (await tx.$queryRaw`
SELECT *, ltree2text(path) AS path, created_at AS "createdAt", updated_at AS "updatedAt"
FROM "Item" WHERE id = ${parseInt(id)}::INTEGER`
)[0]
}
export async function nonCriticalSideEffects ({ invoice, id }, { models }) {
const item = await models.item.findFirst({
where: invoice ? { invoiceId: invoice.id } : { id: parseInt(id) },
include: {
mentions: true,
itemReferrers: { include: { refereeItem: true } }
}
})
// compare timestamps to only notify if mention or item referral was just created to avoid duplicates on edits
for (const { userId, createdAt } of item.mentions) {
if (item.updatedAt.getTime() !== createdAt.getTime()) continue
notifyMention({ models, item, userId }).catch(console.error)
}
for (const { refereeItem, createdAt } of item.itemReferrers) {
if (item.updatedAt.getTime() !== createdAt.getTime()) continue
notifyItemMention({ models, referrerItem: item, refereeItem }).catch(console.error)
}
}
export async function describe ({ id, parentId }, context) {
return `SN: update ${parentId ? `reply to #${parentId}` : 'post'}`
}

View File

@ -1,56 +0,0 @@
import { PAID_ACTION_TERMINAL_STATES, USER_ID } from '@/lib/constants'
import { datePivot } from '@/lib/time'
const MAX_PENDING_PAID_ACTIONS_PER_USER = 100
const MAX_PENDING_DIRECT_INVOICES_PER_USER_MINUTES = 10
const MAX_PENDING_DIRECT_INVOICES_PER_USER = 100
export async function assertBelowMaxPendingInvoices (context) {
const { models, me } = context
const pendingInvoices = await models.invoice.count({
where: {
userId: me?.id ?? USER_ID.anon,
actionState: {
notIn: PAID_ACTION_TERMINAL_STATES
}
}
})
if (pendingInvoices >= MAX_PENDING_PAID_ACTIONS_PER_USER) {
throw new Error('You have too many pending paid actions, cancel some or wait for them to expire')
}
}
export async function assertBelowMaxPendingDirectPayments (userId, context) {
const { models, me } = context
if (me?.id !== userId) {
const pendingSenderInvoices = await models.directPayment.count({
where: {
senderId: me?.id ?? USER_ID.anon,
createdAt: {
gt: datePivot(new Date(), { minutes: -MAX_PENDING_DIRECT_INVOICES_PER_USER_MINUTES })
}
}
})
if (pendingSenderInvoices >= MAX_PENDING_DIRECT_INVOICES_PER_USER) {
throw new Error('You\'ve sent too many direct payments')
}
}
if (!userId) return
const pendingReceiverInvoices = await models.directPayment.count({
where: {
receiverId: userId,
createdAt: {
gt: datePivot(new Date(), { minutes: -MAX_PENDING_DIRECT_INVOICES_PER_USER_MINUTES })
}
}
})
if (pendingReceiverInvoices >= MAX_PENDING_DIRECT_INVOICES_PER_USER) {
throw new Error('Receiver has too many direct payments')
}
}

View File

@ -1,89 +0,0 @@
import { USER_ID } from '@/lib/constants'
import { deleteReminders, getDeleteAt, getRemindAt } from '@/lib/item'
import { parseInternalLinks } from '@/lib/url'
export async function getMentions ({ text }, { me, tx }) {
const mentionPattern = /\B@[\w_]+/gi
const names = text.match(mentionPattern)?.map(m => m.slice(1))
if (names?.length > 0) {
const users = await tx.user.findMany({
where: {
name: {
in: names
},
id: {
not: me?.id || USER_ID.anon
}
}
})
return users.map(user => ({ userId: user.id }))
}
return []
}
export const getItemMentions = async ({ text }, { me, tx }) => {
const linkPattern = new RegExp(`${process.env.NEXT_PUBLIC_URL}/items/\\d+[a-zA-Z0-9/?=]*`, 'gi')
const refs = text.match(linkPattern)?.map(m => {
try {
const { itemId, commentId } = parseInternalLinks(m)
return Number(commentId || itemId)
} catch (err) {
return null
}
}).filter(r => !!r)
if (refs?.length > 0) {
const referee = await tx.item.findMany({
where: {
id: { in: refs },
userId: { not: me?.id || USER_ID.anon }
}
})
return referee.map(r => ({ refereeId: r.id }))
}
return []
}
export async function performBotBehavior ({ text, id }, { me, tx }) {
// delete any existing deleteItem or reminder jobs for this item
const userId = me?.id || USER_ID.anon
id = Number(id)
await tx.$queryRaw`
DELETE FROM pgboss.job
WHERE name = 'deleteItem'
AND data->>'id' = ${id}::TEXT
AND state <> 'completed'`
await deleteReminders({ id, userId, models: tx })
if (text) {
const deleteAt = getDeleteAt(text)
if (deleteAt) {
await tx.$queryRaw`
INSERT INTO pgboss.job (name, data, startafter, keepuntil)
VALUES (
'deleteItem',
jsonb_build_object('id', ${id}::INTEGER),
${deleteAt}::TIMESTAMP WITH TIME ZONE,
${deleteAt}::TIMESTAMP WITH TIME ZONE + interval '1 minute')`
}
const remindAt = getRemindAt(text)
if (remindAt) {
await tx.$queryRaw`
INSERT INTO pgboss.job (name, data, startafter, keepuntil)
VALUES (
'reminder',
jsonb_build_object('itemId', ${id}::INTEGER, 'userId', ${userId}::INTEGER),
${remindAt}::TIMESTAMP WITH TIME ZONE,
${remindAt}::TIMESTAMP WITH TIME ZONE + interval '1 minute')`
await tx.reminder.create({
data: {
userId,
itemId: Number(id),
remindAt
}
})
}
}
}

View File

@ -1,70 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
export const anonable = false
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC
]
export async function getCost ({ id }, { me, models }) {
const pollOption = await models.pollOption.findUnique({
where: { id: parseInt(id) },
include: { item: true }
})
return satsToMsats(pollOption.item.pollCost)
}
export async function perform ({ invoiceId, id }, { me, cost, tx }) {
const pollOption = await tx.pollOption.findUnique({
where: { id: parseInt(id) }
})
const itemId = parseInt(pollOption.itemId)
let invoiceData = {}
if (invoiceId) {
invoiceData = { invoiceId, invoiceActionState: 'PENDING' }
// store a reference to the item in the invoice
await tx.invoice.update({
where: { id: invoiceId },
data: { actionId: itemId }
})
}
// the unique index on userId, itemId will prevent double voting
await tx.itemAct.create({ data: { msats: cost, itemId, userId: me.id, act: 'POLL', ...invoiceData } })
await tx.pollBlindVote.create({ data: { userId: me.id, itemId, ...invoiceData } })
await tx.pollVote.create({ data: { pollOptionId: pollOption.id, itemId, ...invoiceData } })
return { id }
}
export async function retry ({ invoiceId, newInvoiceId }, { tx }) {
await tx.itemAct.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
await tx.pollBlindVote.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
await tx.pollVote.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
const { pollOptionId } = await tx.pollVote.findFirst({ where: { invoiceId: newInvoiceId } })
return { id: pollOptionId }
}
export async function onPaid ({ invoice }, { tx }) {
if (!invoice) return
await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID' } })
await tx.pollBlindVote.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID' } })
// anonymize the vote
await tx.pollVote.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceId: null, invoiceActionState: null } })
}
export async function onFail ({ invoice }, { tx }) {
await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
await tx.pollBlindVote.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
await tx.pollVote.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
}
export async function describe ({ id }, { actionId }) {
return `SN: vote on poll #${id ?? actionId}`
}

View File

@ -1,85 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { toPositiveBigInt, numWithUnits, msatsToSats, satsToMsats } from '@/lib/format'
import { notifyDeposit } from '@/lib/webPush'
import { getInvoiceableWallets } from '@/wallets/server'
export const anonable = false
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.P2P,
PAID_ACTION_PAYMENT_METHODS.DIRECT,
PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC
]
export async function getCost ({ msats }) {
return toPositiveBigInt(msats)
}
export async function getInvoiceablePeer (_, { me, models, cost, paymentMethod }) {
if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.P2P && !me?.proxyReceive) return null
if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.DIRECT && !me?.directReceive) return null
const wallets = await getInvoiceableWallets(me.id, { models })
if (wallets.length === 0) {
return null
}
if (cost < satsToMsats(me.receiveCreditsBelowSats)) {
return null
}
return me.id
}
export async function getSybilFeePercent () {
return 10n
}
export async function perform ({
invoiceId,
comment,
lud18Data,
noteStr
}, { me, tx }) {
return await tx.invoice.update({
where: { id: invoiceId },
data: {
comment,
lud18Data,
...(noteStr ? { desc: noteStr } : {})
},
include: { invoiceForward: true }
})
}
export async function describe ({ description }, { me, cost, paymentMethod, sybilFeePercent }) {
const fee = paymentMethod === PAID_ACTION_PAYMENT_METHODS.P2P
? cost * BigInt(sybilFeePercent) / 100n
: 0n
return description ?? `SN: ${me?.name ?? ''} receives ${numWithUnits(msatsToSats(cost - fee))}`
}
export async function onPaid ({ invoice }, { tx }) {
if (!invoice) {
throw new Error('invoice is required')
}
// P2P lnurlp does not need to update the user's balance
if (invoice?.invoiceForward) return
await tx.user.update({
where: { id: invoice.userId },
data: {
mcredits: {
increment: invoice.msatsReceived
}
}
})
}
export async function nonCriticalSideEffects ({ invoice }, { models }) {
await notifyDeposit(invoice.userId, invoice)
await models.$executeRaw`
INSERT INTO pgboss.job (name, data)
VALUES ('nip57', jsonb_build_object('hash', ${invoice.hash}))`
}
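For example (illustrative numbers): a 1000 sat lnurlp payment received over the P2P path uses the 10% sybil fee from `getSybilFeePercent`, so `describe` reports the stacker receiving 900 sats (`cost - cost * 10n / 100n`), while a DIRECT payment takes no sybil fee and reports the full 1000 sats.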

View File

@ -1,73 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, TERRITORY_PERIOD_COST } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
import { nextBilling } from '@/lib/territory'
export const anonable = false
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]
export async function getCost ({ name }, { models }) {
const sub = await models.sub.findUnique({
where: {
name
}
})
return satsToMsats(TERRITORY_PERIOD_COST(sub.billingType))
}
export async function perform ({ name }, { cost, tx }) {
const sub = await tx.sub.findUnique({
where: {
name
}
})
if (sub.billingType === 'ONCE') {
throw new Error('Cannot bill a ONCE territory')
}
let billedLastAt = sub.billPaidUntil
let billingCost = sub.billingCost
// if the sub is archived, they are paying to reactivate it
if (sub.status === 'STOPPED') {
// get non-grandfathered cost and reset their billing to start now
billedLastAt = new Date()
billingCost = TERRITORY_PERIOD_COST(sub.billingType)
}
const billPaidUntil = nextBilling(billedLastAt, sub.billingType)
return await tx.sub.update({
// optimistic concurrency control
// make sure the sub hasn't changed since we fetched it
where: {
...sub,
postTypes: {
equals: sub.postTypes
}
},
data: {
billedLastAt,
billPaidUntil,
billingCost,
status: 'ACTIVE',
SubAct: {
create: {
msats: cost,
type: 'BILLING',
userId: sub.userId
}
}
}
})
}
export async function describe ({ name }) {
return `SN: billing for territory ${name}`
}

View File

@ -1,49 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, TERRITORY_PERIOD_COST } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
import { nextBilling } from '@/lib/territory'
export const anonable = false
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]
export async function getCost ({ billingType }) {
return satsToMsats(TERRITORY_PERIOD_COST(billingType))
}
export async function perform ({ invoiceId, ...data }, { me, cost, tx }) {
const { billingType } = data
const billingCost = TERRITORY_PERIOD_COST(billingType)
const billedLastAt = new Date()
const billPaidUntil = nextBilling(billedLastAt, billingType)
return await tx.sub.create({
data: {
...data,
billedLastAt,
billPaidUntil,
billingCost,
rankingType: 'WOT',
userId: me.id,
SubAct: {
create: {
msats: cost,
type: 'BILLING',
userId: me.id
}
},
SubSubscription: {
create: {
userId: me.id
}
}
}
})
}
export async function describe ({ name }) {
return `SN: create territory ${name}`
}

View File

@ -1,65 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, TERRITORY_PERIOD_COST } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
import { nextBilling } from '@/lib/territory'
export const anonable = false
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]
export async function getCost ({ billingType }) {
return satsToMsats(TERRITORY_PERIOD_COST(billingType))
}
export async function perform ({ name, invoiceId, ...data }, { me, cost, tx }) {
const sub = await tx.sub.findUnique({
where: {
name
}
})
data.billingCost = TERRITORY_PERIOD_COST(data.billingType)
// we never want to bill them again if they are changing to ONCE
if (data.billingType === 'ONCE') {
data.billPaidUntil = null
data.billingAutoRenew = false
}
data.billedLastAt = new Date()
data.billPaidUntil = nextBilling(data.billedLastAt, data.billingType)
data.status = 'ACTIVE'
data.userId = me.id
if (sub.userId !== me.id) {
await tx.territoryTransfer.create({ data: { subName: name, oldUserId: sub.userId, newUserId: me.id } })
}
await tx.subAct.create({
data: {
userId: me.id,
subName: name,
msats: cost,
type: 'BILLING'
}
})
return await tx.sub.update({
data,
// optimistic concurrency control
// make sure none of the relevant fields have changed since we fetched the sub
where: {
...sub,
postTypes: {
equals: sub.postTypes
}
}
})
}
export async function describe ({ name }, context) {
return `SN: unarchive territory ${name}`
}

View File

@ -1,83 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, TERRITORY_PERIOD_COST } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
import { proratedBillingCost } from '@/lib/territory'
import { datePivot } from '@/lib/time'
export const anonable = false
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]
export async function getCost ({ oldName, billingType }, { models }) {
const oldSub = await models.sub.findUnique({
where: {
name: oldName
}
})
const cost = proratedBillingCost(oldSub, billingType)
if (!cost) {
return 0n
}
return satsToMsats(cost)
}
export async function perform ({ oldName, invoiceId, ...data }, { me, cost, tx }) {
const oldSub = await tx.sub.findUnique({
where: {
name: oldName
}
})
data.billingCost = TERRITORY_PERIOD_COST(data.billingType)
// we never want to bill them again if they are changing to ONCE
if (data.billingType === 'ONCE') {
data.billPaidUntil = null
data.billingAutoRenew = false
}
// if they are changing to YEARLY, bill them in a year
// if they are changing to MONTHLY from YEARLY, do nothing
if (oldSub.billingType === 'MONTHLY' && data.billingType === 'YEARLY') {
data.billPaidUntil = datePivot(new Date(oldSub.billedLastAt), { years: 1 })
}
// if this billing change makes their bill paid up, set them to active
if (data.billPaidUntil === null || data.billPaidUntil >= new Date()) {
data.status = 'ACTIVE'
}
if (cost > 0n) {
await tx.subAct.create({
data: {
userId: me.id,
subName: oldName,
msats: cost,
type: 'BILLING'
}
})
}
return await tx.sub.update({
data,
where: {
// optimistic concurrency control
// make sure none of the relevant fields have changed since we fetched the sub
...oldSub,
postTypes: {
equals: oldSub.postTypes
},
name: oldName,
userId: me.id
}
})
}
export async function describe ({ name }, context) {
return `SN: update territory billing ${name}`
}

View File

@ -1,228 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, USER_ID } from '@/lib/constants'
import { msatsToSats, satsToMsats } from '@/lib/format'
import { notifyZapped } from '@/lib/webPush'
import { getInvoiceableWallets } from '@/wallets/server'
export const anonable = true
export const paymentMethods = [
PAID_ACTION_PAYMENT_METHODS.P2P,
PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC,
PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]
export async function getCost ({ sats }) {
return satsToMsats(sats)
}
export async function getInvoiceablePeer ({ id, sats, hasSendWallet }, { models, me, cost }) {
// if the zap is dust, or if me doesn't have a send wallet but has enough sats/credits to pay for it
// then we don't invoice the peer
if (sats < me?.sendCreditsBelowSats ||
(me && !hasSendWallet && (me.mcredits >= cost || me.msats >= cost))) {
return null
}
const item = await models.item.findUnique({
where: { id: parseInt(id) },
include: {
itemForwards: true,
user: true
}
})
// bios don't get sats
if (item.bio) {
return null
}
const wallets = await getInvoiceableWallets(item.userId, { models })
// request peer invoice if they have an attached wallet and have not forwarded the item
// and the receiver doesn't want to receive credits
if (wallets.length > 0 &&
item.itemForwards.length === 0 &&
sats >= item.user.receiveCreditsBelowSats) {
return item.userId
}
return null
}
export async function getSybilFeePercent () {
return 30n
}
export async function perform ({ invoiceId, sats, id: itemId, ...args }, { me, cost, sybilFeePercent, tx }) {
const feeMsats = cost * sybilFeePercent / 100n
const zapMsats = cost - feeMsats
itemId = parseInt(itemId)
let invoiceData = {}
if (invoiceId) {
invoiceData = { invoiceId, invoiceActionState: 'PENDING' }
// store a reference to the item in the invoice
await tx.invoice.update({
where: { id: invoiceId },
data: { actionId: itemId }
})
}
const acts = await tx.itemAct.createManyAndReturn({
data: [
{ msats: feeMsats, itemId, userId: me?.id ?? USER_ID.anon, act: 'FEE', ...invoiceData },
{ msats: zapMsats, itemId, userId: me?.id ?? USER_ID.anon, act: 'TIP', ...invoiceData }
]
})
const [{ path }] = await tx.$queryRaw`
SELECT ltree2text(path) as path FROM "Item" WHERE id = ${itemId}::INTEGER`
return { id: itemId, sats, act: 'TIP', path, actIds: acts.map(act => act.id) }
}
export async function retry ({ invoiceId, newInvoiceId }, { tx, cost }) {
await tx.itemAct.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
const [{ id, path }] = await tx.$queryRaw`
SELECT "Item".id, ltree2text(path) as path
FROM "Item"
JOIN "ItemAct" ON "Item".id = "ItemAct"."itemId"
WHERE "ItemAct"."invoiceId" = ${newInvoiceId}::INTEGER`
return { id, sats: msatsToSats(cost), act: 'TIP', path }
}
export async function onPaid ({ invoice, actIds }, { tx }) {
let acts
if (invoice) {
await tx.itemAct.updateMany({
where: { invoiceId: invoice.id },
data: {
invoiceActionState: 'PAID'
}
})
acts = await tx.itemAct.findMany({ where: { invoiceId: invoice.id }, include: { item: true } })
actIds = acts.map(act => act.id)
} else if (actIds) {
acts = await tx.itemAct.findMany({ where: { id: { in: actIds } }, include: { item: true } })
} else {
throw new Error('No invoice or actIds')
}
const msats = acts.reduce((a, b) => a + BigInt(b.msats), BigInt(0))
const sats = msatsToSats(msats)
const itemAct = acts.find(act => act.act === 'TIP')
if (invoice?.invoiceForward) {
// only the op got sats and we need to add it to their stackedMsats
// because the sats were p2p
await tx.user.update({
where: { id: itemAct.item.userId },
data: { stackedMsats: { increment: itemAct.msats } }
})
} else {
// splits only use mcredits
await tx.$executeRaw`
WITH forwardees AS (
SELECT "userId", ((${itemAct.msats}::BIGINT * pct) / 100)::BIGINT AS mcredits
FROM "ItemForward"
WHERE "itemId" = ${itemAct.itemId}::INTEGER
), total_forwarded AS (
SELECT COALESCE(SUM(mcredits), 0) as mcredits
FROM forwardees
), recipients AS (
SELECT "userId", mcredits FROM forwardees
UNION
SELECT ${itemAct.item.userId}::INTEGER as "userId",
${itemAct.msats}::BIGINT - (SELECT mcredits FROM total_forwarded)::BIGINT as mcredits
ORDER BY "userId" ASC -- order to prevent deadlocks
)
UPDATE users
SET
mcredits = users.mcredits + recipients.mcredits,
"stackedMsats" = users."stackedMsats" + recipients.mcredits,
"stackedMcredits" = users."stackedMcredits" + recipients.mcredits
FROM recipients
WHERE users.id = recipients."userId"`
}
// update denormalized aggregates: weighted votes, upvotes, msats, lastZapAt
// NOTE: for the rows that might be updated by a concurrent zap, we use UPDATE for implicit locking
await tx.$queryRaw`
WITH zapper AS (
SELECT trust FROM users WHERE id = ${itemAct.userId}::INTEGER
), zap AS (
INSERT INTO "ItemUserAgg" ("userId", "itemId", "zapSats")
VALUES (${itemAct.userId}::INTEGER, ${itemAct.itemId}::INTEGER, ${sats}::INTEGER)
ON CONFLICT ("itemId", "userId") DO UPDATE
SET "zapSats" = "ItemUserAgg"."zapSats" + ${sats}::INTEGER, updated_at = now()
RETURNING ("zapSats" = ${sats}::INTEGER)::INTEGER as first_vote,
LOG("zapSats" / GREATEST("zapSats" - ${sats}::INTEGER, 1)::FLOAT) AS log_sats
)
UPDATE "Item"
SET
"weightedVotes" = "weightedVotes" + (zapper.trust * zap.log_sats),
upvotes = upvotes + zap.first_vote,
msats = "Item".msats + ${msats}::BIGINT,
mcredits = "Item".mcredits + ${invoice?.invoiceForward ? 0n : msats}::BIGINT,
"lastZapAt" = now()
FROM zap, zapper
WHERE "Item".id = ${itemAct.itemId}::INTEGER
RETURNING "Item".*`
// record potential bounty payment
// NOTE: we are at least guaranteed that we see the update "ItemUserAgg" from our tx so we can trust
// we won't miss a zap that aggregates into a bounty payment, regardless of the order of updates
await tx.$executeRaw`
WITH bounty AS (
SELECT root.id, "ItemUserAgg"."zapSats" >= root.bounty AS paid, "ItemUserAgg"."itemId" AS target
FROM "ItemUserAgg"
JOIN "Item" ON "Item".id = "ItemUserAgg"."itemId"
LEFT JOIN "Item" root ON root.id = "Item"."rootId"
WHERE "ItemUserAgg"."userId" = ${itemAct.userId}::INTEGER
AND "ItemUserAgg"."itemId" = ${itemAct.itemId}::INTEGER
AND root."userId" = ${itemAct.userId}::INTEGER
AND root.bounty IS NOT NULL
)
UPDATE "Item"
SET "bountyPaidTo" = array_remove(array_append(array_remove("bountyPaidTo", bounty.target), bounty.target), NULL)
FROM bounty
WHERE "Item".id = bounty.id AND bounty.paid`
// update commentMsats on ancestors
await tx.$executeRaw`
WITH zapped AS (
SELECT * FROM "Item" WHERE id = ${itemAct.itemId}::INTEGER
)
UPDATE "Item"
SET "commentMsats" = "Item"."commentMsats" + ${msats}::BIGINT,
"commentMcredits" = "Item"."commentMcredits" + ${invoice?.invoiceForward ? 0n : msats}::BIGINT
FROM zapped
WHERE "Item".path @> zapped.path AND "Item".id <> zapped.id`
}
export async function nonCriticalSideEffects ({ invoice, actIds }, { models }) {
const itemAct = await models.itemAct.findFirst({
where: invoice ? { invoiceId: invoice.id } : { id: { in: actIds } },
include: { item: true }
})
// avoid duplicate notifications with the same zap amount
// by checking if there are any other pending acts on the item
const pendingActs = await models.itemAct.count({
where: {
itemId: itemAct.itemId,
createdAt: {
gt: itemAct.createdAt
}
}
})
if (pendingActs === 0) notifyZapped({ models, item: itemAct.item }).catch(console.error)
}
export async function onFail ({ invoice }, { tx }) {
await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
}
export async function describe ({ id: itemId, sats }, { actionId, cost }) {
return `SN: zap ${sats ?? msatsToSats(cost)} sats to #${itemId ?? actionId}`
}
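A small sketch of the split computed in `perform` above, assuming the 30% sybil fee returned by `getSybilFeePercent` (values are illustrative):

// a 1000 sat zap routed peer-to-peer
const cost = 1_000_000n // msats
const sybilFeePercent = 30n
const feeMsats = cost * sybilFeePercent / 100n // 300_000n msats, recorded as the FEE act
const zapMsats = cost - feeMsats // 700_000n msats, recorded as the TIP act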

View File

@ -1,64 +0,0 @@
import { LND_PATHFINDING_TIME_PREF_PPM, LND_PATHFINDING_TIMEOUT_MS } from '@/lib/constants'
import { msatsToSats, satsToMsats, toPositiveBigInt } from '@/lib/format'
import { Prisma } from '@prisma/client'
import { parsePaymentRequest, payViaPaymentRequest } from 'ln-service'
// paying actions are completely distinct from paid actions
// and there's only one paying action: send
// ... still we want the api to at least be similar
export default async function performPayingAction ({ bolt11, maxFee, walletId }, { me, models, lnd }) {
try {
console.group('performPayingAction', `${bolt11.slice(0, 10)}...`, maxFee, walletId)
if (!me) {
throw new Error('You must be logged in to perform this action')
}
const decoded = await parsePaymentRequest({ request: bolt11 })
const cost = toPositiveBigInt(toPositiveBigInt(decoded.mtokens) + satsToMsats(maxFee))
console.log('cost', cost)
const withdrawal = await models.$transaction(async tx => {
await tx.user.update({
where: {
id: me.id
},
data: { msats: { decrement: cost } }
})
return await tx.withdrawl.create({
data: {
hash: decoded.id,
bolt11,
msatsPaying: toPositiveBigInt(decoded.mtokens),
msatsFeePaying: satsToMsats(maxFee),
userId: me.id,
walletId,
autoWithdraw: !!walletId
}
})
}, { isolationLevel: Prisma.TransactionIsolationLevel.ReadCommitted })
payViaPaymentRequest({
lnd,
request: withdrawal.bolt11,
max_fee: msatsToSats(withdrawal.msatsFeePaying),
pathfinding_timeout: LND_PATHFINDING_TIMEOUT_MS,
confidence: LND_PATHFINDING_TIME_PREF_PPM
}).catch(console.error)
return withdrawal
} catch (e) {
if (e.message.includes('\\"users\\" violates check constraint \\"msats_positive\\"')) {
throw new Error('insufficient funds')
}
if (e instanceof Prisma.PrismaClientKnownRequestError && e.code === 'P2002') {
throw new Error('you cannot withdraw to the same invoice twice')
}
console.error('performPayingAction failed', e)
throw e
} finally {
console.groupEnd()
}
}
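For example (illustrative numbers): paying a 21 sat invoice with `maxFee = 10` deducts 21,000 + `satsToMsats(10)` = 31,000 msats from the sender up front, recorded as `msatsPaying = 21,000` and `msatsFeePaying = 10,000`; the payment itself then runs in the background via `payViaPaymentRequest`.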

View File

@ -1,7 +1,7 @@
import { GqlAuthorizationError } from '@/lib/error'
import { GraphQLError } from 'graphql'
export default function assertApiKeyNotPermitted ({ me }) {
if (me?.apiKey === true) {
throw new GqlAuthorizationError('this operation is not allowed to be performed via API Key')
throw new GraphQLError('this operation is not allowed to be performed via API Key', { extensions: { code: 'FORBIDDEN' } })
}
}

View File

@ -1,27 +1,37 @@
import { isServiceEnabled } from '@/lib/sndev'
import { cachedFetcher } from '@/lib/fetch'
import { getHeight } from 'ln-service'
import lndService from 'ln-service'
import lnd from '@/api/lnd'
const getBlockHeight = cachedFetcher(async function fetchBlockHeight ({ lnd }) {
const cache = new Map()
const expiresIn = 1000 * 30 // 30 seconds in milliseconds
async function fetchBlockHeight () {
let blockHeight = 0
try {
const { current_block_height: height } = await getHeight({ lnd })
return height
const height = await lndService.getHeight({ lnd })
blockHeight = height.current_block_height
} catch (err) {
console.error('getBlockHeight', err)
return 0
console.error('fetchBlockHeight', err)
}
}, {
maxSize: 1,
cacheExpiry: 60 * 1000, // 1 minute
forceRefreshThreshold: 0,
keyGenerator: () => 'getBlockHeight'
})
cache.set('block', { height: blockHeight, createdAt: Date.now() })
return blockHeight
}
async function getBlockHeight () {
if (cache.has('block')) {
const { height, createdAt } = cache.get('block')
const expired = createdAt + expiresIn < Date.now()
if (expired) fetchBlockHeight().catch(console.error) // update cache
return height // serve stale block height (this is on the SSR critical path)
} else {
fetchBlockHeight().catch(console.error)
}
return 0
}
export default {
Query: {
blockHeight: async (parent, opts, { lnd }) => {
if (!isServiceEnabled('payments')) return 0
return await getBlockHeight({ lnd }) || 0
blockHeight: async (parent, opts, ctx) => {
return await getBlockHeight()
}
}
}

View File

@ -1,26 +1,36 @@
import { cachedFetcher } from '@/lib/fetch'
const cache = new Map()
const expiresIn = 1000 * 30 // 30 seconds in milliseconds
const getChainFeeRate = cachedFetcher(async function fetchChainFeeRate () {
async function fetchChainFeeRate () {
const url = 'https://mempool.space/api/v1/fees/recommended'
try {
const res = await fetch(url)
const body = await res.json()
return body.hourFee
} catch (err) {
console.error('fetchChainFee', err)
return 0
const chainFee = await fetch(url)
.then((res) => res.json())
.then((body) => body.hourFee)
.catch((err) => {
console.error('fetchChainFee', err)
return 0
})
cache.set('fee', { fee: chainFee, createdAt: Date.now() })
return chainFee
}
async function getChainFeeRate () {
if (cache.has('fee')) {
const { fee, createdAt } = cache.get('fee')
const expired = createdAt + expiresIn < Date.now()
if (expired) fetchChainFeeRate().catch(console.error) // update cache
return fee
} else {
fetchChainFeeRate().catch(console.error)
}
}, {
maxSize: 1,
cacheExpiry: 60 * 1000, // 1 minute
forceRefreshThreshold: 0, // never force refresh
keyGenerator: () => 'getChainFeeRate'
})
return 0
}
export default {
Query: {
chainFee: async (parent, opts, ctx) => {
return await getChainFeeRate() || 0
return await getChainFeeRate()
}
}
}

View File

@ -1,5 +1,26 @@
import { timeUnitForRange, whenRange } from '@/lib/time'
export function withClause (range) {
const unit = timeUnitForRange(range)
return `
WITH range_values AS (
SELECT date_trunc('${unit}', $1) as minval,
date_trunc('${unit}', $2) as maxval
),
times AS (
SELECT generate_series(minval, maxval, interval '1 ${unit}') as time
FROM range_values
)
`
}
export function intervalClause (range, table) {
const unit = timeUnitForRange(range)
return `date_trunc('${unit}', "${table}".created_at) >= date_trunc('${unit}', $1) AND date_trunc('${unit}', "${table}".created_at) <= date_trunc('${unit}', $2) `
}
export function viewIntervalClause (range, view) {
const unit = timeUnitForRange(range)
return `"${view}".t >= date_trunc('${unit}', timezone('America/Chicago', $1)) AND date_trunc('${unit}', "${view}".t) <= date_trunc('${unit}', timezone('America/Chicago', $2)) `
@ -21,11 +42,31 @@ export function viewGroup (range, view) {
${view}(
date_trunc('hour', timezone('America/Chicago', now())),
date_trunc('hour', timezone('America/Chicago', now())), '1 hour'::INTERVAL, 'hour')
WHERE "${view}".t >= date_trunc('hour', timezone('America/Chicago', $1))
AND "${view}".t <= date_trunc('hour', timezone('America/Chicago', $2)))
WHERE "${view}".t >= date_trunc('${unit}', timezone('America/Chicago', $1))
AND "${view}".t <= date_trunc('${unit}', timezone('America/Chicago', $2)))
) u`
}
export function subViewGroup (range) {
const unit = timeUnitForRange(range)
return `(
(SELECT *
FROM sub_stats_days
WHERE ${viewIntervalClause(range, 'sub_stats_days')})
UNION ALL
(SELECT *
FROM sub_stats_hours
WHERE ${viewIntervalClause(range, 'sub_stats_hours')}
${unit === 'hour' ? '' : 'AND "sub_stats_hours".t >= date_trunc(\'day\', timezone(\'America/Chicago\', now()))'})
UNION ALL
(SELECT * FROM
sub_stats(
date_trunc('hour', timezone('America/Chicago', now())),
date_trunc('hour', timezone('America/Chicago', now())), '1 hour'::INTERVAL, 'hour')
WHERE "sub_stats".t >= date_trunc('${unit}', timezone('America/Chicago', $1)))
)`
}
export default {
Query: {
registrationGrowth: async (parent, { when, from, to }, { models }) => {

25
api/resolvers/image.js Normal file
View File

@ -0,0 +1,25 @@
import { USER_ID, AWS_S3_URL_REGEXP } from '@/lib/constants'
import { msatsToSats } from '@/lib/format'
export default {
Query: {
imageFeesInfo: async (parent, { s3Keys }, { models, me }) => {
return imageFeesInfo(s3Keys, { models, me })
}
}
}
export function uploadIdsFromText (text, { models }) {
if (!text) return null
return [...text.matchAll(AWS_S3_URL_REGEXP)].map(m => Number(m[1]))
}
export async function imageFeesInfo (s3Keys, { models, me }) {
// returns info object in this format:
// { bytes24h: int, bytesUnpaid: int, nUnpaid: int, imageFeeMsats: BigInt }
const [info] = await models.$queryRawUnsafe('SELECT * FROM image_fees_info($1::INTEGER, $2::INTEGER[])', me ? me.id : USER_ID.anon, s3Keys)
const imageFee = msatsToSats(info.imageFeeMsats)
const totalFeesMsats = info.nUnpaid * Number(info.imageFeeMsats)
const totalFees = msatsToSats(totalFeesMsats)
return { ...info, imageFee, totalFees, totalFeesMsats }
}
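A minimal usage sketch (the caller, `text`, and ids are hypothetical, not from this diff): extract the upload ids referenced in a draft and quote the fees before submitting:

// hypothetical caller
const uploadIds = uploadIdsFromText(text, { models }) ?? []
const { imageFee, nUnpaid, totalFees } = await imageFeesInfo(uploadIds, { models, me })
console.log(`${nUnpaid} unpaid upload(s), ${imageFee} sats each, ${totalFees} sats total`)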

View File

@ -16,10 +16,9 @@ import { GraphQLJSONObject as JSONObject } from 'graphql-type-json'
import admin from './admin'
import blockHeight from './blockHeight'
import chainFee from './chainFee'
import image from './image'
import { GraphQLScalarType, Kind } from 'graphql'
import { createIntScalar } from 'graphql-scalar'
import paidAction from './paidAction'
import vault from './vault'
const date = new GraphQLScalarType({
name: 'Date',
@ -55,5 +54,4 @@ const limit = createIntScalar({
})
export default [user, item, message, wallet, lnurl, notifications, invite, sub,
upload, search, growth, rewards, referrals, price, admin, blockHeight, chainFee,
{ JSONObject }, { Date: date }, { Limit: limit }, paidAction, vault]
upload, search, growth, rewards, referrals, price, admin, blockHeight, chainFee, image, { JSONObject }, { Date: date }, { Limit: limit }]

View File

@ -1,15 +1,15 @@
import { inviteSchema, validateSchema } from '@/lib/validate'
import { GraphQLError } from 'graphql'
import { inviteSchema, ssValidate } from '@/lib/validate'
import { msatsToSats } from '@/lib/format'
import assertApiKeyNotPermitted from './apiKey'
import { GqlAuthenticationError, GqlInputError } from '@/lib/error'
import { Prisma } from '@prisma/client'
export default {
Query: {
invites: async (parent, args, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'FORBIDDEN' } })
}
return await models.invite.findMany({
where: {
userId: me.id
@ -29,48 +29,27 @@ export default {
},
Mutation: {
createInvite: async (parent, { id, gift, limit, description }, { me, models }) => {
createInvite: async (parent, { gift, limit }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'FORBIDDEN' } })
}
assertApiKeyNotPermitted({ me })
await validateSchema(inviteSchema, { id, gift, limit, description })
try {
return await models.invite.create({
data: {
id,
gift,
limit,
userId: me.id,
description
}
})
} catch (error) {
if (error instanceof Prisma.PrismaClientKnownRequestError) {
if (error.code === 'P2002' && error.meta.target.includes('id')) {
throw new GqlInputError('an invite with this code already exists')
}
}
throw error
}
await ssValidate(inviteSchema, { gift, limit })
return await models.invite.create({
data: { gift, limit, userId: me.id }
})
},
revokeInvite: async (parent, { id }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'FORBIDDEN' } })
}
try {
return await models.invite.update({
where: { id, userId: me.id },
data: { revoked: true }
})
} catch (err) {
if (err.code === 'P2025') {
throw new GqlInputError('invite not found')
}
throw err
}
return await models.invite.update({
where: { id },
data: { revoked: true }
})
}
},
@ -83,10 +62,7 @@ export default {
},
poor: async (invite, args, { me, models }) => {
const user = await models.user.findUnique({ where: { id: invite.userId } })
return msatsToSats(user.msats) < invite.gift && msatsToSats(user.mcredits) < invite.gift
},
description: (invite, args, { me }) => {
return invite.userId === me?.id ? invite.description : undefined
return msatsToSats(user.msats) < invite.gift
}
}
}

File diff suppressed because it is too large

View File

@ -1,8 +1,8 @@
import { randomBytes } from 'crypto'
import { bech32 } from 'bech32'
import { GraphQLError } from 'graphql'
import assertGofacYourself from './ofac'
import assertApiKeyNotPermitted from './apiKey'
import { GqlAuthenticationError } from '@/lib/error'
function encodedUrl (iurl, tag, k1) {
const url = new URL(iurl)
@ -35,7 +35,7 @@ export default {
await assertGofacYourself({ models, headers })
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
assertApiKeyNotPermitted({ me })

View File

@ -1,4 +1,4 @@
import { GqlInputError } from '@/lib/error'
import { GraphQLError } from 'graphql'
export default {
Query: {
@ -11,7 +11,7 @@ export default {
Mutation: {
createMessage: async (parent, { text }, { me, models }) => {
if (!text) {
throw new GqlInputError('must have text')
throw new GraphQLError('Must have text', { extensions: { code: 'BAD_INPUT' } })
}
return await models.message.create({

View File

@ -1,17 +1,17 @@
import { GraphQLError } from 'graphql'
import { decodeCursor, LIMIT, nextNoteCursorEncoded } from '@/lib/cursor'
import { getItem, filterClause, whereClause, muteClause, activeOrMine } from './item'
import { getItem, filterClause, whereClause, muteClause } from './item'
import { getInvoice, getWithdrawl } from './wallet'
import { pushSubscriptionSchema, validateSchema } from '@/lib/validate'
import { pushSubscriptionSchema, ssValidate } from '@/lib/validate'
import { replyToSubscription } from '@/lib/webPush'
import { getSub } from './sub'
import { GqlAuthenticationError, GqlInputError } from '@/lib/error'
export default {
Query: {
notifications: async (parent, { cursor, inc }, { me, models }) => {
const decodedCursor = decodeCursor(cursor)
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
const meFull = await models.user.findUnique({ where: { id: me.id } })
@ -167,8 +167,7 @@ export default {
${whereClause(
'"Item".created_at < $2',
await filterClause(me, models),
muteClause(me),
activeOrMine(me))}
muteClause(me))}
ORDER BY id ASC, CASE
WHEN type = 'Mention' THEN 1
WHEN type = 'Reply' THEN 2
@ -179,6 +178,17 @@ export default {
)`
)
queries.push(
`(SELECT "Item".id::text, "Item"."statusUpdatedAt" AS "sortTime", NULL as "earnedSats",
'JobChanged' AS type
FROM "Item"
WHERE "Item"."userId" = $1
AND "maxBid" IS NOT NULL
AND "statusUpdatedAt" < $2 AND "statusUpdatedAt" <> created_at
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
)
// territory transfers
queries.push(
`(SELECT "TerritoryTransfer".id::text, "TerritoryTransfer"."created_at" AS "sortTime", NULL as "earnedSats",
@ -217,20 +227,13 @@ export default {
if (meFull.noteDeposits) {
queries.push(
`(SELECT "Invoice".id::text, "Invoice"."confirmedAt" AS "sortTime",
FLOOR("Invoice"."msatsReceived" / 1000) as "earnedSats",
`(SELECT "Invoice".id::text, "Invoice"."confirmedAt" AS "sortTime", FLOOR("msatsReceived" / 1000) as "earnedSats",
'InvoicePaid' AS type
FROM "Invoice"
WHERE "Invoice"."userId" = $1
AND "Invoice"."confirmedAt" IS NOT NULL
AND "Invoice"."created_at" < $2
AND (
("Invoice"."isHeld" IS NULL AND "Invoice"."actionType" IS NULL)
OR (
"Invoice"."actionType" = 'RECEIVE'
AND "Invoice"."actionState" = 'PAID'
)
)
AND "confirmedAt" IS NOT NULL
AND "isHeld" IS NULL
AND created_at < $2
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
)
@ -238,17 +241,12 @@ export default {
if (meFull.noteWithdrawals) {
queries.push(
`(SELECT "Withdrawl".id::text, MAX(COALESCE("Invoice"."confirmedAt", "Withdrawl".created_at)) AS "sortTime",
FLOOR(MAX("Withdrawl"."msatsPaid" / 1000)) as "earnedSats",
`(SELECT "Withdrawl".id::text, "Withdrawl".created_at AS "sortTime", FLOOR("msatsPaid" / 1000) as "earnedSats",
'WithdrawlPaid' AS type
FROM "Withdrawl"
LEFT JOIN "InvoiceForward" ON "InvoiceForward"."withdrawlId" = "Withdrawl".id
LEFT JOIN "Invoice" ON "InvoiceForward"."invoiceId" = "Invoice".id
WHERE "Withdrawl"."userId" = $1
AND "Withdrawl".status = 'CONFIRMED'
AND "Withdrawl".created_at < $2
AND "InvoiceForward"."id" IS NULL
GROUP BY "Withdrawl".id
AND status = 'CONFIRMED'
AND created_at < $2
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
)
@ -284,7 +282,6 @@ export default {
FROM "Earn"
WHERE "userId" = $1
AND created_at < $2
AND (type IS NULL OR type NOT IN ('FOREVER_REFERRAL', 'ONE_DAY_REFERRAL'))
GROUP BY "userId", created_at
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
@ -300,17 +297,6 @@ export default {
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
)
queries.push(
`(SELECT min(id)::text, created_at AS "sortTime", FLOOR(sum(msats) / 1000) as "earnedSats",
'ReferralReward' AS type
FROM "Earn"
WHERE "userId" = $1
AND created_at < $2
AND type IN ('FOREVER_REFERRAL', 'ONE_DAY_REFERRAL')
GROUP BY "userId", created_at
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
)
}
if (meFull.noteCowboyHat) {
@ -344,23 +330,6 @@ export default {
LIMIT ${LIMIT})`
)
queries.push(
`(SELECT "Invoice".id::text, "Invoice"."updated_at" AS "sortTime", NULL as "earnedSats", 'Invoicification' AS type
FROM "Invoice"
WHERE "Invoice"."userId" = $1
AND "Invoice"."updated_at" < $2
AND "Invoice"."actionState" = 'FAILED'
AND (
"Invoice"."actionType" = 'ITEM_CREATE' OR
"Invoice"."actionType" = 'ZAP' OR
"Invoice"."actionType" = 'DOWN_ZAP' OR
"Invoice"."actionType" = 'POLL_VOTE' OR
"Invoice"."actionType" = 'BOOST'
)
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
)
const notifications = await models.$queryRawUnsafe(
`SELECT id, "sortTime", "earnedSats", type,
"sortTime" AS "minSortTime"
@ -383,10 +352,10 @@ export default {
Mutation: {
savePushSubscription: async (parent, { endpoint, p256dh, auth, oldEndpoint }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
await validateSchema(pushSubscriptionSchema, { endpoint, p256dh, auth })
await ssValidate(pushSubscriptionSchema, { endpoint, p256dh, auth })
let dbPushSubscription
if (oldEndpoint) {
@ -407,12 +376,12 @@ export default {
},
deletePushSubscription: async (parent, { endpoint }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
const subscription = await models.pushSubscription.findFirst({ where: { endpoint, userId: Number(me.id) } })
if (!subscription) {
throw new GqlInputError('endpoint not found')
throw new GraphQLError('endpoint not found', { extensions: { code: 'BAD_INPUT' } })
}
const deletedSubscription = await models.pushSubscription.delete({ where: { id: subscription.id } })
console.log(`[webPush] deleted subscription ${deletedSubscription.id} of user ${deletedSubscription.userId} due to client request`)
@ -476,14 +445,6 @@ export default {
`
return res.length ? res[0].days : null
},
type: async (n, args, { models }) => {
const res = await models.$queryRaw`
SELECT "type"
FROM "Streak"
WHERE id = ${Number(n.id)}
`
return res.length ? res[0].type : null
}
},
Earn: {
@ -508,22 +469,6 @@ export default {
return null
}
},
ReferralReward: {
sources: async (n, args, { me, models }) => {
const [sources] = await models.$queryRawUnsafe(`
SELECT
COALESCE(FLOOR(sum(msats) FILTER(WHERE type = 'FOREVER_REFERRAL') / 1000), 0) AS forever,
COALESCE(FLOOR(sum(msats) FILTER(WHERE type = 'ONE_DAY_REFERRAL') / 1000), 0) AS "oneDay"
FROM "Earn"
WHERE "userId" = $1 AND created_at = $2
`, Number(me.id), new Date(n.sortTime))
if (sources.forever + sources.oneDay > 0) {
return sources
}
return null
}
},
Mention: {
mention: async (n, args, { models }) => true,
item: async (n, args, { models, me }) => getItem(n, { id: n.id }, { models, me })
@ -534,9 +479,6 @@ export default {
InvoicePaid: {
invoice: async (n, args, { me, models }) => getInvoice(n, { id: n.id }, { me, models })
},
Invoicification: {
invoice: async (n, args, { me, models }) => getInvoice(n, { id: n.id }, { me, models })
},
WithdrawlPaid: {
withdrawl: async (n, args, { me, models }) => getWithdrawl(n, { id: n.id }, { me, models })
},
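Both versions of the notifications resolver build one parenthesized subquery per notification type, push them into an array based on the user's settings, and then combine them into a single statement re-sorted by "sortTime". A much-reduced sketch of that assembly (the subquery body is a placeholder and the UNION ALL combinator is an assumption; the real per-type queries are the ones shown above):

async function notificationsPage (models, meId, cursorTime, LIMIT) {
  const queries = [
    // placeholder subquery; the real resolver pushes one of these per notification type
    `(SELECT "Item".id::text, "Item".created_at AS "sortTime", NULL AS "earnedSats", 'Reply' AS type
        FROM "Item"
        WHERE "Item"."userId" = $1 AND "Item".created_at < $2
        ORDER BY "sortTime" DESC
        LIMIT ${LIMIT})`
  ]
  return await models.$queryRawUnsafe(
    `SELECT id, "sortTime", "earnedSats", type, "sortTime" AS "minSortTime"
       FROM (${queries.join(' UNION ALL ')}) n
       ORDER BY "sortTime" DESC
       LIMIT ${LIMIT}`,
    meId, cursorTime)
}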

View File

@ -1,11 +1,13 @@
import { GqlAuthorizationError } from '@/lib/error'
import { GraphQLError } from 'graphql'
// this function makes america more secure apparently
export default async function assertGofacYourself ({ models, headers, ip }) {
const country = await gOFACYourself({ models, headers, ip })
if (!country) return
throw new GqlAuthorizationError(`Your IP address is in ${country}. We cannot provide financial services to residents of ${country}.`)
throw new GraphQLError(
`Your IP address is in ${country}. We cannot provide financial services to residents of ${country}.`,
{ extensions: { code: 'FORBIDDEN' } })
}
export async function gOFACYourself ({ models, headers = {}, ip }) {

View File

@ -1,81 +0,0 @@
import { retryPaidAction } from '../paidAction'
import { USER_ID } from '@/lib/constants'
function paidActionType (actionType) {
switch (actionType) {
case 'ITEM_CREATE':
case 'ITEM_UPDATE':
return 'ItemPaidAction'
case 'ZAP':
case 'DOWN_ZAP':
case 'BOOST':
return 'ItemActPaidAction'
case 'TERRITORY_CREATE':
case 'TERRITORY_UPDATE':
case 'TERRITORY_BILLING':
case 'TERRITORY_UNARCHIVE':
return 'SubPaidAction'
case 'DONATE':
return 'DonatePaidAction'
case 'POLL_VOTE':
return 'PollVotePaidAction'
case 'RECEIVE':
return 'ReceivePaidAction'
case 'BUY_CREDITS':
return 'BuyCreditsPaidAction'
default:
throw new Error('Unknown action type')
}
}
export default {
Query: {
paidAction: async (parent, { invoiceId }, { models, me }) => {
const invoice = await models.invoice.findUnique({
where: {
id: invoiceId,
userId: me?.id ?? USER_ID.anon
}
})
if (!invoice) {
throw new Error('Invoice not found')
}
return {
type: paidActionType(invoice.actionType),
invoice,
result: invoice.actionResult,
paymentMethod: invoice.actionOptimistic ? 'OPTIMISTIC' : 'PESSIMISTIC'
}
}
},
Mutation: {
retryPaidAction: async (parent, { invoiceId }, { models, me, lnd }) => {
if (!me) {
throw new Error('You must be logged in')
}
const invoice = await models.invoice.findUnique({ where: { id: invoiceId, userId: me.id } })
if (!invoice) {
throw new Error('Invoice not found')
}
if (invoice.actionState !== 'FAILED') {
if (invoice.actionState === 'PAID') {
throw new Error('Invoice is already paid')
}
throw new Error(`Invoice is not in failed state: ${invoice.actionState}`)
}
const result = await retryPaidAction(invoice.actionType, { invoice }, { models, me, lnd })
return {
...result,
type: paidActionType(invoice.actionType)
}
}
},
PaidAction: {
__resolveType: obj => obj.type
}
}
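The paidAction resolver removed in this diff maps an invoice's actionType to a concrete GraphQL type and exposes it behind the PaidAction abstract type via __resolveType. A client would typically poll this query until the underlying invoice reaches a terminal state; a minimal sketch of such a poller (the /api/graphql endpoint, the variable type, and the selected fields are assumptions, while the 'PAID' and 'FAILED' states come from the resolver above):

async function waitForPaidAction (invoiceId, { intervalMs = 1000, timeoutMs = 30000 } = {}) {
  const query = `
    query PaidAction($invoiceId: Int!) {
      paidAction(invoiceId: $invoiceId) {
        __typename
        ... on ItemPaidAction { invoice { actionState } }
      }
    }`
  const deadline = Date.now() + timeoutMs
  while (Date.now() < deadline) {
    const res = await fetch('/api/graphql', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ query, variables: { invoiceId } })
    })
    const { data, errors } = await res.json()
    if (errors?.length) throw new Error(errors[0].message)
    const state = data.paidAction?.invoice?.actionState
    if (state === 'PAID' || state === 'FAILED') return data.paidAction
    await new Promise(resolve => setTimeout(resolve, intervalMs))
  }
  throw new Error('timed out waiting for paid action')
}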

View File

@ -1,27 +1,36 @@
import { SUPPORTED_CURRENCIES } from '@/lib/currency'
import { cachedFetcher } from '@/lib/fetch'
const cache = new Map()
const expiresIn = 30000 // in milliseconds
const getPrice = cachedFetcher(async function fetchPrice (fiat = 'USD') {
async function fetchPrice (fiat) {
const url = `https://api.coinbase.com/v2/prices/BTC-${fiat}/spot`
try {
const res = await fetch(url)
const body = await res.json()
return parseFloat(body.data.amount)
} catch (err) {
console.error(err)
return -1
const price = await fetch(url)
.then((res) => res.json())
.then((body) => parseFloat(body.data.amount))
.catch((err) => {
console.error(err)
return -1
})
cache.set(fiat, { price, createdAt: Date.now() })
return price
}
async function getPrice (fiat) {
fiat ??= 'USD'
if (cache.has(fiat)) {
const { price, createdAt } = cache.get(fiat)
const expired = createdAt + expiresIn < Date.now()
if (expired) fetchPrice(fiat).catch(console.error) // update cache
return price // serve stale price (this is on the SSR critical path)
} else {
fetchPrice(fiat).catch(console.error)
}
}, {
maxSize: SUPPORTED_CURRENCIES.length,
cacheExpiry: 60 * 1000, // 1 minute
forceRefreshThreshold: 0, // never force refresh
keyGenerator: (fiat = 'USD') => fiat
})
return null
}
export default {
Query: {
price: async (parent, { fiatCurrency }, ctx) => {
return await getPrice(fiatCurrency) || -1
return await getPrice(fiatCurrency)
}
}
}
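One side of this diff replaces the hand-rolled Map cache for BTC spot prices with cachedFetcher from '@/lib/fetch'. Judging from the options passed above (maxSize, cacheExpiry, forceRefreshThreshold, keyGenerator), it behaves like a stale-while-revalidate cache; the following is only an illustrative sketch of that behavior, not the actual lib/fetch implementation:

function cachedFetcher (fetcher, { maxSize = 100, cacheExpiry = 60_000, forceRefreshThreshold = 0, keyGenerator = (...args) => JSON.stringify(args) } = {}) {
  const cache = new Map()
  return async function cachedFetch (...args) {
    const key = keyGenerator(...args)
    const entry = cache.get(key)
    const age = entry ? Date.now() - entry.createdAt : Infinity
    // fresh enough: serve straight from the cache
    if (entry && age < cacheExpiry) return entry.value
    const refresh = fetcher(...args).then(value => {
      cache.set(key, { value, createdAt: Date.now() })
      if (cache.size > maxSize) cache.delete(cache.keys().next().value) // drop the oldest entry
      return value
    })
    // stale but present: refresh in the background and serve the stale value,
    // unless a forceRefreshThreshold (> 0) says the entry is too old to serve
    if (entry && (forceRefreshThreshold === 0 || age < forceRefreshThreshold)) {
      refresh.catch(console.error)
      return entry.value
    }
    return await refresh
  }
}

// usage, mirroring the resolver above:
// const getPrice = cachedFetcher(fetchPrice, { maxSize: SUPPORTED_CURRENCIES.length, cacheExpiry: 60 * 1000, forceRefreshThreshold: 0, keyGenerator: (fiat = 'USD') => fiat })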

View File

@ -1,28 +1,56 @@
import { GraphQLError } from 'graphql'
import { withClause, intervalClause } from './growth'
import { timeUnitForRange, whenRange } from '@/lib/time'
import { viewGroup } from './growth'
import { GqlAuthenticationError } from '@/lib/error'
export default {
Query: {
referrals: async (parent, { when, from, to }, { models, me }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
const range = whenRange(when, from, to)
return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', t) at time zone 'America/Chicago' as time,
json_build_array(
json_build_object('name', 'referrals', 'value', COALESCE(SUM(referrals), 0)),
json_build_object('name', 'one day referrals', 'value', COALESCE(SUM(one_day_referrals), 0)),
json_build_object('name', 'referral sats', 'value', FLOOR(COALESCE(SUM(msats_referrals), 0) / 1000.0)),
json_build_object('name', 'one day referral sats', 'value', FLOOR(COALESCE(SUM(msats_one_day_referrals), 0) / 1000.0))
const [{ totalSats }] = await models.$queryRawUnsafe(`
SELECT COALESCE(FLOOR(sum(msats) / 1000), 0) as "totalSats"
FROM "ReferralAct"
WHERE ${intervalClause(range, 'ReferralAct')}
AND "ReferralAct"."referrerId" = $3
`, ...range, Number(me.id))
const [{ totalReferrals }] = await models.$queryRawUnsafe(`
SELECT count(*)::INTEGER as "totalReferrals"
FROM users
WHERE ${intervalClause(range, 'users')}
AND "referrerId" = $3
`, ...range, Number(me.id))
const stats = await models.$queryRawUnsafe(
`${withClause(range)}
SELECT time, json_build_array(
json_build_object('name', 'referrals', 'value', count(*) FILTER (WHERE act = 'REFERREE')),
json_build_object('name', 'sats', 'value', FLOOR(COALESCE(sum(msats) FILTER (WHERE act IN ('BOOST', 'STREAM', 'FEE')), 0)))
) AS data
FROM ${viewGroup(range, 'user_stats')}
WHERE id = ${me.id}
GROUP BY time
ORDER BY time ASC`, ...range)
FROM times
LEFT JOIN
((SELECT "ReferralAct".created_at, "ReferralAct".msats / 1000.0 as msats, "ItemAct".act::text as act
FROM "ReferralAct"
JOIN "ItemAct" ON "ItemAct".id = "ReferralAct"."itemActId"
WHERE ${intervalClause(range, 'ReferralAct')}
AND "ReferralAct"."referrerId" = $3)
UNION ALL
(SELECT created_at, 0.0 as sats, 'REFERREE' as act
FROM users
WHERE ${intervalClause(range, 'users')}
AND "referrerId" = $3)) u ON time = date_trunc('${timeUnitForRange(range)}', u.created_at)
GROUP BY time
ORDER BY time ASC`, ...range, Number(me.id))
return {
totalSats,
totalReferrals,
stats
}
}
}
}
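Both referrals implementations return time-bucketed rows whose data column is assembled with json_build_array and json_build_object, presumably so the frontend charts can consume them directly. One returned row looks roughly like this (values are made up, and the series names differ between the two versions shown above):

// shape of a single stats row produced by the queries above (illustrative values)
const row = {
  time: new Date('2024-06-01T00:00:00-05:00'),
  data: [
    { name: 'referrals', value: 3 },
    { name: 'sats', value: 2100 }
  ]
}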

View File

@ -1,8 +1,9 @@
import { amountSchema, validateSchema } from '@/lib/validate'
import { getAd, getItem } from './item'
import { GraphQLError } from 'graphql'
import { amountSchema, ssValidate } from '@/lib/validate'
import serialize from './serial'
import { USER_ID } from '@/lib/constants'
import { getItem } from './item'
import { topUsers } from './user'
import performPaidAction from '../paidAction'
import { GqlInputError } from '@/lib/error'
let rewardCache
@ -63,21 +64,21 @@ async function getMonthlyRewards (when, models) {
async function getRewards (when, models) {
if (when) {
if (when.length > 1) {
throw new GqlInputError('too many dates')
throw new GraphQLError('too many dates', { extensions: { code: 'BAD_USER_INPUT' } })
}
when.forEach(w => {
if (isNaN(new Date(w))) {
throw new GqlInputError('invalid date')
throw new GraphQLError('invalid date', { extensions: { code: 'BAD_USER_INPUT' } })
}
})
if (new Date(when[0]) > new Date(when[when.length - 1])) {
throw new GqlInputError('bad date range')
throw new GraphQLError('bad date range', { extensions: { code: 'BAD_USER_INPUT' } })
}
if (new Date(when[0]).getTime() > new Date('2024-03-01').getTime() && new Date(when[0]).getTime() < new Date('2024-05-02').getTime()) {
// after 3/1/2024 and until 5/1/2024, we reward monthly on the 1st
if (new Date(when[0]).getUTCDate() !== 1) {
throw new GqlInputError('bad reward date')
throw new GraphQLError('invalid reward date', { extensions: { code: 'BAD_USER_INPUT' } })
}
return await getMonthlyRewards(when, models)
@ -119,11 +120,11 @@ export default {
}
if (!when || when.length > 2) {
throw new GqlInputError('bad date range')
throw new GraphQLError('invalid date range', { extensions: { code: 'BAD_USER_INPUT' } })
}
for (const w of when) {
if (isNaN(new Date(w))) {
throw new GqlInputError('invalid date')
throw new GraphQLError('invalid date', { extensions: { code: 'BAD_USER_INPUT' } })
}
}
@ -141,7 +142,6 @@ export default {
(SELECT FLOOR("Earn".msats / 1000.0) as sats, type, rank, "typeId"
FROM "Earn"
WHERE "Earn"."userId" = ${me.id}
AND (type IS NULL OR type NOT IN ('FOREVER_REFERRAL', 'ONE_DAY_REFERRAL'))
AND date_trunc('day', "Earn".created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago') = days_cte.day
ORDER BY "Earn".msats DESC)
) "Earn"
@ -157,23 +157,19 @@ export default {
const [{ to, from }] = await models.$queryRaw`
SELECT date_trunc('day', (now() AT TIME ZONE 'America/Chicago')) AT TIME ZONE 'America/Chicago' as from,
(date_trunc('day', (now() AT TIME ZONE 'America/Chicago')) AT TIME ZONE 'America/Chicago') + interval '1 day - 1 second' as to`
return await topUsers(parent, { when: 'custom', to: new Date(to).getTime().toString(), from: new Date(from).getTime().toString(), limit: 500 }, { models, ...context })
},
total: async (parent, args, { models }) => {
if (!parent.total) {
return 0
}
return parent.total
},
ad: async (parent, args, { me, models }) => {
return await getAd(parent, { }, { me, models })
return await topUsers(parent, { when: 'custom', to: new Date(to).getTime().toString(), from: new Date(from).getTime().toString(), limit: 100 }, { models, ...context })
}
},
Mutation: {
donateToRewards: async (parent, { sats }, { me, models, lnd }) => {
await validateSchema(amountSchema, { amount: sats })
donateToRewards: async (parent, { sats, hash, hmac }, { me, models, lnd }) => {
await ssValidate(amountSchema, { amount: sats })
return await performPaidAction('DONATE', { sats }, { me, models, lnd })
await serialize(
models.$queryRaw`SELECT donate(${sats}::INTEGER, ${me?.id || USER_ID.anon}::INTEGER)`,
{ models, lnd, me, hash, hmac, fee: sats, verifyPayment: !!hash || !me }
)
return sats
}
},
Reward: {

api/resolvers/serial.js (new file, 141 lines)
View File

@ -0,0 +1,141 @@
import { GraphQLError } from 'graphql'
import { timingSafeEqual } from 'crypto'
import retry from 'async-retry'
import Prisma from '@prisma/client'
import { settleHodlInvoice } from 'ln-service'
import { createHmac } from './wallet'
import { msatsToSats, numWithUnits } from '@/lib/format'
import { BALANCE_LIMIT_MSATS } from '@/lib/constants'
export default async function serialize (trx, { models, lnd, me, hash, hmac, fee, verifyPayment: verify }) {
// wrap first argument in array if not array already
const isArray = Array.isArray(trx)
if (!isArray) trx = [trx]
// conditional queries can be added inline using && syntax
// we filter any falsy value out here
trx = trx.filter(q => !!q)
let invoice
if (verify) {
invoice = await verifyPayment(models, hash, hmac, fee)
trx = [
models.$executeRaw`SELECT confirm_invoice(${hash}, ${invoice.msatsReceived})`,
...trx
]
}
let results = await retry(async bail => {
try {
const [, ...results] = await models.$transaction(
[models.$executeRaw`SELECT ASSERT_SERIALIZED()`, ...trx],
{ isolationLevel: Prisma.TransactionIsolationLevel.Serializable })
return results
} catch (error) {
console.log(error)
// two cases where we get insufficient funds:
// 1. plpgsql function raises
// 2. constraint violation via a prisma call
// XXX prisma does not provide a way to distinguish these cases so we
// have to check the error message
if (error.message.includes('SN_INSUFFICIENT_FUNDS') ||
error.message.includes('\\"users\\" violates check constraint \\"msats_positive\\"')) {
bail(new GraphQLError('insufficient funds', { extensions: { code: 'BAD_INPUT' } }))
}
if (error.message.includes('SN_NOT_SERIALIZABLE')) {
bail(new Error('wallet balance transaction is not serializable'))
}
if (error.message.includes('SN_CONFIRMED_WITHDRAWL_EXISTS')) {
bail(new Error('withdrawal invoice already confirmed (to withdraw again create a new invoice)'))
}
if (error.message.includes('SN_PENDING_WITHDRAWL_EXISTS')) {
bail(new Error('withdrawal invoice exists and is pending'))
}
if (error.message.includes('SN_INELIGIBLE')) {
bail(new Error('user ineligible for gift'))
}
if (error.message.includes('SN_UNSUPPORTED')) {
bail(new Error('unsupported action'))
}
if (error.message.includes('SN_DUPLICATE')) {
bail(new Error('duplicate not allowed'))
}
if (error.message.includes('SN_REVOKED_OR_EXHAUSTED')) {
bail(new Error('faucet has been revoked or is exhausted'))
}
if (error.message.includes('SN_INV_PENDING_LIMIT')) {
bail(new Error('too many pending invoices'))
}
if (error.message.includes('SN_INV_EXCEED_BALANCE')) {
bail(new Error(`pending invoices and withdrawals must not cause balance to exceed ${numWithUnits(msatsToSats(BALANCE_LIMIT_MSATS))}`))
}
if (error.message.includes('40001') || error.code === 'P2034') {
throw new Error('wallet balance serialization failure - try again')
}
if (error.message.includes('23514') || ['P2002', 'P2003', 'P2004'].includes(error.code)) {
bail(new Error('constraint failure'))
}
bail(error)
}
}, {
minTimeout: 10,
maxTimeout: 100,
retries: 10
})
if (hash) {
if (invoice?.isHeld) {
await settleHodlInvoice({ secret: invoice.preimage, lnd })
}
// remove first element since that is the confirmed invoice
results = results.slice(1)
}
// if first argument was not an array, unwrap the result
return isArray ? results : results[0]
}
async function verifyPayment (models, hash, hmac, fee) {
if (!hash) {
throw new GraphQLError('hash required', { extensions: { code: 'BAD_INPUT' } })
}
if (!hmac) {
throw new GraphQLError('hmac required', { extensions: { code: 'BAD_INPUT' } })
}
const hmac2 = createHmac(hash)
if (!timingSafeEqual(Buffer.from(hmac), Buffer.from(hmac2))) {
throw new GraphQLError('bad hmac', { extensions: { code: 'FORBIDDEN' } })
}
const invoice = await models.invoice.findUnique({
where: { hash },
include: {
user: true
}
})
if (!invoice) {
throw new GraphQLError('invoice not found', { extensions: { code: 'BAD_INPUT' } })
}
const expired = new Date(invoice.expiresAt) <= new Date()
if (expired) {
throw new GraphQLError('invoice expired', { extensions: { code: 'BAD_INPUT' } })
}
if (invoice.confirmedAt) {
throw new GraphQLError('invoice already used', { extensions: { code: 'BAD_INPUT' } })
}
if (invoice.cancelled) {
throw new GraphQLError('invoice was canceled', { extensions: { code: 'BAD_INPUT' } })
}
if (!invoice.msatsReceived) {
throw new GraphQLError('invoice was not paid', { extensions: { code: 'BAD_INPUT' } })
}
if (fee && msatsToSats(invoice.msatsReceived) < fee) {
throw new GraphQLError('invoice amount too low', { extensions: { code: 'BAD_INPUT' } })
}
return invoice
}
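serialize above accepts a single Prisma query or an array (falsy entries are filtered out, so callers can include queries conditionally with &&), optionally prepends invoice confirmation when a payment must be verified, and runs everything as one serializable transaction wrapped in async-retry. The retry/bail split is the load-bearing part: throwing inside the callback schedules another attempt, while bail(err) gives up immediately. A self-contained sketch of just that pattern (the two helpers are hypothetical stand-ins, not part of this diff):

import retry from 'async-retry'

// hypothetical stand-ins for the real transaction and error classification
const doSerializableTransaction = async () => 'ok'
const isSerializationFailure = (error) => error?.code === 'P2034' || /40001/.test(error?.message ?? '')

const result = await retry(async bail => {
  try {
    return await doSerializableTransaction()
  } catch (error) {
    if (isSerializationFailure(error)) {
      throw new Error('wallet balance serialization failure - try again') // thrown errors are retried
    }
    bail(error) // permanent failure: retry() rejects with this error immediately
    // note: bail() does not stop execution of this callback, so return afterwards if more code follows
  }
}, { minTimeout: 10, maxTimeout: 100, retries: 10 })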

View File

@ -1,10 +1,66 @@
import { whenRange } from '@/lib/time'
import { validateSchema, territorySchema } from '@/lib/validate'
import { GraphQLError } from 'graphql'
import serialize from './serial'
import { TERRITORY_COST_MONTHLY, TERRITORY_COST_ONCE, TERRITORY_COST_YEARLY, TERRITORY_PERIOD_COST } from '@/lib/constants'
import { datePivot, whenRange } from '@/lib/time'
import { ssValidate, territorySchema } from '@/lib/validate'
import { nextBilling, proratedBillingCost } from '@/lib/territory'
import { decodeCursor, LIMIT, nextCursorEncoded } from '@/lib/cursor'
import { viewGroup } from './growth'
import { subViewGroup } from './growth'
import { notifyTerritoryTransfer } from '@/lib/webPush'
import performPaidAction from '../paidAction'
import { GqlAuthenticationError, GqlInputError } from '@/lib/error'
export function paySubQueries (sub, models) {
if (sub.billingType === 'ONCE') {
return []
}
// if in active or grace, consider we are billing them from where they are paid up
// and use grandfathered cost
let billedLastAt = sub.billPaidUntil
let billingCost = sub.billingCost
// if the sub is archived, they are paying to reactivate it
if (sub.status === 'STOPPED') {
// get non-grandfathered cost and reset their billing to start now
billedLastAt = new Date()
billingCost = TERRITORY_PERIOD_COST(sub.billingType)
}
const billPaidUntil = nextBilling(billedLastAt, sub.billingType)
const cost = BigInt(billingCost) * BigInt(1000)
return [
models.user.update({
where: {
id: sub.userId
},
data: {
msats: {
decrement: cost
}
}
}),
// update 'em
models.sub.update({
where: {
name: sub.name
},
data: {
billedLastAt,
billPaidUntil,
billingCost,
status: 'ACTIVE'
}
}),
// record 'em
models.subAct.create({
data: {
userId: sub.userId,
subName: sub.name,
msats: cost,
type: 'BILLING'
}
})
]
}
export async function getSub (parent, { name }, { models, me }) {
if (!name) return null
@ -94,8 +150,8 @@ export default {
COALESCE(floor(sum(msats_spent)/1000), 0) as spent,
COALESCE(sum(posts), 0) as nposts,
COALESCE(sum(comments), 0) as ncomments
FROM ${viewGroup(range, 'sub_stats')}
JOIN "Sub" on "Sub".name = u.sub_name
FROM ${subViewGroup(range)} ss
JOIN "Sub" on "Sub".name = ss.sub_name
GROUP BY "Sub".name
ORDER BY ${column} DESC NULLS LAST, "Sub".created_at ASC
OFFSET $3
@ -108,12 +164,12 @@ export default {
},
userSubs: async (_parent, { name, cursor, when, by, from, to, limit = LIMIT }, { models }) => {
if (!name) {
throw new GqlInputError('must supply user name')
throw new GraphQLError('must supply user name', { extensions: { code: 'BAD_INPUT' } })
}
const user = await models.user.findUnique({ where: { name } })
if (!user) {
throw new GqlInputError('no user has that name')
throw new GraphQLError('no user has that name', { extensions: { code: 'BAD_INPUT' } })
}
const decodedCursor = decodeCursor(cursor)
@ -136,8 +192,8 @@ export default {
COALESCE(floor(sum(msats_spent)/1000), 0) as spent,
COALESCE(sum(posts), 0) as nposts,
COALESCE(sum(comments), 0) as ncomments
FROM ${viewGroup(range, 'sub_stats')}
JOIN "Sub" on "Sub".name = u.sub_name
FROM ${subViewGroup(range)} ss
JOIN "Sub" on "Sub".name = ss.sub_name
WHERE "Sub"."userId" = $3
AND "Sub".status = 'ACTIVE'
GROUP BY "Sub".name
@ -152,20 +208,20 @@ export default {
}
},
Mutation: {
upsertSub: async (parent, { ...data }, { me, models, lnd }) => {
upsertSub: async (parent, { hash, hmac, ...data }, { me, models, lnd }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
await validateSchema(territorySchema, data, { models, me, sub: { name: data.oldName } })
await ssValidate(territorySchema, data, { models, me, sub: { name: data.oldName } })
if (data.oldName) {
return await updateSub(parent, data, { me, models, lnd })
return await updateSub(parent, data, { me, models, lnd, hash, hmac })
} else {
return await createSub(parent, data, { me, models, lnd })
return await createSub(parent, data, { me, models, lnd, hash, hmac })
}
},
paySub: async (parent, { name }, { me, models, lnd }) => {
paySub: async (parent, { name, hash, hmac }, { me, models, lnd }) => {
// check that they own the sub
const sub = await models.sub.findUnique({
where: {
@ -174,22 +230,30 @@ export default {
})
if (!sub) {
throw new GqlInputError('sub not found')
throw new GraphQLError('sub not found', { extensions: { code: 'BAD_INPUT' } })
}
if (sub.userId !== me.id) {
throw new GqlInputError('you do not own this sub')
throw new GraphQLError('you do not own this sub', { extensions: { code: 'BAD_INPUT' } })
}
if (sub.status === 'ACTIVE') {
return sub
}
return await performPaidAction('TERRITORY_BILLING', { name }, { me, models, lnd })
const queries = paySubQueries(sub, models)
if (queries.length === 0) {
return sub
}
const results = await serialize(
queries,
{ models, lnd, me, hash, hmac, fee: sub.billingCost, verifyPayment: !!hash || !me })
return results[1]
},
toggleMuteSub: async (parent, { name }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
const lookupData = { userId: Number(me.id), subName: name }
@ -205,7 +269,7 @@ export default {
},
toggleSubSubscription: async (sub, { name }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
const lookupData = { userId: me.id, subName: name }
@ -221,7 +285,7 @@ export default {
},
transferTerritory: async (parent, { subName, userName }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
const sub = await models.sub.findUnique({
@ -230,18 +294,18 @@ export default {
}
})
if (!sub) {
throw new GqlInputError('sub not found')
throw new GraphQLError('sub not found', { extensions: { code: 'BAD_INPUT' } })
}
if (sub.userId !== me.id) {
throw new GqlInputError('you do not own this sub')
throw new GraphQLError('you do not own this sub', { extensions: { code: 'BAD_INPUT' } })
}
const user = await models.user.findFirst({ where: { name: userName } })
if (!user) {
throw new GqlInputError('user not found')
throw new GraphQLError('user not found', { extensions: { code: 'BAD_INPUT' } })
}
if (user.id === me.id) {
throw new GqlInputError('cannot transfer territory to yourself')
throw new GraphQLError('cannot transfer territory to yourself', { extensions: { code: 'BAD_INPUT' } })
}
const [, updatedSub] = await models.$transaction([
@ -253,30 +317,60 @@ export default {
return updatedSub
},
unarchiveTerritory: async (parent, { ...data }, { me, models, lnd }) => {
unarchiveTerritory: async (parent, { hash, hmac, ...data }, { me, models, lnd }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
const { name } = data
await validateSchema(territorySchema, data, { models, me })
await ssValidate(territorySchema, data, { models, me, sub: { name } })
const oldSub = await models.sub.findUnique({ where: { name } })
if (!oldSub) {
throw new GqlInputError('sub not found')
throw new GraphQLError('sub not found', { extensions: { code: 'BAD_INPUT' } })
}
if (oldSub.status !== 'STOPPED') {
throw new GqlInputError('sub is not archived')
throw new GraphQLError('sub is not archived', { extensions: { code: 'BAD_INPUT' } })
}
if (oldSub.billingType === 'ONCE') {
// sanity check. this should never happen but leaving this comment here
// to stop error propagation just in case and document that this should never happen.
// #defensivecode
throw new GqlInputError('sub should not be archived')
throw new GraphQLError('sub should not be archived', { extensions: { code: 'BAD_INPUT' } })
}
return await performPaidAction('TERRITORY_UNARCHIVE', data, { me, models, lnd })
const billingCost = TERRITORY_PERIOD_COST(data.billingType)
const billPaidUntil = nextBilling(new Date(), data.billingType)
const cost = BigInt(1000) * BigInt(billingCost)
const newSub = { ...data, billPaidUntil, billingCost, userId: me.id, status: 'ACTIVE' }
const isTransfer = oldSub.userId !== me.id
await serialize([
models.user.update({
where: {
id: me.id
},
data: {
msats: {
decrement: cost
}
}
}),
models.subAct.create({
data: {
subName: name,
userId: me.id,
msats: cost,
type: 'BILLING'
}
}),
models.sub.update({ where: { name }, data: newSub }),
isTransfer && models.territoryTransfer.create({ data: { subName: name, oldUserId: oldSub.userId, newUserId: me.id } })
],
{ models, lnd, hash, me, hmac, fee: billingCost, verifyPayment: !!hash || !me })
if (isTransfer) notifyTerritoryTransfer({ models, sub: newSub, to: me })
}
},
Sub: {
@ -314,18 +408,74 @@ export default {
}
}
async function createSub (parent, data, { me, models, lnd }) {
async function createSub (parent, data, { me, models, lnd, hash, hmac }) {
const { billingType } = data
let billingCost = TERRITORY_COST_MONTHLY
const billedLastAt = new Date()
let billPaidUntil = datePivot(billedLastAt, { months: 1 })
if (billingType === 'ONCE') {
billingCost = TERRITORY_COST_ONCE
billPaidUntil = null
} else if (billingType === 'YEARLY') {
billingCost = TERRITORY_COST_YEARLY
billPaidUntil = datePivot(billedLastAt, { years: 1 })
}
const cost = BigInt(1000) * BigInt(billingCost)
try {
return await performPaidAction('TERRITORY_CREATE', data, { me, models, lnd })
const results = await serialize([
// bill 'em
models.user.update({
where: {
id: me.id
},
data: {
msats: {
decrement: cost
}
}
}),
// create 'em
models.sub.create({
data: {
...data,
billedLastAt,
billPaidUntil,
billingCost,
rankingType: 'WOT',
userId: me.id
}
}),
// record 'em
models.subAct.create({
data: {
userId: me.id,
subName: data.name,
msats: cost,
type: 'BILLING'
}
}),
// notify 'em (in the future)
models.subSubscription.create({
data: {
userId: me.id,
subName: data.name
}
})
], { models, lnd, me, hash, hmac, fee: billingCost, verifyPayment: !!hash || !me })
return results[1]
} catch (error) {
if (error.code === 'P2002') {
throw new GqlInputError('name taken')
throw new GraphQLError('name taken', { extensions: { code: 'BAD_INPUT' } })
}
throw error
}
}
async function updateSub (parent, { oldName, ...data }, { me, models, lnd }) {
async function updateSub (parent, { oldName, ...data }, { me, models, lnd, hash, hmac }) {
const oldSub = await models.sub.findUnique({
where: {
name: oldName,
@ -339,14 +489,78 @@ async function updateSub (parent, { oldName, ...data }, { me, models, lnd }) {
})
if (!oldSub) {
throw new GqlInputError('sub not found')
throw new GraphQLError('sub not found', { extensions: { code: 'BAD_INPUT' } })
}
try {
return await performPaidAction('TERRITORY_UPDATE', { oldName, ...data }, { me, models, lnd })
// if the cost is changing, record the new cost and update billing job
if (oldSub.billingType !== data.billingType) {
// make sure the current cost is recorded so they are grandfathered in
data.billingCost = TERRITORY_PERIOD_COST(data.billingType)
// we never want to bill them again if they are changing to ONCE
if (data.billingType === 'ONCE') {
data.billPaidUntil = null
data.billingAutoRenew = false
}
// if they are changing to YEARLY, bill them in a year
// if they are changing to MONTHLY from YEARLY, do nothing
if (oldSub.billingType === 'MONTHLY' && data.billingType === 'YEARLY') {
data.billPaidUntil = datePivot(new Date(oldSub.billedLastAt), { years: 1 })
}
// if this billing change makes their bill paid up, set them to active
if (data.billPaidUntil === null || data.billPaidUntil >= new Date()) {
data.status = 'ACTIVE'
}
// if the billing type is changing such that it's more expensive, bill 'em the difference
const proratedCost = proratedBillingCost(oldSub, data.billingType)
if (proratedCost > 0) {
const cost = BigInt(1000) * BigInt(proratedCost)
const results = await serialize([
models.user.update({
where: {
id: me.id
},
data: {
msats: {
decrement: cost
}
}
}),
models.subAct.create({
data: {
userId: me.id,
subName: oldName,
msats: cost,
type: 'BILLING'
}
}),
models.sub.update({
data,
where: {
name: oldName,
userId: me.id
}
})
], { models, lnd, me, hash, hmac, fee: proratedCost, verifyPayment: !!hash || !me })
return results[2]
}
}
// if we get here they are changing in a way that doesn't cost them anything
return await models.sub.update({
data,
where: {
name: oldName,
userId: me.id
}
})
} catch (error) {
if (error.code === 'P2002') {
throw new GqlInputError('name taken')
throw new GraphQLError('name taken', { extensions: { code: 'BAD_INPUT' } })
}
throw error
}
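In the updateSub variant above that handles billing-type changes directly, switching to a costlier plan records the new period cost (so the territory stays grandfathered at that price) and bills the prorated difference right away. A worked example with made-up numbers (the constants and the prorating rule here are assumptions for illustration; the real values live in lib/constants and lib/territory):

// illustrative constants and prorating rule; not the real lib implementations
const TERRITORY_PERIOD_COST = type => ({ MONTHLY: 100_000, YEARLY: 1_000_000 })[type]
const proratedBillingCost = (oldSub, newType) =>
  Math.max(0, TERRITORY_PERIOD_COST(newType) - TERRITORY_PERIOD_COST(oldSub.billingType))

const oldSub = { billingType: 'MONTHLY', billedLastAt: new Date('2024-06-01') }
// MONTHLY -> YEARLY: record the yearly cost, push billPaidUntil out a year from billedLastAt,
// and charge the positive difference now
const owedNow = proratedBillingCost(oldSub, 'YEARLY') // 900_000 sats with these made-up numbers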

View File

@ -1,40 +1,27 @@
import { USER_ID, IMAGE_PIXELS_MAX, UPLOAD_SIZE_MAX, UPLOAD_SIZE_MAX_AVATAR, UPLOAD_TYPES_ALLOW, AWS_S3_URL_REGEXP, AVATAR_TYPES_ALLOW } from '@/lib/constants'
import { GraphQLError } from 'graphql'
import { USER_ID, IMAGE_PIXELS_MAX, UPLOAD_SIZE_MAX, UPLOAD_SIZE_MAX_AVATAR, UPLOAD_TYPES_ALLOW } from '@/lib/constants'
import { createPresignedPost } from '@/api/s3'
import { GqlAuthenticationError, GqlInputError } from '@/lib/error'
import { msatsToSats } from '@/lib/format'
export default {
Query: {
uploadFees: async (parent, { s3Keys }, { models, me }) => {
return uploadFees(s3Keys, { models, me })
}
},
Mutation: {
getSignedPOST: async (parent, { type, size, width, height, avatar }, { models, me }) => {
if (UPLOAD_TYPES_ALLOW.indexOf(type) === -1) {
throw new GqlInputError(`upload must be ${UPLOAD_TYPES_ALLOW.map(t => t.replace(/^(image|video)\//, '')).join(', ')}`)
throw new GraphQLError(`image must be ${UPLOAD_TYPES_ALLOW.map(t => t.replace('image/', '')).join(', ')}`, { extensions: { code: 'BAD_INPUT' } })
}
if (size > UPLOAD_SIZE_MAX) {
throw new GqlInputError(`upload must be less than ${UPLOAD_SIZE_MAX / (1024 ** 2)} megabytes`)
throw new GraphQLError(`image must be less than ${UPLOAD_SIZE_MAX / (1024 ** 2)} megabytes`, { extensions: { code: 'BAD_INPUT' } })
}
if (avatar) {
if (AVATAR_TYPES_ALLOW.indexOf(type) === -1) {
throw new GqlInputError(`avatar must be ${AVATAR_TYPES_ALLOW.map(t => t.replace('image/', '')).join(', ')}`)
}
if (size > UPLOAD_SIZE_MAX_AVATAR) {
throw new GqlInputError(`avatar must be less than ${UPLOAD_SIZE_MAX_AVATAR / (1024 ** 2)} megabytes`)
}
if (avatar && size > UPLOAD_SIZE_MAX_AVATAR) {
throw new GraphQLError(`image must be less than ${UPLOAD_SIZE_MAX_AVATAR / (1024 ** 2)} megabytes`, { extensions: { code: 'BAD_INPUT' } })
}
// width and height are 0 for videos
if (width * height > IMAGE_PIXELS_MAX) {
throw new GqlInputError(`image must be less than ${IMAGE_PIXELS_MAX} pixels`)
throw new GraphQLError(`image must be less than ${IMAGE_PIXELS_MAX} pixels`, { extensions: { code: 'BAD_INPUT' } })
}
const fileParams = {
const imgParams = {
type,
size,
width,
@ -44,27 +31,12 @@ export default {
}
if (avatar) {
if (!me) throw new GqlAuthenticationError()
fileParams.paid = undefined
if (!me) throw new GraphQLError('you must be logged in', { extensions: { code: 'FORBIDDEN' } })
imgParams.paid = undefined
}
const upload = await models.upload.create({ data: { ...fileParams } })
const upload = await models.upload.create({ data: { ...imgParams } })
return createPresignedPost({ key: String(upload.id), type, size })
}
}
}
export function uploadIdsFromText (text, { models }) {
if (!text) return []
return [...new Set([...text.matchAll(AWS_S3_URL_REGEXP)].map(m => Number(m[1])))]
}
export async function uploadFees (s3Keys, { models, me }) {
// returns info object in this format:
// { bytes24h: int, bytesUnpaid: int, nUnpaid: int, uploadFeesMsats: BigInt }
const [info] = await models.$queryRawUnsafe('SELECT * FROM upload_fees($1::INTEGER, $2::INTEGER[])', me ? me.id : USER_ID.anon, s3Keys)
const uploadFees = msatsToSats(info.uploadFeesMsats)
const totalFeesMsats = info.nUnpaid * Number(info.uploadFeesMsats)
const totalFees = msatsToSats(totalFeesMsats)
return { ...info, uploadFees, totalFees, totalFeesMsats }
}
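uploadFees above calls the upload_fees SQL function and then derives per-upload and total fees from its result. The arithmetic, spelled out with made-up numbers (the inline msatsToSats here is a simplified stand-in for the helper in lib/format):

// made-up numbers, only to show the arithmetic in uploadFees
const info = { bytes24h: 5_000_000, bytesUnpaid: 2_000_000, nUnpaid: 2, uploadFeesMsats: 10_000n }

const msatsToSats = msats => Math.floor(Number(msats) / 1000)        // simplified stand-in
const uploadFees = msatsToSats(info.uploadFeesMsats)                 // 10 sats per unpaid upload
const totalFeesMsats = info.nUnpaid * Number(info.uploadFeesMsats)   // 20,000 msats
const totalFees = msatsToSats(totalFeesMsats)                        // 20 sats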

View File

@ -1,16 +1,16 @@
import { readFile } from 'fs/promises'
import { join, resolve } from 'path'
import { GraphQLError } from 'graphql'
import { decodeCursor, LIMIT, nextCursorEncoded } from '@/lib/cursor'
import { msatsToSats } from '@/lib/format'
import { bioSchema, emailSchema, settingsSchema, validateSchema, userSchema } from '@/lib/validate'
import { getItem, updateItem, filterClause, createItem, whereClause, muteClause, activeOrMine } from './item'
import { USER_ID, RESERVED_MAX_USER_ID, SN_NO_REWARDS_IDS, INVOICE_ACTION_NOTIFICATION_TYPES } from '@/lib/constants'
import { bioSchema, emailSchema, settingsSchema, ssValidate, userSchema } from '@/lib/validate'
import { getItem, updateItem, filterClause, createItem, whereClause, muteClause } from './item'
import { USER_ID, RESERVED_MAX_USER_ID, SN_NO_REWARDS_IDS } from '@/lib/constants'
import { viewGroup } from './growth'
import { timeUnitForRange, whenRange } from '@/lib/time'
import assertApiKeyNotPermitted from './apiKey'
import { hashEmail } from '@/lib/crypto'
import { isMuted } from '@/lib/user'
import { GqlAuthenticationError, GqlAuthorizationError, GqlInputError } from '@/lib/error'
const contributors = new Set()
@ -66,12 +66,11 @@ export async function topUsers (parent, { cursor, when, by, from, to, limit = LI
case 'comments': column = 'ncomments'; break
case 'referrals': column = 'referrals'; break
case 'stacking': column = 'stacked'; break
case 'value':
default: column = 'proportion'; break
}
const users = (await models.$queryRawUnsafe(`
SELECT * ${column === 'proportion' ? ', proportion' : ''}
SELECT *
FROM
(SELECT users.*,
COALESCE(floor(sum(msats_spent)/1000), 0) as spent,
@ -126,14 +125,13 @@ export default {
},
settings: async (parent, args, { models, me }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
return await models.user.findUnique({ where: { id: me.id } })
},
user: async (parent, { id, name }, { models }) => {
if (id) id = Number(id)
return await models.user.findUnique({ where: { id, name } })
user: async (parent, { name }, { models }) => {
return await models.user.findUnique({ where: { name } })
},
users: async (parent, args, { models }) =>
await models.user.findMany(),
@ -146,7 +144,7 @@ export default {
},
mySubscribedUsers: async (parent, { cursor }, { models, me }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('You must be logged in to view subscribed users', { extensions: { code: 'UNAUTHENTICATED' } })
}
const decodedCursor = decodeCursor(cursor)
@ -167,7 +165,7 @@ export default {
},
myMutedUsers: async (parent, { cursor }, { models, me }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('You must be logged in to view muted users', { extensions: { code: 'UNAUTHENTICATED' } })
}
const decodedCursor = decodeCursor(cursor)
@ -285,8 +283,6 @@ export default {
'"ThreadSubscription"."userId" = $1',
'r.created_at > $2',
'r.created_at >= "ThreadSubscription".created_at',
'r."userId" <> $1',
activeOrMine(me),
await filterClause(me, models),
muteClause(me),
...(user.noteAllDescendants ? [] : ['r.level = 1'])
@ -308,7 +304,6 @@ export default {
("Item"."parentId" IS NULL AND "UserSubscription"."postsSubscribedAt" IS NOT NULL AND "Item".created_at >= "UserSubscription"."postsSubscribedAt")
OR ("Item"."parentId" IS NOT NULL AND "UserSubscription"."commentsSubscribedAt" IS NOT NULL AND "Item".created_at >= "UserSubscription"."commentsSubscribedAt")
)`,
activeOrMine(me),
await filterClause(me, models),
muteClause(me))})`, me.id, lastChecked)
if (newUserSubs.exists) {
@ -325,8 +320,6 @@ export default {
'"SubSubscription"."userId" = $1',
'"Item".created_at > $2',
'"Item"."parentId" IS NULL',
'"Item"."userId" <> $1',
activeOrMine(me),
await filterClause(me, models),
muteClause(me))})`, me.id, lastChecked)
if (newSubPost.exists) {
@ -345,7 +338,6 @@ export default {
'"Mention"."userId" = $1',
'"Mention".created_at > $2',
'"Item"."userId" <> $1',
activeOrMine(me),
await filterClause(me, models),
muteClause(me)
)})`, me.id, lastChecked)
@ -366,7 +358,6 @@ export default {
'"ItemMention".created_at > $2',
'"Item"."userId" <> $1',
'"Referee"."userId" = $1',
activeOrMine(me),
await filterClause(me, models),
muteClause(me)
)})`, me.id, lastChecked)
@ -384,19 +375,30 @@ export default {
JOIN "ItemForward" ON
"ItemForward"."itemId" = "Item".id
AND "ItemForward"."userId" = $1
${whereClause(
'"Item"."lastZapAt" > $2',
'"Item"."userId" <> $1',
activeOrMine(me),
await filterClause(me, models),
muteClause(me)
)})`, me.id, lastChecked)
WHERE "Item"."lastZapAt" > $2
AND "Item"."userId" <> $1)`, me.id, lastChecked)
if (newFwdSats.exists) {
foundNotes()
return true
}
}
const job = await models.item.findFirst({
where: {
maxBid: {
not: null
},
userId: me.id,
statusUpdatedAt: {
gt: lastChecked
}
}
})
if (job && job.statusUpdatedAt > job.createdAt) {
foundNotes()
return true
}
if (user.noteEarning) {
const earn = await models.earn.findFirst({
where: {
@ -422,16 +424,7 @@ export default {
confirmedAt: {
gt: lastChecked
},
OR: [
{
isHeld: null,
actionType: null
},
{
actionType: 'RECEIVE',
actionState: 'PAID'
}
]
isHeld: null
}
})
if (invoice) {
@ -445,13 +438,9 @@ export default {
where: {
userId: me.id,
status: 'CONFIRMED',
hash: {
not: null
},
updatedAt: {
gt: lastChecked
},
invoiceForward: { is: null }
}
}
})
if (wdrwl) {
@ -534,24 +523,6 @@ export default {
return true
}
const invoiceActionFailed = await models.invoice.findFirst({
where: {
userId: me.id,
updatedAt: {
gt: lastChecked
},
actionType: {
in: INVOICE_ACTION_NOTIFICATION_TYPES
},
actionState: 'FAILED'
}
})
if (invoiceActionFailed) {
foundNotes()
return true
}
// update checkedNotesAt to prevent rechecking same time period
models.user.update({
where: { id: me.id },
@ -578,8 +549,7 @@ export default {
json_build_object('name', 'comments', 'value', COALESCE(SUM(comments), 0)),
json_build_object('name', 'posts', 'value', COALESCE(SUM(posts), 0)),
json_build_object('name', 'territories', 'value', COALESCE(SUM(territories), 0)),
json_build_object('name', 'referrals', 'value', COALESCE(SUM(referrals), 0)),
json_build_object('name', 'one day referrals', 'value', COALESCE(SUM(one_day_referrals), 0))
json_build_object('name', 'referrals', 'value', COALESCE(SUM(referrals), 0))
) AS data
FROM ${viewGroup(range, 'user_stats')}
WHERE id = ${me.id}
@ -594,7 +564,6 @@ export default {
json_build_object('name', 'zaps', 'value', ROUND(COALESCE(SUM(msats_tipped), 0) / 1000)),
json_build_object('name', 'rewards', 'value', ROUND(COALESCE(SUM(msats_rewards), 0) / 1000)),
json_build_object('name', 'referrals', 'value', ROUND( COALESCE(SUM(msats_referrals), 0) / 1000)),
json_build_object('name', 'one day referrals', 'value', ROUND( COALESCE(SUM(msats_one_day_referrals), 0) / 1000)),
json_build_object('name', 'territories', 'value', ROUND(COALESCE(SUM(msats_revenue), 0) / 1000))
) AS data
FROM ${viewGroup(range, 'user_stats')}
@ -608,7 +577,6 @@ export default {
SELECT date_trunc('${timeUnitForRange(range)}', t) at time zone 'America/Chicago' as time,
json_build_array(
json_build_object('name', 'fees', 'value', FLOOR(COALESCE(SUM(msats_fees), 0) / 1000)),
json_build_object('name', 'zapping', 'value', FLOOR(COALESCE(SUM(msats_zaps), 0) / 1000)),
json_build_object('name', 'donations', 'value', FLOOR(COALESCE(SUM(msats_donated), 0) / 1000)),
json_build_object('name', 'territories', 'value', FLOOR(COALESCE(SUM(msats_billing), 0) / 1000))
) AS data
@ -620,49 +588,29 @@ export default {
},
Mutation: {
disableFreebies: async (parent, args, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
}
// disable freebies if it hasn't been set yet
try {
await models.user.update({
where: { id: me.id, disableFreebies: null },
data: { disableFreebies: true }
})
} catch (err) {
// ignore 'record not found' errors
if (err.code !== 'P2025') {
throw err
}
}
return true
},
setName: async (parent, data, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
await validateSchema(userSchema, data, { models })
await ssValidate(userSchema, data, { models })
try {
await models.user.update({ where: { id: me.id }, data })
return data.name
} catch (error) {
if (error.code === 'P2002') {
throw new GqlInputError('name taken')
throw new GraphQLError('name taken', { extensions: { code: 'BAD_INPUT' } })
}
throw error
}
},
setSettings: async (parent, { settings: { nostrRelays, ...data } }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
await validateSchema(settingsSchema, { nostrRelays, ...data })
await ssValidate(settingsSchema, { nostrRelays, ...data })
if (nostrRelays?.length) {
const connectOrCreate = []
@ -685,7 +633,7 @@ export default {
},
setWalkthrough: async (parent, { upvotePopover, tipPopover }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
await models.user.update({ where: { id: me.id }, data: { upvotePopover, tipPopover } })
@ -694,7 +642,7 @@ export default {
},
setPhoto: async (parent, { photoId }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
await models.user.update({
@ -704,29 +652,31 @@ export default {
return Number(photoId)
},
upsertBio: async (parent, { text }, { me, models, lnd }) => {
upsertBio: async (parent, { bio }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
await validateSchema(bioSchema, { text })
await ssValidate(bioSchema, { bio })
const user = await models.user.findUnique({ where: { id: me.id } })
if (user.bioId) {
return await updateItem(parent, { id: user.bioId, bio: true, text, title: `@${user.name}'s bio` }, { me, models, lnd })
await updateItem(parent, { id: user.bioId, text: bio, title: `@${user.name}'s bio` }, { me, models })
} else {
return await createItem(parent, { bio: true, text, title: `@${user.name}'s bio` }, { me, models, lnd })
await createItem(parent, { bio: true, text: bio, title: `@${user.name}'s bio` }, { me, models })
}
return await models.user.findUnique({ where: { id: me.id } })
},
generateApiKey: async (parent, { id }, { models, me }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
const user = await models.user.findUnique({ where: { id: me.id } })
if (!user.apiKeyEnabled) {
throw new GqlAuthorizationError('you are not allowed to generate api keys')
throw new GraphQLError('you are not allowed to generate api keys', { extensions: { code: 'FORBIDDEN' } })
}
// I trust postgres CSPRNG more than the one from JS
@ -741,14 +691,14 @@ export default {
},
deleteApiKey: async (parent, { id }, { models, me }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
return await models.user.update({ where: { id: me.id }, data: { apiKeyHash: null } })
},
unlinkAuth: async (parent, { authType }, { models, me }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
assertApiKeyNotPermitted({ me })
@ -757,7 +707,7 @@ export default {
user = await models.user.findUnique({ where: { id: me.id } })
const account = await models.account.findFirst({ where: { userId: me.id, provider: authType } })
if (!account) {
throw new GqlInputError('no such account')
throw new GraphQLError('no such account', { extensions: { code: 'BAD_INPUT' } })
}
await models.account.delete({ where: { id: account.id } })
if (authType === 'twitter') {
@ -772,18 +722,18 @@ export default {
} else if (authType === 'email') {
user = await models.user.update({ where: { id: me.id }, data: { email: null, emailVerified: null, emailHash: null } })
} else {
throw new GqlInputError('no such account')
throw new GraphQLError('no such account', { extensions: { code: 'BAD_INPUT' } })
}
return await authMethods(user, undefined, { models, me })
},
linkUnverifiedEmail: async (parent, { email }, { models, me }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
assertApiKeyNotPermitted({ me })
await validateSchema(emailSchema, { email })
await ssValidate(emailSchema, { email })
try {
await models.user.update({
@ -792,7 +742,7 @@ export default {
})
} catch (error) {
if (error.code === 'P2002') {
throw new GqlInputError('email taken')
throw new GraphQLError('email taken', { extensions: { code: 'BAD_INPUT' } })
}
throw error
}
@ -805,12 +755,12 @@ export default {
const muted = await isMuted({ models, muterId: me?.id, mutedId: id })
if (existing) {
if (muted && !existing.postsSubscribedAt) {
throw new GqlInputError("you can't subscribe to a stacker that you've muted")
throw new GraphQLError("you can't subscribe to a stacker that you've muted", { extensions: { code: 'BAD_INPUT' } })
}
await models.userSubscription.update({ where: { followerId_followeeId: lookupData }, data: { postsSubscribedAt: existing.postsSubscribedAt ? null : new Date() } })
} else {
if (muted) {
throw new GqlInputError("you can't subscribe to a stacker that you've muted")
throw new GraphQLError("you can't subscribe to a stacker that you've muted", { extensions: { code: 'BAD_INPUT' } })
}
await models.userSubscription.create({ data: { ...lookupData, postsSubscribedAt: new Date() } })
}
@ -822,12 +772,12 @@ export default {
const muted = await isMuted({ models, muterId: me?.id, mutedId: id })
if (existing) {
if (muted && !existing.commentsSubscribedAt) {
throw new GqlInputError("you can't subscribe to a stacker that you've muted")
throw new GraphQLError("you can't subscribe to a stacker that you've muted", { extensions: { code: 'BAD_INPUT' } })
}
await models.userSubscription.update({ where: { followerId_followeeId: lookupData }, data: { commentsSubscribedAt: existing.commentsSubscribedAt ? null : new Date() } })
} else {
if (muted) {
throw new GqlInputError("you can't subscribe to a stacker that you've muted")
throw new GraphQLError("you can't subscribe to a stacker that you've muted", { extensions: { code: 'BAD_INPUT' } })
}
await models.userSubscription.create({ data: { ...lookupData, commentsSubscribedAt: new Date() } })
}
@ -850,7 +800,7 @@ export default {
}
})
if (subscription?.postsSubscribedAt || subscription?.commentsSubscribedAt) {
throw new GqlInputError("you can't mute a stacker to whom you've subscribed")
throw new GraphQLError("you can't mute a stacker to whom you've subscribed", { extensions: { code: 'BAD_INPUT' } })
}
await models.mute.create({ data: { ...lookupData } })
}
@ -858,7 +808,7 @@ export default {
},
hideWelcomeBanner: async (parent, data, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}
await models.user.update({ where: { id: me.id }, data: { hideWelcomeBanner: true } })
@ -915,8 +865,7 @@ export default {
// get the user's first item
const item = await models.item.findFirst({
where: {
userId: user.id,
OR: [{ invoiceActionState: 'PAID' }, { invoiceActionState: null }]
userId: user.id
},
orderBy: {
createdAt: 'asc'
@ -936,8 +885,7 @@ export default {
createdAt: {
gte,
lte
},
OR: [{ invoiceActionState: 'PAID' }, { invoiceActionState: null }]
}
}
})
},
@ -954,8 +902,7 @@ export default {
createdAt: {
gte,
lte
},
OR: [{ invoiceActionState: 'PAID' }, { invoiceActionState: null }]
}
}
})
},
@ -972,8 +919,7 @@ export default {
createdAt: {
gte,
lte
},
OR: [{ invoiceActionState: 'PAID' }, { invoiceActionState: null }]
}
}
})
},
@ -1004,13 +950,7 @@ export default {
if (!me || me.id !== user.id) {
return 0
}
return msatsToSats(user.msats + user.mcredits)
},
credits: async (user, args, { models, me }) => {
if (!me || me.id !== user.id) {
return 0
}
return msatsToSats(user.mcredits)
return msatsToSats(user.msats)
},
authMethods,
hasInvites: async (user, args, { models }) => {
@ -1030,12 +970,6 @@ export default {
})
return relays?.map(r => r.nostrRelayAddr)
},
tipRandom: async (user, args, { me }) => {
if (!me || me.id !== user.id) {
return false
}
return !!user.tipRandomMin && !!user.tipRandomMax
}
},
@ -1047,20 +981,6 @@ export default {
return user.streak
},
gunStreak: async (user, args, { models }) => {
if (user.hideCowboyHat) {
return null
}
return user.gunStreak
},
horseStreak: async (user, args, { models }) => {
if (user.hideCowboyHat) {
return null
}
return user.horseStreak
},
maxStreak: async (user, args, { models }) => {
if (user.hideCowboyHat) {
return null
@ -1092,7 +1012,7 @@ export default {
if (!when || when === 'forever') {
// forever
return ((user.stackedMsats && msatsToSats(user.stackedMsats)) || 0)
return (user.stackedMsats && msatsToSats(user.stackedMsats)) || 0
}
const range = whenRange(when, from, to)

View File

@ -1,75 +0,0 @@
import { E_VAULT_KEY_EXISTS, GqlAuthenticationError, GqlInputError } from '@/lib/error'
export default {
Query: {
getVaultEntry: async (parent, { key }, { me, models }, info) => {
if (!me) throw new GqlAuthenticationError()
if (!key) throw new GqlInputError('must have key')
const k = await models.vault.findUnique({
where: {
key,
userId: me.id
}
})
return k
},
getVaultEntries: async (parent, { keysFilter }, { me, models }, info) => {
if (!me) throw new GqlAuthenticationError()
const entries = await models.vaultEntry.findMany({
where: {
userId: me.id,
key: keysFilter?.length
? {
in: keysFilter
}
: undefined
}
})
return entries
}
},
Mutation: {
// atomic vault migration
updateVaultKey: async (parent, { entries, hash }, { me, models }) => {
if (!me) throw new GqlAuthenticationError()
if (!hash) throw new GqlInputError('hash required')
const txs = []
const { vaultKeyHash: oldKeyHash } = await models.user.findUnique({ where: { id: me.id } })
if (oldKeyHash) {
if (oldKeyHash !== hash) {
throw new GqlInputError('vault key already set', E_VAULT_KEY_EXISTS)
} else {
return true
}
} else {
txs.push(models.user.update({
where: { id: me.id },
data: { vaultKeyHash: hash }
}))
}
for (const entry of entries) {
txs.push(models.vaultEntry.update({
where: { userId_key: { userId: me.id, key: entry.key } },
data: { value: entry.value, iv: entry.iv }
}))
}
await models.$transaction(txs)
return true
},
clearVault: async (parent, args, { me, models }) => {
if (!me) throw new GqlAuthenticationError()
const txs = []
txs.push(models.user.update({
where: { id: me.id },
data: { vaultKeyHash: '' }
}))
txs.push(models.vaultEntry.deleteMany({ where: { userId: me.id } }))
await models.$transaction(txs)
return true
}
}
}
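A minimal client-side sketch (not part of this diff) of driving the atomic vault migration above; the mutation name and argument shapes follow the updateVaultKey resolver and the VaultEntryInput type from api/typeDefs/vault.js, while the variable values are hypothetical:

import { gql } from '@apollo/client'

// re-encrypted entries plus the new key hash are applied server-side in one prisma transaction
export const UPDATE_VAULT_KEY = gql`
  mutation updateVaultKey($entries: [VaultEntryInput!]!, $hash: String!) {
    updateVaultKey(entries: $entries, hash: $hash)
  }
`

// hypothetical variables: each entry carries the ciphertext and iv produced by
// re-encrypting the stored value under the new vault key
export const exampleVariables = {
  hash: 'hash-of-new-vault-key',
  entries: [{ key: 'wallet-lnd-macaroon', iv: 'base64-iv', value: 'base64-ciphertext' }]
}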

File diff suppressed because it is too large

View File

@ -13,8 +13,6 @@ import { BLOCK_HEIGHT } from '@/fragments/blockHeight'
import { CHAIN_FEE } from '@/fragments/chainFee'
import { getServerSession } from 'next-auth/next'
import { getAuthOptions } from '@/pages/api/auth/[...nextauth]'
import { NOFOLLOW_LIMIT } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
export default async function getSSRApolloClient ({ req, res, me = null }) {
const session = req && await getServerSession(req, res, getAuthOptions(req))
@ -42,93 +40,20 @@ export default async function getSSRApolloClient ({ req, res, me = null }) {
watchQuery: {
fetchPolicy: 'no-cache',
nextFetchPolicy: 'no-cache',
canonizeResults: true,
ssr: true
},
query: {
fetchPolicy: 'no-cache',
nextFetchPolicy: 'no-cache',
canonizeResults: true,
ssr: true
}
}
})
await client.clearStore()
return client
}
function oneDayReferral (request, { me }) {
if (!me) return
const refHeader = request.headers['x-stacker-news-referrer']
if (!refHeader) return
const referrers = refHeader.split('; ').filter(Boolean)
for (const referrer of referrers) {
let prismaPromise, getData
if (referrer.startsWith('item-')) {
prismaPromise = models.item.findUnique({
where: {
id: parseInt(referrer.slice(5)),
msats: {
gt: satsToMsats(NOFOLLOW_LIMIT)
},
weightedVotes: {
gt: 0
}
}
})
getData = item => ({
referrerId: item.userId,
refereeId: parseInt(me.id),
type: item.parentId ? 'COMMENT' : 'POST',
typeId: String(item.id)
})
} else if (referrer.startsWith('profile-')) {
const name = referrer.slice(8)
// exclude all pages that are not user profiles
if (['api', 'auth', 'day', 'invites', 'invoices', 'referrals', 'rewards',
'satistics', 'settings', 'stackers', 'wallet', 'withdrawals', '404', '500',
'email', 'live', 'login', 'notifications', 'offline', 'search', 'share',
'signup', 'territory', 'recent', 'top', 'edit', 'post', 'rss', 'saloon',
'faq', 'story', 'privacy', 'copyright', 'tos', 'changes', 'guide', 'daily',
'anon', 'ad'].includes(name)) continue
prismaPromise = models.user.findUnique({ where: { name } })
getData = user => ({
referrerId: user.id,
refereeId: parseInt(me.id),
type: 'PROFILE',
typeId: String(user.id)
})
} else if (referrer.startsWith('territory-')) {
prismaPromise = models.sub.findUnique({ where: { name: referrer.slice(10) } })
getData = sub => ({
referrerId: sub.userId,
refereeId: parseInt(me.id),
type: 'TERRITORY',
typeId: sub.name
})
} else {
prismaPromise = models.user.findUnique({ where: { name: referrer } })
getData = user => ({
referrerId: user.id,
refereeId: parseInt(me.id),
type: 'REFERRAL',
typeId: String(user.id)
})
}
prismaPromise?.then(ref => {
if (ref && getData) {
const data = getData(ref)
// can't refer yourself
if (data.refereeId === data.referrerId) return
models.oneDayReferral.create({ data }).catch(console.error)
}
}).catch(console.error)
}
}
/**
* Takes a query and variables and returns a getServerSideProps function
*
@ -151,20 +76,10 @@ export function getGetServerSideProps (
const client = await getSSRApolloClient({ req, res })
let { data: { me } } = await client.query({ query: ME })
// required to redirect to /signup on page reload
// if we switched to anon and authentication is required
if (req.cookies['multi_auth.user-id'] === 'anonymous') {
me = null
}
const { data: { me } } = await client.query({ query: ME })
if (authRequired && !me) {
let callback = process.env.NEXT_PUBLIC_URL + req.url
// On client-side routing, the callback is a NextJS URL
// so we need to remove the NextJS stuff.
// Example: /_next/data/development/territory.json
callback = callback.replace(/\/_next\/data\/\w+\//, '/').replace(/\.json$/, '')
const callback = process.env.NEXT_PUBLIC_URL + req.url
return {
redirect: {
destination: `/signup?callbackUrl=${encodeURIComponent(callback)}`
@ -196,7 +111,6 @@ export function getGetServerSideProps (
}
if (error || !data || (notFound && notFound(data, vars, me))) {
error && console.error(error)
res.writeHead(302, {
Location: '/404'
}).end()
@ -210,8 +124,6 @@ export function getGetServerSideProps (
}
}
oneDayReferral(req, { me })
return {
props: {
...props,

api/typeDefs/image.js Normal file
View File

@ -0,0 +1,16 @@
import { gql } from 'graphql-tag'
export default gql`
type ImageFeesInfo {
totalFees: Int!
totalFeesMsats: Int!
imageFee: Int!
imageFeeMsats: Int!
nUnpaid: Int!
bytesUnpaid: Int!
bytes24h: Int!
}
extend type Query {
imageFeesInfo(s3Keys: [Int]!): ImageFeesInfo!
}
`
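A hedged usage sketch for the imageFeesInfo query defined above; the selected fields come from the ImageFeesInfo type, and the s3Keys values are placeholders:

import { gql } from '@apollo/client'

export const IMAGE_FEES_INFO = gql`
  query imageFeesInfo($s3Keys: [Int]!) {
    imageFeesInfo(s3Keys: $s3Keys) {
      totalFees
      imageFee
      nUnpaid
      bytesUnpaid
      bytes24h
    }
  }
`

// usage sketch with an Apollo client: fee info for two hypothetical upload keys
// const { data } = await client.query({ query: IMAGE_FEES_INFO, variables: { s3Keys: [101, 102] } })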

View File

@ -17,8 +17,7 @@ import price from './price'
import admin from './admin'
import blockHeight from './blockHeight'
import chainFee from './chainFee'
import paidAction from './paidAction'
import vault from './vault'
import image from './image'
const common = gql`
type Query {
@ -39,4 +38,4 @@ const common = gql`
`
export default [common, user, item, itemForward, message, wallet, lnurl, notifications, invite,
sub, upload, growth, rewards, referrals, price, admin, blockHeight, chainFee, paidAction, vault]
sub, upload, growth, rewards, referrals, price, admin, blockHeight, chainFee, image]

View File

@ -7,7 +7,7 @@ export default gql`
}
extend type Mutation {
createInvite(id: String, gift: Int!, limit: Int!, description: String): Invite
createInvite(gift: Int!, limit: Int): Invite
revokeInvite(id: ID!): Invite
}
@ -20,6 +20,5 @@ export default gql`
user: User!
revoked: Boolean!
poor: Boolean!
description: String
}
`

View File

@ -8,18 +8,10 @@ export default gql`
dupes(url: String!): [Item!]
related(cursor: String, title: String, id: ID, minMatch: String, limit: Limit): Items
search(q: String, sub: String, cursor: String, what: String, sort: String, when: String, from: String, to: String): Items
auctionPosition(sub: String, id: ID, boost: Int): Int!
boostPosition(sub: String, id: ID, boost: Int): BoostPositions!
auctionPosition(sub: String, id: ID, bid: Int!): Int!
itemRepetition(parentId: ID): Int!
}
type BoostPositions {
home: Boolean!
sub: Boolean!
homeMaxBoost: Int!
subMaxBoost: Int!
}
type TitleUnshorted {
title: String
unshorted: String
@ -28,47 +20,28 @@ export default gql`
type ItemActResult {
id: ID!
sats: Int!
path: String
path: String!
act: String!
}
type ItemAct {
id: ID!
act: String!
invoice: Invoice
}
extend type Mutation {
bookmarkItem(id: ID): Item
pinItem(id: ID): Item
subscribeItem(id: ID): Item
deleteItem(id: ID): Item
upsertLink(
id: ID, sub: String, title: String!, url: String!, text: String, boost: Int, forward: [ItemForwardInput],
hash: String, hmac: String): ItemPaidAction!
upsertDiscussion(
id: ID, sub: String, title: String!, text: String, boost: Int, forward: [ItemForwardInput],
hash: String, hmac: String): ItemPaidAction!
upsertBounty(
id: ID, sub: String, title: String!, text: String, bounty: Int, boost: Int, forward: [ItemForwardInput],
hash: String, hmac: String): ItemPaidAction!
upsertJob(
id: ID, sub: String!, title: String!, company: String!, location: String, remote: Boolean,
text: String!, url: String!, boost: Int, status: String, logo: Int): ItemPaidAction!
upsertPoll(
id: ID, sub: String, title: String!, text: String, options: [String!]!, boost: Int, forward: [ItemForwardInput], pollExpiresAt: Date,
hash: String, hmac: String): ItemPaidAction!
upsertLink(id: ID, sub: String, title: String!, url: String!, text: String, boost: Int, forward: [ItemForwardInput], hash: String, hmac: String): Item!
upsertDiscussion(id: ID, sub: String, title: String!, text: String, boost: Int, forward: [ItemForwardInput], hash: String, hmac: String): Item!
upsertBounty(id: ID, sub: String, title: String!, text: String, bounty: Int, hash: String, hmac: String, boost: Int, forward: [ItemForwardInput]): Item!
upsertJob(id: ID, sub: String!, title: String!, company: String!, location: String, remote: Boolean,
text: String!, url: String!, maxBid: Int!, status: String, logo: Int, hash: String, hmac: String): Item!
upsertPoll(id: ID, sub: String, title: String!, text: String, options: [String!]!, boost: Int, forward: [ItemForwardInput], hash: String, hmac: String, pollExpiresAt: Date): Item!
updateNoteId(id: ID!, noteId: String!): Item!
upsertComment(id: ID, text: String!, parentId: ID, boost: Int, hash: String, hmac: String): ItemPaidAction!
act(id: ID!, sats: Int, act: String, hasSendWallet: Boolean): ItemActPaidAction!
pollVote(id: ID!): PollVotePaidAction!
upsertComment(id:ID, text: String!, parentId: ID, hash: String, hmac: String): Item!
act(id: ID!, sats: Int, act: String, idempotent: Boolean, hash: String, hmac: String): ItemActResult!
pollVote(id: ID!, hash: String, hmac: String): ID!
toggleOutlaw(id: ID!): Item!
}
type PollVoteResult {
id: ID!
}
type PollOption {
id: ID,
option: String!
@ -77,8 +50,6 @@ export default gql`
type Poll {
meVoted: Boolean!
meInvoiceId: Int
meInvoiceActionState: InvoiceActionState
count: Int!
options: [PollOption!]!
}
@ -87,7 +58,6 @@ export default gql`
cursor: String
items: [Item!]!
pins: [Item!]
ad: Item
}
type Comments {
@ -95,14 +65,6 @@ export default gql`
comments: [Item!]!
}
enum InvoiceActionState {
PENDING
PENDING_HELD
HELD
PAID
FAILED
}
type Item {
id: ID!
createdAt: Date!
@ -127,13 +89,10 @@ export default gql`
bountyPaidTo: [Int]
noteId: String
sats: Int!
credits: Int!
commentSats: Int!
commentCredits: Int!
lastCommentAt: Date
upvotes: Int!
meSats: Int!
meCredits: Int!
meDontLikeSats: Int!
meBookmark: Boolean!
meSubscription: Boolean!
@ -148,6 +107,7 @@ export default gql`
path: String
position: Int
prior: Int
maxBid: Int
isJob: Boolean!
pollCost: Int
poll: Poll
@ -165,8 +125,6 @@ export default gql`
imgproxyUrls: JSONObject
rel: String
apiKey: Boolean
invoice: Invoice
cost: Int!
}
input ItemForwardInput {

View File

@ -55,12 +55,6 @@ export default gql`
sortTime: Date!
}
type Invoicification {
id: ID!
invoice: Invoice!
sortTime: Date!
}
type JobChanged {
id: ID!
item: Item!
@ -79,7 +73,6 @@ export default gql`
id: ID!
sortTime: Date!
days: Int
type: String!
}
type Earn {
@ -90,19 +83,6 @@ export default gql`
sources: EarnSources
}
type ReferralSources {
id: ID!
forever: Int!
oneDay: Int!
}
type ReferralReward {
id: ID!
earnedSats: Int!
sortTime: Date!
sources: ReferralSources
}
type Revenue {
id: ID!
earnedSats: Int!
@ -156,8 +136,7 @@ export default gql`
union Notification = Reply | Votification | Mention
| Invitification | Earn | JobChanged | InvoicePaid | WithdrawlPaid | Referral
| Streak | FollowActivity | ForwardedVotification | Revenue | SubStatus
| TerritoryPost | TerritoryTransfer | Reminder | ItemMention | Invoicification
| ReferralReward
| TerritoryPost | TerritoryTransfer | Reminder | ItemMention
type Notifications {
lastChecked: Date

View File

@ -1,61 +0,0 @@
import { gql } from 'graphql-tag'
export default gql`
extend type Query {
paidAction(invoiceId: Int!): PaidAction
}
extend type Mutation {
retryPaidAction(invoiceId: Int!): PaidAction!
}
enum PaymentMethod {
REWARD_SATS
FEE_CREDIT
ZERO_COST
OPTIMISTIC
PESSIMISTIC
}
interface PaidAction {
invoice: Invoice
paymentMethod: PaymentMethod!
}
type ItemPaidAction implements PaidAction {
result: Item
invoice: Invoice
paymentMethod: PaymentMethod!
}
type ItemActPaidAction implements PaidAction {
result: ItemActResult
invoice: Invoice
paymentMethod: PaymentMethod!
}
type PollVotePaidAction implements PaidAction {
result: PollVoteResult
invoice: Invoice
paymentMethod: PaymentMethod!
}
type SubPaidAction implements PaidAction {
result: Sub
invoice: Invoice
paymentMethod: PaymentMethod!
}
type DonatePaidAction implements PaidAction {
result: DonateResult
invoice: Invoice
paymentMethod: PaymentMethod!
}
type BuyCreditsPaidAction implements PaidAction {
result: BuyCreditsResult
invoice: Invoice
paymentMethod: PaymentMethod!
}
`
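A hedged sketch of reading a paid action back by invoice id with the schema above; the inline fragments and the Item/ItemActResult subfields are picked from the types defined in this file and in the item type defs, not from any query this diff itself adds:

import { gql } from '@apollo/client'

export const PAID_ACTION = gql`
  query paidAction($invoiceId: Int!) {
    paidAction(invoiceId: $invoiceId) {
      paymentMethod
      invoice {
        id
        actionState
      }
      ... on ItemPaidAction {
        result {
          id
        }
      }
      ... on ItemActPaidAction {
        result {
          id
          sats
          act
        }
      }
    }
  }
`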

View File

@ -2,6 +2,12 @@ import { gql } from 'graphql-tag'
export default gql`
extend type Query {
referrals(when: String, from: String, to: String): [TimeData!]!
referrals(when: String, from: String, to: String): Referrals!
}
type Referrals {
totalSats: Int!
totalReferrals: Int!
stats: [TimeData!]!
}
`

View File

@ -7,11 +7,7 @@ export default gql`
}
extend type Mutation {
donateToRewards(sats: Int!): DonatePaidAction!
}
type DonateResult {
sats: Int!
donateToRewards(sats: Int!, hash: String, hmac: String): Int!
}
type Rewards {
@ -19,7 +15,6 @@ export default gql`
time: Date!
sources: [NameValue!]!
leaderboard: UsersNullable
ad: Item
}
type Reward {

View File

@ -16,17 +16,17 @@ export default gql`
extend type Mutation {
upsertSub(oldName: String, name: String!, desc: String, baseCost: Int!,
postTypes: [String!]!,
postTypes: [String!]!, allowFreebies: Boolean!,
billingType: String!, billingAutoRenew: Boolean!,
moderated: Boolean!, nsfw: Boolean!): SubPaidAction!
paySub(name: String!): SubPaidAction!
moderated: Boolean!, hash: String, hmac: String, nsfw: Boolean!): Sub
paySub(name: String!, hash: String, hmac: String): Sub
toggleMuteSub(name: String!): Boolean!
toggleSubSubscription(name: String!): Boolean!
transferTerritory(subName: String!, userName: String!): Sub
unarchiveTerritory(name: String!, desc: String, baseCost: Int!,
postTypes: [String!]!,
postTypes: [String!]!, allowFreebies: Boolean!,
billingType: String!, billingAutoRenew: Boolean!,
moderated: Boolean!, nsfw: Boolean!): SubPaidAction!
moderated: Boolean!, hash: String, hmac: String, nsfw: Boolean!): Sub
}
type Sub {

View File

@ -1,26 +1,12 @@
import { gql } from 'graphql-tag'
export default gql`
type UploadFees {
totalFees: Int!
totalFeesMsats: Int!
uploadFees: Int!
uploadFeesMsats: Int!
nUnpaid: Int!
bytesUnpaid: Int!
bytes24h: Int!
extend type Mutation {
getSignedPOST(type: String!, size: Int!, width: Int!, height: Int!, avatar: Boolean): SignedPost!
}
type SignedPost {
url: String!
fields: JSONObject!
}
extend type Query {
uploadFees(s3Keys: [Int]!): UploadFees!
}
extend type Mutation {
getSignedPOST(type: String!, size: Int!, width: Int!, height: Int!, avatar: Boolean): SignedPost!
}
`

View File

@ -4,7 +4,7 @@ export default gql`
extend type Query {
me: User
settings: User
user(id: ID, name: String): User
user(name: String!): User
users: [User!]
nameAvailable(name: String!): Boolean!
topUsers(cursor: String, when: String, from: String, to: String, by: String, limit: Limit): UsersNullable!
@ -33,7 +33,7 @@ export default gql`
setName(name: String!): String
setSettings(settings: SettingsInput!): User
setPhoto(photoId: ID!): Int!
upsertBio(text: String!): ItemPaidAction!
upsertBio(bio: String!): User!
setWalkthrough(tipPopover: Boolean, upvotePopover: Boolean): Boolean
unlinkAuth(authType: String!): AuthMethods!
linkUnverifiedEmail(email: String!): Boolean
@ -43,7 +43,6 @@ export default gql`
toggleMute(id: ID): User
generateApiKey(id: ID!): String
deleteApiKey(id: ID!): User
disableFreebies: Boolean
}
type User {
@ -59,11 +58,6 @@ export default gql`
photoId: Int
since: Int
"""
this is only returned when we sort stackers by value
"""
proportion: Float
optional: UserOptional!
privates: UserPrivates
@ -77,8 +71,7 @@ export default gql`
diagnostics: Boolean!
noReferralLinks: Boolean!
fiatCurrency: String!
satsFilter: Int!
disableFreebies: Boolean
greeterMode: Boolean!
hideBookmarks: Boolean!
hideCowboyHat: Boolean!
hideGithub: Boolean!
@ -89,7 +82,6 @@ export default gql`
hideIsContributor: Boolean!
hideWalletBalance: Boolean!
imgproxyOnly: Boolean!
showImagesAndVideos: Boolean!
nostrCrossposting: Boolean!
nostrPubkey: String
nostrRelays: [String!]
@ -106,16 +98,10 @@ export default gql`
noteItemMentions: Boolean!
nsfwMode: Boolean!
tipDefault: Int!
tipRandomMin: Int
tipRandomMax: Int
turboTipping: Boolean!
zapUndos: Int
wildWestMode: Boolean!
withdrawMaxFeeDefault: Int!
proxyReceive: Boolean
directReceive: Boolean
receiveCreditsBelowSats: Int!
sendCreditsBelowSats: Int!
}
type AuthMethods {
@ -132,7 +118,6 @@ export default gql`
extremely sensitive
"""
sats: Int!
credits: Int!
authMethods: AuthMethods!
lnAddr: String
@ -153,8 +138,6 @@ export default gql`
diagnostics: Boolean!
noReferralLinks: Boolean!
fiatCurrency: String!
satsFilter: Int!
disableFreebies: Boolean
greeterMode: Boolean!
hideBookmarks: Boolean!
hideCowboyHat: Boolean!
@ -166,7 +149,6 @@ export default gql`
hideIsContributor: Boolean!
hideWalletBalance: Boolean!
imgproxyOnly: Boolean!
showImagesAndVideos: Boolean!
nostrCrossposting: Boolean!
nostrPubkey: String
nostrRelays: [String!]
@ -183,22 +165,12 @@ export default gql`
noteItemMentions: Boolean!
nsfwMode: Boolean!
tipDefault: Int!
tipRandom: Boolean!
tipRandomMin: Int
tipRandomMax: Int
turboTipping: Boolean!
zapUndos: Int
wildWestMode: Boolean!
withdrawMaxFeeDefault: Int!
autoWithdrawThreshold: Int
autoWithdrawMaxFeePercent: Float
autoWithdrawMaxFeeTotal: Int
vaultKeyHash: String
walletsUpdatedAt: Date
proxyReceive: Boolean
directReceive: Boolean
receiveCreditsBelowSats: Int!
sendCreditsBelowSats: Int!
}
type UserOptional {
@ -209,15 +181,13 @@ export default gql`
spent(when: String, from: String, to: String): Int
referrals(when: String, from: String, to: String): Int
streak: Int
gunStreak: Int
horseStreak: Int
maxStreak: Int
isContributor: Boolean
githubId: String
twitterId: String
nostrAuthPubkey: String
}
type NameValue {
name: String!
value: Float!

View File

@ -1,29 +0,0 @@
import { gql } from 'graphql-tag'
export default gql`
type VaultEntry {
id: ID!
key: String!
iv: String!
value: String!
createdAt: Date!
updatedAt: Date!
}
input VaultEntryInput {
key: String!
iv: String!
value: String!
walletId: ID
}
extend type Query {
getVaultEntry(key: String!): VaultEntry
getVaultEntries(keysFilter: [String!]): [VaultEntry!]!
}
extend type Mutation {
clearVault: Boolean
updateVaultKey(entries: [VaultEntryInput!]!, hash: String!): Boolean
}
`

View File

@ -1,124 +1,70 @@
import { gql } from 'graphql-tag'
import { fieldToGqlArg, fieldToGqlArgOptional, generateResolverName, generateTypeDefName } from '@/wallets/graphql'
import { isServerField } from '@/wallets/common'
import walletDefs from '@/wallets/server'
function injectTypeDefs (typeDefs) {
const injected = [rawTypeDefs(), mutationTypeDefs()]
return `${typeDefs}\n\n${injected.join('\n\n')}\n`
}
function mutationTypeDefs () {
console.group('injected GraphQL mutations:')
const typeDefs = walletDefs.map((w) => {
let args = 'id: ID, '
const serverFields = w.fields
.filter(isServerField)
.map(fieldToGqlArgOptional)
if (serverFields.length > 0) args += serverFields.join(', ') + ','
args += 'enabled: Boolean, priority: Int, vaultEntries: [VaultEntryInput!], settings: AutowithdrawSettings, validateLightning: Boolean'
const resolverName = generateResolverName(w.walletField)
const typeDef = `${resolverName}(${args}): Wallet`
console.log(typeDef)
return typeDef
})
console.groupEnd()
return `extend type Mutation {\n${typeDefs.join('\n')}\n}`
}
function rawTypeDefs () {
console.group('injected GraphQL type defs:')
const typeDefs = walletDefs.map((w) => {
let args = w.fields
.filter(isServerField)
.map(fieldToGqlArg)
.map(s => ' ' + s)
.join('\n')
if (!args) {
// add a placeholder arg so the type is not empty
args = ' _empty: Boolean'
}
const typeDefName = generateTypeDefName(w.walletType)
const typeDef = `type ${typeDefName} {\n${args}\n}`
console.log(typeDef)
return typeDef
})
let union = 'union WalletDetails = '
union += walletDefs.map((w) => {
const typeDefName = generateTypeDefName(w.walletType)
return typeDefName
}).join(' | ')
console.log(union)
console.groupEnd()
return typeDefs.join('\n\n') + union
}
const typeDefs = `
export default gql`
extend type Query {
invoice(id: ID!): Invoice!
withdrawl(id: ID!): Withdrawl!
direct(id: ID!): Direct!
numBolt11s: Int!
connectAddress: String!
walletHistory(cursor: String, inc: String): History
wallets(includeReceivers: Boolean, includeSenders: Boolean, onlyEnabled: Boolean, prioritySort: String): [Wallet!]!
wallets: [Wallet!]!
wallet(id: ID!): Wallet
walletByType(type: String!): Wallet
walletLogs(type: String, from: String, to: String, cursor: String): WalletLog!
walletLogs: [WalletLog]!
}
extend type Mutation {
createInvoice(amount: Int!): InvoiceOrDirect!
createInvoice(amount: Int!, expireSecs: Int, hodlInvoice: Boolean): Invoice!
createWithdrawl(invoice: String!, maxFee: Int!): Withdrawl!
sendToLnAddr(addr: String!, amount: Int!, maxFee: Int!, comment: String, identifier: Boolean, name: String, email: String): Withdrawl!
cancelInvoice(hash: String!, hmac: String, userCancel: Boolean): Invoice!
dropBolt11(hash: String!): Boolean
cancelInvoice(hash: String!, hmac: String!): Invoice!
dropBolt11(id: ID): Withdrawl
upsertWalletLND(id: ID, socket: String!, macaroon: String!, cert: String, settings: AutowithdrawSettings!): Boolean
upsertWalletCLN(id: ID, socket: String!, rune: String!, cert: String, settings: AutowithdrawSettings!): Boolean
upsertWalletLNAddr(id: ID, address: String!, settings: AutowithdrawSettings!): Boolean
removeWallet(id: ID!): Boolean
deleteWalletLogs(wallet: String): Boolean
setWalletPriority(id: ID!, priority: Int!): Boolean
buyCredits(credits: Int!): BuyCreditsPaidAction!
}
type BuyCreditsResult {
credits: Int!
}
interface InvoiceOrDirect {
id: ID!
}
type Wallet {
id: ID!
createdAt: Date!
updatedAt: Date!
type: String!
enabled: Boolean!
priority: Int!
priority: Boolean!
wallet: WalletDetails!
vaultEntries: [VaultEntry!]!
}
type WalletLNAddr {
address: String!
}
type WalletLND {
socket: String!
macaroon: String!
cert: String
}
type WalletCLN {
socket: String!
rune: String!
cert: String
}
union WalletDetails = WalletLNAddr | WalletLND | WalletCLN
input AutowithdrawSettings {
autoWithdrawThreshold: Int!
autoWithdrawMaxFeePercent: Float!
autoWithdrawMaxFeeTotal: Int!
priority: Boolean!
}
type Invoice implements InvoiceOrDirect {
type Invoice {
id: ID!
createdAt: Date!
hash: String!
bolt11: String!
expiresAt: Date!
cancelled: Boolean!
cancelledAt: Date
confirmedAt: Date
satsReceived: Int
satsRequested: Int!
@ -128,14 +74,6 @@ const typeDefs = `
hmac: String
isHeld: Boolean
confirmedPreimage: String
actionState: String
actionType: String
actionError: String
invoiceForward: Boolean
item: Item
itemAct: ItemAct
forwardedSats: Int
forwardStatus: String
}
type Withdrawl {
@ -150,19 +88,6 @@ const typeDefs = `
status: String
autoWithdraw: Boolean!
preimage: String
forwardedActionType: String
}
type Direct implements InvoiceOrDirect {
id: ID!
createdAt: Date!
bolt11: String
hash: String
sats: Int
preimage: String
nostr: JSONObject
comment: String
lud18Data: JSONObject
}
type Fact {
@ -186,18 +111,10 @@ const typeDefs = `
}
type WalletLog {
entries: [WalletLogEntry!]!
cursor: String
}
type WalletLogEntry {
id: ID!
createdAt: Date!
wallet: ID!
level: String!
message: String!
context: JSONObject
}
`
export default gql`${injectTypeDefs(typeDefs)}`
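A hedged illustration of what the mutationTypeDefs helper above injects for a single wallet definition; the wallet field names and the output of generateResolverName and fieldToGqlArgOptional are not shown in this diff, so the exact string is an assumption:

// for a hypothetical wallet def with walletField 'walletLND' and server fields
// socket, macaroon, cert, mutationTypeDefs() above would emit roughly this string
// (assuming generateResolverName('walletLND') yields 'upsertWalletLND' and
// fieldToGqlArgOptional renders each server field as an optional scalar)
const exampleInjectedMutation =
  'upsertWalletLND(id: ID, socket: String, macaroon: String, cert: String, ' +
  'enabled: Boolean, priority: Int, vaultEntries: [VaultEntryInput!], ' +
  'settings: AutowithdrawSettings, validateLightning: Boolean): Wallet'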

View File

@ -18,7 +18,7 @@ felipebueno,pr,#948,,,,,,100k,felipe@stacker.news,2024-03-26
benalleng,pr,#972,#923,good-first-issue,,,,20k,BenAllenG@stacker.news,2024-03-26
SatsAllDay,issue,#972,#923,good-first-issue,,,,2k,weareallsatoshi@getalby.com,2024-03-26
felipebueno,pr,#974,#884,good-first-issue,,,,20k,felipe@stacker.news,2024-03-26
h0dlr,issue,#974,#884,good-first-issue,,,,2k,HODLR@stacker.news,2024-04-04
h0dlr,issue,#974,#884,good-first-issue,,,,2k,0xe14b9b5981c729a3@ln.tips,2024-04-04
benalleng,pr,#975,,,,,,20k,BenAllenG@stacker.news,2024-03-26
SatsAllDay,security,#980,GHSA-qg4g-m4xq-695p,,,,,100k,weareallsatoshi@getalby.com,2024-03-28
SatsAllDay,code review,#980,GHSA-qg4g-m4xq-695p,medium,,,,25k,weareallsatoshi@getalby.com,2024-03-28
@ -110,59 +110,3 @@ SatsAllDay,pr,#1197,#1192,medium,,,,250k,weareallsatoshi@getalby.com,2024-06-03
tsmith123,pr,#1216,#1213,easy,,1,,90k,stickymarch60@walletofsatoshi.com,2024-06-03
tsmith123,pr,#1231,#1230,good-first-issue,,,,20k,stickymarch60@walletofsatoshi.com,2024-06-13
felipebueno,issue,#1231,#1230,good-first-issue,,,,2k,felipebueno@getalby.com,2024-06-13
tsmith123,pr,#1223,#107,medium,,2,10k bonus for our slowness,210k,stickymarch60@walletofsatoshi.com,2024-06-22
cointastical,issue,#1223,#107,medium,,2,,20k,cointastical@stacker.news,2024-06-22
kravhen,pr,#1215,#253,medium,,2,upgraded to medium,200k,nichro@getalby.com,2024-06-28
dillon-co,pr,#1140,#633,hard,,,requested advance,500k,bolt11,2024-07-02
takitakitanana,issue,,#1257,good-first-issue,,,,2k,takitakitanana@stacker.news,2024-07-11
SatsAllDay,pr,#1263,#1112,medium,,,1,225k,weareallsatoshi@getalby.com,2024-07-31
OneOneSeven117,issue,#1272,#1268,easy,,,,10k,OneOneSeven@stacker.news,2024-07-31
aniskhalfallah,pr,#1264,#1226,good-first-issue,,,,20k,aniskhalfallah@stacker.news,2024-07-31
Gudnessuche,issue,#1264,#1226,good-first-issue,,,,2k,everythingsatoshi@getalby.com,2024-08-10
aniskhalfallah,pr,#1289,,easy,,,,100k,aniskhalfallah@blink.sv,2024-08-12
riccardobl,pr,#1293,#1142,medium,high,,,500k,rblb@getalby.com,2024-08-18
tsmith123,pr,#1306,#832,medium,,,,250k,stickymarch60@walletofsatoshi.com,2024-08-20
riccardobl,pr,#1311,#864,medium,high,,pending unrelated refactor,500k,rblb@getalby.com,2024-08-27
brugeman,issue,#1311,#864,medium,high,,,50k,brugeman@stacker.news,2024-08-27
riccardobl,pr,#1342,#1141,hard,high,,pending unrelated rearchitecture,1m,rblb@getalby.com,2024-09-09
SatsAllDay,issue,#1368,#1331,medium,,,,25k,weareallsatoshi@getalby.com,2024-09-16
benalleng,helpfulness,#1368,#1170,medium,,,did a lot of it in #1175,25k,BenAllenG@stacker.news,2024-09-16
humble-GOAT,issue,#1412,#1407,good-first-issue,,,,2k,humble_GOAT@stacker.news,2024-09-18
felipebueno,issue,#1425,#986,medium,,,,25k,felipebueno@getalby.com,2024-09-26
riccardobl,pr,#1373,#1304,hard,high,,,2m,bolt11,2024-10-01
tsmith123,pr,#1428,#1397,easy,,1,superseded,90k,stickymarch60@walletofsatoshi.com,2024-10-02
toyota-corolla0,pr,#1449,,good-first-issue,,,,20k,toyota_corolla0@stacker.news,2024-10-02
toyota-corolla0,pr,#1455,#1437,good-first-issue,,,,20k,toyota_corolla0@stacker.news,2024-10-02
SouthKoreaLN,issue,#1436,,easy,,,,10k,south_korea_ln@stacker.news,2024-10-02
TonyGiorgio,issue,#1462,,easy,urgent,,,30k,TonyGiorgio@stacker.news,2024-10-07
hkarani,issue,#1369,#1458,good-first-issue,,,,2k,asterisk32@stacker.news,2024-10-21
toyota-corolla0,pr,#1369,#1458,good-first-issue,,,,20k,toyota_corolla0@stacker.news,2024-10-20
Soxasora,pr,#1593,#1569,good-first-issue,,,,20k,soxasora@blink.sv,2024-11-19
Soxasora,pr,#1599,#1258,medium,,,,250k,soxasora@blink.sv,2024-11-19
aegroto,pr,#1585,#1522,easy,high,,1,180k,aegroto@blink.sv,2024-11-19
sig47,issue,#1585,#1522,easy,high,,1,18k,siggy47@stacker.news,2024-11-19
aegroto,pr,#1583,#1572,easy,,,2,80k,aegroto@blink.sv,2024-11-19
Soxasora,pr,#1617,#1616,easy,,,,100k,soxasora@blink.sv,2024-11-20
Soxasora,issue,#1617,#1616,easy,,,,10k,soxasora@blink.sv,2024-11-20
AndreaDiazCorreia,helpfulness,#1605,#1566,good-first-issue,,,tried in pr,2k,andrea@lawallet.ar,2024-11-20
Soxasora,pr,#1653,,medium,,,determined unnecessary,250k,soxasora@blink.sv,2024-12-07
Soxasora,pr,#1659,#1657,easy,,,,100k,soxasora@blink.sv,2024-12-07
sig47,issue,#1659,#1657,easy,,,,10k,siggy47@stacker.news,2024-12-07
Gudnessuche,issue,#1662,#1661,good-first-issue,,,,2k,everythingsatoshi@getalby.com,2024-12-07
aegroto,pr,#1589,#1586,easy,,,,100k,aegroto@blink.sv,2024-12-07
aegroto,issue,#1589,#1586,easy,,,,10k,aegroto@blink.sv,2024-12-07
aegroto,pr,#1619,#914,easy,,,,100k,aegroto@blink.sv,2024-12-07
felipebueno,pr,#1620,,medium,,,1,225k,felipebueno@getalby.com,2024-12-09
Soxasora,pr,#1647,#1645,easy,,,,100k,soxasora@blink.sv,2024-12-07
Soxasora,pr,#1667,#1568,easy,,,,100k,soxasora@blink.sv,2024-12-07
aegroto,pr,#1633,#1471,easy,,,1,90k,aegroto@blink.sv,2024-12-07
Darth-Coin,issue,#1649,#1421,medium,,,,25k,darthcoin@stacker.news,2024-12-07
Soxasora,pr,#1685,,medium,,,,250k,soxasora@blink.sv,2024-12-07
aegroto,pr,#1606,#1242,medium,,,,250k,aegroto@blink.sv,2024-12-07
sfr0xyz,issue,#1696,#1196,good-first-issue,,,,2k,sefiro@getalby.com,2024-12-10
Soxasora,pr,#1794,#756,hard,urgent,,,3m,bolt11,2024-01-09
Soxasora,pr,#1794,#411,hard,high,sort of grouped with #1794,,1m,bolt11,2024-01-09
SatsAllDay,issue,#1749,#411,hard,high,,,200k,weareallsatoshi@getalby.com,???
Soxasora,pr,#1786,#363,easy,,,,100k,soxasora@blink.sv,???
felipebueno,issue,#1786,#363,easy,,,,10k,felipebueno@getalby.com,???
cyphercosmo,pr,#1745,#1648,good-first-issue,,,2,16k,cyphercosmo@getalby.com,???


View File

@ -11,7 +11,7 @@ RUN npm ci
COPY . .
ADD https://deb.debian.org/debian/pool/main/f/fonts-noto-color-emoji/fonts-noto-color-emoji_0~20200916-1_all.deb fonts-noto-color-emoji.deb
ADD http://ftp.de.debian.org/debian/pool/main/f/fonts-noto-color-emoji/fonts-noto-color-emoji_0~20200916-1_all.deb fonts-noto-color-emoji.deb
RUN dpkg -i fonts-noto-color-emoji.deb
CMD [ "node", "index.js" ]
USER pptruser
USER pptruser

View File

@ -4,7 +4,6 @@ import { useAccordionButton } from 'react-bootstrap/AccordionButton'
import ArrowRight from '@/svgs/arrow-right-s-fill.svg'
import ArrowDown from '@/svgs/arrow-down-s-fill.svg'
import { useContext, useEffect, useState } from 'react'
import classNames from 'classnames'
const KEY_ID = '0'
@ -31,7 +30,7 @@ function ContextAwareToggle ({ children, headerColor = 'var(--theme-grey)', even
)
}
export default function AccordianItem ({ header, body, className, headerColor = 'var(--theme-grey)', show }) {
export default function AccordianItem ({ header, body, headerColor = 'var(--theme-grey)', show }) {
const [activeKey, setActiveKey] = useState()
useEffect(() => {
@ -44,8 +43,8 @@ export default function AccordianItem ({ header, body, className, headerColor =
return (
<Accordion defaultActiveKey={activeKey} activeKey={activeKey} onSelect={handleOnSelect}>
<ContextAwareToggle show={show} eventKey={KEY_ID} headerColor={headerColor}><div style={{ color: headerColor }}>{header}</div></ContextAwareToggle>
<Accordion.Collapse eventKey={KEY_ID} className={classNames('mt-2', className)}>
<ContextAwareToggle show={show} eventKey={KEY_ID}><div style={{ color: headerColor }}>{header}</div></ContextAwareToggle>
<Accordion.Collapse eventKey={KEY_ID} className='mt-2'>
<div>{body}</div>
</Accordion.Collapse>
</Accordion>

View File

@ -1,158 +0,0 @@
import { createContext, useCallback, useContext, useEffect, useMemo, useState } from 'react'
import { useRouter } from 'next/router'
import * as cookie from 'cookie'
import { useMe } from '@/components/me'
import { USER_ID, SSR } from '@/lib/constants'
import { USER } from '@/fragments/users'
import { useQuery } from '@apollo/client'
import { UserListRow } from '@/components/user-list'
import Link from 'next/link'
import AddIcon from '@/svgs/add-fill.svg'
const AccountContext = createContext()
const b64Decode = str => Buffer.from(str, 'base64').toString('utf-8')
const b64Encode = obj => Buffer.from(JSON.stringify(obj)).toString('base64')
const maybeSecureCookie = cookie => {
return window.location.protocol === 'https:' ? cookie + '; Secure' : cookie
}
export const AccountProvider = ({ children }) => {
const { me } = useMe()
const [accounts, setAccounts] = useState([])
const [meAnon, setMeAnon] = useState(true)
const updateAccountsFromCookie = useCallback(() => {
try {
const { multi_auth: multiAuthCookie } = cookie.parse(document.cookie)
const accounts = multiAuthCookie
? JSON.parse(b64Decode(multiAuthCookie))
: me ? [{ id: Number(me.id), name: me.name, photoId: me.photoId }] : []
setAccounts(accounts)
// required for backwards compatibility: sync cookie with accounts if no multi auth cookie exists
// this is the case for sessions that existed before we deployed account switching
if (!multiAuthCookie && !!me) {
document.cookie = maybeSecureCookie(`multi_auth=${b64Encode(accounts)}; Path=/`)
}
} catch (err) {
console.error('error parsing cookies:', err)
}
}, [])
useEffect(updateAccountsFromCookie, [])
const addAccount = useCallback(user => {
setAccounts(accounts => [...accounts, user])
}, [])
const removeAccount = useCallback(userId => {
setAccounts(accounts => accounts.filter(({ id }) => id !== userId))
}, [])
const multiAuthSignout = useCallback(async () => {
const { status } = await fetch('/api/signout', { credentials: 'include' })
// if status is 302, this means the server was able to switch us to the next available account
// and the current account was simply removed from the list of available accounts including the corresponding JWT.
const switchSuccess = status === 302
if (switchSuccess) updateAccountsFromCookie()
return switchSuccess
}, [updateAccountsFromCookie])
useEffect(() => {
if (SSR) return
const { 'multi_auth.user-id': multiAuthUserIdCookie } = cookie.parse(document.cookie)
setMeAnon(multiAuthUserIdCookie === 'anonymous')
}, [])
const value = useMemo(
() => ({ accounts, addAccount, removeAccount, meAnon, setMeAnon, multiAuthSignout }),
[accounts, addAccount, removeAccount, meAnon, setMeAnon, multiAuthSignout])
return <AccountContext.Provider value={value}>{children}</AccountContext.Provider>
}
export const useAccounts = () => useContext(AccountContext)
const AccountListRow = ({ account, ...props }) => {
const { meAnon, setMeAnon } = useAccounts()
const { me, refreshMe } = useMe()
const anonRow = account.id === USER_ID.anon
const selected = (meAnon && anonRow) || Number(me?.id) === Number(account.id)
const router = useRouter()
// fetch updated names and photo ids since they might have changed since we were issued the JWTs
const [name, setName] = useState(account.name)
const [photoId, setPhotoId] = useState(account.photoId)
useQuery(USER,
{
variables: { id: account.id },
onCompleted ({ user: { name, photoId } }) {
if (photoId) setPhotoId(photoId)
if (name) setName(name)
}
}
)
const onClick = async (e) => {
// prevent navigation
e.preventDefault()
// update pointer cookie
document.cookie = maybeSecureCookie(`multi_auth.user-id=${anonRow ? 'anonymous' : account.id}; Path=/`)
// update state
if (anonRow) {
// order is important to prevent flashes of no session
setMeAnon(true)
await refreshMe()
} else {
await refreshMe()
// order is important to prevent flashes of inconsistent data in switch account dialog
setMeAnon(account.id === USER_ID.anon)
}
// reload whatever page we're on to avoid any bugs due to missing authorization etc.
router.reload()
}
return (
<div className='d-flex flex-row'>
<UserListRow
user={{ ...account, photoId, name }}
className='d-flex align-items-center me-2'
{...props}
onNymClick={onClick}
selected={selected}
/>
</div>
)
}
export default function SwitchAccountList () {
const { accounts } = useAccounts()
const router = useRouter()
// can't show hat since the streak is not included in the JWT payload
return (
<>
<div className='my-2'>
<div className='d-flex flex-column flex-wrap mt-2 mb-3'>
<h4 className='text-muted'>Accounts</h4>
<AccountListRow account={{ id: USER_ID.anon, name: 'anon' }} showHat={false} />
{
accounts.map((account) => <AccountListRow key={account.id} account={account} showHat={false} />)
}
</div>
<Link
href={{
pathname: '/login',
query: { callbackUrl: window.location.origin + router.asPath, multiAuth: true }
}}
className='text-reset fw-bold'
>
<AddIcon height={20} width={20} /> another account
</Link>
</div>
</>
)
}

View File

@ -15,19 +15,12 @@ export default function ActionTooltip ({ children, notForm, disable, overlayText
<OverlayTrigger
placement={placement || 'bottom'}
overlay={
<Tooltip style={{ position: 'fixed' }}>
<Tooltip>
{overlayText}
</Tooltip>
}
trigger={['hover', 'focus']}
show={formik?.isSubmitting ? false : undefined}
popperConfig={{
modifiers: {
preventOverflow: {
enabled: false
}
}
}}
>
<span>
{children}

View File

@ -1,20 +1,16 @@
import { useState, useEffect, useMemo, useCallback } from 'react'
import { useState, useEffect } from 'react'
import AccordianItem from './accordian-item'
import { Input, InputUserSuggest, VariableInput, Checkbox } from './form'
import InputGroup from 'react-bootstrap/InputGroup'
import { BOOST_MIN, BOOST_MULT, MAX_FORWARDS, SSR } from '@/lib/constants'
import { BOOST_MIN, BOOST_MULT, MAX_FORWARDS } from '@/lib/constants'
import { DEFAULT_CROSSPOSTING_RELAYS } from '@/lib/nostr'
import Info from './info'
import { abbrNum, numWithUnits } from '@/lib/format'
import { numWithUnits } from '@/lib/format'
import styles from './adv-post-form.module.css'
import { useMe } from './me'
import { useFeeButton } from './fee-button'
import { useRouter } from 'next/router'
import { useFormikContext } from 'formik'
import { gql, useQuery } from '@apollo/client'
import useDebounceCallback from './use-debounce-callback'
import { Button } from 'react-bootstrap'
import classNames from 'classnames'
const EMPTY_FORWARD = { nym: '', pct: '' }
@ -30,158 +26,9 @@ const FormStatus = {
ERROR: 'error'
}
export function BoostHelp () {
return (
<ol style={{ lineHeight: 1.25 }}>
<li>Boost ranks items higher based on the amount</li>
<li>The highest boost in a territory over the last 30 days is pinned to the top of the territory</li>
<li>The highest boost across all territories over the last 30 days is pinned to the top of the homepage</li>
<li>The minimum boost is {numWithUnits(BOOST_MIN, { abbreviate: false })}</li>
<li>Each {numWithUnits(BOOST_MULT, { abbreviate: false })} of boost is equivalent to a zap-vote from a maximally trusted stacker
<ul>
<li>e.g. {numWithUnits(BOOST_MULT * 5, { abbreviate: false })} is like five zap-votes from a maximally trusted stacker</li>
</ul>
</li>
<li>The decay of boost "votes" increases at 1.25x the rate of organic votes
<ul>
<li>i.e. boost votes fall out of ranking faster</li>
</ul>
</li>
<li>boost can take a few minutes to show higher ranking in feed</li>
<li>100% of boost goes to the territory founder and top stackers as rewards</li>
</ol>
)
}
export function BoostInput ({ onChange, ...props }) {
const feeButton = useFeeButton()
let merge
if (feeButton) {
({ merge } = feeButton)
}
return (
<Input
label={
<div className='d-flex align-items-center'>boost
<Info>
<BoostHelp />
</Info>
</div>
}
name='boost'
onChange={(_, e) => {
merge?.({
boost: {
term: `+ ${e.target.value}`,
label: 'boost',
op: '+',
modifier: cost => cost + Number(e.target.value)
}
})
onChange && onChange(_, e)
}}
hint={<span className='text-muted'>ranks posts higher temporarily based on the amount</span>}
append={<InputGroup.Text className='text-monospace'>sats</InputGroup.Text>}
{...props}
/>
)
}
const BoostMaxes = ({ subName, homeMax, subMax, boost, updateBoost }) => {
return (
<div className='d-flex flex-row mb-2'>
<Button
className={classNames(styles.boostMax, 'me-2', homeMax + BOOST_MULT <= (boost || 0) && 'invisible')}
size='sm'
onClick={() => updateBoost(homeMax + BOOST_MULT)}
>
{abbrNum(homeMax + BOOST_MULT)} <small>top of homepage</small>
</Button>
{subName &&
<Button
className={classNames(styles.boostMax, subMax + BOOST_MULT <= (boost || 0) && 'invisible')}
size='sm'
onClick={() => updateBoost(subMax + BOOST_MULT)}
>
{abbrNum(subMax + BOOST_MULT)} <small>top of ~{subName}</small>
</Button>}
</div>
)
}
// act means we are adding to existing boost
export function BoostItemInput ({ item, sub, act = false, ...props }) {
// act adds boost to existing boost
const existingBoost = act ? Number(item?.boost || 0) : 0
const [boost, setBoost] = useState(act ? 0 : Number(item?.boost || 0))
const { data, previousData, refetch } = useQuery(gql`
query BoostPosition($sub: String, $id: ID, $boost: Int) {
boostPosition(sub: $sub, id: $id, boost: $boost) {
home
sub
homeMaxBoost
subMaxBoost
}
}`,
{
variables: { sub: item?.subName || sub?.name, boost: existingBoost + boost, id: item?.id },
fetchPolicy: 'cache-and-network',
skip: !!item?.parentId || SSR
})
const getPositionDebounce = useDebounceCallback((...args) => refetch(...args), 1000, [refetch])
const updateBoost = useCallback((boost) => {
const boostToUse = Number(boost || 0)
setBoost(boostToUse)
getPositionDebounce({ sub: item?.subName || sub?.name, boost: Number(existingBoost + boostToUse), id: item?.id })
}, [getPositionDebounce, item?.id, item?.subName, sub?.name, existingBoost])
const dat = data || previousData
const boostMessage = useMemo(() => {
if (!item?.parentId && boost >= BOOST_MULT) {
if (dat?.boostPosition?.home || dat?.boostPosition?.sub || boost > dat?.boostPosition?.homeMaxBoost || boost > dat?.boostPosition?.subMaxBoost) {
const boostPinning = []
if (dat?.boostPosition?.home || boost > dat?.boostPosition?.homeMaxBoost) {
boostPinning.push('homepage')
}
if ((item?.subName || sub?.name) && (dat?.boostPosition?.sub || boost > dat?.boostPosition?.subMaxBoost)) {
boostPinning.push(`~${item?.subName || sub?.name}`)
}
return `pins to the top of ${boostPinning.join(' and ')}`
}
}
return 'ranks posts higher based on the amount'
}, [boost, dat?.boostPosition?.home, dat?.boostPosition?.sub, item?.subName, sub?.name])
return (
<>
<BoostInput
hint={<span className='text-muted'>{boostMessage}</span>}
onChange={(_, e) => {
if (e.target.value >= 0) {
updateBoost(Number(e.target.value))
}
}}
overrideValue={boost}
{...props}
groupClassName='mb-1'
/>
{!item?.parentId &&
<BoostMaxes
subName={item?.subName || sub?.name}
homeMax={(dat?.boostPosition?.homeMaxBoost || 0) - existingBoost}
subMax={(dat?.boostPosition?.subMaxBoost || 0) - existingBoost}
boost={existingBoost + boost}
updateBoost={updateBoost}
/>}
</>
)
}
export default function AdvPostForm ({ children, item, sub, storageKeyPrefix }) {
const { me } = useMe()
export default function AdvPostForm ({ children, item, storageKeyPrefix }) {
const me = useMe()
const { merge } = useFeeButton()
const router = useRouter()
const [itemType, setItemType] = useState()
const formik = useFormikContext()
@ -264,7 +111,39 @@ export default function AdvPostForm ({ children, item, sub, storageKeyPrefix })
body={
<>
{children}
<BoostItemInput item={item} sub={sub} />
<Input
label={
<div className='d-flex align-items-center'>boost
<Info>
<ol className='fw-bold'>
<li>Boost ranks posts higher temporarily based on the amount</li>
<li>The minimum boost is {numWithUnits(BOOST_MIN, { abbreviate: false })}</li>
<li>Each {numWithUnits(BOOST_MULT, { abbreviate: false })} of boost is equivalent to one trusted upvote
<ul>
<li>e.g. {numWithUnits(BOOST_MULT * 5, { abbreviate: false })} is like 5 votes</li>
</ul>
</li>
<li>The decay of boost "votes" increases at 1.25x the rate of organic votes
<ul>
<li>i.e. boost votes fall out of ranking faster</li>
</ul>
</li>
<li>100% of sats from boost are given back to top stackers as rewards</li>
</ol>
</Info>
</div>
}
name='boost'
onChange={(_, e) => merge({
boost: {
term: `+ ${e.target.value}`,
label: 'boost',
modifier: cost => cost + Number(e.target.value)
}
})}
hint={<span className='text-muted'>ranks posts higher temporarily based on the amount</span>}
append={<InputGroup.Text className='text-monospace'>sats</InputGroup.Text>}
/>
<VariableInput
label='forward sats to'
name='forward'
@ -300,7 +179,7 @@ export default function AdvPostForm ({ children, item, sub, storageKeyPrefix })
label={
<div className='d-flex align-items-center'>crosspost to nostr
<Info>
<ul>
<ul className='fw-bold'>
{renderCrosspostDetails(itemType)}
<li>requires NIP-07 extension for signing</li>
<li>we use your NIP-05 relays if set</li>

View File

@ -9,11 +9,4 @@
display: flex;
flex: 0 1 fit-content;
height: fit-content;
}
.boostMax small {
font-weight: 400;
margin-left: 0.25rem;
margin-right: 0.25rem;
opacity: 0.5;
}

View File

@ -1,24 +1,23 @@
import { InputGroup } from 'react-bootstrap'
import { Input } from './form'
import { Checkbox, Input } from './form'
import { useMe } from './me'
import { useEffect, useState } from 'react'
import { isNumber } from '@/lib/format'
import Link from 'next/link'
import { isNumber } from 'mathjs'
function autoWithdrawThreshold ({ me }) {
return isNumber(me?.privates?.autoWithdrawThreshold) ? me?.privates?.autoWithdrawThreshold : 10000
}
export function autowithdrawInitial ({ me }) {
export function autowithdrawInitial ({ me, priority = false }) {
return {
priority,
autoWithdrawThreshold: autoWithdrawThreshold({ me }),
autoWithdrawMaxFeePercent: isNumber(me?.privates?.autoWithdrawMaxFeePercent) ? me?.privates?.autoWithdrawMaxFeePercent : 1,
autoWithdrawMaxFeeTotal: isNumber(me?.privates?.autoWithdrawMaxFeeTotal) ? me?.privates?.autoWithdrawMaxFeeTotal : 1
autoWithdrawMaxFeePercent: isNumber(me?.privates?.autoWithdrawMaxFeePercent) ? me?.privates?.autoWithdrawMaxFeePercent : 1
}
}
export function AutowithdrawSettings () {
const { me } = useMe()
export function AutowithdrawSettings ({ priority }) {
const me = useMe()
const threshold = autoWithdrawThreshold({ me })
const [sendThreshold, setSendThreshold] = useState(Math.max(Math.floor(threshold / 10), 1))
@ -29,6 +28,11 @@ export function AutowithdrawSettings () {
return (
<>
<Checkbox
label='make default autowithdraw method'
id='priority'
name='priority'
/>
<div className='my-4 border border-3 rounded'>
<div className='p-3'>
<h3 className='text-center text-muted'>desired balance</h3>
@ -42,31 +46,12 @@ export function AutowithdrawSettings () {
}}
hint={isNumber(sendThreshold) ? `will attempt auto-withdraw when your balance exceeds ${sendThreshold * 11} sats` : undefined}
append={<InputGroup.Text className='text-monospace'>sats</InputGroup.Text>}
required
/>
<h3 className='text-center text-muted pt-3'>network fees</h3>
<h6 className='text-center pb-3'>
we'll use whichever setting is higher during{' '}
<Link
target='_blank'
href='https://docs.lightning.engineering/the-lightning-network/pathfinding'
rel='noreferrer'
>pathfinding
</Link>
</h6>
<Input
label='max fee rate'
label='max fee'
name='autoWithdrawMaxFeePercent'
hint='max fee as percent of withdrawal amount'
append={<InputGroup.Text>%</InputGroup.Text>}
required
/>
<Input
label='max fee total'
name='autoWithdrawMaxFeeTotal'
hint='max fee for any withdrawal amount'
append={<InputGroup.Text className='text-monospace'>sats</InputGroup.Text>}
required
/>
</div>
</div>

View File

@ -5,7 +5,7 @@ import BootstrapForm from 'react-bootstrap/Form'
import EditImage from '@/svgs/image-edit-fill.svg'
import Moon from '@/svgs/moon-fill.svg'
import { useShowModal } from './modal'
import { FileUpload } from './file-upload'
import { ImageUpload } from './image'
export default function Avatar ({ onSuccess }) {
const [uploading, setUploading] = useState()
@ -49,8 +49,7 @@ export default function Avatar ({ onSuccess }) {
}
return (
<FileUpload
allow='image/*'
<ImageUpload
avatar
onError={e => {
console.log(e)
@ -85,6 +84,6 @@ export default function Avatar ({ onSuccess }) {
? <Moon className='fill-white spin' />
: <EditImage className='fill-white' />}
</div>
</FileUpload>
</ImageUpload>
)
}

View File

@ -1,87 +0,0 @@
import OverlayTrigger from 'react-bootstrap/OverlayTrigger'
import Tooltip from 'react-bootstrap/Tooltip'
import CowboyHatIcon from '@/svgs/cowboy.svg'
import AnonIcon from '@/svgs/spy-fill.svg'
import { numWithUnits } from '@/lib/format'
import { USER_ID } from '@/lib/constants'
import GunIcon from '@/svgs/revolver.svg'
import HorseIcon from '@/svgs/horse.svg'
import classNames from 'classnames'
const BADGES = [
{
icon: CowboyHatIcon,
streakName: 'streak'
},
{
icon: HorseIcon,
streakName: 'horseStreak'
},
{
icon: GunIcon,
streakName: 'gunStreak',
sizeDelta: 2
}
]
export default function Badges ({ user, badge, className = 'ms-1', badgeClassName, spacingClassName = 'ms-1', height = 16, width = 16 }) {
if (!user || Number(user.id) === USER_ID.ad) return null
if (Number(user.id) === USER_ID.anon) {
return (
<BadgeTooltip overlayText='anonymous'>
<span className={className}><AnonIcon className={`${badgeClassName} align-middle`} height={height} width={width} /></span>
</BadgeTooltip>
)
}
return (
<span className={className}>
{BADGES.map(({ icon, streakName, sizeDelta }, i) => (
<SNBadge
key={streakName}
user={user}
badge={badge}
streakName={streakName}
badgeClassName={classNames(badgeClassName, i > 0 && spacingClassName)}
IconForBadge={icon}
height={height}
width={width}
sizeDelta={sizeDelta}
/>
))}
</span>
)
}
function SNBadge ({ user, badge, streakName, badgeClassName, IconForBadge, height = 16, width = 16, sizeDelta = 0 }) {
const streak = user.optional[streakName]
if (streak === null) {
return null
}
return (
<BadgeTooltip
overlayText={streak
? `${numWithUnits(streak, { abbreviate: false, unitSingular: 'day', unitPlural: 'days' })}`
: 'new'}
>
<span><IconForBadge className={badgeClassName} height={height + sizeDelta} width={width + sizeDelta} /></span>
</BadgeTooltip>
)
}
export function BadgeTooltip ({ children, overlayText, placement }) {
return (
<OverlayTrigger
placement={placement || 'bottom'}
overlay={
<Tooltip style={{ position: 'fixed' }}>
{overlayText}
</Tooltip>
}
trigger={['hover', 'focus']}
>
{children}
</OverlayTrigger>
)
}

View File

@ -5,10 +5,11 @@ import { useMe } from '@/components/me'
import { useMutation } from '@apollo/client'
import { WELCOME_BANNER_MUTATION } from '@/fragments/users'
import { useToast } from '@/components/toast'
import Link from 'next/link'
import { BALANCE_LIMIT_MSATS } from '@/lib/constants'
import { msatsToSats, numWithUnits } from '@/lib/format'
export function WelcomeBanner ({ Banner }) {
const { me } = useMe()
const me = useMe()
const toaster = useToast()
const [hidden, setHidden] = useState(true)
const handleClose = async () => {
@ -69,7 +70,7 @@ export function WelcomeBanner ({ Banner }) {
}
export function MadnessBanner ({ handleClose }) {
const { me } = useMe()
const me = useMe()
return (
<Alert className={styles.banner} key='info' variant='info' onClose={handleClose} dismissible>
<Alert.Heading>
@ -100,17 +101,39 @@ export function MadnessBanner ({ handleClose }) {
)
}
export function WalletSecurityBanner ({ isActive }) {
export function WalletLimitBanner () {
const me = useMe()
const limitReached = me?.privates?.sats >= msatsToSats(BALANCE_LIMIT_MSATS)
if (!me || !limitReached) return
return (
<Alert className={styles.banner} key='info' variant='warning'>
<Alert.Heading>
Gunslingin' Safety Tips
Your wallet is over the current limit ({numWithUnits(msatsToSats(BALANCE_LIMIT_MSATS))})
</Alert.Heading>
<p className='mb-3 line-height-md'>
Listen up, pardner! Put a limit on yer spendin' wallet or hook up a wallet that's only for Stacker News. It'll keep them varmints from cleanin' out yer whole goldmine if they rustle up yer wallet.
<p className='mb-1'>
Deposits to your wallet from <strong>outside</strong> of SN are blocked.
</p>
<p className='line-height-md'>
Your spending wallet's credentials are never sent to our servers in plain text. To sync across devices, <Alert.Link as={Link} href='/settings/passphrase'>enable device sync in your settings</Alert.Link>.
<p>
Please spend or withdraw sats to restore full wallet functionality.
</p>
</Alert>
)
}
export function WalletSecurityBanner () {
return (
<Alert className={styles.banner} key='info' variant='warning'>
<Alert.Heading>
Wallet Security Disclaimer
</Alert.Heading>
<p className='mb-1'>
Your wallet's credentials are stored in the browser and never go to the server.<br />
However, you should definitely <strong>set a budget in your wallet</strong>.
</p>
<p>
Also, for the time being, you will have to reenter your credentials on other devices.
</p>
</Alert>
)

View File

@ -17,8 +17,7 @@ export default function BookmarkDropdownItem ({ item: { id, meBookmark } }) {
id: `Item:${id}`,
fields: {
meBookmark: () => bookmarkItem.meBookmark
},
optimistic: true
}
})
}
}

View File

@ -1,65 +0,0 @@
import { useShowModal } from './modal'
import { useToast } from './toast'
import ItemAct from './item-act'
import AccordianItem from './accordian-item'
import { useMemo } from 'react'
import getColor from '@/lib/rainbow'
import BoostIcon from '@/svgs/arrow-up-double-line.svg'
import styles from './upvote.module.css'
import { BoostHelp } from './adv-post-form'
import { BOOST_MULT } from '@/lib/constants'
import classNames from 'classnames'
export default function Boost ({ item, className, ...props }) {
const { boost } = item
const [color, nextColor] = useMemo(() => [getColor(boost), getColor(boost + BOOST_MULT)], [boost])
const style = useMemo(() => ({
'--hover-fill': nextColor,
'--hover-filter': `drop-shadow(0 0 6px ${nextColor}90)`,
'--fill': color,
'--filter': `drop-shadow(0 0 6px ${color}90)`
}), [color, nextColor])
return (
<Booster
item={item} As={oprops =>
<div className='upvoteParent'>
<div
className={classNames(styles.upvoteWrapper, item.deletedAt && styles.noSelfTips)}
>
<BoostIcon
{...props}
{...oprops}
style={style}
width={26}
height={26}
className={classNames(styles.boost, className, boost && styles.boosted)}
/>
</div>
</div>}
/>
)
}
function Booster ({ item, As, children }) {
const toaster = useToast()
const showModal = useShowModal()
return (
<As
onClick={async () => {
try {
showModal(onClose =>
<ItemAct onClose={onClose} item={item} act='BOOST' step={BOOST_MULT}>
<AccordianItem header='what is boost?' body={<BoostHelp />} />
</ItemAct>)
} catch (error) {
toaster.danger('failed to boost item')
}
}}
>
{children}
</As>
)
}
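Booster above owns the behavior (opening the ItemAct modal) but hands its click handler to whatever element the caller supplies via the As render prop. The same inversion-of-control pattern in isolation, with illustrative names:

// a generic "act" wrapper: it owns the behavior, the caller owns the markup
function Actor ({ onAct, As, children }) {
  return (
    <As onClick={async () => { await onAct() }}>
      {children}
    </As>
  )
}

// usage: the caller decides what actually renders
// <Actor onAct={boost} As={props => <button {...props}>boost</button>} />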

View File

@ -1,16 +1,18 @@
import { Form, Input, MarkdownInput } from '@/components/form'
import { useApolloClient } from '@apollo/client'
import { useRouter } from 'next/router'
import { gql, useApolloClient, useMutation } from '@apollo/client'
import Countdown from './countdown'
import AdvPostForm, { AdvPostInitial } from './adv-post-form'
import InputGroup from 'react-bootstrap/InputGroup'
import useCrossposter from './use-crossposter'
import { bountySchema } from '@/lib/validate'
import { SubSelectInitial } from './sub-select'
import { normalizeForwards } from '@/lib/form'
import { useCallback } from 'react'
import { normalizeForwards, toastUpsertSuccessMessages } from '@/lib/form'
import { MAX_TITLE_LENGTH } from '@/lib/constants'
import { useMe } from './me'
import { useToast } from './toast'
import { ItemButtonBar } from './post'
import useItemSubmit from './use-item-submit'
import { UPSERT_BOUNTY } from '@/fragments/paidAction'
export function BountyForm ({
item,
@ -22,11 +24,75 @@ export function BountyForm ({
handleSubmit,
children
}) {
const router = useRouter()
const client = useApolloClient()
const { me } = useMe()
const me = useMe()
const toaster = useToast()
const crossposter = useCrossposter()
const schema = bountySchema({ client, me, existingBoost: item?.boost })
const [upsertBounty] = useMutation(
gql`
mutation upsertBounty(
$sub: String
$id: ID
$title: String!
$bounty: Int!
$text: String
$boost: Int
$forward: [ItemForwardInput]
$hash: String
$hmac: String
) {
upsertBounty(
sub: $sub
id: $id
title: $title
bounty: $bounty
text: $text
boost: $boost
forward: $forward
hash: $hash
hmac: $hmac
) {
id
deleteScheduledAt
reminderScheduledAt
}
}
`
)
const onSubmit = useItemSubmit(UPSERT_BOUNTY, { item, sub })
const onSubmit = useCallback(
async ({ boost, bounty, crosspost, ...values }) => {
const { data, error } = await upsertBounty({
variables: {
sub: item?.subName || sub?.name,
id: item?.id,
boost: boost ? Number(boost) : undefined,
bounty: bounty ? Number(bounty) : undefined,
...values,
forward: normalizeForwards(values.forward)
}
})
if (error) {
throw new Error({ message: error.toString() })
}
const bountyId = data?.upsertBounty?.id
if (crosspost && bountyId) {
await crossposter(bountyId)
}
if (item) {
await router.push(`/items/${item.id}`)
} else {
const prefix = sub?.name ? `/~${sub.name}` : ''
await router.push(prefix + '/recent')
}
toastUpsertSuccessMessages(toaster, data, 'upsertBounty', !!item, values.text)
}, [upsertBounty, router]
)
const storageKeyPrefix = item ? undefined : 'bounty'
@ -42,6 +108,7 @@ export function BountyForm ({
}}
schema={schema}
requireSession
prepaid
onSubmit={
handleSubmit ||
onSubmit
@ -80,7 +147,7 @@ export function BountyForm ({
: null
}
/>
<AdvPostForm storageKeyPrefix={storageKeyPrefix} item={item} sub={sub} />
<AdvPostForm storageKeyPrefix={storageKeyPrefix} item={item} />
<ItemButtonBar itemId={item?.id} canDelete={false} />
</Form>
)

View File

@ -4,6 +4,6 @@ import Button from 'react-bootstrap/Button'
export default function CancelButton ({ onClick }) {
const router = useRouter()
return (
<Button className='me-3 text-muted nav-link fw-bold' variant='link' onClick={onClick || (() => router.back())}>cancel</Button>
<Button className='me-4 text-muted nav-link fw-bold' variant='link' onClick={onClick || (() => router.back())}>cancel</Button>
)
}

View File

@ -1,133 +0,0 @@
import { createContext, useCallback, useContext, useEffect, useMemo, useRef, useState } from 'react'
import classNames from 'classnames'
import ArrowLeft from '@/svgs/arrow-left-line.svg'
import ArrowRight from '@/svgs/arrow-right-line.svg'
import styles from './carousel.module.css'
import { useShowModal } from './modal'
import { Dropdown } from 'react-bootstrap'
function useSwiping ({ moveLeft, moveRight }) {
const [touchStartX, setTouchStartX] = useState(null)
const onTouchStart = useCallback((e) => {
if (e.touches.length === 1) {
setTouchStartX(e.touches[0].clientX)
}
}, [])
const onTouchEnd = useCallback((e) => {
if (touchStartX !== null) {
const touchEndX = e.changedTouches[0].clientX
const diff = touchEndX - touchStartX
if (diff > 50) {
moveLeft()
} else if (diff < -50) {
moveRight()
}
setTouchStartX(null)
}
}, [touchStartX, moveLeft, moveRight])
useEffect(() => {
document.addEventListener('touchstart', onTouchStart)
document.addEventListener('touchend', onTouchEnd)
return () => {
document.removeEventListener('touchstart', onTouchStart)
document.removeEventListener('touchend', onTouchEnd)
}
}, [onTouchStart, onTouchEnd])
}
function useArrowKeys ({ moveLeft, moveRight }) {
const onKeyDown = useCallback((e) => {
if (e.key === 'ArrowLeft') {
moveLeft()
} else if (e.key === 'ArrowRight') {
moveRight()
}
}, [moveLeft, moveRight])
useEffect(() => {
document.addEventListener('keydown', onKeyDown)
return () => document.removeEventListener('keydown', onKeyDown)
}, [onKeyDown])
}
export default function Carousel ({ close, mediaArr, src, originalSrc, setOptions }) {
const [index, setIndex] = useState(mediaArr.findIndex(([key]) => key === src))
const [currentSrc, canGoLeft, canGoRight] = useMemo(() => {
return [mediaArr[index][0], index > 0, index < mediaArr.length - 1]
}, [mediaArr, index])
const moveLeft = useCallback(() => {
setIndex(i => Math.max(0, i - 1))
}, [setIndex])
const moveRight = useCallback(() => {
setIndex(i => Math.min(mediaArr.length - 1, i + 1))
}, [setIndex, mediaArr.length])
useSwiping({ moveLeft, moveRight })
useArrowKeys({ moveLeft, moveRight })
return (
<div className={styles.fullScreenContainer} onClick={close}>
<img className={styles.fullScreen} src={currentSrc} />
<div className={styles.fullScreenNavContainer}>
<div
className={classNames(styles.fullScreenNav, !canGoLeft && 'invisible', styles.left)}
onClick={(e) => {
e.stopPropagation()
moveLeft()
}}
>
<ArrowLeft width={34} height={34} />
</div>
<div
className={classNames(styles.fullScreenNav, !canGoRight && 'invisible', styles.right)}
onClick={(e) => {
e.stopPropagation()
moveRight()
}}
>
<ArrowRight width={34} height={34} />
</div>
</div>
</div>
)
}
const CarouselContext = createContext()
function CarouselOverflow ({ originalSrc, rel }) {
return <Dropdown.Item href={originalSrc} rel={rel} target='_blank'>view original</Dropdown.Item>
}
export function CarouselProvider ({ children }) {
const media = useRef(new Map())
const showModal = useShowModal()
const showCarousel = useCallback(({ src }) => {
showModal((close, setOptions) => {
return <Carousel close={close} mediaArr={Array.from(media.current.entries())} src={src} setOptions={setOptions} />
}, {
fullScreen: true,
overflow: <CarouselOverflow {...media.current.get(src)} />
})
}, [showModal, media.current])
const addMedia = useCallback(({ src, originalSrc, rel }) => {
media.current.set(src, { src, originalSrc, rel })
}, [media.current])
const removeMedia = useCallback((src) => {
media.current.delete(src)
}, [media.current])
const value = useMemo(() => ({ showCarousel, addMedia, removeMedia }), [showCarousel, addMedia, removeMedia])
return <CarouselContext.Provider value={value}>{children}</CarouselContext.Provider>
}
export function useCarousel () {
return useContext(CarouselContext)
}
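The removed carousel's useSwiping hook is a plain touch-delta check: remember the x coordinate on touchstart, compare it on touchend, and move if the delta exceeds a threshold. The same technique as a standalone hook, assuming the original's 50px threshold (hook and handler names are illustrative):

import { useCallback, useEffect, useState } from 'react'

// detect horizontal swipes on the document and call the matching handler
export function useSwipe ({ onLeft, onRight, threshold = 50 }) {
  const [startX, setStartX] = useState(null)

  const onTouchStart = useCallback(e => {
    if (e.touches.length === 1) setStartX(e.touches[0].clientX)
  }, [])

  const onTouchEnd = useCallback(e => {
    if (startX === null) return
    const diff = e.changedTouches[0].clientX - startX
    if (diff > threshold) onLeft() // finger moved right -> show previous
    else if (diff < -threshold) onRight() // finger moved left -> show next
    setStartX(null)
  }, [startX, onLeft, onRight, threshold])

  useEffect(() => {
    document.addEventListener('touchstart', onTouchStart)
    document.addEventListener('touchend', onTouchEnd)
    return () => {
      document.removeEventListener('touchstart', onTouchStart)
      document.removeEventListener('touchend', onTouchEnd)
    }
  }, [onTouchStart, onTouchEnd])
}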

View File

@ -1,63 +0,0 @@
div.fullScreenNavContainer {
height: 100%;
width: 100%;
position: absolute;
top: 0;
left: 0;
pointer-events: none;
flex-direction: row;
display: flex;
justify-content: space-between;
align-items: center;
}
img.fullScreen {
cursor: zoom-out !important;
max-height: 100%;
max-width: 100vw;
min-width: 0;
min-height: 0;
align-self: center;
justify-self: center;
user-select: none;
}
.fullScreenContainer {
--bs-columns: 1;
--bs-rows: 1;
display: grid;
width: 100%;
height: 100%;
}
div.fullScreenNav:hover > svg {
background-color: rgba(0, 0, 0, .5);
}
div.fullScreenNav {
cursor: pointer;
pointer-events: auto;
width: 72px;
height: 72px;
display: flex;
align-items: center;
}
div.fullScreenNav.left {
justify-content: flex-start;
}
div.fullScreenNav.right {
justify-content: flex-end;
}
div.fullScreenNav > svg {
border-radius: 50%;
backdrop-filter: blur(4px);
background-color: rgba(0, 0, 0, 0.7);
fill: white;
max-height: 34px;
max-width: 34px;
padding: 0.35rem;
margin: .75rem;
}

View File

@ -156,7 +156,7 @@ export function WhenComposedChart ({
data,
lineNames = [], lineAxis = 'left',
areaNames = [], areaAxis = 'left',
barNames = [], barAxis = 'left', barStackId
barNames = [], barAxis = 'left'
}) {
const router = useRouter()
if (!data || data.length === 0) {
@ -189,7 +189,7 @@ export function WhenComposedChart ({
<Tooltip labelFormatter={labelFormatter(when, from, to)} contentStyle={{ color: 'var(--bs-body-color)', backgroundColor: 'var(--bs-body-bg)' }} />
<Legend />
{barNames?.map((v, i) =>
<Bar yAxisId={barAxis} key={v} stackId={barStackId} type='monotone' dataKey={v} name={v} stroke={getColor(i)} fill={getColor(i)} />)}
<Bar yAxisId={barAxis} key={v} type='monotone' dataKey={v} name={v} stroke={getColor(i)} fill={getColor(i)} />)}
{areaNames?.map((v, i) =>
<Area yAxisId={areaAxis} key={v} type='monotone' dataKey={v} name={v} stackId='1' stroke={getColor(barNames.length + i)} fill={getColor(barNames.length + i)} />)}
{lineNames?.map((v, i) =>

View File

@ -0,0 +1,187 @@
import { useApolloClient } from '@apollo/client'
import { useMe } from './me'
import { createContext, useCallback, useContext, useEffect, useMemo, useState } from 'react'
import { datePivot, timeSince } from '@/lib/time'
import { USER_ID, JIT_INVOICE_TIMEOUT_MS } from '@/lib/constants'
import { HAS_NOTIFICATIONS } from '@/fragments/notifications'
import Item, { ItemSkeleton } from './item'
import { RootProvider } from './root'
import Comment from './comment'
const toType = t => ({ ERROR: `${t}_ERROR`, PENDING: `${t}_PENDING` })
export const Types = {
Zap: toType('ZAP'),
Reply: toType('REPLY'),
Bounty: toType('BOUNTY'),
PollVote: toType('POLL_VOTE')
}
const ClientNotificationContext = createContext({ notifications: [], notify: () => {}, unnotify: () => {} })
export function ClientNotificationProvider ({ children }) {
const [notifications, setNotifications] = useState([])
const client = useApolloClient()
const me = useMe()
// anons don't have access to /notifications
// but we'll store client notifications anyway for simplicity's sake
const storageKey = `client-notifications:${me?.id || USER_ID.anon}`
useEffect(() => {
const loaded = loadNotifications(storageKey, client)
setNotifications(loaded)
}, [storageKey])
const notify = useCallback((type, props) => {
const id = crypto.randomUUID()
const sortTime = new Date()
const expiresAt = +datePivot(sortTime, { milliseconds: JIT_INVOICE_TIMEOUT_MS })
const isError = type.endsWith('ERROR')
const n = { __typename: type, id, sortTime: +sortTime, pending: !isError, expiresAt, ...props }
setNotifications(notifications => [n, ...notifications])
saveNotification(storageKey, n)
if (isError) {
client?.writeQuery({
query: HAS_NOTIFICATIONS,
data: {
hasNewNotes: true
}
})
}
return id
}, [storageKey, client])
const unnotify = useCallback((id) => {
setNotifications(notifications => notifications.filter(n => n.id !== id))
removeNotification(storageKey, id)
}, [storageKey])
const value = useMemo(() => ({ notifications, notify, unnotify }), [notifications, notify, unnotify])
return (
<ClientNotificationContext.Provider value={value}>
{children}
</ClientNotificationContext.Provider>
)
}
export function ClientNotifyProvider ({ children, additionalProps }) {
const ctx = useClientNotifications()
const notify = useCallback((type, props) => {
return ctx.notify(type, { ...props, ...additionalProps })
}, [ctx.notify])
const value = useMemo(() => ({ ...ctx, notify }), [ctx, notify])
return (
<ClientNotificationContext.Provider value={value}>
{children}
</ClientNotificationContext.Provider>
)
}
export function useClientNotifications () {
return useContext(ClientNotificationContext)
}
function ClientNotification ({ n, message }) {
if (n.pending) {
const expired = n.expiresAt < +new Date()
if (!expired) return null
n.reason = 'invoice expired'
}
// remove payment hashes due to x-overflow
n.reason = n.reason.replace(/(: )?[a-f0-9]{64}/, '')
return (
<div className='ms-2'>
<small className='fw-bold text-danger'>
{n.reason ? `${message}: ${n.reason}` : message}
<small className='text-muted ms-1 fw-normal' suppressHydrationWarning>{timeSince(new Date(n.sortTime))}</small>
</small>
{!n.item
? <ItemSkeleton />
: n.item.title
? <Item item={n.item} />
: (
<div className='pb-2'>
<RootProvider root={n.item.root}>
<Comment item={n.item} noReply includeParent noComments clickToContext />
</RootProvider>
</div>
)}
</div>
)
}
export function ClientZap ({ n }) {
const message = `failed to zap ${n.sats || n.amount} sats`
return <ClientNotification n={n} message={message} />
}
export function ClientReply ({ n }) {
const message = 'failed to submit reply'
return <ClientNotification n={n} message={message} />
}
export function ClientBounty ({ n }) {
const message = 'failed to pay bounty'
return <ClientNotification n={n} message={message} />
}
export function ClientPollVote ({ n }) {
const message = 'failed to submit poll vote'
return <ClientNotification n={n} message={message} />
}
function loadNotifications (storageKey, client) {
const stored = window.localStorage.getItem(storageKey)
if (!stored) return []
const filtered = JSON.parse(stored).filter(({ sortTime }) => {
// only keep notifications younger than 24 hours
return new Date(sortTime) >= datePivot(new Date(), { hours: -24 })
})
let hasNewNotes = false
const mapped = filtered.map((n) => {
if (!n.pending) return n
// anything that is still pending when we load the page was interrupted
// so we immediately mark it as failed instead of waiting until it expired
const type = n.__typename.replace('PENDING', 'ERROR')
const reason = 'payment was interrupted'
hasNewNotes = true
return { ...n, __typename: type, pending: false, reason }
})
if (hasNewNotes) {
client?.writeQuery({
query: HAS_NOTIFICATIONS,
data: {
hasNewNotes: true
}
})
}
window.localStorage.setItem(storageKey, JSON.stringify(mapped))
return filtered
}
function saveNotification (storageKey, n) {
const stored = window.localStorage.getItem(storageKey)
if (stored) {
window.localStorage.setItem(storageKey, JSON.stringify([...JSON.parse(stored), n]))
} else {
window.localStorage.setItem(storageKey, JSON.stringify([n]))
}
}
function removeNotification (storageKey, id) {
const stored = window.localStorage.getItem(storageKey)
if (stored) {
window.localStorage.setItem(storageKey, JSON.stringify(JSON.parse(stored).filter(n => n.id !== id)))
}
}
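A usage sketch of the context added above, based only on the API it exposes (notify returns an id, unnotify removes it, and Types.Zap.PENDING/ERROR come from toType). The import path and the component around the calls are illustrative, not part of this diff:

import { useClientNotifications, Types } from '@/components/client-notifications' // assumed path

function ZapButton ({ item, sats, zap }) {
  const { notify, unnotify } = useClientNotifications()

  const handleZap = async () => {
    // record a pending client-side notification before firing the payment
    const nid = notify(Types.Zap.PENDING, { sats, item })
    try {
      await zap({ sats })
      unnotify(nid) // success: drop the pending entry
    } catch (err) {
      unnotify(nid)
      notify(Types.Zap.ERROR, { sats, item, reason: err.message })
    }
  }

  return <button onClick={handleZap}>zap {sats} sats</button>
}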

View File

@ -1,32 +1,35 @@
import { Form, MarkdownInput } from '@/components/form'
import { gql, useMutation } from '@apollo/client'
import styles from './reply.module.css'
import { commentSchema } from '@/lib/validate'
import { useToast } from './toast'
import { toastUpsertSuccessMessages } from '@/lib/form'
import { FeeButtonProvider } from './fee-button'
import { ItemButtonBar } from './post'
import { UPDATE_COMMENT } from '@/fragments/paidAction'
import useItemSubmit from './use-item-submit'
export default function CommentEdit ({ comment, editThreshold, onSuccess, onCancel }) {
const onSubmit = useItemSubmit(UPDATE_COMMENT, {
paidMutationOptions: {
update (cache, { data: { upsertComment: { result } } }) {
if (!result) return
const toaster = useToast()
const [upsertComment] = useMutation(
gql`
mutation upsertComment($id: ID! $text: String!) {
upsertComment(id: $id, text: $text) {
text
deleteScheduledAt
reminderScheduledAt
}
}`, {
update (cache, { data: { upsertComment } }) {
cache.modify({
id: `Item:${comment.id}`,
fields: {
text () {
return result.text
return upsertComment.text
}
},
optimistic: true
}
})
}
},
item: comment,
navigateOnSubmit: false,
onSuccessfulSubmit: onSuccess
})
}
)
return (
<div className={`${styles.reply} mt-2`}>
@ -36,7 +39,16 @@ export default function CommentEdit ({ comment, editThreshold, onSuccess, onCanc
text: comment.text
}}
schema={commentSchema}
onSubmit={onSubmit}
onSubmit={async (values, { resetForm }) => {
const { data, error } = await upsertComment({ variables: { ...values, id: comment.id } })
if (error) {
throw new Error({ message: error.toString() })
}
toastUpsertSuccessMessages(toaster, data, 'upsertComment', true, values.text)
if (onSuccess) {
onSuccess()
}
}}
>
<MarkdownInput
name='text'

View File

@ -25,8 +25,7 @@ import Skull from '@/svgs/death-skull.svg'
import { commentSubTreeRootId } from '@/lib/item'
import Pin from '@/svgs/pushpin-fill.svg'
import LinkToContext from './link-to-context'
import Boost from './boost-button'
import { gql, useApolloClient } from '@apollo/client'
import { ItemContextProvider, useItemContext } from './item'
function Parent ({ item, rootText }) {
const root = useRoot()
@ -79,7 +78,7 @@ export function CommentFlat ({ item, rank, siblingComments, ...props }) {
</div>)
: <div />}
<LinkToContext
className='py-2'
className={siblingComments ? 'py-3' : 'py-2'}
onClick={e => {
router.push(href, as)
}}
@ -95,14 +94,13 @@ export function CommentFlat ({ item, rank, siblingComments, ...props }) {
export default function Comment ({
item, children, replyOpen, includeParent, topLevel,
rootText, noComments, noReply, truncate, depth, pin, setDisableRetry, disableRetry
rootText, noComments, noReply, truncate, depth, pin
}) {
const [edit, setEdit] = useState()
const { me } = useMe()
const isHiddenFreebie = me?.privates?.satsFilter !== 0 && !item.mine && item.freebie && !item.freedFreebie
const isDeletedChildless = item?.ncomments === 0 && item?.deletedAt
const me = useMe()
const isHiddenFreebie = !me?.privates?.wildWestMode && !me?.privates?.greeterMode && !item.mine && item.freebie && !item.freedFreebie
const [collapse, setCollapse] = useState(
(isHiddenFreebie || isDeletedChildless || item?.user?.meMute || (item?.outlawed && !me?.privates?.wildWestMode)) && !includeParent
(isHiddenFreebie || item?.user?.meMute || (item?.outlawed && !me?.privates?.wildWestMode)) && !includeParent
? 'yep'
: 'nope')
const ref = useRef(null)
@ -110,28 +108,16 @@ export default function Comment ({
const root = useRoot()
const { ref: textRef, quote, quoteReply, cancelQuote } = useQuoteReply({ text: item.text })
const { cache } = useApolloClient()
useEffect(() => {
const comment = cache.readFragment({
id: `Item:${router.query.commentId}`,
fragment: gql`
fragment CommentPath on Item {
path
}`
})
if (comment?.path.split('.').includes(item.id)) {
window.localStorage.setItem(`commentCollapse:${item.id}`, 'nope')
}
setCollapse(window.localStorage.getItem(`commentCollapse:${item.id}`) || collapse)
if (Number(router.query.commentId) === Number(item.id)) {
// HACK wait for other comments to uncollapse if they're collapsed
// HACK wait for other comments to collapse if they're collapsed
setTimeout(() => {
ref.current.scrollIntoView({ behavior: 'instant', block: 'start' })
ref.current.classList.add('outline-it')
}, 100)
}
}, [item.id, cache, router.query.commentId])
}, [item.id, router.query.commentId])
useEffect(() => {
if (router.query.commentsViewedAt &&
@ -151,122 +137,129 @@ export default function Comment ({
const bountyPaid = root.bountyPaidTo?.includes(Number(item.id))
return (
<div
ref={ref} className={includeParent ? '' : `${styles.comment} ${collapse === 'yep' ? styles.collapsed : ''}`}
onMouseEnter={() => ref.current.classList.add('outline-new-comment-unset')}
onTouchStart={() => ref.current.classList.add('outline-new-comment-unset')}
>
<div className={`${itemStyles.item} ${styles.item}`}>
{item.outlawed && !me?.privates?.wildWestMode
? <Skull className={styles.dontLike} width={24} height={24} />
: item.mine
? <Boost item={item} className={styles.upvote} />
: item.meDontLikeSats > item.meSats
? <DownZap width={24} height={24} className={styles.dontLike} item={item} />
: pin ? <Pin width={22} height={22} className={styles.pin} /> : <UpVote item={item} className={styles.upvote} collapsed={collapse === 'yep'} />}
<div className={`${itemStyles.hunk} ${styles.hunk}`}>
<div className='d-flex align-items-center'>
{item.user?.meMute && !includeParent && collapse === 'yep'
? (
<span
className={`${itemStyles.other} ${styles.other} pointer`} onClick={() => {
setCollapse('nope')
window.localStorage.setItem(`commentCollapse:${item.id}`, 'nope')
}}
>reply from someone you muted
</span>)
: <ItemInfo
item={item}
commentsText='replies'
commentTextSingular='reply'
className={`${itemStyles.other} ${styles.other}`}
embellishUser={op && <><span> </span><Badge bg={op === 'fwd' ? 'secondary' : 'boost'} className={`${styles.op} bg-opacity-75`}>{op}</Badge></>}
onQuoteReply={quoteReply}
nested={!includeParent}
setDisableRetry={setDisableRetry}
disableRetry={disableRetry}
extraInfo={
<>
{includeParent && <Parent item={item} rootText={rootText} />}
{bountyPaid &&
<ActionTooltip notForm overlayText={`${numWithUnits(root.bounty)} paid`}>
<BountyIcon className={`${styles.bountyIcon} ${'fill-success vertical-align-middle'}`} height={16} width={16} />
</ActionTooltip>}
</>
<ItemContextProvider>
<div
ref={ref} className={includeParent ? '' : `${styles.comment} ${collapse === 'yep' ? styles.collapsed : ''}`}
onMouseEnter={() => ref.current.classList.add('outline-new-comment-unset')}
onTouchStart={() => ref.current.classList.add('outline-new-comment-unset')}
>
<div className={`${itemStyles.item} ${styles.item}`}>
<ZapIcon item={item} pin={pin} me={me} />
<div className={`${itemStyles.hunk} ${styles.hunk}`}>
<div className='d-flex align-items-center'>
{item.user?.meMute && !includeParent && collapse === 'yep'
? (
<span
className={`${itemStyles.other} ${styles.other} pointer`} onClick={() => {
setCollapse('nope')
window.localStorage.setItem(`commentCollapse:${item.id}`, 'nope')
}}
>reply from someone you muted
</span>)
: <ItemInfo
item={item}
commentsText='replies'
commentTextSingular='reply'
className={`${itemStyles.other} ${styles.other}`}
embellishUser={op && <><span> </span><Badge bg={op === 'fwd' ? 'secondary' : 'boost'} className={`${styles.op} bg-opacity-75`}>{op}</Badge></>}
onQuoteReply={quoteReply}
nested={!includeParent}
extraInfo={
<>
{includeParent && <Parent item={item} rootText={rootText} />}
{bountyPaid &&
<ActionTooltip notForm overlayText={`${numWithUnits(root.bounty)} paid`}>
<BountyIcon className={`${styles.bountyIcon} ${'fill-success vertical-align-middle'}`} height={16} width={16} />
</ActionTooltip>}
</>
}
edit={edit}
toggleEdit={e => { setEdit(!edit) }}
editText={edit ? 'cancel' : 'edit'}
/>}
onEdit={e => { setEdit(!edit) }}
editText={edit ? 'cancel' : 'edit'}
/>}
{!includeParent && (collapse === 'yep'
? <Eye
className={styles.collapser} height={10} width={10} onClick={() => {
setCollapse('nope')
window.localStorage.setItem(`commentCollapse:${item.id}`, 'nope')
{!includeParent && (collapse === 'yep'
? <Eye
className={styles.collapser} height={10} width={10} onClick={() => {
setCollapse('nope')
window.localStorage.setItem(`commentCollapse:${item.id}`, 'nope')
}}
/>
: <EyeClose
className={styles.collapser} height={10} width={10} onClick={() => {
setCollapse('yep')
window.localStorage.setItem(`commentCollapse:${item.id}`, 'yep')
}}
/>)}
{topLevel && (
<span className='d-flex ms-auto align-items-center'>
<Share title={item?.title} path={`/items/${item?.id}`} />
</span>
)}
</div>
{edit
? (
<CommentEdit
comment={item}
onSuccess={() => {
setEdit(!edit)
}}
/>
: <EyeClose
className={styles.collapser} height={10} width={10} onClick={() => {
setCollapse('yep')
window.localStorage.setItem(`commentCollapse:${item.id}`, 'yep')
}}
/>)}
{topLevel && (
<span className='d-flex ms-auto align-items-center'>
<Share title={item?.title} path={`/items/${item?.id}`} />
</span>
)}
)
: (
<div className={styles.text} ref={textRef}>
{item.searchText
? <SearchText text={item.searchText} />
: (
<Text itemId={item.id} topLevel={topLevel} rel={item.rel ?? UNKNOWN_LINK_REL} outlawed={item.outlawed} imgproxyUrls={item.imgproxyUrls}>
{item.outlawed && !me?.privates?.wildWestMode
? '*stackers have outlawed this. turn on wild west mode in your [settings](/settings) to see outlawed content.*'
: truncate ? truncateString(item.text) : item.text}
</Text>)}
</div>
)}
</div>
{edit
? (
<CommentEdit
comment={item}
onSuccess={() => {
setEdit(!edit)
}}
/>
)
: (
<div className={styles.text} ref={textRef}>
{item.searchText
? <SearchText text={item.searchText} />
: (
<Text itemId={item.id} topLevel={topLevel} rel={item.rel ?? UNKNOWN_LINK_REL} outlawed={item.outlawed} imgproxyUrls={item.imgproxyUrls}>
{item.outlawed && !me?.privates?.wildWestMode
? '*stackers have outlawed this. turn on wild west mode in your [settings](/settings) to see outlawed content.*'
: truncate ? truncateString(item.text) : item.text}
</Text>)}
</div>
)}
</div>
</div>
{collapse !== 'yep' && (
bottomedOut
? <div className={styles.children}><ReplyOnAnotherPage item={item} /></div>
: (
<div className={styles.children}>
{item.outlawed && !me?.privates?.wildWestMode
? <div className='py-2' />
: !noReply &&
<Reply depth={depth + 1} item={item} replyOpen={replyOpen} onCancelQuote={cancelQuote} onQuoteReply={quoteReply} quote={quote}>
{root.bounty && !bountyPaid && <PayBounty item={item} />}
</Reply>}
{children}
<div className={styles.comments}>
{item.comments && !noComments
? item.comments.map((item) => (
<Comment depth={depth + 1} key={item.id} item={item} />
))
: null}
{collapse !== 'yep' && (
bottomedOut
? <div className={styles.children}><ReplyOnAnotherPage item={item} /></div>
: (
<div className={styles.children}>
{item.outlawed && !me?.privates?.wildWestMode
? <div className='py-2' />
: !noReply &&
<Reply depth={depth + 1} item={item} replyOpen={replyOpen} onCancelQuote={cancelQuote} onQuoteReply={quoteReply} quote={quote}>
{root.bounty && !bountyPaid && <PayBounty item={item} />}
</Reply>}
{children}
<div className={styles.comments}>
{item.comments && !noComments
? item.comments.map((item) => (
<Comment depth={depth + 1} key={item.id} item={item} />
))
: null}
</div>
</div>
</div>
)
)}
</div>
)
)}
</div>
</ItemContextProvider>
)
}
function ZapIcon ({ item, pin }) {
const me = useMe()
const { pendingSats, pendingDownSats } = useItemContext()
const meSats = item.meSats + pendingSats
const downSats = item.meDontLikeSats + pendingDownSats
return item.outlawed && !me?.privates?.wildWestMode
? <Skull className={styles.dontLike} width={24} height={24} />
: downSats > meSats
? <DownZap width={24} height={24} className={styles.dontLike} item={item} />
: pin ? <Pin width={22} height={22} className={styles.pin} /> : <UpVote item={item} className={styles.upvote} />
}
export function CommentSkeleton ({ skeletonChildren }) {
return (
<div className={styles.comment}>
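The collapse state in Comment above is persisted per comment under a commentCollapse:<id> localStorage key and rehydrated on mount. A compact sketch of that persistence pattern on its own (the hook name is illustrative):

import { useEffect, useState } from 'react'

// remember whether a comment is collapsed across page loads
export function useCollapsedComment (itemId, initial = 'nope') {
  const [collapse, setCollapse] = useState(initial)

  // hydrate from localStorage after mount to avoid SSR mismatches
  useEffect(() => {
    const stored = window.localStorage.getItem(`commentCollapse:${itemId}`)
    if (stored) setCollapse(stored)
  }, [itemId])

  const toggle = () => {
    const next = collapse === 'yep' ? 'nope' : 'yep'
    setCollapse(next)
    window.localStorage.setItem(`commentCollapse:${itemId}`, next)
  }

  return [collapse, toggle]
}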

View File

@ -1,12 +1,13 @@
.item {
align-items: flex-start;
margin-bottom: 0 !important;
padding-top: 0 !important;
padding-bottom: 0 !important;
}
.upvote {
margin-top: 9px;
padding-right: 0.2rem;
margin-left: .25rem;
margin-right: 0rem;
}
.pin {
@ -64,7 +65,7 @@
.children {
margin-top: 0;
margin-left: 27px;
margin-left: 30px;
}
.comments {
@ -108,7 +109,7 @@
.comment {
border-radius: .4rem;
padding-top: .5rem;
padding-left: .7rem;
padding-left: .2rem;
background-color: var(--theme-commentBg);
}
@ -128,11 +129,7 @@
}
.comment:not(:first-of-type) {
padding-top: 0;
padding-top: .25rem;
border-top-left-radius: 0;
border-top-right-radius: 0;
}
.comment:has(.comment) + .comment{
padding-top: .5rem;
}

View File

@ -6,10 +6,12 @@ import Navbar from 'react-bootstrap/Navbar'
import { numWithUnits } from '@/lib/format'
import { defaultCommentSort } from '@/lib/item'
import { useRouter } from 'next/router'
import { ItemContextProvider, useItemContext } from './item'
export function CommentsHeader ({ handleSort, pinned, bio, parentCreatedAt, commentSats }) {
const router = useRouter()
const sort = router.query.sort || defaultCommentSort(pinned, bio, parentCreatedAt)
const { pendingCommentSats } = useItemContext()
const getHandleClick = sort => {
return () => {
@ -24,7 +26,7 @@ export function CommentsHeader ({ handleSort, pinned, bio, parentCreatedAt, comm
activeKey={sort}
>
<Nav.Item className='text-muted'>
{numWithUnits(commentSats)}
{numWithUnits(commentSats + pendingCommentSats)}
</Nav.Item>
<div className='ms-auto d-flex'>
<Nav.Item>
@ -66,7 +68,7 @@ export default function Comments ({ parentId, pinned, bio, parentCreatedAt, comm
const pins = comments?.filter(({ position }) => !!position).sort((a, b) => a.position - b.position)
return (
<>
<ItemContextProvider>
{comments?.length > 0
? <CommentsHeader
commentSats={commentSats} parentCreatedAt={parentCreatedAt}
@ -91,7 +93,7 @@ export default function Comments ({ parentId, pinned, bio, parentCreatedAt, comm
{comments.filter(({ position }) => !position).map(item => (
<Comment depth={1} key={item.id} item={item} {...props} />
))}
</>
</ItemContextProvider>
)
}
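ItemContextProvider's implementation isn't shown in this part of the diff; judging from the fields consumed here (pendingSats, pendingDownSats, pendingCommentSats and their setters), it is plausibly a plain React context wrapping useState. A speculative sketch under that assumption, not the actual SN code:

import { createContext, useContext, useMemo, useState } from 'react'

const ItemContext = createContext({})

export function ItemContextProvider ({ children }) {
  const [pendingSats, setPendingSats] = useState(0)
  const [pendingDownSats, setPendingDownSats] = useState(0)
  const [pendingCommentSats, setPendingCommentSats] = useState(0)

  const value = useMemo(() => ({
    pendingSats,
    setPendingSats,
    pendingDownSats,
    setPendingDownSats,
    pendingCommentSats,
    setPendingCommentSats
  }), [pendingSats, pendingDownSats, pendingCommentSats])

  return <ItemContext.Provider value={value}>{children}</ItemContext.Provider>
}

export function useItemContext () {
  return useContext(ItemContext)
}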

View File

@ -39,13 +39,7 @@ export function CompactLongCountdown (props) {
<>
{Number(props.formatted.days) > 0
? ` ${props.formatted.days}d ${props.formatted.hours}h ${props.formatted.minutes}m ${props.formatted.seconds}s`
: Number(props.formatted.hours) > 0
? ` ${props.formatted.hours}:${props.formatted.minutes}:${props.formatted.seconds}`
: Number(props.formatted.minutes) > 0
? ` ${props.formatted.minutes}:${props.formatted.seconds}`
: Number(props.formatted.seconds) >= 0
? ` ${props.formatted.seconds}s`
: ' '}
: ` ${props.formatted.hours}:${props.formatted.minutes}:${props.formatted.seconds}`}
</>
)
}}
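The removed branches collapse the countdown to the largest non-zero unit. The same formatting logic as a plain function, mirroring the removed code (the function name is illustrative):

// format { days, hours, minutes, seconds } the way the removed branches did
function formatCompact ({ days, hours, minutes, seconds }) {
  if (Number(days) > 0) return ` ${days}d ${hours}h ${minutes}m ${seconds}s`
  if (Number(hours) > 0) return ` ${hours}:${minutes}:${seconds}`
  if (Number(minutes) > 0) return ` ${minutes}:${seconds}`
  if (Number(seconds) >= 0) return ` ${seconds}s`
  return ' '
}

// formatCompact({ days: '0', hours: '0', minutes: '02', seconds: '09' }) === ' 02:09'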

View File

@ -48,15 +48,6 @@ const listenForThemeChange = (onChange) => {
onChange({ user: true, dark })
}
}
const root = window.document.documentElement
const observer = new window.MutationObserver(() => {
const theme = root.getAttribute('data-bs-theme')
onChange(dark => ({ ...dark, dark: theme === 'dark' }))
})
observer.observe(root, { attributes: true, attributeFilter: ['data-bs-theme'] })
return () => observer.disconnect()
}
export default function useDarkMode () {

View File

@ -30,8 +30,7 @@ export default function Delete ({ itemId, children, onDelete, type = 'post' }) {
url: () => deleteItem.url,
pollCost: () => deleteItem.pollCost,
deletedAt: () => deleteItem.deletedAt
},
optimistic: true
}
})
}
}
@ -46,7 +45,7 @@ export default function Delete ({ itemId, children, onDelete, type = 'post' }) {
onConfirm={async () => {
const { error } = await deleteItem({ variables: { id: itemId } })
if (error) {
throw error
throw new Error({ message: error.toString() })
}
if (onDelete) {
onDelete()

View File

@ -1,6 +1,6 @@
import { Form, Input, MarkdownInput } from '@/components/form'
import { useRouter } from 'next/router'
import { gql, useApolloClient, useLazyQuery } from '@apollo/client'
import { gql, useApolloClient, useLazyQuery, useMutation } from '@apollo/client'
import Countdown from './countdown'
import AdvPostForm, { AdvPostInitial } from './adv-post-form'
import { ITEM_FIELDS } from '@/fragments/items'
@ -8,12 +8,13 @@ import AccordianItem from './accordian-item'
import Item from './item'
import { discussionSchema } from '@/lib/validate'
import { SubSelectInitial } from './sub-select'
import { normalizeForwards } from '@/lib/form'
import { useCallback } from 'react'
import { normalizeForwards, toastUpsertSuccessMessages } from '@/lib/form'
import { MAX_TITLE_LENGTH } from '@/lib/constants'
import { useMe } from './me'
import useCrossposter from './use-crossposter'
import { useToast } from './toast'
import { ItemButtonBar } from './post'
import { UPSERT_DISCUSSION } from '@/fragments/paidAction'
import useItemSubmit from './use-item-submit'
export function DiscussionForm ({
item, sub, editThreshold, titleLabel = 'title',
@ -22,12 +23,56 @@ export function DiscussionForm ({
}) {
const router = useRouter()
const client = useApolloClient()
const { me } = useMe()
const onSubmit = useItemSubmit(UPSERT_DISCUSSION, { item, sub })
const me = useMe()
const schema = discussionSchema({ client, me, existingBoost: item?.boost })
// if Web Share Target API was used
const shareTitle = router.query.title
const shareText = router.query.text ? decodeURI(router.query.text) : undefined
const crossposter = useCrossposter()
const toaster = useToast()
const [upsertDiscussion] = useMutation(
gql`
mutation upsertDiscussion($sub: String, $id: ID, $title: String!, $text: String, $boost: Int, $forward: [ItemForwardInput], $hash: String, $hmac: String) {
upsertDiscussion(sub: $sub, id: $id, title: $title, text: $text, boost: $boost, forward: $forward, hash: $hash, hmac: $hmac) {
id
deleteScheduledAt
reminderScheduledAt
}
}`
)
const onSubmit = useCallback(
async ({ boost, crosspost, ...values }) => {
const { data, error } = await upsertDiscussion({
variables: {
sub: item?.subName || sub?.name,
id: item?.id,
boost: boost ? Number(boost) : undefined,
...values,
forward: normalizeForwards(values.forward)
}
})
if (error) {
throw new Error({ message: error.toString() })
}
const discussionId = data?.upsertDiscussion?.id
if (crosspost && discussionId) {
await crossposter(discussionId)
}
if (item) {
await router.push(`/items/${item.id}`)
} else {
const prefix = sub?.name ? `/~${sub.name}` : ''
await router.push(prefix + '/recent')
}
toastUpsertSuccessMessages(toaster, data, 'upsertDiscussion', !!item, values.text)
}, [upsertDiscussion, router, item, sub, crossposter]
)
const [getRelated, { data: relatedData }] = useLazyQuery(gql`
${ITEM_FIELDS}
@ -51,6 +96,7 @@ export function DiscussionForm ({
...SubSelectInitial({ sub: item?.subName || sub?.name })
}}
schema={schema}
prepaid
onSubmit={handleSubmit || onSubmit}
storageKeyPrefix={storageKeyPrefix}
>
@ -79,7 +125,7 @@ export function DiscussionForm ({
? <div className='text-muted fw-bold'><Countdown date={editThreshold} /></div>
: null}
/>
<AdvPostForm storageKeyPrefix={storageKeyPrefix} item={item} sub={sub} />
<AdvPostForm storageKeyPrefix={storageKeyPrefix} item={item} />
<ItemButtonBar itemId={item?.id} />
{!item &&
<div className={`mt-3 ${related.length > 0 ? '' : 'invisible'}`}>

View File

@ -4,31 +4,43 @@ import { useToast } from './toast'
import ItemAct from './item-act'
import AccordianItem from './accordian-item'
import Flag from '@/svgs/flag-fill.svg'
import { useMemo } from 'react'
import { useCallback, useMemo } from 'react'
import getColor from '@/lib/rainbow'
import { gql, useMutation } from '@apollo/client'
import { useItemContext } from './item'
import { useLightning } from './lightning'
export function DownZap ({ item, ...props }) {
const { pendingDownSats } = useItemContext()
const { meDontLikeSats } = item
const style = useMemo(() => (meDontLikeSats
const downSats = meDontLikeSats + pendingDownSats
const style = useMemo(() => (downSats
? {
fill: getColor(meDontLikeSats),
filter: `drop-shadow(0 0 6px ${getColor(meDontLikeSats)}90)`
fill: getColor(downSats),
filter: `drop-shadow(0 0 6px ${getColor(downSats)}90)`
}
: undefined), [meDontLikeSats])
: undefined), [downSats])
return (
<DownZapper
item={item} As={({ ...oprops }) =>
<div className='upvoteParent'>
<Flag {...props} {...oprops} style={style} />
</div>}
/>
<DownZapper item={item} As={({ ...oprops }) => <Flag {...props} {...oprops} style={style} />} />
)
}
function DownZapper ({ item, As, children }) {
const toaster = useToast()
const showModal = useShowModal()
const strike = useLightning()
const { setPendingDownSats } = useItemContext()
const optimisticUpdate = useCallback((sats, { onClose } = {}) => {
setPendingDownSats(pendingSats => pendingSats + sats)
strike()
onClose?.()
return () => {
setPendingDownSats(pendingSats => pendingSats - sats)
}
}, [])
return (
<As
@ -36,7 +48,7 @@ function DownZapper ({ item, As, children }) {
try {
showModal(onClose =>
<ItemAct
onClose={onClose} item={item} act='DONT_LIKE_THIS'
onClose={onClose} item={item} down optimisticUpdate={optimisticUpdate}
>
<AccordianItem
header='what is a downzap?' body={
@ -84,8 +96,7 @@ export function OutlawDropdownItem ({ item }) {
id: `Item:${item.id}`,
fields: {
outlawed: () => true
},
optimistic: true
}
})
}
}

View File

@ -1,215 +0,0 @@
import { memo, useEffect, useRef, useState } from 'react'
import classNames from 'classnames'
import useDarkMode from './dark-mode'
import styles from './text.module.css'
import { Button } from 'react-bootstrap'
import { TwitterTweetEmbed } from 'react-twitter-embed'
import YouTube from 'react-youtube'
function TweetSkeleton ({ className }) {
return (
<div className={classNames(styles.tweetsSkeleton, className)}>
<div className={styles.tweetSkeleton}>
<div className={`${styles.img} clouds`} />
<div className={styles.content1}>
<div className={`${styles.line} clouds`} />
<div className={`${styles.line} clouds`} />
<div className={`${styles.line} clouds`} />
</div>
</div>
</div>
)
}
export const NostrEmbed = memo(function NostrEmbed ({ src, className, topLevel, darkMode, id }) {
const [show, setShow] = useState(false)
const iframeRef = useRef(null)
useEffect(() => {
if (!iframeRef.current) return
const setHeightFromIframe = (e) => {
if (e.origin !== 'https://njump.me' || !e?.data?.height || e.source !== iframeRef.current.contentWindow) return
iframeRef.current.height = `${e.data.height}px`
}
window?.addEventListener('message', setHeightFromIframe)
const handleIframeLoad = () => {
iframeRef.current.contentWindow.postMessage({ setDarkMode: darkMode }, '*')
}
if (iframeRef.current.complete) {
handleIframeLoad()
} else {
iframeRef.current.addEventListener('load', handleIframeLoad)
}
// https://github.com/vercel/next.js/issues/39451
iframeRef.current.src = `https://njump.me/${id}?embed=yes`
return () => {
window?.removeEventListener('message', setHeightFromIframe)
iframeRef.current?.removeEventListener('load', handleIframeLoad)
}
}, [iframeRef.current, darkMode])
return (
<div className={classNames(styles.nostrContainer, !show && styles.twitterContained, className)}>
<iframe
ref={iframeRef}
width={topLevel ? '550px' : '350px'}
style={{ maxWidth: '100%' }}
height={iframeRef.current?.height || (topLevel ? '200px' : '150px')}
frameBorder='0'
sandbox='allow-scripts allow-same-origin allow-popups allow-popups-to-escape-sandbox'
allow=''
/>
{!show &&
<Button size='md' variant='info' className={styles.twitterShowFull} onClick={() => setShow(true)}>
<div>show full note</div>
<small className='fw-normal fst-italic'>or other stuff</small>
</Button>}
</div>
)
})
const SpotifyEmbed = function SpotifyEmbed ({ src, className }) {
const iframeRef = useRef(null)
// https://open.spotify.com/track/1KFxcj3MZrpBGiGA8ZWriv?si=f024c3aa52294aa1
// Remove any additional path segments
const url = new URL(src)
url.pathname = url.pathname.replace(/\/intl-\w+\//, '/')
useEffect(() => {
if (!iframeRef.current) return
const id = url.pathname.split('/').pop()
// https://developer.spotify.com/documentation/embeds/tutorials/using-the-iframe-api
window.onSpotifyIframeApiReady = (IFrameAPI) => {
const options = {
uri: `spotify:episode:${id}`
}
const callback = (EmbedController) => {}
IFrameAPI.createController(iframeRef.current, options, callback)
}
return () => { window.onSpotifyIframeApiReady = null }
}, [iframeRef.current, url.pathname])
return (
<div className={classNames(styles.spotifyWrapper, className)}>
<iframe
ref={iframeRef}
title='Spotify Web Player'
src={`https://open.spotify.com/embed${url.pathname}`}
width='100%'
height='152'
allowFullScreen
frameBorder='0'
allow='encrypted-media; clipboard-write;'
style={{ borderRadius: '12px' }}
sandbox='allow-scripts allow-popups allow-popups-to-escape-sandbox allow-same-origin allow-presentation'
/>
</div>
)
}
const Embed = memo(function Embed ({ src, provider, id, meta, className, topLevel, onError }) {
const [darkMode] = useDarkMode()
const [overflowing, setOverflowing] = useState(true)
const [show, setShow] = useState(false)
// This Twitter embed could use similar logic to the video embeds below
if (provider === 'twitter') {
return (
<div className={classNames(styles.twitterContainer, !show && styles.twitterContained, className)}>
<TwitterTweetEmbed
tweetId={id}
options={{ theme: darkMode ? 'dark' : 'light', width: topLevel ? '550px' : '350px' }}
key={darkMode ? '1' : '2'}
placeholder={<TweetSkeleton className={className} />}
onLoad={() => setOverflowing(true)}
/>
{overflowing && !show &&
<Button size='lg' variant='info' className={styles.twitterShowFull} onClick={() => setShow(true)}>
show full tweet
</Button>}
</div>
)
}
if (provider === 'nostr') {
return (
<NostrEmbed src={src} className={className} topLevel={topLevel} id={id} darkMode={darkMode} />
)
}
if (provider === 'wavlake') {
return (
<div className={classNames(styles.wavlakeWrapper, className)}>
<iframe
src={`https://embed.wavlake.com/track/${id}`} width='100%' height='380' frameBorder='0'
allow='encrypted-media'
sandbox='allow-scripts allow-popups allow-popups-to-escape-sandbox allow-forms allow-same-origin'
/>
</div>
)
}
if (provider === 'spotify') {
return (
<SpotifyEmbed src={src} className={className} />
)
}
if (provider === 'youtube') {
return (
<div className={classNames(styles.videoWrapper, className)}>
<YouTube
videoId={id} className={styles.videoContainer} opts={{
playerVars: {
start: meta?.start || 0
}
}}
/>
</div>
)
}
if (provider === 'rumble') {
return (
<div className={classNames(styles.videoWrapper, className)}>
<div className={styles.videoContainer}>
<iframe
title='Rumble Video'
allowFullScreen
src={meta?.href}
sandbox='allow-scripts'
/>
</div>
</div>
)
}
if (provider === 'peertube') {
return (
<div className={classNames(styles.videoWrapper, className)}>
<div className={styles.videoContainer}>
<iframe
title='PeerTube Video'
allowFullScreen
src={meta?.href}
sandbox='allow-scripts'
/>
</div>
</div>
)
}
return null
})
export default Embed
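The removed NostrEmbed sizes its iframe from postMessage events sent by njump.me, checking both the origin and the source frame before trusting the height. The core of that handshake as a standalone sketch (origin and message shape taken from the removed code):

// resize an embed iframe from height messages posted by its content
function syncIframeHeight (iframe, allowedOrigin = 'https://njump.me') {
  const onMessage = e => {
    // only trust messages from the embedded origin and from this exact frame
    if (e.origin !== allowedOrigin || !e.data?.height || e.source !== iframe.contentWindow) return
    iframe.height = `${e.data.height}px`
  }
  window.addEventListener('message', onMessage)
  return () => window.removeEventListener('message', onMessage)
}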

Some files were not shown because too many files have changed in this diff.