Compare commits
39 commits: master ... 489-accoun

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 7b7a5ebeaa |  |
|  | 499ba408ea |  |
|  | 25d5bb53bb |  |
|  | 1845db2da3 |  |
|  | 3e6748f94b |  |
|  | 74304e2f75 |  |
|  | 09b468660b |  |
|  | 703d7e3cb5 |  |
|  | 175124bcc5 |  |
|  | 9aeeae99d1 |  |
|  | d0ea0d3917 |  |
|  | c1e7b93688 |  |
|  | 47dc05d285 |  |
|  | 0e04daebfb |  |
|  | 051ba7397a |  |
|  | 31dec0eee6 |  |
|  | dbf1fbb6b8 |  |
|  | 31cfd22480 |  |
|  | 7f1275a2e6 |  |
|  | c480fd450b |  |
|  | c235ca3fe7 |  |
|  | 58a1ee929b |  |
|  | 64e176ce1d |  |
|  | 3bb24652b3 |  |
|  | 260c97624b |  |
|  | 111d5ea610 |  |
|  | fca2ec3e15 |  |
|  | 9baf5063e1 |  |
|  | 0caa517cd5 |  |
|  | aae0d3765a |  |
|  | 4c5e470caf |  |
|  | 369328da15 |  |
|  | c610f20773 |  |
|  | d0a47fd304 |  |
|  | 01fe205350 |  |
|  | 78e50be2f7 |  |
|  | b8243f8a87 |  |
|  | 24168f556e |  |
|  | 470e0dfc7a |  |
20  .babelrc  Normal file

@@ -0,0 +1,20 @@
{
  "presets": [
    "next/babel"
  ],
  "plugins": [
    [
      "inline-react-svg",
      {
        "svgo": {
          "plugins": [
            {
              "name": "removeViewBox",
              "active": false
            }
          ]
        }
      }
    ]
  ]
}
5  .ebextensions/chromium.config  Normal file

@@ -0,0 +1,5 @@
commands:
  00_install_epel:
    command: 'sudo amazon-linux-extras install epel'
  01_install_chromium:
    command: 'sudo yum install -y chromium'
@@ -1,3 +0,0 @@
commands:
  00_install_psql:
    command: 'sudo amazon-linux-extras install -y postgresql13'
186  .env.development

@@ -1,186 +0,0 @@
PRISMA_SLOW_LOGS_MS=
GRAPHQL_SLOW_LOGS_MS=
NODE_ENV=development
COMPOSE_PROFILES='minimal,images,search,payments,wallets,email,capture'

############################################################################
# OPTIONAL SECRETS #
# put these in .env.local, and don't commit them to git #
############################################################################

# github
GITHUB_ID=
GITHUB_SECRET=

# twitter
TWITTER_ID=
TWITTER_SECRET=

# email list
LIST_MONK_AUTH=

# VAPID for Web Push
VAPID_MAILTO=
NEXT_PUBLIC_VAPID_PUBKEY=
VAPID_PRIVKEY=

# slack
SLACK_BOT_TOKEN=
SLACK_CHANNEL_ID=

# lnurl ... you'll need a tunnel to localhost:3000 for these
LNAUTH_URL=http://localhost:3000/api/lnauth
LNWITH_URL=http://localhost:3000/api/lnwith

########################################
# SNDEV STUFF WE PRESET #
# which you can override in .env.local #
########################################

# email
LOGIN_EMAIL_SERVER=smtp://mailhog:1025
LOGIN_EMAIL_FROM=sndev@mailhog.dev

# email salt
# openssl rand -hex 32
EMAIL_SALT=202c90943c313b829e65e3f29164fb5dd7ea3370d7262c4159691c2f6493bb8b

# static things
NEXTAUTH_URL=http://localhost:3000/api/auth
SELF_URL=http://app:3000
PUBLIC_URL=http://localhost:3000
NEXT_PUBLIC_URL=http://localhost:3000
LND_CONNECT_ADDRESS=03cc1d0932bb99b0697f5b5e5961b83ab7fd66f1efc4c9f5c7bad66c1bcbe78f02@xhlmkj7mfrl6ejnczfwl2vqik3xim6wzmurc2vlyfoqw2sasaocgpuad.onion:9735
NEXTAUTH_SECRET=3_0W_PhDRZVanbeJsZZGIEljexkKoGbL6qGIqSwTjjI
JWT_SIGNING_PRIVATE_KEY={"kty":"oct","kid":"FvD__hmeKoKHu2fKjUrWbRKfhjimIM4IKshyrJG4KSM","alg":"HS512","k":"3_0W_PhDRZVanbeJsZZGIEljexkKoGbL6qGIqSwTjjI"}
INVOICE_HMAC_KEY=a4c1d9c81edb87b79d28809876a18cf72293eadb39f92f3f4f2f1cfbdf907c91

# lnd
# xxd -p -c0 docker/lnd/sn/regtest/admin.macaroon
LND_CERT=2d2d2d2d2d424547494e2043455254494649434154452d2d2d2d2d0a4d494943516a43434165696741774942416749516139493834682b48653350385a437541525854554d54414b42676771686b6a4f50515144416a41344d5238770a485159445651514b45785a73626d5167595856306232646c626d56795958526c5a43426a5a584a304d5255774577594456515144457778694e6a41785a5749780a4d474d354f444d774868634e4d6a51774d7a41334d5463774d6a45355768634e4d6a55774e5441794d5463774d6a4535576a41344d523877485159445651514b0a45785a73626d5167595856306232646c626d56795958526c5a43426a5a584a304d5255774577594456515144457778694e6a41785a5749784d474d354f444d770a5754415442676371686b6a4f5051494242676771686b6a4f50514d4242774e4341415365596a4b62542b4a4a4a37624b6770677a6d6c3278496130364e3174680a2f4f7033533173382b4f4a41387836647849682f326548556b4f7578675a36703549434b496f375a544c356a5963764375793941334b6e466f3448544d4948510a4d41344741315564447745422f775145417749437044415442674e56485355454444414b4267677242674546425163444154415042674e5648524d42416638450a425441444151482f4d4230474131556444675157424252545756796e653752786f747568717354727969466d6a36736c557a423542674e5648524545636a42770a676778694e6a41785a5749784d474d354f444f4343577876593246736147397a64494947633235666247356b6768526f62334e304c6d52765932746c636935700a626e526c636d356862494945645735706549494b64573570654842685932746c64494948596e566d59323975626f6345667741414159635141414141414141410a41414141414141414141414141596345724273414254414b42676771686b6a4f5051514441674e4941444246416945413873616c4a667134476671465557532f0a35347a335461746c6447736673796a4a383035425a5263334f326f434943794e6e3975716976566f5575365935345143624c3966394c575779547a516e61616e0a656977482f51696b0a2d2d2d2d2d454e442043455254494649434154452d2d2d2d2d0a
LND_MACAROON=0201036c6e6402f801030a106cf4e146abffa5d766befbbf4c73b5a31201301a160a0761646472657373120472656164120577726974651a130a04696e666f120472656164120577726974651a170a08696e766f69636573120472656164120577726974651a210a086d616361726f6f6e120867656e6572617465120472656164120577726974651a160a076d657373616765120472656164120577726974651a170a086f6666636861696e120472656164120577726974651a160a076f6e636861696e120472656164120577726974651a140a057065657273120472656164120577726974651a180a067369676e6572120867656e6572617465120472656164000006202c3bfd55c191e925cbffd73712c9d4b9b4a8440410bde5f8a0a6e33af8b3d876
LND_SOCKET=sn_lnd:10009

# nostr (NIP-57 zap receipts)
# openssl rand -hex 32
NOSTR_PRIVATE_KEY=5f30b7e7714360f51f2be2e30c1d93b7fdf67366e730658e85777dfcc4e4245f

# imgproxy
NEXT_PUBLIC_IMGPROXY_URL=http://localhost:3001
IMGPROXY_KEY=9c273e803fd5d444bf8883f8c3000de57bee7995222370cab7f2d218dd9a4bbff6ca11cbf902e61eeef4358616f231da51e183aee6841e3a797a5c9a9530ba67
IMGPROXY_SALT=47b802be2c9250a66b998f411fc63912ab0bc1c6b47d99b8d37c61019d1312a984b98745eac83db9791b01bb8c93ecbc9b2ef9f2981d66061c7d0a4528ff6465

IMGPROXY_ENABLE_WEBP_DETECTION=1
IMGPROXY_ENABLE_AVIF_DETECTION=1
IMGPROXY_MAX_ANIMATION_FRAMES=2000
IMGPROXY_MAX_SRC_RESOLUTION=50
IMGPROXY_MAX_ANIMATION_FRAME_RESOLUTION=200
IMGPROXY_READ_TIMEOUT=10
IMGPROXY_WRITE_TIMEOUT=10
IMGPROXY_DOWNLOAD_TIMEOUT=9
IMGPROXY_ENABLE_VIDEO_THUMBNAILS=1
# IMGPROXY_DEVELOPMENT_ERRORS_MODE=1
# IMGPROXY_ENABLE_DEBUG_HEADERS=true

NEXT_PUBLIC_AWS_UPLOAD_BUCKET=uploads
NEXT_PUBLIC_MEDIA_DOMAIN=localhost:4566
NEXT_PUBLIC_MEDIA_URL=http://localhost:4566/uploads

# search
OPENSEARCH_URL=http://opensearch:9200
OPENSEARCH_USERNAME=admin
OPENSEARCH_PASSWORD=mVchg1T5oA9wudUh
OPENSEARCH_INDEX=item
OPENSEARCH_MODEL_ID=

# prisma db url
DATABASE_URL="postgresql://sn:password@db:5432/stackernews?schema=public"
DB_APP_CONNECTION_LIMIT=2
DB_WORKER_CONNECTION_LIMIT=2
DB_TRANSACTION_TIMEOUT=5000

# polling intervals
NEXT_PUBLIC_FAST_POLL_INTERVAL=1000
NEXT_PUBLIC_NORMAL_POLL_INTERVAL=30000
NEXT_PUBLIC_LONG_POLL_INTERVAL=60000
NEXT_PUBLIC_EXTRA_LONG_POLL_INTERVAL=300000

# containers can't use localhost, so we need to use the container name
IMGPROXY_URL_DOCKER=http://imgproxy:8080
MEDIA_URL_DOCKER=http://s3:4566/uploads

# postgres container stuff
POSTGRES_PASSWORD=password
POSTGRES_USER=sn
POSTGRES_DB=stackernews

# opensearch container stuff
OPENSEARCH_INITIAL_ADMIN_PASSWORD=mVchg1T5oA9wudUh
DISABLE_SECURITY_DASHBOARDS_PLUGIN=true

# bitcoind container stuff
RPC_AUTH='7c68e5fcdba94a366bfdf629ecc676bb$0d0fc087c3bf7f068f350292bf8de1418df3dd8cb31e35682d5d3108d601002b'
RPC_USER=bitcoin
RPC_PASS=bitcoin
RPC_PORT=18443
P2P_PORT=18444
ZMQ_BLOCK_PORT=28334
ZMQ_TX_PORT=28335
ZMQ_HASHBLOCK_PORT=29000

# sn_lnd container stuff
SN_LND_REST_PORT=8080
SN_LND_GRPC_PORT=10009
SN_LND_P2P_PORT=9735
# docker exec -u lnd sn_lnd lncli newaddress p2wkh --unused
SN_LND_ADDR=bcrt1q7q06n5st4vqq3lssn0rtkrn2qqypghv9xg2xnl
SN_LND_PUBKEY=02cb2e2d5a6c5b17fa67b1a883e2973c82e328fb9bd08b2b156a9e23820c87a490
# sn_lndk stuff
SN_LNDK_GRPC_PORT=10012

# lnd container stuff
LND_REST_PORT=8081
LND_GRPC_PORT=10010
# docker exec -u lnd lnd lncli newaddress p2wkh --unused
LND_ADDR=bcrt1qfqau4ug9e6rtrvxrgclg58e0r93wshucumm9vu
LND_PUBKEY=028093ae52e011d45b3e67f2e0f2cb6c3a1d7f88d2920d408f3ac6db3a56dc4b35

# cln container stuff
CLN_REST_PORT=9092
# docker exec -u clightning cln lightning-cli newaddr bech32
CLN_ADDR=bcrt1q02sqd74l4pxedy24fg0qtjz4y2jq7x4lxlgzrx
CLN_PUBKEY=03ca7acec181dbf5e427c682c4261a46a0dd9ea5f35d97acb094e399f727835b90

# sndev cli eclair getnewaddress
# sndev cli eclair getinfo
ECLAIR_ADDR="bcrt1qdus2yml69wsax3unz8pts9h979lc3s4tw0tpf6"
ECLAIR_PUBKEY="02268c74cc07837041131474881f97d497706b89a29f939555da6d094b65bd5af0"

# router lnd container stuff
ROUTER_LND_REST_PORT=8082
ROUTER_LND_GRPC_PORT=10011
# docker exec -u lnd router_lnd lncli newaddress p2wkh --unused
ROUTER_LND_ADDR=bcrt1qfkmwfpwgn6wt0dd36s79x04swz8vleyafsdpdr
ROUTER_LND_PUBKEY=02750991fbf62e57631888bc469fae69c5e658bd1d245d8ab95ed883517caa33c3

LNCLI_NETWORK=regtest

# localstack container stuff
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
PERSISTENCE=1
SKIP_SSL_CERT_DOWNLOAD=1

# tor proxy
TOR_PROXY=http://tor:7050/
grpc_proxy=http://tor:7050/

# lnbits
LNBITS_WEB_PORT=5001

# CPU shares for each category
CPU_SHARES_IMPORTANT=1024
CPU_SHARES_MODERATE=512
CPU_SHARES_LOW=256

NEXT_TELEMETRY_DISABLED=1
@@ -1,25 +0,0 @@
LIST_MONK_URL=https://mail.stacker.news
LNAUTH_URL=https://stacker.news/api/lnauth
LND_CONNECT_ADDRESS=03cc1d0932bb99b0697f5b5e5961b83ab7fd66f1efc4c9f5c7bad66c1bcbe78f02@52.5.194.83:9735
LNWITH_URL=https://stacker.news/api/lnwith
LOGIN_EMAIL_FROM=login@stacker.news
NEXTAUTH_URL=https://stacker.news
NEXTAUTH_URL_INTERNAL=http://127.0.0.1:8080/api/auth
NEXT_PUBLIC_AWS_UPLOAD_BUCKET=snuploads
NEXT_PUBLIC_IMGPROXY_URL=https://imgprxy.stacker.news/
NEXT_PUBLIC_MEDIA_DOMAIN=m.stacker.news
PUBLIC_URL=https://stacker.news
SELF_URL=http://127.0.0.1:8080
grpc_proxy=http://127.0.0.1:7050
NEXT_PUBLIC_FAST_POLL_INTERVAL=1000
NEXT_PUBLIC_NORMAL_POLL_INTERVAL=30000
NEXT_PUBLIC_LONG_POLL_INTERVAL=60000
NEXT_PUBLIC_EXTRA_LONG_POLL_INTERVAL=300000
NEXT_PUBLIC_URL=https://stacker.news
TOR_PROXY=http://127.0.0.1:7050/
PRISMA_SLOW_LOGS_MS=50
GRAPHQL_SLOW_LOGS_MS=50
DB_APP_CONNECTION_LIMIT=4
DB_WORKER_CONNECTION_LIMIT=2
DB_TRANSACTION_TIMEOUT=10000
NEXT_TELEMETRY_DISABLED=1
92  .env.sample  Normal file

@@ -0,0 +1,92 @@
############################################################################
# AUTH / OPTIONAL #
# if you want to work on logged in features, you'll need some kind of auth #
############################################################################

# github
GITHUB_ID=<YOUR GITHUB ID>
GITHUB_SECRET=<YOUR GITHUB SECRET>

# twitter
TWITTER_ID=<YOUR TWITTER ID>
TWITTER_SECRET=<YOUR TWITTER SECRET>

# email
LOGIN_EMAIL_SERVER=smtp://<YOUR EMAIL>:<YOUR PASSWORD>@<YOUR SMTP DOMAIN>:587
LOGIN_EMAIL_FROM=<YOUR FROM ALIAS>
LIST_MONK_AUTH=

#####################################################################
# OTHER / OPTIONAL #
# configuration for push notifications, slack and imgproxy are here #
#####################################################################

# VAPID for Web Push
VAPID_MAILTO=
NEXT_PUBLIC_VAPID_PUBKEY=
VAPID_PRIVKEY=

# slack
SLACK_BOT_TOKEN=
SLACK_CHANNEL_ID=

# imgproxy
NEXT_PUBLIC_IMGPROXY_URL=
IMGPROXY_KEY=
IMGPROXY_SALT=

# search
OPENSEARCH_URL=http://opensearch:9200
OPENSEARCH_USERNAME=
OPENSEARCH_PASSWORD=

#######################################################
# WALLET / OPTIONAL #
# if you want to work with payments you'll need these #
#######################################################

# lnd
LND_CERT=<YOUR LND HEX CERT>
LND_MACAROON=<YOUR LND HEX MACAROON>
LND_SOCKET=<YOUR LND GRPC HOST>:<YOUR LND GRPC PORT>

# lnurl
LNAUTH_URL=<PUBLIC URL TO /api/lnauth>
LNWITH_URL=<PUBLIC URL TO /api/lnwith>

# nostr (NIP-57 zap receipts)
NOSTR_PRIVATE_KEY=<YOUR NOSTR PRIVATE KEY IN HEX>

###############
# LEAVE AS IS #
###############

# static things
NEXTAUTH_URL=http://localhost:3000/api/auth
SELF_URL=http://app:3000
PUBLIC_URL=http://localhost:3000
LND_CONNECT_ADDRESS=03cc1d0932bb99b0697f5b5e5961b83ab7fd66f1efc4c9f5c7bad66c1bcbe78f02@xhlmkj7mfrl6ejnczfwl2vqik3xim6wzmurc2vlyfoqw2sasaocgpuad.onion:9735
NEXTAUTH_SECRET=3_0W_PhDRZVanbeJsZZGIEljexkKoGbL6qGIqSwTjjI
JWT_SIGNING_PRIVATE_KEY={"kty":"oct","kid":"FvD__hmeKoKHu2fKjUrWbRKfhjimIM4IKshyrJG4KSM","alg":"HS512","k":"3_0W_PhDRZVanbeJsZZGIEljexkKoGbL6qGIqSwTjjI"}
INVOICE_HMAC_KEY=a4c1d9c81edb87b79d28809876a18cf72293eadb39f92f3f4f2f1cfbdf907c91

# imgproxy options
IMGPROXY_ENABLE_WEBP_DETECTION=1
IMGPROXY_ENABLE_AVIF_DETECTION=1
IMGPROXY_MAX_ANIMATION_FRAMES=2000
IMGPROXY_MAX_SRC_RESOLUTION=50
IMGPROXY_MAX_ANIMATION_FRAME_RESOLUTION=200
IMGPROXY_READ_TIMEOUT=10
IMGPROXY_WRITE_TIMEOUT=10
IMGPROXY_DOWNLOAD_TIMEOUT=9
# IMGPROXY_DEVELOPMENT_ERRORS_MODE=1
# IMGPROXY_ENABLE_DEBUG_HEADERS=true

# prisma db url
DATABASE_URL="postgresql://sn:password@db:5432/stackernews?schema=public"

# postgres container stuff
POSTGRES_PASSWORD=password
POSTGRES_USER=sn
POSTGRES_DB=stackernews
1  .gitattributes  vendored

@@ -1 +0,0 @@
docker/db/seed.sql linguist-vendored
35  .github/ISSUE_TEMPLATE/bug_report.md  vendored  Normal file

@@ -0,0 +1,35 @@
---
name: Bug report
about: Report a problem
title: ''
labels: bug
assignees: ''

---

*Note: this template is meant to help you report the bug so that we can fix it faster, i.e. not all of these sections are required*

**Description**
A clear and concise description of what the bug is.

**Steps to Reproduce**
A clear and concise way we might be able to reproduce the bug.

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Logs**
If applicable, add your browser's console logs.

**Environment:**
If you only experience the issue on certain devices or browsers, provide that info.
- Device: [e.g. iPhone6]
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]

**Additional context**
Add any other context about the problem here.
65  .github/ISSUE_TEMPLATE/bug_report.yml  vendored

@@ -1,65 +0,0 @@
name: 🐞 Bug report
description: Create a bug report to help us fix it
title: "bug report"
labels: [bug]
body:
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      description: Please search to see if an issue already exists for the bug you encountered.
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: Describe the bug
      description: A clear and concise description of what the bug is. Include images if relevant.
      placeholder: I accidentally deleted the internet. Here's my story ...
    validations:
      required: true
  - type: textarea
    attributes:
      label: Screenshots
      description: |
        Add screenshots to help explain your problem. You can also add a video here.

        Tip: You can attach images or video files by clicking this area to highlight it and then dragging files in.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Steps To Reproduce
      description: Steps to reproduce the bug.
      placeholder: |
        1. Go to '...'
        2. Click on '...'
        3. Scroll to '...'
        4. See error
    validations:
      required: true
  - type: textarea
    attributes:
      label: Expected behavior
      description: A clear and concise description of what you expected to happen
    validations:
      required: true
  - type: textarea
    attributes:
      label: Logs
      description: If applicable, add your browser's console logs here
  - type: textarea
    attributes:
      label: Device information
      placeholder: |
        - OS: [e.g. Windows]
        - Browser: [e.g. chrome, safari, firefox]
        - Browser Version: [e.g. 22]
    validations:
      required: false
  - type: textarea
    attributes:
      label: Additional context
      description: |
        Do you have links to discussions about this on SN or other references?
    validations:
      required: false
5  .github/ISSUE_TEMPLATE/config.yml  vendored

@@ -1,5 +0,0 @@
blank_issues_enabled: true
contact_links:
  - name: Questions
    url: https://stacker.news/~meta
    about: If you simply have a question, you can ask it in ~meta or the saloon.
20  .github/ISSUE_TEMPLATE/feature_request.md  vendored  Normal file

@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest a feature
title: ''
labels: feature
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
32  .github/ISSUE_TEMPLATE/feature_request.yml  vendored

@@ -1,32 +0,0 @@
name: ✨ Feature request
description: Request a feature you'd like to see in SN!
title: "feature request"
labels: [feature]
body:
  - type: markdown
    attributes:
      value: |
        We're always looking for suggestions on how we could improve SN!
  - type: textarea
    attributes:
      label: Describe the problem you're trying to solve
      description: |
        Is your feature request related to a problem? Add a clear and concise description of what the problem is.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Describe the solution you'd like
      description: A clear and concise description of what you want to happen.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Describe alternatives you've considered
      description: |
        A clear and concise description of any alternative solutions or features you have considered.
  - type: textarea
    attributes:
      label: Additional context
      description: |
        Add any other additional context or screenshots about the feature request here.
22  .github/pull_request_template.md  vendored

@@ -1,22 +0,0 @@
## Description

_A clear and concise description of what you changed and why._

## Screenshots

## Additional Context

_Was anything unclear during your work on this PR? Anything we should definitely take a closer look at?_

## Checklist

**Are your changes backwards compatible? Please answer below:**

**On a scale of 1-10 how well and how have you QA'd this change and any features it might affect? Please answer below:**

**For frontend changes: Tested on mobile, light and dark mode? Please answer below:**

**Did you introduce any new environment variables? If so, call them out explicitly here:**
35  .github/workflows/extend-awards.yml  vendored

@@ -1,35 +0,0 @@
name: extend-awards
on:
  pull_request_target:
    types: [ closed ]
    branches:
      - master
permissions:
  pull-requests: write
  contents: write
  issues: read
jobs:
  if_merged:
    if: |
      github.event_name == 'pull_request_target' &&
      github.event.action == 'closed' &&
      github.event.pull_request.merged == true &&
      github.event.pull_request.head.ref != 'extend-awards/patch'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: '3.13'
      - run: pip install requests
      - run: python extend-awards.py
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GITHUB_CONTEXT: ${{ toJson(github) }}
      - uses: peter-evans/create-pull-request@v7
        with:
          add-paths: awards.csv
          branch: extend-awards/patch
          commit-message: Extending awards.csv
          title: Extending awards.csv
          body: A PR was merged that solves an issue and awards.csv should be extended.
6  .github/workflows/lint.yml  vendored

@@ -1,8 +1,8 @@
name: Lint Check
name: Eslint Check
on: [pull_request]

jobs:
  lint-run:
  eslint-run:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout

@@ -11,7 +11,7 @@ jobs:
      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: "18.20.4"
          node-version: "18.17.0"

      - name: Install
        run: npm install
17  .github/workflows/shell-check.yml  vendored

@@ -1,17 +0,0 @@
name: ShellCheck

on: [pull_request]

jobs:
  shellcheck:
    runs-on: ubuntu-latest

    steps:
      - name: Shellcheck
        uses: actions/checkout@v3

      - name: Run ShellCheck
        uses: ludeeus/action-shellcheck@master
        with:
          severity: error
          scandir: ./sndev
20  .github/workflows/test.yml  vendored

@@ -1,20 +0,0 @@
name: Tests
on: [pull_request]

jobs:
  unit:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: "18.20.4"

      - name: Install
        run: npm install

      - name: Test
        run: npm test
35  .gitignore  vendored

@@ -1,7 +1,7 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
node_modules/
/node_modules
/.pnp
.pnp.js
.cache

@@ -27,12 +27,12 @@ yarn-debug.log*
yarn-error.log*

# local env files
.env*
!.env.development
!.env.production

# local settings
.vscode/settings.json
.env
envbak
.env.local
.env.development.local
.env.test.local
.env.production.local

# vercel
.vercel

@@ -42,27 +42,12 @@ yarn-error.log*
!.elasticbeanstalk/*.cfg.yml
!.elasticbeanstalk/*.global.yml

# copilot
copilot/

# service worker
public/sw.js*
sw/precache-manifest.json
public/workbox-*.js*
public/*-development.js

.cache_ggshield
docker-compose.*.yml
*.sql
!/prisma/migrations/*/*.sql
!/docker/db/seed.sql

# nostr wallet connect
scripts/nwc-keys.json

# lnbits
docker/lnbits/data

# lndk
!docker/lndk/tls-*.pem

# nostr link extract
scripts/nostr-link-extract.config.json
scripts/nostr-links.db
9  .puppeteerrc.cjs  Normal file

@@ -0,0 +1,9 @@
const { join } = require('path');

/**
 * @type {import("puppeteer").Configuration}
 */
module.exports = {
  // Changes the cache location for Puppeteer.
  cacheDirectory: join(__dirname, '.cache', 'puppeteer'),
};
12  Dockerfile

@@ -1,19 +1,11 @@
# syntax=docker/dockerfile:1

FROM node:18.20.4-bullseye
FROM node:18.17.0-bullseye

ENV NODE_ENV=development

ARG UID
ARG GID
RUN groupadd -fg "$GID" apprunner
RUN useradd -om -u "$UID" -g "$GID" apprunner
USER apprunner

WORKDIR /app

EXPOSE 3000

COPY package.json package-lock.json ./
RUN npm ci --legacy-peer-deps --loglevel verbose
CMD ["sh","-c","npm install --loglevel verbose --legacy-peer-deps && npx prisma migrate dev && npm run dev"]
CMD npm install --loglevel verbose --legacy-peer-deps; npx prisma migrate dev; npm run dev
486  README.md

@@ -1,476 +1,46 @@
<p align="center">
  <a href="https://stacker.news">
    <img height="50" alt="Internet Communities with Bitcoin Economies" src="https://github.com/stackernews/stacker.news/assets/34140557/a8ccc5dc-c453-46dc-be74-60dd0a42ce09">
  </a>
</p>
# contributing
We pay sats for PRs. Sats will be proportional to the impact of the PR. If there's something you'd like to work on, suggest how much you'd do it for on the issue. If there's something you'd like to work on that isn't already an issue, whether it's a bug fix or a new feature, create one.

We aim to have a quick PR turnaround time, but we are sometimes slower than we'd like. In most cases, if your PR is unambiguously good, it shouldn't take us more than 1 week.

- Stacker News is trying to fix online communities with economics
- What You See is What We Ship (look ma, I invented an initialism)
- 100% FOSS
- We pay bitcoin for PRs, issues, documentation, code reviews and more
- Next.js, postgres, graphql, and lnd
If you have a question about contributing, start a [discussion](https://github.com/stackernews/stacker.news/discussions).

<br>
# responsible disclosure

# Getting started
If you found a vulnerability, we would greatly appreciate it if you contact us via [kk@stacker.news](mailto:kk@stacker.news) or t.me/k00bideh.

Launch a fully featured SN development environment in a single command.
# stacker.news
[Stacker News](https://stacker.news) is like Hacker News but we pay you Bitcoin. We use Bitcoin and the Lightning Network to provide Sybil resistance and any karma earned is withdrawable as Bitcoin.

```sh
$ ./sndev start
```
# wen decentralization
We're experimenting with providing an SN-like service on nostr in [Outer Space](https://outer.space). It's our overarching goal to align SN with Bitcoin's ethos yet still make a product the average bitcoiner loves to use.

Go to [localhost:3000](http://localhost:3000).
# local development
1. [Install docker-compose](https://docs.docker.com/compose/install/) and deps if you don't already have them installed
2. `git clone git@github.com:stackernews/stacker.news.git sn && cd sn`
3. `docker-compose up --build`

<br>
You should then be able to access the site at `localhost:3000` and any changes you make will hot reload. If you want to login locally or use lnd you'll need to modify `.env.sample` appropriately. More details [here](./docs/local-auth.md) and [here](./docs/local-lnd.md). If you have trouble, please open an issue so I can help and update the README for everyone else.

## Installation
# web push

- Clone the repo
  - ssh: `git clone git@github.com:stackernews/stacker.news.git`
  - https: `git clone https://github.com/stackernews/stacker.news.git`
- Install [docker](https://docs.docker.com/compose/install/)
  - If you're running MacOS or Windows, I ***highly recommend*** using [OrbStack](https://orbstack.dev/) instead of Docker Desktop
  - Please make sure that at least 10 GB of free space is available, otherwise you may encounter issues while setting up the development environment.
To enable Web Push locally, you will need to set the `VAPID_*` env vars. `VAPID_MAILTO` needs to be an email address using the `mailto:` scheme. For `NEXT_PUBLIC_VAPID_PUBKEY` and `VAPID_PRIVKEY`, you can run `npx web-push generate-vapid-keys`.

<br>
# imgproxy

## Usage
To configure the image proxy, you will need to set the `IMGPROXY_` env vars. `NEXT_PUBLIC_IMGPROXY_URL` needs to point to the image proxy service. `IMGPROXY_KEY` and `IMGPROXY_SALT` can be set using `openssl rand -hex 64`.
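For example, a hypothetical `.env.local` entry (the key and salt below are placeholders to generate yourself, not real values):

```.env
NEXT_PUBLIC_IMGPROXY_URL=http://localhost:3001
# generate each with: openssl rand -hex 64
IMGPROXY_KEY=<64 bytes of hex>
IMGPROXY_SALT=<64 bytes of hex>
```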

Start the development environment
# stack
The site is written in javascript using Next.js, a React framework. The backend API is provided via GraphQL. The database is PostgreSQL modeled with Prisma. The job queue is also maintained in PostgreSQL. We use lnd for our lightning node. A customized Bootstrap theme is used for styling.

```sh
$ ./sndev start
```
# processes
There are two: (1) the web app and (2) the worker, which dequeues jobs sent to it by the web app, e.g. polling lnd for invoice/payment status.
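For illustration, the handoff between the two looks roughly like this — a sketch assuming pg-boss's `send`/`work` API; the job name and payload are made up:

```js
// sketch only: how the web app might hand a job to the worker via pg-boss
import PgBoss from 'pg-boss'

const boss = new PgBoss(process.env.DATABASE_URL)
await boss.start()

// web app side: enqueue a job (hypothetical name/payload)
await boss.send('checkInvoice', { hash: '<invoice hash>' })

// worker side: dequeue and handle it
await boss.work('checkInvoice', async (job) => {
  // poll lnd for the invoice/payment status here
})
```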

View all available commands
# wallet transaction safety
To ensure stackers' balances are kept sane, all wallet updates are run in serializable transactions at the database level. Because prisma has relatively poor support for transactions, all wallet-touching code is written in plpgsql stored procedures and can be found in the prisma/migrations folder.

# code
The code is linted with standardjs.

```sh
$ ./sndev help

                         888
                         888
                         888
.d8888b 88888b.  .d88888 .d88b.  888  888
88K     888 '88b d88' 888 d8P  Y8b 888  888
'Y8888b. 888  888 888  888 88888888  Y88 88P
     X88 888  888 Y88b 888 Y8b.       Y8bd8P
 88888P' 888  888  'Y88888  'Y8888     Y88P

manages a docker based stacker news development environment

USAGE
  $ sndev [COMMAND]
  $ sndev help [COMMAND]

COMMANDS
  help      show help

  env:
    start     start env
    stop      stop env
    restart   restart env
    status    status of env
    logs      logs from env
    delete    delete env

  sn:
    login        login as a nym
    set_balance  set the balance of a nym

  lightning:
    fund      pay a bolt11 for funding
    withdraw  create a bolt11 for withdrawal

  db:
    psql      open psql on db
    prisma    run prisma commands

  dev:
    pr        fetch and checkout a pr
    lint      run linters
    test      run tests

  other:
    cli       service cli passthrough
    open      open service GUI in browser
    onion     service onion address
    cert      service tls cert
    compose   docker compose passthrough
```

### Modifying services

#### Running specific services

By default all services will be run. If you want to exclude specific services from running, set `COMPOSE_PROFILES` in a `.env.local` file to one or more of `minimal,images,search,payments,wallets,email,capture`. To run only the minimal services necessary, without things like payments, set in `.env.local`:

```.env
COMPOSE_PROFILES=minimal
```

To run with images and payments services:

```.env
COMPOSE_PROFILES=images,payments
```

#### Merging compose files

By default `sndev start` will merge `docker-compose.yml` with `docker-compose.override.yml`. Specify any overrides you want to merge with `docker-compose.override.yml`.

For example, if you want to replace the db seed with a custom seed file located in `docker/db/another.sql`, you'd create a `docker-compose.override.yml` file with the following:

```yml
services:
  db:
    volumes:
      - ./docker/db/another.sql:/docker-entrypoint-initdb.d/seed.sql
```

You can read more about [docker compose override files](https://docs.docker.com/compose/multiple-compose-files/merge/).

<br>

# Table of Contents
- [Getting started](#getting-started)
- [Installation](#installation)
- [Usage](#usage)
- [Modifying services](#modifying-services)
- [Running specific services](#running-specific-services)
- [Merging compose files](#merging-compose-files)
- [Contributing](#contributing)
- [We pay bitcoin for contributions](#we-pay-bitcoin-for-contributions)
- [Pull request awards](#pull-request-awards)
- [Code review awards](#code-review-awards)
- [Issue specification awards](#issue-specification-awards)
- [Responsible disclosure of security or privacy vulnerability awards](#responsible-disclosure-of-security-or-privacy-vulnerability-awards)
- [Development documentation awards](#development-documentation-awards)
- [Helpfulness awards](#helpfulness-awards)
- [Contribution extras](#contribution-extras)
- [Dev chat](#dev-chat)
- [Triage permissions](#triage-permissions)
- [Contributor badges on SN profiles](#contributor-badges-on-sn-profiles)
- [What else you got](#what-else-you-got)
- [Development Tips](#development-tips)
- [Linting](#linting)
- [Database migrations](#database-migrations)
- [Connecting to the local database](#connecting-to-the-local-database)
- [Running lncli on the local lnd nodes](#running-lncli-on-the-local-lnd-nodes)
- [Testing local auth](#testing-local-auth)
- [Login with Email](#login-with-email)
- [Login with Github](#login-with-github)
- [Login with Lightning](#login-with-lightning)
- [Enabling web push notifications](#enabling-web-push-notifications)
- [Internals](#internals)
- [Stack](#stack)
- [Services](#services)
- [Wallet transaction safety](#wallet-transaction-safety)
- [Need help?](#need-help)
- [Responsible Disclosure](#responsible-disclosure)
- [License](#license)

<br>

# Contributing
We want your help.

<br>

## We pay bitcoin for contributions
- pull requests closing existing issues
- code review
- issue specification whether for bugs, features, or enhancements
- discovery of security vulnerabilities
- discovery of privacy vulnerabilities
- improvements to development documentation
- helpfulness

[View a current list of granted awards](awards.csv)

<br>

## Just in case
*This document in no way legally entitles you to payments for contributions, entitles you to being a contributor, or entitles you to the attention of other contributors. This document lays out the system we **can** use to determine contribution awards which we generally intend to abide by but importantly we reserve the right to refuse payments or contributions, modify rules and award amounts, make exceptions to rules or reward amounts, and withhold awards for any reason at anytime, even just for the heck of it, at our sole discretion. If you need more certainty than what I've just described, don't participate. We provide awards as an experiment to make FOSS less sucky.*

<br>

## Pull request awards

### Rules
1. PRs closing an issue will be awarded according to the `difficulty` tag on an issue, e.g. `difficulty:easy` pays 100k sats.
2. Issues are occasionally marked with a `priority` tag which multiplies the award of a PR closing an issue, e.g. an issue marked with `priority:high` and `difficulty:hard` awards 2m sats.
3. An award is reduced by 10% of the award amount for each substantial change requested to the PR on code review, e.g. if two changes are requested on a PR closing an issue tagged with `difficulty:hard`, 800k sats will be awarded.
   - Reductions are applied before `priority` multipliers, e.g. a PR closing a `priority:high` and `difficulty:hard` issue that's approved after two changes are requested awards 1.6m sats (see the sketch after the tables below).
   - You are responsible for understanding the issue and requirements **before requesting review on a PR**.
   - There is no award reduction for asking specific questions on the issue itself or on the PR **before requesting review**
   - There is no award reduction for asking more general questions in a discussion
4. A PR must be merged by an SN engineer before a PR receives an award

_Due to Rule 3, make sure that you mark your PR as a draft when you create it if it's not ready for review yet._

### Difficulty award amounts

| tag | description | award |
| --- | --- | --- |
| `difficulty:good-first-issue` | at most a couple lines of code in a couple files and does not require much familiarity with the codebase | `20k sats` |
| `difficulty:easy` | at most a couple lines of code in a couple files but does require familiarity with the code base | `100k sats` |
| `difficulty:medium` | more code, more places and could require adding columns in the db and some modification of chunky db queries | `250k sats` |
| `difficulty:medium-hard` | even more code, even more places and requires either more sophisticated logic, more significant db modeling eg adding a table, and/or a deeper study of something | `500k sats` |
| `difficulty:hard` | either a bigger lift than what's required of medium-hard or very tricky in a particular way that might not require a lot of code but does require a lot of context/troubleshooting/expertise | `1m sats` |

### Priority multipliers

| tag | multiplier |
| --- | --- |
| `priority:low` | 0.5 |
| `priority:medium` | 1.5 |
| `priority:high` | 2 |
| `priority:urgent` | 3 |
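To make the interaction of the rules and tables concrete, here's an illustrative sketch of the arithmetic (the tables above remain authoritative, and awards stay at our discretion):

```js
// illustrative only: award = difficulty base, minus 10% per requested change,
// times the priority multiplier (reductions apply before multipliers, per rule 3)
const DIFFICULTY = { 'good-first-issue': 20000, easy: 100000, medium: 250000, 'medium-hard': 500000, hard: 1000000 }
const PRIORITY = { low: 0.5, medium: 1.5, high: 2, urgent: 3 }

function award ({ difficulty, priority, changesRequested = 0 }) {
  const reduced = DIFFICULTY[difficulty] * Math.max(0, 1 - 0.1 * changesRequested)
  return reduced * (priority ? PRIORITY[priority] : 1)
}

award({ difficulty: 'hard', priority: 'high', changesRequested: 2 }) // => 1600000 sats, as in the example above
```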

### Requesting modifications to reward amounts
We try to assign difficulty and priority tags to issues accurately, but we're not perfect. If you believe an issue is mis-tagged, you can request a change to the issue's tags.

<br>

## Code review awards

Code reviewers will be awarded the amount their code review reduced from the PR author's reward, e.g. two substantial problems/areas of improvement identified in a PR closing a `priority:high` and `difficulty:hard` issue awards 400k sats.

### Rules
1. The problem or improvement must be acknowledged as such by SN engineers explicitly
2. A PR must be merged by an SN engineer before a PR's code reviewers receive an award

Code review approvals are more than welcome, but we can't guarantee awards for them because the work performed to approve a PR is unverifiable.

<br>

## Issue specification awards

Issue specifiers will be awarded up to 10% of a PR award for issues resulting in a PR being merged by an SN engineer that closes the issue. In addition to being subject to PR award amounts and reductions, specification amounts are awarded on the basis of how much additional help and specification is required by other contributors.

### Rules
1. The issue must directly result in a PR being merged by an SN engineer that closes the issue
2. Issue specification award amounts are based on the final PR award amounts
   - that is, they are subject to PR award code review reductions and priority multipliers
3. Award amounts will be reduced on the basis of how much additional help and specification is required by other contributors
4. Issue specifiers who can close their own issues with their own PRs are also eligible for this 10%
   - e.g. an issue tagged as `difficulty:hard` that is both specified and closed by a PR from the same contributor without changes requested awards 1.1m sats

### Relative awards

| circumstances | award |
| --- | --- |
| issue doesn't require further help and/or specification from other contributors | 10% |
| issue requires little help and/or specification from other contributors | 5% |
| issue requires more help and/or specification from other contributors than the issue specifier contributed | 1% |
| issue is vague and/or incomplete and must mostly be entirely specified by someone else | 0% |

For example: a specified issue that's tagged as `difficulty:hard`, doesn't require additional specification and disambiguation by other contributors, and results in a PR being merged without changes requested awards the issue specifier 100k sats.

<br>

## Responsible disclosure of security or privacy vulnerability awards

Awards for responsible disclosures are assessed on the basis of:

1. the potential loss resulting from an exploit of the vulnerability
2. the trivialness of exploiting the vulnerability
3. the disclosure's detail

Award amounts will be easiest to assess on a case by case basis. Upon confirmation of a vulnerability, we agree to award responsible disclosures at minimum 100k sats and as high as the total potential loss that would result from exploiting the vulnerability.

### Rules
1. Disclosure is responsible and does not increase the likelihood of an exploit.
2. Disclosure includes steps to reproduce.
3. Disclosure includes a realistic attack scenario with prerequisites for an attack and expected gains after the exploitation. Disclosures without such a scenario, with unrealistic assumptions, or without meaningful outcomes will not be eligible for awards.
4. You must be the first person to responsibly disclose the issue to be eligible for awards.

<br>

## Development documentation awards

For significant changes to documentation, create an issue before making said changes. In such cases we will award documentation improvements in accordance with issue specification and PR awards.

For changes on the order of something like a typo, we'll award a nominal amount at our discretion.

<br>

## Helpfulness awards

Like issue specification awards, helping fellow contributors substantially in a well documented manner such that the helped fellow contributes a merged PR is eligible for a one-time relative award.

| circumstances | award |
| --- | --- |
| substantial and singular source of help | 10% |
| substantial but nonsingular source of help | 1-5% |
| source of relatively trivial help | 1% |

<br>

# Contribution extras
We want to make contributing to SN as rewarding as possible, so we offer a few extras to contributors.

## Dev chat
We self-host a private chat server for contributors to SN. If you'd like to join, please respond in this [discussion](https://github.com/stackernews/stacker.news/discussions/1059).

## Triage permissions
We offer triage permissions to contributors after they've made a few contributions. I'll usually add them as I notice people contributing, but if I missed you and you'd like to be added, let me know!

## Contributor badges on SN profiles
Contributors can get badges on their SN profiles by opening a pull request adding their SN nym to the [contributors.txt](/contributors.txt) file.

## What else you got
In the future we plan to offer more, like gratis github copilot subscriptions, reverse tunnels, codespaces, and merch.

If you'd like to see something added, please make a suggestion.

<br>

# Development Tips

<br>

## Linting

We use [JavaScript Standard Style](https://standardjs.com/) to enforce code style and correctness. You should run `sndev lint` before submitting a PR.

If you're using VSCode, you can install the [StandardJS VSCode Extension](https://marketplace.visualstudio.com/items?itemName=standard.vscode-standard) to get linting in your editor. We also recommend installing [StandardJS code snippets](https://marketplace.visualstudio.com/items?itemName=capaj.vscode-standardjs-snippets) and [StandardJS react code snippets](https://marketplace.visualstudio.com/items?itemName=TimonVS.ReactSnippetsStandard) for code snippets.

<br>

## Database migrations

We use [prisma](https://www.prisma.io/) for our database migrations. To create a new migration, modify `prisma/schema.prisma` according to the [prisma schema reference](https://www.prisma.io/docs/orm/reference/prisma-schema-reference) and apply it with:

`./sndev prisma migrate dev`
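For example, a hypothetical schema change (the field below is made up for illustration):

```prisma
model User {
  // ... existing fields ...
  favoriteColor String? // hypothetical new nullable column
}
```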

If you want to create a migration without applying it, e.g. to create a trigger or modify the generated sql before applying, use the `--create-only` option:

`./sndev prisma migrate dev --create-only`

Generate the local copy of the prisma ORM client in `node_modules` after changes. This should only be needed to get Intellisense in your editor locally.

`./sndev prisma generate`

<br>

## Connecting to the local database

You can connect to the local database via `./sndev psql`. [psql](https://www.postgresql.org/docs/13/app-psql.html) is an interactive terminal for working with PostgreSQL.
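For example, assuming extra arguments pass through to psql:

```sh
$ ./sndev psql -c 'select id, name from users limit 5'
```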

<br>

## Running cli on local lightning nodes

You can run `lncli` on the local lnd nodes via `./sndev cli lnd` and `./sndev cli sn_lnd`. The node for your local SN instance is `sn_lnd` and the node serving as any external node, like a stacker's node or external wallet, is `lnd`.

You can run `lightning-cli` on the local cln node via `./sndev cli cln`, which serves as an external node or wallet.
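For example, to check each node's identity (assuming arguments pass through to the underlying cli):

```sh
$ ./sndev cli sn_lnd getinfo
$ ./sndev cli lnd getinfo
$ ./sndev cli cln getinfo
```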

<br>

## Testing local auth

You can login to test features like posting, replying, tipping, etc. with `./sndev login <nym>`, which will provide a link to login as an existing nym or a new account for a nonexistent nym. But if you want to test auth specifically, you'll need to configure the providers in your `.env` file.

### Login with Email

#### MailHog

- The app is already prepared to send emails through [MailHog](https://github.com/mailhog/MailHog) so no extra configuration is needed
- Click "sign up" and enter any email address (remember, it's not going anywhere beyond your workstation)
- Access MailHog's web UI on http://localhost:8025
- Click the link (looks like this):

```
http://localhost:3000/api/auth/callback/email?email=satoshi%40gmail.com&token=110e30a954ce7ca643379d90eb511640733de405f34a31b38eeda8e254d48cd7
```

#### Sendgrid

- Create a Sendgrid account (or other smtp service)

```
LOGIN_EMAIL_SERVER=smtp://apikey:<sendgrid_api_key>@smtp.sendgrid.net:587
LOGIN_EMAIL_FROM=<sendgrid_email_from>
```

- Click "sign up" and enter your email address
- Check your email
- Click the link (looks like this):

```
http://localhost:3000/api/auth/callback/email?email=satoshi%40gmail.com&token=110e30a954ce7ca643379d90eb511640733de405f34a31b38eeda8e254d48cd7
```

### Login with Github

- [Create a new OAuth app](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/creating-an-oauth-app) in your Github account
  - Set the callback URL to: `http://localhost:3000/api/auth/callback/github`
- Update your `.env` file

```
GITHUB_ID=<Client ID>
GITHUB_SECRET=<Client secret>
```

- Signup and login as above

### Login with Lightning

- Use [ngrok](https://ngrok.com/) to create an HTTPS tunnel to localhost:3000
- Update `LNAUTH_URL` in `.env` with the URL provided by `ngrok` and add `/api/lnauth` to it, as in the example below
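For example, with a hypothetical tunnel URL:

```.env
LNAUTH_URL=https://<your-tunnel-subdomain>.ngrok-free.app/api/lnauth
```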

<br>

## Enabling web push notifications

To enable Web Push locally, you will need to set the `VAPID_*` env vars. `VAPID_MAILTO` needs to be an email address using the `mailto:` scheme. For `NEXT_PUBLIC_VAPID_PUBKEY` and `VAPID_PRIVKEY`, you can run `npx web-push generate-vapid-keys`.
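For example (all values below are placeholders; generate your own keys):

```.env
VAPID_MAILTO=mailto:<you@example.com>
NEXT_PUBLIC_VAPID_PUBKEY=<public key printed by web-push>
VAPID_PRIVKEY=<private key printed by web-push>
```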

<br>

# Internals

<br>

## Stack

The site is written in javascript (not typescript 😱) using [Next.js](https://nextjs.org/), a [React](https://react.dev/) framework. The backend API is provided via [GraphQL](https://graphql.org/). The database is [PostgreSQL](https://www.postgresql.org/) modeled with [Prisma](https://www.prisma.io/). The [job queue](https://github.com/timgit/pg-boss) is also maintained in PostgreSQL. We use [lnd](https://github.com/lightningnetwork/lnd) for our lightning node. A customized [Bootstrap](https://react-bootstrap.netlify.app/) theme is used for styling.

<br>

## Services

Currently, SN runs and maintains two significant services and one microservice:

1. the nextjs web app, found in `./`
2. the worker service, found in `./worker`, which runs periodic jobs and jobs sent to it by the web app
3. a screenshot microservice, found in `./capture`, which takes screenshots of SN for social previews

In addition, we run other critical services the above services interact with like `lnd`, `postgres`, `opensearch`, `tor`, and `s3`.

<br>

## Wallet transaction safety

To ensure stackers' balances are kept sane, some wallet updates are run in [serializable transactions](https://www.postgresql.org/docs/current/transaction-iso.html#XACT-SERIALIZABLE) at the database level. Because early versions of prisma had relatively poor support for transactions, most wallet-touching code is written in [plpgsql](https://www.postgresql.org/docs/current/plpgsql.html) stored procedures and can be found in the `prisma/migrations` folder.

*UPDATE*: Most wallet updates are now run in [read committed](https://www.postgresql.org/docs/current/transaction-iso.html#XACT-READ-COMMITTED) transactions. See `api/paidAction/README.md` for more information.
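As a minimal sketch of the stored-procedure pattern (hypothetical function and column names, not one of the repo's actual migrations):

```sql
-- illustrative only: putting the balance update in the database lets it run
-- under the transaction isolation level the caller chooses
CREATE OR REPLACE FUNCTION debit_user(uid INTEGER, amount_msats BIGINT)
RETURNS VOID LANGUAGE plpgsql AS $$
BEGIN
    UPDATE users SET msats = msats - amount_msats WHERE id = uid;
    IF NOT FOUND THEN
        RAISE EXCEPTION 'user % not found', uid;
    END IF;
END;
$$;
```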

<br>

# Need help?
Open a [discussion](http://github.com/stackernews/stacker.news/discussions) or [issue](http://github.com/stackernews/stacker.news/issues/new) or [email us](mailto:kk@stacker.news) or request joining the [dev chat](#dev-chat).

<br>

# Responsible disclosure

If you found a vulnerability, we would greatly appreciate it if you contact us via [security@stacker.news](mailto:security@stacker.news) or open a [security advisory](https://github.com/stackernews/stacker.news/security/advisories/new). Our PGP key can be found [here](https://stacker.news/pgp.txt) (EBAF 75DA 7279 CB48).

<br>

# License
# license
[MIT](https://choosealicense.com/licenses/mit/)
192  api/lnd/index.js

@@ -1,20 +1,13 @@
import { cachedFetcher } from '@/lib/fetch'
import { toPositiveNumber } from '@/lib/format'
import { authenticatedLndGrpc } from '@/lib/lnd'
import { getIdentity, getHeight, getWalletInfo, getNode, getPayment, parsePaymentRequest } from 'ln-service'
import { datePivot } from '@/lib/time'
import { LND_PATHFINDING_TIMEOUT_MS } from '@/lib/constants'
import lndService from 'ln-service'

const lnd = global.lnd || authenticatedLndGrpc({
const { lnd } = lndService.authenticatedLndGrpc({
  cert: process.env.LND_CERT,
  macaroon: process.env.LND_MACAROON,
  socket: process.env.LND_SOCKET
}).lnd

if (process.env.NODE_ENV === 'development') global.lnd = lnd
})

// Check LND GRPC connection
getWalletInfo({ lnd }, (err, result) => {
lndService.getWalletInfo({ lnd }, (err, result) => {
  if (err) {
    console.error('LND GRPC connection error')
    return

@@ -22,181 +15,4 @@ getWalletInfo({ lnd }, (err, result) => {
  console.log('LND GRPC connection successful')
})

export async function estimateRouteFee ({ lnd, destination, tokens, mtokens, request, timeout }) {
  // if the payment request includes us as route hint, we need to use the destination and amount
  // otherwise, this will fail with a self-payment error
  if (request) {
    const inv = parsePaymentRequest({ request })
    const ourPubkey = await getOurPubkey({ lnd })
    if (Array.isArray(inv.routes)) {
      for (const route of inv.routes) {
        if (Array.isArray(route)) {
          for (const hop of route) {
            if (hop.public_key === ourPubkey) {
              console.log('estimateRouteFee ignoring self-payment route')
              request = false
              break
            }
          }
        }
      }
    }
  }

  return await new Promise((resolve, reject) => {
    const params = {}

    if (request) {
      console.log('estimateRouteFee using payment request')
      params.payment_request = request
    } else {
      console.log('estimateRouteFee using destination and amount')
      params.dest = Buffer.from(destination, 'hex')
      params.amt_sat = tokens ? toPositiveNumber(tokens) : toPositiveNumber(BigInt(mtokens) / BigInt(1e3))
    }

    lnd.router.estimateRouteFee({
      ...params,
      timeout
    }, (err, res) => {
      if (err) {
        if (res?.failure_reason) {
          reject(new Error(`Unable to estimate route: ${res.failure_reason}`))
        } else {
          reject(err)
        }
        return
      }

      if (res.routing_fee_msat < 0 || res.time_lock_delay <= 0) {
        reject(new Error('Unable to estimate route, excessive values: ' + JSON.stringify(res)))
        return
      }

      resolve({
        routingFeeMsat: toPositiveNumber(res.routing_fee_msat),
        timeLockDelay: toPositiveNumber(res.time_lock_delay)
      })
    })
  })
}
|
||||
|
||||
// created_height is the accepted_height, timeout is the expiry height
|
||||
// ln-service remaps the `htlcs` field of lookupInvoice to `payments` and
|
||||
// see: https://github.com/alexbosworth/lightning/blob/master/lnd_responses/htlc_as_payment.js
|
||||
// and: https://lightning.engineering/api-docs/api/lnd/lightning/lookup-invoice/index.html#lnrpcinvoicehtlc
|
||||
export function hodlInvoiceCltvDetails (inv) {
|
||||
if (!inv.payments) {
|
||||
throw new Error('No payments found')
|
||||
}
|
||||
if (!inv.is_held) {
|
||||
throw new Error('Invoice is not held')
|
||||
}
|
||||
|
||||
const acceptHeight = inv.payments.reduce((max, htlc) => {
|
||||
const createdHeight = toPositiveNumber(htlc.created_height)
|
||||
return createdHeight > max ? createdHeight : max
|
||||
}, 0)
|
||||
const expiryHeight = inv.payments.reduce((min, htlc) => {
|
||||
const timeout = toPositiveNumber(htlc.timeout)
|
||||
return timeout < min ? timeout : min
|
||||
}, Number.MAX_SAFE_INTEGER)
|
||||
|
||||
return {
|
||||
expiryHeight: toPositiveNumber(expiryHeight),
|
||||
acceptHeight: toPositiveNumber(acceptHeight)
|
||||
}
|
||||
}
|
||||
|
||||
export function getPaymentFailureStatus (withdrawal) {
|
||||
if (withdrawal && !withdrawal.is_failed) {
|
||||
throw new Error('withdrawal is not failed')
|
||||
}
|
||||
|
||||
if (withdrawal?.failed?.is_insufficient_balance) {
|
||||
return {
|
||||
status: 'INSUFFICIENT_BALANCE',
|
||||
message: 'you didn\'t have enough sats'
|
||||
}
|
||||
} else if (withdrawal?.failed?.is_invalid_payment) {
|
||||
return {
|
||||
status: 'INVALID_PAYMENT',
|
||||
message: 'invalid payment'
|
||||
}
|
||||
} else if (withdrawal?.failed?.is_pathfinding_timeout) {
|
||||
return {
|
||||
status: 'PATHFINDING_TIMEOUT',
|
||||
message: 'no route found'
|
||||
}
|
||||
} else if (withdrawal?.failed?.is_route_not_found) {
|
||||
return {
|
||||
status: 'ROUTE_NOT_FOUND',
|
||||
message: 'no route found'
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
status: 'UNKNOWN_FAILURE',
|
||||
message: 'unknown failure'
|
||||
}
|
||||
}
|
||||
|
||||
export const getBlockHeight = cachedFetcher(async function fetchBlockHeight ({ lnd, ...args }) {
|
||||
try {
|
||||
const { current_block_height: height } = await getHeight({ lnd, ...args })
|
||||
return height
|
||||
} catch (err) {
|
||||
throw new Error(`Unable to fetch block height: ${err.message}`)
|
||||
}
|
||||
}, {
|
||||
maxSize: 1,
|
||||
cacheExpiry: 60 * 1000, // 1 minute
|
||||
forceRefreshThreshold: 5 * 60 * 1000, // 5 minutes
|
||||
keyGenerator: () => 'getHeight'
|
||||
})
|
||||
|
||||
export const getOurPubkey = cachedFetcher(async function fetchOurPubkey ({ lnd, ...args }) {
|
||||
try {
|
||||
const identity = await getIdentity({ lnd, ...args })
|
||||
return identity.public_key
|
||||
} catch (err) {
|
||||
throw new Error(`Unable to fetch identity: ${err.message}`)
|
||||
}
|
||||
}, {
|
||||
maxSize: 1,
|
||||
cacheExpiry: 0, // never expire
|
||||
forceRefreshThreshold: 0, // never force refresh
|
||||
keyGenerator: () => 'getOurPubkey'
|
||||
})
|
||||
|
||||
export const getNodeSockets = cachedFetcher(async function fetchNodeSockets ({ lnd, ...args }) {
|
||||
try {
|
||||
return (await getNode({ lnd, is_omitting_channels: true, ...args }))?.sockets
|
||||
} catch (err) {
|
||||
throw new Error(`Unable to fetch node info: ${err.message}`)
|
||||
}
|
||||
}, {
|
||||
maxSize: 100,
|
||||
cacheExpiry: 1000 * 60 * 60 * 24, // 1 day
|
||||
forceRefreshThreshold: 1000 * 60 * 60 * 24 * 7, // 1 week
|
||||
keyGenerator: (args) => {
|
||||
const { public_key: publicKey } = args
|
||||
return publicKey
|
||||
}
|
||||
})
|
||||
|
||||
export async function getPaymentOrNotSent ({ id, lnd, createdAt }) {
|
||||
try {
|
||||
return await getPayment({ id, lnd })
|
||||
} catch (err) {
|
||||
if (err[1] === 'SentPaymentNotFound' &&
|
||||
createdAt < datePivot(new Date(), { milliseconds: -LND_PATHFINDING_TIMEOUT_MS * 2 })) {
|
||||
// if the payment is older than 2x timeout, but not found in LND, we can assume it errored before lnd stored it
|
||||
return { notSent: true, is_failed: true }
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default lnd
|
||||
|
@@ -1,12 +1,18 @@
import createPrisma from '@/lib/create-prisma'
import { PrismaClient } from '@prisma/client'

const prisma = global.prisma || (() => {
  console.log('initing prisma')
  return createPrisma({
    connectionParams: {
      connection_limit: process.env.DB_APP_CONNECTION_LIMIT
const prisma = new PrismaClient({
  log: [{ level: 'query', emit: 'event' }, 'warn', 'error']
})
prisma.$on('query', (e) => {
  if (e.duration > 50) {
    console.log('Query: ' + e.query)
    console.log('Params: ' + e.params)
    console.log('Duration: ' + e.duration + 'ms')
  }
})
  return prisma
})()

if (process.env.NODE_ENV === 'development') global.prisma = prisma

3 api/package.json Normal file

@@ -0,0 +1,3 @@
{
  "type": "module"
}
@@ -1,371 +0,0 @@
# Paid Actions

Paid actions are actions that require payments to perform. Given that we support several payment flows, some of which require more than one round of communication either with LND or the client, and several paid actions, we have this plugin-like interface to easily add new paid actions.

<details>
<summary>internals</summary>

All paid action progress, regardless of flow, is managed using a state machine that's transitioned by the invoice progress and payment progress (in the case of p2p paid actions). Below is the full state machine for paid actions:

```mermaid
stateDiagram-v2
[*] --> PENDING
PENDING --> PAID
PENDING --> CANCELING
PENDING --> FAILED
PAID --> [*]
CANCELING --> FAILED
FAILED --> RETRYING
FAILED --> [*]
RETRYING --> [*]
[*] --> PENDING_HELD
PENDING_HELD --> HELD
PENDING_HELD --> FORWARDING
PENDING_HELD --> CANCELING
PENDING_HELD --> FAILED
HELD --> PAID
HELD --> CANCELING
HELD --> FAILED
FORWARDING --> FORWARDED
FORWARDING --> FAILED_FORWARD
FORWARDED --> PAID
FAILED_FORWARD --> CANCELING
FAILED_FORWARD --> FAILED
```
</details>

## Payment Flows

There are three payment flows:

### Fee credits
The stacker has enough fee credits to pay for the action. This is the simplest flow and is similar to a normal request.

### Optimistic
The optimistic flow is useful for actions that require immediate feedback to the client, but don't require the action to be immediately visible to everyone else.

For paid actions that support it, if the stacker doesn't have enough fee credits, we store the action in a `PENDING` state on the server, which is visible only to the stacker, then return a payment request to the client. The client then pays the invoice however and whenever they wish, and the server monitors payment progress. If the payment succeeds, the action is fully executed, becoming visible to everyone, and is marked as `PAID`. Otherwise, the action is marked as `FAILED`, the client is notified the payment failed, and the payment can be retried.

<details>
<summary>Internals</summary>

Internally, optimistic flows make use of a state machine that's transitioned by the invoice payment progress.

```mermaid
stateDiagram-v2
[*] --> PENDING
PENDING --> PAID
PENDING --> CANCELING
PENDING --> FAILED
PAID --> [*]
CANCELING --> FAILED
FAILED --> RETRYING
FAILED --> [*]
RETRYING --> [*]
```
</details>

### Pessimistic
For paid actions that don't support optimistic actions (or when the stacker is `@anon`), if the client doesn't have enough fee credits, we return a payment request to the client without performing the action, storing only the action's arguments. After the client pays the invoice, the server performs the action with the original arguments. Pessimistic actions only become visible, to the stacker and everyone else, after the payment completes.

Internally, pessimistic flows use hold invoices. If the action doesn't succeed, the payment is cancelled and it's as if the payment never happened (ie it's a lightning-native refund mechanism).
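Pessimistic flows lean on the hold-invoice primitives exposed by `ln-service`, which this codebase already uses elsewhere. A minimal sketch of the refund mechanism, with the action logic stubbed out (`performAction` is a placeholder, not a real function here):

```js
import { createHodlInvoice, subscribeToInvoice, settleHodlInvoice, cancelHodlInvoice } from 'ln-service'

// lnd is an authenticated grpc handle as in api/lnd/index.js
async function pessimisticSketch ({ lnd, mtokens, performAction }) {
  // without an explicit id, ln-service generates the preimage (secret) for us
  const { id, request, secret } = await createHodlInvoice({ lnd, mtokens })

  // hand `request` to the client; once it's paid, lnd holds the htlc instead of settling
  const sub = subscribeToInvoice({ lnd, id })
  sub.on('invoice_updated', async inv => {
    if (!inv.is_held) return
    try {
      await performAction() // only settle if the action succeeds
      await settleHodlInvoice({ lnd, secret })
    } catch {
      await cancelHodlInvoice({ lnd, id }) // the sender is refunded
    }
  })

  return request
}
```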
<details>
<summary>Internals</summary>

Internally, pessimistic flows make use of a state machine that's transitioned by the invoice payment progress much like optimistic flows, but with extra steps.

```mermaid
stateDiagram-v2
PAID --> [*]
CANCELING --> FAILED
FAILED --> [*]
[*] --> PENDING_HELD
PENDING_HELD --> HELD
PENDING_HELD --> CANCELING
PENDING_HELD --> FAILED
HELD --> PAID
HELD --> CANCELING
HELD --> FAILED
```
</details>

### Table of existing paid actions and their supported flows

| action | fee credits | optimistic | pessimistic | anonable | qr payable | p2p wrapped | side effects | reward sats | p2p direct |
| ----------------- | ----------- | ---------- | ----------- | -------- | ---------- | ----------- | ------------ | ----------- | ---------- |
| zaps | x | x | x | x | x | x | x | | |
| posts | x | x | x | x | x | | x | x | |
| comments | x | x | x | x | x | | x | x | |
| downzaps | x | x | | | x | | x | x | |
| poll votes | x | x | | | x | | | x | |
| territory actions | x | | x | | x | | | x | |
| donations | x | | x | x | x | | | x | |
| update posts | x | | x | | x | | x | x | |
| update comments | x | | x | | x | | x | x | |
| receive | | x | | | x | x | x | | x |
| buy fee credits | | | x | | x | | | x | |
| invite gift | x | | | | | | x | x | |

## Non-custodial zaps (ie p2p wrapped payments)
Zaps, and possibly other future actions, can be performed peer to peer and non-custodially. This means the payment is made directly from the client to the recipient, without the server taking custody of the funds. Currently, in order to trigger this behavior, the recipient must have a receiving wallet attached and the sender must have insufficient funds in their custodial wallet to perform the requested zap.

This works by requesting an invoice from the recipient's wallet and reusing its payment hash in a hold invoice paid to SN (to collect the sybil fee), which we serve to the sender. When the sender pays this wrapped invoice, we forward our own money to the recipient, who then reveals the preimage to us, allowing us to settle the wrapped invoice and claim the sender's funds. This effectively does what a lightning node does when forwarding a payment, but allows us to do it at the application layer.
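Conceptually, the wrap works because a hold invoice can be created with an externally supplied payment hash, and settling it requires the preimage that only the recipient's wallet knows. A sketch under those assumptions, with the fee math simplified (here the sybil fee is added on top of what the recipient receives; the exact accounting lives in `wallets/server`):

```js
import { parsePaymentRequest, createHodlInvoice } from 'ln-service'

// recipientBolt11 comes from the receiver's attached wallet
async function wrapInvoice ({ lnd, recipientBolt11, feePercent }) {
  const inv = parsePaymentRequest({ request: recipientBolt11 })

  // reuse the recipient's payment hash, so only their preimage can settle our invoice
  const wrapped = await createHodlInvoice({
    lnd,
    id: inv.id, // the payment hash
    mtokens: String(BigInt(inv.mtokens) * 100n / (100n - feePercent)), // add the sybil fee
    expires_at: inv.expires_at
  })

  return wrapped.request // served to the sender
}
```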
<details>
<summary>Internals</summary>

Internally, p2p wrapped payments make use of the same paid action state machine, but it's transitioned by both the incoming invoice payment progress *and* the outgoing invoice payment progress.

```mermaid
stateDiagram-v2
PAID --> [*]
CANCELING --> FAILED
FAILED --> RETRYING
FAILED --> [*]
RETRYING --> [*]
[*] --> PENDING_HELD
PENDING_HELD --> FORWARDING
PENDING_HELD --> CANCELING
PENDING_HELD --> FAILED
FORWARDING --> FORWARDED
FORWARDING --> FAILED_FORWARD
FORWARDED --> PAID
FAILED_FORWARD --> CANCELING
FAILED_FORWARD --> FAILED
```
</details>

## Paid Action Interface

Each paid action is implemented in its own file in the `paidAction` directory. Each file exports a module with the following properties:

### Boolean flags
- `anonable`: can be performed anonymously

### Payment methods
- `paymentMethods`: an array of payment methods that the action supports, ordered from most preferred to least preferred
  - P2P: a p2p payment made directly from the client to the recipient
    - after wrapping the invoice, anonymous users will follow a PESSIMISTIC flow to pay the invoice and logged in users will follow an OPTIMISTIC flow
  - FEE_CREDIT: a payment made from the user's fee credit balance
  - OPTIMISTIC: an optimistic payment flow
  - PESSIMISTIC: a pessimistic payment flow

### Functions

All functions have the following signature: `function(args: Object, context: Object): Promise`

- `getCost`: returns the cost of the action in msats as a `BigInt`
- `perform`: performs the action
  - returns: an object with the result of the action as defined in the `graphql` schema
  - if the action supports optimism and an `invoiceId` is provided, the action should be performed optimistically
    - any action data that needs to be hidden while it's pending should store a `PENDING` state in its rows along with its `invoiceId`
    - it can optionally store the `actionId` on the invoice with the `invoiceId`, to be able to link the action with the invoice regardless of retries
- `onPaid`: called when the action is paid
  - if the action does not support optimism, this function is optional
  - this function should be used to mark the rows created in `perform` as `PAID` and perform critical side effects of the action (like denormalizations)
- `nonCriticalSideEffects`: called after the action is paid to run any side effects whose failure does not affect the action's execution
  - this function is always optional
  - it's passed the result of the action (or the action's paid invoice) and the current context
  - this is where things like push notifications should be handled
- `onFail`: called when the action fails
  - if the action does not support optimism, this function is optional
  - this function should be used to mark the rows created in `perform` as `FAILED`
- `retry`: called when the action is retried with any new invoice information
  - returns: an object with the result of the action as defined in the `graphql` schema (same as `perform`)
  - this function is called when an optimistic action is retried
  - it's passed the original `invoiceId` and the `newInvoiceId`
  - this function should update the rows created in `perform` to contain the new `newInvoiceId` and remark the rows as `PENDING`
- `getInvoiceablePeer`: returns the userId of the peer that's capable of generating an invoice so they can be paid for the action
  - this is currently only used for p2p wrapped zaps
- `describe`: returns a string description of the action
  - for actions that require generating an invoice, and for stackers that don't hide invoice descriptions, this is used in the invoice description
- `getSybilFeePercent` (required if `getInvoiceablePeer` is implemented): returns the action's sybil fee percent as a `BigInt` (eg. 30n for 30%)

#### Function arguments

`args` contains the arguments for the action as defined in the `graphql` schema. If the action is optimistic or pessimistic, `args` will contain an `invoiceId` field which can be stored alongside the paid action's data. If this is a call to `retry`, `args` will contain the original `invoiceId` and `newInvoiceId` fields.

`context` contains the following fields:
- `me`: the user performing the action (undefined if anonymous)
- `cost`: the cost of the action in msats as a `BigInt`
- `sybilFeePercent`: the sybil fee percent as a `BigInt` (eg. 30n for 30%)
- `tx`: the current transaction (for anything that needs to be done atomically with the payment)
- `models`: the current prisma client (for anything that doesn't need to be done atomically with the payment)
- `lnd`: the current lnd client
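Putting the interface together, a hypothetical minimal paid action module might look like the sketch below; it mirrors the real modules elsewhere in this directory, but `exampleThing` is an illustrative model, not a real table:

```js
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'

export const anonable = false

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC
]

export async function getCost ({ sats }) {
  return satsToMsats(sats)
}

export async function perform ({ invoiceId, sats }, { me, tx }) {
  // store the row as PENDING so it stays hidden until the invoice is paid
  return await tx.exampleThing.create({
    data: { sats, userId: me.id, ...(invoiceId ? { invoiceId, invoiceActionState: 'PENDING' } : {}) }
  })
}

export async function onPaid ({ invoice }, { tx }) {
  await tx.exampleThing.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID' } })
}

export async function onFail ({ invoice }, { tx }) {
  await tx.exampleThing.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
}

export async function describe ({ sats }) {
  return `SN: example action of ${sats} sats`
}
```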
## Recording Cowboy Credits

To avoid adding sats and credits together everywhere to show an aggregate sat value, in most cases we denormalize a `sats` field that carries the "sats value" (the combined sats + credits of something) and a `credits` field that carries only the earned `credits`. For example, the `Item` table has an `msats` field that carries the sum of the `mcredits` and `msats` earned and an `mcredits` field that carries the value of the `mcredits` earned. So, the sats value an item earned is `item.msats`, BUT the real sats earned is `item.msats - item.mcredits`.

The ONLY exception to this is the `users` table, where we store a stacker's rewards sats and credits balances separately.
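In other words, for a denormalized row the real sats are always derivable by subtraction; for instance (with illustrative values):

```sql
-- an item that earned 300 sats of value, 100 of which were cowboy credits:
-- item.msats = 300000, item.mcredits = 100000
SELECT msats / 1000 AS sats_value,            -- 300: the aggregate value we display
       (msats - mcredits) / 1000 AS real_sats -- 200: the actual sats earned
FROM "Item" WHERE id = 1;
```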
## `IMPORTANT: transaction isolation`

We use a `read committed` isolation level for actions. This means paid actions need to be mindful of concurrency issues. Specifically, reading data from the database and then writing it back in `read committed` is a common source of consistency bugs (aka serialization anomalies).

### This is a big deal
1. If you read from the database and intend to use that data to write to the database, and it's possible that a concurrent transaction could change the data you've read (it usually is), you need to be prepared to handle that.
2. This applies to **ALL**, and I really mean **ALL**, read data regardless of how you read the data within the `read committed` transaction:
   - independent statements
   - `WITH` queries (CTEs) in the same statement
   - subqueries in the same statement

### How to handle it
1. take row level locks on the rows you read, using something like a `SELECT ... FOR UPDATE` statement
   - NOTE: this does not protect against missed concurrent inserts. It only prevents concurrent updates to the rows you've already read.
   - read about row level locks available in postgres: https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-ROWS
2. check that the data you read is still valid before writing it back to the database, ie optimistic concurrency control (see the sketch after this list)
   - NOTE: this does not protect against missed concurrent inserts. It only prevents concurrent updates to the rows you've already read.
3. avoid having to read data from one row to modify the data of another row altogether
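A minimal sketch of option 2, optimistic concurrency control, assuming a hypothetical `version` column on `item_zaps`; the write only succeeds if the row is unchanged since it was read:

```sql
-- read the row and remember its version
SELECT sats, version FROM item_zaps WHERE item_id = 1;

-- write back only if nobody changed the row in the meantime
UPDATE item_zaps
SET sats = 142, version = version + 1
WHERE item_id = 1 AND version = 7; -- the version we read

-- if this reports 0 rows updated, re-read and retry
```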
### Example

Let's say you are aggregating total sats for an item from a table `zaps` and updating the total sats for that item in another table `item_zaps`. Two 100 sat zaps are requested for the same item at the same time in two concurrent transactions. The total sats for the item should be 200, but because of the way `read committed` works, the following statements lead to a total sats of 100:

*the statements here are listed in the order they are executed, but each transaction is happening concurrently*

#### Incorrect

```sql
-- transaction 1
BEGIN;
INSERT INTO zaps (item_id, sats) VALUES (1, 100);
SELECT sum(sats) INTO total_sats FROM zaps WHERE item_id = 1;
-- total_sats is 100
-- transaction 2
BEGIN;
INSERT INTO zaps (item_id, sats) VALUES (1, 100);
SELECT sum(sats) INTO total_sats FROM zaps WHERE item_id = 1;
-- total_sats is still 100, because transaction 1 hasn't committed yet
-- transaction 1
UPDATE item_zaps SET sats = total_sats WHERE item_id = 1;
-- sets sats to 100
-- transaction 2
UPDATE item_zaps SET sats = total_sats WHERE item_id = 1;
-- sets sats to 100
COMMIT;
-- transaction 1
COMMIT;
-- item_zaps.sats is 100, but we would expect it to be 200
```

Note that row level locks wouldn't help in this case, because we can't lock rows that the transactions don't yet know exist.

#### Subqueries are still incorrect

```sql
-- transaction 1
BEGIN;
INSERT INTO zaps (item_id, sats) VALUES (1, 100);
UPDATE item_zaps SET sats = (SELECT sum(sats) FROM zaps WHERE item_id = 1) WHERE item_id = 1;
-- item_zaps.sats is 100
-- transaction 2
BEGIN;
INSERT INTO zaps (item_id, sats) VALUES (1, 100);
UPDATE item_zaps SET sats = (SELECT sum(sats) FROM zaps WHERE item_id = 1) WHERE item_id = 1;
-- item_zaps.sats is still 100, because transaction 1 hasn't committed yet
-- transaction 1
COMMIT;
-- transaction 2
COMMIT;
-- item_zaps.sats is 100, but we would expect it to be 200
```

Note that while transaction 2's `UPDATE` statement will block until transaction 1 commits, the subquery is computed before it blocks and is not re-evaluated after the block.

#### Correct

```sql
-- transaction 1
BEGIN;
INSERT INTO zaps (item_id, sats) VALUES (1, 100);
-- transaction 2
BEGIN;
INSERT INTO zaps (item_id, sats) VALUES (1, 100);
-- transaction 1
UPDATE item_zaps SET sats = sats + 100 WHERE item_id = 1;
-- transaction 2
UPDATE item_zaps SET sats = sats + 100 WHERE item_id = 1;
COMMIT;
-- transaction 1
COMMIT;
-- item_zaps.sats is 200
```

The above works because `UPDATE` takes a lock on the rows it's updating, so transaction 2 will block until transaction 1 commits, and once transaction 2 is unblocked, it will re-evaluate the `sats` value of the row it's updating.

#### More resources
- https://stackoverflow.com/questions/61781595/postgres-read-commited-doesnt-re-read-updated-row?noredirect=1#comment109279507_61781595
- https://www.cybertec-postgresql.com/en/transaction-anomalies-with-select-for-update/

From the [postgres docs](https://www.postgresql.org/docs/current/transaction-iso.html#XACT-READ-COMMITTED):
> UPDATE, DELETE, SELECT FOR UPDATE, and SELECT FOR SHARE commands behave the same as SELECT in terms of searching for target rows: they will only find target rows that were committed as of the command start time. However, such a target row might have already been updated (or deleted or locked) by another concurrent transaction by the time it is found. In this case, the would-be updater will wait for the first updating transaction to commit or roll back (if it is still in progress). If the first updater rolls back, then its effects are negated and the second updater can proceed with updating the originally found row. If the first updater commits, the second updater will ignore the row if the first updater deleted it, otherwise it will attempt to apply its operation to the updated version of the row. The search condition of the command (the WHERE clause) is re-evaluated to see if the updated version of the row still matches the search condition. If so, the second updater proceeds with its operation using the updated version of the row. In the case of SELECT FOR UPDATE and SELECT FOR SHARE, this means it is the updated version of the row that is locked and returned to the client.

From the [postgres source docs](https://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/backend/executor/README#l350):
> It is also possible that there are relations in the query that are not to be locked (they are neither the UPDATE/DELETE/MERGE target nor specified to be locked in SELECT FOR UPDATE/SHARE). When re-running the test query ***we want to use the same rows*** from these relations that were joined to the locked rows.

## `IMPORTANT: deadlocks`

Deadlocks can occur when two transactions are waiting for each other to release locks. This can happen when two transactions lock rows in different orders, whether explicitly or implicitly.

If both transactions lock the rows in the same order, the deadlock is avoided.

### Incorrect

```sql
-- transaction 1
BEGIN;
UPDATE users SET msats = msats + 1 WHERE id = 1;
-- transaction 2
BEGIN;
UPDATE users SET msats = msats + 1 WHERE id = 2;
-- transaction 1 (blocks here until transaction 2 commits)
UPDATE users SET msats = msats + 1 WHERE id = 2;
-- transaction 2 (blocks here until transaction 1 commits)
UPDATE users SET msats = msats + 1 WHERE id = 1;
-- deadlock occurs because neither transaction can proceed
```

In practice, this most often occurs when selecting multiple rows for update in different orders. Recently, we had a deadlock when splitting zaps to multiple users. The solution was to select the rows for update in the same order.

### Incorrect

```sql
WITH forwardees AS (
    SELECT "userId", (($1::BIGINT * pct) / 100)::BIGINT AS msats
    FROM "ItemForward"
    WHERE "itemId" = $2::INTEGER
)
UPDATE users
SET
    msats = users.msats + forwardees.msats,
    "stackedMsats" = users."stackedMsats" + forwardees.msats
FROM forwardees
WHERE users.id = forwardees."userId";
```

If forwardees are selected in a different order in two concurrent transactions, e.g. (1,2) in tx 1 and (2,1) in tx 2, a deadlock can occur. To avoid this, always select rows for update in the same order.

### Correct

We fixed the deadlock by selecting the forwardees in the same order in these transactions.

```sql
WITH forwardees AS (
    SELECT "userId", (($1::BIGINT * pct) / 100)::BIGINT AS msats
    FROM "ItemForward"
    WHERE "itemId" = $2::INTEGER
    ORDER BY "userId" ASC
)
UPDATE users
SET
    msats = users.msats + forwardees.msats,
    "stackedMsats" = users."stackedMsats" + forwardees.msats
FROM forwardees
WHERE users.id = forwardees."userId";
```

### More resources

- https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-DEADLOCKS
@@ -1,82 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { msatsToSats, satsToMsats } from '@/lib/format'

export const anonable = false

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
  PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC
]

export async function getCost ({ sats }) {
  return satsToMsats(sats)
}

export async function perform ({ invoiceId, sats, id: itemId, ...args }, { me, cost, tx }) {
  itemId = parseInt(itemId)

  let invoiceData = {}
  if (invoiceId) {
    invoiceData = { invoiceId, invoiceActionState: 'PENDING' }
    // store a reference to the item in the invoice
    await tx.invoice.update({
      where: { id: invoiceId },
      data: { actionId: itemId }
    })
  }

  const act = await tx.itemAct.create({ data: { msats: cost, itemId, userId: me.id, act: 'BOOST', ...invoiceData } })

  const [{ path }] = await tx.$queryRaw`
    SELECT ltree2text(path) as path FROM "Item" WHERE id = ${itemId}::INTEGER`
  return { id: itemId, sats, act: 'BOOST', path, actId: act.id }
}

export async function retry ({ invoiceId, newInvoiceId }, { tx, cost }) {
  await tx.itemAct.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
  const [{ id, path }] = await tx.$queryRaw`
    SELECT "Item".id, ltree2text(path) as path
    FROM "Item"
    JOIN "ItemAct" ON "Item".id = "ItemAct"."itemId"
    WHERE "ItemAct"."invoiceId" = ${newInvoiceId}::INTEGER`
  return { id, sats: msatsToSats(cost), act: 'BOOST', path }
}

export async function onPaid ({ invoice, actId }, { tx }) {
  let itemAct
  if (invoice) {
    await tx.itemAct.updateMany({
      where: { invoiceId: invoice.id },
      data: {
        invoiceActionState: 'PAID'
      }
    })
    itemAct = await tx.itemAct.findFirst({ where: { invoiceId: invoice.id } })
  } else if (actId) {
    itemAct = await tx.itemAct.findFirst({ where: { id: actId } })
  } else {
    throw new Error('No invoice or actId')
  }

  // increment boost on item
  await tx.item.update({
    where: { id: itemAct.itemId },
    data: {
      boost: { increment: msatsToSats(itemAct.msats) }
    }
  })

  // schedule a job to expire the boost in 30 days
  await tx.$executeRaw`
    INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter, keepuntil)
    VALUES ('expireBoost', jsonb_build_object('id', ${itemAct.itemId}::INTEGER), 21, true,
            now() + interval '30 days', now() + interval '40 days')`
}

export async function onFail ({ invoice }, { tx }) {
  await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
}

export async function describe ({ id: itemId, sats }, { actionId, cost }) {
  return `SN: boost ${sats ?? msatsToSats(cost)} sats to #${itemId ?? actionId}`
}
@@ -1,32 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'

export const anonable = false

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
  PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]

export async function getCost ({ credits }) {
  return satsToMsats(credits)
}

export async function perform ({ credits }, { me, cost, tx }) {
  // credits are 1:1 with sats, so the msats cost is also the mcredits amount
  await tx.user.update({
    where: { id: me.id },
    data: {
      mcredits: {
        increment: cost
      }
    }
  })

  return {
    credits
  }
}

export async function describe () {
  return 'SN: buy fee credits'
}
@@ -1,29 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, USER_ID } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'

export const anonable = true

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
  PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]

export async function getCost ({ sats }) {
  return satsToMsats(sats)
}

export async function perform ({ sats }, { me, tx }) {
  await tx.donation.create({
    data: {
      sats,
      userId: me?.id ?? USER_ID.anon
    }
  })

  return { sats }
}

export async function describe (args, context) {
  return 'SN: donate to rewards pool'
}
@@ -1,100 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { msatsToSats, satsToMsats } from '@/lib/format'
import { Prisma } from '@prisma/client'

export const anonable = false

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
  PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC
]

export async function getCost ({ sats }) {
  return satsToMsats(sats)
}

export async function perform ({ invoiceId, sats, id: itemId }, { me, cost, tx }) {
  itemId = parseInt(itemId)

  let invoiceData = {}
  if (invoiceId) {
    invoiceData = { invoiceId, invoiceActionState: 'PENDING' }
    // store a reference to the item in the invoice
    await tx.invoice.update({
      where: { id: invoiceId },
      data: { actionId: itemId }
    })
  }

  const itemAct = await tx.itemAct.create({
    data: { msats: cost, itemId, userId: me.id, act: 'DONT_LIKE_THIS', ...invoiceData }
  })

  const [{ path }] = await tx.$queryRaw`SELECT ltree2text(path) as path FROM "Item" WHERE id = ${itemId}::INTEGER`
  return { id: itemId, sats, act: 'DONT_LIKE_THIS', path, actId: itemAct.id }
}

export async function retry ({ invoiceId, newInvoiceId }, { tx, cost }) {
  await tx.itemAct.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
  const [{ id, path }] = await tx.$queryRaw`
    SELECT "Item".id, ltree2text(path) as path
    FROM "Item"
    JOIN "ItemAct" ON "Item".id = "ItemAct"."itemId"
    WHERE "ItemAct"."invoiceId" = ${newInvoiceId}::INTEGER`
  return { id, sats: msatsToSats(cost), act: 'DONT_LIKE_THIS', path }
}

export async function onPaid ({ invoice, actId }, { tx }) {
  let itemAct
  if (invoice) {
    await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID' } })
    itemAct = await tx.itemAct.findFirst({ where: { invoiceId: invoice.id }, include: { item: true } })
  } else if (actId) {
    itemAct = await tx.itemAct.findUnique({ where: { id: actId }, include: { item: true } })
  } else {
    throw new Error('No invoice or actId')
  }

  const msats = BigInt(itemAct.msats)
  const sats = msatsToSats(msats)

  // denormalize downzaps
  await tx.$executeRaw`
    WITH territory AS (
      SELECT COALESCE(r."subName", i."subName", 'meta')::TEXT as "subName"
      FROM "Item" i
      LEFT JOIN "Item" r ON r.id = i."rootId"
      WHERE i.id = ${itemAct.itemId}::INTEGER
    ), zapper AS (
      SELECT
        COALESCE(${itemAct.item.parentId
          ? Prisma.sql`"zapCommentTrust"`
          : Prisma.sql`"zapPostTrust"`}, 0) as "zapTrust",
        COALESCE(${itemAct.item.parentId
          ? Prisma.sql`"subZapCommentTrust"`
          : Prisma.sql`"subZapPostTrust"`}, 0) as "subZapTrust"
      FROM territory
      LEFT JOIN "UserSubTrust" ust ON ust."subName" = territory."subName"
        AND ust."userId" = ${itemAct.userId}::INTEGER
    ), zap AS (
      INSERT INTO "ItemUserAgg" ("userId", "itemId", "downZapSats")
      VALUES (${itemAct.userId}::INTEGER, ${itemAct.itemId}::INTEGER, ${sats}::INTEGER)
      ON CONFLICT ("itemId", "userId") DO UPDATE
      SET "downZapSats" = "ItemUserAgg"."downZapSats" + ${sats}::INTEGER, updated_at = now()
      RETURNING LOG("downZapSats" / GREATEST("downZapSats" - ${sats}::INTEGER, 1)::FLOAT) AS log_sats
    )
    UPDATE "Item"
    SET "weightedDownVotes" = "weightedDownVotes" + zapper."zapTrust" * zap.log_sats,
        "subWeightedDownVotes" = "subWeightedDownVotes" + zapper."subZapTrust" * zap.log_sats
    FROM zap, zapper
    WHERE "Item".id = ${itemAct.itemId}::INTEGER`
}

export async function onFail ({ invoice }, { tx }) {
  await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
}

export async function describe ({ id: itemId, sats }, { cost, actionId }) {
  return `SN: downzap of ${sats ?? msatsToSats(cost)} sats to #${itemId ?? actionId}`
}
@@ -1,496 +0,0 @@
import { createHodlInvoice, createInvoice, parsePaymentRequest } from 'ln-service'
import { datePivot } from '@/lib/time'
import { PAID_ACTION_PAYMENT_METHODS, USER_ID } from '@/lib/constants'
import { createHmac } from '@/api/resolvers/wallet'
import { Prisma } from '@prisma/client'
import { createWrappedInvoice, createUserInvoice } from '@/wallets/server'
import { assertBelowMaxPendingInvoices, assertBelowMaxPendingDirectPayments } from './lib/assert'

import * as ITEM_CREATE from './itemCreate'
import * as ITEM_UPDATE from './itemUpdate'
import * as ZAP from './zap'
import * as DOWN_ZAP from './downZap'
import * as POLL_VOTE from './pollVote'
import * as TERRITORY_CREATE from './territoryCreate'
import * as TERRITORY_UPDATE from './territoryUpdate'
import * as TERRITORY_BILLING from './territoryBilling'
import * as TERRITORY_UNARCHIVE from './territoryUnarchive'
import * as DONATE from './donate'
import * as BOOST from './boost'
import * as RECEIVE from './receive'
import * as BUY_CREDITS from './buyCredits'
import * as INVITE_GIFT from './inviteGift'

export const paidActions = {
  ITEM_CREATE,
  ITEM_UPDATE,
  ZAP,
  DOWN_ZAP,
  BOOST,
  POLL_VOTE,
  TERRITORY_CREATE,
  TERRITORY_UPDATE,
  TERRITORY_BILLING,
  TERRITORY_UNARCHIVE,
  DONATE,
  RECEIVE,
  BUY_CREDITS,
  INVITE_GIFT
}

export default async function performPaidAction (actionType, args, incomingContext) {
  try {
    const { me, models, forcePaymentMethod } = incomingContext
    const paidAction = paidActions[actionType]

    console.group('performPaidAction', actionType, args)

    if (!paidAction) {
      throw new Error(`Invalid action type ${actionType}`)
    }

    if (!me && !paidAction.anonable) {
      throw new Error('You must be logged in to perform this action')
    }

    // treat context as immutable
    const contextWithMe = {
      ...incomingContext,
      me: me ? await models.user.findUnique({ where: { id: parseInt(me.id) } }) : undefined
    }
    const context = {
      ...contextWithMe,
      cost: await paidAction.getCost(args, contextWithMe),
      sybilFeePercent: await paidAction.getSybilFeePercent?.(args, contextWithMe)
    }

    // special case for zero cost actions
    if (context.cost === 0n) {
      console.log('performing zero cost action')
      return await performNoInvoiceAction(actionType, args, { ...context, paymentMethod: 'ZERO_COST' })
    }

    for (const paymentMethod of paidAction.paymentMethods) {
      console.log(`considering payment method ${paymentMethod}`)
      const contextWithPaymentMethod = { ...context, paymentMethod }

      if (forcePaymentMethod &&
        paymentMethod !== forcePaymentMethod) {
        console.log('skipping payment method', paymentMethod, 'because forcePaymentMethod is set to', forcePaymentMethod)
        continue
      }

      // payment methods that anonymous users can use
      if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.P2P) {
        try {
          return await performP2PAction(actionType, args, contextWithPaymentMethod)
        } catch (e) {
          if (e instanceof NonInvoiceablePeerError) {
            console.log('peer cannot be invoiced, skipping')
            continue
          }
          console.error(`${paymentMethod} action failed`, e)
          throw e
        }
      } else if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC) {
        return await beginPessimisticAction(actionType, args, contextWithPaymentMethod)
      }

      // additional payment methods that logged in users can use
      if (me) {
        if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT ||
          paymentMethod === PAID_ACTION_PAYMENT_METHODS.REWARD_SATS) {
          try {
            return await performNoInvoiceAction(actionType, args, contextWithPaymentMethod)
          } catch (e) {
            // if we fail with fee credits or reward sats, but not because of insufficient funds, bail
            console.error(`${paymentMethod} action failed`, e)
            if (!e.message.includes('\\"users\\" violates check constraint \\"msats_positive\\"') &&
              !e.message.includes('\\"users\\" violates check constraint \\"mcredits_positive\\"')) {
              throw e
            }
          }
        } else if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC) {
          return await performOptimisticAction(actionType, args, contextWithPaymentMethod)
        } else if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.DIRECT) {
          try {
            return await performDirectAction(actionType, args, contextWithPaymentMethod)
          } catch (e) {
            if (e instanceof NonInvoiceablePeerError) {
              console.log('peer cannot be invoiced, skipping')
              continue
            }
            console.error(`${paymentMethod} action failed`, e)
            throw e
          }
        }
      }
    }

    throw new Error('No working payment method found')
  } catch (e) {
    console.error('performPaidAction failed', e)
    throw e
  } finally {
    console.groupEnd()
  }
}

async function performNoInvoiceAction (actionType, args, incomingContext) {
  const { me, models, cost, paymentMethod } = incomingContext
  const action = paidActions[actionType]

  const result = await models.$transaction(async tx => {
    const context = { ...incomingContext, tx }

    if (paymentMethod === 'FEE_CREDIT') {
      await tx.user.update({
        where: {
          id: me?.id ?? USER_ID.anon
        },
        data: { mcredits: { decrement: cost } }
      })
    } else if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.REWARD_SATS) {
      await tx.user.update({
        where: {
          id: me?.id ?? USER_ID.anon
        },
        data: { msats: { decrement: cost } }
      })
    }

    const result = await action.perform(args, context)
    await action.onPaid?.(result, context)

    return {
      result,
      paymentMethod
    }
  }, { isolationLevel: Prisma.TransactionIsolationLevel.ReadCommitted })

  // run non-critical side effects in the background
  // after the transaction has been committed
  action.nonCriticalSideEffects?.(result.result, incomingContext).catch(console.error)
  return result
}

async function performOptimisticAction (actionType, args, incomingContext) {
  const { models, invoiceArgs: incomingInvoiceArgs } = incomingContext
  const action = paidActions[actionType]

  const optimisticContext = { ...incomingContext, optimistic: true }
  const invoiceArgs = incomingInvoiceArgs ?? await createSNInvoice(actionType, args, optimisticContext)

  return await models.$transaction(async tx => {
    const context = { ...optimisticContext, tx, invoiceArgs }

    const invoice = await createDbInvoice(actionType, args, context)

    return {
      invoice,
      result: await action.perform?.({ invoiceId: invoice.id, ...args }, context),
      paymentMethod: 'OPTIMISTIC'
    }
  }, { isolationLevel: Prisma.TransactionIsolationLevel.ReadCommitted })
}

async function beginPessimisticAction (actionType, args, context) {
  const action = paidActions[actionType]

  if (!action.paymentMethods.includes(PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC)) {
    throw new Error(`This action ${actionType} does not support pessimistic invoicing`)
  }

  // just create the invoice and complete the action when it's paid
  const invoiceArgs = context.invoiceArgs ?? await createSNInvoice(actionType, args, context)
  return {
    invoice: await createDbInvoice(actionType, args, { ...context, invoiceArgs }),
    paymentMethod: 'PESSIMISTIC'
  }
}

async function performP2PAction (actionType, args, incomingContext) {
  // if the action has an invoiceable peer, we'll create a peer invoice,
  // wrap it, and return the wrapped invoice
  const { cost, sybilFeePercent, models, lnd, me } = incomingContext
  if (!sybilFeePercent) {
    throw new Error('sybil fee percent is not set for an invoiceable peer action')
  }

  const userId = await paidActions[actionType]?.getInvoiceablePeer?.(args, incomingContext)
  if (!userId) {
    throw new NonInvoiceablePeerError()
  }

  let context
  try {
    await assertBelowMaxPendingInvoices(incomingContext)

    const description = await paidActions[actionType].describe(args, incomingContext)
    const { invoice, wrappedInvoice, wallet, maxFee } = await createWrappedInvoice(userId, {
      msats: cost,
      feePercent: sybilFeePercent,
      description,
      expiry: INVOICE_EXPIRE_SECS
    }, { models, me, lnd })

    context = {
      ...incomingContext,
      invoiceArgs: {
        bolt11: invoice,
        wrappedBolt11: wrappedInvoice,
        wallet,
        maxFee
      }
    }
  } catch (e) {
    console.error('failed to create wrapped invoice', e)
    throw new NonInvoiceablePeerError()
  }

  return me
    ? await performOptimisticAction(actionType, args, context)
    : await beginPessimisticAction(actionType, args, context)
}

// we don't need to use the module for perform-ing outside actions
// because we can't track the state of outside invoices we aren't paid/paying
async function performDirectAction (actionType, args, incomingContext) {
  const { models, lnd, cost } = incomingContext
  const { comment, lud18Data, noteStr, description: actionDescription } = args

  const userId = await paidActions[actionType]?.getInvoiceablePeer?.(args, incomingContext)
  if (!userId) {
    throw new NonInvoiceablePeerError()
  }

  try {
    await assertBelowMaxPendingDirectPayments(userId, incomingContext)

    const description = actionDescription ?? await paidActions[actionType].describe(args, incomingContext)

    for await (const { invoice, logger, wallet } of createUserInvoice(userId, {
      msats: cost,
      description,
      expiry: INVOICE_EXPIRE_SECS
    }, { models, lnd })) {
      let hash
      try {
        hash = parsePaymentRequest({ request: invoice }).id
      } catch (e) {
        console.error('failed to parse invoice', e)
        logger?.error('failed to parse invoice: ' + e.message, { bolt11: invoice })
        continue
      }

      try {
        return {
          invoice: await models.directPayment.create({
            data: {
              comment,
              lud18Data,
              desc: noteStr,
              bolt11: invoice,
              msats: cost,
              hash,
              walletId: wallet.id,
              receiverId: userId
            }
          }),
          paymentMethod: 'DIRECT'
        }
      } catch (e) {
        console.error('failed to create direct payment', e)
        logger?.error('failed to create direct payment: ' + e.message, { bolt11: invoice })
      }
    }
  } catch (e) {
    console.error('failed to create user invoice', e)
  }

  throw new NonInvoiceablePeerError()
}

export async function retryPaidAction (actionType, args, incomingContext) {
  const { models, me } = incomingContext
  const { invoice: failedInvoice } = args

  console.log('retryPaidAction', actionType, args)

  const action = paidActions[actionType]
  if (!action) {
    throw new Error(`retryPaidAction - invalid action type ${actionType}`)
  }

  if (!me) {
    throw new Error(`retryPaidAction - must be logged in ${actionType}`)
  }

  if (!failedInvoice) {
    throw new Error(`retryPaidAction - missing invoice ${actionType}`)
  }

  const { msatsRequested, actionId, actionArgs, actionOptimistic } = failedInvoice
  const retryContext = {
    ...incomingContext,
    optimistic: actionOptimistic,
    me: await models.user.findUnique({ where: { id: parseInt(me.id) } }),
    cost: BigInt(msatsRequested),
    actionId,
    predecessorId: failedInvoice.id
  }

  let invoiceArgs
  const invoiceForward = await models.invoiceForward.findUnique({
    where: {
      invoiceId: failedInvoice.id
    },
    include: {
      wallet: true
    }
  })

  if (invoiceForward) {
    // this is a wrapped invoice, we need to retry it with receiver fallbacks
    try {
      const { userId } = invoiceForward.wallet
      // this will return an invoice from the first receiver wallet that didn't fail yet and throw if none is available
      const { invoice: bolt11, wrappedInvoice: wrappedBolt11, wallet, maxFee } = await createWrappedInvoice(userId, {
        msats: failedInvoice.msatsRequested,
        feePercent: await action.getSybilFeePercent?.(actionArgs, retryContext),
        description: await action.describe?.(actionArgs, retryContext),
        expiry: INVOICE_EXPIRE_SECS
      }, retryContext)
      invoiceArgs = { bolt11, wrappedBolt11, wallet, maxFee }
    } catch (err) {
      console.log('failed to retry wrapped invoice, falling back to SN:', err)
    }
  }

  invoiceArgs ??= await createSNInvoice(actionType, actionArgs, retryContext)

  return await models.$transaction(async tx => {
    const context = { ...retryContext, tx, invoiceArgs }

    // update the old invoice to RETRYING, so that it's not confused with FAILED
    await tx.invoice.update({
      where: {
        id: failedInvoice.id,
        actionState: 'FAILED'
      },
      data: {
        actionState: 'RETRYING'
      }
    })

    // create a new invoice
    const invoice = await createDbInvoice(actionType, actionArgs, context)

    return {
      result: await action.retry?.({ invoiceId: failedInvoice.id, newInvoiceId: invoice.id }, context),
      invoice,
      paymentMethod: actionOptimistic ? 'OPTIMISTIC' : 'PESSIMISTIC'
    }
  }, { isolationLevel: Prisma.TransactionIsolationLevel.ReadCommitted })
}

const INVOICE_EXPIRE_SECS = 600

export class NonInvoiceablePeerError extends Error {
  constructor () {
    super('non invoiceable peer')
    this.name = 'NonInvoiceablePeerError'
  }
}

// we separate the invoice creation into two functions
// because if lnd is slow, it'll timeout the interactive tx
async function createSNInvoice (actionType, args, context) {
  const { me, lnd, cost, optimistic } = context
  const action = paidActions[actionType]
  const createLNDInvoice = optimistic ? createInvoice : createHodlInvoice

  await assertBelowMaxPendingInvoices(context)

  if (cost < 1000n) {
    // sanity check
    throw new Error('The cost of the action must be at least 1 sat')
  }

  const expiresAt = datePivot(new Date(), { seconds: INVOICE_EXPIRE_SECS })
  const invoice = await createLNDInvoice({
    description: me?.hideInvoiceDesc ? undefined : await action.describe(args, context),
    lnd,
    mtokens: String(cost),
    expires_at: expiresAt
  })
  return { bolt11: invoice.request, preimage: invoice.secret }
}

async function createDbInvoice (actionType, args, context) {
  const { me, models, tx, cost, optimistic, actionId, invoiceArgs, paymentAttempt, predecessorId } = context
  const { bolt11, wrappedBolt11, preimage, wallet, maxFee } = invoiceArgs

  const db = tx ?? models

  if (cost < 1000n) {
    // sanity check
    throw new Error('The cost of the action must be at least 1 sat')
  }

  const servedBolt11 = wrappedBolt11 ?? bolt11
  const servedInvoice = parsePaymentRequest({ request: servedBolt11 })
  const expiresAt = new Date(servedInvoice.expires_at)

  const invoiceData = {
    hash: servedInvoice.id,
    msatsRequested: BigInt(servedInvoice.mtokens),
    preimage,
    bolt11: servedBolt11,
    userId: me?.id ?? USER_ID.anon,
    actionType,
    actionState: wrappedBolt11 ? 'PENDING_HELD' : optimistic ? 'PENDING' : 'PENDING_HELD',
    actionOptimistic: optimistic,
    actionArgs: args,
    expiresAt,
    actionId,
    paymentAttempt,
    predecessorId
  }

  let invoice
  if (wrappedBolt11) {
    invoice = (await db.invoiceForward.create({
      include: { invoice: true },
      data: {
        bolt11,
        maxFeeMsats: maxFee,
        invoice: {
          create: invoiceData
        },
        wallet: {
          connect: {
            id: wallet.id
          }
        }
      }
    })).invoice
  } else {
    invoice = await db.invoice.create({ data: invoiceData })
  }

  // insert a job to check the invoice after it's set to expire
  await db.$executeRaw`
    INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter, keepuntil, priority)
    VALUES ('checkInvoice',
      jsonb_build_object('hash', ${invoice.hash}::TEXT), 21, true,
      ${expiresAt}::TIMESTAMP WITH TIME ZONE,
      ${expiresAt}::TIMESTAMP WITH TIME ZONE + interval '10m', 100)`

  // the HMAC is only returned during invoice creation
  // this makes sure that only the person who created this invoice
  // has access to the HMAC
  invoice.hmac = createHmac(invoice.hash)

  return invoice
}
@@ -1,60 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
import { notifyInvite } from '@/lib/webPush'

export const anonable = false

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS
]

export async function getCost ({ id }, { models, me }) {
  const invite = await models.invite.findUnique({ where: { id, userId: me.id, revoked: false } })
  if (!invite) {
    throw new Error('invite not found')
  }
  return satsToMsats(invite.gift)
}

export async function perform ({ id, userId }, { me, cost, tx }) {
  const invite = await tx.invite.findUnique({
    where: { id, userId: me.id, revoked: false }
  })

  if (invite.limit && invite.giftedCount >= invite.limit) {
    throw new Error('invite limit reached')
  }

  // check that the user was created in the last hour
  // check that the user did not already redeem an invite
  await tx.user.update({
    where: {
      id: userId,
      inviteId: null,
      createdAt: {
        gt: new Date(Date.now() - 1000 * 60 * 60)
      }
    },
    data: {
      mcredits: {
        increment: cost
      },
      inviteId: id,
      referrerId: me.id
    }
  })

  return await tx.invite.update({
    where: { id, userId: me.id, revoked: false, ...(invite.limit ? { giftedCount: { lt: invite.limit } } : {}) },
    data: {
      giftedCount: {
        increment: 1
      }
    }
  })
}

export async function nonCriticalSideEffects (_, { me }) {
  notifyInvite(me.id)
}
@@ -1,309 +0,0 @@
import { ANON_ITEM_SPAM_INTERVAL, ITEM_SPAM_INTERVAL, PAID_ACTION_PAYMENT_METHODS, USER_ID } from '@/lib/constants'
import { notifyItemMention, notifyItemParents, notifyMention, notifyTerritorySubscribers, notifyUserSubscribers, notifyThreadSubscribers } from '@/lib/webPush'
import { getItemMentions, getMentions, performBotBehavior } from './lib/item'
import { msatsToSats, satsToMsats } from '@/lib/format'
import { GqlInputError } from '@/lib/error'

export const anonable = true

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
  PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC,
  PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]

export const DEFAULT_ITEM_COST = 1000n

export async function getBaseCost ({ models, bio, parentId, subName }) {
  if (bio) return DEFAULT_ITEM_COST

  if (parentId) {
    // the subname is stored in the root item of the thread
    const [sub] = await models.$queryRaw`
      SELECT s."replyCost"
      FROM "Item" i
      LEFT JOIN "Item" r ON r.id = i."rootId"
      LEFT JOIN "Sub" s ON s.name = COALESCE(r."subName", i."subName")
      WHERE i.id = ${Number(parentId)}`

    if (sub?.replyCost) return satsToMsats(sub.replyCost)
    return DEFAULT_ITEM_COST
  }

  const sub = await models.sub.findUnique({ where: { name: subName } })
  return satsToMsats(sub.baseCost)
}

export async function getCost ({ subName, parentId, uploadIds, boost = 0, bio }, { models, me }) {
  const baseCost = await getBaseCost({ models, bio, parentId, subName })

  // cost = baseCost * 10^num_items_in_10m * 100 (anon) or 1 (user) + upload fees + boost
  const [{ cost }] = await models.$queryRaw`
    SELECT ${baseCost}::INTEGER
      * POWER(10, item_spam(${parseInt(parentId)}::INTEGER, ${me?.id ?? USER_ID.anon}::INTEGER,
          ${me?.id && !bio ? ITEM_SPAM_INTERVAL : ANON_ITEM_SPAM_INTERVAL}::INTERVAL))
      * ${me ? 1 : 100}::INTEGER
      + (SELECT "nUnpaid" * "uploadFeesMsats"
          FROM upload_fees(${me?.id || USER_ID.anon}::INTEGER, ${uploadIds}::INTEGER[]))
      + ${satsToMsats(boost)}::INTEGER as cost`
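
  // worked example of the formula above (assuming item_spam counts prior items in the interval):
  // a logged-in user's third item within 10 minutes at the default 1000 msat base cost pays
  // 1000 * 10^2 * 1 = 100,000 msats (100 sats), before upload fees and boost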

  // freebie: the item is a comment or bio (posts can't be free), spam fees haven't pushed
  // the cost above the base cost, the user is logged in but can't cover the cost with
  // either sats or credits, and they haven't disabled freebies
  const freebie = (parentId || bio) && cost <= baseCost && !!me &&
    me?.msats < cost && !me?.disableFreebies && me?.mcredits < cost

  return freebie ? BigInt(0) : BigInt(cost)
}

export async function perform (args, context) {
  const { invoiceId, parentId, uploadIds = [], forwardUsers = [], options: pollOptions = [], boost = 0, ...data } = args
  const { tx, me, cost } = context
  const boostMsats = satsToMsats(boost)

  const deletedUploads = []
  for (const uploadId of uploadIds) {
    if (!await tx.upload.findUnique({ where: { id: uploadId } })) {
      deletedUploads.push(uploadId)
    }
  }
  if (deletedUploads.length > 0) {
    throw new Error(`upload(s) ${deletedUploads.join(', ')} are expired, consider reuploading.`)
  }

  let invoiceData = {}
  if (invoiceId) {
    invoiceData = { invoiceId, invoiceActionState: 'PENDING' }
    await tx.upload.updateMany({
      where: { id: { in: uploadIds } },
      data: invoiceData
    })
  }

  const itemActs = []
  if (boostMsats > 0) {
    itemActs.push({
      msats: boostMsats, act: 'BOOST', userId: data.userId, ...invoiceData
    })
  }
  if (cost > 0) {
    itemActs.push({
      msats: cost - boostMsats, act: 'FEE', userId: data.userId, ...invoiceData
    })
    data.cost = msatsToSats(cost - boostMsats)
  }

  const mentions = await getMentions(args, context)
  const itemMentions = await getItemMentions(args, context)

  // start with median vote
  if (me) {
    const [row] = await tx.$queryRaw`SELECT
      COALESCE(percentile_cont(0.5) WITHIN GROUP(
        ORDER BY "weightedVotes" - "weightedDownVotes"), 0)
      AS median FROM "Item" WHERE "userId" = ${me.id}::INTEGER`
    if (row?.median < 0) {
      data.weightedDownVotes = -row.median
    }
  }

  const itemData = {
    parentId: parentId ? parseInt(parentId) : null,
    ...data,
    ...invoiceData,
    boost,
    threadSubscriptions: {
      createMany: {
        data: [
          { userId: data.userId },
          ...forwardUsers.map(({ userId }) => ({ userId }))
        ]
      }
    },
    itemForwards: {
      createMany: {
        data: forwardUsers
      }
    },
    pollOptions: {
      createMany: {
        data: pollOptions.map(option => ({ option }))
      }
    },
    itemUploads: {
      create: uploadIds.map(id => ({ uploadId: id }))
    },
    itemActs: {
      createMany: {
        data: itemActs
      }
    },
    mentions: {
      createMany: {
        data: mentions
      }
    },
    itemReferrers: {
      create: itemMentions
    }
  }

  let item
  if (data.bio && me) {
    item = (await tx.user.update({
      where: { id: data.userId },
      include: { bio: true },
      data: {
        bio: {
          create: itemData
        }
      }
    })).bio
  } else {
    try {
      item = await tx.item.create({ data: itemData })
    } catch (err) {
      if (err.message.includes('violates exclusion constraint \\"Item_unique_time_constraint\\"')) {
        const message = `you already submitted this ${itemData.title ? 'post' : 'comment'}`
        throw new GqlInputError(message)
      }
      throw err
    }
  }

  // store a reference to the item in the invoice
  if (invoiceId) {
    await tx.invoice.update({
      where: { id: invoiceId },
      data: { actionId: item.id }
    })
  }

  await performBotBehavior(item, context)

  // ltree is unsupported in Prisma, so we have to query it manually (FUCK!)
  return (await tx.$queryRaw`
    SELECT *, ltree2text(path) AS path, created_at AS "createdAt", updated_at AS "updatedAt"
    FROM "Item" WHERE id = ${item.id}::INTEGER`
  )[0]
}

export async function retry ({ invoiceId, newInvoiceId }, { tx }) {
  await tx.itemAct.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
  await tx.item.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
  await tx.upload.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
  return (await tx.$queryRaw`
    SELECT *, ltree2text(path) AS path, created_at AS "createdAt", updated_at AS "updatedAt"
    FROM "Item" WHERE "invoiceId" = ${newInvoiceId}::INTEGER`
  )[0]
}

export async function onPaid ({ invoice, id }, context) {
  const { tx } = context
  let item

  if (invoice) {
    item = await tx.item.findFirst({
      where: { invoiceId: invoice.id },
      include: {
        user: true
      }
    })
    await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID' } })
    await tx.item.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID', invoicePaidAt: new Date() } })
    await tx.upload.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID', paid: true } })
  } else if (id) {
    item = await tx.item.findUnique({
      where: { id },
      include: {
        user: true,
        itemUploads: { include: { upload: true } }
      }
    })
    await tx.upload.updateMany({
      where: { id: { in: item.itemUploads.map(({ uploadId }) => uploadId) } },
      data: {
        paid: true
      }
    })
  } else {
    throw new Error('No item found')
  }

  await tx.$executeRaw`INSERT INTO pgboss.job (name, data, startafter, priority)
    VALUES ('timestampItem', jsonb_build_object('id', ${item.id}::INTEGER), now() + interval '10 minutes', -2)`
  await tx.$executeRaw`
    INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter)
    VALUES ('imgproxy', jsonb_build_object('id', ${item.id}::INTEGER), 21, true, now() + interval '5 seconds')`

  if (item.boost > 0) {
    await tx.$executeRaw`
      INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter, keepuntil)
      VALUES ('expireBoost', jsonb_build_object('id', ${item.id}::INTEGER), 21, true,
        now() + interval '30 days', now() + interval '40 days')`
  }

  if (item.parentId) {
    // denormalize ncomments, lastCommentAt, and "weightedComments" for ancestors, and insert into reply table
    await tx.$executeRaw`
      WITH comment AS (
        SELECT "Item".*, users.trust
        FROM "Item"
        JOIN users ON "Item"."userId" = users.id
        WHERE "Item".id = ${item.id}::INTEGER
      ), ancestors AS (
        SELECT "Item".*
        FROM "Item", comment
        WHERE "Item".path @> comment.path AND "Item".id <> comment.id
        ORDER BY "Item".id
      ), updated_ancestors AS (
        UPDATE "Item"
        SET ncomments = "Item".ncomments + 1,
          "lastCommentAt" = GREATEST("Item"."lastCommentAt", comment.created_at),
          "nDirectComments" = "Item"."nDirectComments" +
            CASE WHEN comment."parentId" = "Item".id THEN 1 ELSE 0 END
        FROM comment, ancestors
        WHERE "Item".id = ancestors.id
        RETURNING "Item".*
      )
      INSERT INTO "Reply" (created_at, updated_at, "ancestorId", "ancestorUserId", "itemId", "userId", level)
      SELECT comment.created_at, comment.updated_at, ancestors.id, ancestors."userId",
        comment.id, comment."userId", nlevel(comment.path) - nlevel(ancestors.path)
      FROM ancestors, comment`
  }
}

export async function nonCriticalSideEffects ({ invoice, id }, { models }) {
  const item = await models.item.findFirst({
    where: invoice ? { invoiceId: invoice.id } : { id: parseInt(id) },
    include: {
      mentions: true,
      itemReferrers: { include: { refereeItem: true } },
      user: true
    }
  })

  if (item.parentId) {
    notifyItemParents({ item, models }).catch(console.error)
    notifyThreadSubscribers({ models, item }).catch(console.error)
  }
  for (const { userId } of item.mentions) {
    notifyMention({ models, item, userId }).catch(console.error)
  }
  for (const { refereeItem } of item.itemReferrers) {
    notifyItemMention({ models, referrerItem: item, refereeItem }).catch(console.error)
  }

  notifyUserSubscribers({ models, item }).catch(console.error)
  notifyTerritorySubscribers({ models, item }).catch(console.error)
}

export async function onFail ({ invoice }, { tx }) {
  await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
  await tx.item.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
  await tx.upload.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
}

export async function describe ({ parentId }, context) {
  return `SN: create ${parentId ? `reply to #${parentId}` : 'item'}`
}
@@ -1,182 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, USER_ID } from '@/lib/constants'
import { uploadFees } from '../resolvers/upload'
import { getItemMentions, getMentions, performBotBehavior } from './lib/item'
import { notifyItemMention, notifyMention } from '@/lib/webPush'
import { satsToMsats } from '@/lib/format'

export const anonable = true

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
  PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]

export async function getCost ({ id, boost = 0, uploadIds, bio }, { me, models }) {
  // updating an item only costs something when the update adds new uploads or more boost
  const old = await models.item.findUnique({ where: { id: parseInt(id) } })
  const { totalFeesMsats } = await uploadFees(uploadIds, { models, me })
  const cost = BigInt(totalFeesMsats) + satsToMsats(boost - old.boost)

  if (cost > 0 && old.invoiceActionState && old.invoiceActionState !== 'PAID') {
    throw new Error('creation invoice not paid')
  }

  return cost
}

export async function perform (args, context) {
  const { id, boost = 0, uploadIds = [], options: pollOptions = [], forwardUsers: itemForwards = [], ...data } = args
  const { tx, me } = context
  const old = await tx.item.findUnique({
    where: { id: parseInt(id) },
    include: {
      threadSubscriptions: true,
      mentions: true,
      itemForwards: true,
      itemReferrers: true,
      itemUploads: true
    }
  })

  const newBoost = boost - old.boost
  const itemActs = []
  if (newBoost > 0) {
    const boostMsats = satsToMsats(newBoost)
    itemActs.push({
      msats: boostMsats, act: 'BOOST', userId: me?.id || USER_ID.anon
    })
  }

  // createMany is the set difference of the new - old
  // deleteMany is the set difference of the old - new
  // updateMany is the intersection of the old and new
  const difference = (a = [], b = [], key = 'userId') => a.filter(x => !b.find(y => y[key] === x[key]))
  const intersectionMerge = (a = [], b = [], key) => a.filter(x => b.find(y => y.userId === x.userId))
    .map(x => ({ [key]: x[key], ...b.find(y => y.userId === x.userId) }))
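
  // e.g. difference([{ userId: 1 }, { userId: 2 }], [{ userId: 2 }]) => [{ userId: 1 }];
  // intersectionMerge(old, new, 'id') keeps each old row's id while taking the matching
  // new row's fields (matching is always by userId)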

  const mentions = await getMentions(args, context)
  const itemMentions = await getItemMentions(args, context)
  const itemUploads = uploadIds.map(id => ({ uploadId: id }))

  await tx.upload.updateMany({
    where: { id: { in: uploadIds } },
    data: { paid: true }
  })

  // we put boost in the where clause because we don't want to update the boost
  // if it has changed concurrently
  await tx.item.update({
    where: { id: parseInt(id), boost: old.boost },
    data: {
      ...data,
      boost: {
        increment: newBoost
      },
      pollOptions: {
        createMany: {
          data: pollOptions?.map(option => ({ option }))
        }
      },
      itemUploads: {
        create: difference(itemUploads, old.itemUploads, 'uploadId').map(({ uploadId }) => ({ uploadId })),
        deleteMany: {
          uploadId: {
            in: difference(old.itemUploads, itemUploads, 'uploadId').map(({ uploadId }) => uploadId)
          }
        }
      },
      itemActs: {
        createMany: {
          data: itemActs
        }
      },
      itemForwards: {
        deleteMany: {
          userId: {
            in: difference(old.itemForwards, itemForwards).map(({ userId }) => userId)
          }
        },
        createMany: {
          data: difference(itemForwards, old.itemForwards)
        },
        update: intersectionMerge(old.itemForwards, itemForwards, 'id').map(({ id, ...data }) => ({
          where: { id },
          data
        }))
      },
      threadSubscriptions: {
        deleteMany: {
          userId: {
            in: difference(old.itemForwards, itemForwards).map(({ userId }) => userId)
          }
        },
        createMany: {
          data: difference(itemForwards, old.itemForwards).map(({ userId }) => ({ userId }))
        }
      },
      mentions: {
        deleteMany: {
          userId: {
            in: difference(old.mentions, mentions).map(({ userId }) => userId)
          }
        },
        createMany: {
          data: difference(mentions, old.mentions)
        }
      },
      itemReferrers: {
        deleteMany: {
          refereeId: {
            in: difference(old.itemReferrers, itemMentions, 'refereeId').map(({ refereeId }) => refereeId)
          }
        },
        create: difference(itemMentions, old.itemReferrers, 'refereeId')
      }
    }
  })

  await tx.$executeRaw`
    INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter, keepuntil)
    VALUES ('imgproxy', jsonb_build_object('id', ${id}::INTEGER), 21, true,
      now() + interval '5 seconds', now() + interval '1 day')`

  if (newBoost > 0) {
    await tx.$executeRaw`
      INSERT INTO pgboss.job (name, data, retrylimit, retrybackoff, startafter, keepuntil)
      VALUES ('expireBoost', jsonb_build_object('id', ${id}::INTEGER), 21, true,
        now() + interval '30 days', now() + interval '40 days')`
  }

  await performBotBehavior(args, context)

  // ltree is unsupported in Prisma, so we have to query it manually (FUCK!)
  return (await tx.$queryRaw`
    SELECT *, ltree2text(path) AS path, created_at AS "createdAt", updated_at AS "updatedAt"
    FROM "Item" WHERE id = ${parseInt(id)}::INTEGER`
  )[0]
}

export async function nonCriticalSideEffects ({ invoice, id }, { models }) {
  const item = await models.item.findFirst({
    where: invoice ? { invoiceId: invoice.id } : { id: parseInt(id) },
    include: {
      mentions: true,
      itemReferrers: { include: { refereeItem: true } }
    }
  })
  // compare timestamps to only notify if mention or item referral was just created to avoid duplicates on edits
  for (const { userId, createdAt } of item.mentions) {
    if (item.updatedAt.getTime() !== createdAt.getTime()) continue
    notifyMention({ models, item, userId }).catch(console.error)
  }
  for (const { refereeItem, createdAt } of item.itemReferrers) {
    if (item.updatedAt.getTime() !== createdAt.getTime()) continue
    notifyItemMention({ models, referrerItem: item, refereeItem }).catch(console.error)
  }
}

export async function describe ({ id, parentId }, context) {
  return `SN: update ${parentId ? `reply to #${parentId}` : 'post'}`
}
@@ -1,56 +0,0 @@
import { PAID_ACTION_TERMINAL_STATES, USER_ID } from '@/lib/constants'
import { datePivot } from '@/lib/time'

const MAX_PENDING_PAID_ACTIONS_PER_USER = 100
const MAX_PENDING_DIRECT_INVOICES_PER_USER_MINUTES = 10
const MAX_PENDING_DIRECT_INVOICES_PER_USER = 100

export async function assertBelowMaxPendingInvoices (context) {
  const { models, me } = context
  const pendingInvoices = await models.invoice.count({
    where: {
      userId: me?.id ?? USER_ID.anon,
      actionState: {
        notIn: PAID_ACTION_TERMINAL_STATES
      }
    }
  })

  if (pendingInvoices >= MAX_PENDING_PAID_ACTIONS_PER_USER) {
    throw new Error('You have too many pending paid actions, cancel some or wait for them to expire')
  }
}

export async function assertBelowMaxPendingDirectPayments (userId, context) {
  const { models, me } = context

  if (me?.id !== userId) {
    const pendingSenderInvoices = await models.directPayment.count({
      where: {
        senderId: me?.id ?? USER_ID.anon,
        createdAt: {
          gt: datePivot(new Date(), { minutes: -MAX_PENDING_DIRECT_INVOICES_PER_USER_MINUTES })
        }
      }
    })

    if (pendingSenderInvoices >= MAX_PENDING_DIRECT_INVOICES_PER_USER) {
      throw new Error('You\'ve sent too many direct payments')
    }
  }

  if (!userId) return

  const pendingReceiverInvoices = await models.directPayment.count({
    where: {
      receiverId: userId,
      createdAt: {
        gt: datePivot(new Date(), { minutes: -MAX_PENDING_DIRECT_INVOICES_PER_USER_MINUTES })
      }
    }
  })

  if (pendingReceiverInvoices >= MAX_PENDING_DIRECT_INVOICES_PER_USER) {
    throw new Error('Receiver has too many direct payments')
  }
}
@@ -1,89 +0,0 @@
import { USER_ID } from '@/lib/constants'
import { deleteReminders, getDeleteAt, getRemindAt } from '@/lib/item'
import { parseInternalLinks } from '@/lib/url'

export async function getMentions ({ text }, { me, tx }) {
  const mentionPattern = /\B@[\w_]+/gi
  const names = text.match(mentionPattern)?.map(m => m.slice(1))
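  // e.g. text = 'thanks @k00b and @ek' -> names = ['k00b', 'ek']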
  if (names?.length > 0) {
    const users = await tx.user.findMany({
      where: {
        name: {
          in: names
        },
        id: {
          not: me?.id || USER_ID.anon
        }
      }
    })
    return users.map(user => ({ userId: user.id }))
  }
  return []
}

export const getItemMentions = async ({ text }, { me, tx }) => {
  const linkPattern = new RegExp(`${process.env.NEXT_PUBLIC_URL}/items/\\d+[a-zA-Z0-9/?=]*`, 'gi')
  const refs = text.match(linkPattern)?.map(m => {
    try {
      const { itemId, commentId } = parseInternalLinks(m)
      return Number(commentId || itemId)
    } catch (err) {
      return null
    }
  }).filter(r => !!r)

  if (refs?.length > 0) {
    const referee = await tx.item.findMany({
      where: {
        id: { in: refs },
        userId: { not: me?.id || USER_ID.anon }
      }
    })
    return referee.map(r => ({ refereeId: r.id }))
  }

  return []
}

export async function performBotBehavior ({ text, id }, { me, tx }) {
  // delete any existing deleteItem or reminder jobs for this item
  const userId = me?.id || USER_ID.anon
  id = Number(id)
  await tx.$queryRaw`
    DELETE FROM pgboss.job
    WHERE name = 'deleteItem'
    AND data->>'id' = ${id}::TEXT
    AND state <> 'completed'`
  await deleteReminders({ id, userId, models: tx })

  if (text) {
    const deleteAt = getDeleteAt(text)
    if (deleteAt) {
      await tx.$queryRaw`
        INSERT INTO pgboss.job (name, data, startafter, keepuntil)
        VALUES (
          'deleteItem',
          jsonb_build_object('id', ${id}::INTEGER),
          ${deleteAt}::TIMESTAMP WITH TIME ZONE,
          ${deleteAt}::TIMESTAMP WITH TIME ZONE + interval '1 minute')`
    }

    const remindAt = getRemindAt(text)
    if (remindAt) {
      await tx.$queryRaw`
        INSERT INTO pgboss.job (name, data, startafter, keepuntil)
        VALUES (
          'reminder',
          jsonb_build_object('itemId', ${id}::INTEGER, 'userId', ${userId}::INTEGER),
          ${remindAt}::TIMESTAMP WITH TIME ZONE,
          ${remindAt}::TIMESTAMP WITH TIME ZONE + interval '1 minute')`
      await tx.reminder.create({
        data: {
          userId,
          itemId: Number(id),
          remindAt
        }
      })
    }
  }
}
@@ -1,27 +0,0 @@
import { USER_ID } from '@/lib/constants'

export const GLOBAL_SEEDS = [USER_ID.k00b, USER_ID.ek]

export function initialTrust ({ name, userId }) {
  const results = GLOBAL_SEEDS.map(id => ({
    subName: name,
    userId: id,
    zapPostTrust: 1,
    subZapPostTrust: 1,
    zapCommentTrust: 1,
    subZapCommentTrust: 1
  }))

  if (!GLOBAL_SEEDS.includes(userId)) {
    results.push({
      subName: name,
      userId,
      zapPostTrust: 0,
      subZapPostTrust: 1,
      zapCommentTrust: 0,
      subZapCommentTrust: 1
    })
  }

  return results
}
@@ -1,70 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'

export const anonable = false

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
  PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC
]

export async function getCost ({ id }, { me, models }) {
  const pollOption = await models.pollOption.findUnique({
    where: { id: parseInt(id) },
    include: { item: true }
  })
  return satsToMsats(pollOption.item.pollCost)
}

export async function perform ({ invoiceId, id }, { me, cost, tx }) {
  const pollOption = await tx.pollOption.findUnique({
    where: { id: parseInt(id) }
  })
  const itemId = parseInt(pollOption.itemId)

  let invoiceData = {}
  if (invoiceId) {
    invoiceData = { invoiceId, invoiceActionState: 'PENDING' }
    // store a reference to the item in the invoice
    await tx.invoice.update({
      where: { id: invoiceId },
      data: { actionId: itemId }
    })
  }

  // the unique index on userId, itemId will prevent double voting
  await tx.itemAct.create({ data: { msats: cost, itemId, userId: me.id, act: 'POLL', ...invoiceData } })
  await tx.pollBlindVote.create({ data: { userId: me.id, itemId, ...invoiceData } })
  await tx.pollVote.create({ data: { pollOptionId: pollOption.id, itemId, ...invoiceData } })

  return { id }
}

export async function retry ({ invoiceId, newInvoiceId }, { tx }) {
  await tx.itemAct.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
  await tx.pollBlindVote.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
  await tx.pollVote.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })

  const { pollOptionId } = await tx.pollVote.findFirst({ where: { invoiceId: newInvoiceId } })
  return { id: pollOptionId }
}

export async function onPaid ({ invoice }, { tx }) {
  if (!invoice) return

  await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID' } })
  await tx.pollBlindVote.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'PAID' } })
  // anonymize the vote
  await tx.pollVote.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceId: null, invoiceActionState: null } })
}

export async function onFail ({ invoice }, { tx }) {
  await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
  await tx.pollBlindVote.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
  await tx.pollVote.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
}

export async function describe ({ id }, { actionId }) {
  return `SN: vote on poll #${id ?? actionId}`
}
@@ -1,84 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS } from '@/lib/constants'
import { toPositiveBigInt, numWithUnits, msatsToSats, satsToMsats } from '@/lib/format'
import { notifyDeposit } from '@/lib/webPush'
import { getInvoiceableWallets } from '@/wallets/server'

export const anonable = false

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.P2P,
  PAID_ACTION_PAYMENT_METHODS.DIRECT
]

export async function getCost ({ msats }) {
  return toPositiveBigInt(msats)
}

export async function getInvoiceablePeer (_, { me, models, cost, paymentMethod }) {
  if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.P2P && !me?.proxyReceive) return null
  if (paymentMethod === PAID_ACTION_PAYMENT_METHODS.DIRECT && !me?.directReceive) return null

  const wallets = await getInvoiceableWallets(me.id, { models })
  if (wallets.length === 0) {
    return null
  }

  if (cost < satsToMsats(me.receiveCreditsBelowSats)) {
    return null
  }

  return me.id
}

export async function getSybilFeePercent () {
  return 10n
}

export async function perform ({
  invoiceId,
  comment,
  lud18Data,
  noteStr
}, { me, tx }) {
  return await tx.invoice.update({
    where: { id: invoiceId },
    data: {
      comment,
      lud18Data,
      ...(noteStr ? { desc: noteStr } : {})
    },
    include: { invoiceForward: true }
  })
}

export async function describe ({ description }, { me, cost, paymentMethod, sybilFeePercent }) {
  const fee = paymentMethod === PAID_ACTION_PAYMENT_METHODS.P2P
    ? cost * BigInt(sybilFeePercent) / 100n
    : 0n
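  // e.g. a 1000 sat P2P receive with the 10% sybil fee above: fee = 100 sats,
  // so the generated description advertises 900 sats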
  return description ?? `SN: ${me?.name ?? ''} receives ${numWithUnits(msatsToSats(cost - fee))}`
}

export async function onPaid ({ invoice }, { tx }) {
  if (!invoice) {
    throw new Error('invoice is required')
  }

  // P2P lnurlp does not need to update the user's balance
  if (invoice?.invoiceForward) return

  await tx.user.update({
    where: { id: invoice.userId },
    data: {
      mcredits: {
        increment: invoice.msatsReceived
      }
    }
  })
}

export async function nonCriticalSideEffects ({ invoice }, { models }) {
  await notifyDeposit(invoice.userId, invoice)
  await models.$executeRaw`
    INSERT INTO pgboss.job (name, data)
    VALUES ('nip57', jsonb_build_object('hash', ${invoice.hash}))`
}
@@ -1,73 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, TERRITORY_PERIOD_COST } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
import { nextBilling } from '@/lib/territory'

export const anonable = false

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
  PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]

export async function getCost ({ name }, { models }) {
  const sub = await models.sub.findUnique({
    where: {
      name
    }
  })

  return satsToMsats(TERRITORY_PERIOD_COST(sub.billingType))
}

export async function perform ({ name }, { cost, tx }) {
  const sub = await tx.sub.findUnique({
    where: {
      name
    }
  })

  if (sub.billingType === 'ONCE') {
    throw new Error('Cannot bill a ONCE territory')
  }

  let billedLastAt = sub.billPaidUntil
  let billingCost = sub.billingCost

  // if the sub is archived, they are paying to reactivate it
  if (sub.status === 'STOPPED') {
    // get non-grandfathered cost and reset their billing to start now
    billedLastAt = new Date()
    billingCost = TERRITORY_PERIOD_COST(sub.billingType)
  }

  const billPaidUntil = nextBilling(billedLastAt, sub.billingType)

  return await tx.sub.update({
    // optimistic concurrency control
    // make sure the sub hasn't changed since we fetched it
    where: {
      ...sub,
      postTypes: {
        equals: sub.postTypes
      }
    },
    data: {
      billedLastAt,
      billPaidUntil,
      billingCost,
      status: 'ACTIVE',
      SubAct: {
        create: {
          msats: cost,
          type: 'BILLING',
          userId: sub.userId
        }
      }
    }
  })
}

export async function describe ({ name }) {
  return `SN: billing for territory ${name}`
}
@@ -1,56 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, TERRITORY_PERIOD_COST } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
import { nextBilling } from '@/lib/territory'
import { initialTrust } from './lib/territory'

export const anonable = false

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
  PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]

export async function getCost ({ billingType }) {
  return satsToMsats(TERRITORY_PERIOD_COST(billingType))
}

export async function perform ({ invoiceId, ...data }, { me, cost, tx }) {
  const { billingType } = data
  const billingCost = TERRITORY_PERIOD_COST(billingType)
  const billedLastAt = new Date()
  const billPaidUntil = nextBilling(billedLastAt, billingType)

  const sub = await tx.sub.create({
    data: {
      ...data,
      billedLastAt,
      billPaidUntil,
      billingCost,
      rankingType: 'WOT',
      userId: me.id,
      SubAct: {
        create: {
          msats: cost,
          type: 'BILLING',
          userId: me.id
        }
      },
      SubSubscription: {
        create: {
          userId: me.id
        }
      }
    }
  })

  await tx.userSubTrust.createMany({
    data: initialTrust({ name: sub.name, userId: sub.userId })
  })

  return sub
}

export async function describe ({ name }) {
  return `SN: create territory ${name}`
}
@@ -1,90 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, TERRITORY_PERIOD_COST } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
import { nextBilling } from '@/lib/territory'
import { initialTrust } from './lib/territory'

export const anonable = false

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
  PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]

export async function getCost ({ billingType }) {
  return satsToMsats(TERRITORY_PERIOD_COST(billingType))
}

export async function perform ({ name, invoiceId, ...data }, { me, cost, tx }) {
  const sub = await tx.sub.findUnique({
    where: {
      name
    }
  })

  data.billingCost = TERRITORY_PERIOD_COST(data.billingType)

  // we never want to bill them again if they are changing to ONCE
  if (data.billingType === 'ONCE') {
    data.billPaidUntil = null
    data.billingAutoRenew = false
  }

  data.billedLastAt = new Date()
  data.billPaidUntil = nextBilling(data.billedLastAt, data.billingType)
  data.status = 'ACTIVE'
  data.userId = me.id

  if (sub.userId !== me.id) {
    await tx.territoryTransfer.create({ data: { subName: name, oldUserId: sub.userId, newUserId: me.id } })
    await tx.subSubscription.delete({ where: { userId_subName: { userId: sub.userId, subName: name } } })
  }

  await tx.subAct.create({
    data: {
      userId: me.id,
      subName: name,
      msats: cost,
      type: 'BILLING'
    }
  })

  await tx.subSubscription.upsert({
    where: {
      userId_subName: {
        userId: me.id,
        subName: name
      }
    },
    update: {
      userId: me.id,
      subName: name
    },
    create: {
      userId: me.id,
      subName: name
    }
  })

  const updatedSub = await tx.sub.update({
    data,
    // optimistic concurrency control
    // make sure none of the relevant fields have changed since we fetched the sub
    where: {
      ...sub,
      postTypes: {
        equals: sub.postTypes
      }
    }
  })

  await tx.userSubTrust.createMany({
    data: initialTrust({ name: updatedSub.name, userId: updatedSub.userId })
  })

  return updatedSub
}

export async function describe ({ name }, context) {
  return `SN: unarchive territory ${name}`
}
@@ -1,83 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, TERRITORY_PERIOD_COST } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
import { proratedBillingCost } from '@/lib/territory'
import { datePivot } from '@/lib/time'

export const anonable = false

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
  PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]

export async function getCost ({ oldName, billingType }, { models }) {
  const oldSub = await models.sub.findUnique({
    where: {
      name: oldName
    }
  })

  const cost = proratedBillingCost(oldSub, billingType)
  if (!cost) {
    return 0n
  }

  return satsToMsats(cost)
}

export async function perform ({ oldName, invoiceId, ...data }, { me, cost, tx }) {
  const oldSub = await tx.sub.findUnique({
    where: {
      name: oldName
    }
  })

  data.billingCost = TERRITORY_PERIOD_COST(data.billingType)

  // we never want to bill them again if they are changing to ONCE
  if (data.billingType === 'ONCE') {
    data.billPaidUntil = null
    data.billingAutoRenew = false
  }

  // if they are changing to YEARLY, bill them in a year
  // if they are changing to MONTHLY from YEARLY, do nothing
  if (oldSub.billingType === 'MONTHLY' && data.billingType === 'YEARLY') {
    data.billPaidUntil = datePivot(new Date(oldSub.billedLastAt), { years: 1 })
  }

  // if this billing change makes their bill paid up, set them to active
  if (data.billPaidUntil === null || data.billPaidUntil >= new Date()) {
    data.status = 'ACTIVE'
  }

  if (cost > 0n) {
    await tx.subAct.create({
      data: {
        userId: me.id,
        subName: oldName,
        msats: cost,
        type: 'BILLING'
      }
    })
  }

  return await tx.sub.update({
    data,
    where: {
      // optimistic concurrency control
      // make sure none of the relevant fields have changed since we fetched the sub
      ...oldSub,
      postTypes: {
        equals: oldSub.postTypes
      },
      name: oldName,
      userId: me.id
    }
  })
}

export async function describe ({ name }, context) {
  return `SN: update territory billing ${name}`
}
@@ -1,245 +0,0 @@
import { PAID_ACTION_PAYMENT_METHODS, USER_ID } from '@/lib/constants'
import { msatsToSats, satsToMsats } from '@/lib/format'
import { notifyZapped } from '@/lib/webPush'
import { getInvoiceableWallets } from '@/wallets/server'
import { Prisma } from '@prisma/client'

export const anonable = true

export const paymentMethods = [
  PAID_ACTION_PAYMENT_METHODS.P2P,
  PAID_ACTION_PAYMENT_METHODS.FEE_CREDIT,
  PAID_ACTION_PAYMENT_METHODS.REWARD_SATS,
  PAID_ACTION_PAYMENT_METHODS.OPTIMISTIC,
  PAID_ACTION_PAYMENT_METHODS.PESSIMISTIC
]

export async function getCost ({ sats }) {
  return satsToMsats(sats)
}

export async function getInvoiceablePeer ({ id, sats, hasSendWallet }, { models, me, cost }) {
  // don't invoice the peer if the zap is dust, or if the zapper has no send wallet
  // but can cover the zap with their own sats or credits
  if (sats < me?.sendCreditsBelowSats ||
    (me && !hasSendWallet && (me.mcredits >= cost || me.msats >= cost))) {
    return null
  }

  const item = await models.item.findUnique({
    where: { id: parseInt(id) },
    include: {
      itemForwards: true,
      user: true
    }
  })

  // bios don't get sats
  if (item.bio) {
    return null
  }

  const wallets = await getInvoiceableWallets(item.userId, { models })

  // request peer invoice if they have an attached wallet and have not forwarded the item
  // and the receiver doesn't want to receive credits
  if (wallets.length > 0 &&
    item.itemForwards.length === 0 &&
    sats >= item.user.receiveCreditsBelowSats) {
    return item.userId
  }

  return null
}

export async function getSybilFeePercent () {
  return 30n
}

export async function perform ({ invoiceId, sats, id: itemId, ...args }, { me, cost, sybilFeePercent, tx }) {
  const feeMsats = cost * sybilFeePercent / 100n
  const zapMsats = cost - feeMsats
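  // e.g. with the 30% sybil fee above, a 100 sat zap books 30,000 msats as 'FEE'
  // and 70,000 msats as 'TIP'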
  itemId = parseInt(itemId)

  let invoiceData = {}
  if (invoiceId) {
    invoiceData = { invoiceId, invoiceActionState: 'PENDING' }
    // store a reference to the item in the invoice
    await tx.invoice.update({
      where: { id: invoiceId },
      data: { actionId: itemId }
    })
  }

  const acts = await tx.itemAct.createManyAndReturn({
    data: [
      { msats: feeMsats, itemId, userId: me?.id ?? USER_ID.anon, act: 'FEE', ...invoiceData },
      { msats: zapMsats, itemId, userId: me?.id ?? USER_ID.anon, act: 'TIP', ...invoiceData }
    ]
  })

  const [{ path }] = await tx.$queryRaw`
    SELECT ltree2text(path) as path FROM "Item" WHERE id = ${itemId}::INTEGER`
  return { id: itemId, sats, act: 'TIP', path, actIds: acts.map(act => act.id) }
}

export async function retry ({ invoiceId, newInvoiceId }, { tx, cost }) {
  await tx.itemAct.updateMany({ where: { invoiceId }, data: { invoiceId: newInvoiceId, invoiceActionState: 'PENDING' } })
  const [{ id, path }] = await tx.$queryRaw`
    SELECT "Item".id, ltree2text(path) as path
    FROM "Item"
    JOIN "ItemAct" ON "Item".id = "ItemAct"."itemId"
    WHERE "ItemAct"."invoiceId" = ${newInvoiceId}::INTEGER`
  return { id, sats: msatsToSats(cost), act: 'TIP', path }
}

export async function onPaid ({ invoice, actIds }, { tx }) {
  let acts
  if (invoice) {
    await tx.itemAct.updateMany({
      where: { invoiceId: invoice.id },
      data: {
        invoiceActionState: 'PAID'
      }
    })
    acts = await tx.itemAct.findMany({ where: { invoiceId: invoice.id }, include: { item: true } })
    actIds = acts.map(act => act.id)
  } else if (actIds) {
    acts = await tx.itemAct.findMany({ where: { id: { in: actIds } }, include: { item: true } })
  } else {
    throw new Error('No invoice or actIds')
  }

  const msats = acts.reduce((a, b) => a + BigInt(b.msats), BigInt(0))
  const sats = msatsToSats(msats)
  const itemAct = acts.find(act => act.act === 'TIP')

  if (invoice?.invoiceForward) {
    // only the op got sats and we need to add it to their stackedMsats
    // because the sats were p2p
    await tx.user.update({
      where: { id: itemAct.item.userId },
      data: { stackedMsats: { increment: itemAct.msats } }
    })
  } else {
    // splits only use mcredits
    await tx.$executeRaw`
      WITH forwardees AS (
        SELECT "userId", ((${itemAct.msats}::BIGINT * pct) / 100)::BIGINT AS mcredits
        FROM "ItemForward"
        WHERE "itemId" = ${itemAct.itemId}::INTEGER
      ), total_forwarded AS (
        SELECT COALESCE(SUM(mcredits), 0) as mcredits
        FROM forwardees
      ), recipients AS (
        SELECT "userId", mcredits FROM forwardees
        UNION
        SELECT ${itemAct.item.userId}::INTEGER as "userId",
          ${itemAct.msats}::BIGINT - (SELECT mcredits FROM total_forwarded)::BIGINT as mcredits
        ORDER BY "userId" ASC -- order to prevent deadlocks
      )
      UPDATE users
      SET
        mcredits = users.mcredits + recipients.mcredits,
        "stackedMsats" = users."stackedMsats" + recipients.mcredits,
        "stackedMcredits" = users."stackedMcredits" + recipients.mcredits
      FROM recipients
      WHERE users.id = recipients."userId"`
  }

  // perform denormalized aggregates: weighted votes, upvotes, msats, lastZapAt
  // NOTE: for the rows that might be updated by a concurrent zap, we use UPDATE for implicit locking
  await tx.$queryRaw`
    WITH territory AS (
      SELECT COALESCE(r."subName", i."subName", 'meta')::TEXT as "subName"
      FROM "Item" i
      LEFT JOIN "Item" r ON r.id = i."rootId"
      WHERE i.id = ${itemAct.itemId}::INTEGER
    ), zapper AS (
      SELECT
        COALESCE(${itemAct.item.parentId
          ? Prisma.sql`"zapCommentTrust"`
          : Prisma.sql`"zapPostTrust"`}, 0) as "zapTrust",
        COALESCE(${itemAct.item.parentId
          ? Prisma.sql`"subZapCommentTrust"`
          : Prisma.sql`"subZapPostTrust"`}, 0) as "subZapTrust"
      FROM territory
      LEFT JOIN "UserSubTrust" ust ON ust."subName" = territory."subName"
        AND ust."userId" = ${itemAct.userId}::INTEGER
    ), zap AS (
      INSERT INTO "ItemUserAgg" ("userId", "itemId", "zapSats")
      VALUES (${itemAct.userId}::INTEGER, ${itemAct.itemId}::INTEGER, ${sats}::INTEGER)
      ON CONFLICT ("itemId", "userId") DO UPDATE
      SET "zapSats" = "ItemUserAgg"."zapSats" + ${sats}::INTEGER, updated_at = now()
      RETURNING ("zapSats" = ${sats}::INTEGER)::INTEGER as first_vote,
        LOG("zapSats" / GREATEST("zapSats" - ${sats}::INTEGER, 1)::FLOAT) AS log_sats
    ), item_zapped AS (
      UPDATE "Item"
      SET
        "weightedVotes" = "weightedVotes" + zapper."zapTrust" * zap.log_sats,
        "subWeightedVotes" = "subWeightedVotes" + zapper."subZapTrust" * zap.log_sats,
        upvotes = upvotes + zap.first_vote,
        msats = "Item".msats + ${msats}::BIGINT,
        mcredits = "Item".mcredits + ${invoice?.invoiceForward ? 0n : msats}::BIGINT,
        "lastZapAt" = now()
      FROM zap, zapper
      WHERE "Item".id = ${itemAct.itemId}::INTEGER
      RETURNING "Item".*, zapper."zapTrust" * zap.log_sats as "weightedVote"
    ), ancestors AS (
      SELECT "Item".*
      FROM "Item", item_zapped
      WHERE "Item".path @> item_zapped.path AND "Item".id <> item_zapped.id
      ORDER BY "Item".id
    )
    UPDATE "Item"
    SET "weightedComments" = "Item"."weightedComments" + item_zapped."weightedVote",
      "commentMsats" = "Item"."commentMsats" + ${msats}::BIGINT,
      "commentMcredits" = "Item"."commentMcredits" + ${invoice?.invoiceForward ? 0n : msats}::BIGINT
    FROM item_zapped, ancestors
    WHERE "Item".id = ancestors.id`

  // record potential bounty payment
  // NOTE: we are at least guaranteed to see the update to "ItemUserAgg" from our own tx, so we
  // won't miss a zap that aggregates into a bounty payment, regardless of the order of updates
  await tx.$executeRaw`
    WITH bounty AS (
      SELECT root.id, "ItemUserAgg"."zapSats" >= root.bounty AS paid, "ItemUserAgg"."itemId" AS target
      FROM "ItemUserAgg"
      JOIN "Item" ON "Item".id = "ItemUserAgg"."itemId"
      LEFT JOIN "Item" root ON root.id = "Item"."rootId"
      WHERE "ItemUserAgg"."userId" = ${itemAct.userId}::INTEGER
      AND "ItemUserAgg"."itemId" = ${itemAct.itemId}::INTEGER
      AND root."userId" = ${itemAct.userId}::INTEGER
      AND root.bounty IS NOT NULL
    )
    UPDATE "Item"
    SET "bountyPaidTo" = array_remove(array_append(array_remove("bountyPaidTo", bounty.target), bounty.target), NULL)
    FROM bounty
    WHERE "Item".id = bounty.id AND bounty.paid`
}

export async function nonCriticalSideEffects ({ invoice, actIds }, { models }) {
  const itemAct = await models.itemAct.findFirst({
    where: invoice ? { invoiceId: invoice.id } : { id: { in: actIds } },
    include: { item: true }
  })
  // avoid duplicate notifications with the same zap amount
  // by checking if there are any other pending acts on the item
  const pendingActs = await models.itemAct.count({
    where: {
      itemId: itemAct.itemId,
      createdAt: {
        gt: itemAct.createdAt
      }
    }
  })
  if (pendingActs === 0) notifyZapped({ models, item: itemAct.item }).catch(console.error)
}

export async function onFail ({ invoice }, { tx }) {
  await tx.itemAct.updateMany({ where: { invoiceId: invoice.id }, data: { invoiceActionState: 'FAILED' } })
}

export async function describe ({ id: itemId, sats }, { actionId, cost }) {
  return `SN: zap ${sats ?? msatsToSats(cost)} sats to #${itemId ?? actionId}`
}
@@ -1,64 +0,0 @@
import { LND_PATHFINDING_TIME_PREF_PPM, LND_PATHFINDING_TIMEOUT_MS } from '@/lib/constants'
import { msatsToSats, satsToMsats, toPositiveBigInt } from '@/lib/format'
import { Prisma } from '@prisma/client'
import { parsePaymentRequest, payViaPaymentRequest } from 'ln-service'

// paying actions are completely distinct from paid actions
// and there's only one paying action: send
// ... still we want the api to at least be similar
export default async function performPayingAction ({ bolt11, maxFee, walletId }, { me, models, lnd }) {
  try {
    console.group('performPayingAction', `${bolt11.slice(0, 10)}...`, maxFee, walletId)

    if (!me) {
      throw new Error('You must be logged in to perform this action')
    }

    const decoded = await parsePaymentRequest({ request: bolt11 })
    const cost = toPositiveBigInt(toPositiveBigInt(decoded.mtokens) + satsToMsats(maxFee))
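
    // e.g. paying a 100 sat invoice with maxFee = 10 sats debits 110,000 msats
    // from the user's balance before the payment is attempted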

    console.log('cost', cost)

    const withdrawal = await models.$transaction(async tx => {
      await tx.user.update({
        where: {
          id: me.id
        },
        data: { msats: { decrement: cost } }
      })

      return await tx.withdrawl.create({
        data: {
          hash: decoded.id,
          bolt11,
          msatsPaying: toPositiveBigInt(decoded.mtokens),
          msatsFeePaying: satsToMsats(maxFee),
          userId: me.id,
          walletId,
          autoWithdraw: !!walletId
        }
      })
    }, { isolationLevel: Prisma.TransactionIsolationLevel.ReadCommitted })

    payViaPaymentRequest({
      lnd,
      request: withdrawal.bolt11,
      max_fee: msatsToSats(withdrawal.msatsFeePaying),
      pathfinding_timeout: LND_PATHFINDING_TIMEOUT_MS,
      confidence: LND_PATHFINDING_TIME_PREF_PPM
    }).catch(console.error)

    return withdrawal
  } catch (e) {
    if (e.message.includes('\\"users\\" violates check constraint \\"msats_positive\\"')) {
      throw new Error('insufficient funds')
    }
    if (e instanceof Prisma.PrismaClientKnownRequestError && e.code === 'P2002') {
      throw new Error('you cannot withdraw to the same invoice twice')
    }
    console.error('performPayingAction failed', e)
    throw e
  } finally {
    console.groupEnd()
  }
}
@@ -1,15 +1,13 @@
 import { SN_ADMIN_IDS } from '@/lib/constants'
 
 export default {
   Query: {
     snl: async (parent, _, { models }) => {
-      const snl = await models.snl.findFirst()
-      return !!snl?.live
+      const { live } = await models.snl.findFirst()
+      return live
     }
   },
   Mutation: {
     onAirToggle: async (parent, _, { models, me }) => {
-      if (!me || !SN_ADMIN_IDS.includes(me.id)) {
+      if (me.id !== 616) {
         throw new Error('not an admin')
       }
       const { id, live } = await models.snl.findFirst()
@@ -1,7 +0,0 @@
import { GqlAuthorizationError } from '@/lib/error'

export default function assertApiKeyNotPermitted ({ me }) {
  if (me?.apiKey === true) {
    throw new GqlAuthorizationError('this operation is not allowed to be performed via API Key')
  }
}
@@ -1,27 +1,37 @@
-import { isServiceEnabled } from '@/lib/sndev'
-import { cachedFetcher } from '@/lib/fetch'
-import { getHeight } from 'ln-service'
+import lndService from 'ln-service'
+import lnd from '../lnd'
 
-const getBlockHeight = cachedFetcher(async function fetchBlockHeight ({ lnd }) {
+const cache = new Map()
+const expiresIn = 1000 * 30 // 30 seconds in milliseconds
+
+async function fetchBlockHeight () {
+  let blockHeight = 0
   try {
-    const { current_block_height: height } = await getHeight({ lnd })
-    return height
+    const height = await lndService.getHeight({ lnd })
+    blockHeight = height.current_block_height
  } catch (err) {
-    console.error('getBlockHeight', err)
-    return 0
+    console.error('fetchBlockHeight', err)
  }
-}, {
-  maxSize: 1,
-  cacheExpiry: 60 * 1000, // 1 minute
-  forceRefreshThreshold: 0,
-  keyGenerator: () => 'getBlockHeight'
-})
+  cache.set('block', { height: blockHeight, createdAt: Date.now() })
+  return blockHeight
+}
+
+async function getBlockHeight () {
+  if (cache.has('block')) {
+    const { height, createdAt } = cache.get('block')
+    const expired = createdAt + expiresIn < Date.now()
+    if (expired) fetchBlockHeight().catch(console.error) // update cache
+    return height // serve stale block height (this is on the SSR critical path)
+  } else {
+    fetchBlockHeight().catch(console.error)
+  }
+  return 0
+}
 
 export default {
   Query: {
-    blockHeight: async (parent, opts, { lnd }) => {
-      if (!isServiceEnabled('payments')) return 0
-      return await getBlockHeight({ lnd }) || 0
+    blockHeight: async (parent, opts, ctx) => {
+      return await getBlockHeight()
     }
   }
 }
@@ -1,26 +1,37 @@
-import { cachedFetcher } from '@/lib/fetch'
+import lndService from 'ln-service'
+import lnd from '../lnd'
 
-const getChainFeeRate = cachedFetcher(async function fetchChainFeeRate () {
-  const url = 'https://mempool.space/api/v1/fees/recommended'
+const cache = new Map()
+const expiresIn = 1000 * 30 // 30 seconds in milliseconds
+
+async function fetchChainFeeRate () {
+  let chainFee = 0
   try {
-    const res = await fetch(url)
-    const body = await res.json()
-    return body.hourFee
+    const fee = await lndService.getChainFeeRate({ lnd })
+    chainFee = fee.tokens_per_vbyte
  } catch (err) {
    console.error('fetchChainFee', err)
-    return 0
  }
-}, {
-  maxSize: 1,
-  cacheExpiry: 60 * 1000, // 1 minute
-  forceRefreshThreshold: 0, // never force refresh
-  keyGenerator: () => 'getChainFeeRate'
-})
+  cache.set('fee', { fee: chainFee, createdAt: Date.now() })
+  return chainFee
+}
+
+async function getChainFeeRate () {
+  if (cache.has('fee')) {
+    const { fee, createdAt } = cache.get('fee')
+    const expired = createdAt + expiresIn < Date.now()
+    if (expired) fetchChainFeeRate().catch(console.error) // update cache
+    return fee
+  } else {
+    fetchChainFeeRate().catch(console.error)
+  }
+  return 0
+}
 
 export default {
   Query: {
     chainFee: async (parent, opts, ctx) => {
-      return await getChainFeeRate() || 0
+      return await getChainFeeRate()
     }
   }
 }
@ -1,29 +1,45 @@
|
||||
import { timeUnitForRange, whenRange } from '@/lib/time'
|
||||
import { timeUnitForRange, whenRange } from '../../lib/time'
|
||||
|
||||
export function viewIntervalClause (range, view) {
|
||||
const unit = timeUnitForRange(range)
|
||||
return `"${view}".t >= date_trunc('${unit}', timezone('America/Chicago', $1)) AND date_trunc('${unit}', "${view}".t) <= date_trunc('${unit}', timezone('America/Chicago', $2)) `
|
||||
const PLACEHOLDERS_NUM = 616
|
||||
|
||||
export function interval (when) {
|
||||
switch (when) {
|
||||
case 'week':
|
||||
return '1 week'
|
||||
case 'month':
|
||||
return '1 month'
|
||||
case 'year':
|
||||
return '1 year'
|
||||
case 'forever':
|
||||
return null
|
||||
default:
|
||||
return '1 day'
|
||||
}
|
||||
}
|
||||
|
||||
export function viewGroup (range, view) {
|
||||
export function withClause (range) {
|
||||
const unit = timeUnitForRange(range)
|
||||
return `(
|
||||
(SELECT *
|
||||
FROM ${view}_days
|
||||
WHERE ${viewIntervalClause(range, `${view}_days`)})
|
||||
UNION ALL
|
||||
(SELECT *
|
||||
FROM ${view}_hours
|
||||
WHERE ${viewIntervalClause(range, `${view}_hours`)}
|
||||
${unit === 'hour' ? '' : `AND "${view}_hours".t >= date_trunc('day', timezone('America/Chicago', now()))`})
|
||||
UNION ALL
|
||||
(SELECT * FROM
|
||||
${view}(
|
||||
date_trunc('hour', timezone('America/Chicago', now())),
|
||||
date_trunc('hour', timezone('America/Chicago', now())), '1 hour'::INTERVAL, 'hour')
|
||||
WHERE "${view}".t >= date_trunc('hour', timezone('America/Chicago', $1))
|
||||
AND "${view}".t <= date_trunc('hour', timezone('America/Chicago', $2)))
|
||||
) u`
|
||||
|
||||
return `
|
||||
WITH range_values AS (
|
||||
SELECT date_trunc('${unit}', $1) as minval,
|
||||
date_trunc('${unit}', $2) as maxval
|
||||
),
|
||||
times AS (
|
||||
SELECT generate_series(minval, maxval, interval '1 ${unit}') as time
|
||||
FROM range_values
|
||||
)
|
||||
`
|
||||
}
|
||||
|
||||
export function intervalClause (range, table) {
|
||||
const unit = timeUnitForRange(range)
|
||||
|
||||
return `date_trunc('${unit}', "${table}".created_at) >= date_trunc('${unit}', $1) AND date_trunc('${unit}', "${table}".created_at) <= date_trunc('${unit}', $2) `
|
||||
}
|
||||
|
||||
export function viewIntervalClause (range, view) {
|
||||
return `"${view}".day >= date_trunc('day', timezone('America/Chicago', $1)) AND "${view}".day <= date_trunc('day', timezone('America/Chicago', $2)) `
|
||||
}
|
||||
|
||||
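Both generations of these helpers emit SQL that still references the positional parameters $1 and $2, which every resolver below supplies by spreading the range. A compact sketch of the contract, reusing only names defined in this file (assuming a Prisma client `models` as in the resolvers):

// sketch: how the helpers above compose at a call site
const range = whenRange('month') // [from, to] bound as $1 and $2
const rows = await models.$queryRawUnsafe(`
  SELECT date_trunc('${timeUnitForRange(range)}', t) as time, count(*) AS n
  FROM ${viewGroup(range, 'item_growth')}
  GROUP BY time
  ORDER BY time ASC`, ...range)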
export default {
@@ -31,129 +47,253 @@ export default {
registrationGrowth: async (parent, { when, from, to }, { models }) => {
const range = whenRange(when, from, to)

return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', t) at time zone 'America/Chicago' as time, json_build_array(
json_build_object('name', 'referrals', 'value', sum(referrals)),
json_build_object('name', 'organic', 'value', sum(organic))
if (when !== 'day') {
return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', day) as time, json_build_array(
json_build_object('name', 'referrals', 'value', sum(referrals)),
json_build_object('name', 'organic', 'value', sum(organic))
) AS data
FROM reg_growth_days
WHERE ${viewIntervalClause(range, 'reg_growth_days')}
GROUP BY time
ORDER BY time ASC`, ...range)
}

return await models.$queryRawUnsafe(
`${withClause(range)}
SELECT time, json_build_array(
json_build_object('name', 'referrals', 'value', count("referrerId")),
json_build_object('name', 'organic', 'value', count(users.id) FILTER(WHERE id > ${PLACEHOLDERS_NUM}) - count("referrerId"))
) AS data
FROM ${viewGroup(range, 'reg_growth')}
FROM times
LEFT JOIN users ON ${intervalClause(range, 'users')} AND time = date_trunc('${timeUnitForRange(range)}', created_at)
GROUP BY time
ORDER BY time ASC`, ...range)
},
spenderGrowth: async (parent, { when, to, from }, { models }) => {
const range = whenRange(when, from, to)

return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', t) at time zone 'America/Chicago' as time, json_build_array(
json_build_object('name', 'any', 'value', COUNT(DISTINCT "userId")),
json_build_object('name', 'jobs', 'value', COUNT(DISTINCT "userId") FILTER (WHERE type = 'STREAM')),
json_build_object('name', 'boost', 'value', COUNT(DISTINCT "userId") FILTER (WHERE type = 'BOOST')),
json_build_object('name', 'fees', 'value', COUNT(DISTINCT "userId") FILTER (WHERE type = 'FEE')),
json_build_object('name', 'poll', 'value', COUNT(DISTINCT "userId") FILTER (WHERE type = 'POLL')),
json_build_object('name', 'downzaps', 'value', COUNT(DISTINCT "userId") FILTER (WHERE type = 'DONT_LIKE_THIS')),
json_build_object('name', 'zaps', 'value', COUNT(DISTINCT "userId") FILTER (WHERE type = 'TIP')),
json_build_object('name', 'donation', 'value', COUNT(DISTINCT "userId") FILTER (WHERE type = 'DONATION')),
json_build_object('name', 'territories', 'value', COUNT(DISTINCT "userId") FILTER (WHERE type = 'TERRITORY'))
if (when !== 'day') {
return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', day) as time, json_build_array(
json_build_object('name', 'any', 'value', floor(avg("any"))),
json_build_object('name', 'jobs', 'value', floor(avg(jobs))),
json_build_object('name', 'boost', 'value', floor(avg(boost))),
json_build_object('name', 'fees', 'value', floor(avg(fees))),
json_build_object('name', 'zaps', 'value', floor(avg(tips))),
json_build_object('name', 'donation', 'value', floor(avg(donations))),
json_build_object('name', 'territories', 'value', floor(avg(territories)))
) AS data
FROM spender_growth_days
WHERE ${viewIntervalClause(range, 'spender_growth_days')}
GROUP BY time
ORDER BY time ASC`, ...range)
}

return await models.$queryRawUnsafe(
`${withClause(range)}
SELECT time, json_build_array(
json_build_object('name', 'any', 'value', count(DISTINCT "userId")),
json_build_object('name', 'jobs', 'value', count(DISTINCT "userId") FILTER (WHERE act = 'STREAM')),
json_build_object('name', 'boost', 'value', count(DISTINCT "userId") FILTER (WHERE act = 'BOOST')),
json_build_object('name', 'fees', 'value', count(DISTINCT "userId") FILTER (WHERE act = 'FEE')),
json_build_object('name', 'zaps', 'value', count(DISTINCT "userId") FILTER (WHERE act = 'TIP')),
json_build_object('name', 'donation', 'value', count(DISTINCT "userId") FILTER (WHERE act = 'DONATION')),
json_build_object('name', 'territories', 'value', count(DISTINCT "userId") FILTER (WHERE act = 'TERRITORY'))
) AS data
FROM ${viewGroup(range, 'spender_growth')}
FROM times
LEFT JOIN
((SELECT "ItemAct".created_at, "userId", act::text as act
FROM "ItemAct"
WHERE ${intervalClause(range, 'ItemAct')})
UNION ALL
(SELECT created_at, "userId", 'DONATION' as act
FROM "Donation"
WHERE ${intervalClause(range, 'Donation')})
UNION ALL
(SELECT created_at, "userId", 'TERRITORY' as act
FROM "SubAct"
WHERE type = 'BILLING' AND ${intervalClause(range, 'SubAct')})
) u ON time = date_trunc('${timeUnitForRange(range)}', u.created_at)
GROUP BY time
ORDER BY time ASC`, ...range)
},
itemGrowth: async (parent, { when, to, from }, { models }) => {
const range = whenRange(when, from, to)

return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', t) at time zone 'America/Chicago' as time, json_build_array(
json_build_object('name', 'posts', 'value', sum(posts)),
json_build_object('name', 'comments', 'value', sum(comments)),
json_build_object('name', 'jobs', 'value', sum(jobs)),
json_build_object('name', 'zaps', 'value', sum(zaps)),
json_build_object('name', 'territories', 'value', sum(territories)),
json_build_object('name', 'comments/posts', 'value', ROUND(sum(comments)/GREATEST(sum(posts), 1), 2))
if (when !== 'day') {
return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', day) as time, json_build_array(
json_build_object('name', 'posts', 'value', sum(posts)),
json_build_object('name', 'comments', 'value', sum(comments)),
json_build_object('name', 'jobs', 'value', sum(jobs)),
json_build_object('name', 'comments/posts', 'value', ROUND(sum(comments)/GREATEST(sum(posts), 1), 2))
) AS data
FROM item_growth_days
WHERE ${viewIntervalClause(range, 'item_growth_days')}
GROUP BY time
ORDER BY time ASC`, ...range)
}

return await models.$queryRawUnsafe(
`${withClause(range)}
SELECT time, json_build_array(
json_build_object('name', 'comments', 'value', count("parentId")),
json_build_object('name', 'jobs', 'value', count("subName") FILTER (WHERE "subName" = 'jobs')),
json_build_object('name', 'posts', 'value', count("Item".id)-count("parentId")-(count("subName") FILTER (WHERE "subName" = 'jobs'))),
json_build_object('name', 'comments/posts', 'value', ROUND(count("parentId")/GREATEST(count("Item".id)-count("parentId"), 1), 2))
) AS data
FROM ${viewGroup(range, 'item_growth')}
FROM times
LEFT JOIN "Item" ON ${intervalClause(range, 'Item')} AND time = date_trunc('${timeUnitForRange(range)}', created_at)
GROUP BY time
ORDER BY time ASC`, ...range)
},
spendingGrowth: async (parent, { when, to, from }, { models }) => {
const range = whenRange(when, from, to)

return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', t) at time zone 'America/Chicago' as time, json_build_array(
json_build_object('name', 'jobs', 'value', sum(jobs)),
json_build_object('name', 'boost', 'value', sum(boost)),
json_build_object('name', 'fees', 'value', sum(fees)),
json_build_object('name', 'zaps', 'value', sum(tips)),
json_build_object('name', 'donations', 'value', sum(donations)),
json_build_object('name', 'territories', 'value', sum(territories))
if (when !== 'day') {
return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', day) as time, json_build_array(
json_build_object('name', 'jobs', 'value', sum(jobs)),
json_build_object('name', 'boost', 'value', sum(boost)),
json_build_object('name', 'fees', 'value', sum(fees)),
json_build_object('name', 'zaps', 'value', sum(tips)),
json_build_object('name', 'donations', 'value', sum(donations)),
json_build_object('name', 'territories', 'value', sum(territories))
) AS data
FROM spending_growth_days
WHERE ${viewIntervalClause(range, 'spending_growth_days')}
GROUP BY time
ORDER BY time ASC`, ...range)
}

return await models.$queryRawUnsafe(
`${withClause(range)}
SELECT time, json_build_array(
json_build_object('name', 'jobs', 'value', coalesce(floor(sum(CASE WHEN act = 'STREAM' THEN msats ELSE 0 END)/1000),0)),
json_build_object('name', 'boost', 'value', coalesce(floor(sum(CASE WHEN act = 'BOOST' THEN msats ELSE 0 END)/1000),0)),
json_build_object('name', 'fees', 'value', coalesce(floor(sum(CASE WHEN act NOT IN ('BOOST', 'TIP', 'STREAM', 'DONATION', 'REVENUE') THEN msats ELSE 0 END)/1000),0)),
json_build_object('name', 'zaps', 'value', coalesce(floor(sum(CASE WHEN act = 'TIP' THEN msats ELSE 0 END)/1000),0)),
json_build_object('name', 'donations', 'value', coalesce(floor(sum(CASE WHEN act = 'DONATION' THEN msats ELSE 0 END)/1000),0)),
json_build_object('name', 'territories', 'value', coalesce(floor(sum(CASE WHEN act = 'REVENUE' THEN msats ELSE 0 END)/1000),0))
) AS data
FROM ${viewGroup(range, 'spending_growth')}
FROM times
LEFT JOIN
((SELECT "ItemAct".created_at, msats, act::text as act
FROM "ItemAct"
WHERE ${intervalClause(range, 'ItemAct')})
UNION ALL
(SELECT created_at, sats * 1000 as msats, 'DONATION' as act
FROM "Donation"
WHERE ${intervalClause(range, 'Donation')})
UNION ALL
(SELECT created_at, msats, 'REVENUE' as act
FROM "SubAct"
WHERE type = 'BILLING' AND ${intervalClause(range, 'SubAct')})
) u ON time = date_trunc('${timeUnitForRange(range)}', u.created_at)
GROUP BY time
ORDER BY time ASC`, ...range)
},
stackerGrowth: async (parent, { when, to, from }, { models }) => {
const range = whenRange(when, from, to)

return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', t) at time zone 'America/Chicago' as time, json_build_array(
json_build_object('name', 'any', 'value', COUNT(DISTINCT "userId")),
json_build_object('name', 'posts', 'value', COUNT(DISTINCT "userId") FILTER (WHERE type = 'POST')),
json_build_object('name', 'comments', 'value', COUNT(DISTINCT "userId") FILTER (WHERE type = 'COMMENT')),
json_build_object('name', 'rewards', 'value', COUNT(DISTINCT "userId") FILTER (WHERE type = 'EARN')),
json_build_object('name', 'referrals', 'value', COUNT(DISTINCT "userId") FILTER (WHERE type = 'REFERRAL')),
json_build_object('name', 'territories', 'value', COUNT(DISTINCT "userId") FILTER (WHERE type = 'REVENUE'))
if (when !== 'day') {
return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', day) as time, json_build_array(
json_build_object('name', 'any', 'value', floor(avg("any"))),
json_build_object('name', 'posts', 'value', floor(avg(posts))),
json_build_object('name', 'comments', 'value', floor(floor(avg(comments)))),
json_build_object('name', 'rewards', 'value', floor(avg(rewards))),
json_build_object('name', 'referrals', 'value', floor(avg(referrals))),
json_build_object('name', 'territories', 'value', floor(avg(territories)))
) AS data
FROM stackers_growth_days
WHERE ${viewIntervalClause(range, 'stackers_growth_days')}
GROUP BY time
ORDER BY time ASC`, ...range)
}

return await models.$queryRawUnsafe(
`${withClause(range)}
SELECT time, json_build_array(
json_build_object('name', 'any', 'value', count(distinct user_id)),
json_build_object('name', 'posts', 'value', count(distinct user_id) FILTER (WHERE type = 'POST')),
json_build_object('name', 'comments', 'value', count(distinct user_id) FILTER (WHERE type = 'COMMENT')),
json_build_object('name', 'rewards', 'value', count(distinct user_id) FILTER (WHERE type = 'EARN')),
json_build_object('name', 'referrals', 'value', count(distinct user_id) FILTER (WHERE type = 'REFERRAL')),
json_build_object('name', 'territories', 'value', count(distinct user_id) FILTER (WHERE type = 'REVENUE'))
) AS data
FROM ${viewGroup(range, 'stackers_growth')}
FROM times
LEFT JOIN
((SELECT "ItemAct".created_at, "Item"."userId" as user_id, CASE WHEN "Item"."parentId" IS NULL THEN 'POST' ELSE 'COMMENT' END as type
FROM "ItemAct"
JOIN "Item" on "ItemAct"."itemId" = "Item".id
WHERE ${intervalClause(range, 'ItemAct')} AND "ItemAct".act = 'TIP')
UNION ALL
(SELECT created_at, "userId" as user_id, 'EARN' as type
FROM "Earn"
WHERE ${intervalClause(range, 'Earn')})
UNION ALL
(SELECT created_at, "userId" as user_id, 'REVENUE' as type
FROM "SubAct"
WHERE type = 'REVENUE' AND ${intervalClause(range, 'SubAct')})
UNION ALL
(SELECT created_at, "referrerId" as user_id, 'REFERRAL' as type
FROM "ReferralAct"
WHERE ${intervalClause(range, 'ReferralAct')})) u ON time = date_trunc('${timeUnitForRange(range)}', u.created_at)
GROUP BY time
ORDER BY time ASC`, ...range)
},
stackingGrowth: async (parent, { when, to, from }, { models }) => {
const range = whenRange(when, from, to)

return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', t) at time zone 'America/Chicago' as time, json_build_array(
json_build_object('name', 'rewards', 'value', sum(rewards)),
json_build_object('name', 'posts', 'value', sum(posts)),
json_build_object('name', 'comments', 'value', sum(comments)),
json_build_object('name', 'referrals', 'value', sum(referrals)),
json_build_object('name', 'territories', 'value', sum(territories))
if (when !== 'day') {
return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', day) as time, json_build_array(
json_build_object('name', 'rewards', 'value', sum(rewards)),
json_build_object('name', 'posts', 'value', sum(posts)),
json_build_object('name', 'comments', 'value', sum(comments)),
json_build_object('name', 'referrals', 'value', sum(referrals)),
json_build_object('name', 'territories', 'value', sum(territories))
) AS data
FROM stacking_growth_days
WHERE ${viewIntervalClause(range, 'stacking_growth_days')}
GROUP BY time
ORDER BY time ASC`, ...range)
}

return await models.$queryRawUnsafe(
`${withClause(range)}
SELECT time, json_build_array(
json_build_object('name', 'rewards', 'value', coalesce(floor(sum(airdrop)/1000),0)),
json_build_object('name', 'posts', 'value', coalesce(floor(sum(post)/1000),0)),
json_build_object('name', 'comments', 'value', coalesce(floor(sum(comment)/1000),0)),
json_build_object('name', 'referrals', 'value', coalesce(floor(sum(referral)/1000),0)),
json_build_object('name', 'territories', 'value', coalesce(floor(sum(revenue)/1000),0))
) AS data
FROM ${viewGroup(range, 'stacking_growth')}
FROM times
LEFT JOIN
((SELECT "ItemAct".created_at, 0 as airdrop,
CASE WHEN "Item"."parentId" IS NULL THEN 0 ELSE "ItemAct".msats END as comment,
CASE WHEN "Item"."parentId" IS NULL THEN "ItemAct".msats ELSE 0 END as post,
0 as referral, 0 as revenue
FROM "ItemAct"
JOIN "Item" on "ItemAct"."itemId" = "Item".id
WHERE ${intervalClause(range, 'ItemAct')} AND "ItemAct".act = 'TIP')
UNION ALL
(SELECT created_at, 0 as airdrop, 0 as post, 0 as comment, msats as referral, 0 as revenue
FROM "ReferralAct"
WHERE ${intervalClause(range, 'ReferralAct')})
UNION ALL
(SELECT created_at, 0 as airdrop, 0 as post, 0 as comment, 0 as referral, msats as revenue
FROM "SubAct"
WHERE type = 'REVENUE' AND ${intervalClause(range, 'SubAct')})
UNION ALL
(SELECT created_at, msats as airdrop, 0 as post, 0 as comment, 0 as referral, 0 as revenue
FROM "Earn"
WHERE ${intervalClause(range, 'Earn')})) u ON time = date_trunc('${timeUnitForRange(range)}', u.created_at)
GROUP BY time
ORDER BY time ASC`, ...range)
},
itemGrowthSubs: async (parent, { when, to, from, sub }, { models }) => {
const range = whenRange(when, from, to)

const subExists = await models.sub.findUnique({ where: { name: sub } })
if (!subExists) throw new Error('Sub not found')

return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', t) at time zone 'America/Chicago' as time, json_build_array(
json_build_object('name', 'posts', 'value', coalesce(sum(posts),0)),
json_build_object('name', 'comments', 'value', coalesce(sum(comments),0))
) AS data
FROM ${viewGroup(range, 'sub_stats')}
WHERE sub_name = $3
GROUP BY time
ORDER BY time ASC`, ...range, sub)
},
revenueGrowthSubs: async (parent, { when, to, from, sub }, { models }) => {
const range = whenRange(when, from, to)

const subExists = await models.sub.findUnique({ where: { name: sub } })
if (!subExists) throw new Error('Sub not found')

return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', t) at time zone 'America/Chicago' as time, json_build_array(
json_build_object('name', 'revenue', 'value', coalesce(sum(msats_revenue/1000),0)),
json_build_object('name', 'stacking', 'value', coalesce(sum(msats_stacked/1000),0)),
json_build_object('name', 'spending', 'value', coalesce(sum(msats_spent/1000),0))
) AS data
FROM ${viewGroup(range, 'sub_stats')}
WHERE sub_name = $3
GROUP BY time
ORDER BY time ASC`, ...range, sub)
}
}
}
23
api/resolvers/image.js
Normal file
@@ -0,0 +1,23 @@
import { ANON_USER_ID, AWS_S3_URL_REGEXP } from '../../lib/constants'
import { msatsToSats } from '../../lib/format'

export default {
Query: {
imageFeesInfo: async (parent, { s3Keys }, { models, me }) => {
return imageFeesInfo(s3Keys, { models, me })
}
}
}

export function uploadIdsFromText (text, { models }) {
if (!text) return null
return [...text.matchAll(AWS_S3_URL_REGEXP)].map(m => Number(m[1]))
}

export async function imageFeesInfo (s3Keys, { models, me }) {
const [info] = await models.$queryRawUnsafe('SELECT * FROM image_fees_info($1::INTEGER, $2::INTEGER[])', me ? me.id : ANON_USER_ID, s3Keys)
const imageFee = msatsToSats(info.imageFeeMsats)
const totalFeesMsats = info.nUnpaid * Number(info.imageFeeMsats)
const totalFees = msatsToSats(totalFeesMsats)
return { ...info, imageFee, totalFees, totalFeesMsats }
}
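The fee math in imageFeesInfo stays in millisatoshis until the final conversion. A worked example with hypothetical numbers (a 10-sat image fee and three unpaid uploads), assuming msatsToSats divides by 1,000 as formatting helpers typically do:

// hypothetical values, just to trace the arithmetic above
const info = { imageFeeMsats: 10000, nUnpaid: 3 } // 10 sats per image, 3 unpaid uploads
const msatsToSats = msats => Math.floor(Number(msats) / 1000)
const imageFee = msatsToSats(info.imageFeeMsats) // 10 sats per image
const totalFeesMsats = info.nUnpaid * Number(info.imageFeeMsats) // 30000 msats
const totalFees = msatsToSats(totalFeesMsats) // 30 sats total
console.log(imageFee, totalFees) // 10 30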
@@ -16,10 +16,9 @@ import { GraphQLJSONObject as JSONObject } from 'graphql-type-json'
import admin from './admin'
import blockHeight from './blockHeight'
import chainFee from './chainFee'
import image from './image'
import { GraphQLScalarType, Kind } from 'graphql'
import { createIntScalar } from 'graphql-scalar'
import paidAction from './paidAction'
import vault from './vault'

const date = new GraphQLScalarType({
name: 'Date',
@@ -55,5 +54,4 @@ const limit = createIntScalar({
})

export default [user, item, message, wallet, lnurl, notifications, invite, sub,
upload, search, growth, rewards, referrals, price, admin, blockHeight, chainFee,
{ JSONObject }, { Date: date }, { Limit: limit }, paidAction, vault]
upload, search, growth, rewards, referrals, price, admin, blockHeight, chainFee, image, { JSONObject }, { Date: date }, { Limit: limit }]
@@ -1,15 +1,14 @@
import { inviteSchema, validateSchema } from '@/lib/validate'
import { msatsToSats } from '@/lib/format'
import assertApiKeyNotPermitted from './apiKey'
import { GqlAuthenticationError, GqlInputError } from '@/lib/error'
import { Prisma } from '@prisma/client'
import { GraphQLError } from 'graphql'
import { inviteSchema, ssValidate } from '../../lib/validate'
import { msatsToSats } from '../../lib/format'

export default {
Query: {
invites: async (parent, args, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'FORBIDDEN' } })
}

return await models.invite.findMany({
where: {
userId: me.id
@@ -29,48 +28,26 @@ export default {
},

Mutation: {
createInvite: async (parent, { id, gift, limit, description }, { me, models }) => {
createInvite: async (parent, { gift, limit }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'FORBIDDEN' } })
}
assertApiKeyNotPermitted({ me })

await validateSchema(inviteSchema, { id, gift, limit, description })
try {
return await models.invite.create({
data: {
id,
gift,
limit,
userId: me.id,
description
}
})
} catch (error) {
if (error instanceof Prisma.PrismaClientKnownRequestError) {
if (error.code === 'P2002' && error.meta.target.includes('id')) {
throw new GqlInputError('an invite with this code already exists')
}
}
throw error
}
await ssValidate(inviteSchema, { gift, limit })

return await models.invite.create({
data: { gift, limit, userId: me.id }
})
},
revokeInvite: async (parent, { id }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'FORBIDDEN' } })
}

try {
return await models.invite.update({
where: { id, userId: me.id },
data: { revoked: true }
})
} catch (err) {
if (err.code === 'P2025') {
throw new GqlInputError('invite not found')
}
throw err
}
return await models.invite.update({
where: { id },
data: { revoked: true }
})
}
},

@@ -83,10 +60,7 @@ export default {
},
poor: async (invite, args, { me, models }) => {
const user = await models.user.findUnique({ where: { id: invite.userId } })
return msatsToSats(user.msats) < invite.gift && msatsToSats(user.mcredits) < invite.gift
},
description: (invite, args, { me }) => {
return invite.userId === me?.id ? invite.description : undefined
return msatsToSats(user.msats) < invite.gift
}
}
}
File diff suppressed because it is too large
@@ -1,8 +1,7 @@
import { randomBytes } from 'crypto'
import { bech32 } from 'bech32'
import { GraphQLError } from 'graphql'
import assertGofacYourself from './ofac'
import assertApiKeyNotPermitted from './apiKey'
import { GqlAuthenticationError } from '@/lib/error'

function encodedUrl (iurl, tag, k1) {
const url = new URL(iurl)
@@ -27,19 +26,16 @@ export default {
}
},
Mutation: {
createAuth: async (parent, args, { models, me }) => {
assertApiKeyNotPermitted({ me })
createAuth: async (parent, args, { models }) => {
return await models.lnAuth.create({ data: { k1: k1() } })
},
createWith: async (parent, args, { me, models, headers }) => {
await assertGofacYourself({ models, headers })

if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}

assertApiKeyNotPermitted({ me })

return await models.lnWith.create({ data: { k1: k1(), userId: me.id } })
}
},
@@ -1,4 +1,4 @@
import { GqlInputError } from '@/lib/error'
import { GraphQLError } from 'graphql'

export default {
Query: {
@@ -11,7 +11,7 @@ export default {
Mutation: {
createMessage: async (parent, { text }, { me, models }) => {
if (!text) {
throw new GqlInputError('must have text')
throw new GraphQLError('Must have text', { extensions: { code: 'BAD_INPUT' } })
}

return await models.message.create({
@@ -1,18 +1,17 @@
import { decodeCursor, LIMIT, nextNoteCursorEncoded } from '@/lib/cursor'
import { getItem, filterClause, whereClause, muteClause, activeOrMine } from './item'
import { getInvoice, getWithdrawl } from './wallet'
import { pushSubscriptionSchema, validateSchema } from '@/lib/validate'
import { replyToSubscription } from '@/lib/webPush'
import { GraphQLError } from 'graphql'
import { decodeCursor, LIMIT, nextCursorEncoded } from '../../lib/cursor'
import { getItem, filterClause, whereClause, muteClause } from './item'
import { getInvoice } from './wallet'
import { pushSubscriptionSchema, ssValidate } from '../../lib/validate'
import { replyToSubscription } from '../webPush'
import { getSub } from './sub'
import { GqlAuthenticationError, GqlInputError } from '@/lib/error'
import { WALLET_MAX_RETRIES, WALLET_RETRY_BEFORE_MS } from '@/lib/constants'

export default {
Query: {
notifications: async (parent, { cursor, inc }, { me, models }) => {
const decodedCursor = decodeCursor(cursor)
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}

const meFull = await models.user.findUnique({ where: { id: me.id } })
@@ -79,17 +78,16 @@ export default {
itemDrivenQueries.push(
`SELECT "Item".*, "Item".created_at AS "sortTime", 'Reply' AS type
FROM "ThreadSubscription"
JOIN "Reply" r ON "ThreadSubscription"."itemId" = r."ancestorId"
JOIN "Item" ON r."itemId" = "Item".id
JOIN "Item" p ON "ThreadSubscription"."itemId" = p.id
JOIN "Item" ON ${meFull.noteAllDescendants ? '"Item".path <@ p.path' : '"Item"."parentId" = p.id'}
${whereClause(
'"ThreadSubscription"."userId" = $1',
'r.created_at >= "ThreadSubscription".created_at',
'r.created_at < $2',
'r."userId" <> $1',
...(meFull.noteAllDescendants ? [] : ['r.level = 1'])
'"Item"."userId" <> $1',
'"Item".created_at >= "ThreadSubscription".created_at',
'"Item"."parentId" IS NOT NULL'
)}
ORDER BY "sortTime" DESC
LIMIT ${LIMIT}`
LIMIT ${LIMIT}+$3`
)

// User subscriptions
@@ -99,7 +97,6 @@ export default {
FROM "Item"
JOIN "UserSubscription" ON "Item"."userId" = "UserSubscription"."followeeId"
${whereClause(
'"Item".created_at < $2',
'"UserSubscription"."followerId" = $1',
`(
("Item"."parentId" IS NULL AND "UserSubscription"."postsSubscribedAt" IS NOT NULL AND "Item".created_at >= "UserSubscription"."postsSubscribedAt")
@@ -107,23 +104,7 @@ export default {
)`
)}
ORDER BY "sortTime" DESC
LIMIT ${LIMIT}`
)

// Territory subscriptions
itemDrivenQueries.push(
`SELECT "Item".*, "Item".created_at AS "sortTime", 'TerritoryPost' AS type
FROM "Item"
JOIN "SubSubscription" ON "Item"."subName" = "SubSubscription"."subName"
${whereClause(
'"Item".created_at < $2',
'"SubSubscription"."userId" = $1',
'"Item"."userId" <> $1',
'"Item"."parentId" IS NULL',
'"Item".created_at >= "SubSubscription".created_at'
)}
ORDER BY "sortTime" DESC
LIMIT ${LIMIT}`
LIMIT ${LIMIT}+$3`
)

// mentions
@@ -133,28 +114,11 @@ export default {
FROM "Mention"
JOIN "Item" ON "Mention"."itemId" = "Item".id
${whereClause(
'"Item".created_at < $2',
'"Mention"."userId" = $1',
'"Item"."userId" <> $1'
)}
ORDER BY "sortTime" DESC
LIMIT ${LIMIT}`
)
}
// item mentions
if (meFull.noteItemMentions) {
itemDrivenQueries.push(
`SELECT "Referrer".*, "ItemMention".created_at AS "sortTime", 'ItemMention' AS type
FROM "ItemMention"
JOIN "Item" "Referee" ON "ItemMention"."refereeId" = "Referee".id
JOIN "Item" "Referrer" ON "ItemMention"."referrerId" = "Referrer".id
${whereClause(
'"ItemMention".created_at < $2',
'"Referrer"."userId" <> $1',
'"Referee"."userId" = $1'
)}
ORDER BY "sortTime" DESC
LIMIT ${LIMIT}`
LIMIT ${LIMIT}+$3`
)
}
// Inner union to de-dupe item-driven notifications
@@ -166,92 +130,72 @@ export default {
${itemDrivenQueries.map(q => `(${q})`).join(' UNION ALL ')}
) as "Item"
${whereClause(
'"Item".created_at < $2',
'"Item".created_at <= $2',
await filterClause(me, models),
muteClause(me),
activeOrMine(me))}
muteClause(me))}
ORDER BY id ASC, CASE
WHEN type = 'Mention' THEN 1
WHEN type = 'Reply' THEN 2
WHEN type = 'FollowActivity' THEN 3
WHEN type = 'TerritoryPost' THEN 4
WHEN type = 'ItemMention' THEN 5
END ASC
)`
)

// territory transfers
queries.push(
`(SELECT "TerritoryTransfer".id::text, "TerritoryTransfer"."created_at" AS "sortTime", NULL as "earnedSats",
'TerritoryTransfer' AS type
FROM "TerritoryTransfer"
WHERE "TerritoryTransfer"."newUserId" = $1
AND "TerritoryTransfer"."created_at" <= $2
`(SELECT "Item".id::text, "Item"."statusUpdatedAt" AS "sortTime", NULL as "earnedSats",
'JobChanged' AS type
FROM "Item"
WHERE "Item"."userId" = $1
AND "maxBid" IS NOT NULL
AND "statusUpdatedAt" <= $2 AND "statusUpdatedAt" <> created_at
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
LIMIT ${LIMIT}+$3)`
)

if (meFull.noteItemSats) {
queries.push(
`(SELECT "Item".id::TEXT, "Item"."lastZapAt" AS "sortTime",
"Item".msats/1000 as "earnedSats", 'Votification' AS type
`(SELECT "Item".id::TEXT, MAX("ItemAct".created_at) AS "sortTime",
MAX("Item".msats/1000) as "earnedSats", 'Votification' AS type
FROM "Item"
WHERE "Item"."userId" = $1
AND "Item"."lastZapAt" < $2
JOIN "ItemAct" ON "ItemAct"."itemId" = "Item".id
WHERE "ItemAct"."userId" <> $1
AND "ItemAct".created_at <= $2
AND "ItemAct".act IN ('TIP', 'FEE')
AND "Item"."userId" = $1
GROUP BY "Item".id
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
LIMIT ${LIMIT}+$3)`
)
}

if (meFull.noteForwardedSats) {
queries.push(
`(SELECT "Item".id::TEXT, "Item"."lastZapAt" AS "sortTime",
("Item".msats / 1000 * "ItemForward".pct / 100) as "earnedSats", 'ForwardedVotification' AS type
`(SELECT "Item".id::TEXT, MAX("ItemAct".created_at) AS "sortTime",
MAX("Item".msats / 1000 * "ItemForward".pct / 100) as "earnedSats", 'ForwardedVotification' AS type
FROM "Item"
JOIN "ItemAct" ON "ItemAct"."itemId" = "Item".id
JOIN "ItemForward" ON "ItemForward"."itemId" = "Item".id AND "ItemForward"."userId" = $1
WHERE "Item"."userId" <> $1
AND "Item"."lastZapAt" < $2
WHERE "ItemAct"."userId" <> $1
AND "Item"."userId" <> $1
AND "ItemAct".created_at <= $2
AND "ItemAct".act IN ('TIP')
GROUP BY "Item".id
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
LIMIT ${LIMIT}+$3)`
)
}

if (meFull.noteDeposits) {
queries.push(
`(SELECT "Invoice".id::text, "Invoice"."confirmedAt" AS "sortTime",
FLOOR("Invoice"."msatsReceived" / 1000) as "earnedSats",
`(SELECT "Invoice".id::text, "Invoice"."confirmedAt" AS "sortTime", FLOOR("msatsReceived" / 1000) as "earnedSats",
'InvoicePaid' AS type
FROM "Invoice"
WHERE "Invoice"."userId" = $1
AND "Invoice"."confirmedAt" IS NOT NULL
AND "Invoice"."created_at" < $2
AND (
("Invoice"."isHeld" IS NULL AND "Invoice"."actionType" IS NULL)
OR (
"Invoice"."actionType" = 'RECEIVE'
AND "Invoice"."actionState" = 'PAID'
)
)
AND "confirmedAt" IS NOT NULL
AND "isHeld" IS NULL
AND created_at <= $2
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
)
}

if (meFull.noteWithdrawals) {
queries.push(
`(SELECT "Withdrawl".id::text, MAX(COALESCE("Invoice"."confirmedAt", "Withdrawl".created_at)) AS "sortTime",
FLOOR(MAX("Withdrawl"."msatsPaid" / 1000)) as "earnedSats",
'WithdrawlPaid' AS type
FROM "Withdrawl"
LEFT JOIN "InvoiceForward" ON "InvoiceForward"."withdrawlId" = "Withdrawl".id
LEFT JOIN "Invoice" ON "InvoiceForward"."invoiceId" = "Invoice".id
WHERE "Withdrawl"."userId" = $1
AND "Withdrawl".status = 'CONFIRMED'
AND "Withdrawl".created_at < $2
AND "InvoiceForward"."id" IS NULL
GROUP BY "Withdrawl".id
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
LIMIT ${LIMIT}+$3)`
)
}

@@ -261,10 +205,10 @@ export default {
'Invitification' AS type
FROM users JOIN "Invite" on users."inviteId" = "Invite".id
WHERE "Invite"."userId" = $1
AND users.created_at < $2
AND users.created_at <= $2
GROUP BY "Invite".id
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
LIMIT ${LIMIT}+$3)`
)
queries.push(
`(SELECT users.id::text, users.created_at AS "sortTime", NULL as "earnedSats",
@@ -272,56 +216,37 @@ export default {
FROM users
WHERE "users"."referrerId" = $1
AND "inviteId" IS NULL
AND users.created_at < $2
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
AND users.created_at <= $2
LIMIT ${LIMIT}+$3)`
)
}

if (meFull.noteEarning) {
queries.push(
`(SELECT min(id)::text, created_at AS "sortTime", FLOOR(sum(msats) / 1000) as "earnedSats",
`SELECT min(id)::text, created_at AS "sortTime", FLOOR(sum(msats) / 1000) as "earnedSats",
'Earn' AS type
FROM "Earn"
WHERE "userId" = $1
AND created_at < $2
AND (type IS NULL OR type NOT IN ('FOREVER_REFERRAL', 'ONE_DAY_REFERRAL'))
GROUP BY "userId", created_at
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
AND created_at <= $2
GROUP BY "userId", created_at`
)
queries.push(
`(SELECT min(id)::text, created_at AS "sortTime", FLOOR(sum(msats) / 1000) as "earnedSats",
`SELECT min(id)::text, created_at AS "sortTime", FLOOR(sum(msats) / 1000) as "earnedSats",
'Revenue' AS type
FROM "SubAct"
WHERE "userId" = $1
AND type = 'REVENUE'
AND created_at < $2
GROUP BY "userId", "subName", created_at
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
)
queries.push(
`(SELECT min(id)::text, created_at AS "sortTime", FLOOR(sum(msats) / 1000) as "earnedSats",
'ReferralReward' AS type
FROM "Earn"
WHERE "userId" = $1
AND created_at < $2
AND type IN ('FOREVER_REFERRAL', 'ONE_DAY_REFERRAL')
GROUP BY "userId", created_at
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
AND created_at <= $2
GROUP BY "userId", "subName", created_at`
)
}

if (meFull.noteCowboyHat) {
queries.push(
`(SELECT id::text, updated_at AS "sortTime", 0 as "earnedSats", 'Streak' AS type
`SELECT id::text, updated_at AS "sortTime", 0 as "earnedSats", 'Streak' AS type
FROM "Streak"
WHERE "userId" = $1
AND updated_at < $2
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
AND updated_at <= $2`
)
}

@@ -331,58 +256,31 @@ export default {
FROM "Sub"
WHERE "Sub"."userId" = $1
AND "status" <> 'ACTIVE'
AND "statusUpdatedAt" < $2
AND "statusUpdatedAt" <= $2
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
)

queries.push(
`(SELECT "Reminder".id::text, "Reminder"."remindAt" AS "sortTime", NULL as "earnedSats", 'Reminder' AS type
FROM "Reminder"
WHERE "Reminder"."userId" = $1
AND "Reminder"."remindAt" < $2
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
)

queries.push(
`(SELECT "Invoice".id::text,
CASE
WHEN
"Invoice"."paymentAttempt" < ${WALLET_MAX_RETRIES}
AND "Invoice"."userCancel" = false
AND "Invoice"."cancelledAt" <= now() - interval '${`${WALLET_RETRY_BEFORE_MS} milliseconds`}'
THEN "Invoice"."cancelledAt" + interval '${`${WALLET_RETRY_BEFORE_MS} milliseconds`}'
ELSE "Invoice"."updated_at"
END AS "sortTime", NULL as "earnedSats", 'Invoicification' AS type
FROM "Invoice"
WHERE "Invoice"."userId" = $1
AND "Invoice"."updated_at" < $2
AND "Invoice"."actionState" = 'FAILED'
AND (
-- this is the inverse of the filter for automated retries
"Invoice"."paymentAttempt" >= ${WALLET_MAX_RETRIES}
OR "Invoice"."userCancel" = true
OR "Invoice"."cancelledAt" <= now() - interval '${`${WALLET_RETRY_BEFORE_MS} milliseconds`}'
)
AND (
"Invoice"."actionType" = 'ITEM_CREATE' OR
"Invoice"."actionType" = 'ZAP' OR
"Invoice"."actionType" = 'DOWN_ZAP' OR
"Invoice"."actionType" = 'POLL_VOTE' OR
"Invoice"."actionType" = 'BOOST'
)
ORDER BY "sortTime" DESC
LIMIT ${LIMIT})`
LIMIT ${LIMIT}+$3)`
)

// we do all this crazy subquery stuff to make 'reward' islands
const notifications = await models.$queryRawUnsafe(
`SELECT id, "sortTime", "earnedSats", type,
"sortTime" AS "minSortTime"
`SELECT MAX(id) AS id, MAX("sortTime") AS "sortTime", sum("earnedSats") AS "earnedSats", type,
MIN("sortTime") AS "minSortTime"
FROM
(${queries.join(' UNION ALL ')}) u
(SELECT *,
CASE
WHEN type = 'Earn' THEN
ROW_NUMBER() OVER(ORDER BY "sortTime" DESC) -
ROW_NUMBER() OVER(PARTITION BY type = 'Earn' ORDER BY "sortTime" DESC)
ELSE
ROW_NUMBER() OVER(ORDER BY "sortTime" DESC)
END as island
FROM
(${queries.join(' UNION ALL ')}) u
) sub
GROUP BY type, island
ORDER BY "sortTime" DESC
LIMIT ${LIMIT}`, me.id, decodedCursor.time)
OFFSET $3
LIMIT ${LIMIT}`, me.id, decodedCursor.time, decodedCursor.offset)
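The 'islands' query above is the classic gaps-and-islands trick: subtracting the Earn-partitioned row number from the overall row number yields a value that is constant across each consecutive run of 'Earn' rows, so GROUP BY type, island collapses each run into one notification. The same arithmetic in plain JavaScript, on made-up rows:

// gaps-and-islands by row-number subtraction (toy illustration)
const types = ['Earn', 'Earn', 'Reply', 'Earn', 'Earn'] // already sorted by "sortTime" DESC
let rowNumber = 0 // ROW_NUMBER() OVER (ORDER BY "sortTime" DESC)
let earnRowNumber = 0 // ROW_NUMBER() within the Earn partition
const islands = types.map(type => {
  rowNumber++
  if (type === 'Earn') {
    earnRowNumber++
    return { type, island: rowNumber - earnRowNumber } // constant within a consecutive run
  }
  return { type, island: rowNumber } // non-Earn rows stay singletons
})
console.log(islands)
// the first Earn run shares island 0 and the run after 'Reply' shares island 1,
// so the four Earn rows group into just two notifications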

if (decodedCursor.offset === 0) {
await models.user.update({ where: { id: me.id }, data: { checkedNotesAt: new Date() } })
@@ -390,7 +288,7 @@ export default {

return {
lastChecked: meFull.checkedNotesAt,
cursor: notifications.length === LIMIT ? nextNoteCursorEncoded(decodedCursor, notifications) : null,
cursor: notifications.length === LIMIT ? nextCursorEncoded(decodedCursor) : null,
notifications
}
}
@@ -398,10 +296,10 @@ export default {
Mutation: {
savePushSubscription: async (parent, { endpoint, p256dh, auth, oldEndpoint }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}

await validateSchema(pushSubscriptionSchema, { endpoint, p256dh, auth })
await ssValidate(pushSubscriptionSchema, { endpoint, p256dh, auth })

let dbPushSubscription
if (oldEndpoint) {
@@ -422,12 +320,12 @@ export default {
},
deletePushSubscription: async (parent, { endpoint }, { me, models }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}

const subscription = await models.pushSubscription.findFirst({ where: { endpoint, userId: Number(me.id) } })
if (!subscription) {
throw new GqlInputError('endpoint not found')
throw new GraphQLError('endpoint not found', { extensions: { code: 'BAD_INPUT' } })
}
const deletedSubscription = await models.pushSubscription.delete({ where: { id: subscription.id } })
console.log(`[webPush] deleted subscription ${deletedSubscription.id} of user ${deletedSubscription.userId} due to client request`)
@@ -450,21 +348,6 @@ export default {
FollowActivity: {
item: async (n, args, { models, me }) => getItem(n, { id: n.id }, { models, me })
},
TerritoryPost: {
item: async (n, args, { models, me }) => getItem(n, { id: n.id }, { models, me })
},
Reminder: {
item: async (n, args, { models, me }) => {
const { itemId } = await models.reminder.findUnique({ where: { id: Number(n.id) } })
return await getItem(n, { id: itemId }, { models, me })
}
},
TerritoryTransfer: {
sub: async (n, args, { models, me }) => {
const transfer = await models.territoryTransfer.findUnique({ where: { id: Number(n.id) }, include: { sub: true } })
return transfer.sub
}
},
JobChanged: {
item: async (n, args, { models, me }) => getItem(n, { id: n.id }, { models, me })
},
@@ -482,24 +365,6 @@ export default {
return subAct.subName
}
},
ReferralSource: {
__resolveType: async (n, args, { models }) => n.type
},
Referral: {
source: async (n, args, { models, me }) => {
// retrieve the referee landing record
const referral = await models.oneDayReferral.findFirst({ where: { refereeId: Number(n.id), landing: true } })
if (!referral) return null // if no landing record, it will return a generic referral

switch (referral.type) {
case 'POST':
case 'COMMENT': return { ...await getItem(n, { id: referral.typeId }, { models, me }), type: 'Item' }
case 'TERRITORY': return { ...await getSub(n, { name: referral.typeId }, { models, me }), type: 'Sub' }
case 'PROFILE': return { ...await models.user.findUnique({ where: { id: Number(referral.typeId) }, select: { name: true } }), type: 'User' }
default: return null
}
}
},
Streak: {
days: async (n, args, { models }) => {
const res = await models.$queryRaw`
@@ -509,14 +374,6 @@ export default {
`

return res.length ? res[0].days : null
},
type: async (n, args, { models }) => {
const res = await models.$queryRaw`
SELECT "type"
FROM "Streak"
WHERE id = ${Number(n.id)}
`
return res.length ? res[0].type : null
}
},
Earn: {
@@ -541,38 +398,13 @@ export default {
return null
}
},
ReferralReward: {
sources: async (n, args, { me, models }) => {
const [sources] = await models.$queryRawUnsafe(`
SELECT
COALESCE(FLOOR(sum(msats) FILTER(WHERE type = 'FOREVER_REFERRAL') / 1000), 0) AS forever,
COALESCE(FLOOR(sum(msats) FILTER(WHERE type = 'ONE_DAY_REFERRAL') / 1000), 0) AS "oneDay"
FROM "Earn"
WHERE "userId" = $1 AND created_at = $2
`, Number(me.id), new Date(n.sortTime))
if (sources.forever + sources.oneDay > 0) {
return sources
}

return null
}
},
Mention: {
mention: async (n, args, { models }) => true,
item: async (n, args, { models, me }) => getItem(n, { id: n.id }, { models, me })
},
ItemMention: {
item: async (n, args, { models, me }) => getItem(n, { id: n.id }, { models, me })
},
InvoicePaid: {
invoice: async (n, args, { me, models }) => getInvoice(n, { id: n.id }, { me, models })
},
Invoicification: {
invoice: async (n, args, { me, models }) => getInvoice(n, { id: n.id }, { me, models })
},
WithdrawlPaid: {
withdrawl: async (n, args, { me, models }) => getWithdrawl(n, { id: n.id }, { me, models })
},
Invitification: {
invite: async (n, args, { models }) => {
return await models.invite.findUnique({
@@ -1,11 +1,13 @@
import { GqlAuthorizationError } from '@/lib/error'
import { GraphQLError } from 'graphql'

// this function makes america more secure apparently
export default async function assertGofacYourself ({ models, headers, ip }) {
const country = await gOFACYourself({ models, headers, ip })
if (!country) return

throw new GqlAuthorizationError(`Your IP address is in ${country}. We cannot provide financial services to residents of ${country}.`)
throw new GraphQLError(
`Your IP address is in ${country}. We cannot provide financial services to residents of ${country}.`,
{ extensions: { code: 'FORBIDDEN' } })
}

export async function gOFACYourself ({ models, headers = {}, ip }) {
@@ -1,89 +0,0 @@
import { retryPaidAction } from '../paidAction'
import { USER_ID, WALLET_MAX_RETRIES, WALLET_RETRY_TIMEOUT_MS } from '@/lib/constants'

function paidActionType (actionType) {
switch (actionType) {
case 'ITEM_CREATE':
case 'ITEM_UPDATE':
return 'ItemPaidAction'
case 'ZAP':
case 'DOWN_ZAP':
case 'BOOST':
return 'ItemActPaidAction'
case 'TERRITORY_CREATE':
case 'TERRITORY_UPDATE':
case 'TERRITORY_BILLING':
case 'TERRITORY_UNARCHIVE':
return 'SubPaidAction'
case 'DONATE':
return 'DonatePaidAction'
case 'POLL_VOTE':
return 'PollVotePaidAction'
case 'RECEIVE':
return 'ReceivePaidAction'
case 'BUY_CREDITS':
return 'BuyCreditsPaidAction'
default:
throw new Error('Unknown action type')
}
}

export default {
Query: {
paidAction: async (parent, { invoiceId }, { models, me }) => {
const invoice = await models.invoice.findUnique({
where: {
id: invoiceId,
userId: me?.id ?? USER_ID.anon
}
})
if (!invoice) {
throw new Error('Invoice not found')
}

return {
type: paidActionType(invoice.actionType),
invoice,
result: invoice.actionResult,
paymentMethod: invoice.actionOptimistic ? 'OPTIMISTIC' : 'PESSIMISTIC'
}
}
},
Mutation: {
retryPaidAction: async (parent, { invoiceId, newAttempt }, { models, me, lnd }) => {
if (!me) {
throw new Error('You must be logged in')
}

// make sure only one client at a time can retry by acquiring a lock that expires
const [invoice] = await models.$queryRaw`
UPDATE "Invoice"
SET "retryPendingSince" = now()
WHERE
id = ${invoiceId} AND
"userId" = ${me.id} AND
"actionState" = 'FAILED' AND
("retryPendingSince" IS NULL OR "retryPendingSince" < now() - ${`${WALLET_RETRY_TIMEOUT_MS} milliseconds`}::interval)
RETURNING *`
if (!invoice) {
throw new Error('Invoice not found or retry pending')
}

// do we want to retry a payment from the beginning with all sender and receiver wallets?
const paymentAttempt = newAttempt ? invoice.paymentAttempt + 1 : invoice.paymentAttempt
if (paymentAttempt > WALLET_MAX_RETRIES) {
throw new Error('Payment has been retried too many times')
}

const result = await retryPaidAction(invoice.actionType, { invoice }, { paymentAttempt, models, me, lnd })

return {
...result,
type: paidActionType(invoice.actionType)
}
}
},
PaidAction: {
__resolveType: obj => obj.type
}
}
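The removed retryPaidAction mutation's UPDATE ... RETURNING doubles as an expiring lock: Postgres executes the conditional update atomically, so whichever request flips retryPendingSince first gets the row back and everyone else matches zero rows until the timeout lapses. A sketch of the same pattern in isolation (the helper name is hypothetical; table and columns mirror the query above):

// expiring lock via a single conditional UPDATE; undefined => lock not acquired
async function acquireRetryLock ({ models, invoiceId, userId, timeoutMs }) {
  const [row] = await models.$queryRaw`
    UPDATE "Invoice"
    SET "retryPendingSince" = now()
    WHERE id = ${invoiceId} AND "userId" = ${userId}
      AND "actionState" = 'FAILED'
      AND ("retryPendingSince" IS NULL OR "retryPendingSince" < now() - ${`${timeoutMs} milliseconds`}::interval)
    RETURNING *`
  return row
}

// two concurrent retries race; Postgres serializes the row update,
// so exactly one call gets the invoice and the other gets undefined
const [a, b] = await Promise.all([
  acquireRetryLock({ models, invoiceId: 1, userId: 21, timeoutMs: 60000 }),
  acquireRetryLock({ models, invoiceId: 1, userId: 21, timeoutMs: 60000 })
])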
@@ -1,27 +1,36 @@
import { SUPPORTED_CURRENCIES } from '@/lib/currency'
import { cachedFetcher } from '@/lib/fetch'
const cache = new Map()
const expiresIn = 30000 // in milliseconds

const getPrice = cachedFetcher(async function fetchPrice (fiat = 'USD') {
async function fetchPrice (fiat) {
const url = `https://api.coinbase.com/v2/prices/BTC-${fiat}/spot`
try {
const res = await fetch(url)
const body = await res.json()
return parseFloat(body.data.amount)
} catch (err) {
console.error(err)
return -1
const price = await fetch(url)
.then((res) => res.json())
.then((body) => parseFloat(body.data.amount))
.catch((err) => {
console.error(err)
return -1
})
cache.set(fiat, { price, createdAt: Date.now() })
return price
}

async function getPrice (fiat) {
fiat ??= 'USD'
if (cache.has(fiat)) {
const { price, createdAt } = cache.get(fiat)
const expired = createdAt + expiresIn < Date.now()
if (expired) fetchPrice(fiat).catch(console.error) // update cache
return price // serve stale price (this on the SSR critical path)
} else {
fetchPrice(fiat).catch(console.error)
}
}, {
maxSize: SUPPORTED_CURRENCIES.length,
cacheExpiry: 60 * 1000, // 1 minute
forceRefreshThreshold: 0, // never force refresh
keyGenerator: (fiat = 'USD') => fiat
})
return null
}

export default {
Query: {
price: async (parent, { fiatCurrency }, ctx) => {
return await getPrice(fiatCurrency) || -1
return await getPrice(fiatCurrency)
}
}
}
@@ -1,28 +1,56 @@
import { timeUnitForRange, whenRange } from '@/lib/time'
import { viewGroup } from './growth'
import { GqlAuthenticationError } from '@/lib/error'
import { GraphQLError } from 'graphql'
import { withClause, intervalClause } from './growth'
import { timeUnitForRange, whenRange } from '../../lib/time'

export default {
Query: {
referrals: async (parent, { when, from, to }, { models, me }) => {
if (!me) {
throw new GqlAuthenticationError()
throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
}

const range = whenRange(when, from, to)

return await models.$queryRawUnsafe(`
SELECT date_trunc('${timeUnitForRange(range)}', t) at time zone 'America/Chicago' as time,
json_build_array(
json_build_object('name', 'referrals', 'value', COALESCE(SUM(referrals), 0)),
json_build_object('name', 'one day referrals', 'value', COALESCE(SUM(one_day_referrals), 0)),
json_build_object('name', 'referral sats', 'value', FLOOR(COALESCE(SUM(msats_referrals), 0) / 1000.0)),
json_build_object('name', 'one day referral sats', 'value', FLOOR(COALESCE(SUM(msats_one_day_referrals), 0) / 1000.0))
const [{ totalSats }] = await models.$queryRawUnsafe(`
SELECT COALESCE(FLOOR(sum(msats) / 1000), 0) as "totalSats"
FROM "ReferralAct"
WHERE ${intervalClause(range, 'ReferralAct')}
AND "ReferralAct"."referrerId" = $3
`, ...range, Number(me.id))

const [{ totalReferrals }] = await models.$queryRawUnsafe(`
SELECT count(*)::INTEGER as "totalReferrals"
FROM users
WHERE ${intervalClause(range, 'users')}
AND "referrerId" = $3
`, ...range, Number(me.id))

const stats = await models.$queryRawUnsafe(
`${withClause(range)}
SELECT time, json_build_array(
json_build_object('name', 'referrals', 'value', count(*) FILTER (WHERE act = 'REFERREE')),
json_build_object('name', 'sats', 'value', FLOOR(COALESCE(sum(msats) FILTER (WHERE act IN ('BOOST', 'STREAM', 'FEE')), 0)))
) AS data
FROM ${viewGroup(range, 'user_stats')}
WHERE id = ${me.id}
GROUP BY time
ORDER BY time ASC`, ...range)
FROM times
LEFT JOIN
((SELECT "ReferralAct".created_at, "ReferralAct".msats / 1000.0 as msats, "ItemAct".act::text as act
FROM "ReferralAct"
JOIN "ItemAct" ON "ItemAct".id = "ReferralAct"."itemActId"
WHERE ${intervalClause(range, 'ReferralAct')}
AND "ReferralAct"."referrerId" = $3)
UNION ALL
(SELECT created_at, 0.0 as sats, 'REFERREE' as act
FROM users
WHERE ${intervalClause(range, 'users')}
AND "referrerId" = $3)) u ON time = date_trunc('${timeUnitForRange(range)}', u.created_at)
GROUP BY time
ORDER BY time ASC`, ...range, Number(me.id))

return {
totalSats,
totalReferrals,
stats
}
}
}
}
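Note the positional-parameter convention running through the referrals resolver above: intervalClause emits SQL that references $1 and $2, so each call spreads range first and appends me.id as $3. A compact sketch of that contract, condensed from the count query (reusing only helpers imported in that file):

// $1/$2 come from spreading range; anything appended after starts at $3
const range = whenRange(when, from, to)
const [{ totalReferrals }] = await models.$queryRawUnsafe(`
  SELECT count(*)::INTEGER as "totalReferrals"
  FROM users
  WHERE ${intervalClause(range, 'users')} -- binds $1 and $2
    AND "referrerId" = $3`, ...range, Number(me.id))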
@ -1,86 +1,39 @@
import { amountSchema, validateSchema } from '@/lib/validate'
import { getAd, getItem } from './item'
import { topUsers } from './user'
import performPaidAction from '../paidAction'
import { GqlInputError } from '@/lib/error'
import { GraphQLError } from 'graphql'
import { amountSchema, ssValidate } from '../../lib/validate'
import { serializeInvoicable } from './serial'
import { ANON_USER_ID } from '../../lib/constants'
import { getItem } from './item'

let rewardCache
const rewardCache = new Map()

async function updateCachedRewards (models) {
  const rewards = await getActiveRewards(models)
  rewardCache = { rewards, createdAt: Date.now() }
async function updateCachedRewards (when, models) {
  const rewards = await getRewards(when, models)
  rewardCache.set(when, { rewards, createdAt: Date.now() })
  return rewards
}

async function getCachedActiveRewards (staleIn, models) {
  if (rewardCache) {
    const { rewards, createdAt } = rewardCache
async function getCachedRewards (staleIn, when, models) {
  if (rewardCache.has(when)) {
    const { rewards, createdAt } = rewardCache.get(when)
    const expired = createdAt + staleIn < Date.now()
    if (expired) updateCachedRewards(models).catch(console.error)
    if (expired) updateCachedRewards(when, models).catch(console.error)
    return rewards // serve stale rewards
  }
  return await updateCachedRewards(models)
}

async function getActiveRewards (models) {
  return await models.$queryRaw`
    SELECT
      (sum(total) / 1000)::INT as total,
      date_trunc('day', (now() AT TIME ZONE 'America/Chicago') + interval '1 day') AT TIME ZONE 'America/Chicago' as time,
      json_build_array(
        json_build_object('name', 'donations', 'value', (sum(donations) / 1000)::INT),
        json_build_object('name', 'fees', 'value', (sum(fees) / 1000)::INT),
        json_build_object('name', 'boost', 'value', (sum(boost) / 1000)::INT),
        json_build_object('name', 'jobs', 'value', (sum(jobs) / 1000)::INT),
        json_build_object('name', 'anon''s stack', 'value', (sum(anons_stack) / 1000)::INT)
      ) AS sources
    FROM (
      (SELECT * FROM rewards_today)
      UNION ALL
      (SELECT * FROM
        rewards(
          date_trunc('hour', timezone('America/Chicago', now())),
          date_trunc('hour', timezone('America/Chicago', now())), '1 hour'::INTERVAL, 'hour'))
    ) u`
}

async function getMonthlyRewards (when, models) {
  return await models.$queryRaw`
    SELECT
      (sum(total) / 1000)::INT as total,
      date_trunc('month', ${when?.[0]}::text::timestamp) AT TIME ZONE 'America/Chicago' as time,
      json_build_array(
        json_build_object('name', 'donations', 'value', (sum(donations) / 1000)::INT),
        json_build_object('name', 'fees', 'value', (sum(fees) / 1000)::INT),
        json_build_object('name', 'boost', 'value', (sum(boost) / 1000)::INT),
        json_build_object('name', 'jobs', 'value', (sum(jobs) / 1000)::INT),
        json_build_object('name', 'anon''s stack', 'value', (sum(anons_stack) / 1000)::INT)
      ) AS sources
    FROM rewards_days
    WHERE date_trunc('month', rewards_days.t) = date_trunc('month', ${when?.[0]}::text::timestamp - interval '1 month')`
  return await updateCachedRewards(when, models)
}

async function getRewards (when, models) {
  if (when) {
    if (when.length > 1) {
      throw new GqlInputError('too many dates')
    if (when.length > 2) {
      throw new GraphQLError('too many dates', { extensions: { code: 'BAD_USER_INPUT' } })
    }
    when.forEach(w => {
      if (isNaN(new Date(w))) {
        throw new GqlInputError('invalid date')
        throw new GraphQLError('invalid date', { extensions: { code: 'BAD_USER_INPUT' } })
      }
    })
    if (new Date(when[0]) > new Date(when[when.length - 1])) {
      throw new GqlInputError('bad date range')
    }

    if (new Date(when[0]).getTime() > new Date('2024-03-01').getTime() && new Date(when[0]).getTime() < new Date('2024-05-02').getTime()) {
      // after 3/1/2024 and until 5/1/2024, we reward monthly on the 1st
      if (new Date(when[0]).getUTCDate() !== 1) {
        throw new GqlInputError('bad reward date')
      }

      return await getMonthlyRewards(when, models)
      throw new GraphQLError('bad date range', { extensions: { code: 'BAD_USER_INPUT' } })
    }
  }

@ -92,18 +45,37 @@ async function getRewards (when, models) {
      COALESCE(${when?.[when.length - 1]}::text::timestamp - interval '1 day', now() AT TIME ZONE 'America/Chicago'),
      interval '1 day') AS t
  )
  SELECT (total / 1000)::INT as total,
  SELECT coalesce(FLOOR(sum(sats)), 0) as total,
    days_cte.day + interval '1 day' as time,
    json_build_array(
      json_build_object('name', 'donations', 'value', donations / 1000),
      json_build_object('name', 'fees', 'value', fees / 1000),
      json_build_object('name', 'boost', 'value', boost / 1000),
      json_build_object('name', 'jobs', 'value', jobs / 1000),
      json_build_object('name', 'anon''s stack', 'value', anons_stack / 1000)
      json_build_object('name', 'donations', 'value', coalesce(FLOOR(sum(sats) FILTER(WHERE type = 'DONATION')), 0)),
      json_build_object('name', 'fees', 'value', coalesce(FLOOR(sum(sats) FILTER(WHERE type NOT IN ('BOOST', 'STREAM', 'DONATION', 'ANON'))), 0)),
      json_build_object('name', 'boost', 'value', coalesce(FLOOR(sum(sats) FILTER(WHERE type = 'BOOST')), 0)),
      json_build_object('name', 'jobs', 'value', coalesce(FLOOR(sum(sats) FILTER(WHERE type = 'STREAM')), 0)),
      json_build_object('name', 'anon''s stack', 'value', coalesce(FLOOR(sum(sats) FILTER(WHERE type = 'ANON')), 0))
    ) AS sources
  FROM days_cte
  JOIN rewards_days ON rewards_days.t = days_cte.day
  GROUP BY days_cte.day, total, donations, fees, boost, jobs, anons_stack
  CROSS JOIN LATERAL (
    (SELECT ("ItemAct".msats - COALESCE("ReferralAct".msats, 0)) / 1000.0 as sats, act::text as type
      FROM "ItemAct"
      LEFT JOIN "ReferralAct" ON "ReferralAct"."itemActId" = "ItemAct".id
      WHERE date_trunc('day', "ItemAct".created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago') = days_cte.day AND "ItemAct".act <> 'TIP')
    UNION ALL
    (SELECT sats::FLOAT, 'DONATION' as type
      FROM "Donation"
      WHERE date_trunc('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago') = days_cte.day)
    UNION ALL
    -- any earnings from anon's stack that are not forwarded to other users
    (SELECT "ItemAct".msats / 1000.0 as sats, 'ANON' as type
      FROM "Item"
      JOIN "ItemAct" ON "ItemAct"."itemId" = "Item".id
      LEFT JOIN "ItemForward" ON "ItemForward"."itemId" = "Item".id
      WHERE "Item"."userId" = ${ANON_USER_ID} AND "ItemAct".act = 'TIP'
      AND date_trunc('day', "ItemAct".created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago') = days_cte.day
      GROUP BY "ItemAct".id, "ItemAct".msats
      HAVING COUNT("ItemForward".id) = 0)
  ) subquery
  GROUP BY days_cte.day
  ORDER BY days_cte.day ASC`

  return results.length ? results : [{ total: 0, time: '0', sources: [] }]
@ -112,18 +84,18 @@ async function getRewards (when, models) {
export default {
  Query: {
    rewards: async (parent, { when }, { models }) =>
      when ? await getRewards(when, models) : await getCachedActiveRewards(5000, models),
      when ? await getRewards(when, models) : await getCachedRewards(5000, when, models),
    meRewards: async (parent, { when }, { me, models }) => {
      if (!me) {
        return null
      }

      if (!when || when.length > 2) {
        throw new GqlInputError('bad date range')
        throw new GraphQLError('invalid date range', { extensions: { code: 'BAD_USER_INPUT' } })
      }
      for (const w of when) {
        if (isNaN(new Date(w))) {
          throw new GqlInputError('invalid date')
          throw new GraphQLError('invalid date', { extensions: { code: 'BAD_USER_INPUT' } })
        }
      }

@ -141,7 +113,6 @@ export default {
        (SELECT FLOOR("Earn".msats / 1000.0) as sats, type, rank, "typeId"
          FROM "Earn"
          WHERE "Earn"."userId" = ${me.id}
          AND (type IS NULL OR type NOT IN ('FOREVER_REFERRAL', 'ONE_DAY_REFERRAL'))
          AND date_trunc('day', "Earn".created_at AT TIME ZONE 'UTC' AT TIME ZONE 'America/Chicago') = days_cte.day
          ORDER BY "Earn".msats DESC)
        ) "Earn"
@ -151,29 +122,16 @@ export default {
      return results
    }
  },
  Rewards: {
    leaderboard: async (parent, args, { models, ...context }) => {
      // get to and from using postgres because it's easier to do there
      const [{ to, from }] = await models.$queryRaw`
        SELECT date_trunc('day', (now() AT TIME ZONE 'America/Chicago')) AT TIME ZONE 'America/Chicago' as from,
        (date_trunc('day', (now() AT TIME ZONE 'America/Chicago')) AT TIME ZONE 'America/Chicago') + interval '1 day - 1 second' as to`
      return await topUsers(parent, { when: 'custom', to: new Date(to).getTime().toString(), from: new Date(from).getTime().toString(), limit: 500 }, { models, ...context })
    },
    total: async (parent, args, { models }) => {
      if (!parent.total) {
        return 0
      }
      return parent.total
    },
    ad: async (parent, args, { me, models }) => {
      return await getAd(parent, { }, { me, models })
    }
  },
  Mutation: {
    donateToRewards: async (parent, { sats }, { me, models, lnd }) => {
      await validateSchema(amountSchema, { amount: sats })
    donateToRewards: async (parent, { sats, hash, hmac }, { me, models, lnd }) => {
      await ssValidate(amountSchema, { amount: sats })

      return await performPaidAction('DONATE', { sats }, { me, models, lnd })
      await serializeInvoicable(
        models.$queryRaw`SELECT donate(${sats}::INTEGER, ${me?.id || ANON_USER_ID}::INTEGER)`,
        { models, lnd, hash, hmac, me, enforceFee: sats }
      )

      return sats
    }
  },
  Reward: {
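Note: getCachedRewards above is a stale-while-revalidate cache: an expired entry is still returned immediately while updateCachedRewards refreshes it in the background, and only the when-less (current rewards) path is cached, since the resolver calls getRewards directly whenever when is given. A standalone sketch of the same pattern (illustrative, not from the diff):

const cache = new Map()

async function cachedCall (key, staleIn, fetcher) {
  const hit = cache.get(key)
  if (hit) {
    if (hit.createdAt + staleIn < Date.now()) {
      // refresh out of band; this caller still gets the stale value
      fetcher()
        .then(value => cache.set(key, { value, createdAt: Date.now() }))
        .catch(console.error)
    }
    return hit.value
  }
  const value = await fetcher()
  cache.set(key, { value, createdAt: Date.now() })
  return value
}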
@ -1,29 +1,24 @@
import { decodeCursor, LIMIT, nextCursorEncoded } from '@/lib/cursor'
import { whenToFrom } from '@/lib/time'
import { getItem, itemQueryWithMeta, SELECT } from './item'
import { decodeCursor, LIMIT, nextCursorEncoded } from '../../lib/cursor'
import { whenToFrom } from '../../lib/time'
import { getItem } from './item'

function queryParts (q) {
  const regex = /"([^"]*)"/gm

  const queryArr = q.replace(regex, '').trim().split(/\s+/)
  const url = queryArr.find(word => word.startsWith('url:'))
  const nym = queryArr.find(word => word.startsWith('@'))
  const territory = queryArr.find(word => word.startsWith('~'))
  const exclude = [url, nym, territory]
  const query = queryArr.filter(word => !exclude.includes(word)).join(' ')

  return {
    quotes: [...q.matchAll(regex)].map(m => m[1]),
    nym,
    url,
    territory,
    query
  }
}
const STOP_WORDS = ['a', 'an', 'and', 'are', 'as', 'at', 'be', 'but',
  'by', 'for', 'if', 'in', 'into', 'is', 'it', 'no', 'not',
  'of', 'on', 'or', 'such', 'that', 'the', 'their', 'then',
  'there', 'these', 'they', 'this', 'to', 'was', 'will',
  'with', 'bitcoin', 'page', 'adds', 'how', 'why', 'what',
  'works', 'now', 'available', 'breaking', 'app', 'powered',
  'just', 'dev', 'using', 'crypto', 'has', 'my', 'i', 'apps',
  'really', 'new', 'era', 'application', 'best', 'year',
  'latest', 'still', 'few', 'crypto', 'keep', 'public', 'current',
  'levels', 'from', 'cryptocurrencies', 'confirmed', 'news', 'network',
  'about', 'sources', 'vote', 'considerations', 'hope',
  'keep', 'keeps', 'including', 'we', 'brings', "don't", 'do',
  'interesting', 'us', 'welcome', 'thoughts', 'results']

export default {
  Query: {
    related: async (parent, { title, id, cursor, limit = LIMIT, minMatch }, { me, models, search }) => {
    related: async (parent, { title, id, cursor, limit, minMatch }, { me, models, search }) => {
      const decodedCursor = decodeCursor(cursor)

      if (!id && (!title || title.trim().split(/\s+/).length < 1)) {
@ -36,7 +31,7 @@ export default {
      const like = []
      if (id) {
        like.push({
          _index: process.env.OPENSEARCH_INDEX,
          _index: 'item',
          _id: id
        })
      }
@ -45,125 +40,95 @@ export default {
        like.push(title)
      }

      const mustNot = [{ exists: { field: 'parentId' } }]
      const mustNot = []
      if (id) {
        mustNot.push({ term: { id } })
      }

      let should = [
        {
          more_like_this: {
            fields: ['title', 'text'],
            like,
            min_term_freq: 1,
            min_doc_freq: 1,
            max_doc_freq: 5,
            min_word_length: 2,
            max_query_terms: 25,
            minimum_should_match: minMatch || '10%',
            boost_terms: 100
          }
        }
      ]

      if (process.env.OPENSEARCH_MODEL_ID) {
        let qtitle = title
        let qtext = title
        if (id) {
          const item = await getItem(parent, { id }, { me, models })
          qtitle = item.title || item.text
          qtext = item.text || item.title
        }

        should = [
          {
            neural: {
              title_embedding: {
                query_text: qtext,
                model_id: process.env.OPENSEARCH_MODEL_ID,
                k: decodedCursor.offset + LIMIT
              }
            }
          },
          {
            neural: {
              text_embedding: {
                query_text: qtitle,
                model_id: process.env.OPENSEARCH_MODEL_ID,
                k: decodedCursor.offset + LIMIT
              }
            }
          }
        ]
      }

      const results = await search.search({
        index: process.env.OPENSEARCH_INDEX,
        size: limit,
      let items = await search.search({
        index: 'item',
        size: limit || LIMIT,
        from: decodedCursor.offset,
        _source: {
          excludes: [
            'text',
            'text_embedding',
            'title_embedding'
          ]
        },
        body: {
          query: {
            function_score: {
              query: {
                bool: {
                  should,
                  filter: [
                  should: [
                    {
                      bool: {
                        should: [
                          { match: { status: 'ACTIVE' } },
                          { match: { status: 'NOSATS' } }
                        ],
                        must_not: mustNot
                      more_like_this: {
                        fields: ['title'],
                        like,
                        min_term_freq: 1,
                        min_doc_freq: 1,
                        min_word_length: 2,
                        max_query_terms: 12,
                        minimum_should_match: minMatch || '80%',
                        stop_words: STOP_WORDS,
                        boost: 10000
                      }
                    },
                    {
                      range: { wvotes: { gte: minMatch ? 0 : 0.2 } }
                      more_like_this: {
                        fields: ['title'],
                        like,
                        min_term_freq: 1,
                        min_doc_freq: 1,
                        min_word_length: 2,
                        max_query_terms: 12,
                        minimum_should_match: minMatch || '60%',
                        stop_words: STOP_WORDS,
                        boost: 1000
                      }
                    },
                    {
                      more_like_this: {
                        fields: ['title'],
                        like,
                        min_term_freq: 1,
                        min_doc_freq: 1,
                        min_word_length: 2,
                        max_query_terms: 12,
                        minimum_should_match: minMatch || '30%',
                        stop_words: STOP_WORDS,
                        boost: 100
                      }
                    },
                    {
                      more_like_this: {
                        fields: ['text'],
                        like,
                        min_term_freq: 1,
                        min_doc_freq: 1,
                        min_word_length: 2,
                        max_query_terms: 25,
                        minimum_should_match: minMatch || '30%',
                        stop_words: STOP_WORDS,
                        boost: 10
                      }
                    }
                  ]
                  ],
                  must_not: [{ exists: { field: 'parentId' } }, ...mustNot],
                  filter: {
                    range: { wvotes: { gte: minMatch ? 0 : 0.2 } }
                  }
                }
              },
              functions: [{
                field_value_factor: {
                  field: 'wvotes',
                  modifier: 'none',
                  factor: 1,
                  missing: 0
                }
              }],
              field_value_factor: {
                field: 'wvotes',
                modifier: 'log1p',
                factor: 1.2,
                missing: 0
              },
              boost_mode: 'multiply'
            }
          }
        }
      })

      const values = results.body.hits.hits.map((e, i) => {
        return `(${e._source.id}, ${i})`
      }).join(',')

      if (values.length === 0) {
        return {
          cursor: null,
          items: []
        }
      }

      const items = await itemQueryWithMeta({
        me,
        models,
        query: `
          WITH r(id, rank) AS (VALUES ${values})
          ${SELECT}, rank
          FROM "Item"
          JOIN r ON "Item".id = r.id`,
        orderBy: 'ORDER BY rank ASC'
      items = items.body.hits.hits.map(async e => {
        // this is super inefficient but will suffice until we do something more generic
        return await getItem(parent, { id: e._source.id }, { me, models })
      })

      return {
@ -171,289 +136,189 @@ export default {
        items
      }
    },
    search: async (parent, { q, cursor, sort, what, when, from: whenFrom, to: whenTo }, { me, models, search }) => {
    search: async (parent, { q: query, sub, cursor, sort, what, when, from: whenFrom, to: whenTo }, { me, models, search }) => {
      const decodedCursor = decodeCursor(cursor)
      let sitems = null
      let sitems

      // short circuit: return empty result if either:
      // 1. no query provided, or
      // 2. searching bookmarks without being authed
      if (!q || (what === 'bookmarks' && !me)) {
      if (!query) {
        return {
          items: [],
          cursor: null
        }
      }

      // build query in parts:
      // filters: determine the universe of potential search candidates
      // termQueries: queries related to the actual search terms
      // functions: rank modifiers to boost by recency or popularity
      const filters = []
      const termQueries = []
      const functions = []

      // filters for item types
      const whatArr = []
      switch (what) {
        case 'posts': // posts only
          filters.push({ bool: { must_not: { exists: { field: 'parentId' } } } })
        case 'posts':
          whatArr.push({ bool: { must_not: { exists: { field: 'parentId' } } } })
          break
        case 'comments': // comments only
          filters.push({ bool: { must: { exists: { field: 'parentId' } } } })
          break
        case 'bookmarks':
          if (me?.id) {
            filters.push({ match: { bookmarkedBy: me?.id } })
          }
        case 'comments':
          whatArr.push({ bool: { must: { exists: { field: 'parentId' } } } })
          break
        default:
          break
      }

      // filter for active posts
      filters.push(
        me
          ? {
              bool: {
                should: [
                  { match: { status: 'ACTIVE' } },
                  { match: { status: 'NOSATS' } },
                  { match: { userId: me.id } }
                ]
              }
            }
          : {
              bool: {
                should: [
                  { match: { status: 'ACTIVE' } },
                  { match: { status: 'NOSATS' } }
                ]
              }
            }
      )
      const queryArr = query.trim().split(/\s+/)
      const url = queryArr.find(word => word.startsWith('url:'))
      const nym = queryArr.find(word => word.startsWith('nym:'))
      const exclude = [url, nym]
      query = queryArr.filter(word => !exclude.includes(word)).join(' ')

      if (url) {
        whatArr.push({ match_phrase_prefix: { url: `${url.slice(4).toLowerCase()}` } })
      }

      if (nym) {
        whatArr.push({ wildcard: { 'user.name': `*${nym.slice(4).toLowerCase()}*` } })
      }

      if (sub) {
        whatArr.push({ match: { 'sub.name': sub } })
      }

      const should = [
        {
          // all terms are matched in fields
          multi_match: {
            query,
            type: 'most_fields',
            fields: ['title^1000', 'text'],
            minimum_should_match: '100%',
            boost: 10000
          }
        },
        {
          // all terms are matched in fields fuzzily
          multi_match: {
            query,
            type: 'most_fields',
            fields: ['title^1000', 'text'],
            minimum_should_match: '60%',
            boost: 1000
          }
        }
      ]

      let boostMode = 'multiply'
      let sortField
      let sortMod = 'log1p'
      switch (sort) {
        case 'comments':
          sortField = 'ncomments'
          sortMod = 'square'
          break
        case 'sats':
          sortField = 'sats'
          break
        case 'recent':
          sortField = 'createdAt'
          sortMod = 'square'
          boostMode = 'replace'
          break
        default:
          sortField = 'wvotes'
          sortMod = 'none'
          break
      }

      const functions = [
        {
          field_value_factor: {
            field: sortField,
            modifier: sortMod,
            factor: 1.2
          }
        }
      ]

      // allow fuzzy matching for single terms
      if (sort !== 'recent') {
        should.push({
          // only some terms must match unless we're sorting
          multi_match: {
            query,
            type: 'most_fields',
            fields: ['title^1000', 'text'],
            fuzziness: 'AUTO',
            prefix_length: 3,
            minimum_should_match: '60%'
          }
        })
        // small bias toward posts with comments
        functions.push({
          field_value_factor: {
            field: 'ncomments',
            modifier: 'ln1p',
            factor: 1
          }
        })
      }

      if (query.length) {
        whatArr.push({
          bool: {
            should
          }
        })
      }

      // filter for time range
      const whenRange = when === 'custom'
        ? {
            gte: whenFrom,
            lte: new Date(Math.min(new Date(Number(whenTo)), decodedCursor.time))
            lte: new Date(Math.min(new Date(whenTo), decodedCursor.time))
          }
        : {
            lte: decodedCursor.time,
            gte: whenToFrom(when)
          }
      filters.push({ range: { createdAt: whenRange } })

      // filter for non negative wvotes
      filters.push({ range: { wvotes: { gte: 0 } } })

      // decompose the search terms
      const { query: _query, quotes, nym, url, territory } = queryParts(q)
      const query = _query

      // if search contains a url term, modify the query text
      if (url) {
        const uri = url.slice(4)
        let uriObj
        try {
          uriObj = new URL(uri)
        } catch {
          try {
            uriObj = new URL(`https://${uri}`)
          } catch {}
        }

        if (uriObj) {
          termQueries.push({
            wildcard: { url: `*${uriObj?.hostname ?? uri}${uriObj?.pathname ?? ''}*` }
          })
          termQueries.push({
            match: { text: `${uriObj?.hostname ?? uri}${uriObj?.pathname ?? ''}` }
          })
        }
      }

      // if nym, items must contain nym
      if (nym) {
        filters.push({ wildcard: { 'user.name': `*${nym.slice(1).toLowerCase()}*` } })
        // push same requirement to termQueries to avoid empty should clause
        termQueries.push({ wildcard: { 'user.name': `*${nym.slice(1).toLowerCase()}*` } })
      }

      // if territory, item must be from territory
      if (territory) {
        filters.push({ match: { 'sub.name': territory.slice(1) } })
        // push same requirement to termQueries to avoid empty should clause
        termQueries.push({ match: { 'sub.name': territory.slice(1) } })
      }

      // if quoted phrases, items must contain entire phrase
      for (const quote of quotes) {
        termQueries.push({
          multi_match: {
            query: quote,
            type: 'phrase',
            fields: ['title', 'text']
          }
        })

        // force the search to include the quoted phrase
        filters.push({
          multi_match: {
            query: quote,
            type: 'phrase',
            fields: ['title', 'text']
          }
        })
      }

      // functions for boosting search rank by recency or popularity
      switch (sort) {
        case 'comments':
          functions.push({
            field_value_factor: {
              field: 'ncomments',
              modifier: 'log1p'
            }
          })
          break
        case 'sats':
          functions.push({
            field_value_factor: {
              field: 'sats',
              modifier: 'log1p'
            }
          })
          break
        case 'recent':
          functions.push({
            gauss: {
              createdAt: {
                origin: 'now',
                scale: '7d',
                decay: 0.5
              }
            }
          })
          break
        case 'zaprank':
          functions.push({
            field_value_factor: {
              field: 'wvotes',
              modifier: 'log1p'
            }
          })
          break
        default:
          break
      }

      let osQuery = {
        function_score: {
          query: {
            bool: {
              filter: filters,
              should: termQueries,
              minimum_should_match: termQueries.length > 0 ? 1 : 0
            }
          },
          functions,
          score_mode: 'multiply',
          boost_mode: 'multiply'
        }
      }

      // query for search terms
      if (query.length) {
        // keyword based subquery, to be used on its own or in conjunction with a neural
        // search
        const subquery = [
          {
            multi_match: {
              query,
              type: 'best_fields',
              fields: ['title^10', 'text'],
              fuzziness: 'AUTO',
              minimum_should_match: 1
            }
          },
          // all match matches higher
          {
            multi_match: {
              query,
              type: 'best_fields',
              fields: ['title^10', 'text'],
              minimum_should_match: '100%',
              boost: 100
            }
          },
          // phrase match matches higher
          {
            multi_match: {
              query,
              type: 'phrase',
              fields: ['title^10', 'text'],
              boost: 1000
            }
          }
        ]

        osQuery.function_score.query.bool.should = [...termQueries, ...subquery]
        osQuery.function_score.query.bool.minimum_should_match = 1

        // use hybrid neural search if model id is available, otherwise use only
        // keyword search
        if (process.env.OPENSEARCH_MODEL_ID) {
          osQuery = {
            hybrid: {
              queries: [
                {
                  bool: {
                    should: [
                      {
                        neural: {
                          title_embedding: {
                            query_text: query,
                            model_id: process.env.OPENSEARCH_MODEL_ID,
                            k: decodedCursor.offset + LIMIT
                          }
                        }
                      },
                      {
                        neural: {
                          text_embedding: {
                            query_text: query,
                            model_id: process.env.OPENSEARCH_MODEL_ID,
                            k: decodedCursor.offset + LIMIT
                          }
                        }
                      }
                    ],
                    filter: filters,
                    minimum_should_match: 1
                  }
                },
                osQuery
              ]
            }
          }
        }
      }

      try {
        sitems = await search.search({
          index: process.env.OPENSEARCH_INDEX,
          index: 'item',
          size: LIMIT,
          _source: {
            excludes: [
              'text',
              'text_embedding',
              'title_embedding'
            ]
          },
          from: decodedCursor.offset,
          body: {
            query: osQuery,
            query: {
              function_score: {
                query: {
                  bool: {
                    must: [
                      ...whatArr,
                      me
                        ? {
                            bool: {
                              should: [
                                { match: { status: 'ACTIVE' } },
                                { match: { status: 'NOSATS' } },
                                { match: { userId: me.id } }
                              ]
                            }
                          }
                        : {
                            bool: {
                              should: [
                                { match: { status: 'ACTIVE' } },
                                { match: { status: 'NOSATS' } }
                              ]
                            }
                          }
                    ],
                    filter: [
                      {
                        range:
                        {
                          createdAt: whenRange
                        }
                      },
                      { range: { wvotes: { gte: 0 } } }
                    ]
                  }
                },
                functions,
                boost_mode: boostMode
              }
            },
            highlight: {
              fields: {
                title: { number_of_fragments: 0, pre_tags: ['***'], post_tags: ['***'] },
@ -470,30 +335,14 @@ export default {
        }
      }

      const values = sitems.body.hits.hits.map((e, i) => {
        return `(${e._source.id}, ${i})`
      }).join(',')
      // return highlights
      const items = sitems.body.hits.hits.map(async e => {
        // this is super inefficient but will suffice until we do something more generic
        const item = await getItem(parent, { id: e._source.id }, { me, models })

      if (values.length === 0) {
        return {
          cursor: null,
          items: []
        }
      }

      const items = (await itemQueryWithMeta({
        me,
        models,
        query: `
          WITH r(id, rank) AS (VALUES ${values})
          ${SELECT}, rank
          FROM "Item"
          JOIN r ON "Item".id = r.id`,
        orderBy: 'ORDER BY rank ASC, msats DESC'
      })).map((item, i) => {
        const e = sitems.body.hits.hits[i]
        item.searchTitle = (e.highlight?.title && e.highlight.title[0]) || item.title
        item.searchText = (e.highlight?.text && e.highlight.text.join(' ... ')) || undefined

        return item
      })
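Note: an illustrative trace (not in the diff) of what queryParts above extracts from a search string; the modifiers are stripped from the free-text query and returned separately:

const parts = queryParts('"exact phrase" url:example.com @alice ~bitcoin lightning wallets')
// parts.quotes    -> ['exact phrase']
// parts.url       -> 'url:example.com'
// parts.nym       -> '@alice'
// parts.territory -> '~bitcoin'
// parts.query     -> 'lightning wallets'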
141
api/resolvers/serial.js
Normal file
@ -0,0 +1,141 @@
import { GraphQLError } from 'graphql'
import retry from 'async-retry'
import Prisma from '@prisma/client'
import { settleHodlInvoice } from 'ln-service'
import { createHmac } from './wallet'
import { msatsToSats, numWithUnits } from '../../lib/format'
import { BALANCE_LIMIT_MSATS } from '../../lib/constants'

export default async function serialize (models, ...calls) {
  return await retry(async bail => {
    try {
      const [, ...result] = await models.$transaction(
        [models.$executeRaw`SELECT ASSERT_SERIALIZED()`, ...calls],
        { isolationLevel: Prisma.TransactionIsolationLevel.Serializable })
      return calls.length > 1 ? result : result[0]
    } catch (error) {
      console.log(error)
      // two cases where we get insufficient funds:
      // 1. plpgsql function raises
      // 2. constraint violation via a prisma call
      // XXX prisma does not provide a way to distinguish these cases so we
      // have to check the error message
      if (error.message.includes('SN_INSUFFICIENT_FUNDS') ||
        error.message.includes('\\"users\\" violates check constraint \\"msats_positive\\"')) {
        bail(new GraphQLError('insufficient funds', { extensions: { code: 'BAD_INPUT' } }))
      }
      if (error.message.includes('SN_NOT_SERIALIZABLE')) {
        bail(new Error('wallet balance transaction is not serializable'))
      }
      if (error.message.includes('SN_CONFIRMED_WITHDRAWL_EXISTS')) {
        bail(new Error('withdrawal invoice already confirmed (to withdraw again create a new invoice)'))
      }
      if (error.message.includes('SN_PENDING_WITHDRAWL_EXISTS')) {
        bail(new Error('withdrawal invoice exists and is pending'))
      }
      if (error.message.includes('SN_INELIGIBLE')) {
        bail(new Error('user ineligible for gift'))
      }
      if (error.message.includes('SN_UNSUPPORTED')) {
        bail(new Error('unsupported action'))
      }
      if (error.message.includes('SN_DUPLICATE')) {
        bail(new Error('duplicate not allowed'))
      }
      if (error.message.includes('SN_REVOKED_OR_EXHAUSTED')) {
        bail(new Error('faucet has been revoked or is exhausted'))
      }
      if (error.message.includes('SN_INV_PENDING_LIMIT')) {
        bail(new Error('too many pending invoices'))
      }
      if (error.message.includes('SN_INV_EXCEED_BALANCE')) {
        bail(new Error(`pending invoices must not cause balance to exceed ${numWithUnits(msatsToSats(BALANCE_LIMIT_MSATS))}`))
      }
      if (error.message.includes('40001') || error.code === 'P2034') {
        throw new Error('wallet balance serialization failure - try again')
      }
      if (error.message.includes('23514') || ['P2002', 'P2003', 'P2004'].includes(error.code)) {
        bail(new Error('constraint failure'))
      }
      bail(error)
    }
  }, {
    minTimeout: 100,
    factor: 1.1,
    retries: 5
  })
}

export async function serializeInvoicable (query, { models, lnd, hash, hmac, me, enforceFee }) {
  if (!me && !hash) {
    throw new Error('you must be logged in or pay')
  }

  let trx = Array.isArray(query) ? query : [query]

  let invoice
  if (hash) {
    invoice = await checkInvoice(models, hash, hmac, enforceFee)
    trx = [
      models.$queryRaw`UPDATE users SET msats = msats + ${invoice.msatsReceived} WHERE id = ${invoice.user.id}`,
      ...trx,
      models.invoice.update({ where: { hash: invoice.hash }, data: { confirmedAt: new Date() } })
    ]
  }

  let results = await serialize(models, ...trx)

  if (hash) {
    if (invoice?.isHeld) await settleHodlInvoice({ secret: invoice.preimage, lnd })
    results = results.slice(1, -1)
  }

  // if there is only one result, return it directly, else the array
  results = results.flat(2)
  return results.length > 1 ? results : results[0]
}

export async function checkInvoice (models, hash, hmac, fee) {
  if (!hash) {
    throw new GraphQLError('hash required', { extensions: { code: 'BAD_INPUT' } })
  }
  if (!hmac) {
    throw new GraphQLError('hmac required', { extensions: { code: 'BAD_INPUT' } })
  }
  const hmac2 = createHmac(hash)
  if (hmac !== hmac2) {
    throw new GraphQLError('bad hmac', { extensions: { code: 'FORBIDDEN' } })
  }

  const invoice = await models.invoice.findUnique({
    where: { hash },
    include: {
      user: true
    }
  })

  if (!invoice) {
    throw new GraphQLError('invoice not found', { extensions: { code: 'BAD_INPUT' } })
  }

  const expired = new Date(invoice.expiresAt) <= new Date()
  if (expired) {
    throw new GraphQLError('invoice expired', { extensions: { code: 'BAD_INPUT' } })
  }
  if (invoice.confirmedAt) {
    throw new GraphQLError('invoice already used', { extensions: { code: 'BAD_INPUT' } })
  }

  if (invoice.cancelled) {
    throw new GraphQLError('invoice was canceled', { extensions: { code: 'BAD_INPUT' } })
  }

  if (!invoice.msatsReceived) {
    throw new GraphQLError('invoice was not paid', { extensions: { code: 'BAD_INPUT' } })
  }
  if (fee && msatsToSats(invoice.msatsReceived) < fee) {
    throw new GraphQLError('invoice amount too low', { extensions: { code: 'BAD_INPUT' } })
  }

  return invoice
}
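Note: a minimal usage sketch for serial.js above (illustrative; the ids, amounts, and invoiceHash/invoiceHmac placeholders are made up). serialize runs the given prisma calls in one SERIALIZABLE transaction and retries up to 5 times on serialization failures; serializeInvoicable optionally wraps the calls with an invoice credit before them and an invoice confirmation after them, so an action can be paid from balance (me) or by invoice (hash + hmac):

// spend from a logged-in user's balance
const updated = await serialize(models,
  models.user.update({ where: { id: 616 }, data: { msats: { decrement: 1000 } } })
)

// pay for the same kind of action with an invoice instead;
// enforceFee asserts the invoice covered at least 10 sats
const result = await serializeInvoicable(
  models.$queryRaw`SELECT donate(${10}::INTEGER, ${616}::INTEGER)`,
  { models, lnd, hash: invoiceHash, hmac: invoiceHmac, me: null, enforceFee: 10 }
)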
@ -1,10 +1,62 @@
import { whenRange } from '@/lib/time'
import { validateSchema, territorySchema } from '@/lib/validate'
import { decodeCursor, LIMIT, nextCursorEncoded } from '@/lib/cursor'
import { viewGroup } from './growth'
import { notifyTerritoryTransfer } from '@/lib/webPush'
import performPaidAction from '../paidAction'
import { GqlAuthenticationError, GqlInputError } from '@/lib/error'
import { GraphQLError } from 'graphql'
import serialize, { serializeInvoicable } from './serial'
import { TERRITORY_COST_MONTHLY, TERRITORY_COST_ONCE, TERRITORY_COST_YEARLY } from '../../lib/constants'
import { datePivot } from '../../lib/time'
import { ssValidate, territorySchema } from '../../lib/validate'
import { nextBilling, nextNextBilling } from '../../lib/territory'

export function paySubQueries (sub, models) {
  if (sub.billingType === 'ONCE') {
    return []
  }

  const billingAt = nextBilling(sub)
  const billAt = nextNextBilling(sub)
  const cost = BigInt(sub.billingCost) * BigInt(1000)

  return [
    models.user.update({
      where: {
        id: sub.userId
      },
      data: {
        msats: {
          decrement: cost
        }
      }
    }),
    // update 'em
    models.sub.update({
      where: {
        name: sub.name
      },
      data: {
        billedLastAt: billingAt,
        status: 'ACTIVE'
      }
    }),
    // record 'em
    models.subAct.create({
      data: {
        userId: sub.userId,
        subName: sub.name,
        msats: cost,
        type: 'BILLING'
      }
    }),
    models.$executeRaw`
      DELETE FROM pgboss.job
      WHERE name = 'territoryBilling'
      AND data->>'subName' = ${sub.name}
      AND completedon IS NULL`,
    // schedule 'em
    models.$queryRaw`
      INSERT INTO pgboss.job (name, data, startafter, keepuntil) VALUES ('territoryBilling',
        ${JSON.stringify({
          subName: sub.name
        })}::JSONB, ${billAt}, ${datePivot(billAt, { days: 1 })})`
  ]
}

export async function getSub (parent, { name }, { models, me }) {
  if (!name) return null
@ -20,11 +72,6 @@ export async function getSub (parent, { name }, { models, me }) {
      where: {
        userId: Number(me?.id)
      }
    },
    SubSubscription: {
      where: {
        userId: Number(me?.id)
      }
    }
  }
}
@ -37,25 +84,21 @@ export default {
    sub: getSub,
    subs: async (parent, args, { models, me }) => {
      if (me) {
        const currentUser = await models.user.findUnique({ where: { id: me.id } })
        const showNsfw = currentUser ? currentUser.nsfwMode : false

        return await models.$queryRawUnsafe(`
          SELECT "Sub".*, "Sub".created_at as "createdAt", COALESCE(json_agg("MuteSub".*) FILTER (WHERE "MuteSub"."userId" IS NOT NULL), '[]') AS "MuteSub"
        return await models.$queryRaw`
          SELECT "Sub".*, COALESCE(json_agg("MuteSub".*) FILTER (WHERE "MuteSub"."userId" IS NOT NULL), '[]') AS "MuteSub"
          FROM "Sub"
          LEFT JOIN "MuteSub" ON "Sub".name = "MuteSub"."subName" AND "MuteSub"."userId" = ${me.id}::INTEGER
          WHERE status <> 'STOPPED' ${showNsfw ? '' : 'AND "Sub"."nsfw" = FALSE'}
          WHERE status <> 'STOPPED'
          GROUP BY "Sub".name, "MuteSub"."userId"
          ORDER BY "Sub".name ASC
        `)
        `
      }

      return await models.sub.findMany({
        where: {
          status: {
            not: 'STOPPED'
          },
          nsfw: false
        },
        orderBy: {
          name: 'asc'
@ -73,99 +116,31 @@ export default {
      })

      return latest?.createdAt
    },
    topSubs: async (parent, { cursor, when, by, from, to, limit = LIMIT }, { models, me }) => {
      const decodedCursor = decodeCursor(cursor)
      const range = whenRange(when, from, to || decodeCursor.time)

      let column
      switch (by) {
        case 'revenue': column = 'revenue'; break
        case 'spent': column = 'spent'; break
        case 'posts': column = 'nposts'; break
        case 'comments': column = 'ncomments'; break
        default: column = 'stacked'; break
      }

      const subs = await models.$queryRawUnsafe(`
        SELECT "Sub".*,
          COALESCE(floor(sum(msats_revenue)/1000), 0) as revenue,
          COALESCE(floor(sum(msats_stacked)/1000), 0) as stacked,
          COALESCE(floor(sum(msats_spent)/1000), 0) as spent,
          COALESCE(sum(posts), 0) as nposts,
          COALESCE(sum(comments), 0) as ncomments
        FROM ${viewGroup(range, 'sub_stats')}
        JOIN "Sub" on "Sub".name = u.sub_name
        GROUP BY "Sub".name
        ORDER BY ${column} DESC NULLS LAST, "Sub".created_at ASC
        OFFSET $3
        LIMIT $4`, ...range, decodedCursor.offset, limit)

      return {
        cursor: subs.length === limit ? nextCursorEncoded(decodedCursor, limit) : null,
        subs
      }
    },
    userSubs: async (_parent, { name, cursor, when, by, from, to, limit = LIMIT }, { models }) => {
      if (!name) {
        throw new GqlInputError('must supply user name')
      }

      const user = await models.user.findUnique({ where: { name } })
      if (!user) {
        throw new GqlInputError('no user has that name')
      }

      const decodedCursor = decodeCursor(cursor)
      const range = whenRange(when, from, to || decodeCursor.time)

      let column
      switch (by) {
        case 'revenue': column = 'revenue'; break
        case 'spent': column = 'spent'; break
        case 'posts': column = 'nposts'; break
        case 'comments': column = 'ncomments'; break
        default: column = 'stacked'; break
      }

      const subs = await models.$queryRawUnsafe(`
        SELECT "Sub".*,
          "Sub".created_at as "createdAt",
          COALESCE(floor(sum(msats_revenue)/1000), 0) as revenue,
          COALESCE(floor(sum(msats_stacked)/1000), 0) as stacked,
          COALESCE(floor(sum(msats_spent)/1000), 0) as spent,
          COALESCE(sum(posts), 0) as nposts,
          COALESCE(sum(comments), 0) as ncomments
        FROM ${viewGroup(range, 'sub_stats')}
        JOIN "Sub" on "Sub".name = u.sub_name
        WHERE "Sub"."userId" = $3
        AND "Sub".status = 'ACTIVE'
        GROUP BY "Sub".name
        ORDER BY ${column} DESC NULLS LAST, "Sub".created_at ASC
        OFFSET $4
        LIMIT $5`, ...range, user.id, decodedCursor.offset, limit)

      return {
        cursor: subs.length === limit ? nextCursorEncoded(decodedCursor, limit) : null,
        subs
      }
    }
  },
  Mutation: {
    upsertSub: async (parent, { ...data }, { me, models, lnd }) => {
    upsertSub: async (parent, { hash, hmac, ...data }, { me, models, lnd }) => {
      if (!me) {
        throw new GqlAuthenticationError()
        throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
      }

      await validateSchema(territorySchema, data, { models, me, sub: { name: data.oldName } })
      // XXX this is because we did the wrong thing and used the subName as a primary key
      const old = await models.sub.findUnique({
        where: {
          name: data.name,
          userId: me.id
        }
      })

      if (data.oldName) {
        return await updateSub(parent, data, { me, models, lnd })
      await ssValidate(territorySchema, data, { models, me })

      if (old) {
        return await updateSub(parent, data, { me, models, lnd, hash, hmac })
      } else {
        return await createSub(parent, data, { me, models, lnd })
        return await createSub(parent, data, { me, models, lnd, hash, hmac })
      }
    },
    paySub: async (parent, { name }, { me, models, lnd }) => {
    paySub: async (parent, { name, hash, hmac }, { me, models, lnd }) => {
      // check that they own the sub
      const sub = await models.sub.findUnique({
        where: {
@ -174,22 +149,30 @@ export default {
      })

      if (!sub) {
        throw new GqlInputError('sub not found')
        throw new GraphQLError('sub not found', { extensions: { code: 'BAD_INPUT' } })
      }

      if (sub.userId !== me.id) {
        throw new GqlInputError('you do not own this sub')
        throw new GraphQLError('you do not own this sub', { extensions: { code: 'BAD_INPUT' } })
      }

      if (sub.status === 'ACTIVE') {
        return sub
      }

      return await performPaidAction('TERRITORY_BILLING', { name }, { me, models, lnd })
      const queries = paySubQueries(sub, models)
      if (queries.length === 0) {
        return sub
      }

      const results = await serializeInvoicable(
        queries,
        { models, lnd, hash, hmac, me, enforceFee: sub.billingCost })
      return results[1]
    },
    toggleMuteSub: async (parent, { name }, { me, models }) => {
      if (!me) {
        throw new GqlAuthenticationError()
        throw new GraphQLError('you must be logged in', { extensions: { code: 'UNAUTHENTICATED' } })
      }

      const lookupData = { userId: Number(me.id), subName: name }
@ -202,85 +185,9 @@ export default {
      await models.muteSub.create({ data: { ...lookupData } })
      return true
    }
    },
    toggleSubSubscription: async (sub, { name }, { me, models }) => {
      if (!me) {
        throw new GqlAuthenticationError()
      }

      const lookupData = { userId: me.id, subName: name }
      const where = { userId_subName: lookupData }
      const existing = await models.subSubscription.findUnique({ where })
      if (existing) {
        await models.subSubscription.delete({ where })
        return false
      } else {
        await models.subSubscription.create({ data: lookupData })
        return true
      }
    },
    transferTerritory: async (parent, { subName, userName }, { me, models }) => {
      if (!me) {
        throw new GqlAuthenticationError()
      }

      const sub = await models.sub.findUnique({
        where: {
          name: subName
        }
      })
      if (!sub) {
        throw new GqlInputError('sub not found')
      }
      if (sub.userId !== me.id) {
        throw new GqlInputError('you do not own this sub')
      }

      const user = await models.user.findFirst({ where: { name: userName } })
      if (!user) {
        throw new GqlInputError('user not found')
      }
      if (user.id === me.id) {
        throw new GqlInputError('cannot transfer territory to yourself')
      }

      const [, updatedSub] = await models.$transaction([
        models.territoryTransfer.create({ data: { subName, oldUserId: me.id, newUserId: user.id } }),
        models.sub.update({ where: { name: subName }, data: { userId: user.id, billingAutoRenew: false } })
      ])

      notifyTerritoryTransfer({ models, sub, to: user })

      return updatedSub
    },
    unarchiveTerritory: async (parent, { ...data }, { me, models, lnd }) => {
      if (!me) {
        throw new GqlAuthenticationError()
      }

      const { name } = data

      await validateSchema(territorySchema, data, { models, me })

      const oldSub = await models.sub.findUnique({ where: { name } })
      if (!oldSub) {
        throw new GqlInputError('sub not found')
      }
      if (oldSub.status !== 'STOPPED') {
        throw new GqlInputError('sub is not archived')
      }
      if (oldSub.billingType === 'ONCE') {
        // sanity check. this should never happen but leaving this comment here
        // to stop error propagation just in case and document that this should never happen.
        // #defensivecode
        throw new GqlInputError('sub should not be archived')
      }

      return await performPaidAction('TERRITORY_UNARCHIVE', data, { me, models, lnd })
    }
  },
  Sub: {
    optional: sub => sub,
    user: async (sub, args, { models }) => {
      if (sub.user) {
        return sub.user
@ -288,65 +195,94 @@ export default {
      return await models.user.findUnique({ where: { id: sub.userId } })
    },
    meMuteSub: async (sub, args, { models }) => {
      if (sub.meMuteSub !== undefined) {
        return sub.meMuteSub
      }
      return sub.MuteSub?.length > 0
    },
    nposts: async (sub, { when, from, to }, { models }) => {
      if (typeof sub.nposts !== 'undefined') {
        return sub.nposts
      }
    },
    ncomments: async (sub, { when, from, to }, { models }) => {
      if (typeof sub.ncomments !== 'undefined') {
        return sub.ncomments
      }
    },
    meSubscription: async (sub, args, { me, models }) => {
      if (sub.meSubscription !== undefined) {
        return sub.meSubscription
      }

      return sub.SubSubscription?.length > 0
    },
    createdAt: sub => sub.createdAt || sub.created_at
      return sub.meMuteSub || sub.MuteSub?.length > 0
    }
  }
}

async function createSub (parent, data, { me, models, lnd }) {
async function createSub (parent, data, { me, models, lnd, hash, hmac }) {
  const { billingType } = data
  let billingCost = TERRITORY_COST_MONTHLY
  let billAt = datePivot(new Date(), { months: 1 })

  if (billingType === 'ONCE') {
    billingCost = TERRITORY_COST_ONCE
    billAt = null
  } else if (billingType === 'YEARLY') {
    billingCost = TERRITORY_COST_YEARLY
    billAt = datePivot(new Date(), { years: 1 })
  }

  const cost = BigInt(1000) * BigInt(billingCost)

  try {
    return await performPaidAction('TERRITORY_CREATE', data, { me, models, lnd })
    const results = await serializeInvoicable([
      // bill 'em
      models.user.update({
        where: {
          id: me.id
        },
        data: {
          msats: {
            decrement: cost
          }
        }
      }),
      // create 'em
      models.sub.create({
        data: {
          ...data,
          billingCost,
          rankingType: 'WOT',
          userId: me.id
        }
      }),
      // record 'em
      models.subAct.create({
        data: {
          userId: me.id,
          subName: data.name,
          msats: cost,
          type: 'BILLING'
        }
      }),
      // schedule 'em
      ...(billAt
        ? [models.$queryRaw`
          INSERT INTO pgboss.job (name, data, startafter, keepuntil) VALUES ('territoryBilling',
            ${JSON.stringify({
              subName: data.name
            })}::JSONB, ${billAt}, ${datePivot(billAt, { days: 1 })})`]
        : [])
    ], { models, lnd, hash, hmac, me, enforceFee: billingCost })

    return results[1]
  } catch (error) {
    if (error.code === 'P2002') {
      throw new GqlInputError('name taken')
      throw new GraphQLError('name taken', { extensions: { code: 'BAD_INPUT' } })
    }
    throw error
  }
}

async function updateSub (parent, { oldName, ...data }, { me, models, lnd }) {
  const oldSub = await models.sub.findUnique({
    where: {
      name: oldName,
      userId: me.id,
      // this function's logic is only valid if the sub is not stopped
      // so prevent updates to stopped subs
      status: {
        not: 'STOPPED'
      }
    }
  })

  if (!oldSub) {
    throw new GqlInputError('sub not found')
  }
async function updateSub (parent, { name, ...data }, { me, models, lnd, hash, hmac }) {
  // prevent modification of billingType
  delete data.billingType

  try {
    return await performPaidAction('TERRITORY_UPDATE', { oldName, ...data }, { me, models, lnd })
    const results = await serialize(models,
      // update 'em
      models.sub.update({
        data,
        where: {
          name
        }
      }))

    return results[0]
  } catch (error) {
    if (error.code === 'P2002') {
      throw new GqlInputError('name taken')
      throw new GraphQLError('name taken', { extensions: { code: 'BAD_INPUT' } })
    }
    throw error
  }
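Note: nextBilling and nextNextBilling aren't shown in this diff; the sketch below (an assumption about their behavior, with a hypothetical addMonths stand-in) illustrates the scheduling math paySubQueries relies on for a monthly territory: the sub is billed one period after the last bill, and the replacement pgboss 'territoryBilling' job is scheduled one period after that:

// hypothetical stand-in for lib/territory's date helpers
function addMonths (date, months) {
  const d = new Date(date)
  d.setMonth(d.getMonth() + months)
  return d
}

const sub = { billingType: 'MONTHLY', billedLastAt: new Date('2024-01-15') }
const billingAt = addMonths(sub.billedLastAt, 1) // 2024-02-15, written to billedLastAt
const billAt = addMonths(sub.billedLastAt, 2)    // 2024-03-15, startafter of the next job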
@ -1,70 +1,42 @@
import { USER_ID, IMAGE_PIXELS_MAX, UPLOAD_SIZE_MAX, UPLOAD_SIZE_MAX_AVATAR, UPLOAD_TYPES_ALLOW, AWS_S3_URL_REGEXP, AVATAR_TYPES_ALLOW } from '@/lib/constants'
import { createPresignedPost } from '@/api/s3'
import { GqlAuthenticationError, GqlInputError } from '@/lib/error'
import { msatsToSats } from '@/lib/format'
import { GraphQLError } from 'graphql'
import { ANON_USER_ID, IMAGE_PIXELS_MAX, UPLOAD_SIZE_MAX, UPLOAD_SIZE_MAX_AVATAR, UPLOAD_TYPES_ALLOW } from '../../lib/constants'
import { createPresignedPost } from '../s3'

export default {
  Query: {
    uploadFees: async (parent, { s3Keys }, { models, me }) => {
      return uploadFees(s3Keys, { models, me })
    }
  },
  Mutation: {
    getSignedPOST: async (parent, { type, size, width, height, avatar }, { models, me }) => {
      if (UPLOAD_TYPES_ALLOW.indexOf(type) === -1) {
        throw new GqlInputError(`upload must be ${UPLOAD_TYPES_ALLOW.map(t => t.replace(/^(image|video)\//, '')).join(', ')}`)
        throw new GraphQLError(`image must be ${UPLOAD_TYPES_ALLOW.map(t => t.replace('image/', '')).join(', ')}`, { extensions: { code: 'BAD_INPUT' } })
      }

      if (size > UPLOAD_SIZE_MAX) {
        throw new GqlInputError(`upload must be less than ${UPLOAD_SIZE_MAX / (1024 ** 2)} megabytes`)
        throw new GraphQLError(`image must be less than ${UPLOAD_SIZE_MAX / (1024 ** 2)} megabytes`, { extensions: { code: 'BAD_INPUT' } })
      }

      if (avatar) {
        if (AVATAR_TYPES_ALLOW.indexOf(type) === -1) {
          throw new GqlInputError(`avatar must be ${AVATAR_TYPES_ALLOW.map(t => t.replace('image/', '')).join(', ')}`)
        }

        if (size > UPLOAD_SIZE_MAX_AVATAR) {
          throw new GqlInputError(`avatar must be less than ${UPLOAD_SIZE_MAX_AVATAR / (1024 ** 2)} megabytes`)
        }
      if (avatar && size > UPLOAD_SIZE_MAX_AVATAR) {
        throw new GraphQLError(`image must be less than ${UPLOAD_SIZE_MAX_AVATAR / (1024 ** 2)} megabytes`, { extensions: { code: 'BAD_INPUT' } })
      }

      // width and height is 0 for videos
      if (width * height > IMAGE_PIXELS_MAX) {
        throw new GqlInputError(`image must be less than ${IMAGE_PIXELS_MAX} pixels`)
        throw new GraphQLError(`image must be less than ${IMAGE_PIXELS_MAX} pixels`, { extensions: { code: 'BAD_INPUT' } })
      }

      const fileParams = {
      const imgParams = {
        type,
        size,
        width,
        height,
        userId: me?.id || USER_ID.anon,
        userId: me?.id || ANON_USER_ID,
        paid: false
      }

      if (avatar) {
        if (!me) throw new GqlAuthenticationError()
        fileParams.paid = undefined
        if (!me) throw new GraphQLError('you must be logged in', { extensions: { code: 'FORBIDDEN' } })
        imgParams.paid = undefined
      }

      const upload = await models.upload.create({ data: { ...fileParams } })
      const upload = await models.upload.create({ data: { ...imgParams } })
      return createPresignedPost({ key: String(upload.id), type, size })
    }
  }
}

export function uploadIdsFromText (text, { models }) {
  if (!text) return []
  return [...new Set([...text.matchAll(AWS_S3_URL_REGEXP)].map(m => Number(m[1])))]
}

export async function uploadFees (s3Keys, { models, me }) {
  // returns info object in this format:
  // { bytes24h: int, bytesUnpaid: int, nUnpaid: int, uploadFeesMsats: BigInt }
  const [info] = await models.$queryRawUnsafe('SELECT * FROM upload_fees($1::INTEGER, $2::INTEGER[])', me ? me.id : USER_ID.anon, s3Keys)
  const uploadFees = msatsToSats(info.uploadFeesMsats)
  const totalFeesMsats = info.nUnpaid * Number(info.uploadFeesMsats)
  const totalFees = msatsToSats(totalFeesMsats)
  return { ...info, uploadFees, totalFees, totalFeesMsats }
}
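Note: a worked sketch (made-up numbers, not from the diff) of the fee arithmetic in uploadFees above; upload_fees() reports the per-upload fee in msats plus the count of unpaid uploads, and the totals multiply the two:

const info = { bytes24h: 4000000, bytesUnpaid: 2000000, nUnpaid: 2, uploadFeesMsats: 10000n }

const uploadFees = Math.floor(Number(info.uploadFeesMsats) / 1000)   // 10 sats per unpaid upload
const totalFeesMsats = info.nUnpaid * Number(info.uploadFeesMsats)   // 20000 msats
const totalFees = Math.floor(totalFeesMsats / 1000)                  // 20 sats owed in total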
File diff suppressed because it is too large
@ -1,75 +0,0 @@
import { E_VAULT_KEY_EXISTS, GqlAuthenticationError, GqlInputError } from '@/lib/error'

export default {
  Query: {
    getVaultEntry: async (parent, { key }, { me, models }, info) => {
      if (!me) throw new GqlAuthenticationError()
      if (!key) throw new GqlInputError('must have key')

      const k = await models.vault.findUnique({
        where: {
          key,
          userId: me.id
        }
      })
      return k
    },
    getVaultEntries: async (parent, { keysFilter }, { me, models }, info) => {
      if (!me) throw new GqlAuthenticationError()

      const entries = await models.vaultEntry.findMany({
        where: {
          userId: me.id,
          key: keysFilter?.length
            ? {
                in: keysFilter
              }
            : undefined
        }
      })
      return entries
    }
  },
  Mutation: {
    // atomic vault migration
    updateVaultKey: async (parent, { entries, hash }, { me, models }) => {
      if (!me) throw new GqlAuthenticationError()
      if (!hash) throw new GqlInputError('hash required')
      const txs = []

      const { vaultKeyHash: oldKeyHash } = await models.user.findUnique({ where: { id: me.id } })
      if (oldKeyHash) {
        if (oldKeyHash !== hash) {
          throw new GqlInputError('vault key already set', E_VAULT_KEY_EXISTS)
        } else {
          return true
        }
      } else {
        txs.push(models.user.update({
          where: { id: me.id },
          data: { vaultKeyHash: hash }
        }))
      }

      for (const entry of entries) {
        txs.push(models.vaultEntry.update({
          where: { userId_key: { userId: me.id, key: entry.key } },
          data: { value: entry.value, iv: entry.iv }
        }))
      }
      await models.$transaction(txs)
      return true
    },
    clearVault: async (parent, args, { me, models }) => {
      if (!me) throw new GqlAuthenticationError()
      const txs = []
      txs.push(models.user.update({
        where: { id: me.id },
        data: { vaultKeyHash: '' }
      }))
      txs.push(models.vaultEntry.deleteMany({ where: { userId: me.id } }))
      await models.$transaction(txs)
      return true
    }
  }
}
File diff suppressed because it is too large
@ -1,5 +1,4 @@
import AWS from 'aws-sdk'
import { MEDIA_URL } from '@/lib/constants'

const bucketRegion = 'us-east-1'
const Bucket = process.env.NEXT_PUBLIC_AWS_UPLOAD_BUCKET
@@ -8,19 +7,8 @@ AWS.config.update({
  region: bucketRegion
})

const config = {
  apiVersion: '2006-03-01',
  s3ForcePathStyle: process.env.NODE_ENV === 'development'
}

export function createPresignedPost ({ key, type, size }) {
  // for local development, we use the NEXT_PUBLIC_MEDIA_URL which
  // is reachable from the host machine
  if (process.env.NODE_ENV === 'development') {
    config.endpoint = process.env.NEXT_PUBLIC_MEDIA_URL
  }

  const s3 = new AWS.S3(config)
  const s3 = new AWS.S3({ apiVersion: '2006-03-01' })
  return new Promise((resolve, reject) => {
    s3.createPresignedPost({
      Bucket,
@@ -36,35 +24,14 @@ export function createPresignedPost ({ key, type, size }) {
  })
}

export async function deleteObjects (keys) {
  // for local development, we use the MEDIA_URL which
  // is reachable from the container network
  if (process.env.NODE_ENV === 'development') {
    config.endpoint = MEDIA_URL
  }

  const s3 = new AWS.S3(config)
  // max 1000 keys per request
  // see https://docs.aws.amazon.com/cli/latest/reference/s3api/delete-objects.html
  const batchSize = 1000
  const deleted = []
  for (let i = 0; i < keys.length; i += batchSize) {
    const batch = keys.slice(i, i + batchSize)
    await new Promise((resolve, reject) => {
      const params = {
        Bucket,
        Delete: {
          Objects: batch.map(key => ({ Key: String(key) }))
        }
export function deleteObjects (keys) {
  const s3 = new AWS.S3({ apiVersion: '2006-03-01' })
  return new Promise((resolve, reject) => {
    s3.deleteObjects({
      Bucket,
      Delete: {
        Objects: keys.map(key => ({ Key: String(key) }))
      }
      s3.deleteObjects(params, (err, data) => {
        if (err) return reject(err)
        const deleted = data.Deleted?.map(({ Key }) => parseInt(Key)) || []
        resolve(deleted)
      })
    }).then((deleteConfirmed) => {
      deleted.push(...deleteConfirmed)
    }).catch(console.error)
  }
  return deleted
    }, (err, data) => { err ? reject(err) : resolve(keys) })
  })
}
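The batching in the newer deleteObjects exists because S3's DeleteObjects call rejects requests with more than 1000 keys. A minimal sketch of the two helpers together, using the key-extraction one-liner from earlier; the text variable is hypothetical:

// hypothetical caller: collect numeric S3 keys from markdown, then delete in batches
const keys = [...new Set([...text.matchAll(AWS_S3_URL_REGEXP)].map(m => Number(m[1])))]
const deleted = await deleteObjects(keys) // batched version resolves to the keys S3 confirmed deleted
console.log(`deleted ${deleted.length} of ${keys.length} objects`)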
@@ -1,12 +1,14 @@
import os from '@opensearch-project/opensearch'

const options = {
  node: process.env.OPENSEARCH_URL,
  auth: {
    username: process.env.OPENSEARCH_USERNAME,
    password: process.env.OPENSEARCH_PASSWORD
  }
}
const options = process.env.NODE_ENV === 'development'
  ? { node: process.env.OPENSEARCH_URL || 'http://localhost:9200' }
  : {
      node: process.env.OPENSEARCH_URL,
      auth: {
        username: process.env.OPENSEARCH_USERNAME,
        password: process.env.OPENSEARCH_PASSWORD
      }
    }

global.os = global.os || new os.Client(options)
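A minimal sketch of querying through the shared client configured above; the index name and query are assumptions:

// hypothetical search against the global OpenSearch client
const { body } = await global.os.search({
  index: 'item', // assumed index name
  body: { query: { match: { text: 'bitcoin' } } }
})
console.log(body.hits.hits.length)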
115 api/ssrApollo.js
@@ -7,15 +7,12 @@ import models from './models'
import { print } from 'graphql'
import lnd from './lnd'
import search from './search'
import { ME } from '@/fragments/users'
import { PRICE } from '@/fragments/price'
import { BLOCK_HEIGHT } from '@/fragments/blockHeight'
import { CHAIN_FEE } from '@/fragments/chainFee'
import { ME } from '../fragments/users'
import { PRICE } from '../fragments/price'
import { BLOCK_HEIGHT } from '../fragments/blockHeight'
import { CHAIN_FEE } from '../fragments/chainFee'
import { getServerSession } from 'next-auth/next'
import { getAuthOptions } from '@/pages/api/auth/[...nextauth]'
import { NOFOLLOW_LIMIT } from '@/lib/constants'
import { satsToMsats } from '@/lib/format'
import { MULTI_AUTH_ANON, MULTI_AUTH_LIST } from '@/lib/auth'
import { getAuthOptions } from '../pages/api/auth/[...nextauth]'

export default async function getSSRApolloClient ({ req, res, me = null }) {
  const session = req && await getServerSession(req, res, getAuthOptions(req))
@@ -43,93 +40,20 @@ export default async function getSSRApolloClient ({ req, res, me = null }) {
      watchQuery: {
        fetchPolicy: 'no-cache',
        nextFetchPolicy: 'no-cache',
        canonizeResults: true,
        ssr: true
      },
      query: {
        fetchPolicy: 'no-cache',
        nextFetchPolicy: 'no-cache',
        canonizeResults: true,
        ssr: true
      }
    }
  })

  await client.clearStore()
  return client
}

function oneDayReferral (request, { me }) {
  if (!me) return
  const refHeader = request.headers['x-stacker-news-referrer']
  if (!refHeader) return

  const referrers = refHeader.split('; ').filter(Boolean)
  for (const referrer of referrers) {
    let prismaPromise, getData

    if (referrer.startsWith('item-')) {
      prismaPromise = models.item.findUnique({
        where: {
          id: parseInt(referrer.slice(5)),
          msats: {
            gt: satsToMsats(NOFOLLOW_LIMIT)
          },
          weightedVotes: {
            gt: 0
          }
        }
      })
      getData = item => ({
        referrerId: item.userId,
        refereeId: parseInt(me.id),
        type: item.parentId ? 'COMMENT' : 'POST',
        typeId: String(item.id)
      })
    } else if (referrer.startsWith('profile-')) {
      const name = referrer.slice(8)
      // exclude all pages that are not user profiles
      if (['api', 'auth', 'day', 'invites', 'invoices', 'referrals', 'rewards',
        'satistics', 'settings', 'stackers', 'wallet', 'withdrawals', '404', '500',
        'email', 'live', 'login', 'notifications', 'offline', 'search', 'share',
        'signup', 'territory', 'recent', 'top', 'edit', 'post', 'rss', 'saloon',
        'faq', 'story', 'privacy', 'copyright', 'tos', 'changes', 'guide', 'daily',
        'anon', 'ad'].includes(name)) continue

      prismaPromise = models.user.findUnique({ where: { name } })
      getData = user => ({
        referrerId: user.id,
        refereeId: parseInt(me.id),
        type: 'PROFILE',
        typeId: String(user.id)
      })
    } else if (referrer.startsWith('territory-')) {
      prismaPromise = models.sub.findUnique({ where: { name: referrer.slice(10) } })
      getData = sub => ({
        referrerId: sub.userId,
        refereeId: parseInt(me.id),
        type: 'TERRITORY',
        typeId: sub.name
      })
    } else {
      prismaPromise = models.user.findUnique({ where: { name: referrer } })
      getData = user => ({
        referrerId: user.id,
        refereeId: parseInt(me.id),
        type: 'REFERRAL',
        typeId: String(user.id)
      })
    }

    prismaPromise?.then(ref => {
      if (ref && getData) {
        const data = getData(ref)
        // can't refer yourself
        if (data.refereeId === data.referrerId) return
        models.oneDayReferral.create({ data }).catch(console.error)
      }
    }).catch(console.error)
  }
}

/**
 * Takes a query and variables and returns a getServerSideProps function
 *
@@ -152,23 +76,13 @@ export function getGetServerSideProps (

    const client = await getSSRApolloClient({ req, res })

    let { data: { me } } = await client.query({ query: ME })

    // required to redirect to /signup on page reload
    // if we switched to anon and authentication is required
    if (req.cookies[MULTI_AUTH_LIST] === MULTI_AUTH_ANON) {
      me = null
    }
    const { data: { me } } = await client.query({ query: ME })

    if (authRequired && !me) {
      let callback = process.env.NEXT_PUBLIC_URL + req.url
      // On client-side routing, the callback is a NextJS URL
      // so we need to remove the NextJS stuff.
      // Example: /_next/data/development/territory.json
      callback = callback.replace(/\/_next\/data\/\w+\//, '/').replace(/\.json$/, '')
      const callback = process.env.PUBLIC_URL + req.url
      return {
        redirect: {
          destination: `/signup?callbackUrl=${encodeURIComponent(callback)}`
          destination: `/login?callbackUrl=${encodeURIComponent(callback)}`
        }
      }
    }
@@ -197,10 +111,9 @@ export function getGetServerSideProps (
    }

    if (error || !data || (notFound && notFound(data, vars, me))) {
      error && console.error(error)
      res.writeHead(302, {
        Location: '/404'
      }).end()
      return {
        notFound: true
      }
    }

    props = {
@@ -211,8 +124,6 @@
    }
  }

  oneDayReferral(req, { me })

  return {
    props: {
      ...props,
@@ -13,8 +13,6 @@ export default gql`
  spenderGrowth(when: String, from: String, to: String): [TimeData!]!
  stackingGrowth(when: String, from: String, to: String): [TimeData!]!
  stackerGrowth(when: String, from: String, to: String): [TimeData!]!
  itemGrowthSubs(when: String, from: String, to: String, sub: String): [TimeData!]!
  revenueGrowthSubs(when: String, from: String, to: String, sub: String): [TimeData!]!
}

type TimeData {
16 api/typeDefs/image.js Normal file
@@ -0,0 +1,16 @@
import { gql } from 'graphql-tag'

export default gql`
  type ImageFeesInfo {
    totalFees: Int!
    totalFeesMsats: Int!
    imageFee: Int!
    imageFeeMsats: Int!
    nUnpaid: Int!
    bytesUnpaid: Int!
    bytes24h: Int!
  }
  extend type Query {
    imageFeesInfo(s3Keys: [Int]!): ImageFeesInfo!
  }
`
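A minimal sketch of a client query against this type; the constant name and field selection are illustrative:

import { gql } from 'graphql-tag'

// hypothetical client document for the imageFeesInfo query above
const IMAGE_FEES_INFO = gql`
  query imageFeesInfo($s3Keys: [Int]!) {
    imageFeesInfo(s3Keys: $s3Keys) {
      totalFees totalFeesMsats imageFee nUnpaid bytesUnpaid bytes24h
    }
  }
`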
@@ -17,8 +17,7 @@ import price from './price'
import admin from './admin'
import blockHeight from './blockHeight'
import chainFee from './chainFee'
import paidAction from './paidAction'
import vault from './vault'
import image from './image'

const common = gql`
  type Query {
@@ -39,4 +38,4 @@ const common = gql`
`

export default [common, user, item, itemForward, message, wallet, lnurl, notifications, invite,
  sub, upload, growth, rewards, referrals, price, admin, blockHeight, chainFee, paidAction, vault]
  sub, upload, growth, rewards, referrals, price, admin, blockHeight, chainFee, image]
@@ -7,7 +7,7 @@ export default gql`
  }

  extend type Mutation {
    createInvite(id: String, gift: Int!, limit: Int!, description: String): Invite
    createInvite(gift: Int!, limit: Int): Invite
    revokeInvite(id: ID!): Invite
  }

@@ -20,6 +20,5 @@ export default gql`
    user: User!
    revoked: Boolean!
    poor: Boolean!
    description: String
  }
`
@@ -8,18 +8,10 @@ export default gql`
  dupes(url: String!): [Item!]
  related(cursor: String, title: String, id: ID, minMatch: String, limit: Limit): Items
  search(q: String, sub: String, cursor: String, what: String, sort: String, when: String, from: String, to: String): Items
  auctionPosition(sub: String, id: ID, boost: Int): Int!
  boostPosition(sub: String, id: ID, boost: Int): BoostPositions!
  auctionPosition(sub: String, id: ID, bid: Int!): Int!
  itemRepetition(parentId: ID): Int!
}

type BoostPositions {
  home: Boolean!
  sub: Boolean!
  homeMaxBoost: Int!
  subMaxBoost: Int!
}

type TitleUnshorted {
  title: String
  unshorted: String
@@ -28,57 +20,36 @@ export default gql`
type ItemActResult {
  id: ID!
  sats: Int!
  path: String
  path: String!
  act: String!
}

type ItemAct {
  id: ID!
  act: String!
  invoice: Invoice
}

extend type Mutation {
  bookmarkItem(id: ID): Item
  pinItem(id: ID): Item
  subscribeItem(id: ID): Item
  deleteItem(id: ID): Item
  upsertLink(
    id: ID, sub: String, title: String!, url: String!, text: String, boost: Int, forward: [ItemForwardInput],
    hash: String, hmac: String): ItemPaidAction!
  upsertDiscussion(
    id: ID, sub: String, title: String!, text: String, boost: Int, forward: [ItemForwardInput],
    hash: String, hmac: String): ItemPaidAction!
  upsertBounty(
    id: ID, sub: String, title: String!, text: String, bounty: Int, boost: Int, forward: [ItemForwardInput],
    hash: String, hmac: String): ItemPaidAction!
  upsertJob(
    id: ID, sub: String!, title: String!, company: String!, location: String, remote: Boolean,
    text: String!, url: String!, boost: Int, status: String, logo: Int): ItemPaidAction!
  upsertPoll(
    id: ID, sub: String, title: String!, text: String, options: [String!]!, boost: Int, forward: [ItemForwardInput], pollExpiresAt: Date,
    hash: String, hmac: String): ItemPaidAction!
  upsertLink(id: ID, sub: String, title: String!, url: String!, text: String, boost: Int, forward: [ItemForwardInput], hash: String, hmac: String): Item!
  upsertDiscussion(id: ID, sub: String, title: String!, text: String, boost: Int, forward: [ItemForwardInput], hash: String, hmac: String): Item!
  upsertBounty(id: ID, sub: String, title: String!, text: String, bounty: Int, hash: String, hmac: String, boost: Int, forward: [ItemForwardInput]): Item!
  upsertJob(id: ID, sub: String!, title: String!, company: String!, location: String, remote: Boolean,
    text: String!, url: String!, maxBid: Int!, status: String, logo: Int, hash: String, hmac: String): Item!
  upsertPoll(id: ID, sub: String, title: String!, text: String, options: [String!]!, boost: Int, forward: [ItemForwardInput], hash: String, hmac: String): Item!
  updateNoteId(id: ID!, noteId: String!): Item!
  upsertComment(id: ID, text: String!, parentId: ID, boost: Int, hash: String, hmac: String): ItemPaidAction!
  act(id: ID!, sats: Int, act: String, hasSendWallet: Boolean): ItemActPaidAction!
  pollVote(id: ID!): PollVotePaidAction!
  upsertComment(id:ID, text: String!, parentId: ID, hash: String, hmac: String): Item!
  act(id: ID!, sats: Int, act: String, idempotent: Boolean, hash: String, hmac: String): ItemActResult!
  pollVote(id: ID!, hash: String, hmac: String): ID!
  toggleOutlaw(id: ID!): Item!
}

type PollVoteResult {
  id: ID!
}

type PollOption {
  id: ID,
  option: String!
  count: Int!
  meVoted: Boolean!
}

type Poll {
  meVoted: Boolean!
  meInvoiceId: Int
  meInvoiceActionState: InvoiceActionState
  count: Int!
  options: [PollOption!]!
}
@@ -87,7 +58,6 @@ export default gql`
  cursor: String
  items: [Item!]!
  pins: [Item!]
  ad: Item
}

type Comments {
@@ -95,22 +65,12 @@ export default gql`
  comments: [Item!]!
}

enum InvoiceActionState {
  PENDING
  PENDING_HELD
  HELD
  PAID
  FAILED
}

type Item {
  id: ID!
  createdAt: Date!
  updatedAt: Date!
  invoicePaidAt: Date
  deletedAt: Date
  deleteScheduledAt: Date
  reminderScheduledAt: Date
  title: String
  searchTitle: String
  url: String
@@ -128,13 +88,10 @@ export default gql`
  bountyPaidTo: [Int]
  noteId: String
  sats: Int!
  credits: Int!
  commentSats: Int!
  commentCredits: Int!
  lastCommentAt: Date
  upvotes: Int!
  meSats: Int!
  meCredits: Int!
  meDontLikeSats: Int!
  meBookmark: Boolean!
  meSubscription: Boolean!
@@ -145,30 +102,25 @@ export default gql`
  bio: Boolean!
  paidImgLink: Boolean
  ncomments: Int!
  nDirectComments: Int!
  comments(sort: String, cursor: String): Comments!
  comments(sort: String): [Item!]!
  path: String
  position: Int
  prior: Int
  maxBid: Int
  isJob: Boolean!
  pollCost: Int
  poll: Poll
  pollExpiresAt: Date
  company: String
  location: String
  remote: Boolean
  sub: Sub
  subName: String
  status: String!
  status: String
  uploadId: Int
  otsHash: String
  parentOtsHash: String
  forwards: [ItemForward]
  imgproxyUrls: JSONObject
  rel: String
  apiKey: Boolean
  invoice: Invoice
  cost: Int!
}

input ItemForwardInput {
@@ -43,24 +43,12 @@ export default gql`
  sortTime: Date!
}

type ItemMention {
  id: ID!
  item: Item!
  sortTime: Date!
}

type Invitification {
  id: ID!
  invite: Invite!
  sortTime: Date!
}

type Invoicification {
  id: ID!
  invoice: Invoice!
  sortTime: Date!
}

type JobChanged {
  id: ID!
  item: Item!
@@ -79,7 +67,6 @@ export default gql`
  id: ID!
  sortTime: Date!
  days: Int
  type: String!
}

type Earn {
@@ -90,19 +77,6 @@ export default gql`
  sources: EarnSources
}

type ReferralSources {
  id: ID!
  forever: Int!
  oneDay: Int!
}

type ReferralReward {
  id: ID!
  earnedSats: Int!
  sortTime: Date!
  sources: ReferralSources
}

type Revenue {
  id: ID!
  earnedSats: Int!
@@ -117,19 +91,9 @@ export default gql`
  sortTime: Date!
}

type WithdrawlPaid {
  id: ID!
  earnedSats: Int!
  sortTime: Date!
  withdrawl: Withdrawl!
}

union ReferralSource = Item | Sub | User

type Referral {
  id: ID!
  sortTime: Date!
  source: ReferralSource
}

type SubStatus {
@@ -138,29 +102,9 @@ export default gql`
  sortTime: Date!
}

type TerritoryPost {
  id: ID!
  item: Item!
  sortTime: Date!
}

type TerritoryTransfer {
  id: ID!
  sub: Sub!
  sortTime: Date!
}

type Reminder {
  id: ID!
  item: Item!
  sortTime: Date!
}

union Notification = Reply | Votification | Mention
  | Invitification | Earn | JobChanged | InvoicePaid | WithdrawlPaid | Referral
  | Invitification | Earn | JobChanged | InvoicePaid | Referral
  | Streak | FollowActivity | ForwardedVotification | Revenue | SubStatus
  | TerritoryPost | TerritoryTransfer | Reminder | ItemMention | Invoicification
  | ReferralReward

type Notifications {
  lastChecked: Date
@@ -1,61 +0,0 @@
import { gql } from 'graphql-tag'

export default gql`

  extend type Query {
    paidAction(invoiceId: Int!): PaidAction
  }

  extend type Mutation {
    retryPaidAction(invoiceId: Int!, newAttempt: Boolean): PaidAction!
  }

  enum PaymentMethod {
    REWARD_SATS
    FEE_CREDIT
    ZERO_COST
    OPTIMISTIC
    PESSIMISTIC
  }

  interface PaidAction {
    invoice: Invoice
    paymentMethod: PaymentMethod!
  }

  type ItemPaidAction implements PaidAction {
    result: Item
    invoice: Invoice
    paymentMethod: PaymentMethod!
  }

  type ItemActPaidAction implements PaidAction {
    result: ItemActResult
    invoice: Invoice
    paymentMethod: PaymentMethod!
  }

  type PollVotePaidAction implements PaidAction {
    result: PollVoteResult
    invoice: Invoice
    paymentMethod: PaymentMethod!
  }

  type SubPaidAction implements PaidAction {
    result: Sub
    invoice: Invoice
    paymentMethod: PaymentMethod!
  }

  type DonatePaidAction implements PaidAction {
    result: DonateResult
    invoice: Invoice
    paymentMethod: PaymentMethod!
  }

  type BuyCreditsPaidAction implements PaidAction {
    result: BuyCreditsResult
    invoice: Invoice
    paymentMethod: PaymentMethod!
  }
`
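Since PaidAction is an interface, clients select per-type results with inline fragments. A minimal sketch, with the constant name and field selection illustrative:

import { gql } from 'graphql-tag'

// hypothetical client document for the paidAction query above
const PAID_ACTION = gql`
  query paidAction($invoiceId: Int!) {
    paidAction(invoiceId: $invoiceId) {
      paymentMethod
      invoice { id }
      ... on ItemPaidAction { result { id } }
      ... on DonatePaidAction { result { sats } }
    }
  }
`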
@@ -2,6 +2,12 @@ import { gql } from 'graphql-tag'

export default gql`
  extend type Query {
    referrals(when: String, from: String, to: String): [TimeData!]!
    referrals(when: String, from: String, to: String): Referrals!
  }

  type Referrals {
    totalSats: Int!
    totalReferrals: Int!
    stats: [TimeData!]!
  }
`
@@ -7,19 +7,13 @@ export default gql`
  }

  extend type Mutation {
    donateToRewards(sats: Int!): DonatePaidAction!
  }

  type DonateResult {
    sats: Int!
    donateToRewards(sats: Int!, hash: String, hmac: String): Int!
  }

  type Rewards {
    total: Int!
    time: Date!
    sources: [NameValue!]!
    leaderboard: UsersNullable
    ad: Item
  }

  type Reward {
@@ -5,33 +5,19 @@ export default gql`
  sub(name: String): Sub
  subLatestPost(name: String!): String
  subs: [Sub!]!
  topSubs(cursor: String, when: String, from: String, to: String, by: String, limit: Limit): Subs
  userSubs(name: String!, cursor: String, when: String, from: String, to: String, by: String, limit: Limit): Subs
}

type Subs {
  cursor: String
  subs: [Sub!]!
}

extend type Mutation {
  upsertSub(oldName: String, name: String!, desc: String, baseCost: Int!,
    replyCost: Int!,
    postTypes: [String!]!,
  upsertSub(name: String!, desc: String, baseCost: Int!,
    postTypes: [String!]!, allowFreebies: Boolean!,
    billingType: String!, billingAutoRenew: Boolean!,
    moderated: Boolean!, nsfw: Boolean!): SubPaidAction!
  paySub(name: String!): SubPaidAction!
    moderated: Boolean!, hash: String, hmac: String): Sub
  paySub(name: String!, hash: String, hmac: String): Sub
  toggleMuteSub(name: String!): Boolean!
  toggleSubSubscription(name: String!): Boolean!
  transferTerritory(subName: String!, userName: String!): Sub
  unarchiveTerritory(name: String!, desc: String, baseCost: Int!,
    replyCost: Int!, postTypes: [String!]!,
    billingType: String!, billingAutoRenew: Boolean!,
    moderated: Boolean!, nsfw: Boolean!): SubPaidAction!
}

type Sub {
  name: String!
  name: ID!
  createdAt: Date!
  userId: Int!
  user: User!
@@ -44,27 +30,10 @@ export default gql`
  billingAutoRenew: Boolean!
  rankingType: String!
  billedLastAt: Date!
  billPaidUntil: Date
  baseCost: Int!
  replyCost: Int!
  status: String!
  moderated: Boolean!
  moderatedCount: Int!
  meMuteSub: Boolean!
  nsfw: Boolean!
  nposts(when: String, from: String, to: String): Int!
  ncomments(when: String, from: String, to: String): Int!
  meSubscription: Boolean!

  optional: SubOptional!
}

type SubOptional {
  """
  conditionally private
  """
  stacked(when: String, from: String, to: String): Int
  spent(when: String, from: String, to: String): Int
  revenue(when: String, from: String, to: String): Int
}
`
@@ -1,26 +1,12 @@
import { gql } from 'graphql-tag'

export default gql`
  type UploadFees {
    totalFees: Int!
    totalFeesMsats: Int!
    uploadFees: Int!
    uploadFeesMsats: Int!
    nUnpaid: Int!
    bytesUnpaid: Int!
    bytes24h: Int!
  extend type Mutation {
    getSignedPOST(type: String!, size: Int!, width: Int!, height: Int!, avatar: Boolean): SignedPost!
  }

  type SignedPost {
    url: String!
    fields: JSONObject!
  }

  extend type Query {
    uploadFees(s3Keys: [Int]!): UploadFees!
  }

  extend type Mutation {
    getSignedPOST(type: String!, size: Int!, width: Int!, height: Int!, avatar: Boolean): SignedPost!
  }
`
@@ -7,21 +7,11 @@ export default gql`
  user(id: ID, name: String): User
  users: [User!]
  nameAvailable(name: String!): Boolean!
  topUsers(cursor: String, when: String, from: String, to: String, by: String, limit: Limit): UsersNullable!
  topCowboys(cursor: String): UsersNullable!
  topUsers(cursor: String, when: String, from: String, to: String, by: String, limit: Limit): Users
  topCowboys(cursor: String): Users
  searchUsers(q: String!, limit: Limit, similarity: Float): [User!]!
  userSuggestions(q: String, limit: Limit): [User!]!
  hasNewNotes: Boolean!
  mySubscribedUsers(cursor: String): Users!
  myMutedUsers(cursor: String): Users!
  userStatsActions(when: String, from: String, to: String): [TimeData!]!
  userStatsIncomingSats(when: String, from: String, to: String): [TimeData!]!
  userStatsOutgoingSats(when: String, from: String, to: String): [TimeData!]!
}

type UsersNullable {
  cursor: String
  users: [User]!
}

type Users {
@@ -33,7 +23,7 @@ export default gql`
  setName(name: String!): String
  setSettings(settings: SettingsInput!): User
  setPhoto(photoId: ID!): Int!
  upsertBio(text: String!): ItemPaidAction!
  upsertBio(bio: String!): User!
  setWalkthrough(tipPopover: Boolean, upvotePopover: Boolean): Boolean
  unlinkAuth(authType: String!): AuthMethods!
  linkUnverifiedEmail(email: String!): Boolean
@@ -41,29 +31,20 @@ export default gql`
  subscribeUserPosts(id: ID): User
  subscribeUserComments(id: ID): User
  toggleMute(id: ID): User
  generateApiKey(id: ID!): String
  deleteApiKey(id: ID!): User
  disableFreebies: Boolean
}

type User {
  id: ID!
  createdAt: Date!
  name: String!
  name: String
  nitems(when: String, from: String, to: String): Int!
  nposts(when: String, from: String, to: String): Int!
  nterritories(when: String, from: String, to: String): Int!
  ncomments(when: String, from: String, to: String): Int!
  bio: Item
  bioId: Int
  photoId: Int
  since: Int

  """
  this is only returned when we sort stackers by value
  """
  proportion: Float

  optional: UserOptional!
  privates: UserPrivates

@@ -75,47 +56,31 @@ export default gql`
input SettingsInput {
  autoDropBolt11s: Boolean!
  diagnostics: Boolean!
  noReferralLinks: Boolean!
  fiatCurrency: String!
  satsFilter: Int!
  disableFreebies: Boolean
  greeterMode: Boolean!
  hideBookmarks: Boolean!
  hideCowboyHat: Boolean!
  hideGithub: Boolean!
  hideNostr: Boolean!
  hideTwitter: Boolean!
  hideFromTopUsers: Boolean!
  hideInvoiceDesc: Boolean!
  hideIsContributor: Boolean!
  hideWalletBalance: Boolean!
  imgproxyOnly: Boolean!
  showImagesAndVideos: Boolean!
  nostrCrossposting: Boolean!
  nostrPubkey: String
  nostrRelays: [String!]
  noteAllDescendants: Boolean!
  noteCowboyHat: Boolean!
  noteDeposits: Boolean!,
  noteWithdrawals: Boolean!,
  noteDeposits: Boolean!
  noteEarning: Boolean!
  noteForwardedSats: Boolean!
  noteInvites: Boolean!
  noteItemSats: Boolean!
  noteJobIndicator: Boolean!
  noteMentions: Boolean!
  noteItemMentions: Boolean!
  nsfwMode: Boolean!
  tipDefault: Int!
  tipRandomMin: Int
  tipRandomMax: Int
  turboTipping: Boolean!
  zapUndos: Int
  wildWestMode: Boolean!
  withdrawMaxFeeDefault: Int!
  proxyReceive: Boolean
  directReceive: Boolean
  receiveCreditsBelowSats: Int!
  sendCreditsBelowSats: Int!
}

type AuthMethods {
@@ -123,8 +88,7 @@ export default gql`
  nostr: Boolean!
  github: Boolean!
  twitter: Boolean!
  email: Boolean!
  apiKey: Boolean
  email: String
}

type UserPrivates {
@@ -132,9 +96,7 @@ export default gql`
  extremely sensitive
  """
  sats: Int!
  credits: Int!
  authMethods: AuthMethods!
  lnAddr: String

  """
  only relevant to user
@@ -144,61 +106,37 @@ export default gql`
  tipPopover: Boolean!
  upvotePopover: Boolean!
  hasInvites: Boolean!
  apiKeyEnabled: Boolean!

  """
  mirrors SettingsInput
  """
  autoDropBolt11s: Boolean!
  diagnostics: Boolean!
  noReferralLinks: Boolean!
  fiatCurrency: String!
  satsFilter: Int!
  disableFreebies: Boolean
  greeterMode: Boolean!
  hideBookmarks: Boolean!
  hideCowboyHat: Boolean!
  hideGithub: Boolean!
  hideNostr: Boolean!
  hideTwitter: Boolean!
  hideFromTopUsers: Boolean!
  hideInvoiceDesc: Boolean!
  hideIsContributor: Boolean!
  hideWalletBalance: Boolean!
  imgproxyOnly: Boolean!
  showImagesAndVideos: Boolean!
  nostrCrossposting: Boolean!
  nostrPubkey: String
  nostrRelays: [String!]
  noteAllDescendants: Boolean!
  noteCowboyHat: Boolean!
  noteDeposits: Boolean!
  noteWithdrawals: Boolean!
  noteEarning: Boolean!
  noteForwardedSats: Boolean!
  noteInvites: Boolean!
  noteItemSats: Boolean!
  noteJobIndicator: Boolean!
  noteMentions: Boolean!
  noteItemMentions: Boolean!
  nsfwMode: Boolean!
  tipDefault: Int!
  tipRandom: Boolean!
  tipRandomMin: Int
  tipRandomMax: Int
  turboTipping: Boolean!
  zapUndos: Int
  wildWestMode: Boolean!
  withdrawMaxFeeDefault: Int!
  autoWithdrawThreshold: Int
  autoWithdrawMaxFeePercent: Float
  autoWithdrawMaxFeeTotal: Int
  vaultKeyHash: String
  walletsUpdatedAt: Date
  proxyReceive: Boolean
  directReceive: Boolean
  receiveCreditsBelowSats: Int!
  sendCreditsBelowSats: Int!
}

type UserOptional {
@@ -209,22 +147,7 @@ export default gql`
  spent(when: String, from: String, to: String): Int
  referrals(when: String, from: String, to: String): Int
  streak: Int
  gunStreak: Int
  horseStreak: Int
  maxStreak: Int
  isContributor: Boolean
  githubId: String
  twitterId: String
  nostrAuthPubkey: String
}

type NameValue {
  name: String!
  value: Float!
}

type TimeData {
  time: Date!
  data: [NameValue!]!
}
`
@@ -1,29 +0,0 @@
import { gql } from 'graphql-tag'

export default gql`
  type VaultEntry {
    id: ID!
    key: String!
    iv: String!
    value: String!
    createdAt: Date!
    updatedAt: Date!
  }

  input VaultEntryInput {
    key: String!
    iv: String!
    value: String!
    walletId: ID
  }

  extend type Query {
    getVaultEntry(key: String!): VaultEntry
    getVaultEntries(keysFilter: [String!]): [VaultEntry!]!
  }

  extend type Mutation {
    clearVault: Boolean
    updateVaultKey(entries: [VaultEntryInput!]!, hash: String!): Boolean
  }
`
@@ -1,125 +1,29 @@
import { gql } from 'graphql-tag'
import { fieldToGqlArg, fieldToGqlArgOptional, generateResolverName, generateTypeDefName } from '@/wallets/graphql'
import { isServerField } from '@/wallets/common'
import walletDefs from '@/wallets/server'

function injectTypeDefs (typeDefs) {
  const injected = [rawTypeDefs(), mutationTypeDefs()]
  return `${typeDefs}\n\n${injected.join('\n\n')}\n`
}

function mutationTypeDefs () {
  console.group('injected GraphQL mutations:')

  const typeDefs = walletDefs.map((w) => {
    let args = 'id: ID, '
    const serverFields = w.fields
      .filter(isServerField)
      .map(fieldToGqlArgOptional)
    if (serverFields.length > 0) args += serverFields.join(', ') + ','
    args += 'enabled: Boolean, priority: Int, vaultEntries: [VaultEntryInput!], settings: AutowithdrawSettings, validateLightning: Boolean'
    const resolverName = generateResolverName(w.walletField)
    const typeDef = `${resolverName}(${args}): Wallet`
    console.log(typeDef)
    return typeDef
  })

  console.groupEnd()

  return `extend type Mutation {\n${typeDefs.join('\n')}\n}`
}

function rawTypeDefs () {
  console.group('injected GraphQL type defs:')

  const typeDefs = walletDefs.map((w) => {
    let args = w.fields
      .filter(isServerField)
      .map(fieldToGqlArg)
      .map(s => ' ' + s)
      .join('\n')
    if (!args) {
      // add a placeholder arg so the type is not empty
      args = ' _empty: Boolean'
    }
    const typeDefName = generateTypeDefName(w.walletType)
    const typeDef = `type ${typeDefName} {\n${args}\n}`
    console.log(typeDef)
    return typeDef
  })

  let union = 'union WalletDetails = '
  union += walletDefs.map((w) => {
    const typeDefName = generateTypeDefName(w.walletType)
    return typeDefName
  }).join(' | ')
  console.log(union)

  console.groupEnd()

  return typeDefs.join('\n\n') + union
}

const typeDefs = `
export default gql`
  extend type Query {
    invoice(id: ID!): Invoice!
    withdrawl(id: ID!): Withdrawl!
    direct(id: ID!): Direct!
    numBolt11s: Int!
    connectAddress: String!
    walletHistory(cursor: String, inc: String): History
    wallets(includeReceivers: Boolean, includeSenders: Boolean, onlyEnabled: Boolean, prioritySort: String): [Wallet!]!
    wallet(id: ID!): Wallet
    walletByType(type: String!): Wallet
    walletLogs(type: String, from: String, to: String, cursor: String): WalletLog!
    failedInvoices: [Invoice!]!
  }

  extend type Mutation {
    createInvoice(amount: Int!): InvoiceOrDirect!
    createInvoice(amount: Int!, expireSecs: Int, hodlInvoice: Boolean): Invoice!
    createWithdrawl(invoice: String!, maxFee: Int!): Withdrawl!
    sendToLnAddr(addr: String!, amount: Int!, maxFee: Int!, comment: String, identifier: Boolean, name: String, email: String): Withdrawl!
    cancelInvoice(hash: String!, hmac: String, userCancel: Boolean): Invoice!
    dropBolt11(hash: String!): Boolean
    removeWallet(id: ID!): Boolean
    deleteWalletLogs(wallet: String): Boolean
    setWalletPriority(id: ID!, priority: Int!): Boolean
    buyCredits(credits: Int!): BuyCreditsPaidAction!
    cancelInvoice(hash: String!, hmac: String!): Invoice!
    dropBolt11(id: ID): Withdrawl
  }

  type BuyCreditsResult {
    credits: Int!
  }

  interface InvoiceOrDirect {
    id: ID!
  }

  type Wallet {
    id: ID!
    createdAt: Date!
    updatedAt: Date!
    type: String!
    enabled: Boolean!
    priority: Int!
    wallet: WalletDetails!
    vaultEntries: [VaultEntry!]!
  }

  input AutowithdrawSettings {
    autoWithdrawThreshold: Int!
    autoWithdrawMaxFeePercent: Float!
    autoWithdrawMaxFeeTotal: Int!
  }

  type Invoice implements InvoiceOrDirect {
  type Invoice {
    id: ID!
    createdAt: Date!
    hash: String!
    bolt11: String!
    expiresAt: Date!
    cancelled: Boolean!
    cancelledAt: Date
    confirmedAt: Date
    satsReceived: Int
    satsRequested: Int!
@@ -129,14 +33,6 @@ const typeDefs = `
    hmac: String
    isHeld: Boolean
    confirmedPreimage: String
    actionState: String
    actionType: String
    actionError: String
    invoiceForward: Boolean
    item: Item
    itemAct: ItemAct
    forwardedSats: Int
    forwardStatus: String
  }

  type Withdrawl {
@@ -149,21 +45,6 @@ const typeDefs = `
    satsFeePaying: Int!
    satsFeePaid: Int
    status: String
    autoWithdraw: Boolean!
    preimage: String
    forwardedActionType: String
  }

  type Direct implements InvoiceOrDirect {
    id: ID!
    createdAt: Date!
    bolt11: String
    hash: String
    sats: Int
    preimage: String
    nostr: JSONObject
    comment: String
    lud18Data: JSONObject
  }

  type Fact {
@@ -174,7 +55,6 @@ const typeDefs = `
    bolt11: String
    status: String
    description: String
    autoWithdraw: Boolean
    item: Item
    invoiceComment: String
    invoicePayerData: JSONObject
@@ -185,20 +65,4 @@ const typeDefs = `
    facts: [Fact!]!
    cursor: String
  }

  type WalletLog {
    entries: [WalletLogEntry!]!
    cursor: String
  }

  type WalletLogEntry {
    id: ID!
    createdAt: Date!
    wallet: ID!
    level: String!
    message: String!
    context: JSONObject
  }
`

export default gql`${injectTypeDefs(typeDefs)}`
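To illustrate what injectTypeDefs produces, assume a wallet definition whose walletType yields a type def name of WalletLNbits, whose walletField yields a resolver name of upsertWalletLNbits, and whose server fields are url and adminKey; all of these names are hypothetical, since they depend on generateTypeDefName and generateResolverName:

// hypothetical output logged by rawTypeDefs() and mutationTypeDefs():
//
// type WalletLNbits {
//   url: String
//   adminKey: String
// }
// union WalletDetails = WalletLNbits | ...
//
// extend type Mutation {
//   upsertWalletLNbits(id: ID, url: String, adminKey: String,
//     enabled: Boolean, priority: Int, vaultEntries: [VaultEntryInput!],
//     settings: AutowithdrawSettings, validateLightning: Boolean): Wallet
// }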
114 api/webPush/index.js Normal file
@@ -0,0 +1,114 @@
import webPush from 'web-push'
import models from '../models'
import { COMMENT_DEPTH_LIMIT } from '../../lib/constants'
import removeMd from 'remove-markdown'

const webPushEnabled = process.env.NODE_ENV === 'production' ||
  (process.env.VAPID_MAILTO && process.env.NEXT_PUBLIC_VAPID_PUBKEY && process.env.VAPID_PRIVKEY)

if (webPushEnabled) {
  webPush.setVapidDetails(
    process.env.VAPID_MAILTO,
    process.env.NEXT_PUBLIC_VAPID_PUBKEY,
    process.env.VAPID_PRIVKEY
  )
} else {
  console.warn('VAPID_* env vars not set, skipping webPush setup')
}

const createPayload = (notification) => {
  // https://web.dev/push-notifications-display-a-notification/#visual-options
  let { title, body, ...options } = notification
  if (body) body = removeMd(body)
  return JSON.stringify({
    title,
    options: {
      body,
      timestamp: Date.now(),
      icon: '/icons/icon_x96.png',
      ...options
    }
  })
}

const createUserFilter = (tag) => {
  // filter users by notification settings
  const tagMap = {
    REPLY: 'noteAllDescendants',
    MENTION: 'noteMentions',
    TIP: 'noteItemSats',
    FORWARDEDTIP: 'noteForwardedSats',
    REFERRAL: 'noteInvites',
    INVITE: 'noteInvites',
    EARN: 'noteEarning',
    DEPOSIT: 'noteDeposits',
    STREAK: 'noteCowboyHat'
  }
  const key = tagMap[tag.split('-')[0]]
  return key ? { user: { [key]: true } } : undefined
}

const createItemUrl = async ({ id }) => {
  const [rootItem] = await models.$queryRawUnsafe(
    'SELECT subpath(path, -LEAST(nlevel(path), $1::INTEGER), 1)::text AS id FROM "Item" WHERE id = $2::INTEGER',
    COMMENT_DEPTH_LIMIT + 1, Number(id)
  )
  return `/items/${rootItem.id}` + (rootItem.id !== id ? `?commentId=${id}` : '')
}

const sendNotification = (subscription, payload) => {
  if (!webPushEnabled) {
    console.warn('webPush not configured. skipping notification')
    return
  }
  const { id, endpoint, p256dh, auth } = subscription
  return webPush.sendNotification({ endpoint, keys: { p256dh, auth } }, payload)
    .catch(async (err) => {
      if (err.statusCode === 400) {
        console.log('[webPush] invalid request: ', err)
      } else if ([401, 403].includes(err.statusCode)) {
        console.log('[webPush] auth error: ', err)
      } else if (err.statusCode === 404 || err.statusCode === 410) {
        console.log('[webPush] subscription has expired or is no longer valid: ', err)
        const deletedSubscription = await models.pushSubscription.delete({ where: { id } })
        console.log(`[webPush] deleted subscription ${id} of user ${deletedSubscription.userId} due to push error`)
      } else if (err.statusCode === 413) {
        console.log('[webPush] payload too large: ', err)
      } else if (err.statusCode === 429) {
        console.log('[webPush] too many requests: ', err)
      } else {
        console.log('[webPush] error: ', err)
      }
    })
}

export async function sendUserNotification (userId, notification) {
  try {
    notification.data ??= {}
    if (notification.item) {
      notification.data.url ??= await createItemUrl(notification.item)
      notification.data.itemId ??= notification.item.id
      delete notification.item
    }
    const userFilter = createUserFilter(notification.tag)
    const payload = createPayload(notification)
    const subscriptions = await models.pushSubscription.findMany({
      where: { userId, ...userFilter }
    })
    await Promise.allSettled(
      subscriptions.map(subscription => sendNotification(subscription, payload))
    )
  } catch (err) {
    console.log('[webPush] error sending user notification: ', err)
  }
}

export async function replyToSubscription (subscriptionId, notification) {
  try {
    const payload = createPayload(notification)
    const subscription = await models.pushSubscription.findUnique({ where: { id: subscriptionId } })
    await sendNotification(subscription, payload)
  } catch (err) {
    console.log('[webPush] error sending subscription reply: ', err)
  }
}
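A minimal sketch of calling sendUserNotification; userId and comment are assumptions. The tag 'MENTION' routes through createUserFilter to users with noteMentions enabled, and item is rewritten into data.url by createItemUrl:

// hypothetical caller of the exported helper above
await sendUserNotification(userId, {
  title: 'you were mentioned',
  body: comment.text, // markdown is stripped by createPayload
  tag: 'MENTION',
  item: { id: comment.id }
})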
207 awards.csv
@@ -1,207 +0,0 @@
name,type,pr id,issue ids,difficulty,priority,changes requested,notes,amount,receive method,date paid
jp30566347,pr,#898,#680,good-first-issue,,,,20k,jpmelanson@getalby.com,2024-03-16
NEEDcreations,issue,#898,#680,good-first-issue,,,,2k,NEEDcreations@stacker.news,2024-03-16
SatsAllDay,docs,#925,,,,,typo,100,weareallsatoshi@getalby.com,2024-03-16
SatsAllDay,issue,#933,#928,medium,,,,25k,weareallsatoshi@getalby.com,2024-03-18
SatsAllDay,code review,#933,#928,medium,,,,25k,weareallsatoshi@getalby.com,2024-03-18
SatsAllDay,pr,#942,#941,good-first-issue,,,,20k,weareallsatoshi@getalby.com,2024-03-20
SatsAllDay,pr,#954,#926,easy,,,,100k,weareallsatoshi@getalby.com,2024-03-23
SatsAllDay,pr,#956,,good-first-issue,,,,22k,weareallsatoshi@getalby.com,2024-03-23
cointastical,issue,#960,#735,good-first-issue,,,,2k,cointastical@stacker.news,2024-03-24
SatsAllDay,pr,#960,#735,good-first-issue,,,,20k,weareallsatoshi@getalby.com,2024-03-24
cointastical,issue,,#932,,,,,10k,cointastical@stacker.news,2024-03-25
SatsAllDay,pr,#955,#901,good-first-issue,,,,20k,weareallsatoshi@getalby.com,2024-03-25
SatsAllDay,issue,#964,#959,easy,,,,10k,weareallsatoshi@getalby.com,2024-03-25
SatsAllDay,code review,#964,#959,easy,,,,10k,weareallsatoshi@getalby.com,2024-03-25
AustinKelsay,pr,#970,,,,,,20k,bitcoinplebdev@stacker.news,2024-03-25
felipebueno,pr,#948,,,,,,100k,felipe@stacker.news,2024-03-26
benalleng,pr,#972,#923,good-first-issue,,,,20k,BenAllenG@stacker.news,2024-03-26
SatsAllDay,issue,#972,#923,good-first-issue,,,,2k,weareallsatoshi@getalby.com,2024-03-26
felipebueno,pr,#974,#884,good-first-issue,,,,20k,felipe@stacker.news,2024-03-26
h0dlr,issue,#974,#884,good-first-issue,,,,2k,HODLR@stacker.news,2024-04-04
benalleng,pr,#975,,,,,,20k,BenAllenG@stacker.news,2024-03-26
SatsAllDay,security,#980,GHSA-qg4g-m4xq-695p,,,,,100k,weareallsatoshi@getalby.com,2024-03-28
SatsAllDay,code review,#980,GHSA-qg4g-m4xq-695p,medium,,,,25k,weareallsatoshi@getalby.com,2024-03-28
Darth-Coin,issue,#1009,#1002,easy,,,,10k,darthcoin@stacker.news,2024-04-04
atomantic,issue,#1009,#679,medium,high,,,50k,antic@stacker.news,2024-04-04
aniskhalfallah,pr,#1001,#976,good-first-issue,,,,20k,aniskhalfallah@stacker.news,2024-04-04
SatsAllDay,pr,#944,#1000,medium,,,,250k,weareallsatoshi@getalby.com,2024-04-04
SatsAllDay,issue,#944,#1000,medium,,,,25k,weareallsatoshi@getalby.com,2024-04-04
SatsAllDay,pr,#989,#984,medium,,,,250k,weareallsatoshi@getalby.com,2024-04-04
SatsAllDay,issue,#989,#984,medium,,,,25k,weareallsatoshi@getalby.com,2024-04-04
SouthKoreaLN,pr,#1015,#1010,good-first-issue,,,,20k,south_korea_ln@stacker.news,2024-04-04
SatsAllDay,issue,#1015,#1010,good-first-issue,,,,20k,weareallsatoshi@getalby.com,2024-04-04
jp30566347,pr,#991,#718,good-first-issue,,,,20k,jpmelanson@getalby.com,2024-04-04
benalleng,helpfulness,#1015,#1010,good-first-issue,,,,2k,BenAllenG@stacker.news,2024-04-04
felipebueno,pr,#1012,,,,,,20k,felipe@stacker.news,2024-04-04
abhiShandy,helpfulness,#1018,#1006,good-first-issue,,,identified problem,2k,bolt11,2024-04-04
benalleng,issue,#1018,#1006,good-first-issue,,,,2k,benalleng@mutiny.plus,2024-04-28
benalleng,issue,#1011,#993,easy,high,,,20k,benalleng@mutiny.plus,2024-04-28
benalleng,pr,#1011,#993,easy,high,,tortured them,200k,benalleng@mutiny.plus,2024-04-28
abhiShandy,pr,#1031,#908,good-first-issue,,1,,18k,abhishandy@stacker.news,2024-04-12
SatsAllDay,issue,#1031,#908,good-first-issue,,1,,1.8k,weareallsatoshi@getalby.com,2024-04-12
SatsAllDay,pr,#1034,#934,medium,,,,250k,weareallsatoshi@getalby.com,2024-04-12
SatsAllDay,issue,#1034,#934,medium,,,,25k,weareallsatoshi@getalby.com,2024-04-12
benalleng,pr,#1037,#1036,easy,,1,,90k,benalleng@mutiny.plus,2024-04-28
SatsAllDay,pr,#1038,#1033,easy,,,,100k,weareallsatoshi@getalby.com,2024-04-12
SatsAllDay,issue,#1038,#1033,easy,,,,10k,weareallsatoshi@getalby.com,2024-04-12
felipebueno,pr,#1043,,easy,,,,100k,felipe@stacker.news,2024-04-12
benalleng,pr,#1050,,good-first-issue,,,,20k,benalleng@mutiny.plus,2024-04-28
jp30566347,pr,#1055,#771,medium,,,extra mile,300k,jpmelanson@getalby.com,2024-04-12
benalleng,helpfulness,#1063,202,medium,,,did much of the legwork in another pr,100k,benalleng@mutiny.plus,2024-04-28
benalleng,code review,#1063,202,medium,,,,25k,benalleng@mutiny.plus,2024-04-28
benalleng,pr,#1066,#1060,good-first-issue,,,,20k,benalleng@mutiny.plus,2024-04-28
benalleng,pr,#1068,#1067,good-first-issue,,,,20k,benalleng@mutiny.plus,2024-04-28
abhiShandy,helpfulness,#1068,#1067,good-first-issue,,,,2k,abhishandy@stacker.news,2024-04-14
bumi,pr,#1076,,,,,,20k,bumi@getalby.com,2024-04-16
benalleng,pr,#1079,#977,easy,,,,100k,benalleng@mutiny.plus,2024-04-28
felipebueno,pr,#1024,,,,,,20k,felipe@stacker.news,2024-04-21
SatsAllDay,pr,#1075,#1064,medium-hard,,1,,450k,weareallsatoshi@getalby.com,2024-04-21
aChrisYouKnow,issue,#1075,#1064,medium-hard,,1,,45k,ACYK@stacker.news,2024-04-22
SatsAllDay,pr,#1098,,,,,,20k,weareallsatoshi@getalby.com,2024-04-21
SatsAllDay,pr,#1095,#728,medium,,,,250k,weareallsatoshi@getalby.com,2024-04-21
benalleng,pr,#1090,#1077,good-first-issue,,,,20k,benalleng@mutiny.plus,2024-04-28
benalleng,helpfulness,#1087,,,,,informed fix,20k,benalleng@mutiny.plus,2024-04-28
benalleng,pr,#1099,#794,medium-hard,,,refined in a commit,450k,benalleng@mutiny.plus,2024-04-28
dillon-co,helpfulness,#1099,#794,medium-hard,,,#988 did much of the legwork,225k,bolt11,2024-04-29
abhiShandy,pr,#1119,#1110,good-first-issue,,,,20k,abhishandy@stacker.news,2024-04-28
felipebueno,issue,#1119,#1110,good-first-issue,,,,2k,felipe@stacker.news,2024-04-28
SatsAllDay,pr,#1111,#622,medium-hard,,,,500k,weareallsatoshi@getalby.com,2024-05-04
itsrealfake,pr,#1130,#622,good-first-issue,,,,20k,itsrealfake2@stacker.news,2024-05-06
Darth-Coin,issue,#1130,#622,easy,,,,2k,darthcoin@stacker.news,2024-05-04
benalleng,pr,#1137,#1125,good-first-issue,,,,20k,benalleng@mutiny.plus,2024-05-04
SatsAllDay,issue,#1137,#1125,good-first-issue,,,,2k,weareallsatoshi@getalby.com,2024-05-04
SatsAllDay,helpfulness,#1137,#1125,good-first-issue,,,,2k,weareallsatoshi@getalby.com,2024-05-04
itsrealfake,pr,#1138,#995,good-first-issue,,,,20k,itsrealfake2@stacker.news,2024-05-06
SouthKoreaLN,issue,#1138,#995,good-first-issue,,,,2k,south_korea_ln@stacker.news,2024-05-04
mateusdeap,helpfulness,#1138,#995,good-first-issue,,,,1k,mateusdeap@stacker.news,2024-05-17
felipebueno,pr,#1094,,,,2,,80k,felipebueno@getalby.com,2024-05-06
benalleng,helpfulness,#1127,#927,good-first-issue,,,,2k,benalleng@mutiny.plus,2024-05-04
itsrealfake,pr,#1135,#1016,good-first-issue,,,nonideal solution,10k,itsrealfake2@stacker.news,2024-05-06
SatsAllDay,issue,#1135,#1016,good-first-issue,,,,1k,weareallsatoshi@getalby.com,2024-05-04
s373nZ,issue,#1136,#1107,medium,high,,,50k,se7enz@minibits.cash,2024-05-05
abhiShandy,pr,#1123,#624,good-first-issue,,,,20k,abhishandy@stacker.news,2024-05-17
hkarani,pr,#1147,#1143,good-first-issue,,,,20k,asterisk32@stacker.news,2024-05-17
benalleng,helpfulness,#1147,#1143,good-first-issue,,,,2k,benalleng@mutiny.plus,2024-05-17
abhiShandy,pr,#1157,#1148,good-first-issue,,,,20k,abhishandy@stacker.news,2024-05-17
SatsAllDay,issue,#1157,#1148,good-first-issue,,,,2k,weareallsatoshi@getalby.com,2024-05-17
abhiShandy,pr,#1158,#1139,good-first-issue,,,,20k,abhishandy@stacker.news,2024-05-17
SatsAllDay,issue,#1158,#1139,good-first-issue,,,,2k,weareallsatoshi@getalby.com,2024-05-17
SatsAllDay,pr,#1145,#717,medium,,,,250k,weareallsatoshi@getalby.com,2024-05-17
benalleng,pr,#1129,#491,good-first-issue,,,paid for advice out of band,20k,benalleng@mutiny.plus,2024-05-17
benalleng,pr,#1129,#1045,easy,,2,post-humously upgraded to easy,80k,benalleng@mutiny.plus,2024-05-17
SouthKoreaLN,issue,#1129,#1045,easy,,,,8k,south_korea_ln@stacker.news,2024-05-17
tsmith123,pr,#1171,#1124,good-first-issue,,,bonus for refactor,40k,stickymarch60@walletofsatoshi.com,2024-05-17
SatsAllDay,issue,#1171,#1124,good-first-issue,,,,4k,weareallsatoshi@getalby.com,2024-05-17
felipebueno,pr,#1162,,,,2,,200k,felipebueno@getalby.com,2024-05-17
Radentor,issue,,#1177,easy,,,,10k,Radentor@stacker.news,2024-05-17
tsmith123,pr,#1179,#790,good-first-issue,high,,,40k,stickymarch60@walletofsatoshi.com,2024-05-17
SatsAllDay,pr,#1159,#510,medium-hard,,1,,450k,weareallsatoshi@getalby.com,2024-05-22
Darth-Coin,issue,#1159,#510,medium-hard,,1,,45k,darthcoin@stacker.news,2024-05-22
OneOneSeven117,issue,#1187,#1164,easy,,,,10k,OneOneSeven@stacker.news,2024-05-23
tsmith123,pr,#1191,#134,medium,,,required small fix,225k,stickymarch60@walletofsatoshi.com,2024-05-28
benalleng,helpfulness,#1191,#134,medium,,,did most of this before,100k,benalleng@mutiny.plus,2024-05-28
cointastical,issue,#1191,#134,medium,,,,22k,cointastical@stacker.news,2024-05-28
kravhen,pr,#1198,#1180,good-first-issue,,,required linting,18k,nichro@getalby.com,2024-05-28
OneOneSeven117,issue,#1198,#1180,good-first-issue,,,required linting,2k,OneOneSeven@stacker.news,2024-05-28
tsmith123,pr,#1207,#837,easy,high,1,,180k,stickymarch60@walletofsatoshi.com,2024-05-31
SatsAllDay,pr,#1214,#1199,good-first-issue,,,,20k,weareallsatoshi@getalby.com,2024-06-03
SatsAllDay,pr,#1197,#1192,medium,,,,250k,weareallsatoshi@getalby.com,2024-06-03
tsmith123,pr,#1216,#1213,easy,,1,,90k,stickymarch60@walletofsatoshi.com,2024-06-03
tsmith123,pr,#1231,#1230,good-first-issue,,,,20k,stickymarch60@walletofsatoshi.com,2024-06-13
felipebueno,issue,#1231,#1230,good-first-issue,,,,2k,felipebueno@getalby.com,2024-06-13
tsmith123,pr,#1223,#107,medium,,2,10k bonus for our slowness,210k,stickymarch60@walletofsatoshi.com,2024-06-22
cointastical,issue,#1223,#107,medium,,2,,20k,cointastical@stacker.news,2024-06-22
kravhen,pr,#1215,#253,medium,,2,upgraded to medium,200k,nichro@getalby.com,2024-06-28
dillon-co,pr,#1140,#633,hard,,,requested advance,500k,bolt11,2024-07-02
takitakitanana,issue,,#1257,good-first-issue,,,,2k,takitakitanana@stacker.news,2024-07-11
SatsAllDay,pr,#1263,#1112,medium,,,1,225k,weareallsatoshi@getalby.com,2024-07-31
OneOneSeven117,issue,#1272,#1268,easy,,,,10k,OneOneSeven@stacker.news,2024-07-31
aniskhalfallah,pr,#1264,#1226,good-first-issue,,,,20k,aniskhalfallah@stacker.news,2024-07-31
Gudnessuche,issue,#1264,#1226,good-first-issue,,,,2k,everythingsatoshi@getalby.com,2024-08-10
aniskhalfallah,pr,#1289,,easy,,,,100k,aniskhalfallah@blink.sv,2024-08-12
riccardobl,pr,#1293,#1142,medium,high,,,500k,rblb@getalby.com,2024-08-18
tsmith123,pr,#1306,#832,medium,,,,250k,stickymarch60@walletofsatoshi.com,2024-08-20
riccardobl,pr,#1311,#864,medium,high,,pending unrelated refactor,500k,rblb@getalby.com,2024-08-27
brugeman,issue,#1311,#864,medium,high,,,50k,brugeman@stacker.news,2024-08-27
riccardobl,pr,#1342,#1141,hard,high,,pending unrelated rearchitecture,1m,rblb@getalby.com,2024-09-09
SatsAllDay,issue,#1368,#1331,medium,,,,25k,weareallsatoshi@getalby.com,2024-09-16
benalleng,helpfulness,#1368,#1170,medium,,,did a lot of it in #1175,25k,BenAllenG@stacker.news,2024-09-16
humble-GOAT,issue,#1412,#1407,good-first-issue,,,,2k,humble_GOAT@stacker.news,2024-09-18
felipebueno,issue,#1425,#986,medium,,,,25k,felipebueno@getalby.com,2024-09-26
riccardobl,pr,#1373,#1304,hard,high,,,2m,bolt11,2024-10-01
tsmith123,pr,#1428,#1397,easy,,1,superceded,90k,stickymarch60@walletofsatoshi.com,2024-10-02
toyota-corolla0,pr,#1449,,good-first-issue,,,,20k,toyota_corolla0@stacker.news,2024-10-02
toyota-corolla0,pr,#1455,#1437,good-first-issue,,,,20k,toyota_corolla0@stacker.news,2024-10-02
SouthKoreaLN,issue,#1436,,easy,,,,10k,south_korea_ln@stacker.news,2024-10-02
TonyGiorgio,issue,#1462,,easy,urgent,,,30k,TonyGiorgio@stacker.news,2024-10-07
hkarani,issue,#1369,#1458,good-first-issue,,,,2k,asterisk32@stacker.news,2024-10-21
toyota-corolla0,pr,#1369,#1458,good-first-issue,,,,20k,toyota_corolla0@stacker.news,2024-10-20
Soxasora,pr,#1593,#1569,good-first-issue,,,,20k,soxasora@blink.sv,2024-11-19
Soxasora,pr,#1599,#1258,medium,,,,250k,soxasora@blink.sv,2024-11-19
aegroto,pr,#1585,#1522,easy,high,,1,180k,aegroto@blink.sv,2024-11-19
sig47,issue,#1585,#1522,easy,high,,1,18k,siggy47@stacker.news,2024-11-19
aegroto,pr,#1583,#1572,easy,,,2,80k,aegroto@blink.sv,2024-11-19
Soxasora,pr,#1617,#1616,easy,,,,100k,soxasora@blink.sv,2024-11-20
Soxasora,issue,#1617,#1616,easy,,,,10k,soxasora@blink.sv,2024-11-20
AndreaDiazCorreia,helpfulness,#1605,#1566,good-first-issue,,,tried in pr,2k,andrea@lawallet.ar,2024-11-20
Soxasora,pr,#1653,,medium,,,determined unecessary,250k,soxasora@blink.sv,2024-12-07
Soxasora,pr,#1659,#1657,easy,,,,100k,soxasora@blink.sv,2024-12-07
sig47,issue,#1659,#1657,easy,,,,10k,siggy47@stacker.news,2024-12-07
Gudnessuche,issue,#1662,#1661,good-first-issue,,,,2k,everythingsatoshi@getalby.com,2024-12-07
aegroto,pr,#1589,#1586,easy,,,,100k,aegroto@blink.sv,2024-12-07
aegroto,issue,#1589,#1586,easy,,,,10k,aegroto@blink.sv,2024-12-07
aegroto,pr,#1619,#914,easy,,,,100k,aegroto@blink.sv,2024-12-07
felipebueno,pr,#1620,,medium,,,1,225k,felipebueno@getalby.com,2024-12-09
Soxasora,pr,#1647,#1645,easy,,,,100k,soxasora@blink.sv,2024-12-07
Soxasora,pr,#1667,#1568,easy,,,,100k,soxasora@blink.sv,2024-12-07
aegroto,pr,#1633,#1471,easy,,,1,90k,aegroto@blink.sv,2024-12-07
Darth-Coin,issue,#1649,#1421,medium,,,,25k,darthcoin@stacker.news,2024-12-07
Soxasora,pr,#1685,,medium,,,,250k,soxasora@blink.sv,2024-12-07
aegroto,pr,#1606,#1242,medium,,,,250k,aegroto@blink.sv,2024-12-07
sfr0xyz,issue,#1696,#1196,good-first-issue,,,,2k,sefiro@getalby.com,2024-12-10
Soxasora,pr,#1794,#756,hard,urgent,,includes #411,3m,bolt11,2025-01-09
Soxasora,pr,#1786,#363,easy,,,,100k,bolt11,2025-01-09
Soxasora,pr,#1768,#1186,medium-hard,,,,500k,bolt11,2025-01-09
Soxasora,pr,#1750,#1035,medium,,,,250k,bolt11,2025-01-09
SatsAllDay,issue,#1794,#411,hard,high,,,200k,weareallsatoshi@getalby.com,2025-01-20
felipebueno,issue,#1786,#363,easy,,,,10k,felipebueno@blink.sv,2025-01-27
cyphercosmo,pr,#1745,#1648,good-first-issue,,,2,16k,cyphercosmo@getalby.com,2025-01-27
Radentor,issue,#1768,#1186,medium-hard,,,,50k,revisedbird84@walletofsatoshi.com,2025-01-27
Soxasora,pr,#1841,#1692,good-first-issue,,,,20k,soxasora@blink.sv,2025-01-27
Soxasora,pr,#1839,#1790,easy,,,1,90k,soxasora@blink.sv,2025-01-27
Soxasora,pr,#1820,#1819,easy,,,1,90k,soxasora@blink.sv,2025-01-27
SatsAllDay,issue,#1820,#1819,easy,,,1,9k,weareallsatoshi@getalby.com,2025-01-27
Soxasora,pr,#1814,#1736,easy,,,,100k,soxasora@blink.sv,2025-01-27
jason-me,pr,#1857,,easy,,,,100k,rrbtc@vlt.ge,2025-02-08
ed-kung,pr,#1901,#323,good-first-issue,,,,20k,simplestacker@getalby.com,2025-02-14
Scroogey-SN,pr,#1911,#1905,good-first-issue,,,1,18k,Scroogey@coinos.io,2025-03-10
Scroogey-SN,pr,#1928,#1924,good-first-issue,,,,20k,Scroogey@coinos.io,2025-03-10
dtonon,issue,#1928,#1924,good-first-issue,,,,2k,???,???
ed-kung,pr,#1926,#1914,medium-hard,,,,500k,simplestacker@getalby.com,2025-03-10
ed-kung,issue,#1926,#1914,medium-hard,,,,50k,simplestacker@getalby.com,2025-03-10
ed-kung,pr,#1926,#1927,easy,,,,100k,simplestacker@getalby.com,2025-03-10
ed-kung,issue,#1926,#1927,easy,,,,10k,simplestacker@getalby.com,2025-03-10
ed-kung,issue,#1913,#1890,good-first-issue,,,,2k,simplestacker@getalby.com,2025-03-10
Scroogey-SN,pr,#1930,#1167,good-first-issue,,,,20k,Scroogey@coinos.io,2025-03-10
itsrealfake,issue,#1930,#1167,good-first-issue,,,,2k,smallimagination100035@getalby.com,???
Scroogey-SN,pr,#1948,#1849,medium,urgent,,,750k,Scroogey@coinos.io,2025-03-10
felipebueno,issue,#1947,#1945,good-first-issue,,,,2k,felipebueno@blink.sv,2025-03-10
ed-kung,pr,#1952,#1951,easy,,,,100k,simplestacker@getalby.com,2025-03-10
ed-kung,issue,#1952,#1951,easy,,,,10k,simplestacker@getalby.com,2025-03-10
Scroogey-SN,pr,#1973,#1959,good-first-issue,,,,20k,Scroogey@coinos.io,???
benthecarman,issue,#1953,#1950,good-first-issue,,,,2k,???,???
ed-kung,pr,#2012,#2004,easy,,,,100k,simplestacker@getalby.com,???
ed-kung,issue,#2012,#2004,easy,,,,10k,simplestacker@getalby.com,???
ed-kung,pr,#1993,#1982,good-first-issue,,,,20k,simplestacker@getalby.com,???
rideandslide,issue,#1993,#1982,good-first-issue,,,,2k,???,???
ed-kung,pr,#1972,#1254,good-first-issue,,,,20k,simplestacker@getalby.com,???
SatsAllDay,issue,#1972,#1254,good-first-issue,,,,2k,weareallsatoshi@getalby.com,???
ed-kung,pr,#1962,#1343,good-first-issue,,,,20k,simplestacker@getalby.com,???
ed-kung,pr,#1962,#1217,good-first-issue,,,,20k,simplestacker@getalby.com,???
ed-kung,pr,#1962,#866,easy,,,,100k,simplestacker@getalby.com,???
felipebueno,issue,#1962,#866,easy,,,,10k,felipebueno@blink.sv,???
cointastical,issue,#1962,#1217,good-first-issue,,,,2k,cointastical@stacker.news,???
Scroogey-SN,pr,#1975,#1964,good-first-issue,,,,20k,Scroogey@coinos.io,???
rideandslide,issue,#1986,#1985,good-first-issue,,,,2k,???,???
kristapsk,issue,#1976,#841,good-first-issue,,,,2k,???,???
@@ -1 +0,0 @@
-node_modules
capture/Dockerfile
@@ -1,17 +0,0 @@
-FROM ghcr.io/puppeteer/puppeteer:21.10.0
-
-EXPOSE 5678
-
-USER root
-WORKDIR /home/pptruser
-
-ENV PUPPETEER_SKIP_DOWNLOAD true
-COPY ./package*.json ./
-RUN npm ci
-
-COPY . .
-
-ADD https://deb.debian.org/debian/pool/main/f/fonts-noto-color-emoji/fonts-noto-color-emoji_0~20200916-1_all.deb fonts-noto-color-emoji.deb
-RUN dpkg -i fonts-noto-color-emoji.deb
-CMD [ "node", "index.js" ]
-USER pptruser
116 capture/index.js
@@ -1,116 +0,0 @@
-import express from 'express'
-import puppeteer from 'puppeteer'
-
-const captureUrl = process.env.CAPTURE_URL || 'http://host.docker.internal:3000/'
-const port = process.env.PORT || 5678
-const maxPages = Number(process.env.MAX_PAGES) || 5
-const timeout = Number(process.env.TIMEOUT) || 10000
-const cache = process.env.CACHE || 60000
-const width = process.env.WIDTH || 600
-const height = process.env.HEIGHT || 315
-const deviceScaleFactor = process.env.SCALE_FACTOR || 2
-// from https://www.bannerbear.com/blog/ways-to-speed-up-puppeteer-screenshots/
-const args = [
-  '--autoplay-policy=user-gesture-required',
-  '--disable-background-networking',
-  '--disable-background-timer-throttling',
-  '--disable-backgrounding-occluded-windows',
-  '--disable-breakpad',
-  '--disable-client-side-phishing-detection',
-  '--disable-component-update',
-  '--disable-default-apps',
-  '--disable-dev-shm-usage',
-  '--disable-domain-reliability',
-  '--disable-extensions',
-  '--disable-features=AudioServiceOutOfProcess',
-  '--disable-hang-monitor',
-  '--disable-ipc-flooding-protection',
-  '--disable-notifications',
-  '--disable-offer-store-unmasked-wallet-cards',
-  '--disable-popup-blocking',
-  '--disable-print-preview',
-  '--disable-prompt-on-repost',
-  '--disable-renderer-backgrounding',
-  '--disable-setuid-sandbox',
-  '--disable-speech-api',
-  '--disable-sync',
-  '--hide-scrollbars',
-  '--ignore-gpu-blacklist',
-  '--metrics-recording-only',
-  '--mute-audio',
-  '--no-default-browser-check',
-  '--no-first-run',
-  '--no-pings',
-  '--no-sandbox',
-  '--no-zygote',
-  '--password-store=basic',
-  '--use-gl=swiftshader',
-  '--use-mock-keychain'
-]
-
-let browser
-const app = express()
-
-app.get('/health', (req, res) => {
-  res.status(200).end()
-})
-
-app.get('/*', async (req, res) => {
-  const url = new URL(req.originalUrl, captureUrl)
-  const timeLabel = `${Date.now()}-${url.href}`
-
-  const urlParams = new URLSearchParams(url.search)
-  const commentId = urlParams.get('commentId')
-
-  let page, pages
-
-  try {
-    console.time(timeLabel)
-    browser ||= await puppeteer.launch({
-      headless: 'new',
-      userDataDir: './data',
-      executablePath: 'google-chrome-stable',
-      args,
-      protocolTimeout: timeout,
-      defaultViewport: { width, height, deviceScaleFactor }
-    })
-
-    pages = (await browser.pages()).length
-    console.timeLog(timeLabel, 'capturing', 'current pages', pages)
-
-    // limit number of active pages
-    if (pages > maxPages + 1) {
-      console.timeLog(timeLabel, 'too many pages')
-      return res.writeHead(503, {
-        'Retry-After': 1
-      }).end()
-    }
-
-    page = await browser.newPage()
-    await page.emulateMediaFeatures([{ name: 'prefers-color-scheme', value: 'dark' }])
-    await page.goto(url.href, { waitUntil: 'load', timeout })
-    console.timeLog(timeLabel, 'page loaded')
-
-    if (commentId) {
-      console.timeLog(timeLabel, 'scrolling to comment')
-      await page.waitForSelector('.outline-it')
-      await new Promise((resolve, _reject) => setTimeout(resolve, 100))
-    }
-
-    const file = await page.screenshot({ type: 'png', captureBeyondViewport: false })
-    console.timeLog(timeLabel, 'screenshot complete')
-    res.setHeader('Content-Type', 'image/png')
-    res.setHeader('Cache-Control', `public, max-age=${cache}, immutable, stale-while-revalidate=${cache * 24}, stale-if-error=${cache * 24}`)
-    return res.status(200).end(file)
-  } catch (err) {
-    console.timeLog(timeLabel, 'error', err)
-    return res.status(500).end()
-  } finally {
-    console.timeEnd(timeLabel, 'pages at start', pages)
-    page?.close().catch(console.error)
-  }
-})
-
-app.listen(port, () =>
-  console.log(`Screenshot service listening on http://:${port}`)
-)
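For context, the service above turns any GET path into a PNG screenshot of the same path on CAPTURE_URL, and answers 503 with Retry-After when too many pages are open. A minimal client sketch, assuming the service runs locally on its default port 5678; the item path and commentId value below are made-up examples, not taken from the diff:

// sketch: request a dark-mode capture from the screenshot service above,
// assuming it listens on http://localhost:5678 (its default port)
import { writeFile } from 'node:fs/promises'

const res = await fetch('http://localhost:5678/items/1?commentId=2')
if (!res.ok) throw new Error(`capture failed: ${res.status}`) // 503 means too many open pages; retry after 1s
await writeFile('capture.png', Buffer.from(await res.arrayBuffer()))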
2637 capture/package-lock.json (generated)
File diff suppressed because it is too large
@@ -1,16 +0,0 @@
-{
-  "name": "capture",
-  "version": "1.0.0",
-  "description": "",
-  "main": "index.js",
-  "scripts": {
-    "test": "echo \"Error: no test specified\" && exit 1"
-  },
-  "author": "",
-  "license": "ISC",
-  "dependencies": {
-    "express": "^4.18.2",
-    "puppeteer": "^20.8.2"
-  },
-  "type": "module"
-}
@@ -1,5 +0,0 @@
-# use vectorim/element-web as base but copy config.json to /app/config.json
-
-FROM vectorim/element-web:latest
-
-COPY config.json /app/config.json
@@ -1,41 +0,0 @@
-{
-  "default_server_name": "https://sndev.team",
-  "default_server_config": {
-    "m.homeserver": {
-      "base_url": "https://sndev.team"
-    },
-    "m.identity_server": {
-      "base_url": "https://sndev.team"
-    }
-  },
-  "brand": "chat.sndev.team",
-  "permalink_prefix": "https://chat.sndev.team",
-  "show_labs_settings": false,
-  "mobile_guide_toast": false,
-  "default_country_code": "US",
-  "disable_3pid_login": true,
-  "disable_custom_urls": true,
-  "disable_guests": true,
-  "disable_login_language_selector": true,
-  "room_directory": {
-    "servers": ["sndev.team"]
-  },
-  "enable_presence_by_hs_url": {
-    "https://matrix.org": false,
-    "https://matrix-client.matrix.org": false
-  },
-  "terms_and_conditions_links": [
-    {
-      "url": "https://element.io/privacy",
-      "text": "Privacy Policy"
-    },
-    {
-      "url": "https://element.io/cookie-policy",
-      "text": "Cookie Policy"
-    }
-  ],
-  "privacy_policy_url": "https://element.io/cookie-policy",
-  "setting_defaults": {
-    "RustCrypto.staged_rollout_percent": 10
-  }
-}
components/accordian-item.js
@@ -1,24 +1,14 @@
 import Accordion from 'react-bootstrap/Accordion'
 import AccordionContext from 'react-bootstrap/AccordionContext'
 import { useAccordionButton } from 'react-bootstrap/AccordionButton'
-import ArrowRight from '@/svgs/arrow-right-s-fill.svg'
-import ArrowDown from '@/svgs/arrow-down-s-fill.svg'
-import { useContext, useEffect, useState } from 'react'
-import classNames from 'classnames'
+import ArrowRight from '../svgs/arrow-right-s-fill.svg'
+import ArrowDown from '../svgs/arrow-down-s-fill.svg'
+import { useContext } from 'react'
 
-const KEY_ID = '0'
-
-function ContextAwareToggle ({ children, headerColor = 'var(--theme-grey)', eventKey, show }) {
+function ContextAwareToggle ({ children, headerColor = 'var(--theme-grey)', eventKey }) {
   const { activeEventKey } = useContext(AccordionContext)
   const decoratedOnClick = useAccordionButton(eventKey)
 
-  useEffect(() => {
-    // if we want to show the accordian and it's not open, open it
-    if (show && activeEventKey !== eventKey) {
-      decoratedOnClick()
-    }
-  }, [show])
-
   const isCurrentEventKey = activeEventKey === eventKey
 
   return (
@@ -31,21 +21,11 @@ function ContextAwareToggle ({ children, headerColor = 'var(--theme-grey)', even
   )
 }
 
-export default function AccordianItem ({ header, body, className, headerColor = 'var(--theme-grey)', show }) {
-  const [activeKey, setActiveKey] = useState()
-
-  useEffect(() => {
-    setActiveKey(show ? KEY_ID : null)
-  }, [show])
-
-  const handleOnSelect = () => {
-    setActiveKey(activeKey === KEY_ID ? null : KEY_ID)
-  }
-
+export default function AccordianItem ({ header, body, headerColor = 'var(--theme-grey)', show }) {
   return (
-    <Accordion defaultActiveKey={activeKey} activeKey={activeKey} onSelect={handleOnSelect}>
-      <ContextAwareToggle show={show} eventKey={KEY_ID} headerColor={headerColor}><div style={{ color: headerColor }}>{header}</div></ContextAwareToggle>
-      <Accordion.Collapse eventKey={KEY_ID} className={classNames('mt-2', className)}>
+    <Accordion defaultActiveKey={show ? '0' : undefined}>
+      <ContextAwareToggle eventKey='0'><div style={{ color: headerColor }}>{header}</div></ContextAwareToggle>
+      <Accordion.Collapse eventKey='0' className='mt-2'>
        <div>{body}</div>
       </Accordion.Collapse>
     </Accordion>
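Apart from the className prop that only the left side accepts, both versions of this component keep the same contract, so a caller sketch looks the same against either (the prop values here are hypothetical):

// sketch: typical AccordianItem usage, valid against either version above
<AccordianItem
  header='options'
  headerColor='var(--theme-grey)'
  body={<div>advanced form fields go here</div>}
  show
/>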
components/account.js
@@ -1,176 +1,141 @@
-import { createContext, useCallback, useContext, useEffect, useMemo, useState } from 'react'
+import { createContext, useCallback, useContext, useEffect, useState } from 'react'
 import { useRouter } from 'next/router'
-import * as cookie from 'cookie'
-import { useMe } from '@/components/me'
-import { USER_ID, SSR } from '@/lib/constants'
-import { USER } from '@/fragments/users'
-import { useQuery } from '@apollo/client'
-import { UserListRow } from '@/components/user-list'
-import Link from 'next/link'
-import AddIcon from '@/svgs/add-fill.svg'
-import { MultiAuthErrorBanner } from '@/components/banners'
-import { cookieOptions, MULTI_AUTH_ANON, MULTI_AUTH_LIST, MULTI_AUTH_POINTER } from '@/lib/auth'
+import cookie from 'cookie'
+import { useMe } from './me'
+import { ANON_USER_ID, SSR } from '../lib/constants'
+import { USER } from '../fragments/users'
+import { useApolloClient, useQuery } from '@apollo/client'
+import { UserListRow } from './user-list'
 
 const AccountContext = createContext()
 
-const CHECK_ERRORS_INTERVAL_MS = 5_000
-
 const b64Decode = str => Buffer.from(str, 'base64').toString('utf-8')
+const b64Encode = obj => Buffer.from(JSON.stringify(obj)).toString('base64')
+
+const secureCookie = cookie => {
+  return window.location.protocol === 'https:' ? cookie + '; Secure' : cookie
+}
 
 export const AccountProvider = ({ children }) => {
+  const { me } = useMe()
   const [accounts, setAccounts] = useState([])
-  const [meAnon, setMeAnon] = useState(true)
-  const [errors, setErrors] = useState([])
+  const [isAnon, setIsAnon] = useState(true)
 
   const updateAccountsFromCookie = useCallback(() => {
-    const { [MULTI_AUTH_LIST]: listCookie } = cookie.parse(document.cookie)
-    const accounts = listCookie
-      ? JSON.parse(b64Decode(listCookie))
-      : []
-    setAccounts(accounts)
-  }, [])
-
-  const nextAccount = useCallback(async () => {
-    const { status } = await fetch('/api/next-account', { credentials: 'include' })
-    // if status is 302, this means the server was able to switch us to the next available account
-    // and the current account was simply removed from the list of available accounts including the corresponding JWT.
-    const switchSuccess = status === 302
-    if (switchSuccess) updateAccountsFromCookie()
-    return switchSuccess
-  }, [updateAccountsFromCookie])
-
-  const checkErrors = useCallback(() => {
-    const {
-      [MULTI_AUTH_LIST]: listCookie,
-      [MULTI_AUTH_POINTER]: pointerCookie
-    } = cookie.parse(document.cookie)
-
-    const errors = []
-
-    if (!listCookie) errors.push(`${MULTI_AUTH_LIST} cookie not found`)
-    if (!pointerCookie) errors.push(`${MULTI_AUTH_POINTER} cookie not found`)
-
-    setErrors(errors)
-  }, [])
+    try {
+      const { multi_auth: multiAuthCookie } = cookie.parse(document.cookie)
+      const accounts = multiAuthCookie
+        ? JSON.parse(b64Decode(multiAuthCookie))
+        : me ? [{ id: Number(me.id), name: me.name, photoId: me.photoId }] : []
+      setAccounts(accounts)
+      // required for backwards compatibility: sync cookie with accounts if no multi auth cookie exists
+      // this is the case for sessions that existed before we deployed account switching
+      if (!multiAuthCookie && !!me) {
+        document.cookie = secureCookie(`multi_auth=${b64Encode(accounts)}; Path=/`)
+      }
+    } catch (err) {
+      console.error('error parsing cookies:', err)
+    }
+  }, [setAccounts])
 
   useEffect(() => {
     if (SSR) return
-
     updateAccountsFromCookie()
+  }, [])
 
-    const { [MULTI_AUTH_POINTER]: pointerCookie } = cookie.parse(document.cookie)
-    setMeAnon(pointerCookie === 'anonymous')
-
-    const interval = setInterval(checkErrors, CHECK_ERRORS_INTERVAL_MS)
-    return () => clearInterval(interval)
-  }, [updateAccountsFromCookie, checkErrors])
+  const addAccount = useCallback(user => {
+    setAccounts(accounts => [...accounts, user])
+  }, [setAccounts])
 
-  const value = useMemo(
-    () => ({
-      accounts,
-      meAnon,
-      setMeAnon,
-      nextAccount,
-      multiAuthErrors: errors
-    }),
-    [accounts, meAnon, setMeAnon, nextAccount])
-  return <AccountContext.Provider value={value}>{children}</AccountContext.Provider>
+  const removeAccount = useCallback(userId => {
+    setAccounts(accounts => accounts.filter(({ id }) => id !== userId))
+  }, [setAccounts])
+
+  const multiAuthSignout = useCallback(async () => {
+    // switch to next available account
+    const { status } = await fetch('/api/signout', { credentials: 'include' })
+    // if status is 201, this means the server was able to switch us to the next available account
+    // and the current account was simply removed from the list of available accounts including the corresponding JWT.
+    // -> update needed to sync state with cookies
+    if (status === 201) updateAccountsFromCookie()
+    return status
+  }, [updateAccountsFromCookie])
+
+  useEffect(() => {
+    // document not defined on server
+    if (SSR) return
+    const { 'multi_auth.user-id': multiAuthUserIdCookie } = cookie.parse(document.cookie)
+    setIsAnon(multiAuthUserIdCookie === 'anonymous')
+  }, [])
+
+  return <AccountContext.Provider value={{ accounts, addAccount, removeAccount, isAnon, setIsAnon, multiAuthSignout }}>{children}</AccountContext.Provider>
 }
 
 export const useAccounts = () => useContext(AccountContext)
 
 const AccountListRow = ({ account, ...props }) => {
-  const { meAnon, setMeAnon } = useAccounts()
+  const { isAnon, setIsAnon } = useAccounts()
   const { me, refreshMe } = useMe()
-  const anonRow = account.id === USER_ID.anon
-  const selected = (meAnon && anonRow) || Number(me?.id) === Number(account.id)
-  const router = useRouter()
+  const anonRow = account.id === ANON_USER_ID
+  const selected = (isAnon && anonRow) || Number(me?.id) === Number(account.id)
+  const client = useApolloClient()
 
   // fetch updated names and photo ids since they might have changed since we were issued the JWTs
-  const { data, error } = useQuery(USER,
+  const [name, setName] = useState(account.name)
+  const [photoId, setPhotoId] = useState(account.photoId)
+  useQuery(USER,
     {
-      variables: { id: account.id }
+      variables: { id: account.id },
+      onCompleted ({ user: { name, photoId } }) {
+        if (photoId) setPhotoId(photoId)
+        if (name) setName(name)
+      }
     }
   )
-  if (error) console.error(`query for user ${account.id} failed:`, error)
-
-  const name = data?.user?.name || account.name
-  const photoId = data?.user?.photoId || account.photoId
 
   const onClick = async (e) => {
     // prevent navigation
     e.preventDefault()
 
-    // update pointer cookie
-    const options = cookieOptions({ httpOnly: false })
-    document.cookie = cookie.serialize(MULTI_AUTH_POINTER, anonRow ? MULTI_AUTH_ANON : account.id, options)
-
     // update state
+    document.cookie = secureCookie(`multi_auth.user-id=${anonRow ? 'anonymous' : account.id}; Path=/`)
     if (anonRow) {
       // order is important to prevent flashes of no session
-      setMeAnon(true)
+      setIsAnon(true)
       await refreshMe()
     } else {
       await refreshMe()
       // order is important to prevent flashes of inconsistent data in switch account dialog
-      setMeAnon(account.id === USER_ID.anon)
+      setIsAnon(account.id === ANON_USER_ID)
     }
 
-    // reload whatever page we're on to avoid any bugs due to missing authorization etc.
-    router.reload()
+    await client.refetchQueries({ include: 'active' })
   }
 
   return (
     <div className='d-flex flex-row'>
-      <UserListRow
-        user={{ ...account, photoId, name }}
-        className='d-flex align-items-center me-2'
-        {...props}
-        onNymClick={onClick}
-        selected={selected}
-      />
+      <UserListRow user={{ ...account, photoId, name }} className='d-flex align-items-center me-2' {...props} onNymClick={onClick} />
+      {selected && <div className='text-muted fst-italic'>selected</div>}
     </div>
   )
 }
 
 export default function SwitchAccountList () {
-  const { accounts, multiAuthErrors } = useAccounts()
+  const { accounts } = useAccounts()
   const router = useRouter()
 
-  const hasError = multiAuthErrors.length > 0
-
-  if (hasError) {
-    return (
-      <>
-        <div className='my-2'>
-          <div className='d-flex flex-column flex-wrap mt-2 mb-3'>
-            <MultiAuthErrorBanner errors={multiAuthErrors} />
-          </div>
-        </div>
-      </>
-    )
+  const addAccount = () => {
+    router.push({
+      pathname: '/login',
+      query: { callbackUrl: window.location.origin + router.asPath, multiAuth: true }
+    })
   }
 
   // can't show hat since the streak is not included in the JWT payload
   return (
     <>
       <div className='my-2'>
-        <div className='d-flex flex-column flex-wrap mt-2 mb-3'>
         <h4 className='text-muted'>Accounts</h4>
-          <AccountListRow account={{ id: USER_ID.anon, name: 'anon' }} showHat={false} />
+        <div className='d-flex flex-column flex-wrap'>
+          <AccountListRow account={{ id: ANON_USER_ID, name: 'anon' }} showHat={false} />
           {
            accounts.map((account) => <AccountListRow key={account.id} account={account} showHat={false} />)
          }
+          <div style={{ cursor: 'pointer' }} onClick={addAccount}>+ add account</div>
         </div>
-        <Link
-          href={{
-            pathname: '/login',
-            query: { callbackUrl: window.location.origin + router.asPath, multiAuth: true }
-          }}
-          className='text-reset fw-bold'
-        >
-          <AddIcon height={20} width={20} /> another account
-        </Link>
       </div>
     </>
   )
 }
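For reference, the multi_auth cookie this component reads and writes is a base64-encoded JSON array of account stubs, and multi_auth.user-id is a pointer to the active one. A standalone decoding sketch follows; the cookie names and shapes are taken from the code above, while the sample id is made up:

// sketch: decode the multi_auth cookies handled above
import cookie from 'cookie'

const parsed = cookie.parse(document.cookie)
// [{ id, name, photoId }, ...], the shape written by updateAccountsFromCookie
const accounts = parsed.multi_auth
  ? JSON.parse(Buffer.from(parsed.multi_auth, 'base64').toString('utf-8'))
  : []
const pointer = parsed['multi_auth.user-id'] // 'anonymous' or a user id such as '616' (made-up value)
console.log(accounts.map(a => a.name), pointer)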
components/action-dropdown.js
@@ -1,6 +1,6 @@
 import Dropdown from 'react-bootstrap/Dropdown'
 import styles from './item.module.css'
-import MoreIcon from '@/svgs/more-fill.svg'
+import MoreIcon from '../svgs/more-fill.svg'
 
 export default function ActionDropdown ({ children }) {
   if (!children) {
components/action-tooltip.js
@@ -8,26 +8,19 @@ export default function ActionTooltip ({ children, notForm, disable, overlayText
   if (!notForm) {
     formik = useFormikContext()
   }
-  if (disable || !overlayText) {
+  if (disable) {
     return children
   }
   return (
     <OverlayTrigger
       placement={placement || 'bottom'}
       overlay={
-        <Tooltip style={{ position: 'fixed' }}>
-          {overlayText}
+        <Tooltip>
+          {overlayText || '1 sat'}
         </Tooltip>
       }
       trigger={['hover', 'focus']}
       show={formik?.isSubmitting ? false : undefined}
-      popperConfig={{
-        modifiers: {
-          preventOverflow: {
-            enabled: false
-          }
-        }
-      }}
     >
       <span>
         {children}
components/adv-post-form.js
@@ -1,20 +1,14 @@
 import { useState, useEffect, useMemo, useCallback } from 'react'
 import AccordianItem from './accordian-item'
 import { Input, InputUserSuggest, VariableInput, Checkbox } from './form'
 import InputGroup from 'react-bootstrap/InputGroup'
-import { BOOST_MIN, BOOST_MULT, MAX_FORWARDS, SSR } from '@/lib/constants'
-import { DEFAULT_CROSSPOSTING_RELAYS } from '@/lib/nostr'
+import { BOOST_MIN, BOOST_MULT, MAX_FORWARDS } from '../lib/constants'
+import { DEFAULT_CROSSPOSTING_RELAYS } from '../lib/nostr'
 import Info from './info'
-import { abbrNum, numWithUnits } from '@/lib/format'
-import styles from './adv-post-form.module.css'
+import { numWithUnits } from '../lib/format'
 import { useMe } from './me'
-import { useFeeButton } from './fee-button'
 import { useRouter } from 'next/router'
-import { useFormikContext } from 'formik'
-import { gql, useQuery } from '@apollo/client'
-import useDebounceCallback from './use-debounce-callback'
-import { Button } from 'react-bootstrap'
-import classNames from 'classnames'
+import { useFeeButton } from './fee-button'
 
 const EMPTY_FORWARD = { nym: '', pct: '' }
 
@@ -25,241 +19,50 @@ export function AdvPostInitial ({ forward, boost }) {
   }
 }
 
-const FormStatus = {
-  DIRTY: 'dirty',
-  ERROR: 'error'
-}
-
-export function BoostHelp () {
-  return (
-    <ol style={{ lineHeight: 1.25 }}>
-      <li>Boost ranks items higher based on the amount</li>
-      <li>The highest boost in a territory over the last 30 days is pinned to the top of the territory</li>
-      <li>The highest boost across all territories over the last 30 days is pinned to the top of the homepage</li>
-      <li>The minimum boost is {numWithUnits(BOOST_MIN, { abbreviate: false })}</li>
-      <li>Each {numWithUnits(BOOST_MULT, { abbreviate: false })} of boost is equivalent to a zap-vote from a maximally trusted stacker (very rare)
-        <ul>
-          <li>e.g. {numWithUnits(BOOST_MULT * 5, { abbreviate: false })} is like five zap-votes from a maximally trusted stacker</li>
-        </ul>
-      </li>
-      <li>boost can take a few minutes to show higher ranking in feed</li>
-      <li>100% of boost goes to the territory founder and top stackers as rewards</li>
-    </ol>
-  )
-}
-
-export function BoostInput ({ onChange, ...props }) {
-  const feeButton = useFeeButton()
-  let merge
-  if (feeButton) {
-    ({ merge } = feeButton)
-  }
-  return (
-    <Input
-      label={
-        <div className='d-flex align-items-center'>boost
-          <Info>
-            <BoostHelp />
-          </Info>
-        </div>
-      }
-      name='boost'
-      onChange={(_, e) => {
-        merge?.({
-          boost: {
-            term: `+ ${e.target.value}`,
-            label: 'boost',
-            op: '+',
-            modifier: cost => cost + Number(e.target.value)
-          }
-        })
-        onChange && onChange(_, e)
-      }}
-      hint={<span className='text-muted'>ranks posts higher temporarily based on the amount</span>}
-      append={<InputGroup.Text className='text-monospace'>sats</InputGroup.Text>}
-      {...props}
-    />
-  )
-}
-
-const BoostMaxes = ({ subName, homeMax, subMax, boost, updateBoost }) => {
-  return (
-    <div className='d-flex flex-row mb-2'>
-      <Button
-        className={classNames(styles.boostMax, 'me-2', homeMax + BOOST_MULT <= (boost || 0) && 'invisible')}
-        size='sm'
-        onClick={() => updateBoost(homeMax + BOOST_MULT)}
-      >
-        {abbrNum(homeMax + BOOST_MULT)} <small>top of homepage</small>
-      </Button>
-      {subName &&
-        <Button
-          className={classNames(styles.boostMax, subMax + BOOST_MULT <= (boost || 0) && 'invisible')}
-          size='sm'
-          onClick={() => updateBoost(subMax + BOOST_MULT)}
-        >
-          {abbrNum(subMax + BOOST_MULT)} <small>top of ~{subName}</small>
-        </Button>}
-    </div>
-  )
-}
-
-// act means we are adding to existing boost
-export function BoostItemInput ({ item, sub, act = false, ...props }) {
-  // act adds boost to existing boost
-  const existingBoost = act ? Number(item?.boost || 0) : 0
-  const [boost, setBoost] = useState(act ? 0 : Number(item?.boost || 0))
-
-  const { data, previousData, refetch } = useQuery(gql`
-    query BoostPosition($sub: String, $id: ID, $boost: Int) {
-      boostPosition(sub: $sub, id: $id, boost: $boost) {
-        home
-        sub
-        homeMaxBoost
-        subMaxBoost
-      }
-    }`,
-  {
-    variables: { sub: item?.subName || sub?.name, boost: existingBoost + boost, id: item?.id },
-    fetchPolicy: 'cache-and-network',
-    skip: !!item?.parentId || SSR
-  })
-
-  const getPositionDebounce = useDebounceCallback((...args) => refetch(...args), 1000, [refetch])
-  const updateBoost = useCallback((boost) => {
-    const boostToUse = Number(boost || 0)
-    setBoost(boostToUse)
-    getPositionDebounce({ sub: item?.subName || sub?.name, boost: Number(existingBoost + boostToUse), id: item?.id })
-  }, [getPositionDebounce, item?.id, item?.subName, sub?.name, existingBoost])
-
-  const dat = data || previousData
-
-  const boostMessage = useMemo(() => {
-    if (!item?.parentId && boost >= BOOST_MULT) {
-      if (dat?.boostPosition?.home || dat?.boostPosition?.sub || boost > dat?.boostPosition?.homeMaxBoost || boost > dat?.boostPosition?.subMaxBoost) {
-        const boostPinning = []
-        if (dat?.boostPosition?.home || boost > dat?.boostPosition?.homeMaxBoost) {
-          boostPinning.push('homepage')
-        }
-        if ((item?.subName || sub?.name) && (dat?.boostPosition?.sub || boost > dat?.boostPosition?.subMaxBoost)) {
-          boostPinning.push(`~${item?.subName || sub?.name}`)
-        }
-        return `pins to the top of ${boostPinning.join(' and ')}`
-      }
-    }
-    return 'ranks posts higher based on the amount'
-  }, [boost, dat?.boostPosition?.home, dat?.boostPosition?.sub, item?.subName, sub?.name])
-
-  return (
-    <>
-      <BoostInput
-        hint={<span className='text-muted'>{boostMessage}</span>}
-        onChange={(_, e) => {
-          if (e.target.value >= 0) {
-            updateBoost(Number(e.target.value))
-          }
-        }}
-        overrideValue={boost}
-        {...props}
-        groupClassName='mb-1'
-      />
-      {!item?.parentId &&
-        <BoostMaxes
-          subName={item?.subName || sub?.name}
-          homeMax={(dat?.boostPosition?.homeMaxBoost || 0) - existingBoost}
-          subMax={(dat?.boostPosition?.subMaxBoost || 0) - existingBoost}
-          boost={existingBoost + boost}
-          updateBoost={updateBoost}
-        />}
-    </>
-  )
-}
-
-export default function AdvPostForm ({ children, item, sub, storageKeyPrefix }) {
+export default function AdvPostForm ({ children }) {
   const { me } = useMe()
   const router = useRouter()
-  const [itemType, setItemType] = useState()
-  const formik = useFormikContext()
-  const [show, setShow] = useState(false)
-
-  useEffect(() => {
-    const isDirty = formik?.values.forward?.[0].nym !== '' || formik?.values.forward?.[0].pct !== '' ||
-      formik?.values.boost !== '' || (router.query?.type === 'link' && formik?.values.text !== '')
-
-    // if the adv post form is dirty on first render, show the accordian
-    if (isDirty) {
-      setShow(FormStatus.DIRTY)
-    }
-
-    // HACK ... TODO: we should generically handle this kind of local storage stuff
-    // in the form component, overriding the initial values
-    if (storageKeyPrefix) {
-      for (let i = 0; i < MAX_FORWARDS; i++) {
-        ['nym', 'pct'].forEach(key => {
-          const value = window.localStorage.getItem(`${storageKeyPrefix}-forward[${i}].${key}`)
-          if (value) {
-            formik?.setFieldValue(`forward[${i}].${key}`, value)
-          }
-        })
-      }
-    }
-  }, [formik?.values, storageKeyPrefix])
-
-  useEffect(() => {
-    // force show the accordian if there is an error and the form is submitting
-    const hasError = !!formik?.errors?.boost || formik?.errors?.forward?.length > 0
-    // if it's open we don't want to collapse on submit
-    setShow(show => hasError && formik?.isSubmitting ? FormStatus.ERROR : show)
-  }, [formik?.isSubmitting])
-
-  useEffect(() => {
-    const determineItemType = () => {
-      if (router && router.query.type) {
-        return router.query.type
-      } else if (item) {
-        const typeMap = {
-          url: 'link',
-          bounty: 'bounty',
-          pollCost: 'poll'
-        }
-
-        for (const [key, type] of Object.entries(typeMap)) {
-          if (item[key]) {
-            return type
-          }
-        }
-
-        return 'discussion'
-      }
-    }
-
-    const type = determineItemType()
-    setItemType(type)
-  }, [item, router])
-
-  function renderCrosspostDetails (itemType) {
-    switch (itemType) {
-      case 'discussion':
-        return <li>crosspost this discussion as a NIP-23 event</li>
-      case 'link':
-        return <li>crosspost this link as a NIP-01 event</li>
-      case 'bounty':
-        return <li>crosspost this bounty as a NIP-99 event</li>
-      case 'poll':
-        return <li>crosspost this poll as a NIP-41 event</li>
-      default:
-        return null
-    }
-  }
+  const { merge } = useFeeButton()
 
   return (
     <AccordianItem
       header={<div style={{ fontWeight: 'bold', fontSize: '92%' }}>options</div>}
-      show={show}
       body={
         <>
           {children}
-          <BoostItemInput item={item} sub={sub} />
+          <Input
+            label={
+              <div className='d-flex align-items-center'>boost
+                <Info>
+                  <ol className='fw-bold'>
+                    <li>Boost ranks posts higher temporarily based on the amount</li>
+                    <li>The minimum boost is {numWithUnits(BOOST_MIN, { abbreviate: false })}</li>
+                    <li>Each {numWithUnits(BOOST_MULT, { abbreviate: false })} of boost is equivalent to one trusted upvote
+                      <ul>
+                        <li>e.g. {numWithUnits(BOOST_MULT * 5, { abbreviate: false })} is like 5 votes</li>
+                      </ul>
+                    </li>
+                    <li>The decay of boost "votes" increases at 1.25x the rate of organic votes
+                      <ul>
+                        <li>i.e. boost votes fall out of ranking faster</li>
+                      </ul>
+                    </li>
+                    <li>100% of sats from boost are given back to top stackers as rewards</li>
+                  </ol>
+                </Info>
+              </div>
+            }
+            name='boost'
+            onChange={(_, e) => merge({
+              boost: {
+                term: `+ ${e.target.value}`,
+                label: 'boost',
+                modifier: cost => cost + Number(e.target.value)
+              }
+            })}
+            hint={<span className='text-muted'>ranks posts higher temporarily based on the amount</span>}
+            append={<InputGroup.Text className='text-monospace'>sats</InputGroup.Text>}
+          />
           <VariableInput
             label='forward sats to'
             name='forward'
@@ -290,16 +93,16 @@ export default function AdvPostForm ({ children, item, sub, storageKeyPrefix })
             )
           }}
         </VariableInput>
-        {me && itemType &&
+        {me && router.query.type === 'discussion' &&
           <Checkbox
            label={
              <div className='d-flex align-items-center'>crosspost to nostr
                <Info>
-                  <ul>
-                    {renderCrosspostDetails(itemType)}
+                  <ul className='fw-bold'>
+                    <li>crosspost this discussion item to nostr</li>
                     <li>requires NIP-07 extension for signing</li>
                     <li>we use your NIP-05 relays if set</li>
-                    <li>we use these relays by default:</li>
+                    <li>otherwise we default to these relays:</li>
                     <ul>
                       {DEFAULT_CROSSPOSTING_RELAYS.map((relay, i) => (
                         <li key={i}>{relay}</li>
components/adv-post-form.module.css
@@ -9,11 +9,4 @@
   display: flex;
   flex: 0 1 fit-content;
   height: fit-content;
 }
-
-.boostMax small {
-  font-weight: 400;
-  margin-left: 0.25rem;
-  margin-right: 0.25rem;
-  opacity: 0.5;
-}
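The boost field above registers itself with the fee button via merge({ boost: { term, label, modifier } }), where each modifier maps the running cost to a new cost. The useFeeButton internals are not part of this diff, so the fold below is only an assumption about how such entries might compose:

// sketch: composing merge-style fee modifiers into a total cost (assumed semantics)
const entries = {
  base: { label: 'base cost', modifier: cost => cost + 1 },
  boost: { label: 'boost', term: '+ 1000', modifier: cost => cost + 1000 }
}
const total = Object.values(entries).reduce((cost, e) => e.modifier(cost), 0)
console.log(total) // 1001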
Some files were not shown because too many files have changed in this diff