Store hashed and salted email addresses (#1111)

* first pass of hashing user emails

* use salt

* add a salt to .env.development (prod salt needs to be kept a secret)
* move `hashEmail` util to a new util module

* trigger a one-time job to migrate existing emails via the worker, so we can use the salt from an env var

* move newsletter signup

move newsletter signup into the Prisma adapter's create-user-with-email code path, so we can
still auto-enroll email accounts without having to persist the email address in plaintext

* remove `email` from api key session lookup query

* drop user email index before dropping column

* restore email column, just null values instead

* fix function name

* fix salt and hash raw sql statement

* update auth methods email type in typedefs from str to bool

* remove todo comment

* lowercase email before hashing during migration

* check for emailHash and email to accommodate migration window

update our lookups to check for a matching emailHash first and then a matching email, in that
order, to cover the case where a user tries to log in via email while the migration is running
and their account has not yet been migrated

also update sndev to have a command `./sndev email` to launch the mailhog inbox in your browser

also update `./sndev login` to hash the generated email address and insert it into the db record

* update sndev help

* update awards.csv

* update the hack in next-auth to re-use the email supplied on input to `getUserByEmail`

* consolidate console.error logs

* create generic open command

---------

Co-authored-by: Keyan <34140557+huumn@users.noreply.github.com>
Co-authored-by: keyan <keyan.kousha+huumn@gmail.com>
SatsAllDay authored on 2024-05-04 19:06:15 -04:00, committed by GitHub
parent 6220eb06ee
commit 15f9950477
14 changed files with 206 additions and 36 deletions


@@ -36,6 +36,10 @@ LNWITH_URL=
LOGIN_EMAIL_SERVER=smtp://mailhog:1025
LOGIN_EMAIL_FROM=sndev@mailhog.dev
# email salt
# openssl rand -hex 32
EMAIL_SALT=202c90943c313b829e65e3f29164fb5dd7ea3370d7262c4159691c2f6493bb8b
# static things
NEXTAUTH_URL=http://localhost:3000/api/auth
SELF_URL=http://app:3000
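
The development salt above was generated with `openssl rand -hex 32`, per the comment. A minimal Node sketch of the same thing, if you'd rather generate a fresh salt without openssl:

```js
// Generate a 32-byte hex salt, equivalent to `openssl rand -hex 32`.
// Only use the committed value for local development; the production
// EMAIL_SALT must be generated separately and kept secret.
import { randomBytes } from 'node:crypto'

console.log(`EMAIL_SALT=${randomBytes(32).toString('hex')}`)
```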


@@ -9,6 +9,7 @@ import { ANON_USER_ID, DELETE_USER_ID, RESERVED_MAX_USER_ID, SN_NO_REWARDS_IDS }
import { viewGroup } from './growth'
import { timeUnitForRange, whenRange } from '@/lib/time'
import assertApiKeyNotPermitted from './apiKey'
import { hashEmail } from '@/lib/crypto'
const contributors = new Set()
@@ -44,7 +45,7 @@ async function authMethods (user, args, { models, me }) {
return {
lightning: !!user.pubkey,
email: user.emailVerified && user.email,
email: !!(user.emailVerified && user.emailHash),
twitter: oauth.indexOf('twitter') >= 0,
github: oauth.indexOf('github') >= 0,
nostr: !!user.nostrAuthPubkey,
@@ -686,7 +687,7 @@ export default {
try {
await models.user.update({
where: { id: me.id },
data: { email: email.toLowerCase() }
data: { emailHash: hashEmail({ email }) }
})
} catch (error) {
if (error.code === 'P2002') {


@@ -108,7 +108,7 @@ export default gql`
nostr: Boolean!
github: Boolean!
twitter: Boolean!
email: String
email: Boolean!
apiKey: Boolean
}


@@ -66,3 +66,4 @@ benalleng,pr,#1099,#794,medium-hard,,,refined in a commit,450k,benalleng@mutiny.
dillon-co,helpfulness,#1099,#794,medium-hard,,,#988 did much of the legwork,225k,bolt11,2024-04-29
abhiShandy,pr,#1119,#1110,good-first-issue,,,,20k,abhishandy@stacker.news,2024-04-28
felipebueno,issue,#1119,#1110,good-first-issue,,,,2k,felipe@stacker.news,2024-04-28
SatsAllDay,pr,#1111,#622,medium-hard,,,,500k,weareallsatoshi@getalby.com,???



@@ -397,6 +397,8 @@ services:
- '--autopilot.disable'
- '--pool.auctionserver=test.pool.lightning.finance:12010'
- '--loop.server.host=test.swap.lightning.today:11010'
labels:
CONNECT: "localhost:8443"
stacker_cln:
build:
context: ./docker/cln
@@ -466,6 +468,8 @@ services:
- "1025:1025"
links:
- app
labels:
CONNECT: "localhost:8025"
volumes:
db:
os:

lib/crypto.js (new file)

@@ -0,0 +1,9 @@
import { createHash } from 'node:crypto'
export function hashEmail ({
email,
salt = process.env.EMAIL_SALT
}) {
const saltedEmail = `${email.toLowerCase()}${salt}`
return createHash('sha256').update(saltedEmail).digest('hex')
}
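
A minimal usage sketch of the new helper (the addresses and salt below are illustrative only):

```js
import { hashEmail } from '@/lib/crypto'

// hashing lowercases the email first, so these two calls produce the same digest
const a = hashEmail({ email: 'Satoshi@example.com', salt: 'not-a-real-salt' })
const b = hashEmail({ email: 'satoshi@example.com', salt: 'not-a-real-salt' })
console.log(a === b) // true

// with no salt argument, it falls back to process.env.EMAIL_SALT
const stored = hashEmail({ email: 'satoshi@example.com' })
console.log(stored.length) // 64 hex chars (sha256)
```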


@@ -11,6 +11,7 @@ import { getToken } from 'next-auth/jwt'
import { NodeNextRequest } from 'next/dist/server/base-http/node'
import { schnorr } from '@noble/curves/secp256k1'
import { notifyReferral } from '@/lib/webPush'
import { hashEmail } from '@/lib/crypto'
/**
* Stores userIds in user table
@@ -71,24 +72,6 @@ function getCallbacks (req) {
token.sub = Number(token.id)
}
// sign them up for the newsletter
if (isNewUser && user?.email && process.env.LIST_MONK_URL && process.env.LIST_MONK_AUTH) {
fetch(process.env.LIST_MONK_URL + '/api/subscribers', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: 'Basic ' + Buffer.from(process.env.LIST_MONK_AUTH).toString('base64')
},
body: JSON.stringify({
email: user.email,
name: 'blank',
lists: [2],
status: 'enabled',
preconfirm_subscriptions: true
})
}).then(async r => console.log(await r.json())).catch(console.log)
}
return token
},
async session ({ session, token }) {
@@ -217,7 +200,49 @@ const providers = [
export const getAuthOptions = req => ({
callbacks: getCallbacks(req),
providers,
adapter: PrismaAdapter(prisma),
adapter: {
...PrismaAdapter(prisma),
createUser: data => {
// replace email with email hash in new user payload
if (data.email) {
const { email } = data
data.emailHash = hashEmail({ email })
delete data.email
// data.email used to be used for name of new accounts. since it's missing, let's generate a new name
data.name = data.emailHash.substring(0, 10)
// sign them up for the newsletter
// don't await it, let it run async
enrollInNewsletter({ email })
}
return prisma.user.create({ data })
},
getUserByEmail: async email => {
const hashedEmail = hashEmail({ email })
let user = await prisma.user.findUnique({
where: {
// lookup by email hash since we don't store plaintext emails any more
emailHash: hashedEmail
}
})
if (!user) {
user = await prisma.user.findUnique({
where: {
// lookup by email as a fallback in case a user attempts to login by email during the migration
// and their email hasn't been migrated yet
email
}
})
}
// HACK! This is required to satisfy next-auth's check here:
// https://github.com/nextauthjs/next-auth/blob/5b647e1ac040250ad055e331ba97f8fa461b63cc/packages/next-auth/src/core/routes/callback.ts#L227
// since we are nulling `email`, but it expects it to be truthy there.
// Since we have the email from the input request, we can copy it here and pretend like we store user emails, even though we don't.
if (user) {
user.email = email
}
return user
}
},
session: {
strategy: 'jwt'
},
@@ -229,6 +254,34 @@ export const getAuthOptions = req => ({
events: getEventCallbacks()
})
async function enrollInNewsletter ({ email }) {
if (process.env.LIST_MONK_URL && process.env.LIST_MONK_AUTH) {
try {
const response = await fetch(process.env.LIST_MONK_URL + '/api/subscribers', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: 'Basic ' + Buffer.from(process.env.LIST_MONK_AUTH).toString('base64')
},
body: JSON.stringify({
email,
name: 'blank',
lists: [2],
status: 'enabled',
preconfirm_subscriptions: true
})
})
const jsonResponse = await response.json()
console.log(jsonResponse)
} catch (err) {
console.log('error signing user up for newsletter')
console.log(err)
}
} else {
console.log('LIST MONK env vars not set, skipping newsletter enrollment')
}
}
export default async (req, res) => {
await NextAuth(req, res, getAuthOptions(req))
}
@@ -238,7 +291,21 @@ async function sendVerificationRequest ({
url,
provider
}) {
const user = await prisma.user.findUnique({ where: { email } })
let user = await prisma.user.findUnique({
where: {
// Look for the user by hashed email
emailHash: hashEmail({ email })
}
})
if (!user) {
user = await prisma.user.findUnique({
where: {
// or plaintext email, in case a user tries to login via email during the migration
// before their particular record has been migrated
email
}
})
}
return new Promise((resolve, reject) => {
const { server, from } = provider
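
The same hash-first, plaintext-fallback lookup now appears in both `getUserByEmail` and `sendVerificationRequest`. A hedged sketch of that shared pattern as a standalone helper — `findUserByEmailOrHash` is a hypothetical name, not part of this change:

```js
import { hashEmail } from '@/lib/crypto'

// During the migration window an account may still only have a plaintext email,
// so check emailHash first and fall back to the legacy email column.
async function findUserByEmailOrHash (prisma, email) {
  const byHash = await prisma.user.findUnique({
    where: { emailHash: hashEmail({ email }) }
  })
  if (byHash) return byHash
  return prisma.user.findUnique({ where: { email } })
}
```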


@@ -57,7 +57,7 @@ export default startServerAndCreateNextHandler(apolloServer, {
let session
if (apiKey) {
const [user] = await models.$queryRaw`
SELECT id, name, email, "apiKeyEnabled"
SELECT id, name, "apiKeyEnabled"
FROM users
WHERE "apiKeyHash" = encode(digest(${apiKey}, 'sha256'), 'hex')
LIMIT 1`


@@ -714,15 +714,8 @@ function AuthMethods ({ methods, apiKeyEnabled }) {
return methods.email
? (
<div key={provider} className='mt-2 d-flex align-items-center'>
<Input
name='email'
placeholder={methods.email}
groupClassName='mb-0'
readOnly
noForm
/>
<Button
className='ms-2' variant='secondary' onClick={
variant='secondary' onClick={
async () => {
await unlink('email')
}


@@ -0,0 +1,43 @@
/*
Warnings:
- A unique constraint covering the columns `[emailHash]` on the table `users` will be added. If there are existing duplicate values, this will fail.
*/
-- AlterTable
ALTER TABLE "users" ADD COLUMN "emailHash" TEXT;
-- CreateIndex
CREATE UNIQUE INDEX "users.email_hash_unique" ON "users"("emailHash");
-- hack ... prisma doesn't know about our other schemas (e.g. pgboss)
-- and this is only really a problem on their "shadow database"
-- so we catch the exception it throws and ignore it
CREATE OR REPLACE FUNCTION submit_migrate_existing_user_emails_job() RETURNS void AS $$
BEGIN
-- Submit a job to salt and hash emails after the updated worker has spun-up
INSERT INTO pgboss.job (name, data, priority, startafter, expirein)
SELECT 'saltAndHashEmails', jsonb_build_object(), -100, now() + interval '10 minutes', interval '1 day';
EXCEPTION WHEN OTHERS THEN
-- catch the exception for prisma dev execution, but do nothing with it
END;
$$ LANGUAGE plpgsql;
-- execute the function once to submit the one-time job
SELECT submit_migrate_existing_user_emails_job();
-- then drop it since we don't need it anymore
DROP FUNCTION submit_migrate_existing_user_emails_job();
-- function that accepts a salt and migrates all existing emails using the salt then hashing the salted email
CREATE OR REPLACE FUNCTION migrate_existing_user_emails(salt TEXT) RETURNS void AS $$
BEGIN
UPDATE "users"
SET "emailHash" = encode(digest(LOWER("email") || salt, 'sha256'), 'hex')
WHERE "email" IS NOT NULL;
-- then wipe the email values
UPDATE "users"
SET email = NULL;
END;
$$ LANGUAGE plpgsql;
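
To confirm the pgcrypto expression above matches `hashEmail` in `lib/crypto.js`, here is a small sanity-check sketch (assuming a Prisma client `models` and the pgcrypto extension, both already used elsewhere in this codebase):

```js
import { hashEmail } from '@/lib/crypto'

// Returns true if Postgres and Node produce the same salted sha256 hex digest.
async function digestsMatch (models, email, salt) {
  const [{ hash }] = await models.$queryRaw`
    SELECT encode(digest(LOWER(${email}) || ${salt}, 'sha256'), 'hex') AS hash`
  return hash === hashEmail({ email, salt })
}
```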


@@ -19,6 +19,7 @@ model User {
name String? @unique(map: "users.name_unique") @db.Citext
email String? @unique(map: "users.email_unique")
emailVerified DateTime? @map("email_verified")
emailHash String? @unique(map: "users.email_hash_unique")
image String?
msats BigInt @default(0)
freeComments Int @default(5)

sndev

@@ -106,6 +106,39 @@ OPTIONS"
docker__compose down --help | awk '/Options:/{y=1;next}y'
}
sndev__open() {
shift
service=$(docker__compose ps $1 --format '{{.Label "CONNECT"}}')
if [ -z "$service" ]; then
echo "no url found for $1"
exit 1
fi
service="http://$service"
echo "opening $1 ... $service"
if [ "$(uname)" = "Darwin" ]; then
open $service
elif [ "$(uname)" = "Linux" ]; then
xdg-open $service
elif [ "$(uname)" = "Windows_NT" ]; then
start $service
fi
}
sndev__help_open() {
help="
open a container's url if it has one
USAGE
$ sndev open SERVICE
OPTIONS
no options currently exist
"
echo "$help"
}
sndev__restart() {
shift
docker__compose restart "$@"
@@ -418,14 +451,16 @@ sndev__login() {
# "SNDEV-TOKEN3_0W_PhDRZVanbeJsZZGIEljexkKoGbL6qGIqSwTjjI"
# next-auth concats the token with the secret from env and then sha256's it
token="d5fce54babffcb070c39f78d947761fd9ec37647fafcecb9734a3085a78e5c5e"
salt="202c90943c313b829e65e3f29164fb5dd7ea3370d7262c4159691c2f6493bb8b"
# upsert user with nym and nym@sndev.team
email="$1@sndev.team"
docker__exec db psql -U sn -d stackernews -q <<EOF
INSERT INTO users (name) VALUES ('$1') ON CONFLICT DO NOTHING;
UPDATE users SET email = '$1@sndev.team' WHERE name = '$1';
UPDATE users SET email = '$email', "emailHash" = encode(digest(LOWER('$email')||'$salt', 'sha256'), 'hex') WHERE name = '$1';
INSERT INTO verification_requests (identifier, token, expires)
VALUES ('$1@sndev.team', '$token', NOW() + INTERVAL '1 day')
VALUES ('$email', '$token', NOW() + INTERVAL '1 day')
ON CONFLICT (token) DO UPDATE
SET identifier = '$1@sndev.team', expires = NOW() + INTERVAL '1 day';
SET identifier = '$email', expires = NOW() + INTERVAL '1 day';
EOF
echo
@@ -496,6 +531,7 @@ COMMANDS
dev:
pr fetch and checkout a pr
lint run linters
open open container url in browser
other:
compose docker compose passthrough


@@ -23,6 +23,7 @@ import { deleteUnusedImages } from './deleteUnusedImages.js'
import { territoryBilling, territoryRevenue } from './territory.js'
import { ofac } from './ofac.js'
import { autoWithdraw } from './autowithdraw.js'
import { saltAndHashEmails } from './saltAndHashEmails.js'
const { loadEnvConfig } = nextEnv
const { ApolloClient, HttpLink, InMemoryCache } = apolloClient
@@ -100,6 +101,7 @@ async function work () {
await boss.work('territoryBilling', jobWrapper(territoryBilling))
await boss.work('territoryRevenue', jobWrapper(territoryRevenue))
await boss.work('ofac', jobWrapper(ofac))
await boss.work('saltAndHashEmails', jobWrapper(saltAndHashEmails))
console.log('working jobs')
}


@@ -0,0 +1,9 @@
export async function saltAndHashEmails ({ models }) {
try {
console.log('Migrating existing emails to salt and hash them...')
await models.$executeRaw`select migrate_existing_user_emails(${process.env.EMAIL_SALT})`
console.log('Successfully migrated existing emails to salt and hash them!')
} catch (err) {
console.error('Error occurred while salting and hashing existing emails:', err)
}
}
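
For reference, a minimal sketch of running this handler outside the worker against a standalone Prisma client — it assumes `EMAIL_SALT` is set in the environment; the real job is enqueued by the migration and executed via pgboss and `jobWrapper`:

```js
import { PrismaClient } from '@prisma/client'
import { saltAndHashEmails } from './saltAndHashEmails.js'

const models = new PrismaClient()
await saltAndHashEmails({ models })
await models.$disconnect()
```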