wip upsertWallet

parent 2bdbb433df
commit 4826ae5a7b
@@ -20,7 +20,6 @@ import { bolt11Tags } from '@/lib/bolt11'
import { finalizeHodlInvoice } from 'worker/wallet'
import walletDefs from 'wallets/server'
import { generateResolverName, generateTypeDefName } from '@/wallets/graphql'
import { isConfigured } from '@/wallets/common'
import { lnAddrOptions } from '@/lib/lnurl'
import { GqlAuthenticationError, GqlAuthorizationError, GqlInputError } from '@/lib/error'
import { getNodeSockets, getOurPubkey } from '../lnd'
@@ -30,39 +29,24 @@ function injectResolvers (resolvers) {
for (const w of walletDefs) {
const resolverName = generateResolverName(w.walletField)
console.log(resolverName)
resolvers.Mutation[resolverName] = async (parent, { settings, priorityOnly, canSend, canReceive, ...data }, { me, models }) => {
if (canReceive && !w.createInvoice) {
console.warn('Requested to upsert wallet as a receiver, but wallet does not support createInvoice. disabling')
canReceive = false
resolvers.Mutation[resolverName] = async (parent, { settings, validateLightning, vaultEntries, ...data }, { me, models }) => {
// allow transformation of the data on validation (this is optional ... won't do anything if not implemented)
const validData = await walletValidate(w, { ...data, ...settings, vaultEntries })
if (validData) {
Object.keys(validData).filter(key => key in data).forEach(key => { data[key] = validData[key] })
Object.keys(validData).filter(key => key in settings).forEach(key => { settings[key] = validData[key] })
}

if (!priorityOnly && canReceive) {
// check if the required fields are set
if (!isConfigured({ fields: w.fields, config: data, serverOnly: true })) {
throw new GqlInputError('missing required fields')
}
// allow transformation of the data on validation (this is optional ... won't do anything if not implemented)
const validData = await walletValidate(w, { ...data, ...settings })
if (validData) {
Object.keys(validData).filter(key => key in data).forEach(key => { data[key] = validData[key] })
Object.keys(validData).filter(key => key in settings).forEach(key => { settings[key] = validData[key] })
}
}

if (!canReceive && !canSend) throw new GqlInputError('wallet must be able to send or receive')
return await upsertWallet({
wallet: {
field:
w.walletField,
field: w.walletField,
type: w.walletType
},
testCreateInvoice: w.testCreateInvoice ? (data) => w.testCreateInvoice(data, { me, models }) : null
testCreateInvoice: w.testCreateInvoice && validateLightning ? (data) => w.testCreateInvoice(data, { me, models }) : null
}, {
settings,
data,
priorityOnly,
canSend,
canReceive
vaultEntries
}, { me, models })
}
}
@@ -1,299 +0,0 @@
import { SSR } from '@/lib/constants'
import { useMe } from './me'
import { useEffect, useRef } from 'react'
import createTaskQueue from '@/lib/task-queue'

const VERSION = 1

/**
* A react hook to use the local storage
* It handles the lifecycle of the storage, opening and closing it as needed.
*
* @param {*} options
* @param {string} options.database - the database name
* @param {[string]} options.namespace - the namespace of the storage
* @returns {[object]} - the local storage
*/
export default function useLocalStorage ({ database = 'default', namespace = ['default'] }) {
const { me } = useMe()
if (!Array.isArray(namespace)) namespace = [namespace]
const joinedNamespace = namespace.join(':')
const storage = useRef(openLocalStorage({ database, userId: me?.id, namespace }))

useEffect(() => {
const currentStorage = storage.current
const newStorage = openLocalStorage({ database, userId: me?.id, namespace })
storage.current = newStorage
if (currentStorage) currentStorage.close()
return () => {
newStorage.close()
}
}, [me, database, joinedNamespace])

return [{
set: (key, value) => storage.current.set(key, value),
get: (key) => storage.current.get(key),
unset: (key) => storage.current.unset(key),
clear: () => storage.current.clear(),
list: () => storage.current.list()
}]
}
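For reference, a minimal sketch of how the hook removed above was consumed; the component name, keys and import path are assumptions for illustration:

```js
// hypothetical consumer of the removed hook; import path and keys are assumed
import useLocalStorage from '@/components/use-local-storage'

function DraftNote () {
  // one storage unit per user, scoped by database and namespace
  const [storage] = useLocalStorage({ database: 'drafts', namespace: ['note'] })

  const persist = async (text) => {
    await storage.set('body', text) // IndexedDB when available, in-memory otherwise
  }

  const restore = async () => {
    return await storage.get('body') // resolves to undefined if never set
  }

  // rendering intentionally omitted; only the storage API is illustrated
  return null
}
```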

/**
* Open a local storage.
* This is an abstraction on top of IndexedDB or, when not available, an in-memory storage.
* A combination of userId, database and namespace is used to efficiently separate different storage units.
* Namespaces can be an array of strings, that will be internally joined to form a single namespace.
*
* @param {*} options
* @param {string} options.userId - the user that owns the storage (anon if not provided)
* @param {string} options.database - the database name (default if not provided)
* @param {[string]} options.namespace - the namespace of the storage (default if not provided)
* @returns {object} - the local storage
* @throws Error if the namespace is invalid
*/
export function openLocalStorage ({ userId, database = 'default', namespace = ['default'] }) {
if (!userId) userId = 'anon'
if (!Array.isArray(namespace)) namespace = [namespace]
if (SSR) return createMemBackend(userId, namespace)

let backend = newIdxDBBackend(userId, database, namespace)

if (!backend) {
console.warn('no local storage backend available, fallback to in memory storage')
backend = createMemBackend(userId, namespace)
}
return backend
}

export async function listLocalStorages ({ userId, database }) {
if (SSR) return []
return await listIdxDBBackendNamespaces(userId, database)
}

/**
* In memory storage backend (volatile/dummy storage)
*/
function createMemBackend (userId, namespace) {
const joinedNamespace = userId + ':' + namespace.join(':')
let memory
if (SSR) {
memory = {}
} else {
if (!window.snMemStorage) window.snMemStorage = {}
memory = window.snMemStorage[joinedNamespace]
if (!memory) window.snMemStorage[joinedNamespace] = memory = {}
}
return {
set: (key, value) => { memory[key] = value },
get: (key) => memory[key],
unset: (key) => { delete memory[key] },
clear: () => { Object.keys(memory).forEach(key => delete memory[key]) },
list: () => Object.keys(memory),
close: () => { }
}
}

/**
* Open an IndexedDB connection
* @param {*} userId
* @param {*} database
* @param {*} onupgradeneeded
* @param {*} queue
* @returns {object} - an open connection
* @throws Error if the connection cannot be opened
*/
async function openIdxDB (userId, database, onupgradeneeded, queue) {
const fullDbName = `${database}:${userId}`
// we keep a reference to every open indexed db connection
// to reuse them whenever possible
if (window && !window.snIdxDB) window.snIdxDB = {}
let openConnection = window?.snIdxDB?.[fullDbName]

const close = () => {
const conn = openConnection
conn.ref--
if (conn.ref === 0) { // close the connection for real if nothing is using it
if (window?.snIdxDB) delete window.snIdxDB[fullDbName]
queue.enqueue(() => {
conn.db.close()
})
}
}

// if for any reason the connection is outdated, we close it
if (openConnection && openConnection.version !== VERSION) {
close()
openConnection = undefined
}
// an open connections is not available, so we create a new one
if (!openConnection) {
openConnection = {
version: VERSION,
ref: 1, // we need a ref count to know when to close the connection for real
db: null,
close
}
openConnection.db = await new Promise((resolve, reject) => {
const request = window.indexedDB.open(fullDbName, VERSION)
request.onupgradeneeded = (event) => {
const db = event.target.result
if (onupgradeneeded) onupgradeneeded(db)
}
request.onsuccess = (event) => {
const db = event.target.result
if (!db?.transaction) reject(new Error('unsupported implementation'))
else resolve(db)
}
request.onerror = reject
})
window.snIdxDB[fullDbName] = openConnection
} else {
// increase the reference count
openConnection.ref++
}
return openConnection
}

/**
* An IndexedDB based persistent storage
* @param {string} userId - the user that owns the storage
* @param {string} database - the database name
* @returns {object} - an indexedDB persistent storage
* @throws Error if the namespace is invalid
*/
function newIdxDBBackend (userId, database, namespace) {
if (!window.indexedDB) return undefined
if (!namespace) throw new Error('missing namespace')
if (!Array.isArray(namespace) || !namespace.length || namespace.find(n => !n || typeof n !== 'string')) throw new Error('invalid namespace. must be a non-empty array of strings')
if (namespace.find(n => n.includes(':'))) throw new Error('invalid namespace. must not contain ":"')

namespace = namespace.join(':')

const queue = createTaskQueue()

let openConnection = null
let closed = false
const initialize = async () => {
if (!openConnection) {
openConnection = await openIdxDB(userId, database, (db) => {
db.createObjectStore(database, { keyPath: ['namespace', 'key'] })
}, queue)
}
}

return {
set: async (key, value) => {
await queue.enqueue(async () => {
await initialize()
const tx = openConnection.db.transaction([database], 'readwrite')
const objectStore = tx.objectStore(database)
objectStore.put({ namespace, key, value })
await new Promise((resolve, reject) => {
tx.oncomplete = resolve
tx.onerror = reject
})
})
},
get: async (key) => {
return await queue.enqueue(async () => {
await initialize()
const tx = openConnection.db.transaction([database], 'readonly')
const objectStore = tx.objectStore(database)
const request = objectStore.get([namespace, key])
return await new Promise((resolve, reject) => {
request.onsuccess = () => resolve(request.result?.value)
request.onerror = reject
})
})
},
unset: async (key) => {
await queue.enqueue(async () => {
await initialize()
const tx = openConnection.db.transaction([database], 'readwrite')
const objectStore = tx.objectStore(database)
objectStore.delete([namespace, key])
await new Promise((resolve, reject) => {
tx.oncomplete = resolve
tx.onerror = reject
})
})
},
clear: async () => {
await queue.enqueue(async () => {
await initialize()
const tx = openConnection.db.transaction([database], 'readwrite')
const objectStore = tx.objectStore(database)
objectStore.clear()
await new Promise((resolve, reject) => {
tx.oncomplete = resolve
tx.onerror = reject
})
})
},
list: async () => {
return await queue.enqueue(async () => {
await initialize()
const tx = openConnection.db.transaction([database], 'readonly')
const objectStore = tx.objectStore(database)
const keys = []
return await new Promise((resolve, reject) => {
const request = objectStore.openCursor()
request.onsuccess = (event) => {
const cursor = event.target.result
if (cursor) {
if (cursor.key[0] === namespace) {
keys.push(cursor.key[1]) // Push only the 'key' part of the composite key
}
cursor.continue()
} else {
resolve(keys)
}
}
request.onerror = reject
})
})
},
close: async () => {
if (closed) return
closed = true
queue.enqueue(async () => {
if (openConnection) await openConnection.close()
})
}
}
}
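To make the storage layout above concrete: every entry lives in one object store keyed by the composite ['namespace', 'key'], so a raw IndexedDB read of the same record looks roughly like this (user, database and key names are illustrative):

```js
// raw IndexedDB equivalent of backend.get('theme') for userId 'anon',
// database 'default' and namespace ['settings'] — all names are illustrative
const req = window.indexedDB.open('default:anon', 1) // fullDbName is `${database}:${userId}`
req.onsuccess = () => {
  const db = req.result
  const store = db.transaction(['default'], 'readonly').objectStore('default')
  const get = store.get(['settings', 'theme']) // composite key: [joined namespace, key]
  get.onsuccess = () => console.log(get.result?.value) // records are { namespace, key, value }
}
```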

/**
* List all the namespaces used in an IndexedDB database
* @param {*} userId - the user that owns the storage
* @param {*} database - the database name
* @returns {array} - an array of namespace names
*/
async function listIdxDBBackendNamespaces (userId, database) {
if (!window?.indexedDB) return []
const queue = createTaskQueue()
const openConnection = await openIdxDB(userId, database, null, queue)
try {
const list = await queue.enqueue(async () => {
const objectStore = openConnection.db.transaction([database], 'readonly').objectStore(database)
const namespaces = new Set()
return await new Promise((resolve, reject) => {
const request = objectStore.openCursor()
request.onsuccess = (event) => {
const cursor = event.target.result
if (cursor) {
namespaces.add(cursor.key[0])
cursor.continue()
} else {
resolve(Array.from(namespaces).map(n => n.split(':')))
}
}
request.onerror = reject
})
})
return list
} finally {
openConnection.close()
}
}
@@ -186,20 +186,26 @@ export const WALLET_BY_TYPE = gql`
}
`

export const WALLET_FIELDS = gql`
fragment WalletFields on Wallet {
id
priority
type
updatedAt
enabled
vaultEntries {
key
value
}
}
`

export const WALLETS = gql`
${WALLET_FIELDS}

query Wallets {
wallets {
id
priority
type
updatedAt
canSend
canReceive
enabled
vaultEntries {
key
value
}
...WalletFields
}
}
`
@@ -1,54 +0,0 @@
/**
* Create a queue to run tasks sequentially
* @returns {Object} - the queue
* @returns {function} enqueue - Function to add a task to the queue
* @returns {function} lock - Function to lock the queue
* @returns {function} wait - Function to wait for the queue to be empty
*/
export default function createTaskQueue () {
const queue = {
queue: Promise.resolve(),
/**
* Enqueue a task to be run sequentially
* @param {function} fn - The task function to be enqueued
* @returns {Promise} - A promise that resolves with the result of the task function
*/
enqueue (fn) {
return new Promise((resolve, reject) => {
queue.queue = queue.queue.then(async () => {
try {
resolve(await fn())
} catch (e) {
reject(e)
}
})
})
},
/**
* Lock the queue so that it can't move forward until unlocked
* @param {boolean} [wait=true] - Whether to wait for the lock to be acquired
* @returns {Promise<function>} - A promise that resolves with the unlock function
*/
async lock (wait = true) {
let unlock
const lock = new Promise((resolve) => { unlock = resolve })
const locking = new Promise((resolve) => {
queue.queue = queue.queue.then(() => {
resolve()
return lock
})
})
if (wait) await locking
return unlock
},
/**
* Wait for the queue to be empty
* @returns {Promise} - A promise that resolves when the queue is empty
*/
async wait () {
return queue.queue
}
}

return queue
}
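For reference, a small sketch of how the removed queue was meant to be used; the task bodies are made up:

```js
import createTaskQueue from '@/lib/task-queue'

const queue = createTaskQueue()

// tasks run strictly one after another; each enqueue resolves with its task's result
const a = queue.enqueue(async () => 1)
const b = queue.enqueue(async () => 2)

// pause the queue; anything enqueued afterwards stays blocked until unlock() is called
const unlock = await queue.lock()
const c = queue.enqueue(async () => 3) // queued, but held back by the lock
unlock()

await queue.wait() // resolves once everything enqueued so far has finished
console.log(await a, await b, await c) // 1 2 3
```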
@@ -8,11 +8,13 @@ import { useRouter } from 'next/router'
import { useWallet } from '@/wallets/index'
import Info from '@/components/info'
import Text from '@/components/text'
import { AutowithdrawSettings } from '@/components/autowithdraw-shared'
import { autowithdrawInitial, AutowithdrawSettings } from '@/components/autowithdraw-shared'
import { canSend, isConfigured } from '@/wallets/common'
import { SSR } from '@/lib/constants'
import WalletButtonBar from '@/components/wallet-buttonbar'
import { useWalletConfigurator } from '@/wallets/config'
import { useMemo } from 'react'
import { useMe } from '@/components/me'

export const getServerSideProps = getGetServerSideProps({ authRequired: true })

@@ -21,19 +23,29 @@ export default function WalletSettings () {
const router = useRouter()
const { wallet: name } = router.query
const wallet = useWallet(name)
const { me } = useMe()
const { save, detach } = useWalletConfigurator(wallet)

const initial = wallet?.def.fields.reduce((acc, field) => {
// We still need to run over all wallet fields via reduce
// even though we use wallet.config as the initial value
// since wallet.config is empty when wallet is not configured.
// Also, wallet.config includes general fields like
// 'enabled' and 'priority' which are not defined in wallet.fields.
return {
...acc,
[field.name]: wallet?.config?.[field.name] || ''
const initial = useMemo(() => {
const initial = wallet?.def.fields.reduce((acc, field) => {
// We still need to run over all wallet fields via reduce
// even though we use wallet.config as the initial value
// since wallet.config is empty when wallet is not configured.
// Also, wallet.config includes general fields like
// 'enabled' and 'priority' which are not defined in wallet.fields.
return {
...acc,
[field.name]: wallet?.config?.[field.name] || ''
}
}, wallet?.config)
if (wallet?.def.clientOnly) {
return initial
}
}, wallet?.config)
return {
...initial,
...autowithdrawInitial({ me })
}
}, [wallet, me])

// check if wallet uses the form-level validation built into Formik or a Yup schema
const validateProps = typeof wallet?.fieldValidation === 'function'
@@ -12,7 +12,8 @@ export const fields = [
type: 'text',
placeholder: '55.5.555.55:3010',
hint: 'tor or clearnet',
clear: true
clear: true,
serverOnly: true
},
{
name: 'rune',
@@ -23,7 +24,8 @@ export const fields = [
type: 'text',
placeholder: 'S34KtUW-6gqS_hD_9cc_PNhfF-NinZyBOCgr1aIrark9NCZtZXRob2Q9aW52b2ljZQ==',
hint: 'must be restricted to method=invoice',
clear: true
clear: true,
serverOnly: true
},
{
name: 'cert',
@@ -32,7 +34,8 @@ export const fields = [
placeholder: 'LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUNNVENDQWRpZ0F3SUJBZ0lRSHBFdFdrcGJwZHV4RVF2eVBPc3NWVEFLQmdncWhrak9QUVFEQWpBdk1SOHcKSFFZRFZRUUtFeFpzYm1RZ1lYVjBiMmRsYm1WeVlYUmxaQ0JqWlhKME1Rd3dDZ1lEVlFRREV3TmliMkl3SGhjTgpNalF3TVRBM01qQXhORE0wV2hjTk1qVXdNekF6TWpBeE5ETTBXakF2TVI4d0hRWURWUVFLRXhac2JtUWdZWFYwCmIyZGxibVZ5WVhSbFpDQmpaWEowTVF3d0NnWURWUVFERXdOaWIySXdXVEFUQmdjcWhrak9QUUlCQmdncWhrak8KUFFNQkJ3TkNBQVJUS3NMVk5oZnhqb1FLVDlkVVdDbzUzSmQwTnBuL1BtYi9LUE02M1JxbU52dFYvdFk4NjJJZwpSbE41cmNHRnBEajhUeFc2OVhIK0pTcHpjWDdlN3N0Um80SFZNSUhTTUE0R0ExVWREd0VCL3dRRUF3SUNwREFUCkJnTlZIU1VFRERBS0JnZ3JCZ0VGQlFjREFUQVBCZ05WSFJNQkFmOEVCVEFEQVFIL01CMEdBMVVkRGdRV0JCVDAKMnh3V25GeHRUNzI0MWxwZlNoNm9FWi9UMWpCN0JnTlZIUkVFZERCeWdnTmliMktDQ1d4dlkyRnNhRzl6ZElJRApZbTlpZ2d4d2IyeGhjaTF1TVMxaWIyS0NGR2h2YzNRdVpHOWphMlZ5TG1sdWRHVnlibUZzZ2dSMWJtbDRnZ3AxCmJtbDRjR0ZqYTJWMGdnZGlkV1pqYjI1dWh3Ui9BQUFCaHhBQUFBQUFBQUFBQUFBQUFBQUFBQUFCaHdTc0VnQUQKTUFvR0NDcUdTTTQ5QkFNQ0EwY0FNRVFDSUEwUTlkRXdoNXpPRnpwL3hYeHNpemh5SkxNVG5yazU1VWx1NHJPRwo4WW52QWlBVGt4U3p3Y3hZZnFscGx0UlNIbmd0NUJFcDBzcXlHL05nenBzb2pmMGNqQT09Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K',
optional: 'optional if from [CA](https://en.wikipedia.org/wiki/Certificate_authority) (e.g. voltage)',
hint: 'hex or base64 encoded',
clear: true
clear: true,
serverOnly: true
}
]
@@ -1,7 +1,7 @@
import { useMe } from '@/components/me'
import useVault from '@/components/vault/use-vault'
import { useCallback } from 'react'
import { canReceive, canSend, getStorageKey, isClientField, isServerField } from './common'
import { canReceive, canSend, getStorageKey } from './common'
import { useMutation } from '@apollo/client'
import { generateMutation } from './graphql'
import { REMOVE_WALLET } from '@/fragments/wallet'
@@ -17,63 +17,71 @@ export function useWalletConfigurator (wallet) {
const [upsertWallet] = useMutation(generateMutation(wallet?.def))
const [removeWallet] = useMutation(REMOVE_WALLET)

const _saveToServer = useCallback(async (serverConfig, clientConfig) => {
const _saveToServer = useCallback(async (serverConfig, clientConfig, validateLightning) => {
const { serverWithShared, settings, clientOnly } = siftConfig(wallet.def.fields, { ...serverConfig, ...clientConfig })
const vaultEntries = []
if (clientConfig) {
for (const [key, value] of Object.entries(clientConfig)) {
if (clientOnly) {
for (const [key, value] of Object.entries(clientOnly)) {
vaultEntries.push({ key, value: encrypt(value) })
}
}
await upsertWallet({ variables: { ...serverConfig, vaultEntries } })
}, [encrypt, isActive])
await upsertWallet({ variables: { ...serverWithShared, settings, validateLightning, vaultEntries } })
}, [encrypt, isActive, wallet.def.fields])

const _saveToLocal = useCallback(async (newConfig) => {
window.localStorage.setItem(getStorageKey(wallet.def.name, me?.id), JSON.stringify(newConfig))
reloadLocalWallets()
}, [me?.id, wallet.def.name, reloadLocalWallets])

const save = useCallback(async (newConfig, validate = true) => {
let clientConfig = extractClientConfig(wallet.def.fields, newConfig)
let serverConfig = extractServerConfig(wallet.def.fields, newConfig)
const _validate = useCallback(async (config, validateLightning = true) => {
const { serverWithShared, clientWithShared } = siftConfig(wallet.def.fields, config)
console.log('sifted', siftConfig(wallet.def.fields, config))

if (validate) {
if (canSend(wallet)) {
let transformedConfig = await walletValidate(wallet, clientConfig)
let clientConfig = clientWithShared
let serverConfig = serverWithShared

if (canSend(wallet)) {
let transformedConfig = await walletValidate(wallet, clientWithShared)
if (transformedConfig) {
clientConfig = Object.assign(clientConfig, transformedConfig)
}
if (wallet.def.testSendPayment && validateLightning) {
transformedConfig = await wallet.def.testSendPayment(clientConfig, { me, logger })
if (transformedConfig) {
clientConfig = Object.assign(clientConfig, transformedConfig)
}
if (wallet.def.testSendPayment) {
transformedConfig = await wallet.def.testSendPayment(clientConfig, { me, logger })
if (transformedConfig) {
clientConfig = Object.assign(clientConfig, transformedConfig)
}
}
}

if (canReceive(wallet)) {
const transformedConfig = await walletValidate(wallet, serverConfig)
if (transformedConfig) {
serverConfig = Object.assign(serverConfig, transformedConfig)
}
}
}

if (canReceive(wallet)) {
const transformedConfig = await walletValidate(wallet, serverConfig)
if (transformedConfig) {
serverConfig = Object.assign(serverConfig, transformedConfig)
}
}

return { clientConfig, serverConfig }
}, [wallet])

const save = useCallback(async (newConfig, validateLightning = true) => {
const { clientConfig, serverConfig } = _validate(newConfig, validateLightning)

// if vault is active, encrypt and send to server regardless of wallet type
if (isActive) {
await _saveToServer(serverConfig, clientConfig)
await _saveToServer(serverConfig, clientConfig, validateLightning)
} else {
if (canSend(wallet)) {
await _saveToLocal(clientConfig)
}
if (canReceive(wallet)) {
await _saveToServer(serverConfig)
await _saveToServer(serverConfig, clientConfig, validateLightning)
}
}
}, [wallet, encrypt, isActive])
}, [isActive, _saveToServer, _saveToLocal, _validate])

const _detachFromServer = useCallback(async () => {
await removeWallet({ variables: { id: wallet.config.id } })
}, [wallet.config.id])
}, [wallet.config?.id])

const _detachFromLocal = useCallback(async () => {
// if vault is not active and has a client config, delete from local storage
@@ -95,30 +103,45 @@ export function useWalletConfigurator (wallet) {
return { save, detach }
}
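A rough sketch of how a settings form consumes this hook, mirroring the wallet settings page elsewhere in this diff; the form values are invented:

```js
// inside a component, mirroring the wallet settings page shown elsewhere in this diff;
// formValues is an invented placeholder for the submitted form state
const wallet = useWallet(name)
const { save, detach } = useWalletConfigurator(wallet)

const onSubmit = async (formValues) => {
  // second argument toggles the lightning round trip (test payment / test invoice)
  await save({ ...formValues, enabled: true }, true)
}

const onDetach = async () => {
  await detach() // removes the attached wallet again (server and/or local config)
}
```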

function extractConfig (fields, config, client, includeMeta = true) {
return Object.entries(config).reduce((acc, [key, value]) => {
function siftConfig (fields, config) {
const sifted = {
clientOnly: {},
serverOnly: {},
shared: {},
serverWithShared: {},
clientWithShared: {},
settings: {}
}

for (const [key, value] of Object.entries(config)) {
if (['id'].includes(key)) {
sifted.serverOnly[key] = value
continue
}

if (['autoWithdrawMaxFeePercent', 'autoWithdrawThreshold', 'autoWithdrawMaxFeeTotal'].includes(key)) {
sifted.serverOnly[key] = value
sifted.settings[key] = value
continue
}

const field = fields.find(({ name }) => name === key)

// filter server config which isn't specified as wallet fields
// (we allow autowithdraw members to pass validation)
if (client && key === 'id') return acc

// field might not exist because config.enabled doesn't map to a wallet field
if ((!field && includeMeta) || (field && (client ? isClientField(field) : isServerField(field)))) {
return {
...acc,
[key]: value
if (field) {
if (field.serverOnly) {
sifted.serverOnly[key] = value
} else if (field.clientOnly) {
sifted.clientOnly[key] = value
} else {
sifted.shared[key] = value
}
} else {
return acc
sifted.shared[key] = value
}
}, {})
}
}

function extractClientConfig (fields, config) {
return extractConfig(fields, config, true, true)
}
sifted.serverWithShared = { ...sifted.shared, ...sifted.serverOnly }
sifted.clientWithShared = { ...sifted.shared, ...sifted.clientOnly }

function extractServerConfig (fields, config) {
return extractConfig(fields, config, false, true)
return sifted
}
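To illustrate what the new siftConfig returns, a rough example with an invented field list and config:

```js
// hypothetical wallet fields: 'url' is shared, 'apiKey' is clientOnly, 'macaroon' is serverOnly
const fields = [
  { name: 'url' },
  { name: 'apiKey', clientOnly: true },
  { name: 'macaroon', serverOnly: true }
]

const sifted = siftConfig(fields, { id: '42', url: 'https://example.com', apiKey: 'secret', macaroon: '0201...' })
// sifted.serverOnly       -> { id: '42', macaroon: '0201...' }
// sifted.clientOnly       -> { apiKey: 'secret' }
// sifted.shared           -> { url: 'https://example.com' }
// sifted.serverWithShared -> shared merged with serverOnly (what gets sent to the server)
// sifted.clientWithShared -> shared merged with clientOnly (what gets validated/stored client side)
// autowithdraw keys (autoWithdrawThreshold etc.) would additionally land in sifted.settings
```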
@@ -1,5 +1,6 @@
import gql from 'graphql-tag'
import { isServerField } from './common'
import { WALLET_FIELDS } from '@/fragments/wallet'

export function fieldToGqlArg (field) {
let arg = `${field.name}: String`
@@ -30,30 +31,20 @@ export function generateMutation (wallet) {
let headerArgs = '$id: ID, '
headerArgs += wallet.fields
.filter(isServerField)
.map(f => {
const arg = `$${f.name}: String`
// required fields are checked server-side
// if (!f.optional) {
// arg += '!'
// }
return arg
}).join(', ')
headerArgs += ', $settings: AutowithdrawSettings!, $priorityOnly: Boolean, $canSend: Boolean!, $canReceive: Boolean!'
.map(f => `$${f.name}: String`)
.join(', ')
headerArgs += ', $settings: AutowithdrawSettings!, $validateLightning: Boolean'

let inputArgs = 'id: $id, '
inputArgs += wallet.fields
.filter(isServerField)
.map(f => `${f.name}: $${f.name}`).join(', ')
inputArgs += ', settings: $settings, priorityOnly: $priorityOnly, canSend: $canSend, canReceive: $canReceive,'
inputArgs += ', settings: $settings, validateLightning: $validateLightning,'

return gql`mutation ${resolverName}(${headerArgs}) {
${WALLET_FIELDS}
${resolverName}(${inputArgs}) {
id,
type,
enabled,
priority,
canReceive,
canSend
...WalletFields
}
}`
}
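To make the argument change concrete, a standalone sketch (field name invented) of the strings the updated generateMutation now builds — priorityOnly, canSend and canReceive are gone and $validateLightning is the only new flag:

```js
// a standalone sketch of the argument strings the updated generateMutation builds,
// using a hypothetical wallet with a single server field named 'url'
const fields = [{ name: 'url', serverOnly: true }]

let headerArgs = '$id: ID, '
headerArgs += fields.map(f => `$${f.name}: String`).join(', ')
headerArgs += ', $settings: AutowithdrawSettings!, $validateLightning: Boolean'

let inputArgs = 'id: $id, '
inputArgs += fields.map(f => `${f.name}: $${f.name}`).join(', ')
inputArgs += ', settings: $settings, validateLightning: $validateLightning,'

console.log(headerArgs)
// $id: ID, $url: String, $settings: AutowithdrawSettings!, $validateLightning: Boolean
console.log(inputArgs)
// id: $id, url: $url, settings: $settings, validateLightning: $validateLightning,
```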
@@ -11,7 +11,8 @@ export const fields = [
name: 'address',
label: 'lightning address',
type: 'text',
autoComplete: 'off'
autoComplete: 'off',
serverOnly: true
}
]
@@ -12,25 +12,29 @@ export const fields = [
label: 'pairing phrase',
type: 'password',
help: 'We only need permissions for the uri `/lnrpc.Lightning/SendPaymentSync`\n\nCreate a budgeted account with narrow permissions:\n\n```$ litcli accounts create --balance <budget>```\n\n```$ litcli sessions add --type custom --label <your label> --account_id <account_id> --uri /lnrpc.Lightning/SendPaymentSync```\n\nGrab the `pairing_secret_mnemonic` from the output and paste it here.',
editable: false
editable: false,
clientOnly: true
},
{
name: 'localKey',
type: 'text',
optional: true,
hidden: true
hidden: true,
clientOnly: true
},
{
name: 'remoteKey',
type: 'text',
optional: true,
hidden: true
hidden: true,
clientOnly: true
},
{
name: 'serverHost',
type: 'text',
optional: true,
hidden: true
hidden: true,
clientOnly: true
}
]
@@ -12,7 +12,8 @@ export const fields = [
type: 'text',
placeholder: '55.5.555.55:10001',
hint: 'tor or clearnet',
clear: true
clear: true,
serverOnly: true
},
{
name: 'macaroon',
@@ -24,7 +25,8 @@ export const fields = [
type: 'text',
placeholder: 'AgEDbG5kAlgDChCn7YgfWX7uTkQQgXZ2uahNEgEwGhYKB2FkZHJlc3MSBHJlYWQSBXdyaXRlGhcKCGludm9pY2VzEgRyZWFkEgV3cml0ZRoPCgdvbmNoYWluEgRyZWFkAAAGIJkMBrrDV0npU90JV0TGNJPrqUD8m2QYoTDjolaL6eBs',
hint: 'hex or base64 encoded',
clear: true
clear: true,
serverOnly: true
},
{
name: 'cert',
@@ -33,7 +35,8 @@ export const fields = [
placeholder: 'LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUNNVENDQWRpZ0F3SUJBZ0lRSHBFdFdrcGJwZHV4RVF2eVBPc3NWVEFLQmdncWhrak9QUVFEQWpBdk1SOHcKSFFZRFZRUUtFeFpzYm1RZ1lYVjBiMmRsYm1WeVlYUmxaQ0JqWlhKME1Rd3dDZ1lEVlFRREV3TmliMkl3SGhjTgpNalF3TVRBM01qQXhORE0wV2hjTk1qVXdNekF6TWpBeE5ETTBXakF2TVI4d0hRWURWUVFLRXhac2JtUWdZWFYwCmIyZGxibVZ5WVhSbFpDQmpaWEowTVF3d0NnWURWUVFERXdOaWIySXdXVEFUQmdjcWhrak9QUUlCQmdncWhrak8KUFFNQkJ3TkNBQVJUS3NMVk5oZnhqb1FLVDlkVVdDbzUzSmQwTnBuL1BtYi9LUE02M1JxbU52dFYvdFk4NjJJZwpSbE41cmNHRnBEajhUeFc2OVhIK0pTcHpjWDdlN3N0Um80SFZNSUhTTUE0R0ExVWREd0VCL3dRRUF3SUNwREFUCkJnTlZIU1VFRERBS0JnZ3JCZ0VGQlFjREFUQVBCZ05WSFJNQkFmOEVCVEFEQVFIL01CMEdBMVVkRGdRV0JCVDAKMnh3V25GeHRUNzI0MWxwZlNoNm9FWi9UMWpCN0JnTlZIUkVFZERCeWdnTmliMktDQ1d4dlkyRnNhRzl6ZElJRApZbTlpZ2d4d2IyeGhjaTF1TVMxaWIyS0NGR2h2YzNRdVpHOWphMlZ5TG1sdWRHVnlibUZzZ2dSMWJtbDRnZ3AxCmJtbDRjR0ZqYTJWMGdnZGlkV1pqYjI1dWh3Ui9BQUFCaHhBQUFBQUFBQUFBQUFBQUFBQUFBQUFCaHdTc0VnQUQKTUFvR0NDcUdTTTQ5QkFNQ0EwY0FNRVFDSUEwUTlkRXdoNXpPRnpwL3hYeHNpemh5SkxNVG5yazU1VWx1NHJPRwo4WW52QWlBVGt4U3p3Y3hZZnFscGx0UlNIbmd0NUJFcDBzcXlHL05nenBzb2pmMGNqQT09Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K',
optional: 'optional if from [CA](https://en.wikipedia.org/wiki/Certificate_authority) (e.g. voltage)',
hint: 'hex or base64 encoded',
clear: true
clear: true,
serverOnly: true
}
]