stacker.news/lib/fetch.js
ekzyis 72e2d19433
supercharged wallet logs (#1516)
* Inject wallet logger interface

* Include method in NWC logs

* Fix wrong page total

* Poll for new logs every second

* Fix overlapping pagination

* Remove unused total

* Better logs for incoming payments

* Use _setLogs instead of wrapper

* Remove inconsistent receive log

* Remove console.log from wallet logger on server

* Fix missing 'wallet detached' log

* Fix confirm_withdrawl code

* Remove duplicate autowithdrawal log

* Add context to log

* Add more context

* Better table styling

* Move CSS for wallet logs into one file

* remove unused logNav class
* rename classes

* Align key with second column

* Fix TypeError if context empty

* Check content-type header before calling res.json()

* Fix duplicate 'failed to create invoice'

* Parse details from LND error

* Fix invalid DOM property 'colspan'

* P2P zap logs with context

* Remove unnecessary withdrawal error log

* the code assignment was broken anyway
* we already log withdrawal errors using .catch on payViaPaymentRequest

* Don't show outgoing fee to receiver to avoid confusion

* Fix typo in comment

* Log if invoice was canceled by payer

* Automatically populate context from bolt11

* Fix missing context

* Fix wrap errors not logged

* Only log cancel if client canceled

* Remove unused imports

* Log withdrawal/forward success/error in payment flow

* Fix boss not passed to checkInvoice

* Fix TypeError

* Fix database timeouts caused by logger

The logger shares the same connection pool with any currently running transaction.

This means that we enter a classic deadlock when we await logger calls: the logger call is waiting for a connection but the currently running transaction is waiting for the logger call to finish before it can release a connection.

* Fix cache returning undefined

* Fix typo in comment

* Add padding-right to key in log context

* Always use 'incoming payment failed:'

---------

Co-authored-by: Keyan <34140557+huumn@users.noreply.github.com>
2024-11-08 13:26:40 -06:00

188 lines
5.3 KiB
JavaScript

/**
 * fetch() that aborts the request after `timeout` milliseconds.
 *
 * Note: any caller-supplied `options.signal` is overridden by the internal
 * AbortController's signal (same as the original behavior).
 *
 * @param {RequestInfo} resource - URL or Request passed straight to fetch()
 * @param {Object} [options] - fetch options; `timeout` (ms, default 1000) is
 *   extracted, the rest are forwarded unchanged
 * @returns {Promise<Response>} the fetch response
 * @throws {DOMException} AbortError when the timeout fires first; rethrows
 *   any other fetch error
 */
export async function fetchWithTimeout (resource, { timeout = 1000, ...options } = {}) {
  const controller = new AbortController()
  const id = setTimeout(() => controller.abort(), timeout)
  try {
    return await fetch(resource, {
      ...options,
      signal: controller.signal
    })
  } finally {
    // clear the timer even when fetch rejects (network error or abort);
    // the previous version leaked the pending timer on the rejection path
    clearTimeout(id)
  }
}
/**
 * Minimal least-recently-used cache backed by a Map.
 *
 * Map iteration order is insertion order, so the first key yielded by
 * `keys()` is always the least recently used entry; `get`/`set` re-insert
 * a key to move it to the "most recent" end.
 */
class LRUCache {
  constructor (maxSize = 100) {
    this.maxSize = maxSize
    this.cache = new Map()
  }

  /** Look up `key`; a hit is re-inserted to mark it most recently used. */
  get (key) {
    if (!this.cache.has(key)) return undefined
    const value = this.cache.get(key)
    // re-insert so this key becomes the newest entry
    this.cache.delete(key)
    this.cache.set(key, value)
    return value
  }

  /** Remove `key` if present; no-op otherwise. */
  delete (key) {
    this.cache.delete(key)
  }

  /** Insert or refresh `key`, evicting the oldest entry when at capacity. */
  set (key, value) {
    if (this.cache.has(key)) {
      this.cache.delete(key)
    } else if (this.cache.size >= this.maxSize) {
      const oldestKey = this.cache.keys().next().value
      this.cache.delete(oldestKey)
    }
    this.cache.set(key, value)
  }
}
/**
 * Build a console-based diagnostics logger for a cache.
 *
 * When `debug` is falsy every method is a no-op, so call sites can log
 * unconditionally with zero overhead. When truthy, the logger tracks
 * hit/miss/refresh counters and can periodically (every 60s) print stats
 * including an estimated memory footprint of the cache contents.
 *
 * @param {string} name - tag used in the `[CACHE:name]` log prefix
 * @param {LRUCache} cache - the cache to report on (reads `cache.cache`)
 * @param {*} debug - enables real logging when truthy
 * @returns {Object} log/errorLog, start/stopPeriodicLogging, and counter
 *   increment functions
 */
function createDebugLogger (name, cache, debug) {
  if (!debug) {
    const noop = () => {}
    return {
      log: noop,
      errorLog: noop,
      startPeriodicLogging: noop,
      stopPeriodicLogging: noop,
      incrementTotalFetches: noop,
      incrementCacheHits: noop,
      incrementCacheMisses: noop,
      incrementBackgroundRefreshes: noop
    }
  }

  const stats = {
    totalFetches: 0,
    cacheMisses: 0,
    cacheHits: 0,
    backgroundRefreshes: 0
  }
  let intervalId = null

  const log = (message) => console.log(`[CACHE:${name}] ${message}`)
  const errorLog = (message, error) => console.error(`[CACHE:${name}] ${message}`, error)

  // rough footprint: 2 bytes per UTF-16 unit of each key and of each
  // value's JSON serialization
  const estimateCacheSize = () => {
    let bytes = 0
    for (const [key, value] of cache.cache) {
      bytes += key.length * 2 + JSON.stringify(value).length * 2
    }
    return bytes
  }

  const startPeriodicLogging = () => {
    if (intervalId) return // already running; never stack intervals
    intervalId = setInterval(() => {
      const cacheSize = cache.cache.size
      const memorySizeBytes = estimateCacheSize()
      log(`Stats: total=${stats.totalFetches}, hits=${stats.cacheHits}, misses=${stats.cacheMisses}, backgroundRefreshes=${stats.backgroundRefreshes}, cacheSize=${cacheSize}, memoryFootprint=${memorySizeBytes} bytes`)
    }, 60000)
  }

  const stopPeriodicLogging = () => {
    if (!intervalId) return
    clearInterval(intervalId)
    intervalId = null
  }

  return {
    log,
    errorLog,
    startPeriodicLogging,
    stopPeriodicLogging,
    incrementTotalFetches: () => stats.totalFetches++,
    incrementCacheHits: () => stats.cacheHits++,
    incrementCacheMisses: () => stats.cacheMisses++,
    incrementBackgroundRefreshes: () => stats.backgroundRefreshes++
  }
}
/**
 * Wrap an async `fetcher` in an LRU cache with three freshness tiers:
 *   1. age < cacheExpiry            -> serve cached data (hit)
 *   2. age < forceRefreshThreshold  -> serve stale data, refresh in background
 *   3. otherwise                    -> blocking refetch (miss path)
 * A sentinel value of 0 disables the corresponding check: cacheExpiry === 0
 * means entries never expire; forceRefreshThreshold === 0 means stale entries
 * are always served while refreshing in the background.
 * NOTE(review): if cacheExpiry/forceRefreshThreshold are left undefined, the
 * `age < undefined` comparisons are false, pushing every call past that tier
 * — presumably callers always pass both; verify at call sites.
 *
 * @param {Function} fetcher - async function whose results are cached
 * @param {Object} options
 * @param {number} [options.maxSize=100] - LRU capacity
 * @param {number} options.cacheExpiry - ms before an entry is stale (0 = never)
 * @param {number} options.forceRefreshThreshold - ms before a stale entry
 *   forces a blocking refetch (0 = never force)
 * @param {Function} options.keyGenerator - maps fetcher args to a cache key (required)
 * @param {*} [options.debug] - enables cache logging
 * @returns {Function} async cached wrapper; also exposes stopPeriodicLogging()
 * @throws {Error} if keyGenerator is missing
 */
export function cachedFetcher (fetcher, {
  maxSize = 100, cacheExpiry, forceRefreshThreshold,
  keyGenerator, debug = process.env.DEBUG_CACHED_FETCHER
}) {
  const cache = new LRUCache(maxSize)
  // derive a log-friendly name; anonymous fetchers fall back to a slice of
  // their source text with whitespace collapsed
  const name = fetcher.name || fetcher.toString().slice(0, 20).replace(/\s+/g, '_')
  const logger = createDebugLogger(name, cache, debug)
  logger.log(`initializing with maxSize=${maxSize}, cacheExpiry=${cacheExpiry}, forceRefreshThreshold=${forceRefreshThreshold}`)
  logger.startPeriodicLogging()
  if (!keyGenerator) {
    throw new Error('keyGenerator is required')
  }
  const cachedFetch = async function (...args) {
    const key = keyGenerator(...args)
    const now = Date.now()
    logger.incrementTotalFetches()
    // fetches fresh data and overwrites the cache entry; note createdAt uses
    // `now` from the enclosing call, i.e. the time the triggering request
    // started, not when the fetch completed
    async function fetchAndCache () {
      logger.log(`Fetching data for key: ${key}`)
      const result = await fetcher(...args)
      cache.set(key, { data: result, createdAt: now })
      logger.log(`Data fetched and cached for key: ${key}`)
      return result
    }
    const cached = cache.get(key)
    if (cached) {
      const age = now - cached.createdAt
      if (cacheExpiry === 0 || age < cacheExpiry) {
        // tier 1: fresh enough — plain cache hit
        logger.incrementCacheHits()
        logger.log(`Cache hit for key: ${key}, age: ${age}ms`)
        return cached.data
      } else if (forceRefreshThreshold === 0 || age < forceRefreshThreshold) {
        // tier 2: stale but tolerable — serve stale data immediately and
        // refresh in the background (at most one refresh in flight per entry)
        if (cached.pendingPromise) {
          logger.log(`Already background refreshing key: ${key}`)
          return cached.data
        }
        logger.incrementBackgroundRefreshes()
        logger.log(`Background refresh for key: ${key}, age: ${age}ms`)
        cached.pendingPromise = fetchAndCache().catch(error => {
          // background failures are swallowed: keep serving the stale data
          logger.errorLog(`Background refresh failed for key: ${key}`, error)
          return cached.data
        }).finally(() => {
          logger.log(`Background refresh completed for key: ${key}`)
          delete cached.pendingPromise
        })
        return cached.data
      }
      // tier 3: too stale — if a forced refresh is already in flight, wait on
      // it instead of issuing a duplicate fetch
      if (cached.pendingPromise) {
        logger.log(`Waiting for pending force refresh for key: ${key}`)
        return await cached.pendingPromise
      }
    }
    // miss (or forced refresh with no pending fetch): fetch synchronously.
    // The entry is only written to the cache after the fetch succeeds; on
    // failure the key is deleted so the next call retries.
    logger.incrementCacheMisses()
    logger.log(`Cache miss for key: ${key}`)
    const entry = { createdAt: now, pendingPromise: fetchAndCache() }
    try {
      entry.data = await entry.pendingPromise
      cache.set(key, entry)
      return entry.data
    } catch (error) {
      logger.errorLog(`Error fetching data for key: ${key}`, error)
      cache.delete(key)
      throw error
    } finally {
      logger.log(`Fetch completed for key: ${key}`)
      delete entry.pendingPromise
    }
  }
  // Attach the stopPeriodicLogging method to the returned function
  cachedFetch.stopPeriodicLogging = logger.stopPeriodicLogging
  return cachedFetch
}