caching is hard
commit adcb80782b (parent 449568e3a2)
@@ -108,7 +108,7 @@ export function getPaymentFailureStatus (withdrawal) {
   }
 }
 
-export const getBlockHeight = cachedFetcher(async ({ lnd, ...args }) => {
+export const getBlockHeight = cachedFetcher(async function fetchBlockHeight ({ lnd, ...args }) {
   try {
     const { current_block_height: height } = await getHeight({ lnd, ...args })
     return height
@@ -122,7 +122,7 @@ export const getBlockHeight = cachedFetcher(async ({ lnd, ...args }) => {
   keyGenerator: () => 'getHeight'
 })
 
-export const getOurPubkey = cachedFetcher(async ({ lnd, ...args }) => {
+export const getOurPubkey = cachedFetcher(async function fetchOurPubkey ({ lnd, ...args }) {
   try {
     const { identity } = await getIdentity({ lnd, ...args })
     return identity.public_key
@@ -136,7 +136,7 @@ export const getOurPubkey = cachedFetcher(async ({ lnd, ...args }) => {
   keyGenerator: () => 'getOurPubkey'
 })
 
-export const getNodeSockets = cachedFetcher(async ({ lnd, ...args }) => {
+export const getNodeSockets = cachedFetcher(async function fetchNodeSockets ({ lnd, ...args }) {
   try {
     return (await getNode({ lnd, is_omitting_channels: true, ...args }))?.sockets
   } catch (err) {

@@ -2,7 +2,7 @@ import { isServiceEnabled } from '@/lib/sndev'
 import { cachedFetcher } from '@/lib/fetch'
 import { getHeight } from 'ln-service'
 
-const getBlockHeight = cachedFetcher(async ({ lnd }) => {
+const getBlockHeight = cachedFetcher(async function fetchBlockHeight ({ lnd }) {
   try {
     const { current_block_height: height } = await getHeight({ lnd })
     return height

@@ -1,6 +1,6 @@
 import { cachedFetcher } from '@/lib/fetch'
 
-const getChainFeeRate = cachedFetcher(async () => {
+const getChainFeeRate = cachedFetcher(async function fetchChainFeeRate () {
   const url = 'https://mempool.space/api/v1/fees/recommended'
   try {
     const res = await fetch(url)

@@ -1,7 +1,7 @@
 import { SUPPORTED_CURRENCIES } from '@/lib/currency'
 import { cachedFetcher } from '@/lib/fetch'
 
-const getPrice = cachedFetcher(async (fiat = 'USD') => {
+const getPrice = cachedFetcher(async function fetchPrice (fiat = 'USD') {
   const url = `https://api.coinbase.com/v2/prices/BTC-${fiat}/spot`
   try {
     const res = await fetch(url)
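Note on the call-site changes above: every anonymous arrow passed to cachedFetcher becomes a named async function expression. The new debug logger in lib/fetch.js (below) labels its output with fetcher.name and only falls back to a mangled slice of the function source when no name is available. A small sketch of that difference, using a hypothetical labelOf helper that mirrors the fallback expression from the diff (the inline fetchers here are placeholders, not the real ones):

// Hypothetical helper, not part of the commit: mirrors the name fallback added in lib/fetch.js.
function labelOf (fetcher) {
  return fetcher.name || fetcher.toString().slice(0, 20).replace(/\s+/g, '_')
}

// An arrow passed inline gets no inferred name, so the label degrades to sliced source text.
console.log(labelOf(async ({ lnd }) => null)) // something like 'async_({_lnd_})_=>_n'

// A named function expression keeps its name, so debug output reads [CACHE:fetchBlockHeight].
console.log(labelOf(async function fetchBlockHeight ({ lnd }) { return null })) // 'fetchBlockHeight'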
							
								
								
									
lib/fetch.js (125 changed lines)
@@ -36,76 +36,145 @@ class LRUCache {
   }
 }
 
-export function cachedFetcher (fetcher, { maxSize = 100, cacheExpiry, forceRefreshThreshold, keyGenerator }) {
-  const cache = new LRUCache(maxSize)
-  console.log(`[CACHE] Initializing cache: maxSize=${maxSize}, cacheExpiry=${cacheExpiry}, forceRefreshThreshold=${forceRefreshThreshold}`)
+function createDebugLogger (name, cache, debug) {
+  const noop = () => {}
 
-  if (!keyGenerator) {
-    throw new Error('keyGenerator is required')
+  if (!debug) {
+    return {
+      log: noop,
+      errorLog: noop,
+      startPeriodicLogging: noop,
+      stopPeriodicLogging: noop,
+      incrementTotalFetches: noop,
+      incrementCacheHits: noop,
+      incrementCacheMisses: noop,
+      incrementBackgroundRefreshes: noop
+    }
   }
 
   let totalFetches = 0
   let cacheMisses = 0
   let cacheHits = 0
   let backgroundRefreshes = 0
+  let intervalId = null
 
-  setInterval(() => {
-    console.log(`[CACHE] Stats: total=${totalFetches}, hits=${cacheHits}, misses=${cacheMisses}, backgroundRefreshes=${backgroundRefreshes}, cacheSize=${cache.cache.size}`)
-  }, 60000) // Log stats every minute
+  const log = (message) => console.log(`[CACHE:${name}] ${message}`)
+  const errorLog = (message, error) => console.error(`[CACHE:${name}] ${message}`, error)
 
-  return async function cachedFetch (...args) {
+  function estimateCacheSize () {
+    let size = 0
+    for (const [key, value] of cache.cache) {
+      size += key.length * 2
+      size += JSON.stringify(value).length * 2
+    }
+    return size
+  }
+
+  function startPeriodicLogging () {
+    if (intervalId) return // Prevent multiple intervals
+    intervalId = setInterval(() => {
+      const cacheSize = cache.cache.size
+      const memorySizeBytes = estimateCacheSize()
+      log(`Stats: total=${totalFetches}, hits=${cacheHits}, misses=${cacheMisses}, backgroundRefreshes=${backgroundRefreshes}, cacheSize=${cacheSize}, memoryFootprint=${memorySizeBytes} bytes`)
+    }, 60000)
+  }
+
+  function stopPeriodicLogging () {
+    if (intervalId) {
+      clearInterval(intervalId)
+      intervalId = null
+    }
+  }
+
+  return {
+    log,
+    errorLog,
+    startPeriodicLogging,
+    stopPeriodicLogging,
+    incrementTotalFetches: () => totalFetches++,
+    incrementCacheHits: () => cacheHits++,
+    incrementCacheMisses: () => cacheMisses++,
+    incrementBackgroundRefreshes: () => backgroundRefreshes++
+  }
+}
+
+export function cachedFetcher (fetcher, { maxSize = 100, cacheExpiry, forceRefreshThreshold, keyGenerator, debug = false }) {
+  const cache = new LRUCache(maxSize)
+  const name = fetcher.name || fetcher.toString().slice(0, 20).replace(/\s+/g, '_')
+  const logger = createDebugLogger(name, cache, debug)
+
+  logger.log(`initializing with maxSize=${maxSize}, cacheExpiry=${cacheExpiry}, forceRefreshThreshold=${forceRefreshThreshold}`)
+  logger.startPeriodicLogging()
+
+  if (!keyGenerator) {
+    throw new Error('keyGenerator is required')
+  }
+
+  const cachedFetch = async function (...args) {
     const key = keyGenerator(...args)
     const now = Date.now()
-    totalFetches++
+    logger.incrementTotalFetches()
 
     async function fetchAndCache () {
-      console.log(`[CACHE] Fetching data for key: ${key}`)
+      logger.log(`Fetching data for key: ${key}`)
       const result = await fetcher(...args)
       cache.set(key, { data: result, createdAt: now })
-      console.log(`[CACHE] Data fetched and cached for key: ${key}`)
+      logger.log(`Data fetched and cached for key: ${key}`)
       return result
     }
 
     const cached = cache.get(key)
 
     if (cached) {
-      if (cached.pendingPromise) {
-        console.log(`[CACHE] Waiting for pending promise for key: ${key}`)
-        return await cached.pendingPromise
-      }
-
       const age = now - cached.createdAt
 
       if (cacheExpiry === 0 || age < cacheExpiry) {
-        cacheHits++
-        console.log(`[CACHE] Cache hit for key: ${key}, age: ${age}ms`)
+        logger.incrementCacheHits()
+        logger.log(`Cache hit for key: ${key}, age: ${age}ms`)
         return cached.data
       } else if (forceRefreshThreshold === 0 || age < forceRefreshThreshold) {
-        backgroundRefreshes++
-        console.log(`[CACHE] Background refresh for key: ${key}, age: ${age}ms`)
-        cached.pendingPromise = fetchAndCache()
-        cached.pendingPromise.finally(() => {
-          console.log(`[CACHE] Background refresh completed for key: ${key}`)
+        if (cached.pendingPromise) {
+          logger.log(`Already background refreshing key: ${key}`)
+          return cached.data
+        }
+
+        logger.incrementBackgroundRefreshes()
+        logger.log(`Background refresh for key: ${key}, age: ${age}ms`)
+        cached.pendingPromise = fetchAndCache().catch(error => {
+          logger.errorLog(`Background refresh failed for key: ${key}`, error)
+          return cached.data
+        }).finally(() => {
+          logger.log(`Background refresh completed for key: ${key}`)
          delete cached.pendingPromise
         })
         return cached.data
       }
+
+      if (cached.pendingPromise) {
+        logger.log(`Waiting for pending force refresh for key: ${key}`)
+        return await cached.pendingPromise
+      }
     }
 
-    cacheMisses++
-    console.log(`[CACHE] Cache miss for key: ${key}`)
+    logger.incrementCacheMisses()
+    logger.log(`Cache miss for key: ${key}`)
     const entry = { createdAt: now, pendingPromise: fetchAndCache() }
     cache.set(key, entry)
     try {
       entry.data = await entry.pendingPromise
       return entry.data
     } catch (error) {
-      console.error(`[CACHE] Error fetching data for key: ${key}`, error)
+      logger.errorLog(`Error fetching data for key: ${key}`, error)
       cache.delete(key)
       throw error
     } finally {
-      console.log(`[CACHE] Fetch completed for key: ${key}`)
+      logger.log(`Fetch completed for key: ${key}`)
       delete entry.pendingPromise
     }
   }
+
+  // Attach the stopPeriodicLogging method to the returned function
+  cachedFetch.stopPeriodicLogging = logger.stopPeriodicLogging
+
+  return cachedFetch
 }
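The behavioral fix in the hunk above is in the background-refresh branch: the old code attached only .finally() to the refresh promise, so a failed refresh rejected with no handler (an unhandled rejection) and left a rejected pendingPromise for anyone awaiting it. The new code chains .catch() first, logs the failure, and resolves the stored promise with the stale cached value; the duplicate-refresh guard and the "waiting for pending force refresh" branch are also new. A toy illustration of the promise-shape difference (not code from the commit; flakyFetch and the values are made up):

// Toy illustration with made-up names; shows what the added .catch buys.
async function flakyFetch () {
  throw new Error('upstream down')
}

const staleData = { height: 840000 } // hypothetical previously cached value

// Old shape (left commented out because it terminates Node with an unhandled rejection):
// const pending = flakyFetch()
// pending.finally(() => { /* delete cached.pendingPromise */ })

// New shape: the failure is logged and the stale value is returned, so callers that
// await the stored promise still get data and the cache entry stays usable.
const pending = flakyFetch()
  .catch(err => {
    console.error('background refresh failed:', err.message)
    return staleData
  })
  .finally(() => { /* delete cached.pendingPromise */ })

pending.then(data => console.log('served:', data)) // served: { height: 840000 }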
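For reference, a minimal usage sketch of the new cachedFetcher surface. Only the option names, the debug flag, and the attached stopPeriodicLogging come from the diff; the fetcher body, cache key, and timing values here are made up:

import { cachedFetcher } from '@/lib/fetch'

// Hypothetical fetcher and tuning values, for illustration only.
const getUsdPrice = cachedFetcher(async function fetchUsdPrice () {
  const res = await fetch('https://api.coinbase.com/v2/prices/BTC-USD/spot')
  const body = await res.json()
  return body?.data?.amount
}, {
  maxSize: 1,
  cacheExpiry: 60 * 1000, // entries younger than a minute are served as-is
  forceRefreshThreshold: 5 * 60 * 1000, // 1-5 minutes old: return stale data, refresh in the background
  keyGenerator: () => 'price:USD',
  debug: true // enables [CACHE:fetchUsdPrice] logging plus the 60s stats interval
})

const price = await getUsdPrice()
console.log(price)

// The wrapper now exposes the logger's interval teardown, e.g. for tests or shutdown.
getUsdPrice.stopPeriodicLogging()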