// stacker.news — lib/fetch.js
/**
 * `fetch` that aborts the request after `timeout` milliseconds.
 *
 * @param {string|URL|Request} resource - passed straight through to `fetch`
 * @param {Object} [options] - `fetch` options, plus:
 * @param {number} [options.timeout=1000] - ms to wait before aborting
 * @returns {Promise<Response>}
 * @throws an AbortError-flavored rejection when the timeout fires first
 */
export async function fetchWithTimeout (resource, { timeout = 1000, ...options } = {}) {
  const controller = new AbortController()
  const id = setTimeout(() => controller.abort(), timeout)
  try {
    // NOTE(review): this overwrites any caller-supplied options.signal — confirm intended
    return await fetch(resource, {
      ...options,
      signal: controller.signal
    })
  } finally {
    // FIX: the old code only cleared the timer on success; a rejected fetch
    // (network error or abort) leaked the timeout and kept the event loop alive
    clearTimeout(id)
  }
}
/**
 * Minimal least-recently-used cache backed by a Map.
 *
 * A Map iterates in insertion order, so the first key is always the least
 * recently used entry as long as every access re-inserts the key — which is
 * exactly what `get` and `set` do below.
 */
class LRUCache {
  /**
   * @param {number} [maxSize=100] - entry count at which eviction kicks in
   */
  constructor (maxSize = 100) {
    this.maxSize = maxSize
    this.cache = new Map()
  }

  /**
   * Look up `key`, promoting it to most-recently-used on a hit.
   * @returns the cached value, or undefined when absent
   */
  get (key) {
    const present = this.cache.has(key)
    if (!present) return undefined
    const value = this.cache.get(key)
    // re-insert so the key moves to the back of the Map's iteration order
    this.cache.delete(key)
    this.cache.set(key, value)
    return value
  }

  /** Remove `key` from the cache if present; no-op otherwise. */
  delete (key) {
    this.cache.delete(key)
  }

  /** Insert or replace `key`, evicting the LRU entry when at capacity. */
  set (key, value) {
    // Map.delete returns whether the key existed; replacing an existing
    // key never triggers eviction
    const existed = this.cache.delete(key)
    if (!existed && this.cache.size >= this.maxSize) {
      // first key in iteration order is the least recently used
      const lru = this.cache.keys().next().value
      this.cache.delete(lru)
    }
    this.cache.set(key, value)
  }
}
/**
 * Build the debug/stats instrumentation used by cachedFetcher.
 *
 * When `debug` is falsy every method is a no-op, so call sites stay
 * unconditional at zero cost. When truthy, hit/miss/refresh counters are
 * tracked and a summary line is printed once a minute while periodic
 * logging is running.
 *
 * @param {string} name - label prefixed to every log line
 * @param {LRUCache} cache - cache whose `cache` Map is inspected for stats
 * @param {*} debug - truthy to enable logging
 * @returns {Object} log/errorLog, start/stopPeriodicLogging, and counter bumpers
 */
function createDebugLogger (name, cache, debug) {
  const noop = () => {}
  if (!debug) {
    return {
      log: noop,
      errorLog: noop,
      startPeriodicLogging: noop,
      stopPeriodicLogging: noop,
      incrementTotalFetches: noop,
      incrementCacheHits: noop,
      incrementCacheMisses: noop,
      incrementBackgroundRefreshes: noop
    }
  }

  let totalFetches = 0
  let cacheMisses = 0
  let cacheHits = 0
  let backgroundRefreshes = 0
  let intervalId = null

  const log = (message) => console.log(`[CACHE:${name}] ${message}`)
  const errorLog = (message, error) => console.error(`[CACHE:${name}] ${message}`, error)

  // Rough memory estimate: 2 bytes per UTF-16 code unit of key + JSON-encoded value
  function estimateCacheSize () {
    let size = 0
    for (const [key, value] of cache.cache) {
      size += key.length * 2
      size += JSON.stringify(value).length * 2
    }
    return size
  }

  function startPeriodicLogging () {
    if (intervalId) return // Prevent multiple intervals
    intervalId = setInterval(() => {
      const cacheSize = cache.cache.size
      const memorySizeBytes = estimateCacheSize()
      log(`Stats: total=${totalFetches}, hits=${cacheHits}, misses=${cacheMisses}, backgroundRefreshes=${backgroundRefreshes}, cacheSize=${cacheSize}, memoryFootprint=${memorySizeBytes} bytes`)
    }, 60000)
    // FIX: don't let a debug stats timer keep the Node process alive;
    // guarded because browser setInterval returns a number without unref
    if (typeof intervalId.unref === 'function') intervalId.unref()
  }

  function stopPeriodicLogging () {
    if (intervalId) {
      clearInterval(intervalId)
      intervalId = null
    }
  }

  return {
    log,
    errorLog,
    startPeriodicLogging,
    stopPeriodicLogging,
    incrementTotalFetches: () => totalFetches++,
    incrementCacheHits: () => cacheHits++,
    incrementCacheMisses: () => cacheMisses++,
    incrementBackgroundRefreshes: () => backgroundRefreshes++
  }
}
/**
 * Wrap an async `fetcher` with an LRU cache, stale-while-revalidate
 * background refreshing, and deduplication of concurrent fetches.
 *
 * @param {Function} fetcher - async function whose results get cached
 * @param {Object} options
 * @param {number} [options.maxSize=100] - max cached entries (LRU eviction)
 * @param {number} options.cacheExpiry - ms before an entry is stale (0 = never stale)
 * @param {number} options.forceRefreshThreshold - ms after which a stale entry is
 *   refetched in the foreground (0 = always refresh in the background)
 * @param {Function} options.keyGenerator - maps fetcher args to a cache key (required)
 * @param {*} [options.debug] - enable cache debug logging
 * @returns {Function} async cached fetch; also exposes `stopPeriodicLogging()`
 * @throws {Error} when no keyGenerator is supplied
 */
export function cachedFetcher (fetcher, {
  maxSize = 100, cacheExpiry, forceRefreshThreshold,
  keyGenerator, debug = process.env.DEBUG_CACHED_FETCHER
}) {
  // FIX: validate before starting the stats interval so a bad call
  // doesn't leak a running timer
  if (!keyGenerator) {
    throw new Error('keyGenerator is required')
  }

  const cache = new LRUCache(maxSize)
  const name = fetcher.name || fetcher.toString().slice(0, 20).replace(/\s+/g, '_')
  const logger = createDebugLogger(name, cache, debug)
  logger.log(`initializing with maxSize=${maxSize}, cacheExpiry=${cacheExpiry}, forceRefreshThreshold=${forceRefreshThreshold}`)
  logger.startPeriodicLogging()

  const cachedFetch = async function (...args) {
    const key = keyGenerator(...args)
    const now = Date.now()
    logger.incrementTotalFetches()

    // fetch fresh data and store it under `key`
    async function fetchAndCache () {
      logger.log(`Fetching data for key: ${key}`)
      const result = await fetcher(...args)
      cache.set(key, { data: result, createdAt: now })
      logger.log(`Data fetched and cached for key: ${key}`)
      return result
    }

    const cached = cache.get(key)
    if (cached) {
      // BUG FIX: a placeholder entry created by a concurrent *initial* fetch
      // has `pendingPromise` but no `data` property yet. The old code saw
      // age ≈ 0, took the "fresh" branch, and returned undefined to every
      // concurrent caller. Instead, wait for the in-flight fetch.
      if (!('data' in cached) && cached.pendingPromise) {
        logger.log(`Waiting for in-flight initial fetch for key: ${key}`)
        return await cached.pendingPromise
      }

      const age = now - cached.createdAt

      // fresh enough: serve straight from the cache
      if (cacheExpiry === 0 || age < cacheExpiry) {
        logger.incrementCacheHits()
        logger.log(`Cache hit for key: ${key}, age: ${age}ms`)
        return cached.data
      }

      // stale but inside the refresh window: serve stale data now and
      // refresh in the background (deduped on cached.pendingPromise)
      if (forceRefreshThreshold === 0 || age < forceRefreshThreshold) {
        if (cached.pendingPromise) {
          logger.log(`Already background refreshing key: ${key}`)
          return cached.data
        }
        logger.incrementBackgroundRefreshes()
        logger.log(`Background refresh for key: ${key}, age: ${age}ms`)
        cached.pendingPromise = fetchAndCache().catch(error => {
          // keep serving stale data when a background refresh fails
          logger.errorLog(`Background refresh failed for key: ${key}`, error)
          return cached.data
        }).finally(() => {
          logger.log(`Background refresh completed for key: ${key}`)
          delete cached.pendingPromise
        })
        return cached.data
      }

      // too stale for background refresh: dedupe concurrent force refreshes
      if (cached.pendingPromise) {
        logger.log(`Waiting for pending force refresh for key: ${key}`)
        return await cached.pendingPromise
      }
    }

    logger.incrementCacheMisses()
    logger.log(`Cache miss for key: ${key}`)

    // store the placeholder before awaiting so concurrent callers dedupe on it
    const entry = { createdAt: now, pendingPromise: fetchAndCache() }
    cache.set(key, entry)
    try {
      entry.data = await entry.pendingPromise
      return entry.data
    } catch (error) {
      logger.errorLog(`Error fetching data for key: ${key}`, error)
      // drop the failed placeholder so the next call retries
      cache.delete(key)
      throw error
    } finally {
      logger.log(`Fetch completed for key: ${key}`)
      delete entry.pendingPromise
    }
  }

  // Attach the stopPeriodicLogging method to the returned function
  cachedFetch.stopPeriodicLogging = logger.stopPeriodicLogging
  return cachedFetch
}