diff --git a/services/ponder/eslint.config.js b/services/ponder/eslint.config.js
index 77fb894..e6d4f05 100644
--- a/services/ponder/eslint.config.js
+++ b/services/ponder/eslint.config.js
@@ -17,6 +17,8 @@ export default [
         process: 'readonly',
         console: 'readonly',
         Context: 'readonly',
+        setInterval: 'readonly',
+        clearInterval: 'readonly',
       },
     },
     plugins: {
diff --git a/services/ponder/src/api/index.ts b/services/ponder/src/api/index.ts
index a4d98eb..725ed5e 100644
--- a/services/ponder/src/api/index.ts
+++ b/services/ponder/src/api/index.ts
@@ -30,6 +30,7 @@ app.use(
 // if a caching CDN or proxy is added later the two layers stay in sync.
 
 const GRAPHQL_CACHE_TTL_MS = 5_000; // 5 seconds – matches Caddy max-age=5
+const MAX_CACHE_ENTRIES = 500; // guard against unbounded growth from unique variable sets
 
 const responseCache = new Map<string, { body: string; expiresAt: number }>();
 
@@ -38,15 +39,31 @@ const responseCache = new Map<string, { body: string; expiresAt: number }>();
 // issuing their own.
 const inFlight = new Map<string, Promise<string | null>>();
 
+// Evict expired entries every 30 s so queries that are never repeated don't
+// accumulate in memory indefinitely.
+const evictInterval = setInterval(() => {
+  const now = Date.now();
+  for (const [k, v] of responseCache) {
+    if (v.expiresAt <= now) responseCache.delete(k);
+  }
+}, 30_000);
+// Don't keep the process alive just for eviction.
+(evictInterval as unknown as { unref?: () => void }).unref?.();
+
 async function graphqlCache(c: Context, next: Next): Promise<Response | void> {
   if (c.req.method !== 'GET' && c.req.method !== 'POST') return next();
 
   // Build a stable cache key without consuming the original request body so
   // the downstream graphql() handler can still read it.
-  const cacheKey =
+  // For GET: extract the query string via string operations to avoid `new URL()`
+  // which can throw a TypeError if the URL is relative (no scheme/host).
+  const rawKey =
     c.req.method === 'POST'
       ? await c.req.raw.clone().text()
-      : new URL(c.req.url).search;
+      : (c.req.url.includes('?') ? c.req.url.slice(c.req.url.indexOf('?')) : '');
+  // Prefix with the route path so /graphql and / never share cache entries,
+  // even though both currently serve identical content.
+  const cacheKey = `${c.req.path}:${rawKey}`;
 
   const now = Date.now();
 
@@ -78,6 +95,12 @@ async function graphqlCache(c: Context, next: Next): Promise<Response | void> {
     try {
       const parsed = JSON.parse(body) as { errors?: unknown };
       if (!parsed.errors) {
+        // Evict the oldest entry when the cache is at capacity to prevent
+        // unbounded growth from callers with many unique variable sets.
+        if (responseCache.size >= MAX_CACHE_ENTRIES) {
+          const oldestKey = responseCache.keys().next().value;
+          if (oldestKey !== undefined) responseCache.delete(oldestKey);
+        }
         responseCache.set(cacheKey, { body, expiresAt: now + GRAPHQL_CACHE_TTL_MS });
         return body;
       }
@@ -88,7 +111,14 @@ async function graphqlCache(c: Context, next: Next): Promise<Response | void> {
   })();
 
   inFlight.set(cacheKey, promise);
-  promise.finally(() => inFlight.delete(cacheKey));
+  // Only delete the key if our promise is still the one registered. A waiting
+  // request may have replaced it with its own promise before our .finally()
+  // fires (microtask ordering), and we must not evict that replacement.
+  promise.finally(() => {
+    if (inFlight.get(cacheKey) === promise) {
+      inFlight.delete(cacheKey);
+    }
+  });
 
   const body = await promise;
   if (body !== null) {