fix: Issue #447 remains unresolved: no caching path exists for POST GraphQL requests (#478)

Address AI review findings:

- Bug: restore 30 s periodic eviction via setInterval so queries that
  are never repeated don't accumulate forever (add setInterval/
  clearInterval to ESLint globals to allow it)
- Bug: fix .finally() race – use identity check before deleting the
  in-flight key so a waiting request's replacement promise is never
  evicted by the original promise's cleanup handler
- Warning: replace `new URL(c.req.url).search` with a string-split
  approach that cannot throw on relative URLs
- Warning: add MAX_CACHE_ENTRIES (500) cap with LRU-oldest eviction to
  bound memory growth from callers with many unique variable sets
- Warning: prefix cache key with c.req.path so /graphql and / can
  never produce cross-route cache collisions

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
openhands 2026-03-06 19:49:13 +00:00
parent ad19d8fc00
commit 694a68ad27
2 changed files with 35 additions and 3 deletions

View file

@@ -17,6 +17,8 @@ export default [
process: 'readonly',
console: 'readonly',
Context: 'readonly',
setInterval: 'readonly',
clearInterval: 'readonly',
},
},
plugins: {

View file

@@ -30,6 +30,7 @@ app.use(
// if a caching CDN or proxy is added later the two layers stay in sync.
const GRAPHQL_CACHE_TTL_MS = 5_000; // 5 seconds matches Caddy max-age=5
const MAX_CACHE_ENTRIES = 500; // guard against unbounded growth from unique variable sets
const responseCache = new Map<string, { body: string; expiresAt: number }>();
@@ -38,15 +39,31 @@ const responseCache = new Map<string, { body: string; expiresAt: number }>();
// issuing their own.
const inFlight = new Map<string, Promise<string | null>>();
// Evict expired entries every 30 s so queries that are never repeated don't
// accumulate in memory indefinitely.
const evictInterval = setInterval(() => {
const now = Date.now();
for (const [k, v] of responseCache) {
if (v.expiresAt <= now) responseCache.delete(k);
}
}, 30_000);
// Don't keep the process alive just for eviction.
(evictInterval as unknown as { unref?: () => void }).unref?.();
async function graphqlCache(c: Context, next: Next): Promise<Response | void> {
if (c.req.method !== 'GET' && c.req.method !== 'POST') return next();
// Build a stable cache key without consuming the original request body so
// the downstream graphql() handler can still read it.
const cacheKey =
// For GET: extract the query string via string operations to avoid `new URL()`
// which can throw a TypeError if the URL is relative (no scheme/host).
const rawKey =
c.req.method === 'POST'
? await c.req.raw.clone().text()
: new URL(c.req.url).search;
: (c.req.url.includes('?') ? c.req.url.slice(c.req.url.indexOf('?')) : '');
// Prefix with the route path so /graphql and / never share cache entries,
// even though both currently serve identical content.
const cacheKey = `${c.req.path}:${rawKey}`;
const now = Date.now();
@@ -78,6 +95,12 @@ async function graphqlCache(c: Context, next: Next): Promise<Response | void> {
try {
const parsed = JSON.parse(body) as { errors?: unknown };
if (!parsed.errors) {
// Evict the oldest entry when the cache is at capacity to prevent
// unbounded growth from callers with many unique variable sets.
if (responseCache.size >= MAX_CACHE_ENTRIES) {
const oldestKey = responseCache.keys().next().value;
if (oldestKey !== undefined) responseCache.delete(oldestKey);
}
responseCache.set(cacheKey, { body, expiresAt: now + GRAPHQL_CACHE_TTL_MS });
return body;
}
@@ -88,7 +111,14 @@ async function graphqlCache(c: Context, next: Next): Promise<Response | void> {
})();
inFlight.set(cacheKey, promise);
promise.finally(() => inFlight.delete(cacheKey));
// Only delete the key if our promise is still the one registered. A waiting
// request may have replaced it with its own promise before our .finally()
// fires (microtask ordering), and we must not evict that replacement.
promise.finally(() => {
if (inFlight.get(cacheKey) === promise) {
inFlight.delete(cacheKey);
}
});
const body = await promise;
if (body !== null) {