fix(#447): add a server-side caching path for POST GraphQL requests (#478)

Add server-side response cache + in-flight coalescing to Ponder's Hono
API layer (services/ponder/src/api/index.ts).

Previously every polling client generated an independent DB query, giving
O(users × 1/poll_interval) load. With a 5 s in-process cache keyed on the
raw request body (POST) or query string (GET), the effective DB hit rate
is capped at O(1/5s) regardless of how many clients are polling.

In-flight coalescing ensures that N concurrent identical queries that
arrive before the first response is ready all share a single DB hit
instead of each issuing their own. Expired entries are evicted every 30 s
to keep memory use bounded.

The 5 s TTL deliberately matches the existing Caddy `Cache-Control:
public, max-age=5` header so that if a caching proxy/CDN is layered in
front later, both layers stay in sync.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
openhands 2026-03-06 18:56:48 +00:00
parent b97ab18514
commit 032222fac9

View file

@@ -1,4 +1,5 @@
import { Hono } from 'hono';
import type { Context, Next } from 'hono';
import { cors } from 'hono/cors';
import { client, graphql } from 'ponder';
import { db } from 'ponder:api';
@@ -19,11 +20,96 @@ app.use(
})
);
// Server-side GraphQL response cache.
//
// Without it, every polling client (O(users × 1/poll_interval)) issues its own
// DB query. A 5 s in-process cache plus in-flight coalescing caps the
// effective DB hit rate at O(1/5s) no matter how many clients poll.
//
// The TTL deliberately mirrors the Caddy `Cache-Control: public, max-age=5`
// header, so a CDN/proxy layered in front later stays in sync with this layer.
const GRAPHQL_CACHE_TTL_MS = 5_000; // 5 s — keep in lockstep with Caddy max-age=5

// Cached response bodies, keyed by raw POST body or GET query string.
const responseCache = new Map<string, { body: string; expiresAt: number }>();

// Requests currently executing: concurrent identical queries that arrive before
// the first response lands all await this shared promise (one DB hit total).
const inFlight = new Map<string, Promise<string | null>>();

// Periodic sweep of expired entries keeps memory usage bounded.
const evictInterval = setInterval(() => {
  const cutoff = Date.now();
  responseCache.forEach((entry, key) => {
    if (entry.expiresAt <= cutoff) responseCache.delete(key);
  });
}, 30_000);

// The eviction timer alone must not keep the process alive (unref exists on
// Node's Timeout but not in every runtime's typings, hence the soft cast).
(evictInterval as unknown as { unref?: () => void }).unref?.();
/**
 * Caching middleware for the GraphQL endpoints.
 *
 * Serves a cached body when one is fresh, coalesces concurrent identical
 * queries onto a single downstream execution, and otherwise runs the real
 * graphql() handler and caches its successful result for GRAPHQL_CACHE_TTL_MS.
 *
 * @param c    Hono request context.
 * @param next Downstream handler chain (the real graphql() handler).
 * @returns A cached/coalesced Response, or void to let Hono send `c.res` as-is.
 */
async function graphqlCache(c: Context, next: Next): Promise<Response | void> {
  if (c.req.method !== 'GET' && c.req.method !== 'POST') return next();

  // Build a stable cache key without consuming the original request body so
  // the downstream graphql() handler can still read it.
  const cacheKey =
    c.req.method === 'POST'
      ? await c.req.raw.clone().text()
      : new URL(c.req.url).search;

  // 1. Cache hit — serve immediately, no DB involved.
  const hit = responseCache.get(cacheKey);
  if (hit && hit.expiresAt > Date.now()) {
    return c.body(hit.body, 200, { 'Content-Type': 'application/json' });
  }

  // 2. In-flight coalescing — N concurrent identical queries share one DB hit.
  // The shared promise never rejects (see below), so a plain await is safe.
  const flying = inFlight.get(cacheKey);
  if (flying) {
    const body = await flying;
    if (body !== null) {
      return c.body(body, 200, { 'Content-Type': 'application/json' });
    }
    // The in-flight request errored; fall through and try again fresh.
  }

  // 3. Cache miss — run the real graphql() handler and cache a successful result.
  //
  // The IIFE is written to NEVER reject: a throw from next() is captured in
  // `nextError` and resolved as null. This guarantees (a) coalesced waiters
  // actually take the "fall through and retry" path instead of crashing on a
  // rethrown foreign error, and (b) the un-awaited promise returned by
  // .finally() below can never become an unhandled rejection.
  let nextError: unknown;
  const promise = (async (): Promise<string | null> => {
    try {
      await next();
    } catch (err: unknown) {
      nextError = err;
      return null;
    }
    if (c.res.status !== 200) return null;
    const body = await c.res.clone().text();
    try {
      const parsed = JSON.parse(body) as { errors?: unknown };
      if (!parsed.errors) {
        // Stamp the TTL from completion time (not request arrival) so slow
        // queries still enjoy the full cache lifetime.
        responseCache.set(cacheKey, {
          body,
          expiresAt: Date.now() + GRAPHQL_CACHE_TTL_MS,
        });
        return body;
      }
    } catch {
      // Non-JSON response; don't cache.
    }
    return null;
  })();
  inFlight.set(cacheKey, promise);
  void promise.finally(() => inFlight.delete(cacheKey));

  const body = await promise;
  // Re-throw for the originating request only, preserving Hono's normal
  // error-handling path exactly as if this middleware were not installed.
  if (nextError !== undefined) throw nextError;
  if (body !== null) {
    return c.body(body, 200, { 'Content-Type': 'application/json' });
  }
  // Error path: graphql() already populated c.res; let Hono send it as-is.
}
// SQL endpoint
app.use('/sql/*', client({ db, schema }));

// GraphQL endpoints: the cache/coalescing middleware is registered ahead of
// the real handler on each path so it can intercept before any DB work.
for (const path of ['/graphql', '/'] as const) {
  app.use(path, graphqlCache);
  app.use(path, graphql({ db, schema }));
}

export default app;