mirror of
https://github.com/koala73/worldmonitor.git
synced 2026-04-25 17:14:57 +02:00
* fix(cache): align Redis digest + RSS feed TTLs to CF CDN TTL
RSS feed TTL 600s → 3600s; digest TTL 900s → 3600s.
CF CDN caches at 3600s, so Redis expiring earlier caused every hourly
CF revalidation to hit a cold origin and run the full buildDigest()
pipeline (75 feeds, up to 25s). Aligning both to 3600s ensures CF
revalidation gets a warm Redis hit and returns immediately.
* fix(cache): emit only non-ok feedStatuses; update proto comment + make generate
Digest was emitting 'ok' for every successful feed (~50 entries, ~1-2KB
per response). No in-repo client reads feedStatuses values. Changed to
only emit 'empty' and 'timeout'; absent key implies ok.
Updated proto comment to document the absence-implies-ok contract and
ran make generate to regenerate docs/api/ OpenAPI files.
* fix(cache): add slow-browser tier; move digest route to it
New 'slow-browser' tier is identical to 'slow' but adds max-age=300,
letting browsers skip the network for 5 minutes. Without max-age,
browsers ignore s-maxage and send conditional If-None-Match on every
20-min poll — each costing 1 billable edge request even for 304s.
Scoped only to list-feed-digest (a safe polling endpoint). Premium
user-triggered endpoints (analyze-stock, backtest-stock) stay on 'slow'
where browser caching is inappropriate.
* test: regression tests for feedStatuses and slow-browser tier
- digest-no-reclassify: assert buildDigest does not write 'ok' to feedStatuses
- route-cache-tier: include slow-browser in tier regex; assert slow-browser
has max-age and slow tier does not
* fix(cache): add variant to per-feed RSS cache key
rss:feed:v1:${url} was shared across variants even though classifyByKeyword()
bakes variant-specific threat/category labels into the cached ParsedItem[].
Feeds shared between full and tech variants (Verge, Ars, HN, etc.) had
whichever variant populated the cache first control the other variant's
classifications for the full 3600s TTL — turning a pre-existing 10-minute
bleed-through into a 1-hour accuracy bug for the tech dashboard.
Fix: key is now rss:feed:v1:${variant}:${url}.
* fix(cache): bypass browser HTTP cache on digest fetch
max-age=300 on the slow-browser tier lets browsers serve the digest
from their HTTP cache for up to 5 minutes, including on explicit
in-app refresh (window.location.reload) or page reload after a
breaking event. Users would see stale data until the TTL expired.
Add cache: 'no-cache' to tryFetchDigest() so every fetch revalidates
against CF edge. CF returns 304 (minimal cost) when data is unchanged,
or 200 with the current digest. s-maxage and CF-level caching are
unaffected; max-age still benefits browser back/forward cache.
* fix(cache): 15-min consistent TTL + degrade guard for digest
Issue 1 — TTL alignment: Redis digest TTL reverted to 900s (from 3600).
slow-browser tier reduced from s-maxage=1800/CDN=3600 to s-maxage=900 on
both sides, matching the Redis TTL. The freshness window is now consistently
15 minutes across Redis, Vercel edge, and CF CDN. max-age=300 (browser
local) is kept to avoid unnecessary revalidations on tab switch.
Issue 2 — Cache poisoning: replaced cachedFetchJson in listFeedDigest with
explicit getCachedJson/setCachedJson. After buildDigest(), if total items
across all categories is 0 the response is treated as degraded: Redis write
is skipped and markNoCacheResponse(ctx.request) is called so the gateway
sets Cache-Control: no-store instead of the normal tier headers. This
prevents a transient bad run from poisoning Redis and browser/CDN for the
full TTL. Error paths also call markNoCacheResponse.
95 lines
3.3 KiB
JavaScript
95 lines
3.3 KiB
JavaScript
import { describe, it } from 'node:test';
|
|
import assert from 'node:assert/strict';
|
|
import { readFileSync, readdirSync, statSync } from 'node:fs';
|
|
import { dirname, resolve, join } from 'node:path';
|
|
import { fileURLToPath } from 'node:url';
|
|
|
|
// Absolute directory of this test file (ES modules have no built-in __dirname).
const __dirname = dirname(fileURLToPath(import.meta.url));
// Repository root — assumes this file lives exactly one level below it
// (e.g. in a test/ directory); TODO confirm if the file is ever moved deeper.
const root = resolve(__dirname, '..');
|
/**
 * Collect every GET route path declared in the generated server files.
 *
 * Recursively walks `generatedDir` looking for `service_server.ts` files and
 * extracts route paths from the two code shapes the generator emits:
 *   1. object literals:  { method: "GET", path: "/..." }
 *   2. factory calls:    makeHandler("Name", "/...")  — hardcoded as GET
 *
 * @param {string} [generatedDir] - Directory to scan; defaults to the repo's
 *   generated server tree. Exposed as a parameter so tests can point it at a
 *   fixture directory instead of the real generated output.
 * @returns {string[]} Sorted list of extracted route paths.
 */
function extractGetRoutes(generatedDir = join(root, 'src', 'generated', 'server', 'worldmonitor')) {
  const routes = [];

  function walk(dir) {
    // withFileTypes avoids a separate stat() syscall per directory entry.
    for (const entry of readdirSync(dir, { withFileTypes: true })) {
      const full = join(dir, entry.name);
      if (entry.isDirectory()) {
        walk(full);
      } else if (entry.name === 'service_server.ts') {
        const src = readFileSync(full, 'utf-8');
        // Match both object literal { method: "GET", path: "/..." }
        // and factory call makeHandler(..., "/...") which is hardcoded as GET.
        const patterns = [
          /method:\s*"GET",[\s\S]*?path:\s*"([^"]+)"/g,
          /makeHandler\s*\(\s*"[^"]+",\s*"([^"]+)"/g,
        ];
        for (const re of patterns) {
          let m;
          while ((m = re.exec(src)) !== null) {
            routes.push(m[1]);
          }
        }
      }
    }
  }

  walk(generatedDir);
  return routes.sort();
}
|
|
|
|
/**
 * Parse the RPC_CACHE_TIER map out of the gateway source.
 *
 * Matches entries of the form  '/api/...': '<tier>'  where <tier> is one of
 * the known cache tier names.
 *
 * NOTE: a newly introduced tier name must be added to the alternation below,
 * otherwise its entries are silently skipped by this extractor.
 *
 * @param {string} [gatewayPath] - Path to gateway.ts; defaults to the repo
 *   copy. Exposed as a parameter so tests can point it at a fixture file.
 * @returns {Record<string, string>} Map of route path -> tier name.
 */
function extractCacheTierKeys(gatewayPath = join(root, 'server', 'gateway.ts')) {
  const src = readFileSync(gatewayPath, 'utf-8');
  const re = /'\/(api\/[^']+)':\s*'(fast|medium|slow|slow-browser|static|daily|no-store)'/g;
  const entries = {};
  let m;
  while ((m = re.exec(src)) !== null) {
    entries['/' + m[1]] = m[2];
  }
  return entries;
}
|
|
|
|
describe('RPC_CACHE_TIER route parity', () => {
  const getRoutes = extractGetRoutes();
  const tierMap = extractCacheTierKeys();
  const tierKeys = Object.keys(tierMap);
  // Single source of truth for the gateway location used by several tests.
  const gatewayPath = join(root, 'server', 'gateway.ts');

  it('finds at least 50 GET routes in generated server files', () => {
    // Sanity check that extraction actually parsed the generated files;
    // guards against the route regexes silently matching nothing.
    assert.ok(getRoutes.length >= 50, `Expected ≥50 GET routes, found ${getRoutes.length}`);
  });

  it('every generated GET route has an explicit cache tier entry', () => {
    const missing = getRoutes.filter((r) => !(r in tierMap));
    assert.deepStrictEqual(
      missing,
      [],
      `Missing RPC_CACHE_TIER entries for:\n ${missing.join('\n ')}\n\nAdd explicit tier entries in server/gateway.ts`,
    );
  });

  it('every cache tier key maps to a real generated route', () => {
    // Set membership keeps this O(n) instead of O(n²) as the route count grows.
    const routeSet = new Set(getRoutes);
    const stale = tierKeys.filter((k) => !routeSet.has(k));
    assert.deepStrictEqual(
      stale,
      [],
      `Stale RPC_CACHE_TIER entries (no matching generated route):\n ${stale.join('\n ')}`,
    );
  });

  it('no route uses the implicit default tier', () => {
    // The two parity tests above prove every route is explicit; this pins the
    // shape of the fallback expression so a refactor can't quietly change how
    // unlisted routes are handled without this suite noticing.
    const gatewaySrc = readFileSync(gatewayPath, 'utf-8');
    assert.match(
      gatewaySrc,
      /RPC_CACHE_TIER\[pathname\]\s*\?\?\s*'medium'/,
      "Expected the `?? 'medium'` fallback expression in server/gateway.ts — if it was removed or refactored, update this test and confirm all routes remain explicit",
    );
  });

  it('slow-browser tier includes max-age, slow tier does not', () => {
    const gatewaySrc = readFileSync(gatewayPath, 'utf-8');
    // NOTE(review): the /s flag lets `.` cross newlines, so this only proves
    // max-age appears somewhere after the first 'slow-browser' occurrence —
    // deliberately loose to tolerate the header value spanning lines.
    assert.match(gatewaySrc, /slow-browser.*max-age/s, 'slow-browser tier must include max-age');
    const slowLine = gatewaySrc.match(/^\s+slow: 'public.*'/m)?.[0] ?? '';
    assert.ok(!slowLine.includes('max-age'), 'slow tier must NOT include max-age');
  });
});
|