diff --git a/server/worldmonitor/intelligence/v1/get-company-enrichment.ts b/server/worldmonitor/intelligence/v1/get-company-enrichment.ts index f752b3cc8..32b688153 100644 --- a/server/worldmonitor/intelligence/v1/get-company-enrichment.ts +++ b/server/worldmonitor/intelligence/v1/get-company-enrichment.ts @@ -11,6 +11,7 @@ import type { } from '../../../../src/generated/server/worldmonitor/intelligence/v1/service_server'; import { ValidationError } from '../../../../src/generated/server/worldmonitor/intelligence/v1/service_server'; import { fetchJson } from '../../../_shared/fetch-json'; +import { cachedFetchJson } from '../../../_shared/redis'; interface GitHubOrg { name?: string; @@ -56,6 +57,36 @@ interface HNAlgoliaResponse { hits: HNAlgoliaHit[]; } +interface GitHubOrgResult { + name: string; + description: string; + blog: string; + location: string; + publicRepos: number; + followers: number; + avatarUrl: string; + createdAt: string | undefined; +} + +interface TechStackItem { + name: string; + category: string; + confidence: number; +} + +interface SECResult { + totalFilings: number; + recentFilings: Array<{ form: string; fileDate: string; description: string }>; +} + +interface HNMentionItem { + title: string; + url: string; + points: number; + comments: number; + createdAtMs: number; +} + function getDateMonthsAgo(months: number): string { const d = new Date(); d.setMonth(d.getMonth() - months); @@ -86,68 +117,92 @@ function parseIsoMs(value: string | undefined): number { return Number.isFinite(ms) ? 
ms : 0; } -async function fetchGitHubOrg(name: string) { - const data = await fetchJson(`https://api.github.com/orgs/${encodeURIComponent(name)}`); - if (!data) return null; - return { - name: data.name || data.login || '', - description: data.description || '', - blog: data.blog || '', - location: data.location || '', - publicRepos: data.public_repos || 0, - followers: data.followers || 0, - avatarUrl: data.avatar_url || '', - createdAt: data.created_at, - }; +async function fetchGitHubOrg(name: string): Promise<GitHubOrgResult | null> { + return cachedFetchJson( + `intel:enrichment:gh-org:${encodeURIComponent(name.toLowerCase())}`, + 3600, + async () => { + const data = await fetchJson(`https://api.github.com/orgs/${encodeURIComponent(name)}`); + if (!data) return null; + return { + name: data.name || data.login || '', + description: data.description || '', + blog: data.blog || '', + location: data.location || '', + publicRepos: data.public_repos || 0, + followers: data.followers || 0, + avatarUrl: data.avatar_url || '', + createdAt: data.created_at, + }; + }, + ); } -async function fetchGitHubTechStack(orgName: string) { - const repos = await fetchJson(`https://api.github.com/orgs/${encodeURIComponent(orgName)}/repos?sort=stars&per_page=10`); - if (!Array.isArray(repos)) return []; +async function fetchGitHubTechStack(orgName: string): Promise<TechStackItem[] | null> { + return cachedFetchJson( + `intel:enrichment:gh-tech:${encodeURIComponent(orgName.toLowerCase())}`, + 3600, + async () => { + const repos = await fetchJson(`https://api.github.com/orgs/${encodeURIComponent(orgName)}/repos?sort=stars&per_page=10`); + if (!Array.isArray(repos)) return null; - const languages = new Map(); - for (const repo of repos) { - if (repo.language) { - languages.set(repo.language, (languages.get(repo.language) || 0) + (repo.stargazers_count || 0) + 1); - } - } + const languages = new Map(); + for (const repo of repos) { + if (repo.language) { + languages.set(repo.language, (languages.get(repo.language) || 0) +
(repo.stargazers_count || 0) + 1); + } + } - return Array.from(languages.entries()) - .sort((a, b) => b[1] - a[1]) - .slice(0, 10) - .map(([lang, score]) => ({ - name: lang, - category: 'Programming Language', - confidence: Math.min(1, score / 100), - })); + return Array.from(languages.entries()) + .sort((a, b) => b[1] - a[1]) + .slice(0, 10) + .map(([lang, score]) => ({ + name: lang, + category: 'Programming Language', + confidence: Math.min(1, score / 100), + })); + }, + ); } -async function fetchSECData(companyName: string) { - const url = `https://efts.sec.gov/LATEST/search-index?q=${encodeURIComponent(companyName)}&dateRange=custom&startdt=${getDateMonthsAgo(6)}&enddt=${getTodayISO()}&forms=10-K,10-Q,8-K&from=0&size=5`; - const data = await fetchJson(url, { timeoutMs: 12_000 }); - if (!data?.hits?.hits) return null; +async function fetchSECData(companyName: string): Promise<SECResult | null> { + return cachedFetchJson( + `intel:enrichment:sec:${encodeURIComponent(companyName.toLowerCase())}:${getTodayISO()}`, + 3600, + async () => { + const url = `https://efts.sec.gov/LATEST/search-index?q=${encodeURIComponent(companyName)}&dateRange=custom&startdt=${getDateMonthsAgo(6)}&enddt=${getTodayISO()}&forms=10-K,10-Q,8-K&from=0&size=5`; + const data = await fetchJson(url, { timeoutMs: 12_000 }); + if (!data?.hits?.hits) return null; - return { - totalFilings: data.hits.total?.value || 0, - recentFilings: data.hits.hits.slice(0, 5).map((h) => ({ - form: h._source?.form_type || h._source?.file_type || 'Unknown', - fileDate: h._source?.file_date || h._source?.period_of_report || '', - description: h._source?.display_names?.[0] || companyName, - })), - }; + return { + totalFilings: data.hits.total?.value || 0, + recentFilings: data.hits.hits.slice(0, 5).map((h) => ({ + form: h._source?.form_type || h._source?.file_type || 'Unknown', + fileDate: h._source?.file_date || h._source?.period_of_report || '', + description: h._source?.display_names?.[0] || companyName, + })), + }; + }, + ); }
-async function fetchHackerNewsMentions(companyName: string) { - const data = await fetchJson(`https://hn.algolia.com/api/v1/search?query=${encodeURIComponent(companyName)}&tags=story&hitsPerPage=5`); - if (!data?.hits) return []; +async function fetchHackerNewsMentions(companyName: string): Promise<HNMentionItem[] | null> { + return cachedFetchJson( + `intel:enrichment:hn:${encodeURIComponent(companyName.toLowerCase())}`, + 1800, + async () => { + const data = await fetchJson(`https://hn.algolia.com/api/v1/search?query=${encodeURIComponent(companyName)}&tags=story&hitsPerPage=5`); + if (data === null || !data.hits) return null; - return data.hits.map((h) => ({ - title: h.title || '', - url: h.url || '', - points: h.points || 0, - comments: h.num_comments || 0, - createdAtMs: parseIsoMs(h.created_at), - })); + return data.hits.map((h) => ({ + title: h.title || '', + url: h.url || '', + points: h.points || 0, + comments: h.num_comments || 0, + createdAtMs: parseIsoMs(h.created_at), + })); + }, + ); } export async function getCompanyEnrichment( @@ -171,6 +226,9 @@ export async function getCompanyEnrichment( fetchHackerNewsMentions(companyName), ]); + const techStackItems = techStack ?? []; + const hnMentionItems = hnMentions ?? []; + return { company: { name: githubOrg?.name || companyName, @@ -185,15 +243,15 @@ export async function getCompanyEnrichment( followers: githubOrg.followers, avatarUrl: githubOrg.avatarUrl, } : undefined, - techStack, + techStack: techStackItems, secFilings: secData || undefined, - hackerNewsMentions: hnMentions, + hackerNewsMentions: hnMentionItems, enrichedAtMs: Date.now(), sources: [ githubOrg ? 'github' : null, - techStack.length > 0 ? 'github_repos' : null, + techStackItems.length > 0 ? 'github_repos' : null, secData ? 'sec_edgar' : null, - hnMentions.length > 0 ? 'hacker_news' : null, + hnMentionItems.length > 0 ?
'hacker_news' : null, ].filter((s): s is string => s !== null), }; } diff --git a/server/worldmonitor/intelligence/v1/list-company-signals.ts b/server/worldmonitor/intelligence/v1/list-company-signals.ts index a2b0e0498..0addb38b3 100644 --- a/server/worldmonitor/intelligence/v1/list-company-signals.ts +++ b/server/worldmonitor/intelligence/v1/list-company-signals.ts @@ -12,6 +12,7 @@ import type { } from '../../../../src/generated/server/worldmonitor/intelligence/v1/service_server'; import { ValidationError } from '../../../../src/generated/server/worldmonitor/intelligence/v1/service_server'; import { fetchJson } from '../../../_shared/fetch-json'; +import { cachedFetchJson } from '../../../_shared/redis'; interface HNAlgoliaSignalHit { title?: string; @@ -85,95 +86,117 @@ function slugFromDomain(domain: string): string { return domain.replace(/\.(com|io|co|org|net|ai|dev|app)$/, '').split('.').pop() || domain; } -async function fetchHNSignals(companyName: string): Promise { - const thirtyDaysAgo = Math.floor(Date.now() / 1000) - 30 * 86400; - const data = await fetchJson( - `https://hn.algolia.com/api/v1/search_by_date?query=${encodeURIComponent(companyName)}&tags=story&hitsPerPage=20&numericFilters=created_at_i>${thirtyDaysAgo}`, - ); - if (!data?.hits) return []; - - const now = Date.now(); - return data.hits.map((h) => { - const ts = toEventTimeMs(h.created_at); - const recencyDays = (now - ts) / 86400000; - return { - type: classifySignal(h.title || ''), - title: h.title || '', - url: h.url || `https://news.ycombinator.com/item?id=${h.objectID || ''}`, - source: 'Hacker News', - sourceTier: 2, - timestampMs: ts, - strength: scoreSignalStrength(h.points || 0, h.num_comments || 0, recencyDays), - engagement: { - points: h.points || 0, - comments: h.num_comments || 0, - stars: 0, - forks: 0, - mentions: 0, - }, - }; - }); +function hourBucket(): number { + return Math.floor(Date.now() / 3_600_000); } -async function fetchGitHubSignals(orgName: string): Promise { 
- const repos = await fetchJson(`https://api.github.com/orgs/${encodeURIComponent(orgName)}/repos?sort=created&per_page=10`); - if (!Array.isArray(repos)) return []; +async function fetchHNSignals(companyName: string): Promise { + return cachedFetchJson( + `intel:signals:hn:${encodeURIComponent(companyName.toLowerCase())}:${hourBucket()}`, + 1800, + async () => { + const thirtyDaysAgo = Math.floor(Date.now() / 1000) - 30 * 86400; + const data = await fetchJson( + `https://hn.algolia.com/api/v1/search_by_date?query=${encodeURIComponent(companyName)}&tags=story&hitsPerPage=20&numericFilters=created_at_i>${thirtyDaysAgo}`, + ); + if (data === null || !data.hits) return null; - const now = Date.now(); - const thirtyDaysAgo = now - 30 * 86400000; - - return repos - .filter((r) => toEventTimeMs(r.created_at) > thirtyDaysAgo) - .map((r) => ({ - type: 'technology_adoption', - title: `New repository: ${r.full_name || 'unknown'} - ${r.description || 'No description'}`, - url: r.html_url || '', - source: 'GitHub', - sourceTier: 2, - timestampMs: toEventTimeMs(r.created_at), - strength: (r.stargazers_count || 0) > 50 ? 'high' : (r.stargazers_count || 0) > 10 ? 
'medium' : 'low', - engagement: { - points: 0, - comments: 0, - stars: r.stargazers_count || 0, - forks: r.forks_count || 0, - mentions: 0, - }, - })); -} - -async function fetchJobSignals(companyName: string): Promise { - const sixtyDaysAgo = Math.floor(Date.now() / 1000) - 60 * 86400; - const data = await fetchJson( - `https://hn.algolia.com/api/v1/search?query=${encodeURIComponent(companyName)}&tags=comment,ask_hn&hitsPerPage=10&numericFilters=created_at_i>${sixtyDaysAgo}`, - ); - if (!data?.hits) return []; - - const hiringComments = data.hits.filter((h) => { - const text = (h.comment_text || '').toLowerCase(); - return text.includes('hiring') || text.includes('job') || text.includes('apply'); - }); - - if (hiringComments.length === 0) return []; - const firstComment = hiringComments[0]; - if (!firstComment) return []; - - return [{ - type: 'hiring_surge', - title: `${companyName} hiring activity (${hiringComments.length} mentions in HN hiring threads)`, - url: `https://news.ycombinator.com/item?id=${firstComment.story_id || ''}`, - source: 'HN Hiring Threads', - sourceTier: 3, - timestampMs: toEventTimeMs(firstComment.created_at), - strength: hiringComments.length >= 3 ? 
'high' : 'medium', - engagement: { - points: 0, - comments: 0, - stars: 0, - forks: 0, - mentions: hiringComments.length, + const now = Date.now(); + return data.hits.map((h) => { + const ts = toEventTimeMs(h.created_at); + const recencyDays = (now - ts) / 86400000; + return { + type: classifySignal(h.title || ''), + title: h.title || '', + url: h.url || `https://news.ycombinator.com/item?id=${h.objectID || ''}`, + source: 'Hacker News', + sourceTier: 2, + timestampMs: ts, + strength: scoreSignalStrength(h.points || 0, h.num_comments || 0, recencyDays), + engagement: { + points: h.points || 0, + comments: h.num_comments || 0, + stars: 0, + forks: 0, + mentions: 0, + }, + }; + }); }, - }]; + ); +} + +async function fetchGitHubSignals(orgName: string): Promise { + return cachedFetchJson( + `intel:signals:gh:${encodeURIComponent(orgName.toLowerCase())}:${hourBucket()}`, + 3600, + async () => { + const repos = await fetchJson(`https://api.github.com/orgs/${encodeURIComponent(orgName)}/repos?sort=created&per_page=10`); + if (!Array.isArray(repos)) return null; + + const now = Date.now(); + const thirtyDaysAgo = now - 30 * 86400000; + + return repos + .filter((r) => toEventTimeMs(r.created_at) > thirtyDaysAgo) + .map((r) => ({ + type: 'technology_adoption', + title: `New repository: ${r.full_name || 'unknown'} - ${r.description || 'No description'}`, + url: r.html_url || '', + source: 'GitHub', + sourceTier: 2, + timestampMs: toEventTimeMs(r.created_at), + strength: (r.stargazers_count || 0) > 50 ? 'high' : (r.stargazers_count || 0) > 10 ? 
'medium' : 'low', + engagement: { + points: 0, + comments: 0, + stars: r.stargazers_count || 0, + forks: r.forks_count || 0, + mentions: 0, + }, + })); + }, + ); +} + +async function fetchJobSignals(companyName: string): Promise { + return cachedFetchJson( + `intel:signals:jobs:${encodeURIComponent(companyName.toLowerCase())}:${hourBucket()}`, + 1800, + async () => { + const sixtyDaysAgo = Math.floor(Date.now() / 1000) - 60 * 86400; + const data = await fetchJson( + `https://hn.algolia.com/api/v1/search?query=${encodeURIComponent(companyName)}&tags=comment,ask_hn&hitsPerPage=10&numericFilters=created_at_i>${sixtyDaysAgo}`, + ); + if (data === null || !data.hits) return null; + + const hiringComments = data.hits.filter((h) => { + const text = (h.comment_text || '').toLowerCase(); + return text.includes('hiring') || text.includes('job') || text.includes('apply'); + }); + + if (hiringComments.length === 0) return []; + const firstComment = hiringComments[0]; + if (!firstComment) return []; + + return [{ + type: 'hiring_surge', + title: `${companyName} hiring activity (${hiringComments.length} mentions in HN hiring threads)`, + url: `https://news.ycombinator.com/item?id=${firstComment.story_id || ''}`, + source: 'HN Hiring Threads', + sourceTier: 3, + timestampMs: toEventTimeMs(firstComment.created_at), + strength: hiringComments.length >= 3 ? 'high' : 'medium', + engagement: { + points: 0, + comments: 0, + stars: 0, + forks: 0, + mentions: hiringComments.length, + }, + }]; + }, + ); } export async function listCompanySignals( @@ -195,7 +218,7 @@ export async function listCompanySignals( fetchJobSignals(company), ]); - const allSignals = [...hnSignals, ...githubSignals, ...jobSignals] + const allSignals = [...(hnSignals ?? []), ...(githubSignals ?? []), ...(jobSignals ?? 
[])] .sort((a, b) => b.timestampMs - a.timestampMs); const signalTypeCounts: Record = {}; diff --git a/tests/enrichment-caching.test.mjs b/tests/enrichment-caching.test.mjs new file mode 100644 index 000000000..a351e403b --- /dev/null +++ b/tests/enrichment-caching.test.mjs @@ -0,0 +1,250 @@ +/** + * Functional tests for per-upstream cachedFetchJson in enrichment/signals handlers. + * Verifies null/[] semantics, cache key encoding, and cache hit behavior. + */ + +import { describe, it, before, after } from 'node:test'; +import assert from 'node:assert/strict'; + +// ─── Redis stub ────────────────────────────────────────────────────────────── + +const NEG_SENTINEL = '__WM_NEG__'; + +function makeRedisStub() { + const store = new Map(); + const ttls = new Map(); + const setCalls = []; + + return { + store, + ttls, + setCalls, + async get(key) { + return store.has(key) ? store.get(key) : null; + }, + async set(key, value, ttl) { + store.set(key, value); + ttls.set(key, ttl); + setCalls.push({ key, value, ttl }); + }, + reset() { + store.clear(); + ttls.clear(); + setCalls.length = 0; + }, + }; +} + +// ─── cachedFetchJson re-implementation for testing ─────────────────────────── +// Tests the same logic as server/_shared/redis.ts cachedFetchJson + +function makeCachedFetchJson(redis) { + const inflight = new Map(); + + return async function cachedFetchJson(key, ttlSeconds, fetcher, negativeTtlSeconds = 120) { + const cached = await redis.get(key); + if (cached === NEG_SENTINEL) return null; + if (cached !== null) return cached; + + const existing = inflight.get(key); + if (existing) return existing; + + const promise = fetcher() + .then(async (result) => { + if (result != null) { + await redis.set(key, result, ttlSeconds); + } else { + await redis.set(key, NEG_SENTINEL, negativeTtlSeconds); + } + return result; + }) + .finally(() => { + inflight.delete(key); + }); + + inflight.set(key, promise); + return promise; + }; +} + +// ─── Tests 
─────────────────────────────────────────────────────────────────── + +describe('cachedFetchJson — null path (fetch failure)', () => { + const redis = makeRedisStub(); + const cachedFetchJson = makeCachedFetchJson(redis); + + before(() => redis.reset()); + + it('returns null when fetcher returns null', async () => { + const result = await cachedFetchJson( + 'intel:enrichment:gh-org:testco', + 3600, + async () => null, + ); + assert.equal(result, null); + }); + + it('writes NEG_SENTINEL to Redis with 120s TTL on fetch failure', () => { + assert.equal(redis.setCalls.length, 1); + const call = redis.setCalls[0]; + assert.equal(call.key, 'intel:enrichment:gh-org:testco'); + assert.equal(call.value, NEG_SENTINEL); + assert.equal(call.ttl, 120); + }); + + it('subsequent call returns null from NEG_SENTINEL (no fetcher call)', async () => { + let fetcherCalled = false; + const result = await cachedFetchJson( + 'intel:enrichment:gh-org:testco', + 3600, + async () => { fetcherCalled = true; return { name: 'should not get here' }; }, + ); + assert.equal(result, null); + assert.equal(fetcherCalled, false, 'fetcher should not be called on neg cache hit'); + }); +}); + +describe('cachedFetchJson — empty array (successful empty result)', () => { + const redis = makeRedisStub(); + const cachedFetchJson = makeCachedFetchJson(redis); + + before(() => redis.reset()); + + it('returns [] when fetcher returns []', async () => { + const result = await cachedFetchJson( + 'intel:enrichment:hn:emptyco', + 1800, + async () => [], + ); + assert.deepEqual(result, []); + }); + + it('caches [] with normal TTL (not neg cache)', () => { + assert.equal(redis.setCalls.length, 1); + const call = redis.setCalls[0]; + assert.equal(call.key, 'intel:enrichment:hn:emptyco'); + assert.deepEqual(call.value, []); + assert.equal(call.ttl, 1800); + assert.notEqual(call.value, NEG_SENTINEL); + }); + + it('subsequent call returns [] from cache (no fetcher call)', async () => { + let fetcherCalled = false; + 
const result = await cachedFetchJson( + 'intel:enrichment:hn:emptyco', + 1800, + async () => { fetcherCalled = true; return ['should not appear']; }, + ); + assert.deepEqual(result, []); + assert.equal(fetcherCalled, false, 'fetcher should not be called on cache hit'); + }); +}); + +describe('cachedFetchJson — cache hit skips upstream fetch', () => { + const redis = makeRedisStub(); + const cachedFetchJson = makeCachedFetchJson(redis); + + before(() => { + redis.reset(); + redis.store.set('intel:enrichment:gh-org:stripe', { name: 'Stripe', publicRepos: 42 }); + }); + + it('returns cached data without calling fetcher', async () => { + let fetcherCalled = false; + const result = await cachedFetchJson( + 'intel:enrichment:gh-org:stripe', + 3600, + async () => { fetcherCalled = true; return { name: 'WRONG' }; }, + ); + assert.deepEqual(result, { name: 'Stripe', publicRepos: 42 }); + assert.equal(fetcherCalled, false); + }); + + it('no Redis SET calls on cache hit', () => { + assert.equal(redis.setCalls.length, 0); + }); +}); + +describe('cachedFetchJson — cache key encoding', () => { + const redis = makeRedisStub(); + const cachedFetchJson = makeCachedFetchJson(redis); + + before(() => redis.reset()); + + it('encodes special chars in company names', async () => { + await cachedFetchJson( + `intel:enrichment:hn:${encodeURIComponent('at&t')}`, + 1800, + async () => [{ title: 'AT&T news', url: 'https://example.com', points: 10, comments: 5, createdAtMs: 0 }], + ); + const key = redis.setCalls[0]?.key; + assert.ok(key?.includes('at%26t'), `Expected key to contain "at%26t", got: ${key}`); + }); + + it('different companies produce different keys', async () => { + await cachedFetchJson( + `intel:enrichment:hn:${encodeURIComponent('johnson %26 johnson')}`, + 1800, + async () => [], + ); + const keys = redis.setCalls.map((c) => c.key); + assert.equal(new Set(keys).size, keys.length, 'each company should produce a unique cache key'); + }); + + it('lowercased names produce 
consistent keys', async () => { + const redis2 = makeRedisStub(); + const cf2 = makeCachedFetchJson(redis2); + + await cf2(`intel:enrichment:gh-org:${encodeURIComponent('stripe')}`, 3600, async () => ({ name: 'Stripe' })); + await cf2(`intel:enrichment:gh-org:${encodeURIComponent('STRIPE'.toLowerCase())}`, 3600, async () => ({ name: 'WRONG' })); + + assert.equal(redis2.setCalls.length, 1, 'STRIPE and stripe should resolve to the same key'); + }); +}); + +describe('cachedFetchJson — import verification', () => { + it('get-company-enrichment.ts imports cachedFetchJson', async () => { + const { readFileSync } = await import('node:fs'); + const { resolve } = await import('node:path'); + const src = readFileSync(resolve('server/worldmonitor/intelligence/v1/get-company-enrichment.ts'), 'utf-8'); + assert.ok(src.includes("from '../../../_shared/redis'"), 'must import from _shared/redis'); + assert.ok(src.includes('cachedFetchJson'), 'must use cachedFetchJson'); + assert.ok(src.includes('intel:enrichment:gh-org:'), 'must use gh-org cache key'); + assert.ok(src.includes('intel:enrichment:gh-tech:'), 'must use gh-tech cache key'); + assert.ok(src.includes('intel:enrichment:sec:'), 'must use sec cache key'); + assert.ok(src.includes('intel:enrichment:hn:'), 'must use hn cache key'); + assert.ok( + src.includes('intel:enrichment:sec:') && src.includes('getTodayISO()'), + 'SEC cache key must include getTodayISO() daily bucket to track date-window changes', + ); + }); + + it('list-company-signals.ts imports cachedFetchJson', async () => { + const { readFileSync } = await import('node:fs'); + const { resolve } = await import('node:path'); + const src = readFileSync(resolve('server/worldmonitor/intelligence/v1/list-company-signals.ts'), 'utf-8'); + assert.ok(src.includes("from '../../../_shared/redis'"), 'must import from _shared/redis'); + assert.ok(src.includes('cachedFetchJson'), 'must use cachedFetchJson'); + assert.ok(src.includes('intel:signals:hn:'), 'must use signals:hn 
cache key'); + assert.ok(src.includes('intel:signals:gh:'), 'must use signals:gh cache key'); + assert.ok(src.includes('intel:signals:jobs:'), 'must use signals:jobs cache key'); + assert.ok( + src.includes('hourBucket()'), + 'all signal cache keys must include hourBucket() to prevent stale rolling-window results', + ); + }); + + it('cache keys do not collide with existing bootstrap keys', async () => { + const { readFileSync } = await import('node:fs'); + const { resolve } = await import('node:path'); + const cacheKeysSrc = readFileSync(resolve('server/_shared/cache-keys.ts'), 'utf-8'); + assert.ok( + !cacheKeysSrc.includes('intel:enrichment:'), + 'intel:enrichment: prefix should not exist in bootstrap cache-keys.ts (on-demand keys)', + ); + assert.ok( + !cacheKeysSrc.includes('intel:signals:'), + 'intel:signals: prefix should not exist in bootstrap cache-keys.ts (on-demand keys)', + ); + }); +});