fix(enrichment): add per-upstream Redis caching to company enrichment and signals RPCs (#1986)

* fix(enrichment): add per-upstream cachedFetchJson caching to enrichment/signals

Prevents parallel external calls (GitHub, SEC, HN) on every RPC request.
Each upstream fetcher is now independently cached in Redis with appropriate
TTLs. Null returned on fetch failure (neg-cached 120s), [] on successful
empty result (normal TTL). encodeURIComponent used for cache key safety.

* fix(enrichment): add date/hour buckets to time-sensitive cache keys

SEC key now includes getTodayISO() so the 6-month date window
(startdt/enddt) advances daily rather than serving stale results
for up to an hour past midnight.

Signal keys now include hourBucket() (floor(ms/3600000)) so
rolling 30/60-day HN and GitHub sets are refreshed each hour
instead of serving entries that have aged out of the window or
omitting newly posted items for up to 30-60 minutes.
This commit is contained in:
Elie Habib
2026-03-21 16:24:17 +04:00
committed by GitHub
parent 758bfc0548
commit 506a03c807
3 changed files with 473 additions and 142 deletions

View File

@@ -11,6 +11,7 @@ import type {
} from '../../../../src/generated/server/worldmonitor/intelligence/v1/service_server'; } from '../../../../src/generated/server/worldmonitor/intelligence/v1/service_server';
import { ValidationError } from '../../../../src/generated/server/worldmonitor/intelligence/v1/service_server'; import { ValidationError } from '../../../../src/generated/server/worldmonitor/intelligence/v1/service_server';
import { fetchJson } from '../../../_shared/fetch-json'; import { fetchJson } from '../../../_shared/fetch-json';
import { cachedFetchJson } from '../../../_shared/redis';
interface GitHubOrg { interface GitHubOrg {
name?: string; name?: string;
@@ -56,6 +57,36 @@ interface HNAlgoliaResponse {
hits: HNAlgoliaHit[]; hits: HNAlgoliaHit[];
} }
interface GitHubOrgResult {
name: string;
description: string;
blog: string;
location: string;
publicRepos: number;
followers: number;
avatarUrl: string;
createdAt: string | undefined;
}
interface TechStackItem {
name: string;
category: string;
confidence: number;
}
interface SECResult {
totalFilings: number;
recentFilings: Array<{ form: string; fileDate: string; description: string }>;
}
interface HNMentionItem {
title: string;
url: string;
points: number;
comments: number;
createdAtMs: number;
}
function getDateMonthsAgo(months: number): string { function getDateMonthsAgo(months: number): string {
const d = new Date(); const d = new Date();
d.setMonth(d.getMonth() - months); d.setMonth(d.getMonth() - months);
@@ -86,68 +117,92 @@ function parseIsoMs(value: string | undefined): number {
return Number.isFinite(ms) ? ms : 0; return Number.isFinite(ms) ? ms : 0;
} }
async function fetchGitHubOrg(name: string) { async function fetchGitHubOrg(name: string): Promise<GitHubOrgResult | null> {
const data = await fetchJson<GitHubOrg>(`https://api.github.com/orgs/${encodeURIComponent(name)}`); return cachedFetchJson<GitHubOrgResult>(
if (!data) return null; `intel:enrichment:gh-org:${encodeURIComponent(name.toLowerCase())}`,
return { 3600,
name: data.name || data.login || '', async () => {
description: data.description || '', const data = await fetchJson<GitHubOrg>(`https://api.github.com/orgs/${encodeURIComponent(name)}`);
blog: data.blog || '', if (!data) return null;
location: data.location || '', return {
publicRepos: data.public_repos || 0, name: data.name || data.login || '',
followers: data.followers || 0, description: data.description || '',
avatarUrl: data.avatar_url || '', blog: data.blog || '',
createdAt: data.created_at, location: data.location || '',
}; publicRepos: data.public_repos || 0,
followers: data.followers || 0,
avatarUrl: data.avatar_url || '',
createdAt: data.created_at,
};
},
);
} }
async function fetchGitHubTechStack(orgName: string) { async function fetchGitHubTechStack(orgName: string): Promise<TechStackItem[] | null> {
const repos = await fetchJson<GitHubRepo[]>(`https://api.github.com/orgs/${encodeURIComponent(orgName)}/repos?sort=stars&per_page=10`); return cachedFetchJson<TechStackItem[]>(
if (!Array.isArray(repos)) return []; `intel:enrichment:gh-tech:${encodeURIComponent(orgName.toLowerCase())}`,
3600,
async () => {
const repos = await fetchJson<GitHubRepo[]>(`https://api.github.com/orgs/${encodeURIComponent(orgName)}/repos?sort=stars&per_page=10`);
if (!Array.isArray(repos)) return null;
const languages = new Map<string, number>(); const languages = new Map<string, number>();
for (const repo of repos) { for (const repo of repos) {
if (repo.language) { if (repo.language) {
languages.set(repo.language, (languages.get(repo.language) || 0) + (repo.stargazers_count || 0) + 1); languages.set(repo.language, (languages.get(repo.language) || 0) + (repo.stargazers_count || 0) + 1);
} }
} }
return Array.from(languages.entries()) return Array.from(languages.entries())
.sort((a, b) => b[1] - a[1]) .sort((a, b) => b[1] - a[1])
.slice(0, 10) .slice(0, 10)
.map(([lang, score]) => ({ .map(([lang, score]) => ({
name: lang, name: lang,
category: 'Programming Language', category: 'Programming Language',
confidence: Math.min(1, score / 100), confidence: Math.min(1, score / 100),
})); }));
},
);
} }
async function fetchSECData(companyName: string) { async function fetchSECData(companyName: string): Promise<SECResult | null> {
const url = `https://efts.sec.gov/LATEST/search-index?q=${encodeURIComponent(companyName)}&dateRange=custom&startdt=${getDateMonthsAgo(6)}&enddt=${getTodayISO()}&forms=10-K,10-Q,8-K&from=0&size=5`; return cachedFetchJson<SECResult>(
const data = await fetchJson<SECSearchResponse>(url, { timeoutMs: 12_000 }); `intel:enrichment:sec:${encodeURIComponent(companyName.toLowerCase())}:${getTodayISO()}`,
if (!data?.hits?.hits) return null; 3600,
async () => {
const url = `https://efts.sec.gov/LATEST/search-index?q=${encodeURIComponent(companyName)}&dateRange=custom&startdt=${getDateMonthsAgo(6)}&enddt=${getTodayISO()}&forms=10-K,10-Q,8-K&from=0&size=5`;
const data = await fetchJson<SECSearchResponse>(url, { timeoutMs: 12_000 });
if (!data?.hits?.hits) return null;
return { return {
totalFilings: data.hits.total?.value || 0, totalFilings: data.hits.total?.value || 0,
recentFilings: data.hits.hits.slice(0, 5).map((h) => ({ recentFilings: data.hits.hits.slice(0, 5).map((h) => ({
form: h._source?.form_type || h._source?.file_type || 'Unknown', form: h._source?.form_type || h._source?.file_type || 'Unknown',
fileDate: h._source?.file_date || h._source?.period_of_report || '', fileDate: h._source?.file_date || h._source?.period_of_report || '',
description: h._source?.display_names?.[0] || companyName, description: h._source?.display_names?.[0] || companyName,
})), })),
}; };
},
);
} }
async function fetchHackerNewsMentions(companyName: string) { async function fetchHackerNewsMentions(companyName: string): Promise<HNMentionItem[] | null> {
const data = await fetchJson<HNAlgoliaResponse>(`https://hn.algolia.com/api/v1/search?query=${encodeURIComponent(companyName)}&tags=story&hitsPerPage=5`); return cachedFetchJson<HNMentionItem[]>(
if (!data?.hits) return []; `intel:enrichment:hn:${encodeURIComponent(companyName.toLowerCase())}`,
1800,
async () => {
const data = await fetchJson<HNAlgoliaResponse>(`https://hn.algolia.com/api/v1/search?query=${encodeURIComponent(companyName)}&tags=story&hitsPerPage=5`);
if (data === null || !data.hits) return null;
return data.hits.map((h) => ({ return data.hits.map((h) => ({
title: h.title || '', title: h.title || '',
url: h.url || '', url: h.url || '',
points: h.points || 0, points: h.points || 0,
comments: h.num_comments || 0, comments: h.num_comments || 0,
createdAtMs: parseIsoMs(h.created_at), createdAtMs: parseIsoMs(h.created_at),
})); }));
},
);
} }
export async function getCompanyEnrichment( export async function getCompanyEnrichment(
@@ -171,6 +226,9 @@ export async function getCompanyEnrichment(
fetchHackerNewsMentions(companyName), fetchHackerNewsMentions(companyName),
]); ]);
const techStackItems = techStack ?? [];
const hnMentionItems = hnMentions ?? [];
return { return {
company: { company: {
name: githubOrg?.name || companyName, name: githubOrg?.name || companyName,
@@ -185,15 +243,15 @@ export async function getCompanyEnrichment(
followers: githubOrg.followers, followers: githubOrg.followers,
avatarUrl: githubOrg.avatarUrl, avatarUrl: githubOrg.avatarUrl,
} : undefined, } : undefined,
techStack, techStack: techStackItems,
secFilings: secData || undefined, secFilings: secData || undefined,
hackerNewsMentions: hnMentions, hackerNewsMentions: hnMentionItems,
enrichedAtMs: Date.now(), enrichedAtMs: Date.now(),
sources: [ sources: [
githubOrg ? 'github' : null, githubOrg ? 'github' : null,
techStack.length > 0 ? 'github_repos' : null, techStackItems.length > 0 ? 'github_repos' : null,
secData ? 'sec_edgar' : null, secData ? 'sec_edgar' : null,
hnMentions.length > 0 ? 'hacker_news' : null, hnMentionItems.length > 0 ? 'hacker_news' : null,
].filter((s): s is string => s !== null), ].filter((s): s is string => s !== null),
}; };
} }

View File

@@ -12,6 +12,7 @@ import type {
} from '../../../../src/generated/server/worldmonitor/intelligence/v1/service_server'; } from '../../../../src/generated/server/worldmonitor/intelligence/v1/service_server';
import { ValidationError } from '../../../../src/generated/server/worldmonitor/intelligence/v1/service_server'; import { ValidationError } from '../../../../src/generated/server/worldmonitor/intelligence/v1/service_server';
import { fetchJson } from '../../../_shared/fetch-json'; import { fetchJson } from '../../../_shared/fetch-json';
import { cachedFetchJson } from '../../../_shared/redis';
interface HNAlgoliaSignalHit { interface HNAlgoliaSignalHit {
title?: string; title?: string;
@@ -85,95 +86,117 @@ function slugFromDomain(domain: string): string {
return domain.replace(/\.(com|io|co|org|net|ai|dev|app)$/, '').split('.').pop() || domain; return domain.replace(/\.(com|io|co|org|net|ai|dev|app)$/, '').split('.').pop() || domain;
} }
async function fetchHNSignals(companyName: string): Promise<CompanySignal[]> { function hourBucket(): number {
const thirtyDaysAgo = Math.floor(Date.now() / 1000) - 30 * 86400; return Math.floor(Date.now() / 3_600_000);
const data = await fetchJson<HNAlgoliaSignalResponse>(
`https://hn.algolia.com/api/v1/search_by_date?query=${encodeURIComponent(companyName)}&tags=story&hitsPerPage=20&numericFilters=created_at_i>${thirtyDaysAgo}`,
);
if (!data?.hits) return [];
const now = Date.now();
return data.hits.map((h) => {
const ts = toEventTimeMs(h.created_at);
const recencyDays = (now - ts) / 86400000;
return {
type: classifySignal(h.title || ''),
title: h.title || '',
url: h.url || `https://news.ycombinator.com/item?id=${h.objectID || ''}`,
source: 'Hacker News',
sourceTier: 2,
timestampMs: ts,
strength: scoreSignalStrength(h.points || 0, h.num_comments || 0, recencyDays),
engagement: {
points: h.points || 0,
comments: h.num_comments || 0,
stars: 0,
forks: 0,
mentions: 0,
},
};
});
} }
async function fetchGitHubSignals(orgName: string): Promise<CompanySignal[]> { async function fetchHNSignals(companyName: string): Promise<CompanySignal[] | null> {
const repos = await fetchJson<GitHubSignalRepo[]>(`https://api.github.com/orgs/${encodeURIComponent(orgName)}/repos?sort=created&per_page=10`); return cachedFetchJson<CompanySignal[]>(
if (!Array.isArray(repos)) return []; `intel:signals:hn:${encodeURIComponent(companyName.toLowerCase())}:${hourBucket()}`,
1800,
async () => {
const thirtyDaysAgo = Math.floor(Date.now() / 1000) - 30 * 86400;
const data = await fetchJson<HNAlgoliaSignalResponse>(
`https://hn.algolia.com/api/v1/search_by_date?query=${encodeURIComponent(companyName)}&tags=story&hitsPerPage=20&numericFilters=created_at_i>${thirtyDaysAgo}`,
);
if (data === null || !data.hits) return null;
const now = Date.now(); const now = Date.now();
const thirtyDaysAgo = now - 30 * 86400000; return data.hits.map((h) => {
const ts = toEventTimeMs(h.created_at);
return repos const recencyDays = (now - ts) / 86400000;
.filter((r) => toEventTimeMs(r.created_at) > thirtyDaysAgo) return {
.map((r) => ({ type: classifySignal(h.title || ''),
type: 'technology_adoption', title: h.title || '',
title: `New repository: ${r.full_name || 'unknown'} - ${r.description || 'No description'}`, url: h.url || `https://news.ycombinator.com/item?id=${h.objectID || ''}`,
url: r.html_url || '', source: 'Hacker News',
source: 'GitHub', sourceTier: 2,
sourceTier: 2, timestampMs: ts,
timestampMs: toEventTimeMs(r.created_at), strength: scoreSignalStrength(h.points || 0, h.num_comments || 0, recencyDays),
strength: (r.stargazers_count || 0) > 50 ? 'high' : (r.stargazers_count || 0) > 10 ? 'medium' : 'low', engagement: {
engagement: { points: h.points || 0,
points: 0, comments: h.num_comments || 0,
comments: 0, stars: 0,
stars: r.stargazers_count || 0, forks: 0,
forks: r.forks_count || 0, mentions: 0,
mentions: 0, },
}, };
})); });
}
async function fetchJobSignals(companyName: string): Promise<CompanySignal[]> {
const sixtyDaysAgo = Math.floor(Date.now() / 1000) - 60 * 86400;
const data = await fetchJson<HNAlgoliaSignalResponse>(
`https://hn.algolia.com/api/v1/search?query=${encodeURIComponent(companyName)}&tags=comment,ask_hn&hitsPerPage=10&numericFilters=created_at_i>${sixtyDaysAgo}`,
);
if (!data?.hits) return [];
const hiringComments = data.hits.filter((h) => {
const text = (h.comment_text || '').toLowerCase();
return text.includes('hiring') || text.includes('job') || text.includes('apply');
});
if (hiringComments.length === 0) return [];
const firstComment = hiringComments[0];
if (!firstComment) return [];
return [{
type: 'hiring_surge',
title: `${companyName} hiring activity (${hiringComments.length} mentions in HN hiring threads)`,
url: `https://news.ycombinator.com/item?id=${firstComment.story_id || ''}`,
source: 'HN Hiring Threads',
sourceTier: 3,
timestampMs: toEventTimeMs(firstComment.created_at),
strength: hiringComments.length >= 3 ? 'high' : 'medium',
engagement: {
points: 0,
comments: 0,
stars: 0,
forks: 0,
mentions: hiringComments.length,
}, },
}]; );
}
async function fetchGitHubSignals(orgName: string): Promise<CompanySignal[] | null> {
return cachedFetchJson<CompanySignal[]>(
`intel:signals:gh:${encodeURIComponent(orgName.toLowerCase())}:${hourBucket()}`,
3600,
async () => {
const repos = await fetchJson<GitHubSignalRepo[]>(`https://api.github.com/orgs/${encodeURIComponent(orgName)}/repos?sort=created&per_page=10`);
if (!Array.isArray(repos)) return null;
const now = Date.now();
const thirtyDaysAgo = now - 30 * 86400000;
return repos
.filter((r) => toEventTimeMs(r.created_at) > thirtyDaysAgo)
.map((r) => ({
type: 'technology_adoption',
title: `New repository: ${r.full_name || 'unknown'} - ${r.description || 'No description'}`,
url: r.html_url || '',
source: 'GitHub',
sourceTier: 2,
timestampMs: toEventTimeMs(r.created_at),
strength: (r.stargazers_count || 0) > 50 ? 'high' : (r.stargazers_count || 0) > 10 ? 'medium' : 'low',
engagement: {
points: 0,
comments: 0,
stars: r.stargazers_count || 0,
forks: r.forks_count || 0,
mentions: 0,
},
}));
},
);
}
async function fetchJobSignals(companyName: string): Promise<CompanySignal[] | null> {
return cachedFetchJson<CompanySignal[]>(
`intel:signals:jobs:${encodeURIComponent(companyName.toLowerCase())}:${hourBucket()}`,
1800,
async () => {
const sixtyDaysAgo = Math.floor(Date.now() / 1000) - 60 * 86400;
const data = await fetchJson<HNAlgoliaSignalResponse>(
`https://hn.algolia.com/api/v1/search?query=${encodeURIComponent(companyName)}&tags=comment,ask_hn&hitsPerPage=10&numericFilters=created_at_i>${sixtyDaysAgo}`,
);
if (data === null || !data.hits) return null;
const hiringComments = data.hits.filter((h) => {
const text = (h.comment_text || '').toLowerCase();
return text.includes('hiring') || text.includes('job') || text.includes('apply');
});
if (hiringComments.length === 0) return [];
const firstComment = hiringComments[0];
if (!firstComment) return [];
return [{
type: 'hiring_surge',
title: `${companyName} hiring activity (${hiringComments.length} mentions in HN hiring threads)`,
url: `https://news.ycombinator.com/item?id=${firstComment.story_id || ''}`,
source: 'HN Hiring Threads',
sourceTier: 3,
timestampMs: toEventTimeMs(firstComment.created_at),
strength: hiringComments.length >= 3 ? 'high' : 'medium',
engagement: {
points: 0,
comments: 0,
stars: 0,
forks: 0,
mentions: hiringComments.length,
},
}];
},
);
} }
export async function listCompanySignals( export async function listCompanySignals(
@@ -195,7 +218,7 @@ export async function listCompanySignals(
fetchJobSignals(company), fetchJobSignals(company),
]); ]);
const allSignals = [...hnSignals, ...githubSignals, ...jobSignals] const allSignals = [...(hnSignals ?? []), ...(githubSignals ?? []), ...(jobSignals ?? [])]
.sort((a, b) => b.timestampMs - a.timestampMs); .sort((a, b) => b.timestampMs - a.timestampMs);
const signalTypeCounts: Record<string, number> = {}; const signalTypeCounts: Record<string, number> = {};

View File

@@ -0,0 +1,250 @@
/**
* Functional tests for per-upstream cachedFetchJson in enrichment/signals handlers.
* Verifies null/[] semantics, cache key encoding, and cache hit behavior.
*/
import { describe, it, before, after } from 'node:test';
import assert from 'node:assert/strict';
// ─── Redis stub ──────────────────────────────────────────────────────────────
// Negative-cache marker written to Redis when a fetcher resolves null.
// NOTE(review): value must stay in sync with the sentinel used by
// server/_shared/redis.ts cachedFetchJson — confirm against that file.
const NEG_SENTINEL = '__WM_NEG__';
// In-memory stand-in for the shared Redis client used by cachedFetchJson.
// Exposes the backing maps plus a log of every SET (key/value/ttl) so tests
// can assert precisely on what was written to the cache and with which TTL.
function makeRedisStub() {
  const store = new Map();
  const ttls = new Map();
  const setCalls = [];

  // Mirrors the async get/set surface of the real client: GET on a missing
  // key resolves to null rather than undefined.
  const get = async (key) => (store.has(key) ? store.get(key) : null);

  const set = async (key, value, ttl) => {
    store.set(key, value);
    ttls.set(key, ttl);
    setCalls.push({ key, value, ttl });
  };

  // Wipes all state between test groups.
  const reset = () => {
    store.clear();
    ttls.clear();
    setCalls.length = 0;
  };

  return { store, ttls, setCalls, get, set, reset };
}
// ─── cachedFetchJson re-implementation for testing ───────────────────────────
// Tests the same logic as server/_shared/redis.ts cachedFetchJson
// Test-local mirror of server/_shared/redis.ts cachedFetchJson.
// Semantics under test:
//  - NEG_SENTINEL in Redis short-circuits to null (negative cache hit);
//  - any other non-null cached value is returned as-is;
//  - concurrent calls for the same key share one in-flight promise;
//  - a null/undefined fetch result is neg-cached with the short TTL,
//    every other result (including []) is cached under the normal TTL.
function makeCachedFetchJson(redis) {
  const pending = new Map();

  return async function cachedFetchJson(key, ttlSeconds, fetcher, negativeTtlSeconds = 120) {
    const hit = await redis.get(key);
    if (hit === NEG_SENTINEL) return null;
    if (hit !== null) return hit;

    // Deduplicate concurrent fetches for the same key.
    if (pending.has(key)) return pending.get(key);

    const run = async () => {
      try {
        const result = await fetcher();
        if (result == null) {
          await redis.set(key, NEG_SENTINEL, negativeTtlSeconds);
        } else {
          await redis.set(key, result, ttlSeconds);
        }
        return result;
      } finally {
        pending.delete(key);
      }
    };

    const promise = run();
    pending.set(key, promise);
    return promise;
  };
}
// ─── Tests ───────────────────────────────────────────────────────────────────
// Failure path: a fetcher that resolves null must be neg-cached with the
// short default TTL so repeated RPCs do not hammer a failing upstream.
// NOTE: the three cases run in order and share one stub instance — later
// cases assert on Redis state written by earlier ones.
describe('cachedFetchJson — null path (fetch failure)', () => {
  const redis = makeRedisStub();
  const cachedFetchJson = makeCachedFetchJson(redis);
  before(() => redis.reset());

  it('returns null when fetcher returns null', async () => {
    const result = await cachedFetchJson(
      'intel:enrichment:gh-org:testco',
      3600,
      async () => null,
    );
    assert.equal(result, null);
  });

  it('writes NEG_SENTINEL to Redis with 120s TTL on fetch failure', () => {
    // Exactly one SET — the sentinel write triggered by the previous case.
    assert.equal(redis.setCalls.length, 1);
    const call = redis.setCalls[0];
    assert.equal(call.key, 'intel:enrichment:gh-org:testco');
    assert.equal(call.value, NEG_SENTINEL);
    assert.equal(call.ttl, 120); // default negativeTtlSeconds, not the 3600 passed in
  });

  it('subsequent call returns null from NEG_SENTINEL (no fetcher call)', async () => {
    let fetcherCalled = false;
    const result = await cachedFetchJson(
      'intel:enrichment:gh-org:testco',
      3600,
      async () => { fetcherCalled = true; return { name: 'should not get here' }; },
    );
    assert.equal(result, null);
    assert.equal(fetcherCalled, false, 'fetcher should not be called on neg cache hit');
  });
});
// Empty-but-successful path: [] is a legitimate upstream answer (e.g. no
// HN mentions for a company) and must be cached under the normal TTL,
// never confused with a failed fetch. Cases run in order on a shared stub.
describe('cachedFetchJson — empty array (successful empty result)', () => {
  const redis = makeRedisStub();
  const cachedFetchJson = makeCachedFetchJson(redis);
  before(() => redis.reset());

  it('returns [] when fetcher returns []', async () => {
    const result = await cachedFetchJson(
      'intel:enrichment:hn:emptyco',
      1800,
      async () => [],
    );
    assert.deepEqual(result, []);
  });

  it('caches [] with normal TTL (not neg cache)', () => {
    assert.equal(redis.setCalls.length, 1);
    const call = redis.setCalls[0];
    assert.equal(call.key, 'intel:enrichment:hn:emptyco');
    assert.deepEqual(call.value, []);
    assert.equal(call.ttl, 1800); // full TTL proves [] was not treated as a failure
    assert.notEqual(call.value, NEG_SENTINEL);
  });

  it('subsequent call returns [] from cache (no fetcher call)', async () => {
    let fetcherCalled = false;
    const result = await cachedFetchJson(
      'intel:enrichment:hn:emptyco',
      1800,
      async () => { fetcherCalled = true; return ['should not appear']; },
    );
    assert.deepEqual(result, []);
    assert.equal(fetcherCalled, false, 'fetcher should not be called on cache hit');
  });
});
// Warm-cache path: when Redis already holds a value for the key, the
// fetcher must never run and nothing new may be written back.
describe('cachedFetchJson — cache hit skips upstream fetch', () => {
  const redis = makeRedisStub();
  const cachedFetchJson = makeCachedFetchJson(redis);

  before(() => {
    redis.reset();
    // Pre-seed the store directly so the first lookup is a warm hit.
    redis.store.set('intel:enrichment:gh-org:stripe', { name: 'Stripe', publicRepos: 42 });
  });

  it('returns cached data without calling fetcher', async () => {
    let upstreamHit = false;
    const value = await cachedFetchJson(
      'intel:enrichment:gh-org:stripe',
      3600,
      async () => {
        upstreamHit = true;
        return { name: 'WRONG' };
      },
    );
    assert.deepEqual(value, { name: 'Stripe', publicRepos: 42 });
    assert.equal(upstreamHit, false);
  });

  it('no Redis SET calls on cache hit', () => {
    assert.equal(redis.setCalls.length, 0);
  });
});
// Cache-key hygiene: handlers build keys with encodeURIComponent on the
// lowercased company name, so names with special characters cannot inject
// separators or collide. First two cases accumulate writes on a shared stub.
describe('cachedFetchJson — cache key encoding', () => {
  const redis = makeRedisStub();
  const cachedFetchJson = makeCachedFetchJson(redis);
  before(() => redis.reset());

  it('encodes special chars in company names', async () => {
    await cachedFetchJson(
      `intel:enrichment:hn:${encodeURIComponent('at&t')}`,
      1800,
      async () => [{ title: 'AT&T news', url: 'https://example.com', points: 10, comments: 5, createdAtMs: 0 }],
    );
    // '&' must be percent-encoded ('%26') so it cannot masquerade as a key separator.
    const key = redis.setCalls[0]?.key;
    assert.ok(key?.includes('at%26t'), `Expected key to contain "at%26t", got: ${key}`);
  });

  it('different companies produce different keys', async () => {
    // 'johnson %26 johnson' contains a literal '%26' before encoding; after
    // encodeURIComponent it must still not collide with the 'at&t' key above.
    await cachedFetchJson(
      `intel:enrichment:hn:${encodeURIComponent('johnson %26 johnson')}`,
      1800,
      async () => [],
    );
    // setCalls still holds the write from the previous case — both keys are compared.
    const keys = redis.setCalls.map((c) => c.key);
    assert.equal(new Set(keys).size, keys.length, 'each company should produce a unique cache key');
  });

  it('lowercased names produce consistent keys', async () => {
    // Fresh stub: this case must not observe the writes made above.
    const redis2 = makeRedisStub();
    const cf2 = makeCachedFetchJson(redis2);
    await cf2(`intel:enrichment:gh-org:${encodeURIComponent('stripe')}`, 3600, async () => ({ name: 'Stripe' }));
    await cf2(`intel:enrichment:gh-org:${encodeURIComponent('STRIPE'.toLowerCase())}`, 3600, async () => ({ name: 'WRONG' }));
    // Second call is a cache hit on the same key, so only one SET happens.
    assert.equal(redis2.setCalls.length, 1, 'STRIPE and stripe should resolve to the same key');
  });
});
// Static source checks: read the real handler files from the repo and
// verify the caching integration actually landed — the redis import, the
// per-upstream key prefixes, and the daily/hourly time buckets.
describe('cachedFetchJson — import verification', () => {
  // Loads a repo file relative to the working directory.
  const readSource = async (relPath) => {
    const { readFileSync } = await import('node:fs');
    const { resolve } = await import('node:path');
    return readFileSync(resolve(relPath), 'utf-8');
  };

  it('get-company-enrichment.ts imports cachedFetchJson', async () => {
    const src = await readSource('server/worldmonitor/intelligence/v1/get-company-enrichment.ts');
    assert.ok(src.includes("from '../../../_shared/redis'"), 'must import from _shared/redis');
    assert.ok(src.includes('cachedFetchJson'), 'must use cachedFetchJson');
    assert.ok(src.includes('intel:enrichment:gh-org:'), 'must use gh-org cache key');
    assert.ok(src.includes('intel:enrichment:gh-tech:'), 'must use gh-tech cache key');
    assert.ok(src.includes('intel:enrichment:sec:'), 'must use sec cache key');
    assert.ok(src.includes('intel:enrichment:hn:'), 'must use hn cache key');
    assert.ok(
      src.includes('intel:enrichment:sec:') && src.includes('getTodayISO()'),
      'SEC cache key must include getTodayISO() daily bucket to track date-window changes',
    );
  });

  it('list-company-signals.ts imports cachedFetchJson', async () => {
    const src = await readSource('server/worldmonitor/intelligence/v1/list-company-signals.ts');
    assert.ok(src.includes("from '../../../_shared/redis'"), 'must import from _shared/redis');
    assert.ok(src.includes('cachedFetchJson'), 'must use cachedFetchJson');
    assert.ok(src.includes('intel:signals:hn:'), 'must use signals:hn cache key');
    assert.ok(src.includes('intel:signals:gh:'), 'must use signals:gh cache key');
    assert.ok(src.includes('intel:signals:jobs:'), 'must use signals:jobs cache key');
    assert.ok(
      src.includes('hourBucket()'),
      'all signal cache keys must include hourBucket() to prevent stale rolling-window results',
    );
  });

  it('cache keys do not collide with existing bootstrap keys', async () => {
    const cacheKeysSrc = await readSource('server/_shared/cache-keys.ts');
    assert.ok(
      !cacheKeysSrc.includes('intel:enrichment:'),
      'intel:enrichment: prefix should not exist in bootstrap cache-keys.ts (on-demand keys)',
    );
    assert.ok(
      !cacheKeysSrc.includes('intel:signals:'),
      'intel:signals: prefix should not exist in bootstrap cache-keys.ts (on-demand keys)',
    );
  });
});