mirror of
https://github.com/koala73/worldmonitor.git
synced 2026-05-14 02:56:21 +02:00
* feat(conflict): wire UCDP API access token across full stack UCDP API now requires an `x-ucdp-access-token` header. Renames the stub `UC_DP_KEY` to `UCDP_ACCESS_TOKEN` (matching ACLED convention) and wires it through Rust keychain, sidecar allowlist + verification, handler fetch headers, feature toggles, and desktop settings UI. - Rename UC_DP_KEY → UCDP_ACCESS_TOKEN in type system and labels - Add ucdpConflicts feature toggle with required secret - Add UCDP_ACCESS_TOKEN to Rust SUPPORTED_SECRET_KEYS (24→25) - Add sidecar ALLOWED_ENV_KEYS entry + validation with dynamic GED version probing - Handler sends x-ucdp-access-token header when token is present - UC_DP_KEY fallback in handler for one-release migration window - Update .env.example, desktop-readiness, and docs * feat(conflict): pre-fetch UCDP events via Railway cron + Redis cache Replace the 228-line edge handler that fetched UCDP GED API on every request with a thin Redis reader. The heavy fetch logic (version discovery, paginated backward fetch, 1-year trailing window filter) now runs as a setInterval loop in the Railway relay (ais-relay.cjs) every 6 hours, writing to Redis key conflict:ucdp-events:v1. Changes: - Add UCDP seed loop to ais-relay.cjs (6h interval, 6 pages, 2K cap) - Rewrite list-ucdp-events.ts as thin Redis reader (35 lines) - Add conflict:ucdp-events:v1 to bootstrap batch keys - Protect key from cache-purge via durable data prefix - Add manual-only seed-ucdp-events workflow + standalone script - Rename panel "UCDP Events" → "Armed Conflict Events" in locale - Add 24h TTL + 25h staleness check as safety nets
110 lines
3.5 KiB
JavaScript
110 lines
3.5 KiB
JavaScript
import { getCorsHeaders, isDisallowedOrigin } from './_cors.js';
|
|
import { validateApiKey } from './_api-key.js';
|
|
|
|
export const config = { runtime: 'edge' };

// Registry of bootstrap-able cache entries: response field name → Redis key.
// These keys are populated by other handlers / background jobs; this endpoint
// is a read-only consumer. Frozen so the shared module-level map cannot be
// mutated accidentally at runtime (writes would otherwise silently change
// every subsequent request's registry).
const BOOTSTRAP_CACHE_KEYS = Object.freeze({
  earthquakes: 'seismology:earthquakes:v1',
  outages: 'infra:outages:v1',
  serviceStatuses: 'infra:service-statuses:v1',
  sectors: 'market:sectors:v1',
  etfFlows: 'market:etf-flows:v1',
  macroSignals: 'economic:macro-signals:v1',
  bisPolicy: 'economic:bis:policy:v1',
  bisExchange: 'economic:bis:eer:v1',
  bisCredit: 'economic:bis:credit:v1',
  shippingRates: 'supply_chain:shipping:v2',
  chokepoints: 'supply_chain:chokepoints:v1',
  minerals: 'supply_chain:minerals:v1',
  giving: 'giving:summary:v1',
  climateAnomalies: 'climate:anomalies:v1',
  wildfires: 'wildfire:fires:v1',
  ucdpEvents: 'conflict:ucdp-events:v1',
});

// Sentinel value presumably written by the cache-populating handlers to mark
// a negative-cache entry ("upstream had no data") — TODO confirm the writer.
// Entries holding this value are treated as missing rather than returned.
const NEG_SENTINEL = '__WM_NEG__';
|
/**
 * Batch-read JSON values from Upstash Redis via the REST pipeline endpoint.
 *
 * @param {string[]} keys - Redis keys to GET in a single pipeline call.
 * @returns {Promise<Map<string, unknown>>} Map of key → parsed JSON value.
 *   Keys that are absent, hold the negative-cache sentinel, or hold malformed
 *   JSON are simply omitted. Returns an empty Map when no keys are requested,
 *   when credentials are not configured, or when Redis responds non-OK.
 *   Network errors / the 3s timeout propagate to the caller as a rejection.
 */
async function getCachedJsonBatch(keys) {
  const hits = new Map();
  if (keys.length === 0) return hits;

  const baseUrl = process.env.UPSTASH_REDIS_REST_URL;
  const authToken = process.env.UPSTASH_REDIS_REST_TOKEN;
  if (!baseUrl || !authToken) return hits;

  // Always read unprefixed keys — bootstrap is a read-only consumer of
  // production cache data. Preview/branch deploys don't run handlers that
  // populate prefixed keys, so prefixing would always miss.
  const commands = keys.map((key) => ['GET', key]);
  const response = await fetch(`${baseUrl}/pipeline`, {
    method: 'POST',
    headers: { Authorization: `Bearer ${authToken}`, 'Content-Type': 'application/json' },
    body: JSON.stringify(commands),
    signal: AbortSignal.timeout(3000),
  });
  if (!response.ok) return hits;

  // Pipeline replies arrive positionally: replies[i] answers commands[i].
  const replies = await response.json();
  for (const [index, key] of keys.entries()) {
    const raw = replies[index]?.result;
    if (!raw) continue;
    try {
      const value = JSON.parse(raw);
      if (value !== NEG_SENTINEL) hits.set(key, value);
    } catch { /* skip malformed */ }
  }
  return hits;
}
|
|
|
|
/**
 * Edge handler: serve a batch of pre-warmed cache entries in one response.
 *
 * Supports an optional `?keys=name1,name2` query parameter to narrow the
 * registry to a subset of entries. Responds 403 for disallowed origins,
 * 204 for CORS preflight, 401 when an API key is required but invalid, and
 * otherwise 200 with `{ data, missing }` — `data` holds the entries found
 * in Redis, `missing` lists the entry names that were not found. A Redis
 * failure degrades to an empty `data` payload (everything reported missing)
 * with `Cache-Control: no-cache` so clients retry soon.
 *
 * @param {Request} req - incoming edge request.
 * @returns {Promise<Response>}
 */
export default async function handler(req) {
  if (isDisallowedOrigin(req)) {
    return new Response('Forbidden', { status: 403 });
  }

  const cors = getCorsHeaders(req);
  if (req.method === 'OPTIONS') {
    return new Response(null, { status: 204, headers: cors });
  }

  const apiKeyResult = validateApiKey(req);
  if (apiKeyResult.required && !apiKeyResult.valid) {
    return new Response(JSON.stringify({ error: apiKeyResult.error }), {
      status: 401,
      headers: { ...cors, 'Content-Type': 'application/json' },
    });
  }

  // `?keys=a,b` narrows the registry; unknown names are silently dropped.
  const requested = new URL(req.url).searchParams.get('keys')?.split(',').filter(Boolean);
  const registry = requested
    ? Object.fromEntries(
        Object.entries(BOOTSTRAP_CACHE_KEYS).filter(([name]) => requested.includes(name)),
      )
    : BOOTSTRAP_CACHE_KEYS;

  const names = Object.keys(registry);
  const keys = Object.values(registry);

  let cached;
  try {
    cached = await getCachedJsonBatch(keys);
  } catch {
    // Redis unreachable or timed out: report everything missing, uncacheable.
    return new Response(JSON.stringify({ data: {}, missing: names }), {
      status: 200,
      headers: { ...cors, 'Content-Type': 'application/json', 'Cache-Control': 'no-cache' },
    });
  }

  const data = {};
  const missing = [];
  names.forEach((name, i) => {
    const value = cached.get(keys[i]);
    if (value === undefined) missing.push(name);
    else data[name] = value;
  });

  return new Response(JSON.stringify({ data, missing }), {
    status: 200,
    headers: {
      ...cors,
      'Content-Type': 'application/json',
      'Cache-Control': 'public, s-maxage=60, stale-while-revalidate=30',
    },
  });
}
|