mirror of
https://github.com/koala73/worldmonitor.git
synced 2026-04-25 17:14:57 +02:00
feat(mcp): OAuth 2.0 Authorization Server for claude.ai connector (#2418)
* feat(mcp): add OAuth 2.0 Authorization Server for claude.ai connector Implements spec-compliant MCP authentication so claude.ai's remote connector (which requires OAuth Client ID + Secret, no custom headers) can authenticate. - public/.well-known/oauth-authorization-server: RFC 8414 discovery document - api/oauth/token.js: client_credentials grant, issues UUID Bearer token in Redis TTL 3600s - api/_oauth-token.js: resolveApiKeyFromBearer() looks up token in Redis - api/mcp.ts: 3-tier auth (Bearer OAuth first, then ?key=, then X-WorldMonitor-Key); switch to getPublicCorsHeaders; surface error messages in catch - vercel.json: rewrite /oauth/token, exclude oauth from SPA, CORS headers - tests: update SPA no-cache pattern Supersedes PR #2417. Usage: URL=worldmonitor.app/mcp, Client ID=worldmonitor, Client Secret=<API key> Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com> * docs: fix markdown lint in OAuth plan (blank lines around lists) * fix(oauth): address all P1+P2 code review findings for MCP OAuth endpoint - Add per-IP rate limiting (10 req/min) to /oauth/token via Upstash slidingWindow - Return HTTP 401 + WWW-Authenticate header when Bearer token is invalid/expired - Add Cache-Control: no-store + Pragma: no-cache to token response (RFC 6749 §5.1) - Simplify _oauth-token.js to delegate to readJsonFromUpstash (removes duplicated Redis boilerplate) - Remove dead code from token.js: parseBasicAuth, JSON body path, clientId/issuedAt fields - Add Content-Type: application/json header for /.well-known/oauth-authorization-server - Remove response_types_supported (only applies to authorization endpoint, not client_credentials) Closes: todos 075, 076, 077, 078, 079 🤖 Generated with claude-sonnet-4-6 via Claude Code (https://claude.ai/claude-code) + Compound Engineering v2.40.0 Co-Authored-By: claude-sonnet-4-6 (200K context) <noreply@anthropic.com> * chore(review): fresh review findings — todos 081-086, mark 075/077/078/079 complete * fix(mcp): remove ?key= URL 
param auth + mask internal errors - Remove ?key= query param auth path — API keys in URLs appear in Vercel/CF access logs, browser history, Referer headers. OAuth client_credentials (same PR) already covers clients that cannot set custom headers. Only two auth paths remain: Bearer OAuth and X-WorldMonitor-Key header. - Revert err.message disclosure: catch block was accidentally exposing internal service URLs/IPs via err.message. Restore original hardcoded string, add console.error for server-side visibility. Resolves: todos 081, 082 * fix(oauth): resolve all P2/P3 review findings (todos 076, 080, 083-086) - 076: no-credentials path in mcp.ts now returns HTTP 401 + WWW-Authenticate instead of rpcError (200) - 080: store key fingerprint (sha256 first 16 hex chars) in Redis, not plaintext key - 083: replace Array.includes() with timingSafeIncludes() (constant-time HMAC comparison) in token.js and mcp.ts - 084: resolveApiKeyFromBearer uses direct fetch that throws on Redis errors (500 not 401 on infra failure) - 085: token.js imports getClientIp, getPublicCorsHeaders, jsonResponse from shared helpers; removes local duplicates - 086: mcp.ts auth chain restructured to check Bearer header first, passes token string to resolveApiKeyFromBearer (eliminates double header read + unconditional await) * test(mcp): update auth test to expect HTTP 401 for missing credentials Align with todo 076 fix: no-credentials path now returns 401 + WWW-Authenticate instead of JSON-RPC 200 response. Also asserts WWW-Authenticate header presence. 
* chore: mark todos 076, 080, 083-086 complete * fix(mcp): harden OAuth error paths and fix rate limit cross-user collision - Wrap resolveApiKeyFromBearer() in try/catch in mcp.ts; Redis/network errors now return 503 + Retry-After: 5 instead of crashing the handler - Wrap storeToken() fetch in try/catch in oauth/token.js; network errors return false so the existing if (!stored) path returns 500 cleanly - Re-key token endpoint rate limit by sha256(clientSecret).slice(0,8) instead of IP; prevents cross-user 429s when callers share Anthropic's shared outbound IPs (Claude remote MCP connector) --------- Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
24
api/_crypto.js
Normal file
24
api/_crypto.js
Normal file
@@ -0,0 +1,24 @@
|
||||
export async function sha256Hex(str) {
|
||||
const buf = await crypto.subtle.digest('SHA-256', new TextEncoder().encode(str));
|
||||
return Array.from(new Uint8Array(buf)).map(b => b.toString(16).padStart(2, '0')).join('');
|
||||
}
|
||||
|
||||
/**
 * Derive a short, non-reversible identifier for an API key:
 * the first 16 hex chars (64 bits) of its SHA-256 digest.
 * @param {string} key - API key to fingerprint.
 * @returns {Promise<string>} 16-character hex fingerprint.
 */
export async function keyFingerprint(key) {
  const fullDigest = await sha256Hex(key);
  return fullDigest.slice(0, 16);
}
|
||||
|
||||
export async function timingSafeIncludes(candidate, validKeys) {
|
||||
if (!candidate || !validKeys.length) return false;
|
||||
const enc = new TextEncoder();
|
||||
const candidateHash = await crypto.subtle.digest('SHA-256', enc.encode(candidate));
|
||||
const candidateBytes = new Uint8Array(candidateHash);
|
||||
let found = false;
|
||||
for (const k of validKeys) {
|
||||
const kHash = await crypto.subtle.digest('SHA-256', enc.encode(k));
|
||||
const kBytes = new Uint8Array(kHash);
|
||||
let diff = 0;
|
||||
for (let i = 0; i < kBytes.length; i++) diff |= candidateBytes[i] ^ kBytes[i];
|
||||
if (diff === 0) found = true;
|
||||
}
|
||||
return found;
|
||||
}
|
||||
29
api/_oauth-token.js
Normal file
29
api/_oauth-token.js
Normal file
@@ -0,0 +1,29 @@
|
||||
// @ts-expect-error — JS module, no declaration file
|
||||
import { keyFingerprint } from './_crypto.js';
|
||||
|
||||
/**
 * Look up an issued OAuth token in Upstash Redis (REST API).
 *
 * @param {string} uuid - Opaque access token (the Redis key suffix).
 * @returns {Promise<unknown|null>} Parsed stored value, or null when Redis is
 *   not configured, the key is absent, or the stored value is not valid JSON.
 * @throws {Error} On a non-2xx Redis HTTP response (infra failure, not auth failure).
 */
async function fetchOAuthToken(uuid) {
  const redisUrl = process.env.UPSTASH_REDIS_REST_URL;
  const redisToken = process.env.UPSTASH_REDIS_REST_TOKEN;
  if (!redisUrl || !redisToken) return null;

  const redisKey = encodeURIComponent(`oauth:token:${uuid}`);
  const resp = await fetch(`${redisUrl}/get/${redisKey}`, {
    headers: { Authorization: `Bearer ${redisToken}` },
    signal: AbortSignal.timeout(3_000),
  });
  // Throw (rather than return null) so callers can distinguish infra errors
  // from a simply-unknown token.
  if (!resp.ok) throw new Error(`Redis HTTP ${resp.status}`);

  const payload = await resp.json();
  if (!payload.result) return null;
  try {
    return JSON.parse(payload.result);
  } catch {
    return null;
  }
}
|
||||
|
||||
/**
 * Resolve a Bearer access token back to the API key it was issued for.
 *
 * Redis stores only a key fingerprint (sha256 prefix), never the plaintext
 * key, so we re-fingerprint each configured valid key and look for a match.
 *
 * @param {string} token - Opaque Bearer token from the Authorization header.
 * @returns {Promise<string|null>} Matching API key, or null when the token is
 *   missing, unknown/expired, or no configured key matches the fingerprint.
 * @throws {Error} Propagated from the Redis lookup on infra failure.
 */
export async function resolveApiKeyFromBearer(token) {
  if (!token) return null;

  const storedFingerprint = await fetchOAuthToken(token);
  if (!storedFingerprint || typeof storedFingerprint !== 'string') return null;

  const validKeys = (process.env.WORLDMONITOR_VALID_KEYS || '').split(',').filter(Boolean);
  for (const candidate of validKeys) {
    const candidateFingerprint = await keyFingerprint(candidate);
    if (candidateFingerprint === storedFingerprint) return candidate;
  }
  return null;
}
|
||||
@@ -21,7 +21,7 @@ function getRatelimit() {
|
||||
return ratelimit;
|
||||
}
|
||||
|
||||
function getClientIp(request) {
|
||||
export function getClientIp(request) {
|
||||
// With Cloudflare proxy -> Vercel, x-real-ip is the CF edge IP (shared
|
||||
// across users). cf-connecting-ip is the actual client IP — prefer it.
|
||||
// (Matches server/_shared/rate-limit.ts)
|
||||
|
||||
56
api/mcp.ts
56
api/mcp.ts
@@ -5,9 +5,11 @@ import { getPublicCorsHeaders } from './_cors.js';
|
||||
// @ts-expect-error — JS module, no declaration file
|
||||
import { jsonResponse } from './_json-response.js';
|
||||
// @ts-expect-error — JS module, no declaration file
|
||||
import { validateApiKey } from './_api-key.js';
|
||||
// @ts-expect-error — JS module, no declaration file
|
||||
import { readJsonFromUpstash } from './_upstash-json.js';
|
||||
// @ts-expect-error — JS module, no declaration file
|
||||
import { resolveApiKeyFromBearer } from './_oauth-token.js';
|
||||
// @ts-expect-error — JS module, no declaration file
|
||||
import { timingSafeIncludes } from './_crypto.js';
|
||||
|
||||
export const config = { runtime: 'edge' };
|
||||
|
||||
@@ -424,13 +426,47 @@ export default async function handler(req: Request): Promise<Response> {
|
||||
return new Response(null, { status: 204, headers: corsHeaders });
|
||||
}
|
||||
|
||||
// Auth — always require API key (MCP clients are never same-origin browser requests)
|
||||
const auth = validateApiKey(req, { forceKey: true });
|
||||
if (!auth.valid) {
|
||||
return rpcError(null, -32001, auth.error ?? 'API key required');
|
||||
// Auth chain (in priority order):
|
||||
// 1. Authorization: Bearer <oauth_token> — issued by /oauth/token (spec-compliant OAuth 2.0)
|
||||
// 2. X-WorldMonitor-Key header — direct API key (curl, custom integrations)
|
||||
let apiKey = '';
|
||||
const authHeader = req.headers.get('Authorization') ?? '';
|
||||
if (authHeader.startsWith('Bearer ')) {
|
||||
const token = authHeader.slice(7).trim();
|
||||
let bearerApiKey: string | null;
|
||||
try {
|
||||
bearerApiKey = await resolveApiKeyFromBearer(token);
|
||||
} catch {
|
||||
// Redis/network error — return 503 so clients know to retry, not re-authenticate
|
||||
return new Response(
|
||||
JSON.stringify({ jsonrpc: '2.0', id: null, error: { code: -32603, message: 'Auth service temporarily unavailable. Try again.' } }),
|
||||
{ status: 503, headers: { 'Content-Type': 'application/json', 'Retry-After': '5', ...corsHeaders } }
|
||||
);
|
||||
}
|
||||
if (bearerApiKey) {
|
||||
apiKey = bearerApiKey;
|
||||
} else {
|
||||
// Bearer token present but unresolvable — expired or invalid UUID
|
||||
return new Response(
|
||||
JSON.stringify({ jsonrpc: '2.0', id: null, error: { code: -32001, message: 'Invalid or expired OAuth token. Re-authenticate via /oauth/token.' } }),
|
||||
{ status: 401, headers: { 'Content-Type': 'application/json', 'WWW-Authenticate': 'Bearer realm="worldmonitor", error="invalid_token"', ...corsHeaders } }
|
||||
);
|
||||
}
|
||||
} else {
|
||||
const candidateKey = req.headers.get('X-WorldMonitor-Key') ?? '';
|
||||
if (!candidateKey) {
|
||||
return new Response(
|
||||
JSON.stringify({ jsonrpc: '2.0', id: null, error: { code: -32001, message: 'Authentication required. Use OAuth (/oauth/token) or pass your API key via X-WorldMonitor-Key header.' } }),
|
||||
{ status: 401, headers: { 'Content-Type': 'application/json', 'WWW-Authenticate': 'Bearer realm="worldmonitor"', ...corsHeaders } }
|
||||
);
|
||||
}
|
||||
const validKeys = (process.env.WORLDMONITOR_VALID_KEYS || '').split(',').filter(Boolean);
|
||||
if (!await timingSafeIncludes(candidateKey, validKeys)) {
|
||||
return rpcError(null, -32001, 'Invalid API key');
|
||||
}
|
||||
apiKey = candidateKey;
|
||||
}
|
||||
|
||||
const apiKey = req.headers.get('X-WorldMonitor-Key') ?? '';
|
||||
|
||||
// Per-key rate limit
|
||||
const rl = getMcpRatelimit();
|
||||
@@ -496,9 +532,9 @@ export default async function handler(req: Request): Promise<Response> {
|
||||
return rpcOk(id, {
|
||||
content: [{ type: 'text', text: JSON.stringify(result) }],
|
||||
}, corsHeaders);
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : 'data fetch failed';
|
||||
return rpcError(id, -32603, `Internal error: ${msg}`);
|
||||
} catch (err: unknown) {
|
||||
console.error('[mcp] tool execution error:', err);
|
||||
return rpcError(id, -32603, 'Internal error: data fetch failed');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
121
api/oauth/token.js
Normal file
121
api/oauth/token.js
Normal file
@@ -0,0 +1,121 @@
|
||||
import { Ratelimit } from '@upstash/ratelimit';
|
||||
import { Redis } from '@upstash/redis';
|
||||
// @ts-expect-error — JS module, no declaration file
|
||||
import { getClientIp } from '../_rate-limit.js';
|
||||
// @ts-expect-error — JS module, no declaration file
|
||||
import { getPublicCorsHeaders } from '../_cors.js';
|
||||
// @ts-expect-error — JS module, no declaration file
|
||||
import { jsonResponse } from '../_json-response.js';
|
||||
// @ts-expect-error — JS module, no declaration file
|
||||
import { keyFingerprint, sha256Hex, timingSafeIncludes } from '../_crypto.js';
|
||||
|
||||
// Vercel: run this handler on the Edge runtime (Web-standard Request/Response).
export const config = { runtime: 'edge' };

// Access-token lifetime in seconds; also reported to clients via `expires_in`
// and used as the Redis TTL when the token is stored.
const TOKEN_TTL_SECONDS = 3600;
|
||||
|
||||
/**
 * JSON response helper: merges the endpoint's public CORS headers with any
 * extra headers (extras win on collision).
 * @param {object} body - JSON-serializable response body.
 * @param {number} [status=200] - HTTP status code.
 * @param {object} [extra={}] - Additional headers to set.
 * @returns {Response}
 */
function jsonResp(body, status = 200, extra = {}) {
  const headers = { ...getPublicCorsHeaders('POST, OPTIONS'), ...extra };
  return jsonResponse(body, status, headers);
}
|
||||
|
||||
// Tight rate limiter for credential endpoint: 10 token requests per minute per credential
let _rl = null;

/**
 * Lazily build (and memoize) the Upstash sliding-window rate limiter.
 * @returns {Ratelimit|null} The limiter, or null when Redis is not configured
 *   (callers treat null as "rate limiting disabled").
 */
function getRatelimit() {
  if (_rl !== null) return _rl;

  const redisUrl = process.env.UPSTASH_REDIS_REST_URL;
  const redisToken = process.env.UPSTASH_REDIS_REST_TOKEN;
  if (!redisUrl || !redisToken) return null;

  _rl = new Ratelimit({
    redis: new Redis({ url: redisUrl, token: redisToken }),
    limiter: Ratelimit.slidingWindow(10, '60 s'),
    prefix: 'rl:oauth-token',
    analytics: false,
  });
  return _rl;
}
|
||||
|
||||
/**
 * Check a client_secret against the comma-separated WORLDMONITOR_VALID_KEYS
 * env var using a constant-time comparison.
 * @param {string|null} secret - client_secret from the token request body.
 * @returns {Promise<boolean>} true iff the secret matches a configured key.
 */
async function validateSecret(secret) {
  if (!secret) return false;
  const configuredKeys = (process.env.WORLDMONITOR_VALID_KEYS || '').split(',').filter(Boolean);
  return timingSafeIncludes(secret, configuredKeys);
}
|
||||
|
||||
/**
 * Persist a freshly issued token in Redis: oauth:token:<uuid> → key
 * fingerprint (never the plaintext key), with TOKEN_TTL_SECONDS expiry.
 *
 * Never throws: any network/Redis failure yields false so the caller's
 * `if (!stored)` path returns a clean 500.
 *
 * @param {string} uuid - The opaque access token being issued.
 * @param {string} apiKey - The validated client_secret (WorldMonitor API key).
 * @returns {Promise<boolean>} true iff Redis acknowledged the SET with 'OK'.
 */
async function storeToken(uuid, apiKey) {
  const redisUrl = process.env.UPSTASH_REDIS_REST_URL;
  const redisToken = process.env.UPSTASH_REDIS_REST_TOKEN;
  if (!redisUrl || !redisToken) return false;

  try {
    const fingerprint = await keyFingerprint(apiKey);
    const setCommand = ['SET', `oauth:token:${uuid}`, JSON.stringify(fingerprint), 'EX', TOKEN_TTL_SECONDS];
    const resp = await fetch(`${redisUrl}/pipeline`, {
      method: 'POST',
      headers: { Authorization: `Bearer ${redisToken}`, 'Content-Type': 'application/json' },
      body: JSON.stringify([setCommand]),
      signal: AbortSignal.timeout(3_000),
    });
    if (!resp.ok) return false;

    const results = await resp.json().catch(() => null);
    return Array.isArray(results) && results[0]?.result === 'OK';
  } catch {
    return false;
  }
}
|
||||
|
||||
/**
 * POST /oauth/token — OAuth 2.0 client_credentials grant (RFC 6749 §4.4).
 *
 * The client_secret IS an existing WorldMonitor API key; on success an opaque
 * UUID Bearer token is issued and stored in Redis with a TOKEN_TTL_SECONDS TTL.
 *
 * Order matters here: rate limiting runs BEFORE grant/secret validation so
 * brute-force attempts are throttled, and the limiter is keyed on the secret's
 * fingerprint (not IP) to avoid cross-user 429s behind shared outbound IPs.
 *
 * @param {Request} req - Edge-runtime request.
 * @returns {Promise<Response>} JSON per RFC 6749 §5.1 (success) / §5.2 (error).
 */
export default async function handler(req) {
  const corsHeaders = getPublicCorsHeaders('POST, OPTIONS');

  // CORS preflight — no body, just the allow headers.
  if (req.method === 'OPTIONS') {
    return new Response(null, { status: 204, headers: corsHeaders });
  }
  if (req.method !== 'POST') {
    return jsonResp({ error: 'method_not_allowed' }, 405);
  }

  // Parse body first so we can key the rate limit on the credential fingerprint
  // rather than IP — Claude's shared outbound IPs would otherwise cause cross-user 429s.
  // A body read/parse failure degrades to empty params (fails grant check below).
  const params = new URLSearchParams(await req.text().catch(() => ''));
  const grantType = params.get('grant_type');
  const clientSecret = params.get('client_secret');

  const rl = getRatelimit();  // null when Upstash env vars are absent
  if (rl) {
    try {
      // Key by sha256(clientSecret).slice(0,8) when a secret is present so each
      // credential gets its own 10/min bucket regardless of shared outbound IP.
      // Fall back to IP for requests without a secret (will fail validation anyway).
      const rlKey = clientSecret
        ? `cred:${(await sha256Hex(clientSecret)).slice(0, 8)}`
        : `ip:${getClientIp(req)}`;
      const { success, reset } = await rl.limit(rlKey);
      if (!success) {
        // 429 with Retry-After derived from the sliding-window reset timestamp.
        return jsonResp(
          { error: 'rate_limit_exceeded', error_description: 'Too many token requests. Try again later.' },
          429,
          { 'Retry-After': String(Math.ceil((reset - Date.now()) / 1000)) }
        );
      }
    } catch {
      // Upstash unavailable — allow through (graceful degradation)
    }
  }

  // Only the machine-to-machine client_credentials grant is supported.
  if (grantType !== 'client_credentials') {
    return jsonResp({ error: 'unsupported_grant_type' }, 400);
  }

  // Constant-time check of the secret against WORLDMONITOR_VALID_KEYS.
  if (!await validateSecret(clientSecret)) {
    return jsonResp({ error: 'invalid_client', error_description: 'Invalid client credentials' }, 401);
  }

  // Opaque token: cryptographically random UUID, no JWT machinery needed.
  const uuid = crypto.randomUUID();
  const stored = await storeToken(uuid, clientSecret);
  if (!stored) {
    // storeToken never throws; false covers both Redis errors and missing config.
    return jsonResp({ error: 'server_error', error_description: 'Token storage failed' }, 500);
  }

  // RFC 6749 §5.1 token response; Cache-Control/Pragma required for credentials.
  return jsonResp({
    access_token: uuid,
    token_type: 'Bearer',
    expires_in: TOKEN_TTL_SECONDS,
    scope: 'mcp',
  }, 200, { 'Cache-Control': 'no-store', 'Pragma': 'no-cache' });
}
|
||||
@@ -0,0 +1,166 @@
|
||||
---
|
||||
title: "feat: MCP OAuth 2.0 Authorization Server (spec-compliant)"
|
||||
type: feat
|
||||
status: active
|
||||
date: 2026-03-28
|
||||
---
|
||||
|
||||
# feat: MCP OAuth 2.0 Authorization Server (spec-compliant)
|
||||
|
||||
## Overview
|
||||
|
||||
Claude.ai's remote MCP connector only accepts OAuth credentials (Client ID + Client Secret) — no custom headers. The current `X-WorldMonitor-Key` header approach is therefore incompatible. This plan implements a minimal, spec-compliant OAuth 2.0 Authorization Server so claude.ai (and any other MCP client) can authenticate using standard OAuth client credentials.
|
||||
|
||||
## Problem Statement
|
||||
|
||||
The MCP 2025-03-26 spec defines OAuth 2.0 as the standard auth mechanism for remote MCP servers. Claude.ai's connector UI exposes exactly three fields:
|
||||
|
||||
1. MCP Server URL
|
||||
2. Client ID
|
||||
3. Client Secret
|
||||
|
||||
There is no header field, no API key field. If the server does not expose a proper `/.well-known/oauth-authorization-server` discovery document and a `/oauth/token` endpoint, the connector silently fails to authenticate. The current `?key=` query param workaround (PR #2417) is non-standard and not discoverable by OAuth clients.
|
||||
|
||||
## Proposed Solution
|
||||
|
||||
Implement the **Client Credentials grant** (RFC 6749 §4.4) — the correct grant type for machine-to-machine API access without user interaction:
|
||||
|
||||
```
|
||||
claude.ai → GET /.well-known/oauth-authorization-server
|
||||
← { token_endpoint, grant_types_supported: ["client_credentials"] }
|
||||
|
||||
claude.ai → POST /api/oauth/token
|
||||
grant_type=client_credentials
|
||||
client_id=worldmonitor
|
||||
client_secret=<user's API key>
|
||||
← { access_token, token_type: "Bearer", expires_in: 3600 }
|
||||
|
||||
claude.ai → POST /mcp
|
||||
Authorization: Bearer <access_token>
|
||||
← MCP JSON-RPC response
|
||||
```
|
||||
|
||||
The `client_secret` **is** the existing WorldMonitor API key. No new credential system needed — OAuth is just a wrapper around the existing key validation.
|
||||
|
||||
## Technical Approach
|
||||
|
||||
### New files
|
||||
|
||||
**`public/.well-known/oauth-authorization-server`** (static JSON, served by Vercel as-is):
|
||||
```json
|
||||
{
|
||||
"issuer": "https://worldmonitor.app",
|
||||
"token_endpoint": "https://worldmonitor.app/api/oauth/token",
|
||||
"grant_types_supported": ["client_credentials"],
|
||||
"token_endpoint_auth_methods_supported": ["client_secret_post", "client_secret_basic"],
|
||||
"response_types_supported": ["token"],
|
||||
"scopes_supported": ["mcp"]
|
||||
}
|
||||
```
|
||||
|
||||
**`api/oauth/token.js`** (Vercel Edge function):
|
||||
|
||||
- Parses `grant_type`, `client_id`, `client_secret` from POST body (form-encoded or JSON)
|
||||
- Also supports HTTP Basic auth (`Authorization: Basic base64(client_id:client_secret)`)
|
||||
- Validates `client_secret` against `WORLDMONITOR_VALID_KEYS` env var (same logic as `_api-key.js`)
|
||||
- On success: generates opaque token (`crypto.randomUUID()`), stores in Upstash Redis with key `oauth:token:<uuid>` → `{ apiKey, clientId, issuedAt }`, TTL 3600s
|
||||
- Returns: `{ access_token, token_type: "Bearer", expires_in: 3600 }`
|
||||
- On failure: returns RFC 6749 error: `{ error: "invalid_client" }` with HTTP 401
|
||||
|
||||
**`api/_oauth-token.js`** (shared helper, importable by `api/mcp.ts`):
|
||||
|
||||
- `resolveApiKeyFromBearer(req)` — extracts `Authorization: Bearer <token>`, looks up `oauth:token:<token>` in Redis, returns the stored API key or null
|
||||
- Used by `mcp.ts` in its auth chain
|
||||
|
||||
### Modified files
|
||||
|
||||
**`api/mcp.ts`** — extend auth to check Bearer token before falling back to direct key:
|
||||
```
|
||||
1. Extract Bearer token from Authorization header
|
||||
2. If Bearer: resolveApiKeyFromBearer(token) → apiKey
|
||||
3. If no Bearer: existing ?key= / X-WorldMonitor-Key logic
|
||||
4. Proceed with apiKey as before
|
||||
```
|
||||
|
||||
**`vercel.json`**:
|
||||
|
||||
- Add rewrite: `{ "source": "/oauth/token", "destination": "/api/oauth/token" }` (canonical URL without `/api/` prefix, cleaner for discovery doc)
|
||||
- Add CORS headers entry for `/api/oauth/token` (allow `*`, `Content-Type, Authorization`)
|
||||
- Update discovery doc to use `/oauth/token` (no `/api/` prefix)
|
||||
|
||||
**`public/.well-known/oauth-authorization-server`** — already in excluded list in SPA regex.
|
||||
|
||||
### Auth chain in `mcp.ts` after change
|
||||
|
||||
```
|
||||
Request arrives →
|
||||
1. Bearer token present? → Redis lookup → apiKey (or 401 if not found/expired)
|
||||
2. ?key= param? → direct key validate
|
||||
3. X-WorldMonitor-Key header? → direct key validate
|
||||
4. None → 401
|
||||
```
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] `GET /.well-known/oauth-authorization-server` returns valid RFC 8414 JSON with `token_endpoint`
|
||||
- [ ] `POST /oauth/token` with valid `client_secret` (= any key in `WORLDMONITOR_VALID_KEYS`) returns `{ access_token, token_type: "Bearer", expires_in: 3600 }`
|
||||
- [ ] `POST /oauth/token` with invalid `client_secret` returns `{ error: "invalid_client" }` + HTTP 401
|
||||
- [ ] `POST /mcp` with `Authorization: Bearer <valid_access_token>` returns MCP tools list (not 401)
|
||||
- [ ] `POST /mcp` with `Authorization: Bearer <expired_or_unknown_token>` returns 401
|
||||
- [ ] `POST /mcp` with direct `X-WorldMonitor-Key` still works (backward compat)
|
||||
- [ ] `POST /mcp` with `?key=` query param still works (backward compat)
|
||||
- [ ] Token expires after 3600s (Redis TTL enforced)
|
||||
- [ ] CORS: token endpoint and discovery doc return `Access-Control-Allow-Origin: *`
|
||||
- [ ] Claude.ai connector connects successfully using Client ID + Client Secret fields
|
||||
|
||||
## Implementation Order
|
||||
|
||||
1. Add static `public/.well-known/oauth-authorization-server` JSON file
|
||||
2. Add `api/oauth/token.js` (token issuance)
|
||||
3. Add `api/_oauth-token.js` (Bearer resolution helper)
|
||||
4. Update `api/mcp.ts` auth chain (import + Bearer check)
|
||||
5. Update `vercel.json` (rewrite `/oauth/token` → `/api/oauth/token`, CORS headers)
|
||||
6. Test end-to-end with curl before claude.ai
|
||||
|
||||
## Curl Test Sequence
|
||||
|
||||
```bash
|
||||
# 1. Discovery
|
||||
curl https://worldmonitor.app/.well-known/oauth-authorization-server
|
||||
|
||||
# 2. Get token
|
||||
curl -X POST https://worldmonitor.app/oauth/token \
|
||||
-d "grant_type=client_credentials&client_id=worldmonitor&client_secret=YOUR_API_KEY"
|
||||
|
||||
# 3. Use token with MCP
|
||||
curl -X POST https://worldmonitor.app/mcp \
|
||||
-H "Authorization: Bearer <access_token>" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"protocolVersion":"2025-03-26","capabilities":{},"clientInfo":{"name":"test","version":"1.0"}}}'
|
||||
```
|
||||
|
||||
## Dependencies & Risks
|
||||
|
||||
- **Upstash Redis** — already used for rate limiting and cache; no new dep needed
|
||||
- **Token storage** — Redis keys `oauth:token:<uuid>` with 3600s TTL. Tokens are opaque UUIDs, no JWT complexity
|
||||
- **Client ID semantics** — The Client ID field in claude.ai is just a label; the real auth is `client_secret`. We accept any non-empty string for `client_id`
|
||||
- **CORS on discovery doc** — static file served from `public/`, Vercel handles it with `Cache-Control` from the static headers rules; `ACAO: *` needed — add to vercel.json headers for `/.well-known/(.*)`
|
||||
- **No PKCE needed** — Client Credentials grant is machine-to-machine, no browser redirect, no PKCE required
|
||||
- **Token refresh** — Not needed; claude.ai will re-fetch a token when it gets 401. `expires_in: 3600` is standard and claude.ai handles re-auth automatically
|
||||
|
||||
## Out of Scope
|
||||
|
||||
- Authorization Code flow (not needed for claude.ai connector)
|
||||
- Refresh tokens (not needed; client re-authenticates with client_secret)
|
||||
- Per-scope permissions (MCP server already gates on API key validity)
|
||||
- JWT tokens (opaque tokens simpler, sufficient, and avoids key management)
|
||||
- User-facing OAuth consent screens
|
||||
|
||||
## Sources & References
|
||||
|
||||
- [MCP Spec 2025-03-26 — Authorization](https://spec.modelcontextprotocol.io/specification/2025-03-26/basic/authorization/)
|
||||
- [RFC 6749 §4.4 — Client Credentials Grant](https://datatracker.ietf.org/doc/html/rfc6749#section-4.4)
|
||||
- [RFC 8414 — OAuth 2.0 Authorization Server Metadata](https://datatracker.ietf.org/doc/html/rfc8414)
|
||||
- Current MCP implementation: `api/mcp.ts`
|
||||
- API key validation: `api/_api-key.js`
|
||||
- Redis client: `api/_upstash-json.js`
|
||||
7
public/.well-known/oauth-authorization-server
Normal file
7
public/.well-known/oauth-authorization-server
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"issuer": "https://worldmonitor.app",
|
||||
"token_endpoint": "https://worldmonitor.app/oauth/token",
|
||||
"grant_types_supported": ["client_credentials"],
|
||||
"token_endpoint_auth_methods_supported": ["client_secret_post"],
|
||||
"scopes_supported": ["mcp"]
|
||||
}
|
||||
@@ -16,7 +16,7 @@ const getCacheHeaderValue = (sourcePath) => {
|
||||
|
||||
describe('deploy/cache configuration guardrails', () => {
|
||||
it('disables caching for HTML entry routes on Vercel', () => {
|
||||
const spaNoCache = getCacheHeaderValue('/((?!api|mcp|assets|blog|docs|favico|map-styles|data|textures|pro|sw\\.js|workbox-[a-f0-9]+\\.js|manifest\\.webmanifest|offline\\.html|robots\\.txt|sitemap\\.xml|llms\\.txt|llms-full\\.txt|\\.well-known|wm-widget-sandbox\\.html).*)');
|
||||
const spaNoCache = getCacheHeaderValue('/((?!api|mcp|oauth|assets|blog|docs|favico|map-styles|data|textures|pro|sw\\.js|workbox-[a-f0-9]+\\.js|manifest\\.webmanifest|offline\\.html|robots\\.txt|sitemap\\.xml|llms\\.txt|llms-full\\.txt|\\.well-known|wm-widget-sandbox\\.html).*)');
|
||||
assert.equal(spaNoCache, 'no-cache, no-store, must-revalidate');
|
||||
});
|
||||
|
||||
|
||||
@@ -50,14 +50,15 @@ describe('api/mcp.ts — PRO MCP Server', () => {
|
||||
|
||||
// --- Auth ---
|
||||
|
||||
it('returns JSON-RPC -32001 when no API key provided', async () => {
|
||||
it('returns HTTP 401 + WWW-Authenticate when no credentials provided', async () => {
|
||||
const req = new Request(BASE_URL, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(initBody()),
|
||||
});
|
||||
const res = await handler(req);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.status, 401);
|
||||
assert.ok(res.headers.get('www-authenticate')?.includes('Bearer realm="worldmonitor"'), 'must include WWW-Authenticate header');
|
||||
const body = await res.json();
|
||||
assert.equal(body.error?.code, -32001);
|
||||
});
|
||||
|
||||
@@ -0,0 +1,101 @@
|
||||
---
|
||||
status: complete
|
||||
priority: p1
|
||||
issue_id: "075"
|
||||
tags: [code-review, security, performance, oauth, mcp]
|
||||
dependencies: []
|
||||
---
|
||||
|
||||
# Add rate limiting to /oauth/token endpoint
|
||||
|
||||
## Problem Statement
|
||||
|
||||
`api/oauth/token.js` has no rate limiting. An attacker can call this endpoint at full Vercel Edge concurrency to brute-force `client_secret` values from `WORLDMONITOR_VALID_KEYS`. The MCP endpoint has per-key rate limiting but the token issuance endpoint — the highest-value attack surface — has zero backpressure.
|
||||
|
||||
## Findings
|
||||
|
||||
- `api/oauth/token.js` — no calls to any rate limiter (line 1-122, entire file)
|
||||
- `api/mcp.ts:437-457` — has `@upstash/ratelimit` with 60/min per key; same pattern needed here
|
||||
- `api/_rate-limit.js` — existing IP-based rate limiter (600 req/60s) already in the codebase
|
||||
- `validateSecret` runs a linear `Array.includes` scan on every request with no per-IP backpressure
|
||||
- Security agent: "An attacker can call this endpoint at full Vercel Edge concurrency globally"
|
||||
- Performance agent: "At 10x request volume the token endpoint is the first bottleneck"
|
||||
- Architecture agent: "Credential stuffing vector — C2 blocking"
|
||||
|
||||
## Proposed Solutions
|
||||
|
||||
### Option 1: IP-based rate limit via `_rate-limit.js`
|
||||
|
||||
**Approach:** Import and call `checkRateLimit(req)` from `api/_rate-limit.js` at the top of the handler, before `parseBody`. Uses Upstash Redis, same as all other endpoints.
|
||||
|
||||
**Pros:**
|
||||
- One import, two lines of code
|
||||
- Consistent with existing pattern
|
||||
- Handles Cloudflare `CF-Connecting-IP` header correctly (already in `_rate-limit.js`)
|
||||
|
||||
**Cons:**
|
||||
- Generic 600/60s limit — may be too permissive for a credential endpoint
|
||||
|
||||
**Effort:** 30 minutes
|
||||
|
||||
**Risk:** Low
|
||||
|
||||
---
|
||||
|
||||
### Option 2: Tighter custom limiter (10/min per IP)
|
||||
|
||||
**Approach:** Instantiate a new `Ratelimit` instance in `oauth/token.js` with a tighter window (10 req/min per IP), keyed on `CF-Connecting-IP` or `X-Real-IP`. Pattern mirrors `getMcpRatelimit()` in `mcp.ts`.
|
||||
|
||||
**Pros:**
|
||||
- Appropriate tightness for a credential endpoint
|
||||
- Independent from the general API rate limiter
|
||||
|
||||
**Cons:**
|
||||
- ~15 additional lines of code
|
||||
- One more Redis key namespace (`rl:oauth-token:`)
|
||||
|
||||
**Effort:** 1 hour
|
||||
|
||||
**Risk:** Low
|
||||
|
||||
## Recommended Action
|
||||
|
||||
Use **Option 2**. 10 requests/min per IP is the right limit for a token endpoint. The MCP rate limiter pattern in `mcp.ts` is the exact template. Add before the `parseBody` call.
|
||||
|
||||
## Technical Details
|
||||
|
||||
**Affected files:**
|
||||
|
||||
- `api/oauth/token.js` — add rate limiter instance + check in handler
|
||||
- `api/_rate-limit.js` — reference for IP extraction pattern
|
||||
|
||||
**Related components:**
|
||||
|
||||
- `api/mcp.ts:437-457` — rate limiter pattern to mirror
|
||||
|
||||
## Resources
|
||||
|
||||
- **PR:** #2418
|
||||
- **Security finding:** C-2 (security-sentinel agent)
|
||||
- **Performance finding:** CRITICAL (performance-oracle agent)
|
||||
- **Architecture finding:** C2 (architecture-strategist agent)
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] Rate limiter is called before `validateSecret` in the token handler
|
||||
- [ ] IP extraction uses `CF-Connecting-IP` → `X-Real-IP` fallback (per existing pattern)
|
||||
- [ ] Returns `{ error: "rate_limit_exceeded" }` with HTTP 429 on limit breach
|
||||
- [ ] Token issuance still works on first request with valid credentials
|
||||
- [ ] Tests pass (node --test tests/deploy-config.test.mjs)
|
||||
|
||||
## Work Log
|
||||
|
||||
### 2026-03-28 — Code Review Discovery
|
||||
|
||||
**By:** Claude Code (compound-engineering:ce-review)
|
||||
|
||||
**Actions:**
|
||||
|
||||
- Security-sentinel, performance-oracle, and architecture-strategist all independently flagged this as blocking
|
||||
- Confirmed no rate limiter exists in `api/oauth/token.js`
|
||||
- Identified `getMcpRatelimit()` in `api/mcp.ts` as the exact pattern to follow
|
||||
124
todos/076-complete-p1-mcp-auth-failure-must-return-http-401.md
Normal file
124
todos/076-complete-p1-mcp-auth-failure-must-return-http-401.md
Normal file
@@ -0,0 +1,124 @@
|
||||
---
|
||||
status: complete
|
||||
priority: p1
|
||||
issue_id: "076"
|
||||
tags: [code-review, security, oauth, mcp, agent-native]
|
||||
dependencies: []
|
||||
---
|
||||
|
||||
# MCP auth failures must return HTTP 401 not HTTP 200
|
||||
|
||||
## Problem Statement
|
||||
|
||||
`rpcError` in `api/mcp.ts` always returns HTTP 200, including for authentication errors (`-32001`). RFC 6750 requires HTTP 401 + `WWW-Authenticate: Bearer` header on auth failures. Claude.ai's OAuth connector watches for HTTP 401 to trigger token re-authentication. With HTTP 200, an expired token causes a silent tool failure that agents cannot self-heal from.
|
||||
|
||||
## Findings
|
||||
|
||||
- `api/mcp.ts:379` — `rpcError` hardcodes `200` as HTTP status for all JSON-RPC errors
|
||||
- `api/mcp.ts:439,443` — auth failure returns `-32001` via `rpcError` → HTTP 200
|
||||
- Agent-native reviewer: "Must fix — HTTP 200 on auth errors blocks automatic re-auth loop"
|
||||
- RFC 6750 §3.1: server MUST return HTTP 401 + `WWW-Authenticate: Bearer realm=..., error=...` on token errors
|
||||
- claude.ai connector specifically monitors HTTP 401 to re-fetch an OAuth token
|
||||
- A client sending only `Authorization: Bearer <expired>` gets 200 + JSON error — indistinguishable from a tool result to non-parsing callers
|
||||
|
||||
## Proposed Solutions
|
||||
|
||||
### Option 1: Special-case -32001 in rpcError
|
||||
|
||||
**Approach:** Add an optional `httpStatus` parameter to `rpcError`, default 200. Callers that pass auth errors explicitly set 401. In the handler, when `rpcError(null, -32001, ...)` is called via the auth chain, construct the response manually with 401 + `WWW-Authenticate` header.
|
||||
|
||||
**Pros:**
|
||||
- Minimal change — only auth errors get 401
|
||||
- Other JSON-RPC errors stay HTTP 200 (correct per JSON-RPC spec)
|
||||
- Clean separation
|
||||
|
||||
**Cons:**
|
||||
- Two call sites for auth errors (lines 439 + 443)
|
||||
|
||||
**Effort:** 30 minutes
|
||||
|
||||
**Risk:** Low
|
||||
|
||||
---
|
||||
|
||||
### Option 2: Detect Bearer presence and return proper 401
|
||||
|
||||
**Approach:** After Bearer lookup fails (returns null), if a `Bearer` header was present, return a proper HTTP 401 response immediately (not via `rpcError`). This distinguishes "token expired/invalid" from "no credentials at all".
|
||||
|
||||
```typescript
|
||||
const bearerHeader = req.headers.get('Authorization');
|
||||
const bearerApiKey = await resolveApiKeyFromBearer(req);
|
||||
if (bearerHeader?.startsWith('Bearer ') && !bearerApiKey) {
|
||||
return new Response(
|
||||
JSON.stringify({ jsonrpc: '2.0', id: null, error: { code: -32001, message: 'Invalid or expired token' } }),
|
||||
{ status: 401, headers: { 'WWW-Authenticate': 'Bearer realm="worldmonitor", error="invalid_token"', ...corsHeaders } }
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
**Pros:**
|
||||
- Correctly distinguishes expired token from missing credentials
|
||||
- `WWW-Authenticate` header tells clients exactly what to do
|
||||
- claude.ai re-auth loop fires on the right condition
|
||||
|
||||
**Cons:**
|
||||
- Slightly more code in the auth chain
|
||||
|
||||
**Effort:** 1 hour
|
||||
|
||||
**Risk:** Low
|
||||
|
||||
## Recommended Action
|
||||
|
||||
Use **Option 2**. Distinguishing "Bearer present but invalid" from "no auth" is important for agent self-healing. The `WWW-Authenticate` header is RFC 6750 mandatory and claude.ai uses it to trigger re-auth.
|
||||
|
||||
## Technical Details
|
||||
|
||||
**Affected files:**
|
||||
|
||||
- `api/mcp.ts:429-446` — auth chain section
|
||||
|
||||
**RFC references:**
|
||||
|
||||
- RFC 6750 §3.1 — The use of Bearer tokens: `WWW-Authenticate: Bearer realm="..."` required
|
||||
- RFC 6750 §3.1 — `error="invalid_token"` for expired/revoked tokens, `error="invalid_request"` for malformed header
|
||||
|
||||
## Resources
|
||||
|
||||
- **PR:** #2418
|
||||
- **Agent-native finding:** CRITICAL (agent-native-reviewer)
|
||||
- **Security finding:** related to H-4 (RFC compliance)
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] `POST /mcp` with expired/unknown Bearer token returns HTTP 401 (not 200)
|
||||
- [ ] HTTP 401 response includes `WWW-Authenticate: Bearer realm="worldmonitor", error="invalid_token"`
|
||||
- [ ] `POST /mcp` with no credentials returns HTTP 200 with JSON-RPC `-32001` error (existing behavior for non-OAuth clients)
|
||||
- [ ] `POST /mcp` with valid Bearer token works normally
|
||||
- [ ] curl test confirms: `curl -si -X POST https://worldmonitor.app/mcp -H "Authorization: Bearer invalid" | head -5` shows `HTTP/1.1 401`
|
||||
|
||||
## Work Log
|
||||
|
||||
### 2026-03-28 — Code Review Discovery
|
||||
|
||||
**By:** Claude Code (compound-engineering:ce-review)
|
||||
|
||||
**Actions:**
|
||||
|
||||
- Agent-native reviewer flagged as CRITICAL for agent re-auth loop
|
||||
- Security sentinel independently flagged RFC 6750 non-compliance
|
||||
- Identified `rpcError` always returns 200 as the root cause
|
||||
|
||||
### 2026-03-28 — Partial Fix Applied (commit a2cf0df3b)
|
||||
|
||||
**Bearer-present-but-invalid path now returns 401:**
|
||||
|
||||
```typescript
|
||||
} else if (bearerHeader.startsWith('Bearer ')) {
|
||||
return new Response(
|
||||
JSON.stringify({ jsonrpc: '2.0', id: null, error: { code: -32001, message: '...' } }),
|
||||
{ status: 401, headers: { 'WWW-Authenticate': 'Bearer realm="worldmonitor", error="invalid_token"', ...corsHeaders } }
|
||||
);
|
||||
```
|
||||
|
||||
**Still pending:** The "no credentials at all" path and "invalid direct key" path still return HTTP 200 via `rpcError`. For claude.ai OAuth clients specifically this is acceptable (they always send a Bearer header), but it is an RFC 6750 non-compliance for any client that calls the endpoint without any auth. Full fix requires either: (a) special-casing -32001 in `rpcError` to return 401, or (b) manually constructing the 401 response for the "no candidateKey" and "invalid candidateKey" branches.
|
||||
@@ -0,0 +1,77 @@
|
||||
---
|
||||
status: complete
|
||||
priority: p1
|
||||
issue_id: "077"
|
||||
tags: [code-review, security, oauth, rfc-compliance]
|
||||
dependencies: []
|
||||
---
|
||||
|
||||
# Token response missing Cache-Control: no-store (RFC 6749 §5.1)
|
||||
|
||||
## Problem Statement
|
||||
|
||||
RFC 6749 §5.1 explicitly requires: "The authorization server MUST include the HTTP `Cache-Control` response header field with a value of `no-store` in any response containing tokens, credentials, or other sensitive information." The token endpoint in `api/oauth/token.js` omits this header. A CDN, proxy, or browser cache could store a token response and serve it to a different requester.
|
||||
|
||||
## Findings
|
||||
|
||||
- `api/oauth/token.js:5-10` — `jsonResp` helper adds only `Content-Type` and CORS headers
|
||||
- No `Cache-Control: no-store` or `Pragma: no-cache` on the success response (line 116)
|
||||
- `vercel.json` has no explicit cache rule for `/oauth/(.*)` responses
|
||||
- Security agent: H-2 — "Required by RFC 6749. CF or Vercel CDN could cache a 200 token response"
|
||||
- This is a one-line fix with zero risk
|
||||
|
||||
## Proposed Solutions
|
||||
|
||||
### Option 1: Add headers to success response only
|
||||
|
||||
**Approach:** Add `'Cache-Control': 'no-store', 'Pragma': 'no-cache'` to the `jsonResp()` call at line 116 (success path only). Error responses are fine without it.
|
||||
|
||||
```js
|
||||
return jsonResp({
|
||||
access_token: uuid,
|
||||
token_type: 'Bearer',
|
||||
expires_in: TOKEN_TTL_SECONDS,
|
||||
scope: 'mcp',
|
||||
}, 200, {
|
||||
'Cache-Control': 'no-store',
|
||||
'Pragma': 'no-cache',
|
||||
});
|
||||
```
|
||||
|
||||
**Effort:** 5 minutes
|
||||
|
||||
**Risk:** None
|
||||
|
||||
## Recommended Action
|
||||
|
||||
Option 1 — add the two headers to the success `jsonResp` call. One-line fix.
|
||||
|
||||
## Technical Details
|
||||
|
||||
**Affected files:**
|
||||
|
||||
- `api/oauth/token.js:116-121`
|
||||
|
||||
## Resources
|
||||
|
||||
- **PR:** #2418
|
||||
- **RFC:** RFC 6749 §5.1 — Successful Response
|
||||
- **Security finding:** H-2 (security-sentinel)
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] `POST /oauth/token` success response includes `cache-control: no-store`
|
||||
- [ ] `POST /oauth/token` success response includes `pragma: no-cache`
|
||||
- [ ] Error responses (400, 401, 405, 500) are unaffected
|
||||
|
||||
## Work Log
|
||||
|
||||
### 2026-03-28 — Code Review Discovery
|
||||
|
||||
**By:** Claude Code (compound-engineering:ce-review)
|
||||
|
||||
**Actions:**
|
||||
|
||||
- Security sentinel flagged RFC 6749 §5.1 non-compliance
|
||||
- Confirmed absence of `Cache-Control: no-store` in token response
|
||||
- Confirmed one-line fix
|
||||
@@ -0,0 +1,120 @@
|
||||
---
|
||||
status: complete
|
||||
priority: p2
|
||||
issue_id: "078"
|
||||
tags: [code-review, quality, oauth, simplification]
|
||||
dependencies: []
|
||||
---
|
||||
|
||||
# OAuth token files: remove dead code and simplify (parseBasicAuth, JSON body, clientId/issuedAt)
|
||||
|
||||
## Problem Statement
|
||||
|
||||
`api/oauth/token.js` and `api/_oauth-token.js` contain ~60% dead or over-engineered code relative to their actual use case (claude.ai `client_credentials` with form-encoded body). The code-simplicity reviewer identified three YAGNI violations that add maintenance surface without any real-world value.
|
||||
|
||||
## Findings
|
||||
|
||||
- `api/oauth/token.js:43-54` — `parseBasicAuth` (12 lines) is never used by any known client. Claude.ai uses `client_secret_post` form encoding only.
|
||||
- `api/oauth/token.js:56-76` — `parseBody` JSON branch (20 lines). No OAuth 2.0 client sends `application/json` to a token endpoint (spec uses form-encoded). Zero real-world callers.
|
||||
- `api/oauth/token.js:33` — `clientId` and `issuedAt` stored in Redis but never read back by `_oauth-token.js`. Pure dead payload in Redis.
|
||||
- `api/_oauth-token.js:6-26` — 28 lines duplicating `_upstash-json.js` Redis boilerplate. Can be replaced with:
|
||||
|
||||
```js
|
||||
import { readJsonFromUpstash } from './_upstash-json.js';
|
||||
// ...
|
||||
const entry = await readJsonFromUpstash(`oauth:token:${token}`);
|
||||
return entry?.apiKey ?? null;
|
||||
```
|
||||
|
||||
- Discovery doc advertises only `client_secret_post` and `client_secret_basic` — but `client_secret_basic` is only supported by the dead `parseBasicAuth` code path
|
||||
- Simplicity reviewer: estimated ~57 LOC reduction (80 → 23 across both files)
|
||||
|
||||
## Proposed Solutions
|
||||
|
||||
### Option 1: Remove all dead code in one pass
|
||||
|
||||
**Approach:**
|
||||
|
||||
1. Delete `parseBasicAuth` function and the Basic-auth branch in `handler`
|
||||
2. Replace `parseBody` with 3 inline lines: `const params = new URLSearchParams(await req.text()); const grantType = params.get('grant_type'); const clientSecret = params.get('client_secret');`
|
||||
3. Remove `clientId` and `issuedAt` from `storeToken` — store only the `apiKey` string as the Redis value (eliminates `JSON.stringify`/`JSON.parse` round-trip)
|
||||
4. Rewrite `_oauth-token.js` to delegate to `readJsonFromUpstash`:
|
||||
```js
|
||||
import { readJsonFromUpstash } from './_upstash-json.js';
|
||||
export async function resolveApiKeyFromBearer(req) {
|
||||
const hdr = req.headers.get('Authorization') || '';
|
||||
if (!hdr.startsWith('Bearer ')) return null;
|
||||
const token = hdr.slice(7).trim();
|
||||
if (!token) return null;
|
||||
const apiKey = await readJsonFromUpstash(`oauth:token:${token}`);
|
||||
return typeof apiKey === 'string' && apiKey ? apiKey : null;
|
||||
}
|
||||
```
|
||||
5. Update discovery doc to remove `client_secret_basic` from `token_endpoint_auth_methods_supported` since Basic auth is no longer supported
|
||||
|
||||
**Pros:**
|
||||
- ~57 LOC reduction
|
||||
- `_oauth-token.js` now reuses existing tested code instead of duplicating Redis boilerplate
|
||||
- Redis stores `apiKey` as plain string (faster GET, no JSON parse)
|
||||
- Stored value validates: `typeof apiKey === 'string' && apiKey` guards against corrupted entries
|
||||
|
||||
**Cons:**
|
||||
- Removes `client_secret_basic` support — fine since no client uses it
|
||||
- Need to verify `readJsonFromUpstash` can return a plain string (not just an object)
|
||||
|
||||
**Effort:** 2-3 hours
|
||||
|
||||
**Risk:** Low (removing unused code paths)
|
||||
|
||||
---
|
||||
|
||||
### Option 2: Keep Basic auth, only simplify storage
|
||||
|
||||
**Approach:** Keep `parseBasicAuth` and JSON body branch, but simplify Redis storage (plain string) and rewrite `_oauth-token.js` to use `readJsonFromUpstash`.
|
||||
|
||||
**Pros:** More spec coverage
|
||||
|
||||
**Cons:** Keeps ~35 lines of dead code
|
||||
|
||||
**Effort:** 1 hour
|
||||
|
||||
**Risk:** Low
|
||||
|
||||
## Recommended Action
|
||||
|
||||
Option 1. The dead code provides no value and creates maintenance burden. The simplification of `_oauth-token.js` is particularly important — it currently duplicates the Redis boilerplate that `_upstash-json.js` already handles, including the `encodeURIComponent` question (which goes away since `readJsonFromUpstash` handles it).
|
||||
|
||||
## Technical Details
|
||||
|
||||
**Affected files:**
|
||||
|
||||
- `api/oauth/token.js` — remove parseBasicAuth, simplify parseBody, simplify storage
|
||||
- `api/_oauth-token.js` — rewrite to delegate to `readJsonFromUpstash`
|
||||
- `api/_upstash-json.js` — verify it handles plain string values (currently used for JSON objects)
|
||||
- `public/.well-known/oauth-authorization-server` — remove `client_secret_basic` from auth methods if Basic is removed
|
||||
|
||||
## Resources
|
||||
|
||||
- **PR:** #2418
|
||||
- **Simplicity finding:** code-simplicity-reviewer (high confidence)
|
||||
- **TS finding:** item #5 (encodeURIComponent on Redis key fixed by using readJsonFromUpstash)
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] `api/_oauth-token.js` delegates to `readJsonFromUpstash` (no duplicate Redis boilerplate)
|
||||
- [ ] `resolveApiKeyFromBearer` validates returned value is a non-empty string before returning
|
||||
- [ ] Redis stores `apiKey` as plain string (verify with `redis-cli GET oauth:token:<uuid>`)
|
||||
- [ ] Token issuance and Bearer resolution end-to-end still work
|
||||
- [ ] Tests pass
|
||||
|
||||
## Work Log
|
||||
|
||||
### 2026-03-28 — Code Review Discovery
|
||||
|
||||
**By:** Claude Code (compound-engineering:ce-review)
|
||||
|
||||
**Actions:**
|
||||
|
||||
- Code-simplicity-reviewer identified 3 YAGNI violations and estimated 57 LOC reduction
|
||||
- TS reviewer independently noted `encodeURIComponent` risk (fixed by this simplification)
|
||||
- Architecture reviewer noted clientId/issuedAt are dead payload
|
||||
@@ -0,0 +1,103 @@
|
||||
---
|
||||
status: complete
|
||||
priority: p2
|
||||
issue_id: "079"
|
||||
tags: [code-review, oauth, rfc-compliance, vercel]
|
||||
dependencies: []
|
||||
---
|
||||
|
||||
# Discovery doc: fix Content-Type header and remove wrong response_types_supported
|
||||
|
||||
## Problem Statement
|
||||
|
||||
Two issues with `public/.well-known/oauth-authorization-server`:
|
||||
|
||||
1. The file has no extension, so Vercel likely serves it as `application/octet-stream` or `text/plain`. RFC 8414 requires `Content-Type: application/json`.
|
||||
2. `response_types_supported: ["token"]` is the wrong field — it refers to authorization endpoint response types (implicit flow), which doesn't exist here. For `client_credentials`-only servers, this field should be omitted or empty.
|
||||
|
||||
## Findings
|
||||
|
||||
- `public/.well-known/oauth-authorization-server` — no `.json` extension, no explicit Content-Type set in vercel.json
|
||||
- Security agent M-3: "Vercel will likely serve as `application/octet-stream`. RFC 8414 requires `application/json`. Some MCP clients may reject it."
|
||||
- Architecture agent M1: "`response_types_supported: ['token']` is implicit flow nomenclature. RFC 8414 §2 specifies this refers to authorization endpoint response types. Since there's no authorization endpoint, this field should be `[]` or omitted."
|
||||
- The `vercel.json` `.well-known/(.*)` header rule adds CORS but not `Content-Type`
|
||||
|
||||
## Proposed Solutions
|
||||
|
||||
### Option 1: Add Content-Type header in vercel.json + fix response_types field
|
||||
|
||||
**Approach:**
|
||||
|
||||
1. Add `Content-Type: application/json` to the `/.well-known/oauth-authorization-server` header rule in `vercel.json` (or add a specific rule for just this file)
|
||||
2. Remove `response_types_supported` from the discovery doc (or set to `[]`)
|
||||
|
||||
```json
|
||||
{
|
||||
"source": "/.well-known/oauth-authorization-server",
|
||||
"headers": [
|
||||
{ "key": "Content-Type", "value": "application/json" },
|
||||
{ "key": "Access-Control-Allow-Origin", "value": "*" },
|
||||
{ "key": "Cache-Control", "value": "public, max-age=3600" }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Pros:**
|
||||
- Fixes both issues in two files
|
||||
- No rename needed (avoids rewrite rule complexity)
|
||||
- Explicit rule is clearer than relying on file extension inference
|
||||
|
||||
**Effort:** 15 minutes
|
||||
|
||||
**Risk:** None
|
||||
|
||||
---
|
||||
|
||||
### Option 2: Rename file to .json + rewrite rule
|
||||
|
||||
**Approach:** Rename to `oauth-authorization-server.json`, add a vercel.json rewrite from `/.well-known/oauth-authorization-server` to `/.well-known/oauth-authorization-server.json`.
|
||||
|
||||
**Pros:** File extension carries semantic meaning
|
||||
|
||||
**Cons:** Adds a rewrite rule, slightly more complex
|
||||
|
||||
**Effort:** 20 minutes
|
||||
|
||||
**Risk:** Low
|
||||
|
||||
## Recommended Action
|
||||
|
||||
Option 1 — explicit `Content-Type` header in `vercel.json` is simpler and doesn't require a rewrite rule. Also remove `response_types_supported` from the discovery doc.
|
||||
|
||||
## Technical Details
|
||||
|
||||
**Affected files:**
|
||||
|
||||
- `vercel.json` — add specific header rule for `/.well-known/oauth-authorization-server`
|
||||
- `public/.well-known/oauth-authorization-server` — remove `response_types_supported` field
|
||||
|
||||
## Resources
|
||||
|
||||
- **PR:** #2418
|
||||
- **Security finding:** M-3 (security-sentinel)
|
||||
- **Architecture finding:** M1 (architecture-strategist)
|
||||
- **RFC 8414:** §2 — Authorization Server Metadata
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] `curl -I https://worldmonitor.app/.well-known/oauth-authorization-server` returns `content-type: application/json`
|
||||
- [ ] Discovery doc no longer contains `response_types_supported` field (or it is `[]`)
|
||||
- [ ] Discovery doc still contains all required fields: `issuer`, `token_endpoint`, `grant_types_supported`
|
||||
- [ ] Tests pass (deploy-config tests)
|
||||
|
||||
## Work Log
|
||||
|
||||
### 2026-03-28 — Code Review Discovery
|
||||
|
||||
**By:** Claude Code (compound-engineering:ce-review)
|
||||
|
||||
**Actions:**
|
||||
|
||||
- Security sentinel flagged missing Content-Type (M-3)
|
||||
- Architecture strategist flagged wrong response_types field (M1)
|
||||
- Both are non-breaking fixes requiring minimal changes
|
||||
@@ -0,0 +1,113 @@
|
||||
---
|
||||
status: complete
|
||||
priority: p2
|
||||
issue_id: "080"
|
||||
tags: [code-review, security, oauth, redis]
|
||||
dependencies: [078]
|
||||
---
|
||||
|
||||
# Store API key hash (not plaintext) in Redis OAuth token entries
|
||||
|
||||
## Problem Statement
|
||||
|
||||
`api/oauth/token.js` stores the raw `client_secret` (= the actual WorldMonitor API key) verbatim in Redis under `oauth:token:<uuid>`. Anyone who gains read access to Upstash (misconfigured token, support incident, future ACL issue) gets all live API keys in plaintext. Redis is now a second authoritative secret store alongside `WORLDMONITOR_VALID_KEYS`.
|
||||
|
||||
## Findings
|
||||
|
||||
- `api/oauth/token.js:33` — `value = JSON.stringify({ apiKey, ... })` stores raw API key
|
||||
- `api/_oauth-token.js:24` — `return entry?.apiKey ?? null` returns the raw key directly
|
||||
- `api/mcp.ts:431` — uses `bearerApiKey` directly for rate limiting and downstream calls
|
||||
- Security agent C-1: "Anyone who dumps or scans Redis gets all live API keys in plaintext"
|
||||
- After todo #078 simplification: Redis stores plain string `apiKey` — still plaintext
|
||||
|
||||
## Proposed Solutions
|
||||
|
||||
### Option 1: Store SHA-256 hash of key, re-validate on lookup
|
||||
|
||||
**Approach:**
|
||||
|
||||
On token issuance: `redis.set('oauth:token:<uuid>', sha256(apiKey))`
|
||||
|
||||
On Bearer resolution: `resolveApiKeyFromBearer` returns the stored hash. In `mcp.ts`, compute `sha256(candidate)` for each key in `WORLDMONITOR_VALID_KEYS` and compare it against the stored hash (using constant-time comparison); if any match, the key is valid.
|
||||
|
||||
**Pros:**
|
||||
- Redis compromise exposes hashes, not live API keys
|
||||
- Defense-in-depth
|
||||
|
||||
**Cons:**
|
||||
- Adds `crypto.subtle.digest` calls (available in Edge runtime)
|
||||
- Slightly more complex lookup: hash comparison instead of direct string match
|
||||
- Breaking change to stored token format (need migration or versioned format)
|
||||
|
||||
**Effort:** 2-3 hours
|
||||
|
||||
**Risk:** Medium (changing auth critical path)
|
||||
|
||||
---
|
||||
|
||||
### Option 2: Store a stable key ID (non-reversible label)
|
||||
|
||||
**Approach:** Generate a deterministic short ID from each key (e.g., first 8 chars of SHA-256 hex). Store only this as the token value. On lookup, compute the same ID for each candidate in `WORLDMONITOR_VALID_KEYS` and find the matching one.
|
||||
|
||||
**Pros:**
|
||||
- Simpler than storing full hash
|
||||
- Redis only exposes a partial fingerprint
|
||||
|
||||
**Cons:**
|
||||
- Still requires iterating `WORLDMONITOR_VALID_KEYS` on every Bearer lookup
|
||||
|
||||
**Effort:** 2 hours
|
||||
|
||||
**Risk:** Medium
|
||||
|
||||
---
|
||||
|
||||
### Option 3: Accept the current design (defer)
|
||||
|
||||
**Approach:** Keep raw key storage but document the trust boundary. Rotate API keys immediately on any Upstash access incident.
|
||||
|
||||
**Pros:** No code change
|
||||
**Cons:** Violates least-privilege principle; Redis breach = all active sessions compromised
|
||||
|
||||
**Effort:** 0
|
||||
|
||||
**Risk:** High (latent)
|
||||
|
||||
## Recommended Action
|
||||
|
||||
Implement Option 2 (key ID/fingerprint). The lookup path is simpler, and a Redis compromise exposes only partial fingerprints. Blocked by todo #078 (simplification), since that work changes the storage format.
|
||||
|
||||
## Technical Details
|
||||
|
||||
**Affected files:**
|
||||
|
||||
- `api/oauth/token.js` — change stored value from raw key to key fingerprint
|
||||
- `api/_oauth-token.js` — return fingerprint; update lookup logic in `mcp.ts`
|
||||
- `api/mcp.ts` — match fingerprint against computed fingerprints of valid keys
|
||||
|
||||
## Resources
|
||||
|
||||
- **PR:** #2418
|
||||
- **Security finding:** C-1 (security-sentinel agent)
|
||||
- **Architecture note:** Architecture-strategist confirmed no-risk for current design but flagged Redis trust boundary
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] Redis `oauth:token:<uuid>` value does not contain raw API key
|
||||
- [ ] Bearer token resolution still correctly identifies the originating API key
|
||||
- [ ] Upstash dump of `oauth:token:*` keys reveals no plaintext API keys
|
||||
- [ ] Full auth flow still works end-to-end
|
||||
|
||||
## Work Log
|
||||
|
||||
### 2026-03-28 — Code Review Discovery
|
||||
|
||||
**By:** Claude Code (compound-engineering:ce-review)
|
||||
|
||||
**Actions:**
|
||||
|
||||
- Security sentinel flagged as CRITICAL (C-1)
|
||||
- Architecture strategist confirmed acceptable design for now but noted trust boundary gap
|
||||
- Marked P2 (not P1) because: current API keys have short TTLs in practice, Upstash access is tightly controlled, and todo #078 must land first to establish the storage format
|
||||
todos/081-complete-p1-api-key-exposed-in-url-query-param.md — new file, 93 lines
@@ -0,0 +1,93 @@
|
||||
---
|
||||
status: complete
|
||||
priority: p1
|
||||
issue_id: "081"
|
||||
tags: [code-review, security, oauth, mcp]
|
||||
dependencies: []
|
||||
---
|
||||
|
||||
# Remove `?key=` URL query param auth path — API key exposed in access logs
|
||||
|
||||
## Problem Statement
|
||||
|
||||
PR #2418 added `?key=` as a third auth fallback in `api/mcp.ts` for "clients that cannot set headers." This is a security regression: API keys in URL query parameters are logged verbatim by Vercel access logs, Cloudflare access logs, browser history, proxy logs, and Referer headers. Unlike HTTP headers, URL params cannot be stripped at the transport layer. The OAuth `client_credentials` flow introduced in this same PR already solves the "no headers" use case, leaving `?key=` with no remaining justified use case.
|
||||
|
||||
## Findings
|
||||
|
||||
- `api/mcp.ts:442` — `const urlKey = new URL(req.url).searchParams.get('key') ?? '';` — new path added in this PR
|
||||
- Security sentinel: "This is a new attack surface introduced specifically by this PR. The original `mcp.ts` used header-only auth."
|
||||
- Architecture strategist: "The correct solution for clients that cannot set headers is the OAuth flow this PR already provides."
|
||||
- Pre-PR auth used `validateApiKey(req, { forceKey: true })` which was header-only
|
||||
- Any MCP request to `/mcp?key=wm_live_xxxxx` permanently records the API key in Vercel + CF logs
|
||||
- Tool request URLs show up in browser devtools and Referer headers on redirect
|
||||
|
||||
## Proposed Solutions
|
||||
|
||||
### Option 1: Remove the `?key=` path (recommended)
|
||||
|
||||
Delete the `urlKey` line and its usage. Direct clients that cannot set custom headers to use the OAuth flow (`POST /oauth/token` → `Authorization: Bearer`).
|
||||
|
||||
```typescript
|
||||
// Remove this:
|
||||
const urlKey = new URL(req.url).searchParams.get('key') ?? '';
|
||||
const headerKey = req.headers.get('X-WorldMonitor-Key') ?? '';
|
||||
const candidateKey = urlKey || headerKey;
|
||||
|
||||
// Replace with:
|
||||
const candidateKey = req.headers.get('X-WorldMonitor-Key') ?? '';
|
||||
```
|
||||
|
||||
**Pros:** Eliminates credential-in-URL leakage. OAuth already handles the "no custom headers" use case.
|
||||
**Cons:** Any existing client using `?key=` URL param breaks. (No known clients per PR description.)
|
||||
**Effort:** Small (2 lines)
|
||||
**Risk:** Low — no documented clients depend on `?key=` per PR description.
|
||||
|
||||
---
|
||||
|
||||
### Option 2: Keep `?key=` but gate behind env flag
|
||||
|
||||
Add `WORLDMONITOR_ALLOW_KEY_QUERY_PARAM=true` env var; only enable the `?key=` path if explicitly opted in.
|
||||
|
||||
**Pros:** Backward compat if any undocumented client uses it.
|
||||
**Cons:** Still allows the security risk to exist in production; adds env config complexity.
|
||||
**Effort:** Small
|
||||
**Risk:** Low
|
||||
|
||||
---
|
||||
|
||||
### Option 3: Log a deprecation warning but keep the path
|
||||
|
||||
Return the response but add a header like `Warning: 299 - "API key in URL is deprecated; use Authorization: Bearer"`.
|
||||
|
||||
**Pros:** Non-breaking, signals deprecation.
|
||||
**Cons:** Does not fix the log-exposure problem.
|
||||
**Effort:** Small
|
||||
**Risk:** Low (but doesn't fix the actual issue)
|
||||
|
||||
## Recommended Action
|
||||
|
||||
Option 1: Remove immediately. OAuth covers the stated use case. No documented clients depend on `?key=`.
|
||||
|
||||
## Technical Details
|
||||
|
||||
**Affected files:**
|
||||
- `api/mcp.ts:440-451` — remove `urlKey` and change `const candidateKey = urlKey || headerKey` to `const candidateKey = req.headers.get('X-WorldMonitor-Key') ?? ''`
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] `?key=` URL parameter is not present in `api/mcp.ts` auth chain
|
||||
- [ ] `X-WorldMonitor-Key` header path still works
|
||||
- [ ] OAuth Bearer path still works
|
||||
- [ ] No test regressions
|
||||
|
||||
## Work Log
|
||||
|
||||
### 2026-03-28 — Code Review Discovery
|
||||
|
||||
**By:** Claude Code (compound-engineering:ce-review)
|
||||
|
||||
**Actions:**
|
||||
|
||||
- Security sentinel and architecture strategist independently flagged as P1 regression
|
||||
- Pattern confirmed: OAuth `client_credentials` (added in same PR) covers the stated use case
|
||||
- No known clients depend on `?key=` URL param per PR description
|
||||
@@ -0,0 +1,104 @@
|
||||
---
|
||||
status: complete
|
||||
priority: p1
|
||||
issue_id: "082"
|
||||
tags: [code-review, security, mcp, information-disclosure]
|
||||
dependencies: []
|
||||
---
|
||||
|
||||
# `err.message` leaks internal service details in MCP tool error responses
|
||||
|
||||
## Problem Statement
|
||||
|
||||
PR #2418 changed the MCP catch block from a hardcoded string to `err.message`, creating an information disclosure regression. Error messages from internal services can contain Redis key names, Upstash endpoint hostnames, internal service URLs, IP addresses, and stack fragments. The original code deliberately masked this with a static string.
|
||||
|
||||
## Findings
|
||||
|
||||
- `api/mcp.ts:520-521` — changed from:
|
||||
```typescript
|
||||
} catch {
|
||||
return rpcError(id, -32603, 'Internal error: data fetch failed');
|
||||
```
|
||||
to:
|
||||
```typescript
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
return rpcError(id, -32603, `Internal error: ${msg}`);
|
||||
```
|
||||
- Security sentinel: "The `err.message` from a network call, Redis read, or JSON parse failure can contain internal URLs, Redis key names, Upstash endpoint hostnames, internal service names, or stack fragments."
|
||||
- Example leak: `Internal error: fetch failed — connect ECONNREFUSED 10.0.0.5:443` reveals internal network topology
|
||||
- The original masking was intentional; this change is an unintentional regression introduced while adding the `catch (err)` for type narrowing
|
||||
- TypeScript reviewer flagged as regression: "This is a regression: the original code deliberately masked the error."
|
||||
|
||||
## Proposed Solutions
|
||||
|
||||
### Option 1: Log to Sentry, return fixed string (recommended)
|
||||
|
||||
```typescript
|
||||
} catch (err) {
|
||||
// Log full error internally for debugging, mask from API callers
|
||||
console.error('[mcp] tool execution error:', err);
|
||||
return rpcError(id, -32603, 'Internal error: data fetch failed');
|
||||
}
|
||||
```
|
||||
|
||||
If Sentry is wired in: `Sentry.captureException(err)` before returning.
|
||||
|
||||
**Pros:** Retains debuggability, masks internals from API surface.
|
||||
**Cons:** Requires Sentry integration or console.error.
|
||||
**Effort:** Small (1 line change)
|
||||
**Risk:** Low
|
||||
|
||||
---
|
||||
|
||||
### Option 2: Sanitize error message before returning
|
||||
|
||||
Strip known patterns (URLs, IPs, file paths) from `err.message` before including in response.
|
||||
|
||||
**Pros:** Gives some signal without full masking.
|
||||
**Cons:** Regex sanitization is hard to get right and easy to bypass. Sanitization creates a false sense of security.
|
||||
**Effort:** Medium
|
||||
**Risk:** Medium (sanitization gaps can still leak)
|
||||
|
||||
---
|
||||
|
||||
### Option 3: Revert to hardcoded string, keep err for logging only
|
||||
|
||||
```typescript
|
||||
} catch (err: unknown) {
|
||||
console.error('[mcp] executeTool error:', err);
|
||||
return rpcError(id, -32603, 'Internal error: data fetch failed');
|
||||
}
|
||||
```
|
||||
|
||||
**Pros:** Exact revert to the intentional behavior.
|
||||
**Effort:** Small
|
||||
**Risk:** Low
|
||||
|
||||
## Recommended Action
|
||||
|
||||
Option 3. The catch block only needs `err` for logging, not for the response string. Revert response to hardcoded string, keep `catch (err: unknown)` for console/Sentry.
|
||||
|
||||
## Technical Details
|
||||
|
||||
**Affected files:**
|
||||
- `api/mcp.ts:516-520` — change 2 lines
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] Tool error responses do not expose `err.message` to callers
|
||||
- [ ] Error is logged (console.error or Sentry)
|
||||
- [ ] Response string matches or is equivalent to original "Internal error: data fetch failed"
|
||||
- [ ] TypeScript type error from `catch (err)` is handled without leaking message
|
||||
|
||||
## Work Log
|
||||
|
||||
### 2026-03-28 — Code Review Discovery
|
||||
|
||||
**By:** Claude Code (compound-engineering:ce-review)
|
||||
|
||||
**Actions:**
|
||||
|
||||
- Security sentinel (H-3) and TypeScript reviewer both flagged independently
|
||||
- Root cause: developer changed `catch` to `catch (err)` for type narrowing and accidentally introduced the leak by adding `err.message` to the response
|
||||
- Original hardcoded string was intentional masking, not a lazy placeholder
|
||||
97
todos/083-complete-p2-timing-unsafe-key-comparison.md
Normal file
97
todos/083-complete-p2-timing-unsafe-key-comparison.md
Normal file
@@ -0,0 +1,97 @@
|
||||
---
|
||||
status: complete
|
||||
priority: p2
|
||||
issue_id: "083"
|
||||
tags: [code-review, security, oauth, timing-attack]
|
||||
dependencies: []
|
||||
---
|
||||
|
||||
# Non-constant-time API key comparison enables timing oracle attack
|
||||
|
||||
## Problem Statement
|
||||
|
||||
Both `api/oauth/token.js` (`validateSecret`) and `api/mcp.ts` (direct key path) use `Array.includes()` for API key validation. JavaScript `===` exits on the first mismatching byte, creating a timing side-channel. Over enough requests, an attacker can enumerate valid key prefixes character by character. The OAuth token endpoint is the most exposed surface since it validates the full raw API key with no caching.
|
||||
|
||||
## Findings
|
||||
|
||||
- `api/oauth/token.js:47` — `return validKeys.includes(secret);`
|
||||
- `api/mcp.ts:449` — `if (!validKeys.includes(candidateKey))`
|
||||
- Security sentinel: "JavaScript's `===` operator exits on the first mismatching byte. For a known partial key prefix, an attacker can run thousands of requests and measure response time differentials to enumerate valid key prefixes character by character."
|
||||
- Rate limit (10 req/min per IP) slows but does not prevent: Vercel edge runs globally, a distributed attacker sources from many IPs
|
||||
- Fix is a constant-time comparison on Uint8Array-encoded key bytes via `crypto.subtle.timingSafeEqual` (a non-standard extension provided by edge runtimes such as Vercel Edge and Cloudflare Workers — not part of the W3C Web Crypto spec)
|
||||
|
||||
## Proposed Solutions
|
||||
|
||||
### Option 1: `crypto.timingSafeEqual` on Uint8Array (recommended)
|
||||
|
||||
```javascript
|
||||
function timingSafeIncludes(candidateKey, validKeys) {
|
||||
if (!candidateKey) return false;
|
||||
const enc = new TextEncoder();
|
||||
const candidate = enc.encode(candidateKey);
|
||||
return validKeys.some(k => {
|
||||
const valid = enc.encode(k);
|
||||
if (valid.length !== candidate.length) return false;
|
||||
return crypto.subtle.timingSafeEqual(valid, candidate);
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
Note: `crypto.subtle.timingSafeEqual` is a non-standard extension of the Web Crypto API offered by edge runtimes (Vercel Edge, Cloudflare Workers); it is not in the W3C Web Crypto specification, so verify availability in the target runtime. Node's `crypto.timingSafeEqual` is not available in Vercel edge.
|
||||
|
||||
**Pros:** Eliminates timing oracle. Cryptographically sound.
|
||||
**Cons:** Requires encoding keys to Uint8Array before comparison. Slightly more code.
|
||||
**Effort:** Small (1 helper function, 2 call sites)
|
||||
**Risk:** Low
|
||||
|
||||
---
|
||||
|
||||
### Option 2: Use Web Crypto HMAC comparison
|
||||
|
||||
Compute `HMAC-SHA256(key, nonce)` and compare digests. More complex, similar result.
|
||||
|
||||
**Pros:** Industry standard for constant-time comparison.
|
||||
**Cons:** More complex than `timingSafeEqual`, requires a nonce.
|
||||
**Effort:** Medium
|
||||
**Risk:** Low (but overkill vs Option 1)
|
||||
|
||||
---
|
||||
|
||||
### Option 3: Accept risk (defer)
|
||||
|
||||
The rate limiter and high key entropy (typical API keys are long, random, high-entropy strings) reduce practical attack feasibility significantly. Accept the risk and document it.
|
||||
|
||||
**Pros:** No code change.
|
||||
**Cons:** Leaves a known timing oracle on a credential endpoint.
|
||||
**Effort:** 0
|
||||
**Risk:** Medium (latent)
|
||||
|
||||
## Recommended Action
|
||||
|
||||
Option 1. `crypto.subtle.timingSafeEqual` is available on Vercel edge runtime. The fix is a single helper function replacing two `Array.includes` calls.
|
||||
|
||||
## Technical Details
|
||||
|
||||
**Affected files:**
|
||||
- `api/oauth/token.js:45-49` — replace `validateSecret`
|
||||
- `api/mcp.ts:448-450` — replace inline `validKeys.includes`
|
||||
- Potentially extract to `api/_api-key.js` as `isValidApiKey(k)` (see todo #085)
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] No `Array.includes` used for API key comparison in `token.js` or `mcp.ts`
|
||||
- [ ] Constant-time comparison used for all key validation
|
||||
- [ ] Edge runtime compatibility verified (no Node.js `crypto` module)
|
||||
- [ ] Existing tests still pass
|
||||
|
||||
## Work Log
|
||||
|
||||
### 2026-03-28 — Code Review Discovery
|
||||
|
||||
**By:** Claude Code (compound-engineering:ce-review)
|
||||
|
||||
**Actions:**
|
||||
|
||||
- Security sentinel flagged as H-2 (HIGH)
|
||||
- Both key validation call sites identified
|
||||
- `crypto.subtle.timingSafeEqual` confirmed available as an edge-runtime extension (Vercel Edge / Cloudflare Workers); not part of the standard Web Crypto API
|
||||
110
todos/084-complete-p2-bearer-resolution-swallows-redis-errors.md
Normal file
110
todos/084-complete-p2-bearer-resolution-swallows-redis-errors.md
Normal file
@@ -0,0 +1,110 @@
|
||||
---
|
||||
status: complete
|
||||
priority: p2
|
||||
issue_id: "084"
|
||||
tags: [code-review, oauth, mcp, reliability]
|
||||
dependencies: []
|
||||
---
|
||||
|
||||
# `resolveApiKeyFromBearer` returns null on Redis errors, causing misleading 401 on infra failure
|
||||
|
||||
## Problem Statement
|
||||
|
||||
`resolveApiKeyFromBearer` in `api/_oauth-token.js` treats any `readJsonFromUpstash` failure (timeout, connection error, Redis unavailable) the same as "token not found." This causes OAuth-authenticated MCP clients to receive HTTP 401 during a Redis outage, forcing them to re-authenticate — which also fails because `/oauth/token` also uses Redis. The 401 is a lie: the token may be valid but Redis is simply down.
|
||||
|
||||
## Findings
|
||||
|
||||
- `api/_oauth-token.js:9` — ``const apiKey = await readJsonFromUpstash(`oauth:token:${token}`);`` (must be a backtick template literal — a single-quoted string would not interpolate `${token}`)
|
||||
- `api/_upstash-json.js` — returns `null` on any fetch failure, timeout, or parse error (does not throw)
|
||||
- `api/mcp.ts:436-439` — when `resolveApiKeyFromBearer` returns null and a Bearer header is present, returns HTTP 401 with `error="invalid_token"`
|
||||
- TypeScript reviewer: "A transient Redis outage thus locks out all OAuth-authenticated clients with no indication that the failure was infrastructure, not the token."
|
||||
- A 401 on token-not-found and 401 on Redis-down are indistinguishable to the caller
|
||||
- The correct behavior for Redis unavailability is HTTP 503 or a clear error in the 500 range, not 401
|
||||
|
||||
## Proposed Solutions
|
||||
|
||||
### Option 1: Distinguish error types via thrown exception
|
||||
|
||||
Modify `resolveApiKeyFromBearer` to re-throw on infrastructure errors (non-404 failures):
|
||||
|
||||
```javascript
|
||||
export async function resolveApiKeyFromBearer(req) {
|
||||
const hdr = req.headers.get('Authorization') || '';
|
||||
if (!hdr.startsWith('Bearer ')) return null;
|
||||
const token = hdr.slice(7).trim();
|
||||
if (!token) return null;
|
||||
// Throws on network/timeout error (infrastructure failure)
|
||||
// Returns null if key is missing from Redis (valid "not found")
|
||||
const apiKey = await readJsonFromUpstash(`oauth:token:${token}`);
|
||||
return typeof apiKey === 'string' && apiKey ? apiKey : null;
|
||||
}
|
||||
```
|
||||
|
||||
This requires `readJsonFromUpstash` to distinguish "key not found" (returns null) from "fetch error" (throws). If `_upstash-json.js` doesn't distinguish these, it would need updating.
|
||||
|
||||
**Pros:** Correct semantics. Infrastructure errors become 500 in `mcp.ts`'s existing catch block.
|
||||
**Cons:** Requires changes to `_upstash-json.js` to propagate error vs. null distinctly.
|
||||
**Effort:** Medium
|
||||
**Risk:** Low
|
||||
|
||||
---
|
||||
|
||||
### Option 2: Discriminated return type
|
||||
|
||||
Change `resolveApiKeyFromBearer` to return a discriminated union:
|
||||
|
||||
```typescript
|
||||
type BearerResult =
|
||||
| { status: 'no_bearer' }
|
||||
| { status: 'valid'; apiKey: string }
|
||||
| { status: 'invalid' } // token not in Redis
|
||||
| { status: 'error' }; // infrastructure failure
|
||||
```
|
||||
|
||||
On `status: 'error'`, `mcp.ts` returns HTTP 503 instead of 401.
|
||||
|
||||
**Pros:** Explicit types, correct HTTP semantics per failure mode.
|
||||
**Cons:** More complex refactor; changes `_oauth-token.js` and `mcp.ts`.
|
||||
**Effort:** Medium
|
||||
**Risk:** Low (but larger change)
|
||||
|
||||
---
|
||||
|
||||
### Option 3: Accept the current behavior (document only)
|
||||
|
||||
Add a comment explaining that Redis failure causes transient 401s for OAuth clients, and that clients should implement re-auth with backoff. Document in the PR description.
|
||||
|
||||
**Pros:** No code change.
|
||||
**Cons:** Misleading error semantics. OAuth clients will loop re-authenticating against a system that's down.
|
||||
**Effort:** 0
|
||||
**Risk:** Medium (latent reliability issue)
|
||||
|
||||
## Recommended Action
|
||||
|
||||
Option 2 is the cleanest long-term. Short-term, Option 1 is acceptable. The key change needed is: infrastructure errors should not map to 401.
|
||||
|
||||
## Technical Details
|
||||
|
||||
**Affected files:**
|
||||
- `api/_oauth-token.js` — return discriminated result or throw on infrastructure error
|
||||
- `api/mcp.ts:432-439` — handle `resolveApiKeyFromBearer` error case distinctly
|
||||
- Possibly `api/_upstash-json.js` — expose "key not found" vs "fetch error" distinction
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] Redis timeout/connection error does not return HTTP 401 to caller
|
||||
- [ ] "Token not in Redis" (expired) still returns HTTP 401 with `error="invalid_token"`
|
||||
- [ ] Infrastructure failure returns HTTP 503 or 500
|
||||
- [ ] Existing OAuth happy-path tests pass
|
||||
|
||||
## Work Log
|
||||
|
||||
### 2026-03-28 — Code Review Discovery
|
||||
|
||||
**By:** Claude Code (compound-engineering:ce-review)
|
||||
|
||||
**Actions:**
|
||||
|
||||
- TypeScript reviewer flagged as HIGH severity
|
||||
- Traced through: `readJsonFromUpstash` → null on any failure → `resolveApiKeyFromBearer` returns null → mcp.ts returns 401
|
||||
- Redis down = misleading "invalid token" to caller, triggers re-auth loop that also fails
|
||||
110
todos/085-complete-p2-oauth-token-js-code-duplication.md
Normal file
110
todos/085-complete-p2-oauth-token-js-code-duplication.md
Normal file
@@ -0,0 +1,110 @@
|
||||
---
|
||||
status: complete
|
||||
priority: p2
|
||||
issue_id: "085"
|
||||
tags: [code-review, quality, oauth]
|
||||
dependencies: []
|
||||
---
|
||||
|
||||
# `api/oauth/token.js` duplicates 4 patterns already in existing helpers
|
||||
|
||||
## Problem Statement
|
||||
|
||||
`api/oauth/token.js` re-implements four utilities that already exist in the `api/` helper modules: `getClientIp`, key validation, `jsonResponse`, and CORS headers. This creates maintenance drift — if any of these patterns need to change (CF header priority, allowed CORS headers, key validation logic), `token.js` will not be updated.
|
||||
|
||||
## Findings
|
||||
|
||||
**1. `getClientIp` (token.js:36-43) duplicates `_rate-limit.js`**
|
||||
|
||||
`_rate-limit.js` has an identical function (same header priority: cf-connecting-ip → x-real-ip → x-forwarded-for → 0.0.0.0). If CF header priority changes again (it already changed once per MEMORY.md PR #1241), two files will diverge.
|
||||
|
||||
**2. `validateSecret` (token.js:45-49) duplicates key parsing from `_api-key.js` and `mcp.ts`**
|
||||
|
||||
`(process.env.WORLDMONITOR_VALID_KEYS || '').split(',').filter(Boolean).includes(key)` now exists in 3 files, and the direct-key path in `mcp.ts` carries a fourth inline copy (line 448).
|
||||
|
||||
**3. `jsonResp` + `CORS_HEADERS` (token.js:7-18) duplicates `_json-response.js` + `_cors.js`**
|
||||
|
||||
`_json-response.js` exports `jsonResponse(body, status, headers)` with the same behavior. `_cors.js` exports `getPublicCorsHeaders()`. The local `CORS_HEADERS` hardcodes `Allow-Headers: Content-Type, Authorization` while the shared module includes `X-WorldMonitor-Key, X-Widget-Key, X-Pro-Key` too. This creates a silent CORS header divergence.
|
||||
|
||||
**4. `storeToken` raw pipeline fetch duplicates Upstash write pattern**
|
||||
|
||||
`_upstash-json.js` handles GET. There is no shared `writeJsonToUpstash`. `storeToken` owns the raw pipeline fetch including env-var guards, error handling, and `results[0]?.result === 'OK'` parsing. A second write path from another endpoint cannot reuse this.
|
||||
|
||||
## Proposed Solutions
|
||||
|
||||
### Option 1: Import from existing helpers (recommended)
|
||||
|
||||
```js
|
||||
// Replace local getClientIp:
|
||||
import { getClientIp } from '../_rate-limit.js'; // requires export on _rate-limit.js
|
||||
|
||||
// Replace validateSecret:
|
||||
import { isValidApiKey } from '../_api-key.js'; // requires new named export
|
||||
|
||||
// Replace jsonResp + CORS_HEADERS:
|
||||
import { getPublicCorsHeaders } from '../_cors.js';
|
||||
import { jsonResponse } from '../_json-response.js';
|
||||
|
||||
// Keep storeToken as-is or extract writeJsonToUpstash to _upstash-json.js
|
||||
```
|
||||
|
||||
**Pros:** Single source of truth for all patterns. ~25 LOC removed from token.js.
|
||||
**Cons:** Requires small changes to 2 helper files (export `getClientIp`, add `isValidApiKey`).
|
||||
**Effort:** Small-Medium
|
||||
**Risk:** Low
|
||||
|
||||
---
|
||||
|
||||
### Option 2: Accept local copies, add doc comment explaining why
|
||||
|
||||
Add a comment: `// Note: local copy of _rate-limit.js:getClientIp to avoid import across api/oauth/ subdirectory.`
|
||||
|
||||
**Pros:** No helper changes needed.
|
||||
**Cons:** Drift risk remains. Comment-based coupling is weaker than import-based coupling.
|
||||
**Effort:** Tiny
|
||||
**Risk:** Low (but doesn't fix the problem)
|
||||
|
||||
---
|
||||
|
||||
### Option 3: Extract all to `api/_oauth-utils.js`
|
||||
|
||||
Create a dedicated util file for OAuth-specific patterns.
|
||||
|
||||
**Pros:** Clean separation.
|
||||
**Cons:** Overkill — the patterns already exist in shared helpers.
|
||||
**Effort:** Small
|
||||
**Risk:** Low
|
||||
|
||||
## Recommended Action
|
||||
|
||||
Option 1. The import path `../` works since `oauth/token.js` is in `api/oauth/`. Export `getClientIp` from `_rate-limit.js` and `isValidApiKey` from `_api-key.js`. Both are trivial changes.
|
||||
|
||||
## Technical Details
|
||||
|
||||
**Affected files:**
|
||||
- `api/oauth/token.js` — remove 4 duplicated helpers, add imports
|
||||
- `api/_rate-limit.js` — export `getClientIp`
|
||||
- `api/_api-key.js` — export `isValidApiKey(k: string): boolean`
|
||||
- Optionally: `api/_upstash-json.js` — add `writeJsonToUpstash(key, value, ttlSeconds)`
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] `getClientIp` used from one location
|
||||
- [ ] Key validation logic used from one location
|
||||
- [ ] CORS headers for token endpoint come from `_cors.js`
|
||||
- [ ] `jsonResponse` from `_json-response.js`
|
||||
- [ ] No duplicated patterns across `api/` helpers
|
||||
- [ ] All existing tests pass
|
||||
|
||||
## Work Log
|
||||
|
||||
### 2026-03-28 — Code Review Discovery
|
||||
|
||||
**By:** Claude Code (compound-engineering:ce-review)
|
||||
|
||||
**Actions:**
|
||||
|
||||
- Simplicity reviewer and architecture strategist both flagged
|
||||
- getClientIp: exact copy confirmed by code comparison
|
||||
- validateSecret pattern: appears in 3+ locations now
|
||||
- CORS divergence: `token.js` misses `X-WorldMonitor-Key` from allow-headers vs `_cors.js`
|
||||
@@ -0,0 +1,104 @@
|
||||
---
|
||||
status: complete
|
||||
priority: p3
|
||||
issue_id: "086"
|
||||
tags: [code-review, quality, oauth, mcp]
|
||||
dependencies: [084]
|
||||
---
|
||||
|
||||
# Defer Bearer resolution in `mcp.ts` + discriminated return type for `resolveApiKeyFromBearer`
|
||||
|
||||
## Problem Statement
|
||||
|
||||
Two related simplification opportunities in `api/mcp.ts` auth chain:
|
||||
|
||||
1. `resolveApiKeyFromBearer(req)` is called unconditionally before the Bearer header is checked, creating an invisible `await` for non-Bearer requests (though no I/O fires due to the short-circuit in `_oauth-token.js`). Future maintainers may not know there's zero cost for non-Bearer callers.
|
||||
|
||||
2. `resolveApiKeyFromBearer` returns `string | null` where null means EITHER "no Bearer header" OR "Bearer present but token not found." `mcp.ts` handles the "Bearer present" case by re-reading `req.headers.get('Authorization')` — the header is parsed twice.
|
||||
|
||||
## Findings
|
||||
|
||||
- `api/mcp.ts:431-435` — `bearerHeader` is read to gate the 401 path, but `resolveApiKeyFromBearer` also reads Authorization internally
|
||||
- Performance oracle: "The `await` on a synchronously-resolved `null` return is a microtask tick, not a real I/O pause — but the pattern prevents future readers from understanding the cost model."
|
||||
- Simplicity reviewer: "The correct fix is a discriminated return type that makes the call contract explicit and testable."
|
||||
- Double header read: `req.headers.get('Authorization')` called at line 431 and again at line 5 of `_oauth-token.js`
|
||||
|
||||
## Proposed Solutions
|
||||
|
||||
### Option 1: Deferred conditional resolution
|
||||
|
||||
```typescript
|
||||
let apiKey = '';
|
||||
const authHeader = req.headers.get('Authorization') ?? '';
|
||||
if (authHeader.startsWith('Bearer ')) {
|
||||
const bearerApiKey = await resolveApiKeyFromBearer(req);
|
||||
if (bearerApiKey) {
|
||||
apiKey = bearerApiKey;
|
||||
} else {
|
||||
return new Response(...401...);
|
||||
}
|
||||
} else {
|
||||
// Direct key path — zero Upstash I/O
|
||||
const candidateKey = req.headers.get('X-WorldMonitor-Key') ?? '';
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
Eliminates unconditional await and the double header read. `resolveApiKeyFromBearer` can accept the pre-read `token` string instead of `req`.
|
||||
|
||||
**Pros:** Self-documenting cost model. Single header read.
|
||||
**Cons:** Slightly longer code but clearer.
|
||||
**Effort:** Small
|
||||
|
||||
---
|
||||
|
||||
### Option 2: Discriminated return from `resolveApiKeyFromBearer`
|
||||
|
||||
```typescript
|
||||
type BearerResult = { found: true; apiKey: string } | { found: false; hadBearer: boolean };
|
||||
|
||||
export async function resolveApiKeyFromBearer(req): Promise<BearerResult> {
|
||||
const hdr = req.headers.get('Authorization') || '';
|
||||
if (!hdr.startsWith('Bearer ')) return { found: false, hadBearer: false };
|
||||
const token = hdr.slice(7).trim();
|
||||
if (!token) return { found: false, hadBearer: true };
|
||||
const apiKey = await readJsonFromUpstash(`oauth:token:${token}`);
|
||||
if (typeof apiKey === 'string' && apiKey) return { found: true, apiKey };
|
||||
return { found: false, hadBearer: true };
|
||||
}
|
||||
```
|
||||
|
||||
`mcp.ts` then needs no second header read and no `bearerHeader` variable.
|
||||
|
||||
**Pros:** Explicit contract, eliminates double-read, testable.
|
||||
**Cons:** Changes `_oauth-token.js` signature (only called from one place today).
|
||||
**Effort:** Small-Medium
|
||||
|
||||
## Recommended Action
|
||||
|
||||
Option 1 is the quickest win. Option 2 is cleaner if todo #084 is also implemented (since the discriminated return would add the `error` case there too).
|
||||
|
||||
## Technical Details
|
||||
|
||||
**Affected files:**
|
||||
- `api/mcp.ts:430-445` — restructure auth chain
|
||||
- `api/_oauth-token.js` — optionally change signature per Option 2
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [ ] `Authorization` header is not read twice for the same request
|
||||
- [ ] Unconditional `await resolveApiKeyFromBearer` is replaced with conditional logic
|
||||
- [ ] All existing auth paths still work
|
||||
- [ ] No test regressions
|
||||
|
||||
## Work Log
|
||||
|
||||
### 2026-03-28 — Code Review Discovery
|
||||
|
||||
**By:** Claude Code (compound-engineering:ce-review)
|
||||
|
||||
**Actions:**
|
||||
|
||||
- Performance oracle and simplicity reviewer both flagged
|
||||
- Confirmed: double header read at mcp.ts:431 and _oauth-token.js:5
|
||||
- No runtime performance issue (short-circuit returns synchronously) but code clarity suffers
|
||||
28
vercel.json
28
vercel.json
@@ -8,7 +8,8 @@
|
||||
{ "source": "/docs/:match*", "destination": "https://worldmonitor.mintlify.dev/docs/:match*" },
|
||||
{ "source": "/pro", "destination": "/pro/index.html" },
|
||||
{ "source": "/mcp", "destination": "/api/mcp" },
|
||||
{ "source": "/((?!api|mcp|assets|blog|docs|favico|map-styles|data|textures|pro|sw\\.js|workbox-[a-f0-9]+\\.js|manifest\\.webmanifest|offline\\.html|robots\\.txt|sitemap\\.xml|llms\\.txt|llms-full\\.txt|\\.well-known|wm-widget-sandbox\\.html).*)", "destination": "/index.html" }
|
||||
{ "source": "/oauth/token", "destination": "/api/oauth/token" },
|
||||
{ "source": "/((?!api|mcp|oauth|assets|blog|docs|favico|map-styles|data|textures|pro|sw\\.js|workbox-[a-f0-9]+\\.js|manifest\\.webmanifest|offline\\.html|robots\\.txt|sitemap\\.xml|llms\\.txt|llms-full\\.txt|\\.well-known|wm-widget-sandbox\\.html).*)", "destination": "/index.html" }
|
||||
],
|
||||
"headers": [
|
||||
{
|
||||
@@ -27,6 +28,29 @@
|
||||
{ "key": "Access-Control-Allow-Headers", "value": "Content-Type, Authorization, X-WorldMonitor-Key" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"source": "/oauth/(.*)",
|
||||
"headers": [
|
||||
{ "key": "Access-Control-Allow-Origin", "value": "*" },
|
||||
{ "key": "Access-Control-Allow-Methods", "value": "POST, OPTIONS" },
|
||||
{ "key": "Access-Control-Allow-Headers", "value": "Content-Type, Authorization" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"source": "/.well-known/oauth-authorization-server",
|
||||
"headers": [
|
||||
{ "key": "Content-Type", "value": "application/json" },
|
||||
{ "key": "Access-Control-Allow-Origin", "value": "*" },
|
||||
{ "key": "Cache-Control", "value": "public, max-age=3600" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"source": "/.well-known/(.*)",
|
||||
"headers": [
|
||||
{ "key": "Access-Control-Allow-Origin", "value": "*" },
|
||||
{ "key": "Cache-Control", "value": "public, max-age=3600" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"source": "/docs/:path*",
|
||||
"headers": [
|
||||
@@ -59,7 +83,7 @@
|
||||
]
|
||||
},
|
||||
{
|
||||
"source": "/((?!api|mcp|assets|blog|docs|favico|map-styles|data|textures|pro|sw\\.js|workbox-[a-f0-9]+\\.js|manifest\\.webmanifest|offline\\.html|robots\\.txt|sitemap\\.xml|llms\\.txt|llms-full\\.txt|\\.well-known|wm-widget-sandbox\\.html).*)",
|
||||
"source": "/((?!api|mcp|oauth|assets|blog|docs|favico|map-styles|data|textures|pro|sw\\.js|workbox-[a-f0-9]+\\.js|manifest\\.webmanifest|offline\\.html|robots\\.txt|sitemap\\.xml|llms\\.txt|llms-full\\.txt|\\.well-known|wm-widget-sandbox\\.html).*)",
|
||||
"headers": [
|
||||
{ "key": "Cache-Control", "value": "no-cache, no-store, must-revalidate" }
|
||||
]
|
||||
|
||||
Reference in New Issue
Block a user