test: add Ollama provider tests across endpoint, sidecar, and chain layers

Three test files covering Ollama integration:

api/ollama-summarize.test.mjs (9 tests):
- Fallback signal when unconfigured, on API error, on empty response
- Success path with correct provider label and response shape
- Model selection via OLLAMA_MODEL env / default fallback
- Network error handling (ECONNREFUSED)
- Translate mode prompt verification

tests/summarization-chain.test.mjs (7 tests):
- Ollama success short-circuits chain (Groq never called)
- Ollama fail → Groq success fallback
- Full fallback when both unconfigured
- Provider label correctness for Ollama and Groq
- Uniform response shape across providers
- Identical fallback signal shapes

src-tauri/sidecar/local-api-server.test.mjs (8 new tests):
- OLLAMA_API_URL and OLLAMA_MODEL accepted via env-update allowlist
- Unknown keys rejected (403)
- Validation via /v1/models probe (reachable mock)
- Validation via /api/tags native fallback
- OLLAMA_MODEL pass-through validation
- Non-http protocol rejection (422)
- Auth-required behavior preserved with token

https://claude.ai/code/session_01AGg9fG6LZ8Y6XhvLszdfeY
This commit is contained in:
Claude
2026-02-19 14:41:32 +00:00
parent 3f5fa51f40
commit ba329e2a2a
3 changed files with 697 additions and 0 deletions

View File

@@ -0,0 +1,221 @@
/**
 * Tests for api/ollama-summarize.js endpoint
 * Validates response shape, fallback semantics, and error handling
 * (caching is only touched indirectly via the `cached: false` assertions).
 */
import { strict as assert } from 'node:assert';
import test from 'node:test';
import handler from './ollama-summarize.js';
// Capture pristine fetch + Ollama env vars at module load so afterEach can restore them.
const ORIGINAL_FETCH = globalThis.fetch;
const ORIGINAL_OLLAMA_URL = process.env.OLLAMA_API_URL;
const ORIGINAL_OLLAMA_MODEL = process.env.OLLAMA_MODEL;
/**
 * Build a JSON POST Request like the desktop webview would send.
 * @param {object} body - JSON-serializable request payload.
 * @param {string} origin - value for the `origin` header (defaults to the Tauri origin).
 * @returns {Request} POST request against the ollama-summarize endpoint.
 */
function makeRequest(body = {}, origin = 'https://tauri.localhost') {
  const payload = JSON.stringify(body);
  const headers = new Headers({
    origin,
    'content-type': 'application/json',
    'content-length': String(Buffer.byteLength(payload)),
  });
  return new Request('https://worldmonitor.app/api/ollama-summarize', {
    method: 'POST',
    headers,
    body: payload,
  });
}
/**
 * Mock an OpenAI-compatible chat-completion success payload from Ollama.
 * @param {string} content - assistant message content.
 * @param {string} model - model id echoed back in the payload.
 * @returns {Response} 200 JSON response with choices/usage/model.
 */
function ollamaCompletionResponse(content, model = 'llama3.1:8b') {
  const payload = {
    choices: [{ message: { content } }],
    usage: { total_tokens: 42 },
    model,
  };
  return new Response(JSON.stringify(payload), {
    status: 200,
    headers: { 'content-type': 'application/json' },
  });
}
// Restore global fetch and Ollama env vars after every test so mutations in
// one test cannot leak into the next.
test.afterEach(() => {
  globalThis.fetch = ORIGINAL_FETCH;
  if (ORIGINAL_OLLAMA_URL !== undefined) {
    process.env.OLLAMA_API_URL = ORIGINAL_OLLAMA_URL;
  } else {
    delete process.env.OLLAMA_API_URL;
  }
  if (ORIGINAL_OLLAMA_MODEL !== undefined) {
    process.env.OLLAMA_MODEL = ORIGINAL_OLLAMA_MODEL;
  } else {
    delete process.env.OLLAMA_MODEL;
  }
});
// Without OLLAMA_API_URL the endpoint must not attempt a network call; it
// answers 200 with a fallback envelope so the client moves to the next provider.
test('returns fallback signal when OLLAMA_API_URL is not configured', async () => {
  delete process.env.OLLAMA_API_URL;
  const response = await handler(makeRequest({
    headlines: ['Test headline 1', 'Test headline 2'],
  }));
  assert.equal(response.status, 200);
  const body = await response.json();
  assert.equal(body.fallback, true);
  assert.equal(body.skipped, true);
  assert.equal(body.summary, null);
});

// Happy path: a mocked Ollama success must surface the provider label, token
// count, model name, and a non-trivial summary string.
test('returns summary with provider "ollama" on success', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  process.env.OLLAMA_MODEL = 'llama3.1:8b';
  globalThis.fetch = async (url) => {
    const target = String(url);
    // The handler is expected to hit the OpenAI-compatible route, not the native one.
    assert.equal(target.includes('/v1/chat/completions'), true, 'should call OpenAI-compatible endpoint');
    return ollamaCompletionResponse('Iran escalated tensions with new missile test in the Strait of Hormuz.');
  };
  const response = await handler(makeRequest({
    headlines: ['Iran tests new missile', 'Tensions rise in Strait of Hormuz'],
    mode: 'brief',
    variant: 'full',
    lang: 'en',
  }));
  assert.equal(response.status, 200);
  const body = await response.json();
  assert.equal(body.provider, 'ollama');
  assert.equal(body.cached, false);
  assert.equal(typeof body.summary, 'string');
  assert.equal(body.summary.length > 10, true);
  assert.equal(typeof body.tokens, 'number');
  assert.equal(body.model, 'llama3.1:8b');
});
// An HTTP error from Ollama (e.g. unknown model) becomes a fallback signal.
// NOTE(review): unlike the sibling tests this one does not assert
// response.status — confirm the intended status for this path and pin it.
test('returns fallback signal when Ollama API returns error', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  globalThis.fetch = async () => {
    return new Response(JSON.stringify({ error: 'model not found' }), {
      status: 404,
      headers: { 'content-type': 'application/json' },
    });
  };
  const response = await handler(makeRequest({
    headlines: ['Test headline 1', 'Test headline 2'],
  }));
  const body = await response.json();
  assert.equal(body.fallback, true);
  assert.equal(body.error, 'Ollama API error');
});

// A 200 reply whose completion content is empty is treated as a failure:
// the endpoint answers 500 and signals fallback.
test('returns fallback signal when Ollama returns empty response', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  globalThis.fetch = async () => {
    return new Response(JSON.stringify({
      choices: [{ message: { content: '' } }],
    }), {
      status: 200,
      headers: { 'content-type': 'application/json' },
    });
  };
  const response = await handler(makeRequest({
    headlines: ['Test headline 1', 'Test headline 2'],
  }));
  assert.equal(response.status, 500);
  const body = await response.json();
  assert.equal(body.fallback, true);
  assert.equal(body.error, 'Empty response');
});

// Input validation: a request without a headlines array is rejected outright.
test('returns 400 when headlines array is missing', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  const response = await handler(makeRequest({}));
  assert.equal(response.status, 400);
  const body = await response.json();
  assert.equal(body.error, 'Headlines array required');
});
// Model selection: OLLAMA_MODEL must be forwarded verbatim in the request
// payload and echoed back in the response body.
test('uses OLLAMA_MODEL env for model selection', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  process.env.OLLAMA_MODEL = 'mistral:7b';
  let capturedModel = null;
  globalThis.fetch = async (url, init) => {
    const payload = JSON.parse(init.body);
    capturedModel = payload.model;
    return ollamaCompletionResponse('Summary of events.');
  };
  const response = await handler(makeRequest({
    headlines: ['Event A occurred', 'Event B followed'],
  }));
  assert.equal(response.status, 200);
  assert.equal(capturedModel, 'mistral:7b');
  const body = await response.json();
  assert.equal(body.model, 'mistral:7b');
});

// When OLLAMA_MODEL is unset the handler must fall back to its built-in
// default model name.
test('falls back to default model when OLLAMA_MODEL not set', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  delete process.env.OLLAMA_MODEL;
  let capturedModel = null;
  globalThis.fetch = async (url, init) => {
    const payload = JSON.parse(init.body);
    capturedModel = payload.model;
    return ollamaCompletionResponse('Summary.');
  };
  await handler(makeRequest({
    headlines: ['Event A', 'Event B'],
  }));
  assert.equal(capturedModel, 'llama3.1:8b');
});
// A thrown fetch error (server not running) must be caught and converted into
// a 500 fallback envelope rather than propagating out of the handler.
test('returns fallback signal on network error (Ollama unreachable)', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  globalThis.fetch = async () => {
    throw new Error('connect ECONNREFUSED 127.0.0.1:11434');
  };
  const response = await handler(makeRequest({
    headlines: ['Test headline 1', 'Test headline 2'],
  }));
  assert.equal(response.status, 500);
  const body = await response.json();
  assert.equal(body.fallback, true);
  assert.equal(body.errorType, 'Error');
});

// Translate mode: the outgoing chat messages should carry a translation
// system prompt and a user prompt naming the target language.
test('handles translate mode correctly', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  let capturedMessages = null;
  globalThis.fetch = async (url, init) => {
    const payload = JSON.parse(init.body);
    capturedMessages = payload.messages;
    return ollamaCompletionResponse('L\'Iran a testé un nouveau missile.');
  };
  const response = await handler(makeRequest({
    headlines: ['Iran tests new missile'],
    mode: 'translate',
    variant: 'fr',
  }));
  assert.equal(response.status, 200);
  const body = await response.json();
  assert.equal(typeof body.summary, 'string');
  // System prompt should mention translation
  assert.equal(capturedMessages[0].content.includes('translator'), true);
  assert.equal(capturedMessages[1].content.includes('Translate to fr'), true);
});

View File

@@ -359,3 +359,261 @@ test('resolves packaged tauri resource layout under _up_/api', async () => {
await remote.close();
}
});
// ── Ollama env key allowlist + validation tests ──

// OLLAMA_API_URL is on the env-update allowlist: a successful POST must ack
// with ok/key and actually write the value into process.env.
test('accepts OLLAMA_API_URL via /api/local-env-update', async () => {
  const localApi = await setupApiDir({});
  const app = await createLocalApiServer({
    port: 0, // ephemeral port; actual port comes from app.start()
    apiDir: localApi.apiDir,
    logger: { log() {}, warn() {}, error() {} },
  });
  const { port } = await app.start();
  try {
    const response = await fetch(`http://127.0.0.1:${port}/api/local-env-update`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ key: 'OLLAMA_API_URL', value: 'http://127.0.0.1:11434' }),
    });
    assert.equal(response.status, 200);
    const body = await response.json();
    assert.equal(body.ok, true);
    assert.equal(body.key, 'OLLAMA_API_URL');
    assert.equal(process.env.OLLAMA_API_URL, 'http://127.0.0.1:11434');
  } finally {
    // Clean up the env mutation this endpoint performs as a side effect.
    delete process.env.OLLAMA_API_URL;
    await app.close();
    await localApi.cleanup();
  }
});
// OLLAMA_MODEL is likewise allowlisted and persisted into process.env.
test('accepts OLLAMA_MODEL via /api/local-env-update', async () => {
  const localApi = await setupApiDir({});
  const app = await createLocalApiServer({
    port: 0,
    apiDir: localApi.apiDir,
    logger: { log() {}, warn() {}, error() {} },
  });
  const { port } = await app.start();
  try {
    const response = await fetch(`http://127.0.0.1:${port}/api/local-env-update`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ key: 'OLLAMA_MODEL', value: 'llama3.1:8b' }),
    });
    assert.equal(response.status, 200);
    const body = await response.json();
    assert.equal(body.ok, true);
    assert.equal(body.key, 'OLLAMA_MODEL');
    assert.equal(process.env.OLLAMA_MODEL, 'llama3.1:8b');
  } finally {
    delete process.env.OLLAMA_MODEL;
    await app.close();
    await localApi.cleanup();
  }
});

// Keys outside the allowlist must be refused with 403 so arbitrary env vars
// cannot be injected through the local API.
test('rejects unknown key via /api/local-env-update', async () => {
  const localApi = await setupApiDir({});
  const app = await createLocalApiServer({
    port: 0,
    apiDir: localApi.apiDir,
    logger: { log() {}, warn() {}, error() {} },
  });
  const { port } = await app.start();
  try {
    const response = await fetch(`http://127.0.0.1:${port}/api/local-env-update`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ key: 'NOT_ALLOWED_KEY', value: 'some-value' }),
    });
    assert.equal(response.status, 403);
    const body = await response.json();
    assert.equal(body.error, 'key not in allowlist');
  } finally {
    await app.close();
    await localApi.cleanup();
  }
});
// Validation probes the OpenAI-compatible /v1/models route first; a mock
// server answering that route should yield valid:true.
test('validates OLLAMA_API_URL via /api/local-validate-secret (reachable endpoint)', async () => {
  // Stand up a mock Ollama server that responds to /v1/models
  const mockOllama = createServer((req, res) => {
    if (req.url === '/v1/models') {
      res.writeHead(200, { 'content-type': 'application/json' });
      res.end(JSON.stringify({ data: [{ id: 'llama3.1:8b' }] }));
    } else {
      res.writeHead(404);
      res.end('not found');
    }
  });
  const ollamaPort = await listen(mockOllama);
  const localApi = await setupApiDir({});
  const app = await createLocalApiServer({
    port: 0,
    apiDir: localApi.apiDir,
    logger: { log() {}, warn() {}, error() {} },
  });
  const { port } = await app.start();
  try {
    const response = await fetch(`http://127.0.0.1:${port}/api/local-validate-secret`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ key: 'OLLAMA_API_URL', value: `http://127.0.0.1:${ollamaPort}` }),
    });
    assert.equal(response.status, 200);
    const body = await response.json();
    assert.equal(body.valid, true);
    assert.equal(body.message, 'Ollama endpoint verified');
  } finally {
    await app.close();
    await localApi.cleanup();
    // Promisify the callback-style close so the mock cannot leak between tests.
    await new Promise((resolve, reject) => {
      mockOllama.close((err) => (err ? reject(err) : resolve()));
    });
  }
});
// When /v1/models is unavailable, validation must fall back to Ollama's
// native /api/tags route and report a distinct success message.
test('validates OLLAMA_API_URL via native /api/tags fallback', async () => {
  // Mock server that only responds to /api/tags (not /v1/models)
  const mockOllama = createServer((req, res) => {
    if (req.url === '/api/tags') {
      res.writeHead(200, { 'content-type': 'application/json' });
      res.end(JSON.stringify({ models: [{ name: 'llama3.1:8b' }] }));
    } else {
      res.writeHead(404);
      res.end('not found');
    }
  });
  const ollamaPort = await listen(mockOllama);
  const localApi = await setupApiDir({});
  const app = await createLocalApiServer({
    port: 0,
    apiDir: localApi.apiDir,
    logger: { log() {}, warn() {}, error() {} },
  });
  const { port } = await app.start();
  try {
    const response = await fetch(`http://127.0.0.1:${port}/api/local-validate-secret`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ key: 'OLLAMA_API_URL', value: `http://127.0.0.1:${ollamaPort}` }),
    });
    assert.equal(response.status, 200);
    const body = await response.json();
    assert.equal(body.valid, true);
    assert.equal(body.message, 'Ollama endpoint verified (native API)');
  } finally {
    await app.close();
    await localApi.cleanup();
    await new Promise((resolve, reject) => {
      mockOllama.close((err) => (err ? reject(err) : resolve()));
    });
  }
});
// OLLAMA_MODEL is a plain string setting: validation just acknowledges it
// without any network probing.
test('validates OLLAMA_MODEL stores model name', async () => {
  const localApi = await setupApiDir({});
  const app = await createLocalApiServer({
    port: 0,
    apiDir: localApi.apiDir,
    logger: { log() {}, warn() {}, error() {} },
  });
  const { port } = await app.start();
  try {
    const response = await fetch(`http://127.0.0.1:${port}/api/local-validate-secret`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ key: 'OLLAMA_MODEL', value: 'mistral:7b' }),
    });
    assert.equal(response.status, 200);
    const body = await response.json();
    assert.equal(body.valid, true);
    assert.equal(body.message, 'Model name stored');
  } finally {
    await app.close();
    await localApi.cleanup();
  }
});

// Only http(s) URLs are acceptable Ollama endpoints; other schemes are
// rejected as unprocessable (422) with valid:false.
test('rejects OLLAMA_API_URL with non-http protocol', async () => {
  const localApi = await setupApiDir({});
  const app = await createLocalApiServer({
    port: 0,
    apiDir: localApi.apiDir,
    logger: { log() {}, warn() {}, error() {} },
  });
  const { port } = await app.start();
  try {
    const response = await fetch(`http://127.0.0.1:${port}/api/local-validate-secret`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ key: 'OLLAMA_API_URL', value: 'ftp://127.0.0.1:11434' }),
    });
    assert.equal(response.status, 422);
    const body = await response.json();
    assert.equal(body.valid, false);
    assert.equal(body.message, 'Must be an http(s) URL');
  } finally {
    await app.close();
    await localApi.cleanup();
  }
});
// Regression guard: adding the Ollama keys must not loosen auth — with
// LOCAL_API_TOKEN set, unauthenticated requests still get 401 and a correct
// bearer token still succeeds.
test('auth-required behavior unchanged — rejects unauthenticated requests when token is set', async () => {
  const localApi = await setupApiDir({});
  const originalToken = process.env.LOCAL_API_TOKEN;
  process.env.LOCAL_API_TOKEN = 'secret-token-123';
  const app = await createLocalApiServer({
    port: 0,
    apiDir: localApi.apiDir,
    logger: { log() {}, warn() {}, error() {} },
  });
  const { port } = await app.start();
  try {
    // Request without auth header should be rejected
    const response = await fetch(`http://127.0.0.1:${port}/api/local-env-update`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ key: 'OLLAMA_API_URL', value: 'http://127.0.0.1:11434' }),
    });
    assert.equal(response.status, 401);
    const body = await response.json();
    assert.equal(body.error, 'Unauthorized');
    // Request with correct auth header should succeed
    const authedResponse = await fetch(`http://127.0.0.1:${port}/api/local-env-update`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer secret-token-123',
      },
      body: JSON.stringify({ key: 'OLLAMA_API_URL', value: 'http://127.0.0.1:11434' }),
    });
    assert.equal(authedResponse.status, 200);
  } finally {
    // Restore the token exactly as found, and undo the env write made by the
    // successful authed request above.
    if (originalToken !== undefined) {
      process.env.LOCAL_API_TOKEN = originalToken;
    } else {
      delete process.env.LOCAL_API_TOKEN;
    }
    delete process.env.OLLAMA_API_URL;
    await app.close();
    await localApi.cleanup();
  }
});

View File

@@ -0,0 +1,218 @@
/**
* Summarization chain order tests
* Validates provider fallback order: Ollama → Groq → OpenRouter → Browser T5
*
* Tests the API endpoint handlers directly (same approach as cyber-threats.test.mjs)
* to verify chain semantics: short-circuit on success, fallback on failure.
*/
import { strict as assert } from 'node:assert';
import test from 'node:test';
// We test the chain order by importing handlers directly and verifying
// their response shapes + fallback signals match what summarization.ts expects.
import ollamaHandler from '../api/ollama-summarize.js';
import groqHandler from '../api/groq-summarize.js';
// Capture pristine fetch + provider env vars at module load so afterEach can restore them.
const ORIGINAL_FETCH = globalThis.fetch;
const ORIGINAL_OLLAMA_URL = process.env.OLLAMA_API_URL;
const ORIGINAL_OLLAMA_MODEL = process.env.OLLAMA_MODEL;
const ORIGINAL_GROQ_KEY = process.env.GROQ_API_KEY;
/**
 * Build a JSON POST Request with the origin/content headers the webview sends.
 * @param {object} body - JSON-serializable request payload.
 * @param {string} origin - value for the `origin` header (defaults to the Tauri origin).
 * @returns {Request} POST request against a generic /api/test URL.
 */
function makeRequest(body = {}, origin = 'https://tauri.localhost') {
  const payload = JSON.stringify(body);
  const headers = new Headers({
    origin,
    'content-type': 'application/json',
    'content-length': String(Buffer.byteLength(payload)),
  });
  return new Request('https://worldmonitor.app/api/test', {
    method: 'POST',
    headers,
    body: payload,
  });
}
// Shared builder for mock OpenAI-compatible completion payloads; each provider
// helper differs only in its reported token count.
const makeCompletionResponse = (content, totalTokens) =>
  new Response(
    JSON.stringify({
      choices: [{ message: { content } }],
      usage: { total_tokens: totalTokens },
    }),
    { status: 200, headers: { 'content-type': 'application/json' } },
  );

/** Mock a successful Ollama completion response. */
function ollamaCompletionResponse(content) {
  return makeCompletionResponse(content, 42);
}

/** Mock a successful Groq completion response. */
function groqCompletionResponse(content) {
  return makeCompletionResponse(content, 35);
}
// Shared request payload used by every chain test below.
const TEST_HEADLINES = { headlines: ['Event A happened today', 'Event B followed quickly'] };

// Restore global fetch and all provider env vars after each test so state set
// by one chain scenario cannot bleed into the next.
test.afterEach(() => {
  globalThis.fetch = ORIGINAL_FETCH;
  if (ORIGINAL_OLLAMA_URL !== undefined) {
    process.env.OLLAMA_API_URL = ORIGINAL_OLLAMA_URL;
  } else {
    delete process.env.OLLAMA_API_URL;
  }
  if (ORIGINAL_OLLAMA_MODEL !== undefined) {
    process.env.OLLAMA_MODEL = ORIGINAL_OLLAMA_MODEL;
  } else {
    delete process.env.OLLAMA_MODEL;
  }
  if (ORIGINAL_GROQ_KEY !== undefined) {
    process.env.GROQ_API_KEY = ORIGINAL_GROQ_KEY;
  } else {
    delete process.env.GROQ_API_KEY;
  }
});
// ── Chain order: Ollama success short-circuits (no Groq/OpenRouter calls) ──

// A successful Ollama response must carry no fallback flag, which is the
// client's signal to stop the chain; the Groq host must never be contacted.
test('Ollama success short-circuits the chain (no downstream calls)', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  process.env.GROQ_API_KEY = 'test-groq-key';
  let groqCalled = false;
  // Route mocked fetch by URL so a downstream Groq call would be detected.
  globalThis.fetch = async (url) => {
    const target = String(url);
    if (target.includes('/v1/chat/completions')) {
      return ollamaCompletionResponse('Ollama summary of events.');
    }
    if (target.includes('api.groq.com')) {
      groqCalled = true;
      return groqCompletionResponse('Groq summary.');
    }
    return new Response('not found', { status: 404 });
  };
  const ollamaResponse = await ollamaHandler(makeRequest(TEST_HEADLINES));
  assert.equal(ollamaResponse.status, 200);
  const ollamaBody = await ollamaResponse.json();
  // Ollama succeeded — chain should stop here
  assert.equal(ollamaBody.provider, 'ollama');
  assert.equal(typeof ollamaBody.summary, 'string');
  assert.equal(ollamaBody.summary.length > 5, true);
  assert.equal(ollamaBody.fallback, undefined);
  assert.equal(groqCalled, false, 'Groq should not be called when Ollama succeeds');
});
// ── Chain order: Ollama fail → Groq success ──

// Simulate the chain's second leg: Ollama fails (network error → fallback
// signal), then Groq succeeds with its own provider label and no fallback.
test('Ollama failure signals fallback, then Groq succeeds', async () => {
  // Step 1: Ollama fails
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  globalThis.fetch = async () => {
    throw new Error('connect ECONNREFUSED');
  };
  const ollamaResponse = await ollamaHandler(makeRequest(TEST_HEADLINES));
  const ollamaBody = await ollamaResponse.json();
  assert.equal(ollamaBody.fallback, true, 'Ollama should signal fallback on failure');
  // Step 2: Groq succeeds
  process.env.GROQ_API_KEY = 'test-groq-key';
  globalThis.fetch = async () => groqCompletionResponse('Groq picked up the summary.');
  const groqResponse = await groqHandler(makeRequest(TEST_HEADLINES));
  assert.equal(groqResponse.status, 200);
  const groqBody = await groqResponse.json();
  assert.equal(groqBody.provider, 'groq');
  assert.equal(typeof groqBody.summary, 'string');
  assert.equal(groqBody.fallback, undefined);
});

// ── Chain order: Both fail → fallback signals propagate ──

// When neither provider is configured, both handlers must emit the
// fallback + skipped signal so the client drops to the browser model.
test('full fallback: Ollama + Groq both fail with fallback signals', async () => {
  // Ollama: unconfigured
  delete process.env.OLLAMA_API_URL;
  const ollamaResponse = await ollamaHandler(makeRequest(TEST_HEADLINES));
  const ollamaBody = await ollamaResponse.json();
  assert.equal(ollamaBody.fallback, true);
  assert.equal(ollamaBody.skipped, true);
  // Groq: unconfigured
  delete process.env.GROQ_API_KEY;
  const groqResponse = await groqHandler(makeRequest(TEST_HEADLINES));
  const groqBody = await groqResponse.json();
  assert.equal(groqBody.fallback, true);
  assert.equal(groqBody.skipped, true);
});
// ── Response shape: provider labels are correct ──

// The client keys its UI/telemetry off the provider field; pin both labels.
test('Ollama response uses provider label "ollama"', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  globalThis.fetch = async () => ollamaCompletionResponse('Summary here.');
  const response = await ollamaHandler(makeRequest(TEST_HEADLINES));
  const body = await response.json();
  assert.equal(body.provider, 'ollama');
});

test('Groq response uses provider label "groq"', async () => {
  process.env.GROQ_API_KEY = 'test-key';
  globalThis.fetch = async () => groqCompletionResponse('Summary here.');
  const response = await groqHandler(makeRequest(TEST_HEADLINES));
  const body = await response.json();
  assert.equal(body.provider, 'groq');
});
// ── Response shape: all providers share uniform response contract ──

// Success envelopes from both providers must expose the same keys so the
// client can consume them interchangeably.
test('Ollama and Groq share the same response shape', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  process.env.GROQ_API_KEY = 'test-key';
  // Route by host: the Ollama handler targets the configured local URL.
  globalThis.fetch = async (url) => {
    const target = String(url);
    if (target.includes('127.0.0.1:11434')) {
      return ollamaCompletionResponse('Ollama analysis.');
    }
    return groqCompletionResponse('Groq analysis.');
  };
  const ollamaResponse = await ollamaHandler(makeRequest(TEST_HEADLINES));
  const groqResponse = await groqHandler(makeRequest(TEST_HEADLINES));
  const ollamaBody = await ollamaResponse.json();
  const groqBody = await groqResponse.json();
  // Both should have the same keys
  const requiredKeys = ['summary', 'model', 'provider', 'cached', 'tokens'];
  for (const key of requiredKeys) {
    assert.equal(key in ollamaBody, true, `Ollama response missing key: ${key}`);
    assert.equal(key in groqBody, true, `Groq response missing key: ${key}`);
  }
  assert.equal(typeof ollamaBody.summary, 'string');
  assert.equal(typeof groqBody.summary, 'string');
  assert.equal(ollamaBody.cached, false);
  assert.equal(groqBody.cached, false);
});

// ── Fallback shape consistency ──

// Fallback envelopes must also match field-for-field so the chain logic can
// treat any provider's "skip" identically.
test('Ollama and Groq produce identical fallback signal shapes', async () => {
  // Both unconfigured
  delete process.env.OLLAMA_API_URL;
  delete process.env.GROQ_API_KEY;
  const ollamaResponse = await ollamaHandler(makeRequest(TEST_HEADLINES));
  const groqResponse = await groqHandler(makeRequest(TEST_HEADLINES));
  const ollamaBody = await ollamaResponse.json();
  const groqBody = await groqResponse.json();
  // Both should signal fallback with same shape
  assert.equal(ollamaBody.fallback, true);
  assert.equal(groqBody.fallback, true);
  assert.equal(ollamaBody.skipped, true);
  assert.equal(groqBody.skipped, true);
  assert.equal(ollamaBody.summary, null);
  assert.equal(groqBody.summary, null);
});