mirror of
https://github.com/koala73/worldmonitor.git
synced 2026-04-25 17:14:57 +02:00
Three test files covering Ollama integration: api/ollama-summarize.test.mjs (9 tests): - Fallback signal when unconfigured, on API error, on empty response - Success path with correct provider label and response shape - Model selection via OLLAMA_MODEL env / default fallback - Network error handling (ECONNREFUSED) - Translate mode prompt verification tests/summarization-chain.test.mjs (7 tests): - Ollama success short-circuits chain (Groq never called) - Ollama fail → Groq success fallback - Full fallback when both unconfigured - Provider label correctness for Ollama and Groq - Uniform response shape across providers - Identical fallback signal shapes src-tauri/sidecar/local-api-server.test.mjs (8 new tests): - OLLAMA_API_URL and OLLAMA_MODEL accepted via env-update allowlist - Unknown keys rejected (403) - Validation via /v1/models probe (reachable mock) - Validation via /api/tags native fallback - OLLAMA_MODEL pass-through validation - Non-http protocol rejection (422) - Auth-required behavior preserved with token https://claude.ai/code/session_01AGg9fG6LZ8Y6XhvLszdfeY
222 lines
6.8 KiB
JavaScript
222 lines
6.8 KiB
JavaScript
/**
 * Tests for api/ollama-summarize.js endpoint
 *
 * Validates response shape, fallback semantics, caching, and error handling.
 */

|
import { strict as assert } from 'node:assert';
|
|
import test from 'node:test';
|
|
import handler from './ollama-summarize.js';
|
|
|
|
// Snapshot of global fetch and the Ollama env vars at module load.
// Tests freely stub globalThis.fetch and mutate these env vars; the
// afterEach hook below restores everything from these originals.
const ORIGINAL_FETCH = globalThis.fetch;
const ORIGINAL_OLLAMA_URL = process.env.OLLAMA_API_URL;
const ORIGINAL_OLLAMA_MODEL = process.env.OLLAMA_MODEL;
|
/**
 * Build a JSON POST Request against the ollama-summarize endpoint.
 * @param {object} body - Payload (headlines, mode, variant, lang).
 * @param {string} origin - Value for the Origin header; defaults to the Tauri webview origin.
 * @returns {Request}
 */
function makeRequest(body = {}, origin = 'https://tauri.localhost') {
  const payload = JSON.stringify(body);
  return new Request('https://worldmonitor.app/api/ollama-summarize', {
    method: 'POST',
    body: payload,
    headers: {
      origin,
      'content-type': 'application/json',
      'content-length': String(Buffer.byteLength(payload)),
    },
  });
}
|
/**
 * Fake a successful OpenAI-compatible chat-completion response as Ollama
 * would return it from /v1/chat/completions.
 * @param {string} content - Assistant message content.
 * @param {string} model - Model identifier echoed in the payload.
 * @returns {Response} 200 JSON response with choices/usage/model fields.
 */
function ollamaCompletionResponse(content, model = 'llama3.1:8b') {
  const payload = {
    choices: [{ message: { content } }],
    usage: { total_tokens: 42 },
    model,
  };
  return new Response(JSON.stringify(payload), {
    status: 200,
    headers: { 'content-type': 'application/json' },
  });
}
// Restore global fetch and the Ollama env vars after every test so stubs
// and env mutations never leak between cases.
test.afterEach(() => {
  globalThis.fetch = ORIGINAL_FETCH;
  // Re-apply an original env value, or remove the key if it was unset.
  const restore = (key, value) => {
    if (value !== undefined) {
      process.env[key] = value;
    } else {
      delete process.env[key];
    }
  };
  restore('OLLAMA_API_URL', ORIGINAL_OLLAMA_URL);
  restore('OLLAMA_MODEL', ORIGINAL_OLLAMA_MODEL);
});
|
// Without OLLAMA_API_URL the handler must signal fallback, not error.
test('returns fallback signal when OLLAMA_API_URL is not configured', async () => {
  delete process.env.OLLAMA_API_URL;

  const res = await handler(
    makeRequest({ headlines: ['Test headline 1', 'Test headline 2'] }),
  );

  assert.equal(res.status, 200);
  const { fallback, skipped, summary } = await res.json();
  assert.equal(fallback, true);
  assert.equal(skipped, true);
  assert.equal(summary, null);
});
|
// Happy path: handler calls the OpenAI-compatible endpoint and labels
// the result with provider "ollama" plus token/model metadata.
test('returns summary with provider "ollama" on success', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  process.env.OLLAMA_MODEL = 'llama3.1:8b';

  globalThis.fetch = async (url) => {
    assert.equal(String(url).includes('/v1/chat/completions'), true, 'should call OpenAI-compatible endpoint');
    return ollamaCompletionResponse('Iran escalated tensions with new missile test in the Strait of Hormuz.');
  };

  const res = await handler(makeRequest({
    headlines: ['Iran tests new missile', 'Tensions rise in Strait of Hormuz'],
    mode: 'brief',
    variant: 'full',
    lang: 'en',
  }));

  assert.equal(res.status, 200);
  const payload = await res.json();
  assert.equal(payload.provider, 'ollama');
  assert.equal(payload.cached, false);
  assert.equal(typeof payload.summary, 'string');
  assert.equal(payload.summary.length > 10, true);
  assert.equal(typeof payload.tokens, 'number');
  assert.equal(payload.model, 'llama3.1:8b');
});
|
// A non-2xx upstream status must map to the fallback signal with a
// stable error label.
test('returns fallback signal when Ollama API returns error', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';

  globalThis.fetch = async () =>
    new Response(JSON.stringify({ error: 'model not found' }), {
      status: 404,
      headers: { 'content-type': 'application/json' },
    });

  const res = await handler(
    makeRequest({ headlines: ['Test headline 1', 'Test headline 2'] }),
  );

  const payload = await res.json();
  assert.equal(payload.fallback, true);
  assert.equal(payload.error, 'Ollama API error');
});
|
// A 200 with an empty completion is treated as a failure (500 + fallback).
test('returns fallback signal when Ollama returns empty response', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';

  globalThis.fetch = async () =>
    new Response(JSON.stringify({ choices: [{ message: { content: '' } }] }), {
      status: 200,
      headers: { 'content-type': 'application/json' },
    });

  const res = await handler(
    makeRequest({ headlines: ['Test headline 1', 'Test headline 2'] }),
  );

  assert.equal(res.status, 500);
  const payload = await res.json();
  assert.equal(payload.fallback, true);
  assert.equal(payload.error, 'Empty response');
});
|
// Input validation: an empty body (no headlines array) is a client error.
test('returns 400 when headlines array is missing', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';

  const res = await handler(makeRequest({}));

  assert.equal(res.status, 400);
  const payload = await res.json();
  assert.equal(payload.error, 'Headlines array required');
});
|
// OLLAMA_MODEL env must flow into the upstream request body and be
// echoed back in the handler's response.
test('uses OLLAMA_MODEL env for model selection', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  process.env.OLLAMA_MODEL = 'mistral:7b';

  let capturedModel = null;
  globalThis.fetch = async (_url, init) => {
    capturedModel = JSON.parse(init.body).model;
    return ollamaCompletionResponse('Summary of events.');
  };

  const res = await handler(
    makeRequest({ headlines: ['Event A occurred', 'Event B followed'] }),
  );

  assert.equal(res.status, 200);
  assert.equal(capturedModel, 'mistral:7b');
  const payload = await res.json();
  assert.equal(payload.model, 'mistral:7b');
});
|
// Without OLLAMA_MODEL the handler should request the documented default.
test('falls back to default model when OLLAMA_MODEL not set', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';
  delete process.env.OLLAMA_MODEL;

  let capturedModel = null;
  globalThis.fetch = async (_url, init) => {
    capturedModel = JSON.parse(init.body).model;
    return ollamaCompletionResponse('Summary.');
  };

  await handler(makeRequest({ headlines: ['Event A', 'Event B'] }));

  assert.equal(capturedModel, 'llama3.1:8b');
});
|
// A fetch-level failure (Ollama daemon down) must surface as a 500 with
// the fallback signal and the thrown error's type.
test('returns fallback signal on network error (Ollama unreachable)', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';

  globalThis.fetch = async () => {
    throw new Error('connect ECONNREFUSED 127.0.0.1:11434');
  };

  const res = await handler(
    makeRequest({ headlines: ['Test headline 1', 'Test headline 2'] }),
  );

  assert.equal(res.status, 500);
  const { fallback, errorType } = await res.json();
  assert.equal(fallback, true);
  assert.equal(errorType, 'Error');
});
|
// Translate mode: the prompt sent upstream must frame the task as
// translation and name the target language.
test('handles translate mode correctly', async () => {
  process.env.OLLAMA_API_URL = 'http://127.0.0.1:11434';

  let capturedMessages = null;
  globalThis.fetch = async (_url, init) => {
    capturedMessages = JSON.parse(init.body).messages;
    return ollamaCompletionResponse('L\'Iran a testé un nouveau missile.');
  };

  const res = await handler(makeRequest({
    headlines: ['Iran tests new missile'],
    mode: 'translate',
    variant: 'fr',
  }));

  assert.equal(res.status, 200);
  const payload = await res.json();
  assert.equal(typeof payload.summary, 'string');
  // System prompt should mention translation
  assert.equal(capturedMessages[0].content.includes('translator'), true);
  // User message should carry the target language directive
  assert.equal(capturedMessages[1].content.includes('Translate to fr'), true);
});