Files
worldmonitor/server/_shared/llm.ts
Elie Habib 595e3dbb86 feat: premium finance stock analysis suite (#1268)
* Add premium finance stock analysis suite

* docs: link premium finance from README

Add Premium Stock Analysis entry to the Finance & Markets section
with a link to docs/PREMIUM_FINANCE.md.

* fix: address review feedback on premium finance suite

- Chunk Redis pipelines into batches of 200 (Upstash limit)
- Add try-catch around cachedFetchJson in backtest handler
- Log warnings on Redis pipeline HTTP failures
- Include name in analyze-stock cache key to avoid collisions
- Change analyze-stock and backtest-stock gateway cache to 'slow'
- Add dedup guard for concurrent ledger generation
- Add SerpAPI date pre-filter (tbs=qdr:d/w)
- Extract sanitizeSymbol to shared module
- Extract buildEmptyAnalysisResponse helper
- Fix RSI to use Wilder's smoothing (matches TradingView)
- Add console.warn for daily brief summarization errors
- Fall back to stale data in loadStockBacktest on error
- Make daily-market-brief premium on all platforms
- Use word boundaries for short token headline matching
- Add stock-analysis 15-min refresh interval
- Stagger stock-analysis and backtest requests (200ms)
- Rename signalTone to stockSignalTone
2026-03-08 22:54:40 +04:00

195 lines
5.2 KiB
TypeScript

import { CHROME_UA } from './constants';
/**
 * Connection details for one LLM provider, shaped for an OpenAI-compatible
 * chat-completions POST request.
 */
export interface ProviderCredentials {
  /** Absolute URL of the chat-completions endpoint. */
  apiUrl: string;
  /** Model identifier sent as `model` in the request body. */
  model: string;
  /** HTTP headers for the request (content type, optional Authorization). */
  headers: Record<string, string>;
  /** Provider-specific extras spread into the request body (e.g. Ollama's `think: false`). */
  extraBody?: Record<string, unknown>;
}
// Hostnames a user-supplied OLLAMA_API_URL may point at when NOT running in
// sidecar mode — loopback-style destinations only. Both '::1' and '[::1]' are
// listed; Node's URL.hostname keeps the brackets for IPv6 literals.
const OLLAMA_HOST_ALLOWLIST = new Set([
  'localhost', '127.0.0.1', '::1', '[::1]', 'host.docker.internal',
]);
/**
 * True when the process runs in local "sidecar" mode (LOCAL_API_MODE contains
 * the substring "sidecar"), which relaxes the Ollama host allowlist check.
 */
function isSidecar(): boolean {
  if (typeof process === 'undefined') return false;
  const mode = process.env?.LOCAL_API_MODE ?? '';
  return mode.includes('sidecar');
}
/**
 * Resolves endpoint, model, and headers for a named LLM provider from
 * environment variables.
 *
 * @param provider - One of 'ollama' | 'groq' | 'openrouter'.
 * @returns Credentials ready for a chat-completions call, or null when the
 *   provider is unknown, unconfigured, or (Ollama, non-sidecar) targets a
 *   host outside the allowlist.
 */
export function getProviderCredentials(provider: string): ProviderCredentials | null {
  switch (provider) {
    case 'ollama': {
      const baseUrl = process.env.OLLAMA_API_URL;
      if (!baseUrl) return null;
      // Outside sidecar mode, only loopback-style hosts may be targeted.
      if (!isSidecar()) {
        let hostname: string;
        try {
          hostname = new URL(baseUrl).hostname;
        } catch {
          return null; // unparseable base URL
        }
        if (!OLLAMA_HOST_ALLOWLIST.has(hostname)) {
          console.warn(`[llm] Ollama blocked: hostname "${hostname}" not in allowlist`);
          return null;
        }
      }
      const headers: Record<string, string> = { 'Content-Type': 'application/json' };
      const apiKey = process.env.OLLAMA_API_KEY;
      if (apiKey) headers['Authorization'] = `Bearer ${apiKey}`;
      return {
        apiUrl: new URL('/v1/chat/completions', baseUrl).toString(),
        model: process.env.OLLAMA_MODEL || 'llama3.1:8b',
        headers,
        // Suppress "thinking" output on models that support the flag.
        extraBody: { think: false },
      };
    }
    case 'groq': {
      const apiKey = process.env.GROQ_API_KEY;
      if (!apiKey) return null;
      return {
        apiUrl: 'https://api.groq.com/openai/v1/chat/completions',
        model: 'llama-3.1-8b-instant',
        headers: {
          'Authorization': `Bearer ${apiKey}`,
          'Content-Type': 'application/json',
        },
      };
    }
    case 'openrouter': {
      const apiKey = process.env.OPENROUTER_API_KEY;
      if (!apiKey) return null;
      return {
        apiUrl: 'https://openrouter.ai/api/v1/chat/completions',
        model: 'openrouter/free',
        headers: {
          'Authorization': `Bearer ${apiKey}`,
          'Content-Type': 'application/json',
          // OpenRouter attribution headers.
          'HTTP-Referer': 'https://worldmonitor.app',
          'X-Title': 'WorldMonitor',
        },
      };
    }
    default:
      return null;
  }
}
/**
 * Strips chain-of-thought markup from LLM output.
 *
 * Two passes: first remove well-formed open/close tag pairs (with their
 * contents), then remove any dangling open tag — the model was cut off
 * mid-thought — through the end of the string.
 */
export function stripThinkingTags(text: string): string {
  // Complete pairs: delete tag and everything between (non-greedy).
  const closedPairs = [
    /<think>[\s\S]*?<\/think>/gi,
    /<\|thinking\|>[\s\S]*?<\|\/thinking\|>/gi,
    /<reasoning>[\s\S]*?<\/reasoning>/gi,
    /<reflection>[\s\S]*?<\/reflection>/gi,
    /<\|begin_of_thought\|>[\s\S]*?<\|end_of_thought\|>/gi,
  ];
  // Unterminated openers: delete from the tag to end of text.
  const danglingOpeners = [
    /<think>[\s\S]*/gi,
    /<\|thinking\|>[\s\S]*/gi,
    /<reasoning>[\s\S]*/gi,
    /<reflection>[\s\S]*/gi,
    /<\|begin_of_thought\|>[\s\S]*/gi,
  ];
  let out = text;
  for (const re of closedPairs) out = out.replace(re, '');
  out = out.trim();
  for (const re of danglingOpeners) out = out.replace(re, '');
  return out.trim();
}
// Fallback order used by callLlm when no provider is forced: local Ollama
// first, then the hosted APIs.
const PROVIDER_CHAIN = ['ollama', 'groq', 'openrouter'] as const;
/** Options for {@link callLlm}. */
export interface LlmCallOptions {
  /** Chat messages in OpenAI format ({ role, content }). */
  messages: Array<{ role: string; content: string }>;
  /** Sampling temperature. Default 0.3. */
  temperature?: number;
  /** Request-body `max_tokens` cap. Default 1500. */
  maxTokens?: number;
  /** Per-request abort timeout in milliseconds. Default 25 000. */
  timeoutMs?: number;
  /** Force a single provider instead of walking the fallback chain. */
  provider?: string;
  /** Strip chain-of-thought markup from the response. Default true. */
  stripThinkingTags?: boolean;
  /** Optional acceptance check; a rejected response falls through to the next provider. */
  validate?: (content: string) => boolean;
}
/** Successful result from {@link callLlm}. */
export interface LlmCallResult {
  /** Model output text (thinking tags stripped unless disabled). */
  content: string;
  /** Model identifier that produced the response. */
  model: string;
  /** Provider name that served the request. */
  provider: string;
  /** `usage.total_tokens` reported by the API, or 0 when absent. */
  tokens: number;
}
/**
 * Calls an OpenAI-compatible chat-completions endpoint, falling back through
 * PROVIDER_CHAIN until one produces an acceptable response.
 *
 * A forced `provider` disables fallback: any failure returns null directly.
 * Failures (missing credentials, HTTP error, empty/stripped-empty content,
 * rejected validation, network/timeout error) are logged and skipped.
 *
 * @returns The first successful result, or null when every provider fails.
 */
export async function callLlm(opts: LlmCallOptions): Promise<LlmCallResult | null> {
  const {
    messages,
    temperature = 0.3,
    maxTokens = 1500,
    timeoutMs = 25_000,
    provider: forcedProvider,
    stripThinkingTags: shouldStrip = true,
    validate,
  } = opts;

  // One provider when forced, otherwise the full fallback chain.
  const chain = forcedProvider ? [forcedProvider] : [...PROVIDER_CHAIN];

  // Try a single provider; null means "this attempt failed".
  const attempt = async (name: string): Promise<LlmCallResult | null> => {
    const creds = getProviderCredentials(name);
    if (!creds) return null;
    try {
      const response = await fetch(creds.apiUrl, {
        method: 'POST',
        headers: { ...creds.headers, 'User-Agent': CHROME_UA },
        body: JSON.stringify({
          ...creds.extraBody,
          model: creds.model,
          messages,
          temperature,
          max_tokens: maxTokens,
        }),
        signal: AbortSignal.timeout(timeoutMs),
      });
      if (!response.ok) {
        console.warn(`[llm:${name}] HTTP ${response.status}`);
        return null;
      }
      const payload = (await response.json()) as {
        choices?: Array<{ message?: { content?: string } }>;
        usage?: { total_tokens?: number };
      };
      let answer = payload.choices?.[0]?.message?.content?.trim() || '';
      if (!answer) return null;
      const tokens = payload.usage?.total_tokens ?? 0;
      if (shouldStrip) {
        // The whole response may have been chain-of-thought; treat that as a miss.
        answer = stripThinkingTags(answer);
        if (!answer) return null;
      }
      if (validate && !validate(answer)) {
        console.warn(`[llm:${name}] validate() rejected response, trying next`);
        return null;
      }
      return { content: answer, model: creds.model, provider: name, tokens };
    } catch (err) {
      // Network failure, timeout abort, or JSON parse error.
      console.warn(`[llm:${name}] ${(err as Error).message}`);
      return null;
    }
  };

  for (const name of chain) {
    const result = await attempt(name);
    if (result) return result;
  }
  return null;
}