import { env } from '$env/dynamic/private';
import type { LLMConfig } from './types';

/** Fallback configuration targeting a local Ollama instance's OpenAI-compatible API. */
const DEFAULT_CONFIG: LLMConfig = {
	baseUrl: 'http://localhost:11434/v1',
	apiKey: 'ollama',
	model: 'llama3'
};

/**
 * Resolves the effective LLM configuration from environment variables,
 * falling back to `DEFAULT_CONFIG` for any value that is unset.
 *
 * NOTE: `||` (not `??`) is kept deliberately — an env var set to the empty
 * string also falls back to the default, which is the desired behavior for
 * environment configuration.
 */
function getConfig(): LLMConfig {
	return {
		baseUrl: env.OPENAI_BASE_URL || DEFAULT_CONFIG.baseUrl,
		apiKey: env.OPENAI_API_KEY || DEFAULT_CONFIG.apiKey,
		model: env.OPENAI_MODEL || DEFAULT_CONFIG.model
	};
}

/** Result of a style conversion, including the exact prompts sent to the model. */
export interface ConvertResult {
	/** The model's converted text (trimmed). */
	converted: string;
	/** The system prompt that was sent. */
	systemPrompt: string;
	/** The user message that was sent. */
	userMessage: string;
}

/**
 * Builds the system prompt for the style-conversion request.
 *
 * @param styleModifier - style description injected into the prompt and into
 *   the intensity instruction's `{style}` placeholder.
 * @param intensityInstruction - template sentence that may contain `{style}`.
 * @returns the complete system prompt string.
 */
export function buildSystemPrompt(styleModifier: string, intensityInstruction: string): string {
	// String.prototype.replace with a string pattern substitutes only the FIRST
	// '{style}' occurrence — templates are expected to contain it at most once.
	const intensityFilled = intensityInstruction.replace('{style}', styleModifier);
	return `You are an expert English style converter. ${intensityFilled}. ${styleModifier} Preserve the core meaning but fully transform the voice and tone. Output ONLY the converted text — no explanations, no labels, no quotes.`;
}

/** The user message is the raw input text, passed through unchanged. */
export function buildUserMessage(text: string): string {
	return text;
}

/**
 * Converts `text` to the requested style via an OpenAI-compatible
 * chat-completions endpoint.
 *
 * @param text - the text to convert.
 * @param styleModifier - style description (see {@link buildSystemPrompt}).
 * @param intensityInstruction - intensity template (see {@link buildSystemPrompt}).
 * @param overrides - optional per-call config values that take precedence over
 *   environment-derived config and defaults.
 * @returns the converted text along with the prompts that produced it.
 * @throws Error when the HTTP request fails (non-2xx status) or the model
 *   returns an empty/missing completion.
 */
export async function convertText(
	text: string,
	styleModifier: string,
	intensityInstruction: string,
	// FIX: was a bare `Partial` (missing type argument) — invalid under strict TS.
	overrides?: Partial<LLMConfig>
	// FIX: was a bare `Promise` (missing type argument).
): Promise<ConvertResult> {
	// Precedence: overrides > environment > built-in defaults.
	const merged: LLMConfig = { ...DEFAULT_CONFIG, ...getConfig(), ...overrides };
	const systemPrompt = buildSystemPrompt(styleModifier, intensityInstruction);
	const userMessage = buildUserMessage(text);

	const response = await fetch(`${merged.baseUrl}/chat/completions`, {
		method: 'POST',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${merged.apiKey}`
		},
		body: JSON.stringify({
			model: merged.model,
			messages: [
				{ role: 'system', content: systemPrompt },
				{ role: 'user', content: userMessage }
			],
			temperature: 0.8
		})
	});

	if (!response.ok) {
		// Best-effort body read for diagnostics; never let it mask the HTTP error.
		const errorText = await response.text().catch(() => 'Unknown error');
		throw new Error(`LLM request failed (${response.status}): ${errorText}`);
	}

	const data = await response.json();
	// Optional chaining guards against malformed/empty completion payloads.
	const converted = data.choices?.[0]?.message?.content?.trim();
	if (!converted) {
		throw new Error('LLM returned empty response');
	}

	return { converted, systemPrompt, userMessage };
}