- SvelteKit project scaffolded with TypeScript
- Type definitions for Style, StyleCategory, ConversionRequest, ConversionResponse, LLMConfig
- Style definitions with 6 categories and 25 sub-styles
- Intensity mapping (1-5) with prompt modifier placeholders
- LLM client using OpenAI-compatible API (Ollama default)
- POST /api/convert endpoint with input validation
- Animated loading modal with per-letter animations
- Main page UI with category/style selectors, intensity slider
- Copy to clipboard, collapsible prompt display
- Vitest tests for styles, LLM prompt building, and API validation
- Environment configuration for LLM settings
77 lines
2.1 KiB
TypeScript
import { env } from '$env/dynamic/private';
|
|
import type { LLMConfig } from './types';
|
|
|
|
const DEFAULT_CONFIG: LLMConfig = {
|
|
baseUrl: 'http://localhost:11434/v1',
|
|
apiKey: 'ollama',
|
|
model: 'llama3'
|
|
};
|
|
|
|
function getConfig(): LLMConfig {
|
|
return {
|
|
baseUrl: env.OPENAI_BASE_URL || DEFAULT_CONFIG.baseUrl,
|
|
apiKey: env.OPENAI_API_KEY || DEFAULT_CONFIG.apiKey,
|
|
model: env.OPENAI_MODEL || DEFAULT_CONFIG.model
|
|
};
|
|
}
|
|
|
|
/**
 * Result of a successful style conversion, carrying back the exact
 * prompts that were sent so callers can display or debug them.
 */
export interface ConvertResult {
  // The model's converted text, trimmed of surrounding whitespace.
  converted: string;
  // The system prompt sent to the model.
  systemPrompt: string;
  // The user-role message sent to the model.
  userMessage: string;
}
export function buildSystemPrompt(styleModifier: string, intensityInstruction: string): string {
|
|
const intensityFilled = intensityInstruction.replace('{style}', styleModifier);
|
|
return `You are an expert English style converter.
|
|
${intensityFilled}.
|
|
${styleModifier}
|
|
Preserve the core meaning but fully transform the voice and tone.
|
|
Output ONLY the converted text — no explanations, no labels, no quotes.`;
|
|
}
|
|
|
|
export function buildUserMessage(text: string): string {
|
|
return text;
|
|
}
|
|
|
|
export async function convertText(
|
|
text: string,
|
|
styleModifier: string,
|
|
intensityInstruction: string,
|
|
overrides?: Partial<LLMConfig>
|
|
): Promise<ConvertResult> {
|
|
const merged: LLMConfig = { ...DEFAULT_CONFIG, ...getConfig(), ...overrides };
|
|
|
|
const systemPrompt = buildSystemPrompt(styleModifier, intensityInstruction);
|
|
const userMessage = buildUserMessage(text);
|
|
|
|
const response = await fetch(`${merged.baseUrl}/chat/completions`, {
|
|
method: 'POST',
|
|
headers: {
|
|
'Content-Type': 'application/json',
|
|
Authorization: `Bearer ${merged.apiKey}`
|
|
},
|
|
body: JSON.stringify({
|
|
model: merged.model,
|
|
messages: [
|
|
{ role: 'system', content: systemPrompt },
|
|
{ role: 'user', content: userMessage }
|
|
],
|
|
temperature: 0.8
|
|
})
|
|
});
|
|
|
|
if (!response.ok) {
|
|
const errorText = await response.text().catch(() => 'Unknown error');
|
|
throw new Error(`LLM request failed (${response.status}): ${errorText}`);
|
|
}
|
|
|
|
const data = await response.json();
|
|
const converted = data.choices?.[0]?.message?.content?.trim();
|
|
|
|
if (!converted) {
|
|
throw new Error('LLM returned empty response');
|
|
}
|
|
|
|
return { converted, systemPrompt, userMessage };
|
|
} |