feat: implement English Style Converter

- SvelteKit project scaffolded with TypeScript
- Type definitions for Style, StyleCategory, ConversionRequest, ConversionResponse, LLMConfig
- Style definitions with 6 categories and 25 sub-styles
- Intensity mapping (1-5) with prompt modifier placeholders
- LLM client using OpenAI-compatible API (Ollama default)
- POST /api/convert endpoint with input validation
- Animated loading modal with per-letter animations
- Main page UI with category/style selectors, intensity slider
- Copy to clipboard, collapsible prompt display
- Vitest tests for styles, LLM prompt building, and API validation
- Environment configuration for LLM settings
This commit is contained in:
2026-04-12 21:53:27 -04:00
parent fcf80638e1
commit a12afb792e
16 changed files with 1464 additions and 37 deletions

77
src/lib/llm.ts Normal file
View File

@@ -0,0 +1,77 @@
import { env } from '$env/dynamic/private';
import type { LLMConfig } from './types';
// Fallback LLM settings: a local Ollama instance exposed through its
// OpenAI-compatible endpoint (port 11434 is Ollama's default).
// Each field can be overridden via environment variables in getConfig().
const DEFAULT_CONFIG: LLMConfig = {
baseUrl: 'http://localhost:11434/v1',
// NOTE(review): presumably Ollama ignores the key but OpenAI-style
// clients require a non-empty Authorization header — confirm.
apiKey: 'ollama',
model: 'llama3'
};
/**
 * Resolves the effective LLM configuration: environment variables win,
 * falling back to DEFAULT_CONFIG per field. Uses `||` deliberately so an
 * empty-string env var also falls back to the default.
 */
function getConfig(): LLMConfig {
  const { baseUrl, apiKey, model } = DEFAULT_CONFIG;
  return {
    baseUrl: env.OPENAI_BASE_URL || baseUrl,
    apiKey: env.OPENAI_API_KEY || apiKey,
    model: env.OPENAI_MODEL || model
  };
}
/**
 * Result of a style conversion, including the prompts that produced it
 * (returned so the UI can display the prompt used — see commit notes on
 * the collapsible prompt display).
 */
export interface ConvertResult {
// The model's converted text, trimmed of surrounding whitespace.
converted: string;
// The full system prompt sent to the model.
systemPrompt: string;
// The raw user message (currently the input text verbatim).
userMessage: string;
}
/**
 * Builds the system prompt for a style conversion request.
 *
 * @param styleModifier - description of the target writing style
 * @param intensityInstruction - intensity template that may contain one or
 *   more `{style}` placeholders to be filled with `styleModifier`
 * @returns the complete system prompt
 */
export function buildSystemPrompt(styleModifier: string, intensityInstruction: string): string {
  // split/join substitutes EVERY '{style}' occurrence. The previous
  // String.replace with a string pattern only replaced the first match,
  // silently leaving later placeholders unfilled.
  const intensityFilled = intensityInstruction.split('{style}').join(styleModifier);
  return `You are an expert English style converter.
${intensityFilled}.
${styleModifier}
Preserve the core meaning but fully transform the voice and tone.
Output ONLY the converted text — no explanations, no labels, no quotes.`;
}
/**
 * Builds the user message for the chat request. Currently a pass-through
 * kept as a seam so future pre-processing has a single place to live.
 */
export const buildUserMessage = (text: string): string => text;
/**
 * Sends `text` to the configured OpenAI-compatible chat-completions
 * endpoint and returns the converted text together with the prompts used.
 *
 * @param text - input text to convert
 * @param styleModifier - description of the target writing style
 * @param intensityInstruction - intensity template (may contain `{style}`)
 * @param overrides - optional per-call overrides of the resolved config
 * @throws Error when the HTTP request fails, the response body is not
 *   JSON, or the model returns no content
 */
export async function convertText(
  text: string,
  styleModifier: string,
  intensityInstruction: string,
  overrides?: Partial<LLMConfig>
): Promise<ConvertResult> {
  // getConfig() already falls back to DEFAULT_CONFIG field-by-field, so
  // the previous additional `...DEFAULT_CONFIG` spread was dead code.
  const merged: LLMConfig = { ...getConfig(), ...overrides };
  const systemPrompt = buildSystemPrompt(styleModifier, intensityInstruction);
  const userMessage = buildUserMessage(text);
  const response = await fetch(`${merged.baseUrl}/chat/completions`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${merged.apiKey}`
    },
    body: JSON.stringify({
      model: merged.model,
      messages: [
        { role: 'system', content: systemPrompt },
        { role: 'user', content: userMessage }
      ],
      temperature: 0.8
    })
  });
  if (!response.ok) {
    // Surface the upstream error body when available for easier debugging.
    const errorText = await response.text().catch(() => 'Unknown error');
    throw new Error(`LLM request failed (${response.status}): ${errorText}`);
  }
  // A 200 with a non-JSON body (e.g. a proxy error page) previously threw
  // an opaque SyntaxError from response.json(); report it descriptively.
  let data: { choices?: { message?: { content?: string } }[] };
  try {
    data = await response.json();
  } catch {
    throw new Error('LLM returned a non-JSON response');
  }
  const converted = data.choices?.[0]?.message?.content?.trim();
  if (!converted) {
    throw new Error('LLM returned empty response');
  }
  return { converted, systemPrompt, userMessage };
}