From 11bb42240a9c03cd0f8c56d0303ea573dd3988b4 Mon Sep 17 00:00:00 2001 From: Santhosh Janardhanan Date: Mon, 13 Apr 2026 00:05:46 -0400 Subject: [PATCH] feat: show model name below conversion result Add muted 'Responded by {model}' line below the output text so the user knows which LLM produced the result. The model name comes from the server-side LLM config (OPENAI_MODEL env var, default: llama3) and is passed through the API response. --- src/lib/llm.ts | 4 +++- src/lib/types.ts | 1 + src/routes/+page.svelte | 13 +++++++++++++ src/routes/api/convert/+server.ts | 3 ++- 4 files changed, 19 insertions(+), 2 deletions(-) diff --git a/src/lib/llm.ts b/src/lib/llm.ts index 90c90f3..1628af7 100644 --- a/src/lib/llm.ts +++ b/src/lib/llm.ts @@ -21,6 +21,7 @@ export interface ConvertResult { converted: string; publicSystemPrompt: string; publicUserMessage: string; + model: string; } const INPUT_TAG_START = '###### USER INPUT START ######'; @@ -102,6 +103,7 @@ export async function convertText( return { converted, publicSystemPrompt: buildPublicSystemPrompt(styleModifier, intensityInstruction), - publicUserMessage: text + publicUserMessage: text, + model: merged.model }; } \ No newline at end of file diff --git a/src/lib/types.ts b/src/lib/types.ts index f0cfe7d..ab6fac9 100644 --- a/src/lib/types.ts +++ b/src/lib/types.ts @@ -24,6 +24,7 @@ export interface ConversionResponse { intensity: number; systemPrompt: string; userMessage: string; + model: string; } export interface LLMConfig { diff --git a/src/routes/+page.svelte b/src/routes/+page.svelte index 3b0c2e6..78f0fdc 100644 --- a/src/routes/+page.svelte +++ b/src/routes/+page.svelte @@ -12,6 +12,7 @@ let error = $state(''); let systemPrompt = $state(''); let userMessage = $state(''); + let modelName = $state(''); let showPrompt = $state(false); let copied = $state(false); @@ -40,6 +41,7 @@ outputText = ''; systemPrompt = ''; userMessage = ''; + modelName = ''; showPrompt = false; try { @@ -63,6 +65,7 @@ outputText = 
result.converted; systemPrompt = result.systemPrompt; userMessage = result.userMessage; + modelName = result.model; } catch (err) { error = err instanceof Error ? err.message : 'Something went wrong'; } finally { @@ -185,6 +188,9 @@
{outputText}
+			{#if modelName}
+				<p class="model-attribution">Responded by {modelName}</p>
+			{/if}
@@ -381,6 +387,13 @@ font-size: 1rem; } + .model-attribution { + margin-top: 0.75rem; + font-size: 0.8rem; + color: #9ca3af; + font-style: italic; + } + .prompt-section { margin-top: 1rem; } diff --git a/src/routes/api/convert/+server.ts b/src/routes/api/convert/+server.ts index a60e9cf..ecdeba9 100644 --- a/src/routes/api/convert/+server.ts +++ b/src/routes/api/convert/+server.ts @@ -53,7 +53,8 @@ export const POST: RequestHandler = async ({ request }) => { styleId, intensity, systemPrompt: result.publicSystemPrompt, - userMessage: result.publicUserMessage + userMessage: result.publicUserMessage, + model: result.model }; return json(response);