From eaa1544e6626987cd3a340e3637a190aad67d62a Mon Sep 17 00:00:00 2001 From: Santhosh Janardhanan Date: Mon, 13 Apr 2026 01:13:47 -0400 Subject: [PATCH] feat: show routed model name when using openrouter/free When the configured model is a routing endpoint like 'openrouter/free', the actual model used (e.g. 'upstage/solar-pro-3:free') is returned in the LLM response's 'model' field. We now extract that and display it as: Responded by upstage/solar-pro-3:free model from openrouter/free For any other model (e.g. 'llama3', 'gemma2'), we still show just: Responded by llama3 Implementation: - LLM client returns both requestedModel and actualModel - API endpoint builds a display-friendly modelLabel - Frontend uses modelLabel for the attribution line --- src/lib/llm.ts | 10 ++++++++-- src/lib/types.ts | 2 +- src/routes/+page.svelte | 10 +++++----- src/routes/api/convert/+server.ts | 4 +++- 4 files changed, 17 insertions(+), 9 deletions(-) diff --git a/src/lib/llm.ts index 1628af7..25b333f 100644 --- a/src/lib/llm.ts +++ b/src/lib/llm.ts @@ -21,7 +21,8 @@ export interface ConvertResult { converted: string; publicSystemPrompt: string; publicUserMessage: string; - model: string; + requestedModel: string; + actualModel: string | null; } const INPUT_TAG_START = '###### USER INPUT START ######'; @@ -100,10 +101,15 @@ export async function convertText( throw new Error('LLM returned empty response'); } + // OpenRouter's free router returns the actual model used in data.model + // e.g. requested "openrouter/free" -> actual "upstage/solar-pro-3:free" + const actualModel = (typeof data.model === 'string' && data.model !== merged.model) ? data.model : null; + return { converted, publicSystemPrompt: buildPublicSystemPrompt(styleModifier, intensityInstruction), publicUserMessage: text, - model: merged.model + requestedModel: merged.model, + actualModel }; } \ No newline at end of file diff --git a/src/lib/types.ts index ab6fac9..12f3d3e 100644 --- a/src/lib/types.ts +++ b/src/lib/types.ts @@ -24,7 +24,7 @@ export interface ConversionResponse { intensity: number; systemPrompt: string; userMessage: string; - model: string; + modelLabel: string; } export interface LLMConfig { diff --git a/src/routes/+page.svelte index 955c6a3..836befc 100644 --- a/src/routes/+page.svelte +++ b/src/routes/+page.svelte @@ -49,7 +49,7 @@ let error = $state(''); let systemPrompt = $state(''); let userMessage = $state(''); - let modelName = $state(''); + let modelLabel = $state(''); let showPrompt = $state(saved?.showPrompt ?? false); let copied = $state(false); @@ -83,7 +83,7 @@ outputText = ''; systemPrompt = ''; userMessage = ''; - modelName = ''; + modelLabel = ''; showPrompt = false; try { @@ -107,7 +107,7 @@ outputText = result.converted; systemPrompt = result.systemPrompt; userMessage = result.userMessage; - modelName = result.model; + modelLabel = result.modelLabel; } catch (err) { error = err instanceof Error ? err.message : 'Something went wrong'; } finally { @@ -234,8 +234,8 @@
{outputText}
- {#if modelName} -

Responded by {modelName}

+ {#if modelLabel} +

Responded by {modelLabel}

{/if} diff --git a/src/routes/api/convert/+server.ts b/src/routes/api/convert/+server.ts index ecdeba9..92410df 100644 --- a/src/routes/api/convert/+server.ts +++ b/src/routes/api/convert/+server.ts @@ -54,7 +54,9 @@ export const POST: RequestHandler = async ({ request }) => { intensity, systemPrompt: result.publicSystemPrompt, userMessage: result.publicUserMessage, - model: result.model + modelLabel: result.actualModel + ? `${result.actualModel} model from ${result.requestedModel}` + : result.requestedModel }; return json(response);