feat: show routed model name when using openrouter/free

When the configured model is a routing endpoint such as 'openrouter/free',
the provider resolves it to a concrete model (e.g. 'upstage/solar-pro-3:free')
and reports that in the response's 'model' field. We now extract it and display it as:

  Responded by upstage/solar-pro-3:free model from openrouter/free

For any model where the response's 'model' matches what was requested (e.g. 'llama3', 'gemma2'), we still show just:

  Responded by llama3

Implementation:
- LLM client returns both requestedModel and actualModel
- API endpoint builds a display-friendly modelLabel
- Frontend uses modelLabel for the attribution line
This commit is contained in:
2026-04-13 01:13:47 -04:00
parent 70dc396fe3
commit eaa1544e66
4 changed files with 17 additions and 9 deletions

View File

@@ -21,7 +21,8 @@ export interface ConvertResult {
converted: string;
publicSystemPrompt: string;
publicUserMessage: string;
model: string;
requestedModel: string;
actualModel: string | null;
}
const INPUT_TAG_START = '###### USER INPUT START ######';
@@ -100,10 +101,15 @@ export async function convertText(
throw new Error('LLM returned empty response');
}
// OpenRouter's free router returns the actual model used in data.model
// e.g. requested "openrouter/free" -> actual "upstage/solar-pro-3:free"
const actualModel = (typeof data.model === 'string' && data.model !== merged.model) ? data.model : null;
return {
converted,
publicSystemPrompt: buildPublicSystemPrompt(styleModifier, intensityInstruction),
publicUserMessage: text,
model: merged.model
requestedModel: merged.model,
actualModel
};
}

View File

@@ -24,7 +24,7 @@ export interface ConversionResponse {
intensity: number;
systemPrompt: string;
userMessage: string;
model: string;
modelLabel: string;
}
export interface LLMConfig {

View File

@@ -49,7 +49,7 @@
let error = $state('');
let systemPrompt = $state('');
let userMessage = $state('');
let modelName = $state('');
let modelLabel = $state('');
let showPrompt = $state(saved?.showPrompt ?? false);
let copied = $state(false);
@@ -83,7 +83,7 @@
outputText = '';
systemPrompt = '';
userMessage = '';
modelName = '';
modelLabel = '';
showPrompt = false;
try {
@@ -107,7 +107,7 @@
outputText = result.converted;
systemPrompt = result.systemPrompt;
userMessage = result.userMessage;
modelName = result.model;
modelLabel = result.modelLabel;
} catch (err) {
error = err instanceof Error ? err.message : 'Something went wrong';
} finally {
@@ -234,8 +234,8 @@
</button>
</div>
<div class="output-text">{outputText}</div>
{#if modelName}
<p class="model-attribution">Responded by {modelName}</p>
{#if modelLabel}
<p class="model-attribution">Responded by {modelLabel}</p>
{/if}
</div>

View File

@@ -54,7 +54,9 @@ export const POST: RequestHandler = async ({ request }) => {
intensity,
systemPrompt: result.publicSystemPrompt,
userMessage: result.publicUserMessage,
model: result.model
modelLabel: result.actualModel
? `${result.actualModel} model from ${result.requestedModel}`
: result.requestedModel
};
return json(response);