feat: show model name below conversion result

Add muted 'Responded by {model}' line below the output text so the
user knows which LLM produced the result. The model name comes from
the server-side LLM config (OPENAI_MODEL env var, default: llama3)
and is passed through the API response.
This commit is contained in:
2026-04-13 00:05:46 -04:00
parent 85dec4908f
commit 11bb42240a
4 changed files with 19 additions and 2 deletions

View File

@@ -21,6 +21,7 @@ export interface ConvertResult {
 	converted: string;
 	publicSystemPrompt: string;
 	publicUserMessage: string;
+	model: string;
 }

 const INPUT_TAG_START = '###### USER INPUT START ######';
@@ -102,6 +103,7 @@ export async function convertText(
 	return {
 		converted,
 		publicSystemPrompt: buildPublicSystemPrompt(styleModifier, intensityInstruction),
-		publicUserMessage: text
+		publicUserMessage: text,
+		model: merged.model
 	};
 }

View File

@@ -24,6 +24,7 @@ export interface ConversionResponse {
 	intensity: number;
 	systemPrompt: string;
 	userMessage: string;
+	model: string;
 }

 export interface LLMConfig {

View File

@@ -12,6 +12,7 @@
 	let error = $state('');
 	let systemPrompt = $state('');
 	let userMessage = $state('');
+	let modelName = $state('');
 	let showPrompt = $state(false);
 	let copied = $state(false);
@@ -40,6 +41,7 @@
 		outputText = '';
 		systemPrompt = '';
 		userMessage = '';
+		modelName = '';
 		showPrompt = false;

 		try {
@@ -63,6 +65,7 @@
 			outputText = result.converted;
 			systemPrompt = result.systemPrompt;
 			userMessage = result.userMessage;
+			modelName = result.model;
 		} catch (err) {
 			error = err instanceof Error ? err.message : 'Something went wrong';
 		} finally {
@@ -185,6 +188,9 @@
 				</button>
 			</div>
 			<div class="output-text">{outputText}</div>
+			{#if modelName}
+				<p class="model-attribution">Responded by {modelName}</p>
+			{/if}
 		</div>

 		<div class="prompt-section">
@@ -381,6 +387,13 @@
 		font-size: 1rem;
 	}

+	.model-attribution {
+		margin-top: 0.75rem;
+		font-size: 0.8rem;
+		color: #9ca3af;
+		font-style: italic;
+	}
+
 	.prompt-section {
 		margin-top: 1rem;
 	}

View File

@@ -53,7 +53,8 @@ export const POST: RequestHandler = async ({ request }) => {
 		styleId,
 		intensity,
 		systemPrompt: result.publicSystemPrompt,
-		userMessage: result.publicUserMessage
+		userMessage: result.publicUserMessage,
+		model: result.model
 	};

 	return json(response);