feat: show model name below conversion result
Add muted 'Responded by {model}' line below the output text so the
user knows which LLM produced the result. The model name comes from
the server-side LLM config (OPENAI_MODEL env var, default: llama3)
and is passed through the API response.
This commit is contained in:
@@ -21,6 +21,7 @@ export interface ConvertResult {
 	converted: string;
 	publicSystemPrompt: string;
 	publicUserMessage: string;
+	model: string;
 }
 
 const INPUT_TAG_START = '###### USER INPUT START ######';
@@ -102,6 +103,7 @@ export async function convertText(
 	return {
 		converted,
 		publicSystemPrompt: buildPublicSystemPrompt(styleModifier, intensityInstruction),
-		publicUserMessage: text
+		publicUserMessage: text,
+		model: merged.model
 	};
 }
@@ -24,6 +24,7 @@ export interface ConversionResponse {
 	intensity: number;
 	systemPrompt: string;
 	userMessage: string;
+	model: string;
 }
 
 export interface LLMConfig {
@@ -12,6 +12,7 @@
 	let error = $state('');
 	let systemPrompt = $state('');
 	let userMessage = $state('');
+	let modelName = $state('');
 	let showPrompt = $state(false);
 	let copied = $state(false);
 
@@ -40,6 +41,7 @@
 		outputText = '';
 		systemPrompt = '';
 		userMessage = '';
+		modelName = '';
 		showPrompt = false;
 
 		try {
@@ -63,6 +65,7 @@
 			outputText = result.converted;
 			systemPrompt = result.systemPrompt;
 			userMessage = result.userMessage;
+			modelName = result.model;
 		} catch (err) {
 			error = err instanceof Error ? err.message : 'Something went wrong';
 		} finally {
@@ -185,6 +188,9 @@
 				</button>
 			</div>
 			<div class="output-text">{outputText}</div>
+			{#if modelName}
+				<p class="model-attribution">Responded by {modelName}</p>
+			{/if}
 		</div>
 
 		<div class="prompt-section">
@@ -381,6 +387,13 @@
 		font-size: 1rem;
 	}
 
+	.model-attribution {
+		margin-top: 0.75rem;
+		font-size: 0.8rem;
+		color: #9ca3af;
+		font-style: italic;
+	}
+
 	.prompt-section {
 		margin-top: 1rem;
 	}
@@ -53,7 +53,8 @@ export const POST: RequestHandler = async ({ request }) => {
 		styleId,
 		intensity,
 		systemPrompt: result.publicSystemPrompt,
-		userMessage: result.publicUserMessage
+		userMessage: result.publicUserMessage,
+		model: result.model
 	};
 
 	return json(response);
|
||||
Reference in New Issue
Block a user