feat: show model name below conversion result
Add muted 'Responded by {model}' line below the output text so the
user knows which LLM produced the result. The model name comes from
the server-side LLM config (OPENAI_MODEL env var, default: llama3)
and is passed through the API response.
This commit is contained in:
@@ -21,6 +21,7 @@ export interface ConvertResult {
   converted: string;
   publicSystemPrompt: string;
   publicUserMessage: string;
+  model: string;
 }

 const INPUT_TAG_START = '###### USER INPUT START ######';
@@ -102,6 +103,7 @@ export async function convertText(
   return {
     converted,
     publicSystemPrompt: buildPublicSystemPrompt(styleModifier, intensityInstruction),
-    publicUserMessage: text
+    publicUserMessage: text,
+    model: merged.model
   };
 }
@@ -24,6 +24,7 @@ export interface ConversionResponse {
   intensity: number;
   systemPrompt: string;
   userMessage: string;
+  model: string;
 }

 export interface LLMConfig {
Reference in New Issue
Block a user