"use client";
import { FormEvent, useCallback, useEffect, useMemo, useState } from "react";
import {
getActiveModel,
getDefaultObjective5ModelOptions,
getDefaultObjective5Settings,
isLocalEndpoint,
LAB2_CHAT_STORAGE_KEY,
LAB2_CUSTOM_MODEL_VALUE,
LAB2_DEFAULT_ENDPOINT,
type Objective5ModelOption,
type Objective5Metrics,
type Objective5Message,
type Objective5RenderMode,
svgToDataUrl,
} from "~/lib/lab2-chat";
// One entry in the on-screen transcript: the wire-level message
// (Objective5Message) plus UI-only bookkeeping — a stable React key (`id`),
// how to render the turn, an optional SVG payload, per-turn error text,
// and (for assistant turns) which model produced it.
type ChatTurn = Objective5Message & {
  error?: string;
  id: string;
  metrics?: Objective5Metrics | null;
  model?: string;
  renderMode: Objective5RenderMode;
  svg?: string;
};
// Shape of a successful response body from POST /api/lab2/chat.
// `error` can be present even on a 2xx response (it is copied onto the
// assistant turn below), so it represents a partial/soft failure rather
// than a transport error.
type ChatApiSuccess = {
  content: string;
  error?: string;
  metrics?: Objective5Metrics | null;
  renderMode: Objective5RenderMode;
  role: "assistant";
  svg?: string;
};
// Canned prompts surfaced as one-click suggestions; index 1 is also used
// as the initial draft value in the component below.
const starterPrompts = [
  "Draw a pelican riding a bicycle.",
  "Draw a raccoon conducting an orchestra with a baguette baton.",
  "Draw a capybara skateboarding through a volcano museum.",
] as const;
/**
 * Format a numeric metric for display: whole numbers render without a
 * decimal point, everything else is rounded to one decimal place. The
 * optional suffix (e.g. " ms") is appended verbatim.
 */
function formatMetricValue(value: number, suffix = "") {
  const rendered = Number.isInteger(value) ? String(value) : value.toFixed(1);
  return `${rendered}${suffix}`;
}
/**
 * Build the displayable list of per-turn performance metrics.
 * Returns null when metrics are absent or when none of the known numeric
 * fields are set, so the caller can skip rendering entirely.
 *
 * NOTE(review): the JSX wrapper elements in the return statement appear
 * to have been stripped from this copy of the file — `return ( {…} )`
 * is not valid syntax on its own. Restore the markup from the original
 * source; do not treat the current shape as intentional.
 */
function renderMetrics(metrics: Objective5Metrics | null | undefined) {
  if (!metrics) return null;
  // Each candidate label is included only when its field is actually a
  // number; absent fields collapse to null and are filtered out below.
  const metricItems = [
    typeof metrics.tokensPerSecond === "number"
      ? `Tokens/sec ${formatMetricValue(metrics.tokensPerSecond)}`
      : null,
    typeof metrics.completionTokens === "number"
      ? `Output tokens ${formatMetricValue(metrics.completionTokens)}`
      : null,
    typeof metrics.promptTokens === "number"
      ? `Prompt tokens ${formatMetricValue(metrics.promptTokens)}`
      : null,
    typeof metrics.evalDurationMs === "number"
      ? `Eval ${formatMetricValue(metrics.evalDurationMs, " ms")}`
      : null,
    typeof metrics.totalDurationMs === "number"
      ? `Total ${formatMetricValue(metrics.totalDurationMs, " ms")}`
      : null,
  ].filter(Boolean);
  if (metricItems.length === 0) {
    return null;
  }
  return (
    {metricItems.map((item) => (
      {item}
    ))}
  );
}
/**
 * Create a unique-enough id for a transcript turn by combining the
 * current timestamp with a short random base-36 token. Used as the
 * React key for each ChatTurn.
 */
function buildTurnId() {
  const randomToken = Math.random().toString(36).slice(2, 8);
  return ["turn", Date.now(), randomToken].join("-");
}
/**
 * Strip UI-only bookkeeping from transcript turns, keeping just the
 * { content, role } pairs the chat API expects in its request body.
 */
function toApiConversation(messages: ChatTurn[]) {
  return messages.map((turn) => ({
    content: turn.content,
    role: turn.role,
  }));
}
/**
 * Objective 5 lab widget: a chat client for comparing qualitative output
 * across quantized models behind a configurable endpoint.
 *
 * - Endpoint / API key / model settings are restored from and persisted
 *   to localStorage under LAB2_CHAT_STORAGE_KEY.
 * - The model list is refreshed from POST /api/lab2/models whenever the
 *   endpoint/key combination looks usable.
 * - Prompts are sent (with the full transcript) to POST /api/lab2/chat;
 *   assistant turns may carry metrics and an SVG payload.
 *
 * NOTE(review): the JSX markup and the generic type arguments on the
 * useState/Partial calls appear to have been stripped from this copy of
 * the file — the return statement below is not valid syntax as-is and
 * must be restored from the original source.
 */
export function Objective5Chat() {
  // Memoized once so the effect dependency lists below stay stable.
  const defaults = useMemo(() => getDefaultObjective5Settings(), []);
  const defaultModelOptions = useMemo(() => getDefaultObjective5ModelOptions(), []);
  // Connection settings (persisted after the initial load completes).
  const [endpoint, setEndpoint] = useState(defaults.endpoint);
  const [apiKey, setApiKey] = useState(defaults.apiKey);
  const [selectedModel, setSelectedModel] = useState(defaults.selectedModel);
  const [customModel, setCustomModel] = useState(defaults.customModel);
  // Prompt draft is pre-filled with the second starter prompt.
  const [draft, setDraft] = useState(starterPrompts[1]);
  // Transcript of ChatTurn entries (generic argument stripped — see note above).
  const [messages, setMessages] = useState([]);
  const [modelOptions, setModelOptions] =
    useState(defaultModelOptions);
  const [modelError, setModelError] = useState(null);
  const [isRefreshingModels, setIsRefreshingModels] = useState(false);
  const [error, setError] = useState(null);
  const [isSubmitting, setIsSubmitting] = useState(false);
  // Gates the persistence effect so saved settings are never overwritten
  // before the restore-on-mount effect has run.
  const [hasLoadedSettings, setHasLoadedSettings] = useState(false);
  // Resolve the dropdown selection plus the custom-model text field into
  // the model name actually sent to the API.
  const activeModel = getActiveModel(selectedModel, customModel);
  // Restore persisted settings on mount; corrupt JSON clears the key.
  useEffect(() => {
    try {
      const savedSettings = window.localStorage.getItem(LAB2_CHAT_STORAGE_KEY);
      if (!savedSettings) {
        setHasLoadedSettings(true);
        return;
      }
      // NOTE(review): the type argument on Partial was lost — presumably
      // a Partial of the persisted settings shape; confirm against the
      // original source.
      const parsed = JSON.parse(savedSettings) as Partial;
      // Empty/whitespace-only saved values fall back to defaults.
      setEndpoint(parsed.endpoint?.trim() || LAB2_DEFAULT_ENDPOINT);
      setApiKey(parsed.apiKey ?? "");
      setSelectedModel(parsed.selectedModel?.trim() || defaults.selectedModel);
      setCustomModel(parsed.customModel?.trim() || "");
    } catch {
      // Unparseable payload: discard it rather than failing every mount.
      window.localStorage.removeItem(LAB2_CHAT_STORAGE_KEY);
    } finally {
      setHasLoadedSettings(true);
    }
  }, [defaults.selectedModel]);
  // Persist settings whenever they change (only after the restore above).
  useEffect(() => {
    if (!hasLoadedSettings) return;
    window.localStorage.setItem(
      LAB2_CHAT_STORAGE_KEY,
      JSON.stringify({
        apiKey,
        customModel,
        endpoint,
        selectedModel,
      }),
    );
  }, [apiKey, customModel, endpoint, hasLoadedSettings, selectedModel]);
  // Fetch the model list for the current endpoint. Remote endpoints
  // require an API key; local ones (per isLocalEndpoint) do not.
  const refreshModels = useCallback(async () => {
    const trimmedEndpoint = endpoint.trim();
    const trimmedKey = apiKey.trim();
    if (!trimmedEndpoint) {
      setModelError("Enter an endpoint before refreshing models.");
      return;
    }
    if (!trimmedKey && !isLocalEndpoint(trimmedEndpoint)) {
      setModelError("Enter an API key before refreshing remote models.");
      return;
    }
    setIsRefreshingModels(true);
    setModelError(null);
    try {
      const response = await fetch("/api/lab2/models", {
        body: JSON.stringify({
          apiKey: trimmedKey,
          endpoint: trimmedEndpoint,
        }),
        headers: {
          "Content-Type": "application/json",
        },
        method: "POST",
      });
      const payload = (await response.json()) as {
        error?: string;
        models?: Objective5ModelOption[];
      };
      if (!response.ok) {
        throw new Error(payload.error || "Could not load models.");
      }
      const nextOptions = Array.isArray(payload.models) ? payload.models : [];
      // Always keep the "Custom model" sentinel in the dropdown.
      const optionsWithCustom = ensureCustomOption(nextOptions);
      setModelOptions(optionsWithCustom);
      // Preserve the current selection when it is still available (or is
      // the custom sentinel); otherwise fall back to the first option.
      setSelectedModel((currentModel) => {
        if (
          currentModel === LAB2_CUSTOM_MODEL_VALUE ||
          optionsWithCustom.some((option) => option.value === currentModel)
        ) {
          return currentModel;
        }
        return optionsWithCustom[0]?.value ?? currentModel;
      });
    } catch (caughtError) {
      setModelError(
        caughtError instanceof Error
          ? caughtError.message
          : "Could not load models.",
      );
    } finally {
      setIsRefreshingModels(false);
    }
  }, [apiKey, endpoint]);
  // Auto-refresh the model list once settings are loaded and the
  // endpoint/key combination passes the same preconditions refreshModels
  // enforces (avoids a guaranteed-error call on mount).
  useEffect(() => {
    if (!hasLoadedSettings) return;
    if (!endpoint.trim()) return;
    if (!apiKey.trim() && !isLocalEndpoint(endpoint.trim())) return;
    void refreshModels();
  }, [apiKey, endpoint, hasLoadedSettings, refreshModels]);
  // Validate inputs, append the user turn optimistically, then POST the
  // whole conversation to the chat API and append the assistant turn.
  async function handleSubmit(event: FormEvent) {
    event.preventDefault();
    const prompt = draft.trim();
    const trimmedEndpoint = endpoint.trim();
    const trimmedKey = apiKey.trim();
    if (!trimmedEndpoint) {
      setError("Enter the model endpoint before sending a prompt.");
      return;
    }
    if (!trimmedKey && !isLocalEndpoint(trimmedEndpoint)) {
      setError("Enter an API key before sending a prompt to a remote endpoint.");
      return;
    }
    if (!activeModel) {
      setError("Choose one of the quantized models or enter a custom model name.");
      return;
    }
    if (!prompt) {
      setError("Enter a prompt to compare qualitative output.");
      return;
    }
    const nextUserTurn: ChatTurn = {
      content: prompt,
      id: buildTurnId(),
      renderMode: "text",
      role: "user",
    };
    // The user turn is shown immediately; on failure the transcript keeps
    // it and the error is surfaced separately via setError.
    const nextConversation = [...messages, nextUserTurn];
    setMessages(nextConversation);
    setDraft("");
    setError(null);
    setIsSubmitting(true);
    try {
      const response = await fetch("/api/lab2/chat", {
        body: JSON.stringify({
          apiKey: trimmedKey,
          endpoint: trimmedEndpoint,
          messages: toApiConversation(nextConversation),
          model: activeModel,
        }),
        headers: {
          "Content-Type": "application/json",
        },
        method: "POST",
      });
      const payload = (await response.json()) as ChatApiSuccess & {
        error?: string;
      };
      if (!response.ok) {
        throw new Error(payload.error || "The chat request failed.");
      }
      const assistantTurn: ChatTurn = {
        content: payload.content,
        // A soft error can accompany a 2xx response; keep it on the turn.
        error: payload.error,
        id: buildTurnId(),
        metrics: payload.metrics,
        model: activeModel,
        renderMode: payload.renderMode,
        role: "assistant",
        svg: payload.svg,
      };
      // Functional update: messages may have changed while awaiting.
      setMessages((currentMessages) => [...currentMessages, assistantTurn]);
    } catch (caughtError) {
      setError(
        caughtError instanceof Error
          ? caughtError.message
          : "The chat request failed.",
      );
    } finally {
      setIsSubmitting(false);
    }
  }
  // NOTE(review): the JSX element tags in this return statement were
  // stripped during extraction — only the text content and embedded
  // expressions remain. Restore the markup from the original source.
  return (
    Objective 5 Lab Widget
    Compare qualitative output with a hosted chat endpoint
    Switch between quantized models, reuse the same prompt, and ask for
    text or simple SVG sketches like{" "}
    Draw a pelican riding a bicycle.
    Settings stay in your browser for this lab only. Available models are
    refreshed from the configured endpoint, and changing the model does not
    clear the transcript.
    {modelError ? (
      {modelError}
    ) : null}
    {starterPrompts.map((prompt) => (
      setDraft(prompt)}
      type="button"
      >
      {prompt}
    ))}
    {messages.length === 0 ? (
      Try the same prompt on each model.
      Start with one of the suggested prompts, then switch the model and
      send the same question again to compare coherence and SVG fidelity.
    ) : (
      messages.map((message) => {
        const svgDataUrl =
          message.renderMode === "svg" && message.svg
            ? svgToDataUrl(message.svg)
            : null;
        return (
          {message.role === "user" ? "You" : "Assistant"}
          {message.model ? {message.model} : null}
          {message.renderMode === "svg" && svgDataUrl ? (
            {/* eslint-disable-next-line @next/next/no-img-element */}
            View SVG source
            {message.svg}
          ) : (
            {message.content}
          )}
          {message.role === "assistant" ? renderMetrics(message.metrics) : null}
          {message.error ? (
            {message.error}
          ) : null}
        );
      })
    )}
  );
}
/**
 * Guarantee the model option list contains the "Custom model" sentinel
 * entry (LAB2_CUSTOM_MODEL_VALUE), appending it when absent. The input
 * array is returned untouched when the sentinel is already present and
 * is never mutated.
 */
function ensureCustomOption(modelOptions: Objective5ModelOption[]) {
  const hasCustom = modelOptions.some(
    (option) => option.value === LAB2_CUSTOM_MODEL_VALUE,
  );
  if (hasCustom) {
    return modelOptions;
  }
  return modelOptions.concat({
    label: "Custom model",
    value: LAB2_CUSTOM_MODEL_VALUE,
  });
}