@abdusco
Last active March 25, 2025 15:25
LLM chat interface: a single-file HTML page that streams chat completions from the OpenAI, Anthropic, and Google Gemini APIs directly in the browser. API keys and defaults are supplied through window.env.
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Cet ci pi ti</title>
<script src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"></script>
<style>
:root {
/* Light theme variables */
--bg-primary: #ffffff;
--bg-secondary: hsl(220, 20%, 90%);
--bg-user-message: hsl(220, 58%, 80%);
--bg-code: #f8f8f8;
--text-primary: #2c3e50;
--text-user: hsl(220, 78%, 22%);
--border-color: #e1e1e1;
}
@media (prefers-color-scheme: dark) {
:root {
/* Dark theme variables */
--bg-primary: #1a1a1a;
--bg-secondary: hsl(220, 20%, 15%);
--bg-user-message: hsl(220, 30%, 25%);
--bg-code: #2d2d2d;
--text-primary: #e1e1e1;
--text-user: hsl(220, 58%, 80%);
--border-color: #404040;
}
}
*,
*::before,
*::after {
box-sizing: border-box;
font-family: inherit;
}
body {
margin: 0;
font-family: menlo, consolas, monospace;
line-height: 1.6;
font-size: 14px;
background-color: var(--bg-primary);
color: var(--text-primary);
}
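/* Layout: messages fill the top row; the prompt and Send button share the bottom row. */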
.chat-container {
height: 100vh;
display: grid;
grid-template-columns: 1fr min-content;
grid-template-rows: 1fr min-content;
gap: 1rem;
padding: 1rem;
grid-template-areas:
"messages messages"
"prompt submit";
}
.messages {
grid-area: messages;
overflow-y: auto;
padding-right: 0.5rem;
}
.message {
padding: 0.8rem 1.2rem;
border-radius: 0.8rem;
max-width: 85%;
}
textarea,
button {
font-size: inherit;
}
.message + .message {
margin-top: 1rem;
}
.user-message {
color: var(--text-user);
background: var(--bg-user-message);
margin-left: auto;
}
.system-message {
background: var(--bg-secondary);
margin-right: auto;
}
.system-message pre,
.system-message code {
background: var(--bg-code);
padding: 0.5rem;
border-radius: 0.4rem;
white-space: pre;
overflow-x: auto;
}
.system-message code {
padding: 2px 4px;
}
.prompt {
grid-area: prompt;
min-height: 40px;
padding: 0.8rem;
border: 1px solid var(--border-color);
border-radius: 0.4rem;
background: var(--bg-primary);
color: var(--text-primary);
resize: vertical;
}
.submit {
grid-area: submit;
cursor: pointer;
padding: 0.8rem 1.6rem;
background: var(--bg-secondary);
color: var(--text-primary);
border: 1px solid var(--border-color);
border-radius: 0.4rem;
transition: background-color 0.2s ease;
}
.submit:hover {
background: var(--bg-user-message);
}
/* Scrollbar styling */
.messages::-webkit-scrollbar {
width: 4px;
}
.messages::-webkit-scrollbar-track {
background: var(--bg-primary);
}
.messages::-webkit-scrollbar-thumb {
background: var(--border-color);
border-radius: 4px;
}
/* Focus styles */
.prompt:focus,
.submit:focus {
outline: none;
border-color: var(--bg-user-message);
}
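/* Anchor positioning: pin the model selector above the Send button.
Requires a browser that supports CSS anchor positioning (anchor-name, position-anchor, anchor()). */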
.submit {
anchor-name: --submit;
}
.model-selector {
position: absolute;
position-anchor: --submit;
bottom: anchor(top);
right: anchor(right);
}
.model-opt__label {
display: block;
}
details {
cursor: pointer;
}
</style>
</head>
<body>
<div id="app">
<div class="chat-container">
<div id="messages" class="messages"></div>
<textarea rows="5" id="prompt" class="prompt" placeholder="Type your message..."></textarea>
<button id="submit" class="submit">Send</button>
<details class="model-selector">
<summary>Model</summary>
<label class="model-opt__label">
<input type="radio" name="model" value="gpt-4o" checked />
GPT-4o
</label>
<label class="model-opt__label">
<input type="radio" name="model" value="gpt-4o-mini" />
GPT-4o mini
</label>
<label class="model-opt__label">
<input type="radio" name="model" value="claude-3-7-sonnet-20250219" />
Claude 3.7 Sonnet
</label>
<label class="model-opt__label">
<input type="radio" name="model" value="gemini-2.0-flash-lite" />
Gemini 2.0 Flash Lite
</label>
</details>
</div>
</div>
<script>
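// The host page is expected to define `window.env` before this script runs, e.g.:
// window.env = {
//   MODEL: "gpt-4o",              // optional: preselects a model radio button
//   PROMPT: "",                   // optional: pre-fills the prompt textarea
//   OPENAI_API_KEY: "sk-...",     // placeholders, not real keys
//   ANTHROPIC_API_KEY: "sk-ant-...",
//   GEMINI_API_KEY: "...",
// };
// Only the keys for the providers you actually use are required; requests to a
// provider without a key will simply fail.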
window.env ??= {};
let abortController = null;
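// Conversation history shared by every provider call; seeded with the system prompt
// and appended to after each user prompt and assistant reply.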
const messages = [{ role: "system", content: "You are a helpful AI assistant. Do not preach about implications." }];
const $messages = document.getElementById("messages");
const $prompt = document.getElementById("prompt");
const $submit = document.getElementById("submit");
document.querySelectorAll("[name=model]").forEach((modelOpt) => {
if (modelOpt.value === window.env.MODEL) {
modelOpt.checked = true;
}
});
$submit.addEventListener("click", () => {
if (abortController) {
abortController.abort();
} else {
handleSubmit();
}
});
$prompt.addEventListener("keydown", (e) => {
if (e.key === "Enter" && (e.ctrlKey || e.metaKey)) {
e.preventDefault();
if (!abortController) {
handleSubmit();
}
}
});
const injectedPrompt = window.env.PROMPT?.trim() ?? "";
if (injectedPrompt) {
$prompt.value = injectedPrompt + "\n\n";
}
$prompt.focus();
async function handleSubmit() {
const prompt = $prompt.value.trim();
if (!prompt) return;
// Add user message
const userMessage = createMessageElement(prompt, true);
$messages.appendChild(userMessage);
messages.push({ role: "user", content: prompt });
// Prepare for system response
const systemMessage = createMessageElement("", false);
$messages.appendChild(systemMessage);
let fullResponse = "";
// Clear input and scroll
$prompt.value = "";
scrollToBottom($messages);
// Update button state
$submit.textContent = "Cancel";
abortController = new AbortController();
const pickedModel = document.querySelector("[name=model]:checked")?.value ?? window.env.MODEL;
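// Dispatch table: maps each selectable model id (plus the legacy "chatgpt"/"claude"
// aliases) to a provider-specific streaming function with the matching API key.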
const chat = {
chatgpt: (params) => chatWithOpenAI({ ...params, apiKey: window.env.OPENAI_API_KEY, model: "gpt-4o" }),
claude: (params) => chatWithClaude({ ...params, apiKey: window.env.ANTHROPIC_API_KEY, model: "claude-3-5-sonnet-20241022" }),
"gpt-4o": (params) => chatWithOpenAI({ ...params, apiKey: window.env.OPENAI_API_KEY, model: "gpt-4o" }),
"gpt-4o-mini": (params) => chatWithOpenAI({ ...params, apiKey: window.env.OPENAI_API_KEY, model: "gpt-4o-mini" }),
"gemini-1.5-flash-8b-latest": (params) => chatWithGemini({ ...params, apiKey: window.env.GEMINI_API_KEY, model: "gemini-1.5-flash-8b-latest" }),
"gemini-2.0-flash-lite": (params) => chatWithGemini({ ...params, apiKey: window.env.GEMINI_API_KEY, model: "gemini-2.0-flash-lite" }),
"claude-3-5-sonnet-20241022": (params) => chatWithClaude({ ...params, apiKey: window.env.ANTHROPIC_API_KEY, model: "claude-3-5-sonnet-20241022" }),
"claude-3-7-sonnet-20250219": (params) => chatWithClaude({ ...params, apiKey: window.env.ANTHROPIC_API_KEY, model: "claude-3-7-sonnet-20250219" }),
}[pickedModel];
if (!chat) {
console.error(`Unknown model: ${pickedModel}`);
// Reset UI state so the next click starts a new request instead of aborting.
$submit.textContent = "Send";
abortController = null;
return;
}
try {
await chat({
onContent: (content) => {
fullResponse += content;
updateMessageContent(systemMessage, fullResponse);
scrollToBottom($messages);
},
messages,
signal: abortController.signal,
});
if (fullResponse.trim() !== "") {
messages.push({ role: "assistant", content: fullResponse });
}
} catch (err) {
if (err.name === "AbortError") {
updateMessageContent(systemMessage, fullResponse + "\n[Cancelled]");
} else {
updateMessageContent(systemMessage, fullResponse + "\n[Error: " + err.message + "]");
}
} finally {
$submit.textContent = "Send";
abortController = null;
}
}
function createMessageElement(content, isUser) {
const $el = document.createElement("div");
$el.className = `message ${isUser ? "user-message" : "system-message"}`;
// Note: recent releases of marked have deprecated/removed the `sanitize` option,
// so model output is effectively rendered without sanitization here.
$el.innerHTML = marked.parse(content, { sanitize: true });
return $el;
}
function updateMessageContent($el, markdown) {
$el.innerHTML = marked.parse(markdown, { sanitize: true });
}
function scrollToBottom($el) {
$el.scrollTop = $el.scrollHeight;
}
async function chatWithOpenAI({ onContent, messages, signal, apiKey, model }) {
const payload = {
model,
messages,
stream: true,
};
const response = await fetch("https://api.openai.com/v1/chat/completions", {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${apiKey}`,
},
body: JSON.stringify(payload),
signal,
});
const reader = response.body.getReader();
const decoder = new TextDecoder();
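// OpenAI streams Server-Sent Events: each read may contain several "data: {json}"
// events separated by blank lines, terminated by "data: [DONE]". An event split
// across two reads fails JSON.parse and is silently skipped by the catch below.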
while (true) {
const { done, value } = await reader.read();
if (done) break;
const text = decoder.decode(value, { stream: true });
const lines = text
.split("\n\n")
.filter((line) => !!line.trim())
.map((line) => line.replace("data: ", ""));
for (const line of lines) {
if (line === "[DONE]") return;
try {
const parsed = JSON.parse(line);
const content = parsed.choices[0].delta.content;
if (content) {
await onContent(content);
}
} catch (e) {}
}
}
}
async function chatWithClaude({ onContent, messages, signal, apiKey, model }) {
const payload = {
model,
messages,
stream: true,
max_tokens: 2048,
};
const systemPrompt = messages.find((message) => message.role === "system")?.content;
if (systemPrompt) {
payload.messages = messages.filter((message) => message.role !== "system");
payload.system = systemPrompt;
}
const response = await fetch("https://api.anthropic.com/v1/messages", {
method: "POST",
headers: {
"Content-Type": "application/json",
"x-api-key": apiKey,
"anthropic-version": "2023-06-01",
"anthropic-dangerous-direct-browser-access": "true",
},
body: JSON.stringify(payload),
signal,
});
const reader = response.body.getReader();
const decoder = new TextDecoder();
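// Anthropic streams SSE as "event: <type>" / "data: {json}" line pairs; the text
// deltas arrive in content_block_delta events.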
while (true) {
const { done, value } = await reader.read();
if (done) break;
const text = decoder.decode(value, { stream: true });
const lines = text
.split("\n")
.filter((line) => line.startsWith("data: "))
.map((line) => line.replace("data: ", ""));
for (const line of lines) {
if (line === "" || line === "[DONE]") continue;
try {
const parsed = JSON.parse(line);
if (parsed.type === "content_block_delta") {
const content = parsed.delta?.text;
if (content) {
await onContent(content);
}
}
} catch (e) {
console.error("Error parsing line:", e);
}
}
}
}
async function chatWithGemini({ onContent, messages, signal, apiKey, model = "gemini-1.5-flash-8b" }) {
const payload = {
contents: messages.map((msg) => ({
// Gemini only accepts "user" and "model" roles, so the system prompt becomes a
// user turn and assistant turns are sent as "model".
role: msg.role === "assistant" ? "model" : "user",
parts: [{ text: msg.content }],
})),
generationConfig: {
temperature: 0,
},
safetySettings: [
{ category: "HARM_CATEGORY_DANGEROUS_CONTENT", threshold: "BLOCK_NONE" },
{ category: "HARM_CATEGORY_HARASSMENT", threshold: "BLOCK_NONE" },
{ category: "HARM_CATEGORY_HATE_SPEECH", threshold: "BLOCK_NONE" },
{ category: "HARM_CATEGORY_SEXUALLY_EXPLICIT", threshold: "BLOCK_NONE" },
],
};
const response = await fetch(`https://generativelanguage.googleapis.com/v1beta/models/${model}:streamGenerateContent?alt=sse`, {
method: "POST",
headers: {
"Content-Type": "application/json",
"x-goog-api-key": apiKey,
},
body: JSON.stringify(payload),
signal,
});
if (!response.ok) {
throw new Error(`Gemini API error ${response.status}: ${await response.text()}`);
}
const reader = response.body.getReader();
const decoder = new TextDecoder();
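// With ?alt=sse Gemini also streams "data: {json}" lines; each chunk carries the
// next text fragment under candidates[0].content.parts[0].text.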
while (true) {
const { done, value } = await reader.read();
if (done) break;
const text = decoder.decode(value, { stream: true });
const lines = text
.split("\n")
.filter((line) => line.trim())
.map((line) => line.replace("data: ", ""));
for (const line of lines) {
if (line === "") continue;
try {
const parsed = JSON.parse(line);
if (parsed.candidates?.[0]?.content?.parts?.[0]?.text) {
const content = parsed.candidates[0].content.parts[0].text;
if (content) {
await onContent(content);
}
}
} catch (e) {
console.error("Error parsing line:", e);
}
}
}
}
</script>
</body>
</html>