Compare commits

...

2 Commits

Author SHA1 Message Date
c4ch3c4d3 8626b3d1db Add terminal link to site header 2026-04-27 09:15:50 -06:00
c4ch3c4d3 fd77d6ee1e Polish lab link buttons 2026-04-27 09:11:45 -06:00
8 changed files with 144 additions and 18 deletions
+3 -3
View File
@@ -60,7 +60,7 @@ The projects original goal was to make LLaMA models accessible on systems wit
[Hugging Face](https://huggingface.co) is the “GitHub” for LLMs, datasets, and more. The following steps walk you through locating Meta's **LLaMA 3.2 1B** model card and its files.
1. **Open the LLaMA 3.2 1B page**
<https://huggingface.co/meta-llama/Llama-3.2-1B>
<a class="lab-open-pill" href="https://huggingface.co/meta-llama/Llama-3.2-1B" target="_blank" rel="noreferrer">LLaMA-3.2-1B on Hugging Face</a>
<br>
2. **Read the model card** note the description, license, tags (e.g., _Text Generation_, _SafeTensors_, _PyTorch_), and links to finetunes/quantizations.
<br>
@@ -104,7 +104,7 @@ For this lab we will work with **WhiteRabbitNeoV37B**, a cybersecurityo
### 1. Locate & download the model
1. Go to <https://huggingface.co/WhiteRabbitNeo/WhiteRabbitNeo-V3-7B>.
1. Go to <a class="lab-open-pill" href="https://huggingface.co/WhiteRabbitNeo/WhiteRabbitNeo-V3-7B" target="_blank" rel="noreferrer">WhiteRabbitNeo-V3-7B on Hugging Face</a>.
2. Points of interest on this model card:
1. This model appears to be a fine tune of **Qwen2.5-Coder-7B**
2. This model is openly licensed, and does not have any requirements to download and use for our purposes.
@@ -237,7 +237,7 @@ Ollama is a lightweight framework that hides the lowlevel steps required by L
Let's start by downloading Meta's llama3.2-3b, the "big" brother to the small model we've continuously worked with so far. The Ollama project and community have made this exceptionally easy for us to accomplish.
1. **Open the Ollama registry** visit <https://ollama.com> in your browser.
1. **Open the Ollama registry** visit <a class="lab-open-pill" href="https://ollama.com" target="_blank" rel="noreferrer">Ollama registry</a> in your browser.
2. **Search for the model**
<figure style="text-align: center;">
+1 -1
View File
@@ -95,7 +95,7 @@ Locate, pull, and run **Qwen3.5 4B** using the **OpenWebUI**. By default, Ope
- Click the **copy-to-clipboard** icon next to the tag (or highlight the text and press **Ctrl+C**).
6. **Open the OpenWebUI interface**
- In a new browser tab, navigate to the URL where your OpenWebUI instance is running (e.g., `http://localhost:8080`).
- In a new browser tab, navigate to {{service-url:open-webui}}.
7. **Pull the model through the UI**
- In the **“Select a model”** dropdown, paste the copied tag into the text field.
+1 -1
View File
@@ -33,7 +33,7 @@ Before we install any harness, we need a key that lets the harness call the same
### Execute: Sign in to Open WebUI
1. Navigate to `{{service-url:open-webui}}`.
1. Navigate to {{service-url:open-webui}}.
2. Sign in with the same account you used in Lab 4, or the credentials supplied by your instructor.
3. Confirm that you can reach the normal chat screen before continuing.
+3
View File
@@ -1,6 +1,8 @@
import Image from "next/image";
import Link from "next/link";
import { TerminalNavLink } from "~/components/TerminalNavLink";
export function SiteHeader() {
return (
<header className="sticky top-0 z-20 border-b border-[#f8c27a] bg-white/95 shadow-sm backdrop-blur">
@@ -16,6 +18,7 @@ export function SiteHeader() {
<Link href="/labs" className="hover:text-[#F89C27]">
Labs
</Link>
<TerminalNavLink />
<Link
href="https://discord.gg/Ma9UZNBxvh"
className="rounded-md border border-[#F89C27] px-3 py-1.5 text-[#004E78] hover:bg-[#F89C27] hover:text-white"
+48
View File
@@ -0,0 +1,48 @@
import { render, screen, waitFor } from "@testing-library/react";
import { afterEach, describe, expect, it, vi } from "vitest";

import { TerminalNavLink } from "~/components/TerminalNavLink";
import {
  COURSEWARE_RUNTIME_CONFIG_PATH,
  LAB3_DEFAULT_TERMINAL_PATH,
} from "~/lib/courseware-runtime";

describe("TerminalNavLink", () => {
  afterEach(() => {
    vi.restoreAllMocks();
  });

  it("defaults to the same-origin WeTTY path", () => {
    // Simulate the runtime config being unreachable; the link must still
    // render with the built-in fallback path.
    vi.spyOn(globalThis, "fetch").mockRejectedValue(new Error("not found"));

    render(<TerminalNavLink />);

    expect(screen.getByRole("link", { name: "Terminal" })).toHaveAttribute(
      "href",
      LAB3_DEFAULT_TERMINAL_PATH,
    );
  });

  it("loads the terminal link from runtime config", async () => {
    // Single source of truth for the configured URL so the mock and the
    // assertion can never drift apart (previously the mock served
    // 127.0.0.1 while the assertion expected localhost).
    // NOTE(review): if fetchCoursewareRuntimeConfig intentionally rewrites
    // the host to the current origin, update this expectation — confirm
    // against that helper's implementation.
    const configuredTerminalUrl = "http://127.0.0.1:7681/wetty";
    const fetchMock = vi.spyOn(globalThis, "fetch").mockResolvedValue(
      new Response(
        JSON.stringify({ lab3TerminalUrl: configuredTerminalUrl }),
        { status: 200 },
      ),
    );

    render(<TerminalNavLink />);

    // The component applies the config asynchronously in an effect, so wait
    // for the href to settle on the configured value.
    await waitFor(() => {
      expect(screen.getByRole("link", { name: "Terminal" })).toHaveAttribute(
        "href",
        configuredTerminalUrl,
      );
    });

    expect(fetchMock).toHaveBeenCalledWith(COURSEWARE_RUNTIME_CONFIG_PATH, {
      cache: "no-store",
    });
  });
});
+41
View File
@@ -0,0 +1,41 @@
"use client";

import { useEffect, useState } from "react";

import {
  LAB3_DEFAULT_TERMINAL_PATH,
  fetchCoursewareRuntimeConfig,
} from "~/lib/courseware-runtime";

/**
 * Top-nav link to the Lab 3 WeTTY terminal.
 *
 * Renders immediately with the same-origin default path, then upgrades the
 * href once the courseware runtime config has been fetched on the client.
 * Falls back to the default path when the config request fails or the
 * payload omits the terminal URL.
 */
export function TerminalNavLink() {
  const [terminalPath, setTerminalPath] = useState(LAB3_DEFAULT_TERMINAL_PATH);

  useEffect(() => {
    // Prevent a state update if the component unmounts before the fetch
    // resolves (avoids the React "update on unmounted component" warning).
    let isCancelled = false;

    void fetchCoursewareRuntimeConfig()
      .then((runtimeConfig) => {
        if (isCancelled) return;
        // Guard against a config payload without a terminal URL so the
        // link never renders with an undefined href.
        setTerminalPath(
          runtimeConfig.lab3TerminalUrl ?? LAB3_DEFAULT_TERMINAL_PATH,
        );
      })
      .catch(() => {
        if (isCancelled) return;
        setTerminalPath(LAB3_DEFAULT_TERMINAL_PATH);
      });

    return () => {
      isCancelled = true;
    };
  }, []);

  return (
    <a
      className="hover:text-[#F89C27]"
      href={terminalPath}
      rel="noreferrer"
      target="_blank"
    >
      Terminal
    </a>
  );
}
+41 -10
View File
@@ -138,6 +138,38 @@ describe("LabContent", () => {
expect(link).toHaveClass("lab-service-pill");
});
// Verifies that hand-authored <a class="lab-open-pill"> anchors in the Lab 3
// markdown survive the micromark -> LabContent pipeline with their class,
// href, and accessible name intact.
it("renders Lab 3 browser targets as polished open buttons", async () => {
// Stub runtime-config resolution so LabContent can render without a network.
mockRuntimeConfig();
const lab = getLabDocument("lab-3-llama-cpp-and-ollama");
expect(lab).not.toBeNull();
// allowDangerousHtml keeps the raw <a> pills embedded in the markdown.
render(
<LabContent
className="lab-content"
html={micromark(lab?.content ?? "", { allowDangerousHtml: true })}
/>,
);
// findByRole waits for async rendering before the remaining sync queries.
const llamaLink = await screen.findByRole("link", {
name: "LLaMA-3.2-1B on Hugging Face",
});
expect(llamaLink).toHaveAttribute(
"href",
"https://huggingface.co/meta-llama/Llama-3.2-1B",
);
expect(llamaLink).toHaveClass("lab-open-pill");
expect(
screen.getByRole("link", {
name: "WhiteRabbitNeo-V3-7B on Hugging Face",
}),
).toHaveClass("lab-open-pill");
expect(
screen.getByRole("link", { name: "Ollama registry" }),
).toHaveClass("lab-open-pill");
});
it("keeps rendered service URL links after opening an image zoom modal", async () => {
mockRuntimeConfig();
@@ -238,16 +270,15 @@ describe("LabContent", () => {
/>,
);
expect(
await screen.findByRole("link", { name: "Open WebUI" }),
).toHaveAttribute("href", "https://lab.example/openwebui");
expect(screen.getByRole("link", { name: "Open WebUI" })).toHaveClass(
"lab-service-pill",
);
expect(screen.getByRole("link", { name: "Open WebUI" })).toHaveAttribute(
"title",
"https://lab.example/openwebui",
);
const openWebUiLinks = await screen.findAllByRole("link", {
name: "Open WebUI",
});
expect(openWebUiLinks).toHaveLength(2);
for (const link of openWebUiLinks) {
expect(link).toHaveAttribute("href", "https://lab.example/openwebui");
expect(link).toHaveClass("lab-service-pill");
expect(link).toHaveAttribute("title", "https://lab.example/openwebui");
}
const apiMatches = await screen.findAllByText(
"https://lab.example/openwebui/api",
);
+6 -3
View File
@@ -2051,7 +2051,8 @@ ol {
box-shadow: 0 12px 28px -22px rgba(15, 92, 139, 0.85);
}
.lab-content a.lab-service-pill {
.lab-content a.lab-service-pill,
.lab-content a.lab-open-pill {
display: inline-flex;
align-items: center;
gap: 0.45rem;
@@ -2072,7 +2073,8 @@ ol {
background-color 120ms ease;
}
.lab-content a.lab-service-pill::before {
.lab-content a.lab-service-pill::before,
.lab-content a.lab-open-pill::before {
content: "Open";
display: inline-flex;
align-items: center;
@@ -2086,7 +2088,8 @@ ol {
text-transform: uppercase;
}
.lab-content a.lab-service-pill:hover {
.lab-content a.lab-service-pill:hover,
.lab-content a.lab-open-pill:hover {
transform: translateY(-1px);
box-shadow: 0 12px 28px -22px rgba(15, 92, 139, 0.85);
}