Initial snapshot before transformerlab recovery

This commit is contained in:
Codex
2026-03-31 13:25:25 -06:00
commit d860318d43
49 changed files with 3864 additions and 0 deletions
+19
View File
@@ -0,0 +1,19 @@
# ChunkViz: clone a pinned commit, install npm dependencies, and produce a
# static production build. Idempotent via `update: false` and `creates:`.
- name: Clone ChunkViz
  git:
    repo: "https://github.com/gkamradt/ChunkViz"
    dest: "{{ courseware_repos_dir }}/ChunkViz"
    version: "{{ courseware_chunkviz_commit }}"
    # Never touch an existing checkout on reruns.
    update: false
- name: Install ChunkViz dependencies
  command: npm install
  args:
    chdir: "{{ courseware_repos_dir }}/ChunkViz"
    # Skip once node_modules exists.
    creates: "{{ courseware_repos_dir }}/ChunkViz/node_modules"
- name: Build ChunkViz
  command: npm run build
  args:
    chdir: "{{ courseware_repos_dir }}/ChunkViz"
    # Skip once a build artifact is present.
    creates: "{{ courseware_repos_dir }}/ChunkViz/build/index.html"
+21
View File
@@ -0,0 +1,21 @@
---
# Default variables for common role
# Base toolchain for Debian-family hosts: interpreters, VCS/download tools,
# and the C/C++ build stack (compilers, cmake/ninja, TLS headers) used by
# native components such as llama.cpp.
common_packages_debian:
  - python3
  - python3-pip
  - git
  - curl
  - wget
  - build-essential
  - cmake
  - ninja-build
  - libssl-dev
  - pkg-config
# macOS equivalent via Homebrew (the compiler toolchain comes from the
# Xcode Command Line Tools, so only cmake/ninja are listed here).
common_packages_macos:
  - python3
  - git
  - curl
  - cmake
  - ninja
+9
View File
@@ -0,0 +1,9 @@
#!/bin/sh
# Prepend ~/.local/bin to PATH exactly once. POSIX sh; safe to source
# repeatedly — the case guard makes it a no-op when already present.
case ":${PATH}:" in
  *":${HOME}/.local/bin:"*)
    # Already on PATH; nothing to do.
    ;;
  *)
    PATH="$HOME/.local/bin:$PATH"
    export PATH
    ;;
esac
+10
View File
@@ -0,0 +1,10 @@
---
# Handlers for common role
# Both handlers are purely informational: PATH edits land in .bashrc and
# only take effect in the user's next shell session, so there is nothing
# to restart here.
- name: Update PATH in shell config
  ansible.builtin.debug:
    msg: "PATH will be updated in next shell session"
- name: Shell updated
  ansible.builtin.debug:
    msg: "Shell configuration updated. Please restart your shell for changes to take effect."
+92
View File
@@ -0,0 +1,92 @@
---
# Common setup tasks - runs on all platforms.
# Installs the base build toolchain, creates the lab directory skeleton
# under {{ llmlab_base }}, and wires ~/.local/bin into the user's PATH.
- name: Ensure required system packages are installed (Debian/Ubuntu)
  ansible.builtin.apt:
    name:
      - python3
      - python3-pip
      - git
      - curl
      - wget
      - build-essential
      - cmake
      - ninja-build
      - libssl-dev
      - pkg-config
      - zstd
    state: present
    # Deliberately no cache refresh; assumes a reasonably fresh apt cache.
    update_cache: false
  when: ansible_os_family == "Debian"
  become: true

# Renamed: this task installs base packages *via* Homebrew; it does not
# install Homebrew itself (brew must already exist on the host).
- name: Install base packages via Homebrew (macOS)
  ansible.builtin.homebrew:
    name:
      - python3
      - git
      - curl
      - cmake
      - ninja
    state: present
  when: ansible_os_family == "Darwin"

- name: Install Python virtual environment module (user space)
  ansible.builtin.pip:
    name: virtualenv
    state: present
    executable: pip3
    # PEP 668: permit installing into externally-managed Debian Pythons.
    extra_args: "--break-system-packages"
  become: false
  when: ansible_os_family == "Debian"

- name: Create lab base directory structure
  ansible.builtin.file:
    path: "{{ item }}"
    state: directory
    mode: '0755'
  loop:
    - "{{ llmlab_base }}/lab1"
    - "{{ llmlab_base }}/lab2"
    - "{{ llmlab_base }}/lab3"
    - "{{ llmlab_base }}/lab4"
    - "{{ llmlab_base }}/lab5"
    - "{{ llmlab_base }}/lab6"
    - "{{ llmlab_base }}/.llmlab"
    - "{{ llmlab_base }}/.llmlab/logs"
  become: false

- name: Create .local/bin directory
  ansible.builtin.file:
    path: "{{ llmlab_base }}/.local/bin"
    state: directory
    mode: '0755'
  become: false

- name: Copy common environment script
  ansible.builtin.copy:
    src: "{{ playbook_dir }}/roles/common/files/env"
    dest: "{{ llmlab_base }}/.local/bin/env"
    mode: '0755'
    force: true
  notify: Update PATH in shell config

- name: Ensure .local/bin is in PATH
  ansible.builtin.lineinfile:
    path: "{{ llmlab_base }}/.bashrc"
    line: 'export PATH="$HOME/.local/bin:$PATH"'
    state: present
    insertafter: EOF
  notify: Shell updated

# NOTE: the separate "Create logs directory" task was removed — it exactly
# duplicated the ".llmlab/logs" entry in the directory loop above.
- name: Create setup log file
  ansible.builtin.file:
    path: "{{ llmlab_base }}/.llmlab/logs/setup.log"
    state: touch
    # Preserve timestamps so reruns do not report "changed" forever.
    modification_time: preserve
    access_time: preserve
    mode: '0644'
+90
View File
@@ -0,0 +1,90 @@
# Bootstrap the courseware-managed state tree and symlink the tool home
# directories (~/.transformerlab, ~/.unsloth) into it. Refuses to clobber
# pre-existing paths it does not own (detected via .courseware-managed
# marker files and symlink targets).
- name: Create managed state directories
  file:
    path: "{{ item }}"
    state: directory
    mode: "0755"
  loop:
    - "{{ courseware_state_dir }}"
    - "{{ courseware_markers_dir }}"
    - "{{ courseware_logs_dir }}"
    - "{{ courseware_run_dir }}"
    - "{{ courseware_repos_dir }}"
    - "{{ courseware_venvs_dir }}"
    - "{{ courseware_models_dir }}"
    - "{{ courseware_datasets_dir }}"
    - "{{ courseware_tools_dir }}"
    - "{{ courseware_apps_dir }}"
    - "{{ courseware_downloads_dir }}"
    - "{{ courseware_lab2_dir }}"
    - "{{ courseware_transformerlab_home }}"
    - "{{ courseware_unsloth_home }}"
    - "{{ courseware_ollama_models_dir }}"
# Markers declare "this tree belongs to the courseware" so later runs can
# distinguish our directories from a user's own installation.
- name: Seed managed ownership markers
  file:
    path: "{{ item }}"
    state: touch
    mode: "0644"
  loop:
    - "{{ courseware_transformerlab_home }}/.courseware-managed"
    - "{{ courseware_unsloth_home }}/.courseware-managed"
- name: Check existing TransformerLab path
  stat:
    path: "{{ ansible_env.HOME }}/.transformerlab"
    # Do not dereference: we need to know whether the path itself is a link.
    follow: false
  register: courseware_transformerlab_link
- name: Check existing TransformerLab ownership marker
  stat:
    path: "{{ ansible_env.HOME }}/.transformerlab/.courseware-managed"
  register: courseware_transformerlab_marker
# Abort only when the path exists, is neither our symlink nor marked as
# ours — i.e. it belongs to a pre-existing user installation.
- name: Fail if TransformerLab path is already occupied
  fail:
    msg: "{{ ansible_env.HOME }}/.transformerlab already exists and is not managed by this project."
  when:
    - courseware_transformerlab_link.stat.exists
    - >
      (
        (not courseware_transformerlab_link.stat.islnk) or
        (courseware_transformerlab_link.stat.islnk and
         courseware_transformerlab_link.stat.lnk_source != courseware_transformerlab_home)
      ) and
      (not courseware_transformerlab_marker.stat.exists)
- name: Link TransformerLab home into project state
  file:
    src: "{{ courseware_transformerlab_home }}"
    dest: "{{ ansible_env.HOME }}/.transformerlab"
    state: link
    # Safe to force: the fail-task above already vetoed unmanaged paths.
    force: true
- name: Check existing Unsloth path
  stat:
    path: "{{ ansible_env.HOME }}/.unsloth"
    follow: false
  register: courseware_unsloth_link
- name: Fail if Unsloth path is already occupied
  fail:
    msg: "{{ ansible_env.HOME }}/.unsloth already exists and is not managed by this project."
  when:
    - courseware_unsloth_link.stat.exists
    - >
      (not courseware_unsloth_link.stat.islnk) or
      (courseware_unsloth_link.stat.islnk and
       courseware_unsloth_link.stat.lnk_source != courseware_unsloth_home)
- name: Link Unsloth home into project state
  file:
    src: "{{ courseware_unsloth_home }}"
    dest: "{{ ansible_env.HOME }}/.unsloth"
    state: link
    force: true
- name: Write runtime environment file
  template:
    src: "{{ playbook_dir }}/../templates/runtime.env.j2"
    dest: "{{ courseware_state_dir }}/runtime.env"
    mode: "0644"
+25
View File
@@ -0,0 +1,25 @@
# Install the Kiln desktop app on Linux from a pinned GitHub release tag.
- name: Download Kiln Linux archive
  get_url:
    url: "https://github.com/Kiln-AI/Kiln/releases/download/{{ courseware_kiln_release_tag }}/Kiln.Linux.x64.zip"
    dest: "{{ courseware_downloads_dir }}/Kiln.Linux.x64.zip"
    mode: "0644"
- name: Create Kiln Linux directory
  file:
    path: "{{ courseware_apps_dir }}/kiln"
    state: directory
    mode: "0755"
- name: Unpack Kiln Linux binary
  unarchive:
    src: "{{ courseware_downloads_dir }}/Kiln.Linux.x64.zip"
    dest: "{{ courseware_apps_dir }}/kiln"
    # The archive already lives on the managed host, not the controller.
    remote_src: true
    # Skip the unpack once the binary exists.
    creates: "{{ courseware_apps_dir }}/kiln/Kiln"
- name: Ensure Kiln Linux binary is executable
  file:
    path: "{{ courseware_apps_dir }}/kiln/Kiln"
    mode: "0755"
    state: file
+19
View File
@@ -0,0 +1,19 @@
# Install Kiln.app on macOS: download the Apple Silicon DMG, mount it to a
# temp mountpoint, copy the .app bundle into the courseware apps dir, then
# detach and clean up. Skipped entirely once Kiln.app exists (creates:).
- name: Download Kiln macOS disk image
  get_url:
    url: "https://github.com/Kiln-AI/Kiln/releases/download/{{ courseware_kiln_release_tag }}/Kiln.MacOS.AppleSilicon.M-Processor.dmg"
    dest: "{{ courseware_downloads_dir }}/Kiln.MacOS.AppleSilicon.M-Processor.dmg"
    mode: "0644"
- name: Install Kiln.app into project state
  shell: |
    set -euo pipefail
    mount_point=$(mktemp -d /tmp/kiln.XXXXXX)
    hdiutil attach "{{ courseware_downloads_dir }}/Kiln.MacOS.AppleSilicon.M-Processor.dmg" -mountpoint "$mount_point" -nobrowse -quiet
    app_path=$(find "$mount_point" -maxdepth 1 -name '*.app' | head -n 1)
    rm -rf "{{ courseware_apps_dir }}/Kiln.app"
    cp -R "$app_path" "{{ courseware_apps_dir }}/Kiln.app"
    hdiutil detach "$mount_point" -quiet
    rmdir "$mount_point"
  args:
    # `set -o pipefail` requires bash, not the default /bin/sh.
    executable: /bin/bash
    creates: "{{ courseware_apps_dir }}/Kiln.app"
+8
View File
@@ -0,0 +1,8 @@
# Dispatch Kiln installation to the platform-specific task file.
- name: Install Kiln on Linux
  include_tasks: linux.yml
  when: ansible_system == "Linux"
- name: Install Kiln on macOS
  include_tasks: macos.yml
  when: ansible_system == "Darwin"
@@ -0,0 +1,46 @@
#!/usr/bin/env bash
set -euo pipefail
# Wiki update script - runs with root privileges but executes git/npm as
# the student user. Clones the wiki repository on first run; fast-forwards
# it on subsequent runs, preserving node_modules/.next build artifacts.
REPO_URL="${WIKI_REPO_URL:-https://git.zuccaro.me/bzuccaro/LLM-Labs.git}"
WIKI_DIR="${HOME}/wiki"
STUDENT_USER="${SUDO_USER:-student}"

# Run a single command as the (unprivileged) student user.
run_as_student() {
  sudo -u "$STUDENT_USER" -- "$@"
}

if [ -d "$WIKI_DIR/.git" ]; then
  echo "Updating existing wiki..."
  run_as_student git -C "$WIKI_DIR" pull --ff-only
else
  echo "Cloning wiki repository..."
  # BUGFIX: `mktemp` without -d creates a regular FILE, so the original
  # `rmdir "$tmp_dir"` failed and aborted the script under `set -e`.
  # Create a directory, hand it to the student user, and clone into it —
  # git accepts an existing empty directory as a clone target.
  tmp_dir=$(mktemp -d /tmp/wiki.clone.XXXXXX)
  chown "$STUDENT_USER:$STUDENT_USER" "$tmp_dir"
  run_as_student git clone "$REPO_URL" "$tmp_dir"
  # Preserve node_modules and .next if they exist
  if [ -d "$WIKI_DIR/node_modules" ] && [ ! -e "$tmp_dir/node_modules" ]; then
    mv "$WIKI_DIR/node_modules" "$tmp_dir/node_modules"
  fi
  if [ -d "$WIKI_DIR/.next" ] && [ ! -e "$tmp_dir/.next" ]; then
    mv "$WIKI_DIR/.next" "$tmp_dir/.next"
  fi
  rm -rf "$WIKI_DIR"
  mv "$tmp_dir" "$WIKI_DIR"
  chown -R "$STUDENT_USER:$STUDENT_USER" "$WIKI_DIR"
fi

# Install dependencies if needed
if [ ! -d "$WIKI_DIR/node_modules" ]; then
  echo "Installing wiki dependencies..."
  run_as_student bash -lc "cd '$WIKI_DIR' && npm install --no-fund --no-audit"
fi
echo "Wiki updated successfully!"
+135
View File
@@ -0,0 +1,135 @@
---
# Lab start scripts setup
# Each start.sh is written once (force: no) so student edits survive reruns.
- name: Create lab1 start script (Transformer Lab)
  ansible.builtin.copy:
    dest: "{{ llmlab_base }}/lab1/start.sh"
    content: |
      #!/bin/bash
      set -e
      export NUMPY_DISABLE_OPTIMIZATION_CHECK=1
      source "{{ llmlab_base }}/.transformerlab/miniforge3/etc/profile.d/conda.sh"
      conda activate transformerlab
      cd "{{ llmlab_base }}/.transformerlab/src"
      ./run.sh
    mode: '0755'
    force: no
- name: Create lab2 start script (Ollama)
  ansible.builtin.copy:
    dest: "{{ llmlab_base }}/lab2/start.sh"
    content: |
      #!/bin/bash
      set -e
      echo "Starting Ollama..."
      # Check if already running
      if pgrep -f "ollama serve" > /dev/null; then
        echo "Ollama is already running."
        exit 0
      fi
      # Start Ollama
      nohup ollama serve > {{ llmlab_base }}/.llmlab/logs/ollama.log 2>&1 &
      echo "Ollama started (PID: $!)"
      echo "Ollama is available at http://localhost:11434"
    mode: '0755'
    force: no
- name: Create lab3 start script (Open WebUI)
  ansible.builtin.copy:
    dest: "{{ llmlab_base }}/lab3/start.sh"
    content: |
      #!/bin/bash
      set -e
      export OPEN_WEBUI_PORT=8080
      export OPEN_WEBUI_HOST=0.0.0.0
      # Check if already running
      if pgrep -f "open-webui serve" > /dev/null; then
        echo "Open WebUI is already running."
        exit 0
      fi
      # Start Open WebUI
      nohup open-webui serve \
        --port ${OPEN_WEBUI_PORT} \
        --host ${OPEN_WEBUI_HOST} \
        > {{ llmlab_base }}/.llmlab/logs/open-webui.log 2>&1 &
      echo "Open WebUI started on http://${OPEN_WEBUI_HOST}:${OPEN_WEBUI_PORT}"
      echo "PID: $!"
    mode: '0755'
    force: no
- name: Create lab4 start script (ChunkViz)
  ansible.builtin.copy:
    dest: "{{ llmlab_base }}/lab4/start.sh"
    content: |
      #!/bin/bash
      set -e
      CHUNKVIZ_PORT=${PORT:-3001}
      # Start ChunkViz in background.
      # BUGFIX: pass PORT to npm so the dev server actually binds the port
      # reported below (previously CHUNKVIZ_PORT was computed but unused).
      cd "{{ llmlab_base }}/lab4/ChunkViz"
      PORT=${CHUNKVIZ_PORT} nohup npm start > {{ llmlab_base }}/.llmlab/logs/chunkviz.log 2>&1 &
      CHUNKVIZ_PID=$!
      echo "ChunkViz started on http://0.0.0.0:${CHUNKVIZ_PORT}"
      echo "PID: ${CHUNKVIZ_PID}"
    mode: '0755'
    force: false
# Renamed from "lab5": this task writes lab6/start.sh and cd's into lab6;
# lab5/start.sh is owned by the LLaMA-Factory role.
- name: Create lab6 start script (Promptfoo)
  ansible.builtin.copy:
    dest: "{{ llmlab_base }}/lab6/start.sh"
    content: |
      #!/bin/bash
      set -e
      cd "{{ llmlab_base }}/lab6"
      # Run Promptfoo evaluation
      npx promptfoo eval -c promptfoo.yaml
    mode: '0755'
    force: false
# Generate one stop.sh per lab. The loop item is interpolated at template
# time, so each rendered script contains a case with exactly one live arm.
- name: Create lab stop scripts
  ansible.builtin.copy:
    dest: "{{ llmlab_base }}/lab{{ item }}/stop.sh"
    content: |
      #!/bin/bash
      set -e
      echo "Stopping Lab {{ item }}..."
      case "{{ item }}" in
        1)
          pkill -f "transformerlab.*run.sh" 2>/dev/null || true
          ;;
        2)
          pkill -f "ollama serve" 2>/dev/null || true
          ;;
        3)
          pkill -f "open-webui" 2>/dev/null || true
          ;;
        4)
          pkill -f "ChunkViz" 2>/dev/null || true
          ;;
        5)
          pkill -f "promptfoo" 2>/dev/null || true
          ;;
      esac
      echo "Lab {{ item }} stopped."
    mode: '0755'
  loop: [1, 2, 3, 4, 5]
- name: Display lab scripts creation
  ansible.builtin.debug:
    msg: "All lab start/stop scripts created in {{ llmlab_base }}/"
+44
View File
@@ -0,0 +1,44 @@
# Copy bundled lab datasets out of the repo checkout into the managed
# datasets directory, failing fast with guidance when the checkout is
# missing the assets/ tree.
- name: Set lab asset source paths
  set_fact:
    courseware_lab2_asset_src: "{{ playbook_dir }}/../../assets/lab2/wiki.test.raw"
    courseware_lab4_asset_src: "{{ playbook_dir }}/../../assets/lab4/ttps_dataset.parquet"
- name: Check lab 2 asset presence in repo
  stat:
    path: "{{ courseware_lab2_asset_src }}"
  register: courseware_lab2_asset
- name: Check lab 4 asset presence in repo
  stat:
    path: "{{ courseware_lab4_asset_src }}"
  register: courseware_lab4_asset
- name: Fail if required lab assets are missing from this checkout
  fail:
    msg: >-
      Required lab assets were not found in this repo checkout.
      Expected:
      {{ courseware_lab2_asset_src }}
      and
      {{ courseware_lab4_asset_src }}.
      Make sure the full project was copied, including the assets/ directory.
  when:
    - not courseware_lab2_asset.stat.exists or not courseware_lab4_asset.stat.exists
- name: Copy lab 2 wiki test corpus
  copy:
    src: "{{ courseware_lab2_asset_src }}"
    dest: "{{ courseware_datasets_dir }}/wiki.test.raw"
    mode: "0644"
- name: Render lab 2 WhiteRabbitNeo download helper
  template:
    src: "{{ playbook_dir }}/../templates/download_whiterabbitneo-gguf.sh.j2"
    dest: "{{ courseware_lab2_dir }}/download_whiterabbitneo-gguf.sh"
    mode: "0755"
- name: Copy lab 4 parquet dataset
  copy:
    src: "{{ courseware_lab4_asset_src }}"
    dest: "{{ courseware_datasets_dir }}/ttps_dataset.parquet"
    mode: "0644"
+49
View File
@@ -0,0 +1,49 @@
---
# Linux GPU detection and validation tasks
- name: Check for NVIDIA GPU
  ansible.builtin.command: nvidia-smi
  register: nvidia_smi_output
  changed_when: false
  # A non-zero rc just means "no usable NVIDIA driver"; never fail the play.
  failed_when: false
- name: Display NVIDIA GPU information
  ansible.builtin.debug:
    msg: "NVIDIA GPU detected: {{ nvidia_smi_output.stdout_lines | join(', ') }}"
  when: nvidia_smi_output.rc == 0
# Derive total VRAM in whole GiB from nvidia-smi's "usedMiB / totalMiB"
# memory column. BUGFIXES: (1) the old `| first` grabbed the number BEFORE
# the slash (current usage, not capacity); the new pattern anchors on the
# slash to capture the total. (2) `int / 1024 | int` parsed as
# (x|int) / (1024|int) — true float division — yielding strings like
# "7.87" that later integer comparisons choked on; parenthesize and cast.
- name: Validate NVIDIA VRAM
  ansible.builtin.set_fact:
    gpu_valid: true
    gpu_vram_gb: "{{ ((nvidia_smi_output.stdout | regex_findall('/ *(\\d+)MiB') | first | default(0) | int) / 1024) | int }}"
  when: nvidia_smi_output.rc == 0
- name: Check VRAM requirement (8GB minimum)
  ansible.builtin.debug:
    # set_fact values are strings — cast before the numeric comparison.
    msg: "GPU VRAM: {{ gpu_vram_gb | default(0) }}GB - Requirement met: {{ (gpu_vram_gb | default(0) | int) >= 8 }}"
  when: gpu_valid is defined
- name: Warn about insufficient VRAM
  ansible.builtin.debug:
    msg: "WARNING: NVIDIA GPU has less than 8GB VRAM. Some labs may not function correctly."
  when: gpu_vram_gb is defined and (gpu_vram_gb | int) < 8
- name: Check for AMD GPU (ROCm)
  ansible.builtin.command: rocminfo
  register: rocm_output
  changed_when: false
  failed_when: false
  # Only probe ROCm when no NVIDIA GPU was found.
  when: nvidia_smi_output.rc != 0
- name: Display AMD GPU information
  ansible.builtin.debug:
    msg: "AMD GPU detected"
  # BUGFIX: when an NVIDIA GPU is present the rocminfo task is skipped and
  # its registered result carries no `rc`, so the bare `rocm_output.rc == 0`
  # conditional raised an error. Guard on definedness first.
  when: rocm_output.rc is defined and rocm_output.rc == 0
- name: Set GPU type fact
  ansible.builtin.set_fact:
    # `default(1)` keeps this safe when rocminfo was skipped (no rc).
    gpu_type: "{{ 'nvidia' if nvidia_smi_output.rc == 0 else ('amd' if (rocm_output.rc | default(1)) == 0 else 'none') }}"
- name: Display GPU summary
  ansible.builtin.debug:
    msg: "GPU Type: {{ gpu_type | default('none') }}"
+172
View File
@@ -0,0 +1,172 @@
---
# llama.cpp installation and build
# Phase 1: figure out what hardware acceleration we can build for.
- name: Check if running on WSL
  ansible.builtin.command: grep -qi microsoft /proc/version
  register: wsl_check
  changed_when: false
  # Missing /proc/version (e.g. macOS) just means "not WSL".
  failed_when: false
- name: Set WSL fact
  ansible.builtin.set_fact:
    is_wsl: "{{ wsl_check.rc == 0 }}"
- name: Detect GPU on Linux/WSL
  ansible.builtin.command: nvidia-smi
  register: nvidia_smi_output
  changed_when: false
  failed_when: false
  when: ansible_os_family == "Debian" or is_wsl | default(false)
- name: Set GPU type for WSL/Linux
  ansible.builtin.set_fact:
    gpu_type: "{{ 'nvidia' if nvidia_smi_output.rc == 0 else 'none' }}"
  when: is_wsl | default(false) or ansible_os_family == "Debian"
- name: Check for Metal GPU on macOS
  ansible.builtin.command: system_profiler SPDisplaysDataType
  register: metal_check
  changed_when: false
  failed_when: false
  when: ansible_os_family == "Darwin"
- name: Set GPU type for macOS
  ansible.builtin.set_fact:
    # Apple Silicon Macs are assumed Metal-capable when displays enumerate.
    gpu_type: "metal"
  when: ansible_os_family == "Darwin" and metal_check.rc == 0
- name: Display detected GPU type
  ansible.builtin.debug:
    msg: "llama.cpp GPU type: {{ gpu_type | default('none') }}"
- name: Check if llama.cpp already exists
  ansible.builtin.stat:
    path: "{{ llmlab_base }}/lab2/llama.cpp"
  register: llama_cpp_stat
# BUGFIX: this used ansible.builtin.command with shell operators
# (&&, ||, 2>/dev/null) that the command module passes as literal
# arguments, and it only ever reported "cuda"/"none", so Metal/ROCm
# builds were re-triggered on every run. Use the shell module and probe
# each backend flag in the CMake cache. `-Ei` tolerates the cache storing
# the entry as BOOL or UNINITIALIZED and ON/on.
- name: Check existing build config
  ansible.builtin.shell: |
    cache="{{ llmlab_base }}/lab2/llama.cpp/build/CMakeCache.txt"
    if grep -Eiq "^GGML_CUDA:[A-Z]+=on" "$cache" 2>/dev/null; then
      echo "cuda"
    elif grep -Eiq "^GGML_METAL:[A-Z]+=on" "$cache" 2>/dev/null; then
      echo "metal"
    elif grep -Eiq "^GGML_ROCM:[A-Z]+=on" "$cache" 2>/dev/null; then
      echo "amd"
    else
      echo "none"
    fi
  register: existing_gpu_check
  changed_when: false
  failed_when: false
  when: llama_cpp_stat.stat.exists
- name: Determine if rebuild needed
  ansible.builtin.set_fact:
    # Jinja `or` short-circuits, so existing_gpu_check.stdout is only read
    # when the checkout (and therefore the check task) existed.
    needs_rebuild: >-
      {{
        not llama_cpp_stat.stat.exists or
        (gpu_type == 'nvidia' and existing_gpu_check.stdout != 'cuda') or
        (gpu_type == 'metal' and existing_gpu_check.stdout != 'metal') or
        (gpu_type == 'amd' and existing_gpu_check.stdout != 'amd')
      }}
# Wipe the build tree when the cached configuration does not match the
# detected GPU backend; cmake below will reconfigure from scratch.
- name: Clean build directory for rebuild
  ansible.builtin.file:
    path: "{{ llmlab_base }}/lab2/llama.cpp/build"
    state: absent
  become: no
  when: needs_rebuild | default(false)
- name: Clone llama.cpp repository
  ansible.builtin.git:
    repo: https://github.com/ggerganov/llama.cpp
    dest: "{{ llmlab_base }}/lab2/llama.cpp"
    # NOTE(review): tracks the moving `master` branch rather than a pinned
    # commit — builds are not reproducible across runs on fresh hosts.
    version: master
    update: no
  become: no
  when: not llama_cpp_stat.stat.exists
- name: Create build directory
  ansible.builtin.file:
    path: "{{ llmlab_base }}/lab2/llama.cpp/build"
    state: directory
    mode: '0755'
  become: no
# rc == 0 means CMakeCache.txt exists, i.e. a configure step already ran.
- name: Configure llama.cpp only if not already configured
  ansible.builtin.command:
    cmd: test -f CMakeCache.txt
  args:
    chdir: "{{ llmlab_base }}/lab2/llama.cpp/build"
  register: cmake_configured
  changed_when: false
  failed_when: false
# BUGFIX (all four tasks): "-G" and "Ninja" must be SEPARATE argv entries.
# A single "-G Ninja" element reaches cmake as one argument whose generator
# name carries a leading space, which cmake rejects.
- name: Configure llama.cpp with CUDA (NVIDIA GPU)
  ansible.builtin.command:
    argv:
      - cmake
      - ..
      - -G
      - Ninja
      - -DCMAKE_BUILD_TYPE=Release
      - -DGGML_CUDA=on
  args:
    chdir: "{{ llmlab_base }}/lab2/llama.cpp/build"
  when: gpu_type == 'nvidia' and cmake_configured.rc != 0
  become: false
- name: Configure llama.cpp for AMD (ROCm)
  ansible.builtin.command:
    argv:
      - cmake
      - ..
      - -G
      - Ninja
      - -DCMAKE_BUILD_TYPE=Release
      # NOTE(review): verify this flag name against the pinned llama.cpp
      # revision — upstream has historically used GGML_HIP/LLAMA_HIPBLAS
      # for the ROCm backend.
      - -DGGML_ROCM=on
  args:
    chdir: "{{ llmlab_base }}/lab2/llama.cpp/build"
  when: gpu_type == 'amd' and cmake_configured.rc != 0
  become: false
- name: Configure llama.cpp for Metal (macOS)
  ansible.builtin.command:
    argv:
      - cmake
      - ..
      - -G
      - Ninja
      - -DCMAKE_BUILD_TYPE=Release
      - -DGGML_METAL=on
  args:
    chdir: "{{ llmlab_base }}/lab2/llama.cpp/build"
  when: gpu_type == 'metal' and cmake_configured.rc != 0
  become: false
- name: Configure llama.cpp for CPU only
  ansible.builtin.command:
    argv:
      - cmake
      - ..
      - -G
      - Ninja
      - -DCMAKE_BUILD_TYPE=Release
  args:
    chdir: "{{ llmlab_base }}/lab2/llama.cpp/build"
  when: gpu_type | default('none') == 'none' and cmake_configured.rc != 0
  become: false
# Incremental build: ninja is a no-op when everything is up to date.
- name: Build llama.cpp
  ansible.builtin.command:
    argv:
      - ninja
  args:
    chdir: "{{ llmlab_base }}/lab2/llama.cpp/build"
  become: false
  register: build_output
- name: Display build output
  ansible.builtin.debug:
    msg: "{{ build_output.stdout_lines[-10:] }}"
  when: build_output.stdout_lines is defined
- name: Add llama.cpp to user PATH
  ansible.builtin.lineinfile:
    path: "{{ llmlab_base }}/.bashrc"
    # BUGFIX: the binaries live under {{ llmlab_base }}, which is not
    # guaranteed to equal $HOME; point PATH at the actual install prefix
    # used by every other task in this file.
    line: 'export PATH="{{ llmlab_base }}/lab2/llama.cpp/build/bin:$PATH"'
    state: present
    insertafter: EOF
  notify: Shell updated
- name: Display llama.cpp installation
  ansible.builtin.debug:
    msg: "llama.cpp installed to {{ llmlab_base }}/lab2/llama.cpp"
+106
View File
@@ -0,0 +1,106 @@
# Build llama.cpp from a pinned commit with the platform's GPU backend
# (Metal on macOS, CUDA elsewhere) and expose the CLI tools system-wide.
- name: Clone llama.cpp
  git:
    repo: "https://github.com/ggml-org/llama.cpp.git"
    dest: "{{ courseware_repos_dir }}/llama.cpp"
    version: "{{ courseware_llama_cpp_commit }}"
    update: false
- name: Check for CUDA compiler on Linux
  command: which nvcc
  register: courseware_llama_nvcc
  changed_when: false
  failed_when: false
  when: ansible_system == "Linux"
- name: Check for CUDA runtime header on Linux
  stat:
    path: "{{ item }}"
  loop:
    - /usr/local/cuda/include/cuda_runtime.h
    - /usr/include/cuda_runtime.h
  register: courseware_llama_cuda_headers
  when: ansible_system == "Linux"
# A visible nvidia-smi proves only the driver; the toolkit (nvcc + headers)
# must also exist inside the distro for the CUDA build to succeed.
- name: Fail early when CUDA toolkit is missing on Linux/WSL
  fail:
    msg: |
      CUDA Toolkit is not installed inside this Linux environment.
      `nvidia-smi` only proves that the NVIDIA driver is visible. It does not provide the Linux-side CUDA development toolkit needed to build CUDA-enabled llama.cpp.
      If you are using WSL, this is the common split:
      - Windows side: NVIDIA driver exposes the GPU to WSL
      - Linux side: CUDA toolkit still must exist inside the distro
      Fix it, then rerun:
      bash deploy-courseware.sh
      First try:
      sudo apt update
      sudo apt install -y nvidia-cuda-toolkit
      If that package is unavailable in your distro:
      1. add NVIDIA's CUDA apt repository for your Debian/Ubuntu release
      2. install the CUDA toolkit from that repository
      Verify with:
      nvcc --version
      ls /usr/local/cuda/include/cuda_runtime.h
  when:
    - ansible_system == "Linux"
    - courseware_llama_nvcc.rc != 0 or (courseware_llama_cuda_headers.results | selectattr('stat.exists', 'equalto', true) | list | length == 0)
- name: Set llama.cpp backend flag
  set_fact:
    courseware_llama_backend_flag: "{{ '-DGGML_METAL=ON' if ansible_system == 'Darwin' else '-DGGML_CUDA=ON' }}"
- name: Configure llama.cpp
  command:
    argv:
      - cmake
      - -S
      - "{{ courseware_repos_dir }}/llama.cpp"
      - -B
      - "{{ courseware_repos_dir }}/llama.cpp/build"
      - -DCMAKE_BUILD_TYPE=Release
      - "{{ courseware_llama_backend_flag }}"
  args:
    # Configure exactly once; wipe build/ to force a reconfigure.
    creates: "{{ courseware_repos_dir }}/llama.cpp/build/CMakeCache.txt"
# Runs every play, but the build is incremental (no-op when up to date).
- name: Build llama.cpp tools
  command:
    argv:
      - cmake
      - --build
      - "{{ courseware_repos_dir }}/llama.cpp/build"
      - --target
      - llama-cli
      - llama-quantize
      - llama-perplexity
      - llama-server
      - -j
- name: Check system PATH slots for llama.cpp tools
  stat:
    path: "/usr/local/bin/{{ item }}"
    follow: false
  loop:
    - llama-cli
    - llama-quantize
    - llama-perplexity
    - llama-server
  register: courseware_llama_path_slots
  when: ansible_system == "Linux"
# Only link when the slot is free, or already a symlink pointing at our
# build dir — never clobber a user's own binary or foreign symlink.
- name: Link llama.cpp tools into /usr/local/bin
  become: true
  file:
    src: "{{ courseware_llama_cpp_bin_dir }}/{{ item.item }}"
    dest: "/usr/local/bin/{{ item.item }}"
    state: link
    force: true
  loop: "{{ courseware_llama_path_slots.results | default([]) }}"
  when:
    - ansible_system == "Linux"
    - not item.stat.exists or item.stat.islnk
    - not item.stat.exists or item.stat.lnk_source == (courseware_llama_cpp_bin_dir ~ '/' ~ item.item)
+74
View File
@@ -0,0 +1,74 @@
---
# LLaMA Factory installation and setup
# Clones the repo, builds a dedicated venv, installs the package in
# editable mode, and drops a start.sh that launches the web UI.
- name: Create LLaMA Factory directory
  ansible.builtin.file:
    path: "{{ llmlab_base }}/lab5/LLaMA-Factory"
    state: directory
    mode: '0755'
- name: Check if LLaMA Factory already cloned
  ansible.builtin.stat:
    path: "{{ llmlab_base }}/lab5/LLaMA-Factory/.git"
  register: llm_factory_git_check
- name: Clone LLaMA Factory repository
  ansible.builtin.git:
    repo: https://github.com/hiyouga/LLaMA-Factory.git
    dest: "{{ llmlab_base }}/lab5/LLaMA-Factory"
    # NOTE(review): tracks moving `main`, not a pinned release — fresh
    # installs may pick up breaking upstream changes.
    version: main
    update: no
  become: no
  when: not llm_factory_git_check.stat.exists
- name: Create LLaMA Factory virtual environment
  ansible.builtin.command:
    cmd: python3 -m venv "{{ llmlab_base }}/lab5/LLaMA-Factory/.venv"
  args:
    creates: "{{ llmlab_base }}/lab5/LLaMA-Factory/.venv/bin/activate"
  become: no
- name: Install pip in virtual environment
  ansible.builtin.shell: |
    #!/bin/bash
    source "{{ llmlab_base }}/lab5/LLaMA-Factory/.venv/bin/activate"
    pip install --upgrade pip
  args:
    chdir: "{{ llmlab_base }}/lab5/LLaMA-Factory"
    executable: /bin/bash
  become: no
  register: pip_install_result
  # NOTE(review): rc == 0 is always true on success, so this task reports
  # "changed" on every run regardless of whether pip actually upgraded.
  changed_when: pip_install_result.rc == 0
- name: Install LLaMA Factory with GPU support
  ansible.builtin.shell: |
    #!/bin/bash
    source "{{ llmlab_base }}/lab5/LLaMA-Factory/.venv/bin/activate"
    pip install --break-system-packages -e ".[torch,metrics]"
  args:
    chdir: "{{ llmlab_base }}/lab5/LLaMA-Factory"
    executable: /bin/bash
  become: no
  register: install_result
  failed_when: install_result.rc != 0
- name: Create LLaMA Factory start script
  ansible.builtin.copy:
    dest: "{{ llmlab_base }}/lab5/start.sh"
    content: |
      #!/bin/bash
      set -e
      # Activate virtual environment
      source "{{ llmlab_base }}/lab5/LLaMA-Factory/.venv/bin/activate"
      # Navigate to LLaMA-Factory directory
      cd "{{ llmlab_base }}/lab5/LLaMA-Factory"
      # Launch LLaMA Board web interface
      llamafactory-cli webui
    mode: '0755'
- name: Display LLaMA Factory installation
  ansible.builtin.debug:
    msg: "LLaMA Factory installed to {{ llmlab_base }}/lab5/LLaMA-Factory"
+24
View File
@@ -0,0 +1,24 @@
# Install a self-contained Node runtime under the courseware tree (via the
# npm "node" wrapper package) instead of mutating the system Node.
- name: Create local Node runtime directory
  file:
    path: "{{ courseware_node_runtime_dir }}"
    state: directory
    mode: "0755"
- name: Install contained Node runtime for web tooling
  command:
    argv:
      - npm
      - install
      - "node@{{ courseware_node_runtime_version }}"
  args:
    chdir: "{{ courseware_node_runtime_dir }}"
    # Skip once the contained node binary exists.
    creates: "{{ courseware_node_runtime_bin_dir }}/node"
# Lets the unprivileged runtime serve ports below 1024.
- name: Allow contained Node runtime to bind low ports on Linux
  become: true
  command:
    argv:
      - setcap
      - cap_net_bind_service=+ep
      - "{{ courseware_node_runtime_bin_dir }}/node"
  when: ansible_system == "Linux"
+5
View File
@@ -0,0 +1,5 @@
---
# Default variables for Ollama role
# TCP port the Ollama API listens on (upstream default).
ollama_port: 11434
# Bind all interfaces so other lab tools/containers can reach the API.
ollama_host: "0.0.0.0"
+9
View File
@@ -0,0 +1,9 @@
---
# Handlers for Ollama role
# Fired after a fresh Linux install; starts and enables the systemd unit
# created by Ollama's install script.
- name: Start Ollama service
  ansible.builtin.systemd:
    name: ollama
    state: started
    enabled: yes
  become: yes
+67
View File
@@ -0,0 +1,67 @@
---
# Ollama installation and setup
- name: Check if Ollama is already installed
  ansible.builtin.command: ollama --version
  register: ollama_version_check
  changed_when: false
  failed_when: false
- name: Install Ollama (Linux)
  ansible.builtin.shell: |
    curl -fsSL https://ollama.com/install.sh | sh
  when:
    - ansible_os_family == "Debian"
    - ollama_version_check.rc != 0
  become: true
  notify: Start Ollama service
- name: Install Ollama (macOS via Homebrew)
  ansible.builtin.homebrew:
    name: ollama
    state: present
  when:
    - ansible_os_family == "Darwin"
    - ollama_version_check.rc != 0
- name: Check if Ollama service exists
  ansible.builtin.command: systemctl list-unit-files ollama.service
  register: ollama_service_check
  changed_when: false
  failed_when: false
  when: ansible_os_family == "Debian"
- name: Ensure Ollama service is running (Linux)
  ansible.builtin.systemd:
    name: ollama
    state: started
    enabled: true
  become: true
  when:
    - ansible_os_family == "Debian"
    - ollama_version_check.rc == 0
    - ollama_service_check.stdout is defined
    - "'ollama.service' in ollama_service_check.stdout"
  ignore_errors: true
- name: Start Ollama manually if no systemd service
  ansible.builtin.shell: |
    pkill -f "ollama serve" 2>/dev/null || true
    nohup ollama serve > {{ llmlab_base }}/.llmlab/logs/ollama.log 2>&1 &
  when:
    - ansible_os_family == "Debian"
    - ollama_version_check.rc == 0
    # BUGFIX: the old expression embedded "'ollama.service' not in ..." as
    # a quoted string literal INSIDE the condition — always truthy — so
    # this fallback also killed/restarted Ollama on systemd-managed hosts.
    - ollama_service_check.stdout is not defined or 'ollama.service' not in ollama_service_check.stdout
  become: false
  ignore_errors: true
- name: Wait for Ollama to be ready
  ansible.builtin.wait_for:
    # Use the role default rather than a second hard-coded copy of 11434.
    port: "{{ ollama_port | default(11434) }}"
    delay: 5
    timeout: 60
  when: ollama_version_check.rc == 0
- name: Display Ollama version
  ansible.builtin.debug:
    msg: "Ollama installed: {{ ollama_version_check.stdout }}"
@@ -0,0 +1,30 @@
# Ensure the courseware-managed Ollama instance is up, then pre-pull the
# models the labs need. `ollama pull` is a no-op for already-present
# models, so reruns are cheap.
- name: Start Ollama before model pulls
  command:
    argv:
      - "{{ courseware_root }}/scripts/service_manager.sh"
      - start
      - ollama
  # Starting (or confirming) the service is not a configuration change.
  changed_when: false
- name: Pull core Ollama models
  command:
    argv:
      - "{{ courseware_ollama_bin }}"
      - pull
      - "{{ item }}"
  environment:
    # Target the courseware instance and its private model store.
    OLLAMA_HOST: "{{ courseware_bind_host }}:{{ courseware_ports.ollama }}"
    OLLAMA_MODELS: "{{ courseware_ollama_models_dir }}"
  loop: "{{ courseware_ollama_models }}"
- name: Pull optional heavy Ollama models
  command:
    argv:
      - "{{ courseware_ollama_bin }}"
      - pull
      - "{{ item }}"
  environment:
    OLLAMA_HOST: "{{ courseware_bind_host }}:{{ courseware_ports.ollama }}"
    OLLAMA_MODELS: "{{ courseware_ollama_models_dir }}"
  loop: "{{ courseware_optional_ollama_models }}"
  when: courseware_install_optional_heavy_models | bool
+53
View File
@@ -0,0 +1,53 @@
---
# Open WebUI installation and setup
# Using Docker installation as it's more compatible and the recommended method
- name: Check if Docker is installed
  ansible.builtin.command: docker --version
  register: docker_check
  changed_when: false
  failed_when: false
- name: Display Open WebUI installation method
  ansible.builtin.debug:
    msg: "Open WebUI will be installed via Docker. Run: docker run -d -p 3080:8080 -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:main"
  when: docker_check.rc == 0
- name: Skip Open WebUI pip installation (use Docker instead)
  ansible.builtin.debug:
    msg: "Skipping pip installation of Open WebUI due to Python version incompatibility. Use Docker instead."
  when: docker_check.rc != 0
# NOTE(review): this writes lab3/start.sh WITHOUT force: no, so it will
# overwrite the lab3 start.sh rendered by the lab-scripts role — confirm
# which role should own that file.
- name: Create Open WebUI start script (Docker)
  ansible.builtin.copy:
    dest: "{{ llmlab_base }}/lab3/start.sh"
    content: |
      #!/bin/bash
      set -e
      # Check if Docker is running
      if ! command -v docker &> /dev/null; then
        echo "Error: Docker is not installed. Please install Docker first."
        exit 1
      fi
      # Check if container is already running
      if docker ps | grep -q open-webui; then
        echo "Open WebUI is already running."
        exit 0
      fi
      # Start Open WebUI container
      docker run -d \
        -p 3080:8080 \
        -v open-webui:/app/backend/data \
        --name open-webui \
        --restart always \
        ghcr.io/open-webui/open-webui:main
      echo "Open WebUI started on http://localhost:3080"
    mode: '0755'
- name: Display Open WebUI installation
  ansible.builtin.debug:
    msg: "Open WebUI installed via Docker. Access at http://localhost:3080"
+69
View File
@@ -0,0 +1,69 @@
# Build two isolated virtualenvs: one for Open WebUI and one for Embedding
# Atlas. "numpy<2" is pinned alongside each install; the pip tasks run on
# every play (pip itself short-circuits satisfied requirements).
- name: Create Open WebUI data directory
  file:
    path: "{{ courseware_state_dir }}/open-webui"
    state: directory
    mode: "0755"
- name: Create Open WebUI virtual environment
  command:
    argv:
      - "{{ courseware_python_bin }}"
      - -m
      - venv
      - "{{ courseware_venvs_dir }}/open-webui"
  args:
    creates: "{{ courseware_venvs_dir }}/open-webui/bin/python"
- name: Upgrade Open WebUI venv tooling
  command:
    argv:
      - "{{ courseware_venvs_dir }}/open-webui/bin/python"
      - -m
      - pip
      - install
      - --upgrade
      - pip
      - setuptools
      - wheel
- name: Install Open WebUI
  command:
    argv:
      - "{{ courseware_venvs_dir }}/open-webui/bin/python"
      - -m
      - pip
      - install
      - "{{ courseware_open_webui_spec }}"
      - "numpy<2"
- name: Create Embedding Atlas virtual environment
  command:
    argv:
      - "{{ courseware_python_bin }}"
      - -m
      - venv
      - "{{ courseware_venvs_dir }}/embedding-atlas"
  args:
    creates: "{{ courseware_venvs_dir }}/embedding-atlas/bin/python"
- name: Upgrade Embedding Atlas venv tooling
  command:
    argv:
      - "{{ courseware_venvs_dir }}/embedding-atlas/bin/python"
      - -m
      - pip
      - install
      - --upgrade
      - pip
      - setuptools
      - wheel
- name: Install Embedding Atlas
  command:
    argv:
      - "{{ courseware_venvs_dir }}/embedding-atlas/bin/python"
      - -m
      - pip
      - install
      - "{{ courseware_embedding_atlas_spec }}"
      - "numpy<2"
+155
View File
@@ -0,0 +1,155 @@
# Linux prerequisites: base apt packages, CUDA toolkit presence checks
# with actionable failure guidance, and Ollama install/adoption.
- name: Install Debian/Ubuntu prerequisites
  become: true
  apt:
    name:
      - build-essential
      - ca-certificates
      - cmake
      - curl
      - git
      - git-lfs
      - libcap2-bin
      - libcurl4-openssl-dev
      - nodejs
      - npm
      - pkg-config
      - python3
      - python3-pip
      - python3-venv
      - unzip
      - zstd
    state: present
    update_cache: true
- name: Query CUDA toolkit apt candidate
  command: apt-cache policy nvidia-cuda-toolkit
  register: courseware_cuda_toolkit_policy
  changed_when: false
  failed_when: false
- name: Check for nvcc
  command: which nvcc
  register: courseware_nvcc_check
  changed_when: false
  failed_when: false
- name: Set CUDA toolkit package availability
  set_fact:
    # True when apt knows an installable candidate for nvidia-cuda-toolkit.
    courseware_cuda_toolkit_package_available: >-
      {{
        courseware_cuda_toolkit_policy.rc == 0
        and 'Candidate: (none)' not in courseware_cuda_toolkit_policy.stdout
      }}
# Attempt the distro toolkit; on apt failure the rescue converts the error
# into actionable guidance instead of a raw apt traceback.
# NOTE(review): both CUDA paths below are gated on `not courseware_is_wsl`,
# so WSL hosts without nvcc pass through silently here and are caught
# later by the llama.cpp role's own CUDA check — confirm that is intended.
- name: Install distro CUDA toolkit when available
  block:
    - name: Install nvidia-cuda-toolkit
      become: true
      apt:
        name: nvidia-cuda-toolkit
        state: present
  rescue:
    - name: Fail with CUDA toolkit guidance after apt install error
      fail:
        msg: |
          CUDA Toolkit could not be installed from the distro package manager.
          This installer needs the Linux-side CUDA toolkit for llama.cpp, not just a working `nvidia-smi`.
          Try this first:
          sudo apt update
          sudo apt install -y nvidia-cuda-toolkit
          If that still fails, add NVIDIA's CUDA repository for your Debian/Ubuntu release and install the toolkit from there.
          Verify with:
          nvcc --version
          ls /usr/local/cuda/include/cuda_runtime.h
  when:
    - not courseware_is_wsl
    - courseware_cuda_toolkit_package_available
    - courseware_nvcc_check.rc != 0
- name: Fail with CUDA toolkit guidance when no apt candidate exists
  fail:
    msg: |
      CUDA Toolkit is not available from this distro's current apt sources.
      This installer needs the Linux-side CUDA toolkit for llama.cpp, not just a working `nvidia-smi`.
      On WSL this usually means:
      - Windows side: the NVIDIA driver is installed correctly
      - Linux side: the CUDA toolkit repository is still missing
      Add NVIDIA's CUDA repository for your Debian/Ubuntu release, install the toolkit, then rerun:
      bash deploy-courseware.sh
      Verify with:
      nvcc --version
      ls /usr/local/cuda/include/cuda_runtime.h
  when:
    - not courseware_is_wsl
    - not courseware_cuda_toolkit_package_available
    - courseware_nvcc_check.rc != 0
- name: Check for Ollama binary
  command: which ollama
  register: courseware_ollama_check
  changed_when: false
  failed_when: false
- name: Install Ollama
  become: true
  shell: curl -fsSL https://ollama.com/install.sh | sh
  args:
    creates: /usr/local/bin/ollama
  when: courseware_ollama_check.rc != 0
# Marker: records that *we* installed Ollama, so teardown/management logic
# can distinguish our install from a user's pre-existing one.
- name: Mark Ollama as installed by courseware
  file:
    path: "{{ courseware_ollama_install_marker }}"
    state: touch
    mode: "0644"
  when: courseware_ollama_check.rc != 0
- name: Check for courseware-managed Ollama install marker
  stat:
    path: "{{ courseware_ollama_install_marker }}"
  register: courseware_ollama_install_marker_before
# On WSL, adopt an existing user-scope Ollama install as managed. The HOME
# guards avoid adopting the system "ollama" service account's install.
- name: Adopt existing local Ollama install into courseware management
  file:
    path: "{{ courseware_ollama_install_marker }}"
    state: touch
    mode: "0644"
  when:
    - not courseware_ollama_install_marker_before.stat.exists
    - courseware_ollama_check.rc == 0
    - ansible_system == "Linux"
    - ansible_service_mgr == "systemd"
    - courseware_is_wsl | bool
    - ansible_user_id != "ollama"
    - ansible_env.HOME is search('/home/')
    - ansible_env.HOME != '/usr/share/ollama'
    - ansible_env.HOME != '/var/lib/ollama'
- name: Refresh courseware-managed Ollama install marker
  stat:
    path: "{{ courseware_ollama_install_marker }}"
  register: courseware_ollama_install_marker_stat
- name: Check for Ollama systemd unit
  stat:
    path: /etc/systemd/system/ollama.service
  register: courseware_ollama_systemd_unit
# The courseware runs Ollama itself (service_manager.sh), so the stock
# systemd unit is stopped and disabled when we own the install.
- name: Stop and disable courseware-managed Ollama systemd service
  become: true
  systemd:
    name: ollama
    state: stopped
    enabled: false
  when:
    - ansible_service_mgr == "systemd"
    - courseware_ollama_install_marker_stat.stat.exists
    - courseware_ollama_systemd_unit.stat.exists
+29
View File
@@ -0,0 +1,29 @@
# Probe each required formula; failures are tolerated here and drive the
# install loop below through each result's rc.
- name: Check installed Homebrew formulas
  command: "brew list --versions {{ item }}"
  loop:
    - git
    - git-lfs
    - cmake
    - node
    - python@3.11
    - ollama
  register: courseware_brew_checks
  changed_when: false
  failed_when: false
# Install only the formulas whose presence check failed. item.item is the
# formula name carried through from the probe loop.
- name: Install missing Homebrew formulas
  command: "brew install {{ item.item }}"
  loop: "{{ courseware_brew_checks.results }}"
  when: item.rc != 0
# If ollama was among the missing formulas, this role just installed it,
# so record it as courseware-managed (mirrors the Linux marker logic).
- name: Mark Ollama as installed by courseware on macOS
  file:
    path: "{{ courseware_ollama_install_marker }}"
    state: touch
    mode: "0644"
  when:
    - courseware_brew_checks.results
      | selectattr('item', 'equalto', 'ollama')
      | selectattr('rc', 'ne', 0)
      | list
      | length > 0
+8
View File
@@ -0,0 +1,8 @@
# Dispatch to the platform-specific prerequisite task list. The two
# conditions are mutually exclusive (ansible_system is a single value).
# Fully-qualified collection names match the style used by the common role.
- name: Install macOS prerequisites
  ansible.builtin.include_tasks: macos.yml
  when: ansible_system == "Darwin"
- name: Install Linux prerequisites
  ansible.builtin.include_tasks: linux.yml
  when: ansible_system == "Linux"
+329
View File
@@ -0,0 +1,329 @@
# WSL kernels advertise "microsoft" (or "WSL") in the kernel string.
- name: Detect WSL
  set_fact:
    courseware_is_wsl: "{{ 'microsoft' in ansible_kernel | lower or 'wsl' in ansible_kernel | lower }}"
- name: Fail on unsupported operating systems
  fail:
    msg: "Supported platforms are Apple Silicon macOS and Debian-family Linux/WSL."
  when: ansible_system not in ["Darwin", "Linux"]
- name: Fail on unsupported macOS architecture
  fail:
    msg: "This installer supports Apple Silicon Macs only."
  when:
    - ansible_system == "Darwin"
    - ansible_architecture not in ["arm64", "aarch64"]
# NOTE(review): threshold is 16000 MB, slightly under 16 GiB -- presumably
# to tolerate how reported memory rounds; confirm before tightening.
- name: Fail on undersized macOS systems
  fail:
    msg: "This courseware assumes a modern Apple Silicon Mac with at least 16 GB of unified memory."
  when:
    - ansible_system == "Darwin"
    - (ansible_memtotal_mb | int) < 16000
# Probe for the Xcode Command Line Tools. `xcode-select -p` exits non-zero
# when the tools are missing; without failed_when: false this probe would
# fail the play outright and the guidance task that tests
# courseware_xcode_select.rc below would never run. Mirrors the Homebrew
# probe's tolerant style.
- name: Check for Xcode command line tools
  command: xcode-select -p
  register: courseware_xcode_select
  changed_when: false
  failed_when: false
  when: ansible_system == "Darwin"
# Tolerant probe; rc is tested by the guidance task below.
- name: Check for Homebrew
  command: which brew
  register: courseware_brew_check
  changed_when: false
  failed_when: false
  when: ansible_system == "Darwin"
- name: Fail when Xcode command line tools are missing
  fail:
    msg: "Install Xcode Command Line Tools first with 'xcode-select --install'."
  when:
    - ansible_system == "Darwin"
    - courseware_xcode_select.rc != 0
- name: Fail when Homebrew is missing
  fail:
    msg: "Install Homebrew first from https://brew.sh/."
  when:
    - ansible_system == "Darwin"
    - courseware_brew_check.rc != 0
- name: Fail on unsupported Linux family
  fail:
    msg: "This installer currently supports Debian and Ubuntu only."
  when:
    - ansible_system == "Linux"
    - ansible_os_family != "Debian"
# One VRAM figure per GPU, in MiB, one per stdout line.
- name: Query NVIDIA GPU memory
  command: nvidia-smi --query-gpu=memory.total --format=csv,noheader,nounits
  register: courseware_gpu_memory
  changed_when: false
  failed_when: false
  when: ansible_system == "Linux"
# NOTE(review): courseware_gpu_names is registered but not used in this
# file -- presumably consumed elsewhere for diagnostics; confirm.
- name: Query NVIDIA GPU names
  command: nvidia-smi --query-gpu=name --format=csv,noheader
  register: courseware_gpu_names
  changed_when: false
  failed_when: false
  when: ansible_system == "Linux"
# nvidia-smi exiting non-zero means no driver/GPU is visible.
- name: Fail when no supported NVIDIA GPU is visible
  fail:
    msg: "Linux/WSL requires an NVIDIA GPU visible to nvidia-smi."
  when:
    - ansible_system == "Linux"
    - courseware_gpu_memory.rc != 0
# Reject hosts whose largest GPU has under 8 GiB of VRAM. Conditions are
# evaluated in order and short-circuit: the rc and non-empty guards keep
# `map('int') | max` from raising on a failed query or on blank output
# (max() over an empty sequence is an error). `select` with no test drops
# empty lines before the int conversion.
- name: Fail when GPU VRAM is below baseline
  fail:
    msg: "This build assumes at least 8 GB of VRAM on Linux/WSL."
  when:
    - ansible_system == "Linux"
    - courseware_gpu_memory.rc == 0
    - courseware_gpu_memory.stdout_lines | select | list | length > 0
    - (courseware_gpu_memory.stdout_lines | select | map('int') | max) < 8192
# Tolerant probes for an existing CUDA toolkit: compiler on PATH, or a
# runtime header in either conventional location.
- name: Check for CUDA compiler on Linux
  command: which nvcc
  register: courseware_preflight_nvcc
  changed_when: false
  failed_when: false
  when: ansible_system == "Linux"
- name: Check for CUDA runtime header on Linux
  stat:
    path: "{{ item }}"
  loop:
    - /usr/local/cuda/include/cuda_runtime.h
    - /usr/include/cuda_runtime.h
  register: courseware_preflight_cuda_headers
  when: ansible_system == "Linux"
# Toolkit counts as ready when nvcc resolves OR any probed header exists;
# all the CUDA bootstrap tasks below key off this fact.
- name: Set CUDA toolkit readiness
  set_fact:
    courseware_cuda_toolkit_ready: >-
      {{
      courseware_preflight_nvcc.rc == 0
      or (courseware_preflight_cuda_headers.results | selectattr('stat.exists', 'equalto', true) | list | length > 0)
      }}
  when: ansible_system == "Linux"
# Ask apt whether the distro's own nvidia-cuda-toolkit is installable;
# tolerated failure so missing apt metadata does not abort preflight.
- name: Query distro CUDA toolkit apt candidate
  command: apt-cache policy nvidia-cuda-toolkit
  register: courseware_preflight_cuda_toolkit_policy
  changed_when: false
  failed_when: false
  when:
    - ansible_system == "Linux"
    - ansible_os_family == "Debian"
# "Candidate: (none)" in apt-cache policy output means no installable
# version exists in the configured repositories.
- name: Set distro CUDA toolkit package availability
  set_fact:
    courseware_preflight_cuda_toolkit_package_available: >-
      {{
      courseware_preflight_cuda_toolkit_policy.rc == 0
      and 'Candidate: (none)' not in courseware_preflight_cuda_toolkit_policy.stdout
      }}
  when:
    - ansible_system == "Linux"
    - ansible_os_family == "Debian"
# Automatic bootstrap is limited to Ubuntu x86_64 on WSL; everything else
# must install CUDA by hand first.
- name: Fail when automatic WSL CUDA bootstrap is unsupported
  fail:
    msg: "Automatic CUDA bootstrap currently supports Ubuntu x86_64 on WSL only. For other WSL distros, install the CUDA toolkit manually before rerunning."
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution != "Ubuntu" or ansible_architecture not in ["x86_64", "amd64"]
# Prefer the distro package when apt offers one.
- name: Install distro CUDA toolkit on Ubuntu WSL when available
  become: true
  apt:
    name: nvidia-cuda-toolkit
    state: present
    update_cache: true
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
    - courseware_preflight_cuda_toolkit_package_available | default(false)
# Re-probe under the same guards so these only run when the distro install
# path above was attempted.
- name: Recheck CUDA compiler after distro toolkit install
  command: which nvcc
  register: courseware_preflight_nvcc_after_distro_install
  changed_when: false
  failed_when: false
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
    - courseware_preflight_cuda_toolkit_package_available | default(false)
- name: Recheck CUDA runtime header after distro toolkit install
  stat:
    path: "{{ item }}"
  loop:
    - /usr/local/cuda/include/cuda_runtime.h
    - /usr/include/cuda_runtime.h
  register: courseware_preflight_cuda_headers_after_distro_install
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
    - courseware_preflight_cuda_toolkit_package_available | default(false)
# The `not courseware_cuda_toolkit_ready` guard reads the PRE-install value
# (set_fact conditions are evaluated before the assignment), so this only
# refreshes the fact when the distro install path actually ran.
- name: Refresh CUDA toolkit readiness after distro toolkit install
  set_fact:
    courseware_cuda_toolkit_ready: >-
      {{
      courseware_preflight_nvcc_after_distro_install.rc == 0
      or (courseware_preflight_cuda_headers_after_distro_install.results | selectattr('stat.exists', 'equalto', true) | list | length > 0)
      }}
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
    - courseware_preflight_cuda_toolkit_package_available | default(false)
# Fallback path: set up NVIDIA's own WSL CUDA repository when the distro
# package did not make the toolkit ready. First clear the legacy apt key.
# NOTE(review): apt-key is deprecated on modern Debian/Ubuntu; the delete
# is best-effort (failed_when: false) so its absence is harmless.
- name: Remove legacy NVIDIA CUDA apt key when preparing WSL toolkit install
  become: true
  command: apt-key del 7fa2af80
  register: courseware_wsl_cuda_apt_key_delete
  changed_when: courseware_wsl_cuda_apt_key_delete.rc == 0
  failed_when: false
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
# The pin lands in a system path, hence become; force keeps it current.
- name: Download NVIDIA WSL CUDA apt pin
  become: true
  get_url:
    url: "{{ courseware_wsl_cuda_pin_url }}"
    dest: "{{ courseware_wsl_cuda_pin_dest }}"
    mode: "0644"
    force: true
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
# The large installer is cached (force: false) and downloaded without
# become -- NOTE(review): dest is presumably a user-writable path; confirm.
- name: Download NVIDIA WSL CUDA local installer
  get_url:
    url: "{{ courseware_wsl_cuda_installer_url }}"
    dest: "{{ courseware_wsl_cuda_installer_local_path }}"
    mode: "0644"
    force: false
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
- name: Install NVIDIA WSL CUDA local repository package
  become: true
  apt:
    deb: "{{ courseware_wsl_cuda_installer_local_path }}"
    state: present
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
# The repo package ships a versioned keyring file; locate it by glob.
- name: Find NVIDIA WSL CUDA keyring
  become: true
  find:
    paths: "{{ courseware_wsl_cuda_repo_dir }}"
    patterns: "cuda-*-keyring.gpg"
    file_type: file
  register: courseware_wsl_cuda_keyring
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
- name: Fail when NVIDIA WSL CUDA keyring is missing
  fail:
    msg: "The NVIDIA WSL CUDA repository package was installed, but its keyring file was not found under {{ courseware_wsl_cuda_repo_dir }}."
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
    - (courseware_wsl_cuda_keyring.files | length) == 0
# Only the first match is copied; the guard above guarantees at least one.
- name: Copy NVIDIA WSL CUDA keyring into trusted keyrings
  become: true
  copy:
    src: "{{ courseware_wsl_cuda_keyring.files[0].path }}"
    dest: "/usr/share/keyrings/{{ courseware_wsl_cuda_keyring.files[0].path | basename }}"
    remote_src: true
    mode: "0644"
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
    - (courseware_wsl_cuda_keyring.files | length) > 0
- name: Install NVIDIA WSL CUDA toolkit
  become: true
  apt:
    name: "{{ courseware_wsl_cuda_toolkit_package }}"
    state: present
    update_cache: true
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
- name: Recheck CUDA compiler after WSL toolkit install
  command: which nvcc
  register: courseware_preflight_nvcc_after_install
  changed_when: false
  failed_when: false
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
- name: Fail when CUDA toolkit is still missing after WSL install attempt
  fail:
    msg: "The NVIDIA WSL CUDA toolkit install completed, but `nvcc` is still missing. Verify the repository package and rerun the installer."
  when:
    - ansible_system == "Linux"
    - courseware_is_wsl
    - not courseware_cuda_toolkit_ready
    - ansible_distribution == "Ubuntu"
    - ansible_architecture in ["x86_64", "amd64"]
    - courseware_preflight_nvcc_after_install.rc != 0
# Platform-dependent interpreter path; the plain "ollama" binary name is
# resolved via PATH by consumers of this fact.
- name: Set runtime binary defaults
  set_fact:
    courseware_python_bin: >-
      {{ '/opt/homebrew/opt/python@3.11/bin/python3.11' if ansible_system == 'Darwin' else '/usr/bin/python3' }}
    courseware_ollama_bin: "ollama"
+27
View File
@@ -0,0 +1,27 @@
- name: Create Promptfoo working directories
  file:
    path: "{{ item }}"
    state: directory
    mode: "0755"
  loop:
    - "{{ courseware_tools_dir }}/promptfoo"
    - "{{ courseware_lab6_dir }}"
# Pinned local npm install using the contained Node runtime on PATH.
# NOTE(review): no creates/changed_when guard, so this reruns (and reports
# changed) on every play -- confirm whether that is intentional to pick up
# a changed courseware_promptfoo_version.
- name: Install Promptfoo locally
  command: "npm install promptfoo@{{ courseware_promptfoo_version }}"
  args:
    chdir: "{{ courseware_tools_dir }}/promptfoo"
  environment:
    PATH: "{{ courseware_node_runtime_bin_dir }}:{{ ansible_env.PATH }}"
# Patch the packaged server so .listen() also receives a bind host.
# NOTE(review): this edits a dist file in node_modules and the regexp is
# tied to promptfoo's exact source -- revisit when the pinned version bumps.
- name: Force Promptfoo server to bind to the configured host
  replace:
    path: "{{ courseware_tools_dir }}/promptfoo/node_modules/promptfoo/dist/src/server/server.js"
    regexp: '\.listen\(port, \(\) => \{'
    replace: ".listen(port, process.env.COURSEWARE_BIND_HOST || '0.0.0.0', () => {"
- name: Render promptfoo starter config
  template:
    src: "{{ playbook_dir }}/../templates/promptfoo.yaml.j2"
    dest: "{{ courseware_lab6_dir }}/promptfoo.yaml"
    mode: "0644"
+139
View File
@@ -0,0 +1,139 @@
# Fetch the pinned release tarball, keep only its api/ tree as src/, and
# unpack the prebuilt web bundle into webapp/. The creates guard on
# src/install.sh makes the whole bootstrap a one-shot.
- name: Bootstrap TransformerLab release files
  shell: |
    set -euo pipefail
    cd "{{ courseware_transformerlab_home }}"
    curl -L "https://github.com/transformerlab/transformerlab-app/archive/refs/tags/{{ courseware_transformerlab_version }}.tar.gz" -o transformerlab.tar.gz
    tar -xzf transformerlab.tar.gz
    rm -f transformerlab.tar.gz
    rm -rf src
    mv "transformerlab-app-{{ courseware_transformerlab_version_dir }}/api" src
    echo "{{ courseware_transformerlab_version }}" > src/LATEST_VERSION
    curl -L "https://github.com/transformerlab/transformerlab-app/releases/download/{{ courseware_transformerlab_version }}/transformerlab_web.tar.gz" -o transformerlab_web.tar.gz
    rm -rf webapp
    mkdir -p webapp
    tar -xzf transformerlab_web.tar.gz -C webapp
    rm -f transformerlab_web.tar.gz
  args:
    executable: /bin/bash
    creates: "{{ courseware_transformerlab_home }}/src/install.sh"
# The next four tasks patch the vendor install.sh so it survives an install
# prefix containing spaces; they only apply when the path has a space.
# This one appends a CONDA_PYTHON_BIN line after the CONDA_BIN assignment.
- name: Add TransformerLab Miniforge Python path for space-safe bootstrap
  replace:
    path: "{{ courseware_transformerlab_home }}/src/install.sh"
    regexp: 'CONDA_BIN=\$\{MINIFORGE_ROOT\}/bin/conda\n'
    replace: |
      CONDA_BIN=${MINIFORGE_ROOT}/bin/conda
      CONDA_PYTHON_BIN=${MINIFORGE_ROOT}/bin/python
  when: "' ' in courseware_transformerlab_home"
# run_conda prefers direct conda execution and falls back to invoking the
# conda entrypoint via python when the shebang cannot be exec'd directly.
- name: Inject space-safe TransformerLab conda runner
  blockinfile:
    path: "{{ courseware_transformerlab_home }}/src/install.sh"
    insertbefore: '^check_conda\(\) \{$'
    marker: '# {mark} courseware conda runner'
    block: |
      conda_direct_exec_works() {
        "${CONDA_BIN}" --version >/dev/null 2>&1
      }
      run_conda() {
        if conda_direct_exec_works; then
          "${CONDA_BIN}" "$@"
        else
          "${CONDA_PYTHON_BIN}" "${CONDA_BIN}" "$@"
        fi
      }
  when: "' ' in courseware_transformerlab_home"
- name: Rewrite TransformerLab installer to use the space-safe conda runner
  replace:
    path: "{{ courseware_transformerlab_home }}/src/install.sh"
    regexp: 'eval "\$\(\$\{CONDA_BIN\} shell\.bash hook\)"'
    replace: 'eval "$(run_conda shell.bash hook)"'
  when: "' ' in courseware_transformerlab_home"
- name: Rewrite TransformerLab doctor output to use the space-safe conda runner
  replace:
    path: "{{ courseware_transformerlab_home }}/src/install.sh"
    regexp: '\$\(\$\{CONDA_BIN\} --version\)'
    replace: '$(run_conda --version)'
  when: "' ' in courseware_transformerlab_home"
# Run the (possibly patched) installer; pipefail makes the tee'd pipeline
# propagate installer failures. Idempotent via the miniforge conda binary.
- name: Install TransformerLab
  shell: |
    set -euo pipefail
    ./src/install.sh 2>&1 | tee "{{ courseware_logs_dir }}/transformerlab_install.log"
    touch "{{ courseware_transformerlab_home }}/.courseware-managed"
  args:
    executable: /bin/bash
    chdir: "{{ courseware_transformerlab_home }}"
    creates: "{{ courseware_transformerlab_home }}/miniforge3/bin/conda"
# Rewrite every miniforge bin/ entrypoint whose shebang points into the
# space-containing prefix to use a safe prefix instead (exec(2) cannot run
# shebangs with spaces). Assumes ~/.transformerlab resolves to the actual
# install -- TODO confirm the symlink/layout is created elsewhere.
# NOTE(review): `chmod --reference` is GNU coreutils only; fails on BSD/macOS.
- name: Rewrite TransformerLab Miniforge entrypoints to a space-safe shebang path
  shell: |
    set -euo pipefail
    actual_prefix="{{ courseware_transformerlab_home }}/miniforge3/bin/"
    safe_prefix="{{ ansible_env.HOME }}/.transformerlab/miniforge3/bin/"
    find "{{ courseware_transformerlab_home }}/miniforge3/bin" -maxdepth 1 -type f -print0 |
    while IFS= read -r -d '' file; do
      first_line=$(head -n 1 "$file" || true)
      case "$first_line" in
        "#!${actual_prefix}"*)
          suffix=${first_line#\#!}
          suffix=${suffix#"${actual_prefix}"}
          replacement="#!${safe_prefix}${suffix}"
          tmp_file=$(mktemp)
          {
            printf '%s\n' "$replacement"
            tail -n +2 "$file"
          } >"$tmp_file"
          chmod --reference="$file" "$tmp_file"
          mv "$tmp_file" "$file"
          ;;
      esac
    done
  args:
    executable: /bin/bash
  when: "' ' in courseware_transformerlab_home"
# One-shot multiuser setup, guarded by the general-uv python it creates.
- name: Install TransformerLab multiuser dependencies
  shell: |
    set -euo pipefail
    ./src/install.sh multiuser_setup 2>&1 | tee "{{ courseware_logs_dir }}/transformerlab_multiuser_setup.log"
    touch "{{ courseware_transformerlab_home }}/.courseware-managed"
  args:
    executable: /bin/bash
    chdir: "{{ courseware_transformerlab_home }}"
    creates: "{{ courseware_transformerlab_home }}/envs/general-uv/bin/python"
- name: Check TransformerLab general uv environment
  stat:
    path: "{{ courseware_transformerlab_home }}/envs/general-uv/bin/python"
  register: courseware_transformerlab_general_uv
# One unconditional retry (no creates guard) when the env is still absent.
- name: Retry TransformerLab multiuser setup after source refresh
  shell: |
    set -euo pipefail
    ./src/install.sh multiuser_setup 2>&1 | tee "{{ courseware_logs_dir }}/transformerlab_multiuser_setup_retry.log"
  args:
    executable: /bin/bash
    chdir: "{{ courseware_transformerlab_home }}"
  when: not courseware_transformerlab_general_uv.stat.exists
# Deliberately re-registers the same variable with the post-retry state.
- name: Recheck TransformerLab general uv environment
  stat:
    path: "{{ courseware_transformerlab_home }}/envs/general-uv/bin/python"
  register: courseware_transformerlab_general_uv
- name: Mark TransformerLab multiuser setup complete
  file:
    path: "{{ courseware_transformerlab_home }}/.multiuser_setup_complete"
    state: touch
    mode: "0644"
  when: courseware_transformerlab_general_uv.stat.exists
- name: Fail if TransformerLab general uv environment is missing
  fail:
    msg: "TransformerLab multiuser setup completed without creating {{ courseware_transformerlab_home }}/envs/general-uv/bin/python."
  when: not courseware_transformerlab_general_uv.stat.exists
+43
View File
@@ -0,0 +1,43 @@
- name: Download Unsloth Studio installer
  get_url:
    url: "{{ courseware_unsloth_installer_url }}"
    dest: "{{ courseware_unsloth_installer_path }}"
    mode: "0755"
# block/rescue: run the installer under a hard timeout; on any failure,
# surface the log tail in a readable fail message instead of a raw error.
- name: Install Unsloth Studio
  block:
    # Idempotent via the .install_complete sentinel, which is only touched
    # after the installer exits successfully (set -e).
    - name: Run Unsloth Studio installer
      shell: |
        set -euo pipefail
        timeout "{{ courseware_unsloth_install_timeout_seconds }}" \
          bash "{{ courseware_unsloth_installer_path }}" --python "{{ courseware_unsloth_python_version }}" \
          > "{{ courseware_logs_dir }}/unsloth-install.log" 2>&1
        touch "{{ courseware_unsloth_home }}/.courseware-managed"
        touch "{{ courseware_unsloth_home }}/.install_complete"
      args:
        executable: /bin/bash
        creates: "{{ courseware_unsloth_home }}/.install_complete"
  rescue:
    # Best-effort log capture; must not fail so the guidance task runs.
    - name: Capture Unsloth installer log tail
      shell: |
        if [ -f "{{ courseware_logs_dir }}/unsloth-install.log" ]; then
          tail -n 80 "{{ courseware_logs_dir }}/unsloth-install.log"
        fi
      args:
        executable: /bin/bash
      register: courseware_unsloth_install_log_tail
      changed_when: false
      failed_when: false
    - name: Fail with Unsloth installer guidance
      fail:
        msg: |
          Unsloth Studio install failed or timed out.
          Review the full log at:
          {{ courseware_logs_dir }}/unsloth-install.log
          The installer is pinned to Python {{ courseware_unsloth_python_version }} to avoid slower, less predictable dependency resolution on Linux/WSL.
          Last log lines:
          {{ courseware_unsloth_install_log_tail.stdout | default('(no log output captured)') }}
@@ -0,0 +1,60 @@
diff --git a/src/app/labs/[slug]/page.tsx b/src/app/labs/[slug]/page.tsx
index f67308f..a6aac38 100644
--- a/src/app/labs/[slug]/page.tsx
+++ b/src/app/labs/[slug]/page.tsx
@@ -462,6 +462,19 @@ function markdownToHtml(markdown: string) {
return micromark(convertGfmTables(markdown), { allowDangerousHtml: true });
}
+function addNoReferrerToExternalImages(html: string) {
+ return html.replace(/<img\b([^>]*?)>/gi, (imageTag, rawAttrs: string) => {
+ const srcMatch = /\bsrc=(['"])(https?:\/\/[^"']+)\1/i.exec(rawAttrs);
+ if (!srcMatch || /\breferrerpolicy\s*=/i.test(rawAttrs)) return imageTag;
+
+ const trimmedAttrs = rawAttrs.trimEnd();
+ const isSelfClosing = trimmedAttrs.endsWith("/");
+ const attrs = isSelfClosing ? trimmedAttrs.slice(0, -1).trimEnd() : rawAttrs;
+
+ return `<img${attrs} referrerpolicy="no-referrer"${isSelfClosing ? " /" : ""}>`;
+ });
+}
+
export async function generateStaticParams() {
return getLabSummaries().map((lab) => ({ slug: lab.slug }));
}
@@ -503,14 +516,15 @@ export default async function LabPage({
stripOrdinals: breakoutStyle === "instruction-rails",
}),
);
- const htmlContent =
+ const htmlContent = addNoReferrerToExternalImages(
breakoutStyle === "none"
? baseHtml
: transformOutsideDetails(baseHtml, (safeHtml) =>
segmentStepSections(markExplicitInstructionElements(safeHtml, {
commandPills: breakoutStyle === "command-pills",
})),
- );
+ ),
+ );
return (
<main className="mx-auto w-full max-w-5xl px-6 py-10">
diff --git a/src/components/labs/LabContent.tsx b/src/components/labs/LabContent.tsx
index 7a7ce52..8778a23 100644
--- a/src/components/labs/LabContent.tsx
+++ b/src/components/labs/LabContent.tsx
@@ -277,7 +277,12 @@ export function LabContent({ className, html }: LabContentProps) {
>
<div className="lab-image-modal__surface" onClick={(event) => event.stopPropagation()}>
{/* eslint-disable-next-line @next/next/no-img-element */}
- <img className="lab-image-modal__image" src={zoomedImage.src} alt={zoomedImage.alt} />
+ <img
+ className="lab-image-modal__image"
+ src={zoomedImage.src}
+ alt={zoomedImage.alt}
+ referrerPolicy="no-referrer"
+ />
</div>
</div>
) : null}
+51
View File
@@ -0,0 +1,51 @@
# update: false leaves any local modifications (e.g. the patch below) alone
# on reruns.
- name: Clone lab wiki
  git:
    repo: "{{ courseware_wiki_repo }}"
    dest: "{{ courseware_wiki_repo_dir }}"
    update: false
# `git apply --reverse --check` succeeds (rc 0) only when the patch is
# already applied; rc drives both the apply task and the rebuild condition.
- name: Check whether wiki referrer policy patch is already applied
  command:
    argv:
      - git
      - apply
      - --reverse
      - --check
      - "{{ role_path }}/files/referrer-policy.patch"
  args:
    chdir: "{{ courseware_wiki_repo_dir }}"
  register: courseware_wiki_referrer_policy_patch
  changed_when: false
  failed_when: false
- name: Apply managed wiki referrer policy patch
  command:
    argv:
      - git
      - apply
      - "{{ role_path }}/files/referrer-policy.patch"
  args:
    chdir: "{{ courseware_wiki_repo_dir }}"
  when: courseware_wiki_referrer_policy_patch.rc != 0
# Idempotent via next's package.json marker inside node_modules.
- name: Install wiki dependencies with contained Node runtime
  command: npm install
  args:
    chdir: "{{ courseware_wiki_repo_dir }}"
    creates: "{{ courseware_wiki_repo_dir }}/node_modules/next/package.json"
  environment:
    PATH: "{{ courseware_node_runtime_bin_dir }}:{{ ansible_env.PATH }}"
- name: Stat wiki build output
  stat:
    path: "{{ courseware_wiki_repo_dir }}/.next/BUILD_ID"
  register: courseware_wiki_build
# Rebuild when there is no build yet OR when the patch was just applied in
# this run (check rc != 0 implies the sources changed since the last build).
- name: Build wiki for managed service startup
  command: npm run build
  args:
    chdir: "{{ courseware_wiki_repo_dir }}"
  environment:
    PATH: "{{ courseware_node_runtime_bin_dir }}:{{ ansible_env.PATH }}"
  when:
    - not courseware_wiki_build.stat.exists or courseware_wiki_referrer_policy_patch.rc != 0