Preload
This commit is contained in:
@@ -4,7 +4,7 @@ This project builds a student-friendly local lab environment for the courseware
|
||||
|
||||
- `./deploy-courseware.sh` installs and configures the environment, then starts every managed service.
|
||||
- `./destroy-courseware.sh` stops the managed services, uninstalls courseware-managed Ollama, and removes the project-owned lab state.
|
||||
- `./labctl` provides day-two controls such as `assets lab2`, `start`, `stop`, `status`, `urls`, `logs`, and `open kiln`.
|
||||
- `./labctl` provides day-two controls such as `assets lab2`, `ollama_models`, `start`, `stop`, `status`, `urls`, `logs`, and `open kiln`.
|
||||
|
||||
## What It Installs
|
||||
|
||||
@@ -28,6 +28,16 @@ Lab 1 is now provisioned directly by the installer:
|
||||
- The wiki serves same-host download links for both GGUFs through `/api/lab1/models/...`.
|
||||
- Lab 1 confidence visualization requires Ollama `0.12.11` or newer because it depends on logprobs.
|
||||
|
||||
## Lab 2 Defaults
|
||||
|
||||
`./labctl up` now pre-pulls the Lab 2 Gemma 4 E2B Ollama variants used by the wiki widget:
|
||||
|
||||
- `cajina/gemma4_e2b-q2_k_xl:v01`
|
||||
- `batiai/gemma4-e2b:q4`
|
||||
- `bjoernb/gemma4-e2b-fast:latest`
|
||||
|
||||
If you want to re-pull just those managed Ollama models later, run `./labctl ollama_models`.
|
||||
|
||||
## Supported Host Profiles
|
||||
|
||||
This build intentionally avoids the reference VM's hardware workarounds.
|
||||
@@ -126,6 +136,7 @@ The deployment will:
|
||||
- `./labctl up` installs the environment and then starts every managed service.
|
||||
- `./labctl versions` shows the pinned Netron version, minimum Ollama version, and Ansible runtime version used by this workspace.
|
||||
- `./labctl assets lab2` is a separate manual step that clones the base WhiteRabbitNeo repo into `assets/lab2/WhiteRabbitNeo-V3-7B` and downloads the supported `Q4_K_M`, `Q8_0`, and `IQ2_M` GGUFs into `assets/lab2/WhiteRabbitNeo_WhiteRabbitNeo-V3-7B-GGUF`.
|
||||
- `./labctl ollama_models` re-pulls the managed Lab 2 Gemma 4 E2B Ollama model set without rerunning the full installer.
|
||||
- `./labctl start core` starts only `ollama` and `open-webui`.
|
||||
- `./labctl start all` starts every managed web service.
|
||||
- `./labctl open kiln` launches the Kiln desktop app installed into the project state.
|
||||
|
||||
@@ -73,12 +73,15 @@ courseware_white_rabbit_variants:
|
||||
- ollama_model: "WhiteRabbitNeo-IQ2"
|
||||
quant: "IQ2_M"
|
||||
filename: "WhiteRabbitNeo_WhiteRabbitNeo-V3-7B-IQ2_M.gguf"
|
||||
courseware_ollama_models:
|
||||
- "llama3.2"
|
||||
- "qwen3.5:4b"
|
||||
- "gemma3n:e2b"
|
||||
courseware_optional_ollama_models:
|
||||
- "gemma3:12b-it-qat"
|
||||
courseware_lab2_ollama_models:
|
||||
- label: "Gemma 4 E2B Q2"
|
||||
value: "cajina/gemma4_e2b-q2_k_xl:v01"
|
||||
- label: "Gemma 4 E2B Q4"
|
||||
value: "batiai/gemma4-e2b:q4"
|
||||
- label: "Gemma 4 E2B Q8"
|
||||
value: "bjoernb/gemma4-e2b-fast:latest"
|
||||
courseware_ollama_models: "{{ courseware_lab2_ollama_models | map(attribute='value') | list }}"
|
||||
courseware_optional_ollama_models: []
|
||||
courseware_install_optional_heavy_models: false
|
||||
|
||||
courseware_wsl_cuda_pin_url: "https://developer.download.nvidia.com/compute/cuda/repos/wsl-ubuntu/x86_64/cuda-wsl-ubuntu.pin"
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
- open_webui
|
||||
- chunkviz
|
||||
- promptfoo
|
||||
- { role: ollama_models, tags: ["ollama_models"] }
|
||||
- wiki
|
||||
- kiln
|
||||
- unsloth
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
{
|
||||
"lab1NetronUrl": "http://{{ courseware_url_host }}:{{ courseware_ports.netron }}",
|
||||
"lab2OllamaUrl": "http://{{ courseware_url_host }}:{{ courseware_ports.ollama }}",
|
||||
"lab2OllamaModels": [
|
||||
{% for model in courseware_lab2_ollama_models %}
|
||||
{
|
||||
"label": "{{ model.label }}",
|
||||
"value": "{{ model.value }}"
|
||||
}{% if not loop.last %},{% endif %}
|
||||
{% endfor %}
|
||||
],
|
||||
"lab3TerminalUrl": "http://{{ courseware_url_host }}:{{ courseware_ports.wetty }}{{ courseware_wetty_base_path }}"
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@ usage() {
|
||||
Usage:
|
||||
./labctl up
|
||||
./labctl down
|
||||
./labctl ollama_models
|
||||
./labctl preflight
|
||||
./labctl versions
|
||||
./labctl assets lab2 [--refresh]
|
||||
@@ -82,6 +83,7 @@ WARNING: THIS SCRIPT WILL CONFIGURE YOUR ENVIRONMENT WITH THE FOLLOWING SOFTWARE
|
||||
- Unsloth Studio
|
||||
- Kiln Desktop
|
||||
- Course-specific support assets for lab 1, lab 2, and lab 4
|
||||
- Pre-pulled Lab 2 Ollama models for Q2, Q4, and Q8 Gemma 4 E2B
|
||||
- A pre-registered Lab 1 Ollama model (requires Ollama ${min_ollama}+)
|
||||
|
||||
IT IS RECOMMENDED TO RUN THIS IN AN ISOLATED ENVIRONMENT (Dedicated WSL, VM, etc.)
|
||||
@@ -106,6 +108,7 @@ WARNING: THIS SCRIPT WILL CONFIGURE YOUR ENVIRONMENT WITH THE FOLLOWING SOFTWARE
|
||||
- Unsloth Studio
|
||||
- Kiln Desktop
|
||||
- Course-specific support assets for lab 1, lab 2, and lab 4
|
||||
- Pre-pulled Lab 2 Ollama models for Q2, Q4, and Q8 Gemma 4 E2B
|
||||
- A pre-registered Lab 1 Ollama model (requires Ollama ${min_ollama}+)
|
||||
|
||||
IT IS RECOMMENDED TO RUN THIS IN AN ISOLATED ENVIRONMENT (Dedicated WSL, VM, etc.)
|
||||
@@ -536,6 +539,17 @@ handle_assets_command() {
|
||||
esac
|
||||
}
|
||||
|
||||
# Re-pull the managed Lab 2 Ollama model set without rerunning the full installer.
# Requires state/runtime.env (written by `./labctl up`) so the managed Ollama
# service is already configured; otherwise prints guidance and exits non-zero.
refresh_ollama_models() {
  # Guard: the installer must have run at least once before models can be pulled.
  if [ ! -f "$ROOT_DIR/state/runtime.env" ]; then
    printf '%s\n' 'Missing state/runtime.env. Run ./labctl up first so the managed Ollama service is configured before pulling models.' >&2
    exit 1
  fi

  # Re-run only the ollama_models role from the install playbook.
  run_playbook up.yml --tags ollama_models
}
|
||||
|
||||
main() {
|
||||
local cmd=${1:-}
|
||||
shift || true
|
||||
@@ -546,6 +560,9 @@ main() {
|
||||
run_playbook up.yml
|
||||
run_project_script "$ROOT_DIR/scripts/service_manager.sh" start all
|
||||
;;
|
||||
ollama_models)
|
||||
refresh_ollama_models
|
||||
;;
|
||||
down)
|
||||
run_project_script "$ROOT_DIR/scripts/service_manager.sh" stop all || true
|
||||
run_playbook down.yml
|
||||
|
||||
Reference in New Issue
Block a user