Add wiki refresh command and service updates

This commit is contained in:
OpenCode
2026-04-24 10:02:39 -06:00
parent 78676ece59
commit 7360cd040a
7 changed files with 97 additions and 22 deletions
+3 -3
View File
@@ -4,7 +4,7 @@ This project builds a student-friendly local lab environment for the courseware
- `./deploy-courseware.sh` installs and configures the environment, then starts every managed service. - `./deploy-courseware.sh` installs and configures the environment, then starts every managed service.
- `./destroy-courseware.sh` stops the managed services, uninstalls courseware-managed Ollama, and removes the project-owned lab state. - `./destroy-courseware.sh` stops the managed services, uninstalls courseware-managed Ollama, and removes the project-owned lab state.
- `./labctl` provides day-two controls such as `assets lab2`, `ollama_models`, `start`, `stop`, `status`, `urls`, `logs`, and `open kiln`. - `./labctl` provides day-two controls such as `assets lab2`, `ollama_models`, `update_wiki`, `start`, `stop`, `status`, `urls`, `logs`, and `open kiln`.
## What It Installs ## What It Installs
@@ -32,9 +32,8 @@ Lab 1 is now provisioned directly by the installer:
`./labctl up` now pre-pulls the Lab 2 Gemma 4 E2B Ollama variants used by the wiki widget: `./labctl up` now pre-pulls the Lab 2 Gemma 4 E2B Ollama variants used by the wiki widget:
- `cajina/gemma4_e2b-q2_k_xl:v01`
- `batiai/gemma4-e2b:q4` - `batiai/gemma4-e2b:q4`
- `bjoernb/gemma4-e2b-fast:latest` - `batiai/gemma4-e2b:q6`
If you want to re-pull just those managed Ollama models later, run `./labctl ollama_models`. If you want to re-pull just those managed Ollama models later, run `./labctl ollama_models`.
@@ -137,6 +136,7 @@ The deployment will:
- `./labctl versions` shows the pinned Netron version, minimum Ollama version, and Ansible runtime version used by this workspace. - `./labctl versions` shows the pinned Netron version, minimum Ollama version, and Ansible runtime version used by this workspace.
- `./labctl assets lab2` is a separate manual step that clones the base WhiteRabbitNeo repo into `assets/lab2/WhiteRabbitNeo-V3-7B` and downloads the supported `Q4_K_M`, `Q8_0`, and `IQ2_M` GGUFs into `assets/lab2/WhiteRabbitNeo_WhiteRabbitNeo-V3-7B-GGUF`. - `./labctl assets lab2` is a separate manual step that clones the base WhiteRabbitNeo repo into `assets/lab2/WhiteRabbitNeo-V3-7B` and downloads the supported `Q4_K_M`, `Q8_0`, and `IQ2_M` GGUFs into `assets/lab2/WhiteRabbitNeo_WhiteRabbitNeo-V3-7B-GGUF`.
- `./labctl ollama_models` re-pulls the managed Lab 2 Gemma 4 E2B Ollama model set without rerunning the full installer. - `./labctl ollama_models` re-pulls the managed Lab 2 Gemma 4 E2B Ollama model set without rerunning the full installer.
- `./labctl update_wiki` hard-resets the managed wiki checkout to the remote latest, rebuilds it, and restarts only the managed wiki service on port `80`.
- `./labctl start core` starts only `ollama` and `open-webui`. - `./labctl start core` starts only `ollama` and `open-webui`.
- `./labctl start all` starts every managed web service. - `./labctl start all` starts every managed web service.
- `./labctl open kiln` launches the Kiln desktop app installed into the project state. - `./labctl open kiln` launches the Kiln desktop app installed into the project state.
+2 -4
View File
@@ -74,12 +74,10 @@ courseware_white_rabbit_variants:
quant: "IQ2_M" quant: "IQ2_M"
filename: "WhiteRabbitNeo_WhiteRabbitNeo-V3-7B-IQ2_M.gguf" filename: "WhiteRabbitNeo_WhiteRabbitNeo-V3-7B-IQ2_M.gguf"
courseware_lab2_ollama_models: courseware_lab2_ollama_models:
- label: "Gemma 4 E2B Q2"
value: "cajina/gemma4_e2b-q2_k_xl:v01"
- label: "Gemma 4 E2B Q4" - label: "Gemma 4 E2B Q4"
value: "batiai/gemma4-e2b:q4" value: "batiai/gemma4-e2b:q4"
- label: "Gemma 4 E2B Q8" - label: "Gemma 4 E2B Q6"
value: "bjoernb/gemma4-e2b-fast:latest" value: "batiai/gemma4-e2b:q6"
courseware_ollama_models: "{{ courseware_lab2_ollama_models | map(attribute='value') | list }}" courseware_ollama_models: "{{ courseware_lab2_ollama_models | map(attribute='value') | list }}"
courseware_optional_ollama_models: [] courseware_optional_ollama_models: []
courseware_install_optional_heavy_models: false courseware_install_optional_heavy_models: false
+1 -1
View File
@@ -16,6 +16,6 @@
- chunkviz - chunkviz
- promptfoo - promptfoo
- { role: ollama_models, tags: ["ollama_models"] } - { role: ollama_models, tags: ["ollama_models"] }
- wiki - { role: wiki, tags: ["wiki"] }
- kiln - kiln
- unsloth - unsloth
+12 -3
View File
@@ -2,7 +2,9 @@
git: git:
repo: "{{ courseware_wiki_repo }}" repo: "{{ courseware_wiki_repo }}"
dest: "{{ courseware_wiki_repo_dir }}" dest: "{{ courseware_wiki_repo_dir }}"
update: false update: "{{ courseware_wiki_force_update | default(false) | bool }}"
force: "{{ courseware_wiki_force_update | default(false) | bool }}"
register: courseware_wiki_repo_sync
- name: Check whether wiki referrer policy patch is already applied - name: Check whether wiki referrer policy patch is already applied
command: command:
@@ -27,14 +29,21 @@
args: args:
chdir: "{{ courseware_wiki_repo_dir }}" chdir: "{{ courseware_wiki_repo_dir }}"
when: courseware_wiki_referrer_policy_patch.rc != 0 when: courseware_wiki_referrer_policy_patch.rc != 0
register: courseware_wiki_referrer_policy_apply
- name: Stat wiki Next dependency
stat:
path: "{{ courseware_wiki_repo_dir }}/node_modules/next/package.json"
register: courseware_wiki_next_dependency
- name: Install wiki dependencies with contained Node runtime - name: Install wiki dependencies with contained Node runtime
command: npm install command: npm install
args: args:
chdir: "{{ courseware_wiki_repo_dir }}" chdir: "{{ courseware_wiki_repo_dir }}"
creates: "{{ courseware_wiki_repo_dir }}/node_modules/next/package.json"
environment: environment:
PATH: "{{ courseware_node_runtime_bin_dir }}:{{ ansible_env.PATH }}" PATH: "{{ courseware_node_runtime_bin_dir }}:{{ ansible_env.PATH }}"
when:
- not courseware_wiki_next_dependency.stat.exists or courseware_wiki_repo_sync.changed
- name: Render wiki runtime config - name: Render wiki runtime config
template: template:
@@ -54,4 +63,4 @@
environment: environment:
PATH: "{{ courseware_node_runtime_bin_dir }}:{{ ansible_env.PATH }}" PATH: "{{ courseware_node_runtime_bin_dir }}:{{ ansible_env.PATH }}"
when: when:
- not courseware_wiki_build.stat.exists or courseware_wiki_referrer_policy_patch.rc != 0 - not courseware_wiki_build.stat.exists or courseware_wiki_repo_sync.changed or courseware_wiki_referrer_policy_patch.rc != 0
+22 -9
View File
@@ -18,6 +18,7 @@ usage() {
Usage: Usage:
./labctl up ./labctl up
./labctl down ./labctl down
./labctl update_wiki
./labctl ollama_models ./labctl ollama_models
./labctl preflight ./labctl preflight
./labctl versions ./labctl versions
@@ -83,7 +84,7 @@ WARNING: THIS SCRIPT WILL CONFIGURE YOUR ENVIRONMENT WITH THE FOLLOWING SOFTWARE
- Unsloth Studio - Unsloth Studio
- Kiln Desktop - Kiln Desktop
- Course-specific support assets for lab 1, lab 2, and lab 4 - Course-specific support assets for lab 1, lab 2, and lab 4
- Pre-pulled Lab 2 Ollama models for Q2, Q4, and Q8 Gemma 4 E2B - Pre-pulled Lab 2 Ollama models for Q4 and Q6 Gemma 4 E2B
- A pre-registered Lab 1 Ollama model (requires Ollama ${min_ollama}+) - A pre-registered Lab 1 Ollama model (requires Ollama ${min_ollama}+)
IT IS RECOMMENDED TO RUN THIS IN AN ISOLATED ENVIRONMENT (Dedicated WSL, VM, etc.) IT IS RECOMMENDED TO RUN THIS IN AN ISOLATED ENVIRONMENT (Dedicated WSL, VM, etc.)
@@ -108,7 +109,7 @@ WARNING: THIS SCRIPT WILL CONFIGURE YOUR ENVIRONMENT WITH THE FOLLOWING SOFTWARE
- Unsloth Studio - Unsloth Studio
- Kiln Desktop - Kiln Desktop
- Course-specific support assets for lab 1, lab 2, and lab 4 - Course-specific support assets for lab 1, lab 2, and lab 4
- Pre-pulled Lab 2 Ollama models for Q2, Q4, and Q8 Gemma 4 E2B - Pre-pulled Lab 2 Ollama models for Q4 and Q6 Gemma 4 E2B
- A pre-registered Lab 1 Ollama model (requires Ollama ${min_ollama}+) - A pre-registered Lab 1 Ollama model (requires Ollama ${min_ollama}+)
IT IS RECOMMENDED TO RUN THIS IN AN ISOLATED ENVIRONMENT (Dedicated WSL, VM, etc.) IT IS RECOMMENDED TO RUN THIS IN AN ISOLATED ENVIRONMENT (Dedicated WSL, VM, etc.)
@@ -524,6 +525,15 @@ require_arg() {
fi fi
} }
# Guard for day-two commands: the managed environment must already exist.
# Succeeds silently when state/runtime.env is present; otherwise prints
# guidance on stderr and exits 1.
require_managed_runtime() {
  [ -f "$ROOT_DIR/state/runtime.env" ] && return 0
  cat >&2 <<'EOF'
Missing state/runtime.env. Run ./labctl up first so the managed environment exists before using this command.
EOF
  exit 1
}
handle_assets_command() { handle_assets_command() {
local asset_group=${1:-} local asset_group=${1:-}
shift || true shift || true
@@ -540,16 +550,16 @@ handle_assets_command() {
} }
refresh_ollama_models() { refresh_ollama_models() {
if [ ! -f "$ROOT_DIR/state/runtime.env" ]; then require_managed_runtime
cat <<'EOF' >&2
Missing state/runtime.env. Run ./labctl up first so the managed Ollama service is configured before pulling models.
EOF
exit 1
fi
run_playbook up.yml --tags ollama_models run_playbook up.yml --tags ollama_models
} }
# Refresh the managed wiki checkout and restart only the wiki service.
# Requires an existing managed runtime; forces the wiki-tagged plays to
# sync/rebuild, then bounces the wiki via the service manager.
update_wiki() {
  local manager="$ROOT_DIR/scripts/service_manager.sh"
  require_managed_runtime
  run_playbook up.yml --tags wiki -e "courseware_wiki_force_update=true"
  run_project_script "$manager" restart-wiki
}
main() { main() {
local cmd=${1:-} local cmd=${1:-}
shift || true shift || true
@@ -563,6 +573,9 @@ main() {
ollama_models) ollama_models)
refresh_ollama_models refresh_ollama_models
;; ;;
update_wiki)
update_wiki
;;
down) down)
run_project_script "$ROOT_DIR/scripts/service_manager.sh" stop all || true run_project_script "$ROOT_DIR/scripts/service_manager.sh" stop all || true
run_playbook down.yml run_playbook down.yml
+1 -1
View File
@@ -128,7 +128,7 @@ service_command() {
"$COURSEWARE_CHUNKVIZ_PORT" "$COURSEWARE_CHUNKVIZ_PORT"
;; ;;
embedding-atlas) embedding-atlas)
printf 'exec "%s/bin/embedding-atlas" "%s" --text "Scenario" --host %s --port %s' \ printf 'exec "%s/bin/embedding-atlas" "%s" --text "Scenario" --host %s --port %s --no-auto-port' \
"$EMBEDDING_ATLAS_VENV" \ "$EMBEDDING_ATLAS_VENV" \
"$TTPS_DATASET_PATH" \ "$TTPS_DATASET_PATH" \
"$COURSEWARE_BIND_HOST" \ "$COURSEWARE_BIND_HOST" \
+56 -1
View File
@@ -112,6 +112,18 @@ is_running() {
has_live_pid "$service" || service_ready "$service" has_live_pid "$service" || service_ready "$service"
} }
# Print the number of readiness-poll attempts start_one should allow
# for the given service. embedding-atlas gets an extended budget; all
# other services use the default of 60 attempts.
service_startup_attempts() {
  local svc=$1
  local attempts=60
  # The first launch can be noticeably slower on cold environments.
  if [ "$svc" = "embedding-atlas" ]; then
    attempts=180
  fi
  printf '%s\n' "$attempts"
}
service_ready() { service_ready() {
local service=$1 local service=$1
@@ -142,6 +154,22 @@ service_listener_pids() {
| sort -u | sort -u
} }
# Return 0 when any TCP socket is listening on the service's managed port.
# Returns non-zero when the port cannot be resolved or nothing is bound.
service_port_has_listener() {
  local svc=$1
  local bound_port
  bound_port=$(service_port "$svc") || return 1
  [ -n "$(ss -ltnH "( sport = :$bound_port )" 2>/dev/null)" ]
}
# Print verbose listener info (ss -ltnp) for the service's port, for use
# in diagnostics. Always returns 0; prints nothing when the port is
# unknown or ss yields no output.
service_listener_details() {
  local svc=$1
  local bound_port
  if ! bound_port=$(service_port "$svc"); then
    return 0
  fi
  ss -ltnp "( sport = :$bound_port )" 2>/dev/null
  return 0
}
kill_pid_tree() { kill_pid_tree() {
local signal=$1 local signal=$1
local pid=$2 local pid=$2
@@ -177,6 +205,7 @@ start_one() {
local pid_file local pid_file
local attempt local attempt
local pid_grace_attempts=5 local pid_grace_attempts=5
local startup_attempts
if [ "$service" = "ollama" ] || [ "$service" = "wiki" ]; then if [ "$service" = "ollama" ] || [ "$service" = "wiki" ]; then
assert_ollama_logprobs_support assert_ollama_logprobs_support
@@ -204,6 +233,7 @@ start_one() {
esac esac
cmd=$(service_command "$service") cmd=$(service_command "$service")
startup_attempts=$(service_startup_attempts "$service")
log_file=$(service_log_file "$service") log_file=$(service_log_file "$service")
pid_file=$(service_pid_file "$service") pid_file=$(service_pid_file "$service")
@@ -219,7 +249,7 @@ start_one() {
fi fi
echo $! >"$pid_file" echo $! >"$pid_file"
for attempt in $(seq 1 60); do for attempt in $(seq 1 "$startup_attempts"); do
if service_ready "$service"; then if service_ready "$service"; then
echo "started $service" echo "started $service"
return 0 return 0
@@ -286,6 +316,28 @@ stop_one() {
exit 1 exit 1
} }
# Restart only the managed wiki service, refusing to fight a foreign listener.
restart_managed_wiki() {
local wiki_log_file
wiki_log_file=$(service_log_file wiki)
# Stop our own wiki process first, if one is tracked and alive.
if has_live_pid wiki; then
stop_one wiki
fi
# If something is still bound to the wiki port after our stop, it is not
# ours to kill; explain on stderr and bail instead of racing an unknown
# process for the port.
if service_port_has_listener wiki; then
cat <<EOF >&2
Cannot restart wiki because port $(service_port wiki) is already in use by a non-managed listener.
Listener details:
$(service_listener_details wiki)
Leave that process alone or move it off port $(service_port wiki), then rerun ./labctl update_wiki.
Wiki log: $wiki_log_file
EOF
exit 1
fi
start_one wiki
}
status_one() { status_one() {
local service=$1 local service=$1
@@ -388,6 +440,9 @@ main() {
fi fi
show_logs "$1" show_logs "$1"
;; ;;
restart-wiki)
restart_managed_wiki
;;
*) *)
echo "Unknown command: $cmd" >&2 echo "Unknown command: $cmd" >&2
exit 1 exit 1