diff --git a/README.md b/README.md index d1a6581..518504b 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ This project builds a student-friendly local lab environment for the courseware - `./deploy-courseware.sh` installs and configures the environment, then starts every managed service. - `./destroy-courseware.sh` stops the managed services, uninstalls courseware-managed Ollama, and removes the project-owned lab state. -- `./labctl` provides day-two controls such as `assets lab2`, `ollama_models`, `start`, `stop`, `status`, `urls`, `logs`, and `open kiln`. +- `./labctl` provides day-two controls such as `assets lab2`, `ollama_models`, `update_wiki`, `start`, `stop`, `status`, `urls`, `logs`, and `open kiln`. ## What It Installs @@ -32,9 +32,8 @@ Lab 1 is now provisioned directly by the installer: `./labctl up` now pre-pulls the Lab 2 Gemma 4 E2B Ollama variants used by the wiki widget: -- `cajina/gemma4_e2b-q2_k_xl:v01` - `batiai/gemma4-e2b:q4` -- `bjoernb/gemma4-e2b-fast:latest` +- `batiai/gemma4-e2b:q6` If you want to re-pull just those managed Ollama models later, run `./labctl ollama_models`. @@ -137,6 +136,7 @@ The deployment will: - `./labctl versions` shows the pinned Netron version, minimum Ollama version, and Ansible runtime version used by this workspace. - `./labctl assets lab2` is a separate manual step that clones the base WhiteRabbitNeo repo into `assets/lab2/WhiteRabbitNeo-V3-7B` and downloads the supported `Q4_K_M`, `Q8_0`, and `IQ2_M` GGUFs into `assets/lab2/WhiteRabbitNeo_WhiteRabbitNeo-V3-7B-GGUF`. - `./labctl ollama_models` re-pulls the managed Lab 2 Gemma 4 E2B Ollama model set without rerunning the full installer. +- `./labctl update_wiki` hard-resets the managed wiki checkout to the remote latest, rebuilds it, and restarts only the managed wiki service on port `80`. - `./labctl start core` starts only `ollama` and `open-webui`. - `./labctl start all` starts every managed web service. 
- `./labctl open kiln` launches the Kiln desktop app installed into the project state. diff --git a/ansible/group_vars/all.yml b/ansible/group_vars/all.yml index 54fb3f7..cabb5d0 100644 --- a/ansible/group_vars/all.yml +++ b/ansible/group_vars/all.yml @@ -74,12 +74,10 @@ courseware_white_rabbit_variants: quant: "IQ2_M" filename: "WhiteRabbitNeo_WhiteRabbitNeo-V3-7B-IQ2_M.gguf" courseware_lab2_ollama_models: - - label: "Gemma 4 E2B Q2" - value: "cajina/gemma4_e2b-q2_k_xl:v01" - label: "Gemma 4 E2B Q4" value: "batiai/gemma4-e2b:q4" - - label: "Gemma 4 E2B Q8" - value: "bjoernb/gemma4-e2b-fast:latest" + - label: "Gemma 4 E2B Q6" + value: "batiai/gemma4-e2b:q6" courseware_ollama_models: "{{ courseware_lab2_ollama_models | map(attribute='value') | list }}" courseware_optional_ollama_models: [] courseware_install_optional_heavy_models: false diff --git a/ansible/playbooks/up.yml b/ansible/playbooks/up.yml index 732919a..6861cbf 100644 --- a/ansible/playbooks/up.yml +++ b/ansible/playbooks/up.yml @@ -16,6 +16,6 @@ - chunkviz - promptfoo - { role: ollama_models, tags: ["ollama_models"] } - - wiki + - { role: wiki, tags: ["wiki"] } - kiln - unsloth diff --git a/ansible/roles/wiki/tasks/main.yml b/ansible/roles/wiki/tasks/main.yml index be1aa3a..e2a492c 100644 --- a/ansible/roles/wiki/tasks/main.yml +++ b/ansible/roles/wiki/tasks/main.yml @@ -2,7 +2,9 @@ git: repo: "{{ courseware_wiki_repo }}" dest: "{{ courseware_wiki_repo_dir }}" - update: false + update: "{{ courseware_wiki_force_update | default(false) | bool }}" + force: "{{ courseware_wiki_force_update | default(false) | bool }}" + register: courseware_wiki_repo_sync - name: Check whether wiki referrer policy patch is already applied command: @@ -27,14 +29,21 @@ args: chdir: "{{ courseware_wiki_repo_dir }}" when: courseware_wiki_referrer_policy_patch.rc != 0 + register: courseware_wiki_referrer_policy_apply + +- name: Stat wiki Next dependency + stat: + path: "{{ courseware_wiki_repo_dir 
}}/node_modules/next/package.json" + register: courseware_wiki_next_dependency - name: Install wiki dependencies with contained Node runtime command: npm install args: chdir: "{{ courseware_wiki_repo_dir }}" - creates: "{{ courseware_wiki_repo_dir }}/node_modules/next/package.json" environment: PATH: "{{ courseware_node_runtime_bin_dir }}:{{ ansible_env.PATH }}" + when: + - not courseware_wiki_next_dependency.stat.exists or courseware_wiki_repo_sync.changed - name: Render wiki runtime config template: @@ -54,4 +63,4 @@ environment: PATH: "{{ courseware_node_runtime_bin_dir }}:{{ ansible_env.PATH }}" when: - - not courseware_wiki_build.stat.exists or courseware_wiki_referrer_policy_patch.rc != 0 + - not courseware_wiki_build.stat.exists or courseware_wiki_repo_sync.changed or courseware_wiki_referrer_policy_patch.rc != 0 diff --git a/labctl b/labctl index 2262b67..bfbbd52 100755 --- a/labctl +++ b/labctl @@ -18,6 +18,7 @@ usage() { Usage: ./labctl up ./labctl down + ./labctl update_wiki ./labctl ollama_models ./labctl preflight ./labctl versions @@ -83,7 +84,7 @@ WARNING: THIS SCRIPT WILL CONFIGURE YOUR ENVIRONMENT WILL THE FOLLOWING SOFTWARE - Unsloth Studio - Kiln Desktop - Course-specific support assets for lab 1, lab 2, and lab 4 -- Pre-pulled Lab 2 Ollama models for Q2, Q4, and Q8 Gemma 4 E2B +- Pre-pulled Lab 2 Ollama models for Q4 and Q6 Gemma 4 E2B - A pre-registered Lab 1 Ollama model (requires Ollama ${min_ollama}+) IT IS RECOMMENDED TO RUN THIS IN AN ISLOATED ENVIRONMENT (Dedicated WSL, VM, etc.) 
@@ -108,7 +109,7 @@ WARNING: THIS SCRIPT WILL CONFIGURE YOUR ENVIRONMENT WILL THE FOLLOWING SOFTWARE - Unsloth Studio - Kiln Desktop - Course-specific support assets for lab 1, lab 2, and lab 4 -- Pre-pulled Lab 2 Ollama models for Q2, Q4, and Q8 Gemma 4 E2B +- Pre-pulled Lab 2 Ollama models for Q4 and Q6 Gemma 4 E2B - A pre-registered Lab 1 Ollama model (requires Ollama ${min_ollama}+) IT IS RECOMMENDED TO RUN THIS IN AN ISLOATED ENVIRONMENT (Dedicated WSL, VM, etc.) @@ -524,6 +525,15 @@ require_arg() { fi } +require_managed_runtime() { + if [ ! -f "$ROOT_DIR/state/runtime.env" ]; then + cat <<'EOF' >&2 +Missing state/runtime.env. Run ./labctl up first so the managed environment exists before using this command. +EOF + exit 1 + fi +} + handle_assets_command() { local asset_group=${1:-} shift || true @@ -540,16 +550,16 @@ handle_assets_command() { } refresh_ollama_models() { - if [ ! -f "$ROOT_DIR/state/runtime.env" ]; then - cat <<'EOF' >&2 -Missing state/runtime.env. Run ./labctl up first so the managed Ollama service is configured before pulling models. 
-EOF - exit 1 - fi - + require_managed_runtime run_playbook up.yml --tags ollama_models } +update_wiki() { + require_managed_runtime + run_playbook up.yml --tags wiki -e "courseware_wiki_force_update=true" + run_project_script "$ROOT_DIR/scripts/service_manager.sh" restart-wiki +} + main() { local cmd=${1:-} shift || true @@ -563,6 +573,9 @@ main() { ollama_models) refresh_ollama_models ;; + update_wiki) + update_wiki + ;; down) run_project_script "$ROOT_DIR/scripts/service_manager.sh" stop all || true run_playbook down.yml diff --git a/scripts/common.sh b/scripts/common.sh index 7dc4901..33b68c8 100644 --- a/scripts/common.sh +++ b/scripts/common.sh @@ -128,7 +128,7 @@ service_command() { "$COURSEWARE_CHUNKVIZ_PORT" ;; embedding-atlas) - printf 'exec "%s/bin/embedding-atlas" "%s" --text "Scenario" --host %s --port %s' \ + printf 'exec "%s/bin/embedding-atlas" "%s" --text "Scenario" --host %s --port %s --no-auto-port' \ "$EMBEDDING_ATLAS_VENV" \ "$TTPS_DATASET_PATH" \ "$COURSEWARE_BIND_HOST" \ diff --git a/scripts/service_manager.sh b/scripts/service_manager.sh index 8851a35..bea7810 100755 --- a/scripts/service_manager.sh +++ b/scripts/service_manager.sh @@ -112,6 +112,18 @@ is_running() { has_live_pid "$service" || service_ready "$service" } +service_startup_attempts() { + case "$1" in + embedding-atlas) + # The first launch can be noticeably slower on cold environments. + printf '%s\n' 180 + ;; + *) + printf '%s\n' 60 + ;; + esac +} + service_ready() { local service=$1 @@ -142,6 +154,22 @@ service_listener_pids() { | sort -u } +service_port_has_listener() { + local service=$1 + local port + + port=$(service_port "$service") || return 1 + ss -ltnH "( sport = :$port )" 2>/dev/null | grep -q . 
+} + +service_listener_details() { +  local service=$1 +  local port + +  port=$(service_port "$service") || return 0 +  ss -ltnp "( sport = :$port )" 2>/dev/null || true +} + kill_pid_tree() { local signal=$1 local pid=$2 @@ -177,6 +205,7 @@ start_one() { local pid_file local attempt local pid_grace_attempts=5 + local startup_attempts if [ "$service" = "ollama" ] || [ "$service" = "wiki" ]; then assert_ollama_logprobs_support @@ -204,6 +233,7 @@ esac cmd=$(service_command "$service") + startup_attempts=$(service_startup_attempts "$service") log_file=$(service_log_file "$service") pid_file=$(service_pid_file "$service") @@ -219,7 +249,7 @@ fi echo $! >"$pid_file" - for attempt in $(seq 1 60); do + for attempt in $(seq 1 "$startup_attempts"); do if service_ready "$service"; then echo "started $service" return 0 @@ -286,6 +316,28 @@ stop_one() { exit 1 } +restart_managed_wiki() { + local wiki_log_file + wiki_log_file=$(service_log_file wiki) + + if has_live_pid wiki; then + stop_one wiki + fi + + if service_port_has_listener wiki; then + cat <<EOF >&2 +Cannot restart wiki because port $(service_port wiki) is already in use by a non-managed listener. +Listener details: +$(service_listener_details wiki) +Leave that process alone or move it off port $(service_port wiki), then rerun ./labctl update_wiki. +Wiki log: $wiki_log_file +EOF + exit 1 + fi + + start_one wiki +} + status_one() { local service=$1 @@ -388,6 +440,9 @@ main() { fi show_logs "$1" ;; + restart-wiki) + restart_managed_wiki + ;; *) echo "Unknown command: $cmd" >&2 exit 1