Compare commits
4 commits: 7360cd040a...2080_super
Commit SHA1s: 86a5df4681, e95ee9c938, fe568c17cd, e915d87ec6
@@ -23,15 +23,16 @@ This project builds a student-friendly local lab environment for the courseware
 
 Lab 1 is now provisioned directly by the installer:
 
-- The `Qwen3-0.6B-Q8_0.gguf` and `Llama-3.2-1B.Q4_K_M.gguf` files are mirrored into `state/models/lab1/`.
+- The `Llama-3.2-1B.Q4_K_M.gguf` file is mirrored into `state/models/lab1/`.
-- The Qwen GGUF is pre-registered in Ollama as `lab1-qwen3-0.6b-q8_0`.
+- The Lab 1 confidence widget uses the pre-pulled Gemma 4 E2B Q4 Ollama model, `batiai/gemma4-e2b:q4`.
-- The wiki serves same-host download links for both GGUFs through `/api/lab1/models/...`.
+- The wiki serves a same-host download link for the Llama GGUF through `/api/lab1/models/...`.
 - Lab 1 confidence visualization requires Ollama `0.12.11` or newer because it depends on logprobs.
 
 ## Lab 2 Defaults
 
-`./labctl up` now pre-pulls the Lab 2 Gemma 4 E2B Ollama variants used by the wiki widget:
+`./labctl up` now pre-pulls the Gemma 4 E2B Ollama variants used by the wiki widgets:
 
+- `gemma4:e2b-it-q8_0`
 - `batiai/gemma4-e2b:q4`
 - `batiai/gemma4-e2b:q6`
 
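A quick way to confirm the pre-pull worked is to ask the local Ollama registry for the Gemma variants after `./labctl up` finishes (a minimal sketch; output formatting varies by Ollama version):

    ollama list | grep -i gemma4
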
@@ -39,15 +40,13 @@ If you want to re-pull just those managed Ollama models later, run `./labctl oll
 
 ## Supported Host Profiles
 
-This build intentionally avoids the reference VM's hardware workarounds.
+This build is the Linux/WSL variant of LLM Labs Local. If you are deploying on Apple Silicon macOS, use the sibling `LLM-Labs-Local-Mac` project instead.
 
-- macOS: Apple Silicon only, with at least 16 GB unified memory.
 - Native Debian/Ubuntu: Debian-family Linux with an NVIDIA GPU visible to `nvidia-smi` and at least 8 GB VRAM.
 - WSL: Debian/Ubuntu-family Linux running under WSL, with the NVIDIA GPU exposed into the distro.
 
 The launcher and Ansible preflight classify the host dynamically and apply different setup behavior for:
 
-- `macos`
 - `native-debian-ubuntu`
 - `wsl`
 
@@ -97,12 +96,12 @@ If CUDA is already mounted or preinstalled outside `PATH`, the installer detects
 
 - The default deployment is centered on Ollama-backed local inference and browser-based tools such as Netron and the wiki.
 - Netron is installed into a managed Python virtual environment and served locally instead of being provisioned as a desktop package.
-- Lab 1 model downloads are mirrored locally during `./labctl up`, so students do not have to fetch them manually from the original source.
+- Lab 1's Llama GGUF download is mirrored locally during `./labctl up`, so students do not have to fetch it manually from the original source.
 - WhiteRabbitNeo assets remain a separate Lab 2 flow and are still handled outside the default `./labctl up` run.
 - Run `./labctl assets lab2` when you want to populate repo-local Lab 2 assets in `assets/lab2/` from Hugging Face.
 - After base setup, run `state/lab2/download_whiterabbitneo-gguf.sh` to fetch only the `Q4_K_M`, `Q8_0`, and `IQ2_M` files from `bartowski/WhiteRabbitNeo_WhiteRabbitNeo-V3-7B-GGUF` and register local Ollama models `WhiteRabbitNeo`, `WhiteRabbitNeo-Q4`, `WhiteRabbitNeo-Q8`, and `WhiteRabbitNeo-IQ2`.
 - Unsloth homes are redirected into this project's `state/` tree via symlinks.
-- Managed web services bind for access from both Linux and the Windows side of WSL, while `labctl urls` still reports localhost-friendly URLs.
+- Managed web services bind on all interfaces for headless LAN/VPN access. `labctl urls` reports the detected LAN IP by default; set `COURSEWARE_URL_HOST=<host-or-ip>` before `./labctl up` to advertise a specific VPN DNS name or address.
 - The local Ansible bootstrap in `.venv-ansible/` is machine-specific and will be recreated automatically if the folder is copied between hosts.
 - `llama.cpp` uses a conservative, memory-aware build parallelism setting instead of an unbounded `-j` build, which avoids OOM failures on smaller Linux and WSL hosts.
 
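A minimal usage sketch of that override, with `lab.vpn.example` standing in for whatever VPN DNS name or address you actually use:

    COURSEWARE_URL_HOST=lab.vpn.example ./labctl up
    ./labctl urls
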
@@ -110,25 +109,25 @@ If CUDA is already mounted or preinstalled outside `PATH`, the installer detects
 
 After `./deploy-courseware.sh`, run `./labctl urls`.
 
-Default endpoints:
+Default endpoints use the detected host LAN IP:
 
-- Ollama API: `http://127.0.0.1:11434`
+- Ollama API: `http://<host-lan-ip>:11434`
-- Open WebUI: `http://127.0.0.1:8080`
+- Open WebUI: `http://<host-lan-ip>:8080`
-- Netron: `http://127.0.0.1:8338`
+- Netron: `http://<host-lan-ip>:8338`
-- ChunkViz: `http://127.0.0.1:3001`
+- ChunkViz: `http://<host-lan-ip>:3001`
-- Embedding Atlas: `http://127.0.0.1:5055`
+- Embedding Atlas: `http://<host-lan-ip>:5055`
-- Unsloth Studio: `http://127.0.0.1:8888`
+- Unsloth Studio: `http://<host-lan-ip>:8888`
-- Promptfoo UI: `http://127.0.0.1:15500`
+- Promptfoo UI: `http://<host-lan-ip>:15500`
-- Wiki: `http://127.0.0.1:80`
+- Wiki: `http://<host-lan-ip>:80`
-- Lab 3 Terminal: `http://127.0.0.1:7681/wetty`
+- Lab 3 Terminal: `http://<host-lan-ip>:7681/wetty`
 
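To verify reachability from another machine on the LAN/VPN, the Ollama endpoint can be probed directly (a sketch; substitute the address printed by `./labctl urls`):

    curl http://<host-lan-ip>:11434/api/version
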
 ## Lab 3 Browser Terminal
 
 The deployment will:
 
-- bind `sshd` to `127.0.0.1:22` only
+- bind `sshd` to `0.0.0.0:22` so regular SSH clients can connect over the LAN/VPN
-- install WeTTY and expose it at `http://127.0.0.1:7681/wetty`
+- install WeTTY and expose it at `http://<host-lan-ip>:7681/wetty`
-- leave login identity management to the host, so any existing local account with password-based SSH access can sign in through the browser terminal
+- leave login identity management to the host, so any existing local account with password-based SSH access can sign in through SSH or the browser terminal
 
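For example, assuming an existing local account named `student` (hypothetical), a laptop on the same LAN/VPN could use either path:

    # standard SSH client
    ssh student@<host-lan-ip>
    # or open the browser terminal at http://<host-lan-ip>:7681/wetty
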
 ## Notes
 
@@ -145,4 +144,4 @@ The deployment will:
 - Lab 2 includes `state/lab2/download_whiterabbitneo-gguf.sh`, which uses `git` + `git lfs` to pull only the supported WhiteRabbitNeo quants. Add `--download-only` if you want the files without Ollama registration.
 - The wiki is cloned from `https://git.zuccaro.me/bzuccaro/LLM-Labs.git` into `state/repos/LLM-Labs` and started with `npm`.
 - `./labctl down` uninstalls Ollama entirely when this project installed it, instead of only stopping the service.
-- Unsloth Studio currently supports chat and data workflows on macOS; Linux/WSL remains the standard path for NVIDIA-backed training.
+- This variant is intended for NVIDIA-backed Linux/WSL training and lab workflows.
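A minimal sketch of the two documented ways to run that helper script from the project root:

    # fetch the supported quants and register the local Ollama models
    state/lab2/download_whiterabbitneo-gguf.sh
    # fetch only the GGUF files, skipping Ollama registration
    state/lab2/download_whiterabbitneo-gguf.sh --download-only
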
@@ -2,6 +2,8 @@ courseware_state_dir: "{{ courseware_root }}/state"
 courseware_markers_dir: "{{ courseware_state_dir }}/markers"
 courseware_logs_dir: "{{ courseware_state_dir }}/logs"
 courseware_run_dir: "{{ courseware_state_dir }}/run"
+courseware_cache_dir: "{{ courseware_state_dir }}/cache"
+courseware_tmp_dir: "{{ courseware_state_dir }}/tmp"
 courseware_repos_dir: "{{ courseware_state_dir }}/repos"
 courseware_venvs_dir: "{{ courseware_state_dir }}/venvs"
 courseware_models_dir: "{{ courseware_state_dir }}/models"
@@ -17,6 +19,10 @@ courseware_lab1_models_dir: "{{ courseware_models_dir }}/lab1"
 courseware_ollama_models_dir: "{{ courseware_models_dir }}/ollama"
 courseware_node_runtime_dir: "{{ courseware_tools_dir }}/node-runtime"
 courseware_node_runtime_bin_dir: "{{ courseware_node_runtime_dir }}/node_modules/node/bin"
+courseware_uv_dir: "{{ courseware_tools_dir }}/uv"
+courseware_uv_bin: "{{ courseware_uv_dir }}/bin/uv"
+courseware_uv_cache_dir: "{{ courseware_cache_dir }}/uv"
+courseware_python_runtime_dir: "{{ courseware_tools_dir }}/python"
 courseware_netron_venv_dir: "{{ courseware_venvs_dir }}/netron"
 courseware_wetty_dir: "{{ courseware_tools_dir }}/wetty"
 courseware_promptfoo_dir: "{{ courseware_lab6_dir }}"
@@ -25,7 +31,15 @@ courseware_wiki_runtime_config_path: "{{ courseware_wiki_repo_dir }}/public/cour
 courseware_llama_cpp_bin_dir: "{{ courseware_repos_dir }}/llama.cpp/build/bin"
 
 courseware_bind_host: "0.0.0.0"
-courseware_url_host: "127.0.0.1"
+courseware_url_host: >-
+  {{
+    (lookup('env', 'COURSEWARE_URL_HOST') | trim)
+    if (lookup('env', 'COURSEWARE_URL_HOST') | trim | length) > 0
+    else (
+      ansible_default_ipv4.address
+      | default(ansible_all_ipv4_addresses | default(['127.0.0.1']) | first)
+    )
+  }}
 courseware_ports:
   ollama: 11434
   open_webui: 8080
@@ -44,16 +58,15 @@ courseware_chunkviz_commit: "a891eacafda1f28a12373ad3b00102e68f07c57f"
 courseware_promptfoo_version: "0.119.0"
 courseware_kiln_release_tag: "v0.18.1"
 courseware_node_runtime_version: "20.20.2"
+courseware_python_runtime_version: "3.12"
+courseware_uv_spec: "uv"
 courseware_wetty_spec: "wetty@2.5.0"
 courseware_wetty_base_path: "/wetty"
 courseware_wiki_repo: "https://git.zuccaro.me/bzuccaro/LLM-Labs.git"
 
 courseware_open_webui_spec: "open-webui"
 courseware_embedding_atlas_spec: "embedding-atlas"
-courseware_lab1_qwen_filename: "Qwen3-0.6B-Q8_0.gguf"
+courseware_lab1_ollama_model_alias: "batiai/gemma4-e2b:q4"
-courseware_lab1_qwen_download_url: "https://huggingface.co/Qwen/Qwen3-0.6B-GGUF/resolve/main/Qwen3-0.6B-Q8_0.gguf?download=true"
-courseware_lab1_qwen_local_path: "{{ courseware_lab1_models_dir }}/{{ courseware_lab1_qwen_filename }}"
-courseware_lab1_qwen_model_alias: "lab1-qwen3-0.6b-q8_0"
 courseware_lab1_llama_filename: "Llama-3.2-1B.Q4_K_M.gguf"
 courseware_lab1_llama_download_url: "https://huggingface.co/DevQuasar-3/meta-llama.Llama-3.2-1B-GGUF/resolve/main/Llama-3.2-1B.Q4_K_M.gguf?download=true"
 courseware_lab1_llama_local_path: "{{ courseware_lab1_models_dir }}/{{ courseware_lab1_llama_filename }}"
@@ -74,6 +87,8 @@ courseware_white_rabbit_variants:
     quant: "IQ2_M"
     filename: "WhiteRabbitNeo_WhiteRabbitNeo-V3-7B-IQ2_M.gguf"
 courseware_lab2_ollama_models:
+  - label: "Gemma 4 E2B IT Q8"
+    value: "gemma4:e2b-it-q8_0"
   - label: "Gemma 4 E2B Q4"
     value: "batiai/gemma4-e2b:q4"
   - label: "Gemma 4 E2B Q6"
@@ -140,22 +140,6 @@
     - courseware_down_ollama_marker.stat.exists
   failed_when: false
 
-- name: Stop courseware-managed Ollama macOS app if running
-  command: pkill -x Ollama
-  when:
-    - ansible_system == "Darwin"
-    - courseware_down_ollama_marker.stat.exists
-  changed_when: false
-  failed_when: false
-
-- name: Uninstall courseware-managed Ollama Homebrew formula
-  command: brew uninstall ollama
-  when:
-    - ansible_system == "Darwin"
-    - courseware_down_ollama_marker.stat.exists
-  changed_when: false
-  failed_when: false
-
 - name: Remove managed Unsloth path
   file:
     path: "{{ ansible_env.HOME }}/.unsloth"
@@ -6,6 +6,7 @@
 - { role: preflight, tags: ["preflight"] }
 - directories
 - packages
+- python_runtime
 - netron
 - lab1_assets
 - lab_assets
@@ -12,10 +12,3 @@ common_packages_debian:
   - ninja-build
   - libssl-dev
   - pkg-config
-
-common_packages_macos:
-  - python3
-  - git
-  - curl
-  - cmake
-  - ninja
@@ -20,17 +20,6 @@
   when: ansible_os_family == "Debian"
   become: yes
 
-- name: Ensure Homebrew is installed (macOS)
-  ansible.builtin.homebrew:
-    name:
-      - python3
-      - git
-      - curl
-      - cmake
-      - ninja
-    state: present
-  when: ansible_os_family == "Darwin"
-
 - name: Install Python virtual environment module (user space)
   ansible.builtin.pip:
     name: virtualenv
@@ -8,6 +8,9 @@
     - "{{ courseware_markers_dir }}"
     - "{{ courseware_logs_dir }}"
     - "{{ courseware_run_dir }}"
+    - "{{ courseware_cache_dir }}"
+    - "{{ courseware_tmp_dir }}"
+    - "{{ courseware_uv_cache_dir }}"
     - "{{ courseware_repos_dir }}"
     - "{{ courseware_venvs_dir }}"
    - "{{ courseware_models_dir }}"
@@ -1,19 +0,0 @@
-- name: Download Kiln macOS disk image
-  get_url:
-    url: "https://github.com/Kiln-AI/Kiln/releases/download/{{ courseware_kiln_release_tag }}/Kiln.MacOS.AppleSilicon.M-Processor.dmg"
-    dest: "{{ courseware_downloads_dir }}/Kiln.MacOS.AppleSilicon.M-Processor.dmg"
-    mode: "0644"
-
-- name: Install Kiln.app into project state
-  shell: |
-    set -euo pipefail
-    mount_point=$(mktemp -d /tmp/kiln.XXXXXX)
-    hdiutil attach "{{ courseware_downloads_dir }}/Kiln.MacOS.AppleSilicon.M-Processor.dmg" -mountpoint "$mount_point" -nobrowse -quiet
-    app_path=$(find "$mount_point" -maxdepth 1 -name '*.app' | head -n 1)
-    rm -rf "{{ courseware_apps_dir }}/Kiln.app"
-    cp -R "$app_path" "{{ courseware_apps_dir }}/Kiln.app"
-    hdiutil detach "$mount_point" -quiet
-    rmdir "$mount_point"
-  args:
-    executable: /bin/bash
-    creates: "{{ courseware_apps_dir }}/Kiln.app"
@@ -1,8 +1,3 @@
 - name: Install Kiln on Linux
   include_tasks: linux.yml
   when: ansible_system == "Linux"
-
-- name: Install Kiln on macOS
-  include_tasks: macos.yml
-  when: ansible_system == "Darwin"
-
@@ -31,41 +31,8 @@
     - courseware_lab1_ollama_semver | length == 0
       or not (courseware_lab1_ollama_semver is version(courseware_ollama_min_version, '>='))
 
-- name: Download mirrored Lab 1 Qwen model
-  get_url:
-    url: "{{ courseware_lab1_qwen_download_url }}"
-    dest: "{{ courseware_lab1_qwen_local_path }}"
-    mode: "0644"
-
 - name: Download mirrored Lab 1 Llama model
   get_url:
     url: "{{ courseware_lab1_llama_download_url }}"
     dest: "{{ courseware_lab1_llama_local_path }}"
     mode: "0644"
 
-- name: Write Lab 1 Ollama Modelfile
-  copy:
-    dest: "{{ courseware_lab1_dir }}/Modelfile.{{ courseware_lab1_qwen_model_alias }}"
-    mode: "0644"
-    content: |
-      FROM {{ courseware_lab1_qwen_local_path }}
-
-- name: Start Ollama before Lab 1 model registration
-  command:
-    argv:
-      - "{{ courseware_root }}/scripts/service_manager.sh"
-      - start
-      - ollama
-  changed_when: false
-
-- name: Register Lab 1 Qwen model with Ollama
-  command:
-    argv:
-      - "{{ courseware_ollama_bin }}"
-      - create
-      - "{{ courseware_lab1_qwen_model_alias }}"
-      - -f
-      - "{{ courseware_lab1_dir }}/Modelfile.{{ courseware_lab1_qwen_model_alias }}"
-  environment:
-    OLLAMA_HOST: "{{ courseware_bind_host }}:{{ courseware_ports.ollama }}"
-    OLLAMA_MODELS: "{{ courseware_ollama_models_dir }}"
@@ -23,18 +23,6 @@
     gpu_type: "{{ 'nvidia' if nvidia_smi_output.rc == 0 else 'none' }}"
   when: is_wsl | default(false) or ansible_os_family == "Debian"
 
-- name: Check for Metal GPU on macOS
-  ansible.builtin.command: system_profiler SPDisplaysDataType
-  register: metal_check
-  changed_when: false
-  failed_when: false
-  when: ansible_os_family == "Darwin"
-
-- name: Set GPU type for macOS
-  ansible.builtin.set_fact:
-    gpu_type: "metal"
-  when: ansible_os_family == "Darwin" and metal_check.rc == 0
-
 - name: Display detected GPU type
   ansible.builtin.debug:
     msg: "llama.cpp GPU type: {{ gpu_type | default('none') }}"
@@ -58,7 +46,6 @@
     {{
       not llama_cpp_stat.stat.exists or
      (gpu_type == 'nvidia' and existing_gpu_check.stdout != 'cuda') or
-      (gpu_type == 'metal' and existing_gpu_check.stdout != 'metal') or
       (gpu_type == 'amd' and existing_gpu_check.stdout != 'amd')
     }}
 
@@ -120,19 +107,6 @@
   when: gpu_type == 'amd' and cmake_configured.rc != 0
   become: no
 
-- name: Configure llama.cpp for Metal (macOS)
-  ansible.builtin.command:
-    argv:
-      - cmake
-      - ..
-      - -G Ninja
-      - -DCMAKE_BUILD_TYPE=Release
-      - -DGGML_METAL=on
-  args:
-    chdir: "{{ llmlab_base }}/lab2/llama.cpp/build"
-  when: gpu_type == 'metal' and cmake_configured.rc != 0
-  become: no
-
 - name: Configure llama.cpp for CPU only
   ansible.builtin.command:
     argv:
@@ -78,7 +78,7 @@
 - name: Set llama.cpp backend flag
   set_fact:
-    courseware_llama_backend_flag: "{{ '-DGGML_METAL=ON' if ansible_system == 'Darwin' else '-DGGML_CUDA=ON' }}"
+    courseware_llama_backend_flag: "-DGGML_CUDA=ON"
 
 - name: Set llama.cpp build parallelism
   set_fact:
@@ -16,14 +16,6 @@
   become: yes
   notify: Start Ollama service
 
-- name: Install Ollama (macOS via Homebrew)
-  ansible.builtin.homebrew:
-    name: ollama
-    state: present
-  when:
-    - ansible_os_family == "Darwin"
-    - ollama_version_check.rc != 0
-
 - name: Check if Ollama service exists
   ansible.builtin.command: systemctl list-unit-files ollama.service
   register: ollama_service_check
@@ -4,6 +4,24 @@
     state: directory
     mode: "0755"
 
+- name: Check Open WebUI virtual environment Python version
+  command:
+    argv:
+      - "{{ courseware_venvs_dir }}/open-webui/bin/python"
+      - -c
+      - "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')"
+  register: courseware_open_webui_venv_python_version
+  changed_when: false
+  failed_when: false
+
+- name: Remove Open WebUI virtual environment with incompatible Python
+  file:
+    path: "{{ courseware_venvs_dir }}/open-webui"
+    state: absent
+  when:
+    - courseware_open_webui_venv_python_version.rc == 0
+    - courseware_open_webui_venv_python_version.stdout != courseware_python_runtime_version
+
 - name: Create Open WebUI virtual environment
   command:
     argv:
@@ -36,6 +54,24 @@
       - "{{ courseware_open_webui_spec }}"
       - "numpy<2"
 
+- name: Check Embedding Atlas virtual environment Python version
+  command:
+    argv:
+      - "{{ courseware_venvs_dir }}/embedding-atlas/bin/python"
+      - -c
+      - "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')"
+  register: courseware_embedding_atlas_venv_python_version
+  changed_when: false
+  failed_when: false
+
+- name: Remove Embedding Atlas virtual environment with incompatible Python
+  file:
+    path: "{{ courseware_venvs_dir }}/embedding-atlas"
+    state: absent
+  when:
+    - courseware_embedding_atlas_venv_python_version.rc == 0
+    - courseware_embedding_atlas_venv_python_version.stdout != courseware_python_runtime_version
+
 - name: Create Embedding Atlas virtual environment
   command:
     argv:
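The same major.minor probe can be run by hand against either venv (a sketch, assuming the default `state/venvs/...` layout relative to the project root); the tasks above recreate the venv whenever this value differs from `courseware_python_runtime_version`:

    state/venvs/open-webui/bin/python -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')"
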
@@ -14,6 +14,7 @@
   - pkg-config
   - python3
   - python3-pip
+  - python3-setuptools
   - python3-venv
   - unzip
   - zstd
@@ -1,29 +0,0 @@
-- name: Check installed Homebrew formulas
-  command: "brew list --versions {{ item }}"
-  loop:
-    - git
-    - git-lfs
-    - cmake
-    - node
-    - python@3.11
-    - ollama
-  register: courseware_brew_checks
-  changed_when: false
-  failed_when: false
-
-- name: Install missing Homebrew formulas
-  command: "brew install {{ item.item }}"
-  loop: "{{ courseware_brew_checks.results }}"
-  when: item.rc != 0
-
-- name: Mark Ollama as installed by courseware on macOS
-  file:
-    path: "{{ courseware_ollama_install_marker }}"
-    state: touch
-    mode: "0644"
-  when:
-    - courseware_brew_checks.results
-      | selectattr('item', 'equalto', 'ollama')
-      | selectattr('rc', 'ne', 0)
-      | list
-      | length > 0
@@ -1,8 +1,3 @@
-- name: Install macOS prerequisites
-  include_tasks: macos.yml
-  when: ansible_system == "Darwin"
-
 - name: Install Linux prerequisites
   include_tasks: linux.yml
   when: ansible_system == "Linux"
-
@@ -1,56 +1,14 @@
 - name: Classify supported host profile
   set_fact:
-    courseware_is_macos: "{{ ansible_system == 'Darwin' }}"
     courseware_is_linux: "{{ ansible_system == 'Linux' }}"
     courseware_is_wsl: "{{ 'microsoft' in ansible_kernel | lower or 'wsl' in ansible_kernel | lower }}"
     courseware_is_native_linux: "{{ ansible_system == 'Linux' and not ('microsoft' in ansible_kernel | lower or 'wsl' in ansible_kernel | lower) }}"
-    courseware_host_profile: "{{ 'macos' if ansible_system == 'Darwin' else ('wsl' if ('microsoft' in ansible_kernel | lower or 'wsl' in ansible_kernel | lower) else ('native-debian-ubuntu' if ansible_system == 'Linux' and ansible_os_family == 'Debian' else 'unsupported')) }}"
+    courseware_host_profile: "{{ 'wsl' if ansible_system == 'Linux' and ('microsoft' in ansible_kernel | lower or 'wsl' in ansible_kernel | lower) else ('native-debian-ubuntu' if ansible_system == 'Linux' and ansible_os_family == 'Debian' else 'unsupported') }}"
 
 - name: Fail on unsupported operating systems
   fail:
-    msg: "Supported platforms are Apple Silicon macOS and Debian-family Linux/WSL."
+    msg: "Supported platforms are Debian-family Linux and WSL."
-  when: ansible_system not in ["Darwin", "Linux"]
+  when: courseware_host_profile == "unsupported"
 
-- name: Fail on unsupported macOS architecture
-  fail:
-    msg: "This installer supports Apple Silicon Macs only."
-  when:
-    - ansible_system == "Darwin"
-    - ansible_architecture not in ["arm64", "aarch64"]
-
-- name: Fail on undersized macOS systems
-  fail:
-    msg: "This courseware assumes a modern Apple Silicon Mac with at least 16 GB of unified memory."
-  when:
-    - ansible_system == "Darwin"
-    - (ansible_memtotal_mb | int) < 16000
-
-- name: Check for Xcode command line tools
-  command: xcode-select -p
-  register: courseware_xcode_select
-  changed_when: false
-  when: ansible_system == "Darwin"
-
-- name: Check for Homebrew
-  command: which brew
-  register: courseware_brew_check
-  changed_when: false
-  failed_when: false
-  when: ansible_system == "Darwin"
-
-- name: Fail when Xcode command line tools are missing
-  fail:
-    msg: "Install Xcode Command Line Tools first with 'xcode-select --install'."
-  when:
-    - ansible_system == "Darwin"
-    - courseware_xcode_select.rc != 0
-
-- name: Fail when Homebrew is missing
-  fail:
-    msg: "Install Homebrew first from https://brew.sh/."
-  when:
-    - ansible_system == "Darwin"
-    - courseware_brew_check.rc != 0
-
 - name: Fail on unsupported Linux family
   fail:
@@ -330,6 +288,5 @@
 
 - name: Set runtime binary defaults
   set_fact:
-    courseware_python_bin: >-
-      {{ '/opt/homebrew/opt/python@3.11/bin/python3.11' if ansible_system == 'Darwin' else '/usr/bin/python3' }}
+    courseware_python_bin: "/usr/bin/python3"
     courseware_ollama_bin: "ollama"
@@ -0,0 +1,74 @@
+- name: Create contained Python runtime manager virtual environment
+  command:
+    argv:
+      - /usr/bin/python3
+      - -m
+      - venv
+      - "{{ courseware_uv_dir }}"
+  args:
+    creates: "{{ courseware_uv_dir }}/bin/python"
+
+- name: Upgrade contained Python runtime manager tooling
+  command:
+    argv:
+      - "{{ courseware_uv_dir }}/bin/python"
+      - -m
+      - pip
+      - install
+      - --upgrade
+      - pip
+      - setuptools
+      - wheel
+
+- name: Install contained Python runtime manager
+  command:
+    argv:
+      - "{{ courseware_uv_dir }}/bin/python"
+      - -m
+      - pip
+      - install
+      - "{{ courseware_uv_spec }}"
+
+- name: Install managed CPython runtime
+  command:
+    argv:
+      - "{{ courseware_uv_bin }}"
+      - python
+      - install
+      - "{{ courseware_python_runtime_version }}"
+      - --install-dir
+      - "{{ courseware_python_runtime_dir }}"
+  environment:
+    UV_PYTHON_INSTALL_DIR: "{{ courseware_python_runtime_dir }}"
+    UV_CACHE_DIR: "{{ courseware_uv_cache_dir }}"
+    XDG_CACHE_HOME: "{{ courseware_cache_dir }}"
+    TMPDIR: "{{ courseware_tmp_dir }}"
+  register: courseware_python_runtime_install
+  changed_when: "'Installed Python' in courseware_python_runtime_install.stdout"
+
+- name: Resolve managed CPython runtime
+  command:
+    argv:
+      - "{{ courseware_uv_bin }}"
+      - python
+      - find
+      - "{{ courseware_python_runtime_version }}"
+  environment:
+    UV_PYTHON_INSTALL_DIR: "{{ courseware_python_runtime_dir }}"
+    UV_CACHE_DIR: "{{ courseware_uv_cache_dir }}"
+    XDG_CACHE_HOME: "{{ courseware_cache_dir }}"
+    TMPDIR: "{{ courseware_tmp_dir }}"
+  register: courseware_python_runtime_find
+  changed_when: false
+
+- name: Set managed Python runtime for courseware venvs
+  set_fact:
+    courseware_python_bin: "{{ courseware_python_runtime_find.stdout | trim }}"
+
+- name: Verify managed Python runtime version
+  command:
+    argv:
+      - "{{ courseware_python_bin }}"
+      - -c
+      - "import sys; expected=tuple(map(int, '{{ courseware_python_runtime_version }}'.split('.'))); raise SystemExit(0 if sys.version_info[:len(expected)] == expected else 1)"
+  changed_when: false
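Outside Ansible, the same uv workflow can be exercised by hand (a sketch; the install directory below is only a placeholder for `courseware_python_runtime_dir`):

    export UV_PYTHON_INSTALL_DIR=./state/tools/python
    uv python install 3.12 --install-dir ./state/tools/python
    uv python find 3.12
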
@@ -46,6 +46,25 @@
     enabled: true
   when: ansible_service_mgr == "systemd"
 
+- name: Check systemd sshd listener policy
+  become: true
+  command: ss -ltn
+  register: courseware_terminal_systemd_ss_listeners
+  changed_when: false
+  when: ansible_service_mgr == "systemd"
+
+- name: Restart sshd with systemd when listener policy is not active
+  become: true
+  systemd:
+    name: ssh
+    state: restarted
+    enabled: true
+  when:
+    - ansible_service_mgr == "systemd"
+    - >-
+      '0.0.0.0:22' not in courseware_terminal_systemd_ss_listeners.stdout
+      or '[::]:22' in courseware_terminal_systemd_ss_listeners.stdout
+
 - name: Check for running sshd when systemd is unavailable
   become: true
   command: pgrep -x sshd
@@ -89,19 +108,18 @@
   environment:
     PATH: "{{ courseware_node_runtime_bin_dir }}:{{ ansible_env.PATH }}"
 
-- name: Check loopback sshd listener
+- name: Check sshd listener
   become: true
   command: ss -ltn
   register: courseware_terminal_ss_listeners
   changed_when: false
 
-- name: Assert sshd is loopback-only
+- name: Assert sshd accepts LAN and loopback clients
   assert:
     that:
-      - "'127.0.0.1:22' in courseware_terminal_ss_listeners.stdout"
+      - "'0.0.0.0:22' in courseware_terminal_ss_listeners.stdout"
-      - "'0.0.0.0:22' not in courseware_terminal_ss_listeners.stdout"
       - "'[::]:22' not in courseware_terminal_ss_listeners.stdout"
-    fail_msg: "sshd must listen only on 127.0.0.1:22 for the browser terminal deployment."
+    fail_msg: "sshd must listen on 0.0.0.0:22 so VPN/LAN SSH clients and local WeTTY can connect."
 
 - name: Assert WeTTY binary exists
   stat:
@@ -1,5 +1,5 @@
 # Managed by Local Courseware Deployment.
-ListenAddress 127.0.0.1
+ListenAddress 0.0.0.0
 AddressFamily inet
 PermitRootLogin no
 PasswordAuthentication yes
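After the role runs, the expected listener state can be confirmed the same way the assertions above do (a sketch):

    # should show a LISTEN entry on 0.0.0.0:22 and none on [::]:22
    sudo ss -ltn | grep ':22'
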
@@ -145,7 +145,7 @@
 
 - name: Determine Miniforge platform suffix
   set_fact:
-    courseware_transformerlab_miniforge_platform: "{{ 'Linux-x86_64' if ansible_system == 'Linux' and ansible_architecture == 'x86_64' else 'Linux-aarch64' if ansible_system == 'Linux' and ansible_architecture in ['aarch64', 'arm64'] else 'MacOSX-arm64' if ansible_system == 'Darwin' and ansible_architecture == 'arm64' else 'MacOSX-x86_64' if ansible_system == 'Darwin' and ansible_architecture == 'x86_64' else 'unsupported' }}"
+    courseware_transformerlab_miniforge_platform: "{{ 'Linux-x86_64' if ansible_system == 'Linux' and ansible_architecture == 'x86_64' else 'Linux-aarch64' if ansible_system == 'Linux' and ansible_architecture in ['aarch64', 'arm64'] else 'unsupported' }}"
 
 - name: Fail for unsupported Miniforge platform
   fail:
@@ -210,7 +210,7 @@
     elif [ "$accelerator" = "rocm" ]; then
       wheel_args+=(--index https://download.pytorch.org/whl/rocm6.4 --index-strategy unsafe-best-match)
       extra="rocm"
-    elif [ "{{ ansible_system }}" != "Darwin" ]; then
+    else
       wheel_args+=(--index https://download.pytorch.org/whl/cpu --index-strategy unsafe-best-match)
     fi
 
@@ -17,6 +17,10 @@
   args:
     executable: /bin/bash
     creates: "{{ courseware_unsloth_home }}/.install_complete"
+  environment:
+    UV_CACHE_DIR: "{{ courseware_uv_cache_dir }}"
+    XDG_CACHE_HOME: "{{ courseware_cache_dir }}"
+    TMPDIR: "{{ courseware_tmp_dir }}"
   rescue:
     - name: Capture Unsloth installer log tail
       shell: |
@@ -41,3 +45,18 @@
 
         Last log lines:
         {{ courseware_unsloth_install_log_tail.stdout | default('(no log output captured)') }}
+
+- name: Install x86_64-compatible NumPy for Unsloth Studio
+  command:
+    argv:
+      - "{{ ansible_env.HOME }}/.unsloth/studio/unsloth_studio/bin/python"
+      - -m
+      - pip
+      - install
+      - "numpy<2"
+  environment:
+    UV_CACHE_DIR: "{{ courseware_uv_cache_dir }}"
+    XDG_CACHE_HOME: "{{ courseware_cache_dir }}"
+    TMPDIR: "{{ courseware_tmp_dir }}"
+  register: courseware_unsloth_numpy_install
+  changed_when: "'Successfully installed' in courseware_unsloth_numpy_install.stdout"
@@ -26,9 +26,8 @@ EMBEDDING_ATLAS_VENV="{{ courseware_venvs_dir }}/embedding-atlas"
 TTPS_DATASET_PATH="{{ courseware_datasets_dir }}/ttps_dataset.parquet"
 WIKI_TEST_RAW_PATH="{{ courseware_datasets_dir }}/wiki.test.raw"
 COURSEWARE_OLLAMA_BASE_URL="http://{{ courseware_url_host }}:{{ courseware_ports.ollama }}"
-COURSEWARE_LAB1_QWEN_MODEL_PATH="{{ courseware_lab1_qwen_local_path }}"
 COURSEWARE_LAB1_LLAMA_MODEL_PATH="{{ courseware_lab1_llama_local_path }}"
-COURSEWARE_LAB1_OLLAMA_MODEL_ALIAS="{{ courseware_lab1_qwen_model_alias }}"
+COURSEWARE_LAB1_OLLAMA_MODEL_ALIAS="{{ courseware_lab1_ollama_model_alias }}"
 UNSLOTH_BIN="{{ ansible_env.HOME }}/.local/bin/unsloth"
 PROMPTFOO_DIR="{{ courseware_promptfoo_dir }}"
 PROMPTFOO_BIN="{{ courseware_tools_dir }}/promptfoo/node_modules/.bin/promptfoo"
@@ -36,5 +35,4 @@ WIKI_DIR="{{ courseware_wiki_repo_dir }}"
 WIKI_RUNTIME_CONFIG_PATH="{{ courseware_wiki_runtime_config_path }}"
 LLAMA_CPP_BIN_DIR="{{ courseware_llama_cpp_bin_dir }}"
 KILN_LINUX_BIN="{{ courseware_apps_dir }}/kiln/Kiln"
-KILN_MAC_APP="{{ courseware_apps_dir }}/Kiln.app"
+KILN_LAUNCH_PATH="{{ courseware_apps_dir }}/kiln/Kiln"
-KILN_LAUNCH_PATH="{% if ansible_system == 'Darwin' %}{{ courseware_apps_dir }}/Kiln.app{% else %}{{ courseware_apps_dir }}/kiln/Kiln{% endif %}"
@@ -84,8 +84,8 @@ WARNING: THIS SCRIPT WILL CONFIGURE YOUR ENVIRONMENT WILL THE FOLLOWING SOFTWARE
 - Unsloth Studio
 - Kiln Desktop
 - Course-specific support assets for lab 1, lab 2, and lab 4
-- Pre-pulled Lab 2 Ollama models for Q4 and Q6 Gemma 4 E2B
+- Pre-pulled Gemma 4 E2B Ollama models for Lab 1 and Lab 2
-- A pre-registered Lab 1 Ollama model (requires Ollama ${min_ollama}+)
+- Lab 1 confidence support through Gemma 4 E2B Q4 (requires Ollama ${min_ollama}+)
 
 IT IS RECOMMENDED TO RUN THIS IN AN ISLOATED ENVIRONMENT (Dedicated WSL, VM, etc.)
 
@@ -109,8 +109,8 @@ WARNING: THIS SCRIPT WILL CONFIGURE YOUR ENVIRONMENT WILL THE FOLLOWING SOFTWARE
 - Unsloth Studio
 - Kiln Desktop
 - Course-specific support assets for lab 1, lab 2, and lab 4
-- Pre-pulled Lab 2 Ollama models for Q4 and Q6 Gemma 4 E2B
+- Pre-pulled Gemma 4 E2B Ollama models for Lab 1 and Lab 2
-- A pre-registered Lab 1 Ollama model (requires Ollama ${min_ollama}+)
+- Lab 1 confidence support through Gemma 4 E2B Q4 (requires Ollama ${min_ollama}+)
 
 IT IS RECOMMENDED TO RUN THIS IN AN ISLOATED ENVIRONMENT (Dedicated WSL, VM, etc.)
 
@@ -132,29 +132,16 @@ host_is_wsl() {
   [ "$(uname -s)" = "Linux" ] && uname -r | grep -qiE 'microsoft|wsl'
 }
 
-host_is_macos() {
-  [ "$(uname -s)" = "Darwin" ]
-}
-
 host_is_linux() {
   [ "$(uname -s)" = "Linux" ]
 }
 
-host_is_arm_mac() {
-  host_is_macos && [ "$(uname -m)" = "arm64" ]
-}
-
 host_profile() {
   if host_is_wsl; then
     printf '%s\n' "wsl"
     return
   fi
 
-  if host_is_macos; then
-    printf '%s\n' "macos"
-    return
-  fi
-
   if host_is_linux && host_is_debian_family; then
     printf '%s\n' "native-debian-ubuntu"
     return
@@ -299,7 +286,6 @@ Python 3 was not found.
 
 Install it first, then rerun this command:
 - Debian/Ubuntu/WSL: sudo apt update && sudo apt install -y python3 python3-venv
-- macOS: brew install python@3.11
 EOF
   exit 1
 }
@@ -401,7 +387,6 @@ Python 3 is installed, but its virtual environment support is still unavailable.
 
 Install the missing venv package for your platform, then rerun this command:
 - Debian/Ubuntu/WSL: sudo apt update && sudo apt install -y python3-venv python3-pip
-- macOS: brew reinstall python@3.11
 EOF
   exit 1
 fi
+6 −5
@@ -14,7 +14,10 @@ load_runtime_env() {
 
   : "${COURSEWARE_STATE_DIR:=$STATE_DIR}"
   : "${COURSEWARE_BIND_HOST:=127.0.0.1}"
-  : "${COURSEWARE_URL_HOST:=127.0.0.1}"
+  if [ -z "${COURSEWARE_URL_HOST:-}" ]; then
+    COURSEWARE_URL_HOST=$(ip route get 1.1.1.1 2>/dev/null | sed -nE 's/.* src ([0-9.]+).*/\1/p' | head -n 1)
+    : "${COURSEWARE_URL_HOST:=127.0.0.1}"
+  fi
   : "${COURSEWARE_NETRON_PORT:=8338}"
   : "${COURSEWARE_PROMPTFOO_PORT:=15500}"
   : "${COURSEWARE_WIKI_PORT:=80}"
@@ -28,9 +31,8 @@ load_runtime_env() {
   : "${WIKI_DIR:=$COURSEWARE_STATE_DIR/repos/LLM-Labs}"
   : "${WIKI_RUNTIME_CONFIG_PATH:=$WIKI_DIR/public/courseware-runtime.json}"
   : "${COURSEWARE_OLLAMA_BASE_URL:=http://$COURSEWARE_URL_HOST:$COURSEWARE_OLLAMA_PORT}"
-  : "${COURSEWARE_LAB1_QWEN_MODEL_PATH:=$COURSEWARE_STATE_DIR/models/lab1/Qwen3-0.6B-Q8_0.gguf}"
   : "${COURSEWARE_LAB1_LLAMA_MODEL_PATH:=$COURSEWARE_STATE_DIR/models/lab1/Llama-3.2-1B.Q4_K_M.gguf}"
-  : "${COURSEWARE_LAB1_OLLAMA_MODEL_ALIAS:=lab1-qwen3-0.6b-q8_0}"
+  : "${COURSEWARE_LAB1_OLLAMA_MODEL_ALIAS:=batiai/gemma4-e2b:q4}"
   : "${LLAMA_CPP_BIN_DIR:=$COURSEWARE_STATE_DIR/repos/llama.cpp/build/bin}"
 
   if [ -n "${OLLAMA_BIN:-}" ] && [[ "$OLLAMA_BIN" != */* ]] && command -v "$OLLAMA_BIN" >/dev/null 2>&1; then
@@ -149,11 +151,10 @@ service_command() {
       "$COURSEWARE_PROMPTFOO_PORT"
     ;;
   wiki)
-    printf 'cd "%s" && PATH="%s:$PATH" exec env COURSEWARE_OLLAMA_BASE_URL="%s" COURSEWARE_LAB1_QWEN_MODEL_PATH="%s" COURSEWARE_LAB1_LLAMA_MODEL_PATH="%s" COURSEWARE_LAB1_OLLAMA_MODEL_ALIAS="%s" "./node_modules/.bin/next" start --hostname %s --port %s' \
+    printf 'cd "%s" && PATH="%s:$PATH" exec env COURSEWARE_OLLAMA_BASE_URL="%s" COURSEWARE_LAB1_LLAMA_MODEL_PATH="%s" COURSEWARE_LAB1_OLLAMA_MODEL_ALIAS="%s" "./node_modules/.bin/next" start --hostname %s --port %s' \
       "$WIKI_DIR" \
       "$NODE_RUNTIME_BIN_DIR" \
       "$COURSEWARE_OLLAMA_BASE_URL" \
-      "$COURSEWARE_LAB1_QWEN_MODEL_PATH" \
       "$COURSEWARE_LAB1_LLAMA_MODEL_PATH" \
      "$COURSEWARE_LAB1_OLLAMA_MODEL_ALIAS" \
       "$COURSEWARE_BIND_HOST" \
+70 −58
@@ -115,8 +115,9 @@ is_running() {
 service_startup_attempts() {
   case "$1" in
   embedding-atlas)
-    # The first launch can be noticeably slower on cold environments.
-    printf '%s\n' 180
+    # First launch embeds the bundled dataset. On older GPU drivers this falls
+    # back to CPU and can take close to an hour.
+    printf '%s\n' 3600
     ;;
   *)
     printf '%s\n' 60
@@ -198,56 +199,13 @@ terminate_service_processes() {
   done < <(service_listener_pids "$service")
 }
 
-start_one() {
+wait_for_service_ready() {
   local service=$1
-  local cmd
+  local log_file=$2
-  local log_file
+  local pid_file=$3
-  local pid_file
+  local startup_attempts=$4
+  local pid_grace_attempts=$5
   local attempt
-  local pid_grace_attempts=5
-  local startup_attempts
-
-  if [ "$service" = "ollama" ] || [ "$service" = "wiki" ]; then
-    assert_ollama_logprobs_support
-  fi
-
-  if has_live_pid "$service"; then
-    echo "$service already running"
-    return 0
-  fi
-
-  if service_ready "$service"; then
-    echo "$service already available"
-    return 0
-  fi
-
-  case "$service" in
-  open-webui)
-    start_one ollama
-    ;;
-  wetty)
-    check_wetty_prereqs
-    ;;
-  *)
-    ;;
-  esac
-
-  cmd=$(service_command "$service")
-  startup_attempts=$(service_startup_attempts "$service")
-  log_file=$(service_log_file "$service")
-  pid_file=$(service_pid_file "$service")
-
-  if [ "$service" = "ollama" ]; then
-    env \
-      OLLAMA_HOST="${COURSEWARE_BIND_HOST}:${COURSEWARE_OLLAMA_PORT}" \
-      OLLAMA_MODELS="$OLLAMA_MODELS_DIR" \
-      "$OLLAMA_BIN" serve </dev/null >>"$log_file" 2>&1 &
-  elif command -v setsid >/dev/null 2>&1; then
-    nohup setsid bash -lc "$cmd" </dev/null >>"$log_file" 2>&1 &
-  else
-    nohup bash -lc "$cmd" </dev/null >>"$log_file" 2>&1 &
-  fi
-  echo $! >"$pid_file"
-
   for attempt in $(seq 1 "$startup_attempts"); do
     if service_ready "$service"; then
@@ -270,6 +228,68 @@ start_one() {
   exit 1
 }
 
+start_one() {
+  local service=$1
+  local cmd
+  local log_file
+  local pid_file
+  local pid_grace_attempts=5
+  local startup_attempts
+
+  if [ "$service" = "ollama" ] || [ "$service" = "wiki" ]; then
+    assert_ollama_logprobs_support
+  fi
+
+  startup_attempts=$(service_startup_attempts "$service")
+  log_file=$(service_log_file "$service")
+  pid_file=$(service_pid_file "$service")
+
+  if service_ready "$service"; then
+    echo "$service already available"
+    return 0
+  fi
+
+  if has_live_pid "$service"; then
+    echo "$service already starting"
+    wait_for_service_ready "$service" "$log_file" "$pid_file" "$startup_attempts" "$pid_grace_attempts"
+    return 0
+  fi
+
+  case "$service" in
+  open-webui)
+    start_one ollama
+    ;;
+  wetty)
+    check_wetty_prereqs
+    ;;
+  *)
+    ;;
+  esac
+
+  cmd=$(service_command "$service")
+
+  if [ "$service" = "ollama" ]; then
+    if command -v setsid >/dev/null 2>&1; then
+      nohup setsid env \
+        OLLAMA_HOST="${COURSEWARE_BIND_HOST}:${COURSEWARE_OLLAMA_PORT}" \
+        OLLAMA_MODELS="$OLLAMA_MODELS_DIR" \
+        "$OLLAMA_BIN" serve </dev/null >>"$log_file" 2>&1 &
+    else
+      nohup env \
+        OLLAMA_HOST="${COURSEWARE_BIND_HOST}:${COURSEWARE_OLLAMA_PORT}" \
+        OLLAMA_MODELS="$OLLAMA_MODELS_DIR" \
+        "$OLLAMA_BIN" serve </dev/null >>"$log_file" 2>&1 &
+    fi
+  elif command -v setsid >/dev/null 2>&1; then
+    nohup setsid bash -lc "$cmd" </dev/null >>"$log_file" 2>&1 &
+  else
+    nohup bash -lc "$cmd" </dev/null >>"$log_file" 2>&1 &
+  fi
+  echo $! >"$pid_file"
+
+  wait_for_service_ready "$service" "$log_file" "$pid_file" "$startup_attempts" "$pid_grace_attempts"
+}
+
 stop_one() {
   local service=$1
   local pid_file
@@ -367,14 +387,6 @@ EOF
 }
 
 open_kiln() {
-  local host_os
-
-  host_os=$(uname -s)
-  if [ "$host_os" = "Darwin" ] && [ -d "$KILN_MAC_APP" ]; then
-    open "$KILN_MAC_APP"
-    return 0
-  fi
-
   if [ -x "$KILN_LINUX_BIN" ]; then
     nohup "$KILN_LINUX_BIN" >/dev/null 2>&1 &
     echo "started Kiln from $KILN_LINUX_BIN"