---
# LLM-Labs-Local/ansible/roles/packages/tasks/linux.yml
# Linux (Debian/Ubuntu) package tasks: build prerequisites, CUDA toolkit
# detection/installation guidance, and Ollama installation/adoption.
- name: Install Debian/Ubuntu prerequisites
  # Base toolchain, VCS, Node, and Python packages required by later tasks.
  become: true
  ansible.builtin.apt:
    name:
      - build-essential
      - ca-certificates
      - cmake
      - curl
      - git
      - git-lfs
      - libcap2-bin
      - libcurl4-openssl-dev
      - nodejs
      - npm
      - pkg-config
      - python3
      - python3-pip
      - python3-venv
      - unzip
      - zstd
    state: present
    update_cache: true
- name: Query CUDA toolkit apt candidate
  # apt-cache policy prints "Candidate: (none)" when no installable version
  # exists; a later set_fact task inspects this output.
  # check_mode: false lets the probe run in --check so the registered
  # variable is always defined for subsequent conditionals.
  ansible.builtin.command: apt-cache policy nvidia-cuda-toolkit
  register: courseware_cuda_toolkit_policy
  changed_when: false
  failed_when: false
  check_mode: false
- name: Check for nvcc
  # Non-zero rc means the CUDA compiler is not on PATH; read-only probe,
  # so never report changed and never fail the play here.
  ansible.builtin.command: which nvcc
  register: courseware_nvcc_check
  changed_when: false
  failed_when: false
  check_mode: false
- name: Set CUDA toolkit package availability
set_fact:
courseware_cuda_toolkit_package_available: >-
{{
courseware_cuda_toolkit_policy.rc == 0
and 'Candidate: (none)' not in courseware_cuda_toolkit_policy.stdout
}}
- name: Install distro CUDA toolkit when available
  # Attempt the distro package only when an apt candidate exists, nvcc is
  # absent, and the host is not WSL; the rescue turns an apt failure into
  # actionable guidance instead of a raw module error.
  when:
    - not courseware_is_wsl
    # Cast defensively: if the fact ever materializes as the string
    # "False", a bare truthiness test would wrongly pass (non-empty string).
    - courseware_cuda_toolkit_package_available | bool
    - courseware_nvcc_check.rc != 0
  block:
    - name: Install nvidia-cuda-toolkit
      become: true
      ansible.builtin.apt:
        name: nvidia-cuda-toolkit
        state: present
  rescue:
    - name: Fail with CUDA toolkit guidance after apt install error
      ansible.builtin.fail:
        msg: |
          CUDA Toolkit could not be installed from the distro package manager.
          This installer needs the Linux-side CUDA toolkit for llama.cpp, not just a working `nvidia-smi`.
          Try this first:
          sudo apt update
          sudo apt install -y nvidia-cuda-toolkit
          If that still fails, add NVIDIA's CUDA repository for your Debian/Ubuntu release and install the toolkit from there.
          Verify with:
          nvcc --version
          ls /usr/local/cuda/include/cuda_runtime.h
- name: Fail with CUDA toolkit guidance when no apt candidate exists
  # NOTE(review): the message talks about WSL but the condition requires
  # NOT WSL — confirm against the WSL-specific task path elsewhere.
  ansible.builtin.fail:
    msg: |
      CUDA Toolkit is not available from this distro's current apt sources.
      This installer needs the Linux-side CUDA toolkit for llama.cpp, not just a working `nvidia-smi`.
      On WSL this usually means:
      - Windows side: the NVIDIA driver is installed correctly
      - Linux side: the CUDA toolkit repository is still missing
      Add NVIDIA's CUDA repository for your Debian/Ubuntu release, install the toolkit, then rerun:
      bash deploy-courseware.sh
      Verify with:
      nvcc --version
      ls /usr/local/cuda/include/cuda_runtime.h
  when:
    - not courseware_is_wsl
    # Cast before negating: `not "False"` on a string fact is False, which
    # would silently skip this guidance when it is most needed.
    - not (courseware_cuda_toolkit_package_available | bool)
    - courseware_nvcc_check.rc != 0
- name: Check for Ollama binary
  # rc == 0 means ollama is already on PATH; read-only probe.
  ansible.builtin.command: which ollama
  register: courseware_ollama_check
  changed_when: false
  failed_when: false
  check_mode: false
- name: Install Ollama
  # Upstream-supported curl|sh installer; `creates` keeps it idempotent.
  # pipefail is required so a failed curl download cannot be masked by a
  # succeeding `sh` reading an empty/partial stream (ansible-lint
  # risky-shell-pipe); pipefail needs bash, hence the explicit executable.
  become: true
  ansible.builtin.shell: set -o pipefail && curl -fsSL https://ollama.com/install.sh | sh
  args:
    creates: /usr/local/bin/ollama
    executable: /bin/bash
  when: courseware_ollama_check.rc != 0
- name: Mark Ollama as installed by courseware
  # Marker file records that this playbook (not the user) installed Ollama.
  # preserve on both timestamps makes repeated runs report ok instead of
  # changed when the marker already exists.
  ansible.builtin.file:
    path: "{{ courseware_ollama_install_marker }}"
    state: touch
    mode: "0644"
    access_time: preserve
    modification_time: preserve
  when: courseware_ollama_check.rc != 0
- name: Check for courseware-managed Ollama install marker
  # Snapshot of marker existence BEFORE any adoption, used by the next task.
  ansible.builtin.stat:
    path: "{{ courseware_ollama_install_marker }}"
  register: courseware_ollama_install_marker_before
- name: Adopt existing local Ollama install into courseware management
  # When Ollama already exists but was not installed by us, lay down the
  # marker so later lifecycle tasks (e.g. stopping the service) apply.
  # The guards restrict adoption to a systemd WSL host where the current
  # user is a regular /home/ account rather than the ollama service user.
  ansible.builtin.file:
    path: "{{ courseware_ollama_install_marker }}"
    state: touch
    mode: "0644"
    # Keep reruns idempotent if the marker already exists by then.
    access_time: preserve
    modification_time: preserve
  when:
    - not courseware_ollama_install_marker_before.stat.exists
    - courseware_ollama_check.rc == 0
    - ansible_system == "Linux"
    - ansible_service_mgr == "systemd"
    - courseware_is_wsl | bool
    - ansible_user_id != "ollama"
    - ansible_env.HOME is search('/home/')
    - ansible_env.HOME != '/usr/share/ollama'
    - ansible_env.HOME != '/var/lib/ollama'
- name: Refresh courseware-managed Ollama install marker
  # Re-stat AFTER install/adoption so the final state is what gates the
  # service-management task below.
  ansible.builtin.stat:
    path: "{{ courseware_ollama_install_marker }}"
  register: courseware_ollama_install_marker_stat
- name: Check for Ollama systemd unit
  # The upstream installer drops this unit; only act on it if present.
  ansible.builtin.stat:
    path: /etc/systemd/system/ollama.service
  register: courseware_ollama_systemd_unit
- name: Stop and disable courseware-managed Ollama systemd service
  # Only manage the service for installs we own (marker present) and only
  # when the unit file actually exists, so unmanaged hosts are untouched.
  become: true
  ansible.builtin.systemd:
    name: ollama
    state: stopped
    enabled: false
  when:
    - ansible_service_mgr == "systemd"
    - courseware_ollama_install_marker_stat.stat.exists
    - courseware_ollama_systemd_unit.stat.exists