---
# LLaMA Factory installation and setup.
# Clones the upstream repo into {{ llmlab_base }}/lab5, builds a dedicated
# virtualenv, installs the package with torch/metrics extras, and drops a
# start.sh that launches the LLaMA Board web UI.

- name: Create LLaMA Factory directory
  ansible.builtin.file:
    path: "{{ llmlab_base }}/lab5/LLaMA-Factory"
    state: directory
    mode: '0755'

# Registered result is reused by the clone task's `when` below (and possibly
# by later tasks outside this file) — keep the variable name stable.
- name: Check if LLaMA Factory already cloned
  ansible.builtin.stat:
    path: "{{ llmlab_base }}/lab5/LLaMA-Factory/.git"
  register: llm_factory_git_check

- name: Clone LLaMA Factory repository
  ansible.builtin.git:
    repo: https://github.com/hiyouga/LLaMA-Factory.git
    dest: "{{ llmlab_base }}/lab5/LLaMA-Factory"
    version: main
    # Never pull after the initial clone; local checkout stays untouched.
    update: false
  become: false
  when: not llm_factory_git_check.stat.exists

- name: Create LLaMA Factory virtual environment
  ansible.builtin.command:
    cmd: python3 -m venv "{{ llmlab_base }}/lab5/LLaMA-Factory/.venv"
    # Skip once the venv exists — makes the command task idempotent.
    creates: "{{ llmlab_base }}/lab5/LLaMA-Factory/.venv/bin/activate"
  become: false

# Use the pip module instead of shelling out: it targets the venv directly,
# is idempotent, and reports changed/ok correctly (the previous shell version
# reported "changed" on every run because of `changed_when: rc == 0`).
- name: Install pip in virtual environment
  ansible.builtin.pip:
    name: pip
    state: latest
    virtualenv: "{{ llmlab_base }}/lab5/LLaMA-Factory/.venv"
  become: false
  register: pip_install_result

- name: Install LLaMA Factory with GPU support
  ansible.builtin.shell: |
    set -e
    source "{{ llmlab_base }}/lab5/LLaMA-Factory/.venv/bin/activate"
    pip install -e ".[torch,metrics]"
  args:
    chdir: "{{ llmlab_base }}/lab5/LLaMA-Factory"
    executable: /bin/bash
    # The editable install publishes this console script; its presence means
    # the install already ran, so skip on replays.
    creates: "{{ llmlab_base }}/lab5/LLaMA-Factory/.venv/bin/llamafactory-cli"
  become: false
  register: install_result

- name: Create LLaMA Factory start script
  ansible.builtin.copy:
    dest: "{{ llmlab_base }}/lab5/start.sh"
    content: |
      #!/bin/bash
      set -e
      # Activate virtual environment
      source "{{ llmlab_base }}/lab5/LLaMA-Factory/.venv/bin/activate"
      # Navigate to LLaMA-Factory directory
      cd "{{ llmlab_base }}/lab5/LLaMA-Factory"
      # Launch LLaMA Board web interface
      llamafactory-cli webui
    mode: '0755'

- name: Display LLaMA Factory installation
  ansible.builtin.debug:
    msg: "LLaMA Factory installed to {{ llmlab_base }}/lab5/LLaMA-Factory"