Initial snapshot before transformerlab recovery
This commit is contained in:
@@ -0,0 +1,170 @@
|
||||
#!/usr/bin/env bash
# Downloads WhiteRabbitNeo GGUF model variants for lab 2 and (optionally)
# registers them as local Ollama aliases. This file is an Ansible/Jinja2
# template; {{ ... }} placeholders are substituted at deploy time.
set -euo pipefail

# Resolve the directory containing this script, then the state directory one
# level above it, where runtime.env (written by `./labctl up`) is expected.
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
STATE_DIR=$(cd "$SCRIPT_DIR/.." && pwd)
RUNTIME_ENV="$STATE_DIR/runtime.env"

# runtime.env provides the COURSEWARE_* variables (paths, host, ports) used
# throughout this script; refuse to run without it.
if [ ! -f "$RUNTIME_ENV" ]; then
  echo "Missing $RUNTIME_ENV. Run ./labctl up first." >&2
  exit 1
fi

# shellcheck disable=SC1090
. "$RUNTIME_ENV"
|
||||
|
||||
# Upstream Hugging Face repository holding the GGUF weights (Jinja2-templated).
REPO_URL="https://huggingface.co/{{ courseware_white_rabbit_repo }}"
REPO_REF="main"
# Local sparse git checkout of the upstream repo.
REPO_DIR="$COURSEWARE_STATE_DIR/downloads/WhiteRabbitNeo_WhiteRabbitNeo-V3-7B-GGUF"
# Final destination for the .gguf files and generated Modelfiles.
TARGET_DIR="$COURSEWARE_STATE_DIR/models/WhiteRabbitNeo"
# 1 = register Ollama aliases after downloading; --download-only sets this to 0.
REGISTER_WITH_OLLAMA=1

# Unique set of files to fetch (multiple aliases may share one file, hence
# the `unique` filter here but not below).
DOWNLOAD_FILES=(
{% for variant in courseware_white_rabbit_variants | unique(attribute='filename') %}
  "{{ variant.filename }}"
{% endfor %}
)

# MODEL_NAMES[i] is registered in Ollama from MODEL_FILES[i]; both arrays are
# rendered from the same variant list so their indices stay aligned.
MODEL_NAMES=(
{% for variant in courseware_white_rabbit_variants %}
  "{{ variant.ollama_model }}"
{% endfor %}
)

MODEL_FILES=(
{% for variant in courseware_white_rabbit_variants %}
  "{{ variant.filename }}"
{% endfor %}
)
|
||||
|
||||
# Print CLI help text to stdout.
usage() {
  cat <<'USAGE'
Usage: ./download_whiterabbitneo-gguf.sh [--download-only]

Downloads the WhiteRabbitNeo GGUF variants used in lab 2 with git + git-lfs.
By default it also registers local Ollama aliases after the files are present.

Options:
  --download-only  Skip Ollama model registration
  -h, --help       Show this help text
USAGE
}
|
||||
|
||||
# Abort the script with a diagnostic unless $1 resolves to a command on PATH.
require_cmd() {
  local cmd=$1
  command -v "$cmd" >/dev/null 2>&1 && return 0
  echo "Missing required command: $cmd" >&2
  exit 1
}
|
||||
|
||||
# Parse command-line flags; mutates REGISTER_WITH_OLLAMA. Unknown options
# print usage to stderr and abort; -h/--help prints usage and exits 0.
parse_args() {
  local arg
  for arg in "$@"; do
    case "$arg" in
      --download-only)
        REGISTER_WITH_OLLAMA=0
        ;;
      -h|--help)
        usage
        exit 0
        ;;
      *)
        echo "Unknown option: $arg" >&2
        usage >&2
        exit 1
        ;;
    esac
  done
}
|
||||
|
||||
# Prepare a sparse, blob-filtered git checkout of the model repository so only
# the selected GGUF files are materialized in the working tree.
# Globals (read): COURSEWARE_STATE_DIR, REPO_DIR, REPO_URL, REPO_REF, DOWNLOAD_FILES
setup_repo() {
  mkdir -p "$COURSEWARE_STATE_DIR/downloads"

  # (Re)clone only when no git checkout exists; discard any partial non-git
  # directory first. LFS smudge is skipped so the clone stays tiny — the
  # actual weights are pulled later by download_files.
  if [ ! -d "$REPO_DIR/.git" ]; then
    rm -rf "$REPO_DIR"
    GIT_LFS_SKIP_SMUDGE=1 git clone --filter=blob:none --no-checkout "$REPO_URL" "$REPO_DIR"
  fi

  # Keep the remote URL current in case the templated value changed between runs.
  git -C "$REPO_DIR" remote set-url origin "$REPO_URL"
  # Restrict the working tree to exactly the wanted files (non-cone mode
  # matches full paths rather than directories).
  git -C "$REPO_DIR" sparse-checkout init --no-cone
  git -C "$REPO_DIR" sparse-checkout set -- "${DOWNLOAD_FILES[@]}"
  # Shallow-fetch the tip of $REPO_REF and check it out detached.
  git -C "$REPO_DIR" fetch --depth=1 origin "$REPO_REF"
  git -C "$REPO_DIR" checkout -f --detach FETCH_HEAD
  # Enable LFS hooks for this repository only (not the user's global config).
  git -C "$REPO_DIR" lfs install --local >/dev/null
}
|
||||
|
||||
# Pull the LFS payloads for the selected files and copy them into TARGET_DIR.
# Globals (read): REPO_DIR, TARGET_DIR, DOWNLOAD_FILES
download_files() {
  local lfs_include
  # Fix: declare the loop variable local so it doesn't leak into the caller.
  local file

  mkdir -p "$TARGET_DIR"
  # git-lfs expects a comma-separated --include list.
  lfs_include=$(IFS=,; printf '%s' "${DOWNLOAD_FILES[*]}")

  git -C "$REPO_DIR" lfs pull origin --include="$lfs_include" --exclude=""

  for file in "${DOWNLOAD_FILES[@]}"; do
    # '--' stops option parsing in case a filename ever starts with a dash.
    cp -f -- "$REPO_DIR/$file" "$TARGET_DIR/$file"
  done
}
|
||||
|
||||
# Ensure the local Ollama API answers on the configured host/port, starting the
# service via service_manager.sh when available. Returns 0 once /api/tags
# responds, non-zero if it stays unreachable.
# Globals (read): COURSEWARE_BIND_HOST, COURSEWARE_OLLAMA_PORT, COURSEWARE_ROOT
start_ollama_if_needed() {
  local api_url="http://$COURSEWARE_BIND_HOST:$COURSEWARE_OLLAMA_PORT/api/tags"
  local attempt

  # Already up? Nothing to do.
  if curl -fsS "$api_url" >/dev/null 2>&1; then
    return 0
  fi

  if [ -x "$COURSEWARE_ROOT/scripts/service_manager.sh" ]; then
    "$COURSEWARE_ROOT/scripts/service_manager.sh" start ollama >/dev/null
  fi

  # Fix: a freshly started daemon needs a moment to bind its port; poll
  # briefly instead of failing on a single immediate probe.
  for attempt in 1 2 3 4 5; do
    if curl -fsS "$api_url" >/dev/null 2>&1; then
      return 0
    fi
    sleep 1
  done
  return 1
}
|
||||
|
||||
# Register each downloaded GGUF as a local Ollama model alias. No-ops (with an
# explanation) when registration is disabled, Ollama is absent, or the API
# never becomes reachable — the downloads remain valid in every case.
# Globals (read): REGISTER_WITH_OLLAMA, OLLAMA_BIN, COURSEWARE_BIND_HOST,
#   COURSEWARE_OLLAMA_PORT, OLLAMA_MODELS_DIR, TARGET_DIR, MODEL_NAMES, MODEL_FILES
register_models() {
  local idx
  local spec_path

  if [[ "$REGISTER_WITH_OLLAMA" -eq 0 ]]; then
    echo "Skipping Ollama registration because --download-only was requested."
    return 0
  fi

  if ! command -v "$OLLAMA_BIN" >/dev/null 2>&1; then
    echo "Ollama is not installed or not on PATH; downloads completed but model registration was skipped." >&2
    return 0
  fi

  if ! start_ollama_if_needed; then
    echo "Ollama is not reachable on http://$COURSEWARE_BIND_HOST:$COURSEWARE_OLLAMA_PORT; downloads completed but model registration was skipped." >&2
    return 0
  fi

  # Point the ollama CLI at the courseware instance and its model store.
  export OLLAMA_HOST="$COURSEWARE_BIND_HOST:$COURSEWARE_OLLAMA_PORT"
  export OLLAMA_MODELS="$OLLAMA_MODELS_DIR"

  # One Modelfile per alias: a single FROM line referencing the local GGUF.
  for idx in "${!MODEL_NAMES[@]}"; do
    spec_path="$TARGET_DIR/Modelfile.${MODEL_NAMES[$idx]}"
    printf 'FROM %s/%s\n' "$TARGET_DIR" "${MODEL_FILES[$idx]}" >"$spec_path"
    "$OLLAMA_BIN" create "${MODEL_NAMES[$idx]}" -f "$spec_path"
  done
}
|
||||
|
||||
# Entry point: validate tooling, materialize the sparse checkout, download the
# weights, and (unless --download-only was given) register Ollama aliases.
main() {
  parse_args "$@"
  require_cmd git
  require_cmd git-lfs
  # --skip-repo installs the LFS filters for this user without touching any
  # particular repository's configuration.
  git lfs install --skip-repo >/dev/null

  echo "Preparing WhiteRabbitNeo GGUF checkout in $REPO_DIR"
  setup_repo

  echo "Downloading selected WhiteRabbitNeo GGUF variants into $TARGET_DIR"
  download_files

  echo "Download complete:"
  printf ' - %s\n' "${DOWNLOAD_FILES[@]}"

  register_models

  echo "WhiteRabbitNeo lab 2 setup is ready."
}

main "$@"
|
||||
@@ -0,0 +1,14 @@
|
||||
# Promptfoo evaluation config (Jinja2-templated): local placeholder eval for
# lab 6, targeting the courseware Ollama instance through its OpenAI-compatible
# /v1 endpoint.
description: Local evaluation placeholder for lab 6
providers:
  - id: openai:chat
    label: Local Ollama
    config:
      apiBaseUrl: http://{{ courseware_bind_host }}:{{ courseware_ports.ollama }}/v1
      # Ollama ignores the key, but the OpenAI provider requires a non-empty value.
      apiKey: local-only
      # NOTE(review): "qwen3.5:4b" does not look like a published Ollama tag
      # (qwen3:4b and qwen2.5 variants exist) — confirm the model name.
      model: qwen3.5:4b
prompts:
  # The outer {{ '...' }} escapes the inner {{prompt}} so Jinja2 leaves the
  # promptfoo template variable intact in the rendered file.
  - "{{ '{{prompt}}' }}"
tests:
  - vars:
      prompt: "Summarize the purpose of this lab environment in one sentence."
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
# runtime.env template (Jinja2): rendered by Ansible and sourced by the
# courseware shell scripts. All values are fixed at deploy time.

# Core directory layout.
COURSEWARE_ROOT="{{ courseware_root }}"
COURSEWARE_STATE_DIR="{{ courseware_state_dir }}"

# Bind host (what services listen on) vs. URL host (what users browse to).
COURSEWARE_BIND_HOST="{{ courseware_bind_host }}"
COURSEWARE_URL_HOST="{{ courseware_url_host }}"

# One port per courseware service.
COURSEWARE_OLLAMA_PORT="{{ courseware_ports.ollama }}"
COURSEWARE_OPEN_WEBUI_PORT="{{ courseware_ports.open_webui }}"
COURSEWARE_TRANSFORMERLAB_PORT="{{ courseware_ports.transformerlab }}"
COURSEWARE_CHUNKVIZ_PORT="{{ courseware_ports.chunkviz }}"
COURSEWARE_EMBEDDING_ATLAS_PORT="{{ courseware_ports.embedding_atlas }}"
COURSEWARE_UNSLOTH_PORT="{{ courseware_ports.unsloth }}"
COURSEWARE_PROMPTFOO_PORT="{{ courseware_ports.promptfoo }}"
COURSEWARE_WIKI_PORT="{{ courseware_ports.wiki }}"

# Ollama binary and model store.
OLLAMA_BIN="{{ courseware_ollama_bin }}"
OLLAMA_MODELS_DIR="{{ courseware_ollama_models_dir }}"

# Tool, dataset, and application locations.
NODE_RUNTIME_BIN_DIR="{{ courseware_node_runtime_bin_dir }}"
OPEN_WEBUI_VENV="{{ courseware_venvs_dir }}/open-webui"
OPEN_WEBUI_DATA_DIR="{{ courseware_state_dir }}/open-webui"
CHUNKVIZ_DIR="{{ courseware_repos_dir }}/ChunkViz"
EMBEDDING_ATLAS_VENV="{{ courseware_venvs_dir }}/embedding-atlas"
TTPS_DATASET_PATH="{{ courseware_datasets_dir }}/ttps_dataset.parquet"
WIKI_TEST_RAW_PATH="{{ courseware_datasets_dir }}/wiki.test.raw"
TRANSFORMERLAB_DIR="{{ courseware_transformerlab_home }}"
UNSLOTH_BIN="{{ ansible_env.HOME }}/.local/bin/unsloth"
PROMPTFOO_DIR="{{ courseware_promptfoo_dir }}"
PROMPTFOO_BIN="{{ courseware_tools_dir }}/promptfoo/node_modules/.bin/promptfoo"
WIKI_DIR="{{ courseware_wiki_repo_dir }}"
LLAMA_CPP_BIN_DIR="{{ courseware_llama_cpp_bin_dir }}"

# Kiln desktop app: platform-specific launch path resolved at render time.
KILN_LINUX_BIN="{{ courseware_apps_dir }}/kiln/Kiln"
KILN_MAC_APP="{{ courseware_apps_dir }}/Kiln.app"
KILN_LAUNCH_PATH="{% if ansible_system == 'Darwin' %}{{ courseware_apps_dir }}/Kiln.app{% else %}{{ courseware_apps_dir }}/kiln/Kiln{% endif %}"
|
||||
Reference in New Issue
Block a user