Support LAN deployment and managed Python runtime

Made-with: Cursor
bzuccaro
2026-04-25 18:05:56 +00:00
parent fe568c17cd
commit e95ee9c938
12 changed files with 263 additions and 72 deletions
+4 -1
@@ -14,7 +14,10 @@ load_runtime_env() {
: "${COURSEWARE_STATE_DIR:=$STATE_DIR}"
: "${COURSEWARE_BIND_HOST:=127.0.0.1}"
: "${COURSEWARE_URL_HOST:=127.0.0.1}"
if [ -z "${COURSEWARE_URL_HOST:-}" ]; then
COURSEWARE_URL_HOST=$(ip route get 1.1.1.1 2>/dev/null | sed -nE 's/.* src ([0-9.]+).*/\1/p' | head -n 1)
: "${COURSEWARE_URL_HOST:=127.0.0.1}"
fi
: "${COURSEWARE_NETRON_PORT:=8338}"
: "${COURSEWARE_PROMPTFOO_PORT:=15500}"
: "${COURSEWARE_WIKI_PORT:=80}"
+70 -50
@@ -115,8 +115,9 @@ is_running() {
service_startup_attempts() {
    case "$1" in
        embedding-atlas)
            # The first launch can be noticeably slower on cold environments.
            printf '%s\n' 180
            # First launch embeds the bundled dataset. On older GPU drivers this falls
            # back to CPU and can take close to an hour.
            printf '%s\n' 3600
            ;;
        *)
            printf '%s\n' 60
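The larger budget only matters to whoever consumes it; a hedged sketch of the consuming loop (the `seq 1 "$startup_attempts"` loop appears later in this file; the one-second probe interval is an assumption, not shown in this hunk):

startup_attempts=$(service_startup_attempts "$service")
for attempt in $(seq 1 "$startup_attempts"); do
    if service_ready "$service"; then
        break   # ready; embedding-atlas may legitimately need up to 3600 probes
    fi
    sleep 1     # assumed probe interval
done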
@@ -198,56 +199,13 @@ terminate_service_processes() {
    done < <(service_listener_pids "$service")
}
start_one() {
wait_for_service_ready() {
    local service=$1
    local cmd
    local log_file
    local pid_file
    local log_file=$2
    local pid_file=$3
    local startup_attempts=$4
    local pid_grace_attempts=$5
    local attempt
    local pid_grace_attempts=5
    local startup_attempts
    if [ "$service" = "ollama" ] || [ "$service" = "wiki" ]; then
        assert_ollama_logprobs_support
    fi
    if has_live_pid "$service"; then
        echo "$service already running"
        return 0
    fi
    if service_ready "$service"; then
        echo "$service already available"
        return 0
    fi
    case "$service" in
        open-webui)
            start_one ollama
            ;;
        wetty)
            check_wetty_prereqs
            ;;
        *)
            ;;
    esac
    cmd=$(service_command "$service")
    startup_attempts=$(service_startup_attempts "$service")
    log_file=$(service_log_file "$service")
    pid_file=$(service_pid_file "$service")
    if [ "$service" = "ollama" ]; then
        env \
            OLLAMA_HOST="${COURSEWARE_BIND_HOST}:${COURSEWARE_OLLAMA_PORT}" \
            OLLAMA_MODELS="$OLLAMA_MODELS_DIR" \
            "$OLLAMA_BIN" serve </dev/null >>"$log_file" 2>&1 &
    elif command -v setsid >/dev/null 2>&1; then
        nohup setsid bash -lc "$cmd" </dev/null >>"$log_file" 2>&1 &
    else
        nohup bash -lc "$cmd" </dev/null >>"$log_file" 2>&1 &
    fi
    echo $! >"$pid_file"
    for attempt in $(seq 1 "$startup_attempts"); do
        if service_ready "$service"; then
@@ -270,6 +228,68 @@ start_one() {
    exit 1
}
start_one() {
    local service=$1
    local cmd
    local log_file
    local pid_file
    local pid_grace_attempts=5
    local startup_attempts
    if [ "$service" = "ollama" ] || [ "$service" = "wiki" ]; then
        assert_ollama_logprobs_support
    fi
    startup_attempts=$(service_startup_attempts "$service")
    log_file=$(service_log_file "$service")
    pid_file=$(service_pid_file "$service")
    if service_ready "$service"; then
        echo "$service already available"
        return 0
    fi
    if has_live_pid "$service"; then
        echo "$service already starting"
        wait_for_service_ready "$service" "$log_file" "$pid_file" "$startup_attempts" "$pid_grace_attempts"
        return 0
    fi
    case "$service" in
        open-webui)
            start_one ollama
            ;;
        wetty)
            check_wetty_prereqs
            ;;
        *)
            ;;
    esac
    cmd=$(service_command "$service")
    if [ "$service" = "ollama" ]; then
        if command -v setsid >/dev/null 2>&1; then
            nohup setsid env \
                OLLAMA_HOST="${COURSEWARE_BIND_HOST}:${COURSEWARE_OLLAMA_PORT}" \
                OLLAMA_MODELS="$OLLAMA_MODELS_DIR" \
                "$OLLAMA_BIN" serve </dev/null >>"$log_file" 2>&1 &
        else
            nohup env \
                OLLAMA_HOST="${COURSEWARE_BIND_HOST}:${COURSEWARE_OLLAMA_PORT}" \
                OLLAMA_MODELS="$OLLAMA_MODELS_DIR" \
                "$OLLAMA_BIN" serve </dev/null >>"$log_file" 2>&1 &
        fi
    elif command -v setsid >/dev/null 2>&1; then
        nohup setsid bash -lc "$cmd" </dev/null >>"$log_file" 2>&1 &
    else
        nohup bash -lc "$cmd" </dev/null >>"$log_file" 2>&1 &
    fi
    echo $! >"$pid_file"
    wait_for_service_ready "$service" "$log_file" "$pid_file" "$startup_attempts" "$pid_grace_attempts"
}
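Every launch branch in start_one follows the same detach recipe; a generic sketch of it (launch_detached is a hypothetical helper, not part of the diff):

# setsid starts the child in a fresh session with no controlling terminal,
# so it outlives the launching shell and is immune to its SIGHUP; plain
# nohup (the fallback where setsid is missing, e.g. on macOS) only ignores
# SIGHUP. </dev/null detaches stdin; >>"$log_file" 2>&1 appends all output.
launch_detached() {
    local cmd=$1 log_file=$2
    if command -v setsid >/dev/null 2>&1; then
        nohup setsid bash -lc "$cmd" </dev/null >>"$log_file" 2>&1 &
    else
        nohup bash -lc "$cmd" </dev/null >>"$log_file" 2>&1 &
    fi
    printf '%s\n' "$!"   # start_one writes this PID to the pid file
}

ollama stays a special case above only because it needs OLLAMA_HOST and OLLAMA_MODELS injected via env before serve starts.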
stop_one() {
    local service=$1
    local pid_file