Add install script with full dependency checks
Verifies NVIDIA T4 driver (>=525), CUDA, Python 3.11+, Ollama, Piper system libs, then creates venv, installs pip packages, and downloads the Piper TTS voice model. Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
98310bf062
commit
e3722e5f0b
1 changed file with 241 additions and 0 deletions
241
install.sh
Normal file
241
install.sh
Normal file
|
|
@ -0,0 +1,241 @@
|
|||
#!/usr/bin/env bash
set -euo pipefail

# Shop Bob Server — Install Script
# Target: Ubuntu/Debian with NVIDIA T4 (16GB VRAM)
#
# Verifies system dependencies (GPU driver, CUDA, Python, Ollama, Piper
# libs), then creates a venv, installs pip packages, and downloads the
# Piper TTS voice model.

# ANSI colors for check output.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Counters summarized at the end of the script.
ERRORS=0
WARNINGS=0

# pass/warn/fail: print one status line; warn/fail also bump their counter.
# NOTE: ((VAR++)) must NOT be used here — post-increment evaluates to the
# OLD value, so the first call (counter 0) returns exit status 1 and the
# function's non-zero status aborts the entire script under `set -e`.
# Plain arithmetic assignment always succeeds.
pass() { echo -e " ${GREEN}✓${NC} $1"; }
warn() { echo -e " ${YELLOW}⚠${NC} $1"; WARNINGS=$((WARNINGS + 1)); }
fail() { echo -e " ${RED}✗${NC} $1"; ERRORS=$((ERRORS + 1)); }

# Resolve paths relative to this script so it can be run from any cwd.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
VENV_DIR="${SCRIPT_DIR}/.venv"
PIPER_MODEL="en_US-lessac-medium"
PIPER_MODEL_DIR="${SCRIPT_DIR}/piper-models"
||||
# ─── System checks ───────────────────────────────────────────────────────────

echo ""
echo "═══ Shop Bob Server — Dependency Check & Install ═══"
echo ""

# ── OS ────────────────────────────────────────────────────────────────────────
# Identify the distro by sourcing os-release, which exposes PRETTY_NAME.
echo "System:"
if [[ -f /etc/os-release ]]; then
  # shellcheck source=/dev/null
  . /etc/os-release
  pass "OS: ${PRETTY_NAME}"
else
  warn "Cannot detect OS — expected Ubuntu/Debian"
fi
||||
|
||||
# ── Python 3.11+ ──────────────────────────────────────────────────────────────
echo ""
echo "Python:"
if command -v python3 &>/dev/null; then
  PY_VER=$(python3 -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')
  # Compare the interpreter's version tuple directly instead of string-
  # parsing "major.minor" with cut — robust against odd version strings.
  if python3 -c 'import sys; sys.exit(0 if sys.version_info >= (3, 11) else 1)'; then
    pass "Python ${PY_VER}"
  else
    fail "Python ${PY_VER} found — need 3.11+"
  fi
else
  fail "python3 not found"
  echo " Install: sudo apt install python3.11 python3.11-venv python3.11-dev"
fi

# pip may exist as a standalone pip3 binary or only as `python3 -m pip`.
if command -v pip3 &>/dev/null || python3 -m pip --version &>/dev/null; then
  pass "pip available"
else
  fail "pip not found"
  echo " Install: sudo apt install python3-pip"
fi
||||
|
||||
# ── NVIDIA Driver ─────────────────────────────────────────────────────────────
echo ""
echo "NVIDIA GPU:"
if command -v nvidia-smi &>/dev/null; then
  # `|| true` guards: a failing nvidia-smi query would otherwise abort the
  # whole script via `set -e` + `pipefail` inside the substitution.
  DRIVER_VER=$(nvidia-smi --query-gpu=driver_version --format=csv,noheader 2>/dev/null | head -1 || true)
  GPU_NAME=$(nvidia-smi --query-gpu=name --format=csv,noheader 2>/dev/null | head -1 || true)
  GPU_MEM=$(nvidia-smi --query-gpu=memory.total --format=csv,noheader 2>/dev/null | head -1 || true)

  if echo "$GPU_NAME" | grep -qi "T4"; then
    pass "GPU: ${GPU_NAME} (${GPU_MEM})"
  else
    warn "GPU: ${GPU_NAME} (${GPU_MEM}) — expected NVIDIA T4"
  fi
  pass "Driver: ${DRIVER_VER}"

  # Check minimum driver version for CUDA 12 (>=525.60). Only attempt the
  # numeric comparison when the major version parsed as digits — an empty
  # DRIVER_VER would make `[ "" -ge 525 ]` an invalid test expression.
  DRIVER_MAJOR=$(echo "$DRIVER_VER" | cut -d. -f1)
  if [[ "$DRIVER_MAJOR" =~ ^[0-9]+$ ]] && [ "$DRIVER_MAJOR" -ge 525 ]; then
    pass "Driver supports CUDA 12"
  else
    fail "Driver ${DRIVER_VER} too old — need >=525 for CUDA 12"
    echo " Install: sudo apt install nvidia-driver-535"
  fi
else
  fail "nvidia-smi not found — no NVIDIA driver installed"
  echo " Install driver:"
  echo " sudo apt update"
  echo " sudo apt install nvidia-driver-535"
  echo " sudo reboot"
fi
||||
|
||||
# ── CUDA Toolkit ──────────────────────────────────────────────────────────────
echo ""
echo "CUDA:"
if command -v nvcc &>/dev/null; then
  # Fall back to "unknown" if the release string is absent — a failed grep
  # inside the substitution would otherwise kill the script via `set -e`.
  CUDA_VER=$(nvcc --version | grep -oP 'release \K[0-9]+\.[0-9]+' || echo "unknown")
  pass "CUDA toolkit: ${CUDA_VER}"
elif [ -d /usr/local/cuda ]; then
  # Read version.json directly via stdin redirection (no `cat |` needed);
  # stderr is silenced first so a missing file also lands on the fallback.
  CUDA_VER=$(python3 -c "import sys,json; print(json.load(sys.stdin)['cuda']['version'])" 2>/dev/null </usr/local/cuda/version.json || echo "unknown")
  pass "CUDA found at /usr/local/cuda (${CUDA_VER})"
  warn "nvcc not in PATH — add: export PATH=/usr/local/cuda/bin:\$PATH"
else
  # faster-whisper bundles cuDNN/cuBLAS via ctranslate2, so CUDA toolkit
  # is not strictly required if the driver is new enough
  warn "CUDA toolkit not found — faster-whisper may still work via bundled libs"
  echo " Optional install: sudo apt install nvidia-cuda-toolkit"
fi
||||
|
||||
# ── cuDNN ─────────────────────────────────────────────────────────────────────
# A missing system cuDNN is only a warning: faster-whisper ships its own
# copy via ctranslate2. Grab the first matching ldconfig entry once and
# branch on whether anything was found.
CUDNN_LINE=$(ldconfig -p 2>/dev/null | grep libcudnn | head -1 || true)
if [[ -n "$CUDNN_LINE" ]]; then
  CUDNN_VER=$(echo "$CUDNN_LINE" | grep -oP 'libcudnn\S+')
  pass "cuDNN: found (${CUDNN_VER})"
else
  warn "cuDNN not detected in system libs — faster-whisper ships its own, should be OK"
fi
||||
|
||||
# ── Ollama ────────────────────────────────────────────────────────────────────
echo ""
echo "Ollama:"
if command -v ollama &>/dev/null; then
  OLLAMA_VER=$(ollama --version 2>/dev/null || echo "unknown")
  pass "Ollama installed: ${OLLAMA_VER}"

  # Check if the Ollama service is running. Fetch the tag list ONCE and
  # reuse it for the model check — the original queried the API twice,
  # doubling the round-trips and racing against a service restart.
  if TAGS_JSON=$(curl -sf http://localhost:11434/api/tags 2>/dev/null); then
    pass "Ollama service is running"

    # Check for the default model
    if printf '%s' "$TAGS_JSON" | python3 -c "
import sys, json
tags = json.load(sys.stdin)
models = [m['name'] for m in tags.get('models', [])]
if any('llama3.1' in m for m in models):
    print('found')
else:
    sys.exit(1)
" &>/dev/null; then
      pass "llama3.1 model available"
    else
      warn "llama3.1 model not pulled yet"
      echo " Run: ollama pull llama3.1:8b"
    fi
  else
    warn "Ollama installed but service not running"
    echo " Start: sudo systemctl start ollama"
  fi
else
  fail "Ollama not installed"
  echo " Install: curl -fsSL https://ollama.com/install.sh | sh"
  echo " Then: ollama pull llama3.1:8b"
fi
||||
|
||||
# ── System packages for Piper ────────────────────────────────────────────────
echo ""
echo "Piper TTS dependencies:"
PIPER_DEPS=(libespeak-ng1 libsndfile1)
for dep in "${PIPER_DEPS[@]}"; do
  # `dpkg -l` exits 0 even for packages that were removed but not purged
  # ("rc" state), so it false-passes. Check the Status field from
  # `dpkg -s` instead — only "ok installed" counts.
  if dpkg -s "$dep" 2>/dev/null | grep -q "ok installed"; then
    pass "${dep}"
  else
    warn "${dep} not installed"
    echo " Install: sudo apt install ${dep}"
  fi
done
||||
|
||||
# ─── Install ─────────────────────────────────────────────────────────────────

echo ""
echo "═══ Installation ═══"
echo ""

# ── Python venv ───────────────────────────────────────────────────────────────
# Reuse an existing venv when present, otherwise create one; then activate
# it so the pip steps below install into the project environment.
echo "Virtual environment:"
if [[ -d "$VENV_DIR" ]]; then
  pass "Existing venv at ${VENV_DIR}"
else
  echo " Creating venv..."
  python3 -m venv "$VENV_DIR"
  pass "Created ${VENV_DIR}"
fi
. "${VENV_DIR}/bin/activate"
pass "Activated venv ($(python3 --version))"
||||
|
||||
# ── pip dependencies ──────────────────────────────────────────────────────────
echo ""
echo "Python packages:"
REQUIREMENTS="${SCRIPT_DIR}/server/requirements.txt"
# Fail fast with a clear message if the requirements file is missing —
# otherwise pip aborts mid-install with a far less obvious error.
if [ ! -f "$REQUIREMENTS" ]; then
  fail "requirements file not found: ${REQUIREMENTS}"
  exit 1
fi
echo " Installing from requirements.txt..."
pip install --upgrade pip -q
pip install -r "$REQUIREMENTS" -q
pass "All pip packages installed"
||||
|
||||
# ── Piper model ───────────────────────────────────────────────────────────────
echo ""
echo "Piper TTS model:"
ONNX_FILE="${PIPER_MODEL_DIR}/${PIPER_MODEL}.onnx"
JSON_FILE="${PIPER_MODEL_DIR}/${PIPER_MODEL}.onnx.json"

if [ -f "$ONNX_FILE" ] && [ -f "$JSON_FILE" ]; then
  pass "Model already downloaded: ${PIPER_MODEL}"
else
  echo " Downloading ${PIPER_MODEL}..."
  mkdir -p "$PIPER_MODEL_DIR"
  PIPER_URL="https://huggingface.co/rhasspy/piper-voices/resolve/main/en/en_US/lessac/medium"
  # -f makes curl fail on HTTP errors (e.g. 404) instead of silently saving
  # the HTML error page — which would then wrongly pass the non-empty-file
  # check. The `if !` guard lets a failure reach fail() instead of killing
  # the script via `set -e`.
  if ! curl -fsL "${PIPER_URL}/${PIPER_MODEL}.onnx" -o "$ONNX_FILE" ||
     ! curl -fsL "${PIPER_URL}/${PIPER_MODEL}.onnx.json" -o "$JSON_FILE"; then
    rm -f -- "$ONNX_FILE" "$JSON_FILE"  # don't leave partial files behind
    fail "Failed to download Piper model"
  elif [ -s "$ONNX_FILE" ] && [ -s "$JSON_FILE" ]; then
    pass "Downloaded ${PIPER_MODEL}"
  else
    fail "Failed to download Piper model"
  fi
fi
||||
|
||||
# ─── Summary ─────────────────────────────────────────────────────────────────
# Exit 1 if any hard error was recorded; otherwise report success (with or
# without warnings) and print how to start the server.

echo ""
echo "═══════════════════════════════════════════════════"
if (( ERRORS > 0 )); then
  echo -e "${RED} ${ERRORS} error(s)${NC}, ${WARNINGS} warning(s)"
  echo " Fix errors above before running the server."
  echo "═══════════════════════════════════════════════════"
  exit 1
elif (( WARNINGS > 0 )); then
  echo -e "${GREEN} Install complete${NC} with ${YELLOW}${WARNINGS} warning(s)${NC}"
  echo ""
  echo " Review warnings above. To start the server:"
  echo " source .venv/bin/activate"
  echo " uvicorn server.main:app --host 0.0.0.0 --port 8765"
  echo "═══════════════════════════════════════════════════"
  exit 0
else
  echo -e "${GREEN} Install complete — all checks passed!${NC}"
  echo ""
  echo " To start the server:"
  echo " source .venv/bin/activate"
  echo " uvicorn server.main:app --host 0.0.0.0 --port 8765"
  echo "═══════════════════════════════════════════════════"
  exit 0
fi
|
||||
Loading…
Reference in a new issue