#!/bin/sh
# User-space Ollama installer (no sudo)
# Installs to ~/.local/bin and ~/.local/lib/ollama
# Modified from https://github.com/ollama/ollama/blob/main/scripts/install.sh
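#
# Example usage (assuming this file is saved as install-user.sh):
#
#   sh install-user.sh
#   OLLAMA_VERSION=0.5.7 sh install-user.sh   # pin a version (number illustrative)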

set -eu

# Bold red for errors/warnings; tput failures are silenced by closing stderr
red="$( (/usr/bin/tput bold || :; /usr/bin/tput setaf 1 || :) 2>&-)"
plain="$( (/usr/bin/tput sgr0 || :) 2>&-)"

status() { echo ">>> $*" >&2; }
error() { echo "${red}ERROR:${plain} $*" >&2; exit 1; }
warning() { echo "${red}WARNING:${plain} $*" >&2; }

available() { command -v "$1" >/dev/null 2>&1; }

# Echo the names of any of the given tools that are not installed
require() {
    MISSING=''
    for TOOL in "$@"; do
        if ! available "$TOOL"; then
            MISSING="$MISSING $TOOL"
        fi
    done
    echo "$MISSING"
}
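
# Example: 'require curl tar' echoes " curl tar" when both are missing
# and an empty string when everything is present.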

[ "$(uname -s)" = "Linux" ] || error "This script is intended to run on Linux only."

ARCH="$(uname -m)"
case "$ARCH" in
    x86_64) ARCH="amd64" ;;
    aarch64|arm64) ARCH="arm64" ;;
    *) error "Unsupported architecture: $ARCH" ;;
esac

# WSL2 kernels typically report a release like "5.15.153.1-microsoft-standard-WSL2"
IS_WSL2=false
KERN="$(uname -r)"
case "$KERN" in
    *icrosoft*WSL2|*icrosoft*wsl2) IS_WSL2=true ;;
    *icrosoft) error "Microsoft WSL1 is not supported. Please use WSL2: wsl --set-version <distro> 2" ;;
    *) ;;
esac

NEEDS="$(require curl awk grep sed tee xargs tar mktemp)"
if [ -n "$NEEDS" ]; then
    status "The following tools are required but missing:"
    for NEED in $NEEDS; do
        echo " - $NEED" >&2
    done
    exit 1
fi

# Install locations (user-space)
# Note: XDG_LOCAL_HOME is not part of the XDG Base Directory spec; it is
# honored here only as an optional override for ~/.local.
LOCAL_ROOT="${XDG_LOCAL_HOME:-"$HOME/.local"}"
BINDIR="$LOCAL_ROOT/bin"
LIBROOT="$LOCAL_ROOT/lib"
OLLAMA_LIB_DIR="$LIBROOT/ollama"

# Optional version pinning (same mechanism as the official script)
VER_PARAM="${OLLAMA_VERSION:+?version=$OLLAMA_VERSION}"
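# For example, OLLAMA_VERSION=0.5.7 (version illustrative) turns the download
# URL into https://ollama.com/download/ollama-linux-${ARCH}.tgz?version=0.5.7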

TEMP_DIR="$(mktemp -d)"
cleanup() { rm -rf "$TEMP_DIR"; }

install_success() {
    status "Install complete."
    status "Binary: $BINDIR/ollama"
    status "The Ollama API is available at 127.0.0.1:11434 while 'ollama serve' is running."
    # Exact path-component match; a plain 'grep -q "$BINDIR"' could
    # false-positive on substrings or regex metacharacters.
    case ":$PATH:" in
        *":$BINDIR:"*) ;;
        *)
            warning "$BINDIR is not in PATH."
            warning "Add this to your shell rc (e.g. ~/.bashrc, ~/.zshrc):"
            echo " export PATH=\"$BINDIR:\$PATH\"" >&2
            ;;
    esac
}

# Clean up on any exit; install_success is called explicitly at the end of a
# successful install so that error exits do not report success.
trap cleanup EXIT

status "Installing to user-space:"
status " - bin: $BINDIR"
status " - lib: $OLLAMA_LIB_DIR"

# Remove any previous lib dir before creating fresh directories; checking
# after 'mkdir -p' would always find the directory and never an old install.
if [ -d "$OLLAMA_LIB_DIR" ]; then
    status "Cleaning up old lib dir at $OLLAMA_LIB_DIR"
    rm -rf "$OLLAMA_LIB_DIR"
fi
mkdir -p "$BINDIR" "$OLLAMA_LIB_DIR"

status "Downloading Linux ${ARCH} bundle"
curl --fail --show-error --location --progress-bar \
    "https://ollama.com/download/ollama-linux-${ARCH}.tgz${VER_PARAM}" | \
    tar -xzf - -C "$LOCAL_ROOT"

# The official tarball layout can differ across versions. Prefer the extracted
# bin/ollama; otherwise symlink $BINDIR/ollama to a binary at the archive root.
if [ -x "$LOCAL_ROOT/bin/ollama" ]; then
    : # already in place
elif [ -x "$LOCAL_ROOT/ollama" ]; then
    status "Linking $BINDIR/ollama -> $LOCAL_ROOT/ollama"
    ln -sf "$LOCAL_ROOT/ollama" "$BINDIR/ollama"
else
    error "Could not find extracted ollama binary under $LOCAL_ROOT (expected bin/ollama or ./ollama)."
fi

# Jetson extra components (still user-space extract). /etc/nv_tegra_release
# names the L4T release on its first line, e.g. "# R36 (release), ...".
if [ -f /etc/nv_tegra_release ]; then
    if grep R36 /etc/nv_tegra_release >/dev/null 2>&1; then
        status "Downloading JetPack 6 components"
        curl --fail --show-error --location --progress-bar \
            "https://ollama.com/download/ollama-linux-${ARCH}-jetpack6.tgz${VER_PARAM}" | \
            tar -xzf - -C "$LOCAL_ROOT"
    elif grep R35 /etc/nv_tegra_release >/dev/null 2>&1; then
        status "Downloading JetPack 5 components"
        curl --fail --show-error --location --progress-bar \
            "https://ollama.com/download/ollama-linux-${ARCH}-jetpack5.tgz${VER_PARAM}" | \
            tar -xzf - -C "$LOCAL_ROOT"
    else
        warning "Unsupported JetPack version detected. GPU may not be supported."
    fi
fi

# Optional: user-level systemd service (no sudo)
configure_systemd_user() {
    if ! available systemctl; then
        return 0
    fi

    # The systemd user manager may not be available in some contexts
    # (e.g. minimal containers or sessions without a user bus)
    if ! systemctl --user show-environment >/dev/null 2>&1; then
        warning "systemd user session not available; skipping user service install."
        return 0
    fi

    UNIT_DIR="${XDG_CONFIG_HOME:-"$HOME/.config"}/systemd/user"
    mkdir -p "$UNIT_DIR"

    status "Creating systemd user service: $UNIT_DIR/ollama.service"
    cat <<EOF > "$UNIT_DIR/ollama.service"
[Unit]
Description=Ollama Service (user)
After=network-online.target

[Service]
ExecStart=$BINDIR/ollama serve
Restart=always
RestartSec=3
Environment="PATH=$BINDIR:/usr/local/bin:/usr/bin:/bin"

[Install]
WantedBy=default.target
EOF

    status "Enabling & starting user service..."
    systemctl --user daemon-reload
    systemctl --user enable --now ollama.service

    status "User service installed. Check status via:"
    echo " systemctl --user status ollama.service" >&2
}

# Install user service if possible
configure_systemd_user
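
# Note: systemd user services stop at logout unless lingering is enabled.
# To keep Ollama running without an active login session:
#
#   loginctl enable-linger "$USER"
#
# Logs for the user service are available via:
#
#   journalctl --user -u ollama.service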

# WSL2 note (no driver install here)
if [ "$IS_WSL2" = true ]; then
    if available nvidia-smi && [ -n "$(nvidia-smi | grep -o "CUDA Version: [0-9]*\.[0-9]*" || true)" ]; then
        status "WSL2 NVIDIA GPU detected (nvidia-smi available)."
    else
        warning "WSL2: If you want GPU acceleration, ensure NVIDIA driver + WSL GPU support is set up."
    fi
    install_success
    exit 0
fi

# GPU detection (informational only; no root driver install)
if available nvidia-smi; then
    status "NVIDIA GPU detected (nvidia-smi available)."
else
    # Best-effort check for any GPU device (AMD/NVIDIA), just to inform the user
    if available lspci && lspci | grep -Eiq 'VGA|3D|Display'; then
        status "GPU devices found (lspci), but driver/tooling may be missing."
    fi
    warning "No NVIDIA driver tooling detected (nvidia-smi missing). Ollama will run CPU-only unless GPU drivers are already installed."
fi

install_success
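
# Quick start (model name illustrative):
#
#   ollama serve &        # only needed if the systemd user service is not running
#   ollama run llama3.2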