#!/bin/bash
# Exit on error
set -e
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
# Logging function
log() {
    echo -e "${GREEN}[$(date '+%Y-%m-%d %H:%M:%S')]${NC} $1"
}
error() {
    echo -e "${RED}[$(date '+%Y-%m-%d %H:%M:%S')] ERROR:${NC} $1" >&2
}
# Start setup
log "Starting Ollama setup on EC2..."
# Install Ollama
log "Installing Ollama..."
curl -fsSL https://ollama.com/install.sh | sh
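# Quick sanity check (added as a sketch; assumes the install script above puts the ollama binary on PATH)
if ! command -v ollama > /dev/null 2>&1; then
    error "Ollama binary not found after install"
    exit 1
fi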
# Create models directory on the instance-store NVMe volume
# (/opt/dlami/nvme is the default NVMe mount on AWS Deep Learning AMIs)
log "Creating models directory..."
sudo mkdir -p /opt/dlami/nvme/ollama-models
sudo chown -R ollama:ollama /opt/dlami/nvme/ollama-models
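# Optional: show free space on the NVMe volume before pulling weights, since model files are large
df -h /opt/dlami/nvme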
# Configure Ollama service: listen on all interfaces and store models on the NVMe volume
log "Configuring Ollama service..."
sudo mkdir -p /etc/systemd/system/ollama.service.d
sudo tee /etc/systemd/system/ollama.service.d/override.conf > /dev/null <<EOF
[Service]
Environment="OLLAMA_HOST=0.0.0.0"
Environment="OLLAMA_MODELS=/opt/dlami/nvme/ollama-models"
EOF
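# Optional check (a sketch; assumes the install script registers the unit as "ollama"): confirm systemd sees the drop-in
sudo systemctl cat ollama | grep "OLLAMA_" || error "Override not found in ollama unit definition"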
# Reload and restart service
log "Restarting Ollama service..."
sudo systemctl daemon-reload
sudo systemctl restart ollama
# Wait for service to be ready
log "Waiting for Ollama service to be ready..."
sleep 5
# Check if Ollama is running
if ! systemctl is-active --quiet ollama; then
    error "Ollama service failed to start"
    exit 1
fi
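# Extra readiness check (a sketch; assumes the default port 11434 and Ollama's /api/version endpoint): poll the HTTP API instead of relying only on the fixed sleep above
for _ in $(seq 1 10); do
    if curl -sf http://localhost:11434/api/version > /dev/null; then
        log "Ollama API is responding"
        break
    fi
    sleep 2
done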
# Pull the model
log "Pulling gpt-oss:20b model (this may take a while)..."
ollama pull gpt-oss:20b
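# Optional: list local models to confirm the pull completed
ollama list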
# Get the instance's private IP for the endpoint URL
PRIVATE_IP=$(hostname -I | awk '{print $1}')
# Install btop for resource monitoring (non-interactive so the script doesn't block on a prompt)
sudo apt-get install -y btop
# Display results
echo ""
echo -e "${GREEN}========================================${NC}"
echo -e "${GREEN}Ollama Setup Complete!${NC}"
echo -e "${GREEN}========================================${NC}"
echo ""
echo -e "${YELLOW}OpenAI Base URL:${NC} http://${PRIVATE_IP}:11434/v1"
echo ""
echo -e "${GREEN}You can now use this base URL with OpenAI-compatible clients${NC}"
echo -e "${GREEN}Example: export OPENAI_BASE_URL=\"http://${PRIVATE_IP}:11434/v1\"${NC}"
echo ""