#!/bin/bash

set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

echo -e "${BLUE}========================================${NC}"
echo -e "${BLUE}k6 + InfluxDB + Grafana Setup Script${NC}"
echo -e "${BLUE}========================================${NC}"
echo ""

# Function to check if command exists
command_exists() {
    command -v "$1" >/dev/null 2>&1
}

# Check prerequisites
echo -e "${YELLOW}Checking prerequisites...${NC}"

if ! command_exists docker; then
    echo -e "${RED}Error: docker is not installed${NC}"
    exit 1
fi

if ! command_exists docker-compose && ! docker compose version >/dev/null 2>&1; then
    echo -e "${RED}Error: docker-compose is not installed${NC}"
    exit 1
fi

# Use the standalone docker-compose binary if present, otherwise fall back to
# the "docker compose" plugin (the check above accepts either).
if command_exists docker-compose; then
    COMPOSE="docker-compose"
else
    COMPOSE="docker compose"
fi

if ! command_exists python3; then
    echo -e "${RED}Error: python3 is not installed${NC}"
    exit 1
fi

if ! command_exists git; then
    echo -e "${RED}Error: git is not installed${NC}"
    exit 1
fi

if ! command_exists jq; then
    echo -e "${YELLOW}Warning: jq is not installed. Dashboard import may fail.${NC}"
    echo -e "${YELLOW}Install jq with: brew install jq (macOS) or apt-get install jq (Linux)${NC}"
    JQ_AVAILABLE=false
else
    JQ_AVAILABLE=true
fi

echo -e "${GREEN}✓ All prerequisites met${NC}"
echo ""

# Get the script directory
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Prompt for JSON file
echo -e "${YELLOW}Enter the path to your k6 JSON results file:${NC}"
echo -e "${YELLOW}(You can use a filename, relative path, absolute path, or ~/path)${NC}"
read -e -r JSON_FILE_INPUT

# Sanitize input - remove any dangerous characters
# Allow: alphanumeric, dash, underscore, dot, slash, tilde, space
JSON_FILE_INPUT=$(echo "$JSON_FILE_INPUT" | sed 's/[^a-zA-Z0-9._/~[:space:]-]//g')

# Trim leading/trailing whitespace
JSON_FILE_INPUT=$(echo "$JSON_FILE_INPUT" | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//')

if [ -z "$JSON_FILE_INPUT" ]; then
    echo -e "${RED}Error: No file path provided${NC}"
    exit 1
fi

# Expand tilde to home directory
JSON_FILE_INPUT="${JSON_FILE_INPUT/#\~/$HOME}"

# Try to find the file in multiple locations
JSON_FILE=""

# 1. Try as absolute or relative path from current directory
if [ -f "$JSON_FILE_INPUT" ]; then
    JSON_FILE="$JSON_FILE_INPUT"
    echo -e "${GREEN}✓ Found JSON file in current directory${NC}"
# 2. Try relative to script directory
elif [ -f "$SCRIPT_DIR/$JSON_FILE_INPUT" ]; then
    JSON_FILE="$SCRIPT_DIR/$JSON_FILE_INPUT"
    echo -e "${GREEN}✓ Found JSON file in script directory${NC}"
# 3. Check if it's just a filename and search for it in the script directory
elif [[ "$JSON_FILE_INPUT" != *"/"* ]]; then
    # It's just a filename, try to find it
    FOUND_FILES=$(find "$SCRIPT_DIR" -maxdepth 2 -type f -name "$JSON_FILE_INPUT" 2>/dev/null)
    # "|| true" keeps "set -e" from aborting when grep finds no matches
    FILE_COUNT=$(echo "$FOUND_FILES" | grep -c . || true)

    if [ -n "$FOUND_FILES" ] && [ "$FILE_COUNT" -eq 1 ]; then
        JSON_FILE="$FOUND_FILES"
        echo -e "${GREEN}✓ Found JSON file: $(basename "$JSON_FILE")${NC}"
    elif [ "$FILE_COUNT" -gt 1 ]; then
        echo -e "${RED}Error: Multiple files found matching '$JSON_FILE_INPUT':${NC}"
        echo "$FOUND_FILES"
        echo -e "${YELLOW}Please provide a more specific path${NC}"
        exit 1
    fi
fi

# If still not found, show error with helpful message
if [ -z "$JSON_FILE" ] || [ ! -f "$JSON_FILE" ]; then
    echo -e "${RED}Error: File not found: $JSON_FILE_INPUT${NC}"
    echo ""
    echo -e "${YELLOW}Searched in:${NC}"
    echo -e "  • Current directory: $(pwd)"
    echo -e "  • Script directory: $SCRIPT_DIR"
    echo ""
    echo -e "${YELLOW}Available JSON files in script directory:${NC}"
    find "$SCRIPT_DIR" -maxdepth 2 -type f -name "*.json" 2>/dev/null | head -10 | while read -r file; do
        echo -e "  • $(basename "$file")"
    done
    exit 1
fi

# Convert to absolute path
JSON_FILE=$(cd "$(dirname "$JSON_FILE")" && pwd)/$(basename "$JSON_FILE")
echo -e "${GREEN}✓ Using file: $JSON_FILE${NC}"
echo ""

# Ask for project directory
echo -e "${YELLOW}Enter directory name for the setup (default: k6-grafana-stack):${NC}"
read -r PROJECT_DIR
PROJECT_DIR=${PROJECT_DIR:-k6-grafana-stack}

# Check if directory exists and clean up any existing setup
if [ -d "$PROJECT_DIR" ]; then
    echo -e "${YELLOW}Found existing setup directory. Cleaning up...${NC}"
    cd "$PROJECT_DIR"

    # Stop and remove any running containers (best effort)
    if [ -f "docker-compose.yml" ]; then
        $COMPOSE down -v > /dev/null 2>&1 || true
        echo -e "${GREEN}✓ Stopped and removed existing containers${NC}"
    fi

    # Remove old converter directory if it exists
    if [ -d "k6-json-to-influxdb-line-protocol" ]; then
        rm -rf k6-json-to-influxdb-line-protocol
    fi

    cd ..
    echo -e "${GREEN}✓ Cleaned up existing setup${NC}"
else
    echo -e "${YELLOW}Creating new setup directory...${NC}"
fi

# Create/recreate project directory
mkdir -p "$PROJECT_DIR"
cd "$PROJECT_DIR"
echo -e "${GREEN}✓ Project directory ready: $PROJECT_DIR${NC}"
echo ""

# Create docker-compose.yml
echo -e "${YELLOW}Creating docker-compose.yml...${NC}"
cat > docker-compose.yml <<'EOF'
services:
  influxdb:
    image: influxdb:1.8
    container_name: k6-influxdb
    ports:
      - "8086:8086"
    environment:
      - INFLUXDB_DB=k6
      - INFLUXDB_ADMIN_USER=admin
      - INFLUXDB_ADMIN_PASSWORD=admin
      - INFLUXDB_HTTP_AUTH_ENABLED=true
    volumes:
      - influxdb-data:/var/lib/influxdb
    networks:
      - k6-network
    deploy:
      resources:
        limits:
          cpus: '8.0'
          memory: 8G
        reservations:
          cpus: '4.0'
          memory: 4G

  grafana:
    image: grafana/grafana:latest
    container_name: k6-grafana
    ports:
      - "3000:3000"
    environment:
      - GF_SECURITY_ADMIN_USER=admin
      - GF_SECURITY_ADMIN_PASSWORD=admin
      - GF_AUTH_ANONYMOUS_ENABLED=true
      - GF_AUTH_ANONYMOUS_ORG_ROLE=Viewer
    volumes:
      - grafana-data:/var/lib/grafana
    depends_on:
      - influxdb
    networks:
      - k6-network
    deploy:
      resources:
        limits:
          cpus: '2.0'
          memory: 2G
        reservations:
          cpus: '0.5'
          memory: 512M

volumes:
  influxdb-data:
  grafana-data:

networks:
  k6-network:
    driver: bridge
EOF
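
# Note: the deploy.resources limits above should be applied by the Docker
# Compose v2 plugin, but the legacy docker-compose v1 ignores the "deploy" key
# outside of Swarm unless it is invoked with --compatibility, for example:
#   docker-compose --compatibility up -d
# If the CPU/memory limits matter on your machine, verify them with
# "docker stats" after the containers start.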

echo -e "${GREEN}✓ docker-compose.yml created${NC}"
echo ""

# Start Docker containers
echo -e "${YELLOW}Starting Docker containers...${NC}"
$COMPOSE up -d

echo -e "${GREEN}✓ Containers started${NC}"
echo ""

# Wait for services to be ready
echo -e "${YELLOW}Waiting for services to be ready...${NC}"
sleep 10

# Wait for InfluxDB
echo -n "Waiting for InfluxDB"
INFLUX_READY=false
for i in {1..30}; do
    if curl -s --max-time 2 http://localhost:8086/ping > /dev/null 2>&1; then
        echo -e " ${GREEN}✓${NC}"
        INFLUX_READY=true
        break
    fi
    echo -n "."
    sleep 2
done

if [ "$INFLUX_READY" = false ]; then
    echo -e " ${RED}✗${NC}"
    echo -e "${RED}Error: InfluxDB did not start properly${NC}"
    echo -e "${YELLOW}Check logs with: cd $PROJECT_DIR && $COMPOSE logs influxdb${NC}"
    exit 1
fi
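
# Manual check (optional): InfluxDB 1.x answers its health probe on /ping with
# an empty 204 response, so if the wait loop above ever fails you can verify
# the service by hand with:
#   curl -i http://localhost:8086/ping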

# Wait for Grafana
echo -n "Waiting for Grafana"
GRAFANA_READY=false
for i in {1..60}; do
    if curl -s --max-time 2 http://localhost:3000/api/health > /dev/null 2>&1; then
        echo -e " ${GREEN}✓${NC}"
        GRAFANA_READY=true
        break
    fi
    echo -n "."
    sleep 2

    # Check if we've been waiting too long
    if [ "$i" -eq 30 ]; then
        echo ""
        echo -e "${YELLOW}⚠ Grafana is taking longer than expected to start...${NC}"
        echo -n "Still waiting"
    fi
done

if [ "$GRAFANA_READY" = false ]; then
    echo -e " ${RED}✗${NC}"
    echo -e "${RED}Error: Grafana did not start properly${NC}"
    echo -e "${YELLOW}Check logs with: cd $PROJECT_DIR && $COMPOSE logs grafana${NC}"
    exit 1
fi
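
# Manual check (optional): Grafana's health endpoint returns a small JSON
# document; a healthy instance reports "database": "ok". For example:
#   curl -s http://localhost:3000/api/health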

echo ""

# Configure Grafana data source via API
echo -e "${YELLOW}Configuring InfluxDB data source in Grafana...${NC}"

# Wait a bit more for Grafana to fully initialize
sleep 5

# Try to configure the data source with a timeout and error capture.
# "|| true" keeps "set -e" from aborting if curl itself fails, so the
# error handling below still runs.
DS_RESPONSE=$(curl -s --max-time 10 -w "\n%{http_code}" -X POST \
    -H "Content-Type: application/json" \
    -d '{
        "name": "InfluxDB-k6",
        "type": "influxdb",
        "url": "http://influxdb:8086",
        "access": "proxy",
        "database": "k6",
        "basicAuth": false,
        "isDefault": true,
        "jsonData": {
            "httpMode": "GET"
        },
        "secureJsonData": {
            "password": "admin"
        },
        "user": "admin"
    }' \
    http://admin:admin@localhost:3000/api/datasources 2>&1 || true)

HTTP_CODE=$(echo "$DS_RESPONSE" | tail -n1)
# sed '$d' strips the status-code line portably ("head -n-1" is not supported
# by BSD/macOS head).
DS_BODY=$(echo "$DS_RESPONSE" | sed '$d')

if [ "$HTTP_CODE" = "200" ] || [ "$HTTP_CODE" = "201" ]; then
    echo -e "${GREEN}✓ Data source configured${NC}"
elif [ "$HTTP_CODE" = "409" ]; then
    echo -e "${YELLOW}⚠ Data source already exists (this is fine)${NC}"
else
    echo -e "${YELLOW}⚠ Data source configuration returned HTTP $HTTP_CODE${NC}"
    echo -e "${YELLOW}  You may need to manually configure it in Grafana${NC}"
    if [ -n "$DS_BODY" ]; then
        echo -e "${YELLOW}  Response: $DS_BODY${NC}"
    fi
fi
echo ""
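
# Manual check (optional): the data source can be inspected through Grafana's
# HTTP API once it exists, looked up by the name configured above (assumes the
# default admin/admin credentials):
#   curl -s http://admin:admin@localhost:3000/api/datasources/name/InfluxDB-k6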

# Import k6 Grafana dashboard (14801 - modern version)
echo -e "${YELLOW}Importing K6 Dashboard (ID: 14801)...${NC}"

if [ "$JQ_AVAILABLE" = true ]; then
    # Download dashboard JSON (-f makes curl fail on HTTP errors such as 404)
    if curl -sf --max-time 10 https://grafana.com/api/dashboards/14801/revisions/1/download -o k6-dashboard.json 2>&1; then
        # Check if file was downloaded
        if [ -s k6-dashboard.json ]; then
            # Create proper import payload using jq.
            # The dashboard expects DS_DUMMY as the input name.
            if IMPORT_PAYLOAD=$(jq -n --argjson dashboard "$(cat k6-dashboard.json)" '{
                dashboard: $dashboard,
                overwrite: true,
                inputs: [
                    {
                        name: "DS_DUMMY",
                        type: "datasource",
                        pluginId: "influxdb",
                        value: "InfluxDB-k6"
                    }
                ]
            }' 2>&1); then
                # Import dashboard via API; "|| true" keeps "set -e" from
                # aborting before the HTTP code is inspected.
                IMPORT_RESPONSE=$(curl -s --max-time 10 -w "\n%{http_code}" -X POST \
                    -H "Content-Type: application/json" \
                    -d "$IMPORT_PAYLOAD" \
                    http://admin:admin@localhost:3000/api/dashboards/import 2>&1 || true)

                IMPORT_CODE=$(echo "$IMPORT_RESPONSE" | tail -n1)
                IMPORT_BODY=$(echo "$IMPORT_RESPONSE" | sed '$d')

                if [ "$IMPORT_CODE" = "200" ] || [ "$IMPORT_CODE" = "201" ]; then
                    echo -e "${GREEN}✓ K6 Dashboard imported${NC}"
                    # Extract dashboard URL if available
                    DASH_URL=$(echo "$IMPORT_BODY" | jq -r '.importedUrl // empty' 2>/dev/null)
                    if [ -n "$DASH_URL" ]; then
                        echo -e "${GREEN}  Dashboard URL: http://localhost:3000${DASH_URL}${NC}"
                    fi
                elif [ "$IMPORT_CODE" = "412" ]; then
                    echo -e "${YELLOW}⚠ Dashboard already exists (this is fine)${NC}"
                else
                    echo -e "${YELLOW}⚠ Dashboard import returned HTTP $IMPORT_CODE${NC}"
                    if echo "$IMPORT_BODY" | grep -q "name-exists"; then
                        echo -e "${YELLOW}  Dashboard may already exist - continuing${NC}"
                    else
                        echo -e "${YELLOW}  You can manually import dashboard ID 14801 from Grafana${NC}"
                    fi
                fi
            else
                echo -e "${YELLOW}⚠ Failed to create import payload${NC}"
                echo -e "${YELLOW}  You can manually import dashboard ID 14801 from Grafana${NC}"
            fi
        else
            echo -e "${YELLOW}⚠ Failed to download dashboard${NC}"
            echo -e "${YELLOW}  You can manually import dashboard ID 14801 from Grafana${NC}"
        fi
    else
        echo -e "${YELLOW}⚠ Failed to download dashboard from Grafana.com${NC}"
        echo -e "${YELLOW}  You can manually import dashboard ID 14801 from Grafana${NC}"
    fi
else
    echo -e "${YELLOW}⚠ Skipping automatic dashboard import (jq not installed)${NC}"
    echo -e "${YELLOW}  Manually import dashboard: Go to Grafana → Dashboards → Import → Use ID: 14801${NC}"
fi
echo ""
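
# Manual check (optional): imported dashboards can be listed through the
# Grafana search API, which is a quick way to confirm the import landed:
#   curl -s "http://admin:admin@localhost:3000/api/search?query=k6"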

# Clone the k6-json-to-influxdb converter
echo -e "${YELLOW}Setting up JSON to InfluxDB converter...${NC}"

if [ ! -d "k6-json-to-influxdb-line-protocol" ]; then
    if git clone https://github.com/yooap/k6-json-to-influxdb-line-protocol.git > /dev/null 2>&1; then
        echo -e "${GREEN}✓ Converter cloned${NC}"
    else
        echo -e "${RED}✗ Failed to clone converter repository${NC}"
        echo -e "${YELLOW}Check your internet connection or try again later${NC}"
        exit 1
    fi
else
    echo -e "${GREEN}✓ Converter already exists${NC}"
fi

cd k6-json-to-influxdb-line-protocol

# Copy optimized converters if available
if [ -f "$SCRIPT_DIR/convert_and_write_to_db_multithreaded.py" ]; then
    cp "$SCRIPT_DIR/convert_and_write_to_db_multithreaded.py" .
    chmod +x convert_and_write_to_db_multithreaded.py
    echo -e "${GREEN}✓ Multi-threaded converter installed${NC}"
elif [ -f "$SCRIPT_DIR/convert_and_write_to_db_streaming.py" ]; then
    cp "$SCRIPT_DIR/convert_and_write_to_db_streaming.py" .
    chmod +x convert_and_write_to_db_streaming.py
    echo -e "${GREEN}✓ Streaming converter installed${NC}"
fi

# Create and activate virtual environment
echo -e "${YELLOW}Creating Python virtual environment...${NC}"
if python3 -m venv venv 2>&1; then
    echo -e "${GREEN}✓ Virtual environment created${NC}"
else
    echo -e "${RED}✗ Failed to create virtual environment${NC}"
    cd ..
    exit 1
fi

# Activate virtual environment
source venv/bin/activate

# Install Python dependencies
echo -e "${YELLOW}Installing Python dependencies in venv...${NC}"
if [ -f "requirements.txt" ]; then
    if python3 -m pip install -q -r requirements.txt 2>&1; then
        echo -e "${GREEN}✓ Dependencies installed${NC}"
    else
        echo -e "${YELLOW}⚠ Some dependencies may have failed to install${NC}"
    fi
else
    # Install manually if requirements.txt doesn't exist
    if python3 -m pip install -q influxdb 2>&1; then
        echo -e "${GREEN}✓ Dependencies installed (influxdb)${NC}"
    else
        echo -e "${RED}✗ Failed to install influxdb package${NC}"
        deactivate
        cd ..
        exit 1
    fi
fi
echo ""
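
# Note: the converter runs inside this venv. If you want to re-run the import
# later (for example with a different JSON file), re-activate it first:
#   cd "$PROJECT_DIR"/k6-json-to-influxdb-line-protocol && source venv/bin/activate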

# Import JSON data to InfluxDB using optimized converter
echo -e "${YELLOW}Importing k6 JSON data to InfluxDB...${NC}"

# Get file size for progress indication
FILE_SIZE=$(ls -lh "$JSON_FILE" | awk '{print $5}')
FILE_SIZE_BYTES=$(stat -f%z "$JSON_FILE" 2>/dev/null || stat -c%s "$JSON_FILE" 2>/dev/null)
FILE_SIZE_MB=$(echo "scale=1; $FILE_SIZE_BYTES / 1024 / 1024" | bc 2>/dev/null || echo "?")

echo -e "${BLUE}File size: ${FILE_SIZE} (${FILE_SIZE_MB} MB)${NC}"

# Select best converter
if [ -f "convert_and_write_to_db_multithreaded.py" ]; then
    CONVERTER_SCRIPT="convert_and_write_to_db_multithreaded.py"
    # Use 100k batch size - sweet spot between performance and HTTP limits
    CONVERTER_ARGS="--verbose --workers=4 --batch-size=100000"
    echo -e "${BLUE}Using multi-threaded import (4 workers, 100k batch size)${NC}"
elif [ -f "convert_and_write_to_db_streaming.py" ]; then
    CONVERTER_SCRIPT="convert_and_write_to_db_streaming.py"
    CONVERTER_ARGS="--verbose"
    echo -e "${BLUE}Using streaming import${NC}"
else
    CONVERTER_SCRIPT="convert_and_write_to_db.py"
    CONVERTER_ARGS=""
    echo -e "${BLUE}Using standard import${NC}"
fi
echo ""

# Run the import (in venv) with live progress output.
# $CONVERTER_ARGS is intentionally unquoted so its flags are word-split.
if python3 $CONVERTER_SCRIPT "$JSON_FILE" \
    --host=localhost \
    --port=8086 \
    --username=admin \
    --password=admin \
    --db=k6 \
    $CONVERTER_ARGS 2>&1; then
    echo ""
    echo -e "${GREEN}✓ JSON data imported successfully${NC}"

    # Get final count
    FINAL_COUNT=$(docker exec k6-influxdb influx -username admin -password admin -database k6 -execute "SELECT COUNT(*) FROM http_req_duration" 2>/dev/null | tail -1 | awk '{print $2}')
    if [ -n "$FINAL_COUNT" ] && [ "$FINAL_COUNT" != "count_value" ]; then
        echo -e "${GREEN}  Total requests in database: ${FINAL_COUNT}${NC}"
    fi
else
    IMPORT_EXIT_CODE=$?
    echo ""
    echo -e "${RED}✗ Failed to import JSON data (exit code: $IMPORT_EXIT_CODE)${NC}"
    echo -e "${YELLOW}Possible issues:${NC}"
    echo -e "${YELLOW}  • JSON file may not be in k6 NDJSON format${NC}"
    echo -e "${YELLOW}  • InfluxDB may not be ready${NC}"
    echo -e "${YELLOW}  • Check the error messages above${NC}"
    echo ""
    echo -e "${YELLOW}Try checking InfluxDB logs:${NC}"
    echo -e "${YELLOW}  cd .. && $COMPOSE logs influxdb${NC}"
    deactivate
    cd ..
    exit 1
fi
echo ""
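
# Manual check (optional): the imported data can be queried directly through
# the influx CLI inside the container, e.g. to list the measurements written:
#   docker exec k6-influxdb influx -username admin -password admin -database k6 -execute "SHOW MEASUREMENTS"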

# Deactivate venv
deactivate

cd ..

# Final instructions
echo -e "${GREEN}========================================${NC}"
echo -e "${GREEN}Setup Complete! 🎉${NC}"
echo -e "${GREEN}========================================${NC}"
echo ""
echo -e "${BLUE}Services:${NC}"
echo -e "  • Grafana:  ${GREEN}http://localhost:3000${NC} (admin/admin)"
echo -e "  • InfluxDB: ${GREEN}http://localhost:8086${NC} (admin/admin)"
echo ""
echo -e "${BLUE}Next Steps:${NC}"
echo -e "  1. Grafana will open automatically with the K6 Dashboard"
echo -e "  2. The time range is preset to match your test data"
echo -e "  3. View your test results!"
echo ""
echo -e "${YELLOW}Important:${NC}"
echo -e "  • The time range is calculated automatically from your test data"
echo -e "  • If some panels show 'No data', try adjusting the time range slightly"
echo -e "  • The dashboard data source is configured as 'InfluxDB-k6'"
echo -e "  • Dashboard: K6 Dashboard (ID: 14801) - modern, community-maintained"
echo ""
echo -e "${BLUE}To stop the services:${NC}"
echo -e "  cd $(pwd) && $COMPOSE down"
echo ""
echo -e "${BLUE}To restart the services:${NC}"
echo -e "  cd $(pwd) && $COMPOSE up -d"
echo ""
echo -e "${BLUE}To remove everything (including data):${NC}"
echo -e "  cd $(pwd) && $COMPOSE down -v"
echo ""

# Extract time range from JSON file
echo -e "${YELLOW}Calculating time range from test data...${NC}"

# Get first and last timestamps from JSON
FIRST_TIME=$(grep -o '"time":"[^"]*"' "$JSON_FILE" | head -1 | sed 's/"time":"//;s/"//')
LAST_TIME=$(grep -o '"time":"[^"]*"' "$JSON_FILE" | tail -1 | sed 's/"time":"//;s/"//')

if [ -n "$FIRST_TIME" ] && [ -n "$LAST_TIME" ]; then
    # Convert to milliseconds for the Grafana URL (with a 5 minute buffer
    # before/after). "|| true" keeps "set -e" from aborting if a timestamp
    # cannot be parsed; the empty result is handled below.
    FROM_MS=$(python3 -c "
import datetime
dt = datetime.datetime.fromisoformat('$FIRST_TIME'.replace('Z', '+00:00'))
# Subtract 5 minutes for buffer
dt = dt - datetime.timedelta(minutes=5)
print(int(dt.timestamp() * 1000))
" 2>/dev/null || true)

    TO_MS=$(python3 -c "
import datetime
dt = datetime.datetime.fromisoformat('$LAST_TIME'.replace('Z', '+00:00'))
# Add 5 minutes for buffer
dt = dt + datetime.timedelta(minutes=5)
print(int(dt.timestamp() * 1000))
" 2>/dev/null || true)

    if [ -n "$FROM_MS" ] && [ -n "$TO_MS" ]; then
        echo -e "${GREEN}✓ Time range detected: $FIRST_TIME to $LAST_TIME${NC}"
        DASHBOARD_URL="http://localhost:3000/d/9lcthCWnk/k6-dashboard?from=${FROM_MS}&to=${TO_MS}"
    else
        echo -e "${YELLOW}⚠ Could not calculate time range, using default${NC}"
        DASHBOARD_URL="http://localhost:3000"
    fi
else
    echo -e "${YELLOW}⚠ Could not extract timestamps from JSON file${NC}"
    DASHBOARD_URL="http://localhost:3000"
fi
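
# Note: Grafana dashboard URLs accept absolute time ranges as epoch
# milliseconds via the "from" and "to" query parameters, which is what the
# URL above relies on, e.g. ?from=1700000000000&to=1700003600000.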

# Open Grafana in browser
echo -e "${YELLOW}Opening Grafana dashboard...${NC}"
sleep 2

# Detect OS and open browser (best effort; "|| true" avoids aborting under
# "set -e" if no browser can be launched)
if [[ "$OSTYPE" == "darwin"* ]]; then
    # macOS
    open "$DASHBOARD_URL" || true
elif [[ "$OSTYPE" == "linux-gnu"* ]]; then
    # Linux
    if command_exists xdg-open; then
        xdg-open "$DASHBOARD_URL" 2>/dev/null || true
    elif command_exists gnome-open; then
        gnome-open "$DASHBOARD_URL" 2>/dev/null || true
    else
        echo -e "${YELLOW}⚠ Could not auto-open browser. Please manually navigate to ${DASHBOARD_URL}${NC}"
    fi
elif [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" ]]; then
    # Windows (Git Bash or Cygwin)
    start "$DASHBOARD_URL" || true
else
    echo -e "${YELLOW}⚠ Could not auto-open browser. Please manually navigate to ${DASHBOARD_URL}${NC}"
fi

echo -e "${GREEN}✓ Dashboard ready (time range preset): ${DASHBOARD_URL}${NC}"
echo ""