Skip to content

Instantly share code, notes, and snippets.

Show Gist options
  • Select an option

  • Save WalczRobert/63dbfb3946e9bdb517878c32d60c14e2 to your computer and use it in GitHub Desktop.

Select an option

Save WalczRobert/63dbfb3946e9bdb517878c32d60c14e2 to your computer and use it in GitHub Desktop.
MyMiniFactory Model Downloader
#!/bin/bash
# MyMiniFactory Model Metadata Downloader
# Downloads JSON metadata for a list of model IDs from MyMiniFactory API
# This is STEP 1 - run this first to get model metadata, then use the STL downloader
#
# Prerequisites:
# 1. Create model_ids.txt with one model ID per line (no commas, no spaces)
# 2. Valid MyMiniFactory session cookie
# 3. Models must be owned/accessible by your account
#
# Usage:
# 1. Update COOKIE variable below with your session cookie
# 2. Ensure model_ids.txt has clean line endings (Unix format)
# 3. Run: bash download_metadata.sh
# Colors for output (ANSI escape sequences, rendered via echo -e)
RED='\033[0;31m'
GREEN='\033[0;32m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# UPDATE THIS: Get your cookie from browser developer tools (F12 -> Network -> Copy Cookie header)
# NOTE: session cookies expire frequently (30-60 min); refresh when downloads start failing.
COOKIE='REPLACE_WITH_YOUR_ACTUAL_COOKIE_STRING'
# Abort early if the input list of model IDs is missing.
if [[ ! -f "model_ids.txt" ]]; then
  # Diagnostics go to stderr so they survive stdout redirection.
  echo -e "${RED}Error: model_ids.txt not found!${NC}" >&2
  echo "Create a file with one model ID per line, like:" >&2
  echo "409352" >&2
  echo "409348" >&2
  echo "496377" >&2
  exit 1
fi
# Create the output directory and work from inside it; model_ids.txt stays
# one level up, so it is referenced as ../model_ids.txt from here on.
mkdir -p downloads
cd downloads || exit
# Fix Windows line endings if present (common issue when the list was made on Windows)
if grep -q $'\r' ../model_ids.txt; then
  echo -e "${BLUE}Fixing Windows line endings in model_ids.txt...${NC}"
  sed -i 's/\r$//' ../model_ids.txt
fi
# Count only non-empty lines so the [current/total] progress counter matches
# the download loop (which skips blanks), and so a final line without a
# trailing newline is still counted (wc -l would miss it).
total=$(grep -c . ../model_ids.txt)
current=0
echo -e "${BLUE}Starting download of $total model metadata files...${NC}"
echo "JSON files will be saved in the 'downloads' directory"
echo "Rate limited to 20 requests per minute (3 second delay between requests)"
echo ""
# Read each ID and download its metadata JSON from the MyMiniFactory API.
# The `|| [[ -n "$id" ]]` clause processes a final line that lacks a trailing
# newline, which a bare `read` would silently drop.
while read -r id || [[ -n "$id" ]]; do
  # Skip empty lines
  [[ -z "$id" ]] && continue
  current=$((current + 1))
  echo -e "${BLUE}[$current/$total] Downloading metadata for model $id...${NC}"
  # Fetch the model's metadata; headers mimic a real browser session and the
  # Cookie header carries the authenticated session.
  curl --silent \
  -H "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:142.0) Gecko/20100101 Firefox/142.0" \
  -H "Accept: application/json" \
  -H "Accept-Language: en-US,en;q=0.5" \
  -H "Accept-Encoding: gzip, deflate, br, zstd" \
  -H "Referer: https://www.myminifactory.com/api-doc/index.html" \
  -H "Connection: keep-alive" \
  -H "Cookie: $COOKIE" \
  -H "Sec-Fetch-Dest: empty" \
  -H "Sec-Fetch-Mode: cors" \
  -H "Sec-Fetch-Site: same-origin" \
  -H "Priority: u=0" \
  --compressed \
  "https://www.myminifactory.com/api/v2/objects/$id" \
  -o "model_${id}.json"
  # Treat an existing, non-empty file as success; note this does not detect
  # HTML error pages saved as .json (the enhanced downloader does).
  if [[ -f "model_${id}.json" ]] && [[ -s "model_${id}.json" ]]; then
    echo -e "${GREEN}✓ Successfully downloaded metadata for model $id${NC}"
  else
    echo -e "${RED}✗ Failed to download metadata for model $id${NC}"
    # Remove empty/failed file
    rm -f "model_${id}.json"
  fi
  # Rate limiting: sleep for 3 seconds (20 requests per minute); no pause
  # after the final download.
  if [[ $current -lt $total ]]; then
    sleep 3
  fi
done < ../model_ids.txt
# Final summary; this still runs from inside downloads/, so the glob below
# sees the freshly written model_*.json files.
echo ""
echo -e "${GREEN}Metadata download complete!${NC}"
echo "Downloaded files are in the 'downloads' directory"
echo "Total JSON files: $(ls -1 model_*.json 2>/dev/null | wc -l)"
echo ""
echo -e "${BLUE}Next step: Use the STL downloader script to get actual 3D files${NC}"
# Common Issues and Solutions:
#
# 1. "Failed to download" - Check cookie expiration, get fresh cookie from browser
# 2. All downloads fail - Cookie expired or malformed, copy fresh cookie from developer tools
# 3. Some models fail - Model might be private, deleted, or require different permissions
# 4. "model_ids.txt not found" - Create file with one model ID per line
# 5. Windows line ending issues - Script automatically fixes these
#
# Tips:
# - Copy cookie from browser: F12 -> Network tab -> find any request -> Copy Cookie header
# - Cookies expire frequently (30-60 minutes), refresh as needed
# - Model IDs are numbers like 409352, not full URLs
# - One ID per line in model_ids.txt, no commas or extra formatting
# - This downloads metadata only, use STL downloader script for actual files
#!/bin/bash
# MyMiniFactory Bulk STL/ZIP File Downloader
# Downloads actual 3D printable files from a list of model IDs
# This is STEP 2 - run AFTER the metadata downloader creates JSON files
#
# Prerequisites:
# 1. JSON metadata files from Step 1 (model_*.json files)
# 2. Valid MyMiniFactory session cookie
# 3. jq installed (JSON parser) - download from https://github.com/stedolan/jq/releases
#
# Usage:
# 1. Run metadata downloader first (Step 1)
# 2. Update COOKIE variable below with your session cookie
# 3. Place jq or jq.exe in same directory
# 4. Run: bash download_stl_files.sh
# Colors for output (ANSI escape sequences, rendered via echo -e)
RED='\033[0;31m'
GREEN='\033[0;32m'
BLUE='\033[0;34m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
# UPDATE THIS: Get your cookie from browser developer tools (F12 -> Network -> Copy Cookie header)
# NOTE: must be a fresh cookie from the same authenticated session; expires in 30-60 min.
COOKIE='REPLACE_WITH_YOUR_ACTUAL_COOKIE_STRING'
# Sanity check: we must be in the directory that holds the Step 1 JSON files.
if ! ls model_*.json 1> /dev/null 2>&1; then
  echo -e "${RED}Error: No model JSON files found. Run the metadata downloader first (Step 1).${NC}" >&2
  exit 1
fi
# Locate jq (JSON parser). When falling back to a local copy, record an
# absolute path: the script later cd's into stl_files/, and the previous
# relative "../jq.exe" silently depended on that cd being exactly one
# level deep.
JQ_CMD="jq"
if ! command -v jq &> /dev/null; then
  if [[ -f "./jq.exe" ]]; then
    JQ_CMD="$PWD/jq.exe"
  elif [[ -f "./jq" ]]; then
    JQ_CMD="$PWD/jq"
  else
    echo -e "${RED}Error: jq not found. Download from https://github.com/stedolan/jq/releases${NC}" >&2
    echo "Place jq or jq.exe in the same directory as this script" >&2
    exit 1
  fi
fi
# Create the STL downloads directory and work from inside it; the Step 1
# JSON files are one level up and are referenced as ../model_*.json below.
mkdir -p stl_files
cd stl_files || exit
# Count JSON files (now one level up)
json_count=$(find .. -name "model_*.json" -type f 2>/dev/null | wc -l)
current_file=0
total_downloads=0
successful_downloads=0
echo -e "${BLUE}Found $json_count JSON files to process${NC}"
echo -e "${BLUE}Extracting download URLs and downloading STL/ZIP files...${NC}"
echo -e "${YELLOW}This will take a while - respecting rate limits${NC}"
echo ""
# Process each Step 1 JSON file: extract the per-file download URLs with jq
# and fetch every file into a per-model subdirectory.
for json_file in ../model_*.json; do
  current_file=$((current_file + 1))
  # Derive the numeric model ID from model_<id>.json; anchored patterns
  # avoid accidental substitutions inside the ID itself.
  model_id=$(basename "$json_file" | sed 's/^model_//; s/\.json$//')
  echo -e "${BLUE}[$current_file/$json_count] Processing model $model_id...${NC}"
  # Extract "filename|download_url" pairs using jq
  download_data=$($JQ_CMD -r '.files.items[] | "\(.filename)|\(.download_url)"' "$json_file" 2>/dev/null)
  if [[ -z "$download_data" ]]; then
    echo -e "${RED} ✗ No download URLs found in $json_file${NC}"
    continue
  fi
  # Create directory for this model
  model_dir="model_${model_id}"
  mkdir -p "$model_dir"
  # Download each file
  while IFS='|' read -r filename download_url; do
    if [[ -n "$filename" && -n "$download_url" ]]; then
      # Clean carriage returns and whitespace (Windows line ending fix)
      filename=$(echo "$filename" | tr -d '\r' | xargs)
      download_url=$(echo "$download_url" | tr -d '\r' | xargs)
      total_downloads=$((total_downloads + 1))
      # BUGFIX: the output path must use the extracted filename; the previous
      # "$(unknown)" invoked a nonexistent command and produced an empty name,
      # so every file was written to the same path.
      output_file="${model_dir}/${filename}"
      echo -e " ${YELLOW}Downloading: $filename${NC}"
      # Download with redirect following (-L) and browser-like headers
      curl --silent \
      -L \
      -H "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:142.0) Gecko/20100101 Firefox/142.0" \
      -H "Accept: application/octet-stream" \
      -H "Cookie: $COOKIE" \
      --compressed \
      "$download_url" \
      -o "$output_file"
      # Check if download was successful (file exists and is non-empty)
      if [[ -f "$output_file" ]] && [[ -s "$output_file" ]]; then
        file_size=$(stat -c%s "$output_file" 2>/dev/null || stat -f%z "$output_file" 2>/dev/null || echo "unknown")
        echo -e " ${GREEN}✓ Downloaded $filename (${file_size} bytes)${NC}"
        successful_downloads=$((successful_downloads + 1))
      else
        echo -e " ${RED}✗ Failed to download $filename${NC}"
        # Remove empty/failed file
        rm -f "$output_file"
      fi
      # Rate limiting: sleep for 2 seconds between downloads
      sleep 2
    fi
  done <<< "$download_data"
  echo ""
done
# Final summary; runs from inside stl_files/, so the find below lists the
# per-model subdirectories that were just created.
echo -e "${GREEN}Download complete!${NC}"
echo -e "${GREEN}Successfully downloaded: $successful_downloads/$total_downloads files${NC}"
echo -e "${BLUE}Files are organized in the 'stl_files' directory by model ID${NC}"
echo ""
echo "Directory structure:"
find . -name "model_*" -type d 2>/dev/null | head -10
# Common Issues and Solutions:
#
# 1. "No download URLs found" - Verify JSON files exist and contain file data
# 2. "Failed to download" with valid URLs - Cookie expired, get fresh cookie
# 3. "URL malformed" errors - Windows line ending issue (script handles automatically)
# 4. "jq not found" - Download jq.exe and place in same directory as script
# 5. All downloads redirect to login - Cookie expired or invalid
# 6. Only photos download, not STL files - Missing -L flag for redirects (fixed in script)
#
# Tips:
# - Cookies expire frequently (30-60 min), refresh if downloads start failing
# - Script follows redirects automatically with -L flag
# - Rate limited to 2 seconds between downloads (respectful to servers)
# - Large STL files take longer, be patient
# - Windows line endings are automatically cleaned from URLs
# - Each model gets its own subdirectory with all associated files
# MyMiniFactory - Mass ZIP Extraction Script
# Extracts all ZIP files in all model subdirectories
# WINDOWS ONLY - Requires PowerShell 5.0+
#
# This is OPTIONAL Step 3 - run after downloading STL/ZIP files
#
# Prerequisites:
# - Completed Steps 1 & 2 (metadata and STL downloads)
# - PowerShell execution policy allows scripts
# - Close any Windows Explorer windows viewing the files
#
# Usage:
# 1. Update $BASE_PATH variable below to point to your stl_files directory
# 2. Choose extraction mode (in-place or separate folders)
# 3. Run: .\extract_all_zips.ps1
# ============================================================================
# CONFIGURATION - UPDATE THIS PATH TO YOUR stl_files DIRECTORY
# ============================================================================
# Example Windows paths:
# $BASE_PATH = "C:\Users\YourName\Downloads\MyMiniFactory\downloads\stl_files"
# $BASE_PATH = "D:\3D Printing\MMF Downloads\downloads\stl_files"
# UPDATE THIS: absolute path to the stl_files directory produced by Step 2.
$BASE_PATH = "PATH\TO\YOUR\downloads\stl_files"
# Choose extraction mode:
# $EXTRACT_IN_PLACE = $true -> Extract directly into model folders alongside zips
# $EXTRACT_IN_PLACE = $false -> Create separate "_extracted" subdirectories
$EXTRACT_IN_PLACE = $true
# ============================================================================
Write-Host "MyMiniFactory ZIP Extraction Tool" -ForegroundColor Cyan
Write-Host "=================================" -ForegroundColor Cyan
Write-Host ""
# Validate base path exists before touching anything
if (-not (Test-Path $BASE_PATH)) {
Write-Host "ERROR: Base path does not exist: $BASE_PATH" -ForegroundColor Red
Write-Host "Please update the `$BASE_PATH variable in this script to point to your stl_files directory" -ForegroundColor Yellow
exit 1
}
# Navigate to base directory
Set-Location $BASE_PATH
# Find all ZIP files recursively (one FileInfo object per archive)
$zipFiles = Get-ChildItem -Recurse -Filter "*.zip"
$totalFiles = $zipFiles.Count
# Nothing to do is not an error - exit cleanly
if ($totalFiles -eq 0) {
Write-Host "No ZIP files found in $BASE_PATH" -ForegroundColor Yellow
exit 0
}
Write-Host "Found $totalFiles ZIP files to extract" -ForegroundColor Green
Write-Host "Extraction mode: $(if ($EXTRACT_IN_PLACE) { 'In-place' } else { 'Separate folders' })" -ForegroundColor Cyan
Write-Host ""
# Extract every archive, tracking running counts for the final summary.
$current = 0
$successful = 0
$failed = 0
foreach ($archive in $zipFiles) {
    $current++
    $percentComplete = [math]::Round(($current / $totalFiles) * 100, 1)
    Write-Host "[$current/$totalFiles - $percentComplete%] Extracting: $($archive.Name)" -ForegroundColor Cyan
    try {
        # Pick the destination folder according to the configured mode.
        $destination = if ($EXTRACT_IN_PLACE) {
            # In-place: unpack next to the ZIP itself.
            $archive.DirectoryName
        } else {
            # Separate mode: unpack into "<zipname>_extracted" beside the ZIP,
            # creating that folder on first use.
            $target = Join-Path $archive.DirectoryName ($archive.BaseName + "_extracted")
            if (-not (Test-Path $target)) {
                New-Item -ItemType Directory -Path $target -Force | Out-Null
            }
            $target
        }
        # Extract the archive; -Force overwrites, -ErrorAction Stop routes
        # failures into the catch block below.
        Expand-Archive -Path $archive.FullName -DestinationPath $destination -Force -ErrorAction Stop
        Write-Host " ✓ Extracted to: $destination" -ForegroundColor Green
        $successful++
    } catch {
        Write-Host " ✗ Failed: $($_.Exception.Message)" -ForegroundColor Red
        $failed++
    }
}
Write-Host ""
Write-Host "Extraction Complete!" -ForegroundColor Green
Write-Host "===================" -ForegroundColor Green
Write-Host "Successfully extracted: $successful files" -ForegroundColor Green
# Only mention failures when there were any
if ($failed -gt 0) {
Write-Host "Failed: $failed files" -ForegroundColor Red
}
Write-Host ""
# Show a small sample of the resulting directories with their file counts
Write-Host "Sample of extracted directories:" -ForegroundColor Cyan
Get-ChildItem -Directory | Select-Object -First 5 | ForEach-Object {
$fileCount = (Get-ChildItem $_.FullName -Recurse -File | Measure-Object).Count
Write-Host " $($_.Name): $fileCount files" -ForegroundColor Gray
}
# Common Issues and Solutions:
#
# 1. "Base path does not exist" - Update $BASE_PATH to your actual stl_files directory path
# 2. "Access denied" or extraction fails - Close Windows Explorer windows viewing those folders
# 3. Script won't run - Run: Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
# 4. Slow extraction - Large files (100MB+) take time, this is normal
# 5. Out of disk space - Check free space, each model can be 200MB+ extracted
#
# Tips:
# - Use in-place extraction ($EXTRACT_IN_PLACE = $true) to keep things organized
# - Close all Explorer windows before running to avoid file lock issues
# - Extraction can take 10-30 minutes depending on file count and sizes
# - You can delete the ZIP files after successful extraction to save space
# - STL files are typically in the extracted folders, ready for slicing software
# MyMiniFactory - Intelligent Folder Renaming Script
# Renames model_XXXXXX folders to include actual model names from JSON metadata
# WINDOWS ONLY - Requires PowerShell 5.0+
#
# This is OPTIONAL Step 4 - run after downloading files and optionally extracting
#
# Prerequisites:
# - Completed Steps 1 & 2 (must have JSON metadata files)
# - model_XXXXXX folders exist in stl_files directory
# - Close any Windows Explorer windows viewing these folders
#
# Usage:
# 1. Update $JSON_PATH and $FOLDERS_PATH variables below
# 2. Customize naming format if desired
# 3. Run: .\rename_folders_from_json.ps1
# ============================================================================
# CONFIGURATION - UPDATE THESE PATHS TO YOUR DIRECTORY STRUCTURE
# ============================================================================
# Path to directory containing model_*.json files (usually 'downloads')
# Example: "C:\Users\YourName\Downloads\MyMiniFactory\downloads"
$JSON_PATH = "PATH\TO\YOUR\downloads"
# Path to directory containing model_XXXXXX folders to rename (usually 'downloads\stl_files')
# Example: "C:\Users\YourName\Downloads\MyMiniFactory\downloads\stl_files"
$FOLDERS_PATH = "PATH\TO\YOUR\downloads\stl_files"
# Naming format: "ID_NAME" (e.g., 409352_Crystal_Clusters) keeps the unique ID;
# "NAME_ONLY" (e.g., Crystal_Clusters) is cleaner but can collide on duplicate names.
$NAMING_FORMAT = "ID_NAME"
# Maximum name length (to avoid Windows MAX_PATH limits on deep trees)
$MAX_NAME_LENGTH = 80
# ============================================================================
Write-Host "MyMiniFactory Folder Renaming Tool" -ForegroundColor Cyan
Write-Host "===================================" -ForegroundColor Cyan
Write-Host ""
# Validate both configured paths before doing any work
if (-not (Test-Path $JSON_PATH)) {
Write-Host "ERROR: JSON path does not exist: $JSON_PATH" -ForegroundColor Red
Write-Host "Update `$JSON_PATH to point to your 'downloads' directory containing model_*.json files" -ForegroundColor Yellow
exit 1
}
if (-not (Test-Path $FOLDERS_PATH)) {
Write-Host "ERROR: Folders path does not exist: $FOLDERS_PATH" -ForegroundColor Red
Write-Host "Update `$FOLDERS_PATH to point to your 'stl_files' directory containing model_* folders" -ForegroundColor Yellow
exit 1
}
# Echo the effective configuration so mistakes are visible up front
Write-Host "Configuration:" -ForegroundColor Cyan
Write-Host " JSON files location: $JSON_PATH" -ForegroundColor Gray
Write-Host " Folders location: $FOLDERS_PATH" -ForegroundColor Gray
Write-Host " Naming format: $NAMING_FORMAT" -ForegroundColor Gray
Write-Host " Max name length: $MAX_NAME_LENGTH characters" -ForegroundColor Gray
Write-Host ""
# Counters for the final summary
$renamed = 0
$failed = 0
$skipped = 0
# Get all JSON files; each one drives the rename of its matching model folder
$jsonFiles = Get-ChildItem -Path $JSON_PATH -Filter "model_*.json"
$totalFiles = $jsonFiles.Count
if ($totalFiles -eq 0) {
Write-Host "ERROR: No model_*.json files found in $JSON_PATH" -ForegroundColor Red
Write-Host "Make sure you've run the metadata downloader (Step 1) first" -ForegroundColor Yellow
exit 1
}
Write-Host "Found $totalFiles JSON files to process" -ForegroundColor Green
Write-Host ""
$current = 0
foreach ($jsonFile in $jsonFiles) {
$current++
# model_409352.json -> "409352"
$modelId = $jsonFile.BaseName -replace "model_", ""
try {
# Read and parse the JSON metadata written by Step 1
$json = Get-Content $jsonFile.FullName -Raw | ConvertFrom-Json
$modelName = $json.name
if (-not $modelName) {
Write-Host "[$current/$totalFiles] Model $modelId - No name found in JSON, skipping" -ForegroundColor Yellow
$skipped++
continue
}
# Clean up the name for use as folder name.
# NOTE: the order of the following replacements matters.
# Remove invalid Windows filename characters: < > : " / \ | ? *
$cleanName = $modelName -replace '[<>:"/\\|?*]', '_'
# Replace multiple spaces with single space, then spaces with underscores
$cleanName = $cleanName -replace '\s+', ' ' -replace ' ', '_'
# Replace multiple underscores with single underscore
$cleanName = $cleanName -replace '_+', '_'
# Remove leading/trailing underscores
$cleanName = $cleanName.Trim('_')
# Limit length to avoid Windows path issues; re-trim in case the cut
# leaves a trailing underscore
if ($cleanName.Length -gt $MAX_NAME_LENGTH) {
$cleanName = $cleanName.Substring(0, $MAX_NAME_LENGTH).Trim('_')
}
# Construct new folder name based on format preference
if ($NAMING_FORMAT -eq "ID_NAME") {
$newFolderName = "${modelId}_${cleanName}"
} else {
$newFolderName = $cleanName
}
$oldFolder = Join-Path $FOLDERS_PATH "model_$modelId"
$newFolder = Join-Path $FOLDERS_PATH $newFolderName
# Check if source folder exists (it may have been renamed on a prior run)
if (-not (Test-Path $oldFolder)) {
Write-Host "[$current/$totalFiles] Model $modelId - Folder not found, skipping" -ForegroundColor Yellow
$skipped++
continue
}
# Check if target already exists (never overwrite - skip instead)
if (Test-Path $newFolder) {
Write-Host "[$current/$totalFiles] Model $modelId - Target folder already exists: $newFolderName" -ForegroundColor Yellow
$skipped++
continue
}
# Attempt rename
Write-Host "[$current/$totalFiles] Renaming: model_$modelId → $newFolderName" -ForegroundColor Cyan
Rename-Item -Path $oldFolder -NewName $newFolderName -ErrorAction Stop
Write-Host " ✓ Success" -ForegroundColor Green
$renamed++
} catch {
Write-Host "[$current/$totalFiles] Model $modelId - ERROR: $($_.Exception.Message)" -ForegroundColor Red
$failed++
# Provide specific help for common errors (usually Explorer holding a lock)
if ($_.Exception.Message -like "*path or device name*") {
Write-Host " → TIP: Close any Windows Explorer windows viewing this folder and try again" -ForegroundColor Yellow
}
}
}
Write-Host ""
Write-Host "Renaming Complete!" -ForegroundColor Green
Write-Host "==================" -ForegroundColor Green
Write-Host "Successfully renamed: $renamed folders" -ForegroundColor Green
if ($skipped -gt 0) {
Write-Host "Skipped: $skipped folders (missing, no name, or conflicts)" -ForegroundColor Yellow
}
if ($failed -gt 0) {
Write-Host "Failed: $failed folders (see errors above)" -ForegroundColor Red
}
Write-Host ""
# Show a sample of renamed folders (anything no longer matching model_*)
Write-Host "Sample of renamed folders:" -ForegroundColor Cyan
Get-ChildItem -Path $FOLDERS_PATH -Directory |
Where-Object { $_.Name -notlike "model_*" } |
Select-Object -First 10 |
ForEach-Object {
Write-Host " $($_.Name)" -ForegroundColor Gray
}
# Common Issues and Solutions:
#
# 1. "JSON path does not exist" - Update $JSON_PATH to your 'downloads' directory path
# 2. "Folders path does not exist" - Update $FOLDERS_PATH to your 'stl_files' directory path
# 3. "Cannot rename... path or device name" - Close Windows Explorer windows, wait a moment, try again
# 4. "Target folder already exists" - Another folder has the same name, script skips to avoid conflicts
# 5. "No JSON files found" - Make sure you ran the metadata downloader (Step 1) first
# 6. All folders skipped - Check that JSON files and model folders are in the correct paths
#
# Tips:
# - Close ALL Windows Explorer windows before running to avoid lock issues
# - Use ID_NAME format (default) to keep unique identifiers and avoid name conflicts
# - NAME_ONLY format is cleaner but may have duplicates if model names aren't unique
# - Script is safe - it never overwrites existing folders
# - You can run multiple times; already-renamed folders will be skipped
# - Names are cleaned automatically: special characters → underscores, length limited
# - Useful for browsing your library without needing to look up model IDs

Files in this gist: README.md — full workflow explanation; 1_mmf_download_metadata.sh — get JSON metadata; 2_mmf_download_stl_files.sh — get the actual files.

Key features both scripts have:

✅ Windows line ending fixes (automatic) ✅ Cookie authentication handling ✅ Rate limiting to be respectful to servers ✅ Clear error messages and progress tracking ✅ Comprehensive troubleshooting documentation

Script 2 specifically includes:

✅ Redirect following with -L flag (critical fix) ✅ URL cleaning to handle Windows corruption ✅ jq path detection for cross-platform compatibility

BONUS EXTRACTION AND RENAMING SCRIPTS FOR WINDOWS

3_extract_all_zips.ps1 - [BONUS] Windows mass extraction 4_rename_folders_from_json.ps1 - [BONUS] Intelligent renaming

What Users Need to Update:
Extract Script (extract_all_zips.ps1):

$BASE_PATH - Path to their stl_files directory
$EXTRACT_IN_PLACE - Choose extraction mode (true/false)

Rename Script (rename_folders_from_json.ps1):

$JSON_PATH - Path to directory with JSON files (usually downloads)
$FOLDERS_PATH - Path to stl_files directory with model folders
$NAMING_FORMAT - Choose "ID_NAME" or "NAME_ONLY" style
$MAX_NAME_LENGTH - Customize if needed

Both scripts include: ✅ Clear "UPDATE THIS" sections at the top with example paths ✅ Path validation with helpful error messages if wrong ✅ All the fixes we discovered (Explorer locks, filename cleaning, etc.) ✅ Progress tracking and summary statistics ✅ Comprehensive troubleshooting sections based on our debugging ✅ Safe operation - won't overwrite or damage existing data

New Features: ✅ Cookie Validation - Checks for PHPSESSID and cf_clearance before starting ✅ Test Mode - Run with --test flag to verify setup with one file first ✅ HTML Error Detection - Catches "enable Javascript" pages automatically ✅ Consecutive Failure Protection - Stops after 3 failures in a row (systematic error) ✅ Better Error Messages - Shows actual error content and troubleshooting steps ✅ Detailed Cookie Instructions - Step-by-step guide in the script header ✅ Early Exit - Won't download 157 error pages before noticing something's wrong

Key Improvements:

Test mode will catch the issue immediately:

bash mmf_download_stl_files_enhanced.sh --test

This downloads ONE file and validates it before proceeding.

  1. Cookie validation checks for cf_clearance - The "enable Javascript" error is almost always missing this Cloudflare token
  2. Shows the actual error page content so they can see what MyMiniFactory is returning
  3. Stops after 3 consecutive failures instead of downloading 157 HTML error pages

Replace your script with this enhanced version. Run `bash mmf_download_stl_files_enhanced.sh --test` first. If the test fails, follow the error-message instructions to get a fresh cookie. Make sure the cookie comes from a download request, not just from browsing the site.

#!/bin/bash
# MyMiniFactory Bulk STL/ZIP File Downloader (Enhanced Edition)
# Downloads actual 3D printable files from a list of model IDs
# This is STEP 2 - run AFTER the metadata downloader creates JSON files
#
# NEW in Enhanced Edition:
# - Cookie validation before starting
# - Automatic HTML error page detection
# - Test mode to verify setup before bulk download
# - Better error messages with troubleshooting steps
# - Stops on systematic errors (e.g., all downloads failing)
#
# Prerequisites:
# 1. JSON metadata files from Step 1 (model_*.json files)
# 2. Valid MyMiniFactory session cookie
# 3. jq installed (JSON parser) - download from https://github.com/stedolan/jq/releases
#
# Usage:
# 1. Run metadata downloader first (Step 1)
# 2. Update COOKIE variable below with your session cookie
# 3. Place jq or jq.exe in same directory
# 4. Run in test mode first: bash download_stl_files.sh --test
# 5. If test passes, run full: bash download_stl_files.sh
# Colors for output (ANSI escape sequences, rendered via echo -e)
RED='\033[0;31m'
GREEN='\033[0;32m'
BLUE='\033[0;34m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color
# ============================================================================
# CONFIGURATION
# ============================================================================
# UPDATE THIS: Get your cookie from browser developer tools
#
# HOW TO GET YOUR COOKIE (IMPORTANT - READ CAREFULLY):
# 1. Open MyMiniFactory in your browser and log in
# 2. Navigate to any model you own
# 3. Click download button for any file (Resin, FDM, etc.)
# 4. Open Developer Tools (F12)
# 5. Go to Network tab
# 6. Find the request to "myminifactory.com/download/XXXXX?archive_id=XXXXX"
# 7. Click on that request
# 8. Scroll to "Request Headers" section
# 9. Find the "Cookie:" header
# 10. Copy ONLY the value (everything after "Cookie: ")
# 11. Paste below between the single quotes
#
# Your cookie MUST include these parts:
# - PHPSESSID=...
# - cf_clearance=... (Cloudflare token - CRITICAL)
# - Various _ga and _pk tracking cookies
#
# If you're missing cf_clearance, you'll get "enable Javascript" errors
# Paste the full browser Cookie header value here (see instructions above).
COOKIE='REPLACE_WITH_YOUR_ACTUAL_COOKIE_STRING'
# ============================================================================
# Configuration
TEST_MODE=false
MAX_CONSECUTIVE_FAILURES=3 # Stop if this many downloads fail in a row
# Parse command line arguments: --test downloads one file to verify the setup
if [[ "$1" == "--test" ]]; then
TEST_MODE=true
fi
# Function to validate cookie format
# Validate the COOKIE configuration before any network activity.
# Prints human-readable diagnostics; returns 0 when the cookie looks usable,
# 1 when it is still the placeholder or missing the Cloudflare token.
validate_cookie() {
  # Placeholder value means the user never configured the script.
  case "$COOKIE" in
    REPLACE_WITH_YOUR_ACTUAL_COOKIE_STRING)
      echo -e "${RED}ERROR: Cookie not configured!${NC}"
      echo "Please update the COOKIE variable in this script with your actual session cookie."
      echo "See the HOW TO GET YOUR COOKIE instructions in the script."
      return 1
      ;;
  esac
  # A missing PHPSESSID is only a warning: the request may still go through.
  if [[ "$COOKIE" != *PHPSESSID* ]]; then
    echo -e "${YELLOW}WARNING: Cookie missing PHPSESSID - this may not work${NC}"
  fi
  # A missing cf_clearance is fatal: Cloudflare will serve a challenge page.
  if [[ "$COOKIE" != *cf_clearance* ]]; then
    echo -e "${YELLOW}WARNING: Cookie missing cf_clearance (Cloudflare token)${NC}"
    echo "This is the most common cause of 'enable Javascript' errors."
    echo "Make sure you copied the cookie from a DOWNLOAD request, not a page view."
    return 1
  fi
  echo -e "${GREEN}✓ Cookie format looks valid${NC}"
  return 0
}
# Function to check if downloaded file is an HTML error page
# Return 0 (true) when the file in $1 looks like an HTML error page rather
# than a real model file; return 1 for missing/empty files or anything whose
# first 20 lines do not match the known error markers.
is_html_error() {
  local candidate="$1"
  # Absent or zero-byte files are handled by the caller's empty-download
  # path, so they are not classified as HTML errors here.
  [[ -f "$candidate" && -s "$candidate" ]] || return 1
  # Only the head of the file is inspected; the function's status is the
  # grep status (0 = marker found, 1 = looks like a real payload).
  head -20 "$candidate" | grep -qi "<!DOCTYPE\|<html\|enable javascript\|cloudflare"
}
# Function to display error page content
# Print the first 20 lines of a downloaded error page, each indented one
# space, under a colored heading so the user can see what the server sent.
show_error_content() {
  local page="$1"
  echo -e "${CYAN}Error page content (first 20 lines):${NC}"
  # Indent each line to set the page body off from the script's own output.
  head -20 "$page" | sed -e 's/^/ /'
}
# Banner
echo -e "${CYAN}╔════════════════════════════════════════════════════════╗${NC}"
echo -e "${CYAN}║ MyMiniFactory STL Downloader - Enhanced Edition ║${NC}"
echo -e "${CYAN}╚════════════════════════════════════════════════════════╝${NC}"
echo ""
# Validate the cookie before doing anything else - fail fast on a bad cookie
# rather than after dozens of doomed requests.
echo -e "${BLUE}Validating cookie configuration...${NC}"
if ! validate_cookie; then
exit 1
fi
echo ""
# Sanity check: we must be in the directory that holds the Step 1 JSON files.
if ! ls model_*.json 1> /dev/null 2>&1; then
  echo -e "${RED}Error: No model JSON files found. Run the metadata downloader first (Step 1).${NC}" >&2
  exit 1
fi
# Locate jq (JSON parser). When falling back to a local copy, record an
# absolute path: the script cd's into stl_files/ below, and the previous
# relative "../jq.exe" silently depended on that cd being exactly one
# level deep.
JQ_CMD="jq"
if ! command -v jq &> /dev/null; then
  if [[ -f "./jq.exe" ]]; then
    JQ_CMD="$PWD/jq.exe"
  elif [[ -f "./jq" ]]; then
    JQ_CMD="$PWD/jq"
  else
    echo -e "${RED}Error: jq not found. Download from https://github.com/stedolan/jq/releases${NC}" >&2
    echo "Place jq or jq.exe in the same directory as this script" >&2
    exit 1
  fi
fi
# Create the STL downloads directory and work from inside it.
mkdir -p stl_files
cd stl_files || exit
# Count JSON files (now one level up)
json_count=$(find .. -name "model_*.json" -type f 2>/dev/null | wc -l)
if [[ $json_count -eq 0 ]]; then
  echo -e "${RED}Error: No JSON files found${NC}" >&2
  exit 1
fi
echo -e "${BLUE}Found $json_count JSON files to process${NC}"
# TEST MODE - Download just one file to verify cookie/setup before committing
# to a full run. Exits 0 on success, 1 on failure; never falls through.
if [[ "$TEST_MODE" == true ]]; then
echo -e "${YELLOW}═══════════════════════════════════════${NC}"
echo -e "${YELLOW} RUNNING IN TEST MODE${NC}"
echo -e "${YELLOW}═══════════════════════════════════════${NC}"
echo "Testing with first available model to verify cookie and setup..."
echo ""
# Find first JSON file with download URLs
for json_file in ../model_*.json; do
model_id=$(basename "$json_file" | sed 's/model_//; s/.json//')
download_data=$($JQ_CMD -r '.files.items[] | "\(.filename)|\(.download_url)"' "$json_file" 2>/dev/null)
if [[ -n "$download_data" ]]; then
echo -e "${BLUE}Testing with model $model_id${NC}"
# Get first file from this model (head -1 takes the first filename|url pair)
filename=$(echo "$download_data" | head -1 | cut -d'|' -f1 | tr -d '\r' | xargs)
download_url=$(echo "$download_data" | head -1 | cut -d'|' -f2 | tr -d '\r' | xargs)
echo -e " Downloading: ${CYAN}$filename${NC}"
test_file="test_download_${model_id}.tmp"
# Same curl invocation the full download loop uses
curl --silent -L \
-H "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:142.0) Gecko/20100101 Firefox/142.0" \
-H "Accept: application/octet-stream" \
-H "Cookie: $COOKIE" \
--compressed \
"$download_url" \
-o "$test_file"
echo ""
# An HTML page here means an auth/Cloudflare failure, not a real file
if is_html_error "$test_file"; then
echo -e "${RED}✗ TEST FAILED${NC}"
echo -e "${RED}Downloaded file is an HTML error page, not the actual file${NC}"
echo ""
show_error_content "$test_file"
echo ""
echo -e "${YELLOW}Common causes:${NC}"
echo " 1. Cookie expired - get a fresh cookie from browser"
echo " 2. Missing cf_clearance token - copy cookie from download request, not page view"
echo " 3. Not logged in - make sure you're logged into MyMiniFactory in browser"
echo " 4. Cookie formatting error - check for extra quotes or special characters"
echo ""
echo -e "${CYAN}How to get a fresh cookie:${NC}"
echo " 1. Open MyMiniFactory in browser, log in"
echo " 2. Download any file from any model"
echo " 3. F12 → Network → Find 'download' request"
echo " 4. Copy the Cookie header value"
echo " 5. Paste into script (no extra quotes)"
rm -f "$test_file"
exit 1
else
file_size=$(stat -c%s "$test_file" 2>/dev/null || stat -f%z "$test_file" 2>/dev/null || echo "unknown")
echo -e "${GREEN}✓ TEST PASSED${NC}"
echo -e " Successfully downloaded ${CYAN}$filename${NC} (${file_size} bytes)"
echo " File appears to be valid (not an error page)"
echo ""
echo -e "${GREEN}Cookie is working! You can now run the full download:${NC}"
echo " bash $(basename "$0")"
rm -f "$test_file"
exit 0
fi
fi
done
# Reached only when no JSON file yielded any download URLs
echo -e "${RED}No models with download URLs found for testing${NC}"
exit 1
fi
# FULL DOWNLOAD MODE
echo -e "${BLUE}Extracting download URLs and downloading STL/ZIP files...${NC}"
echo -e "${YELLOW}This will take a while - respecting rate limits${NC}"
echo -e "${YELLOW}Press Ctrl+C to stop at any time${NC}"
echo ""
# Per-run counters; consecutive_failures drives the systematic-error stop
current_file=0
total_downloads=0
successful_downloads=0
consecutive_failures=0
# Process each JSON file
# Walk every model_<id>.json produced by the metadata step and download each
# file it lists into model_<id>/.
# Relies on globals defined earlier in the script: JQ_CMD, COOKIE, json_count,
# MAX_CONSECUTIVE_FAILURES, is_html_error(), show_error_content(), and the
# color variables.

# clean_field STR — strip carriage returns plus leading/trailing whitespace.
# Replaces the fragile `tr -d '\r' | xargs` idiom, which fails with
# "unmatched quote" (yielding an empty value) on filenames containing quotes
# or backslashes, and collapses internal whitespace.
clean_field() {
  local s=${1//$'\r'/}
  s=${s#"${s%%[![:space:]]*}"}
  s=${s%"${s##*[![:space:]]}"}
  printf '%s' "$s"
}

for json_file in ../model_*.json; do
  # Skip the literal pattern when the glob matches nothing.
  [[ -e "$json_file" ]] || continue
  current_file=$((current_file + 1))
  # model_409352.json -> 409352. The old `sed 's/.json//'` treated the dot as
  # a wildcard and was not anchored to the end of the name.
  model_id=$(basename "$json_file" .json)
  model_id=${model_id#model_}
  echo -e "${BLUE}[$current_file/$json_count] Processing model $model_id...${NC}"

  # Extract "filename|download_url" pairs from the metadata using jq.
  download_data=$($JQ_CMD -r '.files.items[] | "\(.filename)|\(.download_url)"' "$json_file" 2>/dev/null)
  if [[ -z "$download_data" ]]; then
    echo -e "${RED} ✗ No download URLs found in $json_file${NC}"
    continue
  fi

  # One directory per model keeps the downloads organized.
  model_dir="model_${model_id}"
  mkdir -p "$model_dir"

  # Download each file. The here-string (rather than a pipe) keeps the loop
  # in the current shell so the counters survive past it.
  while IFS='|' read -r filename download_url; do
    if [[ -n "$filename" && -n "$download_url" ]]; then
      filename=$(clean_field "$filename")
      download_url=$(clean_field "$download_url")
      total_downloads=$((total_downloads + 1))
      # BUG FIX: this used to be "${model_dir}/$(unknown)", which tried to
      # execute a nonexistent command `unknown` instead of using the
      # filename from the metadata as the output path.
      output_file="${model_dir}/${filename}"
      echo -e " ${YELLOW}Downloading: $filename${NC}"
      # Follow redirects and present browser-like headers plus the session
      # cookie; the server rejects bare curl requests.
      curl --silent -L \
        -H "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:142.0) Gecko/20100101 Firefox/142.0" \
        -H "Accept: application/octet-stream" \
        -H "Cookie: $COOKIE" \
        --compressed \
        "$download_url" \
        -o "$output_file"
      # A download "succeeded" only if the file exists and is non-empty.
      if [[ -f "$output_file" && -s "$output_file" ]]; then
        if is_html_error "$output_file"; then
          # The server returned an error page (expired cookie, Cloudflare,
          # logged-out session) instead of the actual file.
          echo -e " ${RED}✗ Failed: Downloaded HTML error page instead of file${NC}"
          consecutive_failures=$((consecutive_failures + 1))
          # Show the server's response once, on the first failure only.
          if [[ $consecutive_failures -eq 1 ]]; then
            show_error_content "$output_file"
          fi
          rm -f "$output_file"
          # A run of failures means a systemic auth problem — bail out with
          # recovery instructions rather than hammering the API.
          if [[ $consecutive_failures -ge $MAX_CONSECUTIVE_FAILURES ]]; then
            echo ""
            echo -e "${RED}═══════════════════════════════════════════════════════${NC}"
            echo -e "${RED}STOPPING: $MAX_CONSECUTIVE_FAILURES consecutive failures detected${NC}"
            echo -e "${RED}═══════════════════════════════════════════════════════${NC}"
            echo ""
            echo -e "${YELLOW}This indicates a systematic problem (not random failures)${NC}"
            echo ""
            echo -e "${CYAN}Most likely causes:${NC}"
            echo " 1. Cookie expired - get fresh cookie from browser"
            echo " 2. Missing cf_clearance token in cookie"
            echo " 3. Session logged out - log back into MyMiniFactory"
            echo " 4. Account permissions issue"
            echo ""
            echo -e "${CYAN}To fix:${NC}"
            echo " 1. Log into MyMiniFactory in your browser"
            echo " 2. Download a file manually to verify access"
            echo " 3. Copy fresh cookie from that download request (F12 → Network)"
            echo " 4. Update COOKIE variable in script"
            echo " 5. Run in test mode first: bash $(basename "$0") --test"
            echo ""
            exit 1
          fi
        else
          # Looks like a real file — record the success.
          # stat -c%s is GNU, stat -f%z is BSD/macOS; fall back to "unknown".
          file_size=$(stat -c%s "$output_file" 2>/dev/null || stat -f%z "$output_file" 2>/dev/null || echo "unknown")
          echo -e " ${GREEN}✓ Downloaded $filename (${file_size} bytes)${NC}"
          successful_downloads=$((successful_downloads + 1))
          consecutive_failures=0 # Reset on success
        fi
      else
        # curl produced nothing at all (network error, empty response).
        echo -e " ${RED}✗ Failed to download $filename${NC}"
        consecutive_failures=$((consecutive_failures + 1))
        rm -f "$output_file"
        if [[ $consecutive_failures -ge $MAX_CONSECUTIVE_FAILURES ]]; then
          echo ""
          echo -e "${RED}STOPPING: Too many consecutive failures${NC}"
          echo "Check your internet connection and cookie validity"
          exit 1
        fi
      fi
      # Rate limiting: pause between downloads to stay under API limits.
      sleep 2
    fi
  done <<< "$download_data"
  echo ""
done
# Final summary: totals, failure count (if any), and a peek at the output tree.
printf '%b\n' \
  "${GREEN}═══════════════════════════════════════${NC}" \
  "${GREEN} Download Complete!${NC}" \
  "${GREEN}═══════════════════════════════════════${NC}" \
  "${GREEN}Successfully downloaded: $successful_downloads/$total_downloads files${NC}"
failed_count=$((total_downloads - successful_downloads))
if (( failed_count > 0 )); then
  printf '%b\n' "${YELLOW}Failed: $failed_count files${NC}"
fi
printf '%b\n' "${BLUE}Files are organized in the 'stl_files' directory by model ID${NC}" ""
# List up to five model directories with their per-directory file counts.
echo "Sample of downloaded directories:"
find . -name "model_*" -type d 2>/dev/null | head -5 | while read -r sample_dir; do
  n_files=$(find "$sample_dir" -type f | wc -l)
  echo " $sample_dir: $n_files files"
done
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment