|
#!/usr/bin/env bash

# eXist-DB helper CLI: sync a local directory with a remote eXist-DB
# collection over the REST API (export / import / watch / backup / build-xar).

set -euo pipefail

#############################################
### LOAD CONFIG IF EXISTS (.env)
#############################################

# Resolve the directory this script lives in, so the .env file is found
# even when the script is invoked from another working directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ENV_FILE="$SCRIPT_DIR/.env"

# shellcheck disable=SC1090 -- user-provided config, path known at runtime
if [[ -f "$ENV_FILE" ]]; then
  source "$ENV_FILE"
fi
|
|
|
#############################################
### DEFAULTS (used if .env missing)
#############################################

# Connection settings: each EDB_* variable may be preset in .env and
# falls back to a local-development default otherwise.
SERVER_URL="${EDB_SERVER_URL:-http://localhost:8080/exist}"
USER="${EDB_USER:-admin}"
PASS="${EDB_PASS:-password}"
EXIST_COLLECTION="${EDB_COLLECTION:-/db/apps/sympa}"  # remote collection path
LOCAL_DIR="${EDB_LOCAL_DIR:-./sympa}"                 # local mirror directory

# Base URL of the eXist REST interface; all curl calls go through this.
REST_BASE="$SERVER_URL/rest"

### XAR defaults (NEW)
# Package name defaults to the last segment of the collection path.
XAR_NAME="${EDB_XAR_NAME:-$(basename "$EXIST_COLLECTION")}"
XAR_DIST_DIR="${EDB_XAR_DIST_DIR:-$SCRIPT_DIR/dist}"

### BACKUP defaults
BACKUP_DIR="${EDB_BACKUP_DIR:-$SCRIPT_DIR/backups}"
BACKUP_KEEP="${EDB_BACKUP_KEEP:-10}" # maximum number of backups to keep
|
|
|
|
|
#############################################
### HELP
#############################################

# Print the command summary to stdout.
# (Arrows were previously mojibake-garbled; replaced with ASCII "->".)
usage() {
  cat <<'EOF'
eXist-DB CLI

Usage:
  ./edb.sh init        create .env config
  ./edb.sh edit        edit .env
  ./edb.sh export      export from eXist -> local
  ./edb.sh import      import from local -> eXist (overwrite)
  ./edb.sh watch       watch local dir and auto-import changes
  ./edb.sh build-xar   build XAR package from local dir
  ./edb.sh backup      backup remote collection to local backups dir

EOF
}
|
|
|
#############################################
### INIT CONFIG FILE
#############################################

# Create a starter .env next to the script.
# Refuses to overwrite an existing config so credentials are never clobbered.
do_init() {
  if [[ -f "$ENV_FILE" ]]; then
    echo ".env already exists - remove it if you want to regenerate."
    exit 1
  fi

  # Single quoted heredoc: nothing is expanded, template written verbatim.
  cat > "$ENV_FILE" <<'EOF'
EDB_SERVER_URL="http://localhost:8080/exist"
EDB_USER="admin"
EDB_PASS="password"
EDB_COLLECTION="/db/apps/sympa"
EDB_LOCAL_DIR="./sympa"

# Optional XAR settings:
# EDB_XAR_NAME="sympa"
# EDB_XAR_DIST_DIR="./dist"
# EDB_XAR_VERSION="1.0.0"

# Optional backup settings:
# EDB_BACKUP_DIR="./backups"
# EDB_BACKUP_KEEP=10
EOF

  echo "Created .env - adjust it, then run:"
  echo "  ./edb.sh export   or   ./edb.sh import"
}
|
|
|
|
|
#############################################
### EDIT ENV
#############################################

# Open the .env config in the user's editor ($EDITOR, nano as fallback).
# Bails out if no config exists yet.
do_edit() {
  if [[ ! -f "$ENV_FILE" ]]; then
    echo "Run ./edb.sh init first."
    exit 1
  fi
  # Intentionally unquoted so EDITOR may carry its own arguments.
  ${EDITOR:-nano} "$ENV_FILE"
}
|
|
|
#############################################
### SMALL URL ENCODER (spaces -> %20)
#############################################

# Print the argument with every space percent-encoded as %20.
# Other reserved URL characters are deliberately left untouched.
url_encode() {
  local input=$1
  printf '%s\n' "${input// /%20}"
}
|
|
|
#############################################
### EXPORT - via REST, recursive, no weird paths
#############################################

# Recursively download EXIST_COLLECTION into LOCAL_DIR, mirroring the
# collection hierarchy as local directories.
#
# Fix: each recursion level now uses its own mktemp listing file instead
# of a single shared /tmp/exist_listing.xml, which recursive crawl calls
# used to clobber (and which raced between concurrent runs). This also
# matches how do_backup already handles its listings.
do_export() {
  echo "[export] Exporting via REST"
  echo "Root:    $EXIST_COLLECTION"
  echo "Output:  $LOCAL_DIR"

  mkdir -p "$LOCAL_DIR"

  ROOT="${EXIST_COLLECTION%/}"

  # Crawl one collection: download its resources, then recurse into
  # its sub-collections.
  crawl() {
    local dbpath="$1"

    # relative path based on ROOT
    local rel="${dbpath#"$ROOT"}"
    rel="${rel#/}"
    [[ -z "$rel" ]] && rel="."

    mkdir -p "$LOCAL_DIR/$rel"

    # private listing file for this recursion level
    local listing
    listing="$(mktemp)"
    curl -s -u "$USER:$PASS" "$REST_BASE$dbpath" > "$listing"

    # ---- FILES ----
    sed -n 's/.*resource name="\([^"]*\)".*/\1/p' "$listing" |
    while read -r name; do
      [[ -z "$name" ]] && continue
      # entries that also appear as collections are directories; skip here
      if grep -q "collection name=\"$name\"" "$listing"; then
        continue
      fi

      echo "  get $rel/$name"
      curl -s -u "$USER:$PASS" \
        "$REST_BASE$dbpath/$name" \
        -o "$LOCAL_DIR/$rel/$name"
    done

    # ---- DIRECTORIES (with anti-loop guard) ----
    sed -n 's/.*collection name="\([^"]*\)".*/\1/p' "$listing" |
    while read -r dir; do
      [[ -z "$dir" ]] && continue
      local next="$dbpath/$dir"

      # if ROOT appears more than once -> infinite recursion, stop
      if [[ "$(grep -o "$ROOT" <<< "$next" | wc -l)" -gt 1 ]]; then
        echo "  STOP LOOP -> $next"
        continue
      fi

      echo "  dir $rel/$dir"
      crawl "$next"
    done

    rm -f "$listing"
  }

  crawl "$ROOT"

  echo "EXPORT COMPLETE -> $(realpath "$LOCAL_DIR")"
}
|
|
|
#############################################
### IMPORT - from local dir -> eXist (mirror paths)
#############################################

# Upload LOCAL_DIR into EXIST_COLLECTION: create the collection tree
# first (MKCOL), then PUT every file, overwriting existing resources.
# A full remote backup is taken before anything is touched.
#
# Fix: find output is now NUL-delimited so paths containing spaces or
# newlines survive the read loops.
do_import() {
  echo "[import] Importing to eXist"
  echo "Source: $LOCAL_DIR"
  echo "Target: $EXIST_COLLECTION"

  if [[ ! -d "$LOCAL_DIR" ]]; then
    echo "Local dir not found: $LOCAL_DIR"
    exit 1
  fi

  echo "Auto-backup enabled - exporting remote collection before import..."
  do_backup
  echo "Backup done. Proceeding with import."

  # ---- CREATE COLLECTIONS ----
  echo "Creating collections in eXist..."

  find "$LOCAL_DIR" -type d -print0 | while IFS= read -r -d '' dir; do
    # relative path under LOCAL_DIR
    local rel="${dir#"$LOCAL_DIR"}"
    rel="${rel#/}" # remove leading /

    local dbpath="$EXIST_COLLECTION"
    if [[ -n "$rel" ]]; then
      dbpath="$dbpath/$rel"
    fi

    local enc_path
    enc_path=$(url_encode "$dbpath")

    # MKCOL (ignore errors if the collection already exists)
    curl -s -u "$USER:$PASS" -X MKCOL "$REST_BASE$enc_path" >/dev/null 2>&1 || true
  done

  # ---- UPLOAD FILES ----
  echo "Uploading files (overwrite enabled)..."

  find "$LOCAL_DIR" -type f \
    ! -name '.DS_Store' \
    ! -path '*/.git/*' \
    ! -path '*/.idea/*' \
    -print0 | while IFS= read -r -d '' file; do

    local rel="${file#"$LOCAL_DIR"}"
    rel="${rel#/}"

    local enc_rel
    enc_rel=$(url_encode "$rel")

    local dest="$REST_BASE$EXIST_COLLECTION/$enc_rel"

    echo "PUT $rel"
    curl -s -u "$USER:$PASS" -X PUT -T "$file" "$dest" >/dev/null
  done

  echo "IMPORT COMPLETE to $EXIST_COLLECTION"
}
|
|
|
#############################################
### BUILD XAR - package LOCAL_DIR into .xar
#############################################

# Zip the contents of LOCAL_DIR into XAR_DIST_DIR/<name>-<version>.xar.
# Version comes from EDB_XAR_VERSION when set, otherwise a timestamp.
# Also maintains a "<name>-latest.xar" symlink (copy as fallback on
# filesystems without symlink support).
do_build_xar() {
  echo "[build] Building XAR package"
  echo "Source dir: $LOCAL_DIR"
  echo "App name:   $XAR_NAME"
  echo "Output dir: $XAR_DIST_DIR"

  if [[ ! -d "$LOCAL_DIR" ]]; then
    echo "Local dir not found: $LOCAL_DIR"
    exit 1
  fi

  if ! command -v zip >/dev/null 2>&1; then
    echo "'zip' command not found. Install it first (e.g. 'sudo apt-get install zip' or 'brew install zip')."
    exit 1
  fi

  # create the build directory if it does not exist yet
  mkdir -p "$XAR_DIST_DIR"

  # normalize the build directory to an absolute path, because we zip
  # from inside LOCAL_DIR and a relative output path would break
  local XAR_DIST_DIR_ABS
  XAR_DIST_DIR_ABS="$(cd "$XAR_DIST_DIR" && pwd)"

  local ts
  ts="$(date +%Y%m%d_%H%M%S)"

  # use EDB_XAR_VERSION when set, otherwise fall back to the timestamp
  local version="${EDB_XAR_VERSION:-$ts}"

  local xar_path="$XAR_DIST_DIR_ABS/${XAR_NAME}-${version}.xar"

  echo "Output file: $xar_path"

  # build the archive from inside LOCAL_DIR so entries are relative
  (
    cd "$LOCAL_DIR"
    zip -rq "$xar_path" .
  )

  echo "XAR created: $xar_path"

  # "latest" pointer: symlink when possible, plain copy otherwise
  local latest="$XAR_DIST_DIR_ABS/${XAR_NAME}-latest.xar"
  ln -sf "$(basename "$xar_path")" "$latest" 2>/dev/null || cp "$xar_path" "$latest"

  echo "Latest pointer updated: $latest"
}
|
|
|
|
|
#############################################
### WATCH - auto-upload on file change
#############################################

# Watch LOCAL_DIR with fswatch (macOS) or inotifywait (Linux) and PUT
# every changed regular file to the matching path under EXIST_COLLECTION.
# Runs until interrupted.
do_watch() {
  echo "[watch] Watch mode ON"
  echo "Watching: $LOCAL_DIR"
  echo "Target:   $EXIST_COLLECTION"
  echo "Every change to a file will be uploaded automatically."
  echo

  if [[ ! -d "$LOCAL_DIR" ]]; then
    echo "Local dir not found: $LOCAL_DIR"
    exit 1
  fi

  local watcher=""
  if command -v fswatch >/dev/null 2>&1; then
    watcher="fswatch"
  elif command -v inotifywait >/dev/null 2>&1; then
    watcher="inotifywait"
  else
    echo "No watcher found."
    echo "Install one of:"
    echo "  macOS: brew install fswatch"
    echo "  Linux: sudo apt-get install inotify-tools"
    exit 1
  fi

  # normalize the watched directory to an absolute path
  local WATCH_DIR_ABS
  WATCH_DIR_ABS="$(cd "$LOCAL_DIR" && pwd)"

  # Upload a single changed file; silently ignores anything that is not
  # a regular file inside the watched tree, plus editor/VCS noise.
  upload_one() {
    local full="$1"

    # normalize the file path as well
    local full_abs
    full_abs="$(realpath "$full" 2>/dev/null || echo "$full")"

    # make sure it lives under the watched directory
    case "$full_abs" in
      "$WATCH_DIR_ABS"/*) ;;
      *) return 0 ;;
    esac

    # regular files only
    [[ ! -f "$full_abs" ]] && return 0

    # skip editor/VCS noise
    case "$(basename "$full_abs")" in
      .DS_Store) return 0 ;;
    esac
    [[ "$full_abs" == *"/.git/"* ]] && return 0
    [[ "$full_abs" == *"/.idea/"* ]] && return 0

    # path relative to WATCH_DIR_ABS
    local rel="${full_abs#"$WATCH_DIR_ABS"/}"

    local enc_rel
    enc_rel=$(url_encode "$rel")

    local dest="$REST_BASE$EXIST_COLLECTION/$enc_rel"

    echo "change detected -> $rel (uploading)"
    curl -s -u "$USER:$PASS" -X PUT -T "$full_abs" "$dest" >/dev/null \
      && echo "  uploaded" \
      || echo "  upload failed for $rel"
  }

  if [[ "$watcher" == "fswatch" ]]; then
    echo "Using fswatch..."
    fswatch -r "$WATCH_DIR_ABS" | while read -r path; do
      upload_one "$path"
    done
  else
    echo "Using inotifywait..."
    inotifywait -m -r -e close_write,create "$WATCH_DIR_ABS" 2>/dev/null | \
      while read -r dir _ file; do
        upload_one "$dir/$file"
      done
  fi
}
|
|
|
#############################################
### BACKUP - export remote collection -> backups dir
#############################################

# Download the whole remote collection into a timestamped directory under
# BACKUP_DIR/<app>/, then prune old snapshots down to BACKUP_KEEP.
#
# Fix: rotation no longer parses `ls` output; a glob fills the array
# directly (same lexicographic = chronological order for timestamp names).
do_backup() {
  echo "[backup] Creating backup of remote collection"
  echo "Remote:  $EXIST_COLLECTION"
  echo "Backups: $BACKUP_DIR"

  local app_name="$XAR_NAME"
  local ts
  ts="$(date +%Y%m%d_%H%M%S)"

  # e.g. ./backups/sympa/20250218_153012
  local target_root="$BACKUP_DIR/$app_name/$ts"

  mkdir -p "$target_root"

  local ROOT="${EXIST_COLLECTION%/}"

  # Crawl one remote collection level into the backup tree.
  crawl_backup() {
    local dbpath="$1"

    # path relative to ROOT
    local rel="${dbpath#"$ROOT"}"
    rel="${rel#/}"
    [[ -z "$rel" ]] && rel="."

    mkdir -p "$target_root/$rel"

    # download the listing into a private temp file
    local listing
    listing="$(mktemp)"
    curl -s -u "$USER:$PASS" "$REST_BASE$dbpath" > "$listing"

    # ---- FILES ----
    sed -n 's/.*resource name="\([^"]*\)".*/\1/p' "$listing" | \
    while read -r name; do
      [[ -z "$name" ]] && continue
      # names that also appear as collections are dirs; handled below
      if grep -q "collection name=\"$name\"" "$listing"; then
        continue
      fi

      echo "  [backup] get $rel/$name"
      curl -s -u "$USER:$PASS" \
        "$REST_BASE$dbpath/$name" \
        -o "$target_root/$rel/$name"
    done

    # ---- DIRECTORIES ----
    sed -n 's/.*collection name="\([^"]*\)".*/\1/p' "$listing" | \
    while read -r dir; do
      [[ -z "$dir" ]] && continue
      local next="$dbpath/$dir"
      echo "  [backup] dir $rel/$dir"
      crawl_backup "$next"
    done

    rm -f "$listing"
  }

  crawl_backup "$ROOT"

  echo "BACKUP COMPLETE -> $target_root"

  # ---- BACKUP ROTATION ----
  # Keep only the newest BACKUP_KEEP snapshots.
  if [[ "$BACKUP_KEEP" -gt 0 ]] && [[ -d "$BACKUP_DIR/$app_name" ]]; then
    local backups=()
    local b
    for b in "$BACKUP_DIR/$app_name"/*; do
      # guard against the literal unmatched-glob pattern
      [[ -e "$b" ]] && backups+=("$b")
    done
    local count="${#backups[@]}"

    if (( count > BACKUP_KEEP )); then
      local to_delete=$((count - BACKUP_KEEP))
      echo "Rotating backups (keep=$BACKUP_KEEP, total=$count)"

      local i
      for ((i = 0; i < to_delete; i++)); do
        echo "  rm -rf ${backups[$i]}"
        rm -rf "${backups[$i]}"
      done
    fi
  fi
}
|
|
|
|
|
|
|
#############################################
### COMMAND ROUTER
#############################################

# Dispatch on the first CLI argument; anything unknown prints the help.
cmd="${1:-}"
case "$cmd" in
  init)      do_init ;;
  edit)      do_edit ;;
  export)    do_export ;;
  import)    do_import ;;
  watch)     do_watch ;;
  build-xar) do_build_xar ;;
  backup)    do_backup ;;
  *)         usage ;;
esac