infctl-cli/scripts/init-data-ctl.sh
#!/usr/bin/env bash
# Initialise application data directories from S3 when they are empty.
# Requires $S3_BUCKET to be set and the aws CLI to be available.
LOG_FILE="/var/log/init-data.log"
mkdir -p /var/log
echo "env variables" | tee -a "$LOG_FILE"
env | tee -a "$LOG_FILE"
# Create the target directories before listing them
mkdir -p /var/www/{public,storage,database}
ls -lirt /var/www/public | tee -a "$LOG_FILE"
# Function to log errors and continue
log_error() {
echo "[ERROR] $1" | tee -a "$LOG_FILE"
}
# Check if public directory is empty
if [ -z "$(find /var/www/public -type f -o -type d -not -name "lost+found" -not -path "/var/www/public" 2>/dev/null)" ]; then
echo "Public directory is empty, copying data from S3..." | tee -a "$LOG_FILE"
aws s3 cp $S3_BUCKET/assets/public.tar /var/www/public/ 2>>"$LOG_FILE" || log_error "Failed to copy public data from S3"
else
echo "Public directory already has data, skipping S3 copy..." | tee -a "$LOG_FILE"
fi
# Check if storage directory is empty
if [ -z "$(find /var/www/storage -type f -o -type d -not -name "lost+found" -not -path "/var/www/storage" 2>/dev/null)" ]; then
echo "Storage directory is empty, copying data from S3..." | tee -a "$LOG_FILE"
aws s3 cp $S3_BUCKET/assets/storage.tar /var/www/storage/ 2>>"$LOG_FILE" || log_error "Failed to copy storage data from S3"
else
echo "Storage directory already has data, skipping S3 copy..." | tee -a "$LOG_FILE"
fi
# Check if database directory is empty
if [ -z "$(ls -A /var/www/database 2>/dev/null)" ]; then
echo "Database directory is empty, copying data from S3..." | tee -a "$LOG_FILE"
aws s3 cp $S3_BUCKET/assets/database.tar /var/www/database/ 2>>"$LOG_FILE" || log_error "Failed to copy database data from S3"
else
echo "Database directory already has data, skipping S3 copy..." | tee -a "$LOG_FILE"
fi
echo "Script completed. Check $LOG_FILE for details."