changed app to use json config for pipeline steps
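A minimal sketch of what a JSON pipeline definition and the Go types to load it might look like; the Step/Pipeline type names and the name/command/args fields are assumptions for illustration, not taken from the repository.

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// Step and Pipeline are hypothetical shapes for the JSON pipeline file;
// the real field names in the project may differ.
type Step struct {
	Name    string   `json:"name"`
	Command string   `json:"command"`
	Args    []string `json:"args,omitempty"`
}

type Pipeline struct {
	Name  string `json:"name"`
	Steps []Step `json:"steps"`
}

func loadPipeline(path string) (*Pipeline, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	var p Pipeline
	if err := json.Unmarshal(data, &p); err != nil {
		return nil, err
	}
	return &p, nil
}

func main() {
	p, err := loadPipeline("pipeline.json")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Printf("loaded pipeline %q with %d steps\n", p.Name, len(p.Steps))
}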

readme: command line usage to specify the pipeline file name
readme updated to include reasoning behind the project
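For the command line usage noted above, a hedged sketch of how the pipeline file name could be passed in; the -pipeline flag name and its default are assumptions, not the app's documented interface.

package main

import (
	"flag"
	"fmt"
)

func main() {
	// Hypothetical flag; the real app may use a different flag name
	// or a positional argument to point at the JSON pipeline file.
	pipelineFile := flag.String("pipeline", "pipeline.json", "path to the JSON pipeline definition")
	flag.Parse()
	fmt.Println("using pipeline file:", *pipelineFile)
}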

use native golang sqlite
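"Native golang sqlite" most likely means a cgo-free driver; the sketch below assumes modernc.org/sqlite, which registers itself under the driver name "sqlite" — the project may use a different package.

package main

import (
	"database/sql"
	"log"

	_ "modernc.org/sqlite" // pure-Go SQLite driver (assumed choice; no cgo needed)
)

func main() {
	db, err := sql.Open("sqlite", "app.db")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Simple smoke test: create a table and confirm the connection works.
	if _, err := db.Exec(`CREATE TABLE IF NOT EXISTS runs (id INTEGER PRIMARY KEY, name TEXT)`); err != nil {
		log.Fatal(err)
	}
}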

RunScriptCommand named in functionMap
removed unused functions
removed unused functions
run script and pipeline example
renamed functions to drop the word 'script' and use 'pipeline' instead
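A sketch of how a functionMap keyed by step command could dispatch to handlers such as RunScriptCommand; the handler signature and the map keys are assumptions based only on the commit message.

package main

import "fmt"

// RunScriptCommand is named in the commit message; its real signature is not
// shown there, so this one is assumed for illustration.
func RunScriptCommand(args []string) error {
	fmt.Println("running script step with args:", args)
	return nil
}

// functionMap maps a pipeline step's command name to the function that handles it.
var functionMap = map[string]func([]string) error{
	"run_script": RunScriptCommand,
}

func runStep(command string, args []string) error {
	fn, ok := functionMap[command]
	if !ok {
		return fmt.Errorf("unknown pipeline step command: %q", command)
	}
	return fn(args)
}

func main() {
	if err := runStep("run_script", []string{"scripts/init-data-ctl.sh"}); err != nil {
		fmt.Println("pipeline step failed:", err)
	}
}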
jon brookes 2025-07-09 13:19:43 +01:00 committed by jon brookes
parent bd7cee720a
commit 924954d0ff
49 changed files with 2059 additions and 101 deletions

scripts/init-data-ctl.sh (new file, 42 lines)

@@ -0,0 +1,42 @@
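# Populates /var/www/{public,storage,database} from tarballs held in $S3_BUCKET
# when the target directories are empty; all output is appended to $LOG_FILE.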
LOG_FILE="/var/log/init-data.log"
mkdir -p /var/log
echo "env variables" | tee -a "$LOG_FILE"
env | tee -a "$LOG_FILE"
ls -lirt /var/www/public | tee -a "$LOG_FILE"
mkdir -p /var/www/{public,storage,database}
# Function to log errors and continue
log_error() {
echo "[ERROR] $1" | tee -a "$LOG_FILE"
}
# Check if public directory is empty
if [ -z "$(find /var/www/public -type f -o -type d -not -name "lost+found" -not -path "/var/www/public" 2>/dev/null)" ]; then
echo "Public directory is empty, copying data from S3..." | tee -a "$LOG_FILE"
aws s3 cp "$S3_BUCKET/assets/public.tar" /var/www/public/ 2>>"$LOG_FILE" || log_error "Failed to copy public data from S3"
else
echo "Public directory already has data, skipping S3 copy..." | tee -a "$LOG_FILE"
fi
# Check if storage directory is empty
if [ -z "$(find /var/www/storage -type f -o -type d -not -name "lost+found" -not -path "/var/www/storage" 2>/dev/null)" ]; then
echo "Storage directory is empty, copying data from S3..." | tee -a "$LOG_FILE"
aws s3 cp "$S3_BUCKET/assets/storage.tar" /var/www/storage/ 2>>"$LOG_FILE" || log_error "Failed to copy storage data from S3"
else
echo "Storage directory already has data, skipping S3 copy..." | tee -a "$LOG_FILE"
fi
# Check if database directory is empty
if [ -z "$(ls -A /var/www/database 2>/dev/null)" ]; then
echo "Database directory is empty, copying data from S3..." | tee -a "$LOG_FILE"
aws s3 cp "$S3_BUCKET/assets/database.tar" /var/www/database/ 2>>"$LOG_FILE" || log_error "Failed to copy database data from S3"
else
echo "Database directory already has data, skipping S3 copy..." | tee -a "$LOG_FILE"
fi
echo "Script completed. Check $LOG_FILE for details."