- Implemented a comprehensive health check utility to monitor system dependencies, including NocoDB, SMTP, the Represent API, disk space, and memory usage.
- Created a logger utility using Winston for structured logging with daily rotation and multiple log levels.
- Developed a metrics utility using the Prometheus client to track application performance metrics such as email sends, HTTP requests, and user activity.
- Added a backup script for automated backups of NocoDB data, uploaded files, and environment configurations, with optional S3 support.
- Introduced a toggle script to switch between development (MailHog) and production (ProtonMail) SMTP configurations.
#!/bin/bash
##############################################################################
# Influence App Backup Script
# Automated backup for NocoDB data, uploaded files, and configurations
##############################################################################

set -e # Exit on error

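# Usage sketch (invocation and paths below are assumptions; adjust for your
# deployment):
#   ./scripts/backup.sh
#   BACKUP_ENCRYPTION_KEY=secret USE_S3_BACKUP=true ./scripts/backup.sh
# A possible crontab entry for a nightly run at 02:00:
#   0 2 * * * /opt/influence/scripts/backup.sh >> /var/log/influence-backup.log 2>&1
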
# Configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
APP_DIR="$(dirname "$SCRIPT_DIR")"
BACKUP_DIR="${BACKUP_BASE_DIR:-$APP_DIR/backups}"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
BACKUP_NAME="influence_backup_${TIMESTAMP}"
BACKUP_PATH="$BACKUP_DIR/$BACKUP_NAME"

# Retention settings
RETENTION_DAYS=${BACKUP_RETENTION_DAYS:-30}

# S3/External storage settings (optional)
USE_S3=${USE_S3_BACKUP:-false}
S3_BUCKET=${S3_BACKUP_BUCKET:-""}
S3_PREFIX=${S3_BACKUP_PREFIX:-"influence-backups"}

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Logging functions
log_info() {
    echo -e "${GREEN}[INFO]${NC} $(date '+%Y-%m-%d %H:%M:%S') - $1"
}

log_warn() {
    echo -e "${YELLOW}[WARN]${NC} $(date '+%Y-%m-%d %H:%M:%S') - $1"
}

log_error() {
    echo -e "${RED}[ERROR]${NC} $(date '+%Y-%m-%d %H:%M:%S') - $1"
}

# Check if required commands exist
check_dependencies() {
    local missing_deps=()

    # Note: curl and openssl are also used (for the NocoDB export and .env
    # encryption) when those features are enabled
    for cmd in tar gzip du; do
        if ! command -v "$cmd" &> /dev/null; then
            missing_deps+=("$cmd")
        fi
    done

    if [ "$USE_S3" = "true" ]; then
        if ! command -v aws &> /dev/null; then
            missing_deps+=(aws-cli)
        fi
    fi

    if [ ${#missing_deps[@]} -ne 0 ]; then
        log_error "Missing dependencies: ${missing_deps[*]}"
        exit 1
    fi
}

# Create backup directory
create_backup_dir() {
    log_info "Creating backup directory: $BACKUP_PATH"
    mkdir -p "$BACKUP_PATH"
}

# Backup uploaded files
backup_uploads() {
    log_info "Backing up uploaded files..."

    local uploads_dir="$APP_DIR/public/uploads"

    if [ -d "$uploads_dir" ]; then
        # Declare and assign separately so the command's exit status isn't masked
        local size
        size=$(du -sh "$uploads_dir" | cut -f1)
        log_info "Uploads directory size: $size"

        tar -czf "$BACKUP_PATH/uploads.tar.gz" -C "$APP_DIR/public" uploads
        log_info "Uploads backed up successfully"
    else
        log_warn "Uploads directory not found, skipping"
    fi
}

# Backup environment configuration (encrypted)
backup_env_config() {
    log_info "Backing up environment configuration..."

    local env_file="$APP_DIR/.env"

    if [ -f "$env_file" ]; then
        # Copy .env file (will be encrypted later)
        cp "$env_file" "$BACKUP_PATH/.env"

        # Encrypt if encryption key is provided
        if [ -n "${BACKUP_ENCRYPTION_KEY}" ]; then
            log_info "Encrypting environment file..."
            openssl enc -aes-256-cbc -salt -pbkdf2 \
                -in "$BACKUP_PATH/.env" \
                -out "$BACKUP_PATH/.env.encrypted" \
                -k "${BACKUP_ENCRYPTION_KEY}"
            rm "$BACKUP_PATH/.env" # Remove unencrypted version
            log_info "Environment file encrypted"
        else
            log_warn "BACKUP_ENCRYPTION_KEY not set, .env file not encrypted"
        fi
    else
        log_warn ".env file not found, skipping"
    fi
}

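# Restore sketch: the matching decrypt call for the file above would be
# (assuming the same BACKUP_ENCRYPTION_KEY used at backup time):
#   openssl enc -aes-256-cbc -d -salt -pbkdf2 \
#     -in .env.encrypted -out .env -k "$BACKUP_ENCRYPTION_KEY"
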
# Backup NocoDB data (if accessible)
backup_nocodb() {
    log_info "Checking NocoDB backup capability..."

    # Load environment variables (assumes .env contains valid shell assignments)
    if [ -f "$APP_DIR/.env" ]; then
        source "$APP_DIR/.env"
    fi

    # If NocoDB is accessible via API, export data
    if [ -n "$NOCODB_API_URL" ] && [ -n "$NOCODB_API_TOKEN" ]; then
        log_info "Exporting NocoDB metadata..."

        # Export project metadata
        curl -s -H "xc-token: $NOCODB_API_TOKEN" \
            "$NOCODB_API_URL/api/v1/db/meta/projects" \
            -o "$BACKUP_PATH/nocodb_projects.json" || log_warn "Failed to export NocoDB projects"

        log_info "NocoDB metadata exported"
    else
        log_warn "NocoDB credentials not available, skipping data export"
    fi
}

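# Note: the export above covers project metadata only. The table data itself
# lives in NocoDB's backing database; a full dump would need something like
# (a sketch, assuming a Postgres backend and a hypothetical NOCODB_DB_URL
# connection string):
#   pg_dump "$NOCODB_DB_URL" | gzip > "$BACKUP_PATH/nocodb_db.sql.gz"
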
# Backup logs
backup_logs() {
    log_info "Backing up log files..."

    local logs_dir="$APP_DIR/logs"

    if [ -d "$logs_dir" ]; then
        # Only back up logs from the last 7 days; skip if none exist, since
        # tar refuses to create an empty archive (which would trip set -e)
        if [ -n "$(find "$logs_dir" -name "*.log" -mtime -7 -print -quit)" ]; then
            find "$logs_dir" -name "*.log" -mtime -7 -print0 | \
                tar -czf "$BACKUP_PATH/logs.tar.gz" --null -T -
            log_info "Logs backed up successfully"
        else
            log_warn "No log files from the last 7 days, skipping"
        fi
    else
        log_warn "Logs directory not found, skipping"
    fi
}

# Create backup manifest
create_manifest() {
    log_info "Creating backup manifest..."

    cat > "$BACKUP_PATH/manifest.txt" << EOF
Influence App Backup
====================
Backup Date: $(date)
Backup Name: $BACKUP_NAME
Server: $(hostname)

Contents:
$(ls -lh "$BACKUP_PATH")

Total Size: $(du -sh "$BACKUP_PATH" | cut -f1)
EOF

    log_info "Manifest created"
}

# Compress entire backup
compress_backup() {
    log_info "Compressing backup..."

    cd "$BACKUP_DIR"
    tar -czf "${BACKUP_NAME}.tar.gz" "$BACKUP_NAME"

    # Remove uncompressed directory
    rm -rf "$BACKUP_NAME"

    local size
    size=$(du -sh "${BACKUP_NAME}.tar.gz" | cut -f1)
    log_info "Backup compressed: ${BACKUP_NAME}.tar.gz ($size)"
}

# Upload to S3 (if enabled)
upload_to_s3() {
    if [ "$USE_S3" != "true" ]; then
        return 0
    fi

    if [ -z "$S3_BUCKET" ]; then
        log_error "S3_BUCKET not set, cannot upload to S3"
        return 1
    fi

    log_info "Uploading backup to S3..."

    # Test the command directly rather than checking $? afterwards, which is
    # fragile under set -e
    if aws s3 cp "${BACKUP_DIR}/${BACKUP_NAME}.tar.gz" \
        "s3://${S3_BUCKET}/${S3_PREFIX}/${BACKUP_NAME}.tar.gz" \
        --storage-class STANDARD_IA; then
        log_info "Backup uploaded to S3 successfully"

        # Optionally remove local backup after successful S3 upload
        if [ "${REMOVE_LOCAL_AFTER_S3:-false}" = "true" ]; then
            log_info "Removing local backup after S3 upload"
            rm "${BACKUP_DIR}/${BACKUP_NAME}.tar.gz"
        fi
    else
        log_error "Failed to upload backup to S3"
        return 1
    fi
}

# Clean old backups
cleanup_old_backups() {
    log_info "Cleaning up backups older than $RETENTION_DAYS days..."

    # Clean local backups
    find "$BACKUP_DIR" -name "influence_backup_*.tar.gz" -mtime +"$RETENTION_DAYS" -delete

    # Clean S3 backups (if enabled)
    if [ "$USE_S3" = "true" ] && [ -n "$S3_BUCKET" ]; then
        log_info "Cleaning old S3 backups..."

        # Requires AWS CLI with proper permissions and GNU date (for `date -d`)
        aws s3 ls "s3://${S3_BUCKET}/${S3_PREFIX}/" | \
        while read -r line; do
            createDate=$(echo "$line" | awk '{print $1" "$2}')
            createDateSec=$(date -d "$createDate" +%s)
            olderThan=$(date -d "-${RETENTION_DAYS} days" +%s)

            if [ "$createDateSec" -lt "$olderThan" ]; then
                fileName=$(echo "$line" | awk '{print $4}')
                if [ -n "$fileName" ]; then
                    aws s3 rm "s3://${S3_BUCKET}/${S3_PREFIX}/${fileName}"
                    log_info "Deleted old S3 backup: $fileName"
                fi
            fi
        done
    fi

    log_info "Cleanup completed"
}

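# Note: an S3 lifecycle rule could replace the manual sweep above; a sketch
# (lifecycle.json is a hypothetical policy file expiring objects after
# RETENTION_DAYS days):
#   aws s3api put-bucket-lifecycle-configuration --bucket "$S3_BUCKET" \
#     --lifecycle-configuration file://lifecycle.json
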
# Verify backup integrity
verify_backup() {
    log_info "Verifying backup integrity..."

    if tar -tzf "${BACKUP_DIR}/${BACKUP_NAME}.tar.gz" > /dev/null 2>&1; then
        log_info "Backup integrity verified successfully"
        return 0
    else
        log_error "Backup integrity check failed!"
        return 1
    fi
}

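# Restore sketch (assumed workflow; paths are placeholders):
#   tar -xzf influence_backup_<timestamp>.tar.gz
#   tar -xzf influence_backup_<timestamp>/uploads.tar.gz -C /path/to/app/public
#   # then decrypt .env.encrypted as shown above and re-import NocoDB data
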
# Main backup process
main() {
    log_info "Starting Influence App backup..."
    log_info "Backup path: $BACKUP_PATH"

    # Check dependencies
    check_dependencies

    # Create backup directory
    create_backup_dir

    # Perform backups
    backup_uploads
    backup_env_config
    backup_nocodb
    backup_logs

    # Create manifest
    create_manifest

    # Compress backup
    compress_backup

    # Verify backup
    if ! verify_backup; then
        log_error "Backup verification failed, aborting"
        exit 1
    fi

    # Upload to S3
    upload_to_s3 || log_warn "S3 upload failed or skipped"

    # Cleanup old backups
    cleanup_old_backups

    log_info "Backup completed successfully: ${BACKUP_NAME}.tar.gz"

    # The local archive may already be gone if REMOVE_LOCAL_AFTER_S3 was set,
    # so only report its size if it still exists
    if [ -f "${BACKUP_DIR}/${BACKUP_NAME}.tar.gz" ]; then
        log_info "Backup size: $(du -sh "${BACKUP_DIR}/${BACKUP_NAME}.tar.gz" | cut -f1)"
    fi
}

# Run main function
main "$@"