#!/bin/bash
#
# Laravel Timebank Storage Backup Script
# Backs up storage directory with incremental rsync and compression
# Usage: ./backup-storage.sh [backup_type]
#   backup_type: daily (default), weekly, monthly, full

# Strict mode: exit on errors AND on failed pipeline stages. pipefail matters
# here because rsync/tar output is piped through `tee -a $LOG_FILE`, which
# would otherwise mask the producer's exit status.
# (set -u deliberately omitted: the mail notification falls back to $USER,
# which may legitimately be unset in cron environments.)
set -eo pipefail

# Configuration — all paths are derived from this script's own location so
# the script behaves the same regardless of the caller's working directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
STORAGE_DIR="$PROJECT_ROOT/storage"
BACKUP_ROOT_DIR="$PROJECT_ROOT/backups"
LOG_FILE="$BACKUP_ROOT_DIR/backup.log"

# Create the backup directory tree up front (brace expansion builds
# storage/{daily,weekly,monthly,snapshots} plus logs in one call).
mkdir -p "$BACKUP_ROOT_DIR"/{storage/{daily,weekly,monthly,snapshots},logs}
# Write a timestamped message to stdout and append it to the log file.
#   $1 - message text
# Globals (read): LOG_FILE
log() {
  local stamp
  stamp=$(date '+%Y-%m-%d %H:%M:%S')
  printf '%s\n' "[$stamp] $1" | tee -a "$LOG_FILE"
}
# Refuse to run when the Laravel storage directory does not exist.
if [[ ! -d "$STORAGE_DIR" ]]; then
    log "ERROR: Storage directory not found: $STORAGE_DIR"
    exit 1
fi

# Backup parameters: the type comes from argv (defaulting to "daily"); the
# timestamp keys every snapshot and archive name produced by this run.
BACKUP_TYPE="${1:-daily}"
TIMESTAMP="$(date '+%Y%m%d_%H%M%S')"
BACKUP_DIR="$BACKUP_ROOT_DIR/storage/$BACKUP_TYPE"
SNAPSHOT_DIR="$BACKUP_ROOT_DIR/storage/snapshots"

# Ensure both the per-type archive directory and the snapshot area exist.
mkdir -p "$BACKUP_DIR" "$SNAPSHOT_DIR"

log "Starting $BACKUP_TYPE storage backup"
# Rsync exclude patterns for Laravel storage
|
||||
RSYNC_EXCLUDES="
|
||||
--exclude=framework/cache/*
|
||||
--exclude=framework/sessions/*
|
||||
--exclude=framework/testing/*
|
||||
--exclude=framework/views/*
|
||||
--exclude=logs/*.log
|
||||
--exclude=debugbar/*
|
||||
--exclude=app/backup/*
|
||||
--exclude=*/livewire-tmp/*
|
||||
--exclude=*.tmp
|
||||
--exclude=.DS_Store
|
||||
--exclude=Thumbs.db
|
||||
"
|
||||
# Incremental backup: rsync the storage tree into a timestamped snapshot,
# hard-linking unchanged files against the newest previous snapshot of the
# same backup type, then tar the snapshot into the target directory.
#
# Arguments:
#   $1 - target directory for the compressed archive
#   $2 - snapshot name (e.g. daily_20240101_120000)
# Globals (read): STORAGE_DIR, SNAPSHOT_DIR, BACKUP_TYPE, RSYNC_EXCLUDES, LOG_FILE
# Returns: 0 on success, 1 if rsync/tar fail or the archive is missing/empty
incremental_backup() {
    local target_dir="$1"
    local snapshot_name="$2"

    log "Performing incremental backup to $target_dir"

    local current_snapshot="$SNAPSHOT_DIR/$snapshot_name"
    mkdir -p "$current_snapshot"

    # Locate the newest previous snapshot of this type. Declaration is split
    # from assignment so the command substitution's status is not masked by
    # `local` always returning 0.
    local latest_snapshot
    latest_snapshot=$(find "$SNAPSHOT_DIR" -maxdepth 1 -type d -name "${BACKUP_TYPE}_*" | sort -r | head -n 1)

    # Build the optional --link-dest argument as an array so a snapshot path
    # containing spaces survives intact (the old scalar broke on word-split).
    local -a link_dest_option=()
    if [ -n "$latest_snapshot" ] && [ "$latest_snapshot" != "$current_snapshot" ]; then
        link_dest_option=(--link-dest="$latest_snapshot")
    fi

    # shellcheck disable=SC2086 — RSYNC_EXCLUDES is intentionally unquoted so
    # it word-splits into one --exclude option per line.
    rsync -av \
        --delete \
        $RSYNC_EXCLUDES \
        "${link_dest_option[@]}" \
        "$STORAGE_DIR/" \
        "$current_snapshot/" \
        2>&1 | tee -a "$LOG_FILE"
    # tee masks rsync's exit status; check the rsync stage explicitly.
    # (Note: rsync exit 24 "files vanished" is also treated as failure here.)
    local rsync_status=${PIPESTATUS[0]}
    if [ "$rsync_status" -ne 0 ]; then
        log "ERROR: rsync failed with status $rsync_status"
        return 1
    fi

    # Compress the snapshot for long-term storage.
    local archive_name="${snapshot_name}.tar.gz"
    local archive_path="$target_dir/$archive_name"

    log "Creating compressed archive: $archive_name"
    tar -czf "$archive_path" -C "$SNAPSHOT_DIR" "$snapshot_name" 2>&1 | tee -a "$LOG_FILE"
    local tar_status=${PIPESTATUS[0]}
    if [ "$tar_status" -ne 0 ]; then
        log "ERROR: tar failed with status $tar_status"
        return 1
    fi

    # Verify the archive exists and is non-empty before trusting it.
    if [ -f "$archive_path" ] && [ -s "$archive_path" ]; then
        local archive_size
        archive_size=$(du -h "$archive_path" | cut -f1)
        log "Archive created successfully: $archive_path ($archive_size)"

        # Snapshots only need to live long enough to serve as the next run's
        # --link-dest base; keep the 3 most recent daily snapshots.
        if [ "$BACKUP_TYPE" = "daily" ]; then
            find "$SNAPSHOT_DIR" -maxdepth 1 -type d -name "daily_*" | sort -r | tail -n +4 | xargs -r rm -rf
        fi

        return 0
    else
        log "ERROR: Archive creation failed or archive is empty"
        return 1
    fi
}
# Full backup: stage a cleaned copy of the storage tree in a temp directory,
# compress it into the target directory, then remove the staging copy.
#
# Arguments:
#   $1 - target directory for the compressed archive
# Globals (read): STORAGE_DIR, TIMESTAMP, RSYNC_EXCLUDES, LOG_FILE
# Returns: 0 on success, 1 on rsync/tar failure or a missing/empty archive
full_backup() {
    local target_dir="$1"
    local backup_name="storage_full_${TIMESTAMP}"
    local backup_path="$target_dir/${backup_name}.tar.gz"

    log "Performing full storage backup"

    # Stage into an unpredictable mktemp directory instead of the old
    # /tmp/..._$$ name, which is guessable and racy on shared hosts.
    local temp_dir
    temp_dir=$(mktemp -d "${TMPDIR:-/tmp}/timebank_storage_backup.XXXXXX") || {
        log "ERROR: could not create temporary staging directory"
        return 1
    }

    # shellcheck disable=SC2086 — RSYNC_EXCLUDES is intentionally word-split.
    rsync -av $RSYNC_EXCLUDES "$STORAGE_DIR/" "$temp_dir/" 2>&1 | tee -a "$LOG_FILE"
    # tee masks rsync's exit status; check the rsync stage explicitly.
    local rsync_status=${PIPESTATUS[0]}
    if [ "$rsync_status" -ne 0 ]; then
        log "ERROR: rsync failed with status $rsync_status"
        rm -rf "$temp_dir"
        return 1
    fi

    log "Creating compressed archive: $backup_name.tar.gz"
    # -C into the staging dir's parent so the archive keeps a single
    # top-level directory, as before (its name was already run-specific).
    tar -czf "$backup_path" -C "$(dirname "$temp_dir")" "$(basename "$temp_dir")" 2>&1 | tee -a "$LOG_FILE"
    local tar_status=${PIPESTATUS[0]}

    # Cleanup happens on every path, including tar failure.
    rm -rf "$temp_dir"

    # Verify the archive before reporting success.
    if [ "$tar_status" -eq 0 ] && [ -f "$backup_path" ] && [ -s "$backup_path" ]; then
        local archive_size
        archive_size=$(du -h "$backup_path" | cut -f1)
        log "Full backup completed: $backup_path ($archive_size)"
        return 0
    else
        log "ERROR: Full backup failed"
        return 1
    fi
}
# Dispatch on the requested backup type. The three rotating types share the
# incremental code path; only the snapshot-name prefix differs.
case "$BACKUP_TYPE" in
    daily|weekly|monthly)
        incremental_backup "$BACKUP_DIR" "${BACKUP_TYPE}_${TIMESTAMP}"
        ;;
    full)
        full_backup "$BACKUP_DIR"
        ;;
    *)
        log "ERROR: Invalid backup type: $BACKUP_TYPE (valid: daily, weekly, monthly, full)"
        exit 1
        ;;
esac
# Delete all but the newest $2 archive files in $1 matching glob $3.
# Relies on the timestamped naming scheme, so lexical order == age order.
#
# Arguments:
#   $1 - directory holding the archives
#   $2 - number of most-recent archives to keep
#   $3 - filename glob passed to find -name
# Globals (read): BACKUP_TYPE (log message only)
cleanup_old_backups() {
    local backup_dir="$1"
    local keep_count="$2"
    local pattern="$3"
    local old_backup

    log "Cleaning up old $BACKUP_TYPE storage backups (keeping $keep_count most recent)"

    # Sort newest-first; everything past the first $keep_count entries goes.
    while read -r old_backup; do
        if [ -f "$old_backup" ]; then
            rm "$old_backup"
            log "Removed old backup: $(basename "$old_backup")"
        fi
    done < <(find "$backup_dir" -name "$pattern" -type f | sort -r | tail -n +$((keep_count + 1)))
}
# Retention policy: map the backup type to (archives to keep, archive glob),
# then apply it with a single cleanup call.
keep_count=""
archive_pattern=""
case "$BACKUP_TYPE" in
    daily)   keep_count=7  archive_pattern="daily_*.tar.gz" ;;
    weekly)  keep_count=4  archive_pattern="weekly_*.tar.gz" ;;
    monthly) keep_count=12 archive_pattern="monthly_*.tar.gz" ;;
    full)    keep_count=2  archive_pattern="storage_full_*.tar.gz" ;;
esac
# Guarded for completeness; an unknown type already exited at dispatch time.
if [ -n "$keep_count" ]; then
    cleanup_old_backups "$BACKUP_DIR" "$keep_count" "$archive_pattern"
fi
# Generate a storage usage report: one line per archive with its size, then
# the grand total. Iterating the full paths emitted by find fixes the old
# version, which stored a literal glob in a variable (globs do not expand in
# assignments) and then expanded it unquoted — failing with "[: too many
# arguments" whenever the same basename existed under more than one
# subdirectory.
log "Storage backup summary:"
if [ -d "$BACKUP_ROOT_DIR/storage" ]; then
    find "$BACKUP_ROOT_DIR/storage" -name "*.tar.gz" -type f | sort | while read -r backup_path; do
        size=$(du -h "$backup_path" | cut -f1)
        log "  $(basename "$backup_path") ($size)"
    done

    total_size=$(du -sh "$BACKUP_ROOT_DIR/storage" | cut -f1)
    log "Total storage backups size: $total_size"
fi
log "$BACKUP_TYPE storage backup completed successfully"

# Optional email notification — skipped quietly when mail(1) is absent, and
# delivery failures never affect the script's exit status.
if command -v mail >/dev/null 2>&1; then
    printf 'Storage backup completed successfully at %s\n' "$(date)" |
        mail -s "Timebank Storage Backup Success" "${BACKUP_NOTIFY_EMAIL:-$USER@localhost}" 2>/dev/null || true
fi