From c2f5d4a30260fb8f97725d71d2a00f8f381f446c Mon Sep 17 00:00:00 2001
From: Nikola Petrov
Date: Wed, 3 Dec 2025 23:28:59 +0100
Subject: [PATCH] Add auto_copy_backup.sh for daily and weekly backup rotation

---
 backup/auto_copy_backup.sh | 32 ++++++++++++++++++++++++++++++++
 1 file changed, 32 insertions(+)
 create mode 100644 backup/auto_copy_backup.sh

diff --git a/backup/auto_copy_backup.sh b/backup/auto_copy_backup.sh
new file mode 100644
index 0000000..2d505f2
--- /dev/null
+++ b/backup/auto_copy_backup.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+# Define the backup directory on your local machine
+BACKUP_DIR="/path/to/your/backups"
+WEEKLY_DIR="$BACKUP_DIR/weekly"
+
+# Run the backup script on the remote server, copy the archive locally, then delete it remotely
+ssh server /root/backup/script.sh
+scp server:/root/backup/backup_$(date +"%Y%m%d").tar "$BACKUP_DIR"
+ssh server rm /root/backup/backup_$(date +"%Y%m%d").tar
+
+# Keep only the daily backups from the last 7 days
+find "$BACKUP_DIR" -maxdepth 1 -name "backup_*.tar" -type f -mtime +6 -exec rm {} \;
+
+# Weekly backup: every Sunday, copy the daily backup into a weekly folder
+if [ "$(date +%u)" -eq 7 ]; then
+    mkdir -p "$WEEKLY_DIR"
+    cp "$BACKUP_DIR/backup_$(date +"%Y%m%d").tar" "$WEEKLY_DIR/weekly_$(date +"%Y%m%d").tar"
+
+    # Enforce a 100 GB limit for weekly backups
+    while true; do
+        TOTAL_SIZE=$(du -sk "$WEEKLY_DIR" | awk '{print $1}')
+        if [ "$TOTAL_SIZE" -le 104857600 ]; then # 100 GB in KB (100 * 1024 * 1024)
+            break
+        fi
+        # Find and remove the oldest weekly backup
+        OLDEST_FILE=$(ls -t "$WEEKLY_DIR" | tail -1)
+        # Stop if the folder is somehow empty, to avoid an infinite loop
+        [ -n "$OLDEST_FILE" ] || break
+        rm "$WEEKLY_DIR/$OLDEST_FILE"
+    done
+fi
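
Note (not part of the patch): the daily/weekly rotation above assumes the
script is run once per day on the local machine. A minimal sketch of a
crontab entry that would do that; the 02:00 run time and install path are
assumptions, not taken from this patch:

    # m h dom mon dow  command
    0 2 * * * /path/to/your/backup/auto_copy_backup.sh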