#!/usr/bin/bash
###############################################################################
# do-daily-btrfs-snapshot.sh                                                  #
###############################################################################
# This script is my attempt at replicating the same function that timeshift  #
# performs. This is a very simplified script to handle automated btrfs       #
# snapshots via a daily cron entry.                                          #
#                                                                             #
# EX:                                                                         #
# ```                                                                         #
# 0 0 * * * /path/to/script                                                   #
# ```                                                                         #
#                                                                             #
# After putting it in your crontab, it will automatically handle making      #
# snapshots for your system and will only keep up to the limit you set       #
# with LIMIT.                                                                 #
#                                                                             #
# You will also have to configure your own subvols in the array declaration  #
# SUBVOLS, following the same format in the comments above it.                #
#                                                                             #
###############################################################################
# BEGIN: Variables
## Reusable vars
DATE=$(date +%Y-%m-%d)
### Getting the name of the script (as it will be the first thing passed to bash when executing)
SCRIPT=$(basename "$0")
BACKUP_DIR=/.backups
### Setting dir to house log files
LOG_DIR=/var/log/$SCRIPT
LOG_FILE=$LOG_DIR/$DATE.log
## The maximum number of backups to keep for each configured subvol
LIMIT=5
## The subvols that we want to back up
### <actual-directory>:<name-of-backup-dir>
declare -a SUBVOLS=(
/:root
/home:home
)
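### e.g. adding "/var:var" above would also snapshot /var into $BACKUP_DIR/var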
## Used to separate named manual backups from automatic ones
DATE_REGEX='[0-9]{4}-[0-9]{2}-[0-9]{2}'
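### e.g. it matches automatic names like "2024-06-06" but not manual ones like "pre-upgrade"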
# END: Variables
# BEGIN: Helper Functions
function handle_logs() {
cd "$LOG_DIR" || return
echo "Began Handling logs at: $(date)"
local ARCHIVE_FILE=archive.tar.gz
local ARCHIVE_FILE_UNZIPPED=${ARCHIVE_FILE%.gz}
if [[ -f $ARCHIVE_FILE ]]; then
## Decompressing archive in case it exists
gunzip $ARCHIVE_FILE 2>/dev/null
fi
## Getting all files
local FILES=( $( ls -1tr *.log ) )
if [[ ${#FILES[@]} -le $LIMIT ]]; then
echo "Only had ${#FILES[@]} logs, and did not exceed $LIMIT. Not handling logs"
return
fi
## Getting files we are keeping
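### NOTE: the space in ${FILES[@]: -$LIMIT} matters: without it bash would parse ":-"
### as the use-default-value expansion instead of a negative-offset slice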
local FILES_TO_KEEP=${FILES[@]: -$LIMIT}
## Creating REGEX filter
FILES_TO_KEEP=${FILES_TO_KEEP//[[:space:]]/|}
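### e.g. FILES_TO_KEEP becomes "2024-06-02.log|2024-06-03.log|2024-06-04.log" for use as a grep alternation below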
## Filtering out logs to keep
local FILES_TO_ARCHIVE=( $( ls -1 *.log | grep -Ev "(${FILES_TO_KEEP})" ) )
echo "Adding archived logs to archive"
## Updating archive
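## tar's "u" (--update) mode only appends files that are newer than any copy already in the archive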
tar uvf "$ARCHIVE_FILE_UNZIPPED" "${FILES_TO_ARCHIVE[@]}"
## Compressing Archive
echo "Compressing Archive"
gzip "$ARCHIVE_FILE_UNZIPPED"
## Removing archived logs
echo "Removing archived files"
rm -vf "${FILES_TO_ARCHIVE[@]}"
echo "Finished Handling logs at: $(date)"
}
# END: Helper Functions
# BEGIN: Pre-Work Checks
## This will check to make sure that the log directory has been created, if not it will create it
[[ ! -d $LOG_DIR ]] && {
mkdir -p "$LOG_DIR"
echo "$LOG_DIR did not exist. Creating"
}
## Setting up the script to direct all output to the log file for this snapshot session
## (the directory has to exist before the redirect, or the exec would fail)
exec > "$LOG_FILE"
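## NOTE: only stdout is redirected; anything written to stderr will still reach cron's mail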
# END: Pre-Work Checks
# BEGIN: Work
echo "Beginning backup at `date`"
2023-11-02 16:44:20 +00:00
for SUBVOL_INFO in "${SUBVOLS[@]}"; do
## Stripping the delimited info out of the subvol entry
## (the IFS assignment is scoped to the read, so the global value is untouched)
IFS=':' read -r DIR NAME <<< "$SUBVOL_INFO"
echo "Subvol : $DIR, Name: $NAME"
SUBVOL_BACKUP_DIR=$BACKUP_DIR/$NAME
## This will handle creating the backup directory as well as the specific snapshot directory as well
[[ ! -d $SUBVOL_BACKUP_DIR ]] && mkdir -p "$SUBVOL_BACKUP_DIR"
## Implemented it this way so that it's less complex to handle when there are more backups than we are
## configured to keep. Definitely a rather stupid way to implement it, but it works and requires less logic to handle
while true; do
## So that any manual ones I do (named pre-upgrade, etc.) will remain, and it will only delete the
## automatic timestamped ones
declare -a backups=( $( ls -1 "$SUBVOL_BACKUP_DIR" | grep -Eo "$DATE_REGEX" ) )
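## ls sorts names lexicographically, and YYYY-MM-DD timestamps sort chronologically,
## so backups[0] is always the oldest snapshot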
echo "${#backups[@]} backups were found the backup dir $SUBVOL_BACKUP_DIR"
if [[ ${#backups[@]} -ge $LIMIT ]]; then
SNAPSHOT_PATH="$SUBVOL_BACKUP_DIR/${backups[0]}"
echo "${#backups[@]} was found to be equal to or greater than $LIMIT. Deleting $SNAPSHOT_PATH."
/usr/sbin/btrfs subvol del "$SNAPSHOT_PATH"
else
break
fi
## Unsetting the backups list so that it can be reloaded again
unset backups SNAPSHOT_PATH
done
SNAPSHOT=$SUBVOL_BACKUP_DIR/$DATE
## If the snapshot doesn't already exist, then create a new read-only snapshot
if [[ ! -d $SNAPSHOT ]]; then
/usr/sbin/btrfs subvol snapshot -r "$DIR" "$SNAPSHOT"
else
echo "$SNAPSHOT already existed. Not doing a snapshot"
fi
done
handle_logs
echo "Finishing backup at `date`"
# END: Work
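
## EX: a manual snapshot like the one below never matches DATE_REGEX, so the
## cleanup loop above will leave it alone:
##   /usr/sbin/btrfs subvol snapshot -r / /.backups/root/pre-upgrade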