You can prevent data loss by automatically backing up your MongoDB database with mongodump, optimize storage space by cleaning up old backups, schedule backups with a cron job, and restore from a backup archive with mongorestore.
The script stores its backups in the /opt/mongoyedek folder. If there are multiple backups, it keeps the newest 2 and deletes the others. Create the script file:

sudo vi mongodump.sh
#!/bin/bash
# Back up a MongoDB instance into a gzip'd archive and prune old backups,
# keeping only the KEEP_COUNT newest archives in BACKUP_DIR.
set -euo pipefail

IP_ADDRESS="{Ip}"
USERNAME="{Username}"
# NOTE(review): passing the password on the command line leaks it via `ps`;
# consider a mongodump config file or --password from stdin in production.
PASSWORD="{Password}"
PORT="25080"
BACKUP_DIR="/opt/mongoyedek"
KEEP_COUNT=2

# Create the backup folder on first run and hand it to the invoking user.
if [ ! -d "$BACKUP_DIR" ]; then
  sudo mkdir -p "$BACKUP_DIR"
  sudo chown "$USER:$USER" "$BACKUP_DIR"
fi

# Build a unique name per day: apinizer-backup-YYYY-MM-DD-<n>.archive.
# The index grows until we find a name not already taken today.
DATE=$(date +%F)
BASE_NAME="apinizer-backup-${DATE}"
index=0
while [ -e "${BACKUP_DIR}/${BASE_NAME}-${index}.archive" ]; do
  index=$((index + 1))
done
BACKUP_PATH="${BACKUP_DIR}/${BASE_NAME}-${index}.archive"

# Take the new backup FIRST; under `set -e` a mongodump failure aborts the
# script here, so existing backups are never deleted before a good new one exists.
sudo mongodump --host "$IP_ADDRESS" --port="$PORT" \
  --username="$USERNAME" --password="$PASSWORD" \
  --authenticationDatabase=admin --gzip --archive="$BACKUP_PATH"
echo "Backup completed: $BACKUP_PATH"

# Prune: keep the KEEP_COUNT newest archives (including the one just made),
# delete the rest. Archive names are generated above and contain no
# whitespace, so sorting them with `ls -1t` is safe here.
ls -1t "$BACKUP_DIR"/*.archive | tail -n +"$((KEEP_COUNT + 1))" | \
  while IFS= read -r old_backup; do
    echo "Deleting: $old_backup"
    sudo rm -f -- "$old_backup"
  done
# Make the script executable, then run it once manually to verify it works.
sudo chmod +x mongodump.sh
./mongodump.sh
# Open root's crontab to schedule the backup.
sudo crontab -e
# Cron entry: runs at 23:59 on the 1st day of every month.
# Replace /path with the real location of mongodump.sh; adjust the schedule as needed.
59 23 1 * * /path/mongodump.sh