The following script is from the internet; copyright belongs to the original author. It is recommended to run the backup automatically at night using a cron scheduled task.
#!/bin/bash
# Nightly MySQL backup: dump one database to a gzipped, timestamped archive
# and retain only the most recent $number archives (intended for cron).

# Number of backups to retain (7 days of daily backups)
number=7
# Backup save path
backup_dir=/var/erp/data/mysql
# Single timestamp, reused for the archive name so name and run time agree
dd=$(date +%Y-%m-%d-%H.%M.%S)
# Backup tool
dumper=mysqldump
# User name
username=root
# Password
# NOTE(review): a password on the command line is visible in `ps` output;
# prefer ~/.my.cnf or --defaults-extra-file in production.
password=netmarch\$shroot
# Database to be backed up
database_name=netmarcherp

# Create the backup folder if it does not exist
if [ ! -d "$backup_dir" ]; then
  mkdir -p "$backup_dir"
fi

# Backup file name (date-stamped, so lexicographic order == chronological order)
dest_archive=$backup_dir/$dd.sql.gz

# Dump and compress in one pipeline (the original's trailing `| cat >` was a
# useless use of cat and has been removed).
"$dumper" -u "$username" -p"$password" --opt --default-character-set=utf8 \
  -e --triggers -R --hex-blob --flush-logs -x "$database_name" \
  | gzip -c > "$dest_archive"

# Write create-backup log.
# Bug fix: the original logged "$backup_dir/$dest_archive", doubling the
# directory prefix because $dest_archive already contains $backup_dir.
echo "create $dest_archive" >> "$backup_dir/log.txt"

# Collect existing archives via globbing instead of parsing `ls -l` columns.
# The glob expands in lexicographic order, which is chronological here
# because the file names are date-stamped.
backups=( "$backup_dir"/*.sql.gz )
count=${#backups[@]}

# Delete the earliest backup once the retention count is exceeded
if [ "$count" -gt "$number" ]; then
  delfile=${backups[0]}
  rm -f -- "$delfile"
  # Write delete-file log
  echo "delete $delfile" >> "$backup_dir/log.txt"
fi
mysqldump other optional parameters
-q quick export: streams rows directly to stdout one at a time instead of buffering the entire result set in memory.
Other usage:
Using mysqldump to quickly export tables whose names match a wildcard pattern:
# mysqldump ecos $(mysql -D ecos -Bse "SHOW TABLES LIKE 'tbl_user_%'") > /data/tbl_user_alltables.sql
mysqldump --host=192.168.80.137 -uroot -proot -C --databases test |mysql --host=192.168.80.133 -uroot -proot test
Add the -C parameter to enable compressed transfer between the client and the server.