#!/cygdrive/d/cygwin/bin/bash
# Nightly backup script: pull data from the remote host with rsync, then
# build rotated tar archives of the configs, logs, web content, and data.

PATH=$PATH:/cygdrive/d/cygwin/bin

DATE=`d:/cygwin/bin/date +%A`

# Archive destinations
daily=/cygdrive/f/backup/daily.$DATE.tar.bz2
current=/cygdrive/f/backup/current.tar
current_config=/cygdrive/f/backup/current_config.tar
current_logs=/cygdrive/f/backup/current_logs.tar.bz2
current_web=/cygdrive/f/backup/current_web.tar.bz2

# Remote directories to be mirrored locally
cyg=d:/cygwin/bin
www=/var/www
named=/var/named
etc=/etc
mail=/var/spool/mqueue
mail1=/var/spool/mail
home=/home
sucks_logs=/home/elgreen/logs
all_logs=/var/log
mysql=/var/lib/mysql

exclude=/cygdrive/f/ex_file
logfile=backup.log
stdlog=dl.log

all="$mail $mail1 $home $mysql"
logs="$all_logs"
configs="$etc $named"

cd /cygdrive/f
echo >>$logfile
echo >>$logfile
echo `date` >>$logfile
echo "Script starting" >>$logfile
pwd
#d:/cygwin/bin/cp -f daily.$DATE.tar.bz2 daily.$DATE.tar.bz2.1
#d:/cygwin/bin/cp -f current.tar current.tar.1

# Mirror each remote directory into the current directory over ssh
for i in $www $named $etc $mail $mail1 $home $all_logs $mysql; do
    d:/cygwin/bin/rsync -avvc --recursive --delete-after --progress \
        --rsh='d:/cygwin/bin/ssh -l root -i /cygdrive/d/cygwin/home/pat/.ssh/id_dsa' \
        root@ssh.rwcinc.net:$i .
done
# Trying this addition to see if it will speed up the download process

# Rotate the config backup if it's there: shift the old compressed copy aside
# and compress last run's tar before a fresh one is created below.
if [ -f $current_config ]; then
    echo "Found config file - moving" >>$logfile
    mv -f $current_config.bz2 $current_config.bz2.1
    bzip2 $current_config
fi
filename1=`basename $etc`
filename2=`basename $named`
configs="$filename1 $filename2"
echo "Creating new config file" >>$logfile
tar -cf $current_config $configs

# Create the log backup if it doesn't exist; otherwise rotate it and rebuild
echo "Checking for logs" >>$logfile
if [ -f $current_logs ]; then
    echo "Moving $current_logs to $current_logs.1" >>$logfile
#   bzip2 $current_logs
    mv -f $current_logs $current_logs.1
    echo "Creating new log file" >>$logfile
    filename=`basename $logs`
    tar -cjf $current_logs $filename
else
    echo "Log file missing - creating new" >>$logfile
    filename=`basename $logs`
    tar -cjf $current_logs $filename
fi

# Rotate and rebuild the web backup
echo "Checking for web backup" >>$logfile
filename=`basename $www`
if [ -f $current_web ]; then
    echo "Moving web backup - creating new web file" >>$logfile
    mv $current_web $current_web.1
else
    echo "No web backup found - new www backup" >>$logfile
fi
tar -cjf $current_web -X $exclude $filename

# Rotate the daily archive if it exists - this should give me a total of
# 14 days of backups, if it works
echo "Daily log section" >>$logfile
if [ -f $daily ]; then
    echo "Moving the $daily file" >>$logfile
    mv -f $daily $daily.1
fi

# Create the daily tar file from the mirrored data directories
filename1=`basename $mail`
filename2=`basename $mail1`
filename3=`basename $home`
filename4=`basename $mysql`
all="$filename1 $filename2 $filename3 $filename4"
echo "Creating new $daily file" >>$logfile
tar -cjf $daily -X $exclude $all

# Rotate the current tar file - should give me two running backups
if [ -f $current ]; then
    mv -f $current $current.1
fi

# Create a tar file that should always be the current one
echo "Creating the current backup" >>$logfile
tar -cf $current -X $exclude $all

echo "Just about to run the sync..." >>$logfile
sleep 10
# Back to the original script
/cygdrive/d/Tax_transfer/temp/backup_dlscript
echo "Exiting" >>$logfile
exit 0