Re: shell script for automated backups

Attachments:
+ ex_file (text/plain)
+ backup (text/plain)
+ dlscript (text/plain)
Author: Patrick Fleming, EA
Date:  
To: plug-discuss
Subject: Re: shell script for automated backups

Mike,

You asked for it ;)
They're a little ugly, with comments and such, but since they work I
haven't gone back to clean them up.

Note that there are two scripts: the server-side script and the client
(backup machine) script. The server-side script is named backup and uses
the attached ex_file as its tar exclude list. The client-side script is
named dlscript and runs on a Windows machine under Cygwin.
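
For scheduling, something like the cron entry below would run the
server-side script nightly. The path and time are only an example (the
exclude file lives under /etc/cron.backup, so the script could sit there
too) - adjust to wherever you actually put it:

# root crontab entry (crontab -e as root) - example path and time only
30 2 * * * /etc/cron.backup/backup >> /var/backup/backup.cron.log 2>&1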

mike hoy wrote:
> Patrick,
>
> Yes I would be grateful for any examples you could provide. Of course
> don't want you to go out of your way or anything.
>
> Thanks,
>
> Mike H


/var/www/rwcinc/download
/var/www/horde
/var/lib/mysql/mysql.sock
/home/pat
/home/elgreen/logs
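
ex_file above is the list tar gets via -X, so anything under those paths is
left out of the archives it's applied to. A quick check after a run - this
should print nothing if the excludes took (the archive path is just the
default from the backup script below):

tar -tjf /var/backup/current_web.tar.bz2 | grep rwcinc/download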

#!/bin/bash
# This script is designed to make daily backups of important files.
# It makes a backup of the files, which are then available for download to a
# backup machine. The idea is that it rotates the daily archives so I end up
# with seven mostly-current backups, one per weekday.

#Set the daily extension (weekday name, e.g. Monday, so each day's file is reused a week later)
extension=`date +%A`
lockfile=/var/backup/$extension.logs
#Name the different backups
daily=/var/backup/daily.$extension.tar.bz2
current=/var/backup/current.tar
current_config=/var/backup/current_config.tar
current_logs=/var/backup/current_logs.tar.bz2
current_web=/var/backup/current_web.tar.bz2

#Set up the directories that get backed up
www=/var/www
named=/var/named
etc=/etc
mail=/var/spool/mqueue
mail1=/var/spool/mail
home=/home
sucks_logs=/home/elgreen/logs
all_logs=/var/log
mysql=/var/lib/mysql
exclude=/etc/cron.backup/ex_file

all="$mail $mail1 $home $mysql"
logs="$all_logs"
configs="$etc $named"

#Changed this from ext3 to just a regular mount to see if the mount problem will go away

#mount -t ext3 -o rw /dev/hdb7 /var/backup
touch $lockfile

#Rotate the config backup if it's there, then create a fresh one
if [ -f $current_config ]; then
    bzip2 $current_config
    mv -f $current_config.bz2 $current_config.bz2.1
fi
tar -cf $current_config $configs
#Rotate the log backup if it exists, then create a new one
if [ -f $current_logs ]; then
   echo "Moving $current_logs to $current_logs.1"
   mv -f $current_logs $current_logs.1
   echo "Creating new log file"
   tar -cjf $current_logs $logs
else
   echo "Log file missing- creating new"
   tar -cjf $current_logs $logs
fi
#Rotate the web backup if it exists, then create a new one
if [ ! -f $current_web ]; then
   tar -cjf $current_web -X $exclude $www
else
   mv -f $current_web $current_web.1
   tar -cjf $current_web -X $exclude $www
fi

# Rotate logs if they exist- this should give me a total of 14 days backup- 
# if it works
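# (date +%A gives 7 weekday names, each kept as daily.DAY and daily.DAY.1 = 14 archives)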
if [ -f $daily ]; then
  mv -f $daily $daily.1
fi
#create the tar file
tar -cjf $daily -X $exclude $all 
#Rotate the current tar file- should give me two running backups
if [ -f $current ]; then
    mv -f $current $current.1
fi
#Create a tar file that should always be the current one...
tar -cf  $current -X $exclude $all 


rm -f $lockfile
#umount /var/backup
exit 0
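
One note on restoring from these: tar drops the leading / when it builds the
archives, so the members are stored as relative paths (home/pat/...,
var/lib/mysql/...). Something along these lines pulls one tree back out into
a scratch directory - the paths are just the defaults from the script above:

mkdir -p /tmp/restore
tar -xjf /var/backup/daily.`date +%A`.tar.bz2 -C /tmp/restore home/pat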



#!/cygdrive/d/cygwin/bin/bash
PATH=$PATH:/cygdrive/d/cygwin/bin
DATE=`d:/cygwin/bin/date +%A`
daily=/cygdrive/f/backup/daily.$DATE.tar.bz2
current=/cygdrive/f/backup/current.tar
current_config=/cygdrive/f/backup/current_config.tar
current_logs=/cygdrive/f/backup/current_logs.tar.bz2
current_web=/cygdrive/f/backup/current_web.tar.bz2
cyg=d:/cygwin/bin
www=/var/www
named=/var/named
etc=/etc
mail=/var/spool/mqueue
mail1=/var/spool/mail
home=/home
sucks_logs=/home/elgreen/logs
all_logs=/var/log
mysql=/var/lib/mysql
exclude=/cygdrive/f/ex_file
logfile=backup.log
stdlog=dl.log

all="$mail $mail1 $home $mysql"
logs="$all_logs"
configs="$etc $named"
cd /cygdrive/f
echo >>$logfile
echo >>$logfile
echo `date`>>$logfile
echo Script starting>>$logfile

pwd
#d:/cygwin/bin/cp -f daily.$DATE.tar.bz2 daily.$DATE.tar.bz2.1
#d:/cygwin/bin/cp -f current.tar current.tar.1
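# Pull each tree down from the server with rsync over ssh: -a archive, -vv extra
# verbose, -c compare by checksum, --delete-after removes local files that are
# gone on the server, and --rsh points at the Cygwin ssh with the root key.
# (No host appears in front of ":$i" here - presumably stripped for the post.)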

 for i in $www $named $etc $mail $mail1 $home $all_logs $mysql; do 
    d:/cygwin/bin/rsync -avvc --recursive --delete-after --progress --rsh='d:/cygwin/bin/ssh -l root -i /cygdrive/d/cygwin/home/pat/.ssh/id_dsa' :$i . ;done



#Trying this addition to see if it will speed up the download process

#Rotate the config backup if it's there
if [ -f $current_config ]; then
    echo Found config file- moving>>$logfile
    mv -f $current_config.bz2 $current_config.bz2.1
    bzip2 $current_config
fi
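# From here on the tar commands run against the local copies under /cygdrive/f,
# so the script switches to basenames (etc, named, log, ...) instead of the
# absolute server-side paths.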


filename1=`basename $etc`
filename2=`basename $named`
configs="$filename1 $filename2"
echo Creating new config file>>$logfile
tar -cf $current_config $configs
#Rotate the log backup if it exists, then create a new one
echo Checking for logs>>$logfile
if [ -f $current_logs ]; then
    echo "Moving $current_logs to $current_logs.1">>$logfile
    mv -f $current_logs $current_logs.1
    echo Creating new log file>>$logfile
    filename=`basename $logs`
    tar -cjf $current_logs $filename
else
    echo Log file missing- creating new>>$logfile
    filename=`basename $logs`
    tar -cjf $current_logs $filename
fi
echo Checking for web backup>>$logfile
filename=`basename $www`
if [ -f $current_web ]; then
    echo Moving web backup- creating new web file>>$logfile
    mv $current_web $current_web.1
else
    echo No web backup found- new www backup>>$logfile
fi
tar -cjf $current_web -X $exclude $filename


# Rotate logs if they exist- this should give me a total of 14 days backup-
# if it works
echo Daily log section>>$logfile

if [ -f $daily ]; then
    echo Moving the $daily file>>$logfile
    mv -f $daily $daily.1
fi
#create the tar file
filename1=`basename $mail`
filename2=`basename $mail1`
filename3=`basename $home`
filename4=`basename $mysql`
all="$filename1 $filename2 $filename3 $filename4"

echo Creating new $daily file>>$logfile
tar -cjf $daily -X $exclude $all

#Rotate the current tar file- should give me two running backups
if [ -f $current ]; then
    mv -f $current $current.1
fi

#Create a tar file that should always be the current one...
echo Creating the current backup>>$logfile
tar -cf $current -X $exclude $all

echo Just about to run the sync...>>$logfile
sleep 10;
#back to original script

/cygdrive/d/Tax_transfer/temp/backup_dlscript
echo Exiting >>$logfile


exit 0
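
On the Windows side, dlscript needs to be run through the Cygwin bash; a
Scheduled Task (or a shortcut) pointed at something like the line below
should do it - the script path is just a placeholder for wherever you save
it:

d:\cygwin\bin\bash.exe --login /cygdrive/f/dlscript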