From fb2aaeae0f2db379c91b087a8d997ca8a39c3a14 Mon Sep 17 00:00:00 2001 From: Ryan Tucker Date: Fri, 10 Jun 2011 20:55:59 -0400 Subject: [PATCH 1/3] make the backup-manager.py --help more useful --- backup-manager.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/backup-manager.py b/backup-manager.py index 09d94e7..10f509a 100755 --- a/backup-manager.py +++ b/backup-manager.py @@ -150,7 +150,15 @@ def make_restore_script(backup, expire=86400): def main(): # check command line options - parser = optparse.OptionParser(usage="usage: %prog [options] list/delete/script") + parser = optparse.OptionParser( + usage="usage: %prog [options] [list|delete|script]", + description="" + + "Companion maintenance script for BackupPC_archiveHost_s3. " + + "By default, it assumes the 'list' command, which displays all " + + "of the backups currently archived on S3. The 'delete' command " + + "is used to delete backups. The 'script' command produces a " + + "script that can be used to download and restore a backup." + ) parser.add_option("-H", "--host", dest="host", help="Name of backed-up host") parser.add_option("-b", "--backup-number", dest="backupnum", From 96e8cf12e436cfaf6959bbf6f93f4bc498bbcb76 Mon Sep 17 00:00:00 2001 From: Ryan Tucker Date: Fri, 10 Jun 2011 21:04:08 -0400 Subject: [PATCH 2/3] add something that looks like documentation --- COPYRIGHT | 19 +++++++ README.markdown | 130 ++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 149 insertions(+) create mode 100644 COPYRIGHT create mode 100644 README.markdown diff --git a/COPYRIGHT b/COPYRIGHT new file mode 100644 index 0000000..29a90c6 --- /dev/null +++ b/COPYRIGHT @@ -0,0 +1,19 @@ +Copyright (c) 2009-2011 Ryan S. Tucker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/README.markdown b/README.markdown new file mode 100644 index 0000000..def0adb --- /dev/null +++ b/README.markdown @@ -0,0 +1,130 @@ +BackupPC_archiveHost_s3 +======================= + +This is a Python script that acts as an interface between +[BackupPC](http://backuppc.sourceforge.net/) and +[Amazon S3](http://aws.amazon.com/s3/). It uses BackupPC's +[archive function](http://backuppc.sourceforge.net/faq/BackupPC.html#archive_functions) +to extract a tarball and split it into chunks, like the normal archive +function. Then, the chunks are encrypted using gpg and transmitted to +S3 using [Boto](http://code.google.com/p/boto/). + +Installation +------------ + +I wrote this script some years ago, and can't remember how to get it going. 
+But, here's going to be my best guess :-) + +1. Install prerequisites + + You will need Python, [Boto](http://code.google.com/p/boto/), and a + working BackupPC installation. + +2. Download and install this script + + Something like this seems like a good idea: + + cd /usr/local/src/ + git clone git://github.com/rtucker/backuppc-archive-s3.git + + Then create a link from `/usr/share/backuppc/bin/` to here: + + ln -s /usr/local/src/backuppc-archive-s3/BackupPC_archiveHost_s3 /usr/share/backuppc/bin/ + +3. Configure this script + + Create a file in this directory called `secrets.py`, based upon the + `secrets.py.orig` file. It should have your AWS Access and Shared keys, + a passphrase that will be used to encrypt the tarballs, and, optionally, + a path to a file that contains a maximum upload rate in kilobits per + second: + + accesskey = 'ASDIASDVINASDVASsvblahblah' + sharedkey = '889rv98rv8fmasmvasdvsdvasdv' + gpgsymmetrickey = 'hunter2' + speedfile = '/var/cache/speedlimit.txt' + + If you use the `speedfile` option, you can change this on the fly to + limit upstream bandwidth usage during peak hours, etc. + +4. Configure BackupPC + + From the BackupPC configuration interface, go to `Edit Hosts` and add a + new host, `archiveS3`, which looks like the existing `archive` host. + Save this, select the `archives3` host, and then `Edit Config` for that + host. + + Change the settings on each tab as follows: + + Xfer: + XferMethod: archive + ArchiveDest: /var/lib/backuppc/archives3 + ArchiveComp: bzip2 + ArchiveSplit: 500 + ArchiveClientCmd: $Installdir/bin/BackupPC_archiveHost_s3 $tarCreatePath $splitpath $parpath $host $backupnumber $compression $compext $splitsize $archiveloc $parfile * + + Backup Settings: + ClientTimeout: 720000 + + That should be just about it. Note that `ArchiveDest` is where it will + stage the tarballs before it uploads them; this must have enough disk + space for your archive! `ArchiveSplit` is the size of each tar file, + in megabytes; you may want to adjust this for your needs. Also, the + `ArchiveClientCmd` is the default, except with the `_s3` added. + +5. Use it + + Go to the main page for the `archives3` host and click `Start Archive`. + To start with, just tick the box next to the smallest backup you have, + then `Archive selected hosts`. Go with the defaults (which look + suspiciously like what you set on the Xfer tab, do they not? :-) and + then `Start the Archive`. + + Watch syslog and hopefully everything will work. + +backup-manager.py +----------------- + +There is a companion script, `backup-manager.py`, that can be used to see +what's on S3. Run it with no arguments to get a listing of backups and +their ages, or use the `--help` argument to see what it can do. + +The "crown jewel" of this whole system is the `script` command, which +produces a script that can be used to restore a backup. It uses S3's +[Query String Request Authentication](http://docs.amazonwebservices.com/AmazonS3/latest/dev/index.html?RESTAuthentication.html#RESTAuthenticationQueryStringAuth) +mechanism to generate temporary URLs to download each file required to +restore a backup. + +Each night, from `cron`, I run a script: + + #!/bin/sh + BACKUPMGR=/path/to/backup-manager.py + + # Delete all backups older than 30 days. 
+ $BACKUPMGR delete --age=30 + + # Create restore scripts, valid for one week, for all of my computers + cd /home/rtucker/Dropbox/RestoreScripts/ + $BACKUPMGR --expire=604800 --host=gandalf script > restore_gandalf.sh + $BACKUPMGR --expire=604800 --host=witte script > restore_witte.sh + # etc, etc + + # Output a list of what's on the server + $BACKUPMGR + +The output of this is mailed to me, so I always know what's going on! + +FAQs +---- +* BackupPC is written in Perl. Why is this thing written in Python? + + I know Python much better than I know Perl, so I wrote it in Python. + The good news is that BackupPC doesn't care, but it does mean this + probably won't be part of the BackupPC main distribution any time soon. + +* Is this project dead? + + You could say that. A lot of [my projects](https://github.com/rtucker/) + are one-off scripts that solve a very specific need I have, and I don't + put too much thought into making them useful for other people. This + script works for me and (sorta) meets my needs, so that's where it is. From c3730f8719ca03b3a6c1485004f52894c649acb5 Mon Sep 17 00:00:00 2001 From: Ryan Tucker Date: Fri, 10 Jun 2011 21:14:53 -0400 Subject: [PATCH 3/3] markdown formatting fixes --- README.markdown | 138 ++++++++++++++++++++++++------------------------ 1 file changed, 69 insertions(+), 69 deletions(-) diff --git a/README.markdown b/README.markdown index def0adb..18dda90 100644 --- a/README.markdown +++ b/README.markdown @@ -15,72 +15,72 @@ Installation I wrote this script some years ago, and can't remember how to get it going. But, here's going to be my best guess :-) -1. Install prerequisites +### Install the prerequisites - You will need Python, [Boto](http://code.google.com/p/boto/), and a - working BackupPC installation. +> You will need Python, [Boto](http://code.google.com/p/boto/), and a +> working BackupPC installation. -2. Download and install this script +### Download and install this script - Something like this seems like a good idea: +> Something like this seems like a good idea: +> +> cd /usr/local/src/ +> git clone git://github.com/rtucker/backuppc-archive-s3.git +> +> Then create a link from `/usr/share/backuppc/bin/` to here: +> +> ln -s /usr/local/src/backuppc-archive-s3/BackupPC_archiveHost_s3 /usr/share/backuppc/bin/ - cd /usr/local/src/ - git clone git://github.com/rtucker/backuppc-archive-s3.git +### Configure this script - Then create a link from `/usr/share/backuppc/bin/` to here: +> Create a file in this directory called `secrets.py`, based upon the +> `secrets.py.orig` file. It should have your AWS Access and Shared keys, +> a passphrase that will be used to encrypt the tarballs, and, optionally, +> a path to a file that contains a maximum upload rate in kilobits per +> second: +> +> accesskey = 'ASDIASDVINASDVASsvblahblah' +> sharedkey = '889rv98rv8fmasmvasdvsdvasdv' +> gpgsymmetrickey = 'hunter2' +> speedfile = '/var/cache/speedlimit.txt' +> +> If you use the `speedfile` option, you can change this on the fly to +> limit upstream bandwidth usage during peak hours, etc. + +### Configure BackupPC - ln -s /usr/local/src/backuppc-archive-s3/BackupPC_archiveHost_s3 /usr/share/backuppc/bin/ +> From the BackupPC configuration interface, go to `Edit Hosts` and add a +> new host, `archiveS3`, which looks like the existing `archive` host. +> Save this, select the `archives3` host, and then `Edit Config` for that +> host. 
+> +> Change the settings on each tab as follows: +> +>> #### Xfer +>> XferMethod: archive +>> ArchiveDest: /var/lib/backuppc/archives3 +>> ArchiveComp: bzip2 +>> ArchiveSplit: 500 +>> ArchiveClientCmd: $Installdir/bin/BackupPC_archiveHost_s3 $tarCreatePath $splitpath $parpath $host $backupnumber $compression $compext $splitsize $archiveloc $parfile * +>> +>> #### Backup Settings +>> ClientTimeout: 720000 +> +> That should be just about it. Note that `ArchiveDest` is where it will +> stage the tarballs before it uploads them; this must have enough disk +> space for your archive! `ArchiveSplit` is the size of each tar file, +> in megabytes; you may want to adjust this for your needs. Also, the +> `ArchiveClientCmd` is the default, except with the `_s3` added. -3. Configure this script +### Use it - Create a file in this directory called `secrets.py`, based upon the - `secrets.py.orig` file. It should have your AWS Access and Shared keys, - a passphrase that will be used to encrypt the tarballs, and, optionally, - a path to a file that contains a maximum upload rate in kilobits per - second: - - accesskey = 'ASDIASDVINASDVASsvblahblah' - sharedkey = '889rv98rv8fmasmvasdvsdvasdv' - gpgsymmetrickey = 'hunter2' - speedfile = '/var/cache/speedlimit.txt' - - If you use the `speedfile` option, you can change this on the fly to - limit upstream bandwidth usage during peak hours, etc. - -4. Configure BackupPC - - From the BackupPC configuration interface, go to `Edit Hosts` and add a - new host, `archiveS3`, which looks like the existing `archive` host. - Save this, select the `archives3` host, and then `Edit Config` for that - host. - - Change the settings on each tab as follows: - - Xfer: - XferMethod: archive - ArchiveDest: /var/lib/backuppc/archives3 - ArchiveComp: bzip2 - ArchiveSplit: 500 - ArchiveClientCmd: $Installdir/bin/BackupPC_archiveHost_s3 $tarCreatePath $splitpath $parpath $host $backupnumber $compression $compext $splitsize $archiveloc $parfile * - - Backup Settings: - ClientTimeout: 720000 - - That should be just about it. Note that `ArchiveDest` is where it will - stage the tarballs before it uploads them; this must have enough disk - space for your archive! `ArchiveSplit` is the size of each tar file, - in megabytes; you may want to adjust this for your needs. Also, the - `ArchiveClientCmd` is the default, except with the `_s3` added. - -5. Use it - - Go to the main page for the `archives3` host and click `Start Archive`. - To start with, just tick the box next to the smallest backup you have, - then `Archive selected hosts`. Go with the defaults (which look - suspiciously like what you set on the Xfer tab, do they not? :-) and - then `Start the Archive`. - - Watch syslog and hopefully everything will work. +> Go to the main page for the `archives3` host and click `Start Archive`. +> To start with, just tick the box next to the smallest backup you have, +> then `Archive selected hosts`. Go with the defaults (which look +> suspiciously like what you set on the Xfer tab, do they not? :-) and +> then `Start the Archive`. +> +> Watch syslog and hopefully everything will work. backup-manager.py ----------------- @@ -97,20 +97,20 @@ restore a backup. Each night, from `cron`, I run a script: - #!/bin/sh - BACKUPMGR=/path/to/backup-manager.py + #!/bin/sh + BACKUPMGR=/path/to/backup-manager.py - # Delete all backups older than 30 days. - $BACKUPMGR delete --age=30 + # Delete all backups older than 30 days. 
+ $BACKUPMGR delete --age=30 - # Create restore scripts, valid for one week, for all of my computers - cd /home/rtucker/Dropbox/RestoreScripts/ - $BACKUPMGR --expire=604800 --host=gandalf script > restore_gandalf.sh - $BACKUPMGR --expire=604800 --host=witte script > restore_witte.sh - # etc, etc + # Create restore scripts, valid for one week, for all of my computers + cd /home/rtucker/Dropbox/RestoreScripts/ + $BACKUPMGR --expire=604800 --host=gandalf script > restore_gandalf.sh + $BACKUPMGR --expire=604800 --host=witte script > restore_witte.sh + # etc, etc - # Output a list of what's on the server - $BACKUPMGR + # Output a list of what's on the server + $BACKUPMGR The output of this is mailed to me, so I always know what's going on!
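
Appendix: illustrative sketches
-------------------------------

The patches above describe the flow: BackupPC's archive function hands the
script a split tarball, each chunk is encrypted with gpg, and the encrypted
chunks are pushed to S3 with Boto.  The sketches in this appendix are rough
illustrations only, written in the Python 2 style the project already uses
and reusing the names from the `secrets.py` described in step 3.  Helper
names, gpg options, and the bucket/key layout are assumptions, not the real
`BackupPC_archiveHost_s3` code.

    # Rough per-chunk sketch: encrypt one split chunk with gpg, then upload it.
    # encrypt_chunk/upload_chunk and the bucket naming are hypothetical.
    import os
    import subprocess

    import boto

    from secrets import accesskey, sharedkey, gpgsymmetrickey

    def encrypt_chunk(path, passphrase=gpgsymmetrickey):
        """Symmetrically encrypt one chunk; returns the new .gpg filename."""
        encrypted = path + '.gpg'
        gpg = subprocess.Popen(['gpg', '--batch', '--yes', '--symmetric',
                                '--passphrase-fd', '0',
                                '--output', encrypted, path],
                               stdin=subprocess.PIPE)
        gpg.communicate(passphrase + '\n')
        if gpg.returncode != 0:
            raise RuntimeError('gpg failed on %s' % path)
        return encrypted

    def upload_chunk(path, bucket_name):
        """Send one encrypted chunk to S3."""
        conn = boto.connect_s3(accesskey, sharedkey)
        bucket = conn.lookup(bucket_name) or conn.create_bucket(bucket_name)
        key = bucket.new_key(os.path.basename(path))
        key.set_contents_from_filename(path)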
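
Step 3's optional `speedfile` is just a plain-text file containing a number
(kilobits per second) that can be rewritten at any time, for example from
cron, to throttle uploads during the day.  A reader along the lines of the
hypothetical helper below is all that is conceptually involved; the real
script's handling may differ.

    # Hypothetical speedfile reader: upload cap in kbit/s, 0 means unlimited.
    def read_speedlimit(path='/var/cache/speedlimit.txt'):
        try:
            with open(path) as f:
                return max(0, int(f.read().strip()))
        except (IOError, ValueError):
            # A missing or malformed file simply means "no limit".
            return 0

    limit = read_speedlimit()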
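
The default `list` behaviour that the improved `--help` text describes,
showing everything currently archived on S3, boils down to walking buckets
and keys with Boto.  A minimal stand-alone sketch follows; the real script's
output format and its grouping by host and backup number are different.

    # Minimal listing sketch; the printed format here is made up.
    import boto

    from secrets import accesskey, sharedkey

    conn = boto.connect_s3(accesskey, sharedkey)
    for bucket in conn.get_all_buckets():
        for key in bucket.list():
            print('%s/%s\t%s bytes\t%s' % (bucket.name, key.name,
                                           key.size, key.last_modified))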
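
The `script` command's temporary download links come from S3's Query String
Request Authentication, which Boto exposes as `generate_url`.  Here is a
minimal sketch of producing one such link; the bucket and key names are
placeholders, and `expire` is in seconds (the default of 86400 matches
`make_restore_script`, and the cron example above passes 604800 for a week).

    # Sketch: one pre-signed GET URL for a single archived chunk.
    import boto

    from secrets import accesskey, sharedkey

    def temporary_url(bucket_name, key_name, expire=86400):
        """Return a URL that stops working after `expire` seconds."""
        conn = boto.connect_s3(accesskey, sharedkey)
        # query_auth=True embeds AWSAccessKeyId, Expires and Signature in the
        # query string, so the downloader needs no AWS credentials of their own.
        return conn.generate_url(expire, 'GET', bucket=bucket_name,
                                 key=key_name, query_auth=True)

    # Placeholder bucket/key names, purely for illustration:
    print(temporary_url('example-bucket', 'gandalf.0.tar.bz2.gpg.00000', 604800))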
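
Finally, the nightly `delete --age=30` run clears out anything more than 30
days old.  The real command works on whole backups per host rather than raw
keys; the hypothetical helper below only shows the underlying S3 side of an
age check.

    # Hypothetical age-based cleanup: delete keys whose last_modified is too old.
    import datetime

    import boto

    from secrets import accesskey, sharedkey

    def delete_older_than(bucket_name, days):
        cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=days)
        conn = boto.connect_s3(accesskey, sharedkey)
        for key in conn.get_bucket(bucket_name).list():
            # Boto reports last_modified like '2011-06-10T20:55:59.000Z'.
            stamp = datetime.datetime.strptime(key.last_modified,
                                               '%Y-%m-%dT%H:%M:%S.%fZ')
            if stamp < cutoff:
                key.delete()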