From c823bc37d6b50dcd4e183963922da829a193f0d7 Mon Sep 17 00:00:00 2001
From: Marc Weber
Date: Sun, 1 Sep 2013 02:22:01 +0200
Subject: [PATCH] backup tools bup, storeBackup: implement sanity check

bup:
- update
- run make test (all tests seem to pass :-)
- add python.modules.readline
- add comment that there is no way to prune old revisions (yet)
---
 pkgs/tools/backup/bup/default.nix          |  45 ++++++++-
 pkgs/tools/backup/store-backup/default.nix |  73 +++++---------
 pkgs/tools/backup/test-case.nix            | 105 +++++++++++++++++++++
 3 files changed, 170 insertions(+), 53 deletions(-)
 create mode 100644 pkgs/tools/backup/test-case.nix

diff --git a/pkgs/tools/backup/bup/default.nix b/pkgs/tools/backup/bup/default.nix
index e683df5a7fe1..63356e5f7c1b 100644
--- a/pkgs/tools/backup/bup/default.nix
+++ b/pkgs/tools/backup/bup/default.nix
@@ -1,6 +1,9 @@
 { stdenv, fetchgit, python, pyxattr, pylibacl, setuptools, fuse, git, perl, pandoc, makeWrapper
+, diffutils, writeTextFile, rsync
 , par2cmdline, par2Support ? false }:
 
+# keep in mind that there is no way to prune old revisions yet! (2013-06)
+
 assert par2Support -> par2cmdline != null;
 
 with stdenv.lib;
@@ -10,12 +13,14 @@ stdenv.mkDerivation {
 
   src = fetchgit {
     url = "https://github.com/bup/bup.git";
-    rev = "96c6fa2a70425fff1e73d2e0945f8e242411ab58";
-    sha256 = "0d9hgyh1g5qcpdvnqv3a5zy67x79yx9qx557rxrnxyzqckp9v75n";
+    rev = "98a8e2ebb775386cb7e66b1953df46cdbd4b4bd3";
+    sha256 = "ab01c70f0caf993c0c05ec3a1008b5940b433bf2f7bd4e9b995d85e81958c1b7";
   };
 
   buildInputs = [ python git ];
-  nativeBuildInputs = [ pandoc perl makeWrapper ];
+  nativeBuildInputs = [ pandoc perl makeWrapper rsync ];
+
+  enableParallelBuilding = true;
 
   patchPhase = ''
     substituteInPlace Makefile --replace "-Werror" ""
@@ -24,6 +29,12 @@ stdenv.mkDerivation {
       substituteInPlace $f --replace "/usr/bin/env python" "${python}/bin/python"
     done
     substituteInPlace Makefile --replace "./format-subst.pl" "perl ./format-subst.pl"
+    for t in t/*.sh t/configure-sampledata t/compare-trees; do
+      substituteInPlace $t --replace "/usr/bin/env bash" "$(type -p bash)"
+    done
+    substituteInPlace wvtestrun --replace "/usr/bin/env perl" "${perl}/bin/perl"
+
+    substituteInPlace t/test.sh --replace "/bin/pwd" "$(type -P pwd)"
   '' + optionalString par2Support ''
     substituteInPlace cmd/fsck-cmd.py --replace "['par2'" "['${par2cmdline}/bin/par2'"
   '';
@@ -40,7 +51,33 @@ stdenv.mkDerivation {
   postInstall = optionalString (elem stdenv.system platforms.linux) ''
     wrapProgram $out/bin/bup --prefix PYTHONPATH : \
       ${stdenv.lib.concatStringsSep ":"
-      (map (path: "$(toPythonPath ${path})") [ pyxattr pylibacl setuptools fuse ])}
+      (map (path: "$(toPythonPath ${path})") [ pyxattr pylibacl setuptools fuse python.modules.readline ])}
+
+    ## test it
+    make test
+
+    # if make test passes, the following probably passes too
+    backup_init(){
+      export BUP_DIR=$TMP/bup
+      PATH=$out/bin:$PATH
+      bup init
+    }
+    backup_make(){
+      ( cd "$1"; tar -cvf - .) | bup split -n backup
+    }
+    backup_restore_latest(){
+      bup join backup | ( cd "$1"; tar -xf - )
+    }
+    backup_verify_integrity_latest(){
+      bup fsck
+    }
+    backup_verify_latest(){
+      # the closest equivalent would be to mount the repository or use the FTP-like server ..
+      true
+    }
+
+    . ${import ../test-case.nix { inherit diffutils writeTextFile; }}
+    backup_test backup 100M
   '';
 
   meta = {
diff --git a/pkgs/tools/backup/store-backup/default.nix b/pkgs/tools/backup/store-backup/default.nix
index e9b98fec7100..94493760df8c 100644
--- a/pkgs/tools/backup/store-backup/default.nix
+++ b/pkgs/tools/backup/store-backup/default.nix
@@ -1,4 +1,4 @@
-{stdenv, which, coreutils, perl, fetchurl, perlPackages, makeWrapper, diffutils , writeScriptBin, bzip2}:
+{stdenv, which, coreutils, perl, fetchurl, perlPackages, makeWrapper, diffutils , writeScriptBin, writeTextFile, bzip2}:
 
 # quick usage:
 # storeBackup.pl --sourceDir /home/user --backupDir /tmp/my_backup_destination
@@ -48,56 +48,31 @@ stdenv.mkDerivation {
 
     PATH=$PATH:${dummyMount}/bin
 
-    { # simple sanity test, test backup/restore of simple store paths
-
+    ## test it
+    backup_init(){
       mkdir backup
-
-      backupRestore(){
-        source="$2"
-        echo =========
-        echo RUNNING TEST "$1" source: "$source"
-        mkdir restored
-
-        $out/bin/storeBackup.pl --sourceDir "$source" --backupDir backup
-        latestBackup=backup/default/$(ls -1 backup/default | sort | tail -n 1)
-        $out/bin/storeBackupRecover.pl -b "$latestBackup" -t restored -r /
-        ${diffutils}/bin/diff -r "$source" restored
-
-        # storeBackupCheckSource should return 0
-        $out/bin/storeBackupCheckSource.pl -s "$source" -b "$latestBackup"
-        # storeBackupCheckSource should return not 0 when using different source
-        ! $out/bin/storeBackupCheckSource.pl -s $TMP -b "$latestBackup"
-
-        # storeBackupCheckBackup should return 0
-        $out/bin/storeBackupCheckBackup.pl -c "$latestBackup"
-
-        chmod -R +w restored
-        rm -fr restored
-      }
-
-      testDir=$TMP/testDir
-
-      mkdir $testDir
-      echo X > $testDir/X
-      ln -s ./X $testDir/Y
-
-      backupRestore 'test 1: backup, restore' $testDir
-
-      # test huge blocks, according to docs files bigger than 100MB get split
-      # into pieces
-      dd if=/dev/urandom bs=100M of=block-1 count=1
-      dd if=/dev/urandom bs=100M of=block-2 count=1
-      cat block-1 block-2 > $testDir/block
-      backupRestore 'test 1 with huge block' $testDir
-
-      cat block-2 block-1 > $testDir/block
-      backupRestore 'test 1 with huge block reversed' $testDir
-
-      backupRestore 'test 2: backup, restore' $out
-      backupRestore 'test 3: backup, restore' $out
-      backupRestore 'test 4: backup diffutils to same backup locations, restore' ${diffutils}
     }
-  '';
+    latestBackup(){
+      echo backup/default/$(ls -1 backup/default | sort | tail -n 1)
+    }
+    backup_make(){
+      # $1 = source directory
+      $out/bin/storeBackup.pl --sourceDir "$1" --backupDir "backup"
+    }
+    backup_restore_latest(){
+      $out/bin/storeBackupRecover.pl -b "$(latestBackup)" -t "$1" -r /
+    }
+
+    backup_verify_integrity_latest(){
+      $out/bin/storeBackupCheckBackup.pl -c "$(latestBackup)"
+    }
+    backup_verify_latest(){
+      $out/bin/storeBackupCheckSource.pl -s "$1" -b "$(latestBackup)"
+    }
+
+    . ${import ../test-case.nix { inherit diffutils writeTextFile; }}
+    backup_test backup 100M
+'';
 
   meta = {
     description = "Storebackup is a backup suite that stores files on other disks";
diff --git a/pkgs/tools/backup/test-case.nix b/pkgs/tools/backup/test-case.nix
new file mode 100644
index 000000000000..8b2044b17235
--- /dev/null
+++ b/pkgs/tools/backup/test-case.nix
@@ -0,0 +1,105 @@
+# While this test suite is not perfect, it at least provides some guarantee
+# that basic features work.
+
+/*
+  In order to use this suite you have to define the following functions:
+
+  backup_init
+  backup_make source
+  backup_restore_latest target
+  backup_verify_integrity_latest
+  backup_verify_latest source
+
+  Use `true` if a backup solution does not implement a feature.
+
+  TODO: add test cases for all backup solutions shipping with nixpkgs
+
+  This does not replace the test suites shipping with the backup solutions!
+*/
+
+{diffutils, writeTextFile}:
+
+writeTextFile {
+  name = "backup-test-case";
+  text = ''
+    backup_run_tests_on_source(){
+      local test="$1"
+      local source="$2"
+      local backup="$3"
+      echo =========
+      echo RUNNING TEST "$test" source: "$source"
+      mkdir restored
+
+      backup_make "$source" backup
+
+      { # verify that restoring works
+        backup_restore_latest restored
+        ${diffutils}/bin/diff -r "$source" restored
+        # diff follows symlinks instead of comparing them, so list the symlinks
+        # separately and compare the lists
+        ( cd "$source"; find . -type l) | sort > 1
+        ( cd restored; find . -type l) | sort > 2
+        diff 1 2
+      }
+
+      { # verify that the backup tool thinks so, too:
+        backup_verify_latest "$source" backup
+        # verification must fail when using a different source:
+        ! backup_verify_latest "$TMP" backup
+      }
+
+      backup_verify_integrity_latest backup
+
+      chmod -R +w restored
+      rm -fr restored
+    }
+
+    backup_test(){
+      set -x
+      # record the run time so that backup solutions can be compared
+      echo "START $(date)"
+
+      local backup="$1" block_size="$2"
+
+      backup_init
+
+      if [ -z "$SKIP_SYMLINK_TEST" ]; then
+        { # create the first test case's directory contents (a file plus a symlink)
+          testDir=$TMP/test-1a
+          mkdir $testDir
+          echo X > $testDir/X
+          ln -s ./X $testDir/Y
+        }
+
+        backup_run_tests_on_source 'test 1a: backup, restore' "$testDir" "$backup"
+      fi
+
+      if [ -z "$SKIP_EMPTY_DIR_TEST" ]; then
+        { # create the second test case's directory contents (an empty directory)
+          testDir=$TMP/test-1b
+          mkdir -p $testDir/empty-directory
+        }
+
+        backup_run_tests_on_source 'test 1b: backup, restore' "$testDir" "$backup"
+      fi
+
+      testDir=$TMP/test-huge-blocks
+      mkdir $testDir
+      # test huge blocks: according to the docs, files bigger than 100MB get
+      # split into pieces
+      dd if=/dev/urandom bs=1M of=block-0 count=20
+      dd if=/dev/urandom bs="$block_size" of=block-1 count=1
+      dd if=/dev/urandom bs="$block_size" of=block-2 count=1
+      cat block-0 block-0 block-0 block-1 block-2 block-0 block-0 block-0 > $testDir/block
+      backup_run_tests_on_source 'test 1 with huge block' $testDir
+
+      cat block-2 block-0 block-0 block-1 > $testDir/block
+      backup_run_tests_on_source 'test 1 with huge block reversed' $testDir
+
+      backup_run_tests_on_source 'test 2: backup, restore' $out
+      backup_run_tests_on_source 'test 3: backup, restore' $out
+      backup_run_tests_on_source 'test 4: backup diffutils to same backup locations, restore' ${diffutils}
+      echo "STOP $(date)"
+    }
+  '';
}
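
As a hypothetical illustration of the interface (not part of this patch): wiring a further backup tool into test-case.nix only requires defining the five hook functions before sourcing the suite. The sketch below uses plain tar/gzip as a stand-in backup tool; the derivation name, the use of installCheckPhase, and the ./test-case.nix path (assuming the file sits next to test-case.nix in pkgs/tools/backup/) are assumptions, not something this patch prescribes. The bup and storeBackup hooks above follow the same shape; only the five function bodies differ.

# Hypothetical sketch, not taken from this patch: run the suite from
# test-case.nix against plain tar/gzip standing in for a real backup tool.
{ stdenv, diffutils, writeTextFile }:

stdenv.mkDerivation {
  name = "backup-test-case-example";

  # nothing to unpack or build; tar, gzip and coreutils come with stdenv
  unpackPhase = "true";
  installPhase = "mkdir -p $out";

  doInstallCheck = true;
  installCheckPhase = ''
    backup_init(){
      mkdir backup
    }
    backup_make(){
      # $1 = source directory; each run writes one timestamped archive
      ( cd "$1"; tar -czf - . ) > "backup/$(date +%s%N).tar.gz"
    }
    latest_archive(){
      ls -1 backup | sort | tail -n 1
    }
    backup_restore_latest(){
      # $1 = target directory
      ( cd "$1"; tar -xzf - ) < "backup/$(latest_archive)"
    }
    backup_verify_integrity_latest(){
      gzip -t "backup/$(latest_archive)"
    }
    backup_verify_latest(){
      # plain tar has no source-verification command; the suite allows true here
      true
    }

    . ${import ./test-case.nix { inherit diffutils writeTextFile; }}
    backup_test backup 100M
  '';
}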