Mirror of https://github.com/NixOS/nixpkgs.git, synced 2025-01-22 14:45:27 +00:00.
Merge pull request #3037 from cstrahan/extra-fetchers
add nix-prefetch-zip
This commit is contained in:
commit
29a6f1cc78
153
pkgs/build-support/fetchzip/nix-prefetch-zip
Executable file
153
pkgs/build-support/fetchzip/nix-prefetch-zip
Executable file
|
@ -0,0 +1,153 @@
|
||||||
|
#!/usr/bin/env bash
# nix-prefetch-zip - download an archive, unpack it, and register the
# unpacked tree in the Nix store as a fixed-output path, printing the
# content hash on stdout.
#
# Bash (not plain sh) is required: the script uses `shopt -s dotglob`
# further down. `set -e` replaces the old `#! /bin/sh -e` so the option
# also survives being invoked as `bash nix-prefetch-zip`.
set -e

# Print usage information on stderr and exit with status 1.
usage(){
  echo >&2 "syntax: nix-prefetch-zip [OPTIONS] [URL [EXPECTED-HASH]]

Options:
      --url url          The url of the archive to fetch.
      --name name        The name to use for the store path (defaults to \`basename \$url\`).
      --hash hash        The hash of unpacked archive.
      --hash-type type   Use the specified cryptographic hash algorithm, which can be one of md5, sha1, and sha256.
      --leave-root       Keep the root directory of the archive.
      --help             Show this help text.
"
  exit 1
}
|
||||||
|
|
||||||
|
|
||||||
|
# Parse command-line flags and positional arguments into the global
# variables url, name, expHash, hashType and leaveRoot.
#
# Positional arguments follow the usage text: [URL [EXPECTED-HASH]].
# (Previously the second positional argument was stored in an unused
# `rev' variable inherited from nix-prefetch-git; for a zip fetcher it
# is the expected hash.)
parse_args() {
  argi=0
  argfun=""
  for arg; do
    if test -z "$argfun"; then
      case $arg in
        --url) argfun=set_url;;
        --name) argfun=set_name;;
        --hash) argfun=set_expHash;;
        --hash-type) argfun=set_hashType;;
        --leave-root) leaveRoot=true;;
        --help) usage;;
        *) argi=$(($argi + 1))
           case $argi in
             1) url=$arg;;
             2) expHash=$arg;;
             *) echo "Unexpected argument: $arg" >&2
                usage
                ;;
           esac
           ;;
      esac
    else
      # A flag was seen in the previous iteration; this argument is its
      # value. Assign directly instead of the old `sed` + `eval` dance.
      case $argfun in
        set_url) url=$arg;;
        set_name) name=$arg;;
        set_expHash) expHash=$arg;;
        set_hashType) hashType=$arg;;
      esac
      argfun=""
    fi
  done
}

parse_args "$@"
|
||||||
|
|
||||||
|
# Validate arguments and prepare the working area.

if [ -z "$url" ]; then
  echo "Error: No --url flag given" >&2
  usage
fi

# Default the store-path name to the last component of the URL.
if [ -z "$name" ]; then
  name=$(basename "$url")
fi

if test -z "$hashType"; then
  hashType=sha256
fi

# nix-hash output format.
hashFormat="--base32"

# Portable temp dir: GNU mktemp first, BSD mktemp (-t template) fallback.
tmp=$(mktemp -d 2>/dev/null || mktemp -d -t "$$")
# Single quotes: expand $tmp when the trap fires, and keep paths with
# spaces intact.
trap 'rm -rf "$tmp"' EXIT

# The archive is downloaded to $tmp/$name and unpacked under
# $tmp/unpacked/$name.
unpackDir=$tmp/unpacked/$name
mkdir -p "$unpackDir"
downloadedFile=$tmp/$name
|
||||||
|
|
||||||
|
# unpackFile ARCHIVE - unpack ARCHIVE into the current directory.
# The decompressor is chosen from the file extension; an unknown
# extension is a fatal error (exit 1).
unpackFile() {
  local curSrc="$1"

  case "$curSrc" in
    *.tar.xz | *.tar.lzma)
      # Don't rely on tar knowing about .xz.
      xz -d < "$curSrc" | tar xf -
      ;;
    *.tar | *.tar.* | *.tgz | *.tbz2)
      # GNU tar can automatically select the decompression method
      # (info "(tar) gzip").
      tar xf "$curSrc"
      ;;
    *.zip)
      unzip -qq "$curSrc"
      ;;
    *)
      echo "source archive $curSrc has unknown type" >&2
      exit 1
      ;;
  esac
}
|
||||||
|
|
||||||
|
# If the hash was given, a file with that hash may already be in the
# store.
if test -n "$expHash"; then
  finalPath=$(nix-store --print-fixed-path --recursive "$hashType" "$expHash" "$name")
  if ! nix-store --check-validity "$finalPath" 2> /dev/null; then
    finalPath=
  fi
  hash=$expHash
fi

# If we don't know the hash or a path with that hash doesn't exist,
# download the file and add it to the store.
if test -z "$finalPath"; then
  # NOTE(review): --insecure disables TLS certificate verification; this
  # is inherited behavior, kept for parity with the other prefetchers.
  curlCmd=( curl --location --max-redirs 20 --disable-epsv --insecure )

  if ! "${curlCmd[@]}" --fail "$url" --output "$downloadedFile"; then
    echo "error: could not download $url" >&2
    exit 1
  fi

  cd "$unpackDir"
  unpackFile "$downloadedFile"

  # Unless --leave-root was given, strip the archive's single top-level
  # directory so the store path contains its contents directly.
  # FIXME: handle zip files that contain a single regular file.
  if [ -z "$leaveRoot" ]; then
    # dotglob: include hidden entries; nullglob: an empty dir yields an
    # empty array instead of the literal pattern.
    shopt -s dotglob nullglob
    # Count entries with a glob instead of parsing `ls` output.
    entries=( "$unpackDir"/* )
    if [ "${#entries[@]}" != 1 ] || [ ! -d "${entries[0]}" ]; then
      # Diagnostics go to stderr: stdout must carry only the hash.
      echo "error: zip file must contain a single directory." >&2
      exit 1
    fi
    fn=${entries[0]##*/}
    mv "$unpackDir/$fn"/* "$unpackDir/"
    rmdir "$unpackDir/$fn"
  fi

  # Compute the hash.
  hash=$(nix-hash --type "$hashType" $hashFormat "$unpackDir")
  if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi

  # Add the unpacked tree to the Nix store.
  finalPath=$(nix-store --add-fixed --recursive "$hashType" "$unpackDir")

  if [ -n "$expHash" ] && [ "$expHash" != "$hash" ]; then
    echo "hash mismatch for URL \`$url'" >&2
    exit 1
  fi
fi

if ! test -n "$QUIET"; then echo "path is $finalPath" >&2; fi

echo "$hash"

if test -n "$PRINT_PATH"; then
  echo "$finalPath"
fi
|
|
@ -1,4 +1,4 @@
|
||||||
{ stdenv, makeWrapper, git, subversion, mercurial, bazaar, cvs }:
|
{ stdenv, makeWrapper, git, subversion, mercurial, bazaar, cvs, unzip, curl, gnused }:
|
||||||
|
|
||||||
stdenv.mkDerivation {
|
stdenv.mkDerivation {
|
||||||
name = "nix-prefetch-scripts";
|
name = "nix-prefetch-scripts";
|
||||||
|
@ -11,9 +11,13 @@ stdenv.mkDerivation {
|
||||||
function copyScript {
|
function copyScript {
|
||||||
local name=nix-prefetch-$1;
|
local name=nix-prefetch-$1;
|
||||||
local src=$2;
|
local src=$2;
|
||||||
local exe=$3/bin;
|
local wrapArgs=""
|
||||||
cp $src $out/bin/$name;
|
cp $src $out/bin/$name;
|
||||||
wrapProgram $out/bin/$name --suffix PATH : "$exe"
|
for dep in ''${@:3}; do
|
||||||
|
wrapArgs="$wrapArgs --prefix PATH : $dep/bin"
|
||||||
|
done
|
||||||
|
wrapArgs="$wrapArgs --prefix PATH : ${gnused}/bin"
|
||||||
|
wrapProgram $out/bin/$name $wrapArgs
|
||||||
}
|
}
|
||||||
|
|
||||||
copyScript "hg" ${../../../build-support/fetchhg/nix-prefetch-hg} ${mercurial}
|
copyScript "hg" ${../../../build-support/fetchhg/nix-prefetch-hg} ${mercurial}
|
||||||
|
@ -21,6 +25,7 @@ stdenv.mkDerivation {
|
||||||
copyScript "svn" ${../../../build-support/fetchsvn/nix-prefetch-svn} ${subversion}
|
copyScript "svn" ${../../../build-support/fetchsvn/nix-prefetch-svn} ${subversion}
|
||||||
copyScript "bzr" ${../../../build-support/fetchbzr/nix-prefetch-bzr} ${bazaar}
|
copyScript "bzr" ${../../../build-support/fetchbzr/nix-prefetch-bzr} ${bazaar}
|
||||||
copyScript "cvs" ${../../../build-support/fetchcvs/nix-prefetch-cvs} ${cvs}
|
copyScript "cvs" ${../../../build-support/fetchcvs/nix-prefetch-cvs} ${cvs}
|
||||||
|
copyScript "zip" ${../../../build-support/fetchzip/nix-prefetch-zip} ${unzip} ${curl}
|
||||||
'';
|
'';
|
||||||
|
|
||||||
meta = with stdenv.lib; {
|
meta = with stdenv.lib; {
|
||||||
|
@ -30,4 +35,4 @@ stdenv.mkDerivation {
|
||||||
# Quicker to build than to download, I hope
|
# Quicker to build than to download, I hope
|
||||||
hydraPlatforms = [];
|
hydraPlatforms = [];
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
Loading…
Reference in a new issue