Merge pull request #135874 from junjihashimoto/feature/bump-torchvision

Sandro, 2021-09-17 11:19:56 +02:00, committed by GitHub
commit cf15afd884
4 changed files with 63 additions and 17 deletions

@@ -5472,6 +5472,12 @@
    githubId = 39434424;
    name = "Felix Springer";
  };
  junjihashimoto = {
    email = "junji.hashimoto@gmail.com";
    github = "junjihashimoto";
    githubId = 2469618;
    name = "Junji Hashimoto";
  };
  justinas = {
    email = "justinas@justinas.org";
    github = "justinas";

@@ -15,7 +15,7 @@ let
  pyVerNoDot = builtins.replaceStrings [ "." ] [ "" ] python.pythonVersion;
  srcs = import ./binary-hashes.nix version;
  unsupported = throw "Unsupported system";
  version = "0.9.1";
  version = "0.10.0";
in buildPythonPackage {
  inherit version;
@@ -54,6 +54,6 @@ in buildPythonPackage {
    changelog = "https://github.com/pytorch/vision/releases/tag/v${version}";
    license = licenses.bsd3;
    platforms = platforms.linux;
    maintainers = with maintainers; [ ];
    maintainers = with maintainers; [ junjihashimoto ];
  };
}
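Note that this version bump only evaluates if binary-hashes.nix carries a matching "0.10.0" entry, since srcs = import ./binary-hashes.nix version; looks the wheel up by that exact string. A minimal, hedged sanity check from the root of a nixpkgs checkout; the attribute name python39Packages.torchvision-bin and the allowUnfree setting are assumptions for illustration, not part of this diff:

  # Illustrative check only: prints the version the bumped expression resolves to.
  # allowUnfree is set in case the CUDA wheel chain pulls in unfree dependencies.
  nix-instantiate --eval -E '
    (import ./. { config = { allowUnfree = true; }; })
      .python39Packages.torchvision-bin.version
  '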

@@ -3,20 +3,24 @@
# Precompiled wheels can be found at:
# https://download.pytorch.org/whl/torch_stable.html
version: {
  x86_64-linux-37 = {
    name = "torchvision-${version}-cp37-cp37m-linux_x86_64.whl";
    url = "https://download.pytorch.org/whl/cu111/torchvision-${version}%2Bcu111-cp37-cp37m-linux_x86_64.whl";
    hash = "sha256-7EMVB8KZg2I3P4RqnIVk/7OOAPA1OWOipns58cSCUrw=";
  };
  x86_64-linux-38 = {
    name = "torchvision-${version}-cp38-cp38-linux_x86_64.whl";
    url = "https://download.pytorch.org/whl/cu111/torchvision-${version}%2Bcu111-cp38-cp38-linux_x86_64.whl";
    hash = "sha256-VjsCBW9Lusr4aDQLqaFh5dpV/5ZJ5PDs7nY4CbCHDTA=";
  };
  x86_64-linux-39 = {
    name = "torchvision-${version}-cp39-cp39-linux_x86_64.whl";
    url = "https://download.pytorch.org/whl/cu111/torchvision-${version}%2Bcu111-cp39-cp39-linux_x86_64.whl";
    hash = "sha256-pzR7TBE+WcAmozskoeOVBuMkGJf9tvsaXsUkTcu86N8=";
# To add a new version, run "prefetch.sh 'new-version'" and paste the generated file as follows.
version : builtins.getAttr version {
  "0.10.0" = {
    x86_64-linux-37 = {
      name = "torchvision-0.10.0-cp37-cp37m-linux_x86_64.whl";
      url = "https://download.pytorch.org/whl/cu111/torchvision-0.10.0%2Bcu111-cp37-cp37m-linux_x86_64.whl";
      hash = "sha256-yMgRhp06/rYIIiDNehNrZYIrvIbPvusCxGJL0mL+Bs4=";
    };
    x86_64-linux-38 = {
      name = "torchvision-0.10.0-cp38-cp38-linux_x86_64.whl";
      url = "https://download.pytorch.org/whl/cu111/torchvision-0.10.0%2Bcu111-cp38-cp38-linux_x86_64.whl";
      hash = "sha256-p1bw+4KsdTuXle+AwXYQVL8elPMroAHV9lkXJGWbtPc=";
    };
    x86_64-linux-39 = {
      name = "torchvision-0.10.0-cp39-cp39-linux_x86_64.whl";
      url = "https://download.pytorch.org/whl/cu111/torchvision-0.10.0%2Bcu111-cp39-cp39-linux_x86_64.whl";
      hash = "sha256-AOfhnHThVdJx5qxj2LTj+T9dPMFxQoxP3duxIm1y7KE=";
    };
  };
}
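As the comment in the file notes, each release now gets its own attribute keyed by the version string, and builtins.getAttr makes an unlisted version fail at evaluation time. A quick, hedged way to exercise the new lookup, run from the directory that contains binary-hashes.nix:

  # Prints the name/url/hash set for Python 3.9; an unlisted version such as
  # "0.9.1" now aborts with an "attribute '0.9.1' missing" evaluation error.
  nix-instantiate --eval --strict -E \
    '(import ./binary-hashes.nix "0.10.0").x86_64-linux-39'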

@@ -0,0 +1,36 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p nix-prefetch-scripts
version=$1
bucket="https://download.pytorch.org/whl/cu111"
url_and_key_list=(
  "x86_64-linux-37 $bucket/torchvision-${version}%2Bcu111-cp37-cp37m-linux_x86_64.whl torchvision-${version}-cp37-cp37m-linux_x86_64.whl"
  "x86_64-linux-38 $bucket/torchvision-${version}%2Bcu111-cp38-cp38-linux_x86_64.whl torchvision-${version}-cp38-cp38-linux_x86_64.whl"
  "x86_64-linux-39 $bucket/torchvision-${version}%2Bcu111-cp39-cp39-linux_x86_64.whl torchvision-${version}-cp39-cp39-linux_x86_64.whl"
)
hashfile="binary-hashes-$version.nix"
rm -f "$hashfile"
echo "  \"$version\" = {" >> "$hashfile"
for url_and_key in "${url_and_key_list[@]}"; do
  key=$(echo "$url_and_key" | cut -d' ' -f1)
  url=$(echo "$url_and_key" | cut -d' ' -f2)
  name=$(echo "$url_and_key" | cut -d' ' -f3)
  echo "prefetching ${url}..."
  hash=$(nix hash to-sri --type sha256 "$(nix-prefetch-url "$url" --name "$name")")
  echo "    $key = {" >> "$hashfile"
  echo "      name = \"$name\";" >> "$hashfile"
  echo "      url = \"$url\";" >> "$hashfile"
  echo "      hash = \"$hash\";" >> "$hashfile"
  echo "    };" >> "$hashfile"
  echo
done
echo "  };" >> "$hashfile"
echo "done."