
fetchurl: add user agent

It would be nice to be able to track Nix requests. The header isn't
trustworthy, but it can be helpful for stats and routing in HTTP logs.

Since `fetchurl` is used so widely, we should "magically" get a UA on
`fetchzip`, `fetchFromGitHub`, and other related fetchers.

Since `fetchurl` is only used for fixed-output derivations, this should
cause no mass rebuild: the output path of a fixed-output derivation
depends only on its declared hash, not on its builder script.

User-Agent example: curl/7.57.0 Nixpkgs/18.03
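As a rough illustration of the "stats in HTTP logs" use case, a mirror operator could count Nixpkgs-tagged fetches per day. This is a hedged sketch, not part of the commit; the log path and combined log format are assumptions:

    # Count Nixpkgs-tagged requests per day in an nginx access log.
    # Field 4 looks like "[15/Aug/2016:10:27:39"; cut keeps the date part.
    grep 'Nixpkgs/' /var/log/nginx/access.log |
        awk '{print $4}' | cut -d: -f1 | sort | uniq -c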
Dan Peebles 2016-08-15 10:27:39 -04:00 committed by Yegor Timoshenko
parent 36e02645eb
commit 0cb623c3d9
2 changed files with 17 additions and 12 deletions

pkgs/build-support/fetchurl/builder.sh

@@ -2,20 +2,23 @@ source $stdenv/setup
 
 source $mirrorsFile
 
 curlVersion=$(curl -V | head -1 | cut -d' ' -f2)
 
 # Curl flags to handle redirects, not use EPSV, handle cookies for
 # servers that need them during redirects, and work on SSL without a
 # certificate (this isn't a security problem because we check the
 # cryptographic hash of the output anyway).
-curl="curl \
- --location --max-redirs 20 \
- --retry 3 \
- --disable-epsv \
- --cookie-jar cookies \
- --insecure \
- $curlOpts \
- $NIX_CURL_FLAGS"
+curl=(
+    curl
+    --location
+    --max-redirs 20
+    --disable-epsv
+    --cookie-jar cookies
+    --insecure
+    --user-agent "curl/$curlVersion Nixpkgs/$nixpkgsVersion"
+    $curlOpts
+    $NIX_CURL_FLAGS
+)
 
 downloadedFile="$out"
 if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi
@@ -32,7 +35,7 @@ tryDownload() {
     # if we get error code 18, resume partial download
     while [ $curlexit -eq 18 ]; do
         # keep this inside an if statement, since on failure it doesn't abort the script
-        if $curl -C - --fail "$url" --output "$downloadedFile"; then
+        if "${curl[@]}" -C - --fail "$url" --output "$downloadedFile"; then
             success=1
             break
         else
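A side note on the surrounding loop: curl exits with code 18 when a transfer ends early with a partial file, and `-C -` tells it to resume from the current size of the output file. A self-contained sketch of the same pattern, with a placeholder URL:

    # Keep retrying while curl reports a partial transfer (exit code 18);
    # -C - resumes from wherever the previous attempt left off.
    url=https://example.org/source.tar.gz    # placeholder
    curlexit=18
    while [ $curlexit -eq 18 ]; do
        if curl -C - --fail "$url" --output source.tar.gz; then
            break
        else
            curlexit=$?
        fi
    done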
@@ -61,7 +64,7 @@ tryHashedMirrors() {
     for mirror in $hashedMirrors; do
         url="$mirror/$outputHashAlgo/$outputHash"
-        if $curl --retry 0 --connect-timeout "${NIX_CONNECT_TIMEOUT:-15}" \
+        if "${curl[@]}" --retry 0 --connect-timeout "${NIX_CONNECT_TIMEOUT:-15}" \
             --fail --silent --show-error --head "$url" \
             --write-out "%{http_code}" --output /dev/null > code 2> log; then
             tryDownload "$url"
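For context, hashed mirrors serve files purely by content hash at `<mirror>/<outputHashAlgo>/<outputHash>`, which is why a cheap HEAD probe is enough before committing to a download. A hedged example: tarballs.nixos.org is the hashed mirror nixpkgs has historically configured, and the hash below is a placeholder:

    # Probe a hashed mirror with a HEAD request, as tryHashedMirrors does.
    mirror=https://tarballs.nixos.org
    hash=0000000000000000000000000000000000000000000000000000    # placeholder
    curl --fail --silent --show-error --head "$mirror/sha256/$hash" \
        --write-out "%{http_code}" --output /dev/null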

pkgs/build-support/fetchurl/default.nix

@@ -95,7 +95,7 @@ assert sha512 != "" -> builtins.compareVersions "1.11" builtins.nixVersion <= 0;
 
 let
-
+  inherit (stdenv.lib) fileContents;
   hasHash = showURLs || (outputHash != "" && outputHashAlgo != "")
     || sha1 != "" || sha256 != "" || sha512 != "";
   urls_ = if urls != [] then urls else [url];
@@ -132,6 +132,8 @@ else stdenv.mkDerivation {
   impureEnvVars = impureEnvVars ++ netrcImpureEnvVars;
 
+  nixpkgsVersion = fileContents ../../../.version;
+
   # Doing the download on a remote machine just duplicates network
   # traffic, so don't do that.
   preferLocalBuild = true;
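To close the loop: attributes of `stdenv.mkDerivation` are exported as environment variables in the build, so the `nixpkgsVersion` attribute added above is what builder.sh reads when assembling the user agent. A small sketch of the builder-side view; this mirrors the diff rather than adding anything new:

    # Inside builder.sh, both halves of the UA are ordinary shell variables:
    # curlVersion is computed from `curl -V`, nixpkgsVersion arrives as an
    # environment variable set from the derivation attribute above.
    curlVersion=$(curl -V | head -1 | cut -d' ' -f2)
    echo "User-Agent: curl/$curlVersion Nixpkgs/$nixpkgsVersion"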