Merge pull request #151363 from Stunkymonkey/doc-updateWalker

commit 27cccd4e49
Sandro, 2022-01-27 14:06:36 +01:00, committed by GitHub
68 changed files with 5 additions and 967 deletions

@@ -192,10 +192,6 @@ meta.hydraPlatforms = [];
If set to `true`, the package is marked as "broken", meaning that it won't show up in `nix-env -qa`, and cannot be built or installed. Such packages should be removed from Nixpkgs eventually unless they are fixed.
### `updateWalker` {#var-meta-updateWalker}
If set to `true`, the package has been tested to update correctly via the `update-walker.sh` script without additional settings. Such packages have `meta.version` set, and their homepage (or the page specified by `meta.downloadPage`) contains a direct link to the package tarball.
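For illustration, a package marked this way could then be updated by pointing the script (which lives under `pkgs/build-support/upstream-updater/`) directly at its Nix expression; the package path below is only an example:

    pkgs/build-support/upstream-updater/update-walker.sh pkgs/applications/misc/vifm/default.nix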
## Licenses {#sec-meta-license}
The `meta.license` attribute should preferably contain a value from `lib.licenses` defined in [`nixpkgs/lib/licenses.nix`](https://github.com/NixOS/nixpkgs/blob/master/lib/licenses.nix), or an in-place license description in the same format if the license is unlikely to be useful in another expression.

@@ -1,3 +0,0 @@
url http://spiegl.de/qiv/download/
version_link '[.]tgz$'
do_overwrite() { do_overwrite_just_version; }

@@ -1,7 +0,0 @@
url https://mupdf.com/downloads/archive/
do_overwrite(){
ensure_hash
ensure_version
set_var_value version $CURRENT_VERSION
set_var_value sha256 $CURRENT_HASH
}

@@ -38,7 +38,6 @@ in stdenv.mkDerivation rec {
license = licenses.gpl2;
downloadPage = "https://vifm.info/downloads.shtml";
homepage = "https://vifm.info/";
updateWalker = true;
changelog = "https://github.com/vifm/vifm/blob/v${version}/ChangeLog";
};
}

@@ -87,7 +87,6 @@ python3.pkgs.buildPythonApplication rec {
license = lib.licenses.gpl3Plus;
maintainers = with lib.maintainers; [ raskin abbradar ];
downloadPage = "http://gajim.org/downloads.php";
updateWalker = true;
platforms = lib.platforms.linux;
};
}

@@ -38,8 +38,6 @@ stdenv.mkDerivation rec {
license = licenses.gpl2;
maintainers = with maintainers; [ pSub ];
platforms = with platforms; linux;
updateWalker = true;
downloadPage = "http://mcabber.com/files/";
downloadURLRegexp = "mcabber-[0-9.]+[.]tar[.][a-z0-9]+$";
};
}

@@ -1,6 +0,0 @@
url http://page.mi.fu-berlin.de/cbenzmueller/leo/download.html
version_link '[.]tgz'
version '.*_v([0-9.]+)[.][a-z0-9]+$' '\1'
do_overwrite () {
do_overwrite_just_version
}

@@ -1,6 +0,0 @@
target clingo.nix
attribute_name clingo
url https://github.com/potassco/clingo/releases/
ensure_choice
version '.*/v([0-9.]+)[.]tar[.].*' '\1'
minimize_overwrite

@@ -66,6 +66,5 @@ stdenv.mkDerivation rec {
platforms = lib.platforms.linux;
downloadPage = "http://www.ps.uni-saarland.de/~cebrown/satallax/downloads.php";
homepage = "http://www.ps.uni-saarland.de/~cebrown/satallax/index.php";
updateWalker = true;
};
}

@@ -82,6 +82,5 @@ stdenv.mkDerivation rec {
license = licenses.gpl2Plus;
maintainers = with maintainers; [ ertes AndersonTorres ] ++ teams.sage.members;
platforms = platforms.linux ++ platforms.darwin;
updateWalker = true;
};
}

@@ -31,6 +31,5 @@ stdenv.mkDerivation rec {
maintainers = [lib.maintainers.raskin];
platforms = lib.platforms.unix;
homepage = "http://www.mathe2.uni-bayreuth.de/stoll/programs/";
updateWalker = true;
};
}

@@ -1,5 +0,0 @@
url https://sourceforge.net/projects/golly/files/golly/
version_link '[-][0-9.]+/$'
SF_version_tarball 'src'
SF_redirect
minimize_overwrite

@@ -92,7 +92,6 @@ let
downloadPage = "https://www.mercurial-scm.org/release/";
license = licenses.gpl2Plus;
maintainers = with maintainers; [ eelco lukegb pacien ];
updateWalker = true;
platforms = platforms.unix;
};
};

@@ -1,8 +0,0 @@
url http://www.monotone.ca/downloads.php
do_overwrite(){
ensure_version
ensure_hash
set_var_value version $CURRENT_VERSION
set_var_value sha256 $CURRENT_HASH
}

@@ -1,19 +0,0 @@
a :
a.stdenv.mkDerivation {
buildCommand = ''
mkdir -p "$out/attributes"
'' + (a.lib.concatStrings (map
(n: ''
ln -s "${a.writeTextFile {name=n; text=builtins.getAttr n a.theAttrSet;}}" $out/attributes/${n};
'')
(builtins.attrNames a.theAttrSet)
));
name = "attribute-set";
meta = {
description = "Contents of an attribute set";
maintainers = [
a.lib.maintainers.raskin
];
};
}

@@ -1,24 +0,0 @@
#! /bin/sh
[ -z "$1" ] && {
echo "Use $0 expression-basename repo-url branch-name package-base-name"
echo "Like:"
echo "$0 default http://git.example.com/repo origin/master hello"
exit 1;
} >&2
own_dir="$(cd "$(dirname "$0")"; sh -c pwd)"
cp "$own_dir/../builder-defs/template-bdp-uud.nix" "$1.nix"
sed -e "s@src-for-default.nix@src-for-$1.nix@g;
s@fetchUrlFromSrcInfo@fetchGitFromSrcInfo@g" -i "$1.nix"
echo '{}' > "src-for-$1.nix"
cat << EOF > src-info-for-$1.nix
{
repoUrl = "$2";
rev = "$3";
baseName = "$4";
method = "fetchgit";
}
EOF

@@ -1,20 +0,0 @@
#! /bin/sh
[ -z "$1" ] && {
echo "Use $0 expression-basename download-page package-base-name"
echo "Like:"
echo "$0 default http://example.com/downloads hello"
exit 1;
} >&2
own_dir="$(cd "$(dirname "$0")"; sh -c pwd)"
cp "$own_dir/../builder-defs/template-auto-callable.nix" "$1.nix"
sed -e "s@src-for-default.nix@src-for-$1.nix@g" -i "$1.nix"
echo '{}' > "src-for-$1.nix"
cat << EOF > src-info-for-$1.nix
{
downloadPage = "$2";
baseName = "$3";
}
EOF

@@ -1,29 +0,0 @@
Next to file.nix we get src-for-file.nix
src-for-file.nix should evaluate to a flat attribute set with
string values.
It is supposed to be imported in the main expression.
In the ideal world it can export url, hash, version.
src-for-file.nix generation is directed by
src-info-for-file.nix.
Attributes:
src-info-for-file.nix:
downloadPage
rev (for repos)
baseName (default = unnamed-package)
sourceRegexp (default = '.*[.]tar[.].*')
choiceCommand (default = 'head -1')
versionExtractorSedScript (default = 's/.*-([0-9.]+)[.].*/\1/')
versionReferenceCreator (default = 's/-([0-9.]+)[.]/-${version}./')
mirrorSedScript (default = none)
src-for-file.nix:
advertisedUrl (if it still matches the freshly discovered URL, no update is needed)
url
hash
version
name
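A minimal sketch (package name and URL are placeholders) of creating the driving file by hand, in the same way the helper scripts above do with a heredoc; the updater then regenerates src-for-default.nix next to it:
cat << EOF > src-info-for-default.nix
{
  downloadPage = "http://example.com/downloads";
  baseName = "hello";
}
EOF
# The regenerated src-for-default.nix has roughly this shape:
# rec {
#   advertisedUrl="..."; url="..."; hash="..."; version="..."; name="hello-...";
# }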

@@ -1,14 +0,0 @@
# sed scripts
#http://sourceforge.net/projects/webdruid/files/webdruid/0.6.0-alpha5/webdruid-0.6.0-alpha5.tar.gz/download
#http://downloads.sourceforge.net/webdruid/files/webdruid/0.6.0-alpha5/webdruid-0.6.0-alpha5.tar.gz
skipRedirectSF='s@sourceforge.net/projects@downloads.sourceforge.net/project@; s@/files@@; s@/download$@@;'
extractReleaseSF='s@.*/([^/]+)/[^/]+@\1@'
extractVersionSF='s@.*/[^/0-9]*([0-9].*)[.](tar|tgz|tbz2|zip).*@\1@'
apacheMirror='s@http://www.apache.org/dist/@mirror://apache/@'
skipRedirectApache='s@/dyn/closer.cgi[?]path=@/dist@'
replaceAllVersionOccurences() {
echo s/"$version"/\${version}/g
}
dashDelimitedVersion='s/.*-([0-9.]+)-.*/\1/'
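For illustration (assuming this file has been sourced), the SourceForge snippet rewrites the example URL from the comments above as follows:
echo 'http://sourceforge.net/projects/webdruid/files/webdruid/0.6.0-alpha5/webdruid-0.6.0-alpha5.tar.gz/download' |
  sed -re "$skipRedirectSF"
# prints: http://downloads.sourceforge.net/project/webdruid/webdruid/0.6.0-alpha5/webdruid-0.6.0-alpha5.tar.gz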

@@ -1,13 +0,0 @@
{
a=1;
b="text";
c=''
text
'';
d=''
Multi-line text with special characters -
like \ (backslash) and ''${} (dollar +
curly braces) and $ (dollar) and ' (quote)
and " (double quote).
'';
}

@@ -1,182 +0,0 @@
#! /bin/sh
set -x
own_dir="$(cd "$(dirname "$0")"; sh -c pwd)"
source "$own_dir/snippets.sh"
[ -z "$1" ] && {
echo "Specify main expression filename."
exit 1;
}
main_file="$1"
main_dir="$(cd "$(dirname "$main_file")" ; sh -c pwd)"
file_name="$(basename "$main_file")"
defs_file="$main_dir"/src-info-for-"$file_name"
src_file="$main_dir"/src-for-"$file_name"
# OK, [vcs] revert is always possible
new_src_file="$main_dir"/src-for-"$file_name"
forcedUrl="$2"
defs_dir="$("$own_dir"/attrset-to-dir.sh "$defs_file")"
src_defs_dir="$("$own_dir"/attrset-to-dir.sh "$src_file")"
getAttr () {
file="$defs_dir"/"$1"
data="$( ( [ -f "$file" ] && cat "$file" ) || echo "$2" )"
echo "attribute $1 obtained as: [[$data]]" >&2
echo "$data"
}
method="$(getAttr method fetchurl)"
baseName="$(getAttr baseName 'unnamed-package')"
commonPrefetchVars=" version name hash"
prefetchClause=""
[ fetchSFdirs = "$method" ] && {
if [ -z "$forcedUrl" ]; then
freshUrl="$("$own_dir"/urls-from-page.sh "$(getAttr downloadPage)" |
eval "egrep '$(getAttr sourceRegexp '[-][0-9.]+/$')'" |
eval "egrep -v '$(getAttr blacklistRegexp '^$')'" |
eval "$(getAttr choiceCommand 'head -n 1')" |
eval "$(getAttr versionToFileCommand "sed -re 's@/([^/]*-[0-9.]+)/@/\1/\1$(getAttr fileSuffix .tar.gz)@'")"
)"
if ! egrep ':' <<< "$freshUrl" ; then
freshUrl="$(dirname "$(getAttr downloadPage).")/$freshUrl"
fi
echo "Found download link: $freshUrl" >&2
else
freshUrl="$forcedUrl"
fi
freshUrl="$(echo "$freshUrl" | sed -re "$skipRedirectSF")"
echo "Sourceforge-corrected URL: $freshUrl" >&2
version="$(echo "$freshUrl" |
sed -re "$(getAttr versionExtractorSedScript "$extractVersionSF")")"
baseName="$(getAttr baseName "$(echo "$freshUrl" | sed -re 's@.*/project/([^/]+)/.*@\1@')")"
url="$freshUrl"
name="$baseName-$version"
advertisedUrl="$freshUrl"
if [ x"$freshUrl" = x"$(cat "$src_defs_dir"/advertisedUrl)" ]; then
echo "Source link not changed" >&2
exit
fi
hash=$(nix-prefetch-url "$freshUrl")
prefetchVars="url advertisedUrl";
}
[ fetchSF = "$method" ] && {
if [ -z "$forcedUrl" ]; then
freshUrl="$("$own_dir"/urls-from-page.sh "$(getAttr downloadPage)" |
eval "egrep \"$(getAttr sourceRegexp '.*[.]tar[.].*|.*[.]tgz$|.*[.]tbz2$')\"" |
eval "egrep -v \"$(getAttr blacklistRegexp '^$')\"" |
eval "$(getAttr choiceCommand 'head -1')")"
if ! egrep ':' <<< "$freshUrl" ; then
freshUrl="$(dirname "$(getAttr downloadPage).")/$freshUrl"
fi
echo "Found download link: $freshUrl" >&2
else
freshUrl="$forcedUrl"
fi
freshUrl="$(echo "$freshUrl" | sed -re "$skipRedirectSF")"
echo "Sourceforge-corrected URL: $freshUrl" >&2
version="$(echo "$freshUrl" |
sed -re "$(getAttr versionExtractorSedScript "$extractVersionSF")")"
baseName="$(getAttr baseName "$(echo "$freshUrl" | sed -re 's@.*/project/([^/]+)/.*@\1@')")"
url="$freshUrl"
name="$baseName-$version"
advertisedUrl="$freshUrl"
if [ x"$freshUrl" = x"$(cat "$src_defs_dir"/advertisedUrl)" ]; then
echo "Source link not changed" >&2
exit
fi
hash=$(nix-prefetch-url "$freshUrl")
prefetchVars="url advertisedUrl";
}
[ fetchurl = "$method" ] && {
if [ -z "$forcedUrl" ] ; then
freshUrl="$("$own_dir"/urls-from-page.sh "$(getAttr downloadPage)" |
eval "egrep \"$(getAttr sourceRegexp '.*[.]tar[.].*|.*[.]tgz$|.*[.]tbz2$')\"" |
eval "egrep -v \"$(getAttr blacklistRegexp '^$')\"" |
eval "$(getAttr choiceCommand 'head -1')")"
if ! egrep ':' <<< "$freshUrl" ; then
freshUrl="$(dirname "$(getAttr downloadPage).")/$freshUrl"
fi
echo "Found download link: $freshUrl" >&2
else
freshUrl="$forcedUrl"
fi
version="$(echo "$freshUrl" |
eval "sed -re \"$(getAttr versionExtractorSedScript \
's/.*-([0-9.]+)[.].*/\1/')\"")"
mirrorUrl="$(echo "$freshUrl" |
eval "sed -r -e \"$(getAttr versionReferenceCreator \
's/-'"${version}"'[.]/-\${version}./')\"" |
eval "sed -r -e \"$(getAttr mirrorSedScript)\"")"
url="$mirrorUrl"
name="$baseName-$version"
advertisedUrl="$freshUrl"
url="$mirrorUrl"
if [ x"$freshUrl" = x"$(cat "$src_defs_dir"/advertisedUrl)" ]; then
echo "Source link not changed" >&2
exit
fi
hash=$(nix-prefetch-url "$freshUrl")
prefetchVars="url advertisedUrl";
}
[ "fetchgit" = "$method" ] && {
repoUrl="$(getAttr repoUrl)"
export NIX_PREFETCH_GIT_CHECKOUT_HOOK="
cat .git/HEAD
"
export NIX_HASH_ALGO=sha256
rev="$(getAttr rev '')";
rev_and_hash="$("$own_dir"/../fetchgit/nix-prefetch-git "$repoUrl" "$rev" | tee /dev/stderr | tail -2)"
rev="$(echo "$rev_and_hash" | head -1)"
url="$repoUrl";
hash="$(echo "$rev_and_hash" | tail -1)"
version="$rev"
name="$baseName-$version"
prefetchVars="rev url";
}
prefetchAssignments="";
for i in $commonPrefetchVars $prefetchVars; do
prefetchAssignments="$prefetchAssignments $i=\"$(eval echo \"\$$i\")\";$(echo -e '\n ')"
done;
extraAssignments=""
for i in $(getAttr extraVars ''); do
eval "$(getAttr "eval_$i" 'i=""')"
extraAssignments="$extraAssignments $i=\"$(eval echo \"\$$i\")\";$(echo -e '\n ')"
done
cat << EOF > "$new_src_file"
rec {
$prefetchAssignments
$extraAssignments
}
EOF

@@ -1,20 +0,0 @@
SF_redirect () {
redirect 99
process 'http://[a-z]+[.]dl[.]sourceforge[.]net/' 'mirror://sourceforge/'
process '[?].*' ''
}
SF_version_dir () {
version_link 'http://sourceforge.net/.+/'"$1"'[0-9.]+/$'
}
SF_version_tarball () {
version_link "${1:-[.]tar[.]}.*/download\$"
}
GH_latest () {
prefetch_command_rel ../fetchgit/nix-prefetch-git
revision "$("$(dirname "$0")/urls-from-page.sh" "$CURRENT_URL/commits" | grep /commit/ | head -n 1 | xargs basename )"
version '.*' "git-$(date +%Y-%m-%d)"
NEED_TO_CHOOSE_URL=
}
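As a sketch, a hypothetical X.upstream file combines these helpers the same way the SourceForge-hosted packages elsewhere in this tree do (the project URL is a placeholder):
url https://sourceforge.net/projects/example/files/
SF_version_dir
SF_version_tarball
SF_redirect
minimize_overwrite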

@@ -1,320 +0,0 @@
#! /bin/sh
own_dir="$(cd "$(dirname "$0")"; pwd)"
URL_WAS_SET=
DL_URL_RE=
CURRENT_URL=
CURRENT_REV=
PREFETCH_COMMAND=
NEED_TO_CHOOSE_URL=1
url () {
URL_WAS_SET=1
CURRENT_URL="$1"
}
dl_url_re () {
DL_URL_RE="$1"
}
version_unpack () {
sed -re '
s/[.]/ /g;
s@/@ / @g
s/-(rc|pre)/ -1 \1 /g;
s/-(gamma)/ -2 \1 /g;
s/-(beta)/ -3 \1 /g;
s/-(alpha)/ -4 \1 /g;
s/[-]/ - /g;
'
}
version_repack () {
sed -re '
s/ - /-/g;
s/ -[0-9]+ ([a-z]+) /-\1/g;
s@ / @/@g
s/ /./g;
'
}
version_sort () {
version_unpack |
sort -t ' ' -n $(for i in $(seq 30); do echo " -k${i}n" ; done) | tac |
version_repack
}
position_choice () {
head -n "${1:-1}" | tail -n "${2:-1}"
}
matching_links () {
"$own_dir"/urls-from-page.sh "$CURRENT_URL" | grep -E "$1"
}
link () {
CURRENT_URL="$(matching_links "$1" | position_choice "$2" "$3")"
unset NEED_TO_CHOOSE_URL
echo "Linked by: $*"
echo "URL: $CURRENT_URL" >&2
}
version_link () {
CURRENT_URL="$(matching_links "$1" | version_sort | position_choice "$2" "$3")"
unset NEED_TO_CHOOSE_URL
echo "Linked version by: $*"
echo "URL: $CURRENT_URL" >&2
}
redirect () {
CURRENT_URL="$(curl -I -L --max-redirs "${1:-99}" "$CURRENT_URL" |
grep -E '^[Ll]ocation: ' | position_choice "${2:-999999}" "$3" |
sed -e 's/^[Ll]ocation: //; s/\r//')"
echo "Redirected: $*"
echo "URL: $CURRENT_URL" >&2
}
replace () {
sed -re "s $1 $2 g"
}
process () {
CURRENT_URL="$(echo "$CURRENT_URL" | replace "$1" "$2")"
echo "Processed: $*"
echo "URL: $CURRENT_URL" >&2
}
version () {
CURRENT_VERSION="$(echo "$CURRENT_URL" | replace "$1" "$2")"
echo "Version: $CURRENT_VERSION" >&2
}
ensure_version () {
echo "Ensuring version. CURRENT_VERSION: $CURRENT_VERSION" >&2
[ -z "$CURRENT_VERSION" ] && version '.*-([0-9.]+)[-._].*' '\1'
}
ensure_target () {
echo "Ensuring target. CURRENT_TARGET: $CURRENT_TARGET" >&2
[ -z "$CURRENT_TARGET" ] && target "$(basename "$CONFIG_NAME" .upstream).nix"
}
ensure_name () {
echo "Ensuring name. CURRENT_NAME: $CURRENT_NAME" >&2
[ -z "$CURRENT_NAME" ] && name "$(basename "$CONFIG_DIR")"
echo "Resulting name: $CURRENT_NAME"
}
ensure_attribute_name () {
echo "Ensuring attribute name. CURRENT_ATTRIBUTE_NAME: $CURRENT_ATTRIBUTE_NAME" >&2
ensure_name
[ -z "$CURRENT_ATTRIBUTE_NAME" ] && attribute_name "$CURRENT_NAME"
echo "Resulting attribute name: $CURRENT_ATTRIBUTE_NAME"
}
ensure_url () {
echo "Ensuring starting URL. CURRENT_URL: $CURRENT_URL" >&2
ensure_attribute_name
[ -z "$CURRENT_URL" ] && CURRENT_URL="$(retrieve_meta downloadPage)"
[ -z "$CURRENT_URL" ] && CURRENT_URL="$(retrieve_meta downloadpage)"
[ -z "$CURRENT_URL" ] && CURRENT_URL="$(retrieve_meta homepage)"
echo "Resulting URL: $CURRENT_URL"
}
ensure_choice () {
echo "Ensuring that choice is made." >&2
echo "NEED_TO_CHOOSE_URL: [$NEED_TO_CHOOSE_URL]." >&2
echo "CURRENT_URL: $CURRENT_URL" >&2
[ -z "$URL_WAS_SET" ] && [ -z "$CURRENT_URL" ] && ensure_url
[ -n "$NEED_TO_CHOOSE_URL" ] && {
version_link "${DL_URL_RE:-[.]tar[.]([^./])+\$}"
unset NEED_TO_CHOOSE_URL
}
[ -z "$CURRENT_URL" ] && {
echo "Error: empty CURRENT_URL"
echo "Error: empty CURRENT_URL" >&2
exit 1
}
}
revision () {
CURRENT_REV="$1"
echo "CURRENT_REV: $CURRENT_REV"
}
prefetch_command () {
PREFETCH_COMMAND="$1"
}
prefetch_command_rel () {
PREFETCH_COMMAND="$(dirname "$0")/$1"
}
ensure_hash () {
echo "Ensuring hash. CURRENT_HASH: $CURRENT_HASH" >&2
[ -z "$CURRENT_HASH" ] && hash
}
hash () {
CURRENT_HASH="$(${PREFETCH_COMMAND:-nix-prefetch-url} "$CURRENT_URL" $CURRENT_REV)"
echo "CURRENT_HASH: $CURRENT_HASH" >&2
}
name () {
CURRENT_NAME="$1"
echo "CURRENT_NAME: $CURRENT_NAME" >&2
}
attribute_name () {
CURRENT_ATTRIBUTE_NAME="$1"
echo "CURRENT_ATTRIBUTE_NAME: $CURRENT_ATTRIBUTE_NAME" >&2
}
retrieve_meta () {
nix-instantiate --eval-only '<nixpkgs>' -A "$CURRENT_ATTRIBUTE_NAME".meta."$1" | xargs
}
retrieve_version () {
PACKAGED_VERSION="$(retrieve_meta version)"
}
ensure_dl_url_re () {
echo "Ensuring DL_URL_RE. DL_URL_RE: $DL_URL_RE" >&2
[ -z "$DL_URL_RE" ] && dl_url_re "$(retrieve_meta downloadURLRegexp)"
echo "DL_URL_RE: $DL_URL_RE" >&2
}
directory_of () {
cd "$(dirname "$1")"; pwd
}
full_path () {
echo "$(directory_of "$1")/$(basename "$1")"
}
target () {
CURRENT_TARGET="$1"
{ [ "$CURRENT_TARGET" = "${CURRENT_TARGET#/}" ] && CURRENT_TARGET="$CONFIG_DIR/$CURRENT_TARGET"; }
echo "Target set to: $CURRENT_TARGET"
}
marker () {
BEGIN_EXPRESSION="$1"
}
update_found () {
echo "Compare: $CURRENT_VERSION vs $PACKAGED_VERSION"
[ "$CURRENT_VERSION" != "$PACKAGED_VERSION" ]
}
do_write_expression () {
echo "${1}rec {"
echo "${1} baseName=\"$CURRENT_NAME\";"
echo "${1} version=\"$CURRENT_VERSION\";"
echo "${1} name=\"\${baseName}-\${version}\";"
echo "${1} hash=\"$CURRENT_HASH\";"
echo "${1} url=\"$CURRENT_URL\";"
[ -n "$CURRENT_REV" ] && echo "${1} rev=\"$CURRENT_REV\";"
echo "${1} sha256=\"$CURRENT_HASH\";"
echo "$2"
}
line_position () {
file="$1"
regexp="$2"
count="${3:-1}"
grep -E "$regexp" -m "$count" -B 999999 "$file" | wc -l
}
replace_once () {
file="$1"
regexp="$2"
replacement="$3"
instance="${4:-1}"
echo "Replacing once:"
echo "file: [[$file]]"
echo "regexp: [[$regexp]]"
echo "replacement: [[$replacement]]"
echo "instance: [[$instance]]"
position="$(line_position "$file" "$regexp" "$instance")"
sed -re "${position}s $regexp $replacement " -i "$file"
}
set_var_value () {
var="${1}"
value="${2}"
instance="${3:-1}"
file="${4:-$CURRENT_TARGET}"
no_quotes="${5:-0}"
quote='"'
let "$no_quotes" && quote=""
replace_once "$file" "${var} *= *.*" "${var} = ${quote}${value}${quote};" "$instance"
}
do_regenerate () {
BEFORE="$(cat "$1" | grep -F "$BEGIN_EXPRESSION" -B 999999;)"
AFTER_EXPANDED="$(cat "$1" | grep -F "$BEGIN_EXPRESSION" -A 999999 | grep -E '^ *[}] *; *$' -A 999999;)"
AFTER="$(echo "$AFTER_EXPANDED" | tail -n +2)"
CLOSE_BRACE="$(echo "$AFTER_EXPANDED" | head -n 1)"
SPACING="$(echo "$CLOSE_BRACE" | sed -re 's/[^ ].*//')"
echo "$BEFORE"
do_write_expression "$SPACING" "$CLOSE_BRACE"
echo "$AFTER"
}
do_overwrite () {
ensure_hash
do_regenerate "$1" > "$1.new.tmp"
mv "$1.new.tmp" "$1"
}
do_overwrite_just_version () {
ensure_hash
set_var_value version $CURRENT_VERSION
set_var_value sha256 $CURRENT_HASH
}
minimize_overwrite() {
do_overwrite(){
do_overwrite_just_version
}
}
process_config () {
CONFIG_DIR="$(directory_of "$1")"
CONFIG_NAME="$(basename "$1")"
BEGIN_EXPRESSION='# Generated upstream information';
if [ -f "$CONFIG_DIR/$CONFIG_NAME" ] &&
[ "${CONFIG_NAME}" = "${CONFIG_NAME%.nix}" ]; then
source "$CONFIG_DIR/$CONFIG_NAME"
else
CONFIG_NAME="${CONFIG_NAME%.nix}"
ensure_attribute_name
[ -n "$(retrieve_meta updateWalker)" ] ||
[ -n "$FORCE_UPDATE_WALKER" ] || {
echo "Error: package not marked as safe for update-walker" >&2
echo "Set FORCE_UPDATE_WALKER=1 to override" >&2
exit 1;
}
[ -z "$(retrieve_meta fullRegenerate)" ] && eval "
minimize_overwrite
"
fi
ensure_attribute_name
retrieve_version
ensure_dl_url_re
ensure_choice
ensure_version
ensure_target
update_found && do_overwrite "$CURRENT_TARGET"
}
source "$own_dir/update-walker-service-specific.sh"
process_config "$1"

@@ -1,118 +0,0 @@
update-walker is an imperative semiautomated update helper.
It runs the X.upstream file to find the freshest version of the package in
the specified upstream source and updates the corresponding X.nix file.
The simplest available commands:
url: set the upstream source list URL equal to $1; the default is
meta.downloadPage with meta.homepage fallback
dl_url_re: set the regular expression used to select download links to $1; the
default is meta.downloadURLRegexp or '[.]tar[.]([^./])+\$' if it is not set
target: specify target expression; default is to replace .upstream extension
with .nix extension
name: specify the derivation name; default is the basename of the dirname
of the .upstream file
attribute_name: specify the attribute name to evaluate for getting the current
version from meta.version; default is to use the derivation name
minimize_overwrite: configure the updater so that only the version= and
sha256= assignments are replaced; the default is to regenerate a full upstream
description block with url, name, version, hash, etc.
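A minimal hypothetical X.upstream file using only these commands could look like this:
url http://example.com/downloads/
dl_url_re '[.]tar[.]gz$'
attribute_name hello
minimize_overwrite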
A lot of packages can be updated in a pseudo-declarative style using only
the commands from the previous paragraph.
Some packages do not need any non-default settings; in these cases, just setting
meta.updateWalker to true is enough, and you can run update-walker directly on the
.nix file afterwards. In this case minimize_overwrite is implied unless
meta.fullRegenerate is set.
For packages that require more fine-grained control than the described options
allow, you need to take into account the default control flow of the tool.
First, the definitions from the update-walker script and additional definitions
from update-walker-service-specific.sh are loaded. Then the config is executed
as a shell script. Some of the commands it can use do remember whether they
have been used. Afterwards the following steps happen:
attribute_name is set to name unless it has already been set
meta.version is read from the Nixpkgs package called attribute_name
the download URL regexp is set to the default unless it has already been set in the
updater script
the download page URL is set to the default value unless it has been set
previously
if the action of getting the download page and choosing the freshest link by
version has not yet been taken, it happens
if the version has not yet been extracted from the URL, it gets extracted
the target Nix expression to update is set to the default value unless it has
been set explicitly
if the URL version is fresher than the packaged version, the new file gets
downloaded and its hash is calculated
do_overwrite function is called; the default calculates a big upstream data
block and puts it after the '# Generated upstream information' marker (the
marker can be changed by the command marker)
If the update needs some special logic, it is put into the updater script and
the corresponding steps are skipped because the needed action has already been
performed.
For example:
minimize_overwrite is exactly the same as
do_overwrite() { do_overwrite_just_version; }
redefinition. You can do a more complex do_overwrite redefinition, if needed.
It can probably use ensure_hash to download the source and calculate the hash
and set_var_value.
set_var_value alters the $3-th assignment of the name $1 in the expression,
setting it to the value $2; $3 defaults to 1. It can modify the file $4 instead of
the current target, and it puts the value without quotes if $5 is 1.
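For instance, the kind of do_overwrite redefinition described here is the pattern several updater files in this tree follow (a sketch):
do_overwrite() {
  ensure_hash
  set_var_value version "$CURRENT_VERSION"
  set_var_value sha256 "$CURRENT_HASH"
}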
Typical steps include:
ensure_choice: download the current URL and find the freshest version link on the
page; that link becomes the new URL
ensure_hash: download current URL and calculate the source package hash
ensure_version: extract version from the URL
SF_redirect: replace the current URL with a SourceForge.net mirror:// URL
SF_version_dir: assume SourceForge.net layout and choose the freshest
version-named subdirectory in the file catalog; you can optionally specify $1
as a directory name regexp (digits and periods will be required after it)
SF_version_tarball: assume SourceForge.net layout and choose the freshest
tarball download link
version: apply replacement of $1 with $2 (extended regexp format) to extract
the version from the URL
version_link: choose the freshest versioned link; $1 is the regexp of
acceptable links
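Putting the typical steps together, a hypothetical updater for a plain download page might read:
url http://example.org/releases/
version_link '[.]tar[.]gz$'
version '.*-([0-9.]+)[.]tar[.]gz$' '\1'
do_overwrite() { do_overwrite_just_version; }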

@@ -1,6 +0,0 @@
url https://sourceforge.net/projects/freepascal/files/Source/
SF_version_dir
version_link 'fpcbuild-[0-9.]+[.]tar[.]gz/download$'
SF_redirect
version '.*-([0-9.]+)[.]tar[.]gz' '\1'
do_overwrite () { do_overwrite_just_version; }

@@ -119,7 +119,5 @@ stdenv.mkDerivation rec {
}
'');
meta = sbclBootstrap.meta // {
updateWalker = true;
};
meta = sbclBootstrap.meta;
}

@@ -1,5 +0,0 @@
url https://sourceforge.net/projects/regina-rexx/files/regina-rexx/
SF_version_dir
SF_version_tarball
SF_redirect
minimize_overwrite

@@ -1,10 +0,0 @@
url https://botan.randombit.net/
version_link 'Botan-[0-9]+([.][0-9]+)*[.](tgz|tbz|tbz2|tar[.]bz2)$'
ensure_version
attribute_name botan2
do_overwrite(){
ensure_hash
set_var_value sha256 $CURRENT_HASH
set_var_value baseVersion ${CURRENT_VERSION%.*}
set_var_value revision ${CURRENT_VERSION##*.}
}

@@ -1,9 +0,0 @@
url http://botan.randombit.net/download.html
version_link 'Botan-[0-9]+[.][0-9]*[02468]([.][0-9]+)?[.](tgz|tbz|tbz2|tar[.]bz2)$'
ensure_version
do_overwrite(){
ensure_hash
set_var_value sha256 $CURRENT_HASH
set_var_value baseVersion ${CURRENT_VERSION%.*}
set_var_value revision ${CURRENT_VERSION##*.}
}

@@ -1,4 +0,0 @@
url http://eigen.tuxfamily.org/
ensure_choice
version '.*/([-0-9.]+)[.]tar[.].*' '\1'
do_overwrite() { do_overwrite_just_version; }

@@ -51,6 +51,5 @@ stdenv.mkDerivation rec {
platforms = platforms.unix;
homepage = "https://www.flintlib.org/";
downloadPage = "https://www.flintlib.org/downloads.html";
updateWalker = true;
};
}

@@ -24,6 +24,5 @@ stdenv.mkDerivation rec {
maintainers = [lib.maintainers.raskin];
platforms = lib.platforms.unix;
homepage = "https://cs.uwaterloo.ca/~astorjoh/iml.html";
updateWalker = true;
};
}

@@ -20,6 +20,5 @@ stdenv.mkDerivation rec {
platforms = lib.platforms.unix;
homepage = "https://wiki.documentfoundation.org/DLP/Libraries/libzmf";
downloadPage = "http://dev-www.libreoffice.org/src/libzmf/";
updateWalker = true;
};
}

@@ -22,8 +22,6 @@ stdenv.mkDerivation rec {
description = "A lightweight C library for the Jabber protocol";
platforms = platforms.all;
downloadPage = "http://mcabber.com/files/loudmouth/";
downloadURLRegexp = "loudmouth-[0-9.]+[.]tar[.]bz2$";
updateWalker = true;
license = licenses.lgpl21;
};
}

@@ -1,10 +0,0 @@
url https://gitlab.com/mdds/mdds
version_link 'mdds-.*[.]tar[.][a-z0-9]+$'
version '.*-([0-9.]+)[.]tar[.].*' '\1'
do_overwrite(){
ensure_hash
ensure_version
set_var_value version $CURRENT_VERSION
set_var_value sha256 $CURRENT_HASH
}

@@ -21,6 +21,5 @@ stdenv.mkDerivation rec {
platforms = lib.platforms.unix;
downloadPage = "https://mpir.org/downloads.html";
homepage = "https://mpir.org/";
updateWalker = true;
};
}

@@ -1,3 +0,0 @@
url http://ftp.u-tx.net/gnu/osip/
attribute_name libosip
minimize_overwrite

@@ -31,6 +31,5 @@ stdenv.mkDerivation rec {
platforms = platforms.all;
homepage = "http://www.phash.org";
downloadPage = "https://github.com/clearscene/pHash";
updateWalker = true;
};
}

@@ -8,20 +8,11 @@ stdenv.mkDerivation rec {
sha256 = "1y0gikds2nr8jk8smhrl617njk23ymmpxyjb2j1xbj0k82xspv78";
};
passthru = {
updateScript = ''
#!${runtimeShell}
cd ${toString ./.}
${toString path}/pkgs/build-support/upstream-updater/update-walker.sh default.nix
'';
};
meta = {
meta = with lib; {
description = "File open routines to safely open a file when in the presence of an attack";
license = lib.licenses.asl20 ;
maintainers = [lib.maintainers.raskin];
platforms = lib.platforms.all;
license = licenses.asl20;
maintainers = with maintainers; [ raskin ];
platforms = platforms.all;
homepage = "https://research.cs.wisc.edu/mist/safefile/";
updateWalker = true;
};
}

@@ -29,6 +29,5 @@ stdenv.mkDerivation rec {
maintainers = with lib.maintainers; [raskin];
license = lib.licenses.gpl2;
downloadPage = "http://www.soft-switch.org/downloads/spandsp/";
updateWalker = true;
};
}

@@ -73,7 +73,6 @@ stdenv.mkDerivation rec {
description = "XML Security Library in C based on libxml2";
license = lib.licenses.mit;
platforms = with lib.platforms; linux ++ darwin;
updateWalker = true;
};
}
)

@@ -72,6 +72,5 @@ stdenv.mkDerivation rec {
maintainers = with maintainers; [ raskin globin ];
platforms = platforms.unix;
downloadPage = "https://stedolan.github.io/jq/download/";
updateWalker = true;
};
}

@@ -77,6 +77,5 @@ stdenv.mkDerivation rec {
maintainers = with maintainers; [raskin teto];
platforms = platforms.linux ++ platforms.darwin;
downloadPage = "http://luarocks.org/releases/";
updateWalker = true;
};
}

@@ -77,6 +77,5 @@ stdenv.mkDerivation rec {
maintainers = with maintainers; [raskin teto];
platforms = platforms.linux ++ platforms.darwin;
downloadPage = "http://luarocks.org/releases/";
updateWalker = true;
};
}

@@ -1,8 +0,0 @@
url https://sourceforge.net/projects/blobby/files/Blobby%20Volley%202%20%28Linux%29/
SF_version_dir
version_link '[.]tar[.][^.]+/download$'
SF_redirect
do_overwrite(){
do_overwrite_just_version
set_var_value url $CURRENT_URL
}

@@ -1,5 +0,0 @@
minimize_overwrite
url http://www.chiark.greenend.org.uk/~sgtatham/puzzles/puzzles.tar.gz
redirect
NEED_TO_CHOOSE_URL=
version '.*[-_.]([0-9]{8}[.][^.]+)[.]tar[.].*' '\1'

@@ -62,6 +62,5 @@ stdenv.mkDerivation rec {
maintainers = with lib.maintainers; [ raskin ];
platforms = lib.platforms.unix; # Once had cygwin problems
downloadPage = "https://www.jwz.org/xscreensaver/download.html";
updateWalker = true;
};
}

@@ -1,4 +0,0 @@
url http://atoptool.nl/downloadatop.php
version_link '[.]tar[.]'
version '[^0-9]*[-.]([0-9]([-0-9.]*[0-9])?)[.].*' '\1'
minimize_overwrite

@@ -49,6 +49,5 @@ stdenv.mkDerivation rec {
maintainers = with maintainers; [ fpletz ];
license = licenses.gpl2;
downloadPage = "https://www.netfilter.org/projects/iptables/files/";
updateWalker = true;
};
}

@@ -1,8 +0,0 @@
url https://github.com/draios/sysdig/releases
ensure_choice
version '.*/([0-9.]+)[.]tar[.].*' '\1'
do_overwrite () {
ensure_hash
set_var_value sha256 $CURRENT_HASH
set_var_value version $CURRENT_VERSION
}

@@ -1,8 +0,0 @@
url http://nginx.org/en/download.html
version_link '.*-([0-9]+[.][0-9]+([.][0-9]+)*)[.]tar[.][a-z0-9]*$'
do_overwrite() {
ensure_hash
set_var_value version "$CURRENT_VERSION"
set_var_value sha256 "$CURRENT_HASH"
}

@@ -1,8 +0,0 @@
url http://nginx.org/en/download.html
version_link '.*-([0-9]+[.][0-9]*[02468]([.][0-9]+)*)[.]tar[.][a-z0-9]*$'
do_overwrite() {
ensure_hash
set_var_value version "$CURRENT_VERSION"
set_var_value sha256 "$CURRENT_HASH"
}

@@ -236,7 +236,6 @@ let
name = str;
version = str;
tag = str;
updateWalker = bool;
executables = listOf str;
outputsToInstall = listOf str;
position = str;
@@ -244,7 +243,6 @@
repositories = attrsOf str;
isBuildPythonPackage = platforms;
schedulingPriority = int;
downloadURLRegexp = str;
isFcitxEngine = bool;
isIbusEngine = bool;
isGutenprint = bool;

@@ -20,6 +20,5 @@ stdenv.mkDerivation rec {
description = "Tracks changes in the server's cutbuffer and CLIPBOARD selection";
license = lib.licenses.gpl2Plus;
platforms = with lib.platforms; all;
updateWalker = true;
};
}

@@ -135,7 +135,6 @@ in buildPythonApplication rec {
meta = {
homepage = "https://xpra.org/";
downloadPage = "https://xpra.org/src/";
downloadURLRegexp = "xpra-.*[.]tar[.][gx]z$";
description = "Persistent remote applications for X";
platforms = platforms.linux;
license = licenses.gpl2;

@@ -1,8 +0,0 @@
url http://ck.kolivas.org/apps/lrzip/
version_link '[.]tar[.]bz2$'
do_overwrite () {
ensure_hash
set_var_value version "$CURRENT_VERSION"
set_var_value sha256 "$CURRENT_HASH"
}

@@ -47,6 +47,5 @@ stdenv.mkDerivation rec {
platforms = platforms.linux;
license = with licenses; [ gpl2 lgpl21 ];
downloadPage = "http://nilfs.sourceforge.net/en/download.html";
updateWalker = true;
};
}

@@ -17,7 +17,6 @@ stdenv.mkDerivation rec {
platforms = platforms.linux;
license = licenses.gpl2;
downloadPage = "https://sourceforge.net/projects/smbnetfs/files/smbnetfs";
updateWalker = true;
homepage = "https://sourceforge.net/projects/smbnetfs/";
};
}

@@ -1,6 +0,0 @@
url https://sourceforge.net/projects/smbnetfs/files/smbnetfs/
version_link '[-][0-9.]+[a-z]*/$'
version_link '[.]tar[.][a-z0-9]+/download$'
SF_redirect
version '.*[-]([0-9.]+[a-z]*)[.]tar[.].*' '\1'
do_overwrite () { do_overwrite_just_version; }

@@ -23,7 +23,6 @@ stdenv.mkDerivation rec {
maintainers = with maintainers; [ raskin ];
platforms = platforms.linux; # Maybe other non-darwin Unix
downloadPage = "https://ftp.gnu.org/gnu/barcode/";
updateWalker = true;
homepage = "https://www.gnu.org/software/barcode/";
license = licenses.gpl3;
};

@@ -1,8 +0,0 @@
url https://launchpad.net/cuneiform-linux/+download
do_overwrite () {
ensure_hash
set_var_value version "$CURRENT_VERSION"
set_var_value sha256 "$CURRENT_HASH"
set_var_value ' url' "$CURRENT_URL"
}

@@ -1,4 +0,0 @@
url http://ised.sourceforge.net/web_nav.html
SF_version_tarball
SF_redirect
minimize_overwrite

@@ -94,6 +94,5 @@ stdenv.mkDerivation rec {
maintainers = with maintainers; [ sander ];
platforms = with platforms; linux ++ darwin;
repositories.git = "https://github.com/MidnightCommander/mc.git";
updateWalker = true;
};
}

@@ -28,7 +28,6 @@ python3.pkgs.buildPythonApplication rec {
meta = with lib; {
description = "A program for retrieving mail";
homepage = "https://getmail6.org";
updateWalker = true;
license = licenses.gpl2Plus;
maintainers = with maintainers; [ abbe dotlambda ];
};

@@ -36,6 +36,5 @@ stdenv.mkDerivation rec {
platforms = platforms.linux;
homepage = "http://www.garloff.de/kurt/linux/ddrescue/";
license = licenses.gpl2Plus;
updateWalker = true;
};
}

@@ -15,7 +15,6 @@ stdenv.mkDerivation rec {
description = "Disk information utility; displays everything 'df' does and more";
homepage = "https://gentoo.com/di/";
license = licenses.zlib;
updateWalker = true;
maintainers = with maintainers; [ manveru ];
platforms = platforms.all;
};

@@ -37,7 +37,5 @@ stdenv.mkDerivation rec {
maintainers = with lib.maintainers; [ raskin ];
platforms = lib.platforms.gnu ++ lib.platforms.linux; # arbitrary choice
updateWalker = true;
};
}

@@ -1,4 +0,0 @@
url https://sourceforge.net/projects/ipmiutil/files/
SF_version_tarball
SF_redirect
minimize_overwrite