forked from mirrors/nixpkgs

Merge master into staging-next

github-actions[bot] 2023-12-07 06:01:00 +00:00 committed by GitHub
commit 32d842483f
34 changed files with 1634 additions and 1006 deletions

View file

@@ -69,8 +69,8 @@ with lib;
confDir = mkOption {
type = types.path;
description = lib.mdDoc "Spark configuration directory. Spark will use the configuration files (spark-defaults.conf, spark-env.sh, log4j.properties, etc) from this directory.";
default = "${cfg.package}/lib/${cfg.package.untarDir}/conf";
defaultText = literalExpression ''"''${package}/lib/''${package.untarDir}/conf"'';
default = "${cfg.package}/conf";
defaultText = literalExpression ''"''${package}/conf"'';
};
logDir = mkOption {
type = types.path;
@@ -111,9 +111,9 @@ with lib;
Type = "forking";
User = "spark";
Group = "spark";
WorkingDirectory = "${cfg.package}/lib/${cfg.package.untarDir}";
ExecStart = "${cfg.package}/lib/${cfg.package.untarDir}/sbin/start-master.sh";
ExecStop = "${cfg.package}/lib/${cfg.package.untarDir}/sbin/stop-master.sh";
WorkingDirectory = "${cfg.package}/";
ExecStart = "${cfg.package}/sbin/start-master.sh";
ExecStop = "${cfg.package}/sbin/stop-master.sh";
TimeoutSec = 300;
StartLimitBurst=10;
Restart = "always";
@@ -134,9 +134,9 @@ with lib;
serviceConfig = {
Type = "forking";
User = "spark";
WorkingDirectory = "${cfg.package}/lib/${cfg.package.untarDir}";
ExecStart = "${cfg.package}/lib/${cfg.package.untarDir}/sbin/start-worker.sh spark://${cfg.worker.master}";
ExecStop = "${cfg.package}/lib/${cfg.package.untarDir}/sbin/stop-worker.sh";
WorkingDirectory = "${cfg.package}/";
ExecStart = "${cfg.package}/sbin/start-worker.sh spark://${cfg.worker.master}";
ExecStop = "${cfg.package}/sbin/stop-worker.sh";
TimeoutSec = 300;
StartLimitBurst=10;
Restart = "always";
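
The hunks above only change paths inside the module; for reference, a minimal sketch of a two-node NixOS configuration using these options (hostnames, ports, and values mirror the test below and are otherwise illustrative):

  # Sketch only: assumes the services.spark options shown in the hunks above.
  # Master node:
  services.spark.master = {
    enable = true;
    bind = "0.0.0.0";                                    # listen on all interfaces
  };
  networking.firewall.allowedTCPPorts = [ 7077 8080 ];   # RPC port and web UI

  # Worker node:
  services.spark.worker = {
    enable = true;
    master = "master:7077";                              # host:port of the master
  };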

View file

@@ -1,28 +1,48 @@
import ../make-test-python.nix ({...}: {
name = "spark";
{ pkgs, ... }:
nodes = {
worker = { nodes, pkgs, ... }: {
services.spark.worker = {
enable = true;
master = "master:7077";
};
virtualisation.memorySize = 2048;
};
master = { config, pkgs, ... }: {
services.spark.master = {
enable = true;
bind = "0.0.0.0";
};
networking.firewall.allowedTCPPorts = [ 22 7077 8080 ];
};
let
inherit (pkgs) lib;
tests = {
default = testsForPackage { sparkPackage = pkgs.spark; };
};
testScript = ''
master.wait_for_unit("spark-master.service")
worker.wait_for_unit("spark-worker.service")
worker.copy_from_host( "${./spark_sample.py}", "/spark_sample.py" )
assert "<title>Spark Master at spark://" in worker.succeed("curl -sSfkL http://master:8080/")
worker.succeed("spark-submit --master spark://master:7077 --executor-memory 512m --executor-cores 1 /spark_sample.py")
'';
})
testsForPackage = args: lib.recurseIntoAttrs {
sparkCluster = testSparkCluster args;
passthru.override = args': testsForPackage (args // args');
};
testSparkCluster = { sparkPackage, ... }: pkgs.nixosTest ({
name = "spark";
nodes = {
worker = { nodes, pkgs, ... }: {
services.spark = {
package = sparkPackage;
worker = {
enable = true;
master = "master:7077";
};
};
virtualisation.memorySize = 2048;
};
master = { config, pkgs, ... }: {
services.spark = {
package = sparkPackage;
master = {
enable = true;
bind = "0.0.0.0";
};
};
networking.firewall.allowedTCPPorts = [ 22 7077 8080 ];
};
};
testScript = ''
master.wait_for_unit("spark-master.service")
worker.wait_for_unit("spark-worker.service")
worker.copy_from_host( "${./spark_sample.py}", "/spark_sample.py" )
assert "<title>Spark Master at spark://" in worker.succeed("curl -sSfkL http://master:8080/")
worker.succeed("spark-submit --version | systemd-cat")
worker.succeed("spark-submit --master spark://master:7077 --executor-memory 512m --executor-cores 1 /spark_sample.py")
'';
});
in tests

View file

@@ -16,7 +16,7 @@ python3.pkgs.buildPythonPackage rec {
postPatch = ''
substituteInPlace setup.py \
--replace "'rpi-ws281x>=4.3.0; platform_system == \"Linux\"'," "" \
--replace '"sentry-sdk==1.14.0",' "sentry-sdk" \
--replace "sentry-sdk==1.14.0" "sentry-sdk" \
--replace "~=" ">="
'';

View file

@@ -21,11 +21,11 @@
let
pname = "sparrow";
version = "1.7.9";
version = "1.8.1";
src = fetchurl {
url = "https://github.com/sparrowwallet/${pname}/releases/download/${version}/${pname}-${version}-x86_64.tar.gz";
sha256 = "0bz8mx6mszqadx7nlb4ini45r2r57grdgmrq6k9lxgrgcpd8gasy";
sha256 = "sha256-dpYGMclYMjxjUbIcSZ7V54I1LTVfHxAKH9+7CaprD4U=";
};
launcher = writeScript "sparrow" ''

View file

@@ -2123,19 +2123,19 @@ let
kddejong.vscode-cfn-lint =
let
inherit (python3Packages) cfn-lint;
inherit (python3Packages) cfn-lint pydot;
in
buildVscodeMarketplaceExtension {
mktplcRef = {
name = "vscode-cfn-lint";
publisher = "kddejong";
version = "0.21.0";
version = "0.25.1";
sha256 = "sha256-IueXiN+077tiecAsVCzgYksWYTs00mZv6XJVMtRJ/PQ=";
};
nativeBuildInputs = [ jq moreutils ];
buildInputs = [ cfn-lint ];
buildInputs = [ cfn-lint pydot ];
postInstall = ''
cd "$out/$installPrefix"

View file

@@ -28,6 +28,9 @@ vscode-utils.buildVscodeMarketplaceExtension {
EOF
}
jq "$(print_jq_query)" ./package.json | sponge ./package.json
# Add a link from temp to /tmp so that the extension gets a writable
# directory to write to.
ln -s /tmp temp
'';
meta = {

View file

@@ -2,11 +2,11 @@
let
pname = "mobilecoin-wallet";
version = "1.5.0";
version = "1.8.0";
name = "${pname}-${version}";
src = fetchurl {
url = "https://github.com/mobilecoinofficial/desktop-wallet/releases/download/v${version}/MobileCoin-Wallet-${version}.AppImage";
sha256 = "sha256-zSTtnKvgcDSiicEDuVK2LN2d8WHiGReYI3XLBmm3Fbo=";
url = "https://github.com/mobilecoinofficial/desktop-wallet/releases/download/v${version}/MobileCoin.Wallet-${version}.AppImage";
hash = "sha256-XGU/xxsMhOBAh+MeMtL2S707yH8HnoO9w5l7zqjO6rs=";
};
appimageContents = appimageTools.extractType2 { inherit name src; };
@@ -34,6 +34,7 @@ in appimageTools.wrapType2 {
homepage = "https://github.com/mobilecoinofficial/desktop-wallet";
license = licenses.gpl3Only;
maintainers = with maintainers; [ wolfangaukang ];
mainProgram = "mobilecoin-wallet";
platforms = [ "x86_64-linux" ];
};
}

View file

@@ -31,6 +31,9 @@ stdenv.mkDerivation rec {
mv usr $out
mv opt $out
# binary is not used and probably vulnerable to CVE(s)
rm $out/opt/TheDesk/thedesk
substituteInPlace $out/share/applications/thedesk.desktop \
--replace '/opt/TheDesk' $out/bin

View file

@@ -3,63 +3,55 @@
, fetchzip
, makeWrapper
, jdk8
, python3Packages
, extraPythonPackages ? [ ]
, python3
, python310
, coreutils
, hadoopSupport ? true
, hadoop
, RSupport ? true
, R
, nixosTests
}:
let
spark = { pname, version, hash, extraMeta ? {} }:
stdenv.mkDerivation rec {
inherit pname version;
jdk = if hadoopSupport then hadoop.jdk else jdk8;
spark = { pname, version, hash, extraMeta ? {}, pysparkPython ? python3 }:
stdenv.mkDerivation (finalAttrs: {
inherit pname version hash hadoop R pysparkPython;
inherit (finalAttrs.hadoop) jdk;
src = fetchzip {
url = "mirror://apache/spark/${pname}-${version}/${pname}-${version}-bin-without-hadoop.tgz";
inherit hash;
url = with finalAttrs; "mirror://apache/spark/${pname}-${version}/${pname}-${version}-bin-without-hadoop.tgz";
inherit (finalAttrs) hash;
};
nativeBuildInputs = [ makeWrapper ];
buildInputs = [ jdk python3Packages.python ]
++ extraPythonPackages
++ lib.optional RSupport R;
buildInputs = with finalAttrs; [ jdk pysparkPython ]
++ lib.optional RSupport finalAttrs.R;
untarDir = "${pname}-${version}";
installPhase = ''
mkdir -p $out/{lib/${untarDir}/conf,bin,/share/java}
mv * $out/lib/${untarDir}
cp $out/lib/${untarDir}/conf/log4j.properties{.template,} || \
cp $out/lib/${untarDir}/conf/log4j2.properties{.template,}
cat > $out/lib/${untarDir}/conf/spark-env.sh <<- EOF
export JAVA_HOME="${jdk}"
export SPARK_HOME="$out/lib/${untarDir}"
'' + lib.optionalString hadoopSupport ''
export SPARK_DIST_CLASSPATH=$(${hadoop}/bin/hadoop classpath)
'' + ''
export PYSPARK_PYTHON="${python3Packages.python}/bin/${python3Packages.python.executable}"
export PYTHONPATH="\$PYTHONPATH:$PYTHONPATH"
${lib.optionalString RSupport ''
export SPARKR_R_SHELL="${R}/bin/R"
export PATH="\$PATH:${R}/bin"''}
EOF
for n in $(find $out/lib/${untarDir}/bin -type f ! -name "*.*"); do
makeWrapper "$n" "$out/bin/$(basename $n)"
substituteInPlace "$n" --replace dirname ${coreutils.out}/bin/dirname
mkdir -p "$out/opt"
mv * $out/
for n in $(find $out/bin -type f -executable ! -name "find-spark-home"); do
wrapProgram "$n" --set JAVA_HOME "${finalAttrs.jdk}" \
--run "[ -z $SPARK_DIST_CLASSPATH ] && export SPARK_DIST_CLASSPATH=$(${finalAttrs.hadoop}/bin/hadoop classpath)" \
${lib.optionalString RSupport ''--set SPARKR_R_SHELL "${finalAttrs.R}/bin/R"''} \
--prefix PATH : "${
lib.makeBinPath (
[ finalAttrs.pysparkPython ] ++
(lib.optionals RSupport [ finalAttrs.R ])
)}"
done
for n in $(find $out/lib/${untarDir}/sbin -type f); do
# Spark deprecated scripts with "slave" in the name.
# This line adds forward compatibility with the nixos spark module for
# older versions of spark that don't have the new "worker" scripts.
ln -s "$n" $(echo "$n" | sed -r 's/slave(s?).sh$/worker\1.sh/g') || true
done
ln -s $out/lib/${untarDir}/lib/spark-assembly-*.jar $out/share/java
ln -s ${finalAttrs.hadoop} "$out/opt/hadoop"
${lib.optionalString RSupport ''ln -s ${finalAttrs.R} "$out/opt/R"''}
'';
passthru = {
tests = nixosTests.spark.default.passthru.override {
sparkPackage = finalAttrs.finalPackage;
};
# Add python packages to PYSPARK_PYTHON
withPythonPackages = f: finalAttrs.finalPackage.overrideAttrs (old: {
pysparkPython = old.pysparkPython.withPackages f;
});
};
meta = {
description = "Apache Spark is a fast and general engine for large-scale data processing";
homepage = "https://spark.apache.org/";
@@ -68,24 +60,23 @@ let
platforms = lib.platforms.all;
maintainers = with lib.maintainers; [ thoughtpolice offline kamilchm illustris ];
} // extraMeta;
};
});
in
{
spark_3_5 = spark rec {
pname = "spark";
version = "3.5.0";
hash = "sha256-f+a4a23aOM0GCDoZlZ7WNXs0Olzyh3yMtO8ZmEoYvZ4=";
};
spark_3_4 = spark rec {
pname = "spark";
version = "3.4.0";
hash = "sha256-0y80dRYzb6Ceu6MlGQHtpMdzOob/TBg6kf8dtF6KyCk=";
version = "3.4.2";
hash = "sha256-qr0tRuzzEcarJznrQYkaQzGqI7tugp/XJpoZxL7tJwk=";
};
spark_3_3 = spark rec {
pname = "spark";
version = "3.3.2";
hash = "sha256-AeKe2QN+mhUJgZRSIgbi/DttAWlDgwC1kl9p7syEvbo=";
extraMeta.knownVulnerabilities = [ "CVE-2023-22946" ];
};
spark_3_2 = spark rec {
pname = "spark";
version = "3.2.4";
hash = "sha256-xL4W+dTWbvmmncq3/8iXmhp24rp5SftvoRfkTyxCI8E=";
extraMeta.knownVulnerabilities = [ "CVE-2023-22946" ];
version = "3.3.3";
hash = "sha256-YtHxRYTwrwSle3UpFjRSwKcnLFj2m9/zLBENH/HVzuM=";
pysparkPython = python310;
};
}
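
The passthru block added above introduces withPythonPackages; a hedged usage sketch (the numpy package is only an illustration, not taken from this commit):

  # Sketch only: relies on the withPythonPackages passthru defined above,
  # which calls overrideAttrs to replace pysparkPython with a withPackages interpreter.
  mySparkWithNumpy = pkgs.spark.withPythonPackages (ps: [ ps.numpy ]);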

View file

@@ -10,7 +10,7 @@ let
src = fetchurl {
url = "https://s3.amazonaws.com/timeular-desktop-packages/linux/production/Timeular-${version}.AppImage";
sha256 = "sha256-RmWRNKy2w3BM/ipQyFpkNC3+XXsJXjN6VYWNo8OKpy0=";
hash = "sha256-kacJSlctE1bNAByH26Qpu609ZNbdkYTx6OUEgCmefqg=";
};
appimageContents = appimageTools.extractType2 {

View file

@@ -173,6 +173,7 @@ rec {
libpulseaudio
libsamplerate
libmikmod
libthai
libtheora
libtiff
pixman

View file

@@ -165,7 +165,7 @@ stdenvNoCC.mkDerivation (args // {
components = lib.pipe args.version [
lib.splitVersion
(lib.filter (x: (lib.strings.match "[0-9]+" x) != null))
(lib.filter (x: (lib.toInt x) < 65535)) # one version component in dotnet has to fit in 16 bits
(lib.filter (x: (lib.toIntBase10 x) < 65535)) # one version component in dotnet has to fit in 16 bits
];
in if (lib.length components) == 0
then null
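
For context on the lib.toInt to lib.toIntBase10 swap above, a sketch of the same filter pipeline with an illustrative version string (the string and the leading-zero remark are assumptions, not taken from this commit):

  # Sketch only: mirrors the pipeline above.
  lib.pipe "8.0.100" [
    lib.splitVersion                                           # [ "8" "0" "100" ]
    (lib.filter (x: (lib.strings.match "[0-9]+" x) != null))   # keep numeric components
    (lib.filter (x: (lib.toIntBase10 x) < 65535))              # each must fit in 16 bits
  ]
  # Assumed difference: lib.toIntBase10 strips leading zeros from a component,
  # which plain lib.toInt rejects.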

View file

@@ -0,0 +1,28 @@
{ lib
, buildGoModule
, fetchFromGitHub
}:
buildGoModule rec {
pname = "drone-scp";
version = "1.6.11";
src = fetchFromGitHub {
owner = "appleboy";
repo = "drone-scp";
rev = "v${version}";
hash = "sha256-JCqiYPhuPKDcbg8eo4DFuUVazu+0e0YTnG87NZRARMU=";
};
vendorHash = "sha256-zPpwvU/shSK1bfm0Qc2VjifSzDTpFnsUiogQfQcdY7I=";
# Needs a specific user...
doCheck = false;
meta = with lib; {
description = "Copy files and artifacts via SSH using a binary, docker or Drone CI";
homepage = "https://github.com/appleboy/drone-scp";
license = licenses.mit;
maintainers = with maintainers; [ ambroisie ];
mainProgram = "drone-scp";
};
}

View file

@@ -16,10 +16,6 @@ appimageTools.wrapType2 rec {
mv $out/bin/{${name},${pname}}
'';
extraPkgs = pkgs: with pkgs; [
libthai
];
meta = with lib; {
description = "A VR coworking platform";
homepage = "https://immersed.com";

View file

@@ -2,12 +2,12 @@
stdenvNoCC.mkDerivation rec {
pname = "cldr-annotations";
version = "43.0";
version = "44.0";
src = fetchzip {
url = "https://unicode.org/Public/cldr/${lib.versions.major version}/cldr-common-${version}.zip";
stripRoot = false;
hash = "sha256-L8ikzRpSw4mDCV79TiUqhPHWC0PmGi4i4He0OAB54R0=";
hash = "sha256-oK+NlzuRF45laEMJKhNDzr12RF4CHIfDFNBFsIjJh1I=";
};
installPhase = ''

View file

@@ -21,6 +21,7 @@ let
#### Services
biometryd = callPackage ./services/biometryd { };
hfd-service = callPackage ./services/hfd-service { };
lomiri-app-launch = callPackage ./development/lomiri-app-launch { };
};
in
lib.makeScope libsForQt5.newScope packages

View file

@@ -0,0 +1,129 @@
{ stdenv
, lib
, fetchFromGitLab
, gitUpdater
, testers
, cmake
, cmake-extras
, curl
, dbus
, dbus-test-runner
, dpkg
, gobject-introspection
, gtest
, json-glib
, libxkbcommon
, lomiri-api
, lttng-ust
, pkg-config
, properties-cpp
, python3
, systemd
, ubports-click
, zeitgeist
, withDocumentation ? true
, doxygen
, python3Packages
, sphinx
}:
stdenv.mkDerivation (finalAttrs: {
pname = "lomiri-app-launch";
version = "0.1.8";
outputs = [
"out"
"dev"
] ++ lib.optionals withDocumentation [
"doc"
];
src = fetchFromGitLab {
owner = "ubports";
repo = "development/core/lomiri-app-launch";
rev = finalAttrs.version;
hash = "sha256-NIBZk5H0bPwAwkI0Qiq2S9dZvchAFPBCHKi2inUVZmI=";
};
postPatch = ''
patchShebangs tests/{desktop-hook-test.sh.in,repeat-until-pass.sh}
# used pkg_get_variable, cannot replace prefix
substituteInPlace data/CMakeLists.txt \
--replace 'DESTINATION "''${SYSTEMD_USER_UNIT_DIR}"' 'DESTINATION "${placeholder "out"}/lib/systemd/user"'
substituteInPlace tests/jobs-systemd.cpp \
--replace '^(/usr)?' '^(/nix/store/\\w+-bash-.+)?'
'';
strictDeps = true;
nativeBuildInputs = [
cmake
dpkg # for setting LOMIRI_APP_LAUNCH_ARCH
gobject-introspection
pkg-config
] ++ lib.optionals withDocumentation [
doxygen
python3Packages.breathe
sphinx
];
buildInputs = [
cmake-extras
curl
dbus
json-glib
libxkbcommon
lomiri-api
lttng-ust
properties-cpp
systemd
ubports-click
zeitgeist
];
nativeCheckInputs = [
dbus
(python3.withPackages (ps: with ps; [
python-dbusmock
]))
];
checkInputs = [
dbus-test-runner
gtest
];
cmakeFlags = [
"-DENABLE_MIRCLIENT=OFF"
"-DENABLE_TESTS=${lib.boolToString finalAttrs.doCheck}"
];
postBuild = lib.optionalString withDocumentation ''
make -C ../docs html
'';
doCheck = stdenv.buildPlatform.canExecute stdenv.hostPlatform;
postInstall = lib.optionalString withDocumentation ''
mkdir -p $doc/share/doc/lomiri-app-launch
mv ../docs/_build/html $doc/share/doc/lomiri-app-launch/
'';
passthru = {
tests.pkg-config = testers.testMetaPkgConfig finalAttrs.finalPackage;
updateScript = gitUpdater { };
};
meta = with lib; {
description = "System and associated utilities to launch applications in a standard and confined way";
homepage = "https://gitlab.com/ubports/development/core/lomiri-app-launch";
license = licenses.gpl3Only;
maintainers = teams.lomiri.members;
platforms = platforms.linux;
pkgConfigModules = [
"lomiri-app-launch-0"
];
};
})

View file

@@ -1,28 +0,0 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, isPyPy
, livestreamer
}:
buildPythonPackage rec {
pname = "livestreamer-curses";
version = "1.5.2";
disabled = isPyPy;
src = fetchFromGitHub {
owner = "gapato";
repo = "livestreamer-curses";
rev = "v${version}";
hash = "sha256-Pi0PIOUhMMAWft9ackB04IgF6DyPrXppNqyVjozIjN4=";
};
propagatedBuildInputs = [ livestreamer ];
meta = with lib; {
homepage = "https://github.com/gapato/livestreamer-curses";
description = "Curses frontend for livestreamer";
license = licenses.mit;
maintainers = with maintainers; [ ];
};
}

View file

@@ -1,38 +0,0 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, isPyPy
, makeWrapper
, rtmpdump
, pycrypto
, requests
}:
buildPythonPackage rec {
pname = "livestreamer";
version = "1.12.2";
disabled = isPyPy;
src = fetchFromGitHub {
owner = "chrippa";
repo = "livestreamer";
rev = "v${version}";
hash = "sha256-PqqyBh+oMmu7Ynly3fqx/+6mQYX+6SpI0Okj2O+YLz0=";
};
nativeBuildInputs = [ makeWrapper ];
propagatedBuildInputs = [ rtmpdump pycrypto requests ];
postInstall = ''
wrapProgram $out/bin/livestreamer --prefix PATH : ${lib.makeBinPath [ rtmpdump ]}
'';
meta = with lib; {
homepage = "http://livestreamer.tanuki.se";
description = "Livestreamer is CLI program that extracts streams from various services and pipes them into a video player of choice";
license = licenses.bsd2;
maintainers = with maintainers; [ ];
};
}

View file

@@ -1,22 +1,39 @@
{ lib
, buildPythonPackage
, fetchPypi
, pytestCheckHook
, pythonOlder
, setuptools
}:
buildPythonPackage rec {
pname = "python-osc";
version = "1.8.3";
pyproject = true;
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
hash = "sha256-pc4bpWyNgt9Ryz8pRrXdM6cFInkazEuFZOYtKyCtnKo=";
};
pythonImportsCheck = [ "pythonosc" ];
nativeBuildInputs = [
setuptools
];
nativeCheckInputs = [
pytestCheckHook
];
pythonImportsCheck = [
"pythonosc"
];
meta = with lib; {
description = "Open Sound Control server and client in pure python";
homepage = "https://github.com/attwad/python-osc";
changelog = "https://github.com/attwad/python-osc/blob/v${version}/CHANGELOG.md";
license = licenses.unlicense;
maintainers = with maintainers; [ anirrudh ];
};

View file

@@ -5,14 +5,14 @@
python3.pkgs.buildPythonApplication rec {
pname = "checkov";
version = "3.1.25";
version = "3.1.26";
pyproject = true;
src = fetchFromGitHub {
owner = "bridgecrewio";
repo = "checkov";
rev = "refs/tags/${version}";
hash = "sha256-DNMS22IatzdaqHDDNVnhdAGvXGcgnU6lnaLrhESh2x4=";
hash = "sha256-EUfrIrn8vRAKN0RShiWDYd5L9/7cgmNJZT9lM/ee5/s=";
};
patches = [

View file

@@ -8,13 +8,13 @@
python3.pkgs.buildPythonApplication rec {
pname = "memray";
version = "1.10.0";
version = "1.11.0";
format = "setuptools";
src = fetchFromGitHub {
owner = "bloomberg";
repo = pname;
rev = "refs/tags/${version}";
repo = "memray";
rev = "refs/tags/v${version}";
hash = "sha256-DaJ1Hhg7q4ckA5feUx0twOsmy28v5aBBCTUAkn43xAo=";
};
@@ -63,9 +63,9 @@ python3.pkgs.buildPythonApplication rec {
meta = with lib; {
description = "Memory profiler for Python";
homepage = "https://bloomberg.github.io/memray/";
changelog = "https://github.com/bloomberg/memray/releases/tag/v${version}";
license = licenses.asl20;
maintainers = with maintainers; [ fab ];
platforms = platforms.linux;
changelog = "https://github.com/bloomberg/memray/releases/tag/${version}";
};
}

File diff suppressed because it is too large.

View file

@@ -1,6 +1,6 @@
{
"version": "2.0.1",
"integrity": "sha512-Xvlzso5vJAYfbO/N/6CCmcEnpHAv/PF4D6RqAvr8BFoPjCmYFwKDjOHEHjaPtrJYY1gWEDN5gaukZfqcAxiDFg==",
"filename": "mongosh-2.0.1.tgz",
"deps": "sha256-wICy0PoMQ6ypiZL/4Yf2l9KNXC9LNNdzy8EmhwK3kws="
"version": "2.1.1",
"integrity": "sha512-2Gw9fJp2ebK2Gx7QT6sg7FCEpTfFlRib7hJkRhUO92/irGDTDtH6lYU45A4jRRMwgYVyjCfcfeWC11JQCSfsvg==",
"filename": "mongosh-2.1.1.tgz",
"deps": "sha256-F/ACpX5BsnPttrYRabWsTgUN6uABQ6RHdGc2h6qMbp8="
}

View file

@@ -0,0 +1,22 @@
{ lib, buildGoModule, fetchFromGitHub }:
buildGoModule rec {
pname = "atlas-exporter";
version = "1.0.4";
src = fetchFromGitHub {
owner = "czerwonk";
repo = "atlas_exporter";
rev = version;
sha256 = "sha256-vhUhWO7fQpUHT5nyxbT8AylgUqDNZRSb+EGRNGZJ14E=";
};
vendorHash = "sha256-tR+OHxj/97AixuAp0Kx9xQsKPAxpvF6hDha5BgMBha0=";
meta = with lib; {
description = "Prometheus exporter for RIPE Atlas measurement results ";
homepage = "https://github.com/czerwonk/atlas_exporter";
license = licenses.lgpl3;
maintainers = with maintainers; [ clerie ];
};
}

View file

@@ -7,19 +7,19 @@
buildGoModule rec {
pname = "trivy";
version = "0.47.0";
version = "0.48.0";
src = fetchFromGitHub {
owner = "aquasecurity";
repo = pname;
rev = "refs/tags/v${version}";
hash = "sha256-Ahy6wiBoOwS3MIrAIg2gIk2gkmphkCJUYAUtqVydA2Q=";
hash = "sha256-NINEitFZm1d0foG1P+evLiXXNVNwzK3PMCicksDaBFc=";
};
# Hash mismatch on across Linux and Darwin
proxyVendor = true;
vendorHash = "sha256-qG4z52oVa3sgu8QKX8UbHsk/aSfacgZShX298WUu2oU=";
vendorHash = "sha256-EYcOOQBwzXu87q0EfJr7TUypGJW3qtosP3ARLssPOS8=";
subPackages = [ "cmd/trivy" ];

View file

@@ -5,13 +5,13 @@
python3.pkgs.buildPythonApplication rec {
pname = "cfripper";
version = "1.15.1";
version = "1.15.2";
src = fetchFromGitHub {
owner = "Skyscanner";
repo = pname;
rev = "refs/tags/${version}";
hash = "sha256-Gtg4zoMTOW/nrFm7PF4/96VGcMALg2msVZ3E7lGm5KA=";
hash = "sha256-SmD3Dq5LicPRe3lWFsq4zqM/yDZ1LsgRwSUA5/RbN9I=";
};
propagatedBuildInputs = with python3.pkgs; [

View file

@@ -7,13 +7,13 @@
buildGoModule rec {
pname = "gotestwaf";
version = "0.4.8";
version = "0.4.9";
src = fetchFromGitHub {
owner = "wallarm";
repo = pname;
rev = "refs/tags/v${version}";
hash = "sha256-5cvyKCzisfuoMPuQpiRnIBCTw2yMdA1yavKNS9OwW58=";
hash = "sha256-fBmn+p5uVGnI4lyL12cX4S8eda79k2Y0RPQG9iZQs2Q=";
};
vendorHash = null;

View file

@@ -24,5 +24,6 @@ rustPlatform.buildRustPackage {
homepage = "https://github.com/crev-dev/rblake2sum";
license = [ licenses.mit ];
maintainers = with maintainers; [ dpc ];
mainProgram = "rblake2sum";
};
}

View file

@@ -0,0 +1,29 @@
{ lib
, stdenv
, rustPlatform
, fetchFromGitHub
, Security
}:
rustPlatform.buildRustPackage {
pname = "rblake3sum";
version = "0.4.0";
src = fetchFromGitHub {
owner = "rustshop";
repo = "rblake3sum";
rev = "6a8e2576ccc05214eacb75b75a9d4cfdf272161c";
hash = "sha256-UFk6SJVA58WXhH1CIuT48MEF19yPUe1HD+ekn4LDj8g=";
};
cargoHash = "sha256-SE/Zg/UEV/vhB/VDcn8Y70OUIoxbJBh6H2QgFMkWPc4=";
buildInputs = lib.optionals stdenv.isDarwin [ Security ];
meta = with lib; {
description = "A recursive blake3 digest (hash) of a file-system path";
homepage = "https://github.com/rustshop/rblake3sum";
license = [ licenses.mit ];
maintainers = with maintainers; [ dpc ];
mainProgram = "rblake3sum";
};
}

View file

@@ -6,16 +6,17 @@
buildGoModule rec {
pname = "xe-guest-utilities";
version = "7.30.0";
version = "8.3.1";
src = fetchFromGitHub {
owner = "xenserver";
repo = "xe-guest-utilities";
rev = "v${version}";
hash = "sha256-gMb8QIUg8t0SiTtUzqeh4XM5hHgCXuf5KlV3OeoU0LI=";
hash = "sha256-d0WdezcT44ExeHSnoJ3Dn0u/IRlhWreOZPSVw6Q1h/w=";
};
vendorHash = "sha256-zhpDvo8iujE426/gxJY+Pqfv99vLNKHqyMQbbXIKodY=";
deleteVendor = true;
vendorHash = "sha256-X/BI+ZhoqCGCmJfccyEBVgZc70aRTp3rL5j+rBWG5fE=";
postPatch = ''
substituteInPlace mk/xen-vcpu-hotplug.rules \

View file

@@ -6324,6 +6324,10 @@ with pkgs;
inherit (darwin.apple_sdk.frameworks) Security;
};
rblake3sum = callPackage ../tools/security/rblake3sum {
inherit (darwin.apple_sdk.frameworks) Security;
};
reg = callPackage ../tools/virtualization/reg { };
retool = callPackage ../applications/misc/retool { };
@@ -18034,8 +18038,8 @@ with pkgs;
smiley-sans = callPackage ../data/fonts/smiley-sans { };
inherit (callPackages ../applications/networking/cluster/spark { })
spark_3_4 spark_3_3 spark_3_2;
spark3 = spark_3_4;
spark_3_5 spark_3_4 spark_3_3;
spark3 = spark_3_5;
spark = spark3;
sparkleshare = callPackage ../applications/version-management/sparkleshare { };
@@ -27143,6 +27147,7 @@ with pkgs;
prometheus-alertmanager = callPackage ../servers/monitoring/prometheus/alertmanager.nix { };
prometheus-apcupsd-exporter = callPackage ../servers/monitoring/prometheus/apcupsd-exporter.nix { };
prometheus-artifactory-exporter = callPackage ../servers/monitoring/prometheus/artifactory-exporter.nix { };
prometheus-atlas-exporter = callPackage ../servers/monitoring/prometheus/atlas-exporter.nix { };
prometheus-aws-s3-exporter = callPackage ../servers/monitoring/prometheus/aws-s3-exporter.nix { };
prometheus-bind-exporter = callPackage ../servers/monitoring/prometheus/bind-exporter.nix { };
prometheus-bird-exporter = callPackage ../servers/monitoring/prometheus/bird-exporter.nix { };

View file

@@ -225,6 +225,8 @@ mapAliases ({
lazy_imports = lazy-imports; # added 2023-10-13
lektor = throw "lektor has been promoted to a top-level attribute"; # added 2023-08-01
line_profiler = line-profiler; # added 2023-11-04
livestreamer = throw "'livestreamer' has been removed, as it unmaintained. A currently maintained fork is 'streamlink'."; # added 2023-11-14
livestreamer-curses = throw "'livestreamer-curses' has been removed as it, and livestreamer itself are unmaintained."; # added 2023-11-14
logilab_astng = throw "logilab-astng has not been released since 2013 and is unmaintained"; # added 2022-11-29
logilab_common = logilab-common; # added 2022-11-21
loo-py = loopy; # added 2022-05-03

View file

@@ -6481,10 +6481,6 @@ self: super: with self; {
livereload = callPackage ../development/python-modules/livereload { };
livestreamer = callPackage ../development/python-modules/livestreamer { };
livestreamer-curses = callPackage ../development/python-modules/livestreamer-curses { };
lizard = callPackage ../development/python-modules/lizard { };
llfuse = callPackage ../development/python-modules/llfuse {