
Merge remote-tracking branch 'origin/master' into staging-next

Author: Martin Weinelt
Date:   2022-04-16 00:52:15 +02:00
Commit: 2bd8fc9378

43 changed files with 3403 additions and 726 deletions


@ -500,7 +500,7 @@ rec {
# based on:
# https://www.mail-archive.com/qemu-discuss@nongnu.org/msg05179.html
# https://gmplib.org/~tege/qemu.html#mips64-debian
mips64el-qemu-linux-gnuabi64 = (import ./examples).mips64el-linux-gnuabi64 // {
mips64el-qemu-linux-gnuabi64 = {
linux-kernel = {
name = "mips64el";
baseConfig = "64r2el_defconfig";


@ -732,6 +732,131 @@
updated.
</para>
</listitem>
<listitem>
<para>
The Keycloak package (<literal>pkgs.keycloak</literal>) has
been switched from the Wildfly version, which will soon be
deprecated, to the Quarkus based version. The Keycloak service
(<literal>services.keycloak</literal>) has been updated to
accommodate the change and now differs from the previous
version in a few ways:
</para>
<itemizedlist>
<listitem>
<para>
<literal>services.keycloak.extraConfig</literal> has been
removed in favor of the new
<link xlink:href="https://github.com/NixOS/rfcs/blob/master/rfcs/0042-config-option.md">settings-style</link>
<link linkend="opt-services.keycloak.settings"><literal>services.keycloak.settings</literal></link>
option. The available options correspond directly to
parameters in <literal>conf/keycloak.conf</literal>. Some
of the most important parameters are documented as
suboptions, the rest can be found in the
<link xlink:href="https://www.keycloak.org/server/all-config">All
configuration section of the Keycloak Server Installation
and Configuration Guide</link>. While the new
configuration is much simpler and cleaner than the old
JBoss CLI one, this unfortunately means that there's no
straightforward way to convert an old configuration to the
new format and some settings may not even be available
anymore.
</para>
</listitem>
<listitem>
<para>
<literal>services.keycloak.frontendUrl</literal> was
removed and the frontend URL is now configured through the
<literal>hostname</literal> family of settings in
<link linkend="opt-services.keycloak.settings"><literal>services.keycloak.settings</literal></link>
instead. See the
<link xlink:href="https://www.keycloak.org/server/hostname">Hostname
section of the Keycloak Server Installation and
Configuration Guide</link> for more details. Additionally,
<literal>/auth</literal> was removed from the default
context path and needs to be added back in
<link linkend="opt-services.keycloak.settings.http-relative-path"><literal>services.keycloak.settings.http-relative-path</literal></link>
if you want to keep compatibility with your current
clients.
</para>
</listitem>
<listitem>
<para>
<literal>services.keycloak.bindAddress</literal>,
<literal>services.keycloak.forceBackendUrlToFrontendUrl</literal>,
<literal>services.keycloak.httpPort</literal> and
<literal>services.keycloak.httpsPort</literal> have been
removed in favor of their equivalent options in
<link linkend="opt-services.keycloak.settings"><literal>services.keycloak.settings</literal></link>.
<literal>httpPort</literal> and
<literal>httpsPort</literal> have additionally had their
types changed from <literal>str</literal> to
<literal>port</literal>.
</para>
<para>
The new names are as follows:
</para>
<itemizedlist spacing="compact">
<listitem>
<para>
<literal>bindAddress</literal>:
<link linkend="opt-services.keycloak.settings.http-host"><literal>services.keycloak.settings.http-host</literal></link>
</para>
</listitem>
<listitem>
<para>
<literal>forceBackendUrlToFrontendUrl</literal>:
<link linkend="opt-services.keycloak.settings.hostname-strict-backchannel"><literal>services.keycloak.settings.hostname-strict-backchannel</literal></link>
</para>
</listitem>
<listitem>
<para>
<literal>httpPort</literal>:
<link linkend="opt-services.keycloak.settings.http-port"><literal>services.keycloak.settings.http-port</literal></link>
</para>
</listitem>
<listitem>
<para>
<literal>httpsPort</literal>:
<link linkend="opt-services.keycloak.settings.https-port"><literal>services.keycloak.settings.https-port</literal></link>
</para>
</listitem>
</itemizedlist>
</listitem>
</itemizedlist>
<para>
For example, when using a reverse proxy the migration could
look like this:
</para>
<para>
Before:
</para>
<programlisting language="bash">
services.keycloak = {
enable = true;
httpPort = &quot;8080&quot;;
frontendUrl = &quot;https://keycloak.example.com/auth&quot;;
database.passwordFile = &quot;/run/keys/db_password&quot;;
extraConfig = {
&quot;subsystem=undertow&quot;.&quot;server=default-server&quot;.&quot;http-listener=default&quot;.proxy-address-forwarding = true;
};
};
</programlisting>
<para>
After:
</para>
<programlisting language="bash">
services.keycloak = {
enable = true;
settings = {
http-port = 8080;
hostname = &quot;keycloak.example.com&quot;;
http-relative-path = &quot;/auth&quot;;
proxy = &quot;edge&quot;;
};
database.passwordFile = &quot;/run/keys/db_password&quot;;
};
</programlisting>
</listitem>
<listitem>
<para>
The MoinMoin wiki engine


@ -290,6 +290,81 @@ In addition to numerous new and upgraded packages, this release has the followin
`media_store_path` was changed from `${dataDir}/media` to `${dataDir}/media_store` if `system.stateVersion` is at least `22.05`. Files will need to be manually moved to the new
location if the `stateVersion` is updated.
- The Keycloak package (`pkgs.keycloak`) has been switched from the
Wildfly version, which will soon be deprecated, to the Quarkus based
version. The Keycloak service (`services.keycloak`) has been updated
to accommodate the change and now differs from the previous version
in a few ways:
- `services.keycloak.extraConfig` has been removed in favor of the
new [settings-style](https://github.com/NixOS/rfcs/blob/master/rfcs/0042-config-option.md)
[`services.keycloak.settings`](#opt-services.keycloak.settings)
option. The available options correspond directly to parameters in
`conf/keycloak.conf`. Some of the most important parameters are
documented as suboptions, the rest can be found in the [All
configuration section of the Keycloak Server Installation and
Configuration
Guide](https://www.keycloak.org/server/all-config). While the new
configuration is much simpler and cleaner than the old JBoss CLI
one, this unfortunately means that there's no straightforward way
to convert an old configuration to the new format and some
settings may not even be available anymore.
- `services.keycloak.frontendUrl` was removed and the frontend URL
is now configured through the `hostname` family of settings in
[`services.keycloak.settings`](#opt-services.keycloak.settings)
instead. See the [Hostname section of the Keycloak Server
Installation and Configuration
Guide](https://www.keycloak.org/server/hostname) for more
details. Additionally, `/auth` was removed from the default
context path and needs to be added back in
[`services.keycloak.settings.http-relative-path`](#opt-services.keycloak.settings.http-relative-path)
if you want to keep compatibility with your current clients.
- `services.keycloak.bindAddress`,
`services.keycloak.forceBackendUrlToFrontendUrl`,
`services.keycloak.httpPort` and `services.keycloak.httpsPort`
have been removed in favor of their equivalent options in
[`services.keycloak.settings`](#opt-services.keycloak.settings). `httpPort`
and `httpsPort` have additionally had their types changed from
`str` to `port`.
The new names are as follows:
- `bindAddress`: [`services.keycloak.settings.http-host`](#opt-services.keycloak.settings.http-host)
- `forceBackendUrlToFrontendUrl`: [`services.keycloak.settings.hostname-strict-backchannel`](#opt-services.keycloak.settings.hostname-strict-backchannel)
- `httpPort`: [`services.keycloak.settings.http-port`](#opt-services.keycloak.settings.http-port)
- `httpsPort`: [`services.keycloak.settings.https-port`](#opt-services.keycloak.settings.https-port)
For example, when using a reverse proxy the migration could look
like this:
Before:
```nix
services.keycloak = {
enable = true;
httpPort = "8080";
frontendUrl = "https://keycloak.example.com/auth";
database.passwordFile = "/run/keys/db_password";
extraConfig = {
"subsystem=undertow"."server=default-server"."http-listener=default".proxy-address-forwarding = true;
};
};
```
After:
```nix
services.keycloak = {
enable = true;
settings = {
http-port = 8080;
hostname = "keycloak.example.com";
http-relative-path = "/auth";
proxy = "edge";
};
database.passwordFile = "/run/keys/db_password";
};
```
- The MoinMoin wiki engine (`services.moinmoin`) has been removed, because Python 2 is being retired from nixpkgs.
- Services in the `hadoop` module previously set `openFirewall` to true by default.


@ -410,6 +410,7 @@
./services/display-managers/greetd.nix
./services/editors/emacs.nix
./services/editors/infinoted.nix
./services/editors/haste.nix
./services/finance/odoo.nix
./services/games/asf.nix
./services/games/crossfire-server.nix


@ -0,0 +1,86 @@
{ config, lib, pkgs, ... }:
with lib;
let
pkg = pkgs.haste-server;
cfg = config.services.haste-server;
format = pkgs.formats.json {};
in
{
options.services.haste-server = {
enable = mkEnableOption "haste-server";
openFirewall = mkEnableOption "firewall passthrough for haste-server";
settings = mkOption {
description = ''
Configuration for haste-server.
For documentation see <link xlink:href="https://github.com/toptal/haste-server#settings">project readme</link>
'';
type = format.type;
};
};
config = mkIf (cfg.enable) {
networking.firewall.allowedTCPPorts = mkIf (cfg.openFirewall) [ cfg.settings.port ];
services.haste-server = {
settings = {
host = mkDefault "::";
port = mkDefault 7777;
keyLength = mkDefault 10;
maxLength = mkDefault 400000;
staticMaxAge = mkDefault 86400;
recompressStaticAssets = mkDefault false;
logging = mkDefault [
{
level = "verbose";
type = "Console";
colorize = true;
}
];
keyGenerator = mkDefault {
type = "phonetic";
};
rateLimits = {
categories = {
normal = {
totalRequests = mkDefault 500;
every = mkDefault 60000;
};
};
};
storage = mkDefault {
type = "file";
};
documents = {
about = mkDefault "${pkg}/share/haste-server/about.md";
};
};
};
systemd.services.haste-server = {
wantedBy = [ "multi-user.target" ];
requires = [ "network.target" ];
after = [ "network.target" ];
serviceConfig = {
User = "haste-server";
DynamicUser = true;
StateDirectory = "haste-server";
WorkingDirectory = "/var/lib/haste-server";
ExecStart = "${pkg}/bin/haste-server ${format.generate "config.json" cfg.settings}";
};
path = with pkgs; [ pkg coreutils ];
};
};
}
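For context, a minimal sketch of how the module above could be enabled from a host configuration; the option names come straight from the module, the port value simply restates the module default, and `openFirewall` is optional:

```nix
{ ... }:

{
  # Enable the haste-server module defined above. Port 7777 is the
  # module default and is repeated here only for illustration;
  # openFirewall additionally opens that port in the firewall.
  services.haste-server = {
    enable = true;
    openFirewall = true;
    settings.port = 7777;
  };
}
```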


@ -81,7 +81,7 @@ in {
(mkRemovedOptionModule [ "services" "matrix-synapse" "verbose" ] "Use a log config instead." )
# options that were moved into rfc42 style settings
(mkRemovedOptionModule [ "services" "matrix-synapse" "app_service_config_files" ] "Use settings.app_service_config_Files instead" )
(mkRemovedOptionModule [ "services" "matrix-synapse" "app_service_config_files" ] "Use settings.app_service_config_files instead" )
(mkRemovedOptionModule [ "services" "matrix-synapse" "database_args" ] "Use settings.database.args instead" )
(mkRemovedOptionModule [ "services" "matrix-synapse" "database_name" ] "Use settings.database.args.database instead" )
(mkRemovedOptionModule [ "services" "matrix-synapse" "database_type" ] "Use settings.database.name instead" )

File diff suppressed because it is too large.


@ -27,10 +27,10 @@
<para>
Refer to the <link
xlink:href="https://www.keycloak.org/docs/latest/server_admin/index.html#admin-console">Admin
Console section of the Keycloak Server Administration Guide</link> for
information on how to administer your
<productname>Keycloak</productname> instance.
xlink:href="https://www.keycloak.org/docs/latest/server_admin/index.html">
Keycloak Server Administration Guide</link> for information on
how to administer your <productname>Keycloak</productname>
instance.
</para>
</section>
@ -38,27 +38,28 @@
<title>Database access</title>
<para>
<productname>Keycloak</productname> can be used with either
<productname>PostgreSQL</productname> or
<productname>PostgreSQL</productname>,
<productname>MariaDB</productname> or
<productname>MySQL</productname>. Which one is used can be
configured in <xref
linkend="opt-services.keycloak.database.type" />. The selected
database will automatically be enabled and a database and role
created unless <xref
linkend="opt-services.keycloak.database.host" /> is changed from
its default of <literal>localhost</literal> or <xref
linkend="opt-services.keycloak.database.createLocally" /> is set
to <literal>false</literal>.
linkend="opt-services.keycloak.database.host" /> is changed
from its default of <literal>localhost</literal> or <xref
linkend="opt-services.keycloak.database.createLocally" /> is
set to <literal>false</literal>.
</para>
<para>
External database access can also be configured by setting
<xref linkend="opt-services.keycloak.database.host" />, <xref
linkend="opt-services.keycloak.database.name" />, <xref
linkend="opt-services.keycloak.database.username" />, <xref
linkend="opt-services.keycloak.database.useSSL" /> and <xref
linkend="opt-services.keycloak.database.caCert" /> as
appropriate. Note that you need to manually create a database
called <literal>keycloak</literal> and allow the configured
database user full access to it.
appropriate. Note that you need to manually create the database
and allow the configured database user full access to it.
</para>
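<para>
As an illustrative sketch only (the host name, certificate path and
password file below are placeholders, not defaults), an external
database connection could be expressed as:
</para>
<programlisting>
services.keycloak.database = {
  type = "postgresql";
  host = "db.example.com";
  name = "keycloak";
  username = "keycloak";
  useSSL = true;
  caCert = "/etc/ssl/certs/db-ca.pem";
  passwordFile = "/run/keys/db_password";
};
</programlisting>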
<para>
@ -79,22 +80,27 @@
</warning>
</section>
<section xml:id="module-services-keycloak-frontendurl">
<title>Frontend URL</title>
<section xml:id="module-services-keycloak-hostname">
<title>Hostname</title>
<para>
The frontend URL is used as base for all frontend requests and
must be configured through <xref linkend="opt-services.keycloak.frontendUrl" />.
It should normally include a trailing <literal>/auth</literal>
(the default web context). If you use a reverse proxy, you need
to set this option to <literal>""</literal>, so that frontend URL
is derived from HTTP headers. <literal>X-Forwarded-*</literal> headers
support also should be enabled, using <link
xlink:href="https://www.keycloak.org/docs/latest/server_installation/index.html#identifying-client-ip-addresses">
respective guidelines</link>.
The hostname is used to build the public URL used as base for
all frontend requests and must be configured through <xref
linkend="opt-services.keycloak.settings.hostname" />.
</para>
<note>
<para>
If you're migrating an old Wildfly based Keycloak instance
and want to keep compatibility with your current clients,
you'll likely want to set <xref
linkend="opt-services.keycloak.settings.http-relative-path"
/> to <literal>/auth</literal>. See the option description
for more details.
</para>
</note>
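<para>
In option form this amounts to a single line (a sketch; include it
only if existing clients expect the old <literal>/auth</literal>
prefix):
</para>
<programlisting>
services.keycloak.settings.http-relative-path = "/auth";
</programlisting>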
<para>
<xref linkend="opt-services.keycloak.forceBackendUrlToFrontendUrl" />
<xref linkend="opt-services.keycloak.settings.hostname-strict-backchannel" />
determines whether Keycloak should force all requests to go
through the frontend URL. By default,
<productname>Keycloak</productname> allows backend requests to
@ -104,10 +110,10 @@
</para>
<para>
See the <link
xlink:href="https://www.keycloak.org/docs/latest/server_installation/#_hostname">Hostname
section of the Keycloak Server Installation and Configuration
Guide</link> for more information.
For more information on hostname configuration, see the <link
xlink:href="https://www.keycloak.org/server/hostname">Hostname
section of the Keycloak Server Installation and Configuration
Guide</link>.
</para>
</section>
@ -139,68 +145,40 @@
<section xml:id="module-services-keycloak-themes">
<title>Themes</title>
<para>
You can package custom themes and make them visible to Keycloak via
<xref linkend="opt-services.keycloak.themes" />
option. See the <link xlink:href="https://www.keycloak.org/docs/latest/server_development/#_themes">
You can package custom themes and make them visible to
Keycloak through <xref linkend="opt-services.keycloak.themes"
/>. See the <link
xlink:href="https://www.keycloak.org/docs/latest/server_development/#_themes">
Themes section of the Keycloak Server Development Guide</link>
and respective NixOS option description for more information.
and the description of the aforementioned NixOS option for
more information.
</para>
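<para>
As a sketch (the theme name and the package
<literal>pkgs.my-keycloak-theme</literal> are hypothetical), a
packaged theme is exposed under the attribute name that realms can
then select in the admin console:
</para>
<programlisting>
services.keycloak.themes = {
  my-custom-theme = pkgs.my-keycloak-theme;
};
</programlisting>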
</section>
<section xml:id="module-services-keycloak-extra-config">
<title>Additional configuration</title>
<section xml:id="module-services-keycloak-settings">
<title>Configuration file settings</title>
<para>
Additional Keycloak configuration options, for which no
explicit <productname>NixOS</productname> options are provided,
can be set in <xref linkend="opt-services.keycloak.extraConfig" />.
Keycloak server configuration parameters can be set in <xref
linkend="opt-services.keycloak.settings" />. These correspond
directly to options in
<filename>conf/keycloak.conf</filename>. Some of the most
important parameters are documented as suboptions, the rest can
be found in the <link
xlink:href="https://www.keycloak.org/server/all-config">All
configuration section of the Keycloak Server Installation and
Configuration Guide</link>.
</para>
<para>
Options are expressed as a Nix attribute set which matches the
structure of the jboss-cli configuration. The configuration is
effectively overlayed on top of the default configuration
shipped with Keycloak. To remove existing nodes and undefine
attributes from the default configuration, set them to
<literal>null</literal>.
</para>
<para>
For example, the following script, which removes the hostname
provider <literal>default</literal>, adds the deprecated
hostname provider <literal>fixed</literal> and defines it the
default:
<programlisting>
/subsystem=keycloak-server/spi=hostname/provider=default:remove()
/subsystem=keycloak-server/spi=hostname/provider=fixed:add(enabled = true, properties = { hostname = "keycloak.example.com" })
/subsystem=keycloak-server/spi=hostname:write-attribute(name=default-provider, value="fixed")
</programlisting>
would be expressed as
<programlisting>
services.keycloak.extraConfig = {
"subsystem=keycloak-server" = {
"spi=hostname" = {
"provider=default" = null;
"provider=fixed" = {
enabled = true;
properties.hostname = "keycloak.example.com";
};
default-provider = "fixed";
};
};
};
</programlisting>
</para>
<para>
You can discover available options by using the <link
xlink:href="http://docs.wildfly.org/21/Admin_Guide.html#Command_Line_Interface">jboss-cli.sh</link>
program and by referring to the <link
xlink:href="https://www.keycloak.org/docs/latest/server_installation/index.html">Keycloak
Server Installation and Configuration Guide</link>.
Options containing secret data should be set to an attribute
set containing the attribute <literal>_secret</literal> - a
string pointing to a file containing the value the option
should be set to. See the description of <xref
linkend="opt-services.keycloak.settings" /> for an example.
</para>
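<para>
For instance (a sketch; <literal>https-key-store-password</literal>
is one of the <filename>conf/keycloak.conf</filename> parameters and
the file path is a placeholder):
</para>
<programlisting>
services.keycloak.settings.https-key-store-password._secret =
  "/run/keys/keystore_password";
</programlisting>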
</section>
<section xml:id="module-services-keycloak-example-config">
<title>Example configuration</title>
<para>
@ -208,9 +186,11 @@ services.keycloak.extraConfig = {
<programlisting>
services.keycloak = {
<link linkend="opt-services.keycloak.enable">enable</link> = true;
settings = {
<link linkend="opt-services.keycloak.settings.hostname">hostname</link> = "keycloak.example.com";
<link linkend="opt-services.keycloak.settings.hostname-strict-backchannel">hostname-strict-backchannel</link> = true;
};
<link linkend="opt-services.keycloak.initialAdminPassword">initialAdminPassword</link> = "e6Wcm0RrtegMEHl"; # change on first login
<link linkend="opt-services.keycloak.frontendUrl">frontendUrl</link> = "https://keycloak.example.com/auth";
<link linkend="opt-services.keycloak.forceBackendUrlToFrontendUrl">forceBackendUrlToFrontendUrl</link> = true;
<link linkend="opt-services.keycloak.sslCertificate">sslCertificate</link> = "/run/keys/ssl_cert";
<link linkend="opt-services.keycloak.sslCertificateKey">sslCertificateKey</link> = "/run/keys/ssl_key";
<link linkend="opt-services.keycloak.database.passwordFile">database.passwordFile</link> = "/run/keys/db_password";


@ -196,6 +196,7 @@ in
hadoop_3_2 = import ./hadoop { inherit handleTestOn; package=pkgs.hadoop_3_2; };
hadoop2 = import ./hadoop { inherit handleTestOn; package=pkgs.hadoop2; };
haka = handleTest ./haka.nix {};
haste-server = handleTest ./haste-server.nix {};
haproxy = handleTest ./haproxy.nix {};
hardened = handleTest ./hardened.nix {};
hedgedoc = handleTest ./hedgedoc.nix {};


@ -0,0 +1,23 @@
import ./make-test-python.nix ({ pkgs, lib, ... }:
{
name = "haste-server";
meta.maintainers = with lib.maintainers; [ mkg20001 ];
nodes.machine = { pkgs, ... }: {
environment.systemPackages = with pkgs; [
curl
jq
];
services.haste-server = {
enable = true;
};
};
testScript = ''
machine.wait_for_unit("haste-server")
machine.wait_until_succeeds("curl -s localhost:7777")
machine.succeed('curl -s -X POST http://localhost:7777/documents -d "Hello World!" > bla')
machine.succeed('curl http://localhost:7777/raw/$(cat bla | jq -r .key) | grep "Hello World"')
'';
})


@ -4,7 +4,7 @@
let
certs = import ./common/acme/server/snakeoil-certs.nix;
frontendUrl = "https://${certs.domain}/auth";
frontendUrl = "https://${certs.domain}";
initialAdminPassword = "h4IhoJFnt2iQIR9";
keycloakTest = import ./make-test-python.nix (
@ -27,20 +27,23 @@ let
services.keycloak = {
enable = true;
inherit frontendUrl initialAdminPassword;
sslCertificate = certs.${certs.domain}.cert;
sslCertificateKey = certs.${certs.domain}.key;
settings = {
hostname = certs.domain;
};
inherit initialAdminPassword;
sslCertificate = "${certs.${certs.domain}.cert}";
sslCertificateKey = "${certs.${certs.domain}.key}";
database = {
type = databaseType;
username = "bogus";
passwordFile = pkgs.writeText "dbPassword" "wzf6vOCbPp6cqTH";
name = "also bogus";
passwordFile = "${pkgs.writeText "dbPassword" "wzf6vOCbPp6cqTH"}";
};
plugins = with config.services.keycloak.package.plugins; [
keycloak-discord
keycloak-metrics-spi
];
};
environment.systemPackages = with pkgs; [
xmlstarlet
html-tidy
@ -99,9 +102,9 @@ let
in ''
keycloak.start()
keycloak.wait_for_unit("keycloak.service")
keycloak.wait_for_open_port(443)
keycloak.wait_until_succeeds("curl -sSf ${frontendUrl}")
### Realm Setup ###
# Get an admin interface access token
@ -117,8 +120,8 @@ let
# Register the metrics SPI
keycloak.succeed(
"${pkgs.jre}/bin/keytool -import -alias snakeoil -file ${certs.ca.cert} -storepass aaaaaa -keystore cacert.jks -noprompt",
"KC_OPTS='-Djavax.net.ssl.trustStore=cacert.jks -Djavax.net.ssl.trustStorePassword=aaaaaa' ${pkgs.keycloak}/bin/kcadm.sh config credentials --server '${frontendUrl}' --realm master --user admin --password '${initialAdminPassword}'",
"KC_OPTS='-Djavax.net.ssl.trustStore=cacert.jks -Djavax.net.ssl.trustStorePassword=aaaaaa' ${pkgs.keycloak}/bin/kcadm.sh update events/config -s 'eventsEnabled=true' -s 'adminEventsEnabled=true' -s 'eventsListeners+=metrics-listener'",
"KC_OPTS='-Djavax.net.ssl.trustStore=cacert.jks -Djavax.net.ssl.trustStorePassword=aaaaaa' kcadm.sh config credentials --server '${frontendUrl}' --realm master --user admin --password '${initialAdminPassword}'",
"KC_OPTS='-Djavax.net.ssl.trustStore=cacert.jks -Djavax.net.ssl.trustStorePassword=aaaaaa' kcadm.sh update events/config -s 'eventsEnabled=true' -s 'adminEventsEnabled=true' -s 'eventsListeners+=metrics-listener'",
"curl -sSf '${frontendUrl}/realms/master/metrics' | grep '^keycloak_admin_event_UPDATE'"
)
@ -172,5 +175,6 @@ let
in
{
postgres = keycloakTest { databaseType = "postgresql"; };
mariadb = keycloakTest { databaseType = "mariadb"; };
mysql = keycloakTest { databaseType = "mysql"; };
}


@ -2,13 +2,13 @@
let
pname = "anytype";
version = "0.24.0";
version = "0.25.0";
name = "Anytype-${version}";
nameExecutable = pname;
src = fetchurl {
url = "https://at9412003.fra1.digitaloceanspaces.com/Anytype-${version}.AppImage";
name = "Anytype-${version}.AppImage";
sha256 = "sha256-QyexUZNn7QGHjXYO/+1kUebTmAzdVpwG9Ile8Uh3i8Q=";
sha256 = "sha256-cfiSZLfaVmxsZWDwulbMHKzHCG7zMKCWwg8q/2MolVs=";
};
appimageContents = appimageTools.extractType2 { inherit name src; };
in


@ -53,17 +53,18 @@ autoPatchelf() {
esac
done
if [ "${autoPatchelfIgnoreMissingDeps[*]}" == "1" ]; then
local ignoreMissingDepsArray=($autoPatchelfIgnoreMissingDeps)
if [ "$autoPatchelfIgnoreMissingDeps" == "1" ]; then
echo "autoPatchelf: WARNING: setting 'autoPatchelfIgnoreMissingDeps" \
"= true;' is deprecated and will be removed in a future release." \
"Use 'autoPatchelfIgnoreMissingDeps = [ \"*\" ];' instead." >&2
autoPatchelfIgnoreMissingDeps=( "*" )
ignoreMissingDepsArray=( "*" )
fi
local runtimeDependenciesArray=($runtimeDependencies)
@pythonInterpreter@ @autoPatchelfScript@ \
${norecurse:+--no-recurse} \
--ignore-missing "${autoPatchelfIgnoreMissingDeps[@]}" \
--ignore-missing "${ignoreMissingDepsArray[@]}" \
--paths "$@" \
--libs "${autoPatchelfLibs[@]}" \
"${extraAutoPatchelfLibs[@]}" \


@ -3,22 +3,13 @@
stdenv.mkDerivation (rec {
pname = "ponyc";
version = "0.44.0";
version = "0.49.0";
src = fetchFromGitHub {
owner = "ponylang";
repo = pname;
rev = version;
sha256 = "0bzdkrrh6lvfqc61kdxvgz573dj32wwzhzwil53jvynhfcwp38ld";
# Due to a bug in LLVM 9.x, ponyc has to include its own vendored patched
# LLVM. (The submodule is a specific tag in the LLVM source tree).
#
# The pony developers are currently working to get off 9.x as quickly
# as possible so hopefully in a few revisions this package build will
# become a lot simpler.
#
# https://reviews.llvm.org/rG9f4f237e29e7150dfcf04ae78fa287d2dc8d48e2
sha256 = "sha256-WS3/POC+2vdx6bA8314sjkdWCIWGu9lJG4kbKMWfnX8=";
fetchSubmodules = true;
};


@ -1,14 +1,16 @@
diff --git a/packages/net/_test.pony b/packages/net/_test.pony
index baf29e7..b63f368 100644
index 9044dfb1..f0ea10f7 100644
--- a/packages/net/_test.pony
+++ b/packages/net/_test.pony
@@ -5,9 +5,6 @@ actor Main is TestList
new make() => None
@@ -26,11 +26,6 @@ actor \nodoc\ Main is TestList
test(_TestTCPThrottle)
end
fun tag tests(test: PonyTest) =>
- // Tests below exclude osx and are listed alphabetically
- ifdef not osx then
- test(_TestBroadcast)
- end
test(_TestTCPWritev)
test(_TestTCPExpect)
test(_TestTCPMute)
-
class \nodoc\ _TestPing is UDPNotify
let _h: TestHelper
let _ip: NetAddress


@ -1,28 +1,33 @@
--- a/lib/CMakeLists.txt.orig 2021-10-01 13:04:00.867762912 -0400
+++ a/lib/CMakeLists.txt 2021-10-01 13:06:21.220023453 -0400
@@ -15,12 +15,12 @@
diff --git a/lib/CMakeLists.txt b/lib/CMakeLists.txt
index dab2aaef..26b587b1 100644
--- a/lib/CMakeLists.txt
+++ b/lib/CMakeLists.txt
@@ -36,7 +36,7 @@ if(${CMAKE_SYSTEM_NAME} STREQUAL "FreeBSD")
endif()
ExternalProject_Add(gbenchmark
- URL ${PONYC_GBENCHMARK_URL}
+ SOURCE_DIR gbenchmark-prefix/src/benchmark
CMAKE_ARGS -DCMAKE_BUILD_TYPE=${PONYC_LIBS_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX} -DBENCHMARK_ENABLE_GTEST_TESTS=OFF -DCMAKE_CXX_FLAGS=-fpic --no-warn-unused-cli
CMAKE_ARGS -DCMAKE_BUILD_TYPE=${PONYC_LIBS_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX} -DBENCHMARK_ENABLE_GTEST_TESTS=OFF -DCMAKE_CXX_FLAGS=${PONY_PIC_FLAG} --no-warn-unused-cli
)
@@ -46,7 +46,7 @@ if(${CMAKE_SYSTEM_NAME} STREQUAL "FreeBSD")
endif()
ExternalProject_Add(googletest
- URL https://github.com/google/googletest/archive/release-1.10.0.tar.gz
- URL ${PONYC_GOOGLETEST_URL}
+ URL @googletest@
CMAKE_ARGS -DCMAKE_BUILD_TYPE=${PONYC_LIBS_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX} -DCMAKE_CXX_FLAGS=-fpic -Dgtest_force_shared_crt=ON --no-warn-unused-cli
CMAKE_ARGS -DCMAKE_BUILD_TYPE=${PONYC_LIBS_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX} -DCMAKE_CXX_FLAGS=${PONY_PIC_FLAG} -Dgtest_force_shared_crt=ON --no-warn-unused-cli
)
@@ -33,82 +33,6 @@
@@ -59,82 +59,6 @@ install(TARGETS blake2
COMPONENT library
)
-find_package(Git)
-
-set(LLVM_DESIRED_HASH "fed41342a82f5a3a9201819a82bf7a48313e296b")
-set(PATCHES_DESIRED_HASH "3a655193262fd9b2e87340e096efcbd96726a07fe6dd42a263f3a4fc2dc0192e")
-set(LLVM_DESIRED_HASH "75e33f71c2dae584b13a7d1186ae0a038ba98838")
-set(PATCHES_DESIRED_HASH "a16f299fbfced16a2bbc628746db341f2a5af9ae8cc9c9ef4b1e9ca26de3c292")
-
-if(GIT_FOUND)
- if(EXISTS "${PROJECT_SOURCE_DIR}/../.git")
@ -57,7 +62,7 @@
-
- # check to see if the patch hashes match
- message("Checking patches ${PONY_LLVM_PATCHES}")
- set(PATCHES_ACTUAL_HASH "")
- set(PATCHES_ACTUAL_HASH "needed_if_no_patches")
- foreach (PATCH ${PONY_LLVM_PATCHES})
- file(STRINGS ${PATCH} patch_file NEWLINE_CONSUME)
- string(REPLACE "\n" " " patch_file ${patch_file})
@ -69,8 +74,8 @@
- string(SHA256 PATCHES_ACTUAL_HASH ${PATCHES_ACTUAL_HASH})
- # message("Desired hash ${PATCHES_DESIRED_HASH}")
- # message("Actual hash ${PATCHES_ACTUAL_HASH}")
- if(NOT PATCHES_ACTUAL_HASH EQUAL "${PATCHES_DESIRED_HASH}")
- message(FATAL_ERROR "Patch hash actual ${PATCHES_ACTUAL_HASH} does not match desired ${PATCHES_DESIRED_HASH}")
- if(NOT PATCHES_ACTUAL_HASH MATCHES "${PATCHES_DESIRED_HASH}")
- message(FATAL_ERROR "Patch hash actual '${PATCHES_ACTUAL_HASH}' does not match desired '${PATCHES_DESIRED_HASH}'")
- endif()
-
- foreach (PATCH ${PONY_LLVM_PATCHES})


@ -2,13 +2,13 @@
stdenv.mkDerivation ( rec {
pname = "corral";
version = "0.5.4";
version = "0.5.7";
src = fetchFromGitHub {
owner = "ponylang";
repo = pname;
rev = version;
sha256 = "1chw56khx5akjxkq0vwrw9ryjpyc3fzdmksh496llc513l01hpkl";
sha256 = "sha256-OLA09C/6s2PyzreBvqFfzsoRDXiRMbdf3Jgnmawr7k4=";
};
buildInputs = [ ponyc ];


@ -1,7 +1,7 @@
{ lib, stdenv, fetchFromGitHub, cmake, fmt_8, fetchpatch }:
{ lib, stdenv, fetchFromGitHub, cmake, fmt_8 }:
let
generic = { version, sha256, patches ? [] }:
generic = { version, sha256 }:
stdenv.mkDerivation {
pname = "spdlog";
inherit version;
@ -13,8 +13,6 @@ let
inherit sha256;
};
inherit patches;
nativeBuildInputs = [ cmake ];
# spdlog <1.3 uses a bundled version of fmt
propagatedBuildInputs = lib.optional (lib.versionAtLeast version "1.3") fmt_8;
@ -38,7 +36,11 @@ let
'';
doCheck = true;
preCheck = "export LD_LIBRARY_PATH=$(pwd)\${LD_LIBRARY_PATH:+:}$LD_LIBRARY_PATH";
preCheck = if stdenv.isDarwin then ''
export DYLD_LIBRARY_PATH="$(pwd)''${DYLD_LIBRARY_PATH:+:}$DYLD_LIBRARY_PATH"
'' else ''
export LD_LIBRARY_PATH="$(pwd)''${LD_LIBRARY_PATH:+:}$LD_LIBRARY_PATH"
'';
meta = with lib; {
description = "Very fast, header only, C++ logging library";
@ -51,15 +53,8 @@ let
in
{
spdlog_1 = generic {
version = "1.9.2";
sha256 = "sha256-GSUdHtvV/97RyDKy8i+ticnSlQCubGGWHg4Oo+YAr8Y=";
patches = [
# glibc 2.34 compat
(fetchpatch {
url = "https://github.com/gabime/spdlog/commit/d54b8e89c058f3cab2b32b3e9a2b49fd171d5895.patch";
sha256 = "sha256-pb7cREF90GXb5Mbs8xFLQ+eLo6Xum13/xYa8JUgJlbI=";
})
];
version = "1.10.0";
sha256 = "sha256-c6s27lQCXKx6S1FhZ/LiKh14GnXMhZtD1doltU4Avws=";
};
spdlog_0 = generic {


@ -13,8 +13,8 @@ stdenv.mkDerivation {
sha256 = "nsm3HgTU9csU91XveQYxzQtFwGA+Ecg2/Hz9niaM0Ho=";
};
buildInputs = [ meson ninja pkg-config gettext flex bison vala glib gtk-doc docbook_xsl docbook_xml_dtd_43 ];
nativeBuildInputs = [ glib gobject-introspection ];
nativeBuildInputs = [ meson ninja pkg-config gettext flex bison vala glib gtk-doc docbook_xsl docbook_xml_dtd_43 gobject-introspection ];
buildInputs = [ glib ];
mesonFlags = [
"-Denable_gtk_doc=true"


@ -0,0 +1,45 @@
{ lib
, aiowinreg
, buildPythonPackage
, colorama
, fetchPypi
, pycryptodomex
, pythonOlder
, tqdm
, unicrypto
}:
buildPythonPackage rec {
pname = "aesedb";
version = "0.0.5";
format = "setuptools";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
hash = "sha256-2m4VxqTD9zvUpZ1O8/SBprAzG4vUX4z3LthMpP5Hc8g=";
};
propagatedBuildInputs = [
aiowinreg
colorama
pycryptodomex
tqdm
unicrypto
];
# Module has no tests
doCheck = false;
pythonImportsCheck = [
"aesedb"
];
meta = with lib; {
description = "Parser for JET databases";
homepage = "https://github.com/skelsec/aesedb";
license = with licenses; [ mit ];
maintainers = with maintainers; [ fab ];
};
}


@ -1,19 +1,25 @@
{ lib
, buildPythonPackage
, fetchPypi
, fetchFromGitHub
, pytestCheckHook
}:
buildPythonPackage rec {
pname = "asn1crypto";
version = "1.5.1";
format = "setuptools";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-E644UCvmMhFav4oky+X02lLjtSMZkK/zESPIBTBsy5w=";
# Pulling from Github to run tests
src = fetchFromGitHub {
owner = "wbond";
repo = "asn1crypto";
rev = version;
sha256 = "sha256-M8vASxhaJPgkiTrAckxz7gk/QHkrFlNz7fFbnLEBT+M=";
};
# No tests included
doCheck = false;
checkInputs = [
pytestCheckHook
];
meta = {
description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP";


@ -0,0 +1,97 @@
{ stdenv
, lib
, buildPythonPackage
, fetchFromGitHub
, poetry-core
, APScheduler
, bitstring
, cffi
, ecdsa
, monero
, pypng
, pyqrcode
, pyramid
, pyramid_jinja2
, pysocks
, requests
, tzlocal
, waitress
, yoyo-migrations
, pytestCheckHook
, pytest-cov
, webtest
}:
buildPythonPackage rec {
pname = "cypherpunkpay";
version = "1.0.15";
format = "pyproject";
src = fetchFromGitHub {
owner = "CypherpunkPay";
repo = "CypherpunkPay";
rev = "v${version}";
sha256 = "sha256-W2f4jtEqopDXiXx0pklZrjOmVhpx2kDdTJRPm2Ka0Cg=";
};
postPatch = ''
substituteInPlace pyproject.toml \
--replace 'monero = "^0.99"' 'monero = ">=0.99"' \
--replace 'pypng = "^0.0.20"' 'pypng = ">=0.0.20"' \
--replace 'tzlocal = "2.1"' 'tzlocal = ">=2.1"'
'';
nativeBuildInputs = [
poetry-core
];
propagatedBuildInputs = [
APScheduler
bitstring
cffi
ecdsa
monero
pypng
pyqrcode
pyramid
pyramid_jinja2
pysocks
requests
tzlocal
waitress
yoyo-migrations
];
checkInputs = [
pytestCheckHook
pytest-cov
webtest
];
disabledTestPaths = [
# performance test
"test/unit/tools/pbkdf2_test.py"
# tests require network connection
"test/network/explorers/bitcoin"
"test/network/net/http_client"
"test/network/prices"
# tests require bitcoind running
"test/network/full_node_clients"
# tests require lnd running
"test/network/ln"
# tests require tor running
"test/network/net/tor_client"
# tests require the full environment running
"test/acceptance/views"
"test/acceptance/views_admin"
"test/acceptance/views_donations"
"test/acceptance/views_dummystore"
];
meta = with lib; {
description = "Modern self-hosted software for accepting Bitcoin";
homepage = "https://cypherpunkpay.org";
license = with licenses; [ mit /* or */ unlicense ];
maintainers = with maintainers; [ prusnak ];
};
}


@ -8,12 +8,12 @@
buildPythonPackage rec {
pname = "ipympl";
version = "0.8.8";
version = "0.9.0";
format = "wheel";
src = fetchPypi {
inherit pname version format;
sha256 = "sha256-hkaK6q6MCigAfQx/bbuF8rbLmAUWfojU2qdSlWIAkVk=";
sha256 = "sha256-HpO3T/zRbimxd1+nUkbSmclj7nPsMYuSUK0VJItZQs4=";
};


@ -216,9 +216,9 @@ let
fetchAttrs = {
sha256 =
if cudaSupport then
"1k0rjxqjm703gd9navwzx5x3874b4dxamr62m1fxhm79d271zxis"
"0d2rqwk9n4a6c51m4g21rxymv85kw2sdksni30cdx3pdcdbqgic7"
else
"0ivah1w41jcj13jm740qzwx5h0ia8vbj71pjgd0zrfk3c92kll41";
"0q540mwmh7grig0qq48ynzqi0gynimxnrq7k97wribqpkx99k39d";
};
buildAttrs = {


@ -0,0 +1,51 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, pycryptodomex
, pysocks
, pynacl
, requests
, six
, varint
, pytestCheckHook
, pytest-cov
, responses
}:
buildPythonPackage rec {
pname = "monero";
version = "1.0.1";
src = fetchFromGitHub {
owner = "monero-ecosystem";
repo = "monero-python";
rev = "v${version}";
sha256 = "sha256-ZjAShIeGVVIKlwgSNPVSN7eaqhKu3wEpDP9wgBMOyZU=";
};
postPatch = ''
substituteInPlace requirements.txt \
--replace 'pynacl~=1.4' 'pynacl>=1.4' \
--replace 'ipaddress' ""
'';
pythonImportsCheck = [ "monero" ];
propagatedBuildInputs = [
pycryptodomex
pynacl
pysocks
requests
six
varint
];
checkInputs = [ pytestCheckHook pytest-cov responses ];
meta = with lib; {
description = "Comprehensive Python module for handling Monero";
homepage = "https://github.com/monero-ecosystem/monero-python";
license = licenses.bsd3;
maintainers = with maintainers; [ prusnak ];
};
}


@ -0,0 +1,29 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, pytestCheckHook
}:
buildPythonPackage rec {
pname = "pypng";
version = "0.0.21";
format = "pyproject";
src = fetchFromGitHub {
owner = "drj11";
repo = "pypng";
rev = "${pname}-${version}";
sha256 = "sha256-JU1GCSTm2s6Kczn6aRcF5DizPJVpizNtnAMJxTBi9vo=";
};
pythonImportsCheck = [ "png" ];
checkInputs = [ pytestCheckHook ];
meta = with lib; {
description = "Pure Python library for PNG image encoding/decoding";
homepage = "https://github.com/drj11/pypng";
license = licenses.mit;
maintainers = with maintainers; [ prusnak ];
};
}


@ -1,4 +1,5 @@
{ lib
, aesedb
, aiosmb
, aiowinreg
, buildPythonPackage
@ -6,19 +7,24 @@
, minidump
, minikerberos
, msldap
, pythonOlder
, winsspi
}:
buildPythonPackage rec {
pname = "pypykatz";
version = "0.5.2";
version = "0.5.6";
format = "setuptools";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
sha256 = "1lyvypi1g4l9fq1f9q05bdn6vq8y5y9ghmb6ziqdycr0lxn7lfdd";
hash = "sha256-iuLQfdRNxy6Z+7sYGG+dSHlxicOPtNOdB/VNLyZjRsY=";
};
propagatedBuildInputs = [
aesedb
aiosmb
aiowinreg
minikerberos
@ -29,7 +35,10 @@ buildPythonPackage rec {
# Project doesn't have tests
doCheck = false;
pythonImportsCheck = [ "pypykatz" ];
pythonImportsCheck = [
"pypykatz"
];
meta = with lib; {
description = "Mimikatz implementation in Python";


@ -9,7 +9,7 @@
buildPythonPackage rec {
pname = "pysigma-backend-insightidr";
version = "0.1.4";
version = "0.1.5";
format = "pyproject";
disabled = pythonOlder "3.8";
@ -17,8 +17,8 @@ buildPythonPackage rec {
src = fetchFromGitHub {
owner = "SigmaHQ";
repo = "pySigma-backend-insightidr";
rev = "v${version}";
hash = "sha256-ivigYBCoQtAfVmTiKvYugzPbw3tG0Xn5IYbHVJuubDE=";
rev = "refs/tags/v${version}";
hash = "sha256-RjBRFNMIpjW/x5vShXUgi25oOmvRlD2zP6mNQJ7sG8M=";
};
nativeBuildInputs = [


@ -9,6 +9,11 @@ buildPythonPackage rec {
sha256 = "0wvdv0frl7xib05sixjv9m6jywaa2wdhdhsqqdfk45akk2r80pcn";
};
postPatch = ''
substituteInPlace src/testing/common/database.py \
--replace "collections.Callable" "collections.abc.Callable"
'';
# There are no unit tests
doCheck = false;


@ -12,7 +12,7 @@
buildPythonPackage rec {
pname = "xknx";
version = "0.20.2";
version = "0.20.3";
format = "setuptools";
disabled = pythonOlder "3.8";
@ -21,7 +21,7 @@ buildPythonPackage rec {
owner = "XKNX";
repo = pname;
rev = version;
sha256 = "sha256-9OEoU2r6/tThEoLjssWD0jrgF2oYk5IRCWLTeF4ddGc=";
sha256 = "sha256-RGwo6IH1WDNBanpQ14gB3/75db3NPwNUsFy0wPP1Yok=";
};
propagatedBuildInputs = [


@ -0,0 +1,30 @@
{ lib
, buildPythonPackage
, fetchPypi
, setuptools
, sqlparse
, tabulate
}:
buildPythonPackage rec {
pname = "yoyo-migrations";
version = "7.3.2";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-RIQIKOGgFp9UHnAtWu3KgYWtpoCH57rUhQpvxdced6Q=";
};
propagatedBuildInputs = [ setuptools sqlparse tabulate ];
doCheck = false; # pypi tarball does not contain tests
pythonImportsCheck = [ "yoyo" ];
meta = with lib; {
description = "Database schema migration tool";
homepage = "https://ollycope.com/software/yoyo";
license = licenses.asl20;
maintainers = with maintainers; [ prusnak ];
};
}


@ -11,11 +11,14 @@ stdenv.mkDerivation rec {
sha256 = "1sm99423hh90kr4wdjqi9sdrrpk65j2vz2hzj65zcxfxyr6khjci";
};
nativeBuildInputs = [
meson
pkg-config
ninja
];
buildInputs = [
meson
pkg-config
cairo
ninja
cairo
];
meta = with lib; {


@ -0,0 +1,62 @@
{ lib
, nixosTests
, stdenv
, fetchFromGitHub
, makeWrapper
, nodejs
, pkgs
}:
stdenv.mkDerivation rec {
pname = "haste-server";
version = "3dcc43578b99dbafac35dece9d774ff2af39e8d0";
src = fetchFromGitHub {
owner = "toptal";
repo = "haste-server";
rev = version;
hash = "sha256-srSPRlG+gXSIwgVFLyfzRex97tCbV9FZXYpLD0KFRfw=";
};
nativeBuildInputs = [
nodejs
makeWrapper
];
installPhase =
let
nodeDependencies = ((import ./node-composition.nix {
inherit pkgs nodejs;
inherit (stdenv.hostPlatform) system;
}).nodeDependencies.override (old: {
# access to path '/nix/store/...-source' is forbidden in restricted mode
src = src;
dontNpmInstall = true;
}));
in
''
runHook preInstall
mkdir -p $out/share
cp -ra . $out/share/haste-server
ln -s ${nodeDependencies}/lib/node_modules $out/share/haste-server/node_modules
makeWrapper ${nodejs}/bin/node $out/bin/haste-server \
--add-flags $out/share/haste-server/server.js
runHook postInstall
'';
passthru = {
tests = {
inherit (nixosTests) haste-server;
};
updateScript = ./update.sh;
};
meta = with lib; {
description = "open source pastebin written in node.js";
homepage = "https://www.toptal.com/developers/hastebin/about.md";
license = licenses.mit;
maintainers = with maintainers; [ mkg20001 ];
};
}


@ -0,0 +1,17 @@
# This file has been generated by node2nix 1.9.0. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-14_x"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-deps.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

File diff suppressed because it is too large.


@ -0,0 +1,588 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.name}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
replaceDependencies(packageObj.optionalDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 1,
requires: true,
dependencies: {}
};
function augmentPackageJSON(filePath, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, dependencies);
}
});
} else {
augmentPackageJSON(filePath, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
fi
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}-${version}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}-${version}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
nodeDependencies = buildNodeDependencies args;
in
stdenv.mkDerivation {
name = "node-shell-${name}-${version}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
};
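# Hypothetical illustration (placeholder values): entering the resulting
# derivation with nix-shell runs the shellHook above, exporting NODE_PATH and
# PATH so the pinned node_modules are visible to node and npm.
exampleShell = buildNodeShell {
  name = "example-cli";
  packageName = "example-cli";
  version = "1.0.0";
  src = ./example-cli;
  dependencies = [ ];
};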
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

View file

@@ -0,0 +1,28 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p curl common-updater-scripts nodePackages.node2nix gnused nix coreutils jq
set -euo pipefail
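# Update haste-server to the latest upstream commit and regenerate the
# node2nix expressions for its npm dependency closure.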
latestVersion="$(curl -s "https://api.github.com/repos/toptal/haste-server/commits?per_page=1" | jq -r ".[0].sha")"
currentVersion=$(nix-instantiate --eval -E "with import ./. {}; haste-server.version or (lib.getVersion haste-server)" | tr -d '"')
if [[ "$currentVersion" == "$latestVersion" ]]; then
echo "haste-server is up-to-date: $currentVersion"
exit 0
fi
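# First reset the version to a dummy value with a fake hash so the second
# call is forced to refetch the source and fill in the real hash.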
update-source-version haste-server 0 sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
update-source-version haste-server "$latestVersion"
# use patched source
store_src="$(nix-build . -A haste-server.src --no-out-link)"
cd "$(dirname "${BASH_SOURCE[0]}")"
node2nix \
--nodejs-14 \
--development \
--node-env ./node-env.nix \
--output ./node-deps.nix \
--input "$store_src/package.json" \
--composition ./node-composition.nix

View file

@@ -1,73 +1,81 @@
{ stdenv, lib, fetchzip, makeWrapper, jre, writeText, nixosTests
, postgresql_jdbc ? null, mysql_jdbc ? null
{ stdenv
, lib
, fetchzip
, makeWrapper
, jre
, writeText
, nixosTests
, callPackage
, confFile ? null
, plugins ? [ ]
}:
let
mkModuleXml = name: jarFile: writeText "module.xml" ''
<?xml version="1.0" ?>
<module xmlns="urn:jboss:module:1.3" name="${name}">
<resources>
<resource-root path="${jarFile}"/>
</resources>
<dependencies>
<module name="javax.api"/>
<module name="javax.transaction.api"/>
</dependencies>
</module>
'';
in
stdenv.mkDerivation rec {
pname = "keycloak";
pname = "keycloak";
version = "17.0.1";
src = fetchzip {
url = "https://github.com/keycloak/keycloak/releases/download/${version}/keycloak-legacy-${version}.zip";
sha256 = "sha256-oqANNk7T6+CAS818v3I1QNsuxetL/JFZMqxouRn+kdE=";
url = "https://github.com/keycloak/keycloak/releases/download/${version}/keycloak-${version}.zip";
sha256 = "sha256-z1LfTUoK+v4oQxdyIQruFhl5O333zirSrkPoTFgVfmI=";
};
nativeBuildInputs = [ makeWrapper ];
nativeBuildInputs = [ makeWrapper jre ];
buildPhase = ''
runHook preBuild
'' + lib.optionalString (confFile != null) ''
install -m 0600 ${confFile} conf/keycloak.conf
'' + ''
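# A plugin may be given as a single jar/ear file or as a directory tree of
# them; install everything into providers/, where Keycloak picks up custom
# providers.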
install_plugin() {
if [ -d "$1" ]; then
find "$1" -type f \( -iname \*.ear -o -iname \*.jar \) -exec install -m 0500 "{}" "providers/" \;
else
install -m 0500 "$1" "providers/"
fi
}
${lib.concatMapStringsSep "\n" (pl: "install_plugin ${lib.escapeShellArg pl}") plugins}
'' + ''
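# Run the Quarkus build step up front so the build-time options from
# conf/keycloak.conf and the installed providers are baked into the output.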
export KC_HOME_DIR=$out
export KC_CONF_DIR=$out/conf
patchShebangs bin/kc.sh
bin/kc.sh build
runHook postBuild
'';
installPhase = ''
runHook preInstall
mkdir $out
cp -r * $out
rm -rf $out/bin/*.{ps1,bat}
rm $out/bin/*.{ps1,bat}
module_path=$out/modules/system/layers/keycloak
if ! [[ -d $module_path ]]; then
echo "The module path $module_path not found!"
exit 1
fi
runHook postInstall
'';
${lib.optionalString (postgresql_jdbc != null) ''
mkdir -p $module_path/org/postgresql/main
ln -s ${postgresql_jdbc}/share/java/postgresql-jdbc.jar $module_path/org/postgresql/main/
ln -s ${mkModuleXml "org.postgresql" "postgresql-jdbc.jar"} $module_path/org/postgresql/main/module.xml
''}
${lib.optionalString (mysql_jdbc != null) ''
mkdir -p $module_path/com/mysql/main
ln -s ${mysql_jdbc}/share/java/mysql-connector-java.jar $module_path/com/mysql/main/
ln -s ${mkModuleXml "com.mysql" "mysql-connector-java.jar"} $module_path/com/mysql/main/module.xml
''}
postFixup = ''
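# Point kc.sh at KC_HOME_DIR and KC_CONF_DIR instead of paths relative to the
# script, so the home and configuration directories can be set at runtime.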
substituteInPlace $out/bin/kc.sh --replace '-Dkc.home.dir=$DIRNAME/../' '-Dkc.home.dir=$KC_HOME_DIR'
substituteInPlace $out/bin/kc.sh --replace '-Djboss.server.config.dir=$DIRNAME/../conf' '-Djboss.server.config.dir=$KC_CONF_DIR'
for script in add-user-keycloak.sh add-user.sh domain.sh elytron-tool.sh jboss-cli.sh jconsole.sh jdr.sh standalone.sh wsconsume.sh wsprovide.sh; do
wrapProgram $out/bin/$script --set JAVA_HOME ${jre}
for script in $(find $out/bin -type f -executable); do
wrapProgram "$script" --set JAVA_HOME ${jre} --prefix PATH : ${jre}/bin
done
wrapProgram $out/bin/kcadm.sh --prefix PATH : ${jre}/bin
wrapProgram $out/bin/kcreg.sh --prefix PATH : ${jre}/bin
'';
passthru = {
tests = nixosTests.keycloak;
plugins = callPackage ./all-plugins.nix {};
plugins = callPackage ./all-plugins.nix { };
enabledPlugins = plugins;
};
meta = with lib; {
homepage = "https://www.keycloak.org/";
homepage = "https://www.keycloak.org/";
description = "Identity and access management for modern applications and services";
license = licenses.asl20;
platforms = jre.meta.platforms;
maintainers = with maintainers; [ ngerstle talyz ];
};
}
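# Hypothetical usage sketch (not part of this file): the new confFile and
# plugins arguments are meant to be supplied through an override, e.g.
#
#   keycloak.override {
#     plugins = with keycloak.plugins; [ keycloak-metrics-spi ];
#     confFile = ./keycloak.conf;
#   }
#
# The plugin name above is illustrative; see passthru.plugins for the set
# that is actually packaged.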

View file

@@ -2,16 +2,16 @@
buildGoModule rec {
pname = "bird-exporter";
version = "1.4.0";
version = "1.4.1";
src = fetchFromGitHub {
owner = "czerwonk";
repo = "bird_exporter";
rev = version;
sha256 = "sha256-N/00+2OrP0BsEazD9bHk+w/xO9E6sFT6nC0MM4n9lR4=";
sha256 = "sha256-QCnOMiAcvn0HcppGJlf3sdllApKcjHpucvk9xxD/MqE=";
};
vendorSha256 = "sha256-9xKMwHNgPMtC+J3mwwUNSJnpMGttpaWF6l8gv0YtvHE=";
vendorSha256 = "sha256-jBwaneVv1a8iIqnhDbQOnvaJdnXgO8P90Iv51IfGaM0=";
passthru.tests = { inherit (nixosTests.prometheus-exporters) bird; };

View file

@@ -1,4 +1,4 @@
{ lib, stdenv, fetchurl }:
{ lib, stdenv, fetchurl, libiconv }:
stdenv.mkDerivation rec {
pname = "mtools";
@@ -14,6 +14,8 @@ stdenv.mkDerivation rec {
# fails to find X on darwin
configureFlags = lib.optional stdenv.isDarwin "--without-x";
buildInputs = lib.optional stdenv.isDarwin libiconv;
doCheck = true;
meta = with lib; {

View file

@@ -2,13 +2,13 @@
stdenv.mkDerivation rec {
pname = "gti";
version = "1.7.0";
version = "1.8.0";
src = fetchFromGitHub {
owner = "rwos";
repo = "gti";
rev = "v${version}";
sha256 = "1jivnjswlhwjfg5v9nwfg3vfssvqbdxxf9znwmfb5dgfblg9wxw9";
sha256 = "sha256-x6ncvnZPPrVcQYwtwkSenW+ri0L6FpuDa7U7uYUqiyk=";
};
postPatch = ''

View file

@@ -6504,6 +6504,8 @@ with pkgs;
haste-client = callPackage ../tools/misc/haste-client { };
haste-server = callPackage ../servers/haste-server { };
hal-hardware-analyzer = libsForQt5.callPackage ../applications/science/electronics/hal-hardware-analyzer { };
half = callPackage ../development/libraries/half { };
@@ -27103,7 +27105,7 @@ with pkgs;
# C++20 is required, darwin has Clang 7 by default, aarch64 has gcc 9 by default
stdenv = if stdenv.isDarwin
then clang12Stdenv
then llvmPackages_12.stdenv
else if stdenv.isAarch64 then gcc10Stdenv else stdenv;
# tdesktop has random crashes when jemalloc is built with gcc.

View file

@@ -209,6 +209,8 @@ in {
aesara = callPackage ../development/python-modules/aesara { };
aesedb = callPackage ../development/python-modules/aesedb { };
afdko = callPackage ../development/python-modules/afdko { };
affine = callPackage ../development/python-modules/affine { };
@@ -1982,6 +1984,8 @@ in {
cypari2 = callPackage ../development/python-modules/cypari2 { };
cypherpunkpay = callPackage ../development/python-modules/cypherpunkpay { };
cysignals = callPackage ../development/python-modules/cysignals { };
cython = callPackage ../development/python-modules/Cython { };
@@ -5253,6 +5257,8 @@ in {
mohawk = callPackage ../development/python-modules/mohawk { };
monero = callPackage ../development/python-modules/monero { };
mongomock = callPackage ../development/python-modules/mongomock { };
mongodict = callPackage ../development/python-modules/mongodict { };
@@ -7432,6 +7438,8 @@ in {
inherit (pkgs) jq;
};
pypng = callPackage ../development/python-modules/pypng { };
phonemizer = callPackage ../development/python-modules/phonemizer { };
pyopencl = callPackage ../development/python-modules/pyopencl {
@@ -11000,6 +11008,8 @@ in {
phantomjsSupport = false;
};
yoyo-migrations = callPackage ../development/python-modules/yoyo-migrations { };
yt-dlp = callPackage ../tools/misc/yt-dlp { };
yt-dlp-light = callPackage ../tools/misc/yt-dlp {