From bf41254a8add0d7255505afa3ff8068e0baf4127 Mon Sep 17 00:00:00 2001 From: Linus Heckemann Date: Mon, 19 Dec 2022 12:04:13 +0100 Subject: [PATCH 001/154] nixos/qemu-vm: allow use without a disk image --- nixos/modules/virtualisation/qemu-vm.nix | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/nixos/modules/virtualisation/qemu-vm.nix b/nixos/modules/virtualisation/qemu-vm.nix index 1b3c0e23f97d..30f3035941a7 100644 --- a/nixos/modules/virtualisation/qemu-vm.nix +++ b/nixos/modules/virtualisation/qemu-vm.nix @@ -108,9 +108,9 @@ let set -e - NIX_DISK_IMAGE=$(readlink -f "''${NIX_DISK_IMAGE:-${config.virtualisation.diskImage}}") + NIX_DISK_IMAGE=$(readlink -f "''${NIX_DISK_IMAGE:-${toString config.virtualisation.diskImage}}") || test -z "$NIX_DISK_IMAGE" - if ! test -e "$NIX_DISK_IMAGE"; then + if test -n "$NIX_DISK_IMAGE" && ! test -e "$NIX_DISK_IMAGE"; then ${qemu}/bin/qemu-img create -f qcow2 "$NIX_DISK_IMAGE" \ ${toString config.virtualisation.diskSize}M fi @@ -346,7 +346,7 @@ in virtualisation.diskImage = mkOption { - type = types.str; + type = types.nullOr types.str; default = "./${config.system.name}.qcow2"; defaultText = literalExpression ''"./''${config.system.name}.qcow2"''; description = @@ -354,6 +354,9 @@ in Path to the disk image containing the root filesystem. The image will be created on startup if it does not exist. + + If null, a tmpfs will be used as the root filesystem and + the VM's state will not be persistent. ''; }; @@ -975,12 +978,12 @@ in ]; virtualisation.qemu.drives = mkMerge [ - [{ + (mkIf (cfg.diskImage != null) [{ name = "root"; file = ''"$NIX_DISK_IMAGE"''; driveExtraOpts.cache = "writeback"; driveExtraOpts.werror = "report"; - }] + }]) (mkIf cfg.useNixStoreImage [{ name = "nix-store"; file = ''"$TMPDIR"/store.img''; @@ -1031,6 +1034,10 @@ in "/".fsType = "ext4"; "/".autoFormat = true; } // + optionalAttrs (cfg.diskImage == null) { + "/".device = "tmpfs"; + "/".fsType = "tmpfs"; + } // optionalAttrs config.boot.tmpOnTmpfs { "/tmp" = { device = "tmpfs"; From 246d09fea28c85de7a2f6223fad6927429c3697b Mon Sep 17 00:00:00 2001 From: Linus Heckemann Date: Tue, 20 Dec 2022 20:35:40 +0100 Subject: [PATCH 002/154] qemu-vm: use nixos module patterns for filesystems --- nixos/modules/virtualisation/qemu-vm.nix | 49 +++++++++++------------- 1 file changed, 22 insertions(+), 27 deletions(-) diff --git a/nixos/modules/virtualisation/qemu-vm.nix b/nixos/modules/virtualisation/qemu-vm.nix index 30f3035941a7..f594e7ee9c5d 100644 --- a/nixos/modules/virtualisation/qemu-vm.nix +++ b/nixos/modules/virtualisation/qemu-vm.nix @@ -1006,20 +1006,21 @@ in }) cfg.emptyDiskImages) ]; + fileSystems = mkVMOverride cfg.fileSystems; + # Mount the host filesystem via 9P, and bind-mount the Nix store # of the host into our own filesystem. We use mkVMOverride to # allow this module to be applied to "normal" NixOS system # configuration, where the regular value for the `fileSystems' # attribute should be disregarded for the purpose of building a VM # test image (since those filesystems don't exist in the VM). 
- fileSystems = - let + virtualisation.fileSystems = let mkSharedDir = tag: share: { name = if tag == "nix-store" && cfg.writableStore - then "/nix/.ro-store" - else share.target; + then "/nix/.ro-store" + else share.target; value.device = tag; value.fsType = "9p"; value.neededForBoot = true; @@ -1027,48 +1028,42 @@ in [ "trans=virtio" "version=9p2000.L" "msize=${toString cfg.msize}" ] ++ lib.optional (tag == "nix-store") "cache=loose"; }; - in - mkVMOverride (cfg.fileSystems // - optionalAttrs cfg.useDefaultFilesystems { - "/".device = cfg.bootDevice; - "/".fsType = "ext4"; - "/".autoFormat = true; - } // - optionalAttrs (cfg.diskImage == null) { - "/".device = "tmpfs"; - "/".fsType = "tmpfs"; - } // - optionalAttrs config.boot.tmpOnTmpfs { - "/tmp" = { + in lib.mkMerge [ + (lib.mapAttrs' mkSharedDir cfg.sharedDirectories) + { + "/" = lib.mkIf cfg.useDefaultFilesystems (if cfg.diskImage == null then { + device = "tmpfs"; + fsType = "tmpfs"; + } else { + device = cfg.bootDevice; + fsType = "ext4"; + autoFormat = true; + }); + "/tmp" = lib.mkIf config.boot.tmpOnTmpfs { device = "tmpfs"; fsType = "tmpfs"; neededForBoot = true; # Sync with systemd's tmp.mount; options = [ "mode=1777" "strictatime" "nosuid" "nodev" "size=${toString config.boot.tmpOnTmpfsSize}" ]; }; - } // - optionalAttrs cfg.useNixStoreImage { - "/nix/${if cfg.writableStore then ".ro-store" else "store"}" = { + "/nix/${if cfg.writableStore then ".ro-store" else "store"}" = lib.mkIf cfg.useNixStoreImage { device = "${lookupDriveDeviceName "nix-store" cfg.qemu.drives}"; neededForBoot = true; options = [ "ro" ]; }; - } // - optionalAttrs (cfg.writableStore && cfg.writableStoreUseTmpfs) { - "/nix/.rw-store" = { + "/nix/.rw-store" = lib.mkIf (cfg.writableStore && cfg.writableStoreUseTmpfs) { fsType = "tmpfs"; options = [ "mode=0755" ]; neededForBoot = true; }; - } // - optionalAttrs cfg.useBootLoader { # see note [Disk layout with `useBootLoader`] - "/boot" = { + "/boot" = lib.mkIf cfg.useBootLoader { device = "${lookupDriveDeviceName "boot" cfg.qemu.drives}2"; # 2 for e.g. `vdb2`, as created in `bootDisk` fsType = "vfat"; noCheck = true; # fsck fails on a r/o filesystem }; - } // lib.mapAttrs' mkSharedDir cfg.sharedDirectories); + } + ]; boot.initrd.systemd = lib.mkIf (config.boot.initrd.systemd.enable && cfg.writableStore) { mounts = [{ From a440e955e049da9458837c43c8bd08202ef16229 Mon Sep 17 00:00:00 2001 From: Weijia Wang <9713184+wegank@users.noreply.github.com> Date: Sun, 1 Jan 2023 16:05:48 +0100 Subject: [PATCH 003/154] python3: allow enabling framework on darwin --- .../interpreters/python/cpython/default.nix | 22 +++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/pkgs/development/interpreters/python/cpython/default.nix b/pkgs/development/interpreters/python/cpython/default.nix index 8d55ea6c39cb..0e62723ca51c 100644 --- a/pkgs/development/interpreters/python/cpython/default.nix +++ b/pkgs/development/interpreters/python/cpython/default.nix @@ -17,6 +17,7 @@ , libxcrypt , self , configd +, darwin , autoreconfHook , autoconf-archive , pkg-config @@ -41,6 +42,7 @@ , stripBytecode ? true , includeSiteCustomize ? true , static ? stdenv.hostPlatform.isStatic +, enableFramework ? false , enableOptimizations ? false # enableNoSemanticInterposition is a subset of the enableOptimizations flag that doesn't harm reproducibility. 
# clang starts supporting `-fno-semantic-interposition` with version 10
@@ -65,6 +67,8 @@ assert x11Support -> tcl != null
assert bluezSupport -> bluez != null;
+assert enableFramework -> stdenv.isDarwin;
+
assert lib.assertMsg (reproducibleBuild -> stripBytecode)
"Deterministic builds require stripping bytecode.";
@@ -84,6 +88,8 @@ let
buildPackages = pkgsBuildHost;
inherit (passthru) pythonForBuild;
+ inherit (darwin.apple_sdk.frameworks) Cocoa;
+
tzdataSupport = tzdata != null && passthru.pythonAtLeast "3.9";
passthru = let
@@ -125,6 +131,8 @@ let
++ optionals x11Support [ tcl tk libX11 xorgproto ]
++ optionals (bluezSupport && stdenv.isLinux) [ bluez ]
++ optionals stdenv.isDarwin [ configd ])
+
+ ++ optionals enableFramework [ Cocoa ]
++ optionals tzdataSupport [ tzdata ]; # `zoneinfo` module
hasDistutilsCxxPatch = !(stdenv.cc.isGNU or false);
@@ -307,8 +315,10 @@ in with passthru; stdenv.mkDerivation {
"--without-ensurepip"
"--with-system-expat"
"--with-system-ffi"
- ] ++ optionals (!static) [
+ ] ++ optionals (!static && !enableFramework) [
"--enable-shared"
+ ] ++ optionals enableFramework [
+ "--enable-framework=${placeholder "out"}/Library/Frameworks"
] ++ optionals enableOptimizations [
"--enable-optimizations"
] ++ optionals enableLTO [
@@ -387,7 +397,11 @@ in with passthru; stdenv.mkDerivation {
]
++ optionals tzdataSupport [
tzdata
]);
- in ''
+ in lib.optionalString enableFramework ''
+ for dir in include lib share; do
+ ln -s $out/Library/Frameworks/Python.framework/Versions/Current/$dir $out/$dir
+ done
+ '' + ''
# needed for some packages, especially packages that backport functionality
# to 2.x from 3.x
for item in $out/lib/${libPrefix}/test/*; do
@@ -484,7 +498,7 @@ in with passthru; stdenv.mkDerivation {
# Enforce that we don't have references to the OpenSSL -dev package, which we
# explicitly specify in our configure flags above.
disallowedReferences =
- lib.optionals (openssl' != null && !static) [ openssl'.dev ]
+ lib.optionals (openssl' != null && !static && !enableFramework) [ openssl'.dev ]
++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
# Ensure we don't have references to build-time packages.
# These typically end up in shebangs.
@@ -518,7 +532,7 @@ in with passthru; stdenv.mkDerivation {
high level dynamic data types.
'';
license = licenses.psfl;
- platforms = with platforms; linux ++ darwin;
+ platforms = platforms.linux ++ platforms.darwin;
maintainers = with maintainers; [ fridh ];
};
}

From 919f2b2b6292436570613e7f7166eb992b12255f Mon Sep 17 00:00:00 2001
From: Tom Hubrecht
Date: Sun, 5 Feb 2023 14:48:46 +0100
Subject: [PATCH 004/154] nixos/borgmatic: Allow defining multiple configurations

---
 .../manual/release-notes/rl-2305.section.md | 2 +
 nixos/modules/services/backup/borgmatic.nix | 86 +++++++++++--------
 2 files changed, 54 insertions(+), 34 deletions(-)

diff --git a/nixos/doc/manual/release-notes/rl-2305.section.md b/nixos/doc/manual/release-notes/rl-2305.section.md
index c81cbc69f94e..5437a4ab6521 100644
--- a/nixos/doc/manual/release-notes/rl-2305.section.md
+++ b/nixos/doc/manual/release-notes/rl-2305.section.md
@@ -148,6 +148,8 @@ In addition to numerous new and upgraded packages, this release has the followin

- NixOS now defaults to using nsncd (a non-caching reimplementation in Rust) as NSS lookup dispatcher, instead of the buggy and deprecated glibc-provided nscd. If you need to switch back, set `services.nscd.enableNsncd = false`, but please open an issue in nixpkgs so your issue can be fixed.
+- `services.borgmatic` now allows for multiple configurations, placed in `/etc/borgmatic.d/`; you can define them with `services.borgmatic.configurations`.
+
- The `dnsmasq` service now takes configuration via the `services.dnsmasq.settings` attribute set. The option `services.dnsmasq.extraConfig` will be deprecated when NixOS 22.11 reaches
diff --git a/nixos/modules/services/backup/borgmatic.nix b/nixos/modules/services/backup/borgmatic.nix
index 73c4acda3936..e7cd6ae4bb57 100644
--- a/nixos/modules/services/backup/borgmatic.nix
+++ b/nixos/modules/services/backup/borgmatic.nix
@@ -5,44 +5,58 @@ with lib;
let
cfg = config.services.borgmatic;
settingsFormat = pkgs.formats.yaml { };
+
+ cfgType = with types; submodule {
+ freeformType = settingsFormat.type;
+ options.location = {
+ source_directories = mkOption {
+ type = listOf str;
+ description = mdDoc ''
+ List of source directories to backup (required). Globs and
+ tildes are expanded.
+ '';
+ example = [ "/home" "/etc" "/var/log/syslog*" ];
+ };
+ repositories = mkOption {
+ type = listOf str;
+ description = mdDoc ''
+ Paths to local or remote repositories (required). Tildes are
+ expanded. Multiple repositories are backed up to in
+ sequence. Borg placeholders can be used. See the output of
+ "borg help placeholders" for details. See ssh_command for
+ SSH options like identity file or port. If systemd service
+ is used, then add local repository paths in the systemd
+ service file to the ReadWritePaths list.
+ '';
+ example = [
+ "ssh://user@backupserver/./sourcehostname.borg"
+ "ssh://user@backupserver/./{fqdn}"
+ "/var/local/backups/local.borg"
+ ];
+ };
+ };
+ };
+
cfgfile = settingsFormat.generate "config.yaml" cfg.settings;
-in {
+in
+{
options.services.borgmatic = {
- enable = mkEnableOption (lib.mdDoc "borgmatic");
+ enable = mkEnableOption (mdDoc "borgmatic");
settings = mkOption {
- description = lib.mdDoc ''
+ description = mdDoc ''
See https://torsion.org/borgmatic/docs/reference/configuration/
'';
- type = types.submodule {
- freeformType = settingsFormat.type;
- options.location = {
- source_directories = mkOption {
- type = types.listOf types.str;
- description = lib.mdDoc ''
- List of source directories to backup (required). Globs and
- tildes are expanded.
- '';
- example = [ "/home" "/etc" "/var/log/syslog*" ];
- };
- repositories = mkOption {
- type = types.listOf types.str;
- description = lib.mdDoc ''
- Paths to local or remote repositories (required). Tildes are
- expanded. Multiple repositories are backed up to in
- sequence. Borg placeholders can be used. See the output of
- "borg help placeholders" for details. See ssh_command for
- SSH options like identity file or port. If systemd service
- is used, then add local repository paths in the systemd
- service file to the ReadWritePaths list.
- ''; - example = [ - "user@backupserver:sourcehostname.borg" - "user@backupserver:{fqdn}" - ]; - }; - }; - }; + default = null; + type = types.nullOr cfgType; + }; + + configurations = mkOption { + description = mdDoc '' + Set of borgmatic configurations, see https://torsion.org/borgmatic/docs/reference/configuration/ + ''; + default = { }; + type = types.attrsOf cfgType; }; }; @@ -50,9 +64,13 @@ in { environment.systemPackages = [ pkgs.borgmatic ]; - environment.etc."borgmatic/config.yaml".source = cfgfile; + environment.etc = (optionalAttrs (cfg.settings != null) { "borgmatic/config.yaml".source = cfgfile; }) // + mapAttrs' + (name: value: nameValuePair + "borgmatic.d/${name}.yaml" + { source = settingsFormat.generate "${name}.yaml" value; }) + cfg.configurations; systemd.packages = [ pkgs.borgmatic ]; - }; } From 3794c04d798c39755f36718b650172c1ae7ff6f9 Mon Sep 17 00:00:00 2001 From: pennae Date: Fri, 17 Feb 2023 13:48:15 +0100 Subject: [PATCH 005/154] nixos/manual: fix manpage links {manpage} already exapnds to a link but akkoma wants to link to a specific setting. split the mention for clarity. networkd just straight up duplicated what {manpage} generates anyway, so that link can go away completely. --- nixos/modules/services/web-apps/akkoma.md | 4 ++-- nixos/modules/system/boot/networkd.nix | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nixos/modules/services/web-apps/akkoma.md b/nixos/modules/services/web-apps/akkoma.md index 5419940a68d6..83dd1a8b35f2 100644 --- a/nixos/modules/services/web-apps/akkoma.md +++ b/nixos/modules/services/web-apps/akkoma.md @@ -318,8 +318,8 @@ to make packages available in the chroot. {option}`services.systemd.akkoma.serviceConfig.BindPaths` and {option}`services.systemd.akkoma.serviceConfig.BindReadOnlyPaths` permit access to outside paths through bind mounts. Refer to -[{manpage}`systemd.exec(5)`](https://www.freedesktop.org/software/systemd/man/systemd.exec.html#BindPaths=) -for details. +[`BindPaths=`](https://www.freedesktop.org/software/systemd/man/systemd.exec.html#BindPaths=) +of {manpage}`systemd.exec(5)` for details. ### Distributed deployment {#modules-services-akkoma-distributed-deployment} diff --git a/nixos/modules/system/boot/networkd.nix b/nixos/modules/system/boot/networkd.nix index 188f2f64dc84..d1ce3d13ee85 100644 --- a/nixos/modules/system/boot/networkd.nix +++ b/nixos/modules/system/boot/networkd.nix @@ -1948,7 +1948,7 @@ in Extra command-line arguments to pass to systemd-networkd-wait-online. These also affect per-interface `systemd-network-wait-online@` services. - See [{manpage}`systemd-networkd-wait-online.service(8)`](https://www.freedesktop.org/software/systemd/man/systemd-networkd-wait-online.service.html) for all available options. + See {manpage}`systemd-networkd-wait-online.service(8)` for all available options. ''; type = with types; listOf str; default = []; From 0236dcb59fb7b58f0ba5fee3ff15cc88ba903d61 Mon Sep 17 00:00:00 2001 From: pennae Date: Fri, 17 Feb 2023 17:49:08 +0100 Subject: [PATCH 006/154] nixos-render-docs: don't use markdown-it RendererProtocol our renderers carry significantly more state than markdown-it wants to easily cater for, and the html renderer will need even more state still. relying on the markdown-it-provided rendering functions has already proven to be a nuisance, and since parsing and rendering are split well enough we can just replace the rendering part with our own stuff outright. 
this also frees us from the tyranny of having to set instance variables before calling super().__init__ just to make sure that the renderer creation callback has access to everything it needs. --- .../src/nixos_render_docs/asciidoc.py | 5 +- .../src/nixos_render_docs/commonmark.py | 5 +- .../src/nixos_render_docs/docbook.py | 5 +- .../src/nixos_render_docs/manpage.py | 7 +-- .../src/nixos_render_docs/manual.py | 18 +++--- .../src/nixos_render_docs/md.py | 32 +++++++--- .../src/nixos_render_docs/options.py | 60 +++++++++---------- .../src/tests/test_asciidoc.py | 8 ++- .../src/tests/test_commonmark.py | 8 ++- .../src/tests/test_headings.py | 8 ++- .../nixos-render-docs/src/tests/test_lists.py | 8 ++- .../src/tests/test_manpage.py | 23 +++---- .../src/tests/test_plugins.py | 8 ++- 13 files changed, 101 insertions(+), 94 deletions(-) diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py index 637185227e83..2730dc5e024f 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py @@ -5,7 +5,6 @@ from urllib.parse import quote from .md import Renderer -import markdown_it from markdown_it.token import Token from markdown_it.utils import OptionsDict @@ -59,8 +58,8 @@ class AsciiDocRenderer(Renderer): _list_stack: list[List] _attrspans: list[str] - def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None): - super().__init__(manpage_urls, parser) + def __init__(self, manpage_urls: Mapping[str, str]): + super().__init__(manpage_urls) self._parstack = [ Par("\n\n", "====") ] self._list_stack = [] self._attrspans = [] diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py index 4a708b1f92c6..8fe32289b85d 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py @@ -4,7 +4,6 @@ from typing import Any, cast, Optional from .md import md_escape, md_make_code, Renderer -import markdown_it from markdown_it.token import Token from markdown_it.utils import OptionsDict @@ -26,8 +25,8 @@ class CommonMarkRenderer(Renderer): _link_stack: list[str] _list_stack: list[List] - def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None): - super().__init__(manpage_urls, parser) + def __init__(self, manpage_urls: Mapping[str, str]): + super().__init__(manpage_urls) self._parstack = [ Par("") ] self._link_stack = [] self._list_stack = [] diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py index e6a761dcf13f..b279ace06ff1 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py @@ -32,14 +32,13 @@ class Heading(NamedTuple): partintro_closed: bool = False class DocBookRenderer(Renderer): - __output__ = "docbook" _link_tags: list[str] _deflists: list[Deflist] _headings: list[Heading] _attrspans: list[str] - def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None): - super().__init__(manpage_urls, parser) + def __init__(self, manpage_urls: Mapping[str, str]): + super().__init__(manpage_urls) self._link_tags = [] self._deflists = [] 
self._headings = [] diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py index 1b796d9f0486..a61537b49c54 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py @@ -75,8 +75,6 @@ class List: # horizontal motion in a line) we do attempt to copy the style of mdoc(7) semantic requests # as appropriate for each markup element. class ManpageRenderer(Renderer): - __output__ = "man" - # whether to emit mdoc .Ql equivalents for inline code or just the contents. this is # mainly used by the options manpage converter to not emit extra quotes in defaults # and examples where it's already clear from context that the following text is code. @@ -90,9 +88,8 @@ class ManpageRenderer(Renderer): _list_stack: list[List] _font_stack: list[str] - def __init__(self, manpage_urls: Mapping[str, str], href_targets: dict[str, str], - parser: Optional[markdown_it.MarkdownIt] = None): - super().__init__(manpage_urls, parser) + def __init__(self, manpage_urls: Mapping[str, str], href_targets: dict[str, str]): + super().__init__(manpage_urls) self._href_targets = href_targets self._link_stack = [] self._do_parbreak_stack = [] diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py index efc8b02e8d6b..dfcdb96cc21f 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py @@ -18,9 +18,8 @@ from .md import Converter class ManualDocBookRenderer(DocBookRenderer): _toplevel_tag: str - def __init__(self, toplevel_tag: str, manpage_urls: Mapping[str, str], - parser: Optional[markdown_it.MarkdownIt] = None): - super().__init__(manpage_urls, parser) + def __init__(self, toplevel_tag: str, manpage_urls: Mapping[str, str]): + super().__init__(manpage_urls) self._toplevel_tag = toplevel_tag self.rules |= { 'included_sections': lambda *args: self._included_thing("section", *args), @@ -92,7 +91,7 @@ class ManualDocBookRenderer(DocBookRenderer): self._headings[-1] = self._headings[-1]._replace(partintro_closed=True) # must nest properly for structural includes. this requires saving at least # the headings stack, but creating new renderers is cheap and much easier. 
- r = ManualDocBookRenderer(tag, self._manpage_urls, None) + r = ManualDocBookRenderer(tag, self._manpage_urls) for (included, path) in token.meta['included']: try: result.append(r.render(included, options, env)) @@ -118,16 +117,13 @@ class ManualDocBookRenderer(DocBookRenderer): info = f" language={quoteattr(token.info)}" if token.info != "" else "" return f"\n{escape(token.content)}" -class DocBookConverter(Converter): - def __renderer__(self, manpage_urls: Mapping[str, str], - parser: Optional[markdown_it.MarkdownIt]) -> ManualDocBookRenderer: - return ManualDocBookRenderer('book', manpage_urls, parser) - +class DocBookConverter(Converter[ManualDocBookRenderer]): _base_paths: list[Path] _revision: str def __init__(self, manpage_urls: Mapping[str, str], revision: str): - super().__init__(manpage_urls) + super().__init__() + self._renderer = ManualDocBookRenderer('book', manpage_urls) self._revision = revision def convert(self, file: Path) -> str: @@ -195,7 +191,7 @@ class DocBookConverter(Converter): try: conv = options.DocBookConverter( - self._manpage_urls, self._revision, False, 'fragment', varlist_id, id_prefix) + self._renderer._manpage_urls, self._revision, False, 'fragment', varlist_id, id_prefix) with open(self._base_paths[-1].parent / source, 'r') as f: conv.add_options(json.load(f)) token.meta['rendered-options'] = conv.finalize(fragment=True) diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py index 96cc8af69bce..d73a1715f4f9 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py @@ -1,6 +1,6 @@ from abc import ABC from collections.abc import Mapping, MutableMapping, Sequence -from typing import Any, Callable, cast, get_args, Iterable, Literal, NoReturn, Optional +from typing import Any, Callable, cast, Generic, get_args, Iterable, Literal, NoReturn, Optional, TypeVar import dataclasses import re @@ -44,11 +44,11 @@ AttrBlockKind = Literal['admonition', 'example'] AdmonitionKind = Literal["note", "caution", "tip", "important", "warning"] -class Renderer(markdown_it.renderer.RendererProtocol): +class Renderer: _admonitions: dict[AdmonitionKind, tuple[RenderFn, RenderFn]] _admonition_stack: list[AdmonitionKind] - def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None): + def __init__(self, manpage_urls: Mapping[str, str]): self._manpage_urls = manpage_urls self.rules = { 'text': self.text, @@ -466,12 +466,26 @@ def _block_attr(md: markdown_it.MarkdownIt) -> None: md.core.ruler.push("block_attr", block_attr) -class Converter(ABC): - __renderer__: Callable[[Mapping[str, str], markdown_it.MarkdownIt], Renderer] +TR = TypeVar('TR', bound='Renderer') - def __init__(self, manpage_urls: Mapping[str, str]): - self._manpage_urls = manpage_urls +class Converter(ABC, Generic[TR]): + # we explicitly disable markdown-it rendering support and use our own entirely. 
+ # rendering is well separated from parsing and our renderers carry much more state than + # markdown-it easily acknowledges as 'good' (unless we used the untyped env args to + # shuttle that state around, which is very fragile) + class ForbiddenRenderer(markdown_it.renderer.RendererProtocol): + __output__ = "none" + def __init__(self, parser: Optional[markdown_it.MarkdownIt]): + pass + + def render(self, tokens: Sequence[Token], options: OptionsDict, + env: MutableMapping[str, Any]) -> str: + raise NotImplementedError("do not use Converter._md.renderer. 'tis a silly place") + + _renderer: TR + + def __init__(self) -> None: self._md = markdown_it.MarkdownIt( "commonmark", { @@ -479,7 +493,7 @@ class Converter(ABC): 'html': False, # not useful since we target many formats 'typographer': True, # required for smartquotes }, - renderer_cls=lambda parser: self.__renderer__(self._manpage_urls, parser) + renderer_cls=self.ForbiddenRenderer ) self._md.use( container_plugin, @@ -502,4 +516,4 @@ class Converter(ABC): def _render(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> str: env = {} if env is None else env tokens = self._parse(src, env) - return self._md.renderer.render(tokens, self._md.options, env) # type: ignore[no-any-return] + return self._renderer.render(tokens, self._md.options, env) diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py index f29d8fdb8968..8f64bd3ed538 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py @@ -7,12 +7,13 @@ from abc import abstractmethod from collections.abc import Mapping, MutableMapping, Sequence from markdown_it.utils import OptionsDict from markdown_it.token import Token -from typing import Any, Optional +from typing import Any, Generic, Optional from urllib.parse import quote from xml.sax.saxutils import escape, quoteattr import markdown_it +from . import md from . import parallel from .asciidoc import AsciiDocRenderer, asciidoc_escape from .commonmark import CommonMarkRenderer @@ -30,15 +31,13 @@ def option_is(option: Option, key: str, typ: str) -> Optional[dict[str, str]]: return None return option[key] # type: ignore[return-value] -class BaseConverter(Converter): +class BaseConverter(Converter[md.TR], Generic[md.TR]): __option_block_separator__: str _options: dict[str, RenderedOption] - def __init__(self, manpage_urls: Mapping[str, str], - revision: str, - markdown_by_default: bool): - super().__init__(manpage_urls) + def __init__(self, revision: str, markdown_by_default: bool): + super().__init__() self._options = {} self._revision = revision self._markdown_by_default = markdown_by_default @@ -153,7 +152,7 @@ class BaseConverter(Converter): # since it's good enough so far. 
@classmethod @abstractmethod - def _parallel_render_init_worker(cls, a: Any) -> BaseConverter: raise NotImplementedError() + def _parallel_render_init_worker(cls, a: Any) -> BaseConverter[md.TR]: raise NotImplementedError() def _render_option(self, name: str, option: dict[str, Any]) -> RenderedOption: try: @@ -162,7 +161,7 @@ class BaseConverter(Converter): raise Exception(f"Failed to render option {name}") from e @classmethod - def _parallel_render_step(cls, s: BaseConverter, a: Any) -> RenderedOption: + def _parallel_render_step(cls, s: BaseConverter[md.TR], a: Any) -> RenderedOption: return s._render_option(*a) def add_options(self, options: dict[str, Any]) -> None: @@ -199,8 +198,7 @@ class OptionsDocBookRenderer(OptionDocsRestrictions, DocBookRenderer): token.meta['compact'] = False return super().bullet_list_open(token, tokens, i, options, env) -class DocBookConverter(BaseConverter): - __renderer__ = OptionsDocBookRenderer +class DocBookConverter(BaseConverter[OptionsDocBookRenderer]): __option_block_separator__ = "" def __init__(self, manpage_urls: Mapping[str, str], @@ -209,13 +207,14 @@ class DocBookConverter(BaseConverter): document_type: str, varlist_id: str, id_prefix: str): - super().__init__(manpage_urls, revision, markdown_by_default) + super().__init__(revision, markdown_by_default) + self._renderer = OptionsDocBookRenderer(manpage_urls) self._document_type = document_type self._varlist_id = varlist_id self._id_prefix = id_prefix def _parallel_render_prepare(self) -> Any: - return (self._manpage_urls, self._revision, self._markdown_by_default, self._document_type, + return (self._renderer._manpage_urls, self._revision, self._markdown_by_default, self._document_type, self._varlist_id, self._id_prefix) @classmethod def _parallel_render_init_worker(cls, a: Any) -> DocBookConverter: @@ -300,11 +299,7 @@ class DocBookConverter(BaseConverter): class OptionsManpageRenderer(OptionDocsRestrictions, ManpageRenderer): pass -class ManpageConverter(BaseConverter): - def __renderer__(self, manpage_urls: Mapping[str, str], - parser: Optional[markdown_it.MarkdownIt] = None) -> OptionsManpageRenderer: - return OptionsManpageRenderer(manpage_urls, self._options_by_id, parser) - +class ManpageConverter(BaseConverter[OptionsManpageRenderer]): __option_block_separator__ = ".sp" _options_by_id: dict[str, str] @@ -314,8 +309,9 @@ class ManpageConverter(BaseConverter): *, # only for parallel rendering _options_by_id: Optional[dict[str, str]] = None): + super().__init__(revision, markdown_by_default) self._options_by_id = _options_by_id or {} - super().__init__({}, revision, markdown_by_default) + self._renderer = OptionsManpageRenderer({}, self._options_by_id) def _parallel_render_prepare(self) -> Any: return ((self._revision, self._markdown_by_default), { '_options_by_id': self._options_by_id }) @@ -324,10 +320,9 @@ class ManpageConverter(BaseConverter): return cls(*a[0], **a[1]) def _render_option(self, name: str, option: dict[str, Any]) -> RenderedOption: - assert isinstance(self._md.renderer, OptionsManpageRenderer) - links = self._md.renderer.link_footnotes = [] + links = self._renderer.link_footnotes = [] result = super()._render_option(name, option) - self._md.renderer.link_footnotes = None + self._renderer.link_footnotes = None return result._replace(links=links) def add_options(self, options: dict[str, Any]) -> None: @@ -339,12 +334,11 @@ class ManpageConverter(BaseConverter): if lit := option_is(option, key, 'literalDocBook'): raise RuntimeError("can't render manpages in the presence of 
docbook") else: - assert isinstance(self._md.renderer, OptionsManpageRenderer) try: - self._md.renderer.inline_code_is_quoted = False + self._renderer.inline_code_is_quoted = False return super()._render_code(option, key) finally: - self._md.renderer.inline_code_is_quoted = True + self._renderer.inline_code_is_quoted = True def _render_description(self, desc: str | dict[str, Any]) -> list[str]: if isinstance(desc, str) and not self._markdown_by_default: @@ -428,12 +422,15 @@ class ManpageConverter(BaseConverter): class OptionsCommonMarkRenderer(OptionDocsRestrictions, CommonMarkRenderer): pass -class CommonMarkConverter(BaseConverter): - __renderer__ = OptionsCommonMarkRenderer +class CommonMarkConverter(BaseConverter[OptionsCommonMarkRenderer]): __option_block_separator__ = "" + def __init__(self, manpage_urls: Mapping[str, str], revision: str, markdown_by_default: bool): + super().__init__(revision, markdown_by_default) + self._renderer = OptionsCommonMarkRenderer(manpage_urls) + def _parallel_render_prepare(self) -> Any: - return (self._manpage_urls, self._revision, self._markdown_by_default) + return (self._renderer._manpage_urls, self._revision, self._markdown_by_default) @classmethod def _parallel_render_init_worker(cls, a: Any) -> CommonMarkConverter: return cls(*a) @@ -481,12 +478,15 @@ class CommonMarkConverter(BaseConverter): class OptionsAsciiDocRenderer(OptionDocsRestrictions, AsciiDocRenderer): pass -class AsciiDocConverter(BaseConverter): - __renderer__ = AsciiDocRenderer +class AsciiDocConverter(BaseConverter[OptionsAsciiDocRenderer]): __option_block_separator__ = "" + def __init__(self, manpage_urls: Mapping[str, str], revision: str, markdown_by_default: bool): + super().__init__(revision, markdown_by_default) + self._renderer = OptionsAsciiDocRenderer(manpage_urls) + def _parallel_render_prepare(self) -> Any: - return (self._manpage_urls, self._revision, self._markdown_by_default) + return (self._renderer._manpage_urls, self._revision, self._markdown_by_default) @classmethod def _parallel_render_init_worker(cls, a: Any) -> AsciiDocConverter: return cls(*a) diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_asciidoc.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_asciidoc.py index 487506469954..3cf5b208f392 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_asciidoc.py +++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_asciidoc.py @@ -1,9 +1,11 @@ -import nixos_render_docs +import nixos_render_docs as nrd from sample_md import sample1 -class Converter(nixos_render_docs.md.Converter): - __renderer__ = nixos_render_docs.asciidoc.AsciiDocRenderer +class Converter(nrd.md.Converter[nrd.asciidoc.AsciiDocRenderer]): + def __init__(self, manpage_urls: dict[str, str]): + super().__init__() + self._renderer = nrd.asciidoc.AsciiDocRenderer(manpage_urls) def test_lists() -> None: c = Converter({}) diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py index 5e0d63eb6723..72700d3dbab3 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py +++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py @@ -1,4 +1,4 @@ -import nixos_render_docs +import nixos_render_docs as nrd from sample_md import sample1 @@ -6,8 +6,10 @@ from typing import Mapping, Optional import markdown_it -class Converter(nixos_render_docs.md.Converter): - __renderer__ = nixos_render_docs.commonmark.CommonMarkRenderer +class 
Converter(nrd.md.Converter[nrd.commonmark.CommonMarkRenderer]): + def __init__(self, manpage_urls: Mapping[str, str]): + super().__init__() + self._renderer = nrd.commonmark.CommonMarkRenderer(manpage_urls) # NOTE: in these tests we represent trailing spaces by ` ` and replace them with real space later, # since a number of editors will strip trailing whitespace on save and that would break the tests. diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_headings.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_headings.py index 0b73cdc8e7c7..8cbf3dabcea2 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_headings.py +++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_headings.py @@ -1,10 +1,12 @@ -import nixos_render_docs +import nixos_render_docs as nrd from markdown_it.token import Token -class Converter(nixos_render_docs.md.Converter): +class Converter(nrd.md.Converter[nrd.docbook.DocBookRenderer]): # actual renderer doesn't matter, we're just parsing. - __renderer__ = nixos_render_docs.docbook.DocBookRenderer + def __init__(self, manpage_urls: dict[str, str]) -> None: + super().__init__() + self._renderer = nrd.docbook.DocBookRenderer(manpage_urls) def test_heading_id_absent() -> None: c = Converter({}) diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_lists.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_lists.py index 660c410a85cc..f53442a96d4c 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_lists.py +++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_lists.py @@ -1,11 +1,13 @@ -import nixos_render_docs +import nixos_render_docs as nrd import pytest from markdown_it.token import Token -class Converter(nixos_render_docs.md.Converter): +class Converter(nrd.md.Converter[nrd.docbook.DocBookRenderer]): # actual renderer doesn't matter, we're just parsing. 
- __renderer__ = nixos_render_docs.docbook.DocBookRenderer + def __init__(self, manpage_urls: dict[str, str]) -> None: + super().__init__() + self._renderer = nrd.docbook.DocBookRenderer(manpage_urls) @pytest.mark.parametrize("ordered", [True, False]) def test_list_wide(ordered: bool) -> None: diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_manpage.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_manpage.py index fbfd21358a85..9b7e1652f0f6 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_manpage.py +++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_manpage.py @@ -1,4 +1,4 @@ -import nixos_render_docs +import nixos_render_docs as nrd from sample_md import sample1 @@ -6,15 +6,10 @@ from typing import Mapping, Optional import markdown_it -class Converter(nixos_render_docs.md.Converter): - def __renderer__(self, manpage_urls: Mapping[str, str], - parser: Optional[markdown_it.MarkdownIt] = None - ) -> nixos_render_docs.manpage.ManpageRenderer: - return nixos_render_docs.manpage.ManpageRenderer(manpage_urls, self.options_by_id, parser) - +class Converter(nrd.md.Converter[nrd.manpage.ManpageRenderer]): def __init__(self, manpage_urls: Mapping[str, str], options_by_id: dict[str, str] = {}): - self.options_by_id = options_by_id - super().__init__(manpage_urls) + super().__init__() + self._renderer = nrd.manpage.ManpageRenderer(manpage_urls, options_by_id) def test_inline_code() -> None: c = Converter({}) @@ -32,17 +27,15 @@ def test_expand_link_targets() -> None: def test_collect_links() -> None: c = Converter({}, { '#foo': "bar" }) - assert isinstance(c._md.renderer, nixos_render_docs.manpage.ManpageRenderer) - c._md.renderer.link_footnotes = [] + c._renderer.link_footnotes = [] assert c._render("[a](link1) [b](link2)") == "\\fBa\\fR[1]\\fR \\fBb\\fR[2]\\fR" - assert c._md.renderer.link_footnotes == ['link1', 'link2'] + assert c._renderer.link_footnotes == ['link1', 'link2'] def test_dedup_links() -> None: c = Converter({}, { '#foo': "bar" }) - assert isinstance(c._md.renderer, nixos_render_docs.manpage.ManpageRenderer) - c._md.renderer.link_footnotes = [] + c._renderer.link_footnotes = [] assert c._render("[a](link) [b](link)") == "\\fBa\\fR[1]\\fR \\fBb\\fR[1]\\fR" - assert c._md.renderer.link_footnotes == ['link'] + assert c._renderer.link_footnotes == ['link'] def test_full() -> None: c = Converter({ 'man(1)': 'http://example.org' }) diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py index 1d836a916d96..f94ede6382bf 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py +++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py @@ -1,10 +1,12 @@ -import nixos_render_docs +import nixos_render_docs as nrd from markdown_it.token import Token -class Converter(nixos_render_docs.md.Converter): +class Converter(nrd.md.Converter[nrd.docbook.DocBookRenderer]): # actual renderer doesn't matter, we're just parsing. - __renderer__ = nixos_render_docs.docbook.DocBookRenderer + def __init__(self, manpage_urls: dict[str, str]) -> None: + super().__init__() + self._renderer = nrd.docbook.DocBookRenderer(manpage_urls) def test_attr_span_parsing() -> None: c = Converter({}) From 6f253fc70b5f21fde4a61f873650778478cdcc30 Mon Sep 17 00:00:00 2001 From: pennae Date: Fri, 17 Feb 2023 21:29:22 +0100 Subject: [PATCH 007/154] nixos-render-docs: drop options, env parameters these weren't used for anything. 
options never was (and does not contain any information for the renderer that we *want* to honor), and env is not used because typed renderer state is much more useful for all our cases. --- .../src/nixos_render_docs/asciidoc.py | 139 +++++---------- .../src/nixos_render_docs/commonmark.py | 134 +++++--------- .../src/nixos_render_docs/docbook.py | 161 ++++++----------- .../src/nixos_render_docs/manpage.py | 141 +++++---------- .../src/nixos_render_docs/manual.py | 53 +++--- .../src/nixos_render_docs/md.py | 168 ++++++------------ .../src/nixos_render_docs/options.py | 25 +-- .../src/nixos_render_docs/types.py | 5 +- 8 files changed, 288 insertions(+), 538 deletions(-) diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py index 2730dc5e024f..7fc14c1631ef 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py @@ -1,4 +1,4 @@ -from collections.abc import Mapping, MutableMapping, Sequence +from collections.abc import Mapping, Sequence from dataclasses import dataclass from typing import Any, cast, Optional from urllib.parse import quote @@ -6,7 +6,6 @@ from urllib.parse import quote from .md import Renderer from markdown_it.token import Token -from markdown_it.utils import OptionsDict _asciidoc_escapes = { # escape all dots, just in case one is pasted at SOL @@ -95,142 +94,103 @@ class AsciiDocRenderer(Renderer): self._list_stack.pop() return "" - def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def text(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._parstack[-1].continuing = True return asciidoc_escape(token.content) - def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._break() - def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "" - def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str: return " +\n" - def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str: return f" " - def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._parstack[-1].continuing = True return f"``{asciidoc_escape(token.content)}``" - def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: - return self.fence(token, tokens, i, options, env) - def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return self.fence(token, tokens, i) + def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: 
self._parstack[-1].continuing = True return f"link:{quote(cast(str, token.attrs['href']), safe='/:')}[" - def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "]" - def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._enter_block(True) # allow the next token to be a block or an inline. return f'\n{self._list_stack[-1].head} {{empty}}' - def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._leave_block() return "\n" - def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._list_open(token, '*') - def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._list_close() - def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "__" - def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "__" - def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "**" - def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "**" - def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str: attrs = f"[source,{token.info}]\n" if token.info else "" code = token.content if code.endswith('\n'): code = code[:-1] return f"{self._break(True)}{attrs}----\n{code}\n----" - def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: pbreak = self._break(True) self._enter_block(False) return f"{pbreak}[quote]\n{self._parstack[-2].block_delim}\n" - def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._leave_block() return f"\n{self._parstack[-1].block_delim}" - def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return 
self._admonition_open("NOTE") - def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_open("CAUTION") - def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_open("IMPORTANT") - def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_open("TIP") - def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_open("WARNING") - def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return f"{self._break()}[]" - def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "" - def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._break() - def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._enter_block(True) return ":: {empty}" - def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "" - def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + 
def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._leave_block() return "\n" - def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._parstack[-1].continuing = True content = asciidoc_escape(token.content) if token.meta['name'] == 'manpage' and (url := self._manpage_urls.get(token.content)): return f"link:{quote(url, safe='/:')}[{content}]" return f"[.{token.meta['name']}]``{asciidoc_escape(token.content)}``" - def inline_anchor(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def inline_anchor(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._parstack[-1].continuing = True return f"[[{token.attrs['id']}]]" - def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._parstack[-1].continuing = True (id_part, class_part) = ("", "") if id := token.attrs.get('id'): @@ -240,22 +200,17 @@ class AsciiDocRenderer(Renderer): class_part = "kbd:[" self._attrspans.append("]") else: - return super().attr_span_begin(token, tokens, i, options, env) + return super().attr_span_begin(token, tokens, i) else: self._attrspans.append("") return id_part + class_part - def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._attrspans.pop() - def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return token.markup.replace("#", "=") + " " - def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "\n" - def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._list_open(token, '.') - def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._list_close() diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py index 8fe32289b85d..9649eb653d44 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py @@ -1,11 +1,10 @@ -from collections.abc import Mapping, MutableMapping, Sequence +from collections.abc import Mapping, Sequence from dataclasses import dataclass from typing import Any, cast, Optional from .md import md_escape, md_make_code, Renderer from markdown_it.token import Token -from markdown_it.utils import OptionsDict @dataclass(kw_only=True) class List: @@ -57,39 +56,29 @@ class CommonMarkRenderer(Renderer): return s return 
f"\n{self._parstack[-1].indent}".join(s.splitlines()) - def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def text(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._parstack[-1].continuing = True return self._indent_raw(md_escape(token.content)) - def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._maybe_parbreak() - def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "" - def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str: return f" {self._break()}" - def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._break() - def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._parstack[-1].continuing = True return md_make_code(token.content) - def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: - return self.fence(token, tokens, i, options, env) - def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return self.fence(token, tokens, i) + def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._parstack[-1].continuing = True self._link_stack.append(cast(str, token.attrs['href'])) return "[" - def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return f"]({md_escape(self._link_stack.pop())})" - def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: lst = self._list_stack[-1] lbreak = "" if not lst.first_item_seen else self._break() * (1 if lst.compact else 2) lst.first_item_seen = True @@ -99,132 +88,99 @@ class CommonMarkRenderer(Renderer): lst.next_idx += 1 self._enter_block(" " * (len(head) + 1)) return f'{lbreak}{head} ' - def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._leave_block() return "" - def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._list_stack.append(List(compact=bool(token.meta['compact']))) return self._maybe_parbreak() - def bullet_list_close(self, token: 
Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._list_stack.pop() return "" - def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "*" - def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "*" - def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "**" - def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "**" - def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str: code = token.content if code.endswith('\n'): code = code[:-1] pbreak = self._maybe_parbreak() return pbreak + self._indent_raw(md_make_code(code, info=token.info, multiline=True)) - def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: pbreak = self._maybe_parbreak() self._enter_block("> ") return pbreak + "> " - def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._leave_block() return "" - def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_open("Note") - def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_open("Caution") - def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_open("Important") - def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def 
tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_open("Tip") - def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_open("Warning") - def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._list_stack.append(List(compact=False)) return "" - def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._list_stack.pop() return "" - def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: pbreak = self._maybe_parbreak() self._enter_block(" ") # add an opening zero-width non-joiner to separate *our* emphasis from possible # emphasis in the provided term return f'{pbreak} - *{chr(0x200C)}' - def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return f"{chr(0x200C)}*" - def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._parstack[-1].continuing = True return "" - def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._leave_block() return "" - def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._parstack[-1].continuing = True content = md_make_code(token.content) if token.meta['name'] == 'manpage' and (url := self._manpage_urls.get(token.content)): return f"[{content}]({url})" return content # no roles in regular commonmark - def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str: # there's no way we can emit attrspans correctly in all cases. we could use inline # html for ids, but that would not round-trip. same holds for classes. 
since this # renderer is only used for approximate options export and all of these things are # not allowed in options we can ignore them for now. return "" - def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "" - def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return token.markup + " " - def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "\n" - def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._list_stack.append( List(next_idx = cast(int, token.attrs.get('start', 1)), compact = bool(token.meta['compact']))) return self._maybe_parbreak() - def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._list_stack.pop() return "" diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py index b279ace06ff1..4c90606ff455 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py @@ -1,9 +1,8 @@ -from collections.abc import Mapping, MutableMapping, Sequence +from collections.abc import Mapping, Sequence from typing import Any, cast, Optional, NamedTuple import markdown_it from markdown_it.token import Token -from markdown_it.utils import OptionsDict from xml.sax.saxutils import escape, quoteattr from .md import Renderer @@ -44,13 +43,11 @@ class DocBookRenderer(Renderer): self._headings = [] self._attrspans = [] - def render(self, tokens: Sequence[Token], options: OptionsDict, - env: MutableMapping[str, Any]) -> str: - result = super().render(tokens, options, env) - result += self._close_headings(None, env) + def render(self, tokens: Sequence[Token]) -> str: + result = super().render(tokens) + result += self._close_headings(None) return result - def renderInline(self, tokens: Sequence[Token], options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def renderInline(self, tokens: Sequence[Token]) -> str: # HACK to support docbook links and xrefs. link handling is only necessary because the docbook # manpage stylesheet converts - in urls to a mathematical minus, which may be somewhat incorrect. 
         for i, token in enumerate(tokens):
@@ -64,135 +61,98 @@ class DocBookRenderer(Renderer):
                 if tokens[i + 1].type == 'text' and tokens[i + 1].content == token.attrs['href']:
                     tokens[i + 1].content = ''
-        return super().renderInline(tokens, options, env)
+        return super().renderInline(tokens)

-    def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-             env: MutableMapping[str, Any]) -> str:
+    def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return escape(token.content)
-    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<para>"
-    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</para>"
-    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<literallayout>\n</literallayout>"
-    def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         # should check options.breaks() and emit hard break if so
         return "\n"
-    def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                    env: MutableMapping[str, Any]) -> str:
+    def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return f"<literal>{escape(token.content)}</literal>"
-    def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return f"<programlisting>{escape(token.content)}</programlisting>"
-    def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._link_tags.append(token.tag)
         href = cast(str, token.attrs['href'])
         (attr, start) = ('linkend', 1) if href[0] == '#' else ('xlink:href', 0)
         return f"<{token.tag} {attr}={quoteattr(href[start:])}>"
-    def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return f"</{self._link_tags.pop()}>"
-    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<listitem><para>"
-    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</para></listitem>\n"
     # HACK open and close para for docbook change size. remove soon.
-    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
+    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         spacing = ' spacing="compact"' if token.meta.get('compact', False) else ''
         return f"<itemizedlist{spacing}>\n"
-    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                          env: MutableMapping[str, Any]) -> str:
+    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "\n</itemizedlist>"
-    def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<emphasis>"
-    def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</emphasis>"
-    def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                    env: MutableMapping[str, Any]) -> str:
+    def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return '<emphasis role="strong">'
-    def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</emphasis>"
-    def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-              env: MutableMapping[str, Any]) -> str:
+    def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         info = f" language={quoteattr(token.info)}" if token.info != "" else ""
         return f"<programlisting{info}>{escape(token.content)}</programlisting>"
-    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<blockquote>"
-    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
+    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</blockquote>"
-    def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<note>"
-    def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</note>"
-    def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<caution>"
-    def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</caution>"
-    def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<important>"
-    def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</important>"
-    def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<tip>"
-    def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</tip>"
-    def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<warning>"
-    def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</warning>"
     # markdown-it emits tokens based on the html syntax tree, but docbook is
     # slightly different. html has <dl> { <dt> { <dd> } } </dl>,
     # docbook has <variablelist> { <varlistentry> <term> <listitem> } </variablelist>.
     # we have to reject multiple definitions for the same term for time being.
-    def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._deflists.append(Deflist())
         return "<variablelist>"
-    def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._deflists.pop()
         return "</variablelist>"
-    def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._deflists[-1].has_dd = False
         return "<varlistentry><term>"
-    def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</term>"
-    def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         if self._deflists[-1].has_dd:
             raise Exception("multiple definitions per term not supported")
         self._deflists[-1].has_dd = True
         return "<listitem>"
-    def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</listitem></varlistentry>"
-    def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         if token.meta['name'] == 'command':
             return f"<command>{escape(token.content)}</command>"
         if token.meta['name'] == 'file':
@@ -215,8 +175,7 @@ class DocBookRenderer(Renderer):
         else:
             return ref
         raise NotImplementedError("md node not supported yet", token)
-    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         # we currently support *only* inline anchors and the special .keycap class to produce
         # docbook elements.
         (id_part, class_part) = ("", "")
@@ -227,31 +186,26 @@ class DocBookRenderer(Renderer):
                 class_part = "<keycap>"
                 self._attrspans.append("</keycap>")
             else:
-                return super().attr_span_begin(token, tokens, i, options, env)
+                return super().attr_span_begin(token, tokens, i)
         else:
             self._attrspans.append("")
         return id_part + class_part
-    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._attrspans.pop()
-    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                          env: MutableMapping[str, Any]) -> str:
+    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         start = f' startingnumber="{token.attrs["start"]}"' if 'start' in token.attrs else ""
         spacing = ' spacing="compact"' if token.meta.get('compact', False) else ''
         return f"<orderedlist{start}{spacing}>"
-    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                           env: MutableMapping[str, Any]) -> str:
+    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return f"</orderedlist>"
-    def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         hlevel = int(token.tag[1:])
-        result = self._close_headings(hlevel, env)
-        (tag, attrs) = self._heading_tag(token, tokens, i, options, env)
+        result = self._close_headings(hlevel)
+        (tag, attrs) = self._heading_tag(token, tokens, i)
         self._headings.append(Heading(tag, hlevel))
         attrs_str = "".join([ f" {k}={quoteattr(v)}" for k, v in attrs.items() ])
         return result + f'<{tag}{attrs_str}>\n'
-    def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         heading = self._headings[-1]
         result = ''
         if heading.container_tag == 'part':
@@ -263,16 +217,14 @@ class DocBookRenderer(Renderer):
                 maybe_id = " xml:id=" + quoteattr(id + "-intro")
             result += f"<partintro{maybe_id}>"
         return result
-    def example_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         if id := token.attrs.get('id'):
             return f"<example xml:id={quoteattr(cast(str, id))}>"
         return "<example>"
-    def example_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</example>"

-    def _close_headings(self, level: Optional[int], env: MutableMapping[str, Any]) -> str:
+    def _close_headings(self, level: Optional[int]) -> str:
         # we rely on markdown-it producing h{1..6} tags in token.tag for this to work
         result = []
         while len(self._headings):
@@ -285,8 +237,7 @@ class DocBookRenderer(Renderer):
                 break
         return "\n".join(result)

-    def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> tuple[str, dict[str, str]]:
+    def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> tuple[str, dict[str, str]]:
         attrs = {}
         if id := token.attrs.get('id'):
             attrs['xml:id'] = cast(str, id)
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py
index a61537b49c54..a01aa1b4634b 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py @@ -1,4 +1,4 @@ -from collections.abc import Mapping, MutableMapping, Sequence +from collections.abc import Mapping, Sequence from dataclasses import dataclass from typing import Any, cast, Iterable, Optional @@ -6,7 +6,6 @@ import re import markdown_it from markdown_it.token import Token -from markdown_it.utils import OptionsDict from .md import Renderer @@ -123,36 +122,27 @@ class ManpageRenderer(Renderer): self._leave_block() return ".RE" - def render(self, tokens: Sequence[Token], options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def render(self, tokens: Sequence[Token]) -> str: self._do_parbreak_stack = [ False ] self._font_stack = [ "\\fR" ] - return super().render(tokens, options, env) + return super().render(tokens) - def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def text(self, token: Token, tokens: Sequence[Token], i: int) -> str: return man_escape(token.content) - def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._maybe_parbreak() - def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "" - def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str: return ".br" - def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str: return " " - def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str: s = _protect_spaces(man_escape(token.content)) return f"\\fR\\(oq{s}\\(cq\\fP" if self.inline_code_is_quoted else s - def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: - return self.fence(token, tokens, i, options, env) - def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return self.fence(token, tokens, i) + def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: href = cast(str, token.attrs['href']) self._link_stack.append(href) text = "" @@ -161,8 +151,7 @@ class ManpageRenderer(Renderer): text = self._href_targets[href] self._font_stack.append("\\fB") return f"\\fB{text}\0 <" - def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: href = self._link_stack.pop() text = "" if self.link_footnotes is not None: @@ -174,8 +163,7 @@ class ManpageRenderer(Renderer): text = "\\fR" + man_escape(f"[{idx}]") self._font_stack.pop() return 
f">\0 {text}{self._font_stack[-1]}" - def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._enter_block() lst = self._list_stack[-1] maybe_space = '' if lst.compact or not lst.first_item_seen else '.sp\n' @@ -189,36 +177,28 @@ class ManpageRenderer(Renderer): f'.RS {lst.width}\n' f"\\h'-{len(head) + 1}'\\fB{man_escape(head)}\\fP\\h'1'\\c" ) - def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._leave_block() return ".RE" - def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._list_stack.append(List(width=4, compact=bool(token.meta['compact']))) return self._maybe_parbreak() - def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._list_stack.pop() return "" - def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._font_stack.append("\\fI") return "\\fI" - def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._font_stack.pop() return self._font_stack[-1] - def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._font_stack.append("\\fB") return "\\fB" - def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._font_stack.pop() return self._font_stack[-1] - def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str: s = man_escape(token.content).rstrip('\n') return ( '.sp\n' @@ -228,8 +208,7 @@ class ManpageRenderer(Renderer): '.fi\n' '.RE' ) - def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: maybe_par = self._maybe_parbreak("\n") self._enter_block() return ( @@ -237,62 +216,44 @@ class ManpageRenderer(Renderer): ".RS 4\n" f"\\h'-3'\\fI\\(lq\\(rq\\fP\\h'1'\\c" ) - def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._leave_block() return ".RE" - def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def note_open(self, token: Token, tokens: 
Sequence[Token], i: int) -> str: return self._admonition_open("Note") - def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_open( "Caution") - def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_open( "Important") - def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_open( "Tip") - def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_open( "Warning") - def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return self._admonition_close() - def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return ".RS 4" - def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return ".RE" - def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: return ".PP" - def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "" - def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._enter_block() return ".RS 4" - def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, 
Any]) -> str: + def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._leave_block() return ".RE" - def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str: if token.meta['name'] in [ 'command', 'env', 'option' ]: return f'\\fB{man_escape(token.content)}\\fP' elif token.meta['name'] in [ 'file', 'var' ]: @@ -303,23 +264,18 @@ class ManpageRenderer(Renderer): return f'\\fB{man_escape(page)}\\fP\\fR({man_escape(section)})\\fP' else: raise NotImplementedError("md node not supported yet", token) - def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str: # mdoc knows no anchors so we can drop those, but classes must be rejected. if 'class' in token.attrs: - return super().attr_span_begin(token, tokens, i, options, env) + return super().attr_span_begin(token, tokens, i) return "" - def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str: return "" - def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported in manpages", token) - def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported in manpages", token) - def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: # max item head width for a number, a dot, and one leading space and one trailing space width = 3 + len(str(cast(int, token.meta['end']))) self._list_stack.append( @@ -327,7 +283,6 @@ class ManpageRenderer(Renderer): next_idx = cast(int, token.attrs.get('start', 1)), compact = bool(token.meta['compact']))) return self._maybe_parbreak() - def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._list_stack.pop() return "" diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py index dfcdb96cc21f..7ac82958a5da 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py @@ -2,14 +2,13 @@ import argparse import json from abc import abstractmethod -from collections.abc import Mapping, MutableMapping, Sequence +from collections.abc import Mapping, Sequence from pathlib import Path from typing import Any, cast, NamedTuple, Optional, Union from xml.sax.saxutils import escape, quoteattr import markdown_it from markdown_it.token import Token -from markdown_it.utils import OptionsDict from . 
import options from .docbook import DocBookRenderer, Heading @@ -30,8 +29,7 @@ class ManualDocBookRenderer(DocBookRenderer): 'included_options': self.included_options, } - def render(self, tokens: Sequence[Token], options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def render(self, tokens: Sequence[Token]) -> str: wanted = { 'h1': 'title' } wanted |= { 'h2': 'subtitle' } if self._toplevel_tag == 'book' else {} for (i, (tag, kind)) in enumerate(wanted.items()): @@ -62,16 +60,15 @@ class ManualDocBookRenderer(DocBookRenderer): return (f'' - f' {self.renderInline(tokens[1].children, options, env)}' - f' {self.renderInline(tokens[4].children, options, env)}' - f' {super().render(tokens[6:], options, env)}' + f' {self.renderInline(tokens[1].children)}' + f' {self.renderInline(tokens[4].children)}' + f' {super().render(tokens[6:])}' f'') - return super().render(tokens, options, env) + return super().render(tokens) - def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> tuple[str, dict[str, str]]: - (tag, attrs) = super()._heading_tag(token, tokens, i, options, env) + def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> tuple[str, dict[str, str]]: + (tag, attrs) = super()._heading_tag(token, tokens, i) # render() has already verified that we don't have supernumerary headings and since the # book tag is handled specially we can leave the check this simple if token.tag != 'h1': @@ -81,8 +78,7 @@ class ManualDocBookRenderer(DocBookRenderer): 'xmlns:xlink': "http://www.w3.org/1999/xlink", }) - def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int, - options: OptionsDict, env: MutableMapping[str, Any]) -> str: + def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str: result = [] # close existing partintro. the generic render doesn't really need this because # it doesn't have a concept of structure in the way the manual does. @@ -94,26 +90,21 @@ class ManualDocBookRenderer(DocBookRenderer): r = ManualDocBookRenderer(tag, self._manpage_urls) for (included, path) in token.meta['included']: try: - result.append(r.render(included, options, env)) + result.append(r.render(included)) except Exception as e: raise RuntimeError(f"rendering {path}") from e return "".join(result) - def included_options(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str: return cast(str, token.meta['rendered-options']) # TODO minimize docbook diffs with existing conversions. remove soon. 
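The hunks above all make the same move: state that used to travel through `options` and `env` now lives on the renderer instance, and every rule receives only `(token, tokens, i)`. As a minimal sketch of what that means for downstream code (hypothetical subclass for illustration; only `ManualDocBookRenderer` and the rule signature are taken from this patch), a rule that needs per-render state now keeps it on `self`:

    from collections.abc import Sequence
    from markdown_it.token import Token

    class CountingRenderer(ManualDocBookRenderer):
        # hypothetical: per-render state kept on the instance, where the
        # pre-patch code would have stashed it in the env mapping
        _fences_seen: int = 0

        def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
            self._fences_seen += 1  # count fences while delegating rendering
            return super().fence(token, tokens, i)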
- def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: - return super().paragraph_open(token, tokens, i, options, env) + "\n " - def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: - return "\n" + super().paragraph_close(token, tokens, i, options, env) - def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return super().paragraph_open(token, tokens, i) + "\n " + def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "\n" + super().paragraph_close(token, tokens, i) + def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str: return f"\n{escape(token.content)}" - def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str: info = f" language={quoteattr(token.info)}" if token.info != "" else "" return f"\n{escape(token.content)}" @@ -134,8 +125,8 @@ class DocBookConverter(Converter[ManualDocBookRenderer]): except Exception as e: raise RuntimeError(f"failed to render manual {file}") from e - def _parse(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> list[Token]: - tokens = super()._parse(src, env) + def _parse(self, src: str) -> list[Token]: + tokens = super()._parse(src) for token in tokens: if token.type != "fence" or not token.info.startswith("{=include=} "): continue @@ -145,12 +136,12 @@ class DocBookConverter(Converter[ManualDocBookRenderer]): self._parse_options(token) elif typ in [ 'sections', 'chapters', 'preface', 'parts', 'appendix' ]: token.type = 'included_' + typ - self._parse_included_blocks(token, env) + self._parse_included_blocks(token) else: raise RuntimeError(f"unsupported structural include type '{typ}'") return tokens - def _parse_included_blocks(self, token: Token, env: Optional[MutableMapping[str, Any]]) -> None: + def _parse_included_blocks(self, token: Token) -> None: assert token.map included = token.meta['included'] = [] for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2): @@ -161,7 +152,7 @@ class DocBookConverter(Converter[ManualDocBookRenderer]): try: self._base_paths.append(path) with open(path, 'r') as f: - tokens = self._parse(f.read(), env) + tokens = self._parse(f.read()) included.append((tokens, path)) self._base_paths.pop() except Exception as e: diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py index d73a1715f4f9..e8fee1b71328 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py @@ -104,169 +104,120 @@ class Renderer: def _join_inline(self, ls: Iterable[str]) -> str: return "".join(ls) - def admonition_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def admonition_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: tag = token.meta['kind'] self._admonition_stack.append(tag) - return self._admonitions[tag][0](token, tokens, i, options, env) - def admonition_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: 
MutableMapping[str, Any]) -> str: - return self._admonitions[self._admonition_stack.pop()][1](token, tokens, i, options, env) + return self._admonitions[tag][0](token, tokens, i) + def admonition_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return self._admonitions[self._admonition_stack.pop()][1](token, tokens, i) - def render(self, tokens: Sequence[Token], options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def render(self, tokens: Sequence[Token]) -> str: def do_one(i: int, token: Token) -> str: if token.type == "inline": assert token.children is not None - return self.renderInline(token.children, options, env) + return self.renderInline(token.children) elif token.type in self.rules: - return self.rules[token.type](tokens[i], tokens, i, options, env) + return self.rules[token.type](tokens[i], tokens, i) else: raise NotImplementedError("md token not supported yet", token) return self._join_block(map(lambda arg: do_one(*arg), enumerate(tokens))) - def renderInline(self, tokens: Sequence[Token], options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def renderInline(self, tokens: Sequence[Token]) -> str: def do_one(i: int, token: Token) -> str: if token.type in self.rules: - return self.rules[token.type](tokens[i], tokens, i, options, env) + return self.rules[token.type](tokens[i], tokens, i) else: raise NotImplementedError("md token not supported yet", token) return self._join_inline(map(lambda arg: do_one(*arg), enumerate(tokens))) - def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def text(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> 
str: raise RuntimeError("md token not supported", token) - def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def note_close(self, 
token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def 
dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def example_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) - def example_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) def _is_escaped(src: str, pos: int) -> bool: @@ -510,10 +461,9 @@ class Converter(ABC, Generic[TR]): self._md.use(_block_attr) self._md.enable(["smartquotes", "replacements"]) - def _parse(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> list[Token]: - return self._md.parse(src, env if env is not None else {}) + def _parse(self, src: str) -> list[Token]: + return self._md.parse(src, {}) - def _render(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> str: - env = {} if env is None else env - tokens = self._parse(src, env) - return self._renderer.render(tokens, self._md.options, env) + def _render(self, src: str) -> str: + tokens = self._parse(src) + return self._renderer.render(tokens) diff --git 
a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py index 8f64bd3ed538..88c6d7443318 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py @@ -4,8 +4,7 @@ import argparse import json from abc import abstractmethod -from collections.abc import Mapping, MutableMapping, Sequence -from markdown_it.utils import OptionsDict +from collections.abc import Mapping, Sequence from markdown_it.token import Token from typing import Any, Generic, Optional from urllib.parse import quote @@ -174,29 +173,23 @@ class BaseConverter(Converter[md.TR], Generic[md.TR]): def finalize(self) -> str: raise NotImplementedError() class OptionDocsRestrictions: - def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported in options doc", token) - def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported in options doc", token) - def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported in options doc", token) - def example_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported in options doc", token) class OptionsDocBookRenderer(OptionDocsRestrictions, DocBookRenderer): # TODO keep optionsDocBook diff small. remove soon if rendering is still good. 
- def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: token.meta['compact'] = False - return super().ordered_list_open(token, tokens, i, options, env) - def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict, - env: MutableMapping[str, Any]) -> str: + return super().ordered_list_open(token, tokens, i) + def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: token.meta['compact'] = False - return super().bullet_list_open(token, tokens, i, options, env) + return super().bullet_list_open(token, tokens, i) class DocBookConverter(BaseConverter[OptionsDocBookRenderer]): __option_block_separator__ = "" diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/types.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/types.py index d20e056aacdc..c6146429ea02 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/types.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/types.py @@ -1,8 +1,7 @@ -from collections.abc import Sequence, MutableMapping +from collections.abc import Sequence from typing import Any, Callable, Optional, Tuple, NamedTuple from markdown_it.token import Token -from markdown_it.utils import OptionsDict OptionLoc = str | dict[str, str] Option = dict[str, str | dict[str, str] | list[OptionLoc]] @@ -12,4 +11,4 @@ class RenderedOption(NamedTuple): lines: list[str] links: Optional[list[str]] = None -RenderFn = Callable[[Token, Sequence[Token], int, OptionsDict, MutableMapping[str, Any]], str] +RenderFn = Callable[[Token, Sequence[Token], int], str] From 068916ae8fccebf137ffe68b511bff26f1069ef8 Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 19 Feb 2023 15:43:11 +0100 Subject: [PATCH 008/154] nixos-render-docs: keep revision in renderer, not converter ultimately it's the renderer that needs it, for the options rendering that will be simplified in a bit. --- .../src/nixos_render_docs/manual.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py index 7ac82958a5da..78bf7659451b 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py @@ -16,10 +16,12 @@ from .md import Converter class ManualDocBookRenderer(DocBookRenderer): _toplevel_tag: str + _revision: str - def __init__(self, toplevel_tag: str, manpage_urls: Mapping[str, str]): + def __init__(self, toplevel_tag: str, revision: str, manpage_urls: Mapping[str, str]): super().__init__(manpage_urls) self._toplevel_tag = toplevel_tag + self._revision = revision self.rules |= { 'included_sections': lambda *args: self._included_thing("section", *args), 'included_chapters': lambda *args: self._included_thing("chapter", *args), @@ -87,7 +89,7 @@ class ManualDocBookRenderer(DocBookRenderer): self._headings[-1] = self._headings[-1]._replace(partintro_closed=True) # must nest properly for structural includes. this requires saving at least # the headings stack, but creating new renderers is cheap and much easier. 
- r = ManualDocBookRenderer(tag, self._manpage_urls) + r = ManualDocBookRenderer(tag, self._revision, self._manpage_urls) for (included, path) in token.meta['included']: try: result.append(r.render(included)) @@ -110,12 +112,10 @@ class ManualDocBookRenderer(DocBookRenderer): class DocBookConverter(Converter[ManualDocBookRenderer]): _base_paths: list[Path] - _revision: str def __init__(self, manpage_urls: Mapping[str, str], revision: str): super().__init__() - self._renderer = ManualDocBookRenderer('book', manpage_urls) - self._revision = revision + self._renderer = ManualDocBookRenderer('book', revision, manpage_urls) def convert(self, file: Path) -> str: self._base_paths = [ file ] @@ -182,7 +182,7 @@ class DocBookConverter(Converter[ManualDocBookRenderer]): try: conv = options.DocBookConverter( - self._renderer._manpage_urls, self._revision, False, 'fragment', varlist_id, id_prefix) + self._renderer._manpage_urls, self._renderer._revision, False, 'fragment', varlist_id, id_prefix) with open(self._base_paths[-1].parent / source, 'r') as f: conv.add_options(json.load(f)) token.meta['rendered-options'] = conv.finalize(fragment=True) From 5b8be28e66a31ba4683d0fc337f67a46d5db8f9a Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 19 Feb 2023 15:56:52 +0100 Subject: [PATCH 009/154] nixos-render-docs: don't render options during manual parsing we should really be rendering options at *rendering* time, not at parse time. currently this is just an academic exercise, but the html renderer will have to inspect the options.json data after the entire document has been parsed, but before anything gets rendered. --- .../src/nixos_render_docs/manual.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py index 78bf7659451b..780a5f38c32a 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py @@ -97,7 +97,10 @@ class ManualDocBookRenderer(DocBookRenderer): raise RuntimeError(f"rendering {path}") from e return "".join(result) def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str: - return cast(str, token.meta['rendered-options']) + conv = options.DocBookConverter(self._manpage_urls, self._revision, False, 'fragment', + token.meta['list-id'], token.meta['id-prefix']) + conv.add_options(token.meta['source']) + return conv.finalize(fragment=True) # TODO minimize docbook diffs with existing conversions. remove soon. 
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: @@ -181,11 +184,10 @@ class DocBookConverter(Converter[ManualDocBookRenderer]): " ".join(items.keys())) try: - conv = options.DocBookConverter( - self._renderer._manpage_urls, self._renderer._revision, False, 'fragment', varlist_id, id_prefix) with open(self._base_paths[-1].parent / source, 'r') as f: - conv.add_options(json.load(f)) - token.meta['rendered-options'] = conv.finalize(fragment=True) + token.meta['id-prefix'] = id_prefix + token.meta['list-id'] = varlist_id + token.meta['source'] = json.load(f) except Exception as e: raise RuntimeError(f"processing options block in line {token.map[0] + 1}") from e From 2ab8e742a541659baa0470e3be8fc7e01ff51175 Mon Sep 17 00:00:00 2001 From: pennae Date: Wed, 15 Feb 2023 12:57:32 +0100 Subject: [PATCH 010/154] nixos-render-docs: move recursive manual parsing to base class the html renderer will need all of these functions as well. some extensions will be needed, but we'll add those as they become necessary. --- .../src/nixos_render_docs/manual.py | 153 +++++++++--------- 1 file changed, 77 insertions(+), 76 deletions(-) diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py index 780a5f38c32a..2e83ac90b5b6 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py @@ -4,16 +4,90 @@ import json from abc import abstractmethod from collections.abc import Mapping, Sequence from pathlib import Path -from typing import Any, cast, NamedTuple, Optional, Union +from typing import Any, cast, Generic, NamedTuple, Optional, Union from xml.sax.saxutils import escape, quoteattr import markdown_it from markdown_it.token import Token -from . import options +from . 
import md, options from .docbook import DocBookRenderer, Heading from .md import Converter +class BaseConverter(Converter[md.TR], Generic[md.TR]): + _base_paths: list[Path] + + def convert(self, file: Path) -> str: + self._base_paths = [ file ] + try: + with open(file, 'r') as f: + return self._render(f.read()) + except Exception as e: + raise RuntimeError(f"failed to render manual {file}") from e + + def _parse(self, src: str) -> list[Token]: + tokens = super()._parse(src) + for token in tokens: + if token.type != "fence" or not token.info.startswith("{=include=} "): + continue + typ = token.info[12:].strip() + if typ == 'options': + token.type = 'included_options' + self._parse_options(token) + elif typ in [ 'sections', 'chapters', 'preface', 'parts', 'appendix' ]: + token.type = 'included_' + typ + self._parse_included_blocks(token) + else: + raise RuntimeError(f"unsupported structural include type '{typ}'") + return tokens + + def _parse_included_blocks(self, token: Token) -> None: + assert token.map + included = token.meta['included'] = [] + for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2): + line = line.strip() + path = self._base_paths[-1].parent / line + if path in self._base_paths: + raise RuntimeError(f"circular include found in line {lnum}") + try: + self._base_paths.append(path) + with open(path, 'r') as f: + tokens = self._parse(f.read()) + included.append((tokens, path)) + self._base_paths.pop() + except Exception as e: + raise RuntimeError(f"processing included file {path} from line {lnum}") from e + + def _parse_options(self, token: Token) -> None: + assert token.map + + items = {} + for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2): + if len(args := line.split(":", 1)) != 2: + raise RuntimeError(f"options directive with no argument in line {lnum}") + (k, v) = (args[0].strip(), args[1].strip()) + if k in items: + raise RuntimeError(f"duplicate options directive {k} in line {lnum}") + items[k] = v + try: + id_prefix = items.pop('id-prefix') + varlist_id = items.pop('list-id') + source = items.pop('source') + except KeyError as e: + raise RuntimeError(f"options directive {e} missing in block at line {token.map[0] + 1}") + if items.keys(): + raise RuntimeError( + f"unsupported options directives in block at line {token.map[0] + 1}", + " ".join(items.keys())) + + try: + with open(self._base_paths[-1].parent / source, 'r') as f: + token.meta['id-prefix'] = id_prefix + token.meta['list-id'] = varlist_id + token.meta['source'] = json.load(f) + except Exception as e: + raise RuntimeError(f"processing options block in line {token.map[0] + 1}") from e + class ManualDocBookRenderer(DocBookRenderer): _toplevel_tag: str _revision: str @@ -113,84 +187,11 @@ class ManualDocBookRenderer(DocBookRenderer): info = f" language={quoteattr(token.info)}" if token.info != "" else "" return f"\n{escape(token.content)}" -class DocBookConverter(Converter[ManualDocBookRenderer]): - _base_paths: list[Path] - +class DocBookConverter(BaseConverter[ManualDocBookRenderer]): def __init__(self, manpage_urls: Mapping[str, str], revision: str): super().__init__() self._renderer = ManualDocBookRenderer('book', revision, manpage_urls) - def convert(self, file: Path) -> str: - self._base_paths = [ file ] - try: - with open(file, 'r') as f: - return self._render(f.read()) - except Exception as e: - raise RuntimeError(f"failed to render manual {file}") from e - - def _parse(self, src: str) -> list[Token]: - tokens = super()._parse(src) - for token in tokens: - if 
token.type != "fence" or not token.info.startswith("{=include=} "): - continue - typ = token.info[12:].strip() - if typ == 'options': - token.type = 'included_options' - self._parse_options(token) - elif typ in [ 'sections', 'chapters', 'preface', 'parts', 'appendix' ]: - token.type = 'included_' + typ - self._parse_included_blocks(token) - else: - raise RuntimeError(f"unsupported structural include type '{typ}'") - return tokens - - def _parse_included_blocks(self, token: Token) -> None: - assert token.map - included = token.meta['included'] = [] - for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2): - line = line.strip() - path = self._base_paths[-1].parent / line - if path in self._base_paths: - raise RuntimeError(f"circular include found in line {lnum}") - try: - self._base_paths.append(path) - with open(path, 'r') as f: - tokens = self._parse(f.read()) - included.append((tokens, path)) - self._base_paths.pop() - except Exception as e: - raise RuntimeError(f"processing included file {path} from line {lnum}") from e - - def _parse_options(self, token: Token) -> None: - assert token.map - - items = {} - for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2): - if len(args := line.split(":", 1)) != 2: - raise RuntimeError(f"options directive with no argument in line {lnum}") - (k, v) = (args[0].strip(), args[1].strip()) - if k in items: - raise RuntimeError(f"duplicate options directive {k} in line {lnum}") - items[k] = v - try: - id_prefix = items.pop('id-prefix') - varlist_id = items.pop('list-id') - source = items.pop('source') - except KeyError as e: - raise RuntimeError(f"options directive {e} missing in block at line {token.map[0] + 1}") - if items.keys(): - raise RuntimeError( - f"unsupported options directives in block at line {token.map[0] + 1}", - " ".join(items.keys())) - - try: - with open(self._base_paths[-1].parent / source, 'r') as f: - token.meta['id-prefix'] = id_prefix - token.meta['list-id'] = varlist_id - token.meta['source'] = json.load(f) - except Exception as e: - raise RuntimeError(f"processing options block in line {token.map[0] + 1}") from e - def _build_cli_db(p: argparse.ArgumentParser) -> None: From a7c25bb01f2373eb1f113272731e1659bc91de95 Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 19 Feb 2023 01:33:36 +0100 Subject: [PATCH 011/154] nixos-render-docs: add Freezable class for most of our data classes we can use dataclasses.dataclass with frozen=True or even plain named tuples. the TOC structure we'll need to generate proper navigation links is most easily represented and used as a cyclic structure though, and for that we can use neither. if we want to make the TOC structures immutable (which seems like a good idea) we'll need a hack of *some* kind, and this hack seems like the least intrusive. --- .../src/nixos_render_docs/utils.py | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/utils.py diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/utils.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/utils.py new file mode 100644 index 000000000000..3377d1fa4fe1 --- /dev/null +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/utils.py @@ -0,0 +1,21 @@ +from typing import Any + +_frozen_classes: dict[type, type] = {} + +# make a derived class freezable (ie, disallow modifications). 
+# we do this by changing the class of an instance at runtime when freeze() +# is called, providing a derived class that is exactly the same except +# for a __setattr__ that raises an error when called. this beats having +# a field for frozenness and an unconditional __setattr__ that checks this +# field because it does not insert anything into the class dict. +class Freezeable: + def freeze(self) -> None: + cls = type(self) + if not (frozen := _frozen_classes.get(cls)): + def __setattr__(instance: Any, n: str, v: Any) -> None: + raise TypeError(f'{cls.__name__} is frozen') + frozen = type(cls.__name__, (cls,), { + '__setattr__': __setattr__, + }) + _frozen_classes[cls] = frozen + self.__class__ = frozen From 7b0824c0031dd65e1abd6d88ca537a90d4dfbe23 Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 18 Feb 2023 20:34:31 +0100 Subject: [PATCH 012/154] nixos-render-docs: check heading presence during parsing check that all required headings are present during parsing, not during rendering. building a correct TOC will need this since every TOC entry needs a heading to set its title, and every included substructure needs a title. also improve the error message on repeated title headings slightly, giving the end line turns out to not be very useful. --- .../src/nixos_render_docs/manual.py | 34 ++++++------------- .../src/nixos_render_docs/manual_structure.py | 29 ++++++++++++++++ 2 files changed, 40 insertions(+), 23 deletions(-) create mode 100644 pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py index 2e83ac90b5b6..7e1923f35ec4 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py @@ -4,7 +4,7 @@ import json from abc import abstractmethod from collections.abc import Mapping, Sequence from pathlib import Path -from typing import Any, cast, Generic, NamedTuple, Optional, Union +from typing import Any, cast, Generic, get_args, NamedTuple, Optional, Union from xml.sax.saxutils import escape, quoteattr import markdown_it @@ -12,13 +12,16 @@ from markdown_it.token import Token from . 
import md, options from .docbook import DocBookRenderer, Heading +from .manual_structure import check_titles, FragmentType, TocEntryType from .md import Converter class BaseConverter(Converter[md.TR], Generic[md.TR]): _base_paths: list[Path] + _current_type: list[TocEntryType] def convert(self, file: Path) -> str: self._base_paths = [ file ] + self._current_type = ['book'] try: with open(file, 'r') as f: return self._render(f.read()) @@ -27,6 +30,7 @@ class BaseConverter(Converter[md.TR], Generic[md.TR]): def _parse(self, src: str) -> list[Token]: tokens = super()._parse(src) + check_titles(self._current_type[-1], tokens) for token in tokens: if token.type != "fence" or not token.info.startswith("{=include=} "): continue @@ -34,11 +38,14 @@ class BaseConverter(Converter[md.TR], Generic[md.TR]): if typ == 'options': token.type = 'included_options' self._parse_options(token) - elif typ in [ 'sections', 'chapters', 'preface', 'parts', 'appendix' ]: + else: + fragment_type = typ.removesuffix('s') + if fragment_type not in get_args(FragmentType): + raise RuntimeError(f"unsupported structural include type '{typ}'") + self._current_type.append(cast(FragmentType, fragment_type)) token.type = 'included_' + typ self._parse_included_blocks(token) - else: - raise RuntimeError(f"unsupported structural include type '{typ}'") + self._current_type.pop() return tokens def _parse_included_blocks(self, token: Token) -> None: @@ -106,25 +113,6 @@ class ManualDocBookRenderer(DocBookRenderer): } def render(self, tokens: Sequence[Token]) -> str: - wanted = { 'h1': 'title' } - wanted |= { 'h2': 'subtitle' } if self._toplevel_tag == 'book' else {} - for (i, (tag, kind)) in enumerate(wanted.items()): - if len(tokens) < 3 * (i + 1): - raise RuntimeError(f"missing {kind} ({tag}) heading") - token = tokens[3 * i] - if token.type != 'heading_open' or token.tag != tag: - assert token.map - raise RuntimeError(f"expected {kind} ({tag}) heading in line {token.map[0] + 1}", token) - for t in tokens[3 * len(wanted):]: - if t.type != 'heading_open' or (info := wanted.get(t.tag)) is None: - continue - assert t.map - raise RuntimeError( - f"only one {info[0]} heading ({t.markup} [text...]) allowed per " - f"{self._toplevel_tag}, but found a second in lines [{t.map[0] + 1}..{t.map[1]}]. " - "please remove all such headings except the first or demote the subsequent headings.", - t) - # books get special handling because they have *two* title tags. doing this with # generic code is more complicated than it's worth. the checks above have verified # that both titles actually exist. diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py new file mode 100644 index 000000000000..d7cf449a417b --- /dev/null +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py @@ -0,0 +1,29 @@ +from typing import Literal, Sequence + +from markdown_it.token import Token + +# FragmentType is used to restrict structural include blocks. +FragmentType = Literal['preface', 'part', 'chapter', 'section', 'appendix'] + +# in the TOC all fragments are allowed, plus the all-encompassing book. 
+TocEntryType = Literal['book', 'preface', 'part', 'chapter', 'section', 'appendix']
+
+def check_titles(kind: TocEntryType, tokens: Sequence[Token]) -> None:
+    wanted = { 'h1': 'title' }
+    wanted |= { 'h2': 'subtitle' } if kind == 'book' else {}
+    for (i, (tag, role)) in enumerate(wanted.items()):
+        if len(tokens) < 3 * (i + 1):
+            raise RuntimeError(f"missing {role} ({tag}) heading")
+        token = tokens[3 * i]
+        if token.type != 'heading_open' or token.tag != tag:
+            assert token.map
+            raise RuntimeError(f"expected {role} ({tag}) heading in line {token.map[0] + 1}", token)
+    for t in tokens[3 * len(wanted):]:
+        if t.type != 'heading_open' or not (role := wanted.get(t.tag, '')):
+            continue
+        assert t.map
+        raise RuntimeError(
+            f"only one {role} heading ({t.markup} [text...]) allowed per "
+            f"{kind}, but found a second in line {t.map[0] + 1}. "
+            "please remove all such headings except the first or demote the subsequent headings.",
+            t)

From ba201144605ed4ba83d165a37c3660ef2b49b193 Mon Sep 17 00:00:00 2001
From: pennae
Date: Sat, 18 Feb 2023 20:36:29 +0100
Subject: [PATCH 013/154] nixos-render-docs: check heading continuity

while not technically necessary for correct rendering of *contents* we do
need to disallow heading levels being skipped to build a correct TOC.
treating headings that have skipped a number of levels to actually be
headings that many levels up only gets confusing, and inserting artificial
intermediate headings suffers from problems, such as which ids to use and
what to call them.
---
 .../src/nixos_render_docs/manual_structure.py | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
index d7cf449a417b..c6842db606f1 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
@@ -27,3 +27,14 @@ def check_titles(kind: TocEntryType, tokens: Sequence[Token]) -> None:
             f"{kind}, but found a second in line {t.map[0] + 1}. "
             "please remove all such headings except the first or demote the subsequent headings.",
             t)
+
+    last_heading_level = 0
+    for token in tokens:
+        if token.type != 'heading_open':
+            continue
+        level = int(token.tag[1:]) # because tag = h1..h6
+        if level > last_heading_level + 1:
+            assert token.map
+            raise RuntimeError(f"heading in line {token.map[0] + 1} skips one or more heading levels, "
+                               "which is currently not allowed")
+        last_heading_level = level

From 163b667352e19411473fdf8603f0883c1b106d58 Mon Sep 17 00:00:00 2001
From: pennae
Date: Sat, 18 Feb 2023 20:41:34 +0100
Subject: [PATCH 014/154] nixos-render-docs: require headings to have ids

without this we cannot build a TOC to arbitrary depth without generating
ids for headings, but generated ids are fragile and liable to either break
or point to different things if the manual changes shape. we already have
the convention that all headings should have an id, this formalizes it.
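
as a rough sketch of the rule this formalizes (illustrative helper only,
not part of the patch; the real check operates on markdown-it tokens):

    # only a book's h2 subtitle may omit an id; every other heading needs one.
    def heading_missing_id(kind: str, tag: str, attrs: dict[str, str]) -> bool:
        return 'id' not in attrs and (kind != 'book' or tag != 'h2')

    assert heading_missing_id('chapter', 'h1', {})                    # flagged: no id
    assert not heading_missing_id('chapter', 'h1', {'id': 'ch-one'})  # has an id
    assert not heading_missing_id('book', 'h2', {})                   # subtitle exempt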
---
 .../src/nixos_render_docs/manual_structure.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
index c6842db606f1..32b6287b34ad 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
@@ -32,6 +32,14 @@ def check_titles(kind: TocEntryType, tokens: Sequence[Token]) -> None:
     for token in tokens:
         if token.type != 'heading_open':
             continue
+
+        # book subtitle headings do not need an id, only book title headings do.
+        # every other heading needs one too. we need this to build a TOC and to
+        # provide stable links if the manual changes shape.
+        if 'id' not in token.attrs and (kind != 'book' or token.tag != 'h2'):
+            assert token.map
+            raise RuntimeError(f"heading in line {token.map[0] + 1} does not have an id")
+
         level = int(token.tag[1:]) # because tag = h1..h6
         if level > last_heading_level + 1:
             assert token.map

From 768794d6c11b5e37f954405a3f03d63ef45897f6 Mon Sep 17 00:00:00 2001
From: pennae
Date: Sat, 18 Feb 2023 20:48:12 +0100
Subject: [PATCH 015/154] nixos-render-docs: check book structure

text content in the toplevel file of a book will not render properly. the
first proper element will be a preface, part, or chapter anyway, and those
require includes to produce. parts do not currently allow headings in the
part file itself, but that's mainly a renderer limitation. we can add
support for headings in part intros when we need them.

in all other cases includes must be followed by either another include, a
heading, or end of file. text content could not be properly linked to from
a TOC without a preceding heading.
---
 .../src/nixos_render_docs/manual.py           |  6 +--
 .../src/nixos_render_docs/manual_structure.py | 43 ++++++++++++++++++-
 2 files changed, 45 insertions(+), 4 deletions(-)

diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
index 7e1923f35ec4..858ecad9c11a 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
@@ -12,7 +12,7 @@ from markdown_it.token import Token

 from .
import md, options from .docbook import DocBookRenderer, Heading -from .manual_structure import check_titles, FragmentType, TocEntryType +from .manual_structure import check_structure, FragmentType, is_include, TocEntryType from .md import Converter class BaseConverter(Converter[md.TR], Generic[md.TR]): @@ -30,9 +30,9 @@ class BaseConverter(Converter[md.TR], Generic[md.TR]): def _parse(self, src: str) -> list[Token]: tokens = super()._parse(src) - check_titles(self._current_type[-1], tokens) + check_structure(self._current_type[-1], tokens) for token in tokens: - if token.type != "fence" or not token.info.startswith("{=include=} "): + if not is_include(token): continue typ = token.info[12:].strip() if typ == 'options': diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py index 32b6287b34ad..93a8ecc3f935 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py @@ -8,7 +8,41 @@ FragmentType = Literal['preface', 'part', 'chapter', 'section', 'appendix'] # in the TOC all fragments are allowed, plus the all-encompassing book. TocEntryType = Literal['book', 'preface', 'part', 'chapter', 'section', 'appendix'] -def check_titles(kind: TocEntryType, tokens: Sequence[Token]) -> None: +def is_include(token: Token) -> bool: + return token.type == "fence" and token.info.startswith("{=include=} ") + +# toplevel file must contain only the title headings and includes, anything else +# would cause strange rendering. +def _check_book_structure(tokens: Sequence[Token]) -> None: + for token in tokens[6:]: + if not is_include(token): + assert token.map + raise RuntimeError(f"unexpected content in line {token.map[0] + 1}, " + "expected structural include") + +# much like books, parts may not contain headings other than their title heading. +# this is a limitation of the current renderers that do not handle this case well +# even though it is supported in docbook (and probably supportable anywhere else). +def _check_part_structure(tokens: Sequence[Token]) -> None: + _check_fragment_structure(tokens) + for token in tokens[3:]: + if token.type == 'heading_open': + assert token.map + raise RuntimeError(f"unexpected heading in line {token.map[0] + 1}") + +# two include blocks must either be adjacent or separated by a heading, otherwise +# we cannot generate a correct TOC (since there'd be nothing to link to between +# the two includes). 
+def _check_fragment_structure(tokens: Sequence[Token]) -> None:
+    for i, token in enumerate(tokens):
+        if is_include(token) \
+           and i + 1 < len(tokens) \
+           and not (is_include(tokens[i + 1]) or tokens[i + 1].type == 'heading_open'):
+            assert token.map
+            raise RuntimeError(f"unexpected content in line {token.map[0] + 1}, "
+                               "expected heading or structural include")
+
+def check_structure(kind: TocEntryType, tokens: Sequence[Token]) -> None:
     wanted = { 'h1': 'title' }
     wanted |= { 'h2': 'subtitle' } if kind == 'book' else {}
     for (i, (tag, role)) in enumerate(wanted.items()):
@@ -46,3 +80,10 @@ def check_titles(kind: TocEntryType, tokens: Sequence[Token]) -> None:
             raise RuntimeError(f"heading in line {token.map[0] + 1} skips one or more heading levels, "
                                "which is currently not allowed")
         last_heading_level = level
+
+    if kind == 'book':
+        _check_book_structure(tokens)
+    elif kind == 'part':
+        _check_part_structure(tokens)
+    else:
+        _check_fragment_structure(tokens)

From 23dc31a9755cf34a2040fe574798c09d1c857df0 Mon Sep 17 00:00:00 2001
From: pennae
Date: Sat, 18 Feb 2023 21:23:48 +0100
Subject: [PATCH 016/154] nixos-render-docs: allow for options in include blocks

while docbook relies on external chunk-toc info to do chunking of the
rendered manual we have nothing of the sort for html. there it seems
easiest to add annotations to blocks to create new chunks. such annotations
could be extended to docbook to create the chunk-toc instead of passing it
in externally, but with docbook on the way out that seems like a waste of
effort.
---
 .../src/nixos_render_docs/manual.py | 36 +++++++++++++++----
 1 file changed, 30 insertions(+), 6 deletions(-)

diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
index 858ecad9c11a..8d7bf4a102f3 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
@@ -4,7 +4,7 @@ import json
 from abc import abstractmethod
 from collections.abc import Mapping, Sequence
 from pathlib import Path
-from typing import Any, cast, Generic, get_args, NamedTuple, Optional, Union
+from typing import Any, cast, ClassVar, Generic, get_args, NamedTuple, Optional, Union
@@ -16,6 +16,14 @@ from .manual_structure import check_structure, FragmentType, is_include, TocEntr
 from .md import Converter

 class BaseConverter(Converter[md.TR], Generic[md.TR]):
+    # per-converter configuration for ns:arg=value arguments to include blocks, following
+    # the include type. html converters need something like this to support chunking, or
+    # another external method like the chunktocs docbook uses (but block options seem like
+    # a much nicer way of doing this).
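+    #
+    # purely illustrative (editorial sketch, not part of this patch): with a
+    # converter that set INCLUDE_ARGS_NS = "html" and allowed an 'into-file'
+    # argument, an include block could carry a chunking hint roughly like
+    #
+    #   ```{=include=} chapters html:into-file=//installation.html
+    #   installation.md
+    #   ```
+    #
+    # the namespace and argument name above are assumptions for the example.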
+ INCLUDE_ARGS_NS: ClassVar[str] + INCLUDE_FRAGMENT_ALLOWED_ARGS: ClassVar[set[str]] = set() + INCLUDE_OPTIONS_ALLOWED_ARGS: ClassVar[set[str]] = set() + _base_paths: list[Path] _current_type: list[TocEntryType] @@ -34,21 +42,35 @@ class BaseConverter(Converter[md.TR], Generic[md.TR]): for token in tokens: if not is_include(token): continue - typ = token.info[12:].strip() + directive = token.info[12:].split() + if not directive: + continue + args = { k: v for k, _sep, v in map(lambda s: s.partition('='), directive[1:]) } + typ = directive[0] if typ == 'options': token.type = 'included_options' - self._parse_options(token) + self._process_include_args(token, args, self.INCLUDE_OPTIONS_ALLOWED_ARGS) + self._parse_options(token, args) else: fragment_type = typ.removesuffix('s') if fragment_type not in get_args(FragmentType): raise RuntimeError(f"unsupported structural include type '{typ}'") self._current_type.append(cast(FragmentType, fragment_type)) token.type = 'included_' + typ - self._parse_included_blocks(token) + self._process_include_args(token, args, self.INCLUDE_FRAGMENT_ALLOWED_ARGS) + self._parse_included_blocks(token, args) self._current_type.pop() return tokens - def _parse_included_blocks(self, token: Token) -> None: + def _process_include_args(self, token: Token, args: dict[str, str], allowed: set[str]) -> None: + ns = self.INCLUDE_ARGS_NS + ":" + args = { k[len(ns):]: v for k, v in args.items() if k.startswith(ns) } + if unknown := set(args.keys()) - allowed: + assert token.map + raise RuntimeError(f"unrecognized include argument in line {token.map[0] + 1}", unknown) + token.meta['include-args'] = args + + def _parse_included_blocks(self, token: Token, block_args: dict[str, str]) -> None: assert token.map included = token.meta['included'] = [] for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2): @@ -65,7 +87,7 @@ class BaseConverter(Converter[md.TR], Generic[md.TR]): except Exception as e: raise RuntimeError(f"processing included file {path} from line {lnum}") from e - def _parse_options(self, token: Token) -> None: + def _parse_options(self, token: Token, block_args: dict[str, str]) -> None: assert token.map items = {} @@ -176,6 +198,8 @@ class ManualDocBookRenderer(DocBookRenderer): return f"\n{escape(token.content)}" class DocBookConverter(BaseConverter[ManualDocBookRenderer]): + INCLUDE_ARGS_NS = "docbook" + def __init__(self, manpage_urls: Mapping[str, str], revision: str): super().__init__() self._renderer = ManualDocBookRenderer('book', revision, manpage_urls) From 7a74ce51a1643aa6e83c912203bb6c3a987376b9 Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 19 Feb 2023 19:19:13 +0100 Subject: [PATCH 017/154] nixos-render-docs: add toc generator the docbook toolchain uses docbook-xsl to generate its TOC, our html renderer will have to do this on its own. this generator uses a very straight-forward algorithm of only inspecting headings, but anything else could be inspected as well. (examples come to mind, but those do not have titles and would thus make for bad toc entries) we also use path information (that will be taken from include block args in the html renderer) to produce navigation information. the algorithm we use mirrors what docbook does, linking to the next/previous files in depth-first toc order. toc entries are linked to the tokens they refer to for easy use later. 
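
the chunk-boundary rule in collect_and_link below can be illustrated on a
toy depth-first sequence of target paths (illustrative data only, not from
the patch):

    # a toc entry starts a new chunk when its file differs from its
    # predecessor's and has not been seen before in depth-first order.
    paths = ['index.html', 'index.html', 'ch1.html', 'ch1.html', 'ch2.html']
    starts, seen, prev = [True], {paths[0]}, paths[0]  # the root always starts a chunk
    for p in paths[1:]:
        starts.append(p != prev and p not in seen)
        seen.add(p)
        prev = p
    assert starts == [True, False, True, False, True]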
---
 .../src/nixos_render_docs/manual_structure.py | 103 +++++++++++++++++-
 1 file changed, 100 insertions(+), 3 deletions(-)

diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
index 93a8ecc3f935..c271ca3c5aa5 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
@@ -1,7 +1,15 @@
-from typing import Literal, Sequence
+from __future__ import annotations
+
+import dataclasses as dc
+import html
+import itertools
+
+from typing import cast, get_args, Iterable, Literal, Sequence

 from markdown_it.token import Token

+from .utils import Freezeable
+
 # FragmentType is used to restrict structural include blocks.
 FragmentType = Literal['preface', 'part', 'chapter', 'section', 'appendix']

@@ -21,8 +29,9 @@ def _check_book_structure(tokens: Sequence[Token]) -> None:
                                "expected structural include")

 # much like books, parts may not contain headings other than their title heading.
-# this is a limitation of the current renderers that do not handle this case well
-# even though it is supported in docbook (and probably supportable anywhere else).
+# this is a limitation of the current renderers and TOC generators that do not handle
+# this case well even though it is supported in docbook (and probably supportable
+# anywhere else).
 def _check_part_structure(tokens: Sequence[Token]) -> None:
     _check_fragment_structure(tokens)
     for token in tokens[3:]:
@@ -87,3 +96,91 @@ def check_structure(kind: TocEntryType, tokens: Sequence[Token]) -> None:
         _check_part_structure(tokens)
     else:
         _check_fragment_structure(tokens)
+
+@dc.dataclass(frozen=True)
+class XrefTarget:
+    id: str
+    """link label for `[](#local-references)`"""
+    title_html: str
+    """toc label"""
+    toc_html: str | None
+    """text for `<title>` tags and `title="..."` attributes"""
+    title: str | None
+    """path to file that contains the anchor"""
+    path: str
+    """whether to drop the `#anchor` from links when expanding xrefs"""
+    drop_fragment: bool = False
+
+    def href(self) -> str:
+        path = html.escape(self.path, True)
+        return path if self.drop_fragment else f"{path}#{html.escape(self.id, True)}"
+
+@dc.dataclass
+class TocEntry(Freezeable):
+    kind: TocEntryType
+    target: XrefTarget
+    parent: TocEntry | None = None
+    prev: TocEntry | None = None
+    next: TocEntry | None = None
+    children: list[TocEntry] = dc.field(default_factory=list)
+    starts_new_chunk: bool = False
+
+    @property
+    def root(self) -> TocEntry:
+        return self.parent.root if self.parent else self
+
+    @classmethod
+    def of(cls, token: Token) -> TocEntry:
+        entry = token.meta.get('TocEntry')
+        if not isinstance(entry, TocEntry):
+            raise RuntimeError('requested toc entry, none found', token)
+        return entry
+
+    @classmethod
+    def collect_and_link(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token]) -> TocEntry:
+        result = cls._collect_entries(xrefs, tokens, 'book')
+
+        def flatten_with_parent(this: TocEntry, parent: TocEntry | None) -> Iterable[TocEntry]:
+            this.parent = parent
+            return itertools.chain([this], *[ flatten_with_parent(c, this) for c in this.children ])
+
+        flat = list(flatten_with_parent(result, None))
+        prev = flat[0]
+        prev.starts_new_chunk = True
+        paths_seen = set([prev.target.path])
+        for c in flat[1:]:
+            if prev.target.path != c.target.path and c.target.path not in paths_seen:
+                c.starts_new_chunk = True
+            c.prev, prev.next = prev, c
+            prev = c
+            paths_seen.add(c.target.path)
+
+        for c in flat:
+            c.freeze()
+
+        return result
+
+    @classmethod
+    def _collect_entries(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token],
+                         kind: TocEntryType) -> TocEntry:
+        # we assume that check_structure has been run recursively over the entire input.
+        # list contains (tag, entry) pairs that will collapse to a single entry for
+        # the full sequence.
+        entries: list[tuple[str, TocEntry]] = []
+        for token in tokens:
+            if token.type.startswith('included_') and (included := token.meta.get('included')):
+                fragment_type_str = token.type[9:].removesuffix('s')
+                assert fragment_type_str in get_args(TocEntryType)
+                fragment_type = cast(TocEntryType, fragment_type_str)
+                for fragment, _path in included:
+                    entries[-1][1].children.append(cls._collect_entries(xrefs, fragment, fragment_type))
+            elif token.type == 'heading_open' and (id := cast(str, token.attrs.get('id', ''))):
+                while len(entries) > 1 and entries[-1][0] >= token.tag:
+                    entries[-2][1].children.append(entries.pop()[1])
+                entries.append((token.tag,
+                                TocEntry(kind if token.tag == 'h1' else 'section', xrefs[id])))
+                token.meta['TocEntry'] = entries[-1][1]
+
+        while len(entries) > 1:
+            entries[-2][1].children.append(entries.pop()[1])
+        return entries[0][1]

From 82e62614e9171981a3972940b8e96e02c7e55f83 Mon Sep 17 00:00:00 2001
From: pennae <github@quasiparticle.net>
Date: Sun, 19 Feb 2023 22:53:21 +0100
Subject: [PATCH 018/154] nixos-render-docs: add html renderer

the basic html renderer. it doesn't have all the docbook compatibility
codes embedded into it, but there is a good amount. this renderer is
unaware of manual structure and does not traverse structural include
tokens (if it finds any it'll just fail), that task falls to derived
classes. once we have more uses for structural includes than just the
manual we may revisit this decision.
---
 .../src/nixos_render_docs/__init__.py         |   1 +
 .../src/nixos_render_docs/html.py             | 245 ++++++++++++++++++
 .../nixos-render-docs/src/tests/test_html.py  | 179 +++++++++++++
 3 files changed, 425 insertions(+)
 create mode 100644 pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py
 create mode 100644 pkgs/tools/nix/nixos-render-docs/src/tests/test_html.py

diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/__init__.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/__init__.py
index 1c58accb4166..7f7463e5c837 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/__init__.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/__init__.py
@@ -8,6 +8,7 @@ from pprint import pprint
 from typing import Any, Dict

 from .md import Converter
+from . import html
 from . import manual
 from . import options
 from . import parallel

diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py
new file mode 100644
index 000000000000..39d2da6adf8c
--- /dev/null
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py
@@ -0,0 +1,245 @@
+from collections.abc import Mapping, Sequence
+from typing import cast, Optional, NamedTuple
+
+from html import escape
+from markdown_it.token import Token
+
+from .manual_structure import XrefTarget
+from .md import Renderer
+
+class UnresolvedXrefError(Exception):
+    pass
+
+class Heading(NamedTuple):
+    container_tag: str
+    level: int
+    html_tag: str
+    # special handling for part content: whether partintro div was already closed from
+    # elsewhere or still needs closing.
+ partintro_closed: bool + # tocs are generated when the heading opens, but have to be emitted into the file + # after the heading titlepage (and maybe partinfo) has been closed. + toc_fragment: str + +_bullet_list_styles = [ 'disc', 'circle', 'square' ] +_ordered_list_styles = [ '1', 'a', 'i', 'A', 'I' ] + +class HTMLRenderer(Renderer): + _xref_targets: Mapping[str, XrefTarget] + + _headings: list[Heading] + _attrspans: list[str] + _hlevel_offset: int = 0 + _bullet_list_nesting: int = 0 + _ordered_list_nesting: int = 0 + + def __init__(self, manpage_urls: Mapping[str, str], xref_targets: Mapping[str, XrefTarget]): + super().__init__(manpage_urls) + self._headings = [] + self._attrspans = [] + self._xref_targets = xref_targets + + def render(self, tokens: Sequence[Token]) -> str: + result = super().render(tokens) + result += self._close_headings(None) + return result + + def text(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return escape(token.content) + def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "<p>" + def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</p>" + def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "<br />" + def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "\n" + def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return f'<code class="literal">{escape(token.content)}</code>' + def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return self.fence(token, tokens, i) + def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + href = escape(cast(str, token.attrs['href']), True) + tag, title, target, text = "link", "", 'target="_top"', "" + if href.startswith('#'): + if not (xref := self._xref_targets.get(href[1:])): + raise UnresolvedXrefError(f"bad local reference, id {href} not known") + if tokens[i + 1].type == 'link_close': + tag, text = "xref", xref.title_html + if xref.title: + title = f'title="{escape(xref.title, True)}"' + target, href = "", xref.href() + return f'<a class="{tag}" href="{href}" {title} {target}>{text}' + def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</a>" + def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return '<li class="listitem">' + def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</li>" + def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + extra = 'compact' if token.meta.get('compact', False) else '' + style = _bullet_list_styles[self._bullet_list_nesting % len(_bullet_list_styles)] + self._bullet_list_nesting += 1 + return f'<div class="itemizedlist"><ul class="itemizedlist {extra}" style="list-style-type: {style};">' + def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + self._bullet_list_nesting -= 1 + return "</ul></div>" + def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return '<span class="emphasis"><em>' + def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</em></span>" + def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return '<span class="strong"><strong>' + def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</strong></span>" + def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str: + 
# TODO use token.info. docbook doesn't so we can't yet. + return f'<pre class="programlisting">\n{escape(token.content)}</pre>' + def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return '<div class="blockquote"><blockquote class="blockquote">' + def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</blockquote></div>" + def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return '<div class="note"><h3 class="title">Note</h3>' + def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</div>" + def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return '<div class="caution"><h3 class="title">Caution</h3>' + def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</div>" + def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return '<div class="important"><h3 class="title">Important</h3>' + def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</div>" + def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return '<div class="tip"><h3 class="title">Tip</h3>' + def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</div>" + def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return '<div class="warning"><h3 class="title">Warning</h3>' + def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</div>" + def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return '<div class="variablelist"><dl class="variablelist">' + def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</dl></div>" + def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return '<dt><span class="term">' + def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</span></dt>" + def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "<dd>" + def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "</dd>" + def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str: + if token.meta['name'] == 'command': + return f'<span class="command"><strong>{escape(token.content)}</strong></span>' + if token.meta['name'] == 'file': + return f'<code class="filename">{escape(token.content)}</code>' + if token.meta['name'] == 'var': + return f'<code class="varname">{escape(token.content)}</code>' + if token.meta['name'] == 'env': + return f'<code class="envar">{escape(token.content)}</code>' + if token.meta['name'] == 'option': + return f'<code class="option">{escape(token.content)}</code>' + if token.meta['name'] == 'manpage': + [page, section] = [ s.strip() for s in token.content.rsplit('(', 1) ] + section = section[:-1] + man = f"{page}({section})" + title = f'<span class="refentrytitle">{escape(page)}</span>' + vol = f"({escape(section)})" + ref = f'<span class="citerefentry">{title}{vol}</span>' + if man in self._manpage_urls: + return f'<a class="link" href="{escape(self._manpage_urls[man], True)}" target="_top">{ref}</a>' + else: + return ref + return super().myst_role(token, tokens, i) + def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str: + # we currently support *only* inline anchors and the special .keycap class to produce + # keycap-styled spans. 
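+        # (editorial illustration, not part of this patch: markup such as
+        # `[]{#some-anchor}` is assumed to yield an inline anchor here, and
+        # `[Ctrl]{.keycap}` a keycap-styled span.)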
+ (id_part, class_part) = ("", "") + if s := token.attrs.get('id'): + id_part = f'<a id="{escape(cast(str, s), True)}" />' + if s := token.attrs.get('class'): + if s == 'keycap': + class_part = '<span class="keycap"><strong>' + self._attrspans.append("</strong></span>") + else: + return super().attr_span_begin(token, tokens, i) + else: + self._attrspans.append("") + return id_part + class_part + def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return self._attrspans.pop() + def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + hlevel = int(token.tag[1:]) + htag, hstyle = self._make_hN(hlevel) + if hstyle: + hstyle = f'style="{escape(hstyle, True)}"' + if anchor := cast(str, token.attrs.get('id', '')): + anchor = f'<a id="{escape(anchor, True)}"></a>' + result = self._close_headings(hlevel) + tag = self._heading_tag(token, tokens, i) + toc_fragment = self._build_toc(tokens, i) + self._headings.append(Heading(tag, hlevel, htag, tag != 'part', toc_fragment)) + return ( + f'{result}' + f'<div class="{tag}">' + f' <div class="titlepage">' + f' <div>' + f' <div>' + f' <{htag} class="title" {hstyle}>' + f' {anchor}' + ) + def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + heading = self._headings[-1] + result = ( + f' </{heading.html_tag}>' + f' </div>' + f' </div>' + f'</div>' + ) + if heading.container_tag == 'part': + result += '<div class="partintro">' + else: + result += heading.toc_fragment + return result + def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + extra = 'compact' if token.meta.get('compact', False) else '' + start = f'start="{token.attrs["start"]}"' if 'start' in token.attrs else "" + style = _ordered_list_styles[self._ordered_list_nesting % len(_ordered_list_styles)] + self._ordered_list_nesting += 1 + return f'<div class="orderedlist"><ol class="orderedlist {extra}" {start} type="{style}">' + def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + self._ordered_list_nesting -= 1; + return "</ol></div>" + def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + if id := token.attrs.get('id'): + return f'<a id="{escape(cast(str, id), True)}" />' + return "" + def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "" + + def _make_hN(self, level: int) -> tuple[str, str]: + return f"h{min(6, max(1, level + self._hlevel_offset))}", "" + + def _maybe_close_partintro(self) -> str: + if self._headings: + heading = self._headings[-1] + if heading.container_tag == 'part' and not heading.partintro_closed: + self._headings[-1] = heading._replace(partintro_closed=True) + return heading.toc_fragment + "</div>" + return "" + + def _close_headings(self, level: Optional[int]) -> str: + result = [] + while len(self._headings) and (level is None or self._headings[-1].level >= level): + result.append(self._maybe_close_partintro()) + result.append("</div>") + self._headings.pop() + return "\n".join(result) + + def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "section" + def _build_toc(self, tokens: Sequence[Token], i: int) -> str: + return "" diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_html.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_html.py new file mode 100644 index 000000000000..df366a8babd7 --- /dev/null +++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_html.py @@ -0,0 +1,179 @@ +import nixos_render_docs as nrd +import 
pytest + +from sample_md import sample1 + +class Converter(nrd.md.Converter[nrd.html.HTMLRenderer]): + def __init__(self, manpage_urls: dict[str, str], xrefs: dict[str, nrd.manual_structure.XrefTarget]): + super().__init__() + self._renderer = nrd.html.HTMLRenderer(manpage_urls, xrefs) + +def unpretty(s: str) -> str: + return "".join(map(str.strip, s.splitlines())).replace('␣', ' ').replace('↵', '\n') + +def test_lists_styles() -> None: + # nested lists rotate through a number of list style + c = Converter({}, {}) + assert c._render("- - - - foo") == unpretty(""" + <div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: disc;"> + <li class="listitem"> + <div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: circle;"> + <li class="listitem"> + <div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: square;"> + <li class="listitem"> + <div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: disc;"> + <li class="listitem"><p>foo</p></li> + </ul></div> + </li> + </ul></div> + </li> + </ul></div> + </li> + </ul></div> + """) + assert c._render("1. 1. 1. 1. 1. 1. foo") == unpretty(""" + <div class="orderedlist"><ol class="orderedlist compact" type="1"> + <li class="listitem"> + <div class="orderedlist"><ol class="orderedlist compact" type="a"> + <li class="listitem"> + <div class="orderedlist"><ol class="orderedlist compact" type="i"> + <li class="listitem"> + <div class="orderedlist"><ol class="orderedlist compact" type="A"> + <li class="listitem"> + <div class="orderedlist"><ol class="orderedlist compact" type="I"> + <li class="listitem"> + <div class="orderedlist"><ol class="orderedlist compact" type="1"> + <li class="listitem"><p>foo</p></li> + </ol></div> + </li> + </ol></div> + </li> + </ol></div> + </li> + </ol></div> + </li> + </ol></div> + </li> + </ol></div> + """) + +def test_xrefs() -> None: + # nested lists rotate through a number of list style + c = Converter({}, { + 'foo': nrd.manual_structure.XrefTarget('foo', '<hr/>', 'toc1', 'title1', 'index.html'), + 'bar': nrd.manual_structure.XrefTarget('bar', '<br/>', 'toc2', 'title2', 'index.html', True), + }) + assert c._render("[](#foo)") == '<p><a class="xref" href="index.html#foo" title="title1" ><hr/></a></p>' + assert c._render("[](#bar)") == '<p><a class="xref" href="index.html" title="title2" ><br/></a></p>' + with pytest.raises(nrd.html.UnresolvedXrefError) as exc: + c._render("[](#baz)") + assert exc.value.args[0] == 'bad local reference, id #baz not known' + +def test_full() -> None: + c = Converter({ 'man(1)': 'http://example.org' }, {}) + assert c._render(sample1) == unpretty(""" + <div class="warning"> + <h3 class="title">Warning</h3> + <p>foo</p> + <div class="note"> + <h3 class="title">Note</h3> + <p>nested</p> + </div> + </div> + <p> + <a class="link" href="link" target="_top">↵ + multiline↵ + </a> + </p> + <p> + <a class="link" href="http://example.org" target="_top"> + <span class="citerefentry"><span class="refentrytitle">man</span>(1)</span> + </a> reference + </p> + <p><a id="b" />some <a id="a" />nested anchors</p> + <p> + <span class="emphasis"><em>emph</em></span>␣ + <span class="strong"><strong>strong</strong></span>␣ + <span class="emphasis"><em>nesting emph <span class="strong"><strong>and strong</strong></span>␣ + and <code class="literal">code</code></em></span> + </p> + <div class="itemizedlist"> + <ul class="itemizedlist " style="list-style-type: disc;"> + <li class="listitem"><p>wide bullet</p></li> + 
<li class="listitem"><p>list</p></li> + </ul> + </div> + <div class="orderedlist"> + <ol class="orderedlist " type="1"> + <li class="listitem"><p>wide ordered</p></li> + <li class="listitem"><p>list</p></li> + </ol> + </div> + <div class="itemizedlist"> + <ul class="itemizedlist compact" style="list-style-type: disc;"> + <li class="listitem"><p>narrow bullet</p></li> + <li class="listitem"><p>list</p></li> + </ul> + </div> + <div class="orderedlist"> + <ol class="orderedlist compact" type="1"> + <li class="listitem"><p>narrow ordered</p></li> + <li class="listitem"><p>list</p></li> + </ol> + </div> + <div class="blockquote"> + <blockquote class="blockquote"> + <p>quotes</p> + <div class="blockquote"> + <blockquote class="blockquote"> + <p>with <span class="emphasis"><em>nesting</em></span></p> + <pre class="programlisting">↵ + nested code block↵ + </pre> + </blockquote> + </div> + <div class="itemizedlist"> + <ul class="itemizedlist compact" style="list-style-type: disc;"> + <li class="listitem"><p>and lists</p></li> + <li class="listitem"> + <pre class="programlisting">↵ + containing code↵ + </pre> + </li> + </ul> + </div> + <p>and more quote</p> + </blockquote> + </div> + <div class="orderedlist"> + <ol class="orderedlist compact" start="100" type="1"> + <li class="listitem"><p>list starting at 100</p></li> + <li class="listitem"><p>goes on</p></li> + </ol> + </div> + <div class="variablelist"> + <dl class="variablelist"> + <dt><span class="term">deflist</span></dt> + <dd> + <div class="blockquote"> + <blockquote class="blockquote"> + <p> + with a quote↵ + and stuff + </p> + </blockquote> + </div> + <pre class="programlisting">↵ + code block↵ + </pre> + <pre class="programlisting">↵ + fenced block↵ + </pre> + <p>text</p> + </dd> + <dt><span class="term">more stuff in same deflist</span></dt> + <dd> + <p>foo</p> + </dd> + </dl> + </div>""") From d520d55dee017fff8b3d0682260e12a27035378e Mon Sep 17 00:00:00 2001 From: pennae <github@quasiparticle.net> Date: Sun, 19 Feb 2023 23:03:32 +0100 Subject: [PATCH 019/154] nixos-render-docs: add options html renderer it's not hooked up to anything yet, but that will come soon. there's a bit of docbook compat here that must be interoperable with the actual docbook exporter, but luckily it's not all that much. --- .../src/nixos_render_docs/options.py | 116 +++++++++++++++++- 1 file changed, 111 insertions(+), 5 deletions(-) diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py index 88c6d7443318..06e5f9711216 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py @@ -1,14 +1,15 @@ from __future__ import annotations import argparse +import html import json +import xml.sax.saxutils as xml from abc import abstractmethod from collections.abc import Mapping, Sequence from markdown_it.token import Token from typing import Any, Generic, Optional from urllib.parse import quote -from xml.sax.saxutils import escape, quoteattr import markdown_it @@ -17,7 +18,9 @@ from . 
import parallel from .asciidoc import AsciiDocRenderer, asciidoc_escape from .commonmark import CommonMarkRenderer from .docbook import DocBookRenderer, make_xml_id +from .html import HTMLRenderer from .manpage import ManpageRenderer, man_escape +from .manual_structure import XrefTarget from .md import Converter, md_escape, md_make_code from .types import OptionLoc, Option, RenderedOption @@ -240,10 +243,10 @@ class DocBookConverter(BaseConverter[OptionsDocBookRenderer]): def _decl_def_entry(self, href: Optional[str], name: str) -> list[str]: if href is not None: - href = " xlink:href=" + quoteattr(href) + href = " xlink:href=" + xml.quoteattr(href) return [ f"<member><filename{href}>", - escape(name), + xml.escape(name), "</filename></member>" ] @@ -273,8 +276,8 @@ class DocBookConverter(BaseConverter[OptionsDocBookRenderer]): result += [ "<varlistentry>", # NOTE adding extra spaces here introduces spaces into xref link expansions - (f"<term xlink:href={quoteattr('#' + id)} xml:id={quoteattr(id)}>" + - f"<option>{escape(name)}</option></term>"), + (f"<term xlink:href={xml.quoteattr('#' + id)} xml:id={xml.quoteattr(id)}>" + + f"<option>{xml.escape(name)}</option></term>"), "<listitem>" ] result += opt.lines @@ -524,6 +527,109 @@ class AsciiDocConverter(BaseConverter[OptionsAsciiDocRenderer]): return "\n".join(result) +class OptionsHTMLRenderer(OptionDocsRestrictions, HTMLRenderer): + # TODO docbook compat. must be removed together with the matching docbook handlers. + def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + token.meta['compact'] = False + return super().ordered_list_open(token, tokens, i) + def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + token.meta['compact'] = False + return super().bullet_list_open(token, tokens, i) + def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str: + # TODO use token.info. docbook doesn't so we can't yet. 
+ return f'<pre class="programlisting">{html.escape(token.content)}</pre>' + +class HTMLConverter(BaseConverter[OptionsHTMLRenderer]): + __option_block_separator__ = "" + + def __init__(self, manpage_urls: Mapping[str, str], revision: str, markdown_by_default: bool, + varlist_id: str, id_prefix: str, xref_targets: Mapping[str, XrefTarget]): + super().__init__(revision, markdown_by_default) + self._xref_targets = xref_targets + self._varlist_id = varlist_id + self._id_prefix = id_prefix + self._renderer = OptionsHTMLRenderer(manpage_urls, self._xref_targets) + + def _parallel_render_prepare(self) -> Any: + return (self._renderer._manpage_urls, self._revision, self._markdown_by_default, + self._varlist_id, self._id_prefix, self._xref_targets) + @classmethod + def _parallel_render_init_worker(cls, a: Any) -> HTMLConverter: + return cls(*a) + + def _render_code(self, option: dict[str, Any], key: str) -> list[str]: + if lit := option_is(option, key, 'literalDocBook'): + raise RuntimeError("can't render html in the presence of docbook") + else: + return super()._render_code(option, key) + + def _render_description(self, desc: str | dict[str, Any]) -> list[str]: + if isinstance(desc, str) and not self._markdown_by_default: + raise RuntimeError("can't render html in the presence of docbook") + else: + return super()._render_description(desc) + + def _related_packages_header(self) -> list[str]: + return [ + '<p><span class="emphasis"><em>Related packages:</em></span></p>', + ] + + def _decl_def_header(self, header: str) -> list[str]: + return [ + f'<p><span class="emphasis"><em>{header}:</em></span></p>', + '<table border="0" summary="Simple list" class="simplelist">' + ] + + def _decl_def_entry(self, href: Optional[str], name: str) -> list[str]: + if href is not None: + href = f' href="{html.escape(href, True)}"' + return [ + "<tr><td>", + f'<code class="filename"><a class="filename" {href} target="_top">', + f'{html.escape(name)}', + '</a></code>', + "</td></tr>" + ] + + def _decl_def_footer(self) -> list[str]: + return [ "</table>" ] + + def finalize(self) -> str: + result = [] + + result += [ + '<div class="variablelist">', + f'<a id="{html.escape(self._varlist_id, True)}"></a>', + ' <dl class="variablelist">', + ] + + for (name, opt) in self._sorted_options(): + id = make_xml_id(self._id_prefix + name) + target = self._xref_targets[id] + result += [ + '<dt>', + ' <span class="term">', + # docbook compat, these could be one tag + f' <a id="{html.escape(id, True)}"></a><a class="term" href="{target.href()}">' + # no spaces here (and string merging) for docbook output compat + f'<code class="option">{html.escape(name)}</code>', + ' </a>', + ' </span>', + '</dt>', + '<dd>', + ] + result += opt.lines + result += [ + "</dd>", + ] + + result += [ + " </dl>", + "</div>" + ] + + return "\n".join(result) + def _build_cli_db(p: argparse.ArgumentParser) -> None: p.add_argument('--manpage-urls', required=True) p.add_argument('--revision', required=True) From feaa97e5dcbb889404442ebe952357ef927d17ca Mon Sep 17 00:00:00 2001 From: pennae <github@quasiparticle.net> Date: Sat, 18 Feb 2023 21:10:42 +0100 Subject: [PATCH 020/154] nixos-render-docs: render directly from file to file this will be necessary for html since there we have to do chunking into multiple files ourselves. 
writing one file from the caller of the converter and all others from within the converter is unnecessarily spread out, and returning a dict of file names and their contents is not quite as meaningful for docbook (which has only one file to begin with). --- .../src/nixos_render_docs/manual.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py index 8d7bf4a102f3..27850c0bbd17 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py @@ -27,14 +27,14 @@ class BaseConverter(Converter[md.TR], Generic[md.TR]): _base_paths: list[Path] _current_type: list[TocEntryType] - def convert(self, file: Path) -> str: - self._base_paths = [ file ] + def convert(self, infile: Path, outfile: Path) -> None: + self._base_paths = [ infile ] self._current_type = ['book'] try: - with open(file, 'r') as f: - return self._render(f.read()) + converted = self._render(infile.read_text()) + outfile.write_text(converted) except Exception as e: - raise RuntimeError(f"failed to render manual {file}") from e + raise RuntimeError(f"failed to render manual {infile}") from e def _parse(self, src: str) -> list[Token]: tokens = super()._parse(src) @@ -215,8 +215,7 @@ def _build_cli_db(p: argparse.ArgumentParser) -> None: def _run_cli_db(args: argparse.Namespace) -> None: with open(args.manpage_urls, 'r') as manpage_urls: md = DocBookConverter(json.load(manpage_urls), args.revision) - converted = md.convert(args.infile) - args.outfile.write_text(converted) + md.convert(args.infile, args.outfile) def build_cli(p: argparse.ArgumentParser) -> None: formats = p.add_subparsers(dest='format', required=True) From 36f04733ddc40beb54659f290aaf369d380f312b Mon Sep 17 00:00:00 2001 From: pennae <github@quasiparticle.net> Date: Wed, 15 Feb 2023 16:28:52 +0100 Subject: [PATCH 021/154] nixos-render-docs: add manual html converter this converter is currently supposed to be able to reproduce the docbook-generated html DOMs exactly, though not necessarily the html *files*. it mirrors many docbook behaviours that seem rather odd, such as top-level sections in chapters using the same heading depth as understood by html as their parent chapters do. over time we can hopefully remove all special casing needed to reproduce docbook rendering, but for now at least it doesn't hurt *too* much. --- .../src/nixos_render_docs/__init__.py | 1 - .../src/nixos_render_docs/manual.py | 455 +++++++++++++++++- 2 files changed, 433 insertions(+), 23 deletions(-) diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/__init__.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/__init__.py index 7f7463e5c837..1c58accb4166 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/__init__.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/__init__.py @@ -8,7 +8,6 @@ from pprint import pprint from typing import Any, Dict from .md import Converter -from . import html from . import manual from . import options from . 
import parallel diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py index 27850c0bbd17..40dea3c7d1d8 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py @@ -1,19 +1,23 @@ import argparse +import html import json +import re +import xml.sax.saxutils as xml from abc import abstractmethod from collections.abc import Mapping, Sequence from pathlib import Path from typing import Any, cast, ClassVar, Generic, get_args, NamedTuple, Optional, Union -from xml.sax.saxutils import escape, quoteattr import markdown_it from markdown_it.token import Token from . import md, options -from .docbook import DocBookRenderer, Heading -from .manual_structure import check_structure, FragmentType, is_include, TocEntryType -from .md import Converter +from .docbook import DocBookRenderer, Heading, make_xml_id +from .html import HTMLRenderer, UnresolvedXrefError +from .manual_structure import check_structure, FragmentType, is_include, TocEntry, TocEntryType, XrefTarget +from .md import Converter, Renderer +from .utils import Freezeable class BaseConverter(Converter[md.TR], Generic[md.TR]): # per-converter configuration for ns:arg=value arguments to include blocks, following @@ -31,11 +35,16 @@ class BaseConverter(Converter[md.TR], Generic[md.TR]): self._base_paths = [ infile ] self._current_type = ['book'] try: - converted = self._render(infile.read_text()) + tokens = self._parse(infile.read_text()) + self._postprocess(infile, outfile, tokens) + converted = self._renderer.render(tokens) outfile.write_text(converted) except Exception as e: raise RuntimeError(f"failed to render manual {infile}") from e + def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None: + pass + def _parse(self, src: str) -> list[Token]: tokens = super()._parse(src) check_structure(self._current_type[-1], tokens) @@ -117,12 +126,12 @@ class BaseConverter(Converter[md.TR], Generic[md.TR]): except Exception as e: raise RuntimeError(f"processing options block in line {token.map[0] + 1}") from e -class ManualDocBookRenderer(DocBookRenderer): +class RendererMixin(Renderer): _toplevel_tag: str _revision: str - def __init__(self, toplevel_tag: str, revision: str, manpage_urls: Mapping[str, str]): - super().__init__(manpage_urls) + def __init__(self, toplevel_tag: str, revision: str, *args: Any, **kwargs: Any): + super().__init__(*args, **kwargs) self._toplevel_tag = toplevel_tag self._revision = revision self.rules |= { @@ -139,20 +148,39 @@ class ManualDocBookRenderer(DocBookRenderer): # generic code is more complicated than it's worth. the checks above have verified # that both titles actually exist. 
         if self._toplevel_tag == 'book':
-            assert tokens[1].children
-            assert tokens[4].children
-            if (maybe_id := cast(str, tokens[0].attrs.get('id', ""))):
-                maybe_id = "xml:id=" + quoteattr(maybe_id)
-            return (f'<book xmlns="http://docbook.org/ns/docbook"'
-                    f'      xmlns:xlink="http://www.w3.org/1999/xlink"'
-                    f'      {maybe_id} version="5.0">'
-                    f'  <title>{self.renderInline(tokens[1].children)}</title>'
-                    f'  <subtitle>{self.renderInline(tokens[4].children)}</subtitle>'
-                    f'  {super().render(tokens[6:])}'
-                    f'</book>')
+            return self._render_book(tokens)

         return super().render(tokens)

+    @abstractmethod
+    def _render_book(self, tokens: Sequence[Token]) -> str:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        raise NotImplementedError()
+
+class ManualDocBookRenderer(RendererMixin, DocBookRenderer):
+    def __init__(self, toplevel_tag: str, revision: str, manpage_urls: Mapping[str, str]):
+        super().__init__(toplevel_tag, revision, manpage_urls)
+
+    def _render_book(self, tokens: Sequence[Token]) -> str:
+        assert tokens[1].children
+        assert tokens[4].children
+        if (maybe_id := cast(str, tokens[0].attrs.get('id', ""))):
+            maybe_id = "xml:id=" + xml.quoteattr(maybe_id)
+        return (f'<book xmlns="http://docbook.org/ns/docbook"'
+                f'      xmlns:xlink="http://www.w3.org/1999/xlink"'
+                f'      {maybe_id} version="5.0">'
+                f'  <title>{self.renderInline(tokens[1].children)}</title>'
+                f'  <subtitle>{self.renderInline(tokens[4].children)}</subtitle>'
+                f'  {super(DocBookRenderer, self).render(tokens[6:])}'
+                f'</book>')
+
     def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> tuple[str, dict[str, str]]:
         (tag, attrs) = super()._heading_tag(token, tokens, i)
         # render() has already verified that we don't have supernumerary headings and since the
@@ -192,10 +220,10 @@ class ManualDocBookRenderer(DocBookRenderer):
     def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "\n" + super().paragraph_close(token, tokens, i)
     def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
-        return f"<programlisting>\n{escape(token.content)}</programlisting>"
+        return f"<programlisting>\n{xml.escape(token.content)}</programlisting>"
     def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
-        info = f" language={quoteattr(token.info)}" if token.info != "" else ""
-        return f"<programlisting{info}>\n{escape(token.content)}</programlisting>"
+        info = f" language={xml.quoteattr(token.info)}" if token.info != "" else ""
+        return f"<programlisting{info}>\n{xml.escape(token.content)}</programlisting>"

 class DocBookConverter(BaseConverter[ManualDocBookRenderer]):
     INCLUDE_ARGS_NS = "docbook"
@@ -205,6 +233,366 @@ class DocBookConverter(BaseConverter[ManualDocBookRenderer]):
         self._renderer = ManualDocBookRenderer('book', revision, manpage_urls)

+class HTMLParameters(NamedTuple):
+    generator: str
+    stylesheets: Sequence[str]
+    scripts: Sequence[str]
+    toc_depth: int
+    chunk_toc_depth: int
+
+class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
+    _base_path: Path
+    _html_params: HTMLParameters
+
+    def __init__(self, toplevel_tag: str, revision: str, html_params: HTMLParameters,
+                 manpage_urls: Mapping[str, str], xref_targets: dict[str, XrefTarget],
+                 base_path: Path):
+        super().__init__(toplevel_tag, revision, manpage_urls, xref_targets)
+        self._base_path, self._html_params = base_path, html_params
+
+    def _push(self, tag: str, hlevel_offset: int) -> Any:
+        result = (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset)
+        self._hlevel_offset += hlevel_offset
+        self._toplevel_tag, self._headings, self._attrspans = tag, [], []
+        return result
+
+    def _pop(self, state: Any) -> None:
+        (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset) = state
+
+    def _render_book(self, tokens: Sequence[Token]) -> str:
+        assert tokens[4].children
+        title_id = cast(str, tokens[0].attrs.get('id', ""))
+        title = self._xref_targets[title_id].title
+        # subtitles don't have IDs, so we can't use xrefs to get them
+        subtitle = self.renderInline(tokens[4].children)
+
+        toc = TocEntry.of(tokens[0])
+        return "\n".join([
+            self._file_header(toc),
+            ' <div class="book">',
+            '  <div class="titlepage">',
+            '   <div>',
+            f'    <div><h1 class="title"><a id="{html.escape(title_id, True)}"></a>{title}</h1></div>',
+            f'    <div><h2 class="subtitle">{subtitle}</h2></div>',
+            '   </div>',
+            "   <hr />",
+            '  </div>',
+            self._build_toc(tokens, 0),
+            super(HTMLRenderer, self).render(tokens[6:]),
+            ' </div>',
+            self._file_footer(toc),
+        ])
+
+    def _file_header(self, toc: TocEntry) -> str:
+        prev_link, up_link, next_link = "", "", ""
+        prev_a, next_a, parent_title = "", "", "&nbsp;"
+        home = toc.root
+        if toc.prev:
+            prev_link = f'<link rel="prev" href="{toc.prev.target.href()}" />'
+            prev_a = f'<a accesskey="p" href="{toc.prev.target.href()}">Prev</a>'
+        if toc.parent:
+            up_link = (
+                f'<link rel="up" href="{toc.parent.target.href()}" '
+                f'title="{toc.parent.target.title}" />'
+            )
+            if (part := toc.parent) and part.kind != 'book':
+                assert part.target.title
+                parent_title = part.target.title
+        if toc.next:
+            next_link = f'<link rel="next" href="{toc.next.target.href()}" />'
+            next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>'
+        return "\n".join([
+            '<?xml version="1.0" encoding="utf-8" standalone="no"?>',
+            '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">',
+            '<html xmlns="http://www.w3.org/1999/xhtml">',
+            ' <head>',
+            f'  <title>{toc.target.title}</title>',
+            "".join((f'<link rel="stylesheet" type="text/css" href="{html.escape(style, True)}" />'
+                     for style in self._html_params.stylesheets)),
+            "".join((f'<script src="{html.escape(script, True)}" type="text/javascript"></script>'
+                     for script in self._html_params.scripts)),
+            f'  <meta name="generator" content="{html.escape(self._html_params.generator, True)}" />',
+            f'  <link rel="home" href="{home.target.href()}" title="{home.target.title}" />',
+            f'  {up_link}{prev_link}{next_link}',
+            ' </head>',
+            ' <body>',
+            '  <div class="navheader"><hr /></div>',
+        ])
+
+    def _file_footer(self, toc: TocEntry) -> str:
+        # prev, next = self._get_prev_and_next()
+        prev_a, up_a, home_a, next_a = "", "&nbsp;", "&nbsp;", ""
+        prev_text, up_text, next_text = "", "", ""
+        home = toc.root
+        if toc.prev:
+            prev_a = f'<a accesskey="p" href="{toc.prev.target.href()}">Prev</a>'
+            assert toc.prev.target.title
+            prev_text = toc.prev.target.title
+        if toc.parent:
+            home_a = f'<a accesskey="h" href="{home.target.href()}">Home</a>'
+            if toc.parent != home:
+                up_a = f'<a accesskey="u" href="{toc.parent.target.href()}">Up</a>'
+        if toc.next:
+            next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>'
+            assert toc.next.target.title
+            next_text = toc.next.target.title
+        return "\n".join([
+            '  <div class="navfooter"><hr /></div>',
+            ' </body>',
+            '</html>',
+        ])
+
+    def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        if token.tag == 'h1':
+            return self._toplevel_tag
+        return super()._heading_tag(token, tokens, i)
+    def _build_toc(self, tokens: Sequence[Token], i: int) -> str:
+        toc = TocEntry.of(tokens[i])
+        if toc.kind == 'section':
+            return ""
+        def walk_and_emit(toc: TocEntry, depth: int) -> list[str]:
+            if depth <= 0:
+                return []
+            result = []
+            for child in toc.children:
+                result.append(
+                    f'<dt>'
+                    f' <span class="{html.escape(child.kind, True)}">'
+                    f'  <a href="{child.target.href()}">{child.target.toc_html}</a>'
+                    f' </span>'
+                    f'</dt>'
+                )
+                # we want to look straight through parts because docbook-xsl does too, but it
+                # also makes for more useful top-level tocs.
+                next_level = walk_and_emit(child, depth - (0 if child.kind == 'part' else 1))
+                if next_level:
+                    result.append(f'<dd>{"".join(next_level)}</dd>')
+            return result
+        toc_depth = (
+            self._html_params.chunk_toc_depth
+            if toc.starts_new_chunk and toc.kind != 'book'
+            else self._html_params.toc_depth
+        )
+        if not (items := walk_and_emit(toc, toc_depth)):
+            return ""
+        return (
+            f'<div class="toc">'
+            f' <p><strong>Table of Contents</strong></p>'
+            f' <dl class="toc">'
+            f'  {"".join(items)}'
+            f' </dl>'
+            f'</div>'
+        )
+
+    def _make_hN(self, level: int) -> tuple[str, str]:
+        # for some reason chapters don't increase the hN nesting count in docbook xslts. duplicate
+        # this for consistency.
+        if self._toplevel_tag == 'chapter':
+            level -= 1
+        # TODO docbook compat. these are never useful for us, but not having them breaks manual
+        # compare workflows while docbook is still allowed.
+        style = ""
+        if level + self._hlevel_offset < 3 \
+           and (self._toplevel_tag == 'section' or (self._toplevel_tag == 'chapter' and level > 0)):
+            style = "clear: both"
+        tag, hstyle = super()._make_hN(max(1, level))
+        return tag, style
+
+    def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
+        outer, inner = [], []
+        # since books have no non-include content the toplevel book wrapper will not count
+        # towards nesting depth. other types will have at least a title+id heading which
+        # *does* count towards the nesting depth. chapters give a -1 to included sections
+        # mirroring the special handling in _make_hN. sigh.
+        hoffset = (
+            0 if not self._headings
+            else self._headings[-1].level - 1 if self._toplevel_tag == 'chapter'
+            else self._headings[-1].level
+        )
+        outer.append(self._maybe_close_partintro())
+        into = token.meta['include-args'].get('into-file')
+        fragments = token.meta['included']
+        state = self._push(tag, hoffset)
+        if into:
+            toc = TocEntry.of(fragments[0][0][0])
+            inner.append(self._file_header(toc))
+            # we do not set _hlevel_offset=0 because docbook doesn't either.
+        else:
+            inner = outer
+        for included, path in fragments:
+            try:
+                inner.append(self.render(included))
+            except Exception as e:
+                raise RuntimeError(f"rendering {path}") from e
+        if into:
+            inner.append(self._file_footer(toc))
+            (self._base_path / into).write_text("".join(inner))
+        self._pop(state)
+        return "".join(outer)
+
+    def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        conv = options.HTMLConverter(self._manpage_urls, self._revision, False,
+                                     token.meta['list-id'], token.meta['id-prefix'],
+                                     self._xref_targets)
+        conv.add_options(token.meta['source'])
+        return conv.finalize()
+
+def _to_base26(n: int) -> str:
+    return (_to_base26(n // 26) if n > 26 else "") + chr(ord("A") + n % 26)
+
+class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
+    INCLUDE_ARGS_NS = "html"
+    INCLUDE_FRAGMENT_ALLOWED_ARGS = { 'into-file' }
+
+    _revision: str
+    _html_params: HTMLParameters
+    _manpage_urls: Mapping[str, str]
+    _xref_targets: dict[str, XrefTarget]
+    _redirection_targets: set[str]
+    _appendix_count: int = 0
+
+    def _next_appendix_id(self) -> str:
+        self._appendix_count += 1
+        return _to_base26(self._appendix_count - 1)
+
+    def __init__(self, revision: str, html_params: HTMLParameters, manpage_urls: Mapping[str, str]):
+        super().__init__()
+        self._revision, self._html_params, self._manpage_urls = revision, html_params, manpage_urls
+        self._xref_targets = {}
+        self._redirection_targets = set()
+        # renderer not set on purpose since it has a dependency on the output path!
+
+    def convert(self, infile: Path, outfile: Path) -> None:
+        self._renderer = ManualHTMLRenderer('book', self._revision, self._html_params,
+                                            self._manpage_urls, self._xref_targets, outfile.parent)
+        super().convert(infile, outfile)
+
+    def _parse(self, src: str) -> list[Token]:
+        tokens = super()._parse(src)
+        for token in tokens:
+            if not token.type.startswith('included_') \
+               or not (into := token.meta['include-args'].get('into-file')):
+                continue
+            assert token.map
+            if len(token.meta['included']) == 0:
+                raise RuntimeError(f"redirection target {into} in line {token.map[0] + 1} is empty!")
+            # we use blender-style //path to denote paths relative to the origin file
+            # (usually index.html). this makes everything a lot easier and clearer.
+            if not into.startswith("//") or '/' in into[2:]:
+                raise RuntimeError(f"html:into-file must be a relative-to-origin //filename", into)
+            into = token.meta['include-args']['into-file'] = into[2:]
+            if into in self._redirection_targets:
+                raise RuntimeError(f"redirection target {into} in line {token.map[0] + 1} is already in use")
+            self._redirection_targets.add(into)
+        return tokens
+
+    # xref | (id, type, heading inlines, file, starts new file)
+    def _collect_ids(self, tokens: Sequence[Token], target_file: str, typ: str, file_changed: bool
+                     ) -> list[XrefTarget | tuple[str, str, Token, str, bool]]:
+        result: list[XrefTarget | tuple[str, str, Token, str, bool]] = []
+        # collect all IDs and their xref substitutions. headings are deferred until everything
+        # has been parsed so we can resolve links in headings. if that's even used anywhere.
+        for (i, bt) in enumerate(tokens):
+            if bt.type == 'heading_open' and (id := cast(str, bt.attrs.get('id', ''))):
+                result.append((id, typ if bt.tag == 'h1' else 'section', tokens[i + 1], target_file,
+                               i == 0 and file_changed))
+            elif bt.type == 'included_options':
+                id_prefix = bt.meta['id-prefix']
+                for opt in bt.meta['source'].keys():
+                    id = make_xml_id(f"{id_prefix}{opt}")
+                    name = html.escape(opt)
+                    result.append(XrefTarget(id, f'<code class="option">{name}</code>', name, None, target_file))
+            elif bt.type.startswith('included_'):
+                sub_file = bt.meta['include-args'].get('into-file', target_file)
+                subtyp = bt.type.removeprefix('included_').removesuffix('s')
+                for si, (sub, _path) in enumerate(bt.meta['included']):
+                    result += self._collect_ids(sub, sub_file, subtyp, si == 0 and sub_file != target_file)
+            elif bt.type == 'inline':
+                assert bt.children
+                result += self._collect_ids(bt.children, target_file, typ, False)
+            elif id := cast(str, bt.attrs.get('id', '')):
+                # anchors and examples have no titles we could use, but we'll have to put
+                # *something* here to communicate that there's no title.
+                result.append(XrefTarget(id, "???", None, None, target_file))
+        return result
+
+    def _render_xref(self, id: str, typ: str, inlines: Token, path: str, drop_fragment: bool) -> XrefTarget:
+        assert inlines.children
+        title_html = self._renderer.renderInline(inlines.children)
+        if typ == 'appendix':
+            # NOTE the docbook compat is strong here
+            n = self._next_appendix_id()
+            prefix = f"Appendix\u00A0{n}.\u00A0"
+            # HACK for docbook compat: prefix the title inlines with appendix id if
+            # necessary. the alternative is to mess with titlepage rendering in headings,
+            # which seems just a lot worse than this
+            prefix_tokens = [Token(type='text', tag='', nesting=0, content=prefix)]
+            inlines.children = prefix_tokens + list(inlines.children)
+            title = prefix + title_html
+            toc_html = f"{n}. {title_html}"
+            title_html = f"Appendix&nbsp;{n}"
+        else:
+            toc_html, title = title_html, title_html
+        title_html = (
+            f"<em>{title_html}</em>"
+            if typ == 'chapter'
+            else title_html if typ in [ 'book', 'part' ]
+            else f'the section called “{title_html}”'
+        )
+        return XrefTarget(id, title_html, toc_html, re.sub('<.*?>', '', title), path, drop_fragment)
+
+    def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None:
+        xref_queue = self._collect_ids(tokens, outfile.name, 'book', True)
+
+        failed = False
+        deferred = []
+        while xref_queue:
+            for item in xref_queue:
+                try:
+                    target = item if isinstance(item, XrefTarget) else self._render_xref(*item)
+                except UnresolvedXrefError as e:
+                    if failed:
+                        raise
+                    deferred.append(item)
+                    continue
+
+                if target.id in self._xref_targets:
+                    raise RuntimeError(f"found duplicate id #{target.id}")
+                self._xref_targets[target.id] = target
+            if len(deferred) == len(xref_queue):
+                failed = True # do another round and report the first error
+            xref_queue = deferred
+
+        TocEntry.collect_and_link(self._xref_targets, tokens)
+

 def _build_cli_db(p: argparse.ArgumentParser) -> None:
     p.add_argument('--manpage-urls', required=True)
@@ -212,17 +600,40 @@ def _build_cli_db(p: argparse.ArgumentParser) -> None:
     p.add_argument('infile', type=Path)
     p.add_argument('outfile', type=Path)

+def _build_cli_html(p: argparse.ArgumentParser) -> None:
+    p.add_argument('--manpage-urls', required=True)
+    p.add_argument('--revision', required=True)
+    p.add_argument('--generator', default='nixos-render-docs')
+    p.add_argument('--stylesheet', default=[], action='append')
+    p.add_argument('--script', default=[], action='append')
+    p.add_argument('--toc-depth', default=1, type=int)
+    p.add_argument('--chunk-toc-depth', default=1, type=int)
+    p.add_argument('infile', type=Path)
+    p.add_argument('outfile', type=Path)
+
 def _run_cli_db(args: argparse.Namespace) -> None:
     with open(args.manpage_urls, 'r') as manpage_urls:
         md = DocBookConverter(json.load(manpage_urls), args.revision)
         md.convert(args.infile, args.outfile)

+def _run_cli_html(args: argparse.Namespace) -> None:
+    with open(args.manpage_urls, 'r') as manpage_urls:
+        md = HTMLConverter(
+            args.revision,
+            HTMLParameters(args.generator, args.stylesheet, args.script, args.toc_depth,
+                           args.chunk_toc_depth),
+            json.load(manpage_urls))
+        md.convert(args.infile, args.outfile)
+
 def build_cli(p: argparse.ArgumentParser) -> None:
     formats = p.add_subparsers(dest='format', required=True)
     _build_cli_db(formats.add_parser('docbook'))
+    _build_cli_html(formats.add_parser('html'))

 def run_cli(args: argparse.Namespace) -> None:
     if args.format == 'docbook':
         _run_cli_db(args)
+    elif args.format == 'html':
+        _run_cli_html(args)
     else:
         raise RuntimeError('format not hooked up', args)

From 54f4992e80df3b8007dd8235c9f8d6ec77eef16f Mon Sep 17 00:00:00 2001
From: pennae <github@quasiparticle.net>
Date: Mon, 20 Feb 2023 18:55:15 +0100
Subject: [PATCH 022/154] nixos/manual: render html with nixos-render-docs if
 !allowDocBook

this reproduces the docbook-generated html manual exactly enough to
appease the compare workflows while we still support both toolchains.
it's also a lot faster than the docbook toolchain, rendering the entire
html manual in about two seconds on this machine (while docbook needs
about 20).
--- nixos/doc/manual/default.nix | 90 ++++++++++++++++++++++++------------ nixos/doc/manual/manual.md | 5 +- 2 files changed, 65 insertions(+), 30 deletions(-) diff --git a/nixos/doc/manual/default.nix b/nixos/doc/manual/default.nix index 714b3efca20a..2e07edd61c2a 100644 --- a/nixos/doc/manual/default.nix +++ b/nixos/doc/manual/default.nix @@ -135,28 +135,32 @@ let } ''; + prepareManualFromMD = '' + cp -r --no-preserve=all $inputs/* . + + substituteInPlace ./manual.md \ + --replace '@NIXOS_VERSION@' "${version}" + substituteInPlace ./configuration/configuration.md \ + --replace \ + '@MODULE_CHAPTERS@' \ + ${lib.escapeShellArg (lib.concatMapStringsSep "\n" (p: "${p.value}") config.meta.doc)} + substituteInPlace ./nixos-options.md \ + --replace \ + '@NIXOS_OPTIONS_JSON@' \ + ${optionsDoc.optionsJSON}/share/doc/nixos/options.json + substituteInPlace ./development/writing-nixos-tests.section.md \ + --replace \ + '@NIXOS_TEST_OPTIONS_JSON@' \ + ${testOptionsDoc.optionsJSON}/share/doc/nixos/options.json + ''; + manual-combined = runCommand "nixos-manual-combined" { inputs = lib.sourceFilesBySuffices ./. [ ".xml" ".md" ]; nativeBuildInputs = [ pkgs.nixos-render-docs pkgs.libxml2.bin pkgs.libxslt.bin ]; meta.description = "The NixOS manual as plain docbook XML"; } '' - cp -r --no-preserve=all $inputs/* . - - substituteInPlace ./manual.md \ - --replace '@NIXOS_VERSION@' "${version}" - substituteInPlace ./configuration/configuration.md \ - --replace \ - '@MODULE_CHAPTERS@' \ - ${lib.escapeShellArg (lib.concatMapStringsSep "\n" (p: "${p.value}") config.meta.doc)} - substituteInPlace ./nixos-options.md \ - --replace \ - '@NIXOS_OPTIONS_JSON@' \ - ${optionsDoc.optionsJSON}/share/doc/nixos/options.json - substituteInPlace ./development/writing-nixos-tests.section.md \ - --replace \ - '@NIXOS_TEST_OPTIONS_JSON@' \ - ${testOptionsDoc.optionsJSON}/share/doc/nixos/options.json + ${prepareManualFromMD} nixos-render-docs -j $NIX_BUILD_CORES manual docbook \ --manpage-urls ${manpageUrls} \ @@ -193,7 +197,14 @@ in rec { # Generate the NixOS manual. manualHTML = runCommand "nixos-manual-html" - { nativeBuildInputs = [ buildPackages.libxml2.bin buildPackages.libxslt.bin ]; + { nativeBuildInputs = + if allowDocBook then [ + buildPackages.libxml2.bin + buildPackages.libxslt.bin + ] else [ + buildPackages.nixos-render-docs + ]; + inputs = lib.optionals (! allowDocBook) (lib.sourceFilesBySuffices ./. [ ".md" ]); meta.description = "The NixOS manual in HTML format"; allowedReferences = ["out"]; } @@ -201,23 +212,44 @@ in rec { # Generate the HTML manual. 
dst=$out/share/doc/nixos mkdir -p $dst - xsltproc \ - ${manualXsltprocOptions} \ - --stringparam id.warnings "1" \ - --nonet --output $dst/ \ - ${docbook_xsl_ns}/xml/xsl/docbook/xhtml/chunktoc.xsl \ - ${manual-combined}/manual-combined.xml \ - |& tee xsltproc.out - grep "^ID recommended on" xsltproc.out &>/dev/null && echo "error: some IDs are missing" && false - rm xsltproc.out - - mkdir -p $dst/images/callouts - cp ${docbook_xsl_ns}/xml/xsl/docbook/images/callouts/*.svg $dst/images/callouts/ cp ${../../../doc/style.css} $dst/style.css cp ${../../../doc/overrides.css} $dst/overrides.css cp -r ${pkgs.documentation-highlighter} $dst/highlightjs + ${if allowDocBook then '' + xsltproc \ + ${manualXsltprocOptions} \ + --stringparam id.warnings "1" \ + --nonet --output $dst/ \ + ${docbook_xsl_ns}/xml/xsl/docbook/xhtml/chunktoc.xsl \ + ${manual-combined}/manual-combined.xml \ + |& tee xsltproc.out + grep "^ID recommended on" xsltproc.out &>/dev/null && echo "error: some IDs are missing" && false + rm xsltproc.out + + mkdir -p $dst/images/callouts + cp ${docbook_xsl_ns}/xml/xsl/docbook/images/callouts/*.svg $dst/images/callouts/ + '' else '' + ${prepareManualFromMD} + + # TODO generator is set like this because the docbook/md manual compare workflow will + # trigger if it's different + nixos-render-docs -j $NIX_BUILD_CORES manual html \ + --manpage-urls ${manpageUrls} \ + --revision ${lib.escapeShellArg revision} \ + --generator "DocBook XSL Stylesheets V${docbook_xsl_ns.version}" \ + --stylesheet style.css \ + --stylesheet overrides.css \ + --stylesheet highlightjs/mono-blue.css \ + --script ./highlightjs/highlight.pack.js \ + --script ./highlightjs/loader.js \ + --toc-depth 1 \ + --chunk-toc-depth 1 \ + ./manual.md \ + $dst/index.html + ''} + mkdir -p $out/nix-support echo "nix-build out $out" >> $out/nix-support/hydra-build-products echo "doc manual $dst" >> $out/nix-support/hydra-build-products diff --git a/nixos/doc/manual/manual.md b/nixos/doc/manual/manual.md index 1972eaeda872..8cb766eeccf6 100644 --- a/nixos/doc/manual/manual.md +++ b/nixos/doc/manual/manual.md @@ -47,7 +47,10 @@ development/development.md contributing-to-this-manual.chapter.md ``` -```{=include=} appendix +```{=include=} appendix html:into-file=//options.html nixos-options.md +``` + +```{=include=} appendix html:into-file=//release-notes.html release-notes/release-notes.md ``` From fd18361c85b13c4e70922c92cab5468da9d6c109 Mon Sep 17 00:00:00 2001 From: Ran Xiao Date: Thu, 16 Feb 2023 11:23:08 +1100 Subject: [PATCH 023/154] maintainers.nix: add myself --- maintainers/maintainer-list.nix | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/maintainers/maintainer-list.nix b/maintainers/maintainer-list.nix index 161a6065df87..cc15de52ecef 100644 --- a/maintainers/maintainer-list.nix +++ b/maintainers/maintainer-list.nix @@ -12893,6 +12893,12 @@ githubId = 12877905; name = "Roman Volosatovs"; }; + rxiao = { + email = "ben.xiao@me.com"; + github = "benxiao"; + githubId = 10908495; + name = "Ran Xiao"; + }; ryanartecona = { email = "ryanartecona@gmail.com"; github = "ryanartecona"; From 166ec8a5ec7c14b1f3633883701bc0e25b77194c Mon Sep 17 00:00:00 2001 From: Ran Xiao Date: Thu, 16 Feb 2023 11:27:37 +1100 Subject: [PATCH 024/154] python310Packages.execnb: init at 0.1.4 --- .../python-modules/execnb/default.nix | 33 +++++++++++++++++++ pkgs/top-level/python-packages.nix | 2 ++ 2 files changed, 35 insertions(+) create mode 100644 pkgs/development/python-modules/execnb/default.nix diff --git 
a/pkgs/development/python-modules/execnb/default.nix b/pkgs/development/python-modules/execnb/default.nix new file mode 100644 index 000000000000..22661c9ad07a --- /dev/null +++ b/pkgs/development/python-modules/execnb/default.nix @@ -0,0 +1,33 @@ +{ lib +, buildPythonPackage +, fetchPypi +, fastcore +, traitlets +, ipython +, pythonOlder +}: + +buildPythonPackage rec { + pname = "execnb"; + version = "0.1.4"; + format = "setuptools"; + disabled = pythonOlder "3.6"; + + src = fetchPypi { + inherit pname version; + sha256 = "sha256-y9gSvzJA8Fsh56HbA8SszlozsBBfTLfgWGDXm9uSBvA="; + }; + + propagatedBuildInputs = [ fastcore traitlets ipython ]; + + # no real tests + doCheck = false; + pythonImportsCheck = [ "execnb" ]; + + meta = with lib; { + homepage = "https://github.com/fastai/execnb"; + description = "Execute a jupyter notebook, fast, without needing jupyter"; + license = licenses.asl20; + maintainers = with maintainers; [ rxiao ]; + }; +} diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix index f09a8eb1a188..6c6eb15d0332 100644 --- a/pkgs/top-level/python-packages.nix +++ b/pkgs/top-level/python-packages.nix @@ -3168,6 +3168,8 @@ self: super: with self; { exchangelib = callPackage ../development/python-modules/exchangelib { }; + execnb = callPackage ../development/python-modules/execnb { }; + execnet = callPackage ../development/python-modules/execnet { }; executing = callPackage ../development/python-modules/executing { }; From 460911e6e72d06b189fe0f1889c1a94b15b4af0c Mon Sep 17 00:00:00 2001 From: Ran Xiao Date: Thu, 16 Feb 2023 11:29:55 +1100 Subject: [PATCH 025/154] python310Packages.fastdownload: init at 0.0.6 --- .../python-modules/fastdownload/default.nix | 32 +++++++++++++++++++ pkgs/top-level/python-packages.nix | 2 ++ 2 files changed, 34 insertions(+) create mode 100644 pkgs/development/python-modules/fastdownload/default.nix diff --git a/pkgs/development/python-modules/fastdownload/default.nix b/pkgs/development/python-modules/fastdownload/default.nix new file mode 100644 index 000000000000..4a60a7042986 --- /dev/null +++ b/pkgs/development/python-modules/fastdownload/default.nix @@ -0,0 +1,32 @@ +{ lib +, buildPythonPackage +, fetchPypi +, fastprogress +, fastcore +, pythonOlder +}: + +buildPythonPackage rec { + pname = "fastdownload"; + version = "0.0.6"; + format = "setuptools"; + disabled = pythonOlder "3.6"; + + src = fetchPypi { + inherit pname version; + sha256 = "sha256-1ayb0zx8rFKDgqlq/tVVLqDkh47T5jofHt53r8bWr30="; + }; + + propagatedBuildInputs = [ fastprogress fastcore ]; + + # no real tests + doCheck = false; + pythonImportsCheck = [ "fastdownload" ]; + + meta = with lib; { + homepage = "https://github.com/fastai/fastdownload"; + description = "Easily download, verify, and extract archives"; + license = licenses.asl20; + maintainers = with maintainers; [ rxiao ]; + }; +} diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix index 6c6eb15d0332..c556abe6372d 100644 --- a/pkgs/top-level/python-packages.nix +++ b/pkgs/top-level/python-packages.nix @@ -3269,6 +3269,8 @@ self: super: with self; { fastdiff = callPackage ../development/python-modules/fastdiff { }; + fastdownload = callPackage ../development/python-modules/fastdownload { }; + fastdtw = callPackage ../development/python-modules/fastdtw { }; fastecdsa = callPackage ../development/python-modules/fastecdsa { }; From cdd01b988d3df152931137c401906ec6b87248e0 Mon Sep 17 00:00:00 2001 From: Ran Xiao Date: Thu, 16 Feb 2023 11:32:56 +1100 Subject: 
[PATCH 026/154] python310Packages.nbdev: init at 2.3.11 --- .../python-modules/nbdev/default.nix | 36 +++++++++++++++++++ pkgs/top-level/python-packages.nix | 2 ++ 2 files changed, 38 insertions(+) create mode 100644 pkgs/development/python-modules/nbdev/default.nix diff --git a/pkgs/development/python-modules/nbdev/default.nix b/pkgs/development/python-modules/nbdev/default.nix new file mode 100644 index 000000000000..c47508918a8d --- /dev/null +++ b/pkgs/development/python-modules/nbdev/default.nix @@ -0,0 +1,36 @@ +{ lib +, buildPythonPackage +, fetchPypi +, fastprogress +, fastcore +, asttokens +, astunparse +, watchdog +, execnb +, ghapi +, pythonOlder +}: + +buildPythonPackage rec { + pname = "nbdev"; + version = "2.3.11"; + format = "setuptools"; + disabled = pythonOlder "3.6"; + + src = fetchPypi { + inherit pname version; + sha256 = "sha256-ITMCmuAb1lXONbP5MREpk8vfNSztoTEmT87W1o+fbIU="; + }; + + propagatedBuildInputs = [ fastprogress fastcore asttokens astunparse watchdog execnb ghapi ]; + # no real tests + doCheck = false; + pythonImportsCheck = [ "nbdev" ]; + + meta = with lib; { + homepage = "https://github.com/fastai/nbdev"; + description = "Create delightful software with Jupyter Notebooks"; + license = licenses.asl20; + maintainers = with maintainers; [ rxiao ]; + }; +} diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix index c556abe6372d..7b8a42b0b965 100644 --- a/pkgs/top-level/python-packages.nix +++ b/pkgs/top-level/python-packages.nix @@ -6272,6 +6272,8 @@ self: super: with self; { nbconvert = callPackage ../development/python-modules/nbconvert { }; + nbdev = callPackage ../development/python-modules/nbdev { }; + nbdime = callPackage ../development/python-modules/nbdime { }; nbformat = callPackage ../development/python-modules/nbformat { }; From e9be42072d493a2696579ed21eb187da6fb52b25 Mon Sep 17 00:00:00 2001 From: Ran Xiao Date: Thu, 16 Feb 2023 11:34:31 +1100 Subject: [PATCH 027/154] python310Packages.fastai: init at 2.7.10 --- .../python-modules/fastai/default.nix | 53 +++++++++++++++++++ pkgs/top-level/python-packages.nix | 2 + 2 files changed, 55 insertions(+) create mode 100644 pkgs/development/python-modules/fastai/default.nix diff --git a/pkgs/development/python-modules/fastai/default.nix b/pkgs/development/python-modules/fastai/default.nix new file mode 100644 index 000000000000..dcb0b582ea74 --- /dev/null +++ b/pkgs/development/python-modules/fastai/default.nix @@ -0,0 +1,53 @@ +{ lib +, buildPythonPackage +, fetchPypi +, fastprogress +, fastcore +, fastdownload +, torch +, torchvision +, matplotlib +, pillow +, scikit-learn +, scipy +, spacy +, pandas +, requests +, pythonOlder +}: + +buildPythonPackage rec { + pname = "fastai"; + version = "2.7.10"; + format = "setuptools"; + disabled = pythonOlder "3.6"; + + src = fetchPypi { + inherit pname version; + sha256 = "sha256-zO9qGFrjpjfvybzZ/qjki3X0VNDrrTtt9CbyL64gA50="; + }; + + propagatedBuildInputs = [ + fastprogress + fastcore + fastdownload + torchvision + matplotlib + pillow + scikit-learn + scipy + spacy + pandas + requests + ]; + + doCheck = false; + pythonImportsCheck = [ "fastai" ]; + + meta = with lib; { + homepage = "https://github.com/fastai/fastai"; + description = "The fastai deep learning library"; + license = licenses.asl20; + maintainers = with maintainers; [ rxiao ]; + }; +} diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix index 7b8a42b0b965..cb471d1c87b1 100644 --- a/pkgs/top-level/python-packages.nix +++ 
b/pkgs/top-level/python-packages.nix @@ -3255,6 +3255,8 @@ self: super: with self; { faraday-plugins = callPackage ../development/python-modules/faraday-plugins { }; + fastai = callPackage ../development/python-modules/fastai { }; + fastapi = callPackage ../development/python-modules/fastapi { }; fastapi-mail = callPackage ../development/python-modules/fastapi-mail { }; From e867e3e4f52736dfd46308b4875290c9b5d98520 Mon Sep 17 00:00:00 2001 From: Aaron Arinder Date: Tue, 7 Feb 2023 13:27:17 -0500 Subject: [PATCH 028/154] maintainers: adding aaronarinder --- maintainers/maintainer-list.nix | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/maintainers/maintainer-list.nix b/maintainers/maintainer-list.nix index 6431ef96fc59..c90c7f79a7da 100644 --- a/maintainers/maintainer-list.nix +++ b/maintainers/maintainer-list.nix @@ -200,6 +200,12 @@ githubId = 22131756; name = "Aaqa Ishtyaq"; }; + aaronarinder = { + email = "aaronarinder@gmail.com"; + github = "aaronArinder"; + githubId = 26738844; + name = "Aaron Arinder"; + }; aaronjanse = { email = "aaron@ajanse.me"; matrix = "@aaronjanse:matrix.org"; From 64a2ee45ea50ecf50761e02d5c6badac6ea501d6 Mon Sep 17 00:00:00 2001 From: Aaron Arinder Date: Tue, 7 Feb 2023 13:43:12 -0500 Subject: [PATCH 029/154] rover: 0.5.1 -> 0.11.0 --- pkgs/development/tools/rover/default.nix | 31 +- pkgs/development/tools/rover/schema/etag.id | 2 +- pkgs/development/tools/rover/schema/hash.id | 1 + .../tools/rover/schema/schema.graphql | 2104 +++++++++++++++-- 4 files changed, 1980 insertions(+), 158 deletions(-) create mode 100644 pkgs/development/tools/rover/schema/hash.id diff --git a/pkgs/development/tools/rover/default.nix b/pkgs/development/tools/rover/default.nix index 5144ecb38e88..3099b2e72619 100644 --- a/pkgs/development/tools/rover/default.nix +++ b/pkgs/development/tools/rover/default.nix @@ -3,32 +3,46 @@ , fetchFromGitHub , perl , rustPlatform +, darwin +, stdenv }: rustPlatform.buildRustPackage rec { pname = "rover"; - version = "0.5.1"; + version = "0.11.0"; src = fetchFromGitHub { owner = "apollographql"; repo = pname; rev = "v${version}"; - sha256 = "sha256-wBHMND/xpm9o7pkWMUj9lEtEkzy3mX+E4Dt7qDn6auY="; + sha256 = "sha256-Ei6EeM0+b3EsMoRo38nHO79onT9Oq/cfbiCZhyDYQrc="; }; - cargoSha256 = "sha256-n0R2MdAYGsOsYt4x1N1KdGvBZYTALyhSzCGW29bnFU4="; + cargoSha256 = "sha256-+iDU8LPb7P4MNQ8MB5ldbWq4wWRcnbgOmSZ93Z//5O0="; + + buildInputs = lib.optionals stdenv.isDarwin [ + darwin.apple_sdk.frameworks.Security + darwin.apple_sdk.frameworks.CoreServices + ]; nativeBuildInputs = [ perl ]; - # The rover-client's build script (crates/rover-client/build.rs) will try to + # This test checks whether the plugins specified in the plugins json file are + # valid by making a network call to the repo that houses their binaries; but, the + # build env can't make network calls (impurity) + cargoTestFlags = [ + "-- --skip=latest_plugins_are_valid_versions" + ]; + + # The rover-client's build script (xtask/src/commands/prep/schema.rs) will try to # download the API's graphql schema at build time to our read-only filesystem. # To avoid this we pre-download it to a location the build script checks. 
preBuild = '' - mkdir crates/rover-client/.schema - cp ${./schema}/etag.id crates/rover-client/.schema/ - cp ${./schema}/schema.graphql crates/rover-client/.schema/ + cp ${./schema}/hash.id crates/rover-client/.schema/ + cp ${./schema}/etag.id crates/rover-client/.schema/ + cp ${./schema}/schema.graphql crates/rover-client/.schema/ ''; passthru.updateScript = ./update.sh; @@ -41,10 +55,9 @@ rustPlatform.buildRustPackage rec { ''; meta = with lib; { - description = "A CLI for managing and maintaining graphs with Apollo Studio"; + description = "A CLI for interacting with ApolloGraphQL's developer tooling, including managing self-hosted and GraphOS graphs."; homepage = "https://www.apollographql.com/docs/rover"; license = licenses.mit; maintainers = [ maintainers.ivanbrennan ]; - platforms = ["x86_64-linux"]; }; } diff --git a/pkgs/development/tools/rover/schema/etag.id b/pkgs/development/tools/rover/schema/etag.id index a8b9f0cece3e..59331ac0df6d 100644 --- a/pkgs/development/tools/rover/schema/etag.id +++ b/pkgs/development/tools/rover/schema/etag.id @@ -1 +1 @@ -2694c7b893d44c9ad8f5d7161116deb9985a6bd05e8e0cdcd7379947430e6f89 +d35f8c48cb89329f33656944fa9e997de1e778b043b9ca4d78c8accdecfd9046 diff --git a/pkgs/development/tools/rover/schema/hash.id b/pkgs/development/tools/rover/schema/hash.id new file mode 100644 index 000000000000..d730728cfa77 --- /dev/null +++ b/pkgs/development/tools/rover/schema/hash.id @@ -0,0 +1 @@ +ff145f12604d11312e6a2f8a61a3d226fcdb2ca79f6b7fbc24c5a22aa23ab1af diff --git a/pkgs/development/tools/rover/schema/schema.graphql b/pkgs/development/tools/rover/schema/schema.graphql index b20b21a91e41..8cc527f4f822 100644 --- a/pkgs/development/tools/rover/schema/schema.graphql +++ b/pkgs/development/tools/rover/schema/schema.graphql @@ -1,20 +1,319 @@ -"""An organization. Can have multiple members and graphs.""" type Account{auditLogExports:[AuditLogExport!]"""These are the roles that the account is able to use""" availableRoles:[UserPermission!]!"""Get an URL to which an avatar image can be uploaded. Client uploads by sending a PUT request -with the image data to MediaUploadInfo.url. Client SHOULD set the "Content-Type" header to the -browser-inferred MIME type, and SHOULD set the "x-apollo-content-filename" header to the -filename, if such information is available. Client MUST set the "x-apollo-csrf-token" header to -MediaUploadInfo.csrfToken.""" avatarUpload:AvatarUploadResult """Get an image URL for the account's avatar. Note that CORS is not enabled for these URLs. The size -argument is used for bandwidth reduction, and should be the size of the image as displayed in the -application. Apollo's media server will downscale larger images to at least the requested size, -but this will not happen for third-party media servers.""" avatarUrl(size:Int!=40):String billingInfo:BillingInfo companyUrl:String currentBillingMonth:BillingMonth currentPlan:BillingPlan!currentPlanV2:BillingPlanV2!currentSubscription:BillingSubscription currentSubscriptionV2:BillingSubscriptionV2 experimentalFeatures:AccountExperimentalFeatures!expiredTrialSubscription:BillingSubscription expiredTrialSubscriptionV2:BillingSubscriptionV2 graphIDAvailable(id:ID!):Boolean!hasBeenOnTrial:Boolean!hasBeenOnTrialV2:Boolean!"""Globally unique identifier, which isn't guaranteed stable (can be changed by administrators).""" id:ID!"""Internal immutable identifier for the account. 
Only visible to Apollo admins (because it really -shouldn't be used in normal client apps).""" internalID:ID!invitations(includeAccepted:Boolean!=false):[AccountInvitation!]invoices:[Invoice!]invoicesV2:[InvoiceV2!]!isOnExpiredTrial:Boolean!isOnTrial:Boolean!legacyIsOnTrial:Boolean!memberships:[AccountMembership!]"""Name of the organization, which can change over time and isn't unique.""" name:String!provisionedAt:Timestamp recurlyEmail:String """Returns a different registry related stats pertaining to this account.""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow requests(from:Timestamp!to:Timestamp!):Long requestsInCurrentBillingPeriod:Long roles:AccountRoles """How many seats would be included in your next bill, as best estimated today""" seatCountForNextBill:Int seats:Seats secondaryIDs:[ID!]!"""Graphs belonging to this organization.""" services(includeDeleted:Boolean):[Service!]!"""If non-null, this organization tracks its members through an upstream, eg PingOne; -invitations are not possible on SSO-synchronized account.""" sso:OrganizationSSO state:AccountState """A list of reusable invitations for the organization.""" staticInvitations:[OrganizationInviteLink!]stats(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):AccountStatsWindow!@deprecated(reason:"use Account.statsWindow instead")statsWindow(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):AccountStatsWindow subscriptions:[BillingSubscription!]subscriptionsV2:[BillingSubscriptionV2!]!"""Gets a ticket for this org, by id""" ticket(id:ID!):ZendeskTicket """List of Zendesk tickets submitted for this org""" tickets:[ZendeskTicket!]}"""Columns of AccountBillingUsageStats.""" enum AccountBillingUsageStatsColumn{OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountBillingUsageStatsDimensions{operationCountProvidedExplicitly:String schemaTag:String serviceId:ID}"""Filter for data in AccountBillingUsageStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountBillingUsageStatsFilter{and:[AccountBillingUsageStatsFilter!]in:AccountBillingUsageStatsFilterIn not:AccountBillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[AccountBillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountBillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountBillingUsageStatsFilterIn{"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountBillingUsageStatsMetrics{operationCount:Long!}input AccountBillingUsageStatsOrderBySpec{column:AccountBillingUsageStatsColumn!direction:Ordering!}type AccountBillingUsageStatsRecord{"""Dimensions of AccountBillingUsageStats that can be grouped by.""" groupBy:AccountBillingUsageStatsDimensions!"""Metrics of AccountBillingUsageStats that can be aggregated over.""" metrics:AccountBillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountChecksStatsMetrics{totalFailedChecks:Long!totalSuccessfulChecks:Long!}type AccountChecksStatsRecord{id:ID!metrics:AccountChecksStatsMetrics!timestamp:Timestamp!}"""Columns of AccountEdgeServerInfos.""" enum AccountEdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID SERVICE_ID TIMESTAMP USER_VERSION}type AccountEdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID serviceId:ID userVersion:String}"""Filter for data in AccountEdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountEdgeServerInfosFilter{and:[AccountEdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:AccountEdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:AccountEdgeServerInfosFilter or:[AccountEdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose userVersion dimension equals the given value if not null. To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in AccountEdgeServerInfos. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountEdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input AccountEdgeServerInfosOrderBySpec{column:AccountEdgeServerInfosColumn!direction:Ordering!}type AccountEdgeServerInfosRecord{"""Dimensions of AccountEdgeServerInfos that can be grouped by.""" groupBy:AccountEdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountErrorStats.""" enum AccountErrorStatsColumn{CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountErrorStatsDimensions{clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountErrorStatsFilter{and:[AccountErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:AccountErrorStatsFilterIn not:AccountErrorStatsFilter or:[AccountErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. 
To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountErrorStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input AccountErrorStatsOrderBySpec{column:AccountErrorStatsColumn!direction:Ordering!}type AccountErrorStatsRecord{"""Dimensions of AccountErrorStats that can be grouped by.""" groupBy:AccountErrorStatsDimensions!"""Metrics of AccountErrorStats that can be aggregated over.""" metrics:AccountErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountExperimentalFeatures{auditLogs:Boolean!championDashboard:Boolean!federation2Preview:Boolean!preRequestPreview:Boolean!publicVariants:Boolean!variantHomepage:Boolean!webhooksPreview:Boolean!}"""Columns of AccountFieldExecutions.""" enum AccountFieldExecutionsColumn{ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldExecutionsDimensions{fieldName:String parentType:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldExecutions. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldExecutionsFilter{and:[AccountFieldExecutionsFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldExecutionsFilterIn not:AccountFieldExecutionsFilter or:[AccountFieldExecutionsFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldExecutions. Fields match if the corresponding dimension's value is in the given list. 
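Every stats filter in this schema follows the convention described above: a bare dimension field is an equality check against a single non-null value, while the paired `in` input matches against a list that may include null. A minimal sketch of how an `AccountErrorStatsFilter` value combines the two (the enclosing query down to the `AccountStatsWindow` is assumed and not shown in this hunk):

    # Rows where clientName = "web" AND schemaTag is "current" or null.
    errorStats(filter: {
      clientName: "web"
      in: { schemaTag: ["current", null] }
    }) {
      groupBy { clientName schemaTag }
      metrics { errorsCount requestsWithErrorsCount }
      timestamp
    }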
All fields are implicitly ANDed together.""" input AccountFieldExecutionsFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldExecutionsMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input AccountFieldExecutionsOrderBySpec{column:AccountFieldExecutionsColumn!direction:Ordering!}type AccountFieldExecutionsRecord{"""Dimensions of AccountFieldExecutions that can be grouped by.""" groupBy:AccountFieldExecutionsDimensions!"""Metrics of AccountFieldExecutions that can be aggregated over.""" metrics:AccountFieldExecutionsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountFieldLatencies.""" enum AccountFieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldLatenciesFilter{and:[AccountFieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldLatenciesFilterIn not:AccountFieldLatenciesFilter or:[AccountFieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input AccountFieldLatenciesOrderBySpec{column:AccountFieldLatenciesColumn!direction:Ordering!}type AccountFieldLatenciesRecord{"""Dimensions of AccountFieldLatencies that can be grouped by.""" groupBy:AccountFieldLatenciesDimensions!"""Metrics of AccountFieldLatencies that can be aggregated over.""" metrics:AccountFieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountFieldRequestsByClientVersion.""" enum AccountFieldRequestsByClientVersionColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldRequestsByClientVersionDimensions{clientName:String clientVersion:String fieldName:String parentType:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldRequestsByClientVersion. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldRequestsByClientVersionFilter{and:[AccountFieldRequestsByClientVersionFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldRequestsByClientVersionFilterIn not:AccountFieldRequestsByClientVersionFilter or:[AccountFieldRequestsByClientVersionFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldRequestsByClientVersion. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldRequestsByClientVersionFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldRequestsByClientVersionMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input AccountFieldRequestsByClientVersionOrderBySpec{column:AccountFieldRequestsByClientVersionColumn!direction:Ordering!}type AccountFieldRequestsByClientVersionRecord{"""Dimensions of AccountFieldRequestsByClientVersion that can be grouped by.""" groupBy:AccountFieldRequestsByClientVersionDimensions!"""Metrics of AccountFieldRequestsByClientVersion that can be aggregated over.""" metrics:AccountFieldRequestsByClientVersionMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountFieldUsage.""" enum AccountFieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldUsageFilter{and:[AccountFieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldUsageFilterIn not:AccountFieldUsageFilter or:[AccountFieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input AccountFieldUsageOrderBySpec{column:AccountFieldUsageColumn!direction:Ordering!}type AccountFieldUsageRecord{"""Dimensions of AccountFieldUsage that can be grouped by.""" groupBy:AccountFieldUsageDimensions!"""Metrics of AccountFieldUsage that can be aggregated over.""" metrics:AccountFieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountInvitation{"""An accepted invitation cannot be used anymore""" acceptedAt:Timestamp """Who accepted the invitation""" acceptedBy:User """Time the invitation was created""" createdAt:Timestamp!"""Who created the invitation""" createdBy:User email:String!id:ID!"""Last time we sent an email for the invitation""" lastSentAt:Timestamp """Access role for the invitee""" role:UserPermission!}type AccountMembership{account:Account!createdAt:Timestamp!"""If this membership is a free seat (based on role)""" free:Boolean permission:UserPermission!user:User!}type AccountMutation{auditExport(id:String!):AuditLogExportMutation """Cancel a pending change from an annual team subscription to a monthly team subscription when the current period expires.""" cancelConvertAnnualTeamSubscriptionToMonthlyAtNextPeriod:Account """Cancel account subscriptions, subscriptions will remain active until the end of the paid period""" cancelSubscriptions:Account """Changes an annual team subscription to a monthly team subscription when the current period expires.""" convertAnnualTeamSubscriptionToMonthlyAtNextPeriod:Account """Changes a monthly team subscription to an annual team subscription.""" convertMonthlyTeamSubscriptionToAnnual:Account createStaticInvitation(role:UserPermission!):OrganizationInviteLink """Delete the account's avatar. 
Requires Account.canUpdateAvatar to be true.""" deleteAvatar:AvatarDeleteError """Acknowledge that a trial has expired and return to community""" dismissExpiredTrial:Account """Apollo admins only: extend an ongoing trial""" extendTrial(to:Timestamp!):Account """Hard delete an account and all associated services""" hardDelete:Void """Send an invitation to join the account by E-mail""" invite(email:String!role:UserPermission):AccountInvitation """Reactivate a canceled current subscription""" reactivateCurrentSubscription:Account """Refresh billing information from third-party billing service""" refreshBilling:Void """Delete an invitation""" removeInvitation(id:ID):Void """Remove a member of the account""" removeMember(id:ID!):Account requestAuditExport(actors:[ActorInput!]from:Timestamp!graphIds:[String!]to:Timestamp!):Account """Send a new E-mail for an existing invitation""" resendInvitation(id:ID):AccountInvitation revokeStaticInvitation(token:String!):OrganizationInviteLink """Apollo admins only: set the billing plan to an arbitrary plan""" setPlan(id:ID!):Void """Start a new team subscription with the given billing period""" startTeamSubscription(billingPeriod:BillingPeriod!):Account """Start a team trial""" startTrial:Account """This is called by the form shown to users after they cancel their team subscription.""" submitTeamCancellationFeedback(feedback:String!):Void """Apollo admins only: terminate any ongoing subscriptions in the account, without refunds""" terminateSubscriptions:Account """Update the billing address for a Recurly token""" updateBillingAddress(billingAddress:BillingAddressInput!):Account """Update the billing information from a Recurly token""" updateBillingInfo(token:String!):Void updateCompanyUrl(companyUrl:String):Account """Set the E-mail address of the account, used notably for billing""" updateEmail(email:String!):Void """Update the account ID""" updateID(id:ID!):Account """Update the company name""" updateName(name:String!):Void """Apollo admins only: enable or disable an account for PingOne SSO login""" updatePingOneSSOIDPID(idpid:String):Account """Updates the role assigned to new SSO users.""" updateSSODefaultRole(role:UserPermission!):OrganizationSSO """A (currently) internal to Apollo mutation to update a user's role within an organization""" updateUserPermission(permission:UserPermission!userID:ID!):User}"""Columns of AccountOperationCheckStats.""" enum AccountOperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_REQUESTS_COUNT}type AccountOperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String serviceId:ID}"""Filter for data in AccountOperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountOperationCheckStatsFilter{and:[AccountOperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:AccountOperationCheckStatsFilterIn not:AccountOperationCheckStatsFilter or:[AccountOperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. 
To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountOperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountOperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountOperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input AccountOperationCheckStatsOrderBySpec{column:AccountOperationCheckStatsColumn!direction:Ordering!}type AccountOperationCheckStatsRecord{"""Dimensions of AccountOperationCheckStats that can be grouped by.""" groupBy:AccountOperationCheckStatsDimensions!"""Metrics of AccountOperationCheckStats that can be aggregated over.""" metrics:AccountOperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountPublishesStatsMetrics{totalPublishes:Long!}type AccountPublishesStatsRecord{id:ID!metrics:AccountPublishesStatsMetrics!timestamp:Timestamp!}"""Columns of AccountQueryStats.""" enum AccountQueryStatsColumn{CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type AccountQueryStatsDimensions{clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountQueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountQueryStatsFilter{and:[AccountQueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. 
To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:AccountQueryStatsFilterIn not:AccountQueryStatsFilter or:[AccountQueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountQueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountQueryStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountQueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input AccountQueryStatsOrderBySpec{column:AccountQueryStatsColumn!direction:Ordering!}type AccountQueryStatsRecord{"""Dimensions of AccountQueryStats that can be grouped by.""" groupBy:AccountQueryStatsDimensions!"""Metrics of AccountQueryStats that can be aggregated over.""" metrics:AccountQueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountRoles{canAudit:Boolean!canCreateDevGraph:Boolean!canCreateService:Boolean!canDelete:Boolean!canDownloadInvoice:Boolean!@deprecated(reason:"Use canQueryBillingInfo instead")canManageMembers:Boolean!canQuery:Boolean!canQueryAudit:Boolean!canQueryBillingInfo:Boolean!canQueryInvoices:Boolean!@deprecated(reason:"Use canQueryBillingInfo instead")canQueryMembers:Boolean!canQueryStats:Boolean!canReadTickets:Boolean!canRemoveMembers:Boolean!canSetConstrainedPlan:Boolean!canUpdateBillingInfo:Boolean!canUpdateMetadata:Boolean!}enum AccountState{ACTIVE CLOSED UNKNOWN UNPROVISIONED}"""A time window with a specified granularity over a given account.""" type AccountStatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:AccountBillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountBillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountBillingUsageStatsOrderBySpec!]):[AccountBillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:AccountEdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountEdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountEdgeServerInfosOrderBySpec!]):[AccountEdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:AccountErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountErrorStatsOrderBySpec!]):[AccountErrorStatsRecord!]!fieldExecutions("""Filter to select what rows to return.""" filter:AccountFieldExecutionsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldExecutions by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
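The `limit` and `orderBy` arguments described here repeat on every stats field of the window. A hedged sketch of a sorted `queryStats` selection; the `Ordering` enum is not part of this hunk, so its `ASCENDING`/`DESCENDING` values are assumed from the "ascending timestamp" default mentioned above:

    queryStats(
      limit: 100
      orderBy: [
        { column: TIMESTAMP, direction: DESCENDING }  # highest priority (assumed Ordering value)
        { column: QUERY_NAME, direction: ASCENDING }  # tie-breaker, lower priority
      ]
    ) {
      groupBy { queryName clientName }
      metrics { totalRequestCount requestsWithErrorsCount }
      timestamp
    }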
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldExecutionsOrderBySpec!]):[AccountFieldExecutionsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:AccountFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldLatenciesOrderBySpec!]):[AccountFieldLatenciesRecord!]!fieldRequestsByClientVersion("""Filter to select what rows to return.""" filter:AccountFieldRequestsByClientVersionFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldRequestsByClientVersion by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldRequestsByClientVersionOrderBySpec!]):[AccountFieldRequestsByClientVersionRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:AccountFieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldUsageOrderBySpec!]):[AccountFieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:AccountOperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountOperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountOperationCheckStatsOrderBySpec!]):[AccountOperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:AccountQueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountQueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountQueryStatsOrderBySpec!]):[AccountQueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:AccountTracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountTracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountTracePathErrorsRefsOrderBySpec!]):[AccountTracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:AccountTraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountTraceRefs by. 
The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountTraceRefsOrderBySpec!]):[AccountTraceRefsRecord!]!}"""Columns of AccountTracePathErrorsRefs.""" enum AccountTracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type AccountTracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in AccountTracePathErrorsRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountTracePathErrorsRefsFilter{and:[AccountTracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:AccountTracePathErrorsRefsFilterIn not:AccountTracePathErrorsRefsFilter or:[AccountTracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in AccountTracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountTracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type AccountTracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input AccountTracePathErrorsRefsOrderBySpec{column:AccountTracePathErrorsRefsColumn!direction:Ordering!}type AccountTracePathErrorsRefsRecord{"""Dimensions of AccountTracePathErrorsRefs that can be grouped by.""" groupBy:AccountTracePathErrorsRefsDimensions!"""Metrics of AccountTracePathErrorsRefs that can be aggregated over.""" metrics:AccountTracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountTraceRefs.""" enum AccountTraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type AccountTraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID traceId:ID}"""Filter for data in AccountTraceRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountTraceRefsFilter{and:[AccountTraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. 
To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:AccountTraceRefsFilterIn not:AccountTraceRefsFilter or:[AccountTraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in AccountTraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountTraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceId dimension is in the given list. 
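Besides the per-dimension fields, each filter input exposes `and`, `or`, and `not`, so predicates nest into arbitrary boolean trees. A minimal sketch using `AccountTraceRefsFilter` (field names as defined above; the enclosing query is assumed):

    traceRefs(filter: {
      and: [
        { clientName: "ios" }                   # equality on one dimension
        { not: { in: { queryName: [null] } } }  # drop rows whose queryName is null
      ]
    }) {
      groupBy { queryName traceId }
      metrics { durationNs traceSizeBytes }
    }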
A null value in the list means a row with null for that dimension.""" traceId:[ID]}type AccountTraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input AccountTraceRefsOrderBySpec{column:AccountTraceRefsColumn!direction:Ordering!}type AccountTraceRefsRecord{"""Dimensions of AccountTraceRefs that can be grouped by.""" groupBy:AccountTraceRefsDimensions!"""Metrics of AccountTraceRefs that can be aggregated over.""" metrics:AccountTraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""An actor (view of Identity) that performed an action within Studio.""" type Actor{actorId:ID!type:ActorType!}input ActorInput{actorId:ID!type:ActorType!}enum ActorType{ANONYMOUS_USER BACKFILL CRON GRAPH INTERNAL_IDENTITY SYNCHRONIZATION SYSTEM USER}union AddOperationCollectionEntryResult=OperationCollectionEntry|PermissionError|ValidationError union AddOperationCollectionToVariantResult=GraphVariant|InvalidTarget|PermissionError|ValidationError type AffectedClient{"""ID, often the name, of the client set by the user and reported alongside metrics""" clientReferenceId:ID@deprecated(reason:"Unsupported.")"""version of the client set by the user and reported alongside metrics""" clientVersion:String@deprecated(reason:"Unsupported.")}type AffectedQuery{"""If the operation would be approved if the check ran again. Returns null if queried from SchemaDiff.changes.affectedQueries.alreadyApproved""" alreadyApproved:Boolean """If the operation would be ignored if the check ran again""" alreadyIgnored:Boolean """List of changes affecting this query. Returns null if queried from SchemaDiff.changes.affectedQueries.changes""" changes:[ChangeOnOperation!]"""Name to display to the user for the operation""" displayName:String id:ID!"""Determines if this query validates against the proposed schema""" isValid:Boolean """Whether this operation was ignored and its severity was downgraded for that reason""" markedAsIgnored:Boolean """Whether the changes were marked as safe and its severity was downgraded for that reason""" markedAsSafe:Boolean """Name provided for the operation, which can be empty string if it is an anonymous operation""" name:String """First 128 characters of query signature for display""" signature:String}interface ApiKey{id:ID!keyName:String token:String!}type ApiKeyProvision{apiKey:ApiKey!created:Boolean!}type AuditLogExport{actors:[Identity!]bigqueryTriggeredAt:Timestamp completedAt:Timestamp createdAt:Timestamp!exportedFiles:[String!]from:Timestamp!graphs:[Service!]id:ID!requester:User status:AuditStatus!to:Timestamp!}type AuditLogExportMutation{cancel:Account delete:Account}enum AuditStatus{CANCELLED COMPLETED EXPIRED FAILED IN_PROGRESS QUEUED}type AvatarDeleteError{clientMessage:String!code:AvatarDeleteErrorCode!serverMessage:String!}enum AvatarDeleteErrorCode{SSO_USERS_CANNOT_DELETE_SELF_AVATAR}type AvatarUploadError{clientMessage:String!code:AvatarUploadErrorCode!serverMessage:String!}enum AvatarUploadErrorCode{SSO_USERS_CANNOT_UPLOAD_SELF_AVATAR}union AvatarUploadResult=AvatarUploadError|MediaUploadInfo type BillingAddress{address1:String address2:String city:String country:String state:String zip:String}"""Billing address input""" input BillingAddressInput{address1:String!address2:String city:String!country:String!state:String!zip:String!}type BillingInfo{address:BillingAddress!cardType:String firstName:String lastFour:Int lastName:String month:Int vatNumber:String year:Int}enum BillingModel{REQUEST_BASED SEAT_BASED}type
BillingMonth{end:Timestamp!requests:Long!start:Timestamp!}enum BillingPeriod{MONTHLY QUARTERLY SEMI_ANNUALLY YEARLY}type BillingPlan{addons:[BillingPlanAddon!]!billingModel:BillingModel!billingPeriod:BillingPeriod capabilities:BillingPlanCapabilities!description:String id:ID!isTrial:Boolean!kind:BillingPlanKind!name:String!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of subscribing to this plan with a quantity of 1 (currently always the case)""" pricePerUnitInUsdCents:Int!"""Whether the plan is accessible by all users in QueryRoot.allPlans, QueryRoot.plan, or AccountMutation.setPlan""" public:Boolean!tier:BillingPlanTier!}type BillingPlanAddon{id:ID!pricePerUnitInUsdCents:Int!}type BillingPlanAddonV2{id:ID!pricePerUnitInUsdCents:Int!}type BillingPlanCapabilities{clients:Boolean!contracts:Boolean!datadog:Boolean!errors:Boolean!federation:Boolean!launches:Boolean!maxAuditInDays:Int!maxRangeInDays:Int maxRequestsPerMonth:Long metrics:Boolean!notifications:Boolean!operationRegistry:Boolean!ranges:[String!]!schemaValidation:Boolean!traces:Boolean!userRoles:Boolean!webhooks:Boolean!}enum BillingPlanKind{COMMUNITY ENTERPRISE_INTERNAL ENTERPRISE_PAID ENTERPRISE_PILOT TEAM_PAID TEAM_TRIAL}enum BillingPlanKindV2{COMMUNITY ENTERPRISE_INTERNAL ENTERPRISE_PAID ENTERPRISE_PILOT TEAM_PAID TEAM_TRIAL UNKNOWN}enum BillingPlanTier{COMMUNITY ENTERPRISE TEAM}enum BillingPlanTierV2{COMMUNITY ENTERPRISE TEAM UNKNOWN}type BillingPlanV2{addons:[BillingPlanAddonV2!]!billingModel:BillingModel!billingPeriod:BillingPeriod clients:Boolean!contracts:Boolean!datadog:Boolean!description:String errors:Boolean!federation:Boolean!id:ID!isTrial:Boolean!kind:BillingPlanKindV2!launches:Boolean!maxAuditInDays:Int!maxRangeInDays:Int maxRequestsPerMonth:Long metrics:Boolean!name:String!notifications:Boolean!operationRegistry:Boolean!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of subscribing to this plan with a quantity of 1 (currently always the case)""" pricePerUnitInUsdCents:Int!"""Whether the plan is accessible by all users in QueryRoot.allPlans, QueryRoot.plan, or AccountMutation.setPlan""" public:Boolean!ranges:[String!]!schemaValidation:Boolean!tier:BillingPlanTierV2!traces:Boolean!userRoles:Boolean!webhooks:Boolean!}type BillingSubscription{activatedAt:Timestamp!addons:[BillingSubscriptionAddon!]!autoRenew:Boolean!"""The price of the subscription when ignoring add-ons (such as seats), ie quantity * pricePerUnitInUsdCents""" basePriceInUsdCents:Long!canceledAt:Timestamp currentPeriodEndsAt:Timestamp!currentPeriodStartedAt:Timestamp!expiresAt:Timestamp plan:BillingPlan!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of every unit in the subscription (hence multiplied by quantity to get to the basePriceInUsdCents)""" pricePerUnitInUsdCents:Int!quantity:Int!"""Total price of the subscription when it next renews, including add-ons (such as seats)""" renewalTotalPriceInUsdCents:Long!state:SubscriptionState!"""Total price of the subscription, including add-ons (such as seats)""" totalPriceInUsdCents:Long!"""When this subscription's trial period expires (if it is a trial). 
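To make the subscription pricing fields concrete, a worked example with illustrative numbers only (none of these values come from the schema):

    # basePriceInUsdCents = quantity * pricePerUnitInUsdCents
    #   e.g. 5 * 1200 = 6000 cents ($60.00), ignoring add-ons
    # totalPriceInUsdCents then adds the add-on prices (such as seats),
    # and renewalTotalPriceInUsdCents is the same total at the next renewal.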
Not the same as the subscription's Recurly expiration.""" trialExpiresAt:Timestamp uuid:ID!}type BillingSubscriptionAddon{id:ID!pricePerUnitInUsdCents:Int!quantity:Int!}type BillingSubscriptionAddonV2{id:ID!pricePerUnitInUsdCents:Int!quantity:Int!}type BillingSubscriptionV2{"""The price of every unit in the subscription (hence multiplied by quantity to get to the basePriceInUsdCents)""" activatedAt:Timestamp!addons:[BillingSubscriptionAddonV2!]!autoRenew:Boolean!canceledAt:Timestamp currentPeriodEndsAt:Timestamp!currentPeriodStartedAt:Timestamp!expiresAt:Timestamp plan:BillingPlanV2!"""The price of every seat""" pricePerSeatInUsdCents:Int quantity:Int!state:SubscriptionStateV2!"""When this subscription's trial period expires (if it is a trial). Not the same as the subscription's Recurly expiration.""" trialExpiresAt:Timestamp uuid:ID!}"""Columns of BillingUsageStats.""" enum BillingUsageStatsColumn{ACCOUNT_ID OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG SERVICE_ID TIMESTAMP}type BillingUsageStatsDimensions{accountId:ID operationCountProvidedExplicitly:String schemaTag:String serviceId:ID}"""Filter for data in BillingUsageStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input BillingUsageStatsFilter{"""Selects rows whose accountId dimension equals the given value if not null. To query for the null value, use {in: {accountId: [null]}} instead.""" accountId:ID and:[BillingUsageStatsFilter!]in:BillingUsageStatsFilterIn not:BillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[BillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in BillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input BillingUsageStatsFilterIn{"""Selects rows whose accountId dimension is in the given list. A null value in the list means a row with null for that dimension.""" accountId:[ID]"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list.
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type BillingUsageStatsMetrics{operationCount:Long!}input BillingUsageStatsOrderBySpec{column:BillingUsageStatsColumn!direction:Ordering!}type BillingUsageStatsRecord{"""Dimensions of BillingUsageStats that can be grouped by.""" groupBy:BillingUsageStatsDimensions!"""Metrics of BillingUsageStats that can be aggregated over.""" metrics:BillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""A blob (base64'ed in JSON & GraphQL)""" scalar Blob type Build{input:BuildInput!result:BuildResult}type BuildError{code:String locations:[SourceLocation!]!message:String!}type BuildFailure{errorMessages:[BuildError!]!}union BuildInput=CompositionBuildInput|FilterBuildInput union BuildResult=BuildFailure|BuildSuccess type BuildSuccess{coreSchema:CoreSchema!}enum CacheScope{PRIVATE PUBLIC UNKNOWN UNRECOGNIZED}"""A specific change to a definition in your schema.""" type Change{affectedQueries:[AffectedQuery!]"""Target arg of change made.""" argNode:NamedIntrospectionArg """Indication of the category of the change (e.g. addition, removal, edit).""" category:ChangeCategory!"""Node related to the top level node that was changed, such as a field in an object, a value in an enum or the object of an interface.""" childNode:NamedIntrospectionValue """Indication of the kind of target and action of the change, e.g. 'TYPE_REMOVED'.""" code:String!"""Human-readable description of the change.""" description:String!"""Top level node affected by the change.""" parentNode:NamedIntrospectionType """Severity of the change, either failure or warning.""" severity:ChangeSeverity!"""Indication of the success of the overall change, either failure, warning, or notice.""" type:ChangeType!@deprecated(reason:"use severity instead")}"""Defines a set of categories that a schema change can be grouped by.""" enum ChangeCategory{ADDITION DEPRECATION EDIT REMOVAL}"""These schema change codes represent all of the possible changes that can occur during the schema diff algorithm.""" enum ChangeCode{"""Type of the argument was changed.""" ARG_CHANGED_TYPE """Argument was changed from nullable to non-nullable.""" ARG_CHANGED_TYPE_OPTIONAL_TO_REQUIRED """Default value added or changed for the argument.""" ARG_DEFAULT_VALUE_CHANGE """Description was added, removed, or updated for argument.""" ARG_DESCRIPTION_CHANGE """Argument to a field was removed.""" ARG_REMOVED """Argument to the directive was removed.""" DIRECTIVE_ARG_REMOVED """Location of the directive was removed.""" DIRECTIVE_LOCATION_REMOVED """Directive was removed.""" DIRECTIVE_REMOVED """Repeatable flag was removed for directive.""" DIRECTIVE_REPEATABLE_REMOVED """Enum was deprecated.""" ENUM_DEPRECATED """Reason for enum deprecation changed.""" ENUM_DEPRECATED_REASON_CHANGE """Enum deprecation was removed.""" ENUM_DEPRECATION_REMOVED """Description was added, removed, or updated for enum value.""" ENUM_VALUE_DESCRIPTION_CHANGE """Field was added to the type.""" FIELD_ADDED """Return type for the field was changed.""" FIELD_CHANGED_TYPE """Field was deprecated.""" FIELD_DEPRECATED """Reason for field deprecation changed.""" FIELD_DEPRECATED_REASON_CHANGE """Field deprecation removed.""" FIELD_DEPRECATION_REMOVED """Description was added, removed, or updated for field.""" FIELD_DESCRIPTION_CHANGE """Type of the field in the input object was changed.""" FIELD_ON_INPUT_OBJECT_CHANGED_TYPE """Field was removed from the type.""" FIELD_REMOVED """Field was removed from the
input object.""" FIELD_REMOVED_FROM_INPUT_OBJECT """Non-nullable field was added to the input object. (Deprecated.)""" NON_NULLABLE_FIELD_ADDED_TO_INPUT_OBJECT """Nullable field was added to the input type. (Deprecated.)""" NULLABLE_FIELD_ADDED_TO_INPUT_OBJECT """Nullable argument was added to the field.""" OPTIONAL_ARG_ADDED """Optional field was added to the input type.""" OPTIONAL_FIELD_ADDED_TO_INPUT_OBJECT """Non-nullable argument was added to the field.""" REQUIRED_ARG_ADDED """Non-nullable argument added to directive.""" REQUIRED_DIRECTIVE_ARG_ADDED """Required field was added to the input object.""" REQUIRED_FIELD_ADDED_TO_INPUT_OBJECT """Type was added to the schema.""" TYPE_ADDED """Type now implements the interface.""" TYPE_ADDED_TO_INTERFACE """A new value was added to the enum.""" TYPE_ADDED_TO_UNION """Type was changed from one kind to another. -Ex: scalar to object or enum to union.""" TYPE_CHANGED_KIND """Description was added, removed, or updated for type.""" TYPE_DESCRIPTION_CHANGE """Type (object or scalar) was removed from the schema.""" TYPE_REMOVED """Type no longer implements the interface.""" TYPE_REMOVED_FROM_INTERFACE """Type is no longer included in the union.""" TYPE_REMOVED_FROM_UNION """A new value was added to the enum.""" VALUE_ADDED_TO_ENUM """Value was removed from the enum.""" VALUE_REMOVED_FROM_ENUM}"""Represents the tuple of static information -about a particular kind of schema change.""" type ChangeDefinition{category:ChangeCategory!code:ChangeCode!defaultSeverity:ChangeSeverity!}"""Info about a change in the context of an operation it affects""" type ChangeOnOperation{"""Human-readable explanation of the impact of this change on the operation""" impact:String """The semantic info about this change, i.e. info about the change that doesn't depend on the operation""" semanticChange:SemanticChange!}enum ChangeSeverity{FAILURE NOTICE}"""Summary of the changes for a schema diff, computed by placing the changes into categories and then +schema { + query: Query + mutation: Mutation +} + +"""An organization in Apollo Studio. Can have multiple members and graphs.""" +type Organization { + auditLogExports: [AuditLogExport!] + """Graphs belonging to this organization.""" + graphs(includeDeleted: Boolean): [Graph!]! + """Globally unique identifier, which isn't guaranteed stable (can be changed by administrators).""" + id: ID! + """Name of the organization, which can change over time and isn't unique.""" + name: String! + """Graphs belonging to this organization.""" + services(includeDeleted: Boolean): [Graph!]! @deprecated(reason: "Use graphs field instead") +} + +type OrganizationMutation { + """Trigger a request for an audit export""" + requestAuditExport(actors: [ActorInput!], from: Timestamp!, graphIds: [String!], to: Timestamp!): Organization +} + +"""Represents an actor that performs actions in Apollo Studio. Most actors are either a `USER` or a `GRAPH` (based on a request's provided API key), and they have the corresponding `ActorType`.""" +type Actor { + actorId: ID! + type: ActorType! +} + +"""Input type to provide when specifying an `Actor` in operation arguments. See also the `Actor` object type.""" +input ActorInput { + actorId: ID! + type: ActorType! 
+} + +enum ActorType { + ANONYMOUS_USER + BACKFILL + CRON + GRAPH + INTERNAL_IDENTITY + SYNCHRONIZATION + SYSTEM + USER +} + +union AddOperationCollectionEntriesResult = AddOperationCollectionEntriesSuccess | PermissionError | ValidationError + +type AddOperationCollectionEntriesSuccess { + operationCollectionEntries: [OperationCollectionEntry!]! +} + +union AddOperationCollectionEntryResult = OperationCollectionEntry | PermissionError | ValidationError + +input AddOperationInput { + """The operation's fields.""" + document: OperationCollectionEntryStateInput! + """The operation's name.""" + name: String! +} + +type AffectedQuery { + id: ID! + """First 128 characters of query signature for display""" + signature: String + """Name to display to the user for the operation""" + displayName: String + """Name provided for the operation, which can be empty string if it is an anonymous operation""" + name: String + """Determines if this query validates against the proposed schema""" + isValid: Boolean + """List of changes affecting this query. Returns null if queried from SchemaDiff.changes.affectedQueries.changes""" + changes: [ChangeOnOperation!] + """Whether this operation was ignored and its severity was downgraded for that reason""" + markedAsIgnored: Boolean + """Whether the changes were marked as safe and its severity was downgraded for that reason""" + markedAsSafe: Boolean + """If the operation would be approved if the check ran again. Returns null if queried from SchemaDiff.changes.affectedQueries.alreadyApproved""" + alreadyApproved: Boolean + """If the operation would be ignored if the check ran again""" + alreadyIgnored: Boolean +} + +""" +Represents an API key that's used to authenticate a +particular Apollo user or graph. +""" +interface ApiKey { + """The API key's ID.""" + id: ID! + """The API key's name, for distinguishing it from other keys.""" + keyName: String + """The value of the API key. **This is a secret credential!**""" + token: String! +} + +type ApiKeyProvision { + apiKey: ApiKey! + created: Boolean! +} + +type AuditLogExport { + """The list of actors to filter the audit export""" + actors: [Identity!] + """The time when the audit export was completed""" + completedAt: Timestamp + """The time when the audit export was requested""" + createdAt: Timestamp! + """List of URLs to download the audits for the requested range""" + downloadUrls: [String!] + """The starting point of audits to include in export""" + from: Timestamp! + """The list of graphs to filter the audit export""" + graphs: [Graph!] + """The id for the audit export""" + id: ID! + """The user that initiated the audit export""" + requester: User + """The status of the audit export""" + status: AuditStatus! + """The end point of audits to include in export""" + to: Timestamp! +} + +enum AuditStatus { + CANCELLED + COMPLETED + EXPIRED + FAILED + IN_PROGRESS + QUEUED +} + +"""The building of a Studio variant (including supergraph composition and any contract filtering) as part of a launch.""" +type Build { + """The inputs provided to the build, including subgraph and contract details.""" + input: BuildInput! + """The result of the build. This value is null until the build completes.""" + result: BuildResult +} + +"""A single error that occurred during the failed execution of a build.""" +type BuildError { + code: String + locations: [SourceLocation!]! + message: String!
+} + +"""Contains the details of an executed build that failed.""" +type BuildFailure { + """A list of all errors that occurred during the failed build.""" + errorMessages: [BuildError!]! +} + +union BuildInput = CompositionBuildInput | FilterBuildInput + +union BuildResult = BuildFailure | BuildSuccess + +"""Contains the details of an executed build that succeeded.""" +type BuildSuccess { + """Contains the supergraph and API schemas created by composition.""" + coreSchema: CoreSchema! +} + +"""A single change that was made to a definition in a schema.""" +type Change { + """The severity of the change (e.g., `FAILURE` or `NOTICE`)""" + severity: ChangeSeverity! + """Indicates the type of change that was made, and to what (e.g., 'TYPE_REMOVED').""" + code: String! + """Indication of the category of the change (e.g. addition, removal, edit).""" + category: ChangeCategory! + """A human-readable description of the change.""" + description: String! + affectedQueries: [AffectedQuery!] + """Top level node affected by the change.""" + parentNode: NamedIntrospectionType + """ + Node related to the top level node that was changed, such as a field in an object, + a value in an enum or the object of an interface. + """ + childNode: NamedIntrospectionValue + """Target arg of change made.""" + argNode: NamedIntrospectionArg +} + +""" +Defines a set of categories that a schema change +can be grouped by. +""" +enum ChangeCategory { + ADDITION + EDIT + REMOVAL + DEPRECATION +} + +""" +These schema change codes represent all of the possible changes that can +occur during the schema diff algorithm. +""" +enum ChangeCode { + """Field was removed from the type.""" + FIELD_REMOVED + """Type (object or scalar) was removed from the schema.""" + TYPE_REMOVED + """Argument to a field was removed.""" + ARG_REMOVED + """Type is no longer included in the union.""" + TYPE_REMOVED_FROM_UNION + """Field was removed from the input object.""" + FIELD_REMOVED_FROM_INPUT_OBJECT + """Value was removed from the enum.""" + VALUE_REMOVED_FROM_ENUM + """Type no longer implements the interface.""" + TYPE_REMOVED_FROM_INTERFACE + """Non-nullable argument was added to the field.""" + REQUIRED_ARG_ADDED + """Non-nullable field was added to the input object. (Deprecated.)""" + NON_NULLABLE_FIELD_ADDED_TO_INPUT_OBJECT + """Required field was added to the input object.""" + REQUIRED_FIELD_ADDED_TO_INPUT_OBJECT + """Return type for the field was changed.""" + FIELD_CHANGED_TYPE + """Type of the field in the input object was changed.""" + FIELD_ON_INPUT_OBJECT_CHANGED_TYPE + """ + Type was changed from one kind to another. + Ex: scalar to object or enum to union. + """ + TYPE_CHANGED_KIND + """Type of the argument was changed.""" + ARG_CHANGED_TYPE + """Argument was changed from nullable to non-nullable.""" + ARG_CHANGED_TYPE_OPTIONAL_TO_REQUIRED + """A new value was added to the enum.""" + VALUE_ADDED_TO_ENUM + """Type was added to the union.""" + TYPE_ADDED_TO_UNION + """Type now implements the interface.""" + TYPE_ADDED_TO_INTERFACE + """Default value added or changed for the argument.""" + ARG_DEFAULT_VALUE_CHANGE + """Nullable argument was added to the field.""" + OPTIONAL_ARG_ADDED + """Nullable field was added to the input type.
(Deprecated.)""" + NULLABLE_FIELD_ADDED_TO_INPUT_OBJECT + """Optional field was added to the input type.""" + OPTIONAL_FIELD_ADDED_TO_INPUT_OBJECT + """Field was added to the type.""" + FIELD_ADDED + """Type was added to the schema.""" + TYPE_ADDED + """Enum was deprecated.""" + ENUM_DEPRECATED + """Enum deprecation was removed.""" + ENUM_DEPRECATION_REMOVED + """Reason for enum deprecation changed.""" + ENUM_DEPRECATED_REASON_CHANGE + """Field was deprecated.""" + FIELD_DEPRECATED + """Field deprecation removed.""" + FIELD_DEPRECATION_REMOVED + """Reason for field deprecation changed.""" + FIELD_DEPRECATED_REASON_CHANGE + """Description was added, removed, or updated for type.""" + TYPE_DESCRIPTION_CHANGE + """Description was added, removed, or updated for field.""" + FIELD_DESCRIPTION_CHANGE + """Description was added, removed, or updated for enum value.""" + ENUM_VALUE_DESCRIPTION_CHANGE + """Description was added, removed, or updated for argument.""" + ARG_DESCRIPTION_CHANGE + """Directive was removed.""" + DIRECTIVE_REMOVED + """Argument to the directive was removed.""" + DIRECTIVE_ARG_REMOVED + """Location of the directive was removed.""" + DIRECTIVE_LOCATION_REMOVED + """Repeatable flag was removed for directive.""" + DIRECTIVE_REPEATABLE_REMOVED + """Non-nullable argument added to directive.""" + REQUIRED_DIRECTIVE_ARG_ADDED +} + +""" +Represents the tuple of static information +about a particular kind of schema change. +""" +type ChangeDefinition { + code: ChangeCode! + defaultSeverity: ChangeSeverity! + category: ChangeCategory! +} + +"""An addition made to a Studio variant's changelog after a launch.""" +type ChangelogLaunchResult { + createdAt: Timestamp! + schemaTagID: ID! +} + +"""Info about a change in the context of an operation it affects""" +type ChangeOnOperation { + """The semantic info about this change, i.e. info about the change that doesn't depend on the operation""" + semanticChange: SemanticChange! + """Human-readable explanation of the impact of this change on the operation""" + impact: String +} + +enum ChangeSeverity { + FAILURE + NOTICE +} + +""" +Summary of the changes for a schema diff, computed by placing the changes into categories and then counting the size of each category. This categorization can be done in different ways, and accordingly there are multiple fields here for each type of categorization. @@ -22,54 +321,1130 @@ Note that if an object or interface field is added/removed, there won't be any addition/removal changes generated for its arguments or @deprecated usages. If an enum type is added/removed, there will be addition/removal changes generated for its values, but not for those values' @deprecated usages.
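# Editor's note (illustrative aside, not part of the upstream schema file): a client
# reading check results could select the Change fields defined above with a fragment
# like this sketch; the fragment name is invented, the fields are not.
#   fragment ChangeInfo on Change {
#     severity      # FAILURE or NOTICE
#     code          # e.g. "FIELD_REMOVED"
#     category      # ADDITION, EDIT, REMOVAL, or DEPRECATION
#     description
#   }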
Description changes won't be generated for a schema element if that element (or an -ancestor) was added/removed.""" type ChangeSummary{"""Counts for changes to fields of objects, input objects, and interfaces.""" field:FieldChangeSummaryCounts!"""Counts for all changes.""" total:TotalChangeSummaryCounts!"""Counts for changes to non-field aspects of objects, input objects, and interfaces, -and all aspects of enums, unions, and scalars.""" type:TypeChangeSummaryCounts!}enum ChangeType{FAILURE NOTICE}type ChangelogLaunchResult{createdAt:Timestamp!schemaTagID:ID!}"""Destination for notifications""" interface Channel{id:ID!name:String!subscriptions:[ChannelSubscription!]!}interface ChannelSubscription{channels:[Channel!]!enabled:Boolean!id:ID!variant:String}type CheckConfiguration{"""Time when check configuration was created""" createdAt:Timestamp!"""Clients to ignore during validation""" excludedClients:[ClientFilter!]!"""Operation names to ignore during validation""" excludedOperationNames:[OperationNameFilter]"""Operations to ignore during validation""" excludedOperations:[ExcludedOperation!]!"""Graph that this check configuration belongs to""" graphID:ID!"""ID of the check configuration""" id:ID!"""Default configuration to include operations on the base variant.""" includeBaseVariant:Boolean!"""Variant overrides for validation""" includedVariants:[String!]!"""Minimum number of requests within the window for an operation to be considered.""" operationCountThreshold:Int!"""Number of requests within the window for an operation to be considered, relative to -total request count. Expected values are between 0 and 0.05 (minimum 5% of -total request volume)""" operationCountThresholdPercentage:Float!"""Only check operations from the last seconds. -The default is 7 days (604,800 seconds).""" timeRangeSeconds:Long!"""Time when check configuration was last updated""" updatedAt:Timestamp!"""Identity of the last user to update the check configuration""" updatedBy:Identity}"""Filter options available when listing checks.""" input CheckFilterInput{authors:[String!]branches:[String!]status:CheckFilterInputStatusOption subgraphs:[String!]}"""Options for filtering CheckWorkflows by status""" enum CheckFilterInputStatusOption{FAILED PASSED PENDING}"""The result of performing a subgraph check, including all steps.""" type CheckPartialSchemaResult{"""Overall result of the check. This will be null if composition validation was unsuccessful.""" checkSchemaResult:CheckSchemaResult """Result of composition run as part of the overall subgraph check.""" compositionValidationResult:CompositionValidationResult!"""Whether any modifications were detected in the composed core schema.""" coreSchemaModified:Boolean!"""Check workflow associated with the overall subgraph check.""" workflow:CheckWorkflow}type CheckSchemaResult{"""Schema diff and affected operations generated by the schema check""" diffToPrevious:SchemaDiff!"""ID of the operations check that was created""" operationsCheckID:ID!"""Generated url to view schema diff in Engine""" targetUrl:String """Workflow associated with this check result""" workflow:CheckWorkflow}type CheckWorkflow{"""The variant provided as a base to check against.
Only the differences from the -base schema will be tested in operations checks.""" baseVariant:GraphVariant completedAt:Timestamp createdAt:Timestamp!"""Contextual parameters supplied by the runtime environment where the check was run.""" gitContext:GitContext id:ID!"""The name of the implementing service that was responsible for triggering the validation.""" implementingServiceName:String """If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served.""" introspectionEndpoint:String """Only true if the check was triggered from Sandbox Checks page.""" isSandboxCheck:Boolean!"""If this check was created by rerunning, the original check that was rerun.""" rerunOf:CheckWorkflow """Checks created by re-running this check, most recent first.""" reruns(limit:Int!=20):[CheckWorkflow!]startedAt:Timestamp """Overall status of the workflow, based on the underlying task statuses.""" status:CheckWorkflowStatus!"""The set of check tasks associated with this workflow, e.g. OperationsCheck, GraphComposition, etc.""" tasks:[CheckWorkflowTask!]!"""Identity of the user who ran this check""" triggeredBy:Identity """Configuration of validation at the time the check was run.""" validationConfig:SchemaDiffValidationConfig}type CheckWorkflowMutation{"""Re-run a check workflow using the current configuration. A new workflow is created and returned.""" rerun:CheckWorkflowRerunResult}type CheckWorkflowRerunResult{"""Check workflow created by re-running.""" result:CheckWorkflow """Check workflow that was rerun.""" source:CheckWorkflow}enum CheckWorkflowStatus{FAILED PASSED PENDING}interface CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!status:CheckWorkflowTaskStatus!"""The workflow that this task belongs to.""" workflow:CheckWorkflow!}enum CheckWorkflowTaskStatus{BLOCKED FAILED PASSED PENDING}"""Client filter configuration for a graph.""" type ClientFilter{"""name of the client set by the user and reported alongside metrics""" name:String """version of the client set by the user and reported alongside metrics""" version:String}"""Options to filter by client reference ID, client name, and client version. -If passing client version, make sure to either provide a client reference ID or client name.""" input ClientFilterInput{"""name of the client set by the user and reported alongside metrics""" name:String """version of the client set by the user and reported alongside metrics""" version:String}"""Filter options to exclude by client reference ID, client name, and client version.""" input ClientInfoFilter{name:String """Ignored""" referenceID:ID version:String}"""Filter options to exclude clients. Used as an output type for SchemaDiffValidationConfig.""" type ClientInfoFilterOutput{name:String version:String}enum ComparisonOperator{EQUALS GREATER_THAN GREATER_THAN_OR_EQUAL_TO LESS_THAN LESS_THAN_OR_EQUAL_TO NOT_EQUALS UNRECOGNIZED}"""The result of composition run in the cloud, upon an attempted subgraph deletion.""" type CompositionAndRemoveResult{"""The produced composition config. Will be null if there are any errors""" compositionConfig:CompositionConfig """Whether the removed implementing service existed.""" didExist:Boolean!""" List of errors during composition. Errors mean that Apollo was unable to compose the - graph variant's subgraphs into a GraphQL schema. 
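# Editor's note (illustrative aside, not part of the upstream schema file): a deploy
# script consuming the subgraph publish result defined just below could select, for
# example, the following; SchemaCompositionError's fields are defined elsewhere in
# this schema, so the `message` selection here is an assumption.
#   fragment PublishOutcome on CompositionAndUpsertResult {
#     graphCompositionID
#     updatedGateway   # false when composition errors blocked the update
#     launchUrl
#     errors { message }
#   }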
If present, gateways / routers -are not updated.""" errors:[SchemaCompositionError]!"""ID that points to the results of composition.""" graphCompositionID:String!"""List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Whether the gateway/router was updated via Uplink, or would have been for dry runs.""" updatedGateway:Boolean!}"""The result of composition run in the cloud, upon attempted publish of a subgraph.""" type CompositionAndUpsertResult{"""The produced composition config, or null if there are any errors.""" compositionConfig:CompositionConfig """List of errors during composition. Errors mean that Apollo was unable to compose the -graph variant's subgraphs into a supergraph schema. If present, gateways / routers -are not updated.""" errors:[SchemaCompositionError]!"""ID that points to the results of composition.""" graphCompositionID:String!"""Copy text for the launch result of a publish.""" launchCliCopy:String """Link to corresponding launches page on Studio if available.""" launchUrl:String """List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Whether the gateway/router was updated via Uplink, or would have been for dry runs.""" updatedGateway:Boolean!"""Whether a subgraph was created as part of this mutation.""" wasCreated:Boolean!"""Whether an implementingService was updated as part of this mutation""" wasUpdated:Boolean!}type CompositionBuildInput{subgraphs:[Subgraph!]!version:String}type CompositionCheckTask implements CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!"""The result of the composition.""" result:CompositionResult status:CheckWorkflowTaskStatus!workflow:CheckWorkflow!}"""Composition configuration exposed to the gateway.""" type CompositionConfig{"""List of GCS links for implementing services that comprise a composed graph. Is empty if tag/inaccessible is enabled.""" implementingServiceLocations:[ImplementingServiceLocation!]!@deprecated(reason:"Soon we will stop writing to GCS locations")"""Hash of the API schema.""" schemaHash:String!}"""The result of composition run in the cloud.""" type CompositionPublishResult implements CompositionResult{"""The produced composition config. Will be null if there are any errors""" compositionConfig:CompositionConfig """Supergraph SDL generated by composition (this is not the CSDL, that is a deprecated format).""" csdl:GraphQLDocument@deprecated(reason:"Use supergraphSdl instead")"""List of errors during composition. Errors mean that Apollo was unable to compose the -graph variant's subgraphs into a supergraph schema. If present, gateways / routers -are not updated.""" errors:[SchemaCompositionError!]!"""ID for a particular composition.""" graphCompositionID:ID!"""List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Supergraph SDL generated by composition.""" supergraphSdl:GraphQLDocument """Whether the gateway/router was updated via Uplink, or would have been for dry runs.""" updatedGateway:Boolean!webhookNotificationBody:String}"""Result of a composition, often as the result of a subgraph check or subgraph publish. -See implementations for more details.""" interface CompositionResult{"""Supergraph SDL generated by composition (this is not the cSDL, a deprecated format).""" csdl:GraphQLDocument@deprecated(reason:"Use supergraphSdl instead")"""List of errors during composition. Errors mean that Apollo was unable to compose the -graph variant's subgraphs into a supergraph schema. 
If present, gateways / routers -are not updated.""" errors:[SchemaCompositionError!]!"""Globally unique identifier for the composition.""" graphCompositionID:ID!"""List of subgraphs included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Supergraph SDL generated by composition.""" supergraphSdl:GraphQLDocument}type CompositionStatusSubscription implements ChannelSubscription{channels:[Channel!]!createdAt:Timestamp!enabled:Boolean!id:ID!lastUpdatedAt:Timestamp!variant:String}"""The composition config exposed to the gateway""" type CompositionValidationDetails{"""List of implementing service partial schemas that comprised the graph composed during validation""" implementingServices:[FederatedImplementingServicePartialSchema!]!"""Hash of the composed schema""" schemaHash:String}"""Metadata about the result of compositions validation run in the cloud, during a subgraph check.""" type CompositionValidationResult implements CompositionResult{"""Describes whether composition succeeded.""" compositionSuccess:Boolean!"""Akin to a composition config, represents the subgraph schemas and corresponding subgraphs that were used -in running composition. Will be null if any errors are encountered. Also may contain a schema hash if -one could be computed, which can be used for schema validation.""" compositionValidationDetails:CompositionValidationDetails """Supergraph SDL generated by composition (this is not the CSDL, that is a deprecated format).""" csdl:GraphQLDocument@deprecated(reason:"Use supergraphSdl instead")"""List of errors during composition. Errors mean that Apollo was unable to compose the -graph variant's subgraphs into a supergraph schema. If present, gateways / routers -are not updated.""" errors:[SchemaCompositionError!]!"""ID that points to the results of this composition.""" graphCompositionID:ID!"""The implementing service that was responsible for triggering the validation""" proposedImplementingService:FederatedImplementingServicePartialSchema!"""List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Supergraph schema document generated by composition.""" supergraphSdl:GraphQLDocument """If created as part of a check workflow, the associated workflow task.""" workflowTask:CompositionCheckTask}type ContractPreview{result:ContractPreviewResult!upstreamLaunch:Launch!}type ContractPreviewErrors{errors:[String!]!failedAt:ContractVariantFailedStep!}union ContractPreviewResult=ContractPreviewErrors|ContractPreviewSuccess type ContractPreviewSuccess{apiDocument:String!coreDocument:String!fieldCount:Int!typeCount:Int!}enum ContractVariantFailedStep{ADD_DIRECTIVE_DEFINITIONS_IF_NOT_PRESENT DIRECTIVE_DEFINITION_LOCATION_AUGMENTING EMPTY_ENUM_MASKING EMPTY_INPUT_OBJECT_MASKING EMPTY_OBJECT_AND_INTERFACE_FIELD_MASKING EMPTY_OBJECT_AND_INTERFACE_MASKING EMPTY_UNION_MASKING INPUT_VALIDATION PARSING PARSING_TAG_DIRECTIVES PARTIAL_INTERFACE_MASKING SCHEMA_RETRIEVAL TAG_INHERITING TAG_MATCHING TO_API_SCHEMA TO_FILTER_SCHEMA UNKNOWN VERSION_CHECK}type ContractVariantPreviewErrors{errorMessages:[String!]!failedStep:ContractVariantFailedStep!}union ContractVariantPreviewResult=ContractVariantPreviewErrors|ContractVariantPreviewSuccess type ContractVariantPreviewSuccess{baseApiSchema:String!baseCoreSchema:String!contractApiSchema:String!contractCoreSchema:String!}type ContractVariantUpsertErrors{errorMessages:[String!]!}union ContractVariantUpsertResult=ContractVariantUpsertErrors|ContractVariantUpsertSuccess type 
ContractVariantUpsertSuccess{contractVariant:GraphVariant!}type CoreSchema{apiDocument:GraphQLDocument!coreDocument:GraphQLDocument!coreHash:String!fieldCount:Int!tags:[String!]!typeCount:Int!}union CreateOperationCollectionResult=OperationCollection|PermissionError|ValidationError type CronExecution{completedAt:Timestamp failure:String id:ID!job:CronJob!resolvedAt:Timestamp resolvedBy:Actor schedule:String!startedAt:Timestamp!}type CronJob{group:String!name:String!recentExecutions(n:Int):[CronExecution!]!}enum DatadogApiRegion{EU EU1 US US1 US1FED US3 US5}type DatadogMetricsConfig{apiKey:String!apiRegion:DatadogApiRegion!enabled:Boolean!legacyMetricNames:Boolean!}union DeleteOperationCollectionResult=DeleteOperationCollectionSuccess|PermissionError type DeleteOperationCollectionSuccess{sandboxOwner:User variants:[GraphVariant!]!}"""The result of attempting to delete a graph variant.""" type DeleteSchemaTagResult{"""Whether a variant was deleted or not.""" deleted:Boolean!}enum DeletionTargetType{ACCOUNT USER}"""Support for a single directive on a graph variant""" type DirectiveSupportStatus{"""whether the directive is supported on the current graph variant""" enabled:Boolean!"""name of the directive""" name:String!}union DuplicateOperationCollectionResult=OperationCollection|PermissionError|ValidationError type DurationHistogram{averageDurationMs:Float buckets:[DurationHistogramBucket!]!durationMs("""Percentile (between 0 and 1)""" percentile:Float!):Float """Counts per durationBucket, where sequences of zeroes are replaced with the negative of their size""" sparseBuckets:[Long!]!totalCount:Long!totalDurationMs:Float!}type DurationHistogramBucket{count:Long!index:Int!rangeBeginMs:Float!rangeEndMs:Float!}input EdgeServerInfo{"""A randomly generated UUID, immutable for the lifetime of the edge server runtime.""" bootId:String!"""A unique identifier for the executable GraphQL served by the edge server. length must be <= 64 characters.""" executableSchemaId:String!"""The graph variant, defaults to 'current'""" graphVariant:String!="current" """The version of the edge server reporting agent, e.g. apollo-server-2.8, graphql-java-3.1, etc. length must be <= 256 characters.""" libraryVersion:String """The infra environment in which this edge server is running, e.g. localhost, Kubernetes, AWS Lambda, Google CloudRun, AWS ECS, etc. length must be <= 256 characters.""" platform:String """The runtime in which the edge server is running, e.g. node 12.03, zulu8.46.0.19-ca-jdk8.0.252-macosx_x64, etc. length must be <= 256 characters.""" runtimeVersion:String """If available, an identifier for the edge server instance, such that when restarting this instance it will have the same serverId, with a different bootId. For example, in Kubernetes this might be the pod name. Length must be <= 256 characters.""" serverId:String """An identifier used to distinguish the version (from the user's perspective) of the edge server's code itself. For instance, the git sha of the server's repository or the docker sha of the associated image this server runs with. Length must be <= 256 characters.""" userVersion:String}"""Columns of EdgeServerInfos.""" enum EdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID SERVICE_ID TIMESTAMP}type EdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID serviceId:ID userVersion:String}"""Filter for data in EdgeServerInfos.
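# Editor's note (illustrative aside, not part of the upstream schema file): an agent
# reporting itself via the EdgeServerInfo input above might send values shaped like
# this; every value is made up, and only graphVariant has a schema-provided default.
#   { bootId: "5f6b2fc2-…", executableSchemaId: "abc123", graphVariant: "current",
#     libraryVersion: "apollo-server-2.8", platform: "kubernetes", serverId: "pod-17" }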
Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input EdgeServerInfosFilter{and:[EdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:EdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:EdgeServerInfosFilter or:[EdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose userVersion dimension equals the given value if not null. To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in EdgeServerInfos. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input EdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose userVersion dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" userVersion:[String]}input EdgeServerInfosOrderBySpec{column:EdgeServerInfosColumn!direction:Ordering!}type EdgeServerInfosRecord{"""Dimensions of EdgeServerInfos that can be grouped by.""" groupBy:EdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}enum EmailCategory{EDUCATIONAL}type EmailPreferences{email:String!subscriptions:[EmailCategory!]!unsubscribedFromAll:Boolean!}interface Error{message:String!}"""Columns of ErrorStats.""" enum ErrorStatsColumn{ACCOUNT_ID CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type ErrorStatsDimensions{accountId:ID clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in ErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ErrorStatsFilter{"""Selects rows whose accountId dimension equals the given value if not null. To query for the null value, use {in: {accountId: [null]}} instead.""" accountId:ID and:[ErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ErrorStatsFilterIn not:ErrorStatsFilter or:[ErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in ErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ErrorStatsFilterIn{"""Selects rows whose accountId dimension is in the given list. A null value in the list means a row with null for that dimension.""" accountId:[ID]"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. 
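# Editor's note (illustrative aside, not part of the upstream schema file): the
# ErrorStats types here pair dimensions with metrics; a result row could be read with
# a sketch like this (fragment name invented, fields as defined in this file).
#   fragment ErrorRow on ErrorStatsRecord {
#     timestamp
#     groupBy { clientName clientVersion queryName }
#     metrics { errorsCount requestsWithErrorsCount }
#   }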
A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type ErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input ErrorStatsOrderBySpec{column:ErrorStatsColumn!direction:Ordering!}type ErrorStatsRecord{"""Dimensions of ErrorStats that can be grouped by.""" groupBy:ErrorStatsDimensions!"""Metrics of ErrorStats that can be aggregated over.""" metrics:ErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}""" Input parameters for run explorer operation event.""" enum EventEnum{CLICK_CHECK_LIST CLICK_GO_TO_GRAPH_SETTINGS RUN_EXPLORER_OPERATION}"""Excluded operation for a graph.""" type ExcludedOperation{"""Operation ID to exclude from schema check.""" ID:ID!}"""Option to filter by operation ID.""" input ExcludedOperationInput{"""Operation ID to exclude from schema check.""" ID:ID!}type FeatureIntros{devGraph:Boolean!federatedGraph:Boolean!freeConsumerSeats:Boolean!}"""Feature Intros Input Type""" input FeatureIntrosInput{devGraph:Boolean federatedGraph:Boolean freeConsumerSeats:Boolean}"""Subgraph. Federated graph variants that are managed by Apollo Studio are composed of subgraphs. -See https://www.apollographql.com/docs/federation/managed-federation/overview/ for more information.""" type FederatedImplementingService{"""The subgraph schema actively published, used for composition for the graph variant this subgraph belongs to.""" activePartialSchema:PartialSchema!"""Timestamp of when this subgraph was created.""" createdAt:Timestamp!"""The ID of the graph this subgraph belongs to.""" graphID:String!"""Which variant of a graph this subgraph belongs to.""" graphVariant:String!"""Name of the subgraph.""" name:String!"""The particular version/edition of a subgraph, entered by users. Typically a Git SHA or docker image ID.""" revision:String!"""Timestamp for when this subgraph was updated.""" updatedAt:Timestamp!"""URL of the subgraph's GraphQL endpoint.""" url:String}"""A minimal representation of a federated implementing service, using only a name and partial schema SDL""" type FederatedImplementingServicePartialSchema{"""The name of the implementing service""" name:String!"""The partial schema of the implementing service""" sdl:String!}"""Container for a list of subgraphs composing a graph.""" type FederatedImplementingServices{"""The list of underlying subgraphs.""" services:[FederatedImplementingService!]!}"""Counts of changes at the field level, including objects, interfaces, and input fields.""" type FieldChangeSummaryCounts{"""Number of changes that are additions of fields to object, interface, and input types.""" additions:Int!"""Number of changes that are field edits. 
This includes fields changing type and any field -deprecation and description changes, but also includes any argument changes and any input object -field changes.""" edits:Int!"""Number of changes that are removals of fields from object, interface, and input types.""" removals:Int!}"""Columns of FieldExecutions.""" enum FieldExecutionsColumn{ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldExecutionsDimensions{fieldName:String parentType:String schemaTag:String serviceId:ID}"""Filter for data in FieldExecutions. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldExecutionsFilter{and:[FieldExecutionsFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldExecutionsFilterIn not:FieldExecutionsFilter or:[FieldExecutionsFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldExecutions. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldExecutionsFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldExecutionsMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input FieldExecutionsOrderBySpec{column:FieldExecutionsColumn!direction:Ordering!}type FieldExecutionsRecord{"""Dimensions of FieldExecutions that can be grouped by.""" groupBy:FieldExecutionsDimensions!"""Metrics of FieldExecutions that can be aggregated over.""" metrics:FieldExecutionsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of FieldLatencies.""" enum FieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in FieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldLatenciesFilter{and:[FieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. 
To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldLatenciesFilterIn not:FieldLatenciesFilter or:[FieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input FieldLatenciesOrderBySpec{column:FieldLatenciesColumn!direction:Ordering!}type FieldLatenciesRecord{"""Dimensions of FieldLatencies that can be grouped by.""" groupBy:FieldLatenciesDimensions!"""Metrics of FieldLatencies that can be aggregated over.""" metrics:FieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of FieldRequestsByClientVersion.""" enum FieldRequestsByClientVersionColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldRequestsByClientVersionDimensions{clientName:String clientVersion:String fieldName:String parentType:String schemaTag:String serviceId:ID}"""Filter for data in FieldRequestsByClientVersion. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldRequestsByClientVersionFilter{and:[FieldRequestsByClientVersionFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldRequestsByClientVersionFilterIn not:FieldRequestsByClientVersionFilter or:[FieldRequestsByClientVersionFilter!]"""Selects rows whose parentType dimension equals the given value if not null. 
To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldRequestsByClientVersion. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldRequestsByClientVersionFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldRequestsByClientVersionMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input FieldRequestsByClientVersionOrderBySpec{column:FieldRequestsByClientVersionColumn!direction:Ordering!}type FieldRequestsByClientVersionRecord{"""Dimensions of FieldRequestsByClientVersion that can be grouped by.""" groupBy:FieldRequestsByClientVersionDimensions!"""Metrics of FieldRequestsByClientVersion that can be aggregated over.""" metrics:FieldRequestsByClientVersionMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of FieldUsage.""" enum FieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in FieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldUsageFilter{and:[FieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldUsageFilterIn not:FieldUsageFilter or:[FieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. 
To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input FieldUsageOrderBySpec{column:FieldUsageColumn!direction:Ordering!}type FieldUsageRecord{"""Dimensions of FieldUsage that can be grouped by.""" groupBy:FieldUsageDimensions!"""Metrics of FieldUsage that can be aggregated over.""" metrics:FieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type FilterBuildInput{filterConfig:FilterConfig!schemaHash:String!}type FilterConfig{exclude:[String!]!include:[String!]!}input FilterConfigInput{exclude:[String!]!include:[String!]!}type GitContext{branch:String commit:ID commitUrl:String committer:String message:String remoteHost:GitRemoteHost remoteUrl:String}"""This is stored with a schema when it is uploaded""" input GitContextInput{branch:String commit:ID committer:String message:String remoteUrl:String}enum GitRemoteHost{BITBUCKET GITHUB GITLAB}type GlobalExperimentalFeatures{operationsCollections:Boolean!sandboxesFullRelease:Boolean!sandboxesPreview:Boolean!sandboxesSchemaChecksPage:Boolean!sandboxesSchemaDiffPage:Boolean!subgraphsInSandbox:Boolean!}type GraphApiKey implements ApiKey{createdAt:Timestamp!createdBy:Identity id:ID!keyName:String role:UserPermission!token:String!}"""A union of all combinations that can comprise the implementingServices for a Service""" union GraphImplementors=FederatedImplementingServices|NonFederatedImplementingService scalar GraphQLDocument """A variant of a graph, often corresponding to an environment where a graph runs (e.g. staging). -See https://www.apollographql.com/docs/studio/org/graphs/ for more details.""" type GraphVariant{"""As new schema tags keep getting published, activeSchemaPublish refers to the latest.""" activeSchemaPublish:SchemaTag """The version of composition currently in use, if applicable""" compositionVersion:String """Filter configuration used to create the contract schema""" contractFilterConfig:FilterConfig """Preview a Contract schema built from this source variant.""" contractPreview(filters:FilterConfigInput!):ContractPreview!defaultHeaders:String@deprecated(reason:"Use sharedHeaders instead")derivedVariantCount:Int!"""Graph the variant belongs to.""" graph:Service!"""Graph ID of the variant. Prefer using graph { id } when feasible.""" graphId:String!"""If the variant has managed subgraphs.""" hasManagedSubgraphs:Boolean """Global identifier for the graph variant, in the form `graph@variant`.""" id:ID!"""Represents whether this variant is a Contract.""" isContract:Boolean!"""Is this variant one of the current user's favorite variants?""" isFavoriteOfCurrentUser:Boolean!"""If the variant has managed subgraphs.""" isFederated:Boolean@deprecated(reason:"Replaced by hasManagedSubgraphs")"""If the variant is protected""" isProtected:Boolean!isPublic:Boolean!"""Represents whether this variant should be listed in the public variants directory. This can only be true if the variant is also public.""" isPubliclyListed:Boolean!"""Represents whether Apollo has verified the authenticity of this public variant. 
This can only be true if the variant is also public.""" isVerified:Boolean!"""Latest approved launch for the variant, and what is served through Uplink.""" latestApprovedLaunch:Launch """Latest launch for the variant, whether successful or not.""" latestLaunch:Launch """Latest publication for the variant.""" latestPublication:SchemaTag launch(id:ID!):Launch launchHistory(limit:Int!=100):[Launch!]!links:[LinkInfo!]"""Name of the variant, like `variant`.""" name:String!operationCollections:[OperationCollection!]!"""Which permissions the current user has for interacting with this variant""" permissions:GraphVariantPermissions!"""Generate a federated operation plan for a given operation""" plan(document:GraphQLDocument!operationName:String):QueryPlan """Explorer setting for preflight script to run before the actual GraphQL operation is run.""" preflightScript:String readme:Readme """Registry stats for this particular graph variant""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow """The total number of requests for this variant in the last 24 hours""" requestsInLastDay:Long """If the graphql endpoint is set up to accept cookies.""" sendCookies:Boolean """Explorer setting for shared headers for a graph""" sharedHeaders:String sourceVariant:GraphVariant """Subgraph of a given name, null if non-existent.""" subgraph(name:ID!):FederatedImplementingService """List of subgraphs that comprise a variant, null if not federated. -Set includeDeleted to see deleted subgraphs.""" subgraphs(includeDeleted:Boolean!=false):[FederatedImplementingService!]"""URL where subscription operations can be executed.""" subscriptionUrl:String """A list of supported directives""" supportedDirectives:[DirectiveSupportStatus!]"""URL where non-subscription operations can be executed.""" url:String """The last instant that usage information (e.g.
operation stat, client stats) was reported for this variant""" usageLastReportedAt:Timestamp}"""Result of looking up a variant by ref""" union GraphVariantLookup=GraphVariant|InvalidRefFormat """Modifies a variant of a graph, also called a schema tag in parts of our product.""" type GraphVariantMutation{addLinkToVariant(title:String type:LinkInfoType!url:String!):GraphVariant!configureComposition(enableTagAndInaccessible:Boolean version:String):GraphVariant """Delete the variant.""" delete:DeleteSchemaTagResult!enableTagAndInaccessible(enabled:Boolean!):GraphVariant@deprecated(reason:"Use configureComposition instead")"""Graph ID of the variant""" graphId:String!"""Global identifier for the graph variant, in the form `graph@variant`.""" id:ID!"""Name of the variant, like `variant`.""" name:String!relaunch:RelaunchResult!removeLinkFromVariant(linkInfoId:ID!):GraphVariant!setIsFavoriteOfCurrentUser(favorite:Boolean!):GraphVariant!updateDefaultHeaders(defaultHeaders:String):GraphVariant@deprecated(reason:"Use updateSharedHeaders instead")updateIsProtected(isProtected:Boolean!):GraphVariant updatePreflightScript(preflightScript:String):GraphVariant updateSendCookies(sendCookies:Boolean!):GraphVariant updateSharedHeaders(sharedHeaders:String):GraphVariant updateSubscriptionURL(subscriptionUrl:String):GraphVariant updateURL(url:String):GraphVariant updateVariantIsPublic(isPublic:Boolean!):GraphVariant updateVariantIsPubliclyListed(isPubliclyListed:Boolean!):GraphVariant updateVariantIsVerified(isVerified:Boolean!):GraphVariant updateVariantReadme(readme:String!):GraphVariant}"""A map from permission String to boolean that the currently authenticated user is allowed for a particular graph variant.""" type GraphVariantPermissions{canCreateCollectionInVariant:Boolean!"""Whether the currently authenticated user is permitted to manage/update the build configuration (e.g. build pipeline version) for this variant.""" canManageBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to update variant-level settings for the Schema Explorer.""" canManageExplorerSettings:Boolean!"""Whether the currently authenticated user is permitted to publish schemas to this variant.""" canPushSchemas:Boolean!"""Whether the currently authenticated user is permitted to view details regarding the build configuration (e.g. build pipeline version) for this variant.""" canQueryBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to download schemas associated to this variant.""" canQuerySchemas:Boolean!canShareCollectionInVariant:Boolean!canUpdateVariantLinkInfo:Boolean!"""Whether the currently authenticated user is permitted to update the README for this variant.""" canUpdateVariantReadme:Boolean!variantId:ID!}enum HTTPMethod{CONNECT DELETE GET HEAD OPTIONS PATCH POST PUT TRACE UNKNOWN UNRECOGNIZED}input HistoricQueryParameters{"""A list of clients to filter out during validation.""" excludedClients:[ClientInfoFilter!]=null """A list of operation names to filter out during validation.""" excludedOperationNames:[OperationNameFilterInput!]=null from:Timestamp="-86400" """A list of operation IDs to filter out during validation.""" ignoredOperations:[ID!]=null """A list of variants to include in the validation. If no variants are provided -then this defaults to the "current" variant along with the base variant. The -base variant indicates the schema that generates diff and marks the metrics that -are checked for broken queries. 
We union this base variant with the untagged values('', -same as null inside of `in`, and 'current') in this metrics fetch. This strategy -supports users who have not tagged their metrics or schema.""" includedVariants:[String!]=null """Minimum number of requests within the window for a query to be considered.""" queryCountThreshold:Int=1 """Number of requests within the window for a query to be considered, relative to -total request count. Expected values are between 0 and 0.05 (minimum 5% of total -request volume)""" queryCountThresholdPercentage:Float=0 to:Timestamp="-0"}"""An identity (e.g. Anonymous, a specific User) within Apollo Studio. See implementations.""" interface Identity{"""A view of the identity as an Actor type.""" asActor:Actor!"""An identifier for a given identity, unique within the context of the identity type.""" id:ID!"""A human-readable name for the identity in question.""" name:String!}"""An actor's identity and info about the client they used to perform the action""" type IdentityAndClientInfo{"""Client name provided when the actor performed the action""" clientName:String """Client version provided when the actor performed the action""" clientVersion:String """Identity info about the actor""" identity:Identity}union IdentityMutation=ServiceMutation|UserMutation type IgnoreOperationsInChecksResult{graph:Service!}"""The location of the implementing service config file in storage""" type ImplementingServiceLocation{"""The name of the implementing service""" name:String!"""The path in storage to access the implementing service config file""" path:String!}type InternalAdminUser{role:InternalMdgAdminRole!userID:String!}type InternalIdentity implements Identity{accounts:[Account!]!asActor:Actor!email:String id:ID!name:String!}enum InternalMdgAdminRole{INTERNAL_MDG_READ_ONLY INTERNAL_MDG_SALES INTERNAL_MDG_SUPER_ADMIN INTERNAL_MDG_SUPPORT}type IntrospectionDirective{args:[IntrospectionInputValue!]!description:String locations:[IntrospectionDirectiveLocation!]!name:String!}input IntrospectionDirectiveInput{args:[IntrospectionInputValueInput!]!description:String isRepeatable:Boolean locations:[IntrospectionDirectiveLocation!]!name:String!}"""__DirectiveLocation introspection type""" enum IntrospectionDirectiveLocation{"""Location adjacent to an argument definition.""" ARGUMENT_DEFINITION """Location adjacent to an enum definition.""" ENUM """Location adjacent to an enum value definition.""" ENUM_VALUE """Location adjacent to a field.""" FIELD """Location adjacent to a field definition.""" FIELD_DEFINITION """Location adjacent to a fragment definition.""" FRAGMENT_DEFINITION """Location adjacent to a fragment spread.""" FRAGMENT_SPREAD """Location adjacent to an inline fragment.""" INLINE_FRAGMENT """Location adjacent to an input object field definition.""" INPUT_FIELD_DEFINITION """Location adjacent to an input object type definition.""" INPUT_OBJECT """Location adjacent to an interface definition.""" INTERFACE """Location adjacent to a mutation operation.""" MUTATION """Location adjacent to an object type definition.""" OBJECT """Location adjacent to a query operation.""" QUERY """Location adjacent to a scalar definition.""" SCALAR """Location adjacent to a schema definition.""" SCHEMA """Location adjacent to a subscription operation.""" SUBSCRIPTION """Location adjacent to a union definition.""" UNION """Location adjacent to a variable definition.""" VARIABLE_DEFINITION}"""Values associated with introspection result for an enum value""" type 
IntrospectionEnumValue{depreactionReason:String@deprecated(reason:"Use deprecationReason instead")deprecationReason:String description:String isDeprecated:Boolean!name:String!}"""__EnumValue introspection type""" input IntrospectionEnumValueInput{deprecationReason:String description:String isDeprecated:Boolean!name:String!}"""Values associated with introspection result for field""" type IntrospectionField{args:[IntrospectionInputValue!]!deprecationReason:String description:String isDeprecated:Boolean!name:String!type:IntrospectionType!}"""__Field introspection type""" input IntrospectionFieldInput{args:[IntrospectionInputValueInput!]!deprecationReason:String description:String isDeprecated:Boolean!name:String!type:IntrospectionTypeInput!}"""Values associated with introspection result for an input field""" type IntrospectionInputValue{defaultValue:String description:String name:String!type:IntrospectionType!}"""__Value introspection type""" input IntrospectionInputValueInput{defaultValue:String deprecationReason:String description:String isDeprecated:Boolean name:String!type:IntrospectionTypeInput!}type IntrospectionSchema{directives:[IntrospectionDirective!]!mutationType:IntrospectionType queryType:IntrospectionType!subscriptionType:IntrospectionType types(filter:TypeFilterConfig={includeAbstractTypes:true includeBuiltInTypes:true includeIntrospectionTypes:true}):[IntrospectionType!]!}"""__Schema introspection type""" input IntrospectionSchemaInput{description:String directives:[IntrospectionDirectiveInput!]!mutationType:IntrospectionTypeRefInput queryType:IntrospectionTypeRefInput!subscriptionType:IntrospectionTypeRefInput types:[IntrospectionTypeInput!]}"""Object containing all possible values for an introspectionType""" type IntrospectionType{"""the base kind of the type this references, ignoring lists and nullability""" baseKind:IntrospectionTypeKind description:String enumValues(includeDeprecated:Boolean=false):[IntrospectionEnumValue!]fields:[IntrospectionField!]inputFields:[IntrospectionInputValue!]interfaces:[IntrospectionType!]kind:IntrospectionTypeKind name:String ofType:IntrospectionType possibleTypes:[IntrospectionType!]"""printed representation of type, including nested nullability and list ofTypes""" printed:String!}"""__Type introspection type""" input IntrospectionTypeInput{description:String enumValues:[IntrospectionEnumValueInput!]fields:[IntrospectionFieldInput!]inputFields:[IntrospectionInputValueInput!]interfaces:[IntrospectionTypeInput!]kind:IntrospectionTypeKind!name:String ofType:IntrospectionTypeInput possibleTypes:[IntrospectionTypeInput!]specifiedByUrl:String}enum IntrospectionTypeKind{"""Indicates this type is an enum. 'enumValues' is a valid field.""" ENUM """Indicates this type is an input object. 'inputFields' is a valid field.""" INPUT_OBJECT """Indicates this type is an interface. 'fields' and 'possibleTypes' are valid -fields""" INTERFACE """Indicates this type is a list. 'ofType' is a valid field.""" LIST """Indicates this type is a non-null. 'ofType' is a valid field.""" NON_NULL """Indicates this type is an object. 'fields' and 'interfaces' are valid fields.""" OBJECT """Indicates this type is a scalar.""" SCALAR """Indicates this type is a union. 
'possibleTypes' is a valid field.""" UNION}"""Shallow __Type introspection type""" input IntrospectionTypeRefInput{kind:String name:String!}type InvalidOperation{errors:[OperationValidationError!]signature:ID!}"""Type returned by reference lookup when the reference was invalid""" type InvalidRefFormat implements Error{message:String!}type InvalidTarget implements Error{message:String!}type Invoice{closedAt:Timestamp collectionMethod:String createdAt:Timestamp!invoiceNumber:Int!state:InvoiceState!totalInCents:Int!updatedAt:Timestamp!uuid:ID!}enum InvoiceState{COLLECTED FAILED OPEN PAST_DUE UNKNOWN}enum InvoiceStateV2{COLLECTED FAILED OPEN PAST_DUE UNKNOWN}type InvoiceV2{closedAt:Timestamp collectionMethod:String createdAt:Timestamp!invoiceNumber:Int!state:InvoiceStateV2!totalInCents:Int!updatedAt:Timestamp!uuid:ID!}"""A Launch represents the complete process of making a set of updates to your deployed graph.""" type Launch{"""The time at which this launch was approved.""" approvedAt:Timestamp """The build for the variant being launched. Is non-null once the build is initiated.""" build:Build """Set of items that will be passed to the build.""" buildInput:BuildInput!"""The time at which this launch completed.""" completedAt:Timestamp """The time at which this launch initiated.""" createdAt:Timestamp!"""Contract launches that were triggered by this launch.""" downstreamLaunches:[Launch!]!"""The ID of the graph that this launch was initiated for.""" graphId:String!"""The name of the variant that this launch was initiated for.""" graphVariant:String!"""Unique identifier for this launch.""" id:ID!isAvailable:Boolean """Whether the launch completed.""" isCompleted:Boolean """Whether the launch was published.""" isPublished:Boolean isTarget:Boolean """Returns the most recent launch sequence step.""" latestSequenceStep:LaunchSequenceStep """A specific publication of a graph variant pertaining to this launch.""" publication:SchemaTag """The outcome of the launch.""" results:[LaunchResult!]!schemaTag:SchemaTag """This represents a sequence in the Launch. Returns a list of sequence steps that represents points of time in the launch.""" sequence:[LaunchSequenceStep!]!"""A shortened version of Launch.id. 
Contains the first 8 characters of the ID.""" shortenedID:String!"""The status of the launch.""" status:LaunchStatus!"""Changes that were made to the subgraphs for this launch.""" subgraphChanges:[SubgraphChange!]"""The time at which this launch was superseded by another launch.""" supersededAt:Timestamp """Represents the launch that caused this launch to not continue/publish.""" supersededBy:Launch """Upstream launch represents the launch of the source variant.""" upstreamLaunch:Launch}"""more result types will be supported in the future""" union LaunchResult=ChangelogLaunchResult type LaunchSequenceBuildStep{completedAt:Timestamp startedAt:Timestamp}type LaunchSequenceCheckStep{completedAt:Timestamp startedAt:Timestamp}type LaunchSequenceCompletedStep{completedAt:Timestamp}type LaunchSequenceInitiatedStep{startedAt:Timestamp}type LaunchSequencePublishStep{completedAt:Timestamp startedAt:Timestamp}union LaunchSequenceStep=LaunchSequenceBuildStep|LaunchSequenceCheckStep|LaunchSequenceCompletedStep|LaunchSequenceInitiatedStep|LaunchSequencePublishStep|LaunchSequenceSupersededStep type LaunchSequenceSupersededStep{completedAt:Timestamp}enum LaunchStatus{LAUNCH_COMPLETED LAUNCH_FAILED LAUNCH_INITIATED}type LinkInfo{createdAt:Timestamp!id:ID!title:String type:LinkInfoType!url:String!}enum LinkInfoType{DEVELOPER_PORTAL OTHER REPOSITORY}"""Long type""" scalar Long type MarkChangesForOperationAsSafeResult{"""Nice to have for the frontend since the Apollo cache is already watching for AffectedQuery to update. -This might return null if no behavior changes were found for the affected operation ID. -This is a weird situation that should never happen.""" affectedOperation:AffectedQuery message:String!success:Boolean!}type MediaUploadInfo{csrfToken:String!maxContentLength:Int!url:String!}union MoveOperationCollectionEntryResult=InvalidTarget|MoveOperationCollectionEntrySuccess|PermissionError type MoveOperationCollectionEntrySuccess{operation:OperationCollectionEntry!originCollection:OperationCollection!targetCollection:OperationCollection!}type Mutation{account(id:ID!):AccountMutation """Creates an operation collection for the given variantRefs, or make a sandbox collection without variantRefs.""" createOperationCollection(description:String editRoles:[UserPermission!]isSandbox:Boolean!isShared:Boolean!name:String!variantRefs:[ID!]):CreateOperationCollectionResult!"""Finalize a password reset with a token included in the E-mail link, -returns the corresponding login email when successful""" finalizePasswordReset(newPassword:String!resetToken:String!):String """Mutation a graph.""" graph(id:ID!):ServiceMutation """Join an account with a token""" joinAccount(accountId:ID!joinToken:String!):Account me:IdentityMutation newAccount(companyUrl:String id:ID!):Account newService(accountId:ID!description:String hiddenFromUninvitedNonAdminAccountMembers:Boolean!=false id:ID!isDev:Boolean!=false name:String onboardingArchitecture:OnboardingArchitecture title:String):Service operationCollection(id:ID!):OperationCollectionMutation """Report a running GraphQL server's schema.""" reportSchema("""Only sent if previously requested i.e. received ReportSchemaResult with withCoreSchema = true. This is a GraphQL schema document as a string. 
Note that for a GraphQL server with a core schema, this should be the core schema, not the API schema.""" coreSchema:String """Information about server and its schema.""" report:SchemaReport!):ReportSchemaResult """Ask for a user's password to be reset by E-mail""" resetPassword(email:String!):Void resolveAllInternalCronExecutions(group:String name:String):Void resolveInternalCronExecution(id:ID!):CronExecution service(id:ID!):ServiceMutation """Set the subscriptions for a given email""" setSubscriptions(email:String!subscriptions:[EmailCategory!]!token:String!):EmailPreferences """Set the studio settings for the current user""" setUserSettings(newSettings:UserSettingsInput):UserSettings signUp(email:String!fullName:String!password:String!referrer:String trackingGoogleClientId:String trackingMarketoClientId:String userSegment:UserSegment utmCampaign:String utmMedium:String utmSource:String):User """This is called by the form shown to users after they delete their user or organization account.""" submitPostDeletionFeedback(feedback:String!targetIdentifier:ID!targetType:DeletionTargetType!):Void """Mutation for basic engagement tracking in studio""" track(event:EventEnum!graphID:String!graphVariant:String!="current"):Void """Rover session tracking. Reserved to https://rover.apollo.dev/telemetry (https://github.com/apollographql/orbiter).""" trackRoverSession(anonymousId:ID!arguments:[RoverArgumentInput!]!ci:String command:String!cwdHash:SHA256!os:String!remoteUrlHash:SHA256 sessionId:ID!version:String!):Void """Unsubscribe a given email from all emails""" unsubscribeFromAll(email:String!token:String!):EmailPreferences user(id:ID!):UserMutation}type NamedIntrospectionArg{description:String name:String}type NamedIntrospectionArgNoDescription{name:String}"""The shared fields for a named introspection type. Currently this is returned for the +ancestor) was added/removed. +""" +type ChangeSummary { + """ + Counts for changes to non-field aspects of objects, input objects, and interfaces, + and all aspects of enums, unions, and scalars. + """ + type: TypeChangeSummaryCounts! + """Counts for changes to fields of objects, input objects, and interfaces.""" + field: FieldChangeSummaryCounts! + """Counts for all changes.""" + total: TotalChangeSummaryCounts! +} + +enum ChangeType { + FAILURE + NOTICE +} + +"""Filter options available when listing checks.""" +input CheckFilterInput { + authors: [String!] + branches: [String!] + subgraphs: [String!] + status: CheckFilterInputStatusOption + variants: [String!] +} + +"""Options for filtering CheckWorkflows by status""" +enum CheckFilterInputStatusOption { + FAILED + PENDING + PASSED +} + +"""The result of performing a subgraph check, including all steps.""" +type CheckPartialSchemaResult { + """Result of compostion run as part of the overall subgraph check.""" + compositionValidationResult: CompositionCheckResult! + """Overall result of the check. This will be null if composition validation was unsuccessful.""" + checkSchemaResult: CheckSchemaResult + """Whether any modifications were detected in the composed core schema.""" + coreSchemaModified: Boolean! +} + +"""The possible results of a request to initiate schema checks (either a success object or one of multiple `Error` objects).""" +union CheckRequestResult = CheckRequestSuccess | InvalidInputError | PermissionError | PlanError + +"""Represents a successfully initiated execution of schema checks. 
This does not indicate the _result_ of the checks, only that they were initiated.""" +type CheckRequestSuccess { + """The URL of the Apollo Studio page for this check.""" + targetURL: String! + """The unique ID for this execution of schema checks.""" + workflowID: ID! +} + +"""Input type to provide when running schema checks asynchronously for a non-federated graph.""" +input CheckSchemaAsyncInput { + """Configuration options for the check execution.""" + config: HistoricQueryParametersInput! + """The GitHub context to associate with the check.""" + gitContext: GitContextInput! + graphRef: ID @deprecated(reason: "This field is not required to be sent anymore") + """The URL of the GraphQL endpoint that Apollo Sandbox introspected to obtain the proposed schema. Required if `isSandbox` is `true`.""" + introspectionEndpoint: String + """If `true`, the check was initiated by Apollo Sandbox.""" + isSandbox: Boolean! + proposedSchemaDocument: String +} + +"""The result of running schema checks on a graph variant.""" +type CheckSchemaResult { + """The schema diff and affected operations generated by the schema check.""" + diffToPrevious: SchemaDiff! + """The URL to view the schema diff in Studio.""" + targetUrl: String +} + +type CheckWorkflow { + """ + The variant provided as a base to check against. Only the differences from the + base schema will be tested in operations checks. + """ + baseVariant: GraphVariant + """The timestamp when the check workflow completed.""" + completedAt: Timestamp + id: ID! + """The name of the implementing service that was responsible for triggering the validation.""" + implementingServiceName: String + """The timestamp when the check workflow started.""" + startedAt: Timestamp + """Overall status of the workflow, based on the underlying task statuses.""" + status: CheckWorkflowStatus! + """The set of check tasks associated with this workflow, e.g. composition, operations, etc.""" + tasks: [CheckWorkflowTask!]! + """Contextual parameters supplied by the runtime environment where the check was run.""" + gitContext: GitContext + createdAt: Timestamp! +} + +enum CheckWorkflowStatus { + FAILED + PASSED + PENDING +} + +interface CheckWorkflowTask { + completedAt: Timestamp + createdAt: Timestamp! + id: ID! + """ + The status of this task. All tasks start with the PENDING status while initializing. If any + prerequisite task fails, then the task status becomes BLOCKED. Otherwise, if all prerequisite + tasks pass, then this task runs (still having the PENDING status). Once the task completes, the + task status will become either PASSED or FAILED. + """ + status: CheckWorkflowTaskStatus! + """A studio UI url to view the details of this check workflow task""" + targetURL: String + """The workflow that this task belongs to.""" + workflow: CheckWorkflow! +} + +enum CheckWorkflowTaskStatus { + BLOCKED + FAILED + PASSED + PENDING +} + +"""Filter options to exclude by client reference ID, client name, and client version.""" +input ClientInfoFilter { + name: String! + """Ignored""" + referenceID: ID + version: String +} + +"""The result of supergraph composition that Studio performed in response to an attempted deletion of a subgraph.""" +type SubgraphRemovalResult { + """A list of errors that occurred during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a supergraph schema. If any errors are present, gateways / routers are not updated.""" + errors: [SchemaCompositionError]! 
+ """Whether this composition result resulted in a new supergraph schema passed to Uplink (`true`), or the build failed for any reason (`false`). For dry runs, this value is `true` if Uplink _would have_ been updated with the result.""" + updatedGateway: Boolean! +} + +"""The result of supergraph composition that Studio performed in response to an attempted publish of a subgraph.""" +type SubgraphPublicationResult { + """The generated composition config, or null if any errors occurred.""" + compositionConfig: CompositionConfig + """A list of errors that occurred during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a supergraph schema. If any errors are present, gateways / routers are not updated.""" + errors: [SchemaCompositionError]! + """Whether this composition result resulted in a new supergraph schema passed to Uplink (`true`), or the build failed for any reason (`false`). For dry runs, this value is `true` if Uplink _would have_ been updated with the result.""" + updatedGateway: Boolean! + """Whether a new subgraph was created as part of this publish.""" + wasCreated: Boolean! + """The URL of the Studio page for this update's associated launch, if available.""" + launchUrl: String + """Human-readable text describing the launch result of the subgraph publish.""" + launchCliCopy: String +} + +type CompositionBuildInput { + subgraphs: [Subgraph!]! + version: String +} + +type CompositionCheckTask implements CheckWorkflowTask { + completedAt: Timestamp + """ + Whether the build's output supergraph core schema differs from that of the active publish for + the workflow's variant at the time this field executed (NOT at the time the check workflow + started). + """ + coreSchemaModified: Boolean! + createdAt: Timestamp! + id: ID! + status: CheckWorkflowTaskStatus! + targetURL: String + workflow: CheckWorkflow! + """ + An old version of buildResult that returns a very old GraphQL type that generally should be + avoided. This field will soon be deprecated. + """ + result: CompositionResult +} + +"""Composition configuration exposed to the gateway.""" +type CompositionConfig { + """The resulting API schema's SHA256 hash, represented as a hexadecimal string.""" + schemaHash: String! +} + +"""The result of supergraph composition that Studio performed.""" +type CompositionPublishResult implements CompositionResult { + """The unique ID for this instance of composition.""" + graphCompositionID: ID! + """A list of errors that occurred during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a supergraph schema. If any errors are present, gateways / routers are not updated.""" + errors: [SchemaCompositionError!]! + """The supergraph SDL generated by composition.""" + supergraphSdl: GraphQLDocument +} + +"""The result of supergraph composition performed by Apollo Studio, often as the result of a subgraph check or subgraph publish. See individual implementations for more details.""" +interface CompositionResult { + """The unique ID for this instance of composition.""" + graphCompositionID: ID! + """A list of errors that occurred during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a supergraph schema. If any errors are present, gateways / routers are not updated.""" + errors: [SchemaCompositionError!]! 
+ """Supergraph SDL generated by composition.""" + supergraphSdl: GraphQLDocument +} + +"""The result of composition validation run by Apollo Studio during a subgraph check.""" +type CompositionCheckResult implements CompositionResult { + """The unique ID for this instance of composition.""" + graphCompositionID: ID! + """A list of errors that occurred during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a supergraph schema. If any errors are present, gateways / routers are not updated.""" + errors: [SchemaCompositionError!]! + """The supergraph schema document generated by composition.""" + supergraphSdl: GraphQLDocument +} + +type ContractVariantUpsertErrors { + """A list of all errors that occurred when attempting to create or update a contract variant.""" + errorMessages: [String!]! +} + +union ContractVariantUpsertResult = ContractVariantUpsertErrors | ContractVariantUpsertSuccess + +type ContractVariantUpsertSuccess { + """The updated contract variant""" + contractVariant: GraphVariant! + """Human-readable text describing the launch result of the contract update.""" + launchCliCopy: String + """The URL of the Studio page for this update's associated launch, if available.""" + launchUrl: String +} + +"""Contains the supergraph and API schemas generated by composition.""" +type CoreSchema { + """The composed API schema document.""" + apiDocument: GraphQLDocument! + """The composed supergraph schema document.""" + coreDocument: GraphQLDocument! + """The supergraph schema document's SHA256 hash, represented as a hexadecimal string.""" + coreHash: String! +} + +union CreateOperationCollectionResult = OperationCollection | PermissionError | ValidationError + +""" +Implement the DateTime scalar + +The input/output is a string in RFC3339 format. +""" +scalar DateTime @specifiedBy(url: "https://datatracker.ietf.org/doc/html/rfc3339") + +union DeleteOperationCollectionResult = PermissionError + +"""The result of attempting to delete a graph variant.""" +type GraphVariantDeletionResult { + """Whether the variant was deleted or not.""" + deleted: Boolean! +} + +"""The result of a schema checks workflow that was run on a downstream variant as part of checks for the corresponding source variant. Most commonly, these downstream checks are [contract checks](https://www.apollographql.com/docs/studio/contracts#contract-checks).""" +type DownstreamCheckResult { + """Whether the downstream check workflow blocks the upstream check workflow from completing.""" + blocking: Boolean! + """The ID of the graph that the downstream variant belongs to.""" + downstreamGraphID: String! + """The name of the downstream variant.""" + downstreamVariantName: String! + """ + The downstream checks workflow that this result corresponds to. This value is null + if the workflow hasn't been initialized yet, or if the downstream variant was deleted. + """ + downstreamWorkflow: CheckWorkflow + """ + Whether the downstream check workflow is causing the upstream check workflow to fail. This occurs + when the downstream check workflow is both blocking and failing. This may be null while the + downstream check workflow is pending. + """ + failsUpstreamWorkflow: Boolean + """The downstream checks task that this result corresponds to.""" + workflowTask: DownstreamCheckTask! +} + +type DownstreamCheckTask implements CheckWorkflowTask { + completedAt: Timestamp + createdAt: Timestamp! + id: ID! 
+  """
+  A list of results for all downstream checks triggered as part of the source variant's checks workflow.
+  This value is null if the task hasn't been initialized yet, or if the build task fails (the build task is a
+  prerequisite to this task). This value is _not_ null _while_ the task is running. The returned list is empty
+  if the source variant has no downstream variants.
+  """
+  results: [DownstreamCheckResult!]
+  status: CheckWorkflowTaskStatus!
+  targetURL: String
+  workflow: CheckWorkflow!
+}
+
+interface Error {
+  message: String!
+}
+
+"""A single subgraph in a supergraph. Every supergraph managed by Apollo Studio includes at least one subgraph. See https://www.apollographql.com/docs/federation/managed-federation/overview/ for more information."""
+type GraphVariantSubgraph {
+  """The subgraph's name."""
+  name: String!
+  """The URL of the subgraph's GraphQL endpoint."""
+  url: String
+  """The current user-provided version/edition of the subgraph. Typically a Git SHA or docker image ID."""
+  revision: String!
+  """The ID of the graph this subgraph belongs to."""
+  graphID: String!
+  """The name of the graph variant this subgraph belongs to."""
+  graphVariant: String!
+  """The subgraph's current active schema, used in supergraph composition for the associated variant."""
+  activePartialSchema: SubgraphSchema!
+  """The timestamp when the subgraph was created."""
+  createdAt: Timestamp!
+  """The timestamp when the subgraph was most recently updated."""
+  updatedAt: Timestamp!
+}
+
+"""Container for a list of subgraphs composing a supergraph."""
+type GraphVariantSubgraphs {
+  """The list of underlying subgraphs."""
+  services: [GraphVariantSubgraph!]!
+}
+
+"""Counts of changes at the field level, including objects, interfaces, and input fields."""
+type FieldChangeSummaryCounts {
+  """Number of changes that are additions of fields to object, interface, and input types."""
+  additions: Int!
+  """Number of changes that are removals of fields from object, interface, and input types."""
+  removals: Int!
+  """
+  Number of changes that are field edits. This includes fields changing type and any field
+  deprecation and description changes, but also includes any argument changes and any input object
+  field changes.
+  """
+  edits: Int!
+}
+
+"""Inputs provided to the build for a contract variant, which filters types and fields from a source variant's schema."""
+type FilterBuildInput {
+  """Schema filtering rules for the build, such as tags to include or exclude from the source variant schema."""
+  filterConfig: FilterConfig!
+  """The source variant schema document's SHA256 hash, represented as a hexadecimal string."""
+  schemaHash: String!
+}
+
+type FilterCheckTask implements CheckWorkflowTask {
+  completedAt: Timestamp
+  createdAt: Timestamp!
+  id: ID!
+  status: CheckWorkflowTaskStatus!
+  targetURL: String
+  workflow: CheckWorkflow!
+}
+
+"""The filter configuration used to build a contract schema. The configuration consists of lists of tags for schema elements to include or exclude in the resulting schema."""
+type FilterConfig {
+  """Tags of schema elements to exclude from the contract schema."""
+  exclude: [String!]!
+  """Tags of schema elements to include in the contract schema."""
+  include: [String!]!
+}
+
+input FilterConfigInput {
+  """A list of tags for schema elements to exclude from the resulting contract schema."""
+  exclude: [String!]!
+  """
+  Whether to hide unreachable objects, interfaces, unions, inputs, enums and scalars from
+  the resulting contract schema.
Defaults to `false`. + """ + hideUnreachableTypes: Boolean! = false + """A list of tags for schema elements to include in the resulting contract schema.""" + include: [String!]! +} + +type GitContext { + commit: ID +} + +"""Input type to provide when specifying the Git context for a run of schema checks.""" +input GitContextInput { + """The Git repository branch used in the check.""" + branch: String + """The ID of the Git commit used in the check.""" + commit: ID + """The username of the user who created the Git commit used in the check.""" + committer: String + """The commit message of the Git commit used in the check.""" + message: String + """The Git repository's remote URL.""" + remoteUrl: String +} + +""" +Represents a graph API key, which has permissions scoped to a +user role for a single Apollo graph. +""" +type GraphApiKey implements ApiKey { + """The timestamp when the API key was created.""" + createdAt: Timestamp! + """Details of the user or graph that created the API key.""" + createdBy: Identity + """The API key's ID.""" + id: ID! + """The API key's name, for distinguishing it from other keys.""" + keyName: String + """The permission level assigned to the API key upon creation.""" + role: UserPermission! + """The value of the API key. **This is a secret credential!**""" + token: String! +} + +"""A union of all containers that can comprise the components of a Studio graph""" +union GraphImplementors = GraphVariantSubgraphs + +"""A GraphQL document, such as the definition of an operation or schema.""" +scalar GraphQLDocument + +"""A graph variant""" +type GraphVariant { + """The variant's global identifier in the form `graphID@variant`.""" + id: ID! + router: Router + """The filter configuration used to build a contract schema. The configuration consists of lists of tags for schema elements to include or exclude in the resulting schema.""" + contractFilterConfig: FilterConfig + """ + A human-readable description of the filter configuration of this contract variant, or null if this isn't a contract + variant. + """ + contractFilterConfigDescription: String + """The graph that this variant belongs to.""" + graph: Graph! + """Latest approved launch for the variant, and what is served through Uplink.""" + latestApprovedLaunch: Launch + """Latest launch for the variant, whether successful or not.""" + latestLaunch: Launch + """The variant's name (e.g., `staging`).""" + name: String! + """Which permissions the current user has for interacting with this variant""" + permissions: GraphVariantPermissions! + readme: Readme! + """The variant this variant is derived from. This property currently only exists on contract variants.""" + sourceVariant: GraphVariant + """A list of the saved [operation collections](https://www.apollographql.com/docs/studio/explorer/operation-collections/) associated with this variant.""" + operationCollections: [OperationCollection!]! + """The URL of the variant's GraphQL endpoint for query and mutation operations. For subscription operations, use `subscriptionUrl`.""" + url: String + """The URL of the variant's GraphQL endpoint for subscription operations.""" + subscriptionUrl: String + """The details of the variant's most recent publication.""" + latestPublication: SchemaPublication + """A list of the subgraphs included in this variant. This value is null for non-federated variants. Set `includeDeleted` to `true` to include deleted subgraphs.""" + subgraphs(includeDeleted: Boolean! = false): [GraphVariantSubgraph!] 
+ """Returns the details of the subgraph with the provided `name`, or null if this variant doesn't include a subgraph with that name.""" + subgraph(name: ID!): GraphVariantSubgraph +} + +"""Result of looking up a variant by ref""" +union GraphVariantLookup = GraphVariant | InvalidRefFormat + +"""Modifies a variant of a graph, also called a schema tag in parts of our product.""" +type GraphVariantMutation { + """ + _Asynchronously_ kicks off operation checks for a proposed non-federated + schema change against its associated graph. + + Returns a `CheckRequestSuccess` object with a workflow ID that you can use + to check status, or an error object if the checks workflow failed to start. + """ + submitCheckSchemaAsync(input: CheckSchemaAsyncInput!): CheckRequestResult! + """ + _Asynchronously_ kicks off composition and operation checks for a proposed subgraph schema change against its associated supergraph. + + Returns a `CheckRequestSuccess` object with a workflow ID that you can use + to check status, or an error object if the checks workflow failed to start. + """ + submitSubgraphCheckAsync(input: SubgraphCheckAsyncInput!): CheckRequestResult! + """Updates the [README](https://www.apollographql.com/docs/studio/org/graphs/#the-readme-page) of this variant.""" + updateVariantReadme( + """The full new text of the README, as a Markdown-formatted string.""" + readme: String! + ): GraphVariant + """Delete the variant.""" + delete: GraphVariantDeletionResult! +} + +"""Individual permissions for the current user when interacting with a particular Studio graph variant.""" +type GraphVariantPermissions { + """Whether the currently authenticated user is permitted to manage/update this variant's build configuration (e.g., build pipeline version).""" + canManageBuildConfig: Boolean! + """Whether the currently authenticated user is permitted to manage/update cloud routers""" + canManageCloudRouter: Boolean! + """Whether the currently authenticated user is permitted to update variant-level settings for the Apollo Studio Explorer.""" + canManageExplorerSettings: Boolean! + """Whether the currently authenticated user is permitted to publish schemas to this variant.""" + canPushSchemas: Boolean! + """Whether the currently authenticated user is permitted to view this variant's build configuration details (e.g., build pipeline version).""" + canQueryBuildConfig: Boolean! + """Whether the currently authenticated user is permitted to view details regarding cloud routers""" + canQueryCloudRouter: Boolean! + """Whether the currently authenticated user is permitted to view cloud router logs""" + canQueryCloudRouterLogs: Boolean! + """Whether the currently authenticated user is permitted to download schemas associated to this variant.""" + canQuerySchemas: Boolean! + """Whether the currently authenticated user is permitted to update the README for this variant.""" + canUpdateVariantReadme: Boolean! + canCreateCollectionInVariant: Boolean! + canShareCollectionInVariant: Boolean! +} + +input HistoricQueryParameters { + from: String = "-86400" + to: String = "0" + """Minimum number of requests within the window for a query to be considered.""" + queryCountThreshold: Int = 1 + """ + Number of requests within the window for a query to be considered, relative to + total request count. Expected values are between 0 and 0.05 (minimum 5% of total + request volume) + """ + queryCountThresholdPercentage: Float = 0 + """A list of operation IDs to filter out during validation.""" + ignoredOperations: [ID!] 
= null + """A list of clients to filter out during validation.""" + excludedClients: [ClientInfoFilter!] = null + """A list of operation names to filter out during validation.""" + excludedOperationNames: [OperationNameFilterInput!] = null + """ + A list of variants to include in the validation. If no variants are provided + then this defaults to the "current" variant along with the base variant. The + base variant indicates the schema that generates diff and marks the metrics that + are checked for broken queries. We union this base variant with the untagged values('', + same as null inside of `in`, and 'current') in this metrics fetch. This strategy + supports users who have not tagged their metrics or schema. + """ + includedVariants: [String!] = null +} + +"""Input type to provide when specifying configuration details for schema checks.""" +input HistoricQueryParametersInput { + """Clients to be excluded from check.""" + excludedClients: [ClientInfoFilter!] + """Operations to be ignored in this schema check, specified by operation name.""" + excludedOperationNames: [OperationNameFilterInput!] + """Start time for operations to be checked against. Specified as either a) an ISO formatted date/time string or b) a negative number of seconds relative to the time the check request was submitted.""" + from: String + """Operations to be ignored in this schema check, specified by ID.""" + ignoredOperations: [ID!] + """Graph variants to be included in check.""" + includedVariants: [String!] + """Maximum number of queries to be checked against the change.""" + queryCountThreshold: Int + """Only fail check if this percentage of operations would be negatively impacted.""" + queryCountThresholdPercentage: Float + """End time for operations to be checked against. Specified as either a) an ISO formatted date/time string or b) a negative number of seconds relative to the time the check request was submitted.""" + to: String +} + +"""An identity (such as a `User` or `Graph`) in Apollo Studio. See implementing types for details.""" +interface Identity { + """Returns a representation of the identity as an `Actor` type.""" + asActor: Actor! + """The identity's identifier, which is unique among objects of its type.""" + id: ID! + """The identity's human-readable name.""" + name: String! +} + +type InternalIdentity implements Identity { + accounts: [Organization!]! + asActor: Actor! + email: String + id: ID! + name: String! +} + +input IntrospectionDirectiveInput { + name: String! + description: String + locations: [IntrospectionDirectiveLocation!]! + args: [IntrospectionInputValueInput!]! 
+ isRepeatable: Boolean +} + +"""__DirectiveLocation introspection type""" +enum IntrospectionDirectiveLocation { + """Location adjacent to a query operation.""" + QUERY + """Location adjacent to a mutation operation.""" + MUTATION + """Location adjacent to a subscription operation.""" + SUBSCRIPTION + """Location adjacent to a field.""" + FIELD + """Location adjacent to a fragment definition.""" + FRAGMENT_DEFINITION + """Location adjacent to a fragment spread.""" + FRAGMENT_SPREAD + """Location adjacent to an inline fragment.""" + INLINE_FRAGMENT + """Location adjacent to a variable definition.""" + VARIABLE_DEFINITION + """Location adjacent to a schema definition.""" + SCHEMA + """Location adjacent to a scalar definition.""" + SCALAR + """Location adjacent to an object type definition.""" + OBJECT + """Location adjacent to a field definition.""" + FIELD_DEFINITION + """Location adjacent to an argument definition.""" + ARGUMENT_DEFINITION + """Location adjacent to an interface definition.""" + INTERFACE + """Location adjacent to a union definition.""" + UNION + """Location adjacent to an enum definition.""" + ENUM + """Location adjacent to an enum value definition.""" + ENUM_VALUE + """Location adjacent to an input object type definition.""" + INPUT_OBJECT + """Location adjacent to an input object field definition.""" + INPUT_FIELD_DEFINITION +} + +"""__EnumValue introspection type""" +input IntrospectionEnumValueInput { + name: String! + description: String + isDeprecated: Boolean! + deprecationReason: String +} + +"""__Field introspection type""" +input IntrospectionFieldInput { + name: String! + description: String + args: [IntrospectionInputValueInput!]! + type: IntrospectionTypeInput! + isDeprecated: Boolean! + deprecationReason: String +} + +"""__Value introspection type""" +input IntrospectionInputValueInput { + name: String! + description: String + type: IntrospectionTypeInput! + defaultValue: String + isDeprecated: Boolean + deprecationReason: String +} + +"""__Schema introspection type""" +input IntrospectionSchemaInput { + types: [IntrospectionTypeInput!] + queryType: IntrospectionTypeRefInput! + mutationType: IntrospectionTypeRefInput + subscriptionType: IntrospectionTypeRefInput + directives: [IntrospectionDirectiveInput!]! + description: String +} + +"""__Type introspection type""" +input IntrospectionTypeInput { + kind: IntrospectionTypeKind! + name: String + description: String + specifiedByUrl: String + fields: [IntrospectionFieldInput!] + interfaces: [IntrospectionTypeInput!] + possibleTypes: [IntrospectionTypeInput!] + enumValues: [IntrospectionEnumValueInput!] + inputFields: [IntrospectionInputValueInput!] + ofType: IntrospectionTypeInput +} + +enum IntrospectionTypeKind { + """Indicates this type is a scalar.""" + SCALAR + """Indicates this type is an object. 'fields' and 'interfaces' are valid fields.""" + OBJECT + """ + Indicates this type is an interface. 'fields' and 'possibleTypes' are valid + fields + """ + INTERFACE + """Indicates this type is a union. 'possibleTypes' is a valid field.""" + UNION + """Indicates this type is an enum. 'enumValues' is a valid field.""" + ENUM + """Indicates this type is an input object. 'inputFields' is a valid field.""" + INPUT_OBJECT + """Indicates this type is a list. 'ofType' is a valid field.""" + LIST + """Indicates this type is a non-null. 'ofType' is a valid field.""" + NON_NULL +} + +"""Shallow __Type introspection type""" +input IntrospectionTypeRefInput { + name: String! 
+ kind: String +} + +"""An error caused by providing invalid input for a task, such as schema checks.""" +type InvalidInputError { + """The error message.""" + message: String! +} + +"""This object is returned when a request to fetch a Studio graph variant provides an invalid graph ref.""" +type InvalidRefFormat implements Error { + message: String! +} + +"""Represents the complete process of making a set of updates to a deployed graph variant.""" +type Launch { + """The unique identifier for this launch.""" + id: ID! + """The ID of the launch's associated graph.""" + graphId: String! + """The name of the launch's associated variant.""" + graphVariant: String! + order: OrderOrError! + """The timestamp when the launch was approved.""" + approvedAt: Timestamp + """The associated build for this launch (a build includes schema composition and contract filtering). This value is null until the build is initiated.""" + build: Build + """The inputs provided to this launch's associated build, including subgraph schemas and contract filters.""" + buildInput: BuildInput! + """The timestamp when the launch completed. This value is null until the launch completes.""" + completedAt: Timestamp + """The timestamp when the launch was initiated.""" + createdAt: Timestamp! + """Contract launches that were triggered by this launch.""" + downstreamLaunches: [Launch!]! + """Whether the launch completed.""" + isCompleted: Boolean + """Whether the result of the launch has been published to the associated graph and variant. This is always false for a failed launch.""" + isPublished: Boolean + """The most recent launch sequence step that has started but not necessarily completed.""" + latestSequenceStep: LaunchSequenceStep + """A specific publication of a graph variant pertaining to this launch.""" + publication: SchemaPublication + """A list of results from the completed launch. The items included in this list vary depending on whether the launch succeeded, failed, or was superseded.""" + results: [LaunchResult!]! + """Cloud router configuration associated with this build event. It will be non-null for any cloud-router variant, and null for any not cloudy variant/graph.""" + routerConfig: String + """A list of all serial steps in the launch sequence. This list can change as the launch progresses. For example, a `LaunchCompletedStep` is appended after a launch completes.""" + sequence: [LaunchSequenceStep!]! + """A shortened version of `Launch.id` that includes only the first 8 characters.""" + shortenedID: String! + """The launch's status. If a launch is superseded, its status remains `LAUNCH_INITIATED`. To check for a superseded launch, use `supersededAt`.""" + status: LaunchStatus! + """A list of subgraph changes that are included in this launch.""" + subgraphChanges: [SubgraphChange!] + """The timestamp when this launch was superseded by another launch. If an active launch is superseded, it terminates.""" + supersededAt: Timestamp + """The launch that superseded this launch, if any. If an active launch is superseded, it terminates.""" + supersededBy: Launch + """The source variant launch that caused this launch to be initiated. This value is present only for contract variant launches. 
Otherwise, it's null.""" + upstreamLaunch: Launch +} + +"""Types of results that can be associated with a `Launch`""" +union LaunchResult = ChangelogLaunchResult + +"""The timing details for the build step of a launch.""" +type LaunchSequenceBuildStep { + """The timestamp when the step completed.""" + completedAt: Timestamp + """The timestamp when the step started.""" + startedAt: Timestamp +} + +"""The timing details for the checks step of a launch.""" +type LaunchSequenceCheckStep { + """The timestamp when the step completed.""" + completedAt: Timestamp + """The timestamp when the step started.""" + startedAt: Timestamp +} + +"""The timing details for the completion step of a launch.""" +type LaunchSequenceCompletedStep { + """The timestamp when the step (and therefore the launch) completed.""" + completedAt: Timestamp +} + +"""The timing details for the initiation step of a launch.""" +type LaunchSequenceInitiatedStep { + """The timestamp when the step (and therefore the launch) started.""" + startedAt: Timestamp +} + +"""The timing details for the publish step of a launch.""" +type LaunchSequencePublishStep { + """The timestamp when the step completed.""" + completedAt: Timestamp + """The timestamp when the step started.""" + startedAt: Timestamp +} + +"""Represents the various steps that occur in sequence during a single launch.""" +union LaunchSequenceStep = LaunchSequenceBuildStep | LaunchSequenceCheckStep | LaunchSequenceCompletedStep | LaunchSequenceInitiatedStep | LaunchSequencePublishStep | LaunchSequenceSupersededStep + +"""The timing details for the superseded step of a launch. This step occurs only if the launch is superseded by another launch.""" +type LaunchSequenceSupersededStep { + """The timestamp when the step completed, thereby ending the execution of this launch in favor of the superseding launch.""" + completedAt: Timestamp +} + +enum LaunchStatus { + LAUNCH_COMPLETED + LAUNCH_FAILED + LAUNCH_INITIATED +} + +enum LogLevel { + WARN + INFO + ERROR + DEBUG +} + +type LogMessage { + """Timestamp in UTC""" + timestamp: DateTime! + """Log message contents""" + message: String! + """Log level""" + level: LogLevel! +} + +type Mutation { + """Provides access to mutation fields for modifying a Studio graph with the provided ID.""" + graph(id: ID!): GraphMutation + """ + Provides access to mutation fields for modifying an Apollo user with the + provided ID. + """ + user(id: ID!): UserMutation + """Creates an [operation collection](https://www.apollographql.com/docs/studio/explorer/operation-collections/) for a given variant, or creates a [sandbox collection](https://www.apollographql.com/docs/studio/explorer/operation-collections/#sandbox-collections) without an associated variant.""" + createOperationCollection( + """The collection's description.""" + description: String + """Whether the collection is a [sandbox collection](https://www.apollographql.com/docs/studio/explorer/operation-collections/#sandbox-collections).""" + isSandbox: Boolean! + """Whether the collection is shared across its associated organization.""" + isShared: Boolean! + """The minimum role a user needs to edit this collection. Valid values: null, CONSUMER, OBSERVER, DOCUMENTER, CONTRIBUTOR, GRAPH_ADMIN. This value is ignored if `isShared` is `false`. The default value is `GRAPH_ADMIN`.""" + minEditRole: UserPermission + """The collection's name.""" + name: String! 
+  """The [graph ref](https://www.apollographql.com/docs/rover/conventions/#graph-refs) of the graph variants to associate the collection with."""
+  variantRefs: [ID!]
+  ): CreateOperationCollectionResult!
+  operationCollection(id: ID!): OperationCollectionMutation
+}
+
+"""
+ISO 8601 combined date and time without timezone.
+
+# Examples
+
+* `2015-07-01T08:59:60.123`,
+"""
+scalar NaiveDateTime
+
+type NamedIntrospectionArg {
+  name: String
+  description: String
+}
+
+"""
+The shared fields for a named introspection type. Currently this is returned for the top level value affected by a change. In the future, we may update this type to be an interface, which is extended by the more specific types: scalar, object, input object, union, interface, and enum For an in-depth look at where these types come from, see:
-https://github.com/DefinitelyTyped/DefinitelyTyped/blob/659eb50d3/types/graphql/utilities/introspectionQuery.d.ts#L31-L37""" type NamedIntrospectionType{description:String kind:IntrospectionTypeKind name:String}type NamedIntrospectionTypeNoDescription{name:String}"""Introspection values that can be children of other types for changes, such
+https://github.com/DefinitelyTyped/DefinitelyTyped/blob/659eb50d3/types/graphql/utilities/introspectionQuery.d.ts#L31-L37
+"""
+type NamedIntrospectionType {
+  kind: IntrospectionTypeKind
+  name: String
+  description: String
+}
+
+"""
+Introspection values that can be children of other types for changes, such as input fields, objects in interfaces, enum values. In the future, this value could become an interface to allow fields specific to the types
-returned.""" type NamedIntrospectionValue{description:String name:String printedType:String}type NamedIntrospectionValueNoDescription{name:String printedType:String}"""A non-federated service for a monolithic graph.""" type NonFederatedImplementingService{"""Timestamp of when this implementing service was created.""" createdAt:Timestamp!"""Identifies which graph this non-implementing service belongs to.
-Formerly known as "service_id".""" graphID:String!"""Specifies which variant of a graph this implementing service belongs to".
-Formerly known as "tag".""" graphVariant:String!}type NotFoundError implements Error{message:String!}"""Arbitrary JSON object""" scalar Object type OdysseyAttempt{completedAt:Timestamp id:ID!responses:[OdysseyResponse!]!startedAt:Timestamp!testId:String!}type OdysseyCertification{certificationId:String!earnedAt:Timestamp!id:ID!owner:OdysseyCertificationOwner}type OdysseyCertificationOwner{fullName:String!id:ID!}type OdysseyCourse{completedAt:Timestamp enrolledAt:Timestamp id:ID!}input OdysseyCourseInput{completedAt:Timestamp courseId:String!}type OdysseyResponse{correct:Boolean!id:ID!questionId:String!values:[OdysseyValue!]!}input OdysseyResponseInput{attemptId:ID!correct:Boolean!questionId:String!values:[String!]!}type OdysseyTask{completedAt:Timestamp id:ID!value:String}input OdysseyTaskInput{completedAt:Timestamp taskId:String!value:String}type OdysseyValue{id:ID!value:String!}enum OnboardingArchitecture{MONOLITH SUPERGRAPH}type Operation{id:ID!name:String signature:String truncated:Boolean!}type OperationAcceptedChange{acceptedAt:Timestamp!acceptedBy:Identity!change:StoredApprovedChange!checkID:ID!graphID:ID!id:ID!operationID:String!}"""Columns of OperationCheckStats.""" enum OperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_REQUESTS_COUNT}type OperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String serviceId:ID}"""Filter for data in OperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input OperationCheckStatsFilter{and:[OperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:OperationCheckStatsFilterIn not:OperationCheckStatsFilter or:[OperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in OperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input OperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type OperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input OperationCheckStatsOrderBySpec{column:OperationCheckStatsColumn!direction:Ordering!}type OperationCheckStatsRecord{"""Dimensions of OperationCheckStats that can be grouped by.""" groupBy:OperationCheckStatsDimensions!"""Metrics of OperationCheckStats that can be aggregated over.""" metrics:OperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type OperationCollection{createdAt:Timestamp!createdBy:Identity description:String """If a user has any of these roles, they will be able to edit this -collection. This will be null if and only if \`isShared\` is false""" editRoles:[UserPermission!]@deprecated(reason:"deprecated in favour of minEditRole")id:ID!isFavorite:Boolean!isSandbox:Boolean!isShared:Boolean!lastUpdatedAt:Timestamp!lastUpdatedBy:Identity minEditRole:UserPermission name:String!operation(id:ID!):OperationCollectionEntryResult operations:[OperationCollectionEntry!]!""" Permissions the current user has for this collection""" permissions:OperationCollectionPermissions!variants:[GraphVariant!]!}type OperationCollectionEntry{collection:OperationCollection!createdAt:Timestamp!createdBy:Identity currentOperationRevision:OperationCollectionEntryState!id:ID!lastUpdatedAt:Timestamp!lastUpdatedBy:Identity name:String!orderingIndex:String!}type OperationCollectionEntryMutation{moveToCollection(collectionId:ID!lowerOrderingBound:String upperOrderingBound:String):MoveOperationCollectionEntryResult!reorderEntry(lowerOrderingBound:String upperOrderingBound:String):UpdateOperationCollectionResult updateName(name:String!):UpdateOperationCollectionEntryResult updateValues(operationInput:OperationCollectionEntryStateInput!):UpdateOperationCollectionEntryResult}union OperationCollectionEntryMutationResult=NotFoundError|OperationCollectionEntryMutation|PermissionError union OperationCollectionEntryResult=NotFoundError|OperationCollectionEntry type OperationCollectionEntryState{body:String!createdAt:Timestamp!createdBy:Identity headers:[OperationHeader!]variables:String}input OperationCollectionEntryStateInput{body:String!headers:[OperationHeaderInput!]""" I'm assuming this is non null""" variables:String}type OperationCollectionMutation{addOperation(name:String!operationInput:OperationCollectionEntryStateInput!):AddOperationCollectionEntryResult addToVariant(variantRef:ID!):AddOperationCollectionToVariantResult!@deprecated(reason:"Will throw NotImplemented")delete:DeleteOperationCollectionResult deleteOperation(id:ID!):RemoveOperationCollectionEntryResult duplicateCollection(description:String isSandbox:Boolean!isShared:Boolean!name:String!variantRef:ID):DuplicateOperationCollectionResult!operation(id:ID!):OperationCollectionEntryMutationResult removeFromVariant(variantRef:ID!):RemoveOperationCollectionFromVariantResult!@deprecated(reason:"Will throw NotImplemented")setMinEditRole(editRole:UserPermission):UpdateOperationCollectionResult updateDescription(description:String):UpdateOperationCollectionResult 
updateEditRoles(editRoles:[UserPermission!]!):UpdateOperationCollectionResult@deprecated(reason:"Deprecated in favour of setMinEditRole")updateIsFavorite(isFavorite:Boolean!):UpdateOperationCollectionResult updateIsShared(isShared:Boolean!):UpdateOperationCollectionResult updateName(name:String!):UpdateOperationCollectionResult}type OperationCollectionPermissions{canEditOperations:Boolean!canManage:Boolean!canReadOperations:Boolean!}union OperationCollectionResult=NotFoundError|OperationCollection|PermissionError type OperationDocument{"""Operation document body""" body:String!"""Operation name""" name:String}input OperationDocumentInput{"""Operation document body""" body:String!"""Operation name""" name:String}type OperationHeader{name:String!value:String!}input OperationHeaderInput{name:String!value:String!}"""Operation name filter configuration for a graph.""" type OperationNameFilter{"""name of the operation by the user and reported alongside metrics""" name:String!}"""Options to filter by operation name.""" input OperationNameFilterInput{"""name of the operation set by the user and reported alongside metrics""" name:String!}type OperationValidationError{message:String!}type OperationsCheckResult{"""Operations affected by all changes in diff""" affectedQueries:[AffectedQuery!]"""Summary/counts for all changes in diff""" changeSummary:ChangeSummary!"""List of schema changes with associated affected clients and operations""" changes:[Change!]!"""Indication of the success of the change, either failure, warning, or notice.""" checkSeverity:ChangeSeverity!"""The variant that was used as a base to check against""" checkedVariant:GraphVariant!createdAt:Timestamp!id:ID!"""Number of affected query operations that are neither marked as SAFE or IGNORED""" numberOfAffectedOperations:Int!"""Number of operations that were validated during schema diff""" numberOfCheckedOperations:Int!workflowTask:OperationsCheckTask!}type OperationsCheckTask implements CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!"""The result of the check.""" result:OperationsCheckResult status:CheckWorkflowTaskStatus!workflow:CheckWorkflow!}enum Ordering{ASCENDING DESCENDING}"""A reusable invite link for an organization.""" type OrganizationInviteLink{createdAt:Timestamp!"""A joinToken that can be passed to Mutation.joinAccount to join the organization.""" joinToken:String!"""The role that the user will receive if they join the organization with this link.""" role:UserPermission!}type OrganizationSSO{defaultRole:UserPermission!idpid:ID!provider:OrganizationSSOProvider!}enum OrganizationSSOProvider{PINGONE}"""PagerDuty notification channel""" type PagerDutyChannel implements Channel{id:ID!name:String!routingKey:String!subscriptions:[ChannelSubscription!]!}"""PagerDuty notification channel parameters""" input PagerDutyChannelInput{name:String routingKey:String!}"""Schema for a subgraph with associated metadata""" type PartialSchema{"""Timestamp for when the partial schema was created""" createdAt:Timestamp!"""If this sdl is currently actively composed in the gateway, this is true""" isLive:Boolean!"""The GraphQL document for a subgraph schema.""" sdl:String!"""The path of deep storage to find the raw enriched partial schema file""" sdlPath:String!}"""Input for registering a partial schema to an implementing service. +returned. 
+""" +type NamedIntrospectionValue { + name: String + description: String + printedType: String +} + +"""An error that occurs when a requested object is not found.""" +type NotFoundError implements Error { + """The error message.""" + message: String! +} + +"""A list of saved GraphQL operations.""" +type OperationCollection { + """The timestamp when the collection was created.""" + createdAt: Timestamp! + """The user or other entity that created the collection.""" + createdBy: Identity + """The collection's description. A `null` description was never set, and empty string description was set to be empty string by a user, or other entity.""" + description: String + id: ID! + """Whether the current user has marked the collection as a favorite.""" + isFavorite: Boolean! + """Whether the collection is a [sandbox collection](https://www.apollographql.com/docs/studio/explorer/operation-collections/#sandbox-collections).""" + isSandbox: Boolean! + """Whether the collection is shared across its associated organization.""" + isShared: Boolean! + """The timestamp when the collection was most recently updated.""" + lastUpdatedAt: Timestamp! + """The user or other entity that most recently updated the collection.""" + lastUpdatedBy: Identity + """The minimum role a user needs to edit this collection. Valid values: null, CONSUMER, OBSERVER, DOCUMENTER, CONTRIBUTOR, GRAPH_ADMIN. This value is always `null` if `isShared` is `false`. If `null` when `isShared` is `true`, the minimum role is `GRAPH_ADMIN`.""" + minEditRole: UserPermission + """The collection's name.""" + name: String! + """Returns the operation in the collection with the specified ID, if any.""" + operation(id: ID!): OperationCollectionEntryResult + """A list of the GraphQL operations that belong to the collection.""" + operations: [OperationCollectionEntry!]! + """The permissions that the current user has for the collection.""" + permissions: OperationCollectionPermissions! +} + +"""A saved operation entry within an Operation Collection.""" +type OperationCollectionEntry { + """The timestamp when the entry was created.""" + createdAt: Timestamp! + """The user or other entity that created the entry.""" + createdBy: Identity + """Details of the entry's associated operation, such as its `body` and `variables`.""" + currentOperationRevision: OperationCollectionEntryState! + id: ID! + """The timestamp when the entry was most recently updated.""" + lastUpdatedAt: Timestamp! + """The user or other entity that most recently updated the entry.""" + lastUpdatedBy: Identity + """The entry's name.""" + name: String! + """The entry's lexicographical ordering index within its containing collection.""" + orderingIndex: String! 
+} + +"""Provides fields for modifying an operation in a collection.""" +type OperationCollectionEntryMutation { + """Updates the name of an operation.""" + updateName(name: String!): UpdateOperationCollectionEntryResult + """Updates the body, headers, and/or variables of an operation.""" + updateValues(operationInput: OperationCollectionEntryStateInput!): UpdateOperationCollectionEntryResult +} + +union OperationCollectionEntryMutationResult = NotFoundError | OperationCollectionEntryMutation | PermissionError + +"""Possible return values when querying for an entry in an operation collection (either the entry object or an `Error` object).""" +union OperationCollectionEntryResult = NotFoundError | OperationCollectionEntry + +"""The most recent body, variable and header values of a saved operation entry.""" +type OperationCollectionEntryState { + """The raw body of the entry's GraphQL operation.""" + body: String! + """Headers for the entry's GraphQL operation.""" + headers: [OperationHeader!] + """Variables for the entry's GraphQL operation, as a JSON string.""" + variables: String +} + +"""Fields for creating or modifying an operation collection entry.""" +input OperationCollectionEntryStateInput { + """The operation's query body.""" + body: String! + """The operation's headers.""" + headers: [OperationHeaderInput!] + """The operation's variables.""" + variables: String +} + +"""Provides fields for modifying an [operation collection](https://www.apollographql.com/docs/studio/explorer/operation-collections/).""" +type OperationCollectionMutation { + """Adds an operation to this collection.""" + addOperation(name: String!, operationInput: OperationCollectionEntryStateInput!): AddOperationCollectionEntryResult + """Adds operations to this collection.""" + addOperations(operations: [AddOperationInput!]!): AddOperationCollectionEntriesResult + """Deletes this operation collection. This also deletes all of the collection's associated operations.""" + delete: DeleteOperationCollectionResult + """Deletes an operation from this collection.""" + deleteOperation(id: ID!): RemoveOperationCollectionEntryResult + operation(id: ID!): OperationCollectionEntryMutationResult + """Updates the minimum role a user needs to be able to modify this collection.""" + setMinEditRole(editRole: UserPermission): UpdateOperationCollectionResult + """Updates this collection's description.""" + updateDescription(description: String): UpdateOperationCollectionResult + """Updates whether the current user has marked this collection as a favorite.""" + updateIsFavorite(isFavorite: Boolean!): UpdateOperationCollectionResult + """Updates whether this collection is shared across its associated organization.""" + updateIsShared(isShared: Boolean!): UpdateOperationCollectionResult + """Updates this operation collection's name.""" + updateName(name: String!): UpdateOperationCollectionResult +} + +"""Whether the current user can perform various actions on the associated collection.""" +type OperationCollectionPermissions { + """Whether the current user can edit operations in the associated collection.""" + canEditOperations: Boolean! + """Whether the current user can delete or update the associated collection's metadata, such as its name and description.""" + canManage: Boolean! + """Whether the current user can read operations in the associated collection.""" + canReadOperations: Boolean! 
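+  # A minimal usage sketch (comment only), assuming a valid collection ID:
+  # renaming a collection combines `Mutation.operationCollection` with the
+  # `updateName` field defined above.
+  #
+  #   mutation RenameCollection($id: ID!) {
+  #     operationCollection(id: $id) {
+  #       updateName(name: "Checkout flows") {
+  #         __typename
+  #       }
+  #     }
+  #   }
+  #
+  # `__typename` is selected because not every member of the
+  # UpdateOperationCollectionResult union appears in this hunk, and
+  # "Checkout flows" is a placeholder name.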
+}
+
+union OperationCollectionResult = NotFoundError | OperationCollection | PermissionError | ValidationError
+
+"""Saved headers on a saved operation."""
+type OperationHeader {
+  """The header's name."""
+  name: String!
+  """The header's value."""
+  value: String!
+}
+
+input OperationHeaderInput {
+  """The header's name."""
+  name: String!
+  """The header's value."""
+  value: String!
+}
+
+"""Options to filter by operation name."""
+input OperationNameFilterInput {
+  """name of the operation set by the user and reported alongside metrics"""
+  name: String!
+  version: String
+}
+
+type OperationsCheckResult {
+  id: ID!
+  """Indication of the success of the change, either failure, warning, or notice."""
+  checkSeverity: ChangeSeverity!
+  """Number of operations that were validated during schema diff"""
+  numberOfCheckedOperations: Int!
+  """List of schema changes with associated affected clients and operations"""
+  changes: [Change!]!
+  """Summary/counts for all changes in diff"""
+  changeSummary: ChangeSummary!
+  """Operations affected by all changes in diff"""
+  affectedQueries: [AffectedQuery!]
+  """Number of affected query operations that are neither marked as SAFE nor IGNORED"""
+  numberOfAffectedOperations: Int!
+  createdAt: Timestamp!
+}
+
+type OperationsCheckTask implements CheckWorkflowTask {
+  completedAt: Timestamp
+  createdAt: Timestamp!
+  id: ID!
+  status: CheckWorkflowTaskStatus!
+  targetURL: String
+  workflow: CheckWorkflow!
+  """
+  The result of the operations check. This will be null when the task is initializing or running,
+  or when the build task fails (which is a prerequisite task to this one).
+  """
+  result: OperationsCheckResult
+}
+
+type Order {
+  id: ID!
+  orderType: OrderType!
+  status: OrderStatus!
+  reason: String
+  logs(first: Int, offset: Int): [LogMessage!]!
+  router: Router!
+}
+
+union OrderOrError = Order
+
+enum OrderStatus {
+  PENDING
+  COMPLETED
+  ROLLING_BACK
+  ERRORED
+  SUPERSEDED
+}
+
+enum OrderType {
+  CREATE_ROUTER
+  DESTROY_ROUTER
+  UPDATE_ROUTER
+}
+
+"""The schema for a single published subgraph in Studio."""
+type SubgraphSchema {
+  """The subgraph schema document as SDL."""
+  sdl: String!
+}
+
+"""
+Input for registering a partial schema to an implementing service.
 One of the fields must be specified (validated server-side).
 If a new partialSchemaSDL is passed in, this operation will store it before
@@ -77,96 +1452,529 @@ creating the association. If both the sdl and hash are specified, an error
 will be thrown if the provided hash doesn't match our hash of the sdl contents.
If the sdl field is specified, -the hash does not need to be and will be computed server-side.""" input PartialSchemaInput{"""Hash of the partial schema to associate; error is thrown if only the hash is -specified and the hash has not been seen before""" hash:String """Contents of the partial schema in SDL syntax, but may reference types -that aren't defined in this document""" sdl:String}type PermissionError implements Error{message:String!}type PromoteSchemaError{code:PromoteSchemaErrorCode!message:String!}enum PromoteSchemaErrorCode{CANNOT_PROMOTE_SCHEMA_FOR_FEDERATED_GRAPH}type PromoteSchemaResponse{code:PromoteSchemaResponseCode!tag:SchemaTag!}enum PromoteSchemaResponseCode{NO_CHANGES_DETECTED PROMOTION_SUCCESS}union PromoteSchemaResponseOrError=PromoteSchemaError|PromoteSchemaResponse type Protobuf{json:String!object:Object!raw:Blob!text:String!}type Query{"""Account by ID""" account(id:ID!):Account """Retrieve account by billing provider identifier""" accountByBillingCode(id:ID!):Account """Retrieve account by internal id""" accountByInternalID(id:ID!):Account """Whether an account ID is available for mutation{newAccount(id:)}""" accountIDAvailable(id:ID!):Boolean!"""All accounts""" allAccounts(search:String tier:BillingPlanTier):[Account!]"""All available plans""" allPlans:[BillingPlan!]!allPublicVariants:[GraphVariant!]"""All services""" allServices(search:String):[Service!]"""All timezones with their offsets from UTC""" allTimezoneOffsets:[TimezoneOffset!]!"""All users""" allUsers(search:String):[User!]"""Look up a plan by ID""" billingPlan(id:ID):BillingPlanV2 """All available plans""" billingPlans:[BillingPlanV2!]!"""If this is true, the user is an Apollo administrator who can ignore restrictions based purely on billing plan.""" canBypassPlanRestrictions:Boolean!diffSchemas(baseSchema:String!nextSchema:String!):[Change!]!"""Get the unsubscribe settings for a given email.""" emailPreferences(email:String!token:String!):EmailPreferences experimentalFeatures:GlobalExperimentalFeatures!"""Address of the Studio frontend.""" frontendUrlRoot:String!"""Access a graph by ID.""" graph(id:ID!):Service internalActiveCronJobs:[CronJob!]!internalAdminUsers:[InternalAdminUser!]internalUnresolvedCronExecutionFailures:[CronExecution!]!"""User or graph querying the API, null if not authenticated.""" me:Identity odysseyCertification(id:ID!):OdysseyCertification operationCollection(id:ID!):OperationCollectionResult!operationCollectionEntries(collectionEntryIds:[ID!]!):[OperationCollectionEntry!]!"""Access an organization by ID.""" organization(id:ID!):Account """Look up a plan by ID""" plan(id:ID):BillingPlan """A list of public variants that have been selected to be shown on our Graph Directory.""" publiclyListedVariants:[GraphVariant!]"""Service by ID""" service(id:ID!):Service """Query statistics across all services. For admins only; normal users must go through AccountsStatsWindow or ServiceStatsWindow.""" stats(from:Timestamp!"""Granularity of buckets. 
Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):StatsWindow!"""Get the studio settings for the current user""" studioSettings:UserSettings """The plan started by AccountMutation.startTeamSubscription""" teamBillingPlan(billingPeriod:BillingPeriod!):BillingPlanV2!"""The plan started by AccountMutation.startTeamSubscription""" teamPlan(billingPeriod:BillingPeriod!):BillingPlan!"""Schema transformation for the Apollo platform API. Renames types. Internal to Apollo.""" transformSchemaForPlatformApi(baseSchema:GraphQLDocument!):GraphQLDocument """The plan started by AccountMutation.startTrial""" trialBillingPlan:BillingPlanV2!"""The plan started by AccountMutation.startTrial""" trialPlan:BillingPlan!"""User by ID""" user(id:ID!):User """Access a variant by reference of the form `graphID@variantName`, or `graphID` for the default `current` variant. -Returns null when the graph or variant do not exist, or when the graph cannot be accessed. -Note that we can return more types implementing Error in the future.""" variant(ref:ID!):GraphVariantLookup}"""query documents to validate against""" input QueryDocumentInput{document:String}type QueryPlan{json:String!object:Object!text:String!}"""Columns of QueryStats.""" enum QueryStatsColumn{ACCOUNT_ID CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type QueryStatsDimensions{accountId:ID clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in QueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input QueryStatsFilter{"""Selects rows whose accountId dimension equals the given value if not null. To query for the null value, use {in: {accountId: [null]}} instead.""" accountId:ID and:[QueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:QueryStatsFilterIn not:QueryStatsFilter or:[QueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. 
To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in QueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input QueryStatsFilterIn{"""Selects rows whose accountId dimension is in the given list. A null value in the list means a row with null for that dimension.""" accountId:[ID]"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type QueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input QueryStatsOrderBySpec{column:QueryStatsColumn!direction:Ordering!}type QueryStatsRecord{"""Dimensions of QueryStats that can be grouped by.""" groupBy:QueryStatsDimensions!"""Metrics of QueryStats that can be aggregated over.""" metrics:QueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Query Trigger""" type QueryTrigger implements ChannelSubscription{channels:[Channel!]!comparisonOperator:ComparisonOperator!enabled:Boolean!excludedOperationNames:[String!]!id:ID!metric:QueryTriggerMetric!operationNames:[String!]!percentile:Float scope:QueryTriggerScope!serviceId:String!state:QueryTriggerState!threshold:Float!variant:String window:QueryTriggerWindow!}"""Query trigger""" input QueryTriggerInput{channelIds:[String!]comparisonOperator:ComparisonOperator!enabled:Boolean excludedOperationNames:[String!]metric:QueryTriggerMetric!operationNames:[String!]percentile:Float scope:QueryTriggerScope threshold:Float!variant:String window:QueryTriggerWindow!}enum QueryTriggerMetric{"""Number of requests within the window that resulted in an error. Ignores `percentile`.""" ERROR_COUNT """Number of error requests divided by total number of requests. Ignores `percentile`.""" ERROR_PERCENTAGE """Number of requests within the window. Ignores `percentile`.""" REQUEST_COUNT """Request latency in ms. 
Requires `percentile`.""" REQUEST_SERVICE_TIME}enum QueryTriggerScope{ALL ANY UNRECOGNIZED}"""Query trigger state""" type QueryTriggerState{evaluatedAt:Timestamp!lastTriggeredAt:Timestamp operations:[QueryTriggerStateOperation!]!triggered:Boolean!}type QueryTriggerStateOperation{count:Long!operation:String!triggered:Boolean!value:Float!}enum QueryTriggerWindow{FIFTEEN_MINUTES FIVE_MINUTES ONE_MINUTE UNRECOGNIZED}"""The documentation for a graph variant, as display in Studio.""" type Readme{"""Content of the document.""" content:String!id:ID!"""Last time the document was updated.""" lastUpdatedAt:Timestamp!"""Identity of who updated the document last.""" lastUpdatedBy:Identity}type RegisterOperationsMutationResponse{invalidOperations:[InvalidOperation!]newOperations:[RegisteredOperation!]registrationSuccess:Boolean!}input RegisteredClientIdentityInput{identifier:String!name:String!version:String}type RegisteredOperation{signature:ID!}input RegisteredOperationInput{document:String metadata:RegisteredOperationMetadataInput signature:ID!}input RegisteredOperationMetadataInput{"""This will be used to link existing records in Engine to a new ID.""" engineSignature:String}type RegistryApiKey{keyName:String token:String!}type RegistryStatsWindow{schemaCheckStats:[AccountChecksStatsRecord!]!schemaPublishStats:[AccountPublishesStatsRecord!]!}type RegistrySubscription implements ChannelSubscription{channel:Channel channels:[Channel!]!@deprecated(reason:"Use channels list instead")createdAt:Timestamp!enabled:Boolean!id:ID!lastUpdatedAt:Timestamp!options:SubscriptionOptions!variant:String}type RelaunchComplete{latestLaunch:Launch!updated:Boolean!}type RelaunchError{message:String!}union RelaunchResult=RelaunchComplete|RelaunchError union RemoveOperationCollectionEntryResult=OperationCollection|PermissionError union RemoveOperationCollectionFromVariantResult=GraphVariant|NotFoundError|PermissionError|ValidationError union ReorderOperationCollectionResult=OperationCollection|PermissionError type ReportSchemaError implements ReportSchemaResult{code:ReportSchemaErrorCode!inSeconds:Int!message:String!withCoreSchema:Boolean!}enum ReportSchemaErrorCode{BOOT_ID_IS_NOT_VALID_UUID BOOT_ID_IS_REQUIRED CORE_SCHEMA_HASH_IS_NOT_SCHEMA_SHA256 CORE_SCHEMA_HASH_IS_REQUIRED CORE_SCHEMA_HASH_IS_TOO_LONG EXECUTABLE_SCHEMA_ID_IS_NOT_SCHEMA_SHA256 EXECUTABLE_SCHEMA_ID_IS_REQUIRED EXECUTABLE_SCHEMA_ID_IS_TOO_LONG GRAPH_REF_INVALID_FORMAT GRAPH_REF_IS_REQUIRED GRAPH_VARIANT_DOES_NOT_MATCH_REGEX GRAPH_VARIANT_IS_REQUIRED LIBRARY_VERSION_IS_TOO_LONG PLATFORM_IS_TOO_LONG RUNTIME_VERSION_IS_TOO_LONG SCHEMA_IS_NOT_PARSABLE SCHEMA_IS_NOT_VALID SERVER_ID_IS_TOO_LONG USER_VERSION_IS_TOO_LONG}type ReportSchemaResponse implements ReportSchemaResult{inSeconds:Int!withCoreSchema:Boolean!}interface ReportSchemaResult{inSeconds:Int!withCoreSchema:Boolean!}type ReportServerInfoError implements ReportServerInfoResult{code:ReportSchemaErrorCode!inSeconds:Int!message:String!withExecutableSchema:Boolean!}type ReportServerInfoResponse implements ReportServerInfoResult{inSeconds:Int!withExecutableSchema:Boolean!}interface ReportServerInfoResult{inSeconds:Int!withExecutableSchema:Boolean!}enum Resolution{R1D R1H R1M R5M R6H R15M}enum ResponseHints{NONE SAMPLE_RESPONSES SUBGRAPHS TIMINGS TRACE_TIMINGS}type RoleOverride{graph:Service!lastUpdatedAt:Timestamp!role:UserPermission!user:User!}input RoverArgumentInput{key:String!value:Object}"""SHA-256 hash, represented in lowercase hexadecimal""" scalar SHA256 type ScheduledSummary implements 
ChannelSubscription{channel:Channel@deprecated(reason:"Use channels list instead")channels:[Channel!]!enabled:Boolean!id:ID!timezone:String!variant:String!}"""A GraphQL schema document, which may optionally map back to context with which the schema was ingested.""" type Schema{createTemporaryURL(expiresInSeconds:Int!=86400):TemporaryURL """The timestamp of initial ingestion of a schema to a graph.""" createdAt:Timestamp!"""The raw GraphQL document for the schema in question""" document:GraphQLDocument!"""The number of fields; this includes user defined fields only, excluding built-in types and fields""" fieldCount:Int!gitContext:GitContext """The hex representation of the SHA256 of the GraphQL document.""" hash:ID!introspection:IntrospectionSchema!"""The number of types; this includes user defined types only, excluding built-in types""" typeCount:Int!}"""Represents an error from running schema composition on a list of subgraph definitions.""" type SchemaCompositionError{"""A machine-readable error code.""" code:String """Affected locations.""" locations:[SourceLocation]!"""A human-readable locations.""" message:String!}"""The difference between two schemas, as usually computed during a schema check.""" type SchemaDiff{"""Clients affected by all changes in the diff.""" affectedClients:[AffectedClient!]@deprecated(reason:"Unsupported.")"""Operations affected by all changes in the diff.""" affectedQueries:[AffectedQuery!]"""Numeric summary of all changes in the diff.""" changeSummary:ChangeSummary!"""List of schema changes with associated affected clients and operations.""" changes:[Change!]!"""Number of affected query operations that are neither marked as safe or ignored.""" numberOfAffectedOperations:Int!"""Number of operations that were validated during the check.""" numberOfCheckedOperations:Int """Indication of the success of the change; either failure, warning, or notice.""" severity:ChangeSeverity!"""The tag against which this diff was created""" tag:String type:ChangeType!@deprecated(reason:"use severity instead")"""Configuration of validation""" validationConfig:SchemaDiffValidationConfig}type SchemaDiffValidationConfig{"""Clients to ignore during validation.""" excludedClients:[ClientInfoFilterOutput!]"""Operation names to ignore during validation.""" excludedOperationNames:[OperationNameFilter]"""delta in seconds from current time that determines the start of the window -for reported metrics included in a schema diff. A day window from the present -day would have a `from` value of -86400. In rare cases, this could be an ISO -timestamp if the user passed one in on diff creation""" from:Timestamp """Operation IDs to ignore during validation.""" ignoredOperations:[ID!]"""Variants to include during validation.""" includedVariants:[String!]"""Minimum number of requests within the window for a query to be considered.""" queryCountThreshold:Int """Number of requests within the window for a query to be considered, relative to -total request count. Expected values are between 0 and 0.05 (minimum 5% of -total request volume)""" queryCountThresholdPercentage:Float """delta in seconds from current time that determines the end of the -window for reported metrics included in a schema diff. A day window -from the present day would have a `to` value of -0. 
In rare -cases, this could be an ISO timestamp if the user passed one in on diff -creation""" to:Timestamp}type SchemaPublishSubscription implements ChannelSubscription{channels:[Channel!]!createdAt:Timestamp!enabled:Boolean!id:ID!lastUpdatedAt:Timestamp!variant:String}input SchemaReport{"""A randomly generated UUID, immutable for the lifetime of the edge server runtime.""" bootId:String!"""The hex SHA256 hash of the schema being reported. Note that for a GraphQL server with a core schema, this should be the core schema, not the API schema.""" coreSchemaHash:String!"""The graph ref (eg, 'id@variant')""" graphRef:String!"""The version of the edge server reporting agent, e.g. apollo-server-2.8, graphql-java-3.1, etc. length must be <= 256 characters.""" libraryVersion:String """The infra environment in which this edge server is running, e.g. localhost, Kubernetes, AWS Lambda, Google CloudRun, AWS ECS, etc. length must be <= 256 characters.""" platform:String """The runtime in which the edge server is running, e.g. node 12.03, zulu8.46.0.19-ca-jdk8.0.252-macosx_x64, etc. length must be <= 256 characters.""" runtimeVersion:String """If available, an identifier for the edge server instance, such that when restarting this instance it will have the same serverId, with a different bootId. For example, in Kubernetes this might be the pod name. Length must be <= 256 characters.""" serverId:String """An identifier used to distinguish the version (from the user's perspective) of the edge server's code itself. For instance, the git sha of the server's repository or the docker sha of the associated image this server runs with. Length must be <= 256 characters.""" userVersion:String}"""A specific publication of a graph variant.""" type SchemaTag{"""The result of composition, including either a supergraph schema or errors, -executed during this publication. Only available with managed federation.""" compositionResult:CompositionResult createdAt:Timestamp!"""Differences with the schema from the previous successful publication.""" diffToPrevious:SchemaDiff gitContext:GitContext """List of previously uploaded SchemaTags under the same tag name, starting with -the selected published schema record. Sorted in reverse chronological order -by creation date (newest publish first). +the hash does not need to be and will be computed server-side. +""" +input PartialSchemaInput { + """ + Contents of the partial schema in SDL syntax, but may reference types + that aren't defined in this document + """ + sdl: String + """ + Hash of the partial schema to associate; error is thrown if only the hash is + specified and the hash has not been seen before + """ + hash: String +} -Note: This does not include the history of checked schemas""" history(includeUnchanged:Boolean!=true limit:Int!=3 offset:Int=0):[SchemaTag!]!"""Number of tagged schemas created under the same tag name. -Also represents the maximum size of the history's limit argument.""" historyLength:Int!"""Number of schemas tagged prior to this one under the same tag name, its position -in the tag history.""" historyOrder:Int!"""The identifier for this specific publication.""" id:ID!"""Time of publication.""" publishedAt:Timestamp!"""The Identity that published this schema and their client info, or null if this isn't -a publish. 
Sub-fields may be null if they weren't recorded.""" publishedBy:IdentityAndClientInfo """Indicates the schemaTag of the schema's original upload, null if this is the -first upload of the schema.""" reversionFrom:SchemaTag """The published schema.""" schema:Schema!slackNotificationBody(graphDisplayName:String!):String tag:String!@deprecated(reason:"Please use variant { name } instead")"""The graph variant this belongs to.""" variant:GraphVariant!webhookNotificationBody:String!}"""How many seats of the given types does an organization have (regardless of plan type)?""" type Seats{"""How many members that are free in this organization.""" free:Int!"""How many members that are not free in this organization.""" fullPrice:Int!}type SemanticChange{"""Target arg of change made.""" argNode:NamedIntrospectionArg """Node related to the top level node that was changed, such as a field in an object, -a value in an enum or the object of an interface""" childNode:NamedIntrospectionValue """Semantic metadata about the type of change""" definition:ChangeDefinition!"""Top level node affected by the change""" parentNode:NamedIntrospectionType}"""A graph in Apollo Studio represents a graph in your organization. +"""An error that occurs when the current user doesn't have sufficient permissions to perform an action.""" +type PermissionError implements Error { + """The error message.""" + message: String! +} + +"""An error related to an organization's Apollo Studio plan.""" +type PlanError { + """The error message.""" + message: String! +} + +type Query { + """Returns the root URL of the Apollo Studio frontend.""" + frontendUrlRoot: String! + """Returns details of the graph with the provided ID.""" + graph(id: ID!): Graph + """Returns details of the authenticated `User` or `Graph` executing this query. If this is an unauthenticated query (i.e., no API key is provided), this field returns null.""" + me: Identity + """Returns details of the Studio organization with the provided ID.""" + organization(id: ID!): Organization + """Returns details of the Apollo user with the provided ID.""" + user(id: ID!): User + """Returns details of a Studio graph variant with the provided graph ref. A graph ref has the format `graphID@variantName` (or just `graphID` for the default variant `current`). Returns null if the graph or variant doesn't exist, or if the graph isn't accessible by the current actor.""" + variant(ref: ID!): GraphVariantLookup + """Returns the [operation collection](https://www.apollographql.com/docs/studio/explorer/operation-collections/) for the provided ID.""" + operationCollection(id: ID!): OperationCollectionResult! +} + +"""The README documentation for a graph variant, which is displayed in Studio.""" +type Readme { + """The contents of the README in plaintext.""" + content: String! + """The README's unique ID. `a15177c0-b003-4837-952a-dbfe76062eb1` for the default README""" + id: ID! + """The actor that most recently updated the README (usually a `User`). `null` for the default README, or if the `User` was deleted.""" + lastUpdatedBy: Identity + """The timestamp when the README was most recently updated. `null` for the default README""" + lastUpdatedTime: Timestamp +} + +union RemoveOperationCollectionEntryResult = OperationCollection | PermissionError + +type Router { + """ + Last time when the Cloud Router was updated + + If the Cloud Router was never updated, this value will be null + """ + updatedAt: NaiveDateTime + """Current status of the Cloud Router""" + status: RouterStatus! 
+ """Current version of the Cloud Router""" + routerVersion: RouterVersion! + """ + URL where the Cloud Router can be found + + This will be null if the Cloud Router is in a deleted status + """ + routerUrl: String + """Retrieves a specific Order related to this Cloud Router""" + order(orderId: ID!): Order + """Retrieves all Orders related to this Cloud Router""" + orders(first: Int, offset: Int): [Order!]! + """Return the list of secrets for this Cloud Router with their hash values""" + secrets: [Secret!]! +} + +enum RouterStatus { + CREATING + UPDATING + DELETING + ROLLING_BACK + RUNNING + DELETED +} + +type RouterVersion { + version: String! + core: String! + build: String! + status: Status! + configVersion: String! + configSchema: String! +} + +"""A GraphQL schema document and associated metadata.""" +type Schema { + """The GraphQL schema document's SHA256 hash, represented as a hexadecimal string.""" + hash: ID! + """The GraphQL schema document.""" + document: GraphQLDocument! +} + +"""An error that occurred while running schema composition on a set of subgraph schemas.""" +type SchemaCompositionError { + """A human-readable message describing the error.""" + message: String! + """Source locations related to the error.""" + locations: [SourceLocation]! + """A machine-readable error code.""" + code: String +} + +"""The result of computing the difference between two schemas, usually as part of schema checks.""" +type SchemaDiff { + """Indicates the overall safety of the changes included in the diff, based on operation history (e.g., `FAILURE` or `NOTICE`).""" + severity: ChangeSeverity! + """A list of all schema changes in the diff, including their severity.""" + changes: [Change!]! + """Numeric summaries for each type of change in the diff.""" + changeSummary: ChangeSummary! + """Operations affected by all changes in the diff.""" + affectedQueries: [AffectedQuery!] + """The number of GraphQL operations that were validated during the check.""" + numberOfCheckedOperations: Int + """The number of GraphQL operations affected by the diff's changes that are neither marked as safe nor ignored.""" + numberOfAffectedOperations: Int! +} + +"""Contains details for an individual publication of an individual graph variant.""" +type SchemaPublication { + """ + The variant that was published to." + """ + variant: GraphVariant! + """The schema that was published to the variant.""" + schema: Schema! + """The result of federated composition executed for this publication. This result includes either a supergraph schema or error details, depending on whether composition succeeded. This value is null when the publication is for a non-federated graph.""" + compositionResult: CompositionResult + """The timestamp when the variant was published to.""" + publishedAt: Timestamp! + """A schema diff comparing against the schema from the most recent previous successful publication.""" + diffToPrevious: SchemaDiff +} + +type Secret { + createdAt: DateTime! + name: String! + hash: String! +} + +type SemanticChange { + """Semantic metadata about the type of change""" + definition: ChangeDefinition! + """Top level node affected by the change""" + parentNode: NamedIntrospectionType + """ + Node related to the top level node that was changed, such as a field in an object, + a value in an enum or the object of an interface + """ + childNode: NamedIntrospectionValue + """Target arg of change made.""" + argNode: NamedIntrospectionArg +} + +""" +A graph in Apollo Studio represents a graph in your organization. 
Each graph has one or more variants, which correspond to the different environments where that graph runs (such as staging and production). -Each variant has its own GraphQL schema, which means schemas can differ between environments.""" type Service implements Identity{"""Organization that this graph belongs to.""" account:Account accountId:ID apiKeys:[GraphApiKey!]"""A view of the identity as an Actor type.""" asActor:Actor!"""Get an URL to which an avatar image can be uploaded. Client uploads by sending a PUT request -with the image data to MediaUploadInfo.url. Client SHOULD set the "Content-Type" header to the -browser-inferred MIME type, and SHOULD set the "x-apollo-content-filename" header to the -filename, if such information is available. Client MUST set the "x-apollo-csrf-token" header to -MediaUploadInfo.csrfToken.""" avatarUpload:AvatarUploadResult """Get an image URL for the service's avatar. Note that CORS is not enabled for these URLs. The size -argument is used for bandwidth reduction, and should be the size of the image as displayed in the -application. Apollo's media server will downscale larger images to at least the requested size, -but this will not happen for third-party media servers.""" avatarUrl(size:Int!=40):String """Get available notification endpoints""" channels(channelIds:[ID!]):[Channel!]"""Get check configuration for this graph.""" checkConfiguration:CheckConfiguration """Get a check workflow for this graph by its ID""" checkWorkflow(id:ID!):CheckWorkflow """Get check workflows for this graph ordered by creation time, most recent first.""" checkWorkflows(filter:CheckFilterInput limit:Int!=100):[CheckWorkflow!]!"""List of options available for filtering checks for this graph by author. -If a filter is passed, constrains results to match the filter.""" checksAuthorOptions(filter:CheckFilterInput):[String!]!"""List of options available for filtering checks for this graph by branch. -If a filter is passed, constrains results to match the filter.""" checksBranchOptions(filter:CheckFilterInput):[String!]!"""List of options available for filtering checks for this graph by subgraph name. -If a filter is passed, constrains results to match the filter.""" checksSubgraphOptions(filter:CheckFilterInput):[String!]!"""Given a graphCompositionID, return the results of composition. This can represent either a validation or a publish.""" compositionResultById(id:ID!):CompositionResult createdAt:Timestamp!createdBy:Identity datadogMetricsConfig:DatadogMetricsConfig defaultBuildPipelineTrack:String deletedAt:Timestamp description:String devGraphOwner:User """Get a GraphQL document by hash""" document(hash:SHA256):GraphQLDocument """When this is true, this graph will be hidden from non-admin members of the org who haven't been explicitly assigned a -role on this graph.""" hiddenFromUninvitedNonAdminAccountMembers:Boolean!"""Globally unique identifier for this graph.""" id:ID!"""List of subgraphs that comprise a graph. A non-federated graph should have a single implementing service. -Set includeDeleted to see deleted subgraphs.""" implementingServices(graphVariant:String!includeDeleted:Boolean):GraphImplementors lastReportedAt(graphVariant:String):Timestamp """Current identity, null if not authenticated.""" me:Identity """The composition result that was most recently published to a graph variant.""" mostRecentCompositionPublish(graphVariant:String!):CompositionPublishResult """Permissions of the current user in this graph.""" myRole:UserPermission """Name of this graph. 
Note that this field is deprecated.""" name:String!@deprecated(reason:"Use Service.title")onboardingArchitecture:OnboardingArchitecture operation(id:ID!):Operation """Gets the operations and their approved changes for this graph, checkID, and operationID.""" operationsAcceptedChanges(checkID:ID!operationID:String!):[OperationAcceptedChange!]!"""Get an operations check result for a specific check ID""" operationsCheck(checkID:ID!):OperationsCheckResult """Get query triggers for a given variant. If variant is null all the triggers for this service will be gotten.""" queryTriggers(graphVariant:String operationNames:[String!]):[QueryTrigger!]readme:Readme """Registry specific stats for this graph.""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow """Whether registry subscriptions (with any options) are enabled. If variant is not passed, returns true if configuration is present for any variant""" registrySubscriptionsEnabled(graphVariant:String):Boolean!@deprecated(reason:"This field will be removed")reportingEnabled:Boolean!"""The list of members that can access this graph, accounting for graph role overrides""" roleOverrides:[RoleOverride!]"""Which permissions the current user has for interacting with this graph""" roles:ServiceRoles scheduledSummaries:[ScheduledSummary!]!"""Get a schema by hash or current tag""" schema(hash:ID tag:String):Schema """The current publish associated to a given variant (with 'tag' as the variant name).""" schemaTag(tag:String!):SchemaTag schemaTagById(id:ID!):SchemaTag """Get schema tags, with optional filtering to a set of tags. Always sorted by creation -date in reverse chronological order.""" schemaTags(tags:[String!]):[SchemaTag!]stats(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):ServiceStatsWindow!@deprecated(reason:"use Service.statsWindow instead")statsWindow(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):ServiceStatsWindow """Generate a test schema publish notification body""" testSchemaPublishBody(variant:String!):String!"""Name of this graph.""" title:String!trace(id:ID!):Trace traceStorageEnabled:Boolean!"""A particular variant often representing a live traffic environment (such as "dev", "staging", or "prod"). -Each variant can represent a specific URL or destination to query at, analytics, and its own schema history. -Pass in a name to get a specific variant. Use `Graph.variants` to get a list of variants.""" variant(name:String!):GraphVariant """The list of variants that exist for this graph""" variants:[GraphVariant!]!}"""Columns of ServiceBillingUsageStats.""" enum ServiceBillingUsageStatsColumn{OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG TIMESTAMP}type ServiceBillingUsageStatsDimensions{operationCountProvidedExplicitly:String schemaTag:String}"""Filter for data in ServiceBillingUsageStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceBillingUsageStatsFilter{and:[ServiceBillingUsageStatsFilter!]in:ServiceBillingUsageStatsFilterIn not:ServiceBillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. 
To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[ServiceBillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceBillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceBillingUsageStatsFilterIn{"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceBillingUsageStatsMetrics{operationCount:Long!}input ServiceBillingUsageStatsOrderBySpec{column:ServiceBillingUsageStatsColumn!direction:Ordering!}type ServiceBillingUsageStatsRecord{"""Dimensions of ServiceBillingUsageStats that can be grouped by.""" groupBy:ServiceBillingUsageStatsDimensions!"""Metrics of ServiceBillingUsageStats that can be aggregated over.""" metrics:ServiceBillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceEdgeServerInfos.""" enum ServiceEdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID TIMESTAMP USER_VERSION}type ServiceEdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID userVersion:String}"""Filter for data in ServiceEdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceEdgeServerInfosFilter{and:[ServiceEdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:ServiceEdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:ServiceEdgeServerInfosFilter or:[ServiceEdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose userVersion dimension equals the given value if not null. To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in ServiceEdgeServerInfos. Fields match if the corresponding dimension's value is in the given list. 
All fields are implicitly ANDed together.""" input ServiceEdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input ServiceEdgeServerInfosOrderBySpec{column:ServiceEdgeServerInfosColumn!direction:Ordering!}type ServiceEdgeServerInfosRecord{"""Dimensions of ServiceEdgeServerInfos that can be grouped by.""" groupBy:ServiceEdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceErrorStats.""" enum ServiceErrorStatsColumn{CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceErrorStatsDimensions{clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String}"""Filter for data in ServiceErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceErrorStatsFilter{and:[ServiceErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ServiceErrorStatsFilterIn not:ServiceErrorStatsFilter or:[ServiceErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceErrorStats. Fields match if the corresponding dimension's value is in the given list. 
All fields are implicitly ANDed together.""" input ServiceErrorStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input ServiceErrorStatsOrderBySpec{column:ServiceErrorStatsColumn!direction:Ordering!}type ServiceErrorStatsRecord{"""Dimensions of ServiceErrorStats that can be grouped by.""" groupBy:ServiceErrorStatsDimensions!"""Metrics of ServiceErrorStats that can be aggregated over.""" metrics:ServiceErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldExecutions.""" enum ServiceFieldExecutionsColumn{ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG TIMESTAMP}type ServiceFieldExecutionsDimensions{fieldName:String parentType:String schemaTag:String}"""Filter for data in ServiceFieldExecutions. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldExecutionsFilter{and:[ServiceFieldExecutionsFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldExecutionsFilterIn not:ServiceFieldExecutionsFilter or:[ServiceFieldExecutionsFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldExecutions. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldExecutionsFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldExecutionsMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input ServiceFieldExecutionsOrderBySpec{column:ServiceFieldExecutionsColumn!direction:Ordering!}type ServiceFieldExecutionsRecord{"""Dimensions of ServiceFieldExecutions that can be grouped by.""" groupBy:ServiceFieldExecutionsDimensions!"""Metrics of ServiceFieldExecutions that can be aggregated over.""" metrics:ServiceFieldExecutionsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldLatencies.""" enum ServiceFieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceFieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String}"""Filter for data in ServiceFieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldLatenciesFilter{and:[ServiceFieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldLatenciesFilterIn not:ServiceFieldLatenciesFilter or:[ServiceFieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input ServiceFieldLatenciesOrderBySpec{column:ServiceFieldLatenciesColumn!direction:Ordering!}type ServiceFieldLatenciesRecord{"""Dimensions of ServiceFieldLatencies that can be grouped by.""" groupBy:ServiceFieldLatenciesDimensions!"""Metrics of ServiceFieldLatencies that can be aggregated over.""" metrics:ServiceFieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldRequestsByClientVersion.""" enum ServiceFieldRequestsByClientVersionColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG TIMESTAMP}type ServiceFieldRequestsByClientVersionDimensions{clientName:String clientVersion:String fieldName:String parentType:String schemaTag:String}"""Filter for data in ServiceFieldRequestsByClientVersion. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldRequestsByClientVersionFilter{and:[ServiceFieldRequestsByClientVersionFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldRequestsByClientVersionFilterIn not:ServiceFieldRequestsByClientVersionFilter or:[ServiceFieldRequestsByClientVersionFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldRequestsByClientVersion. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldRequestsByClientVersionFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldRequestsByClientVersionMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input ServiceFieldRequestsByClientVersionOrderBySpec{column:ServiceFieldRequestsByClientVersionColumn!direction:Ordering!}type ServiceFieldRequestsByClientVersionRecord{"""Dimensions of ServiceFieldRequestsByClientVersion that can be grouped by.""" groupBy:ServiceFieldRequestsByClientVersionDimensions!"""Metrics of ServiceFieldRequestsByClientVersion that can be aggregated over.""" metrics:ServiceFieldRequestsByClientVersionMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldUsage.""" enum ServiceFieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceFieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String}"""Filter for data in ServiceFieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldUsageFilter{and:[ServiceFieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldUsageFilterIn not:ServiceFieldUsageFilter or:[ServiceFieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input ServiceFieldUsageOrderBySpec{column:ServiceFieldUsageColumn!direction:Ordering!}type ServiceFieldUsageRecord{"""Dimensions of ServiceFieldUsage that can be grouped by.""" groupBy:ServiceFieldUsageDimensions!"""Metrics of ServiceFieldUsage that can be aggregated over.""" metrics:ServiceFieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Mutations to a graph.""" type ServiceMutation{"""Check a proposed subgraph schema change.
If the proposal composes successfully, perform a usage check for the resulting schema.""" checkPartialSchema("""Deprecated and ignored.""" frontend:String gitContext:GitContextInput """Specifies which variant of a graph this mutation operates on.""" graphVariant:String!historicParameters:HistoricQueryParameters """Name of the implementing service to validate the partial schema against""" implementingServiceName:String!"""If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served.""" introspectionEndpoint:String isSandboxCheck:Boolean!=false """The partial schema to validate against an implementing service""" partialSchema:PartialSchemaInput!"""Whether to use the maximum retention for historical validation. This only takes
effect if historicParameters is null.""" useMaximumRetention:Boolean):CheckPartialSchemaResult!"""Checks a proposed schema against the schema that has been published to
a particular variant, using metrics corresponding to `historicParameters`.
Callers can set `historicParameters` directly or rely on defaults set in the
graph's check configuration (7 days by default).
If they do not set `historicParameters` but set `useMaximumRetention`,
validation will use the maximum retention the graph has access to.""" checkSchema(baseSchemaTag:String="current" """Deprecated and ignored.""" frontend:String gitContext:GitContextInput historicParameters:HistoricQueryParameters """If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served.""" introspectionEndpoint:String isSandboxCheck:Boolean!=false """Only one of proposedSchema, proposedSchemaDocument, and proposedSchemaHash
may be specified""" proposedSchema:IntrospectionSchemaInput proposedSchemaDocument:String proposedSchemaHash:String useMaximumRetention:Boolean):CheckSchemaResult!"""Make changes to a check workflow.""" checkWorkflow(id:ID!):CheckWorkflowMutation createCompositionStatusSubscription("""ID of Slack channel for registry notification.""" channelID:ID!"""Variant to notify on.""" variant:String!):SchemaPublishSubscription!createSchemaPublishSubscription("""ID of Slack channel for registry notification.""" channelID:ID!"""Variant to notify on.""" variant:String!):SchemaPublishSubscription!"""Soft delete a graph. Data associated with the graph is not permanently deleted; Apollo support can undo.""" delete:Void """Delete the service's avatar. Requires Service.roles.canUpdateAvatar to be true.""" deleteAvatar:AvatarDeleteError """Delete an existing channel""" deleteChannel(id:ID!):Boolean!"""Delete an existing query trigger""" deleteQueryTrigger(id:ID!):Boolean!"""Deletes this service's current subscriptions specific to the ID, returns true if it existed""" deleteRegistrySubscription(id:ID!):Boolean!"""Deletes this service's current registry subscription(s) specific to its graph variant,
returns a list of subscription IDs that were deleted.""" deleteRegistrySubscriptions(variant:String!):[ID!]!deleteScheduledSummary(id:ID!):Boolean!"""Delete a variant by name.""" deleteSchemaTag(tag:String!):DeleteSchemaTagResult!"""Given a UTC timestamp, delete all traces associated with this Service, on that corresponding day. If a timestamp to is provided, deletes all days inclusive.""" deleteTraces(from:Timestamp!to:Timestamp):Void disableDatadogForwardingLegacyMetricNames:Service """Hard delete a graph and all data associated with it. Its ID cannot be reused.""" hardDelete:Void id:ID!@deprecated(reason:"Use service.id")"""Ignore an operation in future checks;
changes affecting it will be tracked,
but won't affect the outcome of the check.
Returns true if the operation is newly ignored,
false if it already was.""" ignoreOperationsInChecks(ids:[ID!]!):IgnoreOperationsInChecksResult """Mark the changeset that affects an operation in a given check instance as safe.
Note that only operations marked as behavior changes are allowed to be marked as safe.""" markChangesForOperationAsSafe("""ID of the schema check.""" checkID:ID!"""ID of the operation to accept changes for.""" operationID:ID!):MarkChangesForOperationAsSafeResult!newKey(keyName:String role:UserPermission!=GRAPH_ADMIN):GraphApiKey!"""Adds an override to the given users permission for this graph""" overrideUserPermission(permission:UserPermission userID:ID!):Service """Promote the schema with the given SHA-256 hash to active for the given variant/tag.""" promoteSchema(graphVariant:String!historicParameters:HistoricQueryParameters overrideComposedSchema:Boolean!=false sha256:SHA256!):PromoteSchemaResponseOrError!"""Publish to a subgraph.
If composition is successful, this will update running routers.""" publishSubgraph(activePartialSchema:PartialSchemaInput!gitContext:GitContextInput graphVariant:String!name:String!revision:String!url:String):CompositionAndUpsertResult registerOperationsWithResponse(clientIdentity:RegisteredClientIdentityInput gitContext:GitContextInput """Specifies which variant of a graph these operations belong to.
Formerly known as "tag"
Defaults to "current"
""" graphVariant:String!="current" manifestVersion:Int operations:[RegisteredOperationInput!]!):RegisterOperationsMutationResponse """Removes a subgraph. If composition is successful, this will update running routers.""" removeImplementingServiceAndTriggerComposition("""Do not remove the service, but recompose without it and report any errors.""" dryRun:Boolean!=false graphVariant:String!name:String!):CompositionAndRemoveResult!removeKey("""API key ID""" id:ID):Void renameKey(id:ID!newKeyName:String):GraphApiKey reportServerInfo("""Only sent if previously requested i.e. received ReportServerInfoResult with withExecutableSchema = true. An executable schema is a schema document that describes the full GraphQL schema that an external client could execute queries against. This must be a valid GraphQL schema document, as per the GraphQL specification: https://spec.graphql.org/""" executableSchema:String """Information about the edge server, see descriptions for individual fields.""" info:EdgeServerInfo!):ReportServerInfoResult@deprecated(reason:"use Mutation.reportSchema instead")service:Service!setDefaultBuildPipelineTrack(version:String!):String """Store a given schema document. This schema will be attached to the graph but
not be associated with any variant. On success, returns the schema hash.""" storeSchemaDocument(schemaDocument:String!):StoreSchemaResponseOrError!"""Test Slack notification channel""" testSlackChannel(id:ID!notification:SlackNotificationInput!):Void testSubscriptionForChannel(channelID:ID!subscriptionID:ID!):String!transfer(to:String!):Service triggerRepublish(graphVariant:String!):Void undelete:Service """Revert the effects of ignoreOperation.
Returns true if the operation is no longer ignored,
false if it wasn't.""" unignoreOperationsInChecks(ids:[ID!]!):UnignoreOperationsInChecksResult """Unmark changes for an operation as safe.""" unmarkChangesForOperationAsSafe("""ID of the schema check.""" checkID:ID!"""ID of the operation to unmark changes for.""" operationID:ID!):MarkChangesForOperationAsSafeResult!"""Update schema check configuration for a graph.""" updateCheckConfiguration("""Clients to ignore during validation.""" excludedClients:[ClientFilterInput!]"""Operation names to ignore during validation.""" excludedOperationNames:[OperationNameFilterInput!]"""Operations to ignore during validation.""" excludedOperations:[ExcludedOperationInput!]"""Default configuration to include operations on the base variant.""" includeBaseVariant:Boolean """Variant overrides for validation.""" includedVariants:[String!]"""Minimum number of requests within the window for a query to be considered.""" operationCountThreshold:Int """Number of requests within the window for a query to be considered, relative to
total request count. Expected values are between 0 and 0.05 (minimum 5% of
total request volume)""" operationCountThresholdPercentage:Float """Only check operations from the last <timeRangeSeconds> seconds.
The default is 7 days (604,800 seconds).""" timeRangeSeconds:Long):CheckConfiguration!updateDatadogMetricsConfig(apiKey:String apiRegion:DatadogApiRegion enabled:Boolean):DatadogMetricsConfig updateDescription(description:String!):Service """Update hiddenFromUninvitedNonAdminAccountMembers""" updateHiddenFromUninvitedNonAdminAccountMembers(hiddenFromUninvitedNonAdminAccountMembers:Boolean!):Service updateReadme(readme:String!):Service updateTitle(title:String!):Service """Publish a schema to this variant, either via a document or an introspection query result.""" uploadSchema(errorOnBadRequest:Boolean!=true gitContext:GitContextInput historicParameters:HistoricQueryParameters overrideComposedSchema:Boolean!=false schema:IntrospectionSchemaInput schemaDocument:String tag:String!):UploadSchemaMutationResponse upsertChannel(id:ID pagerDutyChannel:PagerDutyChannelInput slackChannel:SlackChannelInput webhookChannel:WebhookChannelInput):Channel """Creates a contract schema from a source variant and a set of filter configurations""" upsertContractVariant(contractVariantName:String!filterConfig:FilterConfigInput!initiateLaunch:Boolean!=true sourceVariant:String!):ContractVariantUpsertResult!"""Publish to a subgraph. If composition is successful, this will update running routers.""" upsertImplementingServiceAndTriggerComposition(activePartialSchema:PartialSchemaInput!gitContext:GitContextInput graphVariant:String!name:String!revision:String!url:String):CompositionAndUpsertResult """Create/update PagerDuty notification channel""" upsertPagerDutyChannel(channel:PagerDutyChannelInput!id:ID):PagerDutyChannel upsertQueryTrigger(id:ID trigger:QueryTriggerInput!):QueryTrigger """Create or update a subscription for a service.""" upsertRegistrySubscription("""ID of Slack channel for registry notification.""" channelID:ID """ID of registry subscription""" id:ID """Set of options/customization for notification.""" options:SubscriptionOptionsInput """Variant to notify on.""" variant:String):RegistrySubscription!upsertScheduledSummary(channelID:ID enabled:Boolean id:ID """Deprecated, use the 'variant' argument instead""" tag:String timezone:String variant:String):ScheduledSummary """Create/update Slack notification channel""" upsertSlackChannel(channel:SlackChannelInput!id:ID):SlackChannel upsertWebhookChannel(id:ID name:String secretToken:String url:String!):WebhookChannel validateOperations(gitContext:GitContextInput operations:[OperationDocumentInput!]!tag:String="current"):ValidateOperationsResult!"""This mutation will not result in any changes to the implementing service
Run composition with the Implementing Service's partial schema replaced with the one provided
in the mutation's input. Store the composed schema, return the hash of the composed schema,
and any warnings and errors pertaining to composition.
This mutation will not run validation against operations.""" validatePartialSchemaOfImplementingServiceAgainstGraph(graphVariant:String!implementingServiceName:String!partialSchema:PartialSchemaInput!):CompositionValidationResult!"""Make changes to a graph variant.""" variant(name:String!):GraphVariantMutation}"""Columns of ServiceOperationCheckStats.""" enum ServiceOperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG TIMESTAMP UNCACHED_REQUESTS_COUNT}type ServiceOperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String}"""Filter for data in ServiceOperationCheckStats.
Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceOperationCheckStatsFilter{and:[ServiceOperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ServiceOperationCheckStatsFilterIn not:ServiceOperationCheckStatsFilter or:[ServiceOperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceOperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceOperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceOperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input ServiceOperationCheckStatsOrderBySpec{column:ServiceOperationCheckStatsColumn!direction:Ordering!}type ServiceOperationCheckStatsRecord{"""Dimensions of ServiceOperationCheckStats that can be grouped by.""" groupBy:ServiceOperationCheckStatsDimensions!"""Metrics of ServiceOperationCheckStats that can be aggregated over.""" metrics:ServiceOperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceQueryStats.""" enum ServiceQueryStatsColumn{CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type ServiceQueryStatsDimensions{clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String}"""Filter for data in ServiceQueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceQueryStatsFilter{and:[ServiceQueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. 
To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:ServiceQueryStatsFilterIn not:ServiceQueryStatsFilter or:[ServiceQueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceQueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceQueryStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceQueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input ServiceQueryStatsOrderBySpec{column:ServiceQueryStatsColumn!direction:Ordering!}type ServiceQueryStatsRecord{"""Dimensions of ServiceQueryStats that can be grouped by.""" groupBy:ServiceQueryStatsDimensions!"""Metrics of ServiceQueryStats that can be aggregated over.""" metrics:ServiceQueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""A map from role (permission) String to boolean that the current user is allowed for a particular graph.""" type ServiceRoles{"""Whether the currently authenticated user is permitted to perform schema checks (i.e. 
run `rover (sub)graph check`).""" canCheckSchemas:Boolean!"""Whether the currently authenticated user is permitted to create new graph variants.""" canCreateVariants:Boolean!"""Whether the currently authenticated user is permitted to delete the graph in question""" canDelete:Boolean!"""Whether the currently authenticated user is permitted to manage user access to the graph in question.""" canManageAccess:Boolean!"""Whether the currently authenticated user is permitted to manage the build configuration (e.g. build pipeline version).""" canManageBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to manage 3rd party integrations (e.g. Datadog forwarding).""" canManageIntegrations:Boolean!"""Whether the currently authenticated user is permitted to manage graph-level API keys.""" canManageKeys:Boolean!"""Whether the currently authenticated user is permitted to perform basic administration (e.g. set to public) over variants.""" canManageVariants:Boolean!"""Whether the currently authenticated user is permitted to view details about the build configuration (e.g. build pipeline version).""" canQueryBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to view details of the check configuration for this graph.""" canQueryCheckConfiguration:Boolean!canQueryDeletedImplementingServices:Boolean!"""Whether the currently authenticated user is permitted to view which subgraphs the graph is composed of.""" canQueryImplementingServices:Boolean!canQueryIntegrations:Boolean!canQueryPrivateInfo:Boolean!canQueryPublicInfo:Boolean!canQueryReadmeAuthor:Boolean!canQueryRoleOverrides:Boolean!"""Whether the currently authenticated user is permitted to download schemas owned by this graph.""" canQuerySchemas:Boolean!canQueryStats:Boolean!canQueryTokens:Boolean!canQueryTraces:Boolean!"""Whether the currently authenticated user is permitted to register operations (i.e. `apollo client:push`) for this graph.""" canRegisterOperations:Boolean!canStoreSchemasWithoutVariant:Boolean!canUndelete:Boolean!canUpdateAvatar:Boolean!canUpdateDescription:Boolean!canUpdateTitle:Boolean!canVisualizeStats:Boolean!@deprecated(reason:"Replaced with canQueryTraces and canQueryStats")"""Whether the currently authenticated user is permitted to make updates to the check configuration for this graph.""" canWriteCheckConfiguration:Boolean!canWriteTraces:Boolean!@deprecated(reason:"Never worked, not replaced")}"""A time window with a specified granularity over a given service.""" type ServiceStatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:ServiceBillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceBillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceBillingUsageStatsOrderBySpec!]):[ServiceBillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:ServiceEdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceEdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceEdgeServerInfosOrderBySpec!]):[ServiceEdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:ServiceErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceErrorStatsOrderBySpec!]):[ServiceErrorStatsRecord!]!fieldExecutions("""Filter to select what rows to return.""" filter:ServiceFieldExecutionsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldExecutions by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldExecutionsOrderBySpec!]):[ServiceFieldExecutionsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:ServiceFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldLatenciesOrderBySpec!]):[ServiceFieldLatenciesRecord!]!fieldRequestsByClientVersion("""Filter to select what rows to return.""" filter:ServiceFieldRequestsByClientVersionFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldRequestsByClientVersion by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldRequestsByClientVersionOrderBySpec!]):[ServiceFieldRequestsByClientVersionRecord!]!fieldStats("""Filter to select what rows to return.""" filter:ServiceFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldLatenciesOrderBySpec!]):[ServiceFieldLatenciesRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:ServiceFieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldUsageOrderBySpec!]):[ServiceFieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:ServiceOperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceOperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceOperationCheckStatsOrderBySpec!]):[ServiceOperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:ServiceQueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceQueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceQueryStatsOrderBySpec!]):[ServiceQueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:ServiceTracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceTracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceTracePathErrorsRefsOrderBySpec!]):[ServiceTracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:ServiceTraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceTraceRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceTraceRefsOrderBySpec!]):[ServiceTraceRefsRecord!]!}"""Columns of ServiceTracePathErrorsRefs.""" enum ServiceTracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type ServiceTracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in ServiceTracePathErrorsRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceTracePathErrorsRefsFilter{and:[ServiceTracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:ServiceTracePathErrorsRefsFilterIn not:ServiceTracePathErrorsRefsFilter or:[ServiceTracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. 
To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in ServiceTracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceTracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" traceId:[ID]}type ServiceTracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input ServiceTracePathErrorsRefsOrderBySpec{column:ServiceTracePathErrorsRefsColumn!direction:Ordering!}type ServiceTracePathErrorsRefsRecord{"""Dimensions of ServiceTracePathErrorsRefs that can be grouped by.""" groupBy:ServiceTracePathErrorsRefsDimensions!"""Metrics of ServiceTracePathErrorsRefs that can be aggregated over.""" metrics:ServiceTracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceTraceRefs.""" enum ServiceTraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type ServiceTraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String traceId:ID}"""Filter for data in ServiceTraceRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceTraceRefsFilter{and:[ServiceTraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:ServiceTraceRefsFilterIn not:ServiceTraceRefsFilter or:[ServiceTraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in ServiceTraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceTraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type ServiceTraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input ServiceTraceRefsOrderBySpec{column:ServiceTraceRefsColumn!direction:Ordering!}type ServiceTraceRefsRecord{"""Dimensions of ServiceTraceRefs that can be grouped by.""" groupBy:ServiceTraceRefsDimensions!"""Metrics of ServiceTraceRefs that can be aggregated over.""" metrics:ServiceTraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Slack notification channel""" type SlackChannel implements Channel{id:ID!name:String!subscriptions:[ChannelSubscription!]!url:String!}"""Slack notification channel parameters""" input SlackChannelInput{name:String url:String!}input SlackNotificationField{key:String!value:String!}"""Slack notification message""" input SlackNotificationInput{color:String fallback:String!fields:[SlackNotificationField!]iconUrl:String text:String timestamp:Timestamp title:String titleLink:String username:String}type SourceLocation{column:Int!line:Int!}"""A time window with a specified granularity.""" type StatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:BillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order BillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[BillingUsageStatsOrderBySpec!]):[BillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:EdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order EdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[EdgeServerInfosOrderBySpec!]):[EdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:ErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ErrorStatsOrderBySpec!]):[ErrorStatsRecord!]!fieldExecutions("""Filter to select what rows to return.""" filter:FieldExecutionsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldExecutions by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldExecutionsOrderBySpec!]):[FieldExecutionsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:FieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldLatenciesOrderBySpec!]):[FieldLatenciesRecord!]!fieldRequestsByClientVersion("""Filter to select what rows to return.""" filter:FieldRequestsByClientVersionFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldRequestsByClientVersion by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldRequestsByClientVersionOrderBySpec!]):[FieldRequestsByClientVersionRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:FieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldUsageOrderBySpec!]):[FieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:OperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order OperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[OperationCheckStatsOrderBySpec!]):[OperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:QueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order QueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[QueryStatsOrderBySpec!]):[QueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:TracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order TracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[TracePathErrorsRefsOrderBySpec!]):[TracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:TraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order TraceRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[TraceRefsOrderBySpec!]):[TraceRefsRecord!]!}type StoreSchemaError{code:StoreSchemaErrorCode!message:String!}enum StoreSchemaErrorCode{SCHEMA_IS_NOT_PARSABLE SCHEMA_IS_NOT_VALID}type StoreSchemaResponse{sha256:SHA256!}union StoreSchemaResponseOrError=StoreSchemaError|StoreSchemaResponse type StoredApprovedChange{argNode:NamedIntrospectionArgNoDescription childNode:NamedIntrospectionValueNoDescription code:ChangeCode!parentNode:NamedIntrospectionTypeNoDescription}scalar StringOrInt type StringToString{key:String!value:String!}input StringToStringInput{key:String!value:String!}type Subgraph{hash:String!name:String!routingURL:String!}type SubgraphChange{name:ID!type:SubgraphChangeType!}enum SubgraphChangeType{ADDITION DELETION MODIFICATION}type SubgraphConfig{id:ID!name:String!schemaHash:String!sdl:String!url:String!}type SubscriptionOptions{"""Enables notifications for schema updates""" schemaUpdates:Boolean!}input SubscriptionOptionsInput{"""Enables notifications for schema updates""" schemaUpdates:Boolean!}enum SubscriptionState{ACTIVE CANCELED EXPIRED FUTURE PAST_DUE PAUSED PENDING UNKNOWN}enum SubscriptionStateV2{ACTIVE CANCELED EXPIRED FUTURE PAST_DUE PAUSED PENDING UNKNOWN}type TemporaryURL{url:String!}enum ThemeName{DARK LIGHT}enum TicketPriority{P0 P1 P2 P3}enum TicketStatus{CLOSED HOLD NEW OPEN PENDING SOLVED}"""ISO 8601, extended format with nanoseconds, Zulu (or '[+-]seconds' for times relative to now)""" scalar Timestamp type TimezoneOffset{minutesOffsetFromUTC:Int!zoneID:String!}"""Counts of changes.""" type TotalChangeSummaryCounts{"""Number of changes that are additions. This includes adding types, adding fields to object, input
object, and interface types, adding values to enums, adding members to interfaces and unions, and
adding arguments.""" additions:Int!"""Number of changes that are new usages of the @deprecated directive.""" deprecations:Int!"""Number of changes that are edits. This includes types changing kind, fields and arguments
changing type, arguments changing default value, and any description changes. This also includes
edits to @deprecated reason strings.""" edits:Int!"""Number of changes that are removals. This includes removing types, removing fields from object,
input object, and interface types, removing values from enums, removing members from interfaces
and unions, and removing arguments.
This also includes removing @deprecated usages.""" removals:Int!}type Trace{cacheMaxAgeMs:Float cacheScope:CacheScope clientName:String clientVersion:String durationMs:Float!endTime:Timestamp!http:TraceHTTP id:ID!operationName:String protobuf:Protobuf!root:TraceNode!signature:String!startTime:Timestamp!unexecutedOperationBody:String unexecutedOperationName:String variablesJSON:[StringToString!]!}type TraceError{json:String!locations:[TraceSourceLocation!]!message:String!timestamp:Timestamp}type TraceHTTP{host:String method:HTTPMethod!path:String protocol:String requestHeaders:[StringToString!]!responseHeaders:[StringToString!]!secure:Boolean!statusCode:Int!}type TraceNode{cacheMaxAgeMs:Float cacheScope:CacheScope children:[TraceNode!]!childrenIds:[ID!]!descendants:[TraceNode!]!descendantsIds:[ID!]!endTime:Timestamp!errors:[TraceError!]!id:ID!key:StringOrInt originalFieldName:String parent:ID!parentId:ID path:[String!]!startTime:Timestamp!type:String}"""Columns of TracePathErrorsRefs.""" enum TracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type TracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String """If metrics were collected from a federated service, this field will be prefixed with `service:.`""" path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in TracePathErrorsRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input TracePathErrorsRefsFilter{and:[TracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:TracePathErrorsRefsFilterIn not:TracePathErrorsRefsFilter or:[TracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. 
To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in TracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input TracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type TracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input TracePathErrorsRefsOrderBySpec{column:TracePathErrorsRefsColumn!direction:Ordering!}type TracePathErrorsRefsRecord{"""Dimensions of TracePathErrorsRefs that can be grouped by.""" groupBy:TracePathErrorsRefsDimensions!"""Metrics of TracePathErrorsRefs that can be aggregated over.""" metrics:TracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of TraceRefs.""" enum TraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type TraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID traceId:ID}"""Filter for data in TraceRefs. Fields with dimension names represent equality checks. 
All fields are implicitly ANDed together.""" input TraceRefsFilter{and:[TraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:TraceRefsFilterIn not:TraceRefsFilter or:[TraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in TraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input TraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" traceId:[ID]}type TraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input TraceRefsOrderBySpec{column:TraceRefsColumn!direction:Ordering!}type TraceRefsRecord{"""Dimensions of TraceRefs that can be grouped by.""" groupBy:TraceRefsDimensions!"""Metrics of TraceRefs that can be aggregated over.""" metrics:TraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type TraceSourceLocation{column:Int!line:Int!}"""Counts of changes at the type level, including interfaces, unions, enums, scalars, input objects, etc.""" type TypeChangeSummaryCounts{"""Number of changes that are additions of types.""" additions:Int!"""Number of changes that are edits. This includes types changing kind and any type description -changes, but also includes adding/removing values from enums, adding/removing members from -interfaces and unions, and any enum value deprecation and description changes.""" edits:Int!"""Number of changes that are removals of types.""" removals:Int!}"""the TypeFilterConfig is used to isolate -types, and subsequent fields, through -various configuration settings. +Each variant has its own GraphQL schema, which means schemas can differ between environments. +""" +type Graph implements Identity { + """The organization that this graph belongs to.""" + account: Organization + """A list of the graph API keys that are active for this graph.""" + apiKeys: [GraphApiKey!] + """Provides a view of the graph as an `Actor` type.""" + asActor: Actor! + """Get a check workflow for this graph by its ID""" + checkWorkflow(id: ID!): CheckWorkflow + """The graph's globally unique identifier.""" + id: ID! + """Permissions of the current user in this graph.""" + myRole: UserPermission + name: String! + """Describes the permissions that the active user has for this graph.""" + roles: GraphRoles + """The graph's name.""" + title: String! + """ + Provides details of the graph variant with the provided `name`, if a variant + with that name exists for this graph. Otherwise, returns null. -It defaults to filter towards user defined -types only""" input TypeFilterConfig{"""include abstract types (interfaces and unions)""" includeAbstractTypes:Boolean=true """include built in scalars (i.e. Boolean, Int, etc)""" includeBuiltInTypes:Boolean=false """include reserved introspection types (i.e. __Type)""" includeIntrospectionTypes:Boolean=false}type URI{"""A GCS URI""" gcs:String!}type UnignoreOperationsInChecksResult{graph:Service!}union UpdateOperationCollectionEntryResult=OperationCollectionEntry|PermissionError|ValidationError union UpdateOperationCollectionResult=OperationCollection|PermissionError|ValidationError """The result of a schema publish to a graph variant.""" type UploadSchemaMutationResponse{"""A response code for processing via machines (e.g. 
UPLOAD_SUCCESS or NO_CHANGES)""" code:String!"""Human readable result of a schema publish.""" message:String!"""If successful, the corresponding publication.""" publication:SchemaTag """Whether the schema publish successfully completed or encountered errors.""" success:Boolean!"""If successful, the corresponding publication.""" tag:SchemaTag}"""A registered user.""" type User implements Identity{acceptedPrivacyPolicyAt:Timestamp accounts:[Account!]!@deprecated(reason:"Replaced with User.memberships.account")apiKeys(includeCookies:Boolean=false):[UserApiKey!]!"""Translation of this user identity to an 'Actor' type.""" asActor:Actor!"""Get an URL to which an avatar image can be uploaded. Client uploads by sending a PUT request -with the image data to MediaUploadInfo.url. Client SHOULD set the "Content-Type" header to the -browser-inferred MIME type, and SHOULD set the "x-apollo-content-filename" header to the -filename, if such information is available. Client MUST set the "x-apollo-csrf-token" header to -MediaUploadInfo.csrfToken.""" avatarUpload:AvatarUploadResult """Get an image URL for the user's avatar. Note that CORS is not enabled for these URLs. The size -argument is used for bandwidth reduction, and should be the size of the image as displayed in the -application. Apollo's media server will downscale larger images to at least the requested size, -but this will not happen for third-party media servers.""" avatarUrl(size:Int!=40):String betaFeaturesOn:Boolean!canUpdateAvatar:Boolean!canUpdateEmail:Boolean!canUpdateFullName:Boolean!createdAt:Timestamp!email:String emailModifiedAt:Timestamp emailVerified:Boolean!experimentalFeatures:UserExperimentalFeatures!featureIntros:FeatureIntros fullName:String!"""The user's GitHub username, if they log in via GitHub. May be null even for GitHub users in some edge cases.""" githubUsername:String """The unique identifier for a user.""" id:ID!"""This role is reserved exclusively for internal MDG employees, and it controls what access they may have to other -organizations. 
Only admins are allowed to see this field.""" internalAdminRole:InternalMdgAdminRole """Last time any API token from this user was used against AGM services""" lastAuthenticatedAt:Timestamp logoutAfterIdleMs:Int """Which organizations a user belongs to.""" memberships:[UserMembership!]!"""The name (first and last) of a user.""" name:String!odysseyAttempt(id:ID!):OdysseyAttempt odysseyAttempts:[OdysseyAttempt!]!odysseyCertifications:[OdysseyCertification!]odysseyCourses:[OdysseyCourse!]odysseyHasEarlyAccess:Boolean!odysseyHasRequestedEarlyAccess:Boolean!odysseyTasks:[OdysseyTask!]sandboxOperationCollections:[OperationCollection!]!synchronized:Boolean!"""List of Zendesk tickets this user has submitted""" tickets:[ZendeskTicket!]type:UserType!}type UserApiKey implements ApiKey{id:ID!keyName:String token:String!}type UserExperimentalFeatures{exampleFeature:Boolean!}"""An organization a given user belongs to.""" type UserMembership{"""The organization a user is a member of.""" account:Account!"""When the user joined the organization.""" createdAt:Timestamp!"""What level of access a use has to an organization.""" permission:UserPermission!"""The user that is a member of an organization.""" user:User!}type UserMutation{acceptPrivacyPolicy:Void """Change the user's password""" changePassword(newPassword:String!previousPassword:String!):Void createOdysseyAttempt(testId:String!):OdysseyAttempt createOdysseyCertification(certificationId:String!):OdysseyCertification createOdysseyCourses(courses:[OdysseyCourseInput!]!):[OdysseyCourse!]createOdysseyTasks(tasks:[OdysseyTaskInput!]!):[OdysseyTask!]"""Delete the user's avatar. Requires User.canUpdateAvatar to be true.""" deleteAvatar:AvatarDeleteError """Hard deletes the associated user. Throws an error otherwise with reason included.""" hardDelete:Void """Create a new API key for this user. Must take in a name for this key.""" newKey(keyName:String!):UserApiKey!"""Create a new API key for this user if there are no current API keys. -If an API key already exists, this will return one at random and not create a new one.""" provisionKey(keyName:String!="add-a-name"):ApiKeyProvision """Refresh information about the user from its upstream service (eg list of organizations from GitHub)""" refresh:User """Removes the given key from this user. Can be used to remove either a web cookie or a user API key.""" removeKey(id:ID!):Void """Renames the given key to the new key name.""" renameKey(id:ID!newKeyName:String):UserApiKey resendVerificationEmail:Void setOdysseyCourse(course:OdysseyCourseInput!):OdysseyCourse setOdysseyResponse(response:OdysseyResponseInput!):OdysseyResponse setOdysseyTask(task:OdysseyTaskInput!):OdysseyTask """Submit a zendesk ticket for this user""" submitZendeskTicket(collaborators:[String!]email:String!ticket:ZendeskTicketInput!):ZendeskTicket """Update information about a user; all arguments are optional""" update(email:String fullName:String referrer:String trackingGoogleClientId:String trackingMarketoClientId:String userSegment:UserSegment utmCampaign:String utmMedium:String utmSource:String):User """Updates this users' preference concerning opting into beta features.""" updateBetaFeaturesOn(betaFeaturesOn:Boolean!):User """Update the status of a feature for this. For example, if you want to hide an introductory popup.""" updateFeatureIntros(newFeatureIntros:FeatureIntrosInput):User updateOdysseyAttempt(completedAt:Timestamp id:ID!):OdysseyAttempt """Update user to have the given internal mdg admin role. 
-It is necessary to be an MDG_INTERNAL_SUPER_ADMIN to perform update. -Additionally, upserting a null value explicitly revokes this user's -admin status.""" updateRole(newRole:InternalMdgAdminRole):User user:User!verifyEmail(token:String!):User}enum UserPermission{BILLING_MANAGER CONSUMER CONTRIBUTOR DOCUMENTER GRAPH_ADMIN LEGACY_GRAPH_KEY OBSERVER ORG_ADMIN}enum UserSegment{JOIN_MY_TEAM LOCAL_DEVELOPMENT NOT_SPECIFIED PRODUCTION_GRAPHS SANDBOX SANDBOX_OPERATION_COLLECTIONS TRY_TEAM}type UserSettings{appNavCollapsed:Boolean!autoManageVariables:Boolean!id:String!mockingResponses:Boolean!preflightScriptEnabled:Boolean!responseHints:ResponseHints!tableMode:Boolean!themeName:ThemeName!}"""Explorer user settings input""" input UserSettingsInput{appNavCollapsed:Boolean autoManageVariables:Boolean mockingResponses:Boolean preflightScriptEnabled:Boolean responseHints:ResponseHints tableMode:Boolean themeName:ThemeName}enum UserType{APOLLO GITHUB SSO}type ValidateOperationsResult{validationResults:[ValidationResult!]!}type ValidationError implements Error{message:String!}enum ValidationErrorCode{DEPRECATED_FIELD INVALID_OPERATION NON_PARSEABLE_DOCUMENT}enum ValidationErrorType{FAILURE INVALID WARNING}"""Represents a single validation error, with information relating to the error -and its respective operation""" type ValidationResult{"""The validation result's error code""" code:ValidationErrorCode!"""Description of the validation error""" description:String!"""The operation related to this validation result""" operation:OperationDocument!"""The type of validation error thrown - warning, failure, or invalid.""" type:ValidationErrorType!}"""Always null""" scalar Void """Webhook notification channel""" type WebhookChannel implements Channel{id:ID!name:String!secretToken:String subscriptions:[ChannelSubscription!]!url:String!}"""PagerDuty notification channel parameters""" input WebhookChannelInput{name:String secretToken:String url:String!}type ZendeskTicket{createdAt:Timestamp!description:String!graph:Service id:Int!organization:Account priority:TicketPriority!status:TicketStatus subject:String!user:User}"""Zendesk ticket input""" input ZendeskTicketInput{description:String!graphId:String organizationId:String priority:TicketPriority!subject:String!} + For a list of _all_ variants associated with a graph, use `Graph.variants` instead. + """ + variant(name: String!): GraphVariant + """A list of the variants for this graph.""" + variants: [GraphVariant!]! + """Get a GraphQL document by hash""" + document(hash: SHA256): GraphQLDocument + """Get check workflows for this graph ordered by creation time, most recent first.""" + checkWorkflows(limit: Int! = 100, filter: CheckFilterInput): [CheckWorkflow!]! +} + +"""Provides access to mutation fields for managing Studio graphs and subgraphs.""" +type GraphMutation { + """Generates a new graph API key for this graph with the specified permission level.""" + newKey(keyName: String, role: UserPermission! = GRAPH_ADMIN): GraphApiKey! + """Deletes the existing graph API key with the provided ID, if any.""" + removeKey( + """API key ID""" + id: ID! + ): Void + """Sets a new name for the graph API key with the provided ID, if any. This does not invalidate the key or change its value.""" + renameKey(id: ID!, newKeyName: String): GraphApiKey + """Creates a contract schema from a source variant and a set of filter configurations""" + upsertContractVariant( + """The name of the contract variant, e.g. `public-api`. 
Once set, this value cannot be changed.""" + contractVariantName: String! + """The filter configuration used to build a contract schema. The configuration consists of lists of tags for schema elements to include or exclude in the resulting schema.""" + filterConfig: FilterConfigInput! + """Whether a launch and schema publish should be initiated after updating configuration. Defaults to `true`.""" + initiateLaunch: Boolean! = true + """The graphRef of the variant the contract will be derived from, e.g. `my-graph@production`. Once set, this value cannot be changed.""" + sourceVariant: String + ): ContractVariantUpsertResult! + """Make changes to a graph variant.""" + variant(name: String!): GraphVariantMutation + """Publish a schema to this variant, either via a document or an introspection query result.""" + uploadSchema(schema: IntrospectionSchemaInput, schemaDocument: String, tag: String!, historicParameters: HistoricQueryParameters, overrideComposedSchema: Boolean! = false, errorOnBadRequest: Boolean! = true, gitContext: GitContextInput): SchemaPublicationResult + """ + Checks a proposed schema against the schema that has been published to + a particular variant, using metrics corresponding to `historicParameters`. + Callers can set `historicParameters` directly or rely on defaults set in the + graph's check configuration (7 days by default). + If they do not set `historicParameters` but set `useMaximumRetention`, + validation will use the maximum retention the graph has access to. + """ + checkSchema( + """ + Only one of proposedSchema, proposedSchemaDocument, and proposedSchemaHash + may be specified + """ + proposedSchema: IntrospectionSchemaInput + proposedSchemaDocument: String + proposedSchemaHash: String + baseSchemaTag: String = "current" + gitContext: GitContextInput + historicParameters: HistoricQueryParameters + useMaximumRetention: Boolean + isSandboxCheck: Boolean! = false + """If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served.""" + introspectionEndpoint: String + """Deprecated and ignored.""" + frontend: String + ): CheckSchemaResult! + """Publish to a subgraph. If composition is successful, this will update running routers.""" + publishSubgraph(graphVariant: String!, name: String!, url: String, revision: String!, activePartialSchema: PartialSchemaInput!, gitContext: GitContextInput): SubgraphPublicationResult + """Removes a subgraph. If composition is successful, this will update running routers.""" + removeImplementingServiceAndTriggerComposition( + graphVariant: String! + name: String! + """Do not remove the service, but recompose without it and report any errors.""" + dryRun: Boolean! = false + ): SubgraphRemovalResult! + """ + Checks a proposed subgraph schema change against a published subgraph. + If the proposal composes successfully, perform a usage check for the resulting supergraph schema. + """ + checkPartialSchema( + """The name of the graph variant to run the check against.""" + graphVariant: String! + """Name of the implementing service to validate the partial schema against""" + implementingServiceName: String! + """The partial schema to validate against an implementing service""" + partialSchema: PartialSchemaInput! + gitContext: GitContextInput + historicParameters: HistoricQueryParameters + """Deprecated and ignored.""" + frontend: String + """ + Whether to use the maximum retention for historical validation. This only takes + effect if historicParameters is null. 
+ """ + useMaximumRetention: Boolean + isSandboxCheck: Boolean! = false + """If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served.""" + introspectionEndpoint: String + ): CheckPartialSchemaResult! +} + +"""Individual permissions for the current user when interacting with a particular Studio graph.""" +type GraphRoles { + """Whether the currently authenticated user is permitted to perform schema checks (i.e., run `rover (sub)graph check`).""" + canCheckSchemas: Boolean! + """Whether the currently authenticated user is permitted to create new graph variants.""" + canCreateVariants: Boolean! + """Whether the currently authenticated user is permitted to delete the graph in question""" + canDelete: Boolean! + """Whether the currently authenticated user is permitted to manage user access to the graph in question.""" + canManageAccess: Boolean! + """Whether the currently authenticated user is permitted to manage the build configuration (e.g., build pipeline version).""" + canManageBuildConfig: Boolean! + """Whether the currently authenticated user is permitted to manage third-party integrations (e.g., Datadog forwarding).""" + canManageIntegrations: Boolean! + """Whether the currently authenticated user is permitted to manage graph-level API keys.""" + canManageKeys: Boolean! + """Whether the currently authenticated user is permitted to perform basic administration of variants (e.g., make a variant public).""" + canManageVariants: Boolean! + """Whether the currently authenticated user is permitted to view details about the build configuration (e.g. build pipeline version).""" + canQueryBuildConfig: Boolean! + """Whether the currently authenticated user is permitted to view details of the check configuration for this graph.""" + canQueryCheckConfiguration: Boolean! + """Whether the currently authenticated user is permitted to view which subgraphs the graph is composed of.""" + canQueryImplementingServices: Boolean! + """Whether the currently authenticated user is permitted to download schemas owned by this graph.""" + canQuerySchemas: Boolean! + """Whether the currently authenticated user is permitted to register operations (i.e. `apollo client:push`) for this graph.""" + canRegisterOperations: Boolean! + """Whether the currently authenticated user is permitted to make updates to the check configuration for this graph.""" + canWriteCheckConfiguration: Boolean! +} + +"""A SHA-256 hash, represented as a lowercase hexadecimal string.""" +scalar SHA256 + +"""A location in a source code file.""" +type SourceLocation { + """Column number.""" + column: Int! + """Line number.""" + line: Int! +} + +enum Status { + STABLE + NEXT + DEPRECATED +} + +"""A subgraph in a federated Studio supergraph.""" +type Subgraph { + """The subgraph schema document's SHA256 hash, represented as a hexadecimal string.""" + hash: String! + """The subgraph's registered name.""" + name: String! + """The number of fields in this subgraph""" + numberOfFields: Int + """The number of types in this subgraph""" + numberOfTypes: Int + """The subgraph's routing URL, provided to gateways that use managed federation.""" + routingURL: String! + """Timestamp of when the subgraph was published.""" + updatedAt: Timestamp +} + +"""A change made to a subgraph as part of a launch.""" +type SubgraphChange { + """The subgraph's name.""" + name: ID! + """The type of change that was made.""" + type: SubgraphChangeType! 
+} + +enum SubgraphChangeType { + ADDITION + DELETION + MODIFICATION +} + +"""Input type to provide when running schema checks asynchronously for a federated supergraph.""" +input SubgraphCheckAsyncInput { + """Configuration options for the check execution.""" + config: HistoricQueryParametersInput! + """The GitHub context to associate with the check.""" + gitContext: GitContextInput! + """The graph ref of the Studio graph and variant to run checks against (such as `my-graph@current`).""" + graphRef: ID + """The URL of the GraphQL endpoint that Apollo Sandbox introspected to obtain the proposed schema. Required if `isSandbox` is `true`.""" + introspectionEndpoint: String + """If `true`, the check was initiated by Apollo Sandbox.""" + isSandbox: Boolean! + """The proposed subgraph schema to perform checks with.""" + proposedSchema: GraphQLDocument! + """The name of the subgraph to check schema changes for.""" + subgraphName: String! +} + +"""ISO 8601, extended format with nanoseconds, Zulu (or "[+-]seconds" as a string or number relative to now)""" +scalar Timestamp + +"""Counts of changes.""" +type TotalChangeSummaryCounts { + """ + Number of changes that are additions. This includes adding types, adding fields to object, input + object, and interface types, adding values to enums, adding members to interfaces and unions, and + adding arguments. + """ + additions: Int! + """ + Number of changes that are removals. This includes removing types, removing fields from object, + input object, and interface types, removing values from enums, removing members from interfaces + and unions, and removing arguments. This also includes removing @deprecated usages. + """ + removals: Int! + """ + Number of changes that are edits. This includes types changing kind, fields and arguments + changing type, arguments changing default value, and any description changes. This also includes + edits to @deprecated reason strings. + """ + edits: Int! + """Number of changes that are new usages of the @deprecated directive.""" + deprecations: Int! +} + +"""Counts of changes at the type level, including interfaces, unions, enums, scalars, input objects, etc.""" +type TypeChangeSummaryCounts { + """Number of changes that are additions of types.""" + additions: Int! + """Number of changes that are removals of types.""" + removals: Int! + """ + Number of changes that are edits. This includes types changing kind and any type description + changes, but also includes adding/removing values from enums, adding/removing members from + interfaces and unions, and any enum value deprecation and description changes. + """ + edits: Int! +} + +union UpdateOperationCollectionEntryResult = OperationCollectionEntry | PermissionError | ValidationError + +union UpdateOperationCollectionResult = OperationCollection | PermissionError | ValidationError + +"""Describes the result of publishing a schema to a graph variant.""" +type SchemaPublicationResult { + """A machine-readable response code that indicates the type of result (e.g., `UPLOAD_SUCCESS` or `NO_CHANGES`)""" + code: String! + """Whether the schema publish operation succeeded (`true`) or encountered errors (`false`).""" + success: Boolean! + """A Human-readable message describing the type of result.""" + message: String! + """If the publish operation succeeded, this contains its details. 
Otherwise, this is null.""" + publication: SchemaPublication +} + +"""A registered Apollo Studio user.""" +type User implements Identity { + """Returns a list of all active user API keys for the user.""" + apiKeys(includeCookies: Boolean = false): [UserApiKey!]! + """Returns a representation of this user as an `Actor` type. Useful when determining which actor (usually a `User` or `Graph`) performed a particular action in Studio.""" + asActor: Actor! + """The user's unique ID.""" + id: ID! + """A list of the user's memberships in Apollo Studio organizations.""" + memberships: [UserMembership!]! + """The user's first and last name.""" + name: String! +} + +""" +Represents a user API key, which has permissions identical to +its associated Apollo user. +""" +type UserApiKey implements ApiKey { + """The API key's ID.""" + id: ID! + """The API key's name, for distinguishing it from other keys.""" + keyName: String + """The value of the API key. **This is a secret credential!**""" + token: String! +} + +"""A single user's membership in a single Apollo Studio organization.""" +type UserMembership { + """The organization that the user belongs to.""" + account: Organization! + """The timestamp when the user was added to the organization.""" + createdAt: Timestamp! + """The user's permission level within the organization.""" + permission: UserPermission! + """The user that belongs to the organization.""" + user: User! +} + +type UserMutation { + """Creates a new user API key for this user.""" + newKey(keyName: String!): UserApiKey! + """ + If this user has no active user API keys, this creates one for the user. + + If this user has at least one active user API key, this returns one of those keys at random and does _not_ create a new key. + """ + provisionKey(keyName: String! = "add-a-name"): ApiKeyProvision + """Deletes the user API key with the provided ID, if any.""" + removeKey( + """API key ID""" + id: ID! + ): Void + """Sets a new name for the user API key with the provided ID, if any. This does not invalidate the key or change its value.""" + renameKey(id: ID!, newKeyName: String): UserApiKey +} + +enum UserPermission { + BILLING_MANAGER + CONSUMER + CONTRIBUTOR + DOCUMENTER + GRAPH_ADMIN + LEGACY_GRAPH_KEY + OBSERVER + ORG_ADMIN +} + +"""An error that occurs when an operation contains invalid user input.""" +type ValidationError implements Error { + """The error's details.""" + message: String! 
+} + +"""Always null""" +scalar Void From db8170d30796d3492971f01d41a939117bd39ab3 Mon Sep 17 00:00:00 2001 From: Aaron Arinder Date: Tue, 7 Feb 2023 13:58:58 -0500 Subject: [PATCH 030/154] rover: adding aaronarinder as maintainer --- pkgs/development/tools/rover/default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/development/tools/rover/default.nix b/pkgs/development/tools/rover/default.nix index 3099b2e72619..91852f751662 100644 --- a/pkgs/development/tools/rover/default.nix +++ b/pkgs/development/tools/rover/default.nix @@ -58,6 +58,6 @@ rustPlatform.buildRustPackage rec { description = "A CLI for interacting with ApolloGraphQL's developer tooling, including managing self-hosted and GraphOS graphs."; homepage = "https://www.apollographql.com/docs/rover"; license = licenses.mit; - maintainers = [ maintainers.ivanbrennan ]; + maintainers = [ maintainers.ivanbrennan maintainers.aaronarinder ]; }; } From 626a0ff9fa580762fbf3cb93c914bf2f7e5a7d0a Mon Sep 17 00:00:00 2001 From: Lily Foster Date: Fri, 24 Feb 2023 15:03:53 -0500 Subject: [PATCH 031/154] gmic: fix linking against shared lib with cmake --- pkgs/tools/graphics/gmic/default.nix | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/pkgs/tools/graphics/gmic/default.nix b/pkgs/tools/graphics/gmic/default.nix index 52499196bba4..4e1812b9c1e3 100644 --- a/pkgs/tools/graphics/gmic/default.nix +++ b/pkgs/tools/graphics/gmic/default.nix @@ -1,6 +1,7 @@ { stdenv , lib , fetchFromGitHub +, fetchpatch , fetchurl , cmake , ninja @@ -14,6 +15,7 @@ , libjpeg , libtiff , libpng +, libX11 , writeShellScript , common-updater-scripts , curl @@ -37,6 +39,14 @@ stdenv.mkDerivation rec { hash = "sha256-oEH4GlSV+642TGSJJhV4yzydh1hAQZfzwaiPAZFNQtI="; }; + patches = [ + (fetchpatch { + name = "gmic-3.2.1-fix-system-gmic.patch"; + url = "https://github.com/GreycLab/gmic/commit/1fc184b40b7c31e7b100722d32cb9f7c5a79c90f.patch"; + hash = "sha256-xLlZ87QUiBrfioG7DNXf0HioxqOu6HX+57LW2FGdbLI="; + }) + ]; + # TODO: build this from source # https://github.com/dtschump/gmic/blob/b36b2428db5926af5eea5454f822f369c2d9907e/src/Makefile#L675-L729 gmic_stdlib = fetchurl { @@ -58,6 +68,7 @@ stdenv.mkDerivation rec { libjpeg libtiff libpng + libX11 opencv openexr graphicsmagick From 89bf849a27917ec745af3a79a3b58b835fddaad5 Mon Sep 17 00:00:00 2001 From: davidak Date: Sat, 25 Feb 2023 12:09:57 +0100 Subject: [PATCH 032/154] pkgsMusl: disable for i686-linux there are no bootstrap binaries for 32-bit musl libc --- pkgs/top-level/stage.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/top-level/stage.nix b/pkgs/top-level/stage.nix index e5412c409ed5..2b503bce7c04 100644 --- a/pkgs/top-level/stage.nix +++ b/pkgs/top-level/stage.nix @@ -199,8 +199,8 @@ let # All packages built with the Musl libc. This will override the # default GNU libc on Linux systems. Non-Linux systems are not - # supported. - pkgsMusl = if stdenv.hostPlatform.isLinux then nixpkgsFun { + # supported. 32-bit is also not supported. + pkgsMusl = if stdenv.hostPlatform.isLinux && stdenv.buildPlatform.is64bit then nixpkgsFun { overlays = [ (self': super': { pkgsMusl = super'; })] ++ overlays; @@ -208,7 +208,7 @@ let then "localSystem" else "crossSystem"} = { parsed = makeMuslParsedPlatform stdenv.hostPlatform.parsed; }; - } else throw "Musl libc only supports Linux systems."; + } else throw "Musl libc only supports 64-bit Linux systems."; # All packages built for i686 Linux. # Used by wine, firefox with debugging version of Flash, ... 
From 724569db28ffb82d8f8063162bbbd4bf4d6deb86 Mon Sep 17 00:00:00 2001
From: Vonfry
Date: Sun, 26 Feb 2023 12:05:03 +0800
Subject: [PATCH 033/154] fortune: 3.14.1 -> 3.16.0

Add rinutils to the native build inputs, as it is missing from the
release pack.

---
 pkgs/tools/misc/fortune/default.nix | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/pkgs/tools/misc/fortune/default.nix b/pkgs/tools/misc/fortune/default.nix
index e82b13d12c65..e69161541ff3 100644
--- a/pkgs/tools/misc/fortune/default.nix
+++ b/pkgs/tools/misc/fortune/default.nix
@@ -1,17 +1,17 @@
-{ lib, stdenv, fetchurl, cmake, recode, perl, withOffensive ? false }:
+{ lib, stdenv, fetchurl, cmake, recode, perl, rinutils, withOffensive ? false }:

 stdenv.mkDerivation rec {
   pname = "fortune-mod";
-  version = "3.14.1";
+  version = "3.16.0";

   # We use fetchurl instead of fetchFromGitHub because the release pack has some
   # special files.
   src = fetchurl {
     url = "https://github.com/shlomif/fortune-mod/releases/download/${pname}-${version}/${pname}-${version}.tar.xz";
-    sha256 = "sha256-NnAj9dsB1ZUuTm2W8mPdK2h15Dtro8ve6c+tPoKUsXs=";
+    sha256 = "sha256-dkpkTBulXaN52BHaV4MWEIoQFkmWaG66O9Ppes/GLPo=";
   };

-  nativeBuildInputs = [ cmake perl ];
+  nativeBuildInputs = [ cmake perl rinutils ];

   buildInputs = [ recode ];

From fd3e2b49f4a3138a1c3e2dd854578f64b9c772cd Mon Sep 17 00:00:00 2001
From: Lily Foster
Date: Sun, 26 Feb 2023 07:13:09 -0500
Subject: [PATCH 034/154] gmic-qt: fix build with 3.2.1

How the build was fixed:

* Fetch the release tarball instead, since that is what upstream supports
  building gmic-qt from.
* Set `sourceRoot` to the gmic-qt directory within the tarball.
* Fetch a patch from GreycLab/gmic#435 into gmic.
* Fetch a patch from c-koi/gmic-qt#175 into gmic-qt.

---
 pkgs/tools/graphics/gmic-qt/default.nix | 28 +++++++++++++++----------
 pkgs/tools/graphics/gmic/default.nix    |  6 ++----
 2 files changed, 19 insertions(+), 15 deletions(-)

diff --git a/pkgs/tools/graphics/gmic-qt/default.nix b/pkgs/tools/graphics/gmic-qt/default.nix
index f9766650ec41..3d1a9aa6607a 100644
--- a/pkgs/tools/graphics/gmic-qt/default.nix
+++ b/pkgs/tools/graphics/gmic-qt/default.nix
@@ -1,7 +1,8 @@
 { lib
 , mkDerivation
 , variant ? "standalone"
-, fetchFromGitHub
+, fetchzip
+, fetchpatch
 , cmake
 , pkg-config
 , ninja
@@ -16,7 +17,6 @@
 , curl
 , gimp ?
null , gmic -, cimg , qtbase , qttools , writeShellScript @@ -54,13 +54,21 @@ mkDerivation rec { pname = "gmic-qt${lib.optionalString (variant != "standalone") "-${variant}"}"; version = "3.2.1"; - src = fetchFromGitHub { - owner = "c-koi"; - repo = "gmic-qt"; - rev = "v.${version}"; - sha256 = "sha256-z+GtYLBcHVufXwdeSd8WKmPmU1+/EKMv26kNaEgyt5w="; + src = fetchzip { + url = "https://gmic.eu/files/source/gmic_${version}.tar.gz"; + hash = "sha256-2lMnn19FcFKnfIjSxOObqxIjqLMUoWgi0ADZBCBePY4="; }; + patches = [ + (fetchpatch { + name = "gmic-qt-3.2.1-fix-system-gmic.patch"; + url = "https://github.com/c-koi/gmic-qt/commit/e8d7a3523753ff592da63b1d54edf0921c54fe53.patch"; + hash = "sha256-kBFZo2qvod4pH3oK8gvnmw39x6eMH9zjr4mMcY74mFo="; + }) + ]; + + sourceRoot = "source/gmic-qt"; + nativeBuildInputs = [ cmake pkg-config @@ -69,7 +77,6 @@ mkDerivation rec { buildInputs = [ gmic - cimg qtbase qttools fftw @@ -85,7 +92,8 @@ mkDerivation rec { cmakeFlags = [ "-DGMIC_QT_HOST=${if variant == "standalone" then "none" else variant}" - "-DENABLE_SYSTEM_GMIC:BOOL=ON" + "-DENABLE_SYSTEM_GMIC=ON" + "-DENABLE_DYNAMIC_LINKING=ON" ]; postPatch = '' @@ -108,8 +116,6 @@ mkDerivation rec { }; meta = with lib; { - # Broken since 3.2.0 update, cannot handle system gmic and cimg. - broken = true; description = variants.${variant}.description; homepage = "http://gmic.eu/"; license = licenses.gpl3Plus; diff --git a/pkgs/tools/graphics/gmic/default.nix b/pkgs/tools/graphics/gmic/default.nix index 4e1812b9c1e3..68d07ed734b6 100644 --- a/pkgs/tools/graphics/gmic/default.nix +++ b/pkgs/tools/graphics/gmic/default.nix @@ -15,7 +15,6 @@ , libjpeg , libtiff , libpng -, libX11 , writeShellScript , common-updater-scripts , curl @@ -42,8 +41,8 @@ stdenv.mkDerivation rec { patches = [ (fetchpatch { name = "gmic-3.2.1-fix-system-gmic.patch"; - url = "https://github.com/GreycLab/gmic/commit/1fc184b40b7c31e7b100722d32cb9f7c5a79c90f.patch"; - hash = "sha256-xLlZ87QUiBrfioG7DNXf0HioxqOu6HX+57LW2FGdbLI="; + url = "https://github.com/GreycLab/gmic/commit/9db3f6a39d9ed67b4279654da88993a8057575ff.patch"; + hash = "sha256-JznKCs56t6cJ4HLqlhMZjSOupEB8cdkn3j6RgZpcpzo="; }) ]; @@ -68,7 +67,6 @@ stdenv.mkDerivation rec { libjpeg libtiff libpng - libX11 opencv openexr graphicsmagick From 59462da91769ff9d089a753a26ba77387349441d Mon Sep 17 00:00:00 2001 From: Nick Bathum Date: Fri, 24 Feb 2023 10:36:31 -0500 Subject: [PATCH 035/154] nixos/lib/make-disk-image: expose memSize parameter Generating large disk images can fail from out-of-memory in vm-run. --- doc/builders/images/makediskimage.section.md | 1 + nixos/lib/make-disk-image.nix | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/builders/images/makediskimage.section.md b/doc/builders/images/makediskimage.section.md index 833a6461e57b..c4566e753108 100644 --- a/doc/builders/images/makediskimage.section.md +++ b/doc/builders/images/makediskimage.section.md @@ -101,6 +101,7 @@ in diskSize = "auto"; additionalSpace = "0M"; # Defaults to 512M. copyChannel = false; + memSize = 2048; # Qemu VM memory size in megabytes. Defaults to 1024M. } ``` diff --git a/nixos/lib/make-disk-image.nix b/nixos/lib/make-disk-image.nix index 365fc1f03a5b..d641d1289fe4 100644 --- a/nixos/lib/make-disk-image.nix +++ b/nixos/lib/make-disk-image.nix @@ -154,6 +154,9 @@ To solve this, you can run `fdisk -l $image` and generate `dd if=$image of=$imag , # Shell code executed after the VM has finished. postVM ? "" +, # Guest memory size + memSize ? 
1024 + , # Copy the contents of the Nix store to the root of the image and # skip further setup. Incompatible with `contents`, # `installBootLoader` and `configFile`. @@ -525,7 +528,7 @@ let format' = format; in let "-drive if=pflash,format=raw,unit=1,file=$efiVars" ] ); - memSize = 1024; + inherit memSize; } '' export PATH=${binPath}:$PATH From 519958c94bbe5dd9a9b34f529e0a8e56afc40df1 Mon Sep 17 00:00:00 2001 From: Nick Bathum Date: Sun, 26 Feb 2023 20:17:27 -0500 Subject: [PATCH 036/154] nixos/lib/make-multi-disk-zfs-image: expose memSize parameter closes #178095 --- nixos/lib/make-multi-disk-zfs-image.nix | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/nixos/lib/make-multi-disk-zfs-image.nix b/nixos/lib/make-multi-disk-zfs-image.nix index f9046a485a7d..ecbde44971a9 100644 --- a/nixos/lib/make-multi-disk-zfs-image.nix +++ b/nixos/lib/make-multi-disk-zfs-image.nix @@ -73,6 +73,9 @@ , # Shell code executed after the VM has finished. postVM ? "" +, # Guest memory size + memSize ? 1024 + , name ? "nixos-disk-image" , # Disk image format, one of qcow2, qcow2-compressed, vdi, vpc, raw. @@ -242,6 +245,7 @@ let { QEMU_OPTS = "-drive file=$bootDiskImage,if=virtio,cache=unsafe,werror=report" + " -drive file=$rootDiskImage,if=virtio,cache=unsafe,werror=report"; + inherit memSize; preVM = '' PATH=$PATH:${pkgs.qemu_kvm}/bin mkdir $out From 4131726270412e87ea7666c5a6a175d7fa0bbd62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=BCtz?= Date: Mon, 27 Feb 2023 13:48:51 -0800 Subject: [PATCH 037/154] deltachat-desktop: 1.34.4 -> 1.34.5 Diff: https://github.com/deltachat/deltachat-desktop/compare/v1.34.4...v1.34.5 Changelog: https://github.com/deltachat/deltachat-desktop/blob/v1.34.5/CHANGELOG.md --- .../instant-messengers/deltachat-desktop/default.nix | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/pkgs/applications/networking/instant-messengers/deltachat-desktop/default.nix b/pkgs/applications/networking/instant-messengers/deltachat-desktop/default.nix index f4eb47428b5b..b7280468b754 100644 --- a/pkgs/applications/networking/instant-messengers/deltachat-desktop/default.nix +++ b/pkgs/applications/networking/instant-messengers/deltachat-desktop/default.nix @@ -21,17 +21,17 @@ let libdeltachat' = libdeltachat.overrideAttrs (old: rec { - version = "1.107.1"; + version = "1.110.0"; src = fetchFromGitHub { owner = "deltachat"; repo = "deltachat-core-rust"; rev = version; - hash = "sha256-ISAUZyFrp86ILtRrlowceBQNJ7+tbJReIAe6+u4wwQI="; + hash = "sha256-SPBuStrBp9fnrLfFT2ec9yYItZsvQF9BHdJxi+plbgw="; }; cargoDeps = rustPlatform.fetchCargoTarball { inherit src; name = "${old.pname}-${version}"; - hash = "sha256-B4BMxiI3GhsjeD3gYrq5ZpbZ7l77ycrIMWu2sUzZiz4="; + hash = "sha256-Y4+CkaV9njHqmmiZnDtfZ5OwMVk583FtncxOgAqACkA="; }; }); esbuild' = esbuild.override { @@ -48,16 +48,16 @@ let }; in buildNpmPackage rec { pname = "deltachat-desktop"; - version = "1.34.4"; + version = "1.34.5"; src = fetchFromGitHub { owner = "deltachat"; repo = "deltachat-desktop"; rev = "v${version}"; - hash = "sha256-LV8/r6psUZuCEGbaH1nWlrkeNbEYG8R5O1aCxECPH1E="; + hash = "sha256-gNcYcxyztUrcxbOO7kaTSCyxqdykjp7Esm3jPJ/d4gc="; }; - npmDepsHash = "sha256-rdZVvsyCo/6C4+gjytCCn9Qcl+chc6U+6orkcM59I8U="; + npmDepsHash = "sha256-I0PhE+GXFgOdvH5aLZRyn3lVmXgATX2kmltXYC9chog="; nativeBuildInputs = [ makeWrapper From cf4927e005ef245cabf69f6454b71b89fb83289d Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Wed, 1 Mar 2023 01:28:22 +0000 Subject: [PATCH 038/154] cargo-tarpaulin: 0.25.0 -> 0.25.1 --- pkgs/development/tools/analysis/cargo-tarpaulin/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/tools/analysis/cargo-tarpaulin/default.nix b/pkgs/development/tools/analysis/cargo-tarpaulin/default.nix index 499a42429230..ea50c29fcd53 100644 --- a/pkgs/development/tools/analysis/cargo-tarpaulin/default.nix +++ b/pkgs/development/tools/analysis/cargo-tarpaulin/default.nix @@ -2,13 +2,13 @@ rustPlatform.buildRustPackage rec { pname = "cargo-tarpaulin"; - version = "0.25.0"; + version = "0.25.1"; src = fetchFromGitHub { owner = "xd009642"; repo = "tarpaulin"; rev = version; - sha256 = "sha256-9duL16AuwG3lBMq1hUAXbNrvoBF6SASCiakmT42LQ/E="; + sha256 = "sha256-JTkVNy2wqPIQ5mVcptI10a3Ghhdygnm9dmwUmiDqYjE="; }; nativeBuildInputs = [ @@ -17,7 +17,7 @@ rustPlatform.buildRustPackage rec { buildInputs = [ openssl ] ++ lib.optionals stdenv.isDarwin [ curl Security ]; - cargoHash = "sha256-MXnE3Fq/jzWHvmO2i8cWixRKRuwVbUU/OmBj1SUkEiY="; + cargoHash = "sha256-t4L3HSOGk/lAL8mOaVl/pm3kE0CVVzsYpyu0V6zeIFQ="; #checkFlags = [ "--test-threads" "1" ]; doCheck = false; From e938616a7ec61e0c116be18ed63fe9d7e6b7cf82 Mon Sep 17 00:00:00 2001 From: Anderson Torres Date: Wed, 1 Mar 2023 00:19:46 -0300 Subject: [PATCH 039/154] yapesdl: 0.70.2 -> 0.71.2 --- .../emulators/yapesdl/default.nix | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/pkgs/applications/emulators/yapesdl/default.nix b/pkgs/applications/emulators/yapesdl/default.nix index 5f4b7771fb5f..06176e3f525f 100644 --- a/pkgs/applications/emulators/yapesdl/default.nix +++ b/pkgs/applications/emulators/yapesdl/default.nix @@ -5,20 +5,21 @@ , SDL2 }: -stdenv.mkDerivation rec { +stdenv.mkDerivation (self: { pname = "yapesdl"; - version = "0.70.2"; + version = "0.71.2"; src = fetchFromGitHub { owner = "calmopyrin"; - repo = pname; - rev = "v${version}"; - hash = "sha256-51P6wNaSfVA3twu+yRUKXguEmVBvuuEnHxH1Zl1vsCc="; + repo = "yapesdl"; + rev = "v${self.version}"; + hash = "sha256-QGF3aS/YSzdGxHONKyA/iTewEVYsjBAsKARVMXkFV2k="; }; nativeBuildInputs = [ pkg-config ]; + buildInputs = [ SDL2 ]; @@ -27,17 +28,16 @@ stdenv.mkDerivation rec { installPhase = '' runHook preInstall - install --directory $out/bin $out/share/doc/$pname - install yapesdl $out/bin/ - install README.SDL $out/share/doc/$pname/ + install -Dm755 yapesdl -t $out/bin/ + install -Dm755 README.SDL -t $out/share/doc/yapesdl/ runHook postInstall ''; - meta = with lib; { + meta = { homepage = "http://yape.plus4.net/"; description = "Multiplatform Commodore 64 and 264 family emulator"; - license = licenses.gpl2Plus; - maintainers = with maintainers; [ AndersonTorres ]; - platforms = platforms.unix; + license = lib.licenses.gpl2Plus; + maintainers = with lib.maintainers; [ AndersonTorres ]; + platforms = lib.platforms.unix; }; -} +}) From b70230f026d1219c20773703a780a5b2a11da8c5 Mon Sep 17 00:00:00 2001 From: Mario Rodas Date: Wed, 1 Mar 2023 04:20:00 +0000 Subject: [PATCH 040/154] redis: 7.0.8 -> 7.0.9 https://github.com/redis/redis/releases/tag/7.0.9 --- pkgs/servers/nosql/redis/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/servers/nosql/redis/default.nix b/pkgs/servers/nosql/redis/default.nix index 0f788dd9d387..85e9fe5022e5 100644 --- a/pkgs/servers/nosql/redis/default.nix +++ b/pkgs/servers/nosql/redis/default.nix @@ -7,11 +7,11 @@ stdenv.mkDerivation rec { pname = 
"redis"; - version = "7.0.8"; + version = "7.0.9"; src = fetchurl { url = "https://download.redis.io/releases/${pname}-${version}.tar.gz"; - hash = "sha256-BqM55JEwZ4Pc9VuX8VpdvL3AHMvebcIwJ8R1yrc16RQ="; + hash = "sha256-93E1wqR8kVHUAov+o7NEcKtNMk0UhPeahMbzKjz7n2U="; }; nativeBuildInputs = [ pkg-config ]; From fd40fce2c393dc38417817fffcc73e17708c4099 Mon Sep 17 00:00:00 2001 From: Sebastian Neubauer Date: Wed, 1 Mar 2023 14:43:14 +0100 Subject: [PATCH 041/154] rgp: 1.14 -> 1.14.1 --- pkgs/development/tools/rgp/default.nix | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pkgs/development/tools/rgp/default.nix b/pkgs/development/tools/rgp/default.nix index 2e5400c1cdce..6b80dae38a55 100644 --- a/pkgs/development/tools/rgp/default.nix +++ b/pkgs/development/tools/rgp/default.nix @@ -19,15 +19,15 @@ }: let - buildNum = "2022-12-12-1037"; + buildNum = "2023-02-15-1051"; in -stdenv.mkDerivation rec { +stdenv.mkDerivation { pname = "rgp"; - version = "1.14"; + version = "1.14.1"; src = fetchurl { url = "https://gpuopen.com/download/radeon-developer-tool-suite/RadeonDeveloperToolSuite-${buildNum}.tgz"; - hash = "sha256-T13SOy+77lLxmlcczXEFZAnyx9Lm52G/WiCcC1Py4HA="; + hash = "sha256-1JxW6vXfOYDaCnHWEq8crjuu0QrUCwahm+ipOKVDQPA="; }; nativeBuildInputs = [ makeWrapper autoPatchelfHook ]; From 7317a23ece1308d251c48a72a3acaa97f53e82ec Mon Sep 17 00:00:00 2001 From: Doron Behar Date: Wed, 4 Jan 2023 18:42:00 +0200 Subject: [PATCH 042/154] syncthingtray: 1.3.1 -> 1.3.2 Diff: https://github.com/Martchus/syncthingtray/compare/v1.3.1...v1.3.2 Add `autostartExecPath` option. --- .../misc/syncthingtray/default.nix | 22 ++++++++----------- .../use-nix-path-in-autostart.patch | 13 ----------- 2 files changed, 9 insertions(+), 26 deletions(-) delete mode 100644 pkgs/applications/misc/syncthingtray/use-nix-path-in-autostart.patch diff --git a/pkgs/applications/misc/syncthingtray/default.nix b/pkgs/applications/misc/syncthingtray/default.nix index 5d583181ce65..99404d38efee 100644 --- a/pkgs/applications/misc/syncthingtray/default.nix +++ b/pkgs/applications/misc/syncthingtray/default.nix @@ -19,30 +19,25 @@ , kioPluginSupport ? true , plasmoidSupport ? true , systemdSupport ? true +/* It is possible to set via this option an absolute exec path that will be +written to the `~/.config/autostart/syncthingtray.desktop` file generated +during runtime. Alternatively, one can edit the desktop file themselves after +it is generated See: +https://github.com/NixOS/nixpkgs/issues/199596#issuecomment-1310136382 */ +, autostartExecPath ? "syncthingtray" }: mkDerivation rec { - version = "1.3.1"; + version = "1.3.2"; pname = "syncthingtray"; src = fetchFromGitHub { owner = "Martchus"; repo = "syncthingtray"; rev = "v${version}"; - sha256 = "sha256-0rmfDkPvgubVqfbIOZ+mnv/x1p2sb88zGeg/Q2JCy3I="; + sha256 = "sha256-zLZw6ltdgO66dvKdLXhr/a6r8UhbSAx06jXrgMARHyw="; }; - patches = [ - # Fix Exec= path in runtime-generated - # ~/.config/autostart/syncthingtray.desktop file - this is required because - # we are wrapping the executable. We can't use `substituteAll` because we - # can't use `${placeholder "out"}` because that will produce the $out of - # the patch derivation itself, and not of syncthing's "out" placeholder. 
- # Hence we use a C definition with NIX_CFLAGS_COMPILE - ./use-nix-path-in-autostart.patch - ]; - env.NIX_CFLAGS_COMPILE = "-DEXEC_NIX_PATH=\"${placeholder "out"}/bin/syncthingtray\""; - buildInputs = [ qtbase cpp-utilities @@ -70,6 +65,7 @@ mkDerivation rec { ''; cmakeFlags = [ + "-DAUTOSTART_EXEC_PATH=${autostartExecPath}" # See https://github.com/Martchus/syncthingtray/issues/42 "-DQT_PLUGIN_DIR:STRING=${placeholder "out"}/lib/qt-5" ] ++ lib.optionals (!plasmoidSupport) ["-DNO_PLASMOID=ON"] diff --git a/pkgs/applications/misc/syncthingtray/use-nix-path-in-autostart.patch b/pkgs/applications/misc/syncthingtray/use-nix-path-in-autostart.patch deleted file mode 100644 index a0907496ff9a..000000000000 --- a/pkgs/applications/misc/syncthingtray/use-nix-path-in-autostart.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git i/widgets/settings/settingsdialog.cpp w/widgets/settings/settingsdialog.cpp -index 4deff1f..16845b5 100644 ---- i/widgets/settings/settingsdialog.cpp -+++ w/widgets/settings/settingsdialog.cpp -@@ -802,7 +802,7 @@ bool setAutostartEnabled(bool enabled) - desktopFile.write("[Desktop Entry]\n" - "Name=" APP_NAME "\n" - "Exec=\""); -- desktopFile.write(qEnvironmentVariable("APPIMAGE", QCoreApplication::applicationFilePath()).toUtf8().data()); -+ desktopFile.write(qEnvironmentVariable("APPIMAGE", EXEC_NIX_PATH).toUtf8().data()); - desktopFile.write("\" qt-widgets-gui --single-instance\nComment=" APP_DESCRIPTION "\n" - "Icon=" PROJECT_NAME "\n" - "Type=Application\n" From d5d0ec7f55a7c5ec2d6ac9ed265e2cc38907f642 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Thu, 2 Mar 2023 12:09:57 +0000 Subject: [PATCH 043/154] vault: 1.12.3 -> 1.13.0 --- pkgs/tools/security/vault/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/tools/security/vault/default.nix b/pkgs/tools/security/vault/default.nix index 5c2a2e9b0d89..266ed3f5c883 100644 --- a/pkgs/tools/security/vault/default.nix +++ b/pkgs/tools/security/vault/default.nix @@ -6,16 +6,16 @@ buildGoModule rec { pname = "vault"; - version = "1.12.3"; + version = "1.13.0"; src = fetchFromGitHub { owner = "hashicorp"; repo = "vault"; rev = "v${version}"; - sha256 = "sha256-ZNk9bmZwD1aUY3fYT5Qngoq+9qXgvH/nWSWc30st7nE="; + sha256 = "sha256-F9Ki+3jMkJ+CI2yQmrnqT98xJqSSKQTtYHxQTYdfNbQ="; }; - vendorHash = "sha256-sPpTB3N1w0JppHcwdyLYwSxjzzUAJcBJ5zJ2u4rXXkQ="; + vendorHash = "sha256-Ny4TTa67x/mwTclZrtPoWU6nHu5q4KafP1s4rvk21Hs="; subPackages = [ "." 
]; From 2e719bcde5056e7db066da394ebf89810fab28ae Mon Sep 17 00:00:00 2001 From: Florian Brandes Date: Thu, 2 Mar 2023 13:34:45 +0100 Subject: [PATCH 044/154] pgadmin4: 6.19 -> 6.20 Changelog: https://www.pgadmin.org/docs/pgadmin4/latest/release_notes_6_20.html Signed-off-by: Florian Brandes --- pkgs/tools/admin/pgadmin/default.nix | 4 ++-- pkgs/tools/admin/pgadmin/package.json | 5 ++++- pkgs/tools/admin/pgadmin/yarn.lock | 23 ++++++++++++++++--- pkgs/tools/admin/pgadmin/yarn.nix | 32 +++++++++++++++++++++++---- 4 files changed, 54 insertions(+), 10 deletions(-) diff --git a/pkgs/tools/admin/pgadmin/default.nix b/pkgs/tools/admin/pgadmin/default.nix index a83d96779de6..2025b4290dca 100644 --- a/pkgs/tools/admin/pgadmin/default.nix +++ b/pkgs/tools/admin/pgadmin/default.nix @@ -14,11 +14,11 @@ let pname = "pgadmin"; - version = "6.19"; + version = "6.20"; src = fetchurl { url = "https://ftp.postgresql.org/pub/pgadmin/pgadmin4/v${version}/source/pgadmin4-${version}.tar.gz"; - sha256 = "sha256-xHvdqVpNU9ZzTA6Xl2Bv044l6Tbvf4fjqyz4TmS9gmI="; + sha256 = "sha256-6aQvg98LymZGAgAcNX5Xhw/aRdE5h4HOCPS+kQnkstU="; }; yarnDeps = mkYarnModules { diff --git a/pkgs/tools/admin/pgadmin/package.json b/pkgs/tools/admin/pgadmin/package.json index 462d82e2134d..f43473b081cd 100644 --- a/pkgs/tools/admin/pgadmin/package.json +++ b/pkgs/tools/admin/pgadmin/package.json @@ -150,6 +150,7 @@ "react-draggable": "^4.4.4", "react-dropzone": "^14.2.1", "react-leaflet": "^3.2.2", + "react-resize-detector": "^8.0.3", "react-rnd": "^10.3.5", "react-router-dom": "^6.2.2", "react-select": "^4.2.1", @@ -164,6 +165,8 @@ "styled-components": "^5.2.1", "tempusdominus-bootstrap-4": "^5.1.2", "tempusdominus-core": "^5.19.3", + "uplot": "^1.6.24", + "uplot-react": "^1.1.4", "valid-filename": "^2.0.1", "webcabin-docker": "git+https://github.com/pgadmin-org/wcdocker/#3df8aac825ee2892f4d824de273b779cc6dbcad8", "wkx": "^0.5.0", @@ -188,7 +191,7 @@ "pep8": "pycodestyle --config=../.pycodestyle ../docs && pycodestyle --config=../.pycodestyle ../pkg && pycodestyle --config=../.pycodestyle ../tools && pycodestyle --config=../.pycodestyle ../web", "auditjs-html": "yarn audit --json | yarn run yarn-audit-html --output ../auditjs.html", "auditjs": "yarn audit --groups dependencies", - "auditpy": "safety check --full-report -i 40493 -i 51668", + "auditpy": "safety check --full-report -i 51668 -i 52495", "audit": "yarn run auditjs && yarn run auditpy" } } diff --git a/pkgs/tools/admin/pgadmin/yarn.lock b/pkgs/tools/admin/pgadmin/yarn.lock index df20a984a8f7..f506196c2040 100644 --- a/pkgs/tools/admin/pgadmin/yarn.lock +++ b/pkgs/tools/admin/pgadmin/yarn.lock @@ -8609,6 +8609,13 @@ react-property@2.0.0: resolved "https://registry.yarnpkg.com/react-property/-/react-property-2.0.0.tgz#2156ba9d85fa4741faf1918b38efc1eae3c6a136" integrity sha512-kzmNjIgU32mO4mmH5+iUyrqlpFQhF8K2k7eZ4fdLSOPFrD1XgEuSBv9LDEgxRXTMBqMd8ppT0x6TIzqE5pdGdw== +react-resize-detector@^8.0.3: + version "8.0.3" + resolved "https://registry.yarnpkg.com/react-resize-detector/-/react-resize-detector-8.0.3.tgz#dab4470aae23bb07deb857230ccf945d000ef99b" + integrity sha512-c3eqm5BVcluVhxHsBQnhyPO/5uYB3XHIHz6D1ZOHzU2WcnZF0Cr3KLl5OIozRC2RSsdQlu5vn1PHEqrvKRnIYA== + dependencies: + lodash "^4.17.21" + react-rnd@^10.3.5: version "10.3.7" resolved "https://registry.yarnpkg.com/react-rnd/-/react-rnd-10.3.7.tgz#037ce277e6c5e682989b51278e44a6ba299990af" @@ -10084,9 +10091,9 @@ typescript@^3.2.2: integrity 
sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q== ua-parser-js@^0.7.30: - version "0.7.32" - resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.32.tgz#cd8c639cdca949e30fa68c44b7813ef13e36d211" - integrity sha512-f9BESNVhzlhEFf2CHMSj40NWOjYPl1YKYbrvIr/hFTDEmLq7SRbWvm7FcdcpCYT95zrOhC7gZSxjdnnTpBcwVw== + version "0.7.33" + resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.33.tgz#1d04acb4ccef9293df6f70f2c3d22f3030d8b532" + integrity sha512-s8ax/CeZdK9R/56Sui0WM6y9OFREJarMRHqLB2EwkovemBxNQ+Bqu8GAsUnVcXKgphb++ghr/B2BZx4mahujPw== uglify-js@^3.1.4: version "3.17.4" @@ -10192,6 +10199,16 @@ update-browserslist-db@^1.0.9: escalade "^3.1.1" picocolors "^1.0.0" +uplot-react@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/uplot-react/-/uplot-react-1.1.4.tgz#02b9918a199da9983fc0d375fb44e443749e2ac0" + integrity sha512-qO1UkQwjVKdj5vTm3O3yldvu1T6hwY4++rH4KznLhjqpnLdncq1zsRxq/zQz/HUHPVD0j7WBcEISbNM61JsuAQ== + +uplot@^1.6.24: + version "1.6.24" + resolved "https://registry.yarnpkg.com/uplot/-/uplot-1.6.24.tgz#dfa213fa7da92763261920ea972ed1a5f9f6af12" + integrity sha512-WpH2BsrFrqxkMu+4XBvc0eCDsRBhzoq9crttYeSI0bfxpzR5YoSVzZXOKFVWcVC7sp/aDXrdDPbDZGCtck2PVg== + uri-js@^4.2.2: version "4.4.1" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" diff --git a/pkgs/tools/admin/pgadmin/yarn.nix b/pkgs/tools/admin/pgadmin/yarn.nix index d4c5e2c7b1ce..0dc8367c7ca2 100644 --- a/pkgs/tools/admin/pgadmin/yarn.nix +++ b/pkgs/tools/admin/pgadmin/yarn.nix @@ -9223,6 +9223,14 @@ sha512 = "kzmNjIgU32mO4mmH5+iUyrqlpFQhF8K2k7eZ4fdLSOPFrD1XgEuSBv9LDEgxRXTMBqMd8ppT0x6TIzqE5pdGdw=="; }; } + { + name = "react_resize_detector___react_resize_detector_8.0.3.tgz"; + path = fetchurl { + name = "react_resize_detector___react_resize_detector_8.0.3.tgz"; + url = "https://registry.yarnpkg.com/react-resize-detector/-/react-resize-detector-8.0.3.tgz"; + sha512 = "c3eqm5BVcluVhxHsBQnhyPO/5uYB3XHIHz6D1ZOHzU2WcnZF0Cr3KLl5OIozRC2RSsdQlu5vn1PHEqrvKRnIYA=="; + }; + } { name = "react_rnd___react_rnd_10.3.7.tgz"; path = fetchurl { @@ -10912,11 +10920,11 @@ }; } { - name = "ua_parser_js___ua_parser_js_0.7.32.tgz"; + name = "ua_parser_js___ua_parser_js_0.7.33.tgz"; path = fetchurl { - name = "ua_parser_js___ua_parser_js_0.7.32.tgz"; - url = "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.32.tgz"; - sha512 = "f9BESNVhzlhEFf2CHMSj40NWOjYPl1YKYbrvIr/hFTDEmLq7SRbWvm7FcdcpCYT95zrOhC7gZSxjdnnTpBcwVw=="; + name = "ua_parser_js___ua_parser_js_0.7.33.tgz"; + url = "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.33.tgz"; + sha512 = "s8ax/CeZdK9R/56Sui0WM6y9OFREJarMRHqLB2EwkovemBxNQ+Bqu8GAsUnVcXKgphb++ghr/B2BZx4mahujPw=="; }; } { @@ -11047,6 +11055,22 @@ sha512 = "OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ=="; }; } + { + name = "uplot_react___uplot_react_1.1.4.tgz"; + path = fetchurl { + name = "uplot_react___uplot_react_1.1.4.tgz"; + url = "https://registry.yarnpkg.com/uplot-react/-/uplot-react-1.1.4.tgz"; + sha512 = "qO1UkQwjVKdj5vTm3O3yldvu1T6hwY4++rH4KznLhjqpnLdncq1zsRxq/zQz/HUHPVD0j7WBcEISbNM61JsuAQ=="; + }; + } + { + name = "uplot___uplot_1.6.24.tgz"; + path = fetchurl { + name = "uplot___uplot_1.6.24.tgz"; + url = "https://registry.yarnpkg.com/uplot/-/uplot-1.6.24.tgz"; + sha512 = "WpH2BsrFrqxkMu+4XBvc0eCDsRBhzoq9crttYeSI0bfxpzR5YoSVzZXOKFVWcVC7sp/aDXrdDPbDZGCtck2PVg=="; + }; + } { name = "uri_js___uri_js_4.4.1.tgz"; path = 
fetchurl { From c3d9d9a38f413d7758ede2eb39df6226324496ca Mon Sep 17 00:00:00 2001 From: Doron Behar Date: Thu, 2 Mar 2023 15:27:41 +0200 Subject: [PATCH 045/154] qrupdate: 1.1.2 -> 1.1.5 Switch to a maintained fork. --- .../libraries/qrupdate/default.nix | 62 +++++++++---------- 1 file changed, 29 insertions(+), 33 deletions(-) diff --git a/pkgs/development/libraries/qrupdate/default.nix b/pkgs/development/libraries/qrupdate/default.nix index d3d821ba1db1..bc4263928078 100644 --- a/pkgs/development/libraries/qrupdate/default.nix +++ b/pkgs/development/libraries/qrupdate/default.nix @@ -1,54 +1,50 @@ { stdenv , lib -, fetchurl +, fetchFromGitHub , gfortran , blas +, cmake , lapack , which }: stdenv.mkDerivation rec { pname = "qrupdate"; - version = "1.1.2"; - src = fetchurl { - url = "mirror://sourceforge/qrupdate/${pname}-${version}.tar.gz"; - sha256 = "024f601685phcm1pg8lhif3lpy5j9j0k6n0r46743g4fvh8wg8g2"; + version = "1.1.5"; + + src = fetchFromGitHub { + owner = "mpimd-csc"; + repo = "qrupdate-ng"; + rev = "v${version}"; + hash = "sha256-dHxLPrN00wwozagY2JyfZkD3sKUD2+BcnbjNgZepzFg="; }; - preBuild = - # Check that blas and lapack are compatible - assert (blas.isILP64 == lapack.isILP64); - # We don't have structuredAttrs yet implemented, and we need to use space - # seprated values in makeFlags, so only this works. - '' - makeFlagsArray+=( - "LAPACK=-L${lapack}/lib -llapack" - "BLAS=-L${blas}/lib -lblas" - "PREFIX=${placeholder "out"}" - "FFLAGS=${toString ([ - "-std=legacy" - ] ++ lib.optionals blas.isILP64 [ - # If another application intends to use qrupdate compiled with blas with - # 64 bit support, it should add this to it's FFLAGS as well. See (e.g): - # https://savannah.gnu.org/bugs/?50339 - "-fdefault-integer-8" - ])}" - ) - ''; + cmakeFlags = assert (blas.isILP64 == lapack.isILP64); [ + "-DCMAKE_Fortran_FLAGS=${toString ([ + "-std=legacy" + ] ++ lib.optionals blas.isILP64 [ + # If another application intends to use qrupdate compiled with blas with + # 64 bit support, it should add this to it's FFLAGS as well. See (e.g): + # https://savannah.gnu.org/bugs/?50339 + "-fdefault-integer-8" + ])}" + ]; doCheck = true; - checkTarget = "test"; - - buildFlags = [ "lib" "solib" ]; - - installTargets = lib.optionals stdenv.isDarwin [ "install-staticlib" "install-shlib" ]; - - nativeBuildInputs = [ which gfortran ]; + nativeBuildInputs = [ + cmake + which + gfortran + ]; + buildInputs = [ + blas + lapack + ]; meta = with lib; { description = "Library for fast updating of qr and cholesky decompositions"; - homepage = "https://sourceforge.net/projects/qrupdate/"; + homepage = "https://github.com/mpimd-csc/qrupdate-ng"; license = licenses.gpl3Plus; maintainers = with maintainers; [ doronbehar ]; platforms = platforms.unix; From 4a114efec9a10564372482ff6ae4ee3169ec98a6 Mon Sep 17 00:00:00 2001 From: "John D. 
Boy" <2187261+jboynyc@users.noreply.github.com> Date: Mon, 27 Feb 2023 20:46:17 +0100 Subject: [PATCH 046/154] python3Packages.textnets: init at 0.8.7 --- .../python-modules/textnets/default.nix | 72 +++++++++++++++++++ pkgs/top-level/python-packages.nix | 4 ++ 2 files changed, 76 insertions(+) create mode 100644 pkgs/development/python-modules/textnets/default.nix diff --git a/pkgs/development/python-modules/textnets/default.nix b/pkgs/development/python-modules/textnets/default.nix new file mode 100644 index 000000000000..2de03cdec9d1 --- /dev/null +++ b/pkgs/development/python-modules/textnets/default.nix @@ -0,0 +1,72 @@ +{ lib +, buildPythonPackage +, cairocffi +, cython +, fetchFromGitHub +, igraph +, leidenalg +, pandas +, poetry-core +, pytestCheckHook +, pythonOlder +, scipy +, setuptools +, spacy +, en_core_web_sm +, toolz +, tqdm +, wasabi +}: + +buildPythonPackage rec { + pname = "textnets"; + version = "0.8.7"; + format = "pyproject"; + + disabled = pythonOlder "3.8"; + + src = fetchFromGitHub { + owner = "jboynyc"; + repo = pname; + rev = "v${version}"; + hash = "sha256-BBndY+3leJBxiImuyRL7gMD5eocE4i96+97I9hDEwec="; + }; + + nativeBuildInputs = [ + cython + poetry-core + setuptools + ]; + + propagatedBuildInputs = [ + cairocffi + igraph + leidenalg + pandas + scipy + spacy + toolz + tqdm + wasabi + ]; + + # Deselect test of experimental feature that fails due to having an + # additional dependency. + disabledTests = [ + "test_context" + ]; + + nativeCheckInputs = [ + pytestCheckHook + en_core_web_sm + ]; + + pythonImportsCheck = [ pname ]; + + meta = with lib; { + description = "Text analysis with networks"; + homepage = "https://textnets.readthedocs.io"; + license = licenses.gpl3Only; + maintainers = with maintainers; [ jboy ]; + }; +} diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix index caf05ed079e8..72d88a97a68b 100644 --- a/pkgs/top-level/python-packages.nix +++ b/pkgs/top-level/python-packages.nix @@ -11450,6 +11450,10 @@ self: super: with self; { textacy = callPackage ../development/python-modules/textacy { }; + textnets = callPackage ../development/python-modules/textnets { + en_core_web_sm = spacy_models.en_core_web_sm; + }; + texttable = callPackage ../development/python-modules/texttable { }; text-unidecode = callPackage ../development/python-modules/text-unidecode { }; From 82985da5545430c6934dfda0f862abe99b70057c Mon Sep 17 00:00:00 2001 From: Doron Behar Date: Thu, 2 Mar 2023 16:15:40 +0200 Subject: [PATCH 047/154] snzip: 1.0.4 -> 1.0.5 Diff: https://github.com/kubo/snzip/compare/1.0.4...v1.0.5 --- pkgs/tools/archivers/snzip/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/tools/archivers/snzip/default.nix b/pkgs/tools/archivers/snzip/default.nix index f3a940068734..c759a7ac07f1 100644 --- a/pkgs/tools/archivers/snzip/default.nix +++ b/pkgs/tools/archivers/snzip/default.nix @@ -6,13 +6,13 @@ stdenv.mkDerivation rec { pname = "snzip"; - version = "1.0.4"; + version = "1.0.5"; src = fetchFromGitHub { owner = "kubo"; repo = "snzip"; - rev = version; - sha256 = "1v8li1zv9f2g31iyi9y9zx42rjvwkaw221g60pmkbv53y667i325"; + rev = "v${version}"; + hash = "sha256-trxCGVNw2MugE7kmth62Qrp7JZcHeP1gdTZk32c3hFg="; }; buildInputs = [ snappy ]; From a2f3a14815dce2f1d2d5f76ef5a30921b2f382eb Mon Sep 17 00:00:00 2001 From: Doron Behar Date: Thu, 2 Mar 2023 16:25:18 +0200 Subject: [PATCH 048/154] python3.pkgs.lsprotocol: 2022.0.0a9 -> 2022.0.0a10 Diff: 
https://github.com/microsoft/lsprotocol/compare/refs/tags/2022.0.0a9...2022.0.0a10 --- pkgs/development/python-modules/lsprotocol/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/lsprotocol/default.nix b/pkgs/development/python-modules/lsprotocol/default.nix index e2dcd7bb9a74..b447ab18cba5 100644 --- a/pkgs/development/python-modules/lsprotocol/default.nix +++ b/pkgs/development/python-modules/lsprotocol/default.nix @@ -13,7 +13,7 @@ buildPythonPackage rec { pname = "lsprotocol"; - version = "2022.0.0a9"; + version = "2022.0.0a10"; format = "pyproject"; disabled = pythonOlder "3.7"; @@ -22,7 +22,7 @@ buildPythonPackage rec { owner = "microsoft"; repo = pname; rev = "refs/tags/${version}"; - hash = "sha256-6XecPKuBhwtkmZrGozzO+VEryI5wwy9hlvWE1oV6ajk="; + hash = "sha256-IAFNEWpBRVAGcJNIV1bog9K2nANRw/qJfCJ9+Wu/yJc="; }; nativeBuildInputs = [ From 5a207fd8c963affc6803378a25c0ad0e8daf6822 Mon Sep 17 00:00:00 2001 From: Doron Behar Date: Thu, 2 Mar 2023 16:26:35 +0200 Subject: [PATCH 049/154] python3.pkgs.crate: 0.29.0 -> 0.30.0 --- pkgs/development/python-modules/crate/default.nix | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/pkgs/development/python-modules/crate/default.nix b/pkgs/development/python-modules/crate/default.nix index 5ba9a0f7695a..198c0662351b 100644 --- a/pkgs/development/python-modules/crate/default.nix +++ b/pkgs/development/python-modules/crate/default.nix @@ -12,12 +12,12 @@ buildPythonPackage rec { pname = "crate"; - version = "0.29.0"; + version = "0.30.0"; disabled = !isPy3k; src = fetchPypi { inherit pname version; - sha256 = "sha256-SywW/b4DnVeSzzRiHbDaKTjcuwDnkwrK6vFfaQVIZhQ="; + sha256 = "sha256-8xraDCFZbpJZsh3sO5VlSHwnEfH4u4AJZkXA+L4TB60="; }; propagatedBuildInputs = [ @@ -32,8 +32,15 @@ buildPythonPackage rec { ]; disabledTests = [ - # network access + # the following tests require network access "test_layer_from_uri" + "test_additional_settings" + "test_basic" + "test_cluster" + "test_default_settings" + "test_dynamic_http_port" + "test_environment_variables" + "test_verbosity" ]; disabledTestPaths = [ From 25d431a04dd91e473b9382f642c2c4c3ce119142 Mon Sep 17 00:00:00 2001 From: misuzu Date: Thu, 2 Mar 2023 19:06:34 +0200 Subject: [PATCH 050/154] ookla-speedtest: armv7l-linux support --- pkgs/tools/networking/ookla-speedtest/default.nix | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkgs/tools/networking/ookla-speedtest/default.nix b/pkgs/tools/networking/ookla-speedtest/default.nix index 0ad02a53ee6c..af299d999fd3 100644 --- a/pkgs/tools/networking/ookla-speedtest/default.nix +++ b/pkgs/tools/networking/ookla-speedtest/default.nix @@ -13,6 +13,10 @@ let url = "https://install.speedtest.net/app/cli/${pname}-${version}-linux-aarch64.tgz"; sha256 = "sha256-OVPSMdo3g+K/iQS23XJ2fFxuUz4WPTdC/QQ3r/pDG9M="; }; + armv7l-linux = fetchurl { + url = "https://install.speedtest.net/app/cli/${pname}-${version}-linux-armhf.tgz"; + sha256 = "sha256-5F/N672KGFVTU1Uz3QMtaxC8jGTu5BObEUe5wJg10I0="; + }; x86_64-darwin = fetchurl { url = "https://install.speedtest.net/app/cli/${pname}-${version}-macosx-universal.tgz"; sha256 = "sha256-yfgZIUnryI+GmZmM7Ksc4UQUQEWQfs5vU89Qh39N5m8="; From b52a29645c00bed74e0ef57e04e3451219163e90 Mon Sep 17 00:00:00 2001 From: misuzu Date: Thu, 2 Mar 2023 19:06:40 +0200 Subject: [PATCH 051/154] ookla-speedtest: i686-linux support --- pkgs/tools/networking/ookla-speedtest/default.nix | 4 ++++ 1 file changed, 4 insertions(+) diff --git 
a/pkgs/tools/networking/ookla-speedtest/default.nix b/pkgs/tools/networking/ookla-speedtest/default.nix index af299d999fd3..d889414bc118 100644 --- a/pkgs/tools/networking/ookla-speedtest/default.nix +++ b/pkgs/tools/networking/ookla-speedtest/default.nix @@ -9,6 +9,10 @@ let url = "https://install.speedtest.net/app/cli/${pname}-${version}-linux-x86_64.tgz"; sha256 = "sha256-VpBZbFT/m+1j+jcy+BigXbwtsZrTbtaPIcpfZNXP7rc="; }; + i686-linux = fetchurl { + url = "https://install.speedtest.net/app/cli/${pname}-${version}-linux-i386.tgz"; + sha256 = "sha256-n/fhjbrn7g4DxmEIRFovts7qbIb2ZILhOS9ViBt3L+g="; + }; aarch64-linux = fetchurl { url = "https://install.speedtest.net/app/cli/${pname}-${version}-linux-aarch64.tgz"; sha256 = "sha256-OVPSMdo3g+K/iQS23XJ2fFxuUz4WPTdC/QQ3r/pDG9M="; From 18aec01cfbd451f4e8ac9116107a674c0d22fe57 Mon Sep 17 00:00:00 2001 From: Janik H Date: Thu, 2 Mar 2023 22:37:12 +0100 Subject: [PATCH 052/154] wireshark: wayland support --- pkgs/applications/networking/sniffers/wireshark/default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/applications/networking/sniffers/wireshark/default.nix b/pkgs/applications/networking/sniffers/wireshark/default.nix index 47687bb544c2..5d69667f1bfb 100644 --- a/pkgs/applications/networking/sniffers/wireshark/default.nix +++ b/pkgs/applications/networking/sniffers/wireshark/default.nix @@ -44,7 +44,7 @@ in stdenv.mkDerivation { buildInputs = [ gettext pcre2 libpcap lua5 libssh nghttp2 openssl libgcrypt libgpg-error gnutls geoip c-ares glib zlib - ] ++ lib.optionals withQt (with qt5; [ qtbase qtmultimedia qtsvg qttools ]) + ] ++ lib.optionals withQt (with qt5; [ qtbase qtmultimedia qtsvg qttools qtwayland ]) ++ lib.optionals stdenv.isLinux [ libcap libnl ] ++ lib.optionals stdenv.isDarwin [ SystemConfiguration ApplicationServices gmp ] ++ lib.optionals (withQt && stdenv.isDarwin) (with qt5; [ qtmacextras ]); From e46eadb51655cfb7dc60a22afbb00b63b7d390ae Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Thu, 2 Mar 2023 23:58:49 +0000 Subject: [PATCH 053/154] audacity: 3.2.4 -> 3.2.5 --- pkgs/applications/audio/audacity/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/audio/audacity/default.nix b/pkgs/applications/audio/audacity/default.nix index 4a652e1ac474..6d8f279dadbd 100644 --- a/pkgs/applications/audio/audacity/default.nix +++ b/pkgs/applications/audio/audacity/default.nix @@ -61,13 +61,13 @@ stdenv.mkDerivation rec { pname = "audacity"; - version = "3.2.4"; + version = "3.2.5"; src = fetchFromGitHub { owner = pname; repo = pname; rev = "Audacity-${version}"; - hash = "sha256-gz2o0Rj4364nJAvJmMQzwIQycoQmqz2/43DBvd3qbho="; + hash = "sha256-tMz55fZh+TfvLEyApDqC0QMd2hEQLJsNQ6y2Xy0xgaQ="; }; postPatch = '' From 82666e62f8ea1e132bdd28c317ed2efb5b78a5ad Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Fri, 3 Mar 2023 00:34:15 +0000 Subject: [PATCH 054/154] rocksdb_lite: 7.9.2 -> 7.10.2 --- pkgs/development/libraries/rocksdb/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/libraries/rocksdb/default.nix b/pkgs/development/libraries/rocksdb/default.nix index 4b7bd0b9fd94..ffad392c6216 100644 --- a/pkgs/development/libraries/rocksdb/default.nix +++ b/pkgs/development/libraries/rocksdb/default.nix @@ -19,13 +19,13 @@ stdenv.mkDerivation rec { pname = "rocksdb"; - version = "7.9.2"; + version = "7.10.2"; src = fetchFromGitHub { owner = "facebook"; repo = pname; rev = "v${version}"; - sha256 = "sha256-5P7IqJ14EZzDkbjaBvbix04ceGGdlWBuVFH/5dpD5VM="; + sha256 = "sha256-U2ReSrJwjAXUdRmwixC0DQXht/h/6rV8SOf5e2NozIs="; }; nativeBuildInputs = [ cmake ninja ]; From 05c2aab5f1831397fb9f3d2735333b784f94c48e Mon Sep 17 00:00:00 2001 From: kilianar Date: Fri, 3 Mar 2023 01:37:48 +0100 Subject: [PATCH 055/154] beancount-black: 0.1.13 -> 0.1.14 https://github.com/LaunchPlatform/beancount-black/releases/tag/0.1.14 --- pkgs/development/python-modules/beancount-black/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/beancount-black/default.nix b/pkgs/development/python-modules/beancount-black/default.nix index eb082f5b9537..103574a24354 100644 --- a/pkgs/development/python-modules/beancount-black/default.nix +++ b/pkgs/development/python-modules/beancount-black/default.nix @@ -10,7 +10,7 @@ buildPythonPackage rec { pname = "beancount-black"; - version = "0.1.13"; + version = "0.1.14"; disabled = pythonOlder "3.9"; format = "pyproject"; @@ -19,7 +19,7 @@ buildPythonPackage rec { owner = "LaunchPlatform"; repo = "beancount-black"; rev = version; - sha256 = "sha256-jhcPR+5+e8d9cbcXC//xuBwmZ14xtXNlYtmH5yNSU0E="; + hash = "sha256-4ooMskwPJJLJBfPikaHJ4xuwR1x478ecYWZdIE0UAK8="; }; buildInputs = [ From ec2defc8ece583e4cb0a56b8b3a0dfa4affcf94a Mon Sep 17 00:00:00 2001 From: kilianar Date: Fri, 3 Mar 2023 01:54:14 +0100 Subject: [PATCH 056/154] hugo: 0.110.0 -> 0.111.1 https://github.com/gohugoio/hugo/releases/tag/v0.111.0 https://github.com/gohugoio/hugo/releases/tag/v0.111.1 --- pkgs/applications/misc/hugo/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/misc/hugo/default.nix b/pkgs/applications/misc/hugo/default.nix index f4f4f579ef56..5fc8dd5e9d4c 100644 --- a/pkgs/applications/misc/hugo/default.nix +++ b/pkgs/applications/misc/hugo/default.nix @@ -2,16 +2,16 @@ buildGoModule rec { pname = "hugo"; - version = "0.110.0"; + version = "0.111.1"; src = fetchFromGitHub { owner = "gohugoio"; repo = pname; rev = "v${version}"; - hash = "sha256-7B0C8191lUGsv81+0eKDrBm+5hLlFjID3RTuajSg/RM="; + hash = "sha256-3bg7cmM05ekR5gtJCEJk3flplw8MRc9hVqlZx3ZUIaw="; }; - vendorHash = "sha256-GtywXjtAF5Q4jUz2clfseUJVqiU+eSguG/ZoKy2TzuA="; + vendorHash = "sha256-xiysjJi3bL0xIoEEo7xXQbznFzwKJrCT6l/bxEbDRUI="; doCheck = false; From 7548197583a29a2d499182098ad02d75fcd73493 Mon Sep 17 00:00:00 2001 From: Anderson Torres Date: Thu, 2 Mar 2023 22:38:15 -0300 Subject: [PATCH 057/154] yapesdl: mark as broken on Darwin --- pkgs/applications/emulators/yapesdl/default.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/pkgs/applications/emulators/yapesdl/default.nix b/pkgs/applications/emulators/yapesdl/default.nix index 06176e3f525f..2ea3583edd76 100644 --- a/pkgs/applications/emulators/yapesdl/default.nix +++ b/pkgs/applications/emulators/yapesdl/default.nix @@ -39,5 
+39,6 @@ stdenv.mkDerivation (self: { license = lib.licenses.gpl2Plus; maintainers = with lib.maintainers; [ AndersonTorres ]; platforms = lib.platforms.unix; + broken = stdenv.isDarwin; }; }) From 575ce45578210976798fabe2787fd1d45ba700d8 Mon Sep 17 00:00:00 2001 From: figsoda Date: Thu, 2 Mar 2023 22:06:49 -0500 Subject: [PATCH 058/154] typeshare: 1.0.1 -> 1.1.0 Diff: https://github.com/1password/typeshare/compare/v1.0.1...v1.1.0 Changelog: https://github.com/1password/typeshare/blob/v1.1.0/CHANGELOG.md --- .../tools/rust/typeshare/default.nix | 29 ++++++++++++++----- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/pkgs/development/tools/rust/typeshare/default.nix b/pkgs/development/tools/rust/typeshare/default.nix index 6e96bf72bbad..afcdedd9bf88 100644 --- a/pkgs/development/tools/rust/typeshare/default.nix +++ b/pkgs/development/tools/rust/typeshare/default.nix @@ -1,22 +1,37 @@ -{ lib, rustPlatform, fetchCrate }: +{ lib +, rustPlatform +, fetchFromGitHub +, installShellFiles +}: rustPlatform.buildRustPackage rec { pname = "typeshare"; - version = "1.0.1"; + version = "1.1.0"; - src = fetchCrate { - inherit version; - pname = "typeshare-cli"; - sha256 = "sha256-SbTI7170Oc1e09dv4TvUwByG3qkyAL5YXZ96NzI0FSI="; + src = fetchFromGitHub { + owner = "1password"; + repo = "typeshare"; + rev = "v${version}"; + hash = "sha256-FQ9KL8X7zz3ew+H1lhh4bkZ01Te1TD+QXAMxS8dXAaI="; }; - cargoSha256 = "sha256-5EhXw2WcRJqCbdMvOtich9EYQqi0uwCH1a1XXIo8aAo="; + cargoHash = "sha256-t6tGNHmPasmTRto2hobvJywrF/8tO79zkfWwa6lCPK8="; + + nativeBuildInputs = [ installShellFiles ]; buildFeatures = [ "go" ]; + postInstall = '' + installShellCompletion --cmd typeshare \ + --bash <($out/bin/typeshare completions bash) \ + --fish <($out/bin/typeshare completions fish) \ + --zsh <($out/bin/typeshare completions zsh) + ''; + meta = with lib; { description = "Command Line Tool for generating language files with typeshare"; homepage = "https://github.com/1password/typeshare"; + changelog = "https://github.com/1password/typeshare/blob/v${version}/CHANGELOG.md"; license = with licenses; [ asl20 /* or */ mit ]; maintainers = with maintainers; [ figsoda ]; }; From 1e18b0b197f08a0206b0fa836c58654777ba4998 Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Fri, 3 Mar 2023 03:24:12 +0000 Subject: [PATCH 059/154] zenith-nvidia: 0.13.1 -> 0.14.0 --- pkgs/tools/system/zenith/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/tools/system/zenith/default.nix b/pkgs/tools/system/zenith/default.nix index 282bd31571d9..25552ad8b860 100644 --- a/pkgs/tools/system/zenith/default.nix +++ b/pkgs/tools/system/zenith/default.nix @@ -12,13 +12,13 @@ assert nvidiaSupport -> stdenv.isLinux; rustPlatform.buildRustPackage rec { pname = "zenith"; - version = "0.13.1"; + version = "0.14.0"; src = fetchFromGitHub { owner = "bvaisvil"; repo = pname; rev = version; - sha256 = "sha256-N/DvPVYGM/DjTvKvOlR60q6rvNyfAQlnvFnFG5nbUmQ="; + sha256 = "sha256-GrrdE9Ih8x8N2HN+1NfxfthfHbufLAT/Ac+ZZWW5Zg8="; }; # remove cargo config so it can find the linker on aarch64-linux @@ -26,7 +26,7 @@ rustPlatform.buildRustPackage rec { rm .cargo/config ''; - cargoSha256 = "sha256-Y/vvRJpv82Uc+Bu3lbZxRsu4TL6sAjz5AWHAHkwh98Y="; + cargoHash = "sha256-2VgyUVBcmSlmPSqAWrzWjH5J6Co/rAC9EQCckYzfW2o="; nativeBuildInputs = [ llvmPackages.clang ] ++ lib.optional nvidiaSupport makeWrapper; buildInputs = [ llvmPackages.libclang ] ++ lib.optionals stdenv.isDarwin [ IOKit ]; From c66d73e9611be06e35e6b0ad21acf31aaad8f1b4 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Fri, 3 Mar 2023 04:22:06 +0100 Subject: [PATCH 060/154] ircdog: 0.3.0 -> 0.4.0 Diff: https://github.com/goshuirc/ircdog/compare/v0.3.0...v0.4.0 Changelog: https://github.com/ergochat/ircdog/releases/tag/v0.4.0 --- pkgs/applications/networking/irc/ircdog/default.nix | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pkgs/applications/networking/irc/ircdog/default.nix b/pkgs/applications/networking/irc/ircdog/default.nix index ebb9d2ad15a6..6d1f534877e9 100644 --- a/pkgs/applications/networking/irc/ircdog/default.nix +++ b/pkgs/applications/networking/irc/ircdog/default.nix @@ -5,15 +5,15 @@ buildGoModule rec { pname = "ircdog"; - version = "0.3.0"; + version = "0.4.0"; src = fetchFromGitHub { owner = "goshuirc"; - repo = pname; - rev = "v${version}"; - sha256 = "sha256-x3ihWLgVYu17vG1xQTgIr4TSkeZ467TZBV1fPTPnZgw="; - fetchSubmodules = true; + repo = "ircdog"; + rev = "refs/tags/v${version}"; + hash = "sha256-uqqgXmEpGEJHnd1mtgpp13jFhKP5fbhE5wtcZNZL8t4="; }; + vendorSha256 = null; meta = with lib; { From a60883cb274d8cbfe16aacf92b3fc35c17caadf5 Mon Sep 17 00:00:00 2001 From: Mario Rodas Date: Fri, 3 Mar 2023 04:20:00 +0000 Subject: [PATCH 061/154] millet: 0.7.9 -> 0.8.1 --- pkgs/development/tools/language-servers/millet/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/tools/language-servers/millet/default.nix b/pkgs/development/tools/language-servers/millet/default.nix index e39abae89a2c..8c01c6ee20d3 100644 --- a/pkgs/development/tools/language-servers/millet/default.nix +++ b/pkgs/development/tools/language-servers/millet/default.nix @@ -2,16 +2,16 @@ rustPlatform.buildRustPackage rec { pname = "millet"; - version = "0.7.9"; + version = "0.8.1"; src = fetchFromGitHub { owner = "azdavis"; repo = pname; rev = "v${version}"; - hash = "sha256-c4hNrswfYz/Wr59sWQJu8yuOqk594iVm+NzxYpG96Ys="; + hash = "sha256-yIOb6AeEpIbKarY4I0X4zq5Gtrv05QLrDlFaBD3x6rw="; }; - cargoHash = "sha256-Ja5Vjt3z0pkxEMtkyWYY+lZH0AnzVzyGxlQtlmwWbS4="; + cargoHash = "sha256-DIRs+xhcdV74NFjsB1jJYgd8Cu/BmAUcBf58rGAp/yo="; postPatch = '' rm .cargo/config.toml From cba18b768325220e1806f17c4f03c4c88264880c Mon Sep 17 00:00:00 2001 From: Mario 
Rodas Date: Fri, 3 Mar 2023 04:20:00 +0000 Subject: [PATCH 062/154] esbuild: 0.17.10 -> 0.17.11 https://github.com/evanw/esbuild/releases/tag/v0.17.11 --- pkgs/development/tools/esbuild/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/tools/esbuild/default.nix b/pkgs/development/tools/esbuild/default.nix index 3845f18c351d..fae82784e45f 100644 --- a/pkgs/development/tools/esbuild/default.nix +++ b/pkgs/development/tools/esbuild/default.nix @@ -2,13 +2,13 @@ buildGoModule rec { pname = "esbuild"; - version = "0.17.10"; + version = "0.17.11"; src = fetchFromGitHub { owner = "evanw"; repo = "esbuild"; rev = "v${version}"; - hash = "sha256-qe7YCOIwp+MSa5VkwImdOea1aMcpWdor/13PIgGEkkw="; + hash = "sha256-k7bXEDAmxyn2u/cniqKtr9zbrWnzwbhTZkL35/igctM="; }; vendorHash = "sha256-+BfxCyg0KkDQpHt/wycy/8CTG6YBA/VJvJFhhzUnSiQ="; From 99d44fe54d90eb23b7f380aee1f855096ae6a1ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=BCtz?= Date: Thu, 2 Mar 2023 20:40:29 -0800 Subject: [PATCH 063/154] corrosion: 0.3.3 -> 0.3.4 Diff: https://github.com/corrosion-rs/corrosion/compare/v0.3.3...v0.3.4 Changelog: https://github.com/corrosion-rs/corrosion/blob/v0.3.4/RELEASES.md --- pkgs/development/tools/build-managers/corrosion/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/tools/build-managers/corrosion/default.nix b/pkgs/development/tools/build-managers/corrosion/default.nix index bee465fbfd88..1f8e04c08082 100644 --- a/pkgs/development/tools/build-managers/corrosion/default.nix +++ b/pkgs/development/tools/build-managers/corrosion/default.nix @@ -8,13 +8,13 @@ stdenv.mkDerivation rec { pname = "corrosion"; - version = "0.3.3"; + version = "0.3.4"; src = fetchFromGitHub { owner = "corrosion-rs"; repo = "corrosion"; rev = "v${version}"; - hash = "sha256-dXUjQmKk+UdgYqdMuNh9ALaots1t0xwg6hEWwAbGPJc="; + hash = "sha256-g2kA1FYt6OWb0zb3pSQ46dJMsSZpT6kLYkpIIN3XZbI="; }; cargoRoot = "generator"; @@ -23,7 +23,7 @@ stdenv.mkDerivation rec { inherit src; sourceRoot = "${src.name}/${cargoRoot}"; name = "${pname}-${version}"; - hash = "sha256-f+n/bjjdKar5aURkPNYKkHUll6lqNa/dlzq3dIFh+tc="; + hash = "sha256-088qK9meyqV93ezLlBIjdp1l/n+pv+9afaJGYlXEFQc="; }; buildInputs = lib.optional stdenv.isDarwin libiconv; From 5224fd727b527a13965882f94fe64ef3f5323f06 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Fri, 3 Mar 2023 05:07:02 +0000 Subject: [PATCH 064/154] ocm: 0.1.65 -> 0.1.66 --- pkgs/applications/networking/cluster/ocm/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/networking/cluster/ocm/default.nix b/pkgs/applications/networking/cluster/ocm/default.nix index 4c4ececb71ef..e7f01e9916e1 100644 --- a/pkgs/applications/networking/cluster/ocm/default.nix +++ b/pkgs/applications/networking/cluster/ocm/default.nix @@ -2,16 +2,16 @@ buildGoModule rec { pname = "ocm"; - version = "0.1.65"; + version = "0.1.66"; src = fetchFromGitHub { owner = "openshift-online"; repo = "ocm-cli"; rev = "v${version}"; - sha256 = "sha256-UzHGVK/HZ5eH8nO4+G92NunOQi9AWnqv4vgcHjtoPDw="; + sha256 = "sha256-iOgDWqP9sFd5/0e5/+WP6R3PpJa8AiUE4EjI39HwWX8="; }; - vendorSha256 = "sha256-4pqXap1WayqdXuwwLktE71D7x6Ao9MkIKSzIKtVyP84="; + vendorHash = "sha256-yY/X0LVIH1ULegx8MIZyUxD1wPNxxISSCBxj9aY2wtA="; # Strip the final binary. ldflags = [ "-s" "-w" ]; From 0104f996a0425a92b00f00ded4335d58e54e6b6b Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Fri, 3 Mar 2023 06:26:38 +0000 Subject: [PATCH 065/154] gobgpd: 3.11.0 -> 3.12.0 --- pkgs/servers/misc/gobgpd/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/servers/misc/gobgpd/default.nix b/pkgs/servers/misc/gobgpd/default.nix index 68edff754a88..d20a60e5208c 100644 --- a/pkgs/servers/misc/gobgpd/default.nix +++ b/pkgs/servers/misc/gobgpd/default.nix @@ -5,16 +5,16 @@ buildGoModule rec { pname = "gobgpd"; - version = "3.11.0"; + version = "3.12.0"; src = fetchFromGitHub { owner = "osrg"; repo = "gobgp"; rev = "refs/tags/v${version}"; - hash = "sha256-UGRGJqeVWrt8NVf9d5Mk7k+k2Is/fwHv2X0hmyXvTZs="; + hash = "sha256-keev3DZ3xN5UARuYKfSdox0KKBjrM5RoMD273Aw0AGY="; }; - vendorHash = "sha256-9Vi8qrcFC2SazcGVgAf1vbKvxd8rTMgye63wSCaFonk="; + vendorHash = "sha256-5lRW9gWQZRRqZoVB16kI1VEnr0XsiPtLUuioK/0f8w0="; postConfigure = '' export CGO_ENABLED=0 From 87e424fb9c3e08d4305a0d63a0ef5e33c8233780 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Fri, 3 Mar 2023 07:10:02 +0000 Subject: [PATCH 066/154] libdnf: 0.69.0 -> 0.70.0 --- pkgs/tools/package-management/libdnf/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/tools/package-management/libdnf/default.nix b/pkgs/tools/package-management/libdnf/default.nix index 6fee9ea187ca..f006bb60190e 100644 --- a/pkgs/tools/package-management/libdnf/default.nix +++ b/pkgs/tools/package-management/libdnf/default.nix @@ -3,13 +3,13 @@ stdenv.mkDerivation rec { pname = "libdnf"; - version = "0.69.0"; + version = "0.70.0"; src = fetchFromGitHub { owner = "rpm-software-management"; repo = pname; rev = version; - sha256 = "sha256-Mc9yI18D4OYv8l4axQ8W0XZ8HfmEZ5IhHC6/uKkv0Ec="; + sha256 = "sha256-tuHrkL3tL+sCLPxNElVgnb4zQ6OTu65X9pb/cX6vD/w="; }; nativeBuildInputs = [ From 06a2fe50fff1a77894c9eb3c239a90a720c4ddbe Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Fri, 3 Mar 2023 07:28:12 +0000 Subject: [PATCH 067/154] process-compose: 0.40.2 -> 0.43.1 --- pkgs/applications/misc/process-compose/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/misc/process-compose/default.nix b/pkgs/applications/misc/process-compose/default.nix index 67c1c7e815c1..967904c6e194 100644 --- a/pkgs/applications/misc/process-compose/default.nix +++ b/pkgs/applications/misc/process-compose/default.nix @@ -8,13 +8,13 @@ let config-module = "github.com/f1bonacc1/process-compose/src/config"; in buildGoModule rec { pname = "process-compose"; - version = "0.40.2"; + version = "0.43.1"; src = fetchFromGitHub { owner = "F1bonacc1"; repo = pname; rev = "v${version}"; - hash = "sha256-+09gLeifEFwG2Ou1tQP29hYHhr0Qn0hOKj7p7PB8Jfc="; + hash = "sha256-yNYoVz6vITKkAkqH/0p7D4sifTpjtEZS4syFSwN4v98="; # populate values that require us to use git. By doing this in postFetch we # can delete .git afterwards and maintain better reproducibility of the src. leaveDotGit = true; @@ -43,7 +43,7 @@ buildGoModule rec { installShellFiles ]; - vendorHash = "sha256-g82JRmfbKH/XEZx2aLZOcyen23vOxQXR7VyeAYxCSi4="; + vendorHash = "sha256-iiGn0dYHNEp5Bs54X44sHbsG3HD92Xs4oah4iZXqqvQ="; doCheck = false; From f385a972fa57e4c16ad83b56ea7bbcf07c1ab94c Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Fri, 3 Mar 2023 09:05:53 +0000 Subject: [PATCH 068/154] mkvtoolnix: 73.0.0 -> 74.0.0 --- pkgs/applications/video/mkvtoolnix/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/video/mkvtoolnix/default.nix b/pkgs/applications/video/mkvtoolnix/default.nix index bce50b43a07c..3befb8534916 100644 --- a/pkgs/applications/video/mkvtoolnix/default.nix +++ b/pkgs/applications/video/mkvtoolnix/default.nix @@ -47,13 +47,13 @@ let in stdenv.mkDerivation rec { pname = "mkvtoolnix"; - version = "73.0.0"; + version = "74.0.0"; src = fetchFromGitLab { owner = "mbunkus"; repo = "mkvtoolnix"; rev = "release-${version}"; - sha256 = "HGoT3t/ooRMiyjUkHnvVGOB04IU5U8VEKDixhE57kR8="; + sha256 = "sha256-p8rIAHSqYCOlNbuxisQlIkMh2OArc+MOYn1kgC5kJsc="; }; nativeBuildInputs = [ From e23609cd219206b80bf7813b1685b31e355168bb Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Fri, 3 Mar 2023 09:43:35 +0000 Subject: [PATCH 069/154] bind: 9.18.11 -> 9.18.12 --- pkgs/servers/dns/bind/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/servers/dns/bind/default.nix b/pkgs/servers/dns/bind/default.nix index 9934a4710d76..2047436d8ce6 100644 --- a/pkgs/servers/dns/bind/default.nix +++ b/pkgs/servers/dns/bind/default.nix @@ -8,11 +8,11 @@ stdenv.mkDerivation rec { pname = "bind"; - version = "9.18.11"; + version = "9.18.12"; src = fetchurl { url = "https://downloads.isc.org/isc/bind9/${version}/${pname}-${version}.tar.xz"; - sha256 = "sha256-j/M1KBIjDLy9pC34fK2WH5QWPT2kV8XkvvgFf9XfIVg="; + sha256 = "sha256-R3Zrt7BjqrutBUOGsZCqf2wUUkQnr9Qnww7EJlEgJ+c="; }; outputs = [ "out" "lib" "dev" "man" "dnsutils" "host" ]; From 8f84ab48c4005e63efb5390b72505f5f01cb3ac2 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Fri, 3 Mar 2023 10:43:07 +0000 Subject: [PATCH 070/154] libreswan: 4.9 -> 4.10 --- pkgs/tools/networking/libreswan/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/tools/networking/libreswan/default.nix b/pkgs/tools/networking/libreswan/default.nix index ab3249e57a87..47838f756635 100644 --- a/pkgs/tools/networking/libreswan/default.nix +++ b/pkgs/tools/networking/libreswan/default.nix @@ -45,11 +45,11 @@ in stdenv.mkDerivation rec { pname = "libreswan"; - version = "4.9"; + version = "4.10"; src = fetchurl { url = "https://download.libreswan.org/${pname}-${version}.tar.gz"; - sha256 = "sha256-9kLctjXpCVZMqP2Z6kSrQ/YHI7TXbBWO2BKXjEWzmLk="; + sha256 = "sha256-WpQAwlqO26B0IEJvtV3Lqv2qNwLlsPLBkgWmxWckins="; }; strictDeps = true; From 123e1dc0dd4e65668f0709e7dc7dfdefbb864c8a Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Fri, 3 Mar 2023 11:15:38 +0000 Subject: [PATCH 071/154] libdatovka: 0.2.1 -> 0.3.0 --- pkgs/development/libraries/libdatovka/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/libraries/libdatovka/default.nix b/pkgs/development/libraries/libdatovka/default.nix index aca25f73eddb..8cddc9a9f37a 100644 --- a/pkgs/development/libraries/libdatovka/default.nix +++ b/pkgs/development/libraries/libdatovka/default.nix @@ -15,11 +15,11 @@ stdenv.mkDerivation rec { pname = "libdatovka"; - version = "0.2.1"; + version = "0.3.0"; src = fetchurl { url = "https://gitlab.nic.cz/datovka/libdatovka/-/archive/v${version}/libdatovka-v${version}.tar.gz"; - sha256 = "sha256-687d8ZD9zfMeo62YWCW5Kc0CXkKClxtbbwXR51pPwBE="; + sha256 = "sha256-aG7U8jP3pvOeFDetYVOx+cE78ys0uSkKNjSgB09ste8="; }; patches = [ From ae00f6e42e90b8e99b401a5373d2b4ffcccf71f9 Mon Sep 17 00:00:00 2001 From: Florian Brandes Date: Fri, 3 Mar 2023 13:01:33 +0100 Subject: [PATCH 072/154] octoprint: 1.8.6 -> 1.8.7 Changelog: https://github.com/OctoPrint/OctoPrint/releases/tag/1.8.7 Signed-off-by: Florian Brandes --- pkgs/applications/misc/octoprint/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/misc/octoprint/default.nix b/pkgs/applications/misc/octoprint/default.nix index 4ca54221c2fc..c1a4f6aec922 100644 --- a/pkgs/applications/misc/octoprint/default.nix +++ b/pkgs/applications/misc/octoprint/default.nix @@ -16,7 +16,7 @@ let packageOverrides = lib.foldr lib.composeExtensions (self: super: { }) ( [ ( - # with version 3 of flask-limiter octoprint 1.8.6 fails to start with + # with version 3 of flask-limiter octoprint 1.8.7 fails to start with # TypeError: Limiter.__init__() got multiple values for argument 'key_func' self: super: { flask-limiter = super.flask-limiter.overridePythonAttrs (oldAttrs: rec { @@ -105,13 +105,13 @@ let self: super: { octoprint = self.buildPythonPackage rec { pname = "OctoPrint"; - version = "1.8.6"; + version = "1.8.7"; src = fetchFromGitHub { owner = "OctoPrint"; repo = "OctoPrint"; rev = version; - hash = "sha256-DCUesPy4/g7DYN/9CDRvwAWHcv4dFsF+gsysg5UWThQ="; + hash = "sha256-g4PYB9YbkX0almRPgMFlb8D633Y5fc3H+Boa541suqc="; }; propagatedBuildInputs = with self; [ From a210bb2111aa393cec9d4e6e62839129f8204033 Mon Sep 17 00:00:00 2001 From: Tom Fitzhenry Date: Sat, 4 Mar 2023 00:47:15 +1100 Subject: [PATCH 073/154] phosh: add tomfitzhenry@ as maintainer --- nixos/tests/phosh.nix | 2 +- pkgs/applications/misc/phoc/default.nix | 2 +- pkgs/applications/window-managers/phosh/default.nix | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nixos/tests/phosh.nix b/nixos/tests/phosh.nix index 25bf4848542e..78d6da31beee 100644 --- a/nixos/tests/phosh.nix +++ b/nixos/tests/phosh.nix @@ -3,7 +3,7 @@ import ./make-test-python.nix ({ pkgs, ...}: let in { name = "phosh"; meta = with pkgs.lib.maintainers; { - maintainers = [ zhaofengli ]; + maintainers = [ tomfitzhenry zhaofengli ]; }; nodes = { diff --git a/pkgs/applications/misc/phoc/default.nix b/pkgs/applications/misc/phoc/default.nix index 0fae80fc14e4..79ffb3fdbf9b 100644 --- a/pkgs/applications/misc/phoc/default.nix +++ b/pkgs/applications/misc/phoc/default.nix @@ -95,7 +95,7 @@ in stdenv.mkDerivation rec { description = "Wayland compositor for mobile phones like the Librem 5"; homepage = "https://gitlab.gnome.org/World/Phosh/phoc"; license = licenses.gpl3Plus; - maintainers = with maintainers; [ masipcat zhaofengli ]; + maintainers = with 
maintainers; [ masipcat tomfitzhenry zhaofengli ]; platforms = platforms.linux; }; } diff --git a/pkgs/applications/window-managers/phosh/default.nix b/pkgs/applications/window-managers/phosh/default.nix index 5b4480499d95..adb22269f04c 100644 --- a/pkgs/applications/window-managers/phosh/default.nix +++ b/pkgs/applications/window-managers/phosh/default.nix @@ -141,7 +141,7 @@ stdenv.mkDerivation rec { homepage = "https://gitlab.gnome.org/World/Phosh/phosh"; changelog = "https://gitlab.gnome.org/World/Phosh/phosh/-/blob/v${version}/debian/changelog"; license = licenses.gpl3Plus; - maintainers = with maintainers; [ masipcat zhaofengli ]; + maintainers = with maintainers; [ masipcat tomfitzhenry zhaofengli ]; platforms = platforms.linux; }; } From 3d8505781153d8328c1dc771f1f1890acc6e25f8 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Date: Fri, 3 Mar 2023 14:43:44 -0500 Subject: [PATCH 074/154] roon-server: 2.0-1202 -> 2.0-1223 --- pkgs/servers/roon-server/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/servers/roon-server/default.nix b/pkgs/servers/roon-server/default.nix index 067f9b48d58e..2f60d525dc07 100644 --- a/pkgs/servers/roon-server/default.nix +++ b/pkgs/servers/roon-server/default.nix @@ -15,7 +15,7 @@ , stdenv }: let - version = "2.0-1202"; + version = "2.0-1223"; urlVersion = builtins.replaceStrings [ "." "-" ] [ "00" "0" ] version; in stdenv.mkDerivation { @@ -24,7 +24,7 @@ stdenv.mkDerivation { src = fetchurl { url = "https://download.roonlabs.com/updates/production/RoonServer_linuxx64_${urlVersion}.tar.bz2"; - hash = "sha256-YeBzXnw/BpJDUJ7fUf7TH0zQcpCjUm9peB7zPO2ZsYI="; + hash = "sha256-1jHNHj1tB80/CdE7GPCgRsI0+2Gfx4kiE6a0EOI/K5U="; }; dontConfigure = true; From 7089294f10068dbeeed6e2e4d7a24300bf4bacb6 Mon Sep 17 00:00:00 2001 From: Patrick Widmer Date: Thu, 2 Mar 2023 21:41:51 +0100 Subject: [PATCH 075/154] strings: add escapeURL for url encoding --- lib/ascii-table.nix | 5 ++++- lib/default.nix | 2 +- lib/strings.nix | 21 ++++++++++++++++++--- lib/tests/misc.nix | 9 +++++++++ 4 files changed, 32 insertions(+), 5 deletions(-) diff --git a/lib/ascii-table.nix b/lib/ascii-table.nix index c564e12bcc6f..74989936ea40 100644 --- a/lib/ascii-table.nix +++ b/lib/ascii-table.nix @@ -1,4 +1,7 @@ -{ " " = 32; +{ "\t" = 9; + "\n" = 10; + "\r" = 13; + " " = 32; "!" = 33; "\"" = 34; "#" = 35; diff --git a/lib/default.nix b/lib/default.nix index dc4df9575418..7948dbd5a1ef 100644 --- a/lib/default.nix +++ b/lib/default.nix @@ -100,7 +100,7 @@ let escapeShellArg escapeShellArgs isStorePath isStringLike isValidPosixName toShellVar toShellVars - escapeRegex escapeXML replaceChars lowerChars + escapeRegex escapeURL escapeXML replaceChars lowerChars upperChars toLower toUpper addContextFrom splitString removePrefix removeSuffix versionOlder versionAtLeast getName getVersion diff --git a/lib/strings.nix b/lib/strings.nix index 68d930950662..e49ed4382240 100644 --- a/lib/strings.nix +++ b/lib/strings.nix @@ -34,6 +34,8 @@ rec { unsafeDiscardStringContext ; + asciiTable = import ./ascii-table.nix; + /* Concatenate a list of strings. Type: concatStrings :: [string] -> string @@ -327,9 +329,7 @@ rec { => 40 */ - charToInt = let - table = import ./ascii-table.nix; - in c: builtins.getAttr c table; + charToInt = c: builtins.getAttr c asciiTable; /* Escape occurrence of the elements of `list` in `string` by prefixing it with a backslash.
@@ -355,6 +355,21 @@ rec { */ escapeC = list: replaceStrings list (map (c: "\\x${ toLower (lib.toHexString (charToInt c))}") list); + /* Escape the string so it can be safely placed inside a URL + query. + + Type: escapeURL :: string -> string + + Example: + escapeURL "foo/bar baz" + => "foo%2Fbar%20baz" + */ + escapeURL = let + unreserved = [ "A" "B" "C" "D" "E" "F" "G" "H" "I" "J" "K" "L" "M" "N" "O" "P" "Q" "R" "S" "T" "U" "V" "W" "X" "Y" "Z" "a" "b" "c" "d" "e" "f" "g" "h" "i" "j" "k" "l" "m" "n" "o" "p" "q" "r" "s" "t" "u" "v" "w" "x" "y" "z" "0" "1" "2" "3" "4" "5" "6" "7" "8" "9" "-" "_" "." "~" ]; + toEscape = builtins.removeAttrs asciiTable unreserved; + in + replaceStrings (builtins.attrNames toEscape) (lib.mapAttrsToList (_: c: "%${fixedWidthString 2 "0" (lib.toHexString c)}") toEscape); + /* Quote string to be used safely within the Bourne shell. Type: escapeShellArg :: string -> string diff --git a/lib/tests/misc.nix b/lib/tests/misc.nix index 406656dac1a9..07d04f5356c7 100644 --- a/lib/tests/misc.nix +++ b/lib/tests/misc.nix @@ -347,6 +347,15 @@ runTests { expected = "Hello\\x20World"; }; + testEscapeURL = testAllTrue [ + ("" == strings.escapeURL "") + ("Hello" == strings.escapeURL "Hello") + ("Hello%20World" == strings.escapeURL "Hello World") + ("Hello%2FWorld" == strings.escapeURL "Hello/World") + ("42%25" == strings.escapeURL "42%") + ("%20%3F%26%3D%23%2B%25%21%3C%3E%23%22%7B%7D%7C%5C%5E%5B%5D%60%09%3A%2F%40%24%27%28%29%2A%2C%3B" == strings.escapeURL " ?&=#+%!<>#\"{}|\\^[]`\t:/@$'()*,;") + ]; + testToInt = testAllTrue [ # Naive (123 == toInt "123") From 3b02da3fddeb9f7df648a420dbf4fc107142578f Mon Sep 17 00:00:00 2001 From: Maximilian Bosch Date: Thu, 2 Mar 2023 17:51:50 +0100 Subject: [PATCH 076/154] nixos/tests/gitea: keep calling the file itself evaluatable When I work on something gitea-related I want to be able to just do `nix-build nixos/tests/gitea.nix` to run the tests. --- nixos/tests/gitea.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nixos/tests/gitea.nix b/nixos/tests/gitea.nix index da61abd84e89..a8b5567c2075 100644 --- a/nixos/tests/gitea.nix +++ b/nixos/tests/gitea.nix @@ -1,6 +1,6 @@ { system ? builtins.currentSystem, config ? {}, - giteaPackage, + giteaPackage ? pkgs.gitea, pkgs ? import ../.. 
{ inherit system config; } }: From d168fec9cf5b1f5ff6312d2ed8f9a89993ec5263 Mon Sep 17 00:00:00 2001 From: figsoda Date: Fri, 3 Mar 2023 16:11:02 -0500 Subject: [PATCH 077/154] pods: 1.0.5 -> 1.0.6 Diff: https://github.com/marhkb/pods/compare/v1.0.5...v1.0.6 Changelog: https://github.com/marhkb/pods/releases/tag/v1.0.6 --- pkgs/applications/virtualization/pods/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/virtualization/pods/default.nix b/pkgs/applications/virtualization/pods/default.nix index 648183654701..5e68eeb2c827 100644 --- a/pkgs/applications/virtualization/pods/default.nix +++ b/pkgs/applications/virtualization/pods/default.nix @@ -17,19 +17,19 @@ stdenv.mkDerivation rec { pname = "pods"; - version = "1.0.5"; + version = "1.0.6"; src = fetchFromGitHub { owner = "marhkb"; repo = pname; rev = "v${version}"; - sha256 = "sha256-V/4atbYG3jP0o1Bfn/dZBDXEk+Yi4cSJAY8HnTmpHRI="; + sha256 = "sha256-ZryzNlEj/2JTp5FJiDzXN9v1DvczfebqEOrJP+dKaRw="; }; cargoDeps = rustPlatform.fetchCargoTarball { inherit src; name = "${pname}-${version}"; - sha256 = "sha256-gJZ3z6xDgWwOPjCLZg3LRMk3KoTXGaotXgO/xDUwAvk="; + sha256 = "sha256-OgvlRnii4T4HcFPiGkcLcagyHCg+lWXCXQ9XdXjHDbQ="; }; nativeBuildInputs = [ From 4b853137a6341956d56280e08c559e0a35454e15 Mon Sep 17 00:00:00 2001 From: Weijia Wang <9713184+wegank@users.noreply.github.com> Date: Sat, 4 Mar 2023 00:04:16 +0200 Subject: [PATCH 078/154] cargo-tarpaulin: add aarch64 support --- pkgs/development/tools/analysis/cargo-tarpaulin/default.nix | 1 - 1 file changed, 1 deletion(-) diff --git a/pkgs/development/tools/analysis/cargo-tarpaulin/default.nix b/pkgs/development/tools/analysis/cargo-tarpaulin/default.nix index 499a42429230..5ee26703a18b 100644 --- a/pkgs/development/tools/analysis/cargo-tarpaulin/default.nix +++ b/pkgs/development/tools/analysis/cargo-tarpaulin/default.nix @@ -26,6 +26,5 @@ rustPlatform.buildRustPackage rec { homepage = "https://github.com/xd009642/tarpaulin"; license = with licenses; [ mit /* or */ asl20 ]; maintainers = with maintainers; [ hugoreeves ]; - platforms = lib.platforms.x86_64; }; } From e3d8ef89aa9038481890a947a482908aa108d242 Mon Sep 17 00:00:00 2001 From: Serg Nesterov Date: Sat, 4 Mar 2023 01:34:04 +0300 Subject: [PATCH 079/154] jmusicbot: 0.3.8 -> 0.3.9 --- pkgs/applications/audio/jmusicbot/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/audio/jmusicbot/default.nix b/pkgs/applications/audio/jmusicbot/default.nix index 7a1676e7873c..590c695577cf 100644 --- a/pkgs/applications/audio/jmusicbot/default.nix +++ b/pkgs/applications/audio/jmusicbot/default.nix @@ -2,11 +2,11 @@ stdenv.mkDerivation rec { pname = "JMusicBot"; - version = "0.3.8"; + version = "0.3.9"; src = fetchurl { url = "https://github.com/jagrosh/MusicBot/releases/download/${version}/JMusicBot-${version}.jar"; - sha256 = "sha256-wzmrh9moY6oo3RqOy9Zl1X70BZlvbJkQmz8BaBIFtIM="; + sha256 = "sha256-2A1yo2e1MawGLMTM6jWwpQJJuKOmljxFriORv90Jqg8="; }; dontUnpack = true; From 484099cd447899bd84c8babfdbcb37fbf345ebb0 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 00:11:26 +0100 Subject: [PATCH 080/154] python310Packages.mypy-boto3-builder: 7.12.4 -> 7.12.5 Diff: https://github.com/youtype/mypy_boto3_builder/compare/refs/tags/7.12.4...7.12.5 Changelog: https://github.com/youtype/mypy_boto3_builder/releases/tag/7.12.5 --- .../development/python-modules/mypy-boto3-builder/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) 
diff --git a/pkgs/development/python-modules/mypy-boto3-builder/default.nix b/pkgs/development/python-modules/mypy-boto3-builder/default.nix index 3c582230ee2e..a02a6dce390d 100644 --- a/pkgs/development/python-modules/mypy-boto3-builder/default.nix +++ b/pkgs/development/python-modules/mypy-boto3-builder/default.nix @@ -17,7 +17,7 @@ buildPythonPackage rec { pname = "mypy-boto3-builder"; - version = "7.12.4"; + version = "7.12.5"; format = "pyproject"; disabled = pythonOlder "3.10"; @@ -26,7 +26,7 @@ buildPythonPackage rec { owner = "youtype"; repo = "mypy_boto3_builder"; rev = "refs/tags/${version}"; - hash = "sha256-X8ATnycG7MvzDNaMClvhyy4Qy4hvoNhn0sQ+s/JnX64="; + hash = "sha256-Ij01EExSc4pU8eC+JPhSB8YKXkspusMRgdPhdgbUEKk="; }; nativeBuildInputs = [ From a84a25c3ae077754d5571623cb3f34b60fa29d4e Mon Sep 17 00:00:00 2001 From: Yureka Date: Sat, 4 Mar 2023 00:38:17 +0100 Subject: [PATCH 081/154] lkl: downgrade to 5.15 --- pkgs/applications/virtualization/lkl/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/virtualization/lkl/default.nix b/pkgs/applications/virtualization/lkl/default.nix index 0247cd433b04..f9b5b7f62695 100644 --- a/pkgs/applications/virtualization/lkl/default.nix +++ b/pkgs/applications/virtualization/lkl/default.nix @@ -6,15 +6,15 @@ stdenv.mkDerivation rec { pname = "lkl"; - version = "2023-01-27"; + version = "2022-08-08"; outputs = [ "dev" "lib" "out" ]; src = fetchFromGitHub { owner = "lkl"; repo = "linux"; - rev = "b00f0fbcd5ae24636a9315fea3af32f411cf93be"; - sha256 = "sha256-GZpnTVdcnS5uAUHsVre539+0Qlv36Fui0WGjOPwvWrE="; + rev = "ffbb4aa67b3e0a64f6963f59385a200d08cb2d8b"; + sha256 = "sha256-24sNREdnhkF+P+3P0qEh2tF1jHKF7KcbFSn/rPK2zWs="; }; nativeBuildInputs = [ bc bison flex python3 ]; From 0385ba7150bb07c8d2efadcf8aeedb50e91aa912 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 00:52:45 +0100 Subject: [PATCH 082/154] python310Packages.ulid-transform: init at 0.4.0 --- .../python-modules/ulid-transform/default.nix | 51 +++++++++++++++++++ pkgs/top-level/python-packages.nix | 2 + 2 files changed, 53 insertions(+) create mode 100644 pkgs/development/python-modules/ulid-transform/default.nix diff --git a/pkgs/development/python-modules/ulid-transform/default.nix b/pkgs/development/python-modules/ulid-transform/default.nix new file mode 100644 index 000000000000..d53281f5f062 --- /dev/null +++ b/pkgs/development/python-modules/ulid-transform/default.nix @@ -0,0 +1,51 @@ +{ lib +, cython +, buildPythonPackage +, fetchFromGitHub +, poetry-core +, pytestCheckHook +, pythonOlder +, setuptools +}: + +buildPythonPackage rec { + pname = "ulid-transform"; + version = "0.4.0"; + format = "pyproject"; + + disabled = pythonOlder "3.9"; + + src = fetchFromGitHub { + owner = "bdraco"; + repo = pname; + rev = "refs/tags/v${version}"; + hash = "sha256-JuTIE8FAVZkfn+byJ1z9/ep9Oih1uXpz/QTB2OfM0WU="; + }; + + nativeBuildInputs = [ + cython + poetry-core + setuptools + ]; + + nativeCheckInputs = [ + pytestCheckHook + ]; + + postPatch = '' + substituteInPlace pyproject.toml \ + --replace " --cov=ulid_transform --cov-report=term-missing:skip-covered" "" + ''; + + pythonImportsCheck = [ + "ulid_transform" + ]; + + meta = with lib; { + description = "Library to create and transform ULIDs"; + homepage = "https://github.com/bdraco/ulid-transform"; + changelog = "https://github.com/bdraco/ulid-transform/releases/tag/v${version}"; + license = with licenses; [ mit ]; + maintainers = with maintainers; [ fab ]; + }; +} diff 
--git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix index 9646f4aa624b..591cb71293b7 100644 --- a/pkgs/top-level/python-packages.nix +++ b/pkgs/top-level/python-packages.nix @@ -11985,6 +11985,8 @@ self: super: with self; { ukrainealarm = callPackage ../development/python-modules/ukrainealarm { }; + ulid-transform = callPackage ../development/python-modules/ulid-transform { }; + ultraheat-api = callPackage ../development/python-modules/ultraheat-api { }; umalqurra = callPackage ../development/python-modules/umalqurra { }; From a2d85f5bad9d3e93ab6a728b87ffac29949fa2a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=BCtz?= Date: Fri, 3 Mar 2023 16:03:57 -0800 Subject: [PATCH 083/154] python310Packages.openapi-core: 0.16.5 -> 0.16.6 Diff: https://github.com/p1c2u/openapi-core/compare/refs/tags/0.16.5...0.16.6 Changelog: https://github.com/python-openapi/openapi-core/releases/tag/0.16.6 --- pkgs/development/python-modules/openapi-core/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/openapi-core/default.nix b/pkgs/development/python-modules/openapi-core/default.nix index 496dceeaab48..987f5cf6b14d 100644 --- a/pkgs/development/python-modules/openapi-core/default.nix +++ b/pkgs/development/python-modules/openapi-core/default.nix @@ -27,7 +27,7 @@ buildPythonPackage rec { pname = "openapi-core"; - version = "0.16.5"; + version = "0.16.6"; format = "pyproject"; disabled = pythonOlder "3.7"; @@ -36,7 +36,7 @@ owner = "p1c2u"; repo = "openapi-core"; rev = "refs/tags/${version}"; - hash = "sha256-xXSZ9qxjmeIyYIWQubJbJxkXUdOu/WSSBddIWsVaH8k="; + hash = "sha256-cpWEZ+gX4deTxMQ5BG+Qh863jcqUkOlNSY3KtOwOcBo="; }; postPatch = '' From 593e4a3c6e334e7b3c95572210efc7b6aeffeb06 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?PedroHLC=20=E2=98=AD?= Date: Fri, 3 Mar 2023 22:04:09 -0300 Subject: [PATCH 084/154] linuxKernel.kernels.linux_lqx: 6.1.13-lqx2 -> 6.1.14-lqx1 --- pkgs/os-specific/linux/kernel/zen-kernels.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/os-specific/linux/kernel/zen-kernels.nix b/pkgs/os-specific/linux/kernel/zen-kernels.nix index 7dbf9c97291c..70a7fbceb3f2 100644 --- a/pkgs/os-specific/linux/kernel/zen-kernels.nix +++ b/pkgs/os-specific/linux/kernel/zen-kernels.nix @@ -11,9 +11,9 @@ let }; # ./update-zen.py lqx lqxVariant = { - version = "6.1.13"; #lqx - suffix = "lqx2"; #lqx - sha256 = "1264cfkb3kfrava8g7byr10avkjg0k281annqppcqqjkyjf63q4y"; #lqx + version = "6.1.14"; #lqx + suffix = "lqx1"; #lqx + sha256 = "026nnmbpipk4gg7llsvm4fgws3ka0hjdywl7h0a8bvq6n9by15i6"; #lqx isLqx = true; }; zenKernelsFor = { version, suffix, sha256, isLqx }: buildLinux (args // { From 7ffb427895cb5e2da3d351e8b5cc8cb56f9e788e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?PedroHLC=20=E2=98=AD?= Date: Fri, 3 Mar 2023 22:08:25 -0300 Subject: [PATCH 085/154] linuxKernel.kernels.linux_zen: 6.2.1-zen1 -> 6.2.2-zen1 --- pkgs/os-specific/linux/kernel/zen-kernels.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/os-specific/linux/kernel/zen-kernels.nix b/pkgs/os-specific/linux/kernel/zen-kernels.nix index 70a7fbceb3f2..9f172447f10e 100644 --- a/pkgs/os-specific/linux/kernel/zen-kernels.nix +++ b/pkgs/os-specific/linux/kernel/zen-kernels.nix @@ -4,9 +4,9 @@ let # comments with variant added for update script # ./update-zen.py zen zenVariant = { - version = "6.2.1"; #zen + version = "6.2.2"; #zen suffix = "zen1"; #zen - sha256 = 
"1ypgdc4bz35cqqwp8nka6rx7m9dqfl6wzfb8ad27gqgxwzil3sjg"; #zen + sha256 = "004aghwdclky7w341yg9nkr5r58qnp4hxnmvxrp2z06pzcbsq933"; #zen isLqx = false; }; # ./update-zen.py lqx From b67369049f8297747e5ed721e4cbf4406da24417 Mon Sep 17 00:00:00 2001 From: Dmitry Bogatov Date: Tue, 14 Feb 2023 19:35:38 -0500 Subject: [PATCH 086/154] mpdecimal: split C++ library into separate output --- pkgs/development/libraries/mpdecimal/default.nix | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/pkgs/development/libraries/mpdecimal/default.nix b/pkgs/development/libraries/mpdecimal/default.nix index 5fa03b7083e2..3a2e4b1fe7d0 100644 --- a/pkgs/development/libraries/mpdecimal/default.nix +++ b/pkgs/development/libraries/mpdecimal/default.nix @@ -3,7 +3,7 @@ stdenv.mkDerivation rec { pname = "mpdecimal"; version = "2.5.1"; - outputs = [ "out" "doc" ]; + outputs = [ "out" "cxx" "doc" "dev" ]; src = fetchurl { url = "https://www.bytereef.org/software/mpdecimal/releases/mpdecimal-${version}.tar.gz"; @@ -12,6 +12,14 @@ stdenv.mkDerivation rec { configureFlags = [ "LD=${stdenv.cc.targetPrefix}cc" ]; + postInstall = '' + mkdir -p $cxx/lib + mv $out/lib/*c++* $cxx/lib + + mkdir -p $dev/nix-support + echo -n $cxx >> $dev/nix-support/propagated-build-inputs + ''; + meta = { description = "Library for arbitrary precision decimal floating point arithmetic"; From 3696038c3dce4a2f2316ea3f640bbc9a878feca0 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 4 Mar 2023 03:07:11 +0000 Subject: [PATCH 087/154] =?UTF-8?q?terraform-providers.auth0:=200.44.0=20?= =?UTF-8?q?=E2=86=92=200.44.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../networking/cluster/terraform-providers/providers.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/networking/cluster/terraform-providers/providers.json b/pkgs/applications/networking/cluster/terraform-providers/providers.json index e96f2e0b8c6a..edc1d7a5189e 100644 --- a/pkgs/applications/networking/cluster/terraform-providers/providers.json +++ b/pkgs/applications/networking/cluster/terraform-providers/providers.json @@ -82,13 +82,13 @@ "vendorHash": "sha256-99PwwxVHfRGC0QCQGhifRzqWFOHZ1R7Ge2ou7OjiggQ=" }, "auth0": { - "hash": "sha256-3hAfDzK7iO4D68OsCvuXQx5Gk0VOtoBiw21tBJjDJtQ=", + "hash": "sha256-d5zM6FKFT9UFUyrm+5aF2wRvGsdtkq3Z8NvlsvZib7c=", "homepage": "https://registry.terraform.io/providers/auth0/auth0", "owner": "auth0", "repo": "terraform-provider-auth0", - "rev": "v0.44.0", + "rev": "v0.44.1", "spdx": "MPL-2.0", - "vendorHash": "sha256-UP9A0lcW5QbTuur1MMjKMlvC8S3nenqs0WjpoqvwEQI=" + "vendorHash": "sha256-vcKw8G9SqbP0wBnhLKJUz9ua1nGdP5ioZ+5ACxkeCZk=" }, "avi": { "hash": "sha256-mBLdIL4mUI4zA3c9gB4DL1QY0xHW15Q1rO/v1gVYKYU=", From 7fe2256c99842bfeb1b3cc0af762d50614efe3b4 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 4 Mar 2023 03:15:45 +0000 Subject: [PATCH 088/154] =?UTF-8?q?terraform-providers.alicloud:=201.199.0?= =?UTF-8?q?=20=E2=86=92=201.200.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../networking/cluster/terraform-providers/providers.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/networking/cluster/terraform-providers/providers.json b/pkgs/applications/networking/cluster/terraform-providers/providers.json index 
edc1d7a5189e..76f0690489fc 100644 --- a/pkgs/applications/networking/cluster/terraform-providers/providers.json +++ b/pkgs/applications/networking/cluster/terraform-providers/providers.json @@ -46,11 +46,11 @@ "vendorHash": "sha256-JOaw8rKH7eb3RiP/FD+M7VEXCRfVuarTjfEusz1yGmQ=" }, "alicloud": { - "hash": "sha256-Cf3plUhdewlq3MvOqZGcICP0j9R3vg0nZdBMrk/Et7k=", + "hash": "sha256-QefplcJVXduBbado4Ykg2Ngybb/oxf6/ulCgRqJGm0A=", "homepage": "https://registry.terraform.io/providers/aliyun/alicloud", "owner": "aliyun", "repo": "terraform-provider-alicloud", - "rev": "v1.199.0", + "rev": "v1.200.0", "spdx": "MPL-2.0", "vendorHash": null }, From 70126da303672d743b03fdfcf25e5e50dca5dce8 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 4 Mar 2023 03:17:20 +0000 Subject: [PATCH 089/154] =?UTF-8?q?terraform-providers.azuread:=202.35.0?= =?UTF-8?q?=20=E2=86=92=202.36.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../networking/cluster/terraform-providers/providers.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/networking/cluster/terraform-providers/providers.json b/pkgs/applications/networking/cluster/terraform-providers/providers.json index 76f0690489fc..813b29ec1707 100644 --- a/pkgs/applications/networking/cluster/terraform-providers/providers.json +++ b/pkgs/applications/networking/cluster/terraform-providers/providers.json @@ -119,11 +119,11 @@ "vendorHash": "sha256-OBWwMNDpeoPR6NLIgsjiYGQdePEWDMWGN1Y0nHsecYs=" }, "azuread": { - "hash": "sha256-vfkheaRQoDpItMEFzuDkkOOoVvj07MyCkAaybef71nc=", + "hash": "sha256-MGCGfocs16qmJnvMRRD7TRHnPkS17h+oNUkMARAQhLs=", "homepage": "https://registry.terraform.io/providers/hashicorp/azuread", "owner": "hashicorp", "repo": "terraform-provider-azuread", - "rev": "v2.35.0", + "rev": "v2.36.0", "spdx": "MPL-2.0", "vendorHash": null }, From c816405620ec1e1f10f2a4d44c1ab208c2542d86 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 4 Mar 2023 03:20:29 +0000 Subject: [PATCH 090/154] =?UTF-8?q?terraform-providers.equinix:=201.12.0?= =?UTF-8?q?=20=E2=86=92=201.13.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../networking/cluster/terraform-providers/providers.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/networking/cluster/terraform-providers/providers.json b/pkgs/applications/networking/cluster/terraform-providers/providers.json index 813b29ec1707..26be4579125d 100644 --- a/pkgs/applications/networking/cluster/terraform-providers/providers.json +++ b/pkgs/applications/networking/cluster/terraform-providers/providers.json @@ -355,11 +355,11 @@ "vendorHash": "sha256-oVTanZpCWs05HwyIKW2ajiBPz1HXOFzBAt5Us+EtTRw=" }, "equinix": { - "hash": "sha256-aah3f/5Bd+IgXbyJpDhcyklIYHlK3yy16UkYlOprh0c=", + "hash": "sha256-zyRPpAaDgjRafn5RcrzmbVTzO6gGS1HMmvLR8VFdKow=", "homepage": "https://registry.terraform.io/providers/equinix/equinix", "owner": "equinix", "repo": "terraform-provider-equinix", - "rev": "v1.12.0", + "rev": "v1.13.0", "spdx": "MIT", "vendorHash": "sha256-Zi2e/Vg9iKTrU8Mb37Y8xHYIBL+IfDnWMUUg5Vqrbfo=" }, From ad344d3e2ed66ab5cff5497c38f8099a0216b815 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 4 Mar 2023 03:24:36 +0000 Subject: [PATCH 091/154] 
=?UTF-8?q?terraform-providers.flexibleengine:=201?= =?UTF-8?q?.36.0=20=E2=86=92=201.36.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../networking/cluster/terraform-providers/providers.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/networking/cluster/terraform-providers/providers.json b/pkgs/applications/networking/cluster/terraform-providers/providers.json index 26be4579125d..f087c392927c 100644 --- a/pkgs/applications/networking/cluster/terraform-providers/providers.json +++ b/pkgs/applications/networking/cluster/terraform-providers/providers.json @@ -391,13 +391,13 @@ "vendorHash": null }, "flexibleengine": { - "hash": "sha256-uT8BmACMMJKVPAhL/7rudCXG9AOb4kS1Lswr5ZxY6M4=", + "hash": "sha256-0wpyi397+5YAa3epZZII312rK1SnPU5k9a1/iVTbqmU=", "homepage": "https://registry.terraform.io/providers/FlexibleEngineCloud/flexibleengine", "owner": "FlexibleEngineCloud", "repo": "terraform-provider-flexibleengine", - "rev": "v1.36.0", + "rev": "v1.36.1", "spdx": "MPL-2.0", - "vendorHash": "sha256-obBN7Q/gKbvERJIUVz+GgPjn7/OKjXCiFI6WuOd0hic=" + "vendorHash": "sha256-HcyUGKbgj322fU7keN/lBEn6UJhV3QXScBJHZHJkCII=" }, "fortios": { "deleteVendor": true, From 58cbbfbc78638f86ff761340239384a5fbd1e78e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 4 Mar 2023 03:28:26 +0000 Subject: [PATCH 092/154] =?UTF-8?q?terraform-providers.huaweicloud:=201.44?= =?UTF-8?q?.2=20=E2=86=92=201.45.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../networking/cluster/terraform-providers/providers.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/networking/cluster/terraform-providers/providers.json b/pkgs/applications/networking/cluster/terraform-providers/providers.json index f087c392927c..f66752b86e44 100644 --- a/pkgs/applications/networking/cluster/terraform-providers/providers.json +++ b/pkgs/applications/networking/cluster/terraform-providers/providers.json @@ -540,11 +540,11 @@ "vendorHash": "sha256-rxh8Me+eOKPCbfHFT3tRsbM7JU67dBqv2JOiWArI/2Y=" }, "huaweicloud": { - "hash": "sha256-oZUPfhndpht9EuBiltLknblGaMX2M/dD1iOiwDJKgWY=", + "hash": "sha256-x/5jt31yPTJRHSHRZqSrrjNdERWho6l71jvS7x6dR0c=", "homepage": "https://registry.terraform.io/providers/huaweicloud/huaweicloud", "owner": "huaweicloud", "repo": "terraform-provider-huaweicloud", - "rev": "v1.44.2", + "rev": "v1.45.0", "spdx": "MPL-2.0", "vendorHash": null }, From 9d4587aec8bfdbac21acceb109e6fc83ab7ea931 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 4 Mar 2023 03:33:50 +0000 Subject: [PATCH 093/154] =?UTF-8?q?terraform-providers.aws:=204.56.0=20?= =?UTF-8?q?=E2=86=92=204.57.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../networking/cluster/terraform-providers/providers.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/networking/cluster/terraform-providers/providers.json b/pkgs/applications/networking/cluster/terraform-providers/providers.json index f66752b86e44..b59263170fa8 100644 --- a/pkgs/applications/networking/cluster/terraform-providers/providers.json +++ b/pkgs/applications/networking/cluster/terraform-providers/providers.json @@ -110,13 +110,13 @@ "vendorHash": null }, "aws": { - "hash": 
"sha256-qopoHOcCdOAkgpZq3AvCnsq00sjvNSFdUKzn7SQ0G5k=", + "hash": "sha256-U9mzz/r3xb6bl9n1Go6JiM6CemB2Nwsu6LEhc5ypV3c=", "homepage": "https://registry.terraform.io/providers/hashicorp/aws", "owner": "hashicorp", "repo": "terraform-provider-aws", - "rev": "v4.56.0", + "rev": "v4.57.0", "spdx": "MPL-2.0", - "vendorHash": "sha256-OBWwMNDpeoPR6NLIgsjiYGQdePEWDMWGN1Y0nHsecYs=" + "vendorHash": "sha256-dK1NGOpX8h4XvcDtp4DEaVrxHaGmzTXldKbsfFoVWu4=" }, "azuread": { "hash": "sha256-MGCGfocs16qmJnvMRRD7TRHnPkS17h+oNUkMARAQhLs=", From 13f6bc07111fc875402734e37f2941b1267209d8 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 4 Mar 2023 03:34:06 +0000 Subject: [PATCH 094/154] =?UTF-8?q?terraform-providers.ibm:=201.50.0=20?= =?UTF-8?q?=E2=86=92=201.51.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../networking/cluster/terraform-providers/providers.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/networking/cluster/terraform-providers/providers.json b/pkgs/applications/networking/cluster/terraform-providers/providers.json index b59263170fa8..3b861d5f465d 100644 --- a/pkgs/applications/networking/cluster/terraform-providers/providers.json +++ b/pkgs/applications/networking/cluster/terraform-providers/providers.json @@ -567,13 +567,13 @@ "vendorHash": null }, "ibm": { - "hash": "sha256-Qdb5HpamjCNGlqSf3etFv0++Skrk/jm6UVBFsKGU+jw=", + "hash": "sha256-7TuvaeCRtQcYkJe6KbinGdK3JvmEbT4yxwHbzLR6jfE=", "homepage": "https://registry.terraform.io/providers/IBM-Cloud/ibm", "owner": "IBM-Cloud", "repo": "terraform-provider-ibm", - "rev": "v1.50.0", + "rev": "v1.51.0", "spdx": "MPL-2.0", - "vendorHash": "sha256-JkmfZ9yz3r26j1SHIwnyNA+nYWAy4DoaWEMfFUTzD3Y=" + "vendorHash": "sha256-l+Q4ix50ItXI/i5aDvqSC2kTk3tDBPZgO/6aok+P0hQ=" }, "icinga2": { "hash": "sha256-Y/Oq0aTzP+oSKPhHiHY9Leal4HJJm7TNDpcdqkUsCmk=", From 828ffa7112b90d7fefa4c2709d37087c5a56e271 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 4 Mar 2023 03:35:46 +0000 Subject: [PATCH 095/154] =?UTF-8?q?terraform-providers.ksyun:=201.3.64=20?= =?UTF-8?q?=E2=86=92=201.3.66?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../networking/cluster/terraform-providers/providers.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/networking/cluster/terraform-providers/providers.json b/pkgs/applications/networking/cluster/terraform-providers/providers.json index 3b861d5f465d..69fe1b7c00ac 100644 --- a/pkgs/applications/networking/cluster/terraform-providers/providers.json +++ b/pkgs/applications/networking/cluster/terraform-providers/providers.json @@ -621,11 +621,11 @@ "vendorHash": "sha256-nDvnLEOtXkUJFY22pKogOzkWrj4qjyQbdlJ5pa/xnK8=" }, "ksyun": { - "hash": "sha256-F/A+hDjYTQS0NT0rslE792qNINghfdiQHRNnbMpyBdM=", + "hash": "sha256-mq0wE9jkn67HFyg0MgtD9lY7lk0+4/rnPLJ4mXX0xwY=", "homepage": "https://registry.terraform.io/providers/kingsoftcloud/ksyun", "owner": "kingsoftcloud", "repo": "terraform-provider-ksyun", - "rev": "v1.3.64", + "rev": "v1.3.66", "spdx": "MPL-2.0", "vendorHash": "sha256-miHKAz+ONXtuC1DNukcyZbbaYReY69dz9Zk6cJdORdQ=" }, From f0d64d844b5b540353ff32282370dc2cfe148576 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 4 Mar 2023 03:36:10 +0000 Subject: [PATCH 096/154] 
=?UTF-8?q?terraform-providers.ns1:=201.13.4=20?= =?UTF-8?q?=E2=86=92=202.0.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../networking/cluster/terraform-providers/providers.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/networking/cluster/terraform-providers/providers.json b/pkgs/applications/networking/cluster/terraform-providers/providers.json index 69fe1b7c00ac..4195ae3216ff 100644 --- a/pkgs/applications/networking/cluster/terraform-providers/providers.json +++ b/pkgs/applications/networking/cluster/terraform-providers/providers.json @@ -783,13 +783,13 @@ "vendorHash": "sha256-3t8pUAwuVeZN5cYGs72YsdRvJunudSmKSldFWEFVA/4=" }, "ns1": { - "hash": "sha256-2w9x/FTtieWB88CIEkP7BH5saC6dt4IxdROBucczios=", + "hash": "sha256-fPeWs1VMsCY+OywHdwP9EUyjpoTYquBqP8W08Z/0DAA=", "homepage": "https://registry.terraform.io/providers/ns1-terraform/ns1", "owner": "ns1-terraform", "repo": "terraform-provider-ns1", - "rev": "v1.13.4", + "rev": "v2.0.0", "spdx": "MPL-2.0", - "vendorHash": "sha256-/Rgerbd8c6Owo79LrYsR9O0JNBrDOODFD+k1Yd5G6cY=" + "vendorHash": "sha256-R4q9ASqTdKv4BG4zNktKsLxa6UU42UzWTLYHuRnJ4Zg=" }, "null": { "hash": "sha256-ExXDbAXMVCTZBlYmi4kD/7JFB1fCFAoPL637+1N6rEI=", From 140fadb426b7c2fa0a4c11af6aeb785ee34a99a9 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 4 Mar 2023 03:38:31 +0000 Subject: [PATCH 097/154] =?UTF-8?q?terraform-providers.snowflake:=200.57.0?= =?UTF-8?q?=20=E2=86=92=200.58.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../networking/cluster/terraform-providers/providers.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/networking/cluster/terraform-providers/providers.json b/pkgs/applications/networking/cluster/terraform-providers/providers.json index 4195ae3216ff..662950d711d0 100644 --- a/pkgs/applications/networking/cluster/terraform-providers/providers.json +++ b/pkgs/applications/networking/cluster/terraform-providers/providers.json @@ -1027,11 +1027,11 @@ "vendorHash": null }, "snowflake": { - "hash": "sha256-nNv2lo7I5+eFmw+BvRB/DmgNE6iuR3Aq0kxyOeQdiqU=", + "hash": "sha256-MMWObJRS7FKvOfor2j0QywRMRbGsE5QcyDGbY2CXjo4=", "homepage": "https://registry.terraform.io/providers/Snowflake-Labs/snowflake", "owner": "Snowflake-Labs", "repo": "terraform-provider-snowflake", - "rev": "v0.57.0", + "rev": "v0.58.0", "spdx": "MIT", "vendorHash": "sha256-yFk5ap28JluaKkUPfePBuRUEg6/Ma5MrRkmWK6iAGNg=" }, From ce2f08f142ab289720f317f2f8d313e6347e67db Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 4 Mar 2023 03:40:46 +0000 Subject: [PATCH 098/154] =?UTF-8?q?terraform-providers.tencentcloud:=201.7?= =?UTF-8?q?9.12=20=E2=86=92=201.79.13?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../networking/cluster/terraform-providers/providers.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/networking/cluster/terraform-providers/providers.json b/pkgs/applications/networking/cluster/terraform-providers/providers.json index 662950d711d0..9f1e72eeeda4 100644 --- a/pkgs/applications/networking/cluster/terraform-providers/providers.json +++ b/pkgs/applications/networking/cluster/terraform-providers/providers.json @@ -1099,11 +1099,11 @@ "vendorHash": 
"sha256-tltQNtTsPoT5CTrKM7vLDVkmmW2FTd6MBubfXZveGxI=" }, "tencentcloud": { - "hash": "sha256-aqi6lEGVj0PhIMwUfU/4lu5uGgbU4+R42UhINbHgMjY=", + "hash": "sha256-91efifPY9ErjqtNPzm3+XSy1Jy+eQs2znxYzez74J/0=", "homepage": "https://registry.terraform.io/providers/tencentcloudstack/tencentcloud", "owner": "tencentcloudstack", "repo": "terraform-provider-tencentcloud", - "rev": "v1.79.12", + "rev": "v1.79.13", "spdx": "MPL-2.0", "vendorHash": null }, From f6f9829f50dc13807565b2b7d41fbbc0f41b9ba7 Mon Sep 17 00:00:00 2001 From: figsoda Date: Fri, 3 Mar 2023 16:37:59 -0500 Subject: [PATCH 099/154] atuin: fix build on darwin xvfb-run is not available on darwin --- pkgs/tools/misc/atuin/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/tools/misc/atuin/default.nix b/pkgs/tools/misc/atuin/default.nix index 9d5a9fc92704..a1f7df1d4597 100644 --- a/pkgs/tools/misc/atuin/default.nix +++ b/pkgs/tools/misc/atuin/default.nix @@ -34,11 +34,11 @@ rustPlatform.buildRustPackage rec { --zsh <($out/bin/atuin gen-completions -s zsh) ''; - nativeCheckInputs = [ + nativeCheckInputs = lib.optionals xvfb-run.meta.available [ xvfb-run ]; - checkPhase = '' + checkPhase = lib.optionalString xvfb-run.meta.available '' runHook preCheck xvfb-run cargo test runHook postCheck From 7f76ed40100aeb413ef019b5c0f116038d83e546 Mon Sep 17 00:00:00 2001 From: Delan Azabani Date: Thu, 2 Mar 2023 23:36:47 +0800 Subject: [PATCH 100/154] osu-lazer-bin: 2023.207.0 -> 2023.301.0 --- pkgs/games/osu-lazer/bin.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/games/osu-lazer/bin.nix b/pkgs/games/osu-lazer/bin.nix index 7a62db1003fa..cf17332785fd 100644 --- a/pkgs/games/osu-lazer/bin.nix +++ b/pkgs/games/osu-lazer/bin.nix @@ -2,11 +2,11 @@ appimageTools.wrapType2 rec { pname = "osu-lazer-bin"; - version = "2023.207.0"; + version = "2023.301.0"; src = fetchurl { url = "https://github.com/ppy/osu/releases/download/${version}/osu.AppImage"; - sha256 = "sha256-xJQcqNV/Pr3gEGStczc3gv8AYrEKFsAo2g4WtA59fwk="; + sha256 = "sha256-0c74bGOY9f2K52xE7CZy/i3OfyCC+a6XGI30c6hI7jM="; }; extraPkgs = pkgs: with pkgs; [ icu ]; From f0ea95c2c7d52a5a6dcef105c97343d46749382a Mon Sep 17 00:00:00 2001 From: Delan Azabani Date: Thu, 2 Mar 2023 23:37:20 +0800 Subject: [PATCH 101/154] osu-lazer: 2023.207.0 -> 2023.301.0 --- pkgs/games/osu-lazer/default.nix | 4 +- pkgs/games/osu-lazer/deps.nix | 81 ++++++++++++++++---------------- 2 files changed, 43 insertions(+), 42 deletions(-) diff --git a/pkgs/games/osu-lazer/default.nix b/pkgs/games/osu-lazer/default.nix index 3a56c35fe88d..f3aea2a333af 100644 --- a/pkgs/games/osu-lazer/default.nix +++ b/pkgs/games/osu-lazer/default.nix @@ -17,13 +17,13 @@ buildDotnetModule rec { pname = "osu-lazer"; - version = "2023.207.0"; + version = "2023.301.0"; src = fetchFromGitHub { owner = "ppy"; repo = "osu"; rev = version; - sha256 = "sha256-s0gzSfj4+xk3joS7S68ZGjgatiJY2Y1FBCmrhptaWIk="; + sha256 = "sha256-SUVxe3PdUch8NYR7X4fatbmSpyYewI69usBDICcSq3s="; }; projectFile = "osu.Desktop/osu.Desktop.csproj"; diff --git a/pkgs/games/osu-lazer/deps.nix b/pkgs/games/osu-lazer/deps.nix index 908140259b1e..753f1cd9af1a 100644 --- a/pkgs/games/osu-lazer/deps.nix +++ b/pkgs/games/osu-lazer/deps.nix @@ -2,10 +2,10 @@ # Please dont edit it manually, your changes might get overwritten! 
{ fetchNuGet }: [ - (fetchNuGet { pname = "AutoMapper"; version = "11.0.1"; sha256 = "1z1x5c1dkwk6142km5q6jglhpq9x82alwjjy5a72c8qnq9ppdfg3"; }) + (fetchNuGet { pname = "AutoMapper"; version = "12.0.1"; sha256 = "0s0wjl4ck3sal8a50x786wxs9mbca7bxaqk3558yx5wpld4h4z3b"; }) (fetchNuGet { pname = "Clowd.Squirrel"; version = "2.9.42"; sha256 = "1xxrr9jmgn343d467nz40569mkybinnmxaxyc4fhgy6yddvzk1y0"; }) (fetchNuGet { pname = "DiffPlex"; version = "1.7.1"; sha256 = "1q78r70pirgb7j5wkh454ws237lihh0fig212cpbj02cz53c2h6j"; }) - (fetchNuGet { pname = "DiscordRichPresence"; version = "1.1.1.14"; sha256 = "18adkrddjlci5ajs17ck1c8cd8id3cgjylqvfggyqwrmsh7yr4j6"; }) + (fetchNuGet { pname = "DiscordRichPresence"; version = "1.1.3.18"; sha256 = "0p4bhaggjjfd4gl06yiphqgncxgcq2bws4sjkrw0n2ldf3hgrps3"; }) (fetchNuGet { pname = "FFmpeg.AutoGen"; version = "4.3.0.1"; sha256 = "0n6x57mnnvcjnrs8zyvy07h5zm4bcfy9gh4n4bvd9fx5ys4pxkvv"; }) (fetchNuGet { pname = "Fody"; version = "6.6.4"; sha256 = "1hhdwj0ska7dvak9hki8cnyfmmw5r8yw8w24gzsdwhqx68dnrvsx"; }) (fetchNuGet { pname = "HidSharpCore"; version = "1.2.1.1"; sha256 = "1zkndglmz0s8rblfhnqcvv90rkq2i7lf4bc380g7z8h1avf2ikll"; }) @@ -63,38 +63,37 @@ (fetchNuGet { pname = "JetBrains.Annotations"; version = "2021.3.0"; sha256 = "01ssylllbwpana2w3iybi533zlvcsbhzjc8kr0g4kg307kjbfn8v"; }) (fetchNuGet { pname = "managed-midi"; version = "1.10.0"; sha256 = "1rih8iq8k4j6n3206d2j7z4vygp725kzs95c6yc7p1mlhfiiimvq"; }) (fetchNuGet { pname = "Markdig"; version = "0.23.0"; sha256 = "1bwn885w7balwncmr764vidyyp9bixqlq6r3lhsapj8ykrpxxa70"; }) - (fetchNuGet { pname = "MessagePack"; version = "2.4.35"; sha256 = "0y8pz073ync51cv39lxldc797nmcm39r4pdhy2il6r95rppjqg5h"; }) - (fetchNuGet { pname = "MessagePack.Annotations"; version = "2.4.35"; sha256 = "1jny2r6rwq7xzwymm779w9x8a5rhyln97mxzplxwd53wwbb0wbzd"; }) - (fetchNuGet { pname = "Microsoft.AspNetCore.Connections.Abstractions"; version = "6.0.10"; sha256 = "1wic0bghgwg2r8q676miv3kk7ph5g46kvkw1iljr4b8s58mqbwas"; }) - (fetchNuGet { pname = "Microsoft.AspNetCore.Http.Connections.Client"; version = "6.0.10"; sha256 = "1a8m44qgjwfhmqpfsyyb1hgak3sh99s62hnfmphxsflfvx611mbb"; }) - (fetchNuGet { pname = "Microsoft.AspNetCore.Http.Connections.Common"; version = "6.0.10"; sha256 = "0vqc62xjiwlqwifx3nj0nwssjrdqka2avpqiiwylsbd48s1ahxdy"; }) - (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Client"; version = "6.0.10"; sha256 = "090ggwxv2j86hkmnzqxa728wpn5g30dfqd05widhd7n1m51igq71"; }) - (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Client.Core"; version = "6.0.10"; sha256 = "13i22fkai420fvr71c3pfnadspcv8jpf5bci9fn3yh580bfqw21a"; }) - (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Common"; version = "6.0.10"; sha256 = "0kmy2h310hqpr6bgd128r4q7ny4i7qjfvgrv1swhqv2j9n1yriby"; }) - (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.Json"; version = "6.0.10"; sha256 = "13q429kwbijyfgpb4dp04lr2c691ra5br5wf8g7s260pij10x1nz"; }) - (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.MessagePack"; version = "6.0.10"; sha256 = "068gw5q25yaf5k5c96kswmna1jixpw6s82r7gmgnw54rcc8gdz3f"; }) - (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.NewtonsoftJson"; version = "6.0.10"; sha256 = "1k7jvvvz8wwbd1bw1shcgrgz2gw3l877krhw39b9sj2vbwzc8bn7"; }) - (fetchNuGet { pname = "Microsoft.CodeAnalysis.BannedApiAnalyzers"; version = "3.3.3"; sha256 = "1z6x0d8lpcfjr3sxy25493i17vvcg5bsay6c03qan6mnj5aqzw2k"; }) + (fetchNuGet { pname = "MessagePack"; version = "2.4.59"; sha256 = "13igx5m5hkqqyhyw04z2nwfxn2jwlrpvvwx4c8qrayv9j4l31ajm"; }) + 
(fetchNuGet { pname = "MessagePack.Annotations"; version = "2.4.59"; sha256 = "1y8mg95x87jddk0hyf58cc1zy666mqbla7479njkm7kmpwz61s8c"; }) + (fetchNuGet { pname = "Microsoft.AspNetCore.Connections.Abstractions"; version = "7.0.2"; sha256 = "1k5gjiwmcrbwfz54jafz6mmf4md7jgk3j8jdpp9ax72glwa7ia4a"; }) + (fetchNuGet { pname = "Microsoft.AspNetCore.Http.Connections.Client"; version = "7.0.2"; sha256 = "0rnra67gkg0qs7wys8bacm1raf9khb688ch2yr56m88kwdk5bhw4"; }) + (fetchNuGet { pname = "Microsoft.AspNetCore.Http.Connections.Common"; version = "7.0.2"; sha256 = "19dviyc68m56mmy05lylhp2bxvww2gqx1y07kc0yqp61rcjb1d85"; }) + (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Client"; version = "7.0.2"; sha256 = "0ms9syxlxk6f5pxjw23s2cz4ld60vk84v67l0bhnnb8v42rz97nn"; }) + (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Client.Core"; version = "7.0.2"; sha256 = "15qs3pdji2sd629as4i8zd5bjbs165waim9jypxqjkb55bslz8d7"; }) + (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Common"; version = "7.0.2"; sha256 = "0c3ia03m1shc2xslqln5m986kpvc1dqb15j85vqxbzb0jj6fr52y"; }) + (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.Json"; version = "7.0.2"; sha256 = "028r8sk5dlxkfxw6wz2ys62rm9dqa85s6rfhilrfy1phsl47rkal"; }) + (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.MessagePack"; version = "7.0.2"; sha256 = "1zkznsq5r7gg2pnlj9y7swrbvzyywf6q5xf9ggcwbvccwp0g6jr4"; }) + (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.NewtonsoftJson"; version = "7.0.2"; sha256 = "1x5pymqc315nb8z2414dvqdpcfd5zy5slcfa9b3vjhrbbbngaly7"; }) + (fetchNuGet { pname = "Microsoft.CodeAnalysis.BannedApiAnalyzers"; version = "3.3.4"; sha256 = "1vzrni7n94f17bzc13lrvcxvgspx9s25ap1p005z6i1ikx6wgx30"; }) (fetchNuGet { pname = "Microsoft.CSharp"; version = "4.5.0"; sha256 = "01i28nvzccxbqmiz217fxs6hnjwmd5fafs37rd49a6qp53y6623l"; }) (fetchNuGet { pname = "Microsoft.CSharp"; version = "4.7.0"; sha256 = "0gd67zlw554j098kabg887b5a6pq9kzavpa3jjy5w53ccjzjfy8j"; }) - (fetchNuGet { pname = "Microsoft.Data.Sqlite.Core"; version = "6.0.10"; sha256 = "1sdh5rw2pyg6c64z0haxf57bakd5kwaav624vlqif1m59iz26rag"; }) + (fetchNuGet { pname = "Microsoft.Data.Sqlite.Core"; version = "7.0.2"; sha256 = "0xipbci6pshj825a1r8nlc19hf26n4ba33sx7dbx727ja5lyjv8m"; }) (fetchNuGet { pname = "Microsoft.Diagnostics.NETCore.Client"; version = "0.2.61701"; sha256 = "1ic1607jj4ln8dbibf1fz5v9svk9x2kqlgvhndc6ijaqnbc4wcr1"; }) (fetchNuGet { pname = "Microsoft.Diagnostics.Runtime"; version = "2.0.161401"; sha256 = "02qcm8nv1ch07g8b0i60ynrjn33b8y5ivyk4rxal3vd9zfi6pvwi"; }) (fetchNuGet { pname = "Microsoft.DotNet.PlatformAbstractions"; version = "2.0.3"; sha256 = "020214swxm0hip1d9gjskrzmqzjnji7c6l5b3xcch8vp166066m9"; }) - (fetchNuGet { pname = "Microsoft.Extensions.Configuration.Abstractions"; version = "6.0.0"; sha256 = "0w6wwxv12nbc3sghvr68847wc9skkdgsicrz3fx4chgng1i3xy0j"; }) + (fetchNuGet { pname = "Microsoft.Extensions.Configuration.Abstractions"; version = "7.0.0"; sha256 = "1as8cygz0pagg17w22nsf6mb49lr2mcl1x8i3ad1wi8lyzygy1a3"; }) (fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection"; version = "6.0.0-rc.1.21451.13"; sha256 = "0r6945jq7c2f1wjifq514zvngicndjqfnsjya6hqw0yzah0jr56c"; }) - (fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection"; version = "6.0.1"; sha256 = "0kl5ypidmzllyxb91gwy3z950dc416p1y8wikzbdbp0l7aaaxq2p"; }) - (fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection.Abstractions"; version = "6.0.0"; sha256 = "1vi67fw7q99gj7jd64gnnfr4d2c0ijpva7g9prps48ja6g91x6a9"; }) + (fetchNuGet { pname = 
"Microsoft.Extensions.DependencyInjection"; version = "7.0.0"; sha256 = "121zs4jp8iimgbpzm3wsglhjwkc06irg1pxy8c1zcdlsg34cfq1p"; }) (fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection.Abstractions"; version = "6.0.0-rc.1.21451.13"; sha256 = "11dg16x6g0gssb143qpghxz1s41himvhr7yhjwxs9hacx4ij2dm1"; }) + (fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection.Abstractions"; version = "7.0.0"; sha256 = "181d7mp9307fs17lyy42f8cxnjwysddmpsalky4m0pqxcimnr6g7"; }) (fetchNuGet { pname = "Microsoft.Extensions.DependencyModel"; version = "2.0.3"; sha256 = "0dpyjp0hy9kkvk2dd4dclfmb10yq5avsw2a6v8nra9g6ii2p1nla"; }) - (fetchNuGet { pname = "Microsoft.Extensions.Features"; version = "6.0.10"; sha256 = "10avgg7c4iggq3i7gba0srd01fip637mmc903ymdpa2c92qgkqr8"; }) - (fetchNuGet { pname = "Microsoft.Extensions.Logging"; version = "6.0.0"; sha256 = "0fd9jii3y3irfcwlsiww1y9npjgabzarh33rn566wpcz24lijszi"; }) - (fetchNuGet { pname = "Microsoft.Extensions.Logging.Abstractions"; version = "6.0.0"; sha256 = "0b75fmins171zi6bfdcq1kcvyrirs8n91mknjnxy4c3ygi1rrnj0"; }) - (fetchNuGet { pname = "Microsoft.Extensions.Logging.Abstractions"; version = "6.0.2"; sha256 = "1wv54f3p3r2zj1pr9a6z8zqrh2ihm6v6qcw2pjwis1lcc0qb472m"; }) + (fetchNuGet { pname = "Microsoft.Extensions.Features"; version = "7.0.2"; sha256 = "18ipxpw73wi5gdj7vxhmqgk8rl3l95w6h5ajxbccdfyv5p75v66d"; }) + (fetchNuGet { pname = "Microsoft.Extensions.Logging"; version = "7.0.0"; sha256 = "1bqd3pqn5dacgnkq0grc17cgb2i0w8z1raw12nwm3p3zhrfcvgxf"; }) + (fetchNuGet { pname = "Microsoft.Extensions.Logging.Abstractions"; version = "7.0.0"; sha256 = "1gn7d18i1wfy13vrwhmdv1rmsb4vrk26kqdld4cgvh77yigj90xs"; }) (fetchNuGet { pname = "Microsoft.Extensions.ObjectPool"; version = "5.0.11"; sha256 = "0i7li76gmk6hml12aig4cvyvja9mgl16qr8pkwvx5vm6lc9a3nn4"; }) - (fetchNuGet { pname = "Microsoft.Extensions.Options"; version = "6.0.0"; sha256 = "008pnk2p50i594ahz308v81a41mbjz9mwcarqhmrjpl2d20c868g"; }) - (fetchNuGet { pname = "Microsoft.Extensions.Primitives"; version = "6.0.0"; sha256 = "1kjiw6s4yfz9gm7mx3wkhp06ghnbs95icj9hi505shz9rjrg42q2"; }) - (fetchNuGet { pname = "Microsoft.NET.StringTools"; version = "1.0.0"; sha256 = "06yakiyzgss399giivfx6xdrnfxqfsvy5fzm90scjanvandv0sdj"; }) + (fetchNuGet { pname = "Microsoft.Extensions.Options"; version = "7.0.0"; sha256 = "0b90zkrsk5dw3wr749rbynhpxlg4bgqdnd7d5vdlw2g9c7zlhgx6"; }) + (fetchNuGet { pname = "Microsoft.Extensions.Primitives"; version = "7.0.0"; sha256 = "1b4km9fszid9vp2zb3gya5ni9fn8bq62bzaas2ck2r7gs0sdys80"; }) + (fetchNuGet { pname = "Microsoft.NET.StringTools"; version = "17.4.0"; sha256 = "1smx30nq22plrn2mw4wb5vfgxk6hyx12b60c4wabmpnr81lq3nzv"; }) (fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "1.0.1"; sha256 = "01al6cfxp68dscl15z7rxfw9zvhm64dncsw09a1vmdkacsa2v6lr"; }) (fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "1.1.0"; sha256 = "08vh1r12g6ykjygq5d3vq09zylgb84l63k49jc4v8faw9g93iqqm"; }) (fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "2.0.0"; sha256 = "1fk2fk2639i7nzy58m9dvpdnzql4vb8yl8vr19r2fp8lmj9w2jr0"; }) @@ -110,7 +109,7 @@ (fetchNuGet { pname = "NativeLibraryLoader"; version = "1.0.12"; sha256 = "1nkn5iylxj8i7355cljfvrn3ha7ylf30dh8f63zhybc2vb8hbpkk"; }) (fetchNuGet { pname = "NETStandard.Library"; version = "1.6.1"; sha256 = "1z70wvsx2d847a2cjfii7b83pjfs34q05gb037fdjikv5kbagml8"; }) (fetchNuGet { pname = "NETStandard.Library"; version = "2.0.0"; sha256 = "1bc4ba8ahgk15m8k4nd7x406nhi0kwqzbgjk2dmw52ss553xz7iy"; }) - (fetchNuGet { pname = "Newtonsoft.Json"; 
version = "12.0.2"; sha256 = "0w2fbji1smd2y7x25qqibf1qrznmv4s6s0jvrbvr6alb7mfyqvh5"; }) + (fetchNuGet { pname = "Newtonsoft.Json"; version = "13.0.1"; sha256 = "0fijg0w6iwap8gvzyjnndds0q4b8anwxxvik7y8vgq97dram4srb"; }) (fetchNuGet { pname = "Newtonsoft.Json"; version = "13.0.2"; sha256 = "1p9splg1min274dpz7xdfgzrwkyfd3xlkygwpr1xgjvvyjvs6b0i"; }) (fetchNuGet { pname = "NuGet.Common"; version = "5.11.0"; sha256 = "1amf6scr5mcjdvd1fflag6i4qjwmydq5qwp6g3f099n901zq0dr3"; }) (fetchNuGet { pname = "NuGet.Configuration"; version = "5.11.0"; sha256 = "1s9pbrh7xy9jz7npz0sahdsj1cw8gfx1fwf3knv0ms1n0c9bk53l"; }) @@ -130,15 +129,15 @@ (fetchNuGet { pname = "ppy.ManagedBass"; version = "2022.1216.0"; sha256 = "19nnj1hq2v21mrplnivjr9c4y3wg4hhfnc062sjgzkmiv1cchvf8"; }) (fetchNuGet { pname = "ppy.ManagedBass.Fx"; version = "2022.1216.0"; sha256 = "1vw573mkligpx9qiqasw1683cqaa1kgnxhlnbdcj9c4320b1pwjm"; }) (fetchNuGet { pname = "ppy.ManagedBass.Mix"; version = "2022.1216.0"; sha256 = "185bpvgbnd8y20r7vxb1an4pd1aal9b7b5wvmv3knz0qg8j0chd9"; }) - (fetchNuGet { pname = "ppy.osu.Framework"; version = "2023.131.0"; sha256 = "1mbgcg0c8w6114c36jxypz7z1yps5zgw3f2lxw75fra0rylwqm23"; }) + (fetchNuGet { pname = "ppy.osu.Framework"; version = "2023.228.0"; sha256 = "1acr957wlpgwng6mvyh6m1wv59ljvk9wh2aclds8ary8li00skdb"; }) (fetchNuGet { pname = "ppy.osu.Framework.NativeLibs"; version = "2022.525.0"; sha256 = "1zsqj3xng06bb46vg79xx35n2dsh3crqg951r1ga2gxqzgzy4nk0"; }) (fetchNuGet { pname = "ppy.osu.Framework.SourceGeneration"; version = "2022.1222.1"; sha256 = "1pwwsp4rfzl6166mhrn5lsnyazpckhfh1m6ggf9d1lw2wb58vxfr"; }) - (fetchNuGet { pname = "ppy.osu.Game.Resources"; version = "2023.202.0"; sha256 = "13apknxly9fqqchmdvkdgfq2jbimln0ixg2d7yn6jcfd235279mj"; }) + (fetchNuGet { pname = "ppy.osu.Game.Resources"; version = "2023.228.0"; sha256 = "12i5z7pkm03zc34q162qjas20v4d9rd1qwbwz1l4iyv010riaa43"; }) (fetchNuGet { pname = "ppy.osuTK.NS20"; version = "1.0.211"; sha256 = "0j4a9n39pqm0cgdcps47p5n2mqph3h94r7hmf0bs59imif4jxvjy"; }) (fetchNuGet { pname = "ppy.SDL2-CS"; version = "1.0.630-alpha"; sha256 = "0jrf70jrz976b49ac0ygfy9qph2w7fnbfrqv0g0x7hlpaip33ra8"; }) - (fetchNuGet { pname = "Realm"; version = "10.18.0"; sha256 = "0dzwpcqkp8x8zah1bpx8cf01w4j1vi4gvipmaxlxczrc8p0f9zws"; }) - (fetchNuGet { pname = "Realm.Fody"; version = "10.18.0"; sha256 = "1d2y7kz1jp1b11kskgk0fpp6ci17aqkrhzdfq5vcr4y7a8hbi9j5"; }) - (fetchNuGet { pname = "Realm.SourceGenerator"; version = "10.18.0"; sha256 = "10bj3mgxdxgwsnpgbvlpnsj5ha582dvkvjnhb4qk7558g262dia8"; }) + (fetchNuGet { pname = "Realm"; version = "10.20.0"; sha256 = "0gy0l2r7726wb6i599n55dn9035h0g7k0binfiy2dy9bjwz60jqk"; }) + (fetchNuGet { pname = "Realm.Fody"; version = "10.20.0"; sha256 = "0rwcbbzr41iww3k59rjgy5xy7bna1x906h5blbllpywgpc2l5afw"; }) + (fetchNuGet { pname = "Realm.SourceGenerator"; version = "10.20.0"; sha256 = "0y0bwqg87pmsld7cmawwwz2ps5lpkbyyzkb9cj0fbynsn4jdygg0"; }) (fetchNuGet { pname = "Remotion.Linq"; version = "2.2.0"; sha256 = "1y46ni0xswmmiryp8sydjgryafwn458dr91f9xn653w73kdyk4xf"; }) (fetchNuGet { pname = "runtime.any.System.Collections"; version = "4.3.0"; sha256 = "0bv5qgm6vr47ynxqbnkc7i797fdi8gbjjxii173syrx14nmrkwg0"; }) (fetchNuGet { pname = "runtime.any.System.Diagnostics.Tools"; version = "4.3.0"; sha256 = "1wl76vk12zhdh66vmagni66h5xbhgqq7zkdpgw21jhxhvlbcl8pk"; }) @@ -182,18 +181,18 @@ (fetchNuGet { pname = "runtime.unix.System.Net.Sockets"; version = "4.3.0"; sha256 = "03npdxzy8gfv035bv1b9rz7c7hv0rxl5904wjz51if491mw0xy12"; }) (fetchNuGet { pname = 
"runtime.unix.System.Private.Uri"; version = "4.3.0"; sha256 = "1jx02q6kiwlvfksq1q9qr17fj78y5v6mwsszav4qcz9z25d5g6vk"; }) (fetchNuGet { pname = "runtime.unix.System.Runtime.Extensions"; version = "4.3.0"; sha256 = "0pnxxmm8whx38dp6yvwgmh22smknxmqs5n513fc7m4wxvs1bvi4p"; }) - (fetchNuGet { pname = "Sentry"; version = "3.23.1"; sha256 = "0cch803ixx5vqfm2zv5qdkkyksh1184669r1109snbkvvv5qy1g9"; }) + (fetchNuGet { pname = "Sentry"; version = "3.28.1"; sha256 = "09xl3bm5clqxnn8wyy36zwmj8ai8zci6ngw64d0r3rzgd95gbf61"; }) (fetchNuGet { pname = "SharpCompress"; version = "0.31.0"; sha256 = "01az7amjkxjbya5rdcqwxzrh2d3kybf1gsd3617rsxvvxadyra1r"; }) (fetchNuGet { pname = "SharpCompress"; version = "0.32.2"; sha256 = "1p198bl08ia89rf4n6yjpacj3yrz6s574snsfl40l8vlqcdrc1pm"; }) (fetchNuGet { pname = "SharpFNT"; version = "2.0.0"; sha256 = "1bgacgh9hbck0qvji6frbb50sdiqfdng2fvvfgfw8b9qaql91mx0"; }) (fetchNuGet { pname = "SharpGen.Runtime"; version = "2.0.0-beta.10"; sha256 = "0yxq0b4m96z71afc7sywfrlwz2pgr5nilacmssjk803v70f0ydr1"; }) (fetchNuGet { pname = "SharpGen.Runtime.COM"; version = "2.0.0-beta.10"; sha256 = "1qvpphja72x9r3yi96bnmwwy30b1n155v2yy2gzlxjil6qg3xjmb"; }) (fetchNuGet { pname = "SixLabors.ImageSharp"; version = "2.1.0"; sha256 = "0lmj3qs39v5jcf2rjwav43nqnc7g6sd4l226l2jw85nidzmpvkwr"; }) - (fetchNuGet { pname = "SQLitePCLRaw.bundle_e_sqlite3"; version = "2.1.2"; sha256 = "07rc4pj3rphi8nhzkcvilnm0fv27qcdp68jdwk4g0zjk7yfvbcay"; }) - (fetchNuGet { pname = "SQLitePCLRaw.core"; version = "2.0.6"; sha256 = "1w4iyg0v1v1z2m7akq7rv8lsgixp2m08732vr14vgpqs918bsy1i"; }) + (fetchNuGet { pname = "SQLitePCLRaw.bundle_e_sqlite3"; version = "2.1.4"; sha256 = "0shdspl9cm71wwqg9103s44r0l01r3sgnpxr523y4a0wlgac50g0"; }) (fetchNuGet { pname = "SQLitePCLRaw.core"; version = "2.1.2"; sha256 = "19hxv895lairrjmk4gkzd3mcb6b0na45xn4n551h4kckplqadg3d"; }) - (fetchNuGet { pname = "SQLitePCLRaw.lib.e_sqlite3"; version = "2.1.2"; sha256 = "0jn98bkjk8h4smi09z31ib6s6392054lwmkziqmkqf5gf614k2fz"; }) - (fetchNuGet { pname = "SQLitePCLRaw.provider.e_sqlite3"; version = "2.1.2"; sha256 = "0bnm2fhvcsyg5ry74gal2cziqnyf5a8d2cb491vsa7j41hbbx7kv"; }) + (fetchNuGet { pname = "SQLitePCLRaw.core"; version = "2.1.4"; sha256 = "09akxz92qipr1cj8mk2hw99i0b81wwbwx26gpk21471zh543f8ld"; }) + (fetchNuGet { pname = "SQLitePCLRaw.lib.e_sqlite3"; version = "2.1.4"; sha256 = "11l85ksv1ck46j8z08fyf0c3l572zmp9ynb7p5chm5iyrh8xwkkn"; }) + (fetchNuGet { pname = "SQLitePCLRaw.provider.e_sqlite3"; version = "2.1.4"; sha256 = "0b8f51nrjkq0pmfzjaqk5rp7r0cp2lbdm2whynj3xsjklppzmn35"; }) (fetchNuGet { pname = "StbiSharp"; version = "1.1.0"; sha256 = "0wbw20m7nyhxj32k153l668sxigamlwig0qpz8l8d0jqz35vizm0"; }) (fetchNuGet { pname = "System.AppContext"; version = "4.1.0"; sha256 = "0fv3cma1jp4vgj7a8hqc9n7hr1f1kjp541s6z0q1r6nazb4iz9mz"; }) (fetchNuGet { pname = "System.AppContext"; version = "4.3.0"; sha256 = "1649qvy3dar900z3g817h17nl8jp4ka5vcfmsr05kh0fshn7j3ya"; }) @@ -209,7 +208,6 @@ (fetchNuGet { pname = "System.Diagnostics.Debug"; version = "4.0.11"; sha256 = "0gmjghrqmlgzxivd2xl50ncbglb7ljzb66rlx8ws6dv8jm0d5siz"; }) (fetchNuGet { pname = "System.Diagnostics.Debug"; version = "4.3.0"; sha256 = "00yjlf19wjydyr6cfviaph3vsjzg3d5nvnya26i2fvfg53sknh3y"; }) (fetchNuGet { pname = "System.Diagnostics.DiagnosticSource"; version = "4.3.0"; sha256 = "0z6m3pbiy0qw6rn3n209rrzf9x1k4002zh90vwcrsym09ipm2liq"; }) - (fetchNuGet { pname = "System.Diagnostics.DiagnosticSource"; version = "6.0.0"; sha256 = "0rrihs9lnb1h6x4h0hn6kgfnh58qq7hx8qq99gh6fayx4dcnx3s5"; }) (fetchNuGet { pname 
= "System.Diagnostics.Tools"; version = "4.3.0"; sha256 = "0in3pic3s2ddyibi8cvgl102zmvp9r9mchh82ns9f0ms4basylw1"; }) (fetchNuGet { pname = "System.Diagnostics.Tracing"; version = "4.3.0"; sha256 = "1m3bx6c2s958qligl67q7grkwfz3w53hpy7nc97mh6f7j5k168c4"; }) (fetchNuGet { pname = "System.Dynamic.Runtime"; version = "4.0.11"; sha256 = "1pla2dx8gkidf7xkciig6nifdsb494axjvzvann8g2lp3dbqasm9"; }) @@ -226,8 +224,8 @@ (fetchNuGet { pname = "System.IO.FileSystem"; version = "4.0.1"; sha256 = "0kgfpw6w4djqra3w5crrg8xivbanh1w9dh3qapb28q060wb9flp1"; }) (fetchNuGet { pname = "System.IO.FileSystem"; version = "4.3.0"; sha256 = "0z2dfrbra9i6y16mm9v1v6k47f0fm617vlb7s5iybjjsz6g1ilmw"; }) (fetchNuGet { pname = "System.IO.FileSystem.Primitives"; version = "4.3.0"; sha256 = "0j6ndgglcf4brg2lz4wzsh1av1gh8xrzdsn9f0yznskhqn1xzj9c"; }) - (fetchNuGet { pname = "System.IO.Packaging"; version = "6.0.0"; sha256 = "112nq0k2jc4vh71rifqqmpjxkaanxfapk7g8947jkfgq3lmfmaac"; }) - (fetchNuGet { pname = "System.IO.Pipelines"; version = "6.0.3"; sha256 = "1jgdazpmwc21dd9naq3l9n5s8a1jnbwlvgkf1pnm0aji6jd4xqdz"; }) + (fetchNuGet { pname = "System.IO.Packaging"; version = "7.0.0"; sha256 = "16fgj2ab5ci217shmfsi6c0rnmkh90h6vyb60503nhpmh7y8di13"; }) + (fetchNuGet { pname = "System.IO.Pipelines"; version = "7.0.0"; sha256 = "1ila2vgi1w435j7g2y7ykp2pdbh9c5a02vm85vql89az93b7qvav"; }) (fetchNuGet { pname = "System.Linq"; version = "4.1.0"; sha256 = "1ppg83svb39hj4hpp5k7kcryzrf3sfnm08vxd5sm2drrijsla2k5"; }) (fetchNuGet { pname = "System.Linq"; version = "4.3.0"; sha256 = "1w0gmba695rbr80l1k2h4mrwzbzsyfl2z4klmpbsvsg5pm4a56s7"; }) (fetchNuGet { pname = "System.Linq.Expressions"; version = "4.1.0"; sha256 = "1gpdxl6ip06cnab7n3zlcg6mqp7kknf73s8wjinzi4p0apw82fpg"; }) @@ -235,6 +233,7 @@ (fetchNuGet { pname = "System.Linq.Queryable"; version = "4.0.1"; sha256 = "11jn9k34g245yyf260gr3ldzvaqa9477w2c5nhb1p8vjx4xm3qaw"; }) (fetchNuGet { pname = "System.Memory"; version = "4.5.3"; sha256 = "0naqahm3wljxb5a911d37mwjqjdxv9l0b49p5dmfyijvni2ppy8a"; }) (fetchNuGet { pname = "System.Memory"; version = "4.5.4"; sha256 = "14gbbs22mcxwggn0fcfs1b062521azb9fbb7c113x0mq6dzq9h6y"; }) + (fetchNuGet { pname = "System.Memory"; version = "4.5.5"; sha256 = "08jsfwimcarfzrhlyvjjid61j02irx6xsklf32rv57x2aaikvx0h"; }) (fetchNuGet { pname = "System.Net.Http"; version = "4.3.0"; sha256 = "1i4gc757xqrzflbk7kc5ksn20kwwfjhw9w7pgdkn19y3cgnl302j"; }) (fetchNuGet { pname = "System.Net.NameResolution"; version = "4.3.0"; sha256 = "15r75pwc0rm3vvwsn8rvm2krf929mjfwliv0mpicjnii24470rkq"; }) (fetchNuGet { pname = "System.Net.Primitives"; version = "4.3.0"; sha256 = "0c87k50rmdgmxx7df2khd9qj7q35j9rzdmm2572cc55dygmdk3ii"; }) @@ -294,10 +293,12 @@ (fetchNuGet { pname = "System.Text.Encoding"; version = "4.3.0"; sha256 = "1f04lkir4iladpp51sdgmis9dj4y8v08cka0mbmsy0frc9a4gjqr"; }) (fetchNuGet { pname = "System.Text.Encoding.CodePages"; version = "5.0.0"; sha256 = "1bn2pzaaq4wx9ixirr8151vm5hynn3lmrljcgjx9yghmm4k677k0"; }) (fetchNuGet { pname = "System.Text.Encoding.Extensions"; version = "4.3.0"; sha256 = "11q1y8hh5hrp5a3kw25cb6l00v5l5dvirkz8jr3sq00h1xgcgrxy"; }) + (fetchNuGet { pname = "System.Text.Encodings.Web"; version = "7.0.0"; sha256 = "1151hbyrcf8kyg1jz8k9awpbic98lwz9x129rg7zk1wrs6vjlpxl"; }) + (fetchNuGet { pname = "System.Text.Json"; version = "7.0.1"; sha256 = "1lqh6nrrkx4sksvn5509y6j9z8zkhcls0yghd0n31zywmmy3pnf2"; }) (fetchNuGet { pname = "System.Text.RegularExpressions"; version = "4.3.0"; sha256 = "1bgq51k7fwld0njylfn7qc5fmwrk2137gdq7djqdsw347paa9c2l"; }) (fetchNuGet { pname = 
"System.Threading"; version = "4.0.11"; sha256 = "19x946h926bzvbsgj28csn46gak2crv2skpwsx80hbgazmkgb1ls"; }) (fetchNuGet { pname = "System.Threading"; version = "4.3.0"; sha256 = "0rw9wfamvhayp5zh3j7p1yfmx9b5khbf4q50d8k5rk993rskfd34"; }) - (fetchNuGet { pname = "System.Threading.Channels"; version = "6.0.0"; sha256 = "1qbyi7yymqc56frqy7awvcqc1m7x3xrpx87a37dgb3mbrjg9hlcj"; }) + (fetchNuGet { pname = "System.Threading.Channels"; version = "7.0.0"; sha256 = "1qrmqa6hpzswlmyp3yqsbnmia9i5iz1y208xpqc1y88b1f6j1v8a"; }) (fetchNuGet { pname = "System.Threading.Tasks"; version = "4.0.11"; sha256 = "0nr1r41rak82qfa5m0lhk9mp0k93bvfd7bbd9sdzwx9mb36g28p5"; }) (fetchNuGet { pname = "System.Threading.Tasks"; version = "4.3.0"; sha256 = "134z3v9abw3a6jsw17xl3f6hqjpak5l682k2vz39spj4kmydg6k7"; }) (fetchNuGet { pname = "System.Threading.Tasks.Extensions"; version = "4.3.0"; sha256 = "1xxcx2xh8jin360yjwm4x4cf5y3a2bwpn2ygkfkwkicz7zk50s2z"; }) From 0bba5cdd8ce70f07500910703721b4e9e52a0889 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 4 Mar 2023 07:25:33 +0000 Subject: [PATCH 102/154] python310Packages.peaqevcore: 12.2.1 -> 12.2.6 --- pkgs/development/python-modules/peaqevcore/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/peaqevcore/default.nix b/pkgs/development/python-modules/peaqevcore/default.nix index 3333331e85f6..be36f758f941 100644 --- a/pkgs/development/python-modules/peaqevcore/default.nix +++ b/pkgs/development/python-modules/peaqevcore/default.nix @@ -6,14 +6,14 @@ buildPythonPackage rec { pname = "peaqevcore"; - version = "12.2.1"; + version = "12.2.6"; format = "setuptools"; disabled = pythonOlder "3.7"; src = fetchPypi { inherit pname version; - hash = "sha256-WOuKGVrNZzvY7F0Mvj3MjSdTu47c5Y11ySe1qorzlWE="; + hash = "sha256-IAqXp/d0f1khhNpkp4uQmxqJ4Xh8Nl87i+iMa3U9EDM="; }; postPatch = '' From 18f85de76dac7d3a86767be3aea313a1add5ec67 Mon Sep 17 00:00:00 2001 From: K900 Date: Sat, 4 Mar 2023 10:50:13 +0300 Subject: [PATCH 103/154] nixos/firewall: assert that the kernel supports conntrack helper auto-loading --- nixos/doc/manual/release-notes/rl-2305.section.md | 2 ++ nixos/modules/services/networking/firewall.nix | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/nixos/doc/manual/release-notes/rl-2305.section.md b/nixos/doc/manual/release-notes/rl-2305.section.md index 6fcab17df5ee..dad97b42d3bb 100644 --- a/nixos/doc/manual/release-notes/rl-2305.section.md +++ b/nixos/doc/manual/release-notes/rl-2305.section.md @@ -142,6 +142,8 @@ In addition to numerous new and upgraded packages, this release has the followin - [services.xserver.videoDrivers](options.html#opt-services.xserver.videoDrivers) now defaults to the `modesetting` driver over device-specific ones. The `radeon`, `amdgpu` and `nouveau` drivers are still available, but effectively unmaintained and not recommended for use. +- conntrack helper autodetection has been removed from kernels 6.0 and up upstream, and an assertion was added to ensure things don't silently stop working. Migrate your configuration to assign helpers explicitly or use an older LTS kernel branch as a temporary workaround. 
+ ## Other Notable Changes {#sec-release-23.05-notable-changes} diff --git a/nixos/modules/services/networking/firewall.nix b/nixos/modules/services/networking/firewall.nix index 4e332d489e4d..ac02a93836b8 100644 --- a/nixos/modules/services/networking/firewall.nix +++ b/nixos/modules/services/networking/firewall.nix @@ -269,6 +269,10 @@ in assertion = cfg.filterForward -> config.networking.nftables.enable; message = "filterForward only works with the nftables based firewall"; } + { + assertion = cfg.autoLoadConntrackHelpers -> lib.versionOlder config.boot.kernelPackages.kernel.version "6"; + message = "conntrack helper autoloading has been removed from kernel 6.0 and newer"; + } ]; networking.firewall.trustedInterfaces = [ "lo" ]; From 84f3520c8ff96b0eb10b9d511e630c5bace07c29 Mon Sep 17 00:00:00 2001 From: K900 Date: Sat, 4 Mar 2023 10:50:38 +0300 Subject: [PATCH 104/154] nixos/tests/nat: remove conntrack helpers test Removed upstream --- nixos/tests/all-tests.nix | 2 -- nixos/tests/nat.nix | 15 +++------------ 2 files changed, 3 insertions(+), 14 deletions(-) diff --git a/nixos/tests/all-tests.nix b/nixos/tests/all-tests.nix index 785a5621f57e..28ea9272ffb7 100644 --- a/nixos/tests/all-tests.nix +++ b/nixos/tests/all-tests.nix @@ -433,10 +433,8 @@ in { nagios = handleTest ./nagios.nix {}; nar-serve = handleTest ./nar-serve.nix {}; nat.firewall = handleTest ./nat.nix { withFirewall = true; }; - nat.firewall-conntrack = handleTest ./nat.nix { withFirewall = true; withConntrackHelpers = true; }; nat.standalone = handleTest ./nat.nix { withFirewall = false; }; nat.nftables.firewall = handleTest ./nat.nix { withFirewall = true; nftables = true; }; - nat.nftables.firewall-conntrack = handleTest ./nat.nix { withFirewall = true; withConntrackHelpers = true; nftables = true; }; nat.nftables.standalone = handleTest ./nat.nix { withFirewall = false; nftables = true; }; nats = handleTest ./nats.nix {}; navidrome = handleTest ./navidrome.nix {}; diff --git a/nixos/tests/nat.nix b/nixos/tests/nat.nix index 912a04deae8b..0b617cea7774 100644 --- a/nixos/tests/nat.nix +++ b/nixos/tests/nat.nix @@ -3,7 +3,7 @@ # client on the inside network, a server on the outside network, and a # router connected to both that performs Network Address Translation # for the client. -import ./make-test-python.nix ({ pkgs, lib, withFirewall, withConntrackHelpers ? false, nftables ? false, ... }: +import ./make-test-python.nix ({ pkgs, lib, withFirewall, nftables ? false, ... }: let unit = if nftables then "nftables" else (if withFirewall then "firewall" else "nat"); @@ -16,16 +16,11 @@ import ./make-test-python.nix ({ pkgs, lib, withFirewall, withConntrackHelpers ? networking.nat.internalIPs = [ "192.168.1.0/24" ]; networking.nat.externalInterface = "eth1"; } - (lib.optionalAttrs withConntrackHelpers { - networking.firewall.connectionTrackingModules = [ "ftp" ]; - networking.firewall.autoLoadConntrackHelpers = true; - }) ]; in { name = "nat" + (lib.optionalString nftables "Nftables") - + (if withFirewall then "WithFirewall" else "Standalone") - + (lib.optionalString withConntrackHelpers "withConntrackHelpers"); + + (if withFirewall then "WithFirewall" else "Standalone"); meta = with pkgs.lib.maintainers; { maintainers = [ eelco rob ]; }; @@ -39,10 +34,6 @@ import ./make-test-python.nix ({ pkgs, lib, withFirewall, withConntrackHelpers ? 
(pkgs.lib.head nodes.router.config.networking.interfaces.eth2.ipv4.addresses).address; networking.nftables.enable = nftables; } - (lib.optionalAttrs withConntrackHelpers { - networking.firewall.connectionTrackingModules = [ "ftp" ]; - networking.firewall.autoLoadConntrackHelpers = true; - }) ]; router = @@ -95,7 +86,7 @@ import ./make-test-python.nix ({ pkgs, lib, withFirewall, withConntrackHelpers ? client.succeed("curl -v ftp://server/foo.txt >&2") # Test whether active FTP works. - client.${if withConntrackHelpers then "succeed" else "fail"}("curl -v -P - ftp://server/foo.txt >&2") + client.fail("curl -v -P - ftp://server/foo.txt >&2") # Test ICMP. client.succeed("ping -c 1 router >&2") From dc01a822821a033f188987a4a861cfe4381914ba Mon Sep 17 00:00:00 2001 From: K900 Date: Sat, 4 Mar 2023 11:01:44 +0300 Subject: [PATCH 105/154] lkl: add note for future update attempts --- pkgs/applications/virtualization/lkl/default.nix | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pkgs/applications/virtualization/lkl/default.nix b/pkgs/applications/virtualization/lkl/default.nix index f9b5b7f62695..c7503e789680 100644 --- a/pkgs/applications/virtualization/lkl/default.nix +++ b/pkgs/applications/virtualization/lkl/default.nix @@ -6,6 +6,11 @@ stdenv.mkDerivation rec { pname = "lkl"; + + # NOTE: pinned to the last known version that doesn't have a hang in cptofs. + # Please verify `nix build -f nixos/release-combined.nix nixos.ova` works + # before attempting to update again. + # ref: https://github.com/NixOS/nixpkgs/pull/219434 version = "2022-08-08"; outputs = [ "dev" "lib" "out" ]; From 32bc50dc369e3c0ee6e7516b6766c4a3cfe2a498 Mon Sep 17 00:00:00 2001 From: K900 Date: Sat, 4 Mar 2023 11:05:10 +0300 Subject: [PATCH 106/154] lkl: add raitobezarius to maintainers (as requested on Matrix) --- pkgs/applications/virtualization/lkl/default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/applications/virtualization/lkl/default.nix b/pkgs/applications/virtualization/lkl/default.nix index c7503e789680..839022a8d551 100644 --- a/pkgs/applications/virtualization/lkl/default.nix +++ b/pkgs/applications/virtualization/lkl/default.nix @@ -77,6 +77,6 @@ stdenv.mkDerivation rec { homepage = "https://github.com/lkl/linux/"; platforms = platforms.linux; # Darwin probably works too but I haven't tested it license = licenses.gpl2; - maintainers = with maintainers; [ copumpkin ]; + maintainers = with maintainers; [ copumpkin raitobezarius ]; }; } From 52006da65b2d65d8d576975b2ee8e52c46f68a09 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vladim=C3=ADr=20=C4=8Cun=C3=A1t?= Date: Sat, 4 Mar 2023 09:28:09 +0100 Subject: [PATCH 107/154] nixos/release-*: finish dropping the conntrack tests This was forgotten in commit 84f3520c8ff96b. 
--- nixos/release-combined.nix | 1 - nixos/release-small.nix | 1 - 2 files changed, 2 deletions(-) diff --git a/nixos/release-combined.nix b/nixos/release-combined.nix index 9652be5d85b4..125086294d41 100644 --- a/nixos/release-combined.nix +++ b/nixos/release-combined.nix @@ -100,7 +100,6 @@ in rec { (onFullSupported "nixos.tests.login") (onFullSupported "nixos.tests.misc") (onFullSupported "nixos.tests.mutableUsers") - (onFullSupported "nixos.tests.nat.firewall-conntrack") (onFullSupported "nixos.tests.nat.firewall") (onFullSupported "nixos.tests.nat.standalone") (onFullSupported "nixos.tests.networking.scripted.bond") diff --git a/nixos/release-small.nix b/nixos/release-small.nix index 05ff9ca2499f..7be300bbcf3b 100644 --- a/nixos/release-small.nix +++ b/nixos/release-small.nix @@ -118,7 +118,6 @@ in rec { "nixos.tests.ipv6" "nixos.tests.login" "nixos.tests.misc" - "nixos.tests.nat.firewall-conntrack" "nixos.tests.nat.firewall" "nixos.tests.nat.standalone" "nixos.tests.nfs3.simple" From 546a42df3cdc3bdf85bd5c8fd7b4faf89d996af3 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 4 Mar 2023 09:24:39 +0000 Subject: [PATCH 108/154] mwic: 0.7.9 -> 0.7.10 --- pkgs/applications/misc/mwic/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/misc/mwic/default.nix b/pkgs/applications/misc/mwic/default.nix index e1f9baa60883..a31d6d5a90ab 100644 --- a/pkgs/applications/misc/mwic/default.nix +++ b/pkgs/applications/misc/mwic/default.nix @@ -1,12 +1,12 @@ { lib, stdenv, fetchurl, pythonPackages }: stdenv.mkDerivation rec { - version = "0.7.9"; + version = "0.7.10"; pname = "mwic"; src = fetchurl { url = "https://github.com/jwilk/mwic/releases/download/${version}/${pname}-${version}.tar.gz"; - sha256 = "sha256-i7DSvUBUMOvn2aYpwYOCDHKq0nkleknD7k2xopo+C5s="; + sha256 = "sha256-dmIHPehkxpSb78ymVpcPCu4L41coskrHQOg067dprOo="; }; makeFlags=["PREFIX=\${out}"]; From 1bca4bbc8aea29fce9ab85bc796a8cfae9e8f1b4 Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Sat, 4 Mar 2023 09:33:51 +0000 Subject: [PATCH 109/154] waf-tester: 0.6.12 -> 0.6.13 --- pkgs/tools/security/waf-tester/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/tools/security/waf-tester/default.nix b/pkgs/tools/security/waf-tester/default.nix index 427bb1db2c31..3372e09d2c7b 100644 --- a/pkgs/tools/security/waf-tester/default.nix +++ b/pkgs/tools/security/waf-tester/default.nix @@ -7,16 +7,16 @@ buildGoModule rec { pname = "waf-tester"; - version = "0.6.12"; + version = "0.6.13"; src = fetchFromGitHub { owner = "jreisinger"; repo = pname; rev = "v${version}"; - hash = "sha256-baj9JuC4PF5c50K2aY+xwdE9t4aTzOu+isqJ6r1pWuc="; + hash = "sha256-UPviooQNGRVwf/bTz9ApedJDAGeCvh9iD1HXFOQXPcw="; }; - vendorSha256 = "sha256-qVzgZX4HVXZ3qgYAu3a46vcGl4Pk2D1Zx/giEmPEG88="; + vendorHash = "sha256-HOYHrR1LtVcXMKFHPaA7PYH4Fp9nhqal2oxYTq/i4/8="; ldflags = [ "-s" From c016144591b351e6ade3a5e54e619baa0acfe135 Mon Sep 17 00:00:00 2001 From: Maximilian Bosch Date: Sat, 4 Mar 2023 10:57:56 +0100 Subject: [PATCH 110/154] grafana: 9.4.2 -> 9.4.3 ChangeLog: https://github.com/grafana/grafana/releases/tag/v9.4.3 --- pkgs/servers/monitoring/grafana/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/servers/monitoring/grafana/default.nix b/pkgs/servers/monitoring/grafana/default.nix index b0c9a538e049..24813af5a29d 100644 --- a/pkgs/servers/monitoring/grafana/default.nix +++ b/pkgs/servers/monitoring/grafana/default.nix @@ -2,7 +2,7 @@ buildGoModule rec { pname = "grafana"; - version = "9.4.2"; + version = "9.4.3"; excludedPackages = [ "alert_webhook_listener" "clean-swagger" "release_publisher" "slow_proxy" "slow_proxy_mac" "macaron" "devenv" ]; @@ -10,12 +10,12 @@ buildGoModule rec { rev = "v${version}"; owner = "grafana"; repo = "grafana"; - sha256 = "sha256-dSKIQiav6y4P1e/7CptIdRuOrDdXdvItCaRBcbepadE="; + sha256 = "sha256-LYUbypPXoWwWA4u2JxhUS/lozQNo2DCFGDPCmNP3GoE="; }; srcStatic = fetchurl { url = "https://dl.grafana.com/oss/release/grafana-${version}.linux-amd64.tar.gz"; - sha256 = "sha256-dBp6V5ozu1koSoXIecjysSIdG0hL1K5lH9Z8yougUKo="; + sha256 = "sha256-aq6/sMfYVebxh46+zxphfWttFN4vBpUgCLXobLWVozk="; }; vendorSha256 = "sha256-atnlEdGDiUqQkslvRlPSi6VC5rEvRVV6R2Wxur3geew="; From 1d6febd896ea08065fc0a70893d4ff3db11194af Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Sat, 4 Mar 2023 10:16:16 +0000 Subject: [PATCH 111/154] automatic-timezoned: 1.0.62 -> 1.0.68 --- pkgs/tools/system/automatic-timezoned/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/tools/system/automatic-timezoned/default.nix b/pkgs/tools/system/automatic-timezoned/default.nix index ceee8c9efa6c..b2a7bb8a8b5a 100644 --- a/pkgs/tools/system/automatic-timezoned/default.nix +++ b/pkgs/tools/system/automatic-timezoned/default.nix @@ -5,16 +5,16 @@ rustPlatform.buildRustPackage rec { pname = "automatic-timezoned"; - version = "1.0.62"; + version = "1.0.68"; src = fetchFromGitHub { owner = "maxbrunet"; repo = pname; rev = "v${version}"; - sha256 = "sha256-3T9/VAr/ZrGTZZK3rsIpnOeKdp9WxPO0JkGamDi3hyM="; + sha256 = "sha256-wtmyUlkruFE3dQmsb9x2683gwEVjsBCQJ8VW4b0IdkU="; }; - cargoHash = "sha256-rNMEXvAGpKxn2t6uvgTx3sc3tpGCXmzOM/iPWwWq2JM="; + cargoHash = "sha256-nQx70KtWzvg6w8UNJqTrqzBc5SZKwCiHx2jhoBbmNP4="; meta = with lib; { description = "Automatically update system timezone based on location"; From 58347c6d96a12ea7d4691f9ace76ce2d1be2c80a Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 11:20:08 +0100 Subject: [PATCH 112/154] waf-tester: add changelog to meta --- pkgs/tools/security/waf-tester/default.nix | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkgs/tools/security/waf-tester/default.nix b/pkgs/tools/security/waf-tester/default.nix index 3372e09d2c7b..b8dd01ed19b8 100644 --- a/pkgs/tools/security/waf-tester/default.nix +++ b/pkgs/tools/security/waf-tester/default.nix @@ -12,7 +12,7 @@ buildGoModule rec { src = fetchFromGitHub { owner = "jreisinger"; repo = pname; - rev = "v${version}"; + rev = "refs/tags/v${version}"; hash = "sha256-UPviooQNGRVwf/bTz9ApedJDAGeCvh9iD1HXFOQXPcw="; }; @@ -33,6 +33,7 @@ buildGoModule rec { meta = with lib; { description = "Tool to test Web Application Firewalls (WAFs)"; homepage = "https://github.com/jreisinger/waf-tester"; + changelog = "https://github.com/jreisinger/waf-tester/releases/tag/v${version}"; license = licenses.gpl3Only; maintainers = with maintainers; [ fab ]; }; From 3330d1de1d6fd0a72058dbf8b2da007b83cb747e Mon Sep 17 00:00:00 2001 From: Maximilian Bosch Date: Sat, 4 Mar 2023 11:27:51 +0100 Subject: [PATCH 113/154] prometheus-redis-exporter: 1.47.0 -> 1.48.0 ChangeLog: https://github.com/oliver006/redis_exporter/releases/tag/v1.48.0 --- pkgs/servers/monitoring/prometheus/redis-exporter.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/servers/monitoring/prometheus/redis-exporter.nix b/pkgs/servers/monitoring/prometheus/redis-exporter.nix index a4590277acc9..a457a2ee39cf 100644 --- a/pkgs/servers/monitoring/prometheus/redis-exporter.nix +++ b/pkgs/servers/monitoring/prometheus/redis-exporter.nix @@ -2,13 +2,13 @@ buildGoModule rec { pname = "redis_exporter"; - version = "1.47.0"; + version = "1.48.0"; src = fetchFromGitHub { owner = "oliver006"; repo = "redis_exporter"; rev = "v${version}"; - sha256 = "sha256-pSLFfArmG4DIgYUD8qz71P+7RYIQuUycnYzNFXNhZ8A="; + sha256 = "sha256-hBkekoVwNuRDGhpvbW57eR+UUMkntdEcHJAVQbwk7NE="; }; vendorHash = "sha256-Owfxy7WkucQ6BM8yjnZg9/8CgopGTtbQTTUuxoT3RRE="; @@ -28,7 +28,7 @@ buildGoModule rec { description = "Prometheus exporter for Redis metrics"; inherit (src.meta) homepage; license = licenses.mit; - maintainers = with maintainers; [ eskytthe srhb ]; + maintainers = with maintainers; [ eskytthe srhb ma27 ]; platforms = platforms.unix; }; } From 004d858df899a7096f508dc7d76649f96b01577e 
Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Batuhan=20Apayd=C4=B1n?= Date: Fri, 3 Mar 2023 22:56:59 +0300 Subject: [PATCH 114/154] rekor: add developer-guy to maintainers list MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Batuhan Apaydın --- pkgs/tools/security/rekor/default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/tools/security/rekor/default.nix b/pkgs/tools/security/rekor/default.nix index e367314ba19b..6fadf0c8f361 100644 --- a/pkgs/tools/security/rekor/default.nix +++ b/pkgs/tools/security/rekor/default.nix @@ -54,7 +54,7 @@ let homepage = "https://github.com/sigstore/rekor"; changelog = "https://github.com/sigstore/rekor/releases/tag/v${version}"; license = licenses.asl20; - maintainers = with maintainers; [ lesuisse jk ]; + maintainers = with maintainers; [ lesuisse jk developer-guy ]; }; }; in { From 5a676e73bffe76a16f56a8aed6a7141e8484ca61 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 11:29:29 +0100 Subject: [PATCH 115/154] python310Packages.angrop: 9.2.7 -> 9.2.8 Diff: https://github.com/angr/angrop/compare/refs/tags/v9.2.7...v9.2.8 --- .../python-modules/angrop/default.nix | 23 ++++--------------- 1 file changed, 5 insertions(+), 18 deletions(-) diff --git a/pkgs/development/python-modules/angrop/default.nix b/pkgs/development/python-modules/angrop/default.nix index eb8c6706b5d5..69b06e58784d 100644 --- a/pkgs/development/python-modules/angrop/default.nix +++ b/pkgs/development/python-modules/angrop/default.nix @@ -2,16 +2,15 @@ , angr , buildPythonPackage , fetchFromGitHub -, fetchpatch , progressbar , pythonOlder -, pythonRelaxDepsHook +, setuptools , tqdm }: buildPythonPackage rec { pname = "angrop"; - version = "9.2.7"; + version = "9.2.8"; format = "pyproject"; disabled = pythonOlder "3.6"; @@ -19,20 +18,12 @@ buildPythonPackage rec { src = fetchFromGitHub { owner = "angr"; repo = pname; - rev = "v${version}"; - hash = "sha256-wIPk7Cz7FSPviPFBSLrBjLr9M0o3pyoJM7wiAhHrg9Q="; + rev = "refs/tags/v${version}"; + hash = "sha256-zmWdGbFzwLDP7MUqEprZcIgA7lAdCrafWYohAehJyh0="; }; - patches = [ - (fetchpatch { - name = "compatibility-with-newer-angr.patch"; - url = "https://github.com/angr/angrop/commit/23194ee4ecdcb7a7390ec04eb133786ec3f807b1.patch"; - hash = "sha256-n9/oPUblUHSk81qwU129rnNOjsNViaegp6454CaDo+8="; - }) - ]; - nativeBuildInputs = [ - pythonRelaxDepsHook + setuptools ]; propagatedBuildInputs = [ @@ -41,10 +32,6 @@ buildPythonPackage rec { tqdm ]; - pythonRelaxDeps = [ - "angr" - ]; - # Tests have additional requirements, e.g., angr binaries # cle is executing the tests with the angr binaries already and is a requirement of angr doCheck = false; From 9692e965c64ab57aabb90285028e3e2e73430f9f Mon Sep 17 00:00:00 2001 From: K900 Date: Sat, 4 Mar 2023 11:10:37 +0300 Subject: [PATCH 116/154] virtualbox: 6.1.40 -> 7.0.6 - bump versions - add new dependencies - lrelease is an unconditional dependency now - more Qt path hackery - remove rdesktop-vrdp (removed upstream) --- .../virtualization/virtualbox/default.nix | 16 ++++++++++------ .../virtualization/virtualbox/extpack.nix | 2 +- .../virtualbox/guest-additions/default.nix | 2 +- ...x11extras.patch => qt-dependency-paths.patch} | 4 ++-- 4 files changed, 14 insertions(+), 10 deletions(-) rename pkgs/applications/virtualization/virtualbox/{qtx11extras.patch => qt-dependency-paths.patch} (78%) diff --git a/pkgs/applications/virtualization/virtualbox/default.nix 
b/pkgs/applications/virtualization/virtualbox/default.nix index d65f101b3774..fc3303baf339 100644 --- a/pkgs/applications/virtualization/virtualbox/default.nix +++ b/pkgs/applications/virtualization/virtualbox/default.nix @@ -3,6 +3,7 @@ , libpng, glib, lvm2, libXrandr, libXinerama, libopus, qtbase, qtx11extras , qttools, qtsvg, qtwayland, pkg-config, which, docbook_xsl, docbook_xml_dtd_43 , alsa-lib, curl, libvpx, nettools, dbus, substituteAll, gsoap, zlib +, yasm, glslang # If open-watcom-bin is not passed, VirtualBox will fall back to use # the shipped alternative sources (assembly). , open-watcom-bin @@ -23,19 +24,19 @@ let buildType = "release"; # Use maintainers/scripts/update.nix to update the version and all related hashes or # change the hashes in extpack.nix and guest-additions/default.nix as well manually. - version = "6.1.40"; + version = "7.0.6"; in stdenv.mkDerivation { pname = "virtualbox"; inherit version; src = fetchurl { url = "https://download.virtualbox.org/virtualbox/${version}/VirtualBox-${version}.tar.bz2"; - sha256 = "bc857555d3e836ad9350a8f7b03bb54d2fdc04dddb2043d09813f4634bca4814"; + sha256 = "f146d9a86a35af0abb010e628636fd800cb476cc2ce82f95b0c0ca876e1756ff"; }; outputs = [ "out" "modsrc" ]; - nativeBuildInputs = [ pkg-config which docbook_xsl docbook_xml_dtd_43 ] + nativeBuildInputs = [ pkg-config which docbook_xsl docbook_xml_dtd_43 yasm glslang ] ++ optional (!headless) wrapQtAppsHook; # Wrap manually because we wrap just a small number of executables. @@ -94,7 +95,7 @@ in stdenv.mkDerivation { qtPluginPath = "${qtbase.bin}/${qtbase.qtPluginPrefix}:${qtsvg.bin}/${qtbase.qtPluginPrefix}:${qtwayland.bin}/${qtbase.qtPluginPrefix}"; }) ++ [ - ./qtx11extras.patch + ./qt-dependency-paths.patch # https://github.com/NixOS/nixpkgs/issues/123851 ./fix-audio-driver-loading.patch ]; @@ -130,14 +131,17 @@ in stdenv.mkDerivation { VBOX_JAVA_HOME := ${jdk} ''} ${optionalString (!headless) '' + VBOX_WITH_VBOXSDL := 1 PATH_QT5_X11_EXTRAS_LIB := ${getLib qtx11extras}/lib PATH_QT5_X11_EXTRAS_INC := ${getDev qtx11extras}/include - TOOL_QT5_LRC := ${getDev qttools}/bin/lrelease + PATH_QT5_TOOLS_LIB := ${getLib qttools}/lib + PATH_QT5_TOOLS_INC := ${getDev qttools}/include ''} ${optionalString enableWebService '' # fix gsoap missing zlib include and produce errors with --as-needed VBOX_GSOAP_CXX_LIBS := gsoapssl++ z ''} + TOOL_QT5_LRC := ${getDev qttools}/bin/lrelease LOCAL_CONFIG ./configure \ @@ -174,7 +178,7 @@ in stdenv.mkDerivation { -name src -o -exec cp -avt "$libexec" {} + mkdir -p $out/bin - for file in ${optionalString (!headless) "VirtualBox VBoxSDL rdesktop-vrdp"} ${optionalString enableWebService "vboxwebsrv"} VBoxManage VBoxBalloonCtrl VBoxHeadless; do + for file in ${optionalString (!headless) "VirtualBox VBoxSDL"} ${optionalString enableWebService "vboxwebsrv"} VBoxManage VBoxBalloonCtrl VBoxHeadless; do echo "Linking $file to /bin" test -x "$libexec/$file" ln -s "$libexec/$file" $out/bin/$file diff --git a/pkgs/applications/virtualization/virtualbox/extpack.nix b/pkgs/applications/virtualization/virtualbox/extpack.nix index 7092ffb33dee..7e27e79d5dd7 100644 --- a/pkgs/applications/virtualization/virtualbox/extpack.nix +++ b/pkgs/applications/virtualization/virtualbox/extpack.nix @@ -12,7 +12,7 @@ fetchurl rec { # Manually sha256sum the extensionPack file, must be hex! # Thus do not use `nix-prefetch-url` but instead plain old `sha256sum`. 
# Checksums can also be found at https://www.virtualbox.org/download/hashes/${version}/SHA256SUMS - let value = "29cf8410e2514ea4393f63f5e955b8311787873679fc23ae9a897fb70ef3f84a"; + let value = "292961aa8723b54f96f89f6d8abf7d8e29259d94b7de831dbffb9ae15d346434"; in assert (builtins.stringLength value) == 64; value; meta = { diff --git a/pkgs/applications/virtualization/virtualbox/guest-additions/default.nix b/pkgs/applications/virtualization/virtualbox/guest-additions/default.nix index 1ff7f0abebaa..0601aa3e44a2 100644 --- a/pkgs/applications/virtualization/virtualbox/guest-additions/default.nix +++ b/pkgs/applications/virtualization/virtualbox/guest-additions/default.nix @@ -23,7 +23,7 @@ in stdenv.mkDerivation rec { src = fetchurl { url = "http://download.virtualbox.org/virtualbox/${version}/VBoxGuestAdditions_${version}.iso"; - sha256 = "d456c559926f1a8fdd7259056e0a50f12339fd494122cf30db7736e2032970c6"; + sha256 = "21e0f407d2a4f5c286084a70718aa20235ea75969eca0cab6cfab43a3499a010"; }; KERN_DIR = "${kernel.dev}/lib/modules/${kernel.modDirVersion}/build"; diff --git a/pkgs/applications/virtualization/virtualbox/qtx11extras.patch b/pkgs/applications/virtualization/virtualbox/qt-dependency-paths.patch similarity index 78% rename from pkgs/applications/virtualization/virtualbox/qtx11extras.patch rename to pkgs/applications/virtualization/virtualbox/qt-dependency-paths.patch index a3aa98b081d1..ae5493a327d6 100644 --- a/pkgs/applications/virtualization/virtualbox/qtx11extras.patch +++ b/pkgs/applications/virtualization/virtualbox/qt-dependency-paths.patch @@ -7,10 +7,10 @@ index 71b96a3..73391f0 100644 endif else - $(eval $(target)_LIBS += $(foreach module,$(qt_modules), $(PATH_SDK_QT5_LIB)/lib$(qt_prefix)Qt5$(module)$(qt_infix)$(SUFF_DLL)) ) -+ $(eval $(target)_LIBS += $(foreach module,$(qt_modules), $(if $(filter X11Extras,$(module)),$(PATH_QT5_X11_EXTRAS_LIB),$(PATH_SDK_QT5_LIB))/lib$(qt_prefix)Qt5$(module)$(qt_infix)$(SUFF_DLL)) ) ++ $(eval $(target)_LIBS += $(foreach module,$(qt_modules), $(if $(filter Help,$(module)),$(PATH_QT5_TOOLS_LIB),$(if $(filter X11Extras,$(module)),$(PATH_QT5_X11_EXTRAS_LIB),$(PATH_SDK_QT5_LIB)))/lib$(qt_prefix)Qt5$(module)$(qt_infix)$(SUFF_DLL)) ) endif - $(eval $(target)_INCS += $(addprefix $(PATH_SDK_QT5_INC)/Qt,$(qt_modules)) $(PATH_SDK_QT5_INC) ) -+ $(eval $(target)_INCS += $(addprefix $(PATH_SDK_QT5_INC)/Qt,$(qt_modules)) $(PATH_SDK_QT5_INC) $(PATH_QT5_X11_EXTRAS_INC)/QtX11Extras ) ++ $(eval $(target)_INCS += $(addprefix $(PATH_SDK_QT5_INC)/Qt,$(qt_modules)) $(PATH_SDK_QT5_INC) $(PATH_QT5_X11_EXTRAS_INC)/QtX11Extras $(PATH_QT5_TOOLS_INC)) endif $(eval $(target)_DEFS += $(foreach module,$(toupper $(qt_modules)), QT_$(module)_LIB) ) From e8fbf83f5e697a3d1fe1ca2fb8b74ba75dabc040 Mon Sep 17 00:00:00 2001 From: K900 Date: Sat, 4 Mar 2023 12:45:38 +0300 Subject: [PATCH 117/154] nixos/virtualbox-image: remove the raw image trick This is slower, but the raw image thing no longer works with VirtualBox 7.0.6. --- nixos/modules/virtualisation/virtualbox-image.nix | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/nixos/modules/virtualisation/virtualbox-image.nix b/nixos/modules/virtualisation/virtualbox-image.nix index bb42e6de069b..0da217fd1cb0 100644 --- a/nixos/modules/virtualisation/virtualbox-image.nix +++ b/nixos/modules/virtualisation/virtualbox-image.nix @@ -81,7 +81,7 @@ in { extraDisk = mkOption { description = lib.mdDoc '' Optional extra disk/hdd configuration. - The disk will be an 'ext4' partition on a separate VMDK file. 
+ The disk will be an 'ext4' partition on a separate file. ''; default = null; example = { @@ -183,8 +183,8 @@ in { export HOME=$PWD export PATH=${pkgs.virtualbox}/bin:$PATH - echo "creating VirtualBox pass-through disk wrapper (no copying involved)..." - VBoxManage internalcommands createrawvmdk -filename disk.vmdk -rawdisk $diskImage + echo "converting image to VirtualBox format..." + VBoxManage convertfromraw $diskImage disk.vdi ${optionalString (cfg.extraDisk != null) '' echo "creating extra disk: data-disk.raw" @@ -196,8 +196,8 @@ in { mkpart primary ext4 1MiB -1 eval $(partx $dataDiskImage -o START,SECTORS --nr 1 --pairs) mkfs.ext4 -F -L ${cfg.extraDisk.label} $dataDiskImage -E offset=$(sectorsToBytes $START) $(sectorsToKilobytes $SECTORS)K - echo "creating extra disk: data-disk.vmdk" - VBoxManage internalcommands createrawvmdk -filename data-disk.vmdk -rawdisk $dataDiskImage + echo "creating extra disk: data-disk.vdi" + VBoxManage convertfromraw $dataDiskImage data-disk.vdi ''} echo "creating VirtualBox VM..." @@ -209,10 +209,10 @@ in { ${lib.cli.toGNUCommandLineShell { } cfg.params} VBoxManage storagectl "$vmName" ${lib.cli.toGNUCommandLineShell { } cfg.storageController} VBoxManage storageattach "$vmName" --storagectl ${cfg.storageController.name} --port 0 --device 0 --type hdd \ - --medium disk.vmdk + --medium disk.vdi ${optionalString (cfg.extraDisk != null) '' VBoxManage storageattach "$vmName" --storagectl ${cfg.storageController.name} --port 1 --device 0 --type hdd \ - --medium data-disk.vmdk + --medium data-disk.vdi ''} echo "exporting VirtualBox VM..." From 3f7908a1ecd1344d43654e9dc72cc6c671d5857c Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 11:43:20 +0100 Subject: [PATCH 118/154] python310Packages.pykeyatome: 2.1.1 -> 2.1.2 --- pkgs/development/python-modules/pykeyatome/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/pykeyatome/default.nix b/pkgs/development/python-modules/pykeyatome/default.nix index 0f6786c4c4cc..363b078cad32 100644 --- a/pkgs/development/python-modules/pykeyatome/default.nix +++ b/pkgs/development/python-modules/pykeyatome/default.nix @@ -13,7 +13,7 @@ buildPythonPackage rec { pname = "pykeyatome"; - version = "2.1.1"; + version = "2.1.2"; format = "setuptools"; disabled = pythonOlder "3.8"; @@ -22,7 +22,7 @@ buildPythonPackage rec { owner = "jugla"; repo = "pyKeyAtome"; rev = "refs/tags/V${version}"; - hash = "sha256-/HfWPrpW4NowFmdmU2teIiex1O03bHemnUdhOoEDRgc="; + hash = "sha256-zRXUjekawf2/zTSlXqHVB02dDkb6HbU4NN6UBgl2rtg="; }; propagatedBuildInputs = [ From 3b7a3384de8c9900b4814e2604e5357f08b92deb Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 12:06:07 +0100 Subject: [PATCH 119/154] python310Packages.textnets: add changelog to meta --- pkgs/development/python-modules/textnets/default.nix | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/textnets/default.nix b/pkgs/development/python-modules/textnets/default.nix index 2de03cdec9d1..da3da57d35d9 100644 --- a/pkgs/development/python-modules/textnets/default.nix +++ b/pkgs/development/python-modules/textnets/default.nix @@ -28,7 +28,7 @@ buildPythonPackage rec { src = fetchFromGitHub { owner = "jboynyc"; repo = pname; - rev = "v${version}"; + rev = "refs/tags/v${version}"; hash = "sha256-BBndY+3leJBxiImuyRL7gMD5eocE4i96+97I9hDEwec="; }; @@ -61,11 +61,14 @@ buildPythonPackage rec { en_core_web_sm ]; - pythonImportsCheck = [ pname ]; + 
pythonImportsCheck = [ + "textnets" + ]; meta = with lib; { description = "Text analysis with networks"; homepage = "https://textnets.readthedocs.io"; + changelog = "https://github.com/jboynyc/textnets/blob/v${version}/HISTORY.rst"; license = licenses.gpl3Only; maintainers = with maintainers; [ jboy ]; }; From 7c922af83317ae44a557bf5a0028dcc5718b3a41 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 12:07:58 +0100 Subject: [PATCH 120/154] python310Packages.crate: add changelog to meta --- pkgs/development/python-modules/crate/default.nix | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkgs/development/python-modules/crate/default.nix b/pkgs/development/python-modules/crate/default.nix index 198c0662351b..f144a9c51fc5 100644 --- a/pkgs/development/python-modules/crate/default.nix +++ b/pkgs/development/python-modules/crate/default.nix @@ -17,7 +17,7 @@ buildPythonPackage rec { src = fetchPypi { inherit pname version; - sha256 = "sha256-8xraDCFZbpJZsh3sO5VlSHwnEfH4u4AJZkXA+L4TB60="; + hash = "sha256-8xraDCFZbpJZsh3sO5VlSHwnEfH4u4AJZkXA+L4TB60="; }; propagatedBuildInputs = [ @@ -51,6 +51,7 @@ buildPythonPackage rec { meta = with lib; { homepage = "https://github.com/crate/crate-python"; description = "A Python client library for CrateDB"; + changelog = "https://github.com/crate/crate-python/blob/${version}/CHANGES.txt"; license = licenses.asl20; maintainers = with maintainers; [ doronbehar ]; }; From e73c8f79e8e8357cbc6bbe6028ec7de845b76a2e Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 12:09:32 +0100 Subject: [PATCH 121/154] python310Packages.crate: disable on unsupported Python releases --- pkgs/development/python-modules/crate/default.nix | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/crate/default.nix b/pkgs/development/python-modules/crate/default.nix index f144a9c51fc5..4da552db399b 100644 --- a/pkgs/development/python-modules/crate/default.nix +++ b/pkgs/development/python-modules/crate/default.nix @@ -3,7 +3,7 @@ , buildPythonPackage , urllib3 , geojson -, isPy3k +, pythonOlder , sqlalchemy , pytestCheckHook , pytz @@ -13,7 +13,9 @@ buildPythonPackage rec { pname = "crate"; version = "0.30.0"; - disabled = !isPy3k; + format = "setuptools"; + + disabled = pythonOlder "3.7"; src = fetchPypi { inherit pname version; From 1596cae99e07644e3fad4ffafff9aafea06006fa Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 4 Mar 2023 12:02:43 +0000 Subject: [PATCH 122/154] plexRaw: 1.31.0.6654-02189b09f -> 1.31.1.6733-bc0674160 --- pkgs/servers/plex/raw.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/servers/plex/raw.nix b/pkgs/servers/plex/raw.nix index abc89d8e8129..5a0368d36f7d 100644 --- a/pkgs/servers/plex/raw.nix +++ b/pkgs/servers/plex/raw.nix @@ -12,16 +12,16 @@ # server, and the FHS userenv and corresponding NixOS module should # automatically pick up the changes. 
stdenv.mkDerivation rec { - version = "1.31.0.6654-02189b09f"; + version = "1.31.1.6733-bc0674160"; pname = "plexmediaserver"; # Fetch the source src = if stdenv.hostPlatform.system == "aarch64-linux" then fetchurl { url = "https://downloads.plex.tv/plex-media-server-new/${version}/debian/plexmediaserver_${version}_arm64.deb"; - sha256 = "sha256-ttkvYD+ALxfZpQutI1VyTbmQi/7hmvZ+YMUv3lskeWU="; + sha256 = "0nj9n250lhin58xlqvn2l0pjxdbajj0bla2wrgan8gs2m45nk3q9"; } else fetchurl { url = "https://downloads.plex.tv/plex-media-server-new/${version}/debian/plexmediaserver_${version}_amd64.deb"; - sha256 = "sha256-TTEcyIBFiuJTNHeJ9wu+4o2ol72oCvM9FdDPC83J3Mc="; + sha256 = "0a5h151gh1ja3frqzaqw3pj1kyh5p0wgnfmmxiz0q3zx1drjs611"; }; outputs = [ "out" "basedb" ]; From 19cba159e32c1652a98b5aaed4ab56c5df5f171c Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 13:05:46 +0100 Subject: [PATCH 123/154] python310Packages.snscrape: 0.4.3.20220106 -> 0.6.0.20230303 Diff: https://github.com/JustAnotherArchivist/snscrape/compare/refs/tags/v0.4.3.20220106...v0.6.0.20230303 --- pkgs/development/python-modules/snscrape/default.nix | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pkgs/development/python-modules/snscrape/default.nix b/pkgs/development/python-modules/snscrape/default.nix index 379159180a06..5d87fb52b2ab 100644 --- a/pkgs/development/python-modules/snscrape/default.nix +++ b/pkgs/development/python-modules/snscrape/default.nix @@ -12,16 +12,16 @@ buildPythonPackage rec { pname = "snscrape"; - version = "0.4.3.20220106"; - format = "setuptools"; + version = "0.6.0.20230303"; + format = "pyproject"; disabled = pythonOlder "3.8"; src = fetchFromGitHub { owner = "JustAnotherArchivist"; repo = pname; - rev = "v${version}"; - hash = "sha256-gphNT1IYSiAw22sqHlV8Rm4WRP4EWUvP0UkITuepmMc="; + rev = "refs/tags/v${version}"; + hash = "sha256-FY8byS+0yAhNSRxWsrsQMR5kdZmnHutru5Z6SWVfpiE="; }; SETUPTOOLS_SCM_PRETEND_VERSION = version; From a342ebe166294285c0f0280409139b1421335e47 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Na=C3=AFm=20Favier?= Date: Sat, 4 Mar 2023 13:20:49 +0100 Subject: [PATCH 124/154] lib/strings: hide asciiTable Since it is an attribute set, the code that generates source locations for lib functions tries to generate a location for each of its character entries. --- lib/strings.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/strings.nix b/lib/strings.nix index e49ed4382240..3c3529c3285e 100644 --- a/lib/strings.nix +++ b/lib/strings.nix @@ -4,6 +4,8 @@ let inherit (builtins) length; +asciiTable = import ./ascii-table.nix; + in rec { @@ -34,8 +36,6 @@ rec { unsafeDiscardStringContext ; - asciiTable = import ./ascii-table.nix; - /* Concatenate a list of strings. Type: concatStrings :: [string] -> string From 568c84c7dbe738700a261965e886d49646e0d167 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Na=C3=AFm=20Favier?= Date: Sat, 4 Mar 2023 13:23:26 +0100 Subject: [PATCH 125/154] .github: build nixpkgs manual on `lib` changes The nixpkgs manual includes documentation for `lib` functions.
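As context for the two `lib` changes above: the manual derives a source location for every public `lib` attribute via `builtins.unsafeGetAttrPos`. A minimal sketch of such a pass (a hypothetical simplification for illustration, not the actual nixpkgs generator) shows why a plain data attrset like `asciiTable` has to be hidden in a `let` binding; a naive pass would otherwise descend into it and emit a bogus location for each character entry:

  # Hypothetical sketch of a lib-function location pass; `locate` is an
  # illustration, not the real generator.
  let
    lib = import ./lib;
    locate = set:
      builtins.concatMap
        (name:
          let v = set.${name}; in
          if builtins.isAttrs v
          # A data attrset such as asciiTable would be recursed into here,
          # yielding one "function location" per character entry.
          then locate v
          else [ (builtins.unsafeGetAttrPos name set) ])
        (builtins.attrNames set);
  in
  locate lib.strings

Moving `asciiTable` into the `let` before `in rec { ... }` removes it from `builtins.attrNames lib.strings`, so a pass like this never sees it while `lib.strings` itself can still refer to it.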
--- .github/workflows/manual-nixpkgs.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/manual-nixpkgs.yml b/.github/workflows/manual-nixpkgs.yml index 599840006524..4f76a0d732c8 100644 --- a/.github/workflows/manual-nixpkgs.yml +++ b/.github/workflows/manual-nixpkgs.yml @@ -8,6 +8,7 @@ on: - master paths: - 'doc/**' + - 'lib/**' jobs: nixpkgs: From 2f8164310a51f2fa22d3667a0d6ec2ec0fa3ab4e Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 13:32:34 +0100 Subject: [PATCH 126/154] grype: 0.58.0 -> 0.59.0 Diff: https://github.com/anchore/grype.git/compare/v0.58.0...v0.59.0 Changelog: https://github.com/anchore/grype/releases/tag/v0.59.0 --- pkgs/tools/security/grype/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/tools/security/grype/default.nix b/pkgs/tools/security/grype/default.nix index 926a48bfb0cb..2585b7b4746f 100644 --- a/pkgs/tools/security/grype/default.nix +++ b/pkgs/tools/security/grype/default.nix @@ -8,13 +8,13 @@ buildGoModule rec { pname = "grype"; - version = "0.58.0"; + version = "0.59.0"; src = fetchFromGitHub { owner = "anchore"; repo = pname; rev = "v${version}"; - hash = "sha256-7yb6ufRoAB70hnoBv6ZwEtKeTJxxmWEknksCmM55eYE="; + hash = "sha256-TAoF67Fxl0OUiQd48h786+lIsdEuk4C/zdeEO/DRX/k="; # populate values that require us to use git. By doing this in postFetch we # can delete .git afterwards and maintain better reproducibility of the src. leaveDotGit = true; @@ -28,7 +28,7 @@ buildGoModule rec { }; proxyVendor = true; - vendorHash = "sha256-7i9/tufEUGVqNHP61pQuIK2tMdiBcs3vfFz1bzlHFKk="; + vendorHash = "sha256-kRxKa3HUO2yvMai03voVvsprg/Kd01OtJQHJn3ECk58="; nativeBuildInputs = [ installShellFiles From 3eaa74f21e9aec13892484f7073fbea76059bd4b Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 14:03:57 +0100 Subject: [PATCH 127/154] grype: clean-up --- pkgs/tools/security/grype/default.nix | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pkgs/tools/security/grype/default.nix b/pkgs/tools/security/grype/default.nix index 2585b7b4746f..8334b034841d 100644 --- a/pkgs/tools/security/grype/default.nix +++ b/pkgs/tools/security/grype/default.nix @@ -2,7 +2,6 @@ , buildGoModule , fetchFromGitHub , installShellFiles - , openssl }: @@ -13,7 +12,7 @@ buildGoModule rec { src = fetchFromGitHub { owner = "anchore"; repo = pname; - rev = "v${version}"; + rev = "refs/tags/v${version}"; hash = "sha256-TAoF67Fxl0OUiQd48h786+lIsdEuk4C/zdeEO/DRX/k="; # populate values that require us to use git. By doing this in postFetch we # can delete .git afterwards and maintain better reproducibility of the src. @@ -26,6 +25,7 @@ buildGoModule rec { find "$out" -name .git -print0 | xargs -0 rm -rf ''; }; + proxyVendor = true; vendorHash = "sha256-kRxKa3HUO2yvMai03voVvsprg/Kd01OtJQHJn3ECk58="; @@ -34,6 +34,10 @@ buildGoModule rec { installShellFiles ]; + nativeCheckInputs = [ + openssl + ]; + subPackages = [ "." 
]; excludedPackages = "test/integration"; @@ -55,7 +59,6 @@ buildGoModule rec { ldflags+=" -X github.com/anchore/grype/internal/version.buildDate=$(cat SOURCE_DATE_EPOCH)" ''; - nativeCheckInputs = [ openssl ]; preCheck = '' # test all dirs (except excluded) unset subPackages From 823f21c455b60262684cd66a7cf329c472f31e22 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 14:25:47 +0100 Subject: [PATCH 128/154] python310Packages.adb-enhanced: 2.5.14 -> 2.5.16 Diff: https://github.com/ashishb/adb-enhanced/compare/2.5.14...2.5.16 --- pkgs/development/python-modules/adb-enhanced/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/adb-enhanced/default.nix b/pkgs/development/python-modules/adb-enhanced/default.nix index 79fc34cc0ecb..12a1f9f59fed 100644 --- a/pkgs/development/python-modules/adb-enhanced/default.nix +++ b/pkgs/development/python-modules/adb-enhanced/default.nix @@ -9,7 +9,7 @@ buildPythonPackage rec { pname = "adb-enhanced"; - version = "2.5.14"; + version = "2.5.16"; disabled = pythonOlder "3.4"; @@ -17,7 +17,7 @@ buildPythonPackage rec { owner = "ashishb"; repo = pname; rev = version; - sha256 = "sha256-GaPOYBQEGI40MutjjY8exABqGge2p/buk9v+NcZ5oJs="; + sha256 = "sha256-+CMXKg3LLxEXGcFQ9zSqy/1HPZS9MsQ1fZxClJ0Vrnw="; }; propagatedBuildInputs = [ From 7da5a7c203edbbc41923d82dafb7e691cc42d5e3 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 14:30:25 +0100 Subject: [PATCH 129/154] python310Packages.ciscoconfparse: 1.7.15 -> 1.7.18 Diff: https://github.com/mpenning/ciscoconfparse/compare/refs/tags/1.7.15...1.7.18 Changelog: https://github.com/mpenning/ciscoconfparse/blob/1.7.18/CHANGES.md --- pkgs/development/python-modules/ciscoconfparse/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/ciscoconfparse/default.nix b/pkgs/development/python-modules/ciscoconfparse/default.nix index 52cd1a8cf28a..803cfb7b6fa0 100644 --- a/pkgs/development/python-modules/ciscoconfparse/default.nix +++ b/pkgs/development/python-modules/ciscoconfparse/default.nix @@ -13,7 +13,7 @@ buildPythonPackage rec { pname = "ciscoconfparse"; - version = "1.7.15"; + version = "1.7.18"; format = "pyproject"; disabled = pythonOlder "3.7"; @@ -22,7 +22,7 @@ buildPythonPackage rec { owner = "mpenning"; repo = pname; rev = "refs/tags/${version}"; - hash = "sha256-oGvwtaIgVvvW8Oq/dZN+Zj/PESpqWALFYPia9yeilco="; + hash = "sha256-jWInSqvMuwYJTPqHnrYWhMH/HvaQc2dFRqQu4RGFr28="; }; postPatch = '' From b3209f6dbc9b6431b8d14044b5a03a0c66f3108c Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Sat, 4 Mar 2023 14:10:39 +0000 Subject: [PATCH 130/154] git-machete: 3.15.2 -> 3.16.0 --- pkgs/applications/version-management/git-machete/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/version-management/git-machete/default.nix b/pkgs/applications/version-management/git-machete/default.nix index 885f6d1355e9..455b7497aa96 100644 --- a/pkgs/applications/version-management/git-machete/default.nix +++ b/pkgs/applications/version-management/git-machete/default.nix @@ -12,13 +12,13 @@ buildPythonApplication rec { pname = "git-machete"; - version = "3.15.2"; + version = "3.16.0"; src = fetchFromGitHub { owner = "virtuslab"; repo = pname; rev = "v${version}"; - hash = "sha256-hIm3JDLXUTwjuVfAHvZBWFBJNOAVWyfl/X4A6B0OoXg="; + hash = "sha256-94qYCyWqVwMMptlJIe4o4/mEHnhcMubcupd+Qs2SYH0="; }; nativeBuildInputs = [ installShellFiles ]; From 754f04afd41a11a83a7c6a5641534e9d8454eee3 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 15:47:03 +0100 Subject: [PATCH 131/154] python310Packages.adb-enhanced: update disabled --- pkgs/development/python-modules/adb-enhanced/default.nix | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pkgs/development/python-modules/adb-enhanced/default.nix b/pkgs/development/python-modules/adb-enhanced/default.nix index 12a1f9f59fed..7f6c2d109801 100644 --- a/pkgs/development/python-modules/adb-enhanced/default.nix +++ b/pkgs/development/python-modules/adb-enhanced/default.nix @@ -10,14 +10,15 @@ buildPythonPackage rec { pname = "adb-enhanced"; version = "2.5.16"; + format = "setuptools"; - disabled = pythonOlder "3.4"; + disabled = pythonOlder "3.7"; src = fetchFromGitHub { owner = "ashishb"; repo = pname; - rev = version; - sha256 = "sha256-+CMXKg3LLxEXGcFQ9zSqy/1HPZS9MsQ1fZxClJ0Vrnw="; + rev = "refs/tags/${version}"; + hash = "sha256-+CMXKg3LLxEXGcFQ9zSqy/1HPZS9MsQ1fZxClJ0Vrnw="; }; propagatedBuildInputs = [ From 3239d056982878a82e503a9b48eb33b93056ae1c Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 16:03:47 +0100 Subject: [PATCH 132/154] dt-schema: move to python-modules --- .../{tools/dt-schema => python-modules/dtschema}/default.nix | 0 pkgs/top-level/all-packages.nix | 2 +- pkgs/top-level/python-packages.nix | 2 ++ 3 files changed, 3 insertions(+), 1 deletion(-) rename pkgs/development/{tools/dt-schema => python-modules/dtschema}/default.nix (100%) diff --git a/pkgs/development/tools/dt-schema/default.nix b/pkgs/development/python-modules/dtschema/default.nix similarity index 100% rename from pkgs/development/tools/dt-schema/default.nix rename to pkgs/development/python-modules/dtschema/default.nix diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix index 8086a44fe793..8389b8f0249c 100644 --- a/pkgs/top-level/all-packages.nix +++ b/pkgs/top-level/all-packages.nix @@ -6761,7 +6761,7 @@ with pkgs; dtc = callPackage ../development/compilers/dtc { }; - dt-schema = python3Packages.callPackage ../development/tools/dt-schema { }; + dt-schema = with python3Packages; toPythonApplication dtschema; dub = callPackage ../development/tools/build-managers/dub { }; diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix index 1ac6c38143bb..c53230e62fc5 100644 --- a/pkgs/top-level/python-packages.nix +++ b/pkgs/top-level/python-packages.nix @@ -2898,6 +2898,8 @@ self: super: with self; { dtlssocket = callPackage ../development/python-modules/dtlssocket { }; + dtschema = callPackage 
../development/python-modules/dtschema { }; + ducc0 = callPackage ../development/python-modules/ducc0 { }; duckdb = callPackage ../development/python-modules/duckdb { From 76f91af215e34f494758a6ccf5d7bb2d4c1716be Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 16:05:27 +0100 Subject: [PATCH 133/154] python310Packages.dtschema: update license --- pkgs/development/python-modules/dtschema/default.nix | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/dtschema/default.nix b/pkgs/development/python-modules/dtschema/default.nix index 20e1ed136e2b..e76f98c08abd 100644 --- a/pkgs/development/python-modules/dtschema/default.nix +++ b/pkgs/development/python-modules/dtschema/default.nix @@ -29,8 +29,7 @@ buildPythonPackage rec { meta = with lib; { description = "Tooling for devicetree validation using YAML and jsonschema"; homepage = "https://github.com/devicetree-org/dt-schema/"; - # all files have SPDX tags - license = with licenses; [ bsd2 gpl2 ]; + license = with licenses; [ bsd2 /* or */ gpl2Only ]; maintainers = with maintainers; [ sorki ]; }; } From 7f7b1782842ca88c75a0ea3e216ed239c1f3f8ee Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 16:06:03 +0100 Subject: [PATCH 134/154] python310Packages.dtschema: disable on unsupported Python releases --- pkgs/development/python-modules/dtschema/default.nix | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pkgs/development/python-modules/dtschema/default.nix b/pkgs/development/python-modules/dtschema/default.nix index e76f98c08abd..c8a044a54f5c 100644 --- a/pkgs/development/python-modules/dtschema/default.nix +++ b/pkgs/development/python-modules/dtschema/default.nix @@ -12,6 +12,9 @@ buildPythonPackage rec { pname = "dtschema"; version = "2022.1"; + format = "setuptools"; + + disabled = pythonOlder "3.7"; src = fetchPypi { inherit pname version; From b1c19e1bf53f848eab0c2192281726cca78a497c Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 16:06:39 +0100 Subject: [PATCH 135/154] python310Packages.dtschema: add changelog to meta --- pkgs/development/python-modules/dtschema/default.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/pkgs/development/python-modules/dtschema/default.nix b/pkgs/development/python-modules/dtschema/default.nix index c8a044a54f5c..377c315d9d92 100644 --- a/pkgs/development/python-modules/dtschema/default.nix +++ b/pkgs/development/python-modules/dtschema/default.nix @@ -32,6 +32,7 @@ buildPythonPackage rec { meta = with lib; { description = "Tooling for devicetree validation using YAML and jsonschema"; homepage = "https://github.com/devicetree-org/dt-schema/"; + changelog = "https://github.com/devicetree-org/dt-schema/releases/tag/v${version}"; license = with licenses; [ bsd2 /* or */ gpl2Only ]; maintainers = with maintainers; [ sorki ]; }; From 78409f3c62fa8cbe4b8bb70859f832972201751f Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 16:07:20 +0100 Subject: [PATCH 136/154] python310Packages.dtschema: add pythonImportsCheck --- pkgs/development/python-modules/dtschema/default.nix | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkgs/development/python-modules/dtschema/default.nix b/pkgs/development/python-modules/dtschema/default.nix index 377c315d9d92..cb043872a82c 100644 --- a/pkgs/development/python-modules/dtschema/default.nix +++ b/pkgs/development/python-modules/dtschema/default.nix @@ -29,6 +29,10 @@ buildPythonPackage rec { rfc3987 ]; + pythonImportsCheck = [ + "dtschema" + ]; + 
meta = with lib; { description = "Tooling for devicetree validation using YAML and jsonschema"; homepage = "https://github.com/devicetree-org/dt-schema/"; From 42a0ba411dbca26c636887e58e6359184f31f47f Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 16:24:16 +0100 Subject: [PATCH 137/154] aiodnsbrute: add changelog to meta --- pkgs/tools/security/aiodnsbrute/default.nix | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pkgs/tools/security/aiodnsbrute/default.nix b/pkgs/tools/security/aiodnsbrute/default.nix index f1d170e7d599..83fe6a00ed25 100644 --- a/pkgs/tools/security/aiodnsbrute/default.nix +++ b/pkgs/tools/security/aiodnsbrute/default.nix @@ -14,8 +14,8 @@ buildPythonApplication rec { src = fetchFromGitHub { owner = "blark"; repo = pname; - rev = "v${version}"; - sha256 = "sha256-cEpk71VoQJZfKeAZummkk7yjtXKSMndgo0VleYiMlWE="; + rev = "refs/tags/v${version}"; + hash = "sha256-cEpk71VoQJZfKeAZummkk7yjtXKSMndgo0VleYiMlWE="; }; # https://github.com/blark/aiodnsbrute/pull/8 @@ -33,12 +33,14 @@ buildPythonApplication rec { # no tests present doCheck = false; - pythonImportsCheck = [ "aiodnsbrute.cli" ]; + pythonImportsCheck = [ + "aiodnsbrute.cli" + ]; meta = with lib; { description = "DNS brute force utility"; homepage = "https://github.com/blark/aiodnsbrute"; - # https://github.com/blark/aiodnsbrute/issues/5 + changelog = "https://github.com/blark/aiodnsbrute/releases/tag/v${version}"; license = with licenses; [ gpl3Only ]; maintainers = with maintainers; [ fab ]; }; From 8fc7ae71cef66ec6d24778c6cda34317fbfcb6e1 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 16:29:32 +0100 Subject: [PATCH 138/154] aiodnsbrute: use python3.pkgs - remove postPatch section --- pkgs/tools/security/aiodnsbrute/default.nix | 26 +++++++-------------- pkgs/top-level/all-packages.nix | 2 +- 2 files changed, 10 insertions(+), 18 deletions(-) diff --git a/pkgs/tools/security/aiodnsbrute/default.nix b/pkgs/tools/security/aiodnsbrute/default.nix index 83fe6a00ed25..c11255e6ab80 100644 --- a/pkgs/tools/security/aiodnsbrute/default.nix +++ b/pkgs/tools/security/aiodnsbrute/default.nix @@ -1,15 +1,12 @@ { lib -, buildPythonApplication , fetchFromGitHub -, aiodns -, click -, tqdm -, uvloop +, python3 }: -buildPythonApplication rec { +python3.pkgs.buildPythonApplication rec { pname = "aiodnsbrute"; version = "0.3.3"; + format = "setuptools"; src = fetchFromGitHub { owner = "blark"; @@ -18,19 +15,14 @@ buildPythonApplication rec { hash = "sha256-cEpk71VoQJZfKeAZummkk7yjtXKSMndgo0VleYiMlWE="; }; - # https://github.com/blark/aiodnsbrute/pull/8 - prePatch = '' - substituteInPlace setup.py --replace " 'asyncio', " "" - ''; - - propagatedBuildInputs = [ - aiodns - click - tqdm - uvloop + propagatedBuildInputs = with python3.pkgs; [ + aiodns + click + tqdm + uvloop ]; - # no tests present + # Project has no tests doCheck = false; pythonImportsCheck = [ diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix index 8086a44fe793..6924b66a6cca 100644 --- a/pkgs/top-level/all-packages.nix +++ b/pkgs/top-level/all-packages.nix @@ -1362,7 +1362,7 @@ with pkgs; aioblescan = with python3Packages; toPythonApplication aioblescan; - aiodnsbrute = python3Packages.callPackage ../tools/security/aiodnsbrute { }; + aiodnsbrute = callPackage ../tools/security/aiodnsbrute { }; aircrack-ng = callPackage ../tools/networking/aircrack-ng { }; From 046c0a144709a70d0dba1c4e455f18250c4b4342 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 16:48:28
+0100 Subject: [PATCH 139/154] python310Packages.dtschema: modernize --- .../python-modules/dtschema/default.nix | 30 ++++++++++++------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/pkgs/development/python-modules/dtschema/default.nix b/pkgs/development/python-modules/dtschema/default.nix index cb043872a82c..f2212d8c9c78 100644 --- a/pkgs/development/python-modules/dtschema/default.nix +++ b/pkgs/development/python-modules/dtschema/default.nix @@ -1,34 +1,42 @@ { lib , buildPythonPackage -, fetchPypi -, git -, ruamel-yaml +, fetchFromGitHub , jsonschema +, pythonOlder , rfc3987 -, setuptools +, ruamel-yaml , setuptools-scm }: buildPythonPackage rec { pname = "dtschema"; - version = "2022.1"; + version = "2022.01"; format = "setuptools"; disabled = pythonOlder "3.7"; - src = fetchPypi { - inherit pname version; - sha256 = "sha256-G5KzuaMbbkuLK+cNvzBld1UwvExS6ZGVW2e+GXQRFMU="; + src = fetchFromGitHub { + owner = "devicetree-org"; + repo = "dt-schema"; + rev = "refs/tags/v${version}"; + hash = "sha256-wwlXIM/eO3dII/qQpkAGLT3/15rBLi7ZiNtqYFf7Li4="; }; - nativeBuildInputs = [ setuptools-scm git ]; + SETUPTOOLS_SCM_PRETEND_VERSION = version; + + nativeBuildInputs = [ + setuptools-scm + ]; + propagatedBuildInputs = [ - setuptools - ruamel-yaml jsonschema rfc3987 + ruamel-yaml ]; + # Module has no tests + doCheck = false; + pythonImportsCheck = [ "dtschema" ]; From 946be78ef3c057166e36dbec61eaf9f305b74387 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 4 Mar 2023 16:48:40 +0000 Subject: [PATCH 140/154] nixpacks: 1.4.0 -> 1.4.1 --- pkgs/applications/virtualization/nixpacks/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/virtualization/nixpacks/default.nix b/pkgs/applications/virtualization/nixpacks/default.nix index c8072c903d26..695cd970b7bb 100644 --- a/pkgs/applications/virtualization/nixpacks/default.nix +++ b/pkgs/applications/virtualization/nixpacks/default.nix @@ -2,16 +2,16 @@ rustPlatform.buildRustPackage rec { pname = "nixpacks"; - version = "1.4.0"; + version = "1.4.1"; src = fetchFromGitHub { owner = "railwayapp"; repo = pname; rev = "v${version}"; - sha256 = "sha256-v9ycluLfkrPDzjsMXtv7w9UHgMaGzTsJw4lT/KfRAu4="; + sha256 = "sha256-zxgNHzKXekZnk0OsHw30u4L9U2mIT/MryZuAQ2EBEYg="; }; - cargoHash = "sha256-wVQEa1qS+JF6PHKvRrRFbSvj2qp6j14ErOQPkxP0uuA="; + cargoHash = "sha256-tsGyrU/5yp5PJ2d5HUoaw/jhGgYyDt6qBK+DvC79kmY="; # skip test due FHS dependency doCheck = false; From ce5093b7872009c27070520e48f460a2e16e6e6f Mon Sep 17 00:00:00 2001 From: Robert Scott Date: Sun, 25 Sep 2022 21:40:28 +0100 Subject: [PATCH 141/154] bind: enable unit tests --- pkgs/servers/dns/bind/default.nix | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/pkgs/servers/dns/bind/default.nix b/pkgs/servers/dns/bind/default.nix index 2047436d8ce6..952ef5690201 100644 --- a/pkgs/servers/dns/bind/default.nix +++ b/pkgs/servers/dns/bind/default.nix @@ -4,6 +4,7 @@ , enablePython ? false, python3 , enableGSSAPI ? 
true, libkrb5 , buildPackages, nixosTests +, cmocka, tzdata }: stdenv.mkDerivation rec { @@ -59,8 +60,18 @@ stdenv.mkDerivation rec { EOF ''; - doCheck = false; # requires root and the net enableParallelBuilding = true; + doCheck = !stdenv.hostPlatform.isStatic; + checkTarget = "unit"; + checkInputs = [ + cmocka + ] ++ lib.optionals (!stdenv.hostPlatform.isMusl) [ + tzdata + ]; + preCheck = lib.optionalString stdenv.hostPlatform.isMusl '' + # musl doesn't respect TZDIR, skip timezone-related tests + sed -i '/^ISC_TEST_ENTRY(isc_time_formatISO8601L/d' tests/isc/time_test.c + ''; passthru.tests = { inherit (nixosTests) bind; From 5552d9e3b0c67d96bee1f81b59847d1670703fb6 Mon Sep 17 00:00:00 2001 From: figsoda Date: Sat, 4 Mar 2023 12:06:42 -0500 Subject: [PATCH 142/154] nix-update: 0.15.0 -> 0.15.1 Diff: https://github.com/Mic92/nix-update/compare/0.15.0...0.15.1 Changelog: https://github.com/Mic92/nix-update/releases/tag/0.15.1 --- pkgs/tools/package-management/nix-update/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/tools/package-management/nix-update/default.nix b/pkgs/tools/package-management/nix-update/default.nix index a24fd43bc6fa..8b8e6d811d53 100644 --- a/pkgs/tools/package-management/nix-update/default.nix +++ b/pkgs/tools/package-management/nix-update/default.nix @@ -8,14 +8,14 @@ buildPythonApplication rec { pname = "nix-update"; - version = "0.15.0"; + version = "0.15.1"; format = "setuptools"; src = fetchFromGitHub { owner = "Mic92"; repo = pname; rev = version; - sha256 = "sha256-Q3yExefODBrrziRnCYETrJgSn42BOR7ZsL8pu3q5D/w="; + sha256 = "sha256-AYw2czg8HwA/ATQZO0snfb5GRsz77J6cPGDQ8b4W6AI="; }; makeWrapperArgs = [ From aa571e2200aab0ccb3e348f99e465141250cf644 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 4 Mar 2023 17:55:21 +0000 Subject: [PATCH 143/154] python310Packages.duckdb-engine: 0.6.8 -> 0.6.9 --- pkgs/development/python-modules/duckdb-engine/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/duckdb-engine/default.nix b/pkgs/development/python-modules/duckdb-engine/default.nix index ea812cb9d5ea..9ab39eabf908 100644 --- a/pkgs/development/python-modules/duckdb-engine/default.nix +++ b/pkgs/development/python-modules/duckdb-engine/default.nix @@ -13,7 +13,7 @@ buildPythonPackage rec { pname = "duckdb-engine"; - version = "0.6.8"; + version = "0.6.9"; format = "pyproject"; disabled = pythonOlder "3.7"; @@ -22,7 +22,7 @@ buildPythonPackage rec { repo = "duckdb_engine"; owner = "Mause"; rev = "refs/tags/v${version}"; - hash = "sha256-Vb2sXZjhBZpZdemtGZ8dajB9Ziu/obLv80R63IH/hJg="; + hash = "sha256-F1Y7NXkNnCbCxc43gBN7bt+z0D0EwnzCyBKFzbq9KcA="; }; nativeBuildInputs = [ From 152557184c48686554458a6ba23d8756b9ae584a Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Sat, 4 Mar 2023 18:59:01 +0000 Subject: [PATCH 144/154] rccl: 5.4.2 -> 5.4.3 --- pkgs/development/libraries/rccl/default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/development/libraries/rccl/default.nix b/pkgs/development/libraries/rccl/default.nix index d941cb19998e..b3aaaff82f08 100644 --- a/pkgs/development/libraries/rccl/default.nix +++ b/pkgs/development/libraries/rccl/default.nix @@ -13,7 +13,7 @@ stdenv.mkDerivation (finalAttrs: { pname = "rccl"; - version = "5.4.2"; + version = "5.4.3"; outputs = [ "out" From 419ff35b403af0340d5a14fa03ab364817370b08 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Sat, 4 Mar 2023 19:44:09 +0000 Subject: [PATCH 145/154] _5etools: Disable hydra builds The source tarball is too large for hydras output limit. https://hydra.nixos.org/build/210505879 --- pkgs/servers/web-apps/5etools/default.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/pkgs/servers/web-apps/5etools/default.nix b/pkgs/servers/web-apps/5etools/default.nix index 03335be5ae84..856384aa173f 100644 --- a/pkgs/servers/web-apps/5etools/default.nix +++ b/pkgs/servers/web-apps/5etools/default.nix @@ -15,5 +15,6 @@ fetchFromGitHub rec { changelog = "https://github.com/5etools-mirror-1/5etools-mirror-1.github.io/releases/tag/v${version}"; license = [ licenses.mit ]; maintainers = with maintainers; [ urandom ]; + hydraPlatforms = []; # src tarball is 4.7G, unpackeed 4.8G, exceeds hydras output limit }; } From 3d20075cddf57e5b8c8ccff5a67d9b4206cd553c Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 4 Mar 2023 20:36:04 +0100 Subject: [PATCH 146/154] syncstorage-rs: 0.13.2 -> 0.13.5 --- pkgs/servers/syncstorage-rs/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/servers/syncstorage-rs/default.nix b/pkgs/servers/syncstorage-rs/default.nix index 65189094d0c7..a99db0d40d90 100644 --- a/pkgs/servers/syncstorage-rs/default.nix +++ b/pkgs/servers/syncstorage-rs/default.nix @@ -21,13 +21,13 @@ in rustPlatform.buildRustPackage rec { pname = "syncstorage-rs"; - version = "0.13.2"; + version = "0.13.5"; src = fetchFromGitHub { owner = "mozilla-services"; repo = pname; rev = version; - hash = "sha256-zxpqQpzmPPU6V5QITK9SgAAI7l3/7+h0u3/bZgiU7y4="; + hash = "sha256-eFrrZ/+8OsmIfCEoXPAKqVkZlgN8sfXueJQvQN8VCB0="; }; nativeBuildInputs = [ @@ -47,7 +47,7 @@ rustPlatform.buildRustPackage rec { --prefix PATH : ${lib.makeBinPath [ pyFxADeps ]} ''; - cargoHash = "sha256-U0xHqOh0ii4PE9UYKo+diqSoZ1ZjzBmHILvAhHSZD0A="; + cargoHash = "sha256-SgOxXzI6IZcP5Q06Aj5Pv6Rrvb7xVShUcGaViLuESOw="; buildFeatures = [ "grpcio/openssl" ]; From b7533cfe685a3d8a14612186a00f270a616b61ee Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Sat, 4 Mar 2023 20:01:02 +0000 Subject: [PATCH 147/154] rocprofiler: 5.4.2 -> 5.4.3 --- pkgs/development/libraries/rocprofiler/default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/development/libraries/rocprofiler/default.nix b/pkgs/development/libraries/rocprofiler/default.nix index 7dbde6e1633e..e7e0c9fed650 100644 --- a/pkgs/development/libraries/rocprofiler/default.nix +++ b/pkgs/development/libraries/rocprofiler/default.nix @@ -11,7 +11,7 @@ stdenv.mkDerivation (finalAttrs: { pname = "rocprofiler"; - version = "5.4.2"; + version = "5.4.3"; src = fetchFromGitHub { owner = "ROCm-Developer-Tools"; From 56b839cd96a0cbb391d3eb3cb5981273e42e3e34 Mon Sep 17 00:00:00 2001 From: Jonas Nick Date: Sat, 4 Mar 2023 20:09:57 +0000 Subject: [PATCH 148/154] clightning: 22.11.1 -> 23.02 --- pkgs/applications/blockchains/clightning/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/blockchains/clightning/default.nix b/pkgs/applications/blockchains/clightning/default.nix index 45854ddcd631..700a65f3a58d 100644 --- a/pkgs/applications/blockchains/clightning/default.nix +++ b/pkgs/applications/blockchains/clightning/default.nix @@ -22,11 +22,11 @@ let in stdenv.mkDerivation rec { pname = "clightning"; - version = "22.11.1"; + version = "23.02"; src = fetchurl { url = "https://github.com/ElementsProject/lightning/releases/download/v${version}/clightning-v${version}.zip"; - sha256 = "sha256-F48jmG9voNp6+IMRVkJi6O0DXVQxKyYkOA0UBCKktIw="; + sha256 = "sha256-uvk7sApIwlrkH8eERBetf/nsAkN2d35T/IEtICFflzY="; }; # when building on darwin we need dawin.cctools to provide the correct libtool From 85aed82e536bddd42d5dd5efa1aace9d8e64ddd9 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 21:35:55 +0100 Subject: [PATCH 149/154] qovery-cli: 0.49.0 -> 0.50.3 Diff: https://github.com/Qovery/qovery-cli/compare/v0.49.0...v0.50.3 Changelog: https://github.com/Qovery/qovery-cli/releases/tag/v0.50.3 --- pkgs/tools/admin/qovery-cli/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/tools/admin/qovery-cli/default.nix b/pkgs/tools/admin/qovery-cli/default.nix index 3b35207e8499..fa31ac964ff9 100644 --- a/pkgs/tools/admin/qovery-cli/default.nix +++ b/pkgs/tools/admin/qovery-cli/default.nix @@ -8,16 +8,16 @@ buildGoModule rec { pname = "qovery-cli"; - version = "0.49.0"; + version = "0.50.3"; src = fetchFromGitHub { owner = "Qovery"; repo = pname; rev = "v${version}"; - hash = "sha256-O5JUWD7Wbe/5BM5fr6z76Re7PpRwFJV++lze+pv5el0="; + hash = "sha256-kvIY6BBkyV5TmpT8bhrn+OIP3/rbCy0EKxsFLIIFp8U="; }; - vendorHash = "sha256-Hb4bqOK4h68ZCN/bTPQLd4hC7oZUrj21DupVA4GrlNA="; + vendorHash = "sha256-595Z6/jt+d81QMIKcbg7Y5UMtF8hnZipiBkt1LQt2AI="; nativeBuildInputs = [ installShellFiles ]; From e11c196f848ec4ad1e5df815c0062f04f7468c6c Mon Sep 17 00:00:00 2001 From: Dmitry Kalinkin Date: Sat, 4 Mar 2023 15:45:31 -0500 Subject: [PATCH 150/154] python310Packages.dask-awkward: init at 2023.1.0 (#209527) --- .../python-modules/dask-awkward/default.nix | 60 +++++++++++++++++++ pkgs/top-level/python-packages.nix | 2 + 2 files changed, 62 insertions(+) create mode 100644 pkgs/development/python-modules/dask-awkward/default.nix diff --git a/pkgs/development/python-modules/dask-awkward/default.nix b/pkgs/development/python-modules/dask-awkward/default.nix new file mode 100644 index 000000000000..6dd42fcc3c0a --- /dev/null +++ b/pkgs/development/python-modules/dask-awkward/default.nix @@ -0,0 +1,60 @@ +{ lib 
+, buildPythonPackage +, fetchFromGitHub +, pythonOlder +, awkward +, dask +, hatch-vcs +, hatchling +, pyarrow +, pytestCheckHook +}: + +buildPythonPackage rec { + pname = "dask-awkward"; + version = "2023.1.0"; + format = "pyproject"; + + disabled = pythonOlder "3.8"; + + src = fetchFromGitHub { + owner = "dask-contrib"; + repo = pname; + rev = version; + hash = "sha256-q0mBd4yelnNL7rMWfilituo9h/xmLLLndSCBdY2egEQ="; + }; + + nativeBuildInputs = [ + hatch-vcs + hatchling + ]; + + propagatedBuildInputs = [ + awkward + dask + ]; + + SETUPTOOLS_SCM_PRETEND_VERSION = version; + + checkInputs = [ + pytestCheckHook + pyarrow + ]; + + pythonImportsCheck = [ + "dask_awkward" + ]; + + pytestFlagsArray = [ + # require internet + "--deselect=tests/test_parquet.py::test_remote_double" + "--deselect=tests/test_parquet.py::test_remote_single" + ]; + + meta = with lib; { + description = "Native Dask collection for awkward arrays, and the library to use it"; + homepage = "https://github.com/dask-contrib/dask-awkward"; + license = licenses.bsd3; + maintainers = with maintainers; [ veprbl ]; + }; +} diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix index 1ac6c38143bb..032fa51adf4f 100644 --- a/pkgs/top-level/python-packages.nix +++ b/pkgs/top-level/python-packages.nix @@ -2233,6 +2233,8 @@ self: super: with self; { dask = callPackage ../development/python-modules/dask { }; + dask-awkward = callPackage ../development/python-modules/dask-awkward { }; + dask-gateway = callPackage ../development/python-modules/dask-gateway { }; dask-gateway-server = callPackage ../development/python-modules/dask-gateway-server { }; From 7b8f071f55380162248f5c1d02190502d779dca0 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 4 Mar 2023 20:56:01 +0000 Subject: [PATCH 151/154] cloudfox: 1.9.1 -> 1.10.0 --- pkgs/tools/security/cloudfox/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/tools/security/cloudfox/default.nix b/pkgs/tools/security/cloudfox/default.nix index f03a1a17171a..dc58effa2ac2 100644 --- a/pkgs/tools/security/cloudfox/default.nix +++ b/pkgs/tools/security/cloudfox/default.nix @@ -5,16 +5,16 @@ buildGoModule rec { pname = "cloudfox"; - version = "1.9.1"; + version = "1.10.0"; src = fetchFromGitHub { owner = "BishopFox"; repo = pname; rev = "refs/tags/v${version}"; - hash = "sha256-TV2knPG5n5l8APeAmpDfu6vQLtEhjqH21JXAZLk0DDI="; + hash = "sha256-kB6nH/5/76r9SGyaFPXjwgZ+b5ha85Z7v1GFNgqluDY="; }; - vendorHash = "sha256-xMHlooXuLECQi7co2/WvY0TIoV0S5OgcBklICCFk3ls="; + vendorHash = "sha256-v8rEsp2mDgfjCO2VvWNIxex8F350MDnZ40bR4szv+3o="; # Some tests are failing because of wrong filename/path doCheck = false; From 4dd39ea359fc84c4055706284e71b32bbfd969dc Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 4 Mar 2023 22:55:40 +0100 Subject: [PATCH 152/154] syncstorage-rs: add changelog to meta --- pkgs/servers/syncstorage-rs/default.nix | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkgs/servers/syncstorage-rs/default.nix b/pkgs/servers/syncstorage-rs/default.nix index a99db0d40d90..e71d71dc78c9 100644 --- a/pkgs/servers/syncstorage-rs/default.nix +++ b/pkgs/servers/syncstorage-rs/default.nix @@ -26,7 +26,7 @@ rustPlatform.buildRustPackage rec { src = fetchFromGitHub { owner = "mozilla-services"; repo = pname; - rev = version; + rev = "refs/tags/${version}"; hash = "sha256-eFrrZ/+8OsmIfCEoXPAKqVkZlgN8sfXueJQvQN8VCB0="; }; @@ -57,6 +57,7 @@ rustPlatform.buildRustPackage rec { meta = { description = "Mozilla Sync 
Storage built with Rust"; homepage = "https://github.com/mozilla-services/syncstorage-rs"; + changelog = "https://github.com/mozilla-services/syncstorage-rs/releases/tag/${version}"; license = lib.licenses.mpl20; maintainers = with lib.maintainers; [ pennae ]; platforms = lib.platforms.linux; From ebf00a9a445f8a34b707951eb5e8cd2ca7032a5e Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sun, 5 Mar 2023 00:07:18 +0100 Subject: [PATCH 153/154] python310Packages.pyfritzhome: 0.6.7 -> 0.6.8 Diff: https://github.com/hthiery/python-fritzhome/compare/0.6.7...0.6.8 --- .../python-modules/pyfritzhome/default.nix | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/pkgs/development/python-modules/pyfritzhome/default.nix b/pkgs/development/python-modules/pyfritzhome/default.nix index b2baa5146671..2cc602050c15 100644 --- a/pkgs/development/python-modules/pyfritzhome/default.nix +++ b/pkgs/development/python-modules/pyfritzhome/default.nix @@ -1,24 +1,23 @@ { lib , buildPythonPackage , fetchFromGitHub +, pytestCheckHook , pythonOlder , requests -, nose -, mock }: buildPythonPackage rec { pname = "pyfritzhome"; - version = "0.6.7"; + version = "0.6.8"; format = "setuptools"; - disabled = pythonOlder "3.6"; + disabled = pythonOlder "3.7"; src = fetchFromGitHub { owner = "hthiery"; repo = "python-fritzhome"; - rev = version; - hash = "sha256-cRG+Dm3KG6no3/OQCZkvISW1yE5azdDVTa5oTV1sRpk="; + rev = "refs/tags/${version}"; + hash = "sha256-MIWRBwqVuS1iEuWxsE1yuGS2zHYVgnH2G4JJk7Yct6s="; }; propagatedBuildInputs = [ @@ -26,14 +25,9 @@ buildPythonPackage rec { ]; nativeCheckInputs = [ - mock - nose + pytestCheckHook ]; - checkPhase = '' - nosetests - ''; - pythonImportsCheck = [ "pyfritzhome" ]; From 50f26bcb1b9e7e037e04cadf788c8639fc3b37e6 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 4 Mar 2023 22:20:36 +0000 Subject: [PATCH 154/154] clusterctl: 1.3.4 -> 1.3.5 --- pkgs/applications/networking/cluster/clusterctl/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/networking/cluster/clusterctl/default.nix b/pkgs/applications/networking/cluster/clusterctl/default.nix index 3f614815f7a5..e27ebb58bd45 100644 --- a/pkgs/applications/networking/cluster/clusterctl/default.nix +++ b/pkgs/applications/networking/cluster/clusterctl/default.nix @@ -2,13 +2,13 @@ buildGoModule rec { pname = "clusterctl"; - version = "1.3.4"; + version = "1.3.5"; src = fetchFromGitHub { owner = "kubernetes-sigs"; repo = "cluster-api"; rev = "v${version}"; - hash = "sha256-bkjtJidG+UHma15axlLcXtqtWTqesOdHHmH4db5hoAY="; + hash = "sha256-e6rs7cCSZiklMtPiFozea6EqRylepD2gfoDqQaUuly4="; }; vendorHash = "sha256-VPeaT4vPhBa6V+Ir+vNRIWgyVBzEgTDSnDtGrxxdZ0c=";