
Merge master into staging-next

github-actions[bot] 2021-06-07 18:48:34 +00:00 committed by GitHub
commit 1593cddce5
60 changed files with 715 additions and 450 deletions


@ -1,5 +1,15 @@
MD_TARGETS=$(addsuffix .xml, $(basename $(shell find . -type f -regex '.*\.md$$' -not -name README.md)))
PANDOC ?= pandoc
pandoc_media_dir = media
# TODO: Remove raw-attribute when we can get rid of DocBook altogether.
pandoc_commonmark_enabled_extensions = +attributes+fenced_divs+footnotes+bracketed_spans+definition_lists+pipe_tables+raw_attribute
pandoc_flags = --extract-media=$(pandoc_media_dir) \
--lua-filter=$(PANDOC_LUA_FILTERS_DIR)/diagram-generator.lua \
--lua-filter=labelless-link-is-xref.lua \
-f commonmark$(pandoc_commonmark_enabled_extensions)+smart
.PHONY: all
all: validate format out/html/index.html out/epub/manual.epub
@ -39,7 +49,7 @@ out/html/index.html: doc-support/result manual-full.xml style.css highlightjs
mkdir -p out/html/highlightjs/
cp -r highlightjs out/html/
cp -r media out/html/
cp -r $(pandoc_media_dir) out/html/
cp ./overrides.css out/html/
cp ./style.css out/html/style.css
@ -54,7 +64,7 @@ out/epub/manual.epub: manual-full.xml
doc-support/result/epub.xsl \
./manual-full.xml
cp -r media out/epub/scratch/OEBPS
cp -r $(pandoc_media_dir) out/epub/scratch/OEBPS
cp ./overrides.css out/epub/scratch/OEBPS
cp ./style.css out/epub/scratch/OEBPS
mkdir -p out/epub/scratch/OEBPS/images/callouts/
@ -89,16 +99,12 @@ functions/library/generated: doc-support/result
ln -rfs ./doc-support/result/function-docs functions/library/generated
%.section.xml: %.section.md
pandoc $^ -t docbook \
--extract-media=media \
--lua-filter=$(PANDOC_LUA_FILTERS_DIR)/diagram-generator.lua \
-f markdown+smart \
$(PANDOC) $^ -t docbook \
$(pandoc_flags) \
| cat > $@
%.chapter.xml: %.chapter.md
pandoc $^ -t docbook \
$(PANDOC) $^ -t docbook \
--top-level-division=chapter \
--extract-media=media \
--lua-filter=$(PANDOC_LUA_FILTERS_DIR)/diagram-generator.lua \
-f markdown+smart \
$(pandoc_flags) \
| cat > $@


@ -20,59 +20,58 @@ The main difference between `fetchurl` and `fetchzip` is in how they store the c
`fetchpatch` works very similarly to `fetchurl` with the same arguments expected. It expects patch files as a source and performs normalization on them before computing the checksum. For example it will remove comments or other unstable parts that are sometimes added by version control systems and can change over time.
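For instance, a minimal `fetchpatch` call might look like the following sketch (the URL and hash are placeholders, not a real patch):
```nix
patches = [
  (fetchpatch {
    # Placeholder values; substitute the real patch URL and hash.
    url = "https://example.org/fix-build.patch";
    sha256 = "0000000000000000000000000000000000000000000000000000";
  })
];
```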
Other fetcher functions allow you to add source code directly from a VCS such as subversion or git. These are mostly straightforward names based on the name of the command used with the VCS system. Because they give you a working repository, they act most like `fetchzip`.
## `fetchsvn`
## `fetchsvn` {#fetchsvn}
Used with Subversion. Expects `url` to a Subversion directory, `rev`, and `sha256`.
## `fetchgit`
## `fetchgit` {#fetchgit}
Used with Git. Expects `url` to a Git repo, `rev`, and `sha256`. `rev` in this case can be the full git commit id (SHA1 hash) or a tag name like `refs/tags/v1.0`.
Additionally the following optional arguments can be given: `fetchSubmodules = true` makes `fetchgit` also fetch the submodules of a repository. If `deepClone` is set to true, the entire repository is cloned as opposed to just creating a shallow clone. `deepClone = true` also implies `leaveDotGit = true` which means that the `.git` directory of the clone won't be removed after checkout.
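As a rough sketch (URL, revision and hash are placeholders), a `fetchgit` call combining these arguments could look like:
```nix
src = fetchgit {
  url = "https://example.org/project.git";  # placeholder repository
  rev = "refs/tags/v1.0";
  sha256 = "0000000000000000000000000000000000000000000000000000";
  fetchSubmodules = true;                   # optional, see above
};
```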
## `fetchfossil`
## `fetchfossil` {#fetchfossil}
Used with Fossil. Expects `url` to a Fossil archive, `rev`, and `sha256`.
## `fetchcvs`
## `fetchcvs` {#fetchcvs}
Used with CVS. Expects `cvsRoot`, `tag`, and `sha256`.
## `fetchhg`
## `fetchhg` {#fetchhg}
Used with Mercurial. Expects `url`, `rev`, and `sha256`.
A number of fetcher functions wrap part of `fetchurl` and `fetchzip`. They are mainly convenience functions intended for commonly used destinations of source code in Nixpkgs. These wrapper fetchers are listed below.
## `fetchFromGitHub`
## `fetchFromGitHub` {#fetchfromgithub}
`fetchFromGitHub` expects four arguments. `owner` is a string corresponding to the GitHub user or organization that controls this repository. `repo` corresponds to the name of the software repository. These are located at the top of every GitHub HTML page as `owner`/`repo`. `rev` corresponds to the Git commit hash or tag (e.g. `v1.0`) that will be downloaded from Git. Finally, `sha256` corresponds to the hash of the extracted directory. Again, other hash algorithms are also available but `sha256` is currently preferred.
`fetchFromGitHub` uses `fetchzip` to download the source archive generated by GitHub for the specified revision. If `leaveDotGit`, `deepClone` or `fetchSubmodules` are set to `true`, `fetchFromGitHub` will use `fetchgit` instead. Refer to its section for documentation of these options.
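A minimal sketch of such a call, with placeholder values for the four arguments described above:
```nix
src = fetchFromGitHub {
  owner = "NixOS";
  repo = "nix";
  rev = "v1.0";   # a tag or a full commit hash
  sha256 = "0000000000000000000000000000000000000000000000000000";
};
```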
## `fetchFromGitLab`
## `fetchFromGitLab` {#fetchfromgitlab}
This is used with GitLab repositories. The arguments expected are very similar to fetchFromGitHub above.
## `fetchFromGitiles`
## `fetchFromGitiles` {#fetchfromgitiles}
This is used with Gitiles repositories. The arguments expected are similar to fetchgit.
## `fetchFromBitbucket`
## `fetchFromBitbucket` {#fetchfrombitbucket}
This is used with BitBucket repositories. The arguments expected are very similar to fetchFromGitHub above.
## `fetchFromSavannah`
## `fetchFromSavannah` {#fetchfromsavannah}
This is used with Savannah repositories. The arguments expected are very similar to fetchFromGitHub above.
## `fetchFromRepoOrCz`
## `fetchFromRepoOrCz` {#fetchfromrepoorcz}
This is used with repo.or.cz repositories. The arguments expected are very similar to fetchFromGitHub above.
## `fetchFromSourcehut`
## `fetchFromSourcehut` {#fetchfromsourcehut}
This is used with sourcehut repositories. The arguments expected are very similar to fetchFromGitHub above. Don't forget the tilde (~) in front of the user name!


@ -2,7 +2,7 @@
`pkgs.appimageTools` is a set of functions for extracting and wrapping [AppImage](https://appimage.org/) files. They are meant to be used if traditional packaging from source is infeasible, or it would take too long. To quickly run an AppImage file, `pkgs.appimage-run` can be used as well.
::: warning
::: {.warning}
The `appimageTools` API is unstable and may be subject to backwards-incompatible changes in the future.
:::
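As a sketch only (the wrapper name, URL and hash are placeholders, and `wrapType2` assumes a type-2 AppImage):
```nix
appimageTools.wrapType2 {
  name = "some-app";
  src = fetchurl {
    url = "https://example.org/SomeApp-x86_64.AppImage";
    sha256 = "0000000000000000000000000000000000000000000000000000";
  };
}
```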


@ -1,6 +1,6 @@
# pkgs.dockerTools {#sec-pkgs-dockerTools}
`pkgs.dockerTools` is a set of functions for creating and manipulating Docker images according to the [ Docker Image Specification v1.2.0 ](https://github.com/moby/moby/blob/master/image/spec/v1.2.md#docker-image-specification-v120). Docker itself is not used to perform any of the operations done by these functions.
`pkgs.dockerTools` is a set of functions for creating and manipulating Docker images according to the [Docker Image Specification v1.2.0](https://github.com/moby/moby/blob/master/image/spec/v1.2.md#docker-image-specification-v120). Docker itself is not used to perform any of the operations done by these functions.
## buildImage {#ssec-pkgs-dockerTools-buildImage}
@ -52,7 +52,7 @@ The above example will build a Docker image `redis/latest` from the given base i
> **_NOTE:_** Using this parameter requires the `kvm` device to be available.
- `config` is used to specify the configuration of the containers that will be started off the built image in Docker. The available options are listed in the [ Docker Image Specification v1.2.0 ](https://github.com/moby/moby/blob/master/image/spec/v1.2.md#image-json-field-descriptions).
- `config` is used to specify the configuration of the containers that will be started off the built image in Docker. The available options are listed in the [Docker Image Specification v1.2.0](https://github.com/moby/moby/blob/master/image/spec/v1.2.md#image-json-field-descriptions).
After the new layer has been created, its closure (to which `contents`, `config` and `runAsRoot` contribute) will be copied in the layer itself. Only new dependencies that are not already in the existing layers will be copied.
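A rough sketch tying these parameters together (the image name, contents and commands are illustrative only, not the manual's full example):
```nix
{ pkgs ? import <nixpkgs> { } }:

pkgs.dockerTools.buildImage {
  name = "redis";
  tag = "latest";
  contents = [ pkgs.redis ];
  # Runs in a VM as root; requires the kvm device (see the note above).
  runAsRoot = ''
    #!${pkgs.runtimeShell}
    mkdir -p /data
  '';
  config = {
    Cmd = [ "/bin/redis-server" ];
    WorkingDir = "/data";
  };
}
```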


@ -14,7 +14,7 @@ Currently, `makeSnap` does not support creating GUI stubs.
The following expression packages GNU Hello as a Snapcraft snap.
```{#ex-snapTools-buildSnap-hello .nix}
``` {#ex-snapTools-buildSnap-hello .nix}
let
inherit (import <nixpkgs> { }) snapTools hello;
in snapTools.makeSnap {
@ -35,7 +35,7 @@ in snapTools.makeSnap {
Graphical programs require many more integrations with the host. This example uses Firefox, because it is one of the most complicated programs we could package.
```{#ex-snapTools-buildSnap-firefox .nix}
``` {#ex-snapTools-buildSnap-firefox .nix}
let
inherit (import <nixpkgs> { }) snapTools firefox;
in snapTools.makeSnap {


@ -1,6 +1,6 @@
# Cataclysm: Dark Days Ahead {#cataclysm-dark-days-ahead}
## How to install Cataclysm DDA
## How to install Cataclysm DDA {#how-to-install-cataclysm-dda}
To install the latest stable release of Cataclysm DDA to your profile, execute
`nix-env -f "<nixpkgs>" -iA cataclysm-dda`. For the curses build (build
@ -34,7 +34,7 @@ cataclysm-dda.override {
}
```
## Important note for overriding packages
## Important note for overriding packages {#important-note-for-overriding-packages}
After applying `overrideAttrs`, you need to fix `passthru.pkgs` and
`passthru.withMods` attributes either manually or by using `attachPkgs`:
@ -69,7 +69,7 @@ in
goodExample2.withMods (_: []) # parallel building enabled
```
## Customizing with mods
## Customizing with mods {#customizing-with-mods}
To install Cataclysm DDA with mods of your choice, you can use `withMods`
attribute:


@ -6,6 +6,6 @@ To start a development environment do
nix-shell -p elmPackages.elm elmPackages.elm-format
```
To update the Elm compiler, see <filename>nixpkgs/pkgs/development/compilers/elm/README.md</filename>.
To update the Elm compiler, see `nixpkgs/pkgs/development/compilers/elm/README.md`.
To package Elm applications, [read about elm2nix](https://github.com/hercules-ci/elm2nix#elm2nix).


@ -1,6 +1,6 @@
# Firefox {#sec-firefox}
## Build wrapped Firefox with extensions and policies
## Build wrapped Firefox with extensions and policies {#build-wrapped-firefox-with-extensions-and-policies}
The `wrapFirefox` function allows you to pass policies, preferences and extensions that are made available to Firefox. With the help of `fetchFirefoxAddon` this allows building a Firefox version that already comes with addons pre-installed:
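A minimal sketch of such a wrapper (the add-on name, URL, hash and policy values are placeholders, not recommendations):
```nix
{ pkgs ? import <nixpkgs> { } }:

pkgs.wrapFirefox pkgs.firefox-unwrapped {
  nixExtensions = [
    (pkgs.fetchFirefoxAddon {
      name = "some-addon";  # hypothetical add-on
      url = "https://addons.mozilla.org/firefox/downloads/file/0000000/some_addon.xpi";
      sha256 = "0000000000000000000000000000000000000000000000000000";
    })
  ];
  extraPolicies = {
    DisablePocket = true;
  };
}
```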


@ -4,11 +4,11 @@ OpenGL support varies depending on which hardware is used and which drivers are
Broadly, we support both GL vendors: Mesa and NVIDIA.
## NixOS Desktop
## NixOS Desktop {#nixos-desktop}
The NixOS desktop or other non-headless configurations are the primary target for OpenGL libraries and applications. The current solution for discovering which drivers are available is based on [libglvnd](https://gitlab.freedesktop.org/glvnd/libglvnd). `libglvnd` performs "vendor-neutral dispatch", trying a variety of techniques to find the system's GL implementation. In practice, this will be either via standard GLX for X11 users or EGL for Wayland users, and supporting either NVIDIA or Mesa extensions.
## Nix on GNU/Linux
## Nix on GNU/Linux {#nix-on-gnulinux}
If you are using a non-NixOS GNU/Linux/X11 desktop with free software video drivers, consider launching OpenGL-dependent programs from Nixpkgs with Nixpkgs versions of `libglvnd` and `mesa.drivers` in `LD_LIBRARY_PATH`. For Mesa drivers, the Linux kernel version doesn't have to match nixpkgs.
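One possible way to do that, sketched as a `shell.nix` (an assumption about a workable setup, including the `mesa.drivers` attribute, not an officially supported recipe):
```nix
{ pkgs ? import <nixpkgs> { } }:

pkgs.mkShell {
  # Programs started from this shell pick up Nixpkgs' GL dispatch and Mesa drivers first.
  shellHook = ''
    export LD_LIBRARY_PATH=${pkgs.lib.makeLibraryPath [ pkgs.libglvnd pkgs.mesa.drivers ]}:$LD_LIBRARY_PATH
  '';
}
```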


@ -20,6 +20,7 @@ Use `programs.steam.enable = true;` if you want to add steam to systemPackages a
## Troubleshooting {#sec-steam-troub}
- **Steam fails to start. What do I do?**
Try to run
```ShellSession
@ -32,24 +33,26 @@ Use `programs.steam.enable = true;` if you want to add steam to systemPackages a
- The `newStdcpp` parameter was removed in NixOS 17.09 and should not be needed anymore.
- Steam ships statically linked with a version of libcrypto that conflicts with the one dynamically loaded by radeonsi_dri.so. If you get the error
```
steam.sh: line 713: 7842 Segmentation fault (core dumped)
```
have a look at [this pull request](https://github.com/NixOS/nixpkgs/pull/20269).
- **Java**
1. There is no java in steam chrootenv by default. If you get a message like
```
/home/foo/.local/share/Steam/SteamApps/common/towns/towns.sh: line 1: java: command not found
```
```
/home/foo/.local/share/Steam/SteamApps/common/towns/towns.sh: line 1: java: command not found
```
You need to add
you need to add
```nix
steam.override { withJava = true; };
```
```nix
steam.override { withJava = true; };
```
## steam-run {#sec-steam-run}
@ -57,9 +60,9 @@ The FHS-compatible chroot used for steam can also be used to run other linux gam
```nix
pkgs.steam.override ({
nativeOnly = true;
newStdcpp = true;
}).run
nativeOnly = true;
newStdcpp = true;
}).run
```
to your configuration, rebuild, and run the game with


@ -2,7 +2,7 @@
The Nix expressions for the X.org packages reside in `pkgs/servers/x11/xorg/default.nix`. This file is automatically generated from lists of tarballs in an X.org release. As such it should not be modified directly; rather, you should modify the lists, the generator script or the file `pkgs/servers/x11/xorg/overrides.nix`, in which you can override or add to the derivations produced by the generator.
## Katamari Tarballs
## Katamari Tarballs {#katamari-tarballs}
X.org upstream releases used to include [katamari](https://en.wiktionary.org/wiki/%E3%81%8B%E3%81%9F%E3%81%BE%E3%82%8A) releases, which included a holistic recommended version for each tarball, up until 7.7. To create a list of tarballs in a katamari release:
@ -14,11 +14,11 @@ cat $(PRINT_PATH=1 nix-prefetch-url $url | tail -n 1) \
| sort > "tarballs-$release.list"
```
## Individual Tarballs
## Individual Tarballs {#individual-tarballs}
The upstream release process for [X11R7.8](https://x.org/wiki/Releases/7.8/) does not include a planned katamari. Instead, each component of X.org is released as its own tarball. We maintain `pkgs/servers/x11/xorg/tarballs.list` as a list of tarballs for each individual package. This list includes X.org core libraries and protocol descriptions, extra newer X11 interface libraries, like `xorg.libxcb`, and classic utilities which are largely unused but still available if needed, like `xorg.imake`.
## Generating Nix Expressions
## Generating Nix Expressions {#generating-nix-expressions}
The generator is invoked as follows:
@ -29,6 +29,6 @@ cd pkgs/servers/x11/xorg
For each of the tarballs in the `.list` files, the script downloads it, unpacks it, and searches its `configure.ac` and `*.pc.in` files for dependencies. This information is used to generate `default.nix`. The generator caches downloaded tarballs between runs. Pay close attention to the `NOT FOUND: $NAME` messages at the end of the run, since they may indicate missing dependencies. (Some might be optional dependencies, however.)
## Overriding the Generator
## Overriding the Generator {#overriding-the-generator}
If the expression for a package requires derivation attributes that the generator cannot figure out automatically (say, `patches` or a `postInstall` hook), you should modify `pkgs/servers/x11/xorg/overrides.nix`.


@ -37,7 +37,7 @@ This works just like `runCommand`. The only difference is that it also provides
Variant of `runCommand` that forces the derivation to be built locally; it is not substituted. This is intended for very cheap commands (<1s execution time). It saves on the network roundtrip and can speed up a build.
::: note
::: {.note}
This sets [`allowSubstitutes` to `false`](https://nixos.org/nix/manual/#adv-attr-allowSubstitutes), so only use `runCommandLocal` if you are certain the user will always have a builder for the `system` of the derivation. This should be true for most trivial use cases (e.g. just copying some files to a different location or adding symlinks), because there the `system` is usually the same as `builtins.currentSystem`.
:::
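A minimal sketch of a `runCommandLocal` call (the copied file is hypothetical):
```nix
runCommandLocal "example-config" { } ''
  mkdir -p $out
  cp ${./config.ini} $out/config.ini   # ./config.ini is a hypothetical local file
''
```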


@ -6,7 +6,7 @@
- Do not use tab characters, i.e. configure your editor to use soft tabs. For instance, use `(setq-default indent-tabs-mode nil)` in Emacs. Everybody has different tab settings so it's asking for trouble.
- Use `lowerCamelCase` for variable names, not `UpperCamelCase`. Note, this rule does not apply to package attribute names, which instead follow the rules in <xref linkend="sec-package-naming"/>.
- Use `lowerCamelCase` for variable names, not `UpperCamelCase`. Note, this rule does not apply to package attribute names, which instead follow the rules in [](#sec-package-naming).
- Function calls with attribute set arguments are written as
@ -209,7 +209,7 @@ There are a few naming guidelines:
- Dashes in the package name _should_ be preserved in new variable names, rather than converted to underscores or camel cased — e.g., `http-parser` instead of `http_parser` or `httpParser`. The hyphenated style is preferred in all three package names.
- If there are multiple versions of a package, this _should_ be reflected in the variable names in `all-packages.nix`, e.g. `json-c-0-9` and `json-c-0-11`. If there is an obvious “default” version, make an attribute like `json-c = json-c-0-9;`. See also <xref linkend="sec-versioning" />
- If there are multiple versions of a package, this _should_ be reflected in the variable names in `all-packages.nix`, e.g. `json-c-0-9` and `json-c-0-11`. If there is an obvious “default” version, make an attribute like `json-c = json-c-0-9;`. See also [](#sec-versioning)
## File naming and organisation {#sec-organisation}
@ -462,9 +462,9 @@ Preferred source hash type is sha256. There are several ways to get it.
For package updates it is enough to change one symbol to make the hash fake. For new packages, you can use `lib.fakeSha256`, `lib.fakeSha512` or any other fake hash.
This is last resort method when reconstructing source URL is non-trivial and `nix-prefetch-url -A` isn't applicable (for example, [one of `kodi` dependencies](https://github.com/NixOS/nixpkgs/blob/d2ab091dd308b99e4912b805a5eb088dd536adb9/pkgs/applications/video/kodi/default.nix#L73")). The easiest way then would be replace hash with a fake one and rebuild. Nix build will fail and error message will contain desired hash.
This is a last resort method, used when reconstructing the source URL is non-trivial and `nix-prefetch-url -A` isn’t applicable (for example, [one of `kodi` dependencies](https://github.com/NixOS/nixpkgs/blob/d2ab091dd308b99e4912b805a5eb088dd536adb9/pkgs/applications/video/kodi/default.nix#L73)). The easiest way then is to replace the hash with a fake one and rebuild. The Nix build will fail and the error message will contain the desired hash.
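A sketch of what that looks like in a package expression (the URL is a placeholder):
```nix
src = fetchurl {
  url = "https://example.org/foo-1.0.tar.gz";
  sha256 = lib.fakeSha256;  # the failing build prints the real hash to substitute here
};
```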
::: warning
::: {.warning}
This method has security problems. Check below for details.
:::


@ -1,6 +1,6 @@
# Contributing to this documentation {#chap-contributing}
The DocBook sources of the Nixpkgs manual are in the [doc](https://github.com/NixOS/nixpkgs/tree/master/doc) subdirectory of the Nixpkgs repository.
The sources of the Nixpkgs manual are in the [doc](https://github.com/NixOS/nixpkgs/tree/master/doc) subdirectory of the Nixpkgs repository. The manual is still partially written in DocBook but it is progressively being converted to [Markdown](#sec-contributing-markup).
You can quickly check your edits with `make`:
@ -22,3 +22,78 @@ $ nix-shell
```
If the build succeeds, the manual will be in `./result/share/doc/nixpkgs/manual.html`.
## Syntax {#sec-contributing-markup}
As per [RFC 0072](https://github.com/NixOS/rfcs/pull/72), all new documentation content should be written in the [CommonMark](https://commonmark.org/) Markdown dialect.
Additionally, the following syntax extensions are currently used:
- []{#ssec-contributing-markup-anchors}
Explicitly defined **anchors** on headings, to allow linking to sections. These should always be used, to ensure the anchors can be linked even when the heading text changes, and to prevent conflicts between [automatically assigned identifiers](https://github.com/jgm/commonmark-hs/blob/master/commonmark-extensions/test/auto_identifiers.md).
It uses the widely compatible [header attributes](https://github.com/jgm/commonmark-hs/blob/master/commonmark-extensions/test/attributes.md) syntax:
```markdown
## Syntax {#sec-contributing-markup}
```
- []{#ssec-contributing-markup-anchors-inline}
**Inline anchors**, which allow linking to an arbitrary place in the text (e.g. individual list items, sentences…).
They are defined using a hybrid of the link syntax with the attributes syntax known from headings, called [bracketed spans](https://github.com/jgm/commonmark-hs/blob/master/commonmark-extensions/test/bracketed_spans.md):
```markdown
- []{#ssec-gnome-hooks-glib} `glib` setup hook will populate `GSETTINGS_SCHEMAS_PATH` and then `wrapGAppsHook` will prepend it to `XDG_DATA_DIRS`.
```
- []{#ssec-contributing-markup-automatic-links}
If you **omit a link text** for a link pointing to a section, the text will be substituted automatically. For example, `[](#chap-contributing)` will result in [](#chap-contributing).
This syntax is taken from [MyST](https://myst-parser.readthedocs.io/en/latest/using/syntax.html#targets-and-cross-referencing).
- []{#ssec-contributing-markup-admonitions}
**Admonitions**, set off from the text to bring attention to something.
It uses pandoc’s [fenced `div`s syntax](https://github.com/jgm/commonmark-hs/blob/master/commonmark-extensions/test/fenced_divs.md):
```markdown
::: {.warning}
This is a warning
:::
```
which renders as
> ::: {.warning}
> This is a warning.
> :::
The following are supported:
- [`caution`](https://tdg.docbook.org/tdg/5.0/caution.html)
- [`important`](https://tdg.docbook.org/tdg/5.0/important.html)
- [`note`](https://tdg.docbook.org/tdg/5.0/note.html)
- [`tip`](https://tdg.docbook.org/tdg/5.0/tip.html)
- [`warning`](https://tdg.docbook.org/tdg/5.0/warning.html)
- []{#ssec-contributing-markup-definition-lists}
[**Definition lists**](https://github.com/jgm/commonmark-hs/blob/master/commonmark-extensions/test/definition_lists.md), for defining a group of terms:
```markdown
pear
: green or yellow bulbous fruit
watermelon
: green fruit with red flesh
```
which renders as
> pear
> : green or yellow bulbous fruit
>
> watermelon
> : green fruit with red flesh
For contributing to the legacy parts, please see [DocBook: The Definitive Guide](https://tdg.docbook.org/) or the [DocBook rocks! primer](https://web.archive.org/web/20200816233747/https://docbook.rocks/).


@ -9,7 +9,7 @@ To add a package to Nixpkgs:
$ cd nixpkgs
```
2. Find a good place in the Nixpkgs tree to add the Nix expression for your package. For instance, a library package typically goes into `pkgs/development/libraries/pkgname`, while a web browser goes into `pkgs/applications/networking/browsers/pkgname`. See <xref linkend="sec-organisation" /> for some hints on the tree organisation. Create a directory for your package, e.g.
2. Find a good place in the Nixpkgs tree to add the Nix expression for your package. For instance, a library package typically goes into `pkgs/development/libraries/pkgname`, while a web browser goes into `pkgs/applications/networking/browsers/pkgname`. See [](#sec-organisation) for some hints on the tree organisation. Create a directory for your package, e.g.
```ShellSession
$ mkdir pkgs/development/libraries/libfoo


@ -1,6 +1,6 @@
# Reviewing contributions {#chap-reviewing-contributions}
::: warning
::: {.warning}
The following section is a draft, and the policy for reviewing is still being discussed in issues such as [#11166](https://github.com/NixOS/nixpkgs/issues/11166) and [#20836](https://github.com/NixOS/nixpkgs/issues/20836).
:::
@ -35,15 +35,18 @@ Reviewing process:
- Building the package locally.
- pull requests are often targeted to the master or staging branch, and building the pull request locally when it is submitted can trigger many source builds.
- It is possible to rebase the changes on nixos-unstable or nixpkgs-unstable for easier review by running the following commands from a nixpkgs clone.
```ShellSession
$ git fetch origin nixos-unstable
$ git fetch origin pull/PRNUMBER/head
$ git rebase --onto nixos-unstable BASEBRANCH FETCH_HEAD
```
- The first command fetches the nixos-unstable branch.
- The second command fetches the pull request changes: `PRNUMBER` is the number at the end of the pull request title and `BASEBRANCH` is the base branch of the pull request.
- The third command rebases the pull request changes to the nixos-unstable branch.
- The [nixpkgs-review](https://github.com/Mic92/nixpkgs-review) tool can be used to review a pull request's content in a single command. `PRNUMBER` should be replaced by the number at the end of the pull request title. You can also provide the full GitHub pull request URL.
```ShellSession
$ nix-shell -p nixpkgs-review --run "nixpkgs-review pr PRNUMBER"
```


@ -71,6 +71,7 @@ Security fixes are submitted in the same way as other changes and thus the same
- If a new version fixing the vulnerability has been released, update the package;
- If the security fix comes in the form of a patch and a CVE is available, then add the patch to the Nixpkgs tree, and apply it to the package.
The name of the patch should be the CVE identifier, so e.g. `CVE-2019-13636.patch`; if a patch is fetched, the name needs to be set as well, e.g.:
```nix
(fetchpatch {
name = "CVE-2019-11068.patch";
@ -89,7 +90,7 @@ There is currently no policy when to remove a package.
Before removing a package, one should try to find a new maintainer or fix smaller issues first.
### Steps to remove a package from Nixpkgs
### Steps to remove a package from Nixpkgs {#steps-to-remove-a-package-from-nixpkgs}
We use jbidwatcher as an example of a discontinued project here.
@ -100,6 +101,7 @@ We use jbidwatcher as an example for a discontinued project here.
1. Add an alias for the package name in `pkgs/top-level/aliases.nix` (There is also `pkgs/misc/vim-plugins/aliases.nix`. Package sets typically do not have aliases, so we can't add them there.)
For example in this case:
```
jbidwatcher = throw "jbidwatcher was discontinued in march 2021"; # added 2021-03-15
```


@ -166,7 +166,7 @@ lib.attrsets.setAttrByPath [ "a" "b" ] 3
<xi:include href="./locations.xml" xpointer="lib.attrsets.getAttrFromPath" />
<para>
Like <xref linkend="function-library-lib.attrsets.attrByPath" /> except without a default, and it will throw if the value doesn't exist.
Like [](#function-library-lib.attrsets.attrByPath) except without a default, and it will throw if the value doesn't exist.
</para>
<variablelist>
@ -1480,7 +1480,7 @@ lib.attrsets.zipAttrsWith
<xi:include href="./locations.xml" xpointer="lib.attrsets.zipAttrs" />
<para>
Merge sets of attributes and combine each attribute value in to a list. Similar to <xref linkend="function-library-lib.attrsets.zipAttrsWith" /> where the merge function returns a list of all values.
Merge sets of attributes and combine each attribute value in to a list. Similar to [](#function-library-lib.attrsets.zipAttrsWith) where the merge function returns a list of all values.
</para>
<variablelist>


@ -0,0 +1,24 @@
local function starts_with(start, str)
return str:sub(1, #start) == start
end
local function escape_xml_arg(arg)
amps = arg:gsub('&', '&amp;')
amps_quotes = amps:gsub('"', '&quot;')
amps_quotes_lt = amps_quotes:gsub('<', '&lt;')
return amps_quotes_lt
end
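-- Turn empty links that target an in-document anchor (e.g. [](#some-id))
-- into DocBook <xref/> elements pointing at that id.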
function Link(elem)
has_no_content = #elem.content == 0
targets_anchor = starts_with('#', elem.target)
has_no_attributes = elem.title == '' and elem.identifier == '' and #elem.classes == 0 and #elem.attributes == 0
if has_no_content and targets_anchor and has_no_attributes then
-- xref expects idref without the pound-sign
target_without_hash = elem.target:sub(2, #elem.target)
return pandoc.RawInline('docbook', '<xref linkend="' .. escape_xml_arg(target_without_hash) .. '" />')
end
end


@ -1,6 +1,6 @@
# Agda {#agda}
## How to use Agda
## How to use Agda {#how-to-use-agda}
Agda is available as the [agda](https://search.nixos.org/packages?channel=unstable&show=agda&from=0&size=30&sort=relevance&query=agda)
package.
@ -43,6 +43,7 @@ agda.withPackages (p: [
```
You can also reference a GitHub repository
```nix
agda.withPackages (p: [
(p.standard-library.overrideAttrs (oldAttrs: {
@ -59,6 +60,7 @@ agda.withPackages (p: [
If you want to use a library not added to Nixpkgs, you can add a
dependency to a local library by calling `agdaPackages.mkDerivation`.
```nix
agda.withPackages (p: [
(p.mkDerivation {
@ -92,20 +94,21 @@ See [Building Agda Packages](#building-agda-packages) for more information on `m
Agda will not by default use these libraries. To tell Agda to use a library we have some options:
* Call `agda` with the library flag:
```ShellSession
$ agda -l standard-library -i . MyFile.agda
```
```ShellSession
$ agda -l standard-library -i . MyFile.agda
```
* Write a `my-library.agda-lib` file for the project you are working on which may look like:
```
name: my-library
include: .
depend: standard-library
```
```
name: my-library
include: .
depend: standard-library
```
* Create the file `~/.agda/defaults` and add any libraries you want to use by default.
More information can be found in the [official Agda documentation on library management](https://agda.readthedocs.io/en/v2.6.1/tools/package-system.html).
## Compiling Agda
## Compiling Agda {#compiling-agda}
Agda modules can be compiled using the GHC backend with the `--compile` flag. A version of `ghc` with `ieee754` is made available to the Agda program via the `--with-compiler` flag.
This can be overridden by a different version of `ghc` as follows:
@ -116,7 +119,8 @@ agda.withPackages {
}
```
## Writing Agda packages
## Writing Agda packages {#writing-agda-packages}
To write a nix derivation for an Agda library, first check that the library has a `*.agda-lib` file.
A derivation can then be written using `agdaPackages.mkDerivation`. This has similar arguments to `stdenv.mkDerivation` with the following additions:
@ -140,19 +144,21 @@ agdaPackages.mkDerivation {
}
```
### Building Agda packages
### Building Agda packages {#building-agda-packages}
The default build phase for `agdaPackages.mkDerivation` simply runs `agda` on the `Everything.agda` file.
If something else is needed to build the package (e.g. `make`) then the `buildPhase` should be overridden.
Additionally, a `preBuild` or `configurePhase` can be used if there are steps that need to be done prior to checking the `Everything.agda` file.
`agda` and the Agda libraries contained in `buildInputs` are made available during the build phase.
### Installing Agda packages
### Installing Agda packages {#installing-agda-packages}
The default install phase copies Agda source files, Agda interface files (`*.agdai`) and `*.agda-lib` files to the output directory.
This can be overridden.
By default, Agda sources are files ending in `.agda`, or literate Agda files ending in `.lagda`, `.lagda.tex`, `.lagda.org`, `.lagda.md`, `.lagda.rst`. The list of recognised Agda source extensions can be extended by setting the `extraExtensions` config variable.
## Adding Agda packages to Nixpkgs
## Adding Agda packages to Nixpkgs {#adding-agda-packages-to-nixpkgs}
To add an Agda package to `nixpkgs`, the derivation should be written to `pkgs/development/libraries/agda/${library-name}/` and an entry should be added to `pkgs/top-level/agda-packages.nix`. Here it is called in a scope with access to all other Agda libraries, so the top line of the `default.nix` can look like:
@ -182,6 +188,7 @@ mkDerivation {
'';
}
```
This library has a file called `.agda-lib`, and so we give an empty string to `libraryFile` as nothing precedes `.agda-lib` in the filename. This file contains `name: IAL-1.3`, and so we let `libraryName = "IAL-1.3"`. This library does not use an `Everything.agda` file and instead has a Makefile, so there is no need to set `everythingFile` and we set a custom `buildPhase`.
When writing an Agda package it is essential to make sure that no `.agda-lib` file gets added to the store as a single file (for example by using `writeText`). This causes Agda to think that the nix store is an Agda library and it will attempt to write to it whenever it typechecks something. See [https://github.com/agda/agda/issues/4613](https://github.com/agda/agda/issues/4613).


@ -3,8 +3,8 @@
The Android build environment provides three major features and a number of
supporting features.
Deploying an Android SDK installation with plugins
--------------------------------------------------
## Deploying an Android SDK installation with plugins {#deploying-an-android-sdk-installation-with-plugins}
The first use case is deploying the SDK with a desired set of plugins or subsets
of an SDK.
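A rough sketch of composing such an SDK (the parameter names and values are assumptions, chosen only for illustration):
```nix
{ pkgs ? import <nixpkgs> { } }:

let
  androidComposition = pkgs.androidenv.composeAndroidPackages {
    platformVersions = [ "28" ];
    abiVersions = [ "armeabi-v7a" ];
    includeEmulator = false;
  };
in
androidComposition.androidsdk
```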
@ -136,8 +136,8 @@ in
androidComposition.platform-tools
```
Using predefined Android package compositions
---------------------------------------------
## Using predefined Android package compositions {#using-predefined-android-package-compositions}
In addition to composing an Android package set manually, it is also possible
to use a predefined composition that contains all basic packages for a specific
Android version, such as version 9.0 (API-level 28).
@ -159,12 +159,13 @@ with import <nixpkgs> {};
androidenv.androidPkgs_9_0.platform-tools
```
Building an Android application
-------------------------------
## Building an Android application {#building-an-android-application}
In addition to the SDK, it is also possible to build an Ant-based Android
project and automatically deploy all the Android plugins that a project
requires.
```nix
with import <nixpkgs> {};
@ -199,8 +200,8 @@ to build Android apps. An Android APK gets exposed as a build product and can be
installed on any Android device with a web browser by navigating to the build
result page.
Spawning emulator instances
---------------------------
## Spawning emulator instances {#spawning-emulator-instances}
For testing purposes, it can also be quite convenient to automatically generate
scripts that spawn emulator instances with all desired configuration settings.
@ -241,8 +242,8 @@ androidenv.emulateApp {
In addition to prebuilt APKs, you can also bind the APK parameter to a
`buildApp {}` function invocation shown in the previous example.
Notes on environment variables in Android projects
--------------------------------------------------
## Notes on environment variables in Android projects {#notes-on-environment-variables-in-android-projects}
* `ANDROID_SDK_ROOT` should point to the Android SDK. In your Nix expressions, this should be
`${androidComposition.androidsdk}/libexec/android-sdk`. Note that `ANDROID_HOME` is deprecated,
but if you rely on tools that need it, you can export it too.
@ -300,8 +301,8 @@ This shell.nix includes a shell hook that overwrites local.properties with the c
sdk.dir and ndk.dir values. This will ensure that the SDK and NDK directories will
both be correct when you run Android Studio inside nix-shell.
Notes on improving build.gradle compatibility
---------------------------------------------
## Notes on improving build.gradle compatibility {#notes-on-improving-build.gradle-compatibility}
Ensure that your buildToolsVersion and ndkVersion match what is declared in androidenv.
If you are using cmake, make sure its declared version is correct too.
@ -321,8 +322,8 @@ android {
```
Querying the available versions of each plugin
----------------------------------------------
## Querying the available versions of each plugin {#querying-the-available-versions-of-each-plugin}
repo.json provides all the options in one file now.
A shell script in the `pkgs/development/mobile/androidenv/` subdirectory can be used to retrieve all
@ -334,8 +335,8 @@ possible options:
The above command-line instruction queries all package versions in repo.json.
Updating the generated expressions
----------------------------------
## Updating the generated expressions {#updating-the-generated-expressions}
repo.json is generated from XML files that the Android Studio package manager uses.
To update the expressions run the `generate.sh` script that is stored in the
`pkgs/development/mobile/androidenv/` subdirectory:


@ -4,9 +4,9 @@
In this document and related Nix expressions, we use the term, _BEAM_, to describe the environment. BEAM is the name of the Erlang Virtual Machine and, as far as we're concerned, from a packaging perspective, all languages that run on the BEAM are interchangeable. That which varies, like the build system, is transparent to users of any given BEAM package, so we make no distinction.
## Available versions and deprecations schedule
## Available versions and deprecations schedule {#available-versions-and-deprecations-schedule}
### Elixir
### Elixir {#elixir}
nixpkgs follows the [official elixir deprecation schedule](https://hexdocs.pm/elixir/compatibility-and-deprecations.html) and keeps the last 5 released versions of Elixir available.
@ -68,7 +68,7 @@ Erlang.mk functions similarly to Rebar3, except we use `buildErlangMk` instead o
`mixRelease` is used to make a release in the mix sense. Dependencies will need to be fetched with `fetchMixDeps` and passed to it.
#### mixRelease - Elixir Phoenix example
#### mixRelease - Elixir Phoenix example {#mixrelease---elixir-phoenix-example}
Here is how your `default.nix` file would look.
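As a rough sketch only (the attribute names `mixRelease`, `fetchMixDeps` and `mixFodDeps` are assumptions here, and all values are placeholders):
```nix
{ pkgs ? import <nixpkgs> { } }:

pkgs.beamPackages.mixRelease rec {
  pname = "my-phoenix-app";  # hypothetical project
  version = "0.1.0";
  src = ./.;

  # Fixed-output derivation holding the Mix dependencies.
  mixFodDeps = pkgs.beamPackages.fetchMixDeps {
    inherit src version;
    pname = "mix-deps-${pname}";
    sha256 = "0000000000000000000000000000000000000000000000000000";
  };
}
```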
@ -148,7 +148,7 @@ Setup will require the following steps:
- you can now `nix-build .`
- To run the release, set the `RELEASE_TMP` environment variable to a directory that your program has write access to. It will be used to store the BEAM settings.
#### Example of creating a service for an Elixir - Phoenix project
#### Example of creating a service for an Elixir - Phoenix project {#example-of-creating-a-service-for-an-elixir---phoenix-project}
In order to create a service with your release, you could add a `service.nix`
in your project with the following
@ -228,7 +228,7 @@ mkShell {
}
```
#### Elixir - Phoenix project
#### Elixir - Phoenix project {#elixir---phoenix-project}
Here is an example `shell.nix`.


@ -149,7 +149,7 @@ A few notes about [Full example — `default.nix`](#ex-buildBowerComponentsDefau
## Troubleshooting {#ssec-bower2nix-troubleshooting}
### ENOCACHE errors from buildBowerComponents
### ENOCACHE errors from buildBowerComponents {#enocache-errors-from-buildbowercomponents}
This means that Bower was looking for a package version which doesn't exist in the generated `bower-packages.nix`.


@ -1,6 +1,6 @@
# Coq and coq packages {#sec-language-coq}
## Coq derivation: `coq`
## Coq derivation: `coq` {#coq-derivation-coq}
The Coq derivation is overridable through `coq.override overrides`, where `overrides` is an attribute set which contains the arguments to override. We recommend overriding either of the following
@ -8,7 +8,7 @@ The Coq derivation is overridable through the `coq.override overrides`, where ov
* `customOCamlPackage` (optional, defaults to `null`, which lets Coq choose a version automatically), which can be set to any of the ocaml packages attribute of `ocaml-ng` (such as `ocaml-ng.ocamlPackages_4_10` which is the default for Coq 8.11 for example).
* `coq-version` (optional, defaults to the short version e.g. "8.10") is a version number of the form "x.y" that indicates which Coq version's build behavior to mimic when using a source which is not a release. E.g. `coq.override { version = "d370a9d1328a4e1cdb9d02ee032f605a9d94ec7a"; coq-version = "8.10"; }`.
## Coq packages attribute sets: `coqPackages`
## Coq packages attribute sets: `coqPackages` {#coq-packages-attribute-sets-coqpackages}
The recommended way of defining a derivation for a Coq library is to use the `coqPackages.mkCoqDerivation` function, which is essentially a specialization of `mkDerivation` taking into account most of the specifics of Coq libraries. The following attributes are supported:


@ -1,10 +1,11 @@
# Crystal {#crystal}
## Building a Crystal package
## Building a Crystal package {#building-a-crystal-package}
This section uses [Mint](https://github.com/mint-lang/mint) as an example for how to build a Crystal package.
If the Crystal project has any dependencies, the first step is to get a `shards.nix` file encoding those. Get a copy of the project and go to its root directory such that its `shard.lock` file is in the current directory, then run `crystal2nix` in it:
```bash
$ git clone https://github.com/mint-lang/mint
$ cd mint
@ -15,6 +16,7 @@ $ nix-shell -p crystal2nix --run crystal2nix
This should have generated a `shards.nix` file.
Next create a Nix file for your derivation and use `pkgs.crystal.buildCrystalPackage` as follows:
```nix
with import <nixpkgs> {};
crystal.buildCrystalPackage rec {


@ -1,6 +1,6 @@
# Dotnet
# Dotnet {#dotnet}
## Local Development Workflow
## Local Development Workflow {#local-development-workflow}
For local development, it's recommended to use nix-shell to create a dotnet environment:
@ -16,7 +16,7 @@ mkShell {
}
```
### Using many sdks in a workflow
### Using many sdks in a workflow {#using-many-sdks-in-a-workflow}
It's very likely that more than one sdk will be needed on a given project. Dotnet provides several different frameworks (e.g. dotnetcore, aspnetcore, etc.) as well as many versions for a given framework. Normally, dotnet is able to fetch a framework and install it relative to the executable. However, this would mean writing to the nix store in nixpkgs, which is read-only. To support the many-sdk use case, one can compose an environment using `dotnetCorePackages.combinePackages`:
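For example, a `shell.nix` along these lines (the exact SDK attribute names are assumptions):
```nix
{ pkgs ? import <nixpkgs> { } }:

pkgs.mkShell {
  buildInputs = [
    # Combine several SDKs into one dotnet installation.
    (with pkgs.dotnetCorePackages; combinePackages [ sdk_3_1 sdk_2_1 ])
  ];
}
```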
@ -37,7 +37,7 @@ mkShell {
This will produce a dotnet installation that has the dotnet 3.1, 3.0, and 2.1 sdk. The first sdk listed will have its CLI utility present in the resulting environment. Example info output:
```ShellSesssion
```ShellSession
$ dotnet --info
.NET Core SDK (reflecting any global.json):
Version: 3.1.101
@ -60,15 +60,15 @@ $ dotnet --info
Microsoft.NETCore.App 3.1.1 [/nix/store/iiv98i2jdi226dgh4jzkkj2ww7f8jgpd-dotnet-core-combined/shared/Microsoft.NETCore.App]
```
## dotnet-sdk vs dotnetCorePackages.sdk
## dotnet-sdk vs dotnetCorePackages.sdk {#dotnet-sdk-vs-dotnetcorepackages.sdk}
The `dotnetCorePackages.sdk_X_Y` is preferred over the old dotnet-sdk as both major and minor version are very important for a dotnet environment. If a given minor version isn't present (or was changed), then this will likely break your ability to build a project.
## dotnetCorePackages.sdk vs dotnetCorePackages.net vs dotnetCorePackages.netcore vs dotnetCorePackages.aspnetcore
## dotnetCorePackages.sdk vs dotnetCorePackages.net vs dotnetCorePackages.netcore vs dotnetCorePackages.aspnetcore {#dotnetcorepackages.sdk-vs-dotnetcorepackages.net-vs-dotnetcorepackages.netcore-vs-dotnetcorepackages.aspnetcore}
The `dotnetCorePackages.sdk` contains both a runtime and the full sdk of a given version. The `net`, `netcore` and `aspnetcore` packages are meant to serve as minimal runtimes to deploy alongside already built applications. For runtime versions >= .NET 5, `net` is used while `netcore` is used for older .NET Core runtime versions.
## Packaging a Dotnet Application
## Packaging a Dotnet Application {#packaging-a-dotnet-application}
Ideally, we would like to build against the sdk, then only have the dotnet runtime available in the runtime closure.


@ -27,16 +27,14 @@ Modes of use of `emscripten`:
* dev-shell for zlib implementation hacking:
* `nix-shell -A emscriptenPackages.zlib`
## Imperative usage
## Imperative usage {#imperative-usage}
A few things to note:
* `export EMCC_DEBUG=2` is nice for debugging
* `~/.emscripten`, the build artifact cache, sometimes creates issues and needs to be removed from time to time
## Declarative usage
## Declarative usage {#declarative-usage}
Let's see two different examples from `pkgs/top-level/emscripten-packages.nix`:
@ -50,7 +48,7 @@ A special requirement of the `pkgs.buildEmscriptenPackage` is the `doCheck = tru
* Use `export EMCC_DEBUG=2` from within an emscriptenPackage's `phase` to get more detailed debug output about what is going wrong.
* The `~/.emscripten` cache requires us to set `HOME=$TMPDIR` in individual phases. This makes compilation slower but also makes it more deterministic.
### Usage 1: pkgs.zlib.override
### Usage 1: pkgs.zlib.override {#usage-1-pkgs.zlib.override}
This example uses `zlib` from nixpkgs but instead of compiling **C** to **ELF** it compiles **C** to **JS** since we were using `pkgs.zlib.override` and changed stdenv to `pkgs.emscriptenStdenv`. A few adaptations and hacks were put in place to make it work. One advantage is that when `pkgs.zlib` is updated, it will automatically update this package as well. However, this can also be the downside...
@ -110,7 +108,7 @@ See the `zlib` example:
'';
});
### Usage 2: pkgs.buildEmscriptenPackage
### Usage 2: pkgs.buildEmscriptenPackage {#usage-2-pkgs.buildemscriptenpackage}
This `xmlmirror` example features an emscriptenPackage which is defined completely from this context and no `pkgs.zlib.override` is used.
@ -165,7 +163,7 @@ This `xmlmirror` example features a emscriptenPackage which is defined completel
'';
};
### Declarative debugging
### Declarative debugging {#declarative-debugging}
Use `nix-shell -I nixpkgs=/some/dir/nixpkgs -A emscriptenPackages.libz` and from there you can go through the individual steps. This makes it easy to build a good `unit test` or list the files of the project.
@ -177,7 +175,7 @@ Use `nix-shell -I nixpkgs=/some/dir/nixpkgs -A emscriptenPackages.libz` and from
6. `buildPhase`
7. ... happy hacking...
## Summary
## Summary {#summary}
Using this toolchain makes it easy to leverage `nix` from NixOS, MacOSX or even Windows (WSL+ubuntu+nix). This toolchain is reproducible, behaves like the rest of the packages from nixpkgs and contains a set of well working examples to learn and adapt from.


@ -84,7 +84,7 @@ For convenience, it also adds `dconf.lib` for a GIO module implementing a GSetti
- []{#ssec-gnome-hooks-gobject-introspection} `gobject-introspection` setup hook populates `GI_TYPELIB_PATH` variable with `lib/girepository-1.0` directories of dependencies, which is then added to wrapper by `wrapGAppsHook`. It also adds `share` directories of dependencies to `XDG_DATA_DIRS`, which is intended to promote GIR files but it also [pollutes the closures](https://github.com/NixOS/nixpkgs/issues/32790) of packages using `wrapGAppsHook`.
::: warning
::: {.warning}
The setup hook [currently](https://github.com/NixOS/nixpkgs/issues/56943) does not work in expressions with `strictDeps` enabled, like Python packages. In those cases, you will need to disable it with `strictDeps = false;`.
:::


@ -44,7 +44,7 @@ pet = buildGoModule rec {
The function `buildGoPackage` builds legacy Go programs, not supporting Go modules.
### Example for `buildGoPackage`
### Example for `buildGoPackage` {#example-for-buildgopackage}
The following is an example expression using `buildGoPackage`; these arguments are of special significance to the function:
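A minimal sketch of such an expression (the package name, import path and hash are placeholders, not taken from the manual):
```nix
{ buildGoPackage, fetchFromGitHub }:

buildGoPackage rec {
  pname = "example-tool";
  version = "1.0.0";

  # Import path used to lay out the GOPATH for the build.
  goPackagePath = "github.com/example/example-tool";

  src = fetchFromGitHub {
    owner = "example";
    repo = "example-tool";
    rev = "v${version}";
    sha256 = "0000000000000000000000000000000000000000000000000000";
  };

  # Dependency list generated by a tool such as go2nix.
  goDeps = ./deps.nix;
}
```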
@ -140,4 +140,4 @@ Removes the pre-existing vendor directory. This should only be used if the depen
### `subPackages` {#var-go-subPackages}
Limits the builder from building child packages that have not been listed. If <varname>subPackages</varname> is not specified, all child packages will be built.
Limits the builder from building child packages that have not been listed. If `subPackages` is not specified, all child packages will be built.


@ -1,10 +1,10 @@
# Idris {#idris}
## Installing Idris
## Installing Idris {#installing-idris}
The easiest way to get a working Idris version is to install the `idris` attribute:
```ShellSesssion
```ShellSession
$ # On NixOS
$ nix-env -i nixos.idris
$ # On non-NixOS
@ -21,7 +21,7 @@ self: super: {
And then:
```ShellSesssion
```ShellSession
$ # On NixOS
$ nix-env -iA nixos.myIdris
$ # On non-NixOS
@ -29,7 +29,8 @@ $ nix-env -iA nixpkgs.myIdris
```
To see all available Idris packages:
```ShellSesssion
```ShellSession
$ # On NixOS
$ nix-env -qaPA nixos.idrisPackages
$ # On non-NixOS
@ -37,22 +38,23 @@ $ nix-env -qaPA nixpkgs.idrisPackages
```
Similarly, entering a `nix-shell`:
```ShellSesssion
```ShellSession
$ nix-shell -p 'idrisPackages.with-packages (with idrisPackages; [ contrib pruviloj ])'
```
## Starting Idris with library support
## Starting Idris with library support {#starting-idris-with-library-support}
To have access to these libraries in idris, call it with an argument `-p <library name>` for each library:
```ShellSesssion
```ShellSession
$ nix-shell -p 'idrisPackages.with-packages (with idrisPackages; [ contrib pruviloj ])'
[nix-shell:~]$ idris -p contrib -p pruviloj
```
A listing of all available packages the Idris binary has access to is available via `--listlibs`:
```ShellSesssion
```ShellSession
$ idris --listlibs
00prelude-idx.ibc
pruviloj
@ -64,7 +66,7 @@ prelude
00contrib-idx.ibc
```
## Building an Idris project with Nix
## Building an Idris project with Nix {#building-an-idris-project-with-nix}
As an example of how a Nix expression for an Idris package can be created, here is the one for `idrisPackages.yaml`:
@ -105,7 +107,7 @@ build-idris-package {
Assuming this file is saved as `yaml.nix`, it's buildable using
```ShellSesssion
```ShellSession
$ nix-build -E '(import <nixpkgs> {}).idrisPackages.callPackage ./yaml.nix {}'
```
@ -121,11 +123,11 @@ with import <nixpkgs> {};
in another file (say `default.nix`) to be able to build it with
```ShellSesssion
```ShellSession
$ nix-build -A yaml
```
## Passing options to `idris` commands
## Passing options to `idris` commands {#passing-options-to-idris-commands}
The `build-idris-package` function also provides optional input values to set additional options for the `idris` commands used.


@ -20,8 +20,8 @@ Hydra.
The Xcode build environment implements a number of features.
Deploying a proxy component wrapper exposing Xcode
--------------------------------------------------
## Deploying a proxy component wrapper exposing Xcode {#deploying-a-proxy-component-wrapper-exposing-xcode}
The first use case is deploying a Nix package that provides symlinks to the Xcode
installation on the host system. This package can be used as a build input to
any build function implemented in the Nix expression language that requires
@ -55,8 +55,8 @@ lrwxr-xr-x 1 sander staff 61 1 jan 1970 xcodebuild -> /Applications/Xcode.a
lrwxr-xr-x 1 sander staff 14 1 jan 1970 xcrun -> /usr/bin/xcrun
```
Building an iOS application
---------------------------
## Building an iOS application {#building-an-ios-application}
We can build an iOS app executable for the simulator, or an IPA/xcarchive file
for release purposes, e.g. ad-hoc, enterprise or store installations, by
executing the `xcodeenv.buildApp {}` function:
@ -99,6 +99,7 @@ xcodeenv.buildApp {
```
The above function takes a variety of parameters:
* The `name` and `src` parameters are mandatory and specify the name of the app
and the location where the source code resides
* `sdkVersion` specifies which version of the iOS SDK to use.
@ -151,8 +152,8 @@ the `xcodeenv.composeXcodeWrapper {}` function takes. For example, the
`xcodeBaseDir` parameter can be overridden to refer to a different Xcode
version.
Spawning simulator instances
----------------------------
## Spawning simulator instances {#spawning-simulator-instances}
In addition to building iOS apps, we can also automatically spawn simulator
instances:
@ -213,8 +214,8 @@ xcode.simulateApp {
By providing the result of an `xcode.buildApp {}` function and configuring the
app bundle id, the app gets deployed automatically and started.
Troubleshooting
---------------
## Troubleshooting {#troubleshooting}
In some rare cases, it may happen that after a failure, changes are not picked
up. Most likely, this is caused by a derived data cache that Xcode maintains.
To wipe it you can run:


@ -1,8 +1,8 @@
# User's Guide to Lua Infrastructure {#users-guide-to-lua-infrastructure}
# User’s Guide to Lua Infrastructure {#users-guide-to-lua-infrastructure}
## Using Lua
## Using Lua {#using-lua}
### Overview of Lua
### Overview of Lua {#overview-of-lua}
Several versions of the Lua interpreter are available: luajit, lua 5.1, 5.2, 5.3.
The attribute `lua` refers to the default interpreter, it is also possible to refer to specific versions, e.g. `lua5_2` refers to Lua 5.2.
@ -17,27 +17,31 @@ The main package set contains aliases to these package sets, e.g.
`luaPackages` refers to `lua5_1.pkgs` and `lua52Packages` to
`lua5_2.pkgs`.
### Installing Lua and packages
### Installing Lua and packages {#installing-lua-and-packages}
#### Lua environment defined in separate `.nix` file
#### Lua environment defined in separate `.nix` file {#lua-environment-defined-in-separate-.nix-file}
Create a file, e.g. `build.nix`, with the following expression
```nix
with import <nixpkgs> {};
lua5_2.withPackages (ps: with ps; [ busted luafilesystem ])
```
and install it in your profile with
```shell
nix-env -if build.nix
```
Now you can use the Lua interpreter, as well as the extra packages (`busted`,
`luafilesystem`) that you added to the environment.
#### Lua environment defined in `~/.config/nixpkgs/config.nix`
#### Lua environment defined in `~/.config/nixpkgs/config.nix` {#lua-environment-defined-in-.confignixpkgsconfig.nix}
If you prefer to, you could also add the environment as a package override to the Nixpkgs set, e.g.
using `config.nix`,
```nix
{ # ...
@ -46,14 +50,16 @@ using `config.nix`,
};
}
```
and install it in your profile with
```shell
nix-env -iA nixpkgs.myLuaEnv
```
The environment is installed by referring to the attribute, assuming the
`nixpkgs` channel was used.
#### Lua environment defined in `/etc/nixos/configuration.nix`
#### Lua environment defined in `/etc/nixos/configuration.nix` {#lua-environment-defined-in-etcnixosconfiguration.nix}
For the sake of completeness, here's another example of how to install the environment system-wide.
@ -66,7 +72,7 @@ For the sake of completeness, here's another example how to install the environm
}
```
### How to override a Lua package using overlays?
### How to override a Lua package using overlays? {#how-to-override-a-lua-package-using-overlays}
Use the following overlay template:
@ -87,18 +93,22 @@ final: prev:
}
```
### Temporary Lua environment with `nix-shell`
### Temporary Lua environment with `nix-shell` {#temporary-lua-environment-with-nix-shell}
There are two methods for loading a shell with Lua packages. The first and recommended method
is to create an environment with `lua.buildEnv` or `lua.withPackages` and load that. E.g.
```sh
$ nix-shell -p 'lua.withPackages(ps: with ps; [ busted luafilesystem ])'
```
opens a shell from which you can launch the interpreter
```sh
[nix-shell:~] lua
```
The other method, which is not recommended, does not create an environment and requires you to list the packages directly,
```sh
@ -108,7 +118,7 @@ Again, it is possible to launch the interpreter from the shell.
The Lua interpreter has the attribute `pkgs` which contains all Lua libraries for that specific interpreter.
## Developing with Lua
## Developing with Lua {#developing-with-lua}
Now that you know how to get a working Lua environment with Nix, it is time
to go forward and start actually developing with Lua. There are two ways to
@ -116,7 +126,7 @@ package lua software, either it is on luarocks and most of it can be taken care
of by the luarocks2nix converter or the packaging has to be done manually.
Let's present the luarocks way first and the manual one afterwards.
### Packaging a library on luarocks
### Packaging a library on luarocks {#packaging-a-library-on-luarocks}
[Luarocks.org](https://luarocks.org) is the main repository of lua packages.
The site offers two types of packages: the rockspec and the src.rock
@ -135,10 +145,11 @@ You can try converting luarocks packages to nix packages with the command `nix-s
Nix relies on luarocks to install lua packages; basically it runs:
`luarocks make --deps-mode=none --tree $out`
#### Packaging a library manually
#### Packaging a library manually {#packaging-a-library-manually}
You can develop your package as you usually would; just don't forget to wrap it
within a `toLuaModule` call, for instance
```nix
mynewlib = toLuaModule ( stdenv.mkDerivation { ... });
```
@ -146,16 +157,15 @@ mynewlib = toLuaModule ( stdenv.mkDerivation { ... });
There is also the `buildLuaPackage` function that can be used when lua modules
are not packaged for luarocks. You can see a few examples at `pkgs/top-level/lua-packages.nix`.
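
As an illustration only, here is a sketch of such a manually packaged module, assuming `buildLuaPackage` accepts the usual `mkDerivation` arguments; the library name, URL, and install commands are hypothetical:

```nix
# Hypothetical sketch of a buildLuaPackage call; adapt the name, src and
# install commands to the actual library being packaged.
mylib = buildLuaPackage {
  pname = "mylib";
  version = "1.0";
  src = fetchurl {
    url = "https://example.org/mylib-1.0.tar.gz";
    sha256 = lib.fakeSha256; # replace with the real hash
  };
  # the library is a single Lua file, so install it by hand
  installPhase = ''
    mkdir -p $out/share/lua/${lua.luaversion}
    cp mylib.lua $out/share/lua/${lua.luaversion}/
  '';
};
```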
## Lua Reference
## Lua Reference {#lua-reference}
### Lua interpreters
### Lua interpreters {#lua-interpreters}
Versions 5.1, 5.2 and 5.3 of the lua interpreter are available as
respectively `lua5_1`, `lua5_2` and `lua5_3`. Luajit is available too.
The Nix expressions for the interpreters can be found in `pkgs/development/interpreters/lua-5`.
#### Attributes on lua interpreters packages
#### Attributes on lua interpreters packages {#attributes-on-lua-interpreters-packages}
Each interpreter has the following attributes:
@ -164,8 +174,7 @@ Each interpreter has the following attributes:
- `withPackages`. Simpler interface to `buildEnv`.
- `pkgs`. Set of Lua packages for that specific interpreter. The package set can be modified by overriding the interpreter and passing `packageOverrides`.
#### `buildLuarocksPackage` function
#### `buildLuarocksPackage` function {#buildluarockspackage-function}
The `buildLuarocksPackage` function is implemented in `pkgs/development/interpreters/lua-5/build-lua-package.nix`.
The following is an example:
@ -205,16 +214,17 @@ install the package
By default `meta.platforms` is set to the same value as the interpreter unless overridden otherwise.
#### `buildLuaApplication` function
#### `buildLuaApplication` function {#buildluaapplication-function}
The `buildLuaApplication` function is practically the same as `buildLuaPackage`.
The difference is that `buildLuaPackage` by default prefixes the names of the packages with the version of the interpreter.
Because we are not interested in multiple versions of an application, the prefix is dropped.
#### lua.withPackages function
#### lua.withPackages function {#lua.withpackages-function}
The `lua.withPackages` function takes a function as an argument that is passed the set of lua packages and returns the list of packages to be included in the environment.
Using the `withPackages` function, the previous example for the luafilesystem environment can be written like this:
```nix
with import <nixpkgs> {};
@ -223,6 +233,7 @@ lua.withPackages (ps: [ps.luafilesystem])
`withPackages` passes the correct package set for the specific interpreter version as an argument to the function. In the above example, `ps` equals `luaPackages`.
But you can also easily switch to using `lua5_2`:
```nix
with import <nixpkgs> {};
@ -231,13 +242,12 @@ lua5_2.withPackages (ps: [ps.lua])
Now, `ps` is set to `lua52Packages`, matching the version of the interpreter.
### Possible Todos
### Possible Todos {#possible-todos}
* export/use version specific variables such as `LUA_PATH_5_2`/`LUAROCKS_CONFIG_5_2`
* let luarocks check for dependencies via exporting the different rocktrees in temporary config
### Lua Contributing guidelines
### Lua Contributing guidelines {#lua-contributing-guidelines}
The following rules should be respected:

View file

@ -43,9 +43,9 @@ public class Main {
You can find this demo project at https://github.com/fzakaria/nixos-maven-example.
## Solving for dependencies
## Solving for dependencies {#solving-for-dependencies}
### buildMaven with NixOS/mvn2nix-maven-plugin
### buildMaven with NixOS/mvn2nix-maven-plugin {#buildmaven-with-nixosmvn2nix-maven-plugin}
> ⚠️ Although `buildMaven` is the "blessed" way within nixpkgs, as of 2020, it hasn't seen much activity in quite a while.
@ -82,6 +82,7 @@ This file is then given to the `buildMaven` function, and it returns 2 attribute
A simple derivation that runs through `mvn compile` & `mvn package` to build the JAR. You may use this as inspiration for more complicated derivations.
Here is an [example](https://github.com/fzakaria/nixos-maven-example/blob/main/build-maven-repository.nix) of building the Maven repository
```nix
{ pkgs ? import <nixpkgs> { } }:
with pkgs;
@ -103,7 +104,8 @@ The benefit over the _double invocation_ as we will see below, is that the _/nix
│   └── 4.1.3
│   ├── avalon-framework-4.1.3.jar -> /nix/store/iv5fp3955w3nq28ff9xfz86wvxbiw6n9-avalon-framework-4.1.3.jar
```
### Double Invocation
### Double Invocation {#double-invocation}
> ⚠️ This pattern is the simplest but may cause unnecessary rebuilds due to the output hash changing.
@ -163,7 +165,7 @@ The build will fail, and tell you the expected `outputHash` to place. When you'v
If your package uses _SNAPSHOT_ dependencies or _version ranges_, there is a strong likelihood that over time your output hash will change since the resolved dependencies may change. Hence this method is less recommended than using `buildMaven`.
## Building a JAR
## Building a JAR {#building-a-jar}
Regardless of which strategy is chosen above, the step to build the derivation is the same.
@ -201,7 +203,7 @@ in stdenv.mkDerivation rec {
2 directories, 1 file
```
## Runnable JAR
## Runnable JAR {#runnable-jar}
The previous example builds a `jar` file but that's not a file one can run.
@ -213,7 +215,7 @@ We will use the same repository we built above (either _double invocation_ or _b
The following two methods are better suited to Nix than building an [UberJar](https://imagej.net/Uber-JAR), which may be the more traditional approach.
### CLASSPATH
### CLASSPATH {#classpath}
> This is ideal if you are providing a derivation for _nixpkgs_ and don't want to patch the project's `pom.xml`.
@ -252,11 +254,12 @@ in stdenv.mkDerivation rec {
}
```
### MANIFEST file via Maven Plugin
### MANIFEST file via Maven Plugin {#manifest-file-via-maven-plugin}
> This is ideal if you are the project owner and want to change your `pom.xml` to set the CLASSPATH within it.
Augment the `pom.xml` to create a JAR with the following manifest:
```xml
<build>
<plugins>

View file

@ -1,10 +1,10 @@
# Python {#python}
## User Guide
## User Guide {#user-guide}
### Using Python
### Using Python {#using-python}
#### Overview
#### Overview {#overview}
Several versions of the Python interpreter are available on Nix, as well as a
high amount of packages. The attribute `python3` refers to the default
@ -31,7 +31,7 @@ The main package set contains aliases to these package sets, e.g.
`pythonPackages` refers to `python.pkgs` and `python38Packages` to
`python38.pkgs`.
#### Installing Python and packages
#### Installing Python and packages {#installing-python-and-packages}
The Nix and NixOS manuals explain how packages are generally installed. In the
case of Python and Nix, it is important to make a distinction between whether the
@ -62,7 +62,7 @@ Philosphically, this should be familiar to users who are used to a `venv` style
of development: individual projects create their own Python environments without
impacting the global environment or each other.
#### Ad-hoc temporary Python environment with `nix-shell`
#### Ad-hoc temporary Python environment with `nix-shell` {#ad-hoc-temporary-python-environment-with-nix-shell}
The simplest way to start playing with the way nix wraps and sets up Python
environments is with `nix-shell` at the cmdline. These environments create a
@ -131,7 +131,7 @@ arbitrary dependencies. This is a good way to get a feel for how the Python
interpreter and dependencies work in Nix and NixOS, but to do some actual
development, we'll want to make it a bit more persistent.
##### Running Python scripts and using `nix-shell` as shebang
##### Running Python scripts and using `nix-shell` as shebang {#running-python-scripts-and-using-nix-shell-as-shebang}
Sometimes, we have a script whose header looks like this:
@ -146,7 +146,7 @@ print(f"The dot product of {a} and {b} is: {np.dot(a, b)}")
Executing this script requires a `python3` that has `numpy`. Using what we learned
in the previous section, we could startup a shell and just run it like so:
```ShellSesssion
```ShellSession
$ nix-shell -p 'python38.withPackages(ps: with ps; [ numpy ])' --run 'python3 foo.py'
The dot product of [1 2] and [3 4] is: 11
```
@ -203,7 +203,7 @@ of the package versions.
This is also a great way to ensure the script executes identically on different
servers.
##### Load environment from `.nix` expression
##### Load environment from `.nix` expression {#load-environment-from-.nix-expression}
We've now seen how to create an ad-hoc temporary shell session, and how to
create a single script with Python dependencies, but in the course of normal
@ -262,7 +262,7 @@ and its Python dependencies, but also tools like `black` or `mypy` and libraries
like `libffi` the `openssl` in scope. This is generic and can span any number of
tools or languages across the Nixpkgs ecosystem.
##### Installing environments globally on the system
##### Installing environments globally on the system {#installing-environments-globally-on-the-system}
Up to now, we've been creating environments scoped to an ad-hoc shell session,
or a single script, or a single project. This is generally advisable, as it
@ -315,7 +315,7 @@ If you get a conflict or prefer to keep the setup clean, you can have `nix-env`
atomically *uninstall* all other imperatively installed packages and replace
your profile with just `myEnv` by using the `--replace` flag.
##### Environment defined in `/etc/nixos/configuration.nix`
##### Environment defined in `/etc/nixos/configuration.nix` {#environment-defined-in-etcnixosconfiguration.nix}
For the sake of completeness, here's how to install the environment system-wide
on NixOS.
@ -329,7 +329,7 @@ on NixOS.
}
```
### Developing with Python
### Developing with Python {#developing-with-python}
Above, we were mostly just focused on use cases and what to do to get started
creating working Python environments in nix.
@ -338,7 +338,7 @@ Now that you know the basics to be up and running, it is time to take a step
back and take a deeper look at how Python packages are packaged on Nix. Then,
we will look at how you can use development mode with your code.
#### Python library packages in Nixpkgs
#### Python library packages in Nixpkgs {#python-library-packages-in-nixpkgs}
With Nix all packages are built by functions. The main function in Nix for
building Python libraries is `buildPythonPackage`. Let's see how we can build the
@ -425,7 +425,7 @@ of `withPackages` we used a `let` expression. You can see that we used
`toolz` from the Nixpkgs package set this time, but instead took our own version
that we introduced with the `let` expression.
#### Handling dependencies
#### Handling dependencies {#handling-dependencies}
Our example, `toolz`, does not have any dependencies on other Python packages or
system libraries. According to the manual, `buildPythonPackage` uses the
@ -537,9 +537,10 @@ buildPythonPackage rec {
};
}
```
Note also the line `doCheck = false;`; it explicitly disables running the test suite.
#### Testing Python Packages
#### Testing Python Packages {#testing-python-packages}
It is highly encouraged to have testing as part of the package build. This
helps to avoid situations where the package was able to build and install,
@ -559,10 +560,11 @@ thus can cause issues when a test suite asserts on that behavior.
as many tests should be enabled as possible. Failing tests can still be
a good indication that the package is not in a valid state.
#### Using pytest
#### Using pytest {#using-pytest}
Pytest is the most common test runner for python repositories. A trivial
test run would be:
```
checkInputs = [ pytest ];
checkPhase = "pytest";
@ -572,6 +574,7 @@ However, many repositories' test suites do not translate well to nix's build
sandbox, and will generally need many tests to be disabled.
To filter tests using pytest, one can do the following:
```
checkInputs = [ pytest ];
# avoid tests which need additional data or touch network
@ -587,19 +590,20 @@ easier than having to create a new package.
`-k` is used to define a predicate for test names. In this example, we are
filtering out tests which contain `download` or `update` in their test case name.
Only one `-k` argument is allows, and thus a long predicate should be concatenated
with "\" and wrapped to the next line.
Only one `-k` argument is allowed, and thus a long predicate should be concatenated
with “\\” and wrapped to the next line.
*NOTE:* In pytest==6.0.1, the use of "\" to continue a line (e.g. `-k 'not download \'`) has
*NOTE:* In pytest==6.0.1, the use of “\\” to continue a line (e.g. `-k 'not download \'`) has
been removed, in this case, it's recommended to use `pytestCheckHook`.
#### Using pytestCheckHook
#### Using pytestCheckHook {#using-pytestcheckhook}
`pytestCheckHook` is a convenient hook which will substitute the setuptools
`test` command for a checkPhase which runs `pytest`. This is also beneficial
when a package may need many items disabled to run the test suite.
Using the example above, the analogous `pytestCheckHook` usage would be:
```
checkInputs = [ pytestCheckHook ];
@ -637,7 +641,7 @@ Trying to concatenate the related strings to disable tests in a regular checkPha
would be much harder to read. This also enables us to comment on why specific tests
are disabled.
#### Using pythonImportsCheck
#### Using pythonImportsCheck {#using-pythonimportscheck}
Although unit tests are highly preferred to validate the correctness of a package, not
all packages have test suites that can be run easily, and some have none at all.
@ -659,7 +663,7 @@ However, this is done in it's own phase, and not dependent on whether `doCheck =
This can also be useful in verifying that the package doesn't assume commonly
present packages (e.g. `setuptools`).
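
As a brief sketch, enabling the check only requires listing the modules that must import cleanly; the module name below is illustrative:

```nix
# sketch: fail the build if the installed package cannot be imported
pythonImportsCheck = [ "example" ];
```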
### Develop local package
### Develop local package {#develop-local-package}
As a Python developer you're likely aware of [development mode](http://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode)
(`python setup.py develop`); instead of installing the package this command
@ -694,7 +698,7 @@ buildPythonPackage rec {
It is important to note that due to how development mode is implemented on Nix
it is not possible to have multiple packages simultaneously in development mode.
### Organising your packages
### Organising your packages {#organising-your-packages}
So far we discussed how you can use Python on Nix, and how you can develop with
it. We've looked at how you write expressions to package Python packages, and we
@ -706,7 +710,7 @@ like to be able to use in different projects. In order to minimise unnecessary
duplication we now look at how you can maintain a repository with your
own packages. The important functions here are `import` and `callPackage`.
### Including a derivation using `callPackage`
### Including a derivation using `callPackage` {#including-a-derivation-using-callpackage}
Earlier we created a Python environment using `withPackages`, and included the
`toolz` package via a `let` expression.
@ -756,9 +760,9 @@ don't explicitly define which `python` derivation should be used. In the above
example we use `buildPythonPackage` that is part of the set `python38Packages`,
and in this case the `python38` interpreter is automatically used.
## Reference
## Reference {#reference}
### Interpreters
### Interpreters {#interpreters}
Versions 2.7, 3.6, 3.7, 3.8 and 3.9 of the CPython interpreter are available as
respectively `python27`, `python36`, `python37`, `python38` and `python39`. The
@ -773,11 +777,11 @@ All packages depending on any Python interpreter get appended
`$out/${python.sitePackages}` to `$PYTHONPATH` if such directory
exists.
#### Missing `tkinter` module standard library
#### Missing `tkinter` module standard library {#missing-tkinter-module-standard-library}
To reduce closure size, the `Tkinter`/`tkinter` module is available as a separate package, `pythonPackages.tkinter`.
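
A short sketch of pulling it into an environment (nothing beyond the extra attribute is implied):

```nix
# sketch: tkinter lives in its own attribute and must be added explicitly
python3.withPackages (ps: with ps; [ tkinter ])
```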
#### Attributes on interpreters packages
#### Attributes on interpreters packages {#attributes-on-interpreters-packages}
Each interpreter has the following attributes:
@ -789,7 +793,7 @@ Each interpreter has the following attributes:
- `executable`. Name of the interpreter executable, e.g. `python3.8`.
- `pkgs`. Set of Python packages for that specific interpreter. The package set can be modified by overriding the interpreter and passing `packageOverrides`.
### Optimizations
### Optimizations {#optimizations}
The Python interpreters are by default not built with optimizations enabled, because
such builds are not reproducible. To enable optimizations, override the
@ -806,7 +810,7 @@ let
in mypython
```
### Building packages and applications
### Building packages and applications {#building-packages-and-applications}
Python libraries and applications that use `setuptools` or
`distutils` are typically built with respectively the `buildPythonPackage` and
@ -838,7 +842,7 @@ and the aliases
* `pkgs.python3Packages` pointing to `pkgs.python38Packages`
* `pkgs.pythonPackages` pointing to `pkgs.python2Packages`
#### `buildPythonPackage` function
#### `buildPythonPackage` function {#buildpythonpackage-function}
The `buildPythonPackage` function is implemented in
`pkgs/development/interpreters/python/mk-python-derivation`
@ -890,7 +894,7 @@ e.g. the test runner, should be added to `checkInputs`.
By default `meta.platforms` is set to the same value
as the interpreter unless overridden otherwise.
##### `buildPythonPackage` parameters
##### `buildPythonPackage` parameters {#buildpythonpackage-parameters}
All parameters from `stdenv.mkDerivation` function are still supported. The
following are specific to `buildPythonPackage`:
@ -946,7 +950,7 @@ because their behaviour is different:
`buildPythonPackage` also injects code into and wraps executables with the
paths included in this list. Items listed in `install_requires` go here.
##### Overriding Python packages
##### Overriding Python packages {#overriding-python-packages}
The `buildPythonPackage` function has a `overridePythonAttrs` method that can be
used to override the package. In the following example we create an environment
@ -974,7 +978,7 @@ with import <nixpkgs> {};
in python.withPackages(ps: [ps.blaze])).env
```
#### `buildPythonApplication` function
#### `buildPythonApplication` function {#buildpythonapplication-function}
The `buildPythonApplication` function is practically the same as
`buildPythonPackage`. The main purpose of this function is to build a Python
@ -1019,7 +1023,7 @@ luigi = callPackage ../applications/networking/cluster/luigi { };
Since the package is an application, a consumer doesn't need to care about
Python versions or modules, which is why they don't go in `pythonPackages`.
#### `toPythonApplication` function
#### `toPythonApplication` function {#topythonapplication-function}
A distinction is made between applications and libraries, however, sometimes a
package is used as both. In this case the package is added as a library to
@ -1031,11 +1035,12 @@ The Nix expression shall use `buildPythonPackage` and be called from
`python-packages.nix`. A reference shall be created from `all-packages.nix` to
the attribute in `python-packages.nix`, and the `toPythonApplication` shall be
applied to the reference:
```nix
youtube-dl = with pythonPackages; toPythonApplication youtube-dl;
```
#### `toPythonModule` function
#### `toPythonModule` function {#topythonmodule-function}
In some cases, such as bindings, a package is created using
`stdenv.mkDerivation` and added as attribute in `all-packages.nix`. The Python
@ -1052,7 +1057,7 @@ opencv = toPythonModule (pkgs.opencv.override {
Do pay attention to passing in the right Python version!
#### `python.buildEnv` function
#### `python.buildEnv` function {#python.buildenv-function}
Python environments can be created using the low-level `pkgs.buildEnv` function.
This example shows how to create an environment that has the Pyramid Web Framework.
@ -1090,8 +1095,8 @@ with import <nixpkgs> {};
will drop you into a shell where Python will have the
specified packages in its path.
##### `python.buildEnv` arguments {#python.buildenv-arguments}
##### `python.buildEnv` arguments
* `extraLibs`: List of packages installed inside the environment.
* `postBuild`: Shell command executed after the build of environment.
@ -1099,7 +1104,7 @@ specified packages in its path.
* `permitUserSite`: Skip setting the `PYTHONNOUSERSITE` environment variable in
wrapped binaries in the environment.
#### `python.withPackages` function
#### `python.withPackages` function {#python.withpackages-function}
The `python.withPackages` function provides a simpler interface to the `python.buildEnv` functionality.
It takes a function as an argument that is passed the set of python packages and returns the list
@ -1141,7 +1146,7 @@ need them, you have to use `python.buildEnv`.
Python 2 namespace packages may provide `__init__.py` that collide. In that case
`python.buildEnv` should be used with `ignoreCollisions = true`.
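
A minimal sketch, with placeholder names standing in for the colliding namespace packages:

```nix
# sketch: tolerate colliding __init__.py files from Python 2 namespace packages
python2.buildEnv.override {
  extraLibs = with python2.pkgs; [ some-namespace-pkg-a some-namespace-pkg-b ];
  ignoreCollisions = true;
}
```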
#### Setup hooks
#### Setup hooks {#setup-hooks}
The following are setup hooks specifically for Python packages. Most of these
are used in `buildPythonPackage`.
@ -1166,7 +1171,7 @@ are used in `buildPythonPackage`.
- `wheelUnpackHook` to move a wheel to the correct folder so it can be installed
with the `pipInstallHook`.
### Development mode
### Development mode {#development-mode}
Development or editable mode is supported. To develop Python packages
`buildPythonPackage` has additional logic inside `shellPhase` to run `pip
@ -1175,6 +1180,7 @@ install -e . --prefix $TMPDIR/`for the package.
Warning: `shellPhase` is executed only if `setup.py` exists.
Given a `default.nix`:
```nix
with import <nixpkgs> {};
@ -1197,7 +1203,7 @@ nix-shell -p pythonPackages.pyramid zlib libjpeg git
Note: There is a boolean value `lib.inNixShell` set to `true` if nix-shell is invoked.
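
For instance, here is a sketch (the extra tool is purely illustrative) that pulls in a debugging aid only when the expression is evaluated by `nix-shell`:

```nix
# sketch: add ipython to the shell environment, but not to regular builds
buildInputs = [ pythonPackages.pyramid ]
  ++ lib.optionals lib.inNixShell [ pythonPackages.ipython ];
```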
### Tools
### Tools {#tools}
Packages inside nixpkgs are written by hand. However, many tools exist in the
community to help save time. No tool is preferred at the moment.
@ -1209,7 +1215,7 @@ community to help save time. No tool is preferred at the moment.
- [nixpkgs-pytools](https://github.com/nix-community/nixpkgs-pytools)
- [poetry2nix](https://github.com/nix-community/poetry2nix)
### Deterministic builds
### Deterministic builds {#deterministic-builds}
The Python interpreters are now built deterministically. Minor modifications had
to be made to the interpreters in order to generate deterministic bytecode. This
@ -1221,7 +1227,7 @@ have timestamp 1. The `buildPythonPackage` function sets `DETERMINISTIC_BUILD=1`
and [PYTHONHASHSEED=0](https://docs.python.org/3.8/using/cmdline.html#envvar-PYTHONHASHSEED).
Both are also exported in `nix-shell`.
### Automatic tests
### Automatic tests {#automatic-tests}
It is recommended to test packages as part of the build process.
Source distributions (`sdist`) often include test files, but not always.
@ -1230,7 +1236,7 @@ By default the command `python setup.py test` is run as part of the
`checkPhase`, but often it is necessary to pass a custom `checkPhase`. An
example of such a situation is when `py.test` is used.
#### Common issues
#### Common issues {#common-issues}
* Non-working tests can often be deselected. By default `buildPythonPackage`
runs `python setup.py test`. Most Python modules follow the standard test
@ -1247,18 +1253,19 @@ example of such a situation is when `py.test` is used.
'';
}
```
* Tests that attempt to access `$HOME` can be fixed by using the following
work-around before running tests (e.g. `preCheck`): `export HOME=$(mktemp -d)`
## FAQ
## FAQ {#faq}
### How to solve circular dependencies?
### How to solve circular dependencies? {#how-to-solve-circular-dependencies}
Consider the packages `A` and `B` that depend on each other. When packaging `B`,
a solution is to override package `A` not to depend on `B` as an input. The same
should also be done when packaging `A`.
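
A sketch of what that override can look like when packaging `B`; the attribute names are placeholders, and the same pattern applies in the other direction:

```nix
# sketch: a variant of A with the circular dependency on B removed,
# to be used in B's inputs instead of the regular A
aWithoutB = A.overridePythonAttrs (old: {
  # drop B from A's propagated inputs and skip tests that would need it
  propagatedBuildInputs =
    builtins.filter (p: (p.pname or "") != "B") (old.propagatedBuildInputs or [ ]);
  doCheck = false;
});
```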
### How to override a Python package?
### How to override a Python package? {#how-to-override-a-python-package}
We can override the interpreter and pass `packageOverrides`. In the following
example we rename the `pandas` package and build it.
@ -1316,7 +1323,7 @@ let
in newpkgs.inkscape
```
### `python setup.py bdist_wheel` cannot create .whl
### `python setup.py bdist_wheel` cannot create .whl {#python-setup.py-bdist_wheel-cannot-create-.whl}
Executing `python setup.py bdist_wheel` in a `nix-shell` fails with
```
@ -1349,7 +1356,7 @@ or unset `SOURCE_DATE_EPOCH`:
nix-shell --run "unset SOURCE_DATE_EPOCH; python3 setup.py bdist_wheel"
```
### `install_data` / `data_files` problems
### `install_data` / `data_files` problems {#install_data-data_files-problems}
If you get the following error:
@ -1369,7 +1376,7 @@ ${python.interpreter} setup.py install_data --install-dir=$out --root=$out
sed -i '/ = data\_files/d' setup.py
```
### Rationale of non-existent global site-packages
### Rationale of non-existent global site-packages {#rationale-of-non-existent-global-site-packages}
On most operating systems a global `site-packages` is maintained. This however
becomes problematic if you want to run multiple Python versions or have multiple
@ -1384,7 +1391,7 @@ If you want to create a Python environment for development, then the recommended
method is to use `nix-shell`, either with or without the `python.buildEnv`
function.
### How to consume Python modules using pip in a virtual environment like I am used to on other Operating Systems?
### How to consume Python modules using pip in a virtual environment like I am used to on other Operating Systems? {#how-to-consume-python-modules-using-pip-in-a-virtual-environment-like-i-am-used-to-on-other-operating-systems}
While this approach is not very idiomatic from a Nix perspective, it can still be
useful when dealing with pre-existing projects or in situations where it's not
@ -1497,7 +1504,7 @@ is executed it will attempt to download the Python modules listed in
requirements.txt. However these will be cached locally within the `virtualenv`
folder and not downloaded again.
### How to override a Python package from `configuration.nix`?
### How to override a Python package from `configuration.nix`? {#how-to-override-a-python-package-from-configuration.nix}
If you need to change a package's attribute(s) from `configuration.nix` you could do:
@ -1535,7 +1542,7 @@ this snippet:
}
```
### How to override a Python package using overlays?
### How to override a Python package using overlays? {#how-to-override-a-python-package-using-overlays}
Use the following overlay template:
@ -1556,12 +1563,12 @@ self: super: {
}
```
### How to use Intel's MKL with numpy and scipy?
### How to use Intels MKL with numpy and scipy? {#how-to-use-intels-mkl-with-numpy-and-scipy}
MKL can be configured using an overlay. See the section "[Using overlays to
configure alternatives](#sec-overlays-alternatives-blas-lapack)".
### What inputs do `setup_requires`, `install_requires` and `tests_require` map to?
### What inputs do `setup_requires`, `install_requires` and `tests_require` map to? {#what-inputs-do-setup_requires-install_requires-and-tests_require-map-to}
In a `setup.py` or `setup.cfg` it is common to declare dependencies:
@ -1569,9 +1576,9 @@ In a `setup.py` or `setup.cfg` it is common to declare dependencies:
* `install_requires` corresponds to `propagatedBuildInputs`
* `tests_require` corresponds to `checkInputs` (see the sketch below)
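
A hedged sketch putting the mapping together; the package and its dependencies are only examples:

```nix
# sketch: how setup.py/setup.cfg metadata typically maps onto inputs
buildPythonPackage rec {
  pname = "example";
  version = "1.0";
  src = fetchPypi {
    inherit pname version;
    sha256 = lib.fakeSha256; # replace with the real hash
  };
  nativeBuildInputs = [ setuptools_scm ];   # setup_requires
  propagatedBuildInputs = [ requests ];     # install_requires
  checkInputs = [ pytest ];                 # tests_require
}
```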
## Contributing
## Contributing {#contributing}
### Contributing guidelines
### Contributing guidelines {#contributing-guidelines}
The following rules should be respected:

View file

@ -90,19 +90,21 @@ stdenv.mkDerivation {
}
```
::: note
::: {.note}
`wrapQtAppsHook` ignores files that are non-ELF executables.
This means that scripts won't be automatically wrapped, so you'll need to manually wrap them as previously mentioned.
An example of when you'd always need to do this is with Python applications that use PyQt; a sketch of such a manual wrap follows this note.
:::
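
Here is that sketch; the script name is a placeholder, and `wrapQtApp` accepts the usual `makeWrapper`-style arguments:

```nix
# sketch: wrapQtAppsHook provides wrapQtApp for wrapping non-ELF entry points by hand
preFixup = ''
  wrapQtApp "$out/bin/myscript"
'';
```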
## Adding a library to Nixpkgs
## Adding a library to Nixpkgs {#adding-a-library-to-nixpkgs}
Add Qt libraries to `qt5-packages.nix` to make them available for every
supported Qt version.
### Example adding a Qt library {#qt-library-all-packages-nix}
The following represents the contents of `qt5-packages.nix`.
```nix
{
# ...
@ -126,13 +128,15 @@ stdenv.mkDerivation {
}
```
## Adding an application to Nixpkgs
## Adding an application to Nixpkgs {#adding-an-application-to-nixpkgs}
Add Qt applications to `qt5-packages.nix`. Add an alias to `all-packages.nix`
to select the Qt 5 version used for the application.
### Example adding a Qt application {#qt-application-all-packages-nix}
The following represents the contents of `qt5-packages.nix`.
```nix
{
# ...
@ -144,6 +148,7 @@ The following represents the contents of `qt5-packages.nix`.
```
The following represents the contents of `all-packages.nix`.
```nix
{
# ...

View file

@ -1,6 +1,6 @@
# R {#r}
## Installation
## Installation {#installation}
Define an environment for R that contains all the libraries that you'd like to
use by adding the following snippet to your $HOME/.config/nixpkgs/config.nix file:
@ -31,6 +31,7 @@ output is the name that has to be passed to rWrapper in the code snipped above.
However, if you'd like to add a file to your project source to make the
environment available for other contributors, you can create a `default.nix`
file like so:
```nix
with import <nixpkgs> {};
{
@ -50,7 +51,7 @@ with import <nixpkgs> {};
and then run `nix-shell .` to be dropped into a shell with those packages
available.
## RStudio
## RStudio {#rstudio}
RStudio uses a standard set of packages and ignores any custom R
environments or installed packages you may have. To create a custom
@ -93,7 +94,7 @@ Executing `nix-shell` will then drop you into an environment equivalent to the
one above. If you need additional packages just add them to the list and
re-enter the shell.
## Updating the package set
## Updating the package set {#updating-the-package-set}
```bash
nix-shell generate-shell.nix
@ -113,8 +114,7 @@ mv bioc-experiment-packages.nix.new bioc-experiment-packages.nix
`generate-r-packages.R <repo>` reads `<repo>-packages.nix`, hence the renaming.
## Testing if the Nix-expression could be evaluated
## Testing if the Nix-expression could be evaluated {#testing-if-the-nix-expression-could-be-evaluated}
```bash
nix-build test-evaluation.nix --dry-run

View file

@ -1,6 +1,6 @@
# Ruby {#sec-language-ruby}
## Using Ruby
## Using Ruby {#using-ruby}
Several versions of Ruby interpreters are available on Nix, as well as over 250 gems and many applications written in Ruby. The attribute `ruby` refers to the default Ruby interpreter, which is currently MRI 2.6. It's also possible to refer to specific versions, e.g. `ruby_2_y`, `jruby`, or `mruby`.
@ -12,7 +12,7 @@ The interpreters have common attributes, namely `gems`, and `withPackages`. So y
Since not all gems have executables like `nokogiri`, it's usually more convenient to use the `withPackages` function like this: `ruby.withPackages (p: with p; [ nokogiri ])`. This will also make sure that the Ruby in your environment will be able to find the gem and it can be used in your Ruby code (for example via `ruby` or `irb` executables) via `require "nokogiri"` as usual.
### Temporary Ruby environment with `nix-shell`
### Temporary Ruby environment with `nix-shell` {#temporary-ruby-environment-with-nix-shell}
Rather than having a single Ruby environment shared by all Ruby development projects on a system, Nix allows you to create separate environments per project. `nix-shell` gives you the possibility to temporarily load another environment akin to a combined `chruby` or `rvm` and `bundle exec`.
@ -30,7 +30,7 @@ $ nix-shell -p ruby.gems.nokogiri ruby.gems.pry
Again, it's possible to launch the interpreter from the shell. The Ruby interpreter has the attribute `gems` which contains all Ruby gems for that specific interpreter.
#### Load Ruby environment from `.nix` expression
#### Load Ruby environment from `.nix` expression {#load-ruby-environment-from-.nix-expression}
As explained in the Nix manual, `nix-shell` can also load an expression from a `.nix` file. Say we want to have Ruby 2.6, `nokogiri`, and `pry`. Consider a `shell.nix` file with:
@ -45,7 +45,7 @@ What's happening here?
2. Then we create a Ruby environment with the `withPackages` function.
3. The `withPackages` function expects us to provide a function as an argument that takes the set of all ruby gems and returns a list of packages to include in the environment. Here, we select the packages `nokogiri` and `pry` from the package set.
#### Execute command with `--run`
#### Execute command with `--run` {#execute-command-with---run}
A convenient flag for `nix-shell` is `--run`. It executes a command in the `nix-shell`. We can e.g. directly open a `pry` REPL:
@ -65,7 +65,7 @@ Or run a script using this environment:
$ nix-shell -p "ruby.withPackages (ps: with ps; [ nokogiri pry ])" --run "ruby example.rb"
```
#### Using `nix-shell` as shebang
#### Using `nix-shell` as shebang {#using-nix-shell-as-shebang}
In fact, for the last case, there is a more convenient method. You can add a [shebang](<https://en.wikipedia.org/wiki/Shebang_(Unix)>) to your script specifying which dependencies `nix-shell` needs. With the following shebang, you can just execute `./example.rb`, and it will run with all dependencies.
@ -80,9 +80,9 @@ body = RestClient.get('http://example.com').body
puts Nokogiri::HTML(body).at('h1').text
```
## Developing with Ruby
## Developing with Ruby {#developing-with-ruby}
### Using an existing Gemfile
### Using an existing Gemfile {#using-an-existing-gemfile}
In most cases, you'll already have a `Gemfile.lock` listing all your dependencies. This can be used to generate a `gemset.nix` which is used to fetch the gems and combine them into a single environment. The reason you need a separate file for this is that Nix requires a checksum for each input to your build. Since the `Gemfile.lock` that `bundler` generates doesn't provide us with checksums, we have to first download each gem, calculate its SHA256, and store it in this separate file.
@ -120,7 +120,7 @@ One common issue that you might have is that you have Ruby 2.6, but also `bundle
mkShell { buildInputs = [ gems (lowPrio gems.wrappedRuby) ]; }
```
### Gem-specific configurations and workarounds
### Gem-specific configurations and workarounds {#gem-specific-configurations-and-workarounds}
In some cases, especially if the gem has native extensions, you might need to modify the way the gem is built.
@ -201,7 +201,7 @@ $ nix-shell --run 'ruby -rpg -e "puts PG.library_version"'
Of course for this use-case one could also use overlays since the configuration for `pg` depends on the `postgresql` alias, but for demonstration purposes this has to suffice.
### Adding a gem to the default gemset
### Adding a gem to the default gemset {#adding-a-gem-to-the-default-gemset}
Now that you know how to get a working Ruby environment with Nix, it's time to go forward and start actually developing with Ruby. We will first have a look at how Ruby gems are packaged on Nix. Then, we will look at how you can use development mode with your code.
@ -215,7 +215,7 @@ To test that it works, you can then try using the gem with:
NIX_PATH=nixpkgs=$PWD nix-shell -p "ruby.withPackages (ps: with ps; [ name-of-your-gem ])"
```
### Packaging applications
### Packaging applications {#packaging-applications}
A common task is to add a Ruby executable to nixpkgs; popular examples would be `chef`, `jekyll`, or `sass`. A good way to do that is to use the `bundlerApp` function, which allows you to make a package that only exposes the listed executables; otherwise the package may cause conflicts through common paths like `bin/rake` or `bin/bundler` that aren't meant to be used.
@ -243,7 +243,7 @@ bundlerApp {
All that's left to do is to generate the corresponding `Gemfile.lock` and `gemset.nix` as described above in the `Using an existing Gemfile` section.
#### Packaging executables that require wrapping
#### Packaging executables that require wrapping {#packaging-executables-that-require-wrapping}
Sometimes your app will depend on other executables at runtime, and try to find them through the `PATH` environment variable.

View file

@ -15,7 +15,7 @@ For other versions such as daily builds (beta and nightly),
use either `rustup` from nixpkgs (which will manage the rust installation in your home directory),
or use Mozilla's [Rust nightlies overlay](#using-the-rust-nightlies-overlay).
## Compiling Rust applications with Cargo
## Compiling Rust applications with Cargo {#compiling-rust-applications-with-cargo}
Rust applications are packaged by using the `buildRustPackage` helper from `rustPlatform`:
@ -107,7 +107,7 @@ rustPlatform.buildRustPackage rec {
}
```
### Importing a `Cargo.lock` file
### Importing a `Cargo.lock` file {#importing-a-cargo.lock-file}
Using `cargoSha256` or `cargoHash` is tedious when using
`buildRustPackage` within a project, since it requires that the hash
@ -156,7 +156,7 @@ added. To find the correct hash, you can first use `lib.fakeSha256` or
`lib.fakeHash` as a stub hash. Building the package (and thus the
vendored dependencies) will then inform you of the correct hash.
### Cross compilation
### Cross compilation {#cross-compilation}
By default, Rust packages are compiled for the host platform, just like any
other package is. The `--target` passed to rust tools is computed from this.
@ -168,6 +168,7 @@ where they are known to differ. But there are ways to customize the argument:
name will be used instead.
For example:
```nix
import <nixpkgs> {
crossSystem = (import <nixpkgs/lib>).systems.examples.armhf-embedded // {
@ -175,7 +176,9 @@ where they are known to differ. But there are ways to customize the argument:
};
}
```
will result in:
```shell
--target thumbv7em-none-eabi
```
@ -188,6 +191,7 @@ where they are known to differ. But there are ways to customize the argument:
will be used instead.
For example:
```nix
import <nixpkgs> {
crossSystem = (import <nixpkgs/lib>).systems.examples.armhf-embedded // {
@ -196,7 +200,9 @@ where they are known to differ. But there are ways to customize the argument:
};
}
```
will result in:
```shell
--target /nix/store/asdfasdfsadf-thumb-crazy.json # contains {"foo":"","bar":""}
```
@ -220,7 +226,7 @@ ad-hoc escape hatch to `buildRustPackage` can be removed.
Note that currently custom targets aren't compiled with `std`, so `cargo test`
will fail. This can be ignored by adding `doCheck = false;` to your derivation.
### Running package tests
### Running package tests {#running-package-tests}
When using `buildRustPackage`, the `checkPhase` is enabled by default and runs
`cargo test` on the package to build. To make sure that we don't compile the
@ -248,7 +254,7 @@ Another attribute, called `checkFlags`, is used to pass arguments to the test
binary itself, as stated
[here](https://doc.rust-lang.org/cargo/commands/cargo-test.html).
#### Tests relying on the structure of the `target/` directory
#### Tests relying on the structure of the `target/` directory {#tests-relying-on-the-structure-of-the-target-directory}
Some tests may rely on the structure of the `target/` directory. Those tests
are likely to fail because we use `cargo --target` during the build. This means that
@ -258,7 +264,7 @@ rather than in `target/release/`.
This can only be worked around by patching the affected tests accordingly.
#### Disabling package-tests
#### Disabling package-tests {#disabling-package-tests}
In some instances, it may be necessary to disable testing altogether (with `doCheck = false;`):
@ -272,7 +278,7 @@ The above are just guidelines, and exceptions may be granted on a case-by-case b
However, please check if it's possible to disable a problematic subset of the
test suite and leave a comment explaining your reasoning.
#### Setting `test-threads`
#### Setting `test-threads` {#setting-test-threads}
`buildRustPackage` will use parallel test threads by default,
sometimes it may be necessary to disable this so the tests run consecutively.
@ -284,7 +290,7 @@ rustPlatform.buildRustPackage {
}
```
### Building a package in `debug` mode
### Building a package in `debug` mode {#building-a-package-in-debug-mode}
By default, `buildRustPackage` will use `release` mode for builds. If a package
should be built in `debug` mode, it can be configured like so:
@ -298,14 +304,14 @@ rustPlatform.buildRustPackage {
In this scenario, the `checkPhase` will be run in `debug` mode as well.
### Custom `build`/`install`-procedures
### Custom `build`/`install`-procedures {#custom-buildinstall-procedures}
Some packages may use custom scripts for building/installing, e.g. with a `Makefile`.
In these cases, it's recommended to override the `buildPhase`/`installPhase`/`checkPhase`.
Otherwise, some steps may fail because of the modified directory structure of `target/`.
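
A minimal sketch of overriding the phases for a hypothetical Makefile-driven project:

```nix
# sketch: bypass the default cargo build/install steps and drive the Makefile instead
rustPlatform.buildRustPackage rec {
  pname = "example";
  version = "1.0";
  src = ./.;
  cargoLock.lockFile = ./Cargo.lock;

  buildPhase = ''
    runHook preBuild
    make
    runHook postBuild
  '';

  installPhase = ''
    runHook preInstall
    make PREFIX=$out install
    runHook postInstall
  '';
}
```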
### Building a crate with an absent or out-of-date Cargo.lock file
### Building a crate with an absent or out-of-date Cargo.lock file {#building-a-crate-with-an-absent-or-out-of-date-cargo.lock-file}
`buildRustPackage` needs a `Cargo.lock` file to get all dependencies in the
source code in a reproducible way. If it is missing or out-of-date one can use
@ -321,13 +327,13 @@ rustPlatform.buildRustPackage rec {
}
```
## Compiling non-Rust packages that include Rust code
## Compiling non-Rust packages that include Rust code {#compiling-non-rust-packages-that-include-rust-code}
Several non-Rust packages incorporate Rust code for performance- or
security-sensitive parts. `rustPlatform` exposes several functions and
hooks that can be used to integrate Cargo in non-Rust packages.
### Vendoring of dependencies
### Vendoring of dependencies {#vendoring-of-dependencies}
Since network access is not allowed in sandboxed builds, Rust crate
dependencies need to be retrieved using a fetcher. `rustPlatform`
@ -387,7 +393,7 @@ added. To find the correct hash, you can first use `lib.fakeSha256` or
`lib.fakeHash` as a stub hash. Building `cargoDeps` will then inform
you of the correct hash.
### Hooks
### Hooks {#hooks}
`rustPlatform` provides the following hooks to automate Cargo builds:
@ -416,9 +422,9 @@ you of the correct hash.
* `cargoInstallHook`: install binaries and static/shared libraries
that were built using `cargoBuildHook`.
### Examples
### Examples {#examples}
#### Python package using `setuptools-rust`
#### Python package using `setuptools-rust` {#python-package-using-setuptools-rust}
For Python packages using `setuptools-rust`, you can use
`fetchCargoTarball` and `cargoSetupHook` to retrieve and set up Cargo
@ -504,7 +510,7 @@ buildPythonPackage rec {
}
```
#### Python package using `maturin`
#### Python package using `maturin` {#python-package-using-maturin}
Python packages that use [Maturin](https://github.com/PyO3/maturin)
can be built with `fetchCargoTarball`, `cargoSetupHook`, and
@ -545,9 +551,9 @@ buildPythonPackage rec {
}
```
## Compiling Rust crates using Nix instead of Cargo
## Compiling Rust crates using Nix instead of Cargo {#compiling-rust-crates-using-nix-instead-of-cargo}
### Simple operation
### Simple operation {#simple-operation}
When run, `cargo build` produces a file called `Cargo.lock`,
containing pinned versions of all dependencies. Nixpkgs contains a
@ -558,14 +564,15 @@ That Nix expression calls `rustc` directly (hence bypassing Cargo),
and can be used to compile a crate and all its dependencies. Here is
an example for a minimal `hello` crate:
$ cargo new hello
$ cd hello
$ cargo build
```ShellSession
$ cargo new hello
$ cd hello
$ cargo build
Compiling hello v0.1.0 (file:///tmp/hello)
Finished dev [unoptimized + debuginfo] target(s) in 0.20 secs
$ carnix -o hello.nix --src ./. Cargo.lock --standalone
$ nix-build hello.nix -A hello_0_1_0
Finished dev [unoptimized + debuginfo] target(s) in 0.20 secs
$ carnix -o hello.nix --src ./. Cargo.lock --standalone
$ nix-build hello.nix -A hello_0_1_0
```
Now, the file produced by the call to `carnix`, called `hello.nix`, looks like:
@ -644,7 +651,7 @@ Here, the `libc` crate has no `src` attribute, so `buildRustCrate`
will fetch it from [crates.io](https://crates.io). A `sha256`
attribute is still needed for Nix purity.
### Handling external dependencies
### Handling external dependencies {#handling-external-dependencies}
Some crates require external libraries. For crates from
[crates.io](https://crates.io), such libraries can be specified in
@ -703,7 +710,7 @@ with import <nixpkgs> {};
}
```
### Options and phases configuration
### Options and phases configuration {#options-and-phases-configuration}
Actually, the overrides introduced in the previous section are more
general. A number of other parameters can be overridden:
@ -750,7 +757,7 @@ general. A number of other parameters can be overridden:
};
```
### Features
### Features {#features}
One can also supply feature switches. For example, if we want to
compile `diesel_cli` only with the `postgres` feature, and no default
@ -765,14 +772,15 @@ features, we would write:
Where `diesel.nix` is the file generated by Carnix, as explained above.
## Setting Up `nix-shell` {#setting-up-nix-shell}
## Setting Up `nix-shell`
Oftentimes you want to develop code from within `nix-shell`. Unfortunately
`buildRustCrate` does not support common `nix-shell` operations directly
(see [this issue](https://github.com/NixOS/nixpkgs/issues/37945))
so we will use `stdenv.mkDerivation` instead.
Using the example `hello` project above, we want to do the following:
- Have access to `cargo` and `rustc`
- Have the `openssl` library available to a crate through its _normal_
compilation mechanism (`pkg-config`).
@ -801,13 +809,15 @@ stdenv.mkDerivation {
```
You should now be able to run the following:
```ShellSesssion
```ShellSession
$ nix-shell --pure
$ cargo build
$ cargo test
```
### Controlling Rust Version Inside `nix-shell`
### Controlling Rust Version Inside `nix-shell` {#controlling-rust-version-inside-nix-shell}
To control your rust version (i.e. use nightly) from within `shell.nix` (or
other nix expressions) you can use the following `shell.nix`
@ -839,6 +849,7 @@ stdenv.mkDerivation {
```
Now run:
```ShellSession
$ rustc --version
rustc 1.26.0-nightly (188e693b3 2018-03-26)
@ -846,31 +857,32 @@ rustc 1.26.0-nightly (188e693b3 2018-03-26)
to see that you are using nightly.
## Using the Rust nightlies overlay
## Using the Rust nightlies overlay {#using-the-rust-nightlies-overlay}
Mozilla provides an overlay for nixpkgs to bring a nightly version of Rust into scope.
This overlay can _also_ be used to install recent unstable or stable versions
of Rust, if desired.
### Rust overlay installation
### Rust overlay installation {#rust-overlay-installation}
You can use this overlay by either changing your local nixpkgs configuration,
or by adding the overlay declaratively in a nix expression, e.g. in `configuration.nix`.
For more information see [#sec-overlays-install](the manual on installing overlays).
For more information see [the manual on installing overlays](#sec-overlays-install).
#### Imperative rust overlay installation
#### Imperative rust overlay installation {#imperative-rust-overlay-installation}
Clone [nixpkgs-mozilla](https://github.com/mozilla/nixpkgs-mozilla),
and create a symbolic link to the file
[rust-overlay.nix](https://github.com/mozilla/nixpkgs-mozilla/blob/master/rust-overlay.nix)
in the `~/.config/nixpkgs/overlays` directory.
$ git clone https://github.com/mozilla/nixpkgs-mozilla.git
$ mkdir -p ~/.config/nixpkgs/overlays
$ ln -s $(pwd)/nixpkgs-mozilla/rust-overlay.nix ~/.config/nixpkgs/overlays/rust-overlay.nix
```ShellSession
$ git clone https://github.com/mozilla/nixpkgs-mozilla.git
$ mkdir -p ~/.config/nixpkgs/overlays
$ ln -s $(pwd)/nixpkgs-mozilla/rust-overlay.nix ~/.config/nixpkgs/overlays/rust-overlay.nix
```
### Declarative rust overlay installation
### Declarative rust overlay installation {#declarative-rust-overlay-installation}
Add the following to your `configuration.nix`, `home-configuration.nix`, `shell.nix`, or similar:
@ -886,7 +898,7 @@ Add the following to your `configuration.nix`, `home-configuration.nix`, `shell.
Note that this will fetch the latest overlay version when rebuilding your system.
### Rust overlay usage
### Rust overlay usage {#rust-overlay-usage}
The overlay contains attribute sets corresponding to different versions of the rust toolchain, such as:
@ -900,11 +912,15 @@ For example, you might want to add `latest.rustChannels.stable.rust` to the list
Imperatively, the latest stable version can be installed with the following command:
$ nix-env -Ai nixpkgs.latest.rustChannels.stable.rust
```ShellSession
$ nix-env -Ai nixpkgs.latest.rustChannels.stable.rust
```
Or using the attribute with nix-shell:
$ nix-shell -p nixpkgs.latest.rustChannels.stable.rust
```ShellSession
$ nix-shell -p nixpkgs.latest.rustChannels.stable.rust
```
Substitute the `nixpkgs` prefix with `nixos` on NixOS.
To install the beta or nightly channel, "stable" should be substituted by

View file

@ -5,6 +5,7 @@ Since release 15.09 there is a new TeX Live packaging that lives entirely under
## User's guide {#sec-language-texlive-user-guide}
- For basic usage just pull `texlive.combined.scheme-basic` for an environment with basic LaTeX support.
- It typically won't work to use separately installed packages together. Instead, you can build a custom set of packages like this:
```nix
@ -14,6 +15,7 @@ Since release 15.09 there is a new TeX Live packaging that lives entirely under
```
- There are all the schemes, collections and a few thousand packages, as defined upstream (perhaps with tiny differences).
- By default you only get executables and files needed during runtime, and a little documentation for the core packages. To change that, you need to add a `pkgFilter` function to `combine`.
```nix

View file

@ -9,8 +9,8 @@ applications for Android and iOS devices from source code.
Not all Titanium features are supported -- currently, it can only be used to build
Android and iOS apps.
Building a Titanium app
-----------------------
## Building a Titanium app {#building-a-titanium-app}
We can build a Titanium app from source for Android or iOS and for debugging or
release purposes by invoking the `titaniumenv.buildApp {}` function:
@ -103,8 +103,8 @@ When `enableWirelessDistribution` has been enabled, you must also provide the
path of the PHP script (`installURL`) (that is included with the iOS build
environment) to enable wireless ad-hoc installations.
Emulating or simulating the app
-------------------------------
## Emulating or simulating the app {#emulating-or-simulating-the-app}
It is also possible to simulate the corresponding iOS simulator build by using
`xcodeenv.simulateApp {}` and emulate an Android APK by using
`androidenv.emulateApp {}`.

View file

@ -12,7 +12,7 @@ At the moment we support three different methods for managing plugins:
- Pathogen
- vim-plug
## Custom configuration
## Custom configuration {#custom-configuration}
Adding custom .vimrc lines can be done using the following code:
@ -56,7 +56,7 @@ neovim-qt.override {
}
```
## Managing plugins with Vim packages
## Managing plugins with Vim packages {#managing-plugins-with-vim-packages}
To store your plugins in Vim packages (the native Vim plugin manager, see `:help packages`) the following example can be used:
@ -116,7 +116,7 @@ The resulting package can be added to `packageOverrides` in `~/.nixpkgs/config.n
After that you can install your special grafted `myVim` or `myNeovim` packages.
### What if your favourite Vim plugin isn't already packaged?
### What if your favourite Vim plugin isnt already packaged? {#what-if-your-favourite-vim-plugin-isnt-already-packaged}
If one of your favourite plugins isn't packaged, you can package it yourself:
@ -154,7 +154,7 @@ in
}
```
## Managing plugins with vim-plug
## Managing plugins with vim-plug {#managing-plugins-with-vim-plug}
To use [vim-plug](https://github.com/junegunn/vim-plug) to manage your Vim
plugins the following example can be used:
@ -183,14 +183,14 @@ neovim.override {
}
```
## Managing plugins with VAM
## Managing plugins with VAM {#managing-plugins-with-vam}
### Handling dependencies of Vim plugins
### Handling dependencies of Vim plugins {#handling-dependencies-of-vim-plugins}
VAM introduced `.json` files supporting dependencies without versioning,
assuming that "using the latest version" is OK most of the time.
### Example
### Example {#example}
First create a vim-scripts file containing one plugin name per line. Example:
@ -280,7 +280,7 @@ Sample output2:
]
```
## Adding new plugins to nixpkgs
## Adding new plugins to nixpkgs {#adding-new-plugins-to-nixpkgs}
Nix expressions for Vim plugins are stored in [pkgs/misc/vim-plugins](/pkgs/misc/vim-plugins). For the vast majority of plugins, Nix expressions are automatically generated by running [`./update.py`](/pkgs/misc/vim-plugins/update.py). This creates a [generated.nix](/pkgs/misc/vim-plugins/generated.nix) file based on the plugins listed in [vim-plugin-names](/pkgs/misc/vim-plugins/vim-plugin-names). Plugins are listed in alphabetical order in `vim-plugin-names` using the format `[github username]/[repository]`. For example https://github.com/scrooloose/nerdtree becomes `scrooloose/nerdtree`.
@ -298,7 +298,7 @@ To add a new plugin, run `./update.py --add "[owner]/[name]"`. **NOTE**: This sc
Finally, there are some plugins that are also packaged in nodePackages because they have Javascript-related build steps, such as running webpack. Those plugins are not listed in `vim-plugin-names` or managed by `update.py` at all, and are included separately in `overrides.nix`. Currently, all these plugins are related to the `coc.nvim` ecosystem of Language Server Protocol integration with vim/neovim.
## Updating plugins in nixpkgs
## Updating plugins in nixpkgs {#updating-plugins-in-nixpkgs}
Run the update script with a GitHub API token that has at least `public_repo` access. Running the script without the token is likely to result in rate-limiting (429 errors). For steps on creating an API token, please refer to [GitHub's token documentation](https://docs.github.com/en/free-pro-team@latest/github/authenticating-to-github/creating-a-personal-access-token).
@ -312,7 +312,7 @@ Alternatively, set the number of processes to a lower count to avoid rate-limiti
./pkgs/misc/vim-plugins/update.py --proc 1
```
## Important repositories
## Important repositories {#important-repositories}
- [vim-pi](https://bitbucket.org/vimcommunity/vim-pi) is a plugin repository
from VAM plugin manager meant to be used by others as well used by

View file

@ -12,7 +12,7 @@ Nixpkgs. If you like to learn more about the Nix package manager and the Nix
expression language, then you are kindly referred to the [Nix manual](https://nixos.org/nix/manual/).
The NixOS distribution is documented in the [NixOS manual](https://nixos.org/nixos/manual/).
## Overview of Nixpkgs
## Overview of Nixpkgs {#overview-of-nixpkgs}
Nix expressions describe how to build packages from source and are collected in
the [nixpkgs repository](https://github.com/NixOS/nixpkgs). Also included in the

View file

@ -6,7 +6,6 @@
This chapter will be organized in three parts. First, it will describe the basics of how to package software in a way that supports cross-compilation. Second, it will describe how to use Nixpkgs when cross-compiling. Third, it will describe the internal infrastructure supporting cross-compilation.
## Packaging in a cross-friendly manner {#sec-cross-packaging}
### Platform parameters {#ssec-cross-platform-parameters}
@ -65,8 +64,8 @@ The exact schema these fields follow is a bit ill-defined due to a long and conv
### Theory of dependency categorization {#ssec-cross-dependency-categorization}
::: note
This is a rather philosophical description that isn't very Nixpkgs-specific. For an overview of all the relevant attributes given to `mkDerivation`, see <xref linkend="ssec-stdenv-dependencies"/>. For a description of how everything is implemented, see <xref linkend="ssec-cross-dependency-implementation"/>.
::: {.note}
This is a rather philosophical description that isn't very Nixpkgs-specific. For an overview of all the relevant attributes given to `mkDerivation`, see [](#ssec-stdenv-dependencies). For a description of how everything is implemented, see [](#ssec-cross-dependency-implementation).
:::
In this section we explore the relationship between both runtime and build-time dependencies and the 3 Autoconf platforms.
@ -81,10 +80,10 @@ Finally, if the depending package is a compiler or other machine-code-producing
Putting this all together, that means we have dependencies in the form "host → target", in at most the following six combinations:
#### Possible dependency types {#possible-dependency-types}
#### Possible dependency types
| Dependency's host platform | Dependency's target platform |
| -- | -- |
| Dependencys host platform | Dependencys target platform |
|----------------------------|------------------------------|
| build | build |
| build | host |
| build | target |
@ -113,15 +112,18 @@ On less powerful machines, it can be inconvenient to cross-compile a package onl
$ nix-build '<nixpkgs>' -A pkgsCross.raspberryPi.hello
```
#### What if my package's build system needs to build a C program to be run under the build environment? {#cross-qa-build-c-program-in-build-environment}
#### What if my packages build system needs to build a C program to be run under the build environment? {#cross-qa-build-c-program-in-build-environment}
Add the following to your `mkDerivation` invocation.
```nix
depsBuildBuild = [ buildPackages.stdenv.cc ];
```
#### My package's testsuite needs to run host platform code. {#cross-testsuite-runs-host-code}
#### My packages testsuite needs to run host platform code. {#cross-testsuite-runs-host-code}
Add the following to your `mkDerivation` invocation.
```nix
doCheck = stdenv.hostPlatform == stdenv.buildPlatform;
```
@ -134,7 +136,7 @@ Nixpkgs can be instantiated with `localSystem` alone, in which case there is no
$ nix-build '<nixpkgs>' --arg crossSystem '(import <nixpkgs/lib>).systems.examples.fooBarBaz' -A whatever
```
::: note
::: {.note}
Eventually we would like to make these platform examples an unnecessary convenience so that
```ShellSession
@ -146,7 +148,7 @@ works in the vast majority of cases. The problem today is dependencies on other
While one is free to pass both parameters in full, there's a lot of logic to fill in missing fields. As discussed in the previous section, only one of `system`, `config`, and `parsed` is needed to infer the other two. Additionally, `libc` will be inferred from `parse`. Finally, `localSystem.system` is also _impurely_ inferred based on the platform on which evaluation occurs. This means it is often not necessary to pass `localSystem` at all, as in the command-line example in the previous paragraph.
::: note
::: {.note}
Many sources (manual, wiki, etc) probably mention passing `system`, `platform`, along with the optional `crossSystem` to Nixpkgs: `import <nixpkgs> { system = ..; platform = ..; crossSystem = ..; }`. Passing those two instead of `localSystem` is still supported for compatibility, but is discouraged. Indeed, much of the inference we do for these parameters is motivated by compatibility as much as convenience.
:::
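As an illustration of that inference, a minimal sketch (the target triple is only an example; everything else is filled in automatically):
```nix
# Only crossSystem is given; localSystem.system is inferred impurely from the
# evaluating machine, and the rest of crossSystem is elaborated from the config string.
(import <nixpkgs> {
  crossSystem = { config = "aarch64-unknown-linux-gnu"; };
}).hello
```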
@ -156,7 +158,7 @@ One would think that `localSystem` and `crossSystem` overlap horribly with the t
### Implementation of dependencies {#ssec-cross-dependency-implementation}
The categories of dependencies developed in <xref linkend="ssec-cross-dependency-categorization"/> are specified as lists of derivations given to `mkDerivation`, as documented in <xref linkend="ssec-stdenv-dependencies"/>. In short, each list of dependencies for "host → target" of "foo → bar" is called `depsFooBar`, with exceptions for backwards compatibility that `depsBuildHost` is instead called `nativeBuildInputs` and `depsHostTarget` is instead called `buildInputs`. Nixpkgs is now structured so that each `depsFooBar` is automatically taken from `pkgsFooBar`. (These `pkgsFooBar`s are quite new, so there is no special case for `nativeBuildInputs` and `buildInputs`.) For example, `pkgsBuildHost.gcc` should be used at build-time, while `pkgsHostTarget.gcc` should be used at run-time.
The categories of dependencies developed in [](#ssec-cross-dependency-categorization) are specified as lists of derivations given to `mkDerivation`, as documented in [](#ssec-stdenv-dependencies). In short, each list of dependencies for "host → target" of "foo → bar" is called `depsFooBar`, with exceptions for backwards compatibility that `depsBuildHost` is instead called `nativeBuildInputs` and `depsHostTarget` is instead called `buildInputs`. Nixpkgs is now structured so that each `depsFooBar` is automatically taken from `pkgsFooBar`. (These `pkgsFooBar`s are quite new, so there is no special case for `nativeBuildInputs` and `buildInputs`.) For example, `pkgsBuildHost.gcc` should be used at build-time, while `pkgsHostTarget.gcc` should be used at run-time.
Now, for most of Nixpkgs's history, there were no `pkgsFooBar` attributes, and most packages have not been refactored to use it explicitly. Prior to those, there were just `buildPackages`, `pkgs`, and `targetPackages`. Those are now redefined as aliases to `pkgsBuildHost`, `pkgsHostTarget`, and `pkgsTargetTarget`. It is acceptable, even recommended, to use them for libraries to show that the host platform is irrelevant.
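As a hedged sketch of the naming scheme above (the concrete packages are placeholders assumed to be in scope):
```nix
stdenv.mkDerivation {
  pname = "example";
  version = "0.1.0";
  src = ./.;
  # depsBuildHost under its compatibility alias: tools that run on the build
  # platform and produce output for the host platform.
  nativeBuildInputs = [ pkg-config ];
  # depsHostTarget under its compatibility alias: libraries linked into the host output.
  buildInputs = [ zlib ];
}
```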
@ -178,7 +180,7 @@ While there are many package sets, and thus many edges, the stages can also be a
In each stage, `pkgsBuildHost` refers to the previous stage, `pkgsBuildBuild` refers to the one before that, `pkgsHostTarget` refers to the current one, and `pkgsTargetTarget` refers to the next one. When there is no previous or next stage, they instead refer to the current stage. Note how all the invariants regarding the mapping between dependency and depending packages’ build host and target platforms are preserved. `pkgsBuildTarget` and `pkgsHostHost` are more complex in that the stage fitting the requirements isn’t always a fixed chain of "prevs" and "nexts" away (modulo the "saturating" self-references at the ends). We just special-case each instead. All the primary edges are implemented in `pkgs/stdenv/booter.nix`, and the secondary aliases in `pkgs/top-level/stage.nix`.
::: note
::: {.note}
The native stages are bootstrapped in legacy ways that predate the current cross implementation. This is why the bootstrapping stages leading up to the final stages are ignored in the previous paragraph.
:::
@ -186,6 +188,7 @@ If one looks at the 3 platform triples, one can see that they overlap such that
```
(native, native, native, foreign, foreign)
```
If one imagines the saturating self references at the end being replaced with infinite stages, and then overlays those platform triples, one ends up with the infinite tuple:
```
(native..., native, native, native, foreign, foreign, foreign...)
@ -193,8 +196,8 @@ If one imagines the saturating self references at the end being replaced with in
One can then imagine any sequence of platforms such that there are bootstrap stages with their 3 platforms determined by "sliding a window" that is the 3 tuple through the sequence. This was the original model for bootstrapping. Without a target platform (assume a better world where all compilers are multi-target and all standard libraries are built in their own derivation), this is sufficient. Conversely if one wishes to cross compile "faster", with a "Canadian Cross" bootstrapping stage where `build != host != target`, more bootstrapping stages are needed since no sliding window provides the pesky `pkgsBuildTarget` package set since it skips the Canadian cross stage's "host".
::: note
It is much better to refer to `buildPackages` than `targetPackages`, or more broadly package sets that do not mention "target". There are three reasons for this.
::: {.note}
It is much better to refer to `buildPackages` than `targetPackages`, or more broadly package sets that do not mention “target”. There are three reasons for this.
First, it is because bootstrapping stages do not have a unique `targetPackages`. For example a `(x86-linux, x86-linux, arm-linux)` and `(x86-linux, x86-linux, x86-windows)` package set both have a `(x86-linux, x86-linux, x86-linux)` package set. Because there is no canonical `targetPackages` for such a native (`build == host == target`) package set, we set their `targetPackages`
@ -203,6 +206,6 @@ Second, it is because this is a frequent source of hard-to-follow "infinite recu
Thirdly, it is because everything target-mentioning only exists to accommodate compilers with lousy build systems that insist on the compiler itself and standard library being built together. Of course that is bad because bigger derivations means longer rebuilds. It is also problematic because it tends to make the standard libraries less like other libraries than they could be, complicating code and build systems alike. Because of the other problems, and because of these innate disadvantages, compilers ought to be packaged another way where possible.
:::
::: note
If one explores Nixpkgs, they will see derivations with names like `gccCross`. Such `*Cross` derivations is a holdover from before we properly distinguished between the host and target platforms—the derivation with "Cross" in the name covered the `build = host != target` case, while the other covered the `host = target`, with build platform the same or not based on whether one was using its `.nativeDrv` or `.crossDrv`. This ugliness will disappear soon.
::: {.note}
If one explores Nixpkgs, they will see derivations with names like `gccCross`. Such `*Cross` derivations is a holdover from before we properly distinguished between the host and target platforms—the derivation with “Cross” in the name covered the `build = host != target` case, while the other covered the `host = target`, with build platform the same or not based on whether one was using its `.nativeDrv` or `.crossDrv`. This ugliness will disappear soon.
:::


@ -130,7 +130,7 @@ Attribute Set `lib.platforms` defines [various common lists](https://github.com/
### `tests` {#var-meta-tests}
::: warning
::: {.warning}
This attribute is special in that it is not actually under the `meta` attribute set but rather under the `passthru` attribute set. This is due to how `meta` attributes work, and the fact that they are supposed to contain only metadata, not derivations.
:::
@ -175,20 +175,20 @@ The `meta.license` attribute should preferably contain a value from `lib.license
Although it’s typically better to indicate the specific license, a few generic options are available:
### `lib.licenses.free`, `"free"`
### `lib.licenses.free`, `"free"` {#lib.licenses.free-free}
Catch-all for free software licenses not listed above.
### `lib.licenses.unfreeRedistributable`, `"unfree-redistributable"`
### `lib.licenses.unfreeRedistributable`, `"unfree-redistributable"` {#lib.licenses.unfreeredistributable-unfree-redistributable}
Unfree package that can be redistributed in binary form. That is, its legal to redistribute the *output* of the derivation. This means that the package can be included in the Nixpkgs channel.
Sometimes proprietary software can only be redistributed unmodified. Make sure the builder doesnt actually modify the original binaries; otherwise were breaking the license. For instance, the NVIDIA X11 drivers can be redistributed unmodified, but our builder applies `patchelf` to make them work. Thus, its license is `"unfree"` and it cannot be included in the Nixpkgs channel.
### `lib.licenses.unfree`, `"unfree"`
### `lib.licenses.unfree`, `"unfree"` {#lib.licenses.unfree-unfree}
Unfree package that cannot be redistributed. You can build it yourself, but you cannot redistribute the output of the derivation. Thus it cannot be included in the Nixpkgs channel.
### `lib.licenses.unfreeRedistributableFirmware`, `"unfree-redistributable-firmware"`
### `lib.licenses.unfreeRedistributableFirmware`, `"unfree-redistributable-firmware"` {#lib.licenses.unfreeredistributablefirmware-unfree-redistributable-firmware}
This package supplies unfree, redistributable firmware. This is a separate value from `unfree-redistributable` because not everybody cares whether firmware is free.


@ -6,7 +6,7 @@ The Nix language allows a derivation to produce multiple outputs, which is simil
The main motivation is to save disk space by reducing runtime closure sizes; consequently, the sizes of substituted binaries are also reduced. Splitting can be used to make runtime dependencies more granular: the typical reduction is to split away development-only files, as those are usually not needed during runtime. As a result, the closure sizes of many packages can be reduced to a half or even much less.
::: note
::: {.note}
The reduction effects could be instead achieved by building the parts in completely separate derivations. That would often additionally reduce build-time closures, but it tends to be much harder to write such derivations, as build systems typically assume all parts are being built at once. This compromise approach of single source package producing multiple binary packages is also utilized often by rpm and deb.
:::
@ -28,7 +28,7 @@ NixOS provides two ways to select the outputs to install for packages listed in
`nix-env` lacks an easy way to select the outputs to install. When installing a package, `nix-env` always installs the outputs listed in `meta.outputsToInstall`, even when the user explicitly selects an output.
::: warning
::: {.warning}
`nix-env` silently disregards the outputs selected by the user, and instead installs the outputs from `meta.outputsToInstall`. For example,
```ShellSession
@ -38,7 +38,7 @@ $ nix-env -iA nixpkgs.coreutils.info
installs the `"out"` output (`coreutils.meta.outputsToInstall` is `[ "out" ]`) instead of the requested `"info"`.
:::
The only recourse to select an output with `nix-env` is to override the packages `meta.outputsToInstall`, using the functions described in <xref linkend="chap-overrides" />. For example, the following overlay adds the `"info"` output for the `coreutils` package:
The only recourse to select an output with `nix-env` is to override the packages `meta.outputsToInstall`, using the functions described in [](#chap-overrides). For example, the following overlay adds the `"info"` output for the `coreutils` package:
```nix
self: super:
@ -53,7 +53,7 @@ self: super:
In the Nix language the individual outputs can be reached explicitly as attributes, e.g. `coreutils.info`, but the typical case is just using packages as build inputs.
When a multiple-output derivation gets into a build input of another derivation, the `dev` output is added if it exists, otherwise the first output is added. In addition to that, `propagatedBuildOutputs` of that package which by default contain `$outputBin` and `$outputLib` are also added. (See <xref linkend="multiple-output-file-type-groups" />.)
When a multiple-output derivation gets into a build input of another derivation, the `dev` output is added if it exists, otherwise the first output is added. In addition to that, `propagatedBuildOutputs` of that package which by default contain `$outputBin` and `$outputLib` are also added. (See [](#multiple-output-file-type-groups).)
In some cases it may be desirable to combine different outputs under a single store path. A function `symlinkJoin` can be used to do this. (Note that it may negate some closure size benefits of using a multiple-output package.)
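A minimal sketch of that approach, using `coreutils` and its `info` output purely as an example:
```nix
# Joins the selected outputs into a single store path of symlinks; note that
# this can negate some of the closure-size benefits described above.
symlinkJoin {
  name = "coreutils-with-info";
  paths = [ coreutils.out coreutils.info ];
}
```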
@ -69,8 +69,8 @@ outputs = [ "bin" "dev" "out" "doc" ];
Often such a single line is enough. For each output an equally named environment variable is passed to the builder and contains the path in the Nix store for that output. Typically you also want to have the main `out` output, as it catches any files that didn’t end up anywhere else.
::: note
There is a special handling of the `debug` output, described at <xref linkend="stdenv-separateDebugInfo" />.
::: {.note}
There is a special handling of the `debug` output, described at [](#stdenv-separateDebugInfo).
:::
### “Binaries first” {#multiple-output-file-binaries-first-convention}
@ -85,35 +85,35 @@ The reason for why `glibc` deviates from the convention is because referencing a
The support code currently recognizes some particular kinds of outputs and either instructs the build system of the package to put files into their desired outputs or it moves the files during the fixup phase. Each group of file types has an `outputFoo` variable specifying the output name where they should go. If that variable isn’t defined by the derivation writer, it is guessed: a default output name is chosen, falling back to other possibilities if that output isn’t defined. (A short sketch combining these variables follows the descriptions below.)
#### ` $outputDev`
#### `$outputDev` {#outputdev}
is for development-only files. These include C(++) headers (`include/`), pkg-config (`lib/pkgconfig/`), cmake (`lib/cmake/`) and aclocal files (`share/aclocal/`). They go to `dev` or `out` by default.
#### ` $outputBin`
#### `$outputBin` {#outputbin}
is meant for user-facing binaries, typically residing in `bin/`. They go to `bin` or `out` by default.
#### ` $outputLib`
#### `$outputLib` {#outputlib}
is meant for libraries, typically residing in `lib/` and `libexec/`. They go to `lib` or `out` by default.
#### ` $outputDoc`
#### `$outputDoc` {#outputdoc}
is for user documentation, typically residing in `share/doc/`. It goes to `doc` or `out` by default.
#### ` $outputDevdoc`
#### `$outputDevdoc` {#outputdevdoc}
is for _developer_ documentation. Currently we count gtk-doc and devhelp books, typically residing in `share/gtk-doc/` and `share/devhelp/`, in there. It goes to `devdoc` or is removed (!) by default. This is because e.g. gtk-doc tends to be rather large and completely unused by nixpkgs users.
#### ` $outputMan`
#### `$outputMan` {#outputman}
is for man pages (except for section 3), typically residing in `share/man/man[0-9]/`. They go to `man` or `$outputBin` by default.
#### ` $outputDevman`
#### `$outputDevman` {#outputdevman}
is for section 3 man pages, typically residing in `share/man/man[0-9]/`. They go to `devman` or `$outputMan` by default.
#### ` $outputInfo`
#### `$outputInfo` {#outputinfo}
is for info pages, typically residing in `share/info/`. They go to `info` or `$outputBin` by default.
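Tying the variables above together, a hedged sketch of a derivation that declares several outputs and overrides one of the defaults (the output list is illustrative):
```nix
stdenv.mkDerivation {
  # ...
  outputs = [ "bin" "dev" "out" "doc" ];
  # Send info pages to the "doc" output instead of the default
  # (they would otherwise fall back to $outputBin, i.e. "bin"):
  outputInfo = "doc";
}
```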


@ -39,9 +39,9 @@ stdenv.mkDerivation {
}
```
This attribute ensures that the `bin` subdirectories of these packages appear in the `PATH` environment variable during the build, that their `include` subdirectories are searched by the C compiler, and so on. (See <xref linkend="ssec-setup-hooks" /> for details.)
This attribute ensures that the `bin` subdirectories of these packages appear in the `PATH` environment variable during the build, that their `include` subdirectories are searched by the C compiler, and so on. (See [](#ssec-setup-hooks) for details.)
Often it is necessary to override or modify some aspect of the build. To make this easier, the standard environment breaks the package build into a number of *phases*, all of which can be overridden or modified individually: unpacking the sources, applying patches, configuring, building, and installing. (There are some others; see <xref linkend="sec-stdenv-phases" />.) For instance, a package that doesnt supply a makefile but instead has to be compiled "manually" could be handled like this:
Often it is necessary to override or modify some aspect of the build. To make this easier, the standard environment breaks the package build into a number of *phases*, all of which can be overridden or modified individually: unpacking the sources, applying patches, configuring, building, and installing. (There are some others; see [](#sec-stdenv-phases).) For instance, a package that doesnt supply a makefile but instead has to be compiled "manually" could be handled like this:
```nix
stdenv.mkDerivation {
@ -59,7 +59,7 @@ stdenv.mkDerivation {
(Note the use of `''`-style string literals, which are very convenient for large multi-line script fragments because they dont need escaping of `"` and `\`, and because indentation is intelligently removed.)
There are many other attributes to customise the build. These are listed in <xref linkend="ssec-stdenv-attributes" />.
There are many other attributes to customise the build. These are listed in [](#ssec-stdenv-attributes).
While the standard environment provides a generic builder, you can still supply your own build script:
@ -116,9 +116,9 @@ On Linux, `stdenv` also includes the `patchelf` utility.
## Specifying dependencies {#ssec-stdenv-dependencies}
As described in the Nix manual, almost any `*.drv` store path in a derivations attribute set will induce a dependency on that derivation. `mkDerivation`, however, takes a few attributes intended to, between them, include all the dependencies of a package. This is done both for structure and consistency, but also so that certain other setup can take place. For example, certain dependencies need their bin directories added to the `PATH`. That is built-in, but other setup is done via a pluggable mechanism that works in conjunction with these dependency attributes. See <xref linkend="ssec-setup-hooks" /> for details.
As described in the Nix manual, almost any `*.drv` store path in a derivations attribute set will induce a dependency on that derivation. `mkDerivation`, however, takes a few attributes intended to, between them, include all the dependencies of a package. This is done both for structure and consistency, but also so that certain other setup can take place. For example, certain dependencies need their bin directories added to the `PATH`. That is built-in, but other setup is done via a pluggable mechanism that works in conjunction with these dependency attributes. See [](#ssec-setup-hooks) for details.
Dependencies can be broken down along three axes: their host and target platforms relative to the new derivations, and whether they are propagated. The platform distinctions are motivated by cross compilation; see <xref linkend="chap-cross" /> for exactly what each platform means. [^footnote-stdenv-ignored-build-platform] But even if one is not cross compiling, the platforms imply whether or not the dependency is needed at run-time or build-time, a concept that makes perfect sense outside of cross compilation. By default, the run-time/build-time distinction is just a hint for mental clarity, but with `strictDeps` set it is mostly enforced even in the native case.
Dependencies can be broken down along three axes: their host and target platforms relative to the new derivations, and whether they are propagated. The platform distinctions are motivated by cross compilation; see [](#chap-cross) for exactly what each platform means. [^footnote-stdenv-ignored-build-platform] But even if one is not cross compiling, the platforms imply whether or not the dependency is needed at run-time or build-time, a concept that makes perfect sense outside of cross compilation. By default, the run-time/build-time distinction is just a hint for mental clarity, but with `strictDeps` set it is mostly enforced even in the native case.
The extension of `PATH` with dependencies, alluded to above, proceeds according to the relative platforms alone. The process is carried out only for dependencies whose host platform matches the new derivations build platform i.e. dependencies which run on the platform where the new derivation will be built. [^footnote-stdenv-native-dependencies-in-path] For each dependency \<dep\> of those dependencies, `dep/bin`, if present, is added to the `PATH` environment variable.
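A hedged sketch of that build-time/run-time split with `strictDeps` enabled (the concrete dependencies are placeholders):
```nix
stdenv.mkDerivation {
  # ...
  strictDeps = true;               # enforce the split instead of treating it as a hint
  nativeBuildInputs = [ cmake ];   # runs on the build platform, needed only at build time
  buildInputs = [ openssl ];       # built for the host platform, used at run time
}
```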
@ -175,7 +175,8 @@ Because of the bounds checks, the uncommon cases are `h = t` and `h + 2 = t`. In
Overall, the unifying theme here is that propagation shouldn’t be introducing transitive dependencies involving platforms the depending package is unaware of. \[One can imagine the depending package asking for dependencies with the platforms it knows about; other platforms it doesn’t know how to ask for. The platform description in that scenario is a kind of unforgeable capability.\] The offset bounds checking and definition of `mapOffset` together ensure that this is the case. Discovering a new offset is discovering a new platform, and since those platforms weren’t in the derivation “spec” of the needing package, they cannot be relevant. From a capability perspective, we can imagine that the host and target platforms of a package are the capabilities a package requires, and the depending package must provide the capability to the dependency.
### Variables specifying dependencies
### Variables specifying dependencies {#variables-specifying-dependencies}
#### `depsBuildBuild` {#var-stdenv-depsBuildBuild}
A list of dependencies whose host and target platforms are the new derivation’s build platform. This means a `-1` host and `-1` target offset from the new derivation’s platforms. These are programs and libraries used at build time that produce programs and libraries also used at build time. If the dependency doesn’t care about the target platform (i.e. isn’t a compiler or similar tool), put it in `nativeBuildInputs` instead. The most common use of this is `buildPackages.stdenv.cc`, the default C compiler for this role. That example crops up more often than one might think in old, commonly used C libraries.
@ -236,13 +237,13 @@ The propagated equivalent of `depsTargetTarget`. This is prefixed for the same r
## Attributes {#ssec-stdenv-attributes}
### Variables affecting `stdenv` initialisation
### Variables affecting `stdenv` initialisation {#variables-affecting-stdenv-initialisation}
#### `NIX_DEBUG` {#var-stdenv-NIX_DEBUG}
A natural number indicating how much information to log. If set to 1 or higher, `stdenv` will print moderate debugging information during the build. In particular, the `gcc` and `ld` wrapper scripts will print out the complete command line passed to the wrapped tools. If set to 6 or higher, the `stdenv` setup script will be run with `set -x` tracing. If set to 7 or higher, the `gcc` and `ld` wrapper scripts will also be run with `set -x` tracing.
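For instance, a sketch of requesting moderate logging for a single derivation:
```nix
stdenv.mkDerivation {
  # ...
  NIX_DEBUG = 1;  # print the full command lines passed to the gcc and ld wrappers
}
```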
### Attributes affecting build properties
### Attributes affecting build properties {#attributes-affecting-build-properties}
#### `enableParallelBuilding` {#var-stdenv-enableParallelBuilding}
@ -250,7 +251,7 @@ If set to `true`, `stdenv` will pass specific flags to `make` and other build to
Unless set to `false`, some build systems with good support for parallel building including `cmake`, `meson`, and `qmake` will set it to `true`.
### Special variables
### Special variables {#special-variables}
#### `passthru` {#var-stdenv-passthru}
@ -298,7 +299,7 @@ passthru.updateScript = [ ../../update.sh pname "--requested-release=unstable" ]
The script will be run with `UPDATE_NIX_ATTR_PATH` environment variable set to the attribute path it is supposed to update.
::: note
::: {.note}
The script will be usually run from the root of the Nixpkgs repository but you should not rely on that. Also note that the update scripts will be run in parallel by default; you should avoid running `git commit` or any other commands that cannot handle that.
:::
@ -314,7 +315,7 @@ Each phase can be overridden in its entirety either by setting the environment v
There are a number of variables that control what phases are executed and in what order:
#### Variables affecting phase control
#### Variables affecting phase control {#variables-affecting-phase-control}
##### `phases` {#var-stdenv-phases}
@ -354,21 +355,22 @@ Additional phases executed after any of the default phases.
The unpack phase is responsible for unpacking the source code of the package. The default implementation of `unpackPhase` unpacks the source files listed in the `src` environment variable to the current directory. It supports the following files by default:
#### Tar files
#### Tar files {#tar-files}
These can optionally be compressed using `gzip` (`.tar.gz`, `.tgz` or `.tar.Z`), `bzip2` (`.tar.bz2`, `.tbz2` or `.tbz`) or `xz` (`.tar.xz`, `.tar.lzma` or `.txz`).
#### Zip files
#### Zip files {#zip-files}
Zip files are unpacked using `unzip`. However, `unzip` is not in the standard environment, so you should add it to `nativeBuildInputs` yourself.
#### Directories in the Nix store
#### Directories in the Nix store {#directories-in-the-nix-store}
These are simply copied to the current directory. The hash part of the file name is stripped, e.g. `/nix/store/1wydxgby13cz...-my-sources` would be copied to `my-sources`.
Additional file types can be supported by setting the `unpackCmd` variable (see below).
#### Variables controlling the unpack phase
#### Variables controlling the unpack phase {#variables-controlling-the-unpack-phase}
##### `srcs` / `src` {#var-stdenv-src}
The list of source files or directories to be unpacked or copied. One of these must be set.
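A hedged sketch of passing several sources at once (URL and hash are placeholders):
```nix
stdenv.mkDerivation {
  # ...
  srcs = [
    (fetchurl {
      url = "https://example.org/foo-1.0.tar.gz";
      sha256 = lib.fakeSha256;  # placeholder, not a real hash
    })
    ./extra-assets              # a local directory, copied as-is
  ];
  # With multiple sources, the directory to build in usually has to be chosen explicitly:
  sourceRoot = "foo-1.0";
}
```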
@ -405,7 +407,7 @@ The unpack phase evaluates the string `$unpackCmd` for any unrecognised file. Th
The patch phase applies the list of patches defined in the `patches` variable.
#### Variables controlling the patch phase
#### Variables controlling the patch phase {#variables-controlling-the-patch-phase}
##### `dontPatch` {#var-stdenv-dontPatch}
@ -431,7 +433,7 @@ Hook executed at the end of the patch phase.
The configure phase prepares the source tree for building. The default `configurePhase` runs `./configure` (typically an Autoconf-generated script) if it exists.
#### Variables controlling the configure phase
#### Variables controlling the configure phase {#variables-controlling-the-configure-phase}
##### `configureScript` {#var-stdenv-configureScript}
@ -491,7 +493,7 @@ Hook executed at the end of the configure phase.
The build phase is responsible for actually building the package (e.g. compiling it). The default `buildPhase` simply calls `make` if a file named `Makefile`, `makefile` or `GNUmakefile` exists in the current directory (or the `makefile` is explicitly set); otherwise it does nothing.
#### Variables controlling the build phase
#### Variables controlling the build phase {#variables-controlling-the-build-phase}
##### `dontBuild` {#var-stdenv-dontBuild}
@ -509,7 +511,7 @@ A list of strings passed as additional flags to `make`. These flags are also use
makeFlags = [ "PREFIX=$(out)" ];
```
::: note
::: {.note}
The flags are quoted in bash, but environment variables can be specified by using the make syntax.
:::
@ -545,7 +547,7 @@ Before and after running `make`, the hooks `preBuild` and `postBuild` are called
The check phase checks whether the package was built correctly by running its test suite. The default `checkPhase` calls `make check`, but only if the `doCheck` variable is enabled.
#### Variables controlling the check phase
#### Variables controlling the check phase {#variables-controlling-the-check-phase}
##### `doCheck` {#var-stdenv-doCheck}
@ -557,7 +559,7 @@ doCheck = true;
in the derivation to enable checks. The exception is cross compilation. Cross compiled builds never run tests, no matter how `doCheck` is set, as the newly-built program wont run on the platform used to build it.
##### `makeFlags` / `makeFlagsArray` / `makefile`
##### `makeFlags` / `makeFlagsArray` / `makefile` {#makeflags-makeflagsarray-makefile}
See the [build phase](#var-stdenv-makeFlags) for details.
@ -585,13 +587,13 @@ Hook executed at the end of the check phase.
The install phase is responsible for installing the package in the Nix store under `out`. The default `installPhase` creates the directory `$out` and calls `make install`.
#### Variables controlling the install phase
#### Variables controlling the install phase {#variables-controlling-the-install-phase}
##### `dontInstall` {#var-stdenv-dontInstall}
Set to true to skip the install phase.
##### `makeFlags` / `makeFlagsArray` / `makefile`
##### `makeFlags` / `makeFlagsArray` / `makefile` {#makeflags-makeflagsarray-makefile-1}
See the [build phase](#var-stdenv-makeFlags) for details.
@ -624,7 +626,7 @@ The fixup phase performs some (Nix-specific) post-processing actions on the file
- On Linux, it applies the `patchelf` command to ELF executables and libraries to remove unused directories from the `RPATH` in order to prevent unnecessary runtime dependencies.
- It rewrites the interpreter paths of shell scripts to paths found in `PATH`. E.g., `/usr/bin/perl` will be rewritten to `/nix/store/some-perl/bin/perl` found in `PATH`.
#### Variables controlling the fixup phase
#### Variables controlling the fixup phase {#variables-controlling-the-fixup-phase}
##### `dontFixup` {#var-stdenv-dontFixup}
@ -706,7 +708,7 @@ to `~/.gdbinit`. GDB will then be able to find debug information installed via `
The installCheck phase checks whether the package was installed correctly by running its test suite against the installed directories. The default `installCheck` calls `make installcheck`.
#### Variables controlling the installCheck phase
#### Variables controlling the installCheck phase {#variables-controlling-the-installcheck-phase}
##### `doInstallCheck` {#var-stdenv-doInstallCheck}
@ -742,7 +744,7 @@ Hook executed at the end of the installCheck phase.
The distribution phase is intended to produce a source distribution of the package. The default `distPhase` first calls `make dist`, then it copies the resulting source tarballs to `$out/tarballs/`. This phase is only executed if the attribute `doDist` is set.
#### Variables controlling the distribution phase
#### Variables controlling the distribution phase {#variables-controlling-the-distribution-phase}
##### `distTarget` {#var-stdenv-distTarget}
@ -879,7 +881,7 @@ The most typical use of the setup hook is actually to add other hooks which are
Packages adding a hook should not hard code a specific hook, but rather choose a variable *relative* to how they are included. Returning to the C compiler wrapper example, if the wrapper itself is an `n` dependency, then it only wants to accumulate flags from `n + 1` dependencies, as only those ones match the compiler’s target platform. The `hostOffset` variable is defined with the current dependency’s host offset, and `targetOffset` with its target offset, before its setup hook is sourced. Additionally, since most environment hooks don’t care about the target platform, that means the setup hook can append to the right bash array by doing something like
```{.bash}
```bash
addEnvHooks "$hostOffset" myBashFunction
```
@ -887,47 +889,47 @@ The *existence* of setups hooks has long been documented and packages inside Nix
First, let’s cover some setup hooks that are part of Nixpkgs’ default stdenv. This means that they are run for every package built using `stdenv.mkDerivation`. Some of these are platform specific, so they may run on Linux but not Darwin or vice-versa.
### `move-docs.sh`
### `move-docs.sh` {#move-docs.sh}
This setup hook moves any installed documentation to the `/share` subdirectory. This includes the man, doc and info directories. This is needed for legacy programs that do not know how to use the `share` subdirectory.
### `compress-man-pages.sh`
### `compress-man-pages.sh` {#compress-man-pages.sh}
This setup hook compresses any man pages that have been installed. The compression is done using the gzip program. This helps to reduce the installed size of packages.
### `strip.sh`
### `strip.sh` {#strip.sh}
This runs the strip command on installed binaries and libraries. This removes unnecessary information like debug symbols when they are not needed. This also helps to reduce the installed size of packages.
### `patch-shebangs.sh`
### `patch-shebangs.sh` {#patch-shebangs.sh}
This setup hook patches installed scripts to use the full path to the shebang interpreter. A shebang interpreter is the first commented line of a script telling the operating system which program will run the script (e.g. `#!/bin/bash`). In Nix, we want an exact path to that interpreter to be used. This often replaces `/bin/sh` with a path in the Nix store.
### `audit-tmpdir.sh`
### `audit-tmpdir.sh` {#audit-tmpdir.sh}
This verifies that no references are left from the installed binaries to the directory used to build those binaries. This ensures that the binaries do not need things outside the Nix store. This is currently supported in Linux only.
### `multiple-outputs.sh`
### `multiple-outputs.sh` {#multiple-outputs.sh}
This setup hook adds configure flags that tell packages to install files into any one of the proper outputs listed in `outputs`. This behavior can be turned off by setting `setOutputFlags` to false in the derivation environment. See <xref linkend="chap-multiple-output" /> for more information.
This setup hook adds configure flags that tell packages to install files into any one of the proper outputs listed in `outputs`. This behavior can be turned off by setting `setOutputFlags` to false in the derivation environment. See [](#chap-multiple-output) for more information.
### `move-sbin.sh`
### `move-sbin.sh` {#move-sbin.sh}
This setup hook moves any binaries installed in the `sbin/` subdirectory into `bin/`. In addition, a link is provided from `sbin/` to `bin/` for compatibility.
### `move-lib64.sh`
### `move-lib64.sh` {#move-lib64.sh}
This setup hook moves any libraries installed in the `lib64/` subdirectory into `lib/`. In addition, a link is provided from `lib64/` to `lib/` for compatibility.
### `move-systemd-user-units.sh`
### `move-systemd-user-units.sh` {#move-systemd-user-units.sh}
This setup hook moves any systemd user units installed in the `lib/` subdirectory into `share/`. In addition, a link is provided from `share/` to `lib/` for compatibility. This is needed for systemd to find user services when installed into the user profile.
### `set-source-date-epoch-to-latest.sh`
### `set-source-date-epoch-to-latest.sh` {#set-source-date-epoch-to-latest.sh}
This sets `SOURCE_DATE_EPOCH` to the modification time of the most recent file.
### Bintools Wrapper
### Bintools Wrapper {#bintools-wrapper}
The Bintools Wrapper wraps the binary utilities for a bunch of miscellaneous purposes. These are GNU Binutils when targeting Linux, and a mix of cctools and GNU Binutils for Darwin. \[The “Bintools” name is supposed to be a compromise between “Binutils” and “cctools” not denoting any specific implementation.\] Specifically, the underlying bintools package, and a C standard library (glibc or Darwin’s libSystem, just for the dynamic loader) are all fed in, and dependency finding, hardening (see below), and purity checks for each are handled by the Bintools Wrapper. Packages typically depend on CC Wrapper, which in turn (at run time) depends on the Bintools Wrapper.
@ -937,7 +939,7 @@ A final task of the setup hook is defining a number of standard environment vari
A problem with this final task is that the Bintools Wrapper is honest and defines `LD` as `ld`. Most packages, however, firstly use the C compiler for linking, secondly use `LD` anyways, defining it as the C compiler, and thirdly, only so define `LD` when it is undefined as a fallback. This triple-threat means Bintools Wrapper will break those packages, as LD is already defined as the actual linker which the package wont override yet doesnt want to use. The workaround is to define, just for the problematic package, `LD` as the C compiler. A good way to do this would be `preConfigure = "LD=$CC"`.
### CC Wrapper
### CC Wrapper {#cc-wrapper}
The CC Wrapper wraps a C toolchain for a bunch of miscellaneous purposes. Specifically, a C compiler (GCC or Clang), wrapped binary tools, and a C standard library (glibc or Darwins libSystem, just for the dynamic loader) are all fed in, and dependency finding, hardening (see below), and purity checks for each are handled by the CC Wrapper. Packages typically depend on the CC Wrapper, which in turn (at run-time) depends on the Bintools Wrapper.
@ -971,11 +973,11 @@ The `autoreconfHook` derivation adds `autoreconfPhase`, which runs autoreconf, l
Adds every file named `catalog.xml` found under the `xml/dtd` and `xml/xsl` subdirectories of each build input to the `XML_CATALOG_FILES` environment variable.
### teTeX / TeX Live
### teTeX / TeX Live {#tetex-tex-live}
Adds the `share/texmf-nix` subdirectory of each build input to the `TEXINPUTS` environment variable.
### Qt 4
### Qt 4 {#qt-4}
Sets the `QTDIR` environment variable to Qts path.
@ -983,13 +985,13 @@ Sets the `QTDIR` environment variable to Qts path.
Exports the `GDK_PIXBUF_MODULE_FILE` environment variable to the builder. Add the librsvg package to `buildInputs` to get SVG support. See also the [setup hook description in GNOME platform docs](#ssec-gnome-hooks-gdk-pixbuf).
### GHC
### GHC {#ghc}
Creates a temporary package database and registers every Haskell build input in it (TODO: how?).
### GNOME platform
### GNOME platform {#gnome-platform}
Hooks related to GNOME platform and related libraries like GLib, GTK and GStreamer are described in <xref linkend="sec-language-gnome" />.
Hooks related to GNOME platform and related libraries like GLib, GTK and GStreamer are described in [](#sec-language-gnome).
### autoPatchelfHook {#setup-hook-autopatchelfhook}
@ -1003,7 +1005,7 @@ By default `autoPatchelf` will fail as soon as any ELF file requires a dependenc
The `autoPatchelf` command also recognizes a `--no-recurse` command line flag, which prevents it from recursing into subdirectories.
### breakpointHook
### breakpointHook {#breakpointhook}
This hook will make a build pause instead of stopping when a failure happens. It prevents nix from cleaning up the build environment immediately and allows the user to attach to a build environment using the `cntr` command. Upon build error it will print instructions on how to use `cntr`, which can be used to enter the environment for debugging. Installing cntr and running the command will provide shell access to the build sandbox of the failed build. At `/var/lib/cntr` the sandboxed filesystem is mounted. All commands and files of the system are still accessible within the shell. To execute commands from the sandbox, use the `cntr exec` subcommand. `cntr` is only supported on Linux-based platforms. To use it, first add `cntr` to your `environment.systemPackages` on NixOS, or alternatively to the root user on non-NixOS systems. Then, in the package that is supposed to be inspected, add `breakpointHook` to `nativeBuildInputs`.
@ -1013,15 +1015,15 @@ nativeBuildInputs = [ breakpointHook ];
When a build failure happens there will be an instruction printed that shows how to attach with `cntr` to the build sandbox.
::: note
::: title
::: {.note}
::: {.title}
Caution with remote builds
:::
This wont work with remote builds as the build environment is on a different machine and cant be accessed by `cntr`. Remote builds can be turned off by setting `--option builders ''` for `nix-build` or `--builders ''` for `nix build`.
:::
### installShellFiles
### installShellFiles {#installshellfiles}
This hook helps with installing manpages and shell completion files. It exposes 2 shell functions `installManPage` and `installShellCompletion` that can be used from your `postInstall` hook.
@ -1047,61 +1049,61 @@ postInstall = ''
'';
```
### libiconv, libintl
### libiconv, libintl {#libiconv-libintl}
A few libraries automatically add their library to `NIX_LDFLAGS`, making their symbols automatically available to the linker. This includes libiconv and libintl (gettext). This is done to provide compatibility between GNU Linux, where libiconv and libintl are bundled in, and other systems where that might not be the case. Sometimes, this behavior is not desired. To disable this behavior, set `dontAddExtraLibs`.
### validatePkgConfig
### validatePkgConfig {#validatepkgconfig}
The `validatePkgConfig` hook validates all pkg-config (`.pc`) files in a package. This helps catch some common errors in pkg-config files, such as undefined variables.
### cmake
### cmake {#cmake}
Overrides the default configure phase to run the CMake command. By default, we use the Make generator of CMake. In addition, dependencies are added automatically to `CMAKE_PREFIX_PATH` so that packages are correctly detected by CMake. Some additional flags are passed in to give similar behavior to configure-based packages. You can disable this hook’s behavior by setting `configurePhase` to a custom value, or by setting `dontUseCmakeConfigure`. `cmakeFlags` controls flags passed only to CMake. By default, parallel building is enabled as CMake supports parallel building almost everywhere. When Ninja is also in use, CMake will detect that and use the Ninja generator.
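For example (the specific `-D` options are only stand-ins for a project’s own CMake switches):
```nix
cmakeFlags = [
  "-DBUILD_TESTING=OFF"
  "-DENABLE_EXAMPLES=ON"
];
```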
### xcbuildHook
### xcbuildHook {#xcbuildhook}
Overrides the build and install phases to run the "xcbuild" command. This hook is needed when a project only comes with build files for the Xcode build system. You can disable this behavior by setting `buildPhase` and `configurePhase` to a custom value. `xcbuildFlags` controls flags passed only to xcbuild.
### Meson
### Meson {#meson}
Overrides the configure phase to run meson to generate Ninja files. To run these files, you should accompany Meson with ninja. By default, `enableParallelBuilding` is enabled as Meson supports parallel building almost everywhere.
#### Variables controlling Meson
#### Variables controlling Meson {#variables-controlling-meson}
##### `mesonFlags`
##### `mesonFlags` {#mesonflags}
Controls the flags passed to meson.
##### `mesonBuildType`
##### `mesonBuildType` {#mesonbuildtype}
Which [`--buildtype`](https://mesonbuild.com/Builtin-options.html#core-options) to pass to Meson. We default to `plain`.
##### `mesonAutoFeatures`
##### `mesonAutoFeatures` {#mesonautofeatures}
What value to set [`-Dauto_features=`](https://mesonbuild.com/Builtin-options.html#core-options) to. We default to `enabled`.
##### `mesonWrapMode`
##### `mesonWrapMode` {#mesonwrapmode}
What value to set [`-Dwrap_mode=`](https://mesonbuild.com/Builtin-options.html#core-options) to. We default to `nodownload` as we disallow network access.
##### `dontUseMesonConfigure`
##### `dontUseMesonConfigure` {#dontusemesonconfigure}
Disables using Mesons `configurePhase`.
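A short sketch combining the variables above (the `-D` option name is an assumed, project-specific feature):
```nix
mesonFlags = [ "-Ddocs=disabled" ];
mesonBuildType = "release";  # override the "plain" default
```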
### ninja
### ninja {#ninja}
Overrides the build, install, and check phase to run ninja instead of make. You can disable this behavior with the `dontUseNinjaBuild`, `dontUseNinjaInstall`, and `dontUseNinjaCheck`, respectively. Parallel building is enabled by default in Ninja.
### unzip
### unzip {#unzip}
This setup hook will allow you to unzip .zip files specified in `$src`. There are many similar packages like `unrar`, `undmg`, etc.
### wafHook
### wafHook {#wafhook}
Overrides the configure, build, and install phases. This will run the “waf” script used by many projects. If `wafPath` (default `./waf`) doesnt exist, it will copy the version of waf available in Nixpkgs. `wafFlags` can be used to pass flags to the waf script.
### scons
### scons {#scons}
Overrides the build, install, and check phases. This uses the scons build system as a replacement for make. scons does not provide a configure phase, so everything is managed at build and install time.
@ -1119,7 +1121,7 @@ Both parameters take a list of flags as strings. The special `"all"` flag can be
The following flags are enabled by default and might require disabling with `hardeningDisable` if the program being packaged is incompatible.
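For instance, a package that is incompatible with some of them could disable just those (a sketch, not a recommendation):
```nix
hardeningDisable = [ "format" "fortify" ];
```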
### `format`
### `format` {#format}
Adds the `-Wformat -Wformat-security -Werror=format-security` compiler options. At present, this warns about calls to `printf` and `scanf` functions where the format string is not a string literal and there are no format arguments, as in `printf(foo);`. This may be a security hole if the format string came from untrusted input and contains `%n`.
@ -1132,7 +1134,7 @@ This needs to be turned off or fixed for errors similar to:
cc1plus: some warnings being treated as errors
```
### `stackprotector`
### `stackprotector` {#stackprotector}
Adds the `-fstack-protector-strong --param ssp-buffer-size=4` compiler options. This adds safety checks against stack overwrites rendering many potential code injection attacks into aborting situations. In the best case this turns code injection vulnerabilities into denial of service or into non-issues (depending on the application).
@ -1143,7 +1145,7 @@ bin/blib.a(bios_console.o): In function `bios_handle_cup':
/tmp/nix-build-ipxe-20141124-5cbdc41.drv-0/ipxe-5cbdc41/src/arch/i386/firmware/pcbios/bios_console.c:86: undefined reference to `__stack_chk_fail'
```
### `fortify`
### `fortify` {#fortify}
Adds the `-O2 -D_FORTIFY_SOURCE=2` compiler options. During code generation the compiler knows a great deal of information about buffer sizes (where possible), and attempts to replace insecure unlimited length buffer function calls with length-limited ones. This is especially useful for old, crufty code. Additionally, format strings in writable memory that contain `%n` are blocked. If an application depends on such a format string, it will need to be worked around.
@ -1164,7 +1166,7 @@ installwatch.c:3751:5: error: conflicting types for '__open_2'
fcntl2.h:50:4: error: call to '__open_missing_mode' declared with attribute error: open with O_CREAT or O_TMPFILE in second argument needs 3 arguments
```
### `pic`
### `pic` {#pic}
Adds the `-fPIC` compiler option. This option adds support for position-independent code in shared libraries, thus making ASLR possible.
@ -1177,19 +1179,19 @@ ccbLfRgg.s: Assembler messages:
ccbLfRgg.s:33: Error: missing or invalid displacement expression `private_key_len@GOTOFF'
```
### `strictoverflow`
### `strictoverflow` {#strictoverflow}
Signed integer overflow is undefined behaviour according to the C standard. If it happens, it is an error in the program as it should check for overflow before it can happen, not afterwards. GCC provides built-in functions to perform arithmetic with overflow checking, which are correct and faster than any custom implementation. As a workaround, the option `-fno-strict-overflow` makes gcc behave as if signed integer overflows were defined.
This flag should not trigger any build or runtime errors.
### `relro`
### `relro` {#relro}
Adds the `-z relro` linker option. During program load, several ELF memory sections need to be written to by the linker, but can be turned read-only before turning over control to the program. This prevents some GOT (and .dtors) overwrite attacks, but at least the part of the GOT used by the dynamic linker (.got.plt) is still vulnerable.
This flag can break dynamic shared object loading. For instance, the module systems of Xorg and OpenCV are incompatible with this flag. In almost all cases the `bindnow` flag must also be disabled and incompatible programs typically fail with similar errors at runtime.
### `bindnow`
### `bindnow` {#bindnow}
Adds the `-z bindnow` linker option. During program load, all dynamic symbols are resolved, allowing for the complete GOT to be marked read-only (due to `relro`). This prevents GOT overwrite attacks. For very large applications, this can incur some performance loss during initial load while symbols are resolved, but this shouldnt be an issue for daemons.
@ -1201,7 +1203,7 @@ intel_drv.so: undefined symbol: vgaHWFreeHWRec
The following flags are disabled by default and should be enabled with `hardeningEnable` for packages that take untrusted input like network services.
### `pie`
### `pie` {#pie}
Adds the `-fPIE` compiler and `-pie` linker options. Position Independent Executables are needed to take advantage of Address Space Layout Randomization, supported by modern kernel versions. While ASLR can already be enforced for data areas in the stack and heap (brk and mmap), the code areas must be compiled as position-independent. Shared libraries already do this with the `pic` flag, so they gain ASLR automatically, but binary .text regions need to be built with `pie` to gain ASLR. When this happens, ROP attacks are much harder since there are no static locations to bounce off of during a memory corruption attack.
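A minimal sketch of opting a network-facing package into this flag:
```nix
hardeningEnable = [ "pie" ];
```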


@ -63,7 +63,7 @@ The second argument (`super`) corresponds to the result of the evaluation of the
The value returned by this function should be a set similar to `pkgs/top-level/all-packages.nix`, containing overridden and/or new packages.
Overlays are similar to other methods for customizing Nixpkgs, in particular the `packageOverrides` attribute described in <xref linkend="sec-modify-via-packageOverrides"/>. Indeed, `packageOverrides` acts as an overlay with only the `super` argument. It is therefore appropriate for basic use, but overlays are more powerful and easier to distribute.
Overlays are similar to other methods for customizing Nixpkgs, in particular the `packageOverrides` attribute described in [](#sec-modify-via-packageOverrides). Indeed, `packageOverrides` acts as an overlay with only the `super` argument. It is therefore appropriate for basic use, but overlays are more powerful and easier to distribute.
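As a hedged sketch, a simple customization written as an overlay looks like this; with `packageOverrides` the function would take only the final (`super`-like) argument:
```nix
self: super: {
  hello = super.hello.overrideAttrs (oldAttrs: {
    doCheck = false;  # illustrative tweak only
  });
}
```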
## Using overlays to configure alternatives {#sec-overlays-alternatives}


@ -48,17 +48,17 @@ In the above example, the `separateDebugInfo` attribute is overridden to be true
The argument `oldAttrs` is conventionally used to refer to the attr set originally passed to `stdenv.mkDerivation`.
::: note
Note that `separateDebugInfo` is processed only by the `stdenv.mkDerivation` function, not the generated, raw Nix derivation. Thus, using `overrideDerivation` will not work in this case, as it overrides only the attributes of the final derivation. It is for this reason that `overrideAttrs` should be preferred in (almost) all cases to `overrideDerivation`, i.e. to allow using `stdenv.mkDerivation` to process input arguments, as well as the fact that it is easier to use (you can use the same attribute names you see in your Nix code, instead of the ones generated (e.g. `buildInputs` vs `nativeBuildInputs`), and it involves less typing).
::: {.note}
Note that `separateDebugInfo` is processed only by the `stdenv.mkDerivation` function, not the generated, raw Nix derivation. Thus, using `overrideDerivation` will not work in this case, as it overrides only the attributes of the final derivation. It is for this reason that `overrideAttrs` should be preferred in (almost) all cases to `overrideDerivation`, i.e. to allow using `stdenv.mkDerivation` to process input arguments, as well as the fact that it is easier to use (you can use the same attribute names you see in your Nix code, instead of the ones generated (e.g. `buildInputs` vs `nativeBuildInputs`), and it involves less typing).
:::
## &lt;pkg&gt;.overrideDerivation {#sec-pkg-overrideDerivation}
::: warning
::: {.warning}
You should prefer `overrideAttrs` in almost all cases, see its documentation for the reasons why. `overrideDerivation` is not deprecated and will continue to work, but is less nice to use and does not have as many abilities as `overrideAttrs`.
:::
::: warning
::: {.warning}
Do not use this function in Nixpkgs as it evaluates a Derivation before modifying it, which breaks package abstraction and removes error-checking of function arguments. In addition, this evaluation-per-function application incurs a performance penalty, which can become a problem if many overrides are used. It is only intended for ad-hoc customisation, such as in `~/.config/nixpkgs/config.nix`.
:::
@ -81,8 +81,8 @@ In the above example, the `name`, `src`, and `patches` of the derivation will be
The argument `oldAttrs` is used to refer to the attribute set of the original derivation.
::: note
A package's attributes are evaluated *before* being modified by the `overrideDerivation` function. For example, the `name` attribute reference in `url = "mirror://gnu/hello/${name}.tar.gz";` is filled-in *before* the `overrideDerivation` function modifies the attribute set. This means that overriding the `name` attribute, in this example, *will not* change the value of the `url` attribute. Instead, we need to override both the `name` *and* `url` attributes.
::: {.note}
A package's attributes are evaluated *before* being modified by the `overrideDerivation` function. For example, the `name` attribute reference in `url = "mirror://gnu/hello/${name}.tar.gz";` is filled-in *before* the `overrideDerivation` function modifies the attribute set. This means that overriding the `name` attribute, in this example, *will not* change the value of the `url` attribute. Instead, we need to override both the `name` *and* `url` attributes.
:::
## lib.makeOverridable {#sec-lib-makeOverridable}


@ -24,7 +24,7 @@
</section>
<section xml:id="new-services">
<title>New Services</title>
<itemizedlist spacing="compact">
<itemizedlist>
<listitem>
<para>
<link xlink:href="https://github.com/maxmind/geoipupdate">geoipupdate</link>,
@ -32,6 +32,14 @@
<link xlink:href="options.html#opt-services.geoipupdate.enable">services.geoipupdate</link>.
</para>
</listitem>
<listitem>
<para>
<link xlink:href="https://sr.ht">sourcehut</link>, a
collection of tools useful for software development. Available
as
<link xlink:href="options.html#opt-services.sourcehut.enable">services.sourcehut</link>.
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="backward-incompatibilities">


@ -14,6 +14,10 @@ In addition to numerous new and upgraded packages, this release has the followin
database updater from MaxMind. Available as
[services.geoipupdate](options.html#opt-services.geoipupdate.enable).
* [sourcehut](https://sr.ht), a collection of tools useful for software
development. Available as
[services.sourcehut](options.html#opt-services.sourcehut.enable).
## Backward Incompatibilities
* The `staticjinja` package has been upgraded from 1.0.4 to 2.0.0


@ -155,6 +155,13 @@ in {
config = mkIf cfg.enable {
assertions = [
{
assertion = config.security.polkit.enable;
message = "The libvirtd module currently requires Polkit to be enabled ('security.polkit.enable = true').";
}
];
environment = {
# this file is expected in /etc/qemu and not sysconfdir (/var/lib)
etc."qemu/bridge.conf".text = lib.concatMapStringsSep "\n" (e:

View file

@ -148,6 +148,7 @@ let
, vimAlias ? false
, viAlias ? false
, configure ? {}
, extraName ? ""
}:
let
/* for compatibility with passing extraPythonPackages as a list; added 2018-07-11 */
@ -160,6 +161,7 @@ let
extraPython3Packages = compatFun extraPython3Packages;
inherit withNodeJs withRuby viAlias vimAlias;
inherit configure;
inherit extraName;
};
in
assert withPython -> throw "Python2 support has been removed from neovim, please remove withPython and extraPythonPackages.";

View file

@ -27,23 +27,24 @@ let
# set to false if you want to control where to save the generated config
# (e.g., in ~/.config/init.vim or project/.nvimrc)
, wrapRc ? true
, neovimRcContent ? ""
, ...
}@args:
let
wrapperArgsStr = if isString wrapperArgs then wrapperArgs else lib.escapeShellArgs wrapperArgs;
# If configure != {}, we can't generate the rplugin.vim file with e.g
# NVIM_SYSTEM_RPLUGIN_MANIFEST *and* NVIM_RPLUGIN_MANIFEST env vars set in
# the wrapper. That's why only when configure != {} (tested both here and
# when postBuild is evaluated), we call makeWrapper once to generate a
# wrapper with most arguments we need, excluding those that cause problems to
# generate rplugin.vim, but still required for the final wrapper.
finalMakeWrapperArgs =
[ "${neovim}/bin/nvim" "${placeholder "out"}/bin/nvim" ]
++ [ "--set" "NVIM_SYSTEM_RPLUGIN_MANIFEST" "${placeholder "out"}/rplugin.vim" ]
++ optionals wrapRc [ "--add-flags" "-u ${writeText "init.vim" args.neovimRcContent}" ]
;
# If configure != {}, we can't generate the rplugin.vim file with e.g
# NVIM_SYSTEM_RPLUGIN_MANIFEST *and* NVIM_RPLUGIN_MANIFEST env vars set in
# the wrapper. That's why only when configure != {} (tested both here and
# when postBuild is evaluated), we call makeWrapper once to generate a
# wrapper with most arguments we need, excluding those that cause problems to
# generate rplugin.vim, but still required for the final wrapper.
finalMakeWrapperArgs =
[ "${neovim}/bin/nvim" "${placeholder "out"}/bin/nvim" ]
++ [ "--set" "NVIM_SYSTEM_RPLUGIN_MANIFEST" "${placeholder "out"}/rplugin.vim" ]
++ optionals wrapRc [ "--add-flags" "-u ${writeText "init.vim" neovimRcContent}" ]
;
in
assert withPython2 -> throw "Python2 support has been removed from the neovim wrapper, please remove withPython2 and python2Env.";
@ -116,7 +117,10 @@ let
preferLocalBuild = true;
nativeBuildInputs = [ makeWrapper ];
passthru = { unwrapped = neovim; };
passthru = {
unwrapped = neovim;
initRc = neovimRcContent;
};
meta = neovim.meta // {
# To prevent builds on hydra

View file

@ -303,10 +303,6 @@ let
ln -sLt $out/lib/mozilla/pkcs11-modules $ext/lib/mozilla/pkcs11-modules/*
done
# For manpages, in case the program supplies them
mkdir -p $out/nix-support
echo ${browser} > $out/nix-support/propagated-user-env-packages
#########################
# #

View file

@ -1,23 +1,31 @@
{ lib, fetchFromGitHub, buildGoModule }:
{ lib, fetchFromGitHub, buildPythonApplication, aiohttp, python-dateutil, humanize, click, pytestCheckHook, tox }:
buildGoModule rec {
buildPythonApplication rec {
pname = "twtxt";
version = "0.1.0";
version = "1.2.3";
src = fetchFromGitHub {
owner = "jointwt";
owner = "buckket";
repo = pname;
rev = version;
sha256 = "15jhfnhpk34nmad04f7xz1w041dba8cn17hq46p9n5sarjgkjiiw";
rev = "v${version}";
sha256 = "sha256-AdM95G2Vz3UbVPI7fs8/D78BMxscbTGrCpIyyHzSmho=";
};
vendorSha256 = "1lnf8wd2rv9d292rp8jndfdg0rjs6gfw0yg49l9spw4yzifnd7f7";
# Relax some dependencies
postPatch = ''
substituteInPlace setup.py \
--replace 'aiohttp>=2.2.5,<3' 'aiohttp' \
--replace 'click>=6.7,<7' 'click' \
--replace 'humanize>=0.5.1,<1' 'humanize'
'';
subPackages = [ "cmd/twt" "cmd/twtd" ];
propagatedBuildInputs = [ aiohttp python-dateutil humanize click ];
checkInputs = [ pytestCheckHook tox ];
meta = with lib; {
description = "Self-hosted, Twitter-like decentralised microblogging platform";
homepage = "https://github.com/jointwt/twtxt";
description = "Decentralised, minimalist microblogging service for hackers";
homepage = "https://github.com/buckket/twtxt";
license = licenses.mit;
maintainers = with maintainers; [ siraben ];
};

View file

@ -14,7 +14,9 @@
if libc == null then
null
else if stdenvNoCC.targetPlatform.isNetBSD then
if libc != targetPackages.netbsdCross.headers then
if !(targetPackages ? netbsdCross) then
netbsd.ld_elf_so
else if libc != targetPackages.netbsdCross.headers then
targetPackages.netbsdCross.ld_elf_so
else
null

View file

@ -1,6 +1,7 @@
{ lib
, buildPythonPackage
, fetchPypi
, isPy3k
, ipython
, traitlets
, glibcLocales
@ -12,6 +13,7 @@
buildPythonPackage rec {
pname = "jupyter_core";
version = "4.7.1";
disabled = !isPy3k;
src = fetchPypi {
inherit pname version;

View file

@ -1,9 +1,12 @@
{ vimUtils, vim_configurable, writeText, neovim, vimPlugins
, lib, fetchFromGitHub, neovimUtils, wrapNeovimUnstable
, neovim-unwrapped
, fetchFromGitLab
, pkgs
}:
let
inherit (vimUtils) buildVimPluginFrom2Nix;
inherit (neovimUtils) makeNeovimConfig;
packages.myVimPackage.start = with vimPlugins; [ vim-nix ];
@ -16,27 +19,55 @@ let
}
];
nvimConfNix = neovimUtils.makeNeovimConfig {
nvimConfNix = makeNeovimConfig {
inherit plugins;
customRC = ''
" just a comment
'';
};
wrapNeovim = suffix: config:
nvimConfDontWrap = makeNeovimConfig {
inherit plugins;
customRC = ''
" just a comment
'';
};
wrapNeovim2 = suffix: config:
wrapNeovimUnstable neovim-unwrapped (config // {
extraName = suffix;
wrapRc = true;
});
nmt = fetchFromGitLab {
owner = "rycee";
repo = "nmt";
rev = "d2cc8c1042b1c2511f68f40e2790a8c0e29eeb42";
sha256 = "1ykcvyx82nhdq167kbnpgwkgjib8ii7c92y3427v986n2s5lsskc";
};
runTest = neovim-drv: buildCommand:
pkgs.runCommandLocal "test-${neovim-drv.name}" ({
nativeBuildInputs = [ ];
meta.platforms = neovim-drv.meta.platforms;
}) (''
source ${nmt}/bash-lib/assertions.sh
vimrc="${writeText "init.vim" neovim-drv.initRc}"
vimrcGeneric="$out/patched.vim"
mkdir $out
${pkgs.perl}/bin/perl -pe "s|\Q$NIX_STORE\E/[a-z0-9]{32}-|$NIX_STORE/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-|g" < "$vimrc" > "$vimrcGeneric"
'' + buildCommand);
in
{
pkgs.recurseIntoAttrs (
rec {
vim_empty_config = vimUtils.vimrcFile { beforePlugins = ""; customRC = ""; };
### neovim tests
##################
nvim_with_plugins = wrapNeovim "-with-plugins" nvimConfNix;
nvim_with_plugins = wrapNeovim2 "-with-plugins" nvimConfNix;
nvim_via_override = neovim.override {
extraName = "-via-override";
configure = {
packages.foo.start = [ vimPlugins.ale ];
customRC = ''
@ -45,6 +76,29 @@ in
};
};
# nixpkgs should detect that no wrapping is necessary
nvimShouldntWrap = wrapNeovim2 "-should-not-wrap" nvimConfNix;
# this will generate a neovimRc content but we disable wrapping
nvimDontWrap = wrapNeovim2 "-dont-wrap" (makeNeovimConfig {
wrapRc = false;
customRC = ''
" this shouldn't trigger the creation of an init.vim
'';
});
nvim_dontwrap-test = runTest nvimDontWrap ''
! grep "-u" ${nvimDontWrap}/bin/nvim
'';
nvim_via_override-test = runTest nvim_via_override ''
assertFileContent \
"$vimrcGeneric" \
"${./neovim-override.vim}"
'';
### vim tests
##################
vim_with_vim2nix = vim_configurable.customize {
@ -107,4 +161,4 @@ in
test_nvim_with_remote_plugin = neovim.override {
configure.pathogen.pluginNames = with vimPlugins; [ deoplete-nvim ];
};
}
})

View file

@ -0,0 +1,7 @@
" configuration generated by NIX
set nocompatible
set packpath^=/nix/store/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-vim-pack-dir
set runtimepath^=/nix/store/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-vim-pack-dir
:help ale

View file

@ -9346,7 +9346,7 @@ in
twitterBootstrap = callPackage ../development/web/twitter-bootstrap {};
twtxt = callPackage ../applications/networking/twtxt { };
twtxt = python3Packages.callPackage ../applications/networking/twtxt { };
txr = callPackage ../tools/misc/txr { stdenv = clangStdenv; };