julia.withPackages: init on supported Julias (1.6, 1.8, 1.9)
Adds the ability to build arbitrary Julia environments in Nixpkgs, in the same style as python.withPackages.
This commit is contained in:
parent 9a9178ba2a
commit 36bf6afd42
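The passthru mirrors python.withPackages: you hand it a list of package names and get back a derivation with a wrapped julia on PATH. A minimal usage sketch ("Plots" is just an illustrative package name):

    with import <nixpkgs> {};
    # Build an environment containing Plots and everything it needs
    julia.withPackages [ "Plots" ]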
pkgs/development/compilers/julia/default.nix (new file, 20 lines)
@@ -0,0 +1,20 @@
{ callPackage }:

let
  juliaWithPackages = callPackage ../../julia-modules {};

  wrapJulia = julia: julia.overrideAttrs (oldAttrs: {
    passthru = (oldAttrs.passthru or {}) // {
      withPackages = juliaWithPackages.override { inherit julia; };
    };
  });

in

{
  julia_16-bin = wrapJulia (callPackage ./1.6-bin.nix {});
  julia_18-bin = wrapJulia (callPackage ./1.8-bin.nix {});
  julia_19-bin = wrapJulia (callPackage ./1.9-bin.nix {});
  julia_18 = wrapJulia (callPackage ./1.8.nix {});
  julia_19 = wrapJulia (callPackage ./1.9.nix {});
}
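Since withPackages is attached as a passthru on each wrapped interpreter, an environment can also be requested from a specific Julia version; a minimal shell.nix sketch, with "Example" as an illustrative package name:

    # shell.nix (sketch)
    with import <nixpkgs> {};
    mkShell {
      packages = [ (julia_19.withPackages [ "Example" ]) ];
    }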
pkgs/development/julia-modules/default.nix (new file, 193 lines)
@@ -0,0 +1,193 @@
{ lib
, callPackage
, runCommand
, fetchFromGitHub
, fetchgit
, fontconfig
, git
, makeWrapper
, writeText
, writeTextFile
, python3

# Artifacts dependencies
, fetchurl
, glibc
, pkgs
, stdenv

, julia

# Special registry which is equal to JuliaRegistries/General, but every Versions.toml
# entry is augmented with a Nix sha256 hash
, augmentedRegistry ? callPackage ./registry.nix {}

# Other overridable arguments
, extraLibs ? []
, precompile ? true
, setDefaultDepot ? true
, makeWrapperArgs ? ""
, packageOverrides ? {}
, makeTransitiveDependenciesImportable ? false # Used to support symbol indexing
}:

packageNames:

let
  util = callPackage ./util.nix {};

in

let
  # Some Julia packages require access to Python. Provide a Nixpkgs version so it
  # doesn't try to install its own.
  pythonToUse = let
    extraPythonPackages = ((callPackage ./extra-python-packages.nix { inherit python3; }).getExtraPythonPackages packageNames);
    in (if extraPythonPackages == [] then python3
        else util.addPackagesToPython python3 (map (pkg: lib.getAttr pkg python3.pkgs) extraPythonPackages));

  # Start by wrapping Julia so it has access to Python and any other extra libs.
  # Also, prevent various packages (CondaPkg.jl, PythonCall.jl) from trying to do network calls.
  juliaWrapped = runCommand "julia-${julia.version}-wrapped" { buildInputs = [makeWrapper]; inherit makeWrapperArgs; } ''
    mkdir -p $out/bin
    makeWrapper ${julia}/bin/julia $out/bin/julia \
      --suffix LD_LIBRARY_PATH : "${lib.makeLibraryPath extraLibs}" \
      --set FONTCONFIG_FILE ${fontconfig.out}/etc/fonts/fonts.conf \
      --set PYTHONHOME "${pythonToUse}" \
      --prefix PYTHONPATH : "${pythonToUse}/${pythonToUse.sitePackages}" \
      --set PYTHON ${pythonToUse}/bin/python $makeWrapperArgs \
      --set JULIA_CONDAPKG_OFFLINE yes \
      --set JULIA_CONDAPKG_BACKEND Null \
      --set JULIA_PYTHONCALL_EXE "@PyCall"
  '';

  # If our closure ends up with certain packages, add others.
  packageImplications = {
    # Because we want to put PythonCall in PyCall mode so it doesn't try to download
    # Python packages
    PythonCall = ["PyCall"];
  };

  # Invoke Julia resolution logic to determine the full dependency closure
  packageOverridesRepoified = lib.mapAttrs util.repoifySimple packageOverrides;
  closureYaml = callPackage ./package-closure.nix {
    inherit augmentedRegistry julia packageNames packageImplications;
    packageOverrides = packageOverridesRepoified;
  };

  # Generate a Nix file consisting of a map from dependency UUID --> package info with fetchgit call:
  # {
  #   "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" = {
  #     src = fetchgit {...};
  #     name = "...";
  #     version = "...";
  #     treehash = "...";
  #   };
  #   ...
  # }
  dependencies = runCommand "julia-sources.nix" { buildInputs = [(python3.withPackages (ps: with ps; [toml pyyaml])) git]; } ''
    python ${./python}/sources_nix.py \
      "${augmentedRegistry}" \
      '${lib.generators.toJSON {} packageOverridesRepoified}' \
      "${closureYaml}" \
      "$out"
  '';

  # Import the Nix file from the previous step (IFD) and turn each dependency repo into
  # a dummy Git repository, as Julia expects. Format the results as a YAML map from
  # dependency UUID -> Nix store location:
  # {
  #   "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3":"/nix/store/...-NaNMath.jl-0877504",
  #   ...
  # }
  # This is also the point where we apply the packageOverrides.
  dependencyUuidToInfo = import dependencies { inherit fetchgit; };
  fillInOverrideSrc = uuid: info:
    if lib.hasAttr info.name packageOverrides then (info // { src = lib.getAttr info.name packageOverrides; }) else info;
  dependencyUuidToRepo = lib.mapAttrs util.repoifyInfo (lib.mapAttrs fillInOverrideSrc dependencyUuidToInfo);
  dependencyUuidToRepoYaml = writeTextFile {
    name = "dependency-uuid-to-repo.yml";
    text = lib.generators.toYAML {} dependencyUuidToRepo;
  };

  # Given the augmented registry, closure info yaml, and dependency path yaml, construct a complete
  # Julia registry containing all the necessary packages
  dependencyUuidToInfoYaml = writeTextFile {
    name = "dependency-uuid-to-info.yml";
    text = lib.generators.toYAML {} dependencyUuidToInfo;
  };
  fillInOverrideSrc' = uuid: info:
    if lib.hasAttr info.name packageOverridesRepoified then (info // { src = lib.getAttr info.name packageOverridesRepoified; }) else info;
  overridesOnly = lib.mapAttrs fillInOverrideSrc' (lib.filterAttrs (uuid: info: info.src == null) dependencyUuidToInfo);
  minimalRegistry = runCommand "minimal-julia-registry" { buildInputs = [(python3.withPackages (ps: with ps; [toml pyyaml])) git]; } ''
    python ${./python}/minimal_registry.py \
      "${augmentedRegistry}" \
      "${closureYaml}" \
      '${lib.generators.toJSON {} overridesOnly}' \
      "${dependencyUuidToRepoYaml}" \
      "$out"
  '';

  # Next, deal with artifacts. Scan each artifacts file individually and generate a Nix file that
  # produces the desired Overrides.toml.
  artifactsNix = runCommand "julia-artifacts.nix" { buildInputs = [(python3.withPackages (ps: with ps; [toml pyyaml]))]; } ''
    python ${./python}/extract_artifacts.py \
      "${dependencyUuidToRepoYaml}" \
      "${closureYaml}" \
      "${juliaWrapped}/bin/julia" \
      "${if lib.versionAtLeast julia.version "1.7" then ./extract_artifacts.jl else ./extract_artifacts_16.jl}" \
      '${lib.generators.toJSON {} (import ./extra-libs.nix)}' \
      "$out"
  '';

  # Import the artifacts Nix to build Overrides.toml (IFD)
  artifacts = import artifactsNix { inherit lib fetchurl pkgs glibc stdenv; };
  overridesJson = writeTextFile {
    name = "Overrides.json";
    text = lib.generators.toJSON {} artifacts;
  };
  overridesToml = runCommand "Overrides.toml" { buildInputs = [(python3.withPackages (ps: with ps; [toml]))]; } ''
    python ${./python}/format_overrides.py \
      "${overridesJson}" \
      "$out"
  '';

  # Build a Julia project and depot. The project contains Project.toml/Manifest.toml, while the
  # depot contains package build products (including the precompiled libraries, if precompile=true)
  projectAndDepot = callPackage ./depot.nix {
    inherit closureYaml extraLibs overridesToml packageImplications precompile;
    julia = juliaWrapped;
    registry = minimalRegistry;
    packageNames = if makeTransitiveDependenciesImportable
      then lib.mapAttrsToList (uuid: info: info.name) dependencyUuidToInfo
      else packageNames;
  };

in

runCommand "julia-${julia.version}-env" {
  buildInputs = [makeWrapper];

  inherit julia;
  inherit juliaWrapped;

  # Expose the steps we used along the way in case the user wants to use them, for example to build
  # expressions and build them separately to avoid IFD.
  inherit dependencies;
  inherit closureYaml;
  inherit dependencyUuidToInfoYaml;
  inherit dependencyUuidToRepoYaml;
  inherit minimalRegistry;
  inherit artifactsNix;
  inherit overridesJson;
  inherit overridesToml;
  inherit projectAndDepot;
} (''
  mkdir -p $out/bin
  makeWrapper ${juliaWrapped}/bin/julia $out/bin/julia \
    --suffix JULIA_DEPOT_PATH : "${projectAndDepot}/depot" \
    --set-default JULIA_PROJECT "${projectAndDepot}/project" \
    --set-default JULIA_LOAD_PATH '@:${projectAndDepot}/project/Project.toml:@v#.#:@stdlib'
'' + lib.optionalString setDefaultDepot ''
  sed -i '2 i\JULIA_DEPOT_PATH=''${JULIA_DEPOT_PATH-"$HOME/.julia"}' $out/bin/julia
'')
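As the comment above the final runCommand notes, every intermediate derivation is exposed on the result. A hedged sketch of pulling one out, e.g. to inspect the resolved closure on its own ("Example" is an illustrative package name):

    with import <nixpkgs> {};
    # The resolved package closure (a YAML file) for an illustrative environment
    (julia.withPackages [ "Example" ]).closureYaml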
pkgs/development/julia-modules/depot.nix (new file, 85 lines)
@@ -0,0 +1,85 @@
{ lib
, runCommand

, cacert
, curl
, git
, julia
, python3

, closureYaml
, extraLibs
, overridesToml
, packageNames
, packageImplications
, precompile
, registry
}:

runCommand "julia-depot" {
  nativeBuildInputs = [curl git julia (python3.withPackages (ps: with ps; [pyyaml]))] ++ extraLibs;
  inherit precompile registry;
} ''
  export HOME=$(pwd)

  echo "Building Julia depot and project with the following inputs"
  echo "Julia: ${julia}"
  echo "Registry: $registry"
  echo "Overrides ${overridesToml}"

  mkdir -p $out/project
  export JULIA_PROJECT="$out/project"

  mkdir -p $out/depot/artifacts
  export JULIA_DEPOT_PATH="$out/depot"
  cp ${overridesToml} $out/depot/artifacts/Overrides.toml

  # These can be useful to debug problems
  # export JULIA_DEBUG=Pkg
  # export JULIA_DEBUG=loading

  export JULIA_SSL_CA_ROOTS_PATH="${cacert}/etc/ssl/certs/ca-bundle.crt"

  # Only precompile if configured to below
  export JULIA_PKG_PRECOMPILE_AUTO=0

  # Prevent a warning where Julia tries to download package server info
  export JULIA_PKG_SERVER=""

  # See if we need to add any extra package names based on the closure
  # and the packageImplications. We're using the full closure YAML here since
  # it's available, which is slightly weird, but it should work just as well
  # for finding the extra packages we need to add
  python ${./python}/find_package_implications.py "${closureYaml}" '${lib.generators.toJSON {} packageImplications}' extra_package_names.txt

  # git config --global --add safe.directory '/nix'
  export JULIA_PKG_USE_CLI_GIT="true"

  julia -e ' \
    import Pkg
    import Pkg.Types: PRESERVE_NONE

    Pkg.Registry.add(Pkg.RegistrySpec(path="${registry}"))

    input = ${lib.generators.toJSON {} packageNames} ::Vector{String}

    if isfile("extra_package_names.txt")
      append!(input, readlines("extra_package_names.txt"))
    end

    input = unique(input)

    if !isempty(input)
      println("Adding packages: " * join(input, " "))
      Pkg.add(input; preserve=PRESERVE_NONE)
      Pkg.instantiate()

      if "precompile" in keys(ENV) && ENV["precompile"] != "0" && ENV["precompile"] != ""
        Pkg.precompile()
      end
    end

    # Remove the registry to save space
    Pkg.Registry.rm("General")
  '
''
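Precompilation here is driven by the precompile flag threaded down from the top-level arguments; a hedged sketch of switching it off via .override ("Example" is an illustrative package name):

    with import <nixpkgs> {};
    (julia.withPackages.override { precompile = false; }) [ "Example" ]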
pkgs/development/julia-modules/extra-libs.nix (new file, 15 lines)
@@ -0,0 +1,15 @@
# A map from a Julia package (typically a JLL package) to extra libraries
# that they require from Nix.
# The libraries should be strings evaluated in a "with pkgs" context.

{
  # Qt5Base_jll
  # Needs access to dbus or you get "Cannot find libdbus-1 in your system"
  # Repro: build environment with ["Plots"]
  # > using Plots; plot(cos, 0, 2pi)
  "ea2cea3b-5b76-57ae-a6ef-0a8af62496e1" = ["dbus.lib"];

  # Qt6Base_jll
  # Same reason as Qt5Base_jll
  "c0090381-4147-56d7-9ebc-da0b1113ec56" = ["dbus.lib"];
}
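The strings in this map are later looked up in a "with pkgs" context when the artifact fix-up code is generated; a minimal sketch of that evaluation, assuming dbus is the nixpkgs attribute referenced above:

    with import <nixpkgs> {};
    # Resolves the "dbus.lib" entry to a library path for the patchelf RPATH
    lib.makeLibraryPath [ dbus.lib ]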
pkgs/development/julia-modules/extra-python-packages.nix (new file, 22 lines)
@@ -0,0 +1,22 @@
{ lib
, python3
}:

# This file contains an extra mapping from Julia packages to the Python packages they depend on.

with lib;

rec {
  packageMapping = {
    ExcelFiles = ["xlrd"];
    PyPlot = ["matplotlib"];
    PythonPlot = ["matplotlib"];
    SymPy = ["sympy"];
  };

  getExtraPythonPackages = names: concatMap (name: let
      allCandidates = if hasAttr name packageMapping then getAttr name packageMapping else [];
    in
      filter (x: hasAttr x python3.pkgs) allCandidates
  ) names;
}
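A small worked example of getExtraPythonPackages, evaluated from a nixpkgs checkout and assuming matplotlib and sympy exist in python3.pkgs (a package with no entry simply contributes nothing):

    with import <nixpkgs> {};
    (callPackage ./pkgs/development/julia-modules/extra-python-packages.nix { inherit python3; })
      .getExtraPythonPackages [ "PyPlot" "SymPy" "DataFrames" ]
    # -> [ "matplotlib" "sympy" ]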
pkgs/development/julia-modules/extract_artifacts.jl (new executable file, 63 lines)
@@ -0,0 +1,63 @@
import Base: UUID
import Pkg.Artifacts: artifact_meta, artifact_names, find_artifacts_toml, load_artifacts_toml, select_downloadable_artifacts
import Pkg.BinaryPlatforms: AbstractPlatform, platform_key_abi, triplet
import Pkg.Operations: gen_build_code
import TOML

pkg_uuid = UUID(ARGS[1])
dir = ARGS[2]

artifacts_toml = find_artifacts_toml(dir)

if artifacts_toml == nothing
  print("")
  exit()
end

platform = platform_key_abi()

# Using collect_artifacts (from Pkg.jl) is more reliable than calling select_downloadable_artifacts directly.
# collect_artifacts includes support for .pkg/select_artifacts.jl, which may produce different results.
# If we use select_downloadable_artifacts here, then at depot build time it may try to download a different artifact
# and fail.

# However! The collect_artifacts from Pkg.jl doesn't allow us to pass lazy to select_downloadable_artifacts.
# So we have to paste our own version in here :(

function collect_artifacts(pkg_root::String; platform::AbstractPlatform)
  # Check to see if this package has an (Julia)Artifacts.toml
  artifacts_tomls = Tuple{String,Base.TOML.TOMLDict}[]
  for f in artifact_names
    artifacts_toml = joinpath(pkg_root, f)
    if isfile(artifacts_toml)
      selector_path = joinpath(pkg_root, ".pkg", "select_artifacts.jl")

      # If there is a dynamic artifact selector, run that in an appropriate sandbox to select artifacts
      if isfile(selector_path)
        # Despite the fact that we inherit the project, since the in-memory manifest
        # has not been updated yet, if we try to load any dependencies, it may fail.
        # Therefore, this project inheritance is really only for Preferences, not dependencies.
        select_cmd = Cmd(`$(gen_build_code(selector_path; inherit_project=true)) --startup-file=no $(triplet(platform))`)
        meta_toml = String(read(select_cmd))
        res = TOML.tryparse(meta_toml)
        if res isa TOML.ParserError
          errstr = sprint(showerror, res; context=stderr)
          pkgerror("failed to parse TOML output from running $(repr(selector_path)), got: \n$errstr")
        else
          push!(artifacts_tomls, (artifacts_toml, TOML.parse(meta_toml)))
        end
      else
        # Otherwise, use the standard selector from `Artifacts`
        artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy=true)
        push!(artifacts_tomls, (artifacts_toml, artifacts))
      end
      break
    end
  end
  return artifacts_tomls
end

for (artifacts_toml, artifacts) in collect_artifacts(dir; platform)
  TOML.print(artifacts)
end
pkgs/development/julia-modules/extract_artifacts_16.jl (new executable file, 33 lines)
@@ -0,0 +1,33 @@
import Base: UUID
import Pkg.Artifacts: artifact_meta, find_artifacts_toml, load_artifacts_toml
import Pkg.BinaryPlatforms: platform_key_abi
import TOML

pkg_uuid = UUID(ARGS[1])
dir = ARGS[2]

artifacts_toml = find_artifacts_toml(dir)

if artifacts_toml == nothing
  print("")
  exit()
end

platform = platform_key_abi()

# Older Julia doesn't provide select_downloadable_artifacts or .pkg/select_artifacts.jl,
# so gather the artifacts the old-fashioned way
artifact_dict = load_artifacts_toml(artifacts_toml; pkg_uuid=pkg_uuid)

results = Dict()
for name in keys(artifact_dict)
  # Get the metadata about this name for the requested platform
  meta = artifact_meta(name, artifact_dict, artifacts_toml; platform=platform)

  # If there are no instances of this name for the desired platform, skip it
  meta === nothing && continue

  results[name] = meta
end
TOML.print(results)
pkgs/development/julia-modules/package-closure.nix (new file, 180 lines)
@@ -0,0 +1,180 @@
{ lib
, julia
, python3
, runCommand

, augmentedRegistry
, packageNames
, packageOverrides
, packageImplications
}:

let
  # The specific package resolution code depends on the Julia version
  # These are pretty similar and could be combined to reduce duplication
  resolveCode = if lib.versionOlder julia.version "1.7" then resolveCode1_6 else resolveCode1_8;

  resolveCode1_6 = ''
    import Pkg.API: check_package_name
    import Pkg.Types: Context!, PRESERVE_NONE, manifest_info, project_deps_resolve!, registry_resolve!, stdlib_resolve!, ensure_resolved
    import Pkg.Operations: _resolve, assert_can_add, is_dep, update_package_add

    foreach(pkg -> check_package_name(pkg.name, :add), pkgs)
    pkgs = deepcopy(pkgs) # deepcopy to avoid mutating PackageSpec members
    Context!(ctx)

    project_deps_resolve!(ctx, pkgs)
    registry_resolve!(ctx, pkgs)
    stdlib_resolve!(pkgs)
    ensure_resolved(ctx, pkgs, registry=true)

    assert_can_add(ctx, pkgs)

    for (i, pkg) in pairs(pkgs)
      entry = manifest_info(ctx, pkg.uuid)
      pkgs[i] = update_package_add(ctx, pkg, entry, is_dep(ctx, pkg))
    end

    foreach(pkg -> ctx.env.project.deps[pkg.name] = pkg.uuid, pkgs)

    pkgs, deps_map = _resolve(ctx, pkgs, PRESERVE_NONE)
  '';

  resolveCode1_8 = ''
    import Pkg.API: handle_package_input!
    import Pkg.Types: PRESERVE_NONE, project_deps_resolve!, registry_resolve!, stdlib_resolve!, ensure_resolved
    import Pkg.Operations: _resolve, assert_can_add, update_package_add

    foreach(handle_package_input!, pkgs)

    # The handle_package_input! call above clears pkg.path, so we have to apply package overrides after
    overrides = Dict{String, String}(${builtins.concatStringsSep ", " (lib.mapAttrsToList (name: path: ''"${name}" => "${path}"'') packageOverrides)})
    println("Package overrides: ")
    println(overrides)
    for pkg in pkgs
      if pkg.name in keys(overrides)
        pkg.path = overrides[pkg.name]
      end
    end

    project_deps_resolve!(ctx.env, pkgs)
    registry_resolve!(ctx.registries, pkgs)
    stdlib_resolve!(pkgs)
    ensure_resolved(ctx, ctx.env.manifest, pkgs, registry=true)

    assert_can_add(ctx, pkgs)

    for (i, pkg) in pairs(pkgs)
      entry = Pkg.Types.manifest_info(ctx.env.manifest, pkg.uuid)
      is_dep = any(uuid -> uuid == pkg.uuid, [uuid for (name, uuid) in ctx.env.project.deps])
      pkgs[i] = update_package_add(ctx, pkg, entry, is_dep)
    end

    foreach(pkg -> ctx.env.project.deps[pkg.name] = pkg.uuid, pkgs)

    # Save the original pkgs for later. We might need to augment it with the weak dependencies
    orig_pkgs = pkgs

    pkgs, deps_map = _resolve(ctx.io, ctx.env, ctx.registries, pkgs, PRESERVE_NONE, ctx.julia_version)

    if VERSION >= VersionNumber("1.9")
      # Check for weak dependencies, which appear on the RHS of the deps_map but not in pkgs.
      # Build up weak_name_to_uuid
      uuid_to_name = Dict()
      for pkg in pkgs
        uuid_to_name[pkg.uuid] = pkg.name
      end
      weak_name_to_uuid = Dict()
      for (uuid, deps) in pairs(deps_map)
        for (dep_name, dep_uuid) in pairs(deps)
          if !haskey(uuid_to_name, dep_uuid)
            weak_name_to_uuid[dep_name] = dep_uuid
          end
        end
      end

      # If we have nontrivial weak dependencies, add each one to the initial pkgs and then re-run _resolve
      if !isempty(weak_name_to_uuid)
        println("Found weak dependencies: $(keys(weak_name_to_uuid))")

        orig_uuids = Set([pkg.uuid for pkg in orig_pkgs])

        for (name, uuid) in pairs(weak_name_to_uuid)
          if uuid in orig_uuids
            continue
          end

          pkg = PackageSpec(name, uuid)

          push!(orig_uuids, uuid)
          push!(orig_pkgs, pkg)
          ctx.env.project.deps[name] = uuid
          entry = Pkg.Types.manifest_info(ctx.env.manifest, uuid)
          orig_pkgs[length(orig_pkgs)] = update_package_add(ctx, pkg, entry, false)
        end

        pkgs, deps_map = _resolve(ctx.io, ctx.env, ctx.registries, orig_pkgs, PRESERVE_NONE, ctx.julia_version)
      end
    end
  '';

  juliaExpression = packageNames: ''
    import Pkg
    Pkg.Registry.add(Pkg.RegistrySpec(path="${augmentedRegistry}"))

    import Pkg.Types: Context, PackageSpec

    input = ${lib.generators.toJSON {} packageNames}

    if isfile("extra_package_names.txt")
      append!(input, readlines("extra_package_names.txt"))
    end

    input = unique(input)

    println("Resolving packages: " * join(input, " "))

    pkgs = [PackageSpec(pkg) for pkg in input]

    ctx = Context()

    ${resolveCode}

    open(ENV["out"], "w") do io
      for spec in pkgs
        println(io, "- name: " * spec.name)
        println(io, "  uuid: " * string(spec.uuid))
        println(io, "  version: " * string(spec.version))
        if endswith(spec.name, "_jll") && haskey(deps_map, spec.uuid)
          println(io, "  depends_on: ")
          for (dep_name, dep_uuid) in pairs(deps_map[spec.uuid])
            println(io, "    \"$(dep_name)\": \"$(dep_uuid)\"")
          end
        end
      end
    end
  '';
in

runCommand "julia-package-closure.yml" { buildInputs = [julia (python3.withPackages (ps: with ps; [pyyaml]))]; } ''
  mkdir home
  export HOME=$(pwd)/home

  echo "Resolving Julia packages with the following inputs"
  echo "Julia: ${julia}"
  echo "Registry: ${augmentedRegistry}"

  # Prevent a warning where Julia tries to download package server info
  export JULIA_PKG_SERVER=""

  julia -e '${juliaExpression packageNames}';

  # See if we need to add any extra package names based on the closure
  # and the packageImplications
  python ${./python}/find_package_implications.py "$out" '${lib.generators.toJSON {} packageImplications}' extra_package_names.txt

  if [ -f extra_package_names.txt ]; then
    echo "Re-resolving with additional package names"
    julia -e '${juliaExpression packageNames}';
  fi
''
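The packageOverrides argument threaded into this resolution maps a package name to a source path, which becomes pkg.path in resolveCode1_8 above; a hedged sketch of pointing one package at a local checkout (the package name and path are hypothetical):

    with import <nixpkgs> {};
    (julia.withPackages.override {
      packageOverrides = { "Example" = /home/user/src/Example.jl; };
    }) [ "Example" ]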
pkgs/development/julia-modules/python/dag.py (new file, 59 lines)
@@ -0,0 +1,59 @@
# This file is based on a ChatGPT response for the following prompt:
# "can you write code in python to build up a DAG representing
# a dependency tree, and then a function that can return all the
# dependencies of a given node?"

class Node:
    def __init__(self, name):
        self.name = name
        self.dependencies = set()


class DAG:
    def __init__(self):
        self.nodes = {}

    def add_node(self, node_name, dependencies=None):
        if node_name in self.nodes:
            raise ValueError(f"Node '{node_name}' already exists in the graph.")

        node = Node(node_name)
        if dependencies:
            node.dependencies.update(dependencies)

        self.nodes[node_name] = node

    def add_dependency(self, node_name, dependency_name):
        if node_name not in self.nodes:
            raise ValueError(f"Node '{node_name}' does not exist in the graph.")

        if dependency_name not in self.nodes:
            raise ValueError(f"Dependency '{dependency_name}' does not exist in the graph.")

        self.nodes[node_name].dependencies.add(dependency_name)

    def get_dependencies(self, node_name):
        if node_name not in self.nodes:
            raise ValueError(f"Node '{node_name}' does not exist in the graph.")

        node = self.nodes[node_name]
        dependencies = set()

        def traverse_dependencies(current_node):
            for dependency in current_node.dependencies:
                dependencies.add(dependency)
                if dependency in self.nodes:
                    traverse_dependencies(self.nodes[dependency])

        traverse_dependencies(node)
        return dependencies

    def has_node(self, node_name):
        return node_name in self.nodes

    def __str__(self):
        graph_str = ""
        for node_name, node in self.nodes.items():
            graph_str += f"{node_name} -> {', '.join(node.dependencies)}\n"
        return graph_str
pkgs/development/julia-modules/python/dedup_overrides.py (new executable file, 14 lines)
@@ -0,0 +1,14 @@
import json
from pathlib import Path
import sys
import toml

overrides_path = Path(sys.argv[1])
out_path = Path(sys.argv[2])

with open(overrides_path, "r") as f:
    overrides = json.loads(f.read())

with open(out_path, "w") as f:
    toml.dump(overrides, f)
pkgs/development/julia-modules/python/extract_artifacts.py (new executable file, 99 lines)
@@ -0,0 +1,99 @@
import json
from pathlib import Path
import multiprocessing
import subprocess
import sys
import toml
import yaml

import dag

dependencies_path = Path(sys.argv[1])
closure_yaml_path = Path(sys.argv[2])
julia_path = Path(sys.argv[3])
extract_artifacts_script = Path(sys.argv[4])
extra_libs = json.loads(sys.argv[5])
out_path = Path(sys.argv[6])

with open(dependencies_path, "r") as f:
    dependencies = yaml.safe_load(f)
    dependency_uuids = dependencies.keys()

with open(closure_yaml_path, "r") as f:
    # Build up a map of UUID -> closure information
    closure_yaml_list = yaml.safe_load(f) or []
    closure_yaml = {}
    for item in closure_yaml_list:
        closure_yaml[item["uuid"]] = item

# Build up a dependency graph of UUIDs
closure_dependencies_dag = dag.DAG()
for uuid, contents in closure_yaml.items():
    if contents.get("depends_on"):
        closure_dependencies_dag.add_node(uuid, dependencies=contents["depends_on"].values())

with open(out_path, "w") as f:
    f.write("{ lib, fetchurl, glibc, pkgs, stdenv }:\n\n")
    f.write("rec {\n")

    def process_item(item):
        uuid, src = item
        lines = []
        artifacts = toml.loads(subprocess.check_output([julia_path, extract_artifacts_script, uuid, src]).decode())
        if not artifacts: return f' uuid-{uuid} = {{}};\n'

        lines.append(f' uuid-{uuid} = {{')

        for artifact_name, details in artifacts.items():
            if len(details["download"]) == 0: continue
            download = details["download"][0]
            url = download["url"]
            sha256 = download["sha256"]

            git_tree_sha1 = details["git-tree-sha1"]

            depends_on = set()
            if closure_dependencies_dag.has_node(uuid):
                depends_on = set(closure_dependencies_dag.get_dependencies(uuid)).intersection(dependency_uuids)

            other_libs = extra_libs.get(uuid, [])

            fixup = f"""fixupPhase = let
              libs = lib.concatMap (lib.mapAttrsToList (k: v: v.path))
                [{" ".join(["uuid-" + x for x in depends_on])}];
            in ''
              find $out -type f -executable -exec \
                patchelf --set-rpath \$ORIGIN:\$ORIGIN/../lib:${{lib.makeLibraryPath (["$out" glibc] ++ libs ++ (with pkgs; [{" ".join(other_libs)}]))}} {{}} \;
              find $out -type f -executable -exec \
                patchelf --set-interpreter ${{glibc}}/lib/ld-linux-x86-64.so.2 {{}} \;
            ''"""

            derivation = f"""{{
              name = "{artifact_name}";
              src = fetchurl {{
                url = "{url}";
                sha256 = "{sha256}";
              }};
              sourceRoot = ".";
              dontConfigure = true;
              dontBuild = true;
              installPhase = "cp -r . $out";
              {fixup};
            }}"""

            lines.append(f""" "{artifact_name}" = {{
              sha1 = "{git_tree_sha1}";
              path = stdenv.mkDerivation {derivation};
            }};\n""")

        lines.append(' };\n')

        return "\n".join(lines)

    with multiprocessing.Pool(10) as pool:
        for s in pool.map(process_item, dependencies.items()):
            f.write(s)

    f.write(f"""
}}\n""")
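For orientation, a hedged sketch of the shape of the julia-artifacts.nix file this script emits; every attribute name and value below is a placeholder, not real data:

    { lib, fetchurl, glibc, pkgs, stdenv }:
    rec {
      "uuid-<jll-package-uuid>" = {
        "<artifact-name>" = {
          sha1 = "<git-tree-sha1>";
          path = stdenv.mkDerivation {
            name = "<artifact-name>";
            src = fetchurl { url = "<artifact-url>"; sha256 = "<sha256>"; };
            sourceRoot = ".";
            dontConfigure = true;
            dontBuild = true;
            installPhase = "cp -r . $out";
            # fixupPhase runs patchelf against glibc plus the dependency libraries
          };
        };
      };
    }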
pkgs/development/julia-modules/python/find_package_implications.py (new file, 24 lines)
@@ -0,0 +1,24 @@
import json
import os
from pathlib import Path
import subprocess
import sys
import yaml

dependencies_path = Path(sys.argv[1])
package_implications_json = sys.argv[2]
out_path = Path(sys.argv[3])

package_implications = json.loads(package_implications_json)
with open(dependencies_path) as f:
    desired_packages = yaml.safe_load(f) or []

extra_package_names = []
for pkg in desired_packages:
    if pkg["name"] in package_implications:
        extra_package_names.extend(package_implications[pkg["name"]])

if len(extra_package_names) > 0:
    with open(out_path, "w") as f:
        f.write("\n".join(extra_package_names))
pkgs/development/julia-modules/python/format_overrides.py (new file, 22 lines)
@@ -0,0 +1,22 @@
import json
from pathlib import Path
import sys
import toml

overrides_path = Path(sys.argv[1])
out_path = Path(sys.argv[2])

with open(overrides_path, "r") as f:
    overrides = json.loads(f.read())

result = {}

for (uuid, artifacts) in overrides.items():
    if len(artifacts) == 0: continue

    for (name, info) in artifacts.items():
        result[info["sha1"]] = info["path"]

with open(out_path, "w") as f:
    toml.dump(result, f)
pkgs/development/julia-modules/python/minimal_registry.py (new executable file, 98 lines)
@@ -0,0 +1,98 @@
from collections import defaultdict
import copy
import json
import os
from pathlib import Path
import shutil
import subprocess
import sys
import tempfile
import toml
import util
import yaml


registry_path = Path(sys.argv[1])
desired_packages_path = Path(sys.argv[2])
package_overrides = json.loads(sys.argv[3])
dependencies_path = Path(sys.argv[4])
out_path = Path(sys.argv[5])

with open(desired_packages_path, "r") as f:
    desired_packages = yaml.safe_load(f) or []

uuid_to_versions = defaultdict(list)
for pkg in desired_packages:
    uuid_to_versions[pkg["uuid"]].append(pkg["version"])

with open(dependencies_path, "r") as f:
    uuid_to_store_path = yaml.safe_load(f)

os.makedirs(out_path)

registry = toml.load(registry_path / "Registry.toml")
registry["packages"] = {k: v for k, v in registry["packages"].items() if k in uuid_to_versions}

for (uuid, versions) in uuid_to_versions.items():
    if uuid in package_overrides:
        info = package_overrides[uuid]

        # Make a registry entry based on the info from the package override
        path = Path(info["name"][0].upper()) / Path(info["name"])
        registry["packages"][uuid] = {
            "name": info["name"],
            "path": str(path),
        }

        os.makedirs(out_path / path)

        # Read the Project.toml from the src
        project = toml.load(Path(info["src"]) / "Project.toml")

        # Generate all the registry files
        with open(out_path / path / Path("Compat.toml"), "w") as f:
            f.write('["%s"]\n' % info["version"])
            # Write nothing in Compat.toml, because we've already resolved everything
        with open(out_path / path / Path("Deps.toml"), "w") as f:
            f.write('["%s"]\n' % info["version"])
            toml.dump(project["deps"], f)
        with open(out_path / path / Path("Versions.toml"), "w") as f:
            f.write('["%s"]\n' % info["version"])
            f.write('git-tree-sha1 = "%s"\n' % info["treehash"])
        with open(out_path / path / Path("Package.toml"), "w") as f:
            toml.dump({
                "name": info["name"],
                "uuid": uuid,
                "repo": "file://" + info["src"],
            }, f)

    elif uuid in registry["packages"]:
        registry_info = registry["packages"][uuid]
        name = registry_info["name"]
        path = registry_info["path"]

        os.makedirs(out_path / path)

        # Copy some files to the minimal repo unchanged
        for f in ["Compat.toml", "Deps.toml"]:
            if (registry_path / path / f).exists():
                shutil.copy2(registry_path / path / f, out_path / path)

        # Copy the Versions.toml file, trimming down to the versions we care about
        all_versions = toml.load(registry_path / path / "Versions.toml")
        versions_to_keep = {k: v for k, v in all_versions.items() if k in versions}
        for k, v in versions_to_keep.items():
            del v["nix-sha256"]
        with open(out_path / path / "Versions.toml", "w") as f:
            toml.dump(versions_to_keep, f)

        # Fill in the local store path for the repo
        if not uuid in uuid_to_store_path: continue
        package_toml = toml.load(registry_path / path / "Package.toml")
        package_toml["repo"] = "file://" + uuid_to_store_path[uuid]
        with open(out_path / path / "Package.toml", "w") as f:
            toml.dump(package_toml, f)

with open(out_path / "Registry.toml", "w") as f:
    toml.dump(registry, f)
pkgs/development/julia-modules/python/sources_nix.py (new executable file, 67 lines)
@@ -0,0 +1,67 @@
import json
from pathlib import Path
import re
import shutil
import sys
import toml
import util
import yaml


registry_path = Path(sys.argv[1])
package_overrides = json.loads(sys.argv[2])
desired_packages_path = Path(sys.argv[3])
out_path = Path(sys.argv[4])

with open(desired_packages_path, "r") as f:
    desired_packages = yaml.safe_load(f) or []

registry = toml.load(registry_path / "Registry.toml")

def ensure_version_valid(version):
    """
    Ensure a version string is a valid Julia-parsable version.
    It doesn't really matter what it looks like as it's just used for overrides.
    """
    return re.sub('[^0-9\.]','', version)

with open(out_path, "w") as f:
    f.write("{fetchgit}:\n")
    f.write("{\n")
    for pkg in desired_packages:
        uuid = pkg["uuid"]

        if pkg["name"] in package_overrides:
            treehash = util.get_commit_info(package_overrides[pkg["name"]])["tree"]
            f.write(f""" "{uuid}" = {{
    src = null; # Overridden: will fill in later
    name = "{pkg["name"]}";
    version = "{ensure_version_valid(pkg["version"])}";
    treehash = "{treehash}";
  }};\n""")
        elif uuid in registry["packages"]:
            registry_info = registry["packages"][uuid]
            path = registry_info["path"]
            packageToml = toml.load(registry_path / path / "Package.toml")

            all_versions = toml.load(registry_path / path / "Versions.toml")
            if not pkg["version"] in all_versions: continue
            version_to_use = all_versions[pkg["version"]]

            repo = packageToml["repo"]
            f.write(f""" "{uuid}" = {{
    src = fetchgit {{
      url = "{repo}";
      rev = "{version_to_use["git-tree-sha1"]}";
      sha256 = "{version_to_use["nix-sha256"]}";
    }};
    name = "{pkg["name"]}";
    version = "{pkg["version"]}";
    treehash = "{version_to_use["git-tree-sha1"]}";
  }};\n""")
        else:
            # print("Warning: couldn't figure out what to do with pkg in sources_nix.py", pkg)
            pass

    f.write("}")
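This writes the julia-sources.nix that default.nix later imports; a hedged sketch of one emitted entry, reusing the example UUID and package name from the comments in default.nix, with every other value a placeholder:

    {fetchgit}:
    {
      "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" = {
        src = fetchgit {
          url = "<repo from Package.toml>";
          rev = "<git-tree-sha1>";
          sha256 = "<nix-sha256 from the augmented registry>";
        };
        name = "NaNMath";
        version = "<version>";
        treehash = "<git-tree-sha1>";
      };
    }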
pkgs/development/julia-modules/python/util.py (new file, 12 lines)
@@ -0,0 +1,12 @@
import os
import subprocess
import tempfile

def get_commit_info(repo):
    with tempfile.TemporaryDirectory() as home_dir:
        env_with_home = os.environ.copy()
        env_with_home["HOME"] = home_dir
        subprocess.check_output(["git", "config", "--global", "--add", "safe.directory", repo], env=env_with_home)
        lines = subprocess.check_output(["git", "log", "--pretty=raw"], cwd=repo, env=env_with_home).decode().split("\n")
        return dict([x.split() for x in lines if len(x.split()) == 2])
pkgs/development/julia-modules/registry.nix (new file, 9 lines)
@@ -0,0 +1,9 @@
{ fetchFromGitHub }:

fetchFromGitHub {
  owner = "CodeDownIO";
  repo = "General";
  rev = "baf9e22ecdf97b6424a611ac4a565c6ee60d3f44";
  sha256 = "1nd3x2z8r6578149pbpkx9qw2ajln1kfy7w5kjsnv56v180h0ddf";
  # date = "2023-12-14T12:20:00+00:00";
}
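The augmentedRegistry default points at this pinned snapshot, but it is an ordinary overridable argument; a hedged sketch of pinning a different revision (rev, hash, and the package name are placeholders):

    with import <nixpkgs> {};
    (julia.withPackages.override {
      augmentedRegistry = fetchFromGitHub {
        owner = "CodeDownIO";
        repo = "General";
        rev = "<newer-rev>";
        sha256 = "<sha256>";
      };
    }) [ "Example" ]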
pkgs/development/julia-modules/util.nix (new file, 45 lines)
@@ -0,0 +1,45 @@
{ git
, runCommand
}:

{
  # Add packages to a Python environment. Works if you pass something like either
  # a) python3
  # b) python3.withPackages (ps: [...])
  # See https://github.com/NixOS/nixpkgs/pull/97467#issuecomment-689315186
  addPackagesToPython = python: packages:
    if python ? "env" then python.override (old: {
      extraLibs = old.extraLibs ++ packages;
    })
    else python.withPackages (ps: packages);

  # Convert an ordinary source checkout into a repo with a single commit
  repoifySimple = name: path:
    runCommand ''${name}-repoified'' {buildInputs = [git];} ''
      mkdir -p $out
      cp -r ${path}/. $out
      cd $out
      chmod -R u+w .
      rm -rf .git
      git init
      git add . -f
      git config user.email "julia2nix@localhost"
      git config user.name "julia2nix"
      git commit -m "Dummy commit"
    '';

  # Convert a dependency source info into a repo with a single commit
  repoifyInfo = uuid: info:
    runCommand ''julia-${info.name}-${info.version}'' {buildInputs = [git];} ''
      mkdir -p $out
      cp -r ${info.src}/. $out
      cd $out
      chmod -R u+w .
      rm -rf .git
      git init
      git add . -f
      git config user.email "julia2nix@localhost"
      git config user.name "julia2nix"
      git commit -m "Dummy commit"
    '';
}
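The addPackagesToPython helper above accepts both a bare interpreter and an already-wrapped environment; a minimal sketch of both call shapes, evaluated from a nixpkgs checkout:

    with import <nixpkgs> {};
    let util = callPackage ./pkgs/development/julia-modules/util.nix {};
    in {
      fromBare    = util.addPackagesToPython python3 [ python3.pkgs.matplotlib ];
      fromWrapped = util.addPackagesToPython (python3.withPackages (ps: [ ps.numpy ])) [ python3.pkgs.matplotlib ];
    }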
pkgs/top-level/all-packages.nix (modified)
@@ -16517,17 +16517,17 @@ with pkgs;

   juniper = callPackage ../development/compilers/juniper { };

+  inherit (callPackage ../development/compilers/julia { })
+    julia_16-bin
+    julia_18-bin
+    julia_19-bin
+    julia_18
+    julia_19;
+
   julia-lts = julia_16-bin;
   julia-stable = julia_19;
   julia = julia-stable;

-  julia_16-bin = callPackage ../development/compilers/julia/1.6-bin.nix { };
-  julia_18-bin = callPackage ../development/compilers/julia/1.8-bin.nix { };
-  julia_19-bin = callPackage ../development/compilers/julia/1.9-bin.nix { };
-
-  julia_18 = callPackage ../development/compilers/julia/1.8.nix { };
-  julia_19 = callPackage ../development/compilers/julia/1.9.nix { };
-
   julia-lts-bin = julia_16-bin;
   julia-stable-bin = julia_19-bin;
   julia-bin = julia-stable-bin;