sage: fix docbuild

This is only a hotfix; the underlying issue is not yet understood. See
https://groups.google.com/forum/#!msg/sage-packaging/VU4h8IWGFLA/mrmCMocYBwAJ
Author: Timo Kaufmann, 2018-10-21 14:16:06 +02:00
parent 1a26028ed8
commit 38c2be5ffe
2 changed files with 76 additions and 0 deletions

@@ -0,0 +1,71 @@
commit f1c59929c3c180ac283334c2b3c901ac8c82f6b1
Author: Timo Kaufmann <timokau@zoho.com>
Date: Sat Oct 20 20:07:41 2018 +0200

    Revert "Something related to the sphinxbuild seems to be leaking memory"

    This reverts commit 7d85dc796c58c3de57401bc22d3587b94e205091.
diff --git a/src/sage_setup/docbuild/__init__.py b/src/sage_setup/docbuild/__init__.py
index 0b24b1a60b..084c3f89d7 100644
--- a/src/sage_setup/docbuild/__init__.py
+++ b/src/sage_setup/docbuild/__init__.py
@@ -265,29 +265,35 @@ class DocBuilder(object):
# import the customized builder for object.inv files
inventory = builder_helper('inventory')
-def build_many(target, args):
- # Pool() uses an actual fork() to run each new instance. This is important
- # for performance reasons, i.e., don't use a forkserver when it becomes
- # available with Python 3: Here, sage is already initialized which is quite
- # costly, with a forkserver we would have to reinitialize it for every
- # document we build. At the same time, don't serialize this by taking the
- # pool (and thus the call to fork()) out completely: The call to Sphinx
- # leaks memory, so we need to build each document in its own process to
- # control the RAM usage.
- from multiprocessing import Pool
- pool = Pool(NUM_THREADS, maxtasksperchild=1)
- # map_async handles KeyboardInterrupt correctly. Plain map and
- # apply_async does not, so don't use it.
- x = pool.map_async(target, args, 1)
- try:
- ret = x.get(99999)
- pool.close()
- pool.join()
- except Exception:
- pool.terminate()
- if ABORT_ON_ERROR:
- raise
- return ret
+if NUM_THREADS > 1:
+ def build_many(target, args):
+ from multiprocessing import Pool
+ pool = Pool(NUM_THREADS, maxtasksperchild=1)
+ # map_async handles KeyboardInterrupt correctly. Plain map and
+ # apply_async does not, so don't use it.
+ x = pool.map_async(target, args, 1)
+ try:
+ ret = x.get(99999)
+ pool.close()
+ pool.join()
+ except Exception:
+ pool.terminate()
+ if ABORT_ON_ERROR:
+ raise
+ return ret
+else:
+ def build_many(target, args):
+ results = []
+
+ for arg in args:
+ try:
+ results.append(target(arg))
+ except Exception:
+ if ABORT_ON_ERROR:
+ raise
+
+ return results
+
##########################################
# Parallel Building Ref Manual #
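
The comments in the reverted hunk spell out the design being restored: fork-based Pool workers reuse the already-initialised Sage process, maxtasksperchild=1 makes each child exit after a single document so memory leaked by Sphinx is reclaimed, and map_async (rather than plain map) keeps KeyboardInterrupt working. Below is a minimal, standalone sketch of that pattern, outside the patch; NUM_THREADS, ABORT_ON_ERROR and build_document are illustrative stand-ins for Sage's own definitions.

from multiprocessing import Pool

NUM_THREADS = 4        # stand-in; Sage derives its real worker count elsewhere
ABORT_ON_ERROR = True  # stand-in for the flag the patch consults


def build_document(doc):
    # Placeholder for the real per-document Sphinx build.
    return "built %s" % doc


def build_many(target, args):
    # maxtasksperchild=1 recycles each forked worker after a single document,
    # so memory leaked by one Sphinx run is reclaimed when that child exits.
    pool = Pool(NUM_THREADS, maxtasksperchild=1)
    # map_async lets a KeyboardInterrupt reach the parent (plain map would
    # not); the long timeout on get() keeps the wait interruptible.
    result = pool.map_async(target, args, 1)
    ret = None
    try:
        ret = result.get(99999)
        pool.close()
        pool.join()
    except Exception:
        pool.terminate()
        if ABORT_ON_ERROR:
            raise
    return ret


if __name__ == "__main__":
    print(build_many(build_document, ["reference", "tutorial", "faq"]))

The revert itself restores the NUM_THREADS > 1 guard, so the single-threaded case loops over the documents in-process and never forks.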


@@ -27,6 +27,11 @@ stdenv.mkDerivation rec {
# https://trac.sagemath.org/ticket/25316
# https://github.com/python/cpython/pull/7476
./patches/python-5755-hotpatch.patch
# Revert the commit that made the sphinx build fork even in the single thread
# case. For some as-yet-unknown reason, that breaks the docbuild on nix and archlinux.
# See https://groups.google.com/forum/#!msg/sage-packaging/VU4h8IWGFLA/mrmCMocYBwAJ.
./patches/revert-sphinx-always-fork.patch
];
packageUpgradePatches = [