nixpkgs/pkgs/applications/science/math/sage/patches/revert-sphinx-always-fork.patch
Timo Kaufmann  38c2be5ffe  sage: fix docbuild  (2018-10-21 14:16:06 +02:00)

    This is only a hotfix; the underlying issue is not really understood. See
    https://groups.google.com/forum/#!msg/sage-packaging/VU4h8IWGFLA/mrmCMocYBwAJ
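
For readers who want to see the pattern outside of Sage, here is a minimal, self-contained sketch of the build_many() behaviour that the patch below restores. This is only an illustration, not the sage_setup.docbuild module itself: NUM_THREADS and ABORT_ON_ERROR are placeholder constants standing in for values the real module defines elsewhere, the helper names build_many_serial/build_many_parallel are invented here, and fake_build is a hypothetical stand-in for building one document.

# Illustrative sketch only -- not Sage's actual sage_setup.docbuild code.
from multiprocessing import Pool

NUM_THREADS = 2        # placeholder; Sage derives this from its build settings
ABORT_ON_ERROR = True  # placeholder for the module-level error-handling flag


def build_many_serial(target, args):
    # Fallback when only one worker is wanted: call target() directly in this
    # process, mirroring the "else" branch the patch restores.
    results = []
    for arg in args:
        try:
            results.append(target(arg))
        except Exception:
            if ABORT_ON_ERROR:
                raise
    return results


def build_many_parallel(target, args):
    # maxtasksperchild=1 gives every document a fresh worker process, so
    # memory leaked by one Sphinx run cannot accumulate across documents.
    pool = Pool(NUM_THREADS, maxtasksperchild=1)
    # map_async (unlike a plain pool.map) lets KeyboardInterrupt reach the
    # parent; the large timeout passed to get() exists for the same reason.
    async_result = pool.map_async(target, args, 1)
    try:
        results = async_result.get(99999)
        pool.close()
        pool.join()
        return results
    except Exception:
        pool.terminate()
        if ABORT_ON_ERROR:
            raise
        return None


def build_many(target, args):
    if NUM_THREADS > 1:
        return build_many_parallel(target, args)
    return build_many_serial(target, args)


def fake_build(doc):
    # Hypothetical stand-in for building one document.
    return "built %s" % doc


if __name__ == "__main__":
    print(build_many(fake_build, ["reference", "tutorial", "faq"]))

The two choices mirrored from the patch are maxtasksperchild=1 (one fresh process per document bounds the Sphinx memory leak) and map_async with a long timeout (so KeyboardInterrupt is handled properly, which plain map does not do).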

commit f1c59929c3c180ac283334c2b3c901ac8c82f6b1
Author: Timo Kaufmann <timokau@zoho.com>
Date:   Sat Oct 20 20:07:41 2018 +0200

    Revert "Something related to the sphinxbuild seems to be leaking memory"

    This reverts commit 7d85dc796c58c3de57401bc22d3587b94e205091.

diff --git a/src/sage_setup/docbuild/__init__.py b/src/sage_setup/docbuild/__init__.py
index 0b24b1a60b..084c3f89d7 100644
--- a/src/sage_setup/docbuild/__init__.py
+++ b/src/sage_setup/docbuild/__init__.py
@@ -265,29 +265,35 @@ class DocBuilder(object):
     # import the customized builder for object.inv files
     inventory = builder_helper('inventory')
 
-def build_many(target, args):
-    # Pool() uses an actual fork() to run each new instance. This is important
-    # for performance reasons, i.e., don't use a forkserver when it becomes
-    # available with Python 3: Here, sage is already initialized which is quite
-    # costly, with a forkserver we would have to reinitialize it for every
-    # document we build. At the same time, don't serialize this by taking the
-    # pool (and thus the call to fork()) out completely: The call to Sphinx
-    # leaks memory, so we need to build each document in its own process to
-    # control the RAM usage.
-    from multiprocessing import Pool
-    pool = Pool(NUM_THREADS, maxtasksperchild=1)
-    # map_async handles KeyboardInterrupt correctly. Plain map and
-    # apply_async does not, so don't use it.
-    x = pool.map_async(target, args, 1)
-    try:
-        ret = x.get(99999)
-        pool.close()
-        pool.join()
-    except Exception:
-        pool.terminate()
-        if ABORT_ON_ERROR:
-            raise
-    return ret
+if NUM_THREADS > 1:
+    def build_many(target, args):
+        from multiprocessing import Pool
+        pool = Pool(NUM_THREADS, maxtasksperchild=1)
+        # map_async handles KeyboardInterrupt correctly. Plain map and
+        # apply_async does not, so don't use it.
+        x = pool.map_async(target, args, 1)
+        try:
+            ret = x.get(99999)
+            pool.close()
+            pool.join()
+        except Exception:
+            pool.terminate()
+            if ABORT_ON_ERROR:
+                raise
+        return ret
+else:
+    def build_many(target, args):
+        results = []
+
+        for arg in args:
+            try:
+                results.append(target(arg))
+            except Exception:
+                if ABORT_ON_ERROR:
+                    raise
+
+        return results
+
 ##########################################
 #      Parallel Building Ref Manual      #