author     Brett Cannon <brett@python.org>  2014-09-12 10:39:48 -0400
committer  Brett Cannon <brett@python.org>  2014-09-12 10:39:48 -0400
commit     f1a8df0ac984162795815aae6696155fcd22fdfb (patch)
tree       7f2793a0d8e402bc6ed122436af0225e2e8269ce /Lib/compileall.py
parent     a56411e5d4c91027c65780c8bdc81dd943cd9c87 (diff)
download   cpython-git-f1a8df0ac984162795815aae6696155fcd22fdfb.tar.gz
Issue #16104: Allow compileall to do parallel bytecode compilation.
Both compileall.compile_dir() and the CLI for compileall now allow for specifying how many workers to use (or 0 to use all CPUs). Thanks to Claudiu Popa for the patch.
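
As a usage note (a minimal sketch, not part of the patch; the directory name below is hypothetical), the new workers argument is driven like this from Python code, with workers=0 meaning "use all CPUs" and workers=1 keeping the previous serial behaviour:

    import compileall

    # Hypothetical example: byte-compile a project tree in parallel.
    # workers=0 is turned into a default-sized ProcessPoolExecutor (one worker
    # per CPU); workers=1, the default, compiles serially as before.
    ok = compileall.compile_dir('myproject', workers=0)
    print('all files compiled' if ok else 'at least one file failed to compile')

    # Rough command-line equivalent using the new -j/--workers option:
    #   python -m compileall -j 0 myproject
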
Diffstat (limited to 'Lib/compileall.py')
-rw-r--r--  Lib/compileall.py  79
1 file changed, 56 insertions, 23 deletions
diff --git a/Lib/compileall.py b/Lib/compileall.py
index 513d899e34..f1c9d27ad6 100644
--- a/Lib/compileall.py
+++ b/Lib/compileall.py
@@ -16,23 +16,15 @@ import importlib.util
import py_compile
import struct
-__all__ = ["compile_dir","compile_file","compile_path"]
+try:
+ from concurrent.futures import ProcessPoolExecutor
+except ImportError:
+ ProcessPoolExecutor = None
+from functools import partial
-def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None,
- quiet=False, legacy=False, optimize=-1):
- """Byte-compile all modules in the given directory tree.
-
- Arguments (only dir is required):
+__all__ = ["compile_dir","compile_file","compile_path"]
- dir: the directory to byte-compile
- maxlevels: maximum recursion level (default 10)
- ddir: the directory that will be prepended to the path to the
- file as it is compiled into each byte-code file.
- force: if True, force compilation, even if timestamps are up-to-date
- quiet: if True, be quiet during compilation
- legacy: if True, produce legacy pyc paths instead of PEP 3147 paths
- optimize: optimization level or -1 for level of the interpreter
- """
+def _walk_dir(dir, ddir=None, maxlevels=10, quiet=False):
if not quiet:
print('Listing {!r}...'.format(dir))
try:
@@ -41,7 +33,6 @@ def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None,
         print("Can't list {!r}".format(dir))
         names = []
     names.sort()
-    success = 1
     for name in names:
         if name == '__pycache__':
             continue
@@ -51,13 +42,50 @@ def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None,
         else:
             dfile = None
         if not os.path.isdir(fullname):
-            if not compile_file(fullname, ddir, force, rx, quiet,
-                                legacy, optimize):
-                success = 0
+            yield fullname
         elif (maxlevels > 0 and name != os.curdir and name != os.pardir and
               os.path.isdir(fullname) and not os.path.islink(fullname)):
-            if not compile_dir(fullname, maxlevels - 1, dfile, force, rx,
-                               quiet, legacy, optimize):
+            yield from _walk_dir(fullname, ddir=dfile,
+                                 maxlevels=maxlevels - 1, quiet=quiet)
+
+def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None,
+                quiet=False, legacy=False, optimize=-1, workers=1):
+    """Byte-compile all modules in the given directory tree.
+
+    Arguments (only dir is required):
+
+    dir:       the directory to byte-compile
+    maxlevels: maximum recursion level (default 10)
+    ddir:      the directory that will be prepended to the path to the
+               file as it is compiled into each byte-code file.
+    force:     if True, force compilation, even if timestamps are up-to-date
+    quiet:     if True, be quiet during compilation
+    legacy:    if True, produce legacy pyc paths instead of PEP 3147 paths
+    optimize:  optimization level or -1 for level of the interpreter
+    workers:   maximum number of parallel workers
+    """
+    files = _walk_dir(dir, quiet=quiet, maxlevels=maxlevels,
+                      ddir=ddir)
+    success = 1
+    if workers is not None and workers != 1:
+        if workers < 0:
+            raise ValueError('workers must be greater or equal to 0')
+        if ProcessPoolExecutor is None:
+            raise NotImplementedError('multiprocessing support not available')
+
+        workers = workers or None
+        with ProcessPoolExecutor(max_workers=workers) as executor:
+            results = executor.map(partial(compile_file,
+                                           ddir=ddir, force=force,
+                                           rx=rx, quiet=quiet,
+                                           legacy=legacy,
+                                           optimize=optimize),
+                                   files)
+            success = min(results, default=1)
+    else:
+        for file in files:
+            if not compile_file(file, ddir, force, rx, quiet,
+                                legacy, optimize):
                 success = 0
     return success
@@ -196,8 +224,10 @@ def main():
                         help=('zero or more file and directory names '
                               'to compile; if no arguments given, defaults '
                               'to the equivalent of -l sys.path'))
-    args = parser.parse_args()
+    parser.add_argument('-j', '--workers', default=1,
+                        type=int, help='Run compileall concurrently')
 
+    args = parser.parse_args()
     compile_dests = args.compile_dest
 
     if (args.ddir and (len(compile_dests) != 1
@@ -223,6 +253,9 @@ def main():
             print("Error reading file list {}".format(args.flist))
             return False
 
+    if args.workers is not None:
+        args.workers = args.workers or None
+
     success = True
     try:
         if compile_dests:
@@ -234,7 +267,7 @@ def main():
                 else:
                     if not compile_dir(dest, maxlevels, args.ddir,
                                        args.force, args.rx, args.quiet,
-                                       args.legacy):
+                                       args.legacy, workers=args.workers):
                         success = False
             return success
         else:
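
The parallel branch added above reduces to a small pattern: pre-bind the per-file options with functools.partial, fan the file list out over a ProcessPoolExecutor via executor.map(), and fold the per-file results (compile_file() returns 1 on success, 0 on failure) with min(), so a single failure drags the overall result to 0 while default=1 covers an empty tree. A standalone sketch of that pattern, assuming a stand-in worker function (compile_one and compile_many are illustrative names, not part of the stdlib):

    from concurrent.futures import ProcessPoolExecutor
    from functools import partial

    def compile_one(path, optimize=-1):
        # Stand-in for compileall.compile_file(): 1 on success, 0 on failure.
        return 1

    def compile_many(paths, workers=None, optimize=-1):
        # workers=None lets ProcessPoolExecutor size the pool from the CPU count.
        with ProcessPoolExecutor(max_workers=workers) as executor:
            results = executor.map(partial(compile_one, optimize=optimize), paths)
            # min() drops to 0 as soon as any file fails; default=1 handles an
            # empty iterable of files.
            return min(results, default=1)

    if __name__ == '__main__':
        print(compile_many(['a.py', 'b.py']))

The __main__ guard keeps the sketch safe on platforms where worker processes are started by re-importing the module. The patch's own try/except around the ProcessPoolExecutor import plays a similar defensive role: on a build without multiprocessing support, compile_dir() raises NotImplementedError only if a caller actually asks for more than one worker.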