From 2a8b3e81e9d87db9b265ba4e4318d65ec4a64d2e Mon Sep 17 00:00:00 2001 From: James Ennis Date: Tue, 4 Jun 2019 17:08:22 +0100 Subject: stream.py: Avoid overworking in load_selection where possible load_selection marks elements in the pipeline as required. In doing this, elements need to pass through update state at least two more times. For commands which do not invoke the scheduler, we should avoid doing this work. --- src/buildstream/_frontend/cli.py | 3 ++- src/buildstream/_stream.py | 15 ++++++++++----- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py index a74cc6c5e..275b2a96d 100644 --- a/src/buildstream/_frontend/cli.py +++ b/src/buildstream/_frontend/cli.py @@ -549,8 +549,9 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, command) if not element: raise AppError('Missing argument "ELEMENT".') + no_scheduler = False if cli_buildtree != 'never' else True dependencies = app.stream.load_selection((element,), selection=PipelineSelection.NONE, - use_artifact_config=True) + use_artifact_config=True, no_scheduler=no_scheduler) element = dependencies[0] prompt = app.shell_prompt(element) mounts = [ diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py index 243c782d0..bee711813 100644 --- a/src/buildstream/_stream.py +++ b/src/buildstream/_stream.py @@ -356,7 +356,8 @@ class Stream(): except_targets=except_targets, track_except_targets=except_targets, track_cross_junctions=cross_junctions, - fetch_subprojects=True) + fetch_subprojects=True, + no_scheduler=True) track_queue = TrackQueue(self._scheduler) self._add_queue(track_queue, track=True) @@ -491,7 +492,7 @@ class Stream(): tar=False): # We only have one target in a checkout command - elements, _ = self._load((target,), (), fetch_subprojects=True) + elements, _ = self._load((target,), (), fetch_subprojects=True, no_scheduler=True) target = elements[0] self._check_location_writable(location, 
force=force, tar=tar) @@ -550,7 +551,8 @@ class Stream(): # def artifact_log(self, targets): # Return list of Element and/or ArtifactElement objects - target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True) + target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, + load_refs=True, no_scheduler=True) logsdirs = [] for obj in target_objects: @@ -576,7 +578,8 @@ class Stream(): # def artifact_delete(self, targets, no_prune): # Return list of Element and/or ArtifactElement objects - target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True) + target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, + load_refs=True, no_scheduler=True) # Some of the targets may refer to the same key, so first obtain a # set of the refs to be removed. @@ -627,10 +630,12 @@ class Stream(): self._check_location_writable(location, force=force, tar=tar) + no_scheduler = not fetch elements, _ = self._load((target,), (), selection=deps, except_targets=except_targets, - fetch_subprojects=True) + fetch_subprojects=True, + no_scheduler=no_scheduler) # Assert all sources are cached in the source dir if fetch: -- cgit v1.2.1