| author    | Darius Makovsky <traveltissues@protonmail.com> | 2019-10-09 18:20:58 +0100 |
|-----------|---|---|
| committer | bst-marge-bot <marge-bot@buildstream.build> | 2019-10-16 16:22:09 +0000 |
| commit    | 1eedab4fd1acdd7ec5ed89ea9e735098aacaa801 (patch) | |
| tree      | 7cf40741677d42d86fa41f089274f875ff3c7240 | |
| parent    | 4fce5b13f8f6d7eff07df960984779dbd24fab9f (diff) | |
| download  | buildstream-1eedab4fd1acdd7ec5ed89ea9e735098aacaa801.tar.gz | |
Use workspace_close and workspace_open to reset workspaces
* tracking is not needed in reset
* support workspace opening for already open workspaces;
  remove existing files to preserve behaviour

Add an ignore_workspaces kwarg to element loading via Stream().load.
Setting this to True ignores the special handling of sources for open
workspaces and loads the sources specified rather than a workspace
source. This avoids having to reload elements when re-opening
workspaces.
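
The sketch below illustrates the intended effect of the new kwarg using simplified stand-ins for the Stream and Loader classes (the real code paths are in the diff that follows); it only shows how ignore_workspaces is threaded through the loading layers and what it changes, not the actual BuildStream API.

```python
# Illustrative stand-ins only; the real classes live in src/buildstream/_stream.py,
# _pipeline.py, _project.py and _loader/loader.py.

class Loader:
    def __init__(self, workspaces):
        self._workspaces = workspaces  # maps element name -> workspace path

    def load(self, targets, *, ignore_workspaces=False):
        loaded = []
        for name in targets:
            workspace = self._workspaces.get(name)
            if workspace and not ignore_workspaces:
                # Normal behaviour: an open workspace replaces the element's sources
                loaded.append((name, {'kind': 'workspace', 'path': workspace}))
            else:
                # With ignore_workspaces=True the declared sources are loaded instead
                loaded.append((name, {'kind': 'declared-sources'}))
        return loaded


class Stream:
    def __init__(self, loader):
        self._loader = loader

    def _load(self, targets, *, ignore_workspaces=False):
        # The kwarg is simply forwarded through each layer down to the loader
        return self._loader.load(targets, ignore_workspaces=ignore_workspaces)


stream = Stream(Loader({'hello.bst': '/workspaces/hello'}))
print(stream._load(['hello.bst']))                          # workspace source
print(stream._load(['hello.bst'], ignore_workspaces=True))  # declared sources
```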
-rw-r--r-- | src/buildstream/_loader/loader.py | 17 |
-rw-r--r-- | src/buildstream/_pipeline.py      |  5 |
-rw-r--r-- | src/buildstream/_project.py       |  6 |
-rw-r--r-- | src/buildstream/_stream.py        | 60 |

4 files changed, 42 insertions, 46 deletions
```diff
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index d6f8ca861..0c0d9d65a 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -95,11 +95,12 @@ class Loader():
     #    ticker (callable): An optional function for tracking load progress
     #    targets (list of str): Target, element-path relative bst filenames in the project
     #    task (Task): A task object to report progress to
+    #    ignore_workspaces (bool): Whether to load workspace sources for open workspaces
     #
     # Raises: LoadError
     #
     # Returns: The toplevel LoadElement
-    def load(self, targets, task, rewritable=False, ticker=None):
+    def load(self, targets, task, rewritable=False, ticker=None, ignore_workspaces=False):
 
         for filename in targets:
             if os.path.isabs(filename):
@@ -148,7 +149,7 @@ class Loader():
 
         # Finally, wrap what we have into LoadElements and return the target
         #
-        ret.append(loader._collect_element(element, task))
+        ret.append(loader._collect_element(element, task, ignore_workspaces=ignore_workspaces))
 
         self._clean_caches()
 
@@ -426,11 +427,12 @@ class Loader():
     # Args:
     #    element (LoadElement): The element for which to load a MetaElement
     #    task (Task): A task to write progress information to
+    #    ignore_workspaces (bool): Whether to load workspace sources for open workspaces
     #
     # Returns:
     #    (MetaElement): A partially loaded MetaElement
     #
-    def _collect_element_no_deps(self, element, task):
+    def _collect_element_no_deps(self, element, task, ignore_workspaces=False):
         # Return the already built one, if we already built it
         meta_element = self._meta_elements.get(element.name)
         if meta_element:
@@ -467,7 +469,7 @@ class Loader():
         # metasources. When sources are instantiated, the workspace will own the
         # element sources. These can then be referred to when resetting or similar operations.
         workspace = self._context.get_workspaces().get_workspace(element.name)
-        if workspace:
+        if workspace and not ignore_workspaces:
             workspace_node = {'kind': 'workspace'}
             workspace_node['path'] = workspace.get_absolute_path()
             workspace_node['ref'] = str(workspace.to_dict().get('last_successful', 'ignored'))
@@ -500,13 +502,14 @@ class Loader():
     # Args:
     #    top_element (LoadElement): The element for which to load a MetaElement
     #    task (Task): The task to update with progress changes
+    #    ignore_workspaces (bool): Whether to load workspace sources for open workspaces
     #
     # Returns:
     #    (MetaElement): A fully loaded MetaElement
     #
-    def _collect_element(self, top_element, task):
+    def _collect_element(self, top_element, task, ignore_workspaces=False):
         element_queue = [top_element]
-        meta_element_queue = [self._collect_element_no_deps(top_element, task)]
+        meta_element_queue = [self._collect_element_no_deps(top_element, task, ignore_workspaces=ignore_workspaces)]
 
         while element_queue:
             element = element_queue.pop()
@@ -523,7 +526,7 @@ class Loader():
                 name = dep.element.name
 
                 if name not in loader._meta_elements:
-                    meta_dep = loader._collect_element_no_deps(dep.element, task)
+                    meta_dep = loader._collect_element_no_deps(dep.element, task, ignore_workspaces=ignore_workspaces)
                     element_queue.append(dep.element)
                     meta_element_queue.append(meta_dep)
                 else:
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index b9efc7826..943d65e44 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -94,17 +94,18 @@ class Pipeline():
     #    target_groups (list of lists): Groups of toplevel targets to load
     #    rewritable (bool): Whether the loaded files should be rewritable
     #                       this is a bit more expensive due to deep copies
+    #    ignore_workspaces (bool): Whether to load workspace sources for open workspaces
     #
     # Returns:
     #    (tuple of lists): A tuple of grouped Element objects corresponding to target_groups
     #
-    def load(self, target_groups, *, rewritable=False):
+    def load(self, target_groups, *, rewritable=False, ignore_workspaces=False):
 
         # First concatenate all the lists for the loader's sake
         targets = list(itertools.chain(*target_groups))
 
         with PROFILER.profile(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, "-") for t in targets)):
-            elements = self._project.load_elements(targets, rewritable=rewritable)
+            elements = self._project.load_elements(targets, rewritable=rewritable, ignore_workspaces=ignore_workspaces)
 
             # Now create element groups to match the input target groups
             elt_iter = iter(elements)
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 54a011e0d..cf0001e71 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -427,13 +427,15 @@ class Project():
     #    targets (list): Target names
     #    rewritable (bool): Whether the loaded files should be rewritable
     #                       this is a bit more expensive due to deep copies
+    #    ignore_workspaces (bool): Whether to load workspace sources
     #
     # Returns:
     #    (list): A list of loaded Element
     #
-    def load_elements(self, targets, *, rewritable=False):
+    def load_elements(self, targets, *, rewritable=False, ignore_workspaces=False):
         with self._context.messenger.simple_task("Loading elements", silent_nested=True) as task:
-            meta_elements = self.loader.load(targets, task, rewritable=rewritable, ticker=None)
+            meta_elements = self.loader.load(targets, task, rewritable=rewritable, ticker=None,
+                                             ignore_workspaces=ignore_workspaces)
 
         with self._context.messenger.simple_task("Resolving elements") as task:
             if task:
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 6e4e5caec..c7ada6eb6 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -798,7 +798,8 @@ class Stream():
 
         elements, track_elements = self._load(targets, track_targets,
                                               selection=PipelineSelection.REDIRECT,
-                                              track_selection=PipelineSelection.REDIRECT)
+                                              track_selection=PipelineSelection.REDIRECT,
+                                              ignore_workspaces=True)
 
         workspaces = self._context.get_workspaces()
 
@@ -825,9 +826,14 @@ class Stream():
 
             # Check for workspace config
             workspace = workspaces.get_workspace(target._get_full_name())
-            if workspace and not force:
-                raise StreamError("Element '{}' already has workspace defined at: {}"
-                                  .format(target.name, workspace.get_absolute_path()))
+            if workspace:
+                if not force:
+                    raise StreamError("Element '{}' already has workspace defined at: {}"
+                                      .format(target.name, workspace.get_absolute_path()))
+                if not no_checkout:
+                    target.warn("Replacing existing workspace for element '{}' defined at: {}"
+                                .format(target.name, workspace.get_absolute_path()))
+                self.workspace_close(target._get_full_name(), remove_dir=not no_checkout)
 
             target_consistency = target._get_consistency()
             if not no_checkout and target_consistency < Consistency.CACHED and \
@@ -863,6 +869,9 @@ class Stream():
             if not (no_checkout or force) and os.listdir(directory):
                 raise StreamError("For element '{}', Directory path is not empty: {}"
                                   .format(target.name, directory), reason='bad-directory')
+            if os.listdir(directory):
+                if force and not no_checkout:
+                    utils._force_rmtree(directory)
 
         # So far this function has tried to catch as many issues as possible with out making any changes
         # Now it does the bits that can not be made atomic.
@@ -928,14 +937,9 @@ class Stream():
     #
     def workspace_reset(self, targets, *, soft, track_first):
 
-        if track_first:
-            track_targets = targets
-        else:
-            track_targets = ()
-
-        elements, track_elements = self._load(targets, track_targets,
-                                              selection=PipelineSelection.REDIRECT,
-                                              track_selection=PipelineSelection.REDIRECT)
+        elements, _ = self._load(targets, [],
+                                 selection=PipelineSelection.REDIRECT,
+                                 track_selection=PipelineSelection.REDIRECT)
 
         nonexisting = []
         for element in elements:
@@ -944,37 +948,21 @@ class Stream():
         if nonexisting:
             raise StreamError("Workspace does not exist", detail="\n".join(nonexisting))
 
-        # Do the tracking first
-        if track_first:
-            self._fetch(elements, track_elements=track_elements, fetch_original=True)
-
         workspaces = self._context.get_workspaces()
-
         for element in elements:
             workspace = workspaces.get_workspace(element._get_full_name())
             workspace_path = workspace.get_absolute_path()
+
             if soft:
                 workspace.prepared = False
                 self._message(MessageType.INFO, "Reset workspace state for {} at: {}"
                               .format(element.name, workspace_path))
                 continue
 
-            with element.timed_activity("Removing workspace directory {}"
-                                        .format(workspace_path)):
-                try:
-                    shutil.rmtree(workspace_path)
-                except OSError as e:
-                    raise StreamError("Could not remove '{}': {}"
-                                      .format(workspace_path, e)) from e
-
-            workspaces.delete_workspace(element._get_full_name())
-            workspaces.create_workspace(element, workspace_path, checkout=True)
-
-            self._message(MessageType.INFO,
-                          "Reset workspace for {} at: {}".format(element.name,
-                                                                 workspace_path))
-
-        workspaces.save_config()
+            self.workspace_close(element._get_full_name(), remove_dir=True)
+            workspaces.save_config()
+            self.workspace_open([element._get_full_name()],
+                                no_checkout=False, track_first=track_first, force=True, custom_dir=workspace_path)
 
     # workspace_exists
     #
@@ -1182,6 +1170,7 @@ class Stream():
     #    use_source_config (bool): Whether to initialize remote source caches with the config
     #    artifact_remote_url (str): A remote url for initializing the artifacts
     #    source_remote_url (str): A remote url for initializing source caches
+    #    ignore_workspaces (bool): Whether to load workspace sources for open workspaces
     #
     # Returns:
     #    (list of Element): The primary element selection
@@ -1199,7 +1188,8 @@ class Stream():
               artifact_remote_url=None,
               source_remote_url=None,
               dynamic_plan=False,
-              load_refs=False):
+              load_refs=False,
+              ignore_workspaces=False):
 
         # Classify element and artifact strings
         target_elements, target_artifacts = self._classify_artifacts(targets)
@@ -1220,7 +1210,7 @@ class Stream():
         loadable = [target_elements, except_targets, track_targets, track_except_targets]
         if any(loadable):
             elements, except_elements, track_elements, track_except_elements = \
-                self._pipeline.load(loadable, rewritable=rewritable)
+                self._pipeline.load(loadable, rewritable=rewritable, ignore_workspaces=ignore_workspaces)
         else:
             elements, except_elements, track_elements, track_except_elements = [], [], [], []
 
```
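
For orientation, the reworked workspace_reset in the diff above boils down to the composition sketched here. This is a hedged sketch using a stubbed stand-in class rather than the real Stream, so method bodies only record what would happen; the actual behaviour lives in src/buildstream/_stream.py.

```python
# Sketch of the control flow only; not the real Stream implementation.

class FakeStream:
    def __init__(self):
        self.log = []

    def workspace_close(self, name, *, remove_dir):
        # Real implementation removes the directory and drops the workspace config
        self.log.append(('close', name, remove_dir))

    def workspace_open(self, names, *, no_checkout, track_first, force, custom_dir):
        # Real implementation re-creates the workspace, here at the original path
        self.log.append(('open', tuple(names), force, custom_dir))

    def workspace_reset(self, targets, *, soft, track_first):
        for name, path in targets:
            if soft:
                # Soft reset only clears the "prepared" marker; no files are touched
                self.log.append(('soft-reset', name))
                continue
            # Hard reset: close (removing the directory), then re-open in place
            self.workspace_close(name, remove_dir=True)
            self.workspace_open([name], no_checkout=False,
                                track_first=track_first, force=True, custom_dir=path)


stream = FakeStream()
stream.workspace_reset([('hello.bst', '/workspaces/hello')], soft=False, track_first=False)
print(stream.log)
```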