author     Darius Makovsky <traveltissues@protonmail.com>  2019-11-06 09:16:18 +0000
committer  Darius Makovsky <traveltissues@protonmail.com>  2019-11-06 09:16:21 +0000
commit     2245af28b79cf978d7eb19fdb06273a4515c62d6
tree       569e5e1baada26a6d7efc4cdaa0a45ad36bcd9b5
parent     602cf41d04f79dfa2dcae72e690cad3168b7321d
Remove unnecessary ignore_workspaces kwarg
Attempting to open a workspace for an element without first closing its existing workspace now raises an error. This makes the kwarg unnecessary; tests should close workspaces between attempts to open them.
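For reference, a minimal sketch of the behaviour this enforces, written as it would sit alongside the other tests in tests/frontend/workspace.py (it assumes that module's existing imports, its DATA_DIR constant and open_workspace helper, and the cli/tmpdir/datafiles fixtures; the test_open_noclose_open test added below is the authoritative check):

    @pytest.mark.datafiles(DATA_DIR)
    def test_reopen_requires_close(cli, tmpdir, datafiles):
        # Opening a workspace leaves it registered as open for this element
        element_name, project, _ = open_workspace(cli, tmpdir, datafiles, 'git')

        # A second 'workspace open' for the same element, without closing
        # the first one, is now rejected by the stream
        result = cli.run(project=project, args=['workspace', 'open', element_name])
        result.assert_main_error(ErrorDomain.STREAM, None)

        # Closing the workspace is what makes a later open valid again
        result = cli.run(project=project, args=['workspace', 'close', element_name])
        result.assert_success()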
-rw-r--r--  src/buildstream/_loader/loader.py   17
-rw-r--r--  src/buildstream/_pipeline.py         5
-rw-r--r--  src/buildstream/_project.py          6
-rw-r--r--  src/buildstream/_stream.py           9
-rw-r--r--  tests/frontend/workspace.py         18
5 files changed, 30 insertions(+), 25 deletions(-)
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index cceda284c..e5859e9e8 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -95,12 +95,11 @@ class Loader():
# ticker (callable): An optional function for tracking load progress
# targets (list of str): Target, element-path relative bst filenames in the project
# task (Task): A task object to report progress to
- # ignore_workspaces (bool): Whether to load workspace sources for open workspaces
#
# Raises: LoadError
#
# Returns: The toplevel LoadElement
- def load(self, targets, task, rewritable=False, ticker=None, ignore_workspaces=False):
+ def load(self, targets, task, rewritable=False, ticker=None):
for filename in targets:
if os.path.isabs(filename):
@@ -149,7 +148,7 @@ class Loader():
# Finally, wrap what we have into LoadElements and return the target
#
- ret.append(loader._collect_element(element, task, ignore_workspaces=ignore_workspaces))
+ ret.append(loader._collect_element(element, task))
self._clean_caches()
@@ -427,12 +426,11 @@ class Loader():
# Args:
# element (LoadElement): The element for which to load a MetaElement
# task (Task): A task to write progress information to
- # ignore_workspaces (bool): Whether to load workspace sources for open workspaces
#
# Returns:
# (MetaElement): A partially loaded MetaElement
#
- def _collect_element_no_deps(self, element, task, ignore_workspaces=False):
+ def _collect_element_no_deps(self, element, task):
# Return the already built one, if we already built it
meta_element = self._meta_elements.get(element.name)
if meta_element:
@@ -448,7 +446,7 @@ class Loader():
# metasource.
workspace = self._context.get_workspaces().get_workspace(element.name)
skip_workspace = True
- if workspace and not ignore_workspaces:
+ if workspace:
workspace_node = {'kind': 'workspace'}
workspace_node['path'] = workspace.get_absolute_path()
workspace_node['ref'] = str(workspace.to_dict().get('last_successful', 'ignored'))
@@ -495,14 +493,13 @@ class Loader():
# Args:
# top_element (LoadElement): The element for which to load a MetaElement
# task (Task): The task to update with progress changes
- # ignore_workspaces (bool): Whether to load workspace sources for open workspaces
#
# Returns:
# (MetaElement): A fully loaded MetaElement
#
- def _collect_element(self, top_element, task, ignore_workspaces=False):
+ def _collect_element(self, top_element, task):
element_queue = [top_element]
- meta_element_queue = [self._collect_element_no_deps(top_element, task, ignore_workspaces=ignore_workspaces)]
+ meta_element_queue = [self._collect_element_no_deps(top_element, task)]
while element_queue:
element = element_queue.pop()
@@ -519,7 +516,7 @@ class Loader():
name = dep.element.name
if name not in loader._meta_elements:
- meta_dep = loader._collect_element_no_deps(dep.element, task, ignore_workspaces=ignore_workspaces)
+ meta_dep = loader._collect_element_no_deps(dep.element, task)
element_queue.append(dep.element)
meta_element_queue.append(meta_dep)
else:
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index 943d65e44..b9efc7826 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -94,18 +94,17 @@ class Pipeline():
# target_groups (list of lists): Groups of toplevel targets to load
# rewritable (bool): Whether the loaded files should be rewritable
# this is a bit more expensive due to deep copies
- # ignore_workspaces (bool): Whether to load workspace sources for open workspaces
#
# Returns:
# (tuple of lists): A tuple of grouped Element objects corresponding to target_groups
#
- def load(self, target_groups, *, rewritable=False, ignore_workspaces=False):
+ def load(self, target_groups, *, rewritable=False):
# First concatenate all the lists for the loader's sake
targets = list(itertools.chain(*target_groups))
with PROFILER.profile(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, "-") for t in targets)):
- elements = self._project.load_elements(targets, rewritable=rewritable, ignore_workspaces=ignore_workspaces)
+ elements = self._project.load_elements(targets, rewritable=rewritable)
# Now create element groups to match the input target groups
elt_iter = iter(elements)
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index cf0001e71..54a011e0d 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -427,15 +427,13 @@ class Project():
# targets (list): Target names
# rewritable (bool): Whether the loaded files should be rewritable
# this is a bit more expensive due to deep copies
- # ignore_workspaces (bool): Whether to load workspace sources
#
# Returns:
# (list): A list of loaded Element
#
- def load_elements(self, targets, *, rewritable=False, ignore_workspaces=False):
+ def load_elements(self, targets, *, rewritable=False):
with self._context.messenger.simple_task("Loading elements", silent_nested=True) as task:
- meta_elements = self.loader.load(targets, task, rewritable=rewritable, ticker=None,
- ignore_workspaces=ignore_workspaces)
+ meta_elements = self.loader.load(targets, task, rewritable=rewritable, ticker=None)
with self._context.messenger.simple_task("Resolving elements") as task:
if task:
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 4824c7d81..b284235bb 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -786,8 +786,7 @@ class Stream():
elements, track_elements = self._load(targets, track_targets,
selection=PipelineSelection.REDIRECT,
- track_selection=PipelineSelection.REDIRECT,
- ignore_workspaces=True)
+ track_selection=PipelineSelection.REDIRECT)
workspaces = self._context.get_workspaces()
@@ -1158,7 +1157,6 @@ class Stream():
# use_source_config (bool): Whether to initialize remote source caches with the config
# artifact_remote_url (str): A remote url for initializing the artifacts
# source_remote_url (str): A remote url for initializing source caches
- # ignore_workspaces (bool): Whether to load workspace sources for open workspaces
#
# Returns:
# (list of Element): The primary element selection
@@ -1176,8 +1174,7 @@ class Stream():
artifact_remote_url=None,
source_remote_url=None,
dynamic_plan=False,
- load_refs=False,
- ignore_workspaces=False):
+ load_refs=False):
# Classify element and artifact strings
target_elements, target_artifacts = self._classify_artifacts(targets)
@@ -1198,7 +1195,7 @@ class Stream():
loadable = [target_elements, except_targets, track_targets, track_except_targets]
if any(loadable):
elements, except_elements, track_elements, track_except_elements = \
- self._pipeline.load(loadable, rewritable=rewritable, ignore_workspaces=ignore_workspaces)
+ self._pipeline.load(loadable, rewritable=rewritable)
else:
elements, except_elements, track_elements, track_except_elements = [], [], [], []
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index 43cd6f381..0c7e63525 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -315,6 +315,15 @@ def test_open_track(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
+def test_open_noclose_open(cli, tmpdir, datafiles):
+ # opening the same workspace twice without closing it should fail
+ element_name, project, _ = open_workspace(cli, tmpdir, datafiles, 'git')
+
+ result = cli.run(project=project, args=['workspace', 'open', element_name])
+ result.assert_main_error(ErrorDomain.STREAM, None)
+
+
+@pytest.mark.datafiles(DATA_DIR)
def test_open_force(cli, tmpdir, datafiles):
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git')
@@ -338,6 +347,9 @@ def test_open_force(cli, tmpdir, datafiles):
def test_open_force_open(cli, tmpdir, datafiles):
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git')
+ result = cli.run(project=project, args=['workspace', 'close', element_name])
+ result.assert_success()
+
# Assert the workspace dir exists
assert os.path.exists(workspace)
@@ -396,18 +408,20 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles):
# Assert that workspace 2 contains the unmodified file
assert os.path.exists(os.path.join(workspace2, 'usr', 'bin', 'hello'))
+ result = cli.run(project=project, args=['workspace', 'close', element_name2])
+ result.assert_success()
+
# Now open the workspace again with --force, this should happily succeed
result = cli.run(project=project, args=[
'workspace', 'open', '--force', '--directory', workspace, element_name2
])
+ result.assert_success()
# Assert that the file in workspace 1 has been replaced
# With the file from workspace 2
assert os.path.exists(hello_path)
assert not os.path.exists(hello1_path)
- result.assert_success()
-
@pytest.mark.datafiles(DATA_DIR)
def test_close(cli, tmpdir, datafiles):