summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author: bst-marge-bot <marge-bot@buildstream.build> 2019-10-16 17:30:06 +0000
committer: bst-marge-bot <marge-bot@buildstream.build> 2019-10-16 17:30:06 +0000
commit: 9147994107c27e7760a575b9a8b2f2cc3ce46b98 (patch)
tree: f4623530d314a9e20143256f6281b665b32fa6d5
parent: f6e24e258e07403bf635710e17632beec2709b10 (diff)
parent: 84c1fbbd71a96f6641c006734b190ba6d5193fc3 (diff)
download: buildstream-9147994107c27e7760a575b9a8b2f2cc3ce46b98.tar.gz
Merge branch 'traveltissues/1140-softreset' into 'master'
Workspaces via sourcecache MR 2

Closes #1140

See merge request BuildStream/buildstream!1640
-rw-r--r--  src/buildstream/_basecache.py                 4
-rw-r--r--  src/buildstream/_loader/loader.py             17
-rw-r--r--  src/buildstream/_pipeline.py                  5
-rw-r--r--  src/buildstream/_project.py                   6
-rw-r--r--  src/buildstream/_stream.py                    60
-rw-r--r--  src/buildstream/plugins/sources/workspace.py  11
-rw-r--r--  tests/frontend/workspace.py                   52
7 files changed, 101 insertions(+), 54 deletions(-)
diff --git a/src/buildstream/_basecache.py b/src/buildstream/_basecache.py
index c2772d02c..fc2e92456 100644
--- a/src/buildstream/_basecache.py
+++ b/src/buildstream/_basecache.py
@@ -161,7 +161,9 @@ class BaseCache():
def setup_remotes(self, *, use_config=False, remote_url=None):
# Ensure we do not double-initialise since this can be expensive
- assert not self._remotes_setup
+ if self._remotes_setup:
+ return
+
self._remotes_setup = True
# Initialize remote caches. We allow the commandline to override
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index d6f8ca861..0c0d9d65a 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -95,11 +95,12 @@ class Loader():
# ticker (callable): An optional function for tracking load progress
# targets (list of str): Target, element-path relative bst filenames in the project
# task (Task): A task object to report progress to
+ # ignore_workspaces (bool): Whether to skip substituting workspace sources for open workspaces
#
# Raises: LoadError
#
# Returns: The toplevel LoadElement
- def load(self, targets, task, rewritable=False, ticker=None):
+ def load(self, targets, task, rewritable=False, ticker=None, ignore_workspaces=False):
for filename in targets:
if os.path.isabs(filename):
@@ -148,7 +149,7 @@ class Loader():
# Finally, wrap what we have into LoadElements and return the target
#
- ret.append(loader._collect_element(element, task))
+ ret.append(loader._collect_element(element, task, ignore_workspaces=ignore_workspaces))
self._clean_caches()
@@ -426,11 +427,12 @@ class Loader():
# Args:
# element (LoadElement): The element for which to load a MetaElement
# task (Task): A task to write progress information to
+ # ignore_workspaces (bool): Whether to skip substituting workspace sources for open workspaces
#
# Returns:
# (MetaElement): A partially loaded MetaElement
#
- def _collect_element_no_deps(self, element, task):
+ def _collect_element_no_deps(self, element, task, ignore_workspaces=False):
# Return the already built one, if we already built it
meta_element = self._meta_elements.get(element.name)
if meta_element:
@@ -467,7 +469,7 @@ class Loader():
# metasources. When sources are instantiated, the workspace will own the
# element sources. These can then be referred to when resetting or similar operations.
workspace = self._context.get_workspaces().get_workspace(element.name)
- if workspace:
+ if workspace and not ignore_workspaces:
workspace_node = {'kind': 'workspace'}
workspace_node['path'] = workspace.get_absolute_path()
workspace_node['ref'] = str(workspace.to_dict().get('last_successful', 'ignored'))
@@ -500,13 +502,14 @@ class Loader():
# Args:
# top_element (LoadElement): The element for which to load a MetaElement
# task (Task): The task to update with progress changes
+ # ignore_workspaces (bool): Whether to skip substituting workspace sources for open workspaces
#
# Returns:
# (MetaElement): A fully loaded MetaElement
#
- def _collect_element(self, top_element, task):
+ def _collect_element(self, top_element, task, ignore_workspaces=False):
element_queue = [top_element]
- meta_element_queue = [self._collect_element_no_deps(top_element, task)]
+ meta_element_queue = [self._collect_element_no_deps(top_element, task, ignore_workspaces=ignore_workspaces)]
while element_queue:
element = element_queue.pop()
@@ -523,7 +526,7 @@ class Loader():
name = dep.element.name
if name not in loader._meta_elements:
- meta_dep = loader._collect_element_no_deps(dep.element, task)
+ meta_dep = loader._collect_element_no_deps(dep.element, task, ignore_workspaces=ignore_workspaces)
element_queue.append(dep.element)
meta_element_queue.append(meta_dep)
else:
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index b9efc7826..943d65e44 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -94,17 +94,18 @@ class Pipeline():
# target_groups (list of lists): Groups of toplevel targets to load
# rewritable (bool): Whether the loaded files should be rewritable
# this is a bit more expensive due to deep copies
+ # ignore_workspaces (bool): Whether to skip substituting workspace sources for open workspaces
#
# Returns:
# (tuple of lists): A tuple of grouped Element objects corresponding to target_groups
#
- def load(self, target_groups, *, rewritable=False):
+ def load(self, target_groups, *, rewritable=False, ignore_workspaces=False):
# First concatenate all the lists for the loader's sake
targets = list(itertools.chain(*target_groups))
with PROFILER.profile(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, "-") for t in targets)):
- elements = self._project.load_elements(targets, rewritable=rewritable)
+ elements = self._project.load_elements(targets, rewritable=rewritable, ignore_workspaces=ignore_workspaces)
# Now create element groups to match the input target groups
elt_iter = iter(elements)
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 54a011e0d..cf0001e71 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -427,13 +427,15 @@ class Project():
# targets (list): Target names
# rewritable (bool): Whether the loaded files should be rewritable
# this is a bit more expensive due to deep copies
+ # ignore_workspaces (bool): Whether to skip substituting workspace sources
#
# Returns:
# (list): A list of loaded Element
#
- def load_elements(self, targets, *, rewritable=False):
+ def load_elements(self, targets, *, rewritable=False, ignore_workspaces=False):
with self._context.messenger.simple_task("Loading elements", silent_nested=True) as task:
- meta_elements = self.loader.load(targets, task, rewritable=rewritable, ticker=None)
+ meta_elements = self.loader.load(targets, task, rewritable=rewritable, ticker=None,
+ ignore_workspaces=ignore_workspaces)
with self._context.messenger.simple_task("Resolving elements") as task:
if task:
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 6e4e5caec..c7ada6eb6 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -798,7 +798,8 @@ class Stream():
elements, track_elements = self._load(targets, track_targets,
selection=PipelineSelection.REDIRECT,
- track_selection=PipelineSelection.REDIRECT)
+ track_selection=PipelineSelection.REDIRECT,
+ ignore_workspaces=True)
workspaces = self._context.get_workspaces()
@@ -825,9 +826,14 @@ class Stream():
# Check for workspace config
workspace = workspaces.get_workspace(target._get_full_name())
- if workspace and not force:
- raise StreamError("Element '{}' already has workspace defined at: {}"
- .format(target.name, workspace.get_absolute_path()))
+ if workspace:
+ if not force:
+ raise StreamError("Element '{}' already has workspace defined at: {}"
+ .format(target.name, workspace.get_absolute_path()))
+ if not no_checkout:
+ target.warn("Replacing existing workspace for element '{}' defined at: {}"
+ .format(target.name, workspace.get_absolute_path()))
+ self.workspace_close(target._get_full_name(), remove_dir=not no_checkout)
target_consistency = target._get_consistency()
if not no_checkout and target_consistency < Consistency.CACHED and \
@@ -863,6 +869,9 @@ class Stream():
if not (no_checkout or force) and os.listdir(directory):
raise StreamError("For element '{}', Directory path is not empty: {}"
.format(target.name, directory), reason='bad-directory')
+ if os.listdir(directory):
+ if force and not no_checkout:
+ utils._force_rmtree(directory)
# So far this function has tried to catch as many issues as possible with out making any changes
# Now it does the bits that can not be made atomic.
@@ -928,14 +937,9 @@ class Stream():
#
def workspace_reset(self, targets, *, soft, track_first):
- if track_first:
- track_targets = targets
- else:
- track_targets = ()
-
- elements, track_elements = self._load(targets, track_targets,
- selection=PipelineSelection.REDIRECT,
- track_selection=PipelineSelection.REDIRECT)
+ elements, _ = self._load(targets, [],
+ selection=PipelineSelection.REDIRECT,
+ track_selection=PipelineSelection.REDIRECT)
nonexisting = []
for element in elements:
@@ -944,37 +948,21 @@ class Stream():
if nonexisting:
raise StreamError("Workspace does not exist", detail="\n".join(nonexisting))
- # Do the tracking first
- if track_first:
- self._fetch(elements, track_elements=track_elements, fetch_original=True)
-
workspaces = self._context.get_workspaces()
-
for element in elements:
workspace = workspaces.get_workspace(element._get_full_name())
workspace_path = workspace.get_absolute_path()
+
if soft:
workspace.prepared = False
self._message(MessageType.INFO, "Reset workspace state for {} at: {}"
.format(element.name, workspace_path))
continue
- with element.timed_activity("Removing workspace directory {}"
- .format(workspace_path)):
- try:
- shutil.rmtree(workspace_path)
- except OSError as e:
- raise StreamError("Could not remove '{}': {}"
- .format(workspace_path, e)) from e
-
- workspaces.delete_workspace(element._get_full_name())
- workspaces.create_workspace(element, workspace_path, checkout=True)
-
- self._message(MessageType.INFO,
- "Reset workspace for {} at: {}".format(element.name,
- workspace_path))
-
- workspaces.save_config()
+ self.workspace_close(element._get_full_name(), remove_dir=True)
+ workspaces.save_config()
+ self.workspace_open([element._get_full_name()],
+ no_checkout=False, track_first=track_first, force=True, custom_dir=workspace_path)
# workspace_exists
#
@@ -1182,6 +1170,7 @@ class Stream():
# use_source_config (bool): Whether to initialize remote source caches with the config
# artifact_remote_url (str): A remote url for initializing the artifacts
# source_remote_url (str): A remote url for initializing source caches
+ # ignore_workspaces (bool): Whether to skip substituting workspace sources for open workspaces
#
# Returns:
# (list of Element): The primary element selection
@@ -1199,7 +1188,8 @@ class Stream():
artifact_remote_url=None,
source_remote_url=None,
dynamic_plan=False,
- load_refs=False):
+ load_refs=False,
+ ignore_workspaces=False):
# Classify element and artifact strings
target_elements, target_artifacts = self._classify_artifacts(targets)
@@ -1220,7 +1210,7 @@ class Stream():
loadable = [target_elements, except_targets, track_targets, track_except_targets]
if any(loadable):
elements, except_elements, track_elements, track_except_elements = \
- self._pipeline.load(loadable, rewritable=rewritable)
+ self._pipeline.load(loadable, rewritable=rewritable, ignore_workspaces=ignore_workspaces)
else:
elements, except_elements, track_elements, track_except_elements = [], [], [], []
diff --git a/src/buildstream/plugins/sources/workspace.py b/src/buildstream/plugins/sources/workspace.py
index 8dbcc6218..9cc524f89 100644
--- a/src/buildstream/plugins/sources/workspace.py
+++ b/src/buildstream/plugins/sources/workspace.py
@@ -105,14 +105,11 @@ class WorkspaceSource(Source):
self.__digest = self.__cas_dir._get_digest().hash
return (self.path, self.__digest)
+ # init_workspace()
+ #
+ # Raises AssertionError: existing workspaces should not be reinitialized
def init_workspace(self, directory: Directory) -> None:
- # for each source held by the workspace we must call init_workspace
- # those sources may override `init_workspace` expecting str or Directory
- # and this will need to be extracted from the directory passed to this method
- assert isinstance(directory, Directory)
- directory = directory.external_directory
- for source in self.get_element_sources():
- source._init_workspace(directory)
+ raise AssertionError('Attempting to re-open an existing workspace')
def get_consistency(self):
# always return cached state
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index dc9f38540..b53a62da8 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -570,6 +570,58 @@ def test_reset(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
+def test_reset_soft(cli, tmpdir, datafiles):
+ # Open the workspace
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)
+
+ assert cli.get_element_state(project, element_name) == 'buildable'
+
+ hello_path = os.path.join(workspace, 'usr', 'bin', 'hello')
+ pony_path = os.path.join(workspace, 'etc', 'pony.conf')
+
+ assert os.path.exists(os.path.join(workspace, 'usr', 'bin'))
+ assert os.path.exists(hello_path)
+ assert not os.path.exists(pony_path)
+
+ key_1 = cli.get_element_key(project, element_name)
+ assert key_1 != "{:?<64}".format('')
+ result = cli.run(project=project, args=['build', element_name])
+ result.assert_success()
+ assert cli.get_element_state(project, element_name) == 'cached'
+ key_2 = cli.get_element_key(project, element_name)
+ assert key_2 != "{:?<64}".format('')
+
+ # workspace keys are not recalculated
+ assert key_1 == key_2
+
+ wait_for_cache_granularity()
+
+ # Modify workspace
+ shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
+ os.makedirs(os.path.join(workspace, 'etc'))
+ with open(os.path.join(workspace, 'etc', 'pony.conf'), 'w') as f:
+ f.write("PONY='pink'")
+
+ assert not os.path.exists(os.path.join(workspace, 'usr', 'bin'))
+ assert os.path.exists(pony_path)
+
+ # Now soft-reset the open workspace, this should not revert the changes
+ result = cli.run(project=project, args=[
+ 'workspace', 'reset', '--soft', element_name
+ ])
+ result.assert_success()
+ # we removed this dir
+ assert not os.path.exists(os.path.join(workspace, 'usr', 'bin'))
+ # and added this one
+ assert os.path.exists(os.path.join(workspace, 'etc', 'pony.conf'))
+
+ assert cli.get_element_state(project, element_name) == 'buildable'
+ key_3 = cli.get_element_key(project, element_name)
+ assert key_3 != "{:?<64}".format('')
+ assert key_1 != key_3
+
+
+@pytest.mark.datafiles(DATA_DIR)
def test_reset_multiple(cli, tmpdir, datafiles):
# Open the workspaces
tmpdir_alpha = os.path.join(str(tmpdir), 'alpha')