summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author: Jürg Billeter <j@bitron.ch> 2019-06-20 13:03:31 +0200
committer: bst-marge-bot <marge-bot@buildstream.build> 2019-06-25 09:08:17 +0000
commit: cb0331a256af8e90e80f6937995fd9acad8725e3 (patch)
tree: 9cc281e28fec1af1df1169a881d536b8d168fd69
parent: 722d456799221ab57742b6e199a79d8476c0af98 (diff)
download: buildstream-cb0331a256af8e90e80f6937995fd9acad8725e3.tar.gz
Always fetch subprojects as needed
Treat junction element sources the same as sources of any other element and always fetch subprojects as needed. Do not ask the user to manually fetch subprojects.
-rw-r--r--src/buildstream/_exceptions.py3
-rw-r--r--src/buildstream/_includes.py2
-rw-r--r--src/buildstream/_loader/loader.py51
-rw-r--r--src/buildstream/_pipeline.py10
-rw-r--r--src/buildstream/_project.py9
-rw-r--r--src/buildstream/_stream.py24
-rw-r--r--tests/format/junctions.py14
-rw-r--r--tests/frontend/show.py9
8 files changed, 32 insertions, 90 deletions
diff --git a/src/buildstream/_exceptions.py b/src/buildstream/_exceptions.py
index 819f9538c..f57e4b34a 100644
--- a/src/buildstream/_exceptions.py
+++ b/src/buildstream/_exceptions.py
@@ -198,9 +198,6 @@ class LoadErrorReason(Enum):
# Failure to load a project from a specified junction
INVALID_JUNCTION = 13
- # Subproject needs to be fetched
- SUBPROJECT_FETCH_NEEDED = 14
-
# Subproject has no ref
SUBPROJECT_INCONSISTENT = 15
diff --git a/src/buildstream/_includes.py b/src/buildstream/_includes.py
index f792b7716..8f507b566 100644
--- a/src/buildstream/_includes.py
+++ b/src/buildstream/_includes.py
@@ -104,7 +104,7 @@ class Includes:
shortname = include
if ':' in include:
junction, include = include.split(':', 1)
- junction_loader = loader._get_loader(junction, fetch_subprojects=True)
+ junction_loader = loader._get_loader(junction)
current_loader = junction_loader
else:
current_loader = loader
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index d52a8a72e..217debf32 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -85,12 +85,11 @@ class Loader():
# this is a bit more expensive due to deep copies
# ticker (callable): An optional function for tracking load progress
# targets (list of str): Target, element-path relative bst filenames in the project
- # fetch_subprojects (bool): Whether to fetch subprojects while loading
#
# Raises: LoadError
#
# Returns: The toplevel LoadElement
- def load(self, targets, rewritable=False, ticker=None, fetch_subprojects=False):
+ def load(self, targets, rewritable=False, ticker=None):
for filename in targets:
if os.path.isabs(filename):
@@ -109,9 +108,8 @@ class Loader():
for target in targets:
with PROFILER.profile(Topics.LOAD_PROJECT, target):
- _junction, name, loader = self._parse_name(target, rewritable, ticker,
- fetch_subprojects=fetch_subprojects)
- element = loader._load_file(name, rewritable, ticker, fetch_subprojects)
+ _junction, name, loader = self._parse_name(target, rewritable, ticker)
+ element = loader._load_file(name, rewritable, ticker)
target_elements.append(element)
#
@@ -255,13 +253,12 @@ class Loader():
# filename (str): The element-path relative bst file
# rewritable (bool): Whether we should load in round trippable mode
# ticker (callable): A callback to report loaded filenames to the frontend
- # fetch_subprojects (bool): Whether to fetch subprojects while loading
# provenance (Provenance): The location from where the file was referred to, or None
#
# Returns:
# (LoadElement): A loaded LoadElement
#
- def _load_file(self, filename, rewritable, ticker, fetch_subprojects, provenance=None):
+ def _load_file(self, filename, rewritable, ticker, provenance=None):
# Silently ignore already loaded files
if filename in self._elements:
@@ -290,14 +287,12 @@ class Loader():
current_element[2].append(dep.name)
if dep.junction:
- self._load_file(dep.junction, rewritable, ticker,
- fetch_subprojects, dep.provenance)
+ self._load_file(dep.junction, rewritable, ticker, dep.provenance)
loader = self._get_loader(dep.junction,
rewritable=rewritable,
ticker=ticker,
- fetch_subprojects=fetch_subprojects,
provenance=dep.provenance)
- dep_element = loader._load_file(dep.name, rewritable, ticker, fetch_subprojects, dep.provenance)
+ dep_element = loader._load_file(dep.name, rewritable, ticker, dep.provenance)
else:
dep_element = self._elements.get(dep.name)
@@ -553,13 +548,12 @@ class Loader():
#
# Args:
# filename (str): Junction name
- # fetch_subprojects (bool): Whether to fetch subprojects while loading
#
# Raises: LoadError
#
# Returns: A Loader or None if specified junction does not exist
def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0,
- fetch_subprojects=False, provenance=None):
+ provenance=None):
provenance_str = ""
if provenance is not None:
@@ -582,14 +576,13 @@ class Loader():
# junctions in the parent take precedence over junctions defined
# in subprojects
loader = self._parent._get_loader(filename, rewritable=rewritable, ticker=ticker,
- level=level + 1, fetch_subprojects=fetch_subprojects,
- provenance=provenance)
+ level=level + 1, provenance=provenance)
if loader:
self._loaders[filename] = loader
return loader
try:
- self._load_file(filename, rewritable, ticker, fetch_subprojects)
+ self._load_file(filename, rewritable, ticker)
except LoadError as e:
if e.reason != LoadErrorReason.MISSING_FILE:
# other load error
@@ -619,26 +612,18 @@ class Loader():
# find loader for that project.
if element.target:
subproject_loader = self._get_loader(element.target_junction, rewritable=rewritable, ticker=ticker,
- level=level, fetch_subprojects=fetch_subprojects,
- provenance=provenance)
+ level=level, provenance=provenance)
loader = subproject_loader._get_loader(element.target_element, rewritable=rewritable, ticker=ticker,
- level=level, fetch_subprojects=fetch_subprojects,
- provenance=provenance)
+ level=level, provenance=provenance)
self._loaders[filename] = loader
return loader
# Handle the case where a subproject needs to be fetched
#
if element._get_consistency() == Consistency.RESOLVED:
- if fetch_subprojects:
- if ticker:
- ticker(filename, 'Fetching subproject')
- element._fetch()
- else:
- detail = "Try fetching the project with `bst source fetch {}`".format(filename)
- raise LoadError(LoadErrorReason.SUBPROJECT_FETCH_NEEDED,
- "{}Subproject fetch needed for junction: {}".format(provenance_str, filename),
- detail=detail)
+ if ticker:
+ ticker(filename, 'Fetching subproject')
+ element._fetch()
# Handle the case where a subproject has no ref
#
@@ -698,14 +683,13 @@ class Loader():
# rewritable (bool): Whether the loaded files should be rewritable
# this is a bit more expensive due to deep copies
# ticker (callable): An optional function for tracking load progress
- # fetch_subprojects (bool): Whether to fetch subprojects while loading
#
# Returns:
# (tuple): - (str): name of the junction element
# - (str): name of the element
# - (Loader): loader for sub-project
#
- def _parse_name(self, name, rewritable, ticker, fetch_subprojects=False):
+ def _parse_name(self, name, rewritable, ticker):
# We allow to split only once since deep junctions names are forbidden.
# Users who want to refer to elements in sub-sub-projects are required
# to create junctions on the top level project.
@@ -713,9 +697,8 @@ class Loader():
if len(junction_path) == 1:
return None, junction_path[-1], self
else:
- self._load_file(junction_path[-2], rewritable, ticker, fetch_subprojects)
- loader = self._get_loader(junction_path[-2], rewritable=rewritable, ticker=ticker,
- fetch_subprojects=fetch_subprojects)
+ self._load_file(junction_path[-2], rewritable, ticker)
+ loader = self._get_loader(junction_path[-2], rewritable=rewritable, ticker=ticker)
return junction_path[-2], junction_path[-1], loader
# Print a warning message, checks warning_token against project configuration
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index e6ae94cfd..0758cf5ff 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -92,25 +92,19 @@ class Pipeline():
#
# Args:
# target_groups (list of lists): Groups of toplevel targets to load
- # fetch_subprojects (bool): Whether we should fetch subprojects as a part of the
- # loading process, if they are not yet locally cached
# rewritable (bool): Whether the loaded files should be rewritable
# this is a bit more expensive due to deep copies
#
# Returns:
# (tuple of lists): A tuple of grouped Element objects corresponding to target_groups
#
- def load(self, target_groups, *,
- fetch_subprojects=True,
- rewritable=False):
+ def load(self, target_groups, *, rewritable=False):
# First concatenate all the lists for the loader's sake
targets = list(itertools.chain(*target_groups))
with PROFILER.profile(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, "-") for t in targets)):
- elements = self._project.load_elements(targets,
- rewritable=rewritable,
- fetch_subprojects=fetch_subprojects)
+ elements = self._project.load_elements(targets, rewritable=rewritable)
# Now create element groups to match the input target groups
elt_iter = iter(elements)
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 1fdc84acb..0787a7bf4 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -440,18 +440,13 @@ class Project():
# targets (list): Target names
# rewritable (bool): Whether the loaded files should be rewritable
# this is a bit more expensive due to deep copies
- # fetch_subprojects (bool): Whether we should fetch subprojects as a part of the
- # loading process, if they are not yet locally cached
#
# Returns:
# (list): A list of loaded Element
#
- def load_elements(self, targets, *,
- rewritable=False, fetch_subprojects=False):
+ def load_elements(self, targets, *, rewritable=False):
with self._context.timed_activity("Loading elements", silent_nested=True):
- meta_elements = self.loader.load(targets, rewritable=rewritable,
- ticker=None,
- fetch_subprojects=fetch_subprojects)
+ meta_elements = self.loader.load(targets, rewritable=rewritable, ticker=None)
with self._context.timed_activity("Resolving elements"):
elements = [
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 8097f451d..c5e056280 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -121,7 +121,6 @@ class Stream():
target_objects, _ = self._load(targets, (),
selection=selection,
except_targets=except_targets,
- fetch_subprojects=False,
use_artifact_config=use_artifact_config,
load_refs=load_refs)
@@ -242,7 +241,6 @@ class Stream():
use_artifact_config=use_config,
artifact_remote_url=remote,
use_source_config=True,
- fetch_subprojects=True,
dynamic_plan=True)
# Remove the tracking elements from the main targets
@@ -323,7 +321,6 @@ class Stream():
except_targets=except_targets,
track_except_targets=track_except_targets,
track_cross_junctions=track_cross_junctions,
- fetch_subprojects=True,
use_source_config=use_source_config,
source_remote_url=remote)
@@ -356,8 +353,7 @@ class Stream():
selection=selection, track_selection=selection,
except_targets=except_targets,
track_except_targets=except_targets,
- track_cross_junctions=cross_junctions,
- fetch_subprojects=True)
+ track_cross_junctions=cross_junctions)
track_queue = TrackQueue(self._scheduler)
self._add_queue(track_queue, track=True)
@@ -390,8 +386,7 @@ class Stream():
selection=selection,
ignore_junction_targets=ignore_junction_targets,
use_artifact_config=use_config,
- artifact_remote_url=remote,
- fetch_subprojects=True)
+ artifact_remote_url=remote)
if not self._artifacts.has_fetch_remotes():
raise StreamError("No artifact caches available for pulling artifacts")
@@ -431,8 +426,7 @@ class Stream():
selection=selection,
ignore_junction_targets=ignore_junction_targets,
use_artifact_config=use_config,
- artifact_remote_url=remote,
- fetch_subprojects=True)
+ artifact_remote_url=remote)
if not self._artifacts.has_push_remotes():
raise StreamError("No artifact caches available for pushing artifacts")
@@ -496,9 +490,7 @@ class Stream():
# if pulling we need to ensure dependency artifacts are also pulled
selection = PipelineSelection.RUN if pull else PipelineSelection.NONE
- elements, _ = self._load(
- (target,), (), selection=selection,
- fetch_subprojects=True, use_artifact_config=True)
+ elements, _ = self._load((target,), (), selection=selection, use_artifact_config=True)
target = elements[-1]
@@ -644,8 +636,7 @@ class Stream():
elements, _ = self._load((target,), (),
selection=deps,
- except_targets=except_targets,
- fetch_subprojects=True)
+ except_targets=except_targets)
# Assert all sources are cached in the source dir
if fetch:
@@ -1039,7 +1030,6 @@ class Stream():
# use_source_config (bool): Whether to initialize remote source caches with the config
# artifact_remote_url (str): A remote url for initializing the artifacts
# source_remote_url (str): A remote url for initializing source caches
- # fetch_subprojects (bool): Whether to fetch subprojects while loading
#
# Returns:
# (list of Element): The primary element selection
@@ -1056,7 +1046,6 @@ class Stream():
use_source_config=False,
artifact_remote_url=None,
source_remote_url=None,
- fetch_subprojects=False,
dynamic_plan=False,
load_refs=False):
@@ -1075,8 +1064,7 @@ class Stream():
# Load all target elements
elements, except_elements, track_elements, track_except_elements = \
self._pipeline.load([target_elements, except_targets, track_targets, track_except_targets],
- rewritable=rewritable,
- fetch_subprojects=fetch_subprojects)
+ rewritable=rewritable)
# Obtain the ArtifactElement objects
artifacts = [self._project.create_artifact_element(ref) for ref in target_artifacts]
diff --git a/tests/format/junctions.py b/tests/format/junctions.py
index a85308e39..8842bc617 100644
--- a/tests/format/junctions.py
+++ b/tests/format/junctions.py
@@ -333,18 +333,8 @@ def test_git_show(cli, tmpdir, datafiles):
}
_yaml.dump(element, os.path.join(project, 'base.bst'))
- # Verify that bst show does not implicitly fetch subproject
- result = cli.run(project=project, args=['show', 'target.bst'])
- result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_FETCH_NEEDED)
-
- # Assert that we have the expected provenance encoded into the error
- assert "target.bst [line 3 column 2]" in result.stderr
-
- # Explicitly fetch subproject
- result = cli.run(project=project, args=['source', 'fetch', 'base.bst'])
- result.assert_success()
-
- # Check that bst show succeeds now and the pipeline includes the subproject element
+ # Check that bst show succeeds with implicit subproject fetching and the
+ # pipeline includes the subproject element
element_list = cli.get_pipeline(project, ['target.bst'])
assert 'base.bst:target.bst' in element_list
diff --git a/tests/frontend/show.py b/tests/frontend/show.py
index 0f6d74c65..4ef97dd84 100644
--- a/tests/frontend/show.py
+++ b/tests/frontend/show.py
@@ -277,15 +277,10 @@ def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage, element_name, w
])
result.assert_success()
- # Assert the correct error when trying to show the pipeline
+ # Assert successful bst show (requires implicit subproject fetching)
result = cli.run(project=project, silent=True, args=[
'show', element_name])
-
- # If a workspace is open, no fetch is needed
- if workspaced:
- result.assert_success()
- else:
- result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_FETCH_NEEDED)
+ result.assert_success()
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))