summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorValentin David <valentin.david@gmail.com>2018-08-02 15:17:03 +0000
committerValentin David <valentin.david@gmail.com>2018-08-02 15:17:03 +0000
commit4e1160a93282e290ac1581a63b5ee8a6f4f3f4b4 (patch)
treea8e2ca4f2a7e2bebde42cb6ec2ba5f6ce32e26a6
parent8656a65d21bd002b1fd490481e736f1748e8db86 (diff)
parent171e803f5dab2644c7bcd2e22acecef64880e1ce (diff)
downloadbuildstream-4e1160a93282e290ac1581a63b5ee8a6f4f3f4b4.tar.gz
Merge branch 'valentindavid/331_include' into 'master'
Add support for include in project.conf Closes #331 See merge request BuildStream/buildstream!471
-rw-r--r--buildstream/_elementfactory.py13
-rw-r--r--buildstream/_exceptions.py3
-rw-r--r--buildstream/_frontend/app.py27
-rw-r--r--buildstream/_frontend/widget.py7
-rw-r--r--buildstream/_includes.py128
-rw-r--r--buildstream/_loader/loader.py100
-rw-r--r--buildstream/_loader/metaelement.py5
-rw-r--r--buildstream/_loader/metasource.py2
-rw-r--r--buildstream/_options/optionpool.py13
-rw-r--r--buildstream/_pipeline.py41
-rw-r--r--buildstream/_platform/linux.py4
-rw-r--r--buildstream/_platform/platform.py3
-rw-r--r--buildstream/_platform/unix.py4
-rw-r--r--buildstream/_plugincontext.py18
-rw-r--r--buildstream/_project.py558
-rw-r--r--buildstream/_sourcefactory.py13
-rw-r--r--buildstream/_stream.py9
-rw-r--r--buildstream/_versions.py2
-rw-r--r--buildstream/_yaml.py29
-rw-r--r--buildstream/element.py69
-rw-r--r--buildstream/source.py60
-rw-r--r--doc/source/format_intro.rst44
-rw-r--r--tests/artifactcache/config.py1
-rw-r--r--tests/format/include.py263
-rw-r--r--tests/format/include/conditional/element.bst1
-rw-r--r--tests/format/include/conditional/extra_conf.yml6
-rw-r--r--tests/format/include/conditional/project.conf13
-rw-r--r--tests/format/include/file/element.bst1
-rw-r--r--tests/format/include/file/extra_conf.yml2
-rw-r--r--tests/format/include/file/project.conf4
-rw-r--r--tests/format/include/file_with_subproject/element.bst1
-rw-r--r--tests/format/include/file_with_subproject/extra_conf.yml2
-rw-r--r--tests/format/include/file_with_subproject/project.bst4
-rw-r--r--tests/format/include/file_with_subproject/project.conf4
-rw-r--r--tests/format/include/file_with_subproject/subproject/project.conf1
-rw-r--r--tests/format/include/inner/element.bst1
-rw-r--r--tests/format/include/inner/extra_conf.yml7
-rw-r--r--tests/format/include/inner/project.conf5
-rw-r--r--tests/format/include/junction/element.bst1
-rw-r--r--tests/format/include/junction/project.conf4
-rw-r--r--tests/format/include/junction/subproject/extra_conf.yml2
-rw-r--r--tests/format/include/junction/subproject/project.conf1
-rw-r--r--tests/format/include/local_to_junction/element.bst1
-rw-r--r--tests/format/include/local_to_junction/project.conf4
-rw-r--r--tests/format/include/local_to_junction/subproject/extra_conf.yml2
-rw-r--r--tests/format/include/local_to_junction/subproject/internal.yml2
-rw-r--r--tests/format/include/local_to_junction/subproject/project.conf1
-rw-r--r--tests/format/include/options/element.bst1
-rw-r--r--tests/format/include/options/extra_conf.yml8
-rw-r--r--tests/format/include/options/project.conf4
-rw-r--r--tests/format/include/overrides-junction/element.bst1
-rw-r--r--tests/format/include/overrides-junction/project.conf20
-rw-r--r--tests/format/include/overrides-junction/subproject/extra_conf.yml16
-rw-r--r--tests/format/include/overrides-junction/subproject/project.conf1
-rw-r--r--tests/format/include/overrides/element.bst1
-rw-r--r--tests/format/include/overrides/extra_conf.yml15
-rw-r--r--tests/format/include/overrides/extra_conf2.yml5
-rw-r--r--tests/format/include/overrides/project.conf22
-rw-r--r--tests/format/include/overrides/subproject/project.conf1
-rw-r--r--tests/format/include/recursive/element.bst1
-rw-r--r--tests/format/include/recursive/extra_conf.yml2
-rw-r--r--tests/format/include/recursive/extra_conf2.yml2
-rw-r--r--tests/format/include/recursive/project.conf4
-rw-r--r--tests/format/include/string/element.bst1
-rw-r--r--tests/format/include/string/extra_conf.yml2
-rw-r--r--tests/format/include/string/project.conf3
-rw-r--r--tests/format/include/sub-include/element.bst1
-rw-r--r--tests/format/include/sub-include/manual_conf.yml2
-rw-r--r--tests/format/include/sub-include/project.conf6
-rw-r--r--tests/format/include_composition.py131
-rw-r--r--tests/frontend/__init__.py34
-rw-r--r--tests/frontend/buildcheckout.py4
-rw-r--r--tests/frontend/fetch.py4
-rw-r--r--tests/frontend/mirror.py163
-rw-r--r--tests/frontend/pull.py3
-rw-r--r--tests/frontend/push.py3
-rw-r--r--tests/frontend/show.py4
-rw-r--r--tests/frontend/track.py136
-rw-r--r--tests/frontend/track_cross_junction.py4
-rw-r--r--tests/loader/__init__.py4
-rw-r--r--tests/loader/basics.py27
-rw-r--r--tests/loader/dependencies.py40
-rw-r--r--tests/plugins/basics.py32
-rw-r--r--tests/plugins/third_party.py6
-rw-r--r--tests/testutils/__init__.py1
-rw-r--r--tests/testutils/junction.py36
-rw-r--r--tests/yaml/yaml.py2
87 files changed, 1783 insertions, 456 deletions
diff --git a/buildstream/_elementfactory.py b/buildstream/_elementfactory.py
index 9d5b258cb..00847e66f 100644
--- a/buildstream/_elementfactory.py
+++ b/buildstream/_elementfactory.py
@@ -31,9 +31,13 @@ from .element import Element
#
class ElementFactory(PluginContext):
- def __init__(self, plugin_base, plugin_origins=None):
+ def __init__(self, plugin_base, *,
+ format_versions={},
+ plugin_origins=None):
- super().__init__(plugin_base, Element, [_site.element_plugins], plugin_origins)
+ super().__init__(plugin_base, Element, [_site.element_plugins],
+ plugin_origins=plugin_origins,
+ format_versions=format_versions)
# create():
#
@@ -54,4 +58,7 @@ class ElementFactory(PluginContext):
#
def create(self, context, project, artifacts, meta):
element_type, default_config = self.lookup(meta.kind)
- return element_type(context, project, artifacts, meta, default_config)
+ element = element_type(context, project, artifacts, meta, default_config)
+ version = self._format_versions.get(meta.kind, 0)
+ self._assert_plugin_format(element, version)
+ return element
diff --git a/buildstream/_exceptions.py b/buildstream/_exceptions.py
index 5187357c5..3fb5e5775 100644
--- a/buildstream/_exceptions.py
+++ b/buildstream/_exceptions.py
@@ -214,6 +214,9 @@ class LoadErrorReason(Enum):
# socket)
PROJ_PATH_INVALID_KIND = 20
+ # A recursive include has been encountered.
+ RECURSIVE_INCLUDE = 21
+
# LoadError
#
diff --git a/buildstream/_frontend/app.py b/buildstream/_frontend/app.py
index 04a90ea32..5c13bf0bf 100644
--- a/buildstream/_frontend/app.py
+++ b/buildstream/_frontend/app.py
@@ -33,6 +33,7 @@ from .. import Scope
# Import various buildstream internals
from .._context import Context
+from .._platform import Platform
from .._project import Project
from .._exceptions import BstError, StreamError, LoadError, LoadErrorReason, AppError
from .._message import Message, MessageType, unconditional_messages
@@ -198,6 +199,20 @@ class App():
if option_value is not None:
setattr(self.context, context_attr, option_value)
+ Platform.create_instance(self.context)
+
+ # Create the logger right before setting the message handler
+ self.logger = LogLine(self.context,
+ self._content_profile,
+ self._format_profile,
+ self._success_profile,
+ self._error_profile,
+ self._detail_profile,
+ indent=INDENT)
+
+ # Propagate pipeline feedback to the user
+ self.context.set_message_handler(self._message_handler)
+
#
# Load the Project
#
@@ -218,18 +233,6 @@ class App():
except BstError as e:
self._error_exit(e, "Error loading project")
- # Create the logger right before setting the message handler
- self.logger = LogLine(self.context,
- self._content_profile,
- self._format_profile,
- self._success_profile,
- self._error_profile,
- self._detail_profile,
- indent=INDENT)
-
- # Propagate pipeline feedback to the user
- self.context.set_message_handler(self._message_handler)
-
# Now that we have a logger and message handler,
# we can override the global exception hook.
sys.excepthook = self._global_exception_handler
diff --git a/buildstream/_frontend/widget.py b/buildstream/_frontend/widget.py
index 9e8534f7a..3abc31d40 100644
--- a/buildstream/_frontend/widget.py
+++ b/buildstream/_frontend/widget.py
@@ -483,8 +483,11 @@ class LogLine(Widget):
text += '\n'
# Plugins
- text += self._format_plugins(project._element_factory.loaded_dependencies,
- project._source_factory.loaded_dependencies)
+ text += self._format_plugins(project.first_pass_config.element_factory.loaded_dependencies,
+ project.first_pass_config.source_factory.loaded_dependencies)
+ if project.config.element_factory and project.config.source_factory:
+ text += self._format_plugins(project.config.element_factory.loaded_dependencies,
+ project.config.source_factory.loaded_dependencies)
# Pipeline state
text += self.content_profile.fmt("Pipeline\n", bold=True)
diff --git a/buildstream/_includes.py b/buildstream/_includes.py
new file mode 100644
index 000000000..e4afeaf82
--- /dev/null
+++ b/buildstream/_includes.py
@@ -0,0 +1,128 @@
+import os
+from collections import Mapping
+from . import _yaml
+from ._exceptions import LoadError, LoadErrorReason
+
+
+# Includes()
+#
+# This takes care of processing include directives "(@)".
+#
+# Args:
+# loader (Loader): The Loader object
+class Includes:
+
+ def __init__(self, loader):
+ self._loader = loader
+ self._loaded = {}
+
+ # process()
+ #
+ # Process recursively include directives in a YAML node.
+ #
+ # Args:
+ # node (dict): A YAML node
+ # included (set): Fail for recursion if trying to load any files in this set
+ # current_loader (Loader): Use alternative loader (for junction files)
+ # only_local (bool): Whether to ignore junction files
+ def process(self, node, *,
+ included=set(),
+ current_loader=None,
+ only_local=False):
+ if current_loader is None:
+ current_loader = self._loader
+
+ if isinstance(node.get('(@)'), str):
+ includes = [_yaml.node_get(node, str, '(@)')]
+ else:
+ includes = _yaml.node_get(node, list, '(@)', default_value=None)
+ if '(@)' in node:
+ del node['(@)']
+
+ if includes:
+ for include in reversed(includes):
+ if only_local and ':' in include:
+ continue
+ include_node, file_path, sub_loader = self._include_file(include,
+ current_loader)
+ if file_path in included:
+ provenance = _yaml.node_get_provenance(node)
+ raise LoadError(LoadErrorReason.RECURSIVE_INCLUDE,
+ "{}: trying to recursively include {}". format(provenance,
+ file_path))
+ # Because the included node will be modified, we need
+ # to copy it so that we do not modify the toplevel
+ # node of the provenance.
+ include_node = _yaml.node_chain_copy(include_node)
+
+ try:
+ included.add(file_path)
+ self.process(include_node, included=included,
+ current_loader=sub_loader,
+ only_local=only_local)
+ finally:
+ included.remove(file_path)
+
+ _yaml.composite(include_node, node)
+ to_delete = [key for key, _ in _yaml.node_items(node) if key not in include_node]
+ for key, value in include_node.items():
+ node[key] = value
+ for key in to_delete:
+ del node[key]
+
+ for _, value in _yaml.node_items(node):
+ self._process_value(value,
+ included=included,
+ current_loader=current_loader,
+ only_local=only_local)
+
+ # _include_file()
+ #
+    # Load an include YAML file with a loader.
+ #
+ # Args:
+ # include (str): file path relative to loader's project directory.
+    #    Can be prefixed with a junction name.
+ # loader (Loader): Loader for the current project.
+ def _include_file(self, include, loader):
+ shortname = include
+ if ':' in include:
+ junction, include = include.split(':', 1)
+ junction_loader = loader._get_loader(junction, fetch_subprojects=True)
+ current_loader = junction_loader
+ else:
+ current_loader = loader
+ project = current_loader.project
+ directory = project.directory
+ file_path = os.path.join(directory, include)
+ key = (current_loader, file_path)
+ if file_path not in self._loaded:
+ self._loaded[key] = _yaml.load(os.path.join(directory, include),
+ shortname=shortname,
+ project=project)
+ return self._loaded[key], file_path, current_loader
+
+ # _process_value()
+ #
+ # Select processing for value that could be a list or a dictionary.
+ #
+ # Args:
+ # value: Value to process. Can be a list or a dictionary.
+ # included (set): Fail for recursion if trying to load any files in this set
+ # current_loader (Loader): Use alternative loader (for junction files)
+ # only_local (bool): Whether to ignore junction files
+ def _process_value(self, value, *,
+ included=set(),
+ current_loader=None,
+ only_local=False):
+ if isinstance(value, Mapping):
+ self.process(value,
+ included=included,
+ current_loader=current_loader,
+ only_local=only_local)
+ elif isinstance(value, list):
+ for v in value:
+ self._process_value(v,
+ included=included,
+ current_loader=current_loader,
+ only_local=only_local)
diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index e9b9d95f1..6e46197ab 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -25,11 +25,11 @@ import shutil
from .._exceptions import LoadError, LoadErrorReason
from .. import Consistency
-from .._project import Project
from .. import _yaml
from ..element import Element
from .._profile import Topics, profile_start, profile_end
from .._platform import Platform
+from .._includes import Includes
from .types import Symbol, Dependency
from .loadelement import LoadElement
@@ -46,30 +46,19 @@ from . import MetaSource
# Args:
# context (Context): The Context object
# project (Project): The toplevel Project object
-# filenames (list of str): Target, element-path relative bst filenames in the project
# parent (Loader): A parent Loader object, in the case this is a junctioned Loader
# tempdir (str): A directory to cleanup with the Loader, given to the loader by a parent
# loader in the case that this loader is a subproject loader.
-# fetch_subprojects (bool): Whether to fetch subprojects while loading
#
class Loader():
- def __init__(self, context, project, filenames, *, parent=None, tempdir=None, fetch_subprojects=False):
+ def __init__(self, context, project, *, parent=None, tempdir=None):
# Ensure we have an absolute path for the base directory
basedir = project.element_path
if not os.path.isabs(basedir):
basedir = os.path.abspath(basedir)
- for filename in filenames:
- if os.path.isabs(filename):
- # XXX Should this just be an assertion ?
- # Expect that the caller gives us the right thing at least ?
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "Target '{}' was not specified as a relative "
- "path to the base project directory: {}"
- .format(filename, basedir))
-
#
# Public members
#
@@ -78,11 +67,10 @@ class Loader():
#
# Private members
#
- self._fetch_subprojects = fetch_subprojects
self._context = context
self._options = project.options # Project options (OptionPool)
self._basedir = basedir # Base project directory
- self._targets = filenames # Target bst elements
+ self._first_pass_options = project.first_pass_config.options # Project options (OptionPool)
self._tempdir = tempdir # A directory to cleanup
self._parent = parent # The parent loader
@@ -90,6 +78,8 @@ class Loader():
self._elements = {} # Dict of elements
self._loaders = {} # Dict of junction loaders
+ self._includes = Includes(self)
+
# load():
#
# Loads the project based on the parameters given to the constructor
@@ -98,20 +88,32 @@ class Loader():
# rewritable (bool): Whether the loaded files should be rewritable
# this is a bit more expensive due to deep copies
# ticker (callable): An optional function for tracking load progress
+ # targets (list of str): Target, element-path relative bst filenames in the project
+ # fetch_subprojects (bool): Whether to fetch subprojects while loading
#
# Raises: LoadError
#
# Returns: The toplevel LoadElement
- def load(self, rewritable=False, ticker=None):
+ def load(self, targets, rewritable=False, ticker=None, fetch_subprojects=False):
+
+ for filename in targets:
+ if os.path.isabs(filename):
+ # XXX Should this just be an assertion ?
+ # Expect that the caller gives us the right thing at least ?
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "Target '{}' was not specified as a relative "
+ "path to the base project directory: {}"
+ .format(filename, self._basedir))
# First pass, recursively load files and populate our table of LoadElements
#
deps = []
- for target in self._targets:
+ for target in targets:
profile_start(Topics.LOAD_PROJECT, target)
- junction, name, loader = self._parse_name(target, rewritable, ticker)
- loader._load_file(name, rewritable, ticker)
+ junction, name, loader = self._parse_name(target, rewritable, ticker,
+ fetch_subprojects=fetch_subprojects)
+ loader._load_file(name, rewritable, ticker, fetch_subprojects)
deps.append(Dependency(name, junction=junction))
profile_end(Topics.LOAD_PROJECT, target)
@@ -126,7 +128,7 @@ class Loader():
dummy = DummyTarget(name='', full_name='', deps=deps)
self._elements[''] = dummy
- profile_key = "_".join(t for t in self._targets)
+ profile_key = "_".join(t for t in targets)
profile_start(Topics.CIRCULAR_CHECK, profile_key)
self._check_circular_deps('')
profile_end(Topics.CIRCULAR_CHECK, profile_key)
@@ -135,9 +137,10 @@ class Loader():
#
# Sort direct dependencies of elements by their dependency ordering
#
- for target in self._targets:
+ for target in targets:
profile_start(Topics.SORT_DEPENDENCIES, target)
- junction, name, loader = self._parse_name(target, rewritable, ticker)
+ junction, name, loader = self._parse_name(target, rewritable, ticker,
+ fetch_subprojects=fetch_subprojects)
loader._sort_dependencies(name)
profile_end(Topics.SORT_DEPENDENCIES, target)
# Finally, wrap what we have into LoadElements and return the target
@@ -198,11 +201,12 @@ class Loader():
# filename (str): The element-path relative bst file
# rewritable (bool): Whether we should load in round trippable mode
# ticker (callable): A callback to report loaded filenames to the frontend
+ # fetch_subprojects (bool): Whether to fetch subprojects while loading
#
# Returns:
# (LoadElement): A loaded LoadElement
#
- def _load_file(self, filename, rewritable, ticker):
+ def _load_file(self, filename, rewritable, ticker, fetch_subprojects):
# Silently ignore already loaded files
if filename in self._elements:
@@ -215,7 +219,7 @@ class Loader():
# Load the data and process any conditional statements therein
fullpath = os.path.join(self._basedir, filename)
try:
- node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable)
+ node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable, project=self.project)
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_FILE:
# If we can't find the file, try to suggest plausible
@@ -241,7 +245,15 @@ class Loader():
message, detail=detail) from e
else:
raise
- self._options.process_node(node)
+ kind = _yaml.node_get(node, str, Symbol.KIND)
+ if kind == "junction":
+ self._first_pass_options.process_node(node)
+ else:
+ self.project.ensure_fully_loaded()
+
+ self._includes.process(node)
+
+ self._options.process_node(node)
element = LoadElement(node, filename, self)
@@ -250,12 +262,13 @@ class Loader():
# Load all dependency files for the new LoadElement
for dep in element.deps:
if dep.junction:
- self._load_file(dep.junction, rewritable, ticker)
- loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker)
+ self._load_file(dep.junction, rewritable, ticker, fetch_subprojects)
+ loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker,
+ fetch_subprojects=fetch_subprojects)
else:
loader = self
- dep_element = loader._load_file(dep.name, rewritable, ticker)
+ dep_element = loader._load_file(dep.name, rewritable, ticker, fetch_subprojects)
if _yaml.node_get(dep_element.node, str, Symbol.KIND) == 'junction':
raise LoadError(LoadErrorReason.INVALID_DATA,
@@ -432,7 +445,8 @@ class Loader():
_yaml.node_get(node, Mapping, Symbol.ENVIRONMENT, default_value={}),
_yaml.node_get(node, list, Symbol.ENV_NOCACHE, default_value=[]),
_yaml.node_get(node, Mapping, Symbol.PUBLIC, default_value={}),
- _yaml.node_get(node, Mapping, Symbol.SANDBOX, default_value={}))
+ _yaml.node_get(node, Mapping, Symbol.SANDBOX, default_value={}),
+ element_kind == 'junction')
# Cache it now, make sure it's already there before recursing
self._meta_elements[element_name] = meta_element
@@ -454,11 +468,12 @@ class Loader():
#
# Args:
# filename (str): Junction name
+ # fetch_subprojects (bool): Whether to fetch subprojects while loading
#
# Raises: LoadError
#
# Returns: A Loader or None if specified junction does not exist
- def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0):
+ def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0, fetch_subprojects=False):
# return previously determined result
if filename in self._loaders:
loader = self._loaders[filename]
@@ -475,13 +490,14 @@ class Loader():
if self._parent:
# junctions in the parent take precedence over junctions defined
# in subprojects
- loader = self._parent._get_loader(filename, rewritable=rewritable, ticker=ticker, level=level + 1)
+ loader = self._parent._get_loader(filename, rewritable=rewritable, ticker=ticker,
+ level=level + 1, fetch_subprojects=fetch_subprojects)
if loader:
self._loaders[filename] = loader
return loader
try:
- self._load_file(filename, rewritable, ticker)
+ self._load_file(filename, rewritable, ticker, fetch_subprojects)
except LoadError as e:
if e.reason != LoadErrorReason.MISSING_FILE:
# other load error
@@ -510,7 +526,7 @@ class Loader():
# Handle the case where a subproject needs to be fetched
#
if source.get_consistency() == Consistency.RESOLVED:
- if self._fetch_subprojects:
+ if fetch_subprojects:
if ticker:
ticker(filename, 'Fetching subproject from {} source'.format(source.get_kind()))
source._fetch()
@@ -536,7 +552,9 @@ class Loader():
# Load the project
project_dir = os.path.join(basedir, element.path)
try:
- project = Project(project_dir, self._context, junction=element)
+ from .._project import Project
+ project = Project(project_dir, self._context, junction=element,
+ parent_loader=self, tempdir=basedir)
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
raise LoadError(reason=LoadErrorReason.INVALID_JUNCTION,
@@ -546,11 +564,7 @@ class Loader():
else:
raise
- loader = Loader(self._context, project, [],
- parent=self,
- tempdir=basedir,
- fetch_subprojects=self._fetch_subprojects)
-
+ loader = project.loader
self._loaders[filename] = loader
return loader
@@ -581,13 +595,14 @@ class Loader():
# rewritable (bool): Whether the loaded files should be rewritable
# this is a bit more expensive due to deep copies
# ticker (callable): An optional function for tracking load progress
+ # fetch_subprojects (bool): Whether to fetch subprojects while loading
#
# Returns:
# (tuple): - (str): name of the junction element
# - (str): name of the element
# - (Loader): loader for sub-project
#
- def _parse_name(self, name, rewritable, ticker):
+ def _parse_name(self, name, rewritable, ticker, fetch_subprojects=False):
# We allow to split only once since deep junctions names are forbidden.
# Users who want to refer to elements in sub-sub-projects are required
# to create junctions on the top level project.
@@ -595,6 +610,7 @@ class Loader():
if len(junction_path) == 1:
return None, junction_path[-1], self
else:
- self._load_file(junction_path[-2], rewritable, ticker)
- loader = self._get_loader(junction_path[-2], rewritable=rewritable, ticker=ticker)
+ self._load_file(junction_path[-2], rewritable, ticker, fetch_subprojects)
+ loader = self._get_loader(junction_path[-2], rewritable=rewritable, ticker=ticker,
+ fetch_subprojects=fetch_subprojects)
return junction_path[-2], junction_path[-1], loader
diff --git a/buildstream/_loader/metaelement.py b/buildstream/_loader/metaelement.py
index 16788e92b..c13d5591e 100644
--- a/buildstream/_loader/metaelement.py
+++ b/buildstream/_loader/metaelement.py
@@ -36,9 +36,11 @@ class MetaElement():
# env_nocache: List of environment vars which should not be considered in cache keys
# public: Public domain data dictionary
# sandbox: Configuration specific to the sandbox environment
+ # first_pass: The element is to be loaded with first pass configuration (junction)
#
def __init__(self, project, name, kind, provenance, sources, config,
- variables, environment, env_nocache, public, sandbox):
+ variables, environment, env_nocache, public, sandbox,
+ first_pass):
self.project = project
self.name = name
self.kind = kind
@@ -52,3 +54,4 @@ class MetaElement():
self.sandbox = sandbox
self.build_dependencies = []
self.dependencies = []
+ self.first_pass = first_pass
diff --git a/buildstream/_loader/metasource.py b/buildstream/_loader/metasource.py
index 3bcc21ec6..da2c0e292 100644
--- a/buildstream/_loader/metasource.py
+++ b/buildstream/_loader/metasource.py
@@ -30,6 +30,7 @@ class MetaSource():
# element_kind: The kind of the owning element
# kind: The kind of the source
# config: The configuration data for the source
+    #   first_pass: This source will be used with the project's first-pass configuration (used for junctions).
#
def __init__(self, element_name, element_index, element_kind, kind, config, directory):
self.element_name = element_name
@@ -38,3 +39,4 @@ class MetaSource():
self.kind = kind
self.config = config
self.directory = directory
+ self.first_pass = False
diff --git a/buildstream/_options/optionpool.py b/buildstream/_options/optionpool.py
index f90fd820c..b53e87a3d 100644
--- a/buildstream/_options/optionpool.py
+++ b/buildstream/_options/optionpool.py
@@ -107,16 +107,19 @@ class OptionPool():
#
# Args:
# cli_options (list): A list of (str, str) tuples
+ # ignore_unknown (bool): Whether to silently ignore unknown options.
#
- def load_cli_values(self, cli_options):
+ def load_cli_values(self, cli_options, *, ignore_unknown=False):
for option_name, option_value in cli_options:
try:
option = self._options[option_name]
except KeyError as e:
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "Unknown option '{}' specified on the command line"
- .format(option_name)) from e
- option.set_value(option_value)
+ if not ignore_unknown:
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "Unknown option '{}' specified on the command line"
+ .format(option_name)) from e
+ else:
+ option.set_value(option_value)
# resolve()
#
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index 800a331fd..f3db2838c 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -25,9 +25,7 @@ from operator import itemgetter
from ._exceptions import PipelineError
from ._message import Message, MessageType
-from ._loader import Loader
from ._profile import Topics, profile_start, profile_end
-from .element import Element
from . import Scope, Consistency
from ._project import ProjectRefStorage
@@ -80,7 +78,6 @@ class Pipeline():
# Private members
#
self._artifacts = artifacts
- self._loader = None
# load()
#
@@ -109,30 +106,9 @@ class Pipeline():
profile_start(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, '-') for t in targets))
- self._loader = Loader(self._context, self._project, targets,
- fetch_subprojects=fetch_subprojects)
-
- with self._context.timed_activity("Loading pipeline", silent_nested=True):
- meta_elements = self._loader.load(rewritable, None)
-
- # Resolve the real elements now that we've loaded the project
- with self._context.timed_activity("Resolving pipeline"):
- elements = [
- Element._new_from_meta(meta, self._artifacts)
- for meta in meta_elements
- ]
-
- # Now warn about any redundant source references which may have
- # been discovered in the resolve() phase.
- redundant_refs = Element._get_redundant_source_refs()
- if redundant_refs:
- detail = "The following inline specified source references will be ignored:\n\n"
- lines = [
- "{}:{}".format(source._get_provenance(), ref)
- for source, ref in redundant_refs
- ]
- detail += "\n".join(lines)
- self._message(MessageType.WARN, "Ignoring redundant source references", detail=detail)
+ elements = self._project.load_elements(targets, self._artifacts,
+ rewritable=rewritable,
+ fetch_subprojects=fetch_subprojects)
# Now create element groups to match the input target groups
elt_iter = iter(elements)
@@ -388,17 +364,6 @@ class Pipeline():
detail += " " + element._get_full_name() + "\n"
raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")
- # cleanup()
- #
- # Cleans up resources used by the Pipeline.
- #
- def cleanup(self):
- if self._loader:
- self._loader.cleanup()
-
- # Reset the element loader state
- Element._reset_load_state()
-
#############################################################
# Private Methods #
#############################################################
diff --git a/buildstream/_platform/linux.py b/buildstream/_platform/linux.py
index 56ebb410e..a5fd0d687 100644
--- a/buildstream/_platform/linux.py
+++ b/buildstream/_platform/linux.py
@@ -30,9 +30,9 @@ from . import Platform
class Linux(Platform):
- def __init__(self, context, project):
+ def __init__(self, context):
- super().__init__(context, project)
+ super().__init__(context)
self._die_with_parent_available = _site.check_bwrap_version(0, 1, 8)
self._user_ns_available = self._check_user_ns_available(context)
diff --git a/buildstream/_platform/platform.py b/buildstream/_platform/platform.py
index 29da33563..8a074eb62 100644
--- a/buildstream/_platform/platform.py
+++ b/buildstream/_platform/platform.py
@@ -35,9 +35,8 @@ class Platform():
# Args:
# context (context): The project context
#
- def __init__(self, context, project):
+ def __init__(self, context):
self.context = context
- self.project = project
@classmethod
def create_instance(cls, *args, **kwargs):
diff --git a/buildstream/_platform/unix.py b/buildstream/_platform/unix.py
index be4c129d3..0306a4ac5 100644
--- a/buildstream/_platform/unix.py
+++ b/buildstream/_platform/unix.py
@@ -28,9 +28,9 @@ from . import Platform
class Unix(Platform):
- def __init__(self, context, project):
+ def __init__(self, context):
- super().__init__(context, project)
+ super().__init__(context)
self._artifact_cache = CASCache(context)
# Not necessarily 100% reliable, but we want to fail early.
diff --git a/buildstream/_plugincontext.py b/buildstream/_plugincontext.py
index 38d2231ba..5a7097485 100644
--- a/buildstream/_plugincontext.py
+++ b/buildstream/_plugincontext.py
@@ -20,7 +20,7 @@
import os
import inspect
-from ._exceptions import PluginError
+from ._exceptions import PluginError, LoadError, LoadErrorReason
from . import utils
@@ -41,7 +41,9 @@ from . import utils
#
class PluginContext():
- def __init__(self, plugin_base, base_type, site_plugin_path, plugin_origins=None, dependencies=None):
+ def __init__(self, plugin_base, base_type, site_plugin_path, *,
+ plugin_origins=None, dependencies=None,
+ format_versions={}):
# The plugin kinds which were loaded
self.loaded_dependencies = []
@@ -58,6 +60,7 @@ class PluginContext():
self._plugin_base = plugin_base
self._site_source = plugin_base.make_plugin_source(searchpath=site_plugin_path)
self._alternate_sources = {}
+ self._format_versions = format_versions
# lookup():
#
@@ -219,3 +222,14 @@ class PluginContext():
self._base_type.__name__, kind,
plugin_type.BST_REQUIRED_VERSION_MAJOR,
plugin_type.BST_REQUIRED_VERSION_MINOR))
+
+ # _assert_plugin_format()
+ #
+    # Helper to raise a LoadError if the loaded plugin is of a lesser version than
+ # the required version for this plugin
+ #
+ def _assert_plugin_format(self, plugin, version):
+ if plugin.BST_FORMAT_VERSION < version:
+ raise LoadError(LoadErrorReason.UNSUPPORTED_PLUGIN,
+ "{}: Format version {} is too old for requested version {}"
+ .format(plugin, plugin.BST_FORMAT_VERSION, version))
diff --git a/buildstream/_project.py b/buildstream/_project.py
index 3ac562836..a0f3a5059 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -34,6 +34,10 @@ from ._elementfactory import ElementFactory
from ._sourcefactory import SourceFactory
from ._projectrefs import ProjectRefs, ProjectRefStorage
from ._versions import BST_FORMAT_VERSION
+from ._loader import Loader
+from .element import Element
+from ._message import Message, MessageType
+from ._includes import Includes
# Project Configuration file
@@ -62,13 +66,28 @@ class HostMount():
self.host_path = self.path
+# Represents project configuration that can have different values for junctions.
+class ProjectConfig:
+ def __init__(self):
+ self.element_factory = None
+ self.source_factory = None
+ self.options = None # OptionPool
+ self.base_variables = {} # The base set of variables
+ self.element_overrides = {} # Element specific configurations
+ self.source_overrides = {} # Source specific configurations
+ self.mirrors = OrderedDict() # contains dicts of alias-mappings to URIs.
+ self.default_mirror = None # The name of the preferred mirror.
+ self._aliases = {} # Aliases dictionary
+
+
# Project()
#
# The Project Configuration
#
class Project():
- def __init__(self, directory, context, *, junction=None, cli_options=None, default_mirror=None):
+ def __init__(self, directory, context, *, junction=None, cli_options=None,
+ default_mirror=None, parent_loader=None, tempdir=None):
# The project name
self.name = None
@@ -83,40 +102,60 @@ class Project():
self.refs = ProjectRefs(self.directory, 'project.refs')
self.junction_refs = ProjectRefs(self.directory, 'junction.refs')
- self.options = None # OptionPool
+ self.config = ProjectConfig()
+ self.first_pass_config = ProjectConfig()
+
self.junction = junction # The junction Element object, if this is a subproject
self.fail_on_overlap = False # Whether overlaps are treated as errors
self.ref_storage = None # ProjectRefStorage setting
- self.base_variables = {} # The base set of variables
self.base_environment = {} # The base set of environment variables
self.base_env_nocache = None # The base nocache mask (list) for the environment
- self.element_overrides = {} # Element specific configurations
- self.source_overrides = {} # Source specific configurations
- self.mirrors = OrderedDict() # contains dicts of alias-mappings to URIs.
- self.default_mirror = default_mirror # The name of the preferred mirror.
#
# Private Members
#
self._context = context # The invocation Context
- self._aliases = {} # Aliases dictionary
- self._plugin_source_origins = [] # Origins of custom sources
- self._plugin_element_origins = [] # Origins of custom elements
+
+ self._default_mirror = default_mirror # The name of the preferred mirror.
self._cli_options = cli_options
self._cache_key = None
- self._source_format_versions = {}
- self._element_format_versions = {}
self._shell_command = [] # The default interactive shell command
self._shell_environment = {} # Statically set environment vars
self._shell_host_files = [] # A list of HostMount objects
+ self.artifact_cache_specs = None
+ self._sandbox = None
+ self._splits = None
+
+ self._context.add_project(self)
+
+ self._partially_loaded = False
+ self._fully_loaded = False
+ self._project_includes = None
+
profile_start(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
- self._load()
+ self._load(parent_loader=parent_loader, tempdir=tempdir)
profile_end(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
- self._context.add_project(self)
+ self._partially_loaded = True
+
+ @property
+ def options(self):
+ return self.config.options
+
+ @property
+ def base_variables(self):
+ return self.config.base_variables
+
+ @property
+ def element_overrides(self):
+ return self.config.element_overrides
+
+ @property
+ def source_overrides(self):
+ return self.config.source_overrides
# translate_url():
#
@@ -125,6 +164,7 @@ class Project():
#
# Args:
# url (str): A url, which may be using an alias
+ # first_pass (bool): Whether to use first pass configuration (for junctions)
#
# Returns:
# str: The fully qualified url, with aliases resolved
@@ -132,10 +172,15 @@ class Project():
# This method is provided for :class:`.Source` objects to resolve
# fully qualified urls based on the shorthand which is allowed
# to be specified in the YAML
- def translate_url(self, url):
+ def translate_url(self, url, *, first_pass=False):
+ if first_pass:
+ config = self.first_pass_config
+ else:
+ config = self.config
+
if url and utils._ALIAS_SEPARATOR in url:
url_alias, url_body = url.split(utils._ALIAS_SEPARATOR, 1)
- alias_url = self._aliases.get(url_alias)
+ alias_url = config._aliases.get(url_alias)
if alias_url:
url = alias_url + url_body
@@ -176,15 +221,16 @@ class Project():
# Args:
# artifacts (ArtifactCache): The artifact cache
# meta (MetaElement): The loaded MetaElement
+ # first_pass (bool): Whether to use first pass configuration (for junctions)
#
# Returns:
# (Element): A newly created Element object of the appropriate kind
#
- def create_element(self, artifacts, meta):
- element = self._element_factory.create(self._context, self, artifacts, meta)
- version = self._element_format_versions.get(meta.kind, 0)
- self._assert_plugin_format(element, version)
- return element
+ def create_element(self, artifacts, meta, *, first_pass=False):
+ if first_pass:
+ return self.first_pass_config.element_factory.create(self._context, self, artifacts, meta)
+ else:
+ return self.config.element_factory.create(self._context, self, artifacts, meta)
# create_source()
#
@@ -192,15 +238,16 @@ class Project():
#
# Args:
# meta (MetaSource): The loaded MetaSource
+ # first_pass (bool): Whether to use first pass configuration (for junctions)
#
# Returns:
# (Source): A newly created Source object of the appropriate kind
#
- def create_source(self, meta):
- source = self._source_factory.create(self._context, self, meta)
- version = self._source_format_versions.get(meta.kind, 0)
- self._assert_plugin_format(source, version)
- return source
+ def create_source(self, meta, *, first_pass=False):
+ if first_pass:
+ return self.first_pass_config.source_factory.create(self._context, self, meta)
+ else:
+ return self.config.source_factory.create(self._context, self, meta)
# get_alias_uri()
#
@@ -208,62 +255,140 @@ class Project():
#
# Args:
# alias (str): The alias.
+ # first_pass (bool): Whether to use first pass configuration (for junctions)
#
# Returns:
# str: The URI for the given alias; or None: if there is no URI for
# that alias.
- def get_alias_uri(self, alias):
- return self._aliases.get(alias)
+ def get_alias_uri(self, alias, *, first_pass=False):
+ if first_pass:
+ config = self.first_pass_config
+ else:
+ config = self.config
+
+ return config._aliases.get(alias)
# get_alias_uris()
#
+ # Args:
+ # alias (str): The alias.
+ # first_pass (bool): Whether to use first pass configuration (for junctions)
+ #
# Returns a list of every URI to replace an alias with
- def get_alias_uris(self, alias):
- if not alias or alias not in self._aliases:
+ def get_alias_uris(self, alias, *, first_pass=False):
+ if first_pass:
+ config = self.first_pass_config
+ else:
+ config = self.config
+
+ if not alias or alias not in config._aliases:
return [None]
mirror_list = []
- for key, alias_mapping in self.mirrors.items():
+ for key, alias_mapping in config.mirrors.items():
if alias in alias_mapping:
- if key == self.default_mirror:
+ if key == config.default_mirror:
mirror_list = alias_mapping[alias] + mirror_list
else:
mirror_list += alias_mapping[alias]
- mirror_list.append(self._aliases[alias])
+ mirror_list.append(config._aliases[alias])
return mirror_list
+ # load_elements()
+ #
+ # Loads elements from target names.
+ #
+ # Args:
+ # targets (list): Target names
+ # artifacts (ArtifactCache): Artifact cache
+ # rewritable (bool): Whether the loaded files should be rewritable
+ # this is a bit more expensive due to deep copies
+ # fetch_subprojects (bool): Whether we should fetch subprojects as a part of the
+ # loading process, if they are not yet locally cached
+ #
+ # Returns:
+ # (list): A list of loaded Element
+ #
+ def load_elements(self, targets, artifacts, *,
+ rewritable=False, fetch_subprojects=False):
+ with self._context.timed_activity("Loading elements", silent_nested=True):
+ meta_elements = self.loader.load(targets, rewritable=rewritable,
+ ticker=None,
+ fetch_subprojects=fetch_subprojects)
+
+ with self._context.timed_activity("Resolving elements"):
+ elements = [
+ Element._new_from_meta(meta, artifacts)
+ for meta in meta_elements
+ ]
+
+ # Now warn about any redundant source references which may have
+ # been discovered in the resolve() phase.
+ redundant_refs = Element._get_redundant_source_refs()
+ if redundant_refs:
+ detail = "The following inline specified source references will be ignored:\n\n"
+ lines = [
+ "{}:{}".format(source._get_provenance(), ref)
+ for source, ref in redundant_refs
+ ]
+ detail += "\n".join(lines)
+ self._context.message(
+ Message(None, MessageType.WARN, "Ignoring redundant source references", detail=detail))
+
+ return elements
+
+ # ensure_fully_loaded()
+ #
+ # Ensure project has finished loading. At first initialization, a
+ # project can only load junction elements. Other elements require
+ # the project to be fully loaded.
+ #
+ def ensure_fully_loaded(self):
+ if self._fully_loaded:
+ return
+ assert self._partially_loaded
+ self._fully_loaded = True
+
+ if self.junction:
+ self.junction._get_project().ensure_fully_loaded()
+
+ self._load_second_pass()
+
+ # cleanup()
+ #
+ # Cleans up resources used loading elements
+ #
+ def cleanup(self):
+ self.loader.cleanup()
+
+ # Reset the element loader state
+ Element._reset_load_state()
+
# _load():
#
- # Loads the project configuration file in the project directory.
+ # Loads the project configuration file in the project
+ # directory and processes the first pass.
#
# Raises: LoadError if there was a problem with the project.conf
#
- def _load(self):
+ def _load(self, parent_loader=None, tempdir=None):
# Load builtin default
projectfile = os.path.join(self.directory, _PROJECT_CONF_FILE)
- config = _yaml.load(_site.default_project_config)
+ self._default_config_node = _yaml.load(_site.default_project_config)
# Load project local config and override the builtin
try:
- project_conf = _yaml.load(projectfile)
+ self._project_conf = _yaml.load(projectfile)
except LoadError as e:
# Raise a more specific error here
raise LoadError(LoadErrorReason.MISSING_PROJECT_CONF, str(e))
- _yaml.composite(config, project_conf)
-
- # Element and Source type configurations will be composited later onto
- # element/source types, so we delete it from here and run our final
- # assertion after.
- self.element_overrides = _yaml.node_get(config, Mapping, 'elements', default_value={})
- self.source_overrides = _yaml.node_get(config, Mapping, 'sources', default_value={})
- config.pop('elements', None)
- config.pop('sources', None)
- _yaml.node_final_assertions(config)
+ pre_config_node = _yaml.node_copy(self._default_config_node)
+ _yaml.composite(pre_config_node, self._project_conf)
# Assert project's format version early, before validating toplevel keys
- format_version = _yaml.node_get(config, int, 'format-version')
+ format_version = _yaml.node_get(pre_config_node, int, 'format-version')
if BST_FORMAT_VERSION < format_version:
major, minor = utils.get_bst_version()
raise LoadError(
@@ -271,59 +396,70 @@ class Project():
"Project requested format version {}, but BuildStream {}.{} only supports up until format version {}"
.format(format_version, major, minor, BST_FORMAT_VERSION))
- _yaml.node_validate(config, [
- 'format-version',
- 'element-path', 'variables',
- 'environment', 'environment-nocache',
- 'split-rules', 'elements', 'plugins',
- 'aliases', 'name',
- 'artifacts', 'options',
- 'fail-on-overlap', 'shell',
- 'ref-storage', 'sandbox', 'mirrors',
- ])
-
# The project name, element path and option declarations
# are constant and cannot be overridden by option conditional statements
- self.name = _yaml.node_get(config, str, 'name')
+ self.name = _yaml.node_get(pre_config_node, str, 'name')
# Validate that project name is a valid symbol name
- _yaml.assert_symbol_name(_yaml.node_get_provenance(config, 'name'),
+ _yaml.assert_symbol_name(_yaml.node_get_provenance(pre_config_node, 'name'),
self.name, "project name")
self.element_path = os.path.join(
self.directory,
- _yaml.node_get_project_path(config, 'element-path', self.directory,
+ _yaml.node_get_project_path(pre_config_node, 'element-path', self.directory,
check_is_dir=True)
)
- # Load project options
- options_node = _yaml.node_get(config, Mapping, 'options', default_value={})
- self.options = OptionPool(self.element_path)
- self.options.load(options_node)
- if self.junction:
- # load before user configuration
- self.options.load_yaml_values(self.junction.options, transform=self.junction._subst_string)
+ self.config.options = OptionPool(self.element_path)
+ self.first_pass_config.options = OptionPool(self.element_path)
- # Collect option values specified in the user configuration
- overrides = self._context.get_overrides(self.name)
- override_options = _yaml.node_get(overrides, Mapping, 'options', default_value={})
- self.options.load_yaml_values(override_options)
- if self._cli_options:
- self.options.load_cli_values(self._cli_options)
+ self.loader = Loader(self._context, self,
+ parent=parent_loader,
+ tempdir=tempdir)
- # We're done modifying options, now we can use them for substitutions
- self.options.resolve()
+ self._project_includes = Includes(self.loader)
- #
- # Now resolve any conditionals in the remaining configuration,
- # any conditionals specified for project option declarations,
- # or conditionally specifying the project name; will be ignored.
- #
- self.options.process_node(config)
+ project_conf_first_pass = _yaml.node_copy(self._project_conf)
+ self._project_includes.process(project_conf_first_pass, only_local=True)
+ config_no_include = _yaml.node_copy(self._default_config_node)
+ _yaml.composite(config_no_include, project_conf_first_pass)
- # Override default_mirror if not set by command-line
- if not self.default_mirror:
- self.default_mirror = _yaml.node_get(overrides, str, 'default-mirror', default_value=None)
+ self._load_pass(config_no_include, self.first_pass_config,
+ ignore_unknown=True)
+
+ # Use separate file for storing source references
+ self.ref_storage = _yaml.node_get(pre_config_node, str, 'ref-storage')
+ if self.ref_storage not in [ProjectRefStorage.INLINE, ProjectRefStorage.PROJECT_REFS]:
+ p = _yaml.node_get_provenance(pre_config_node, 'ref-storage')
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Invalid value '{}' specified for ref-storage"
+ .format(p, self.ref_storage))
+
+ if self.ref_storage == ProjectRefStorage.PROJECT_REFS:
+ self.junction_refs.load(self.first_pass_config.options)
+
+ # _load_second_pass()
+ #
+ # Process the second pass of loading the project configuration.
+ #
+ def _load_second_pass(self):
+ project_conf_second_pass = _yaml.node_copy(self._project_conf)
+ self._project_includes.process(project_conf_second_pass)
+ config = _yaml.node_copy(self._default_config_node)
+ _yaml.composite(config, project_conf_second_pass)
+
+ self._load_pass(config, self.config)
+
+ _yaml.node_validate(config, [
+ 'format-version',
+ 'element-path', 'variables',
+ 'environment', 'environment-nocache',
+ 'split-rules', 'elements', 'plugins',
+ 'aliases', 'name',
+ 'artifacts', 'options',
+ 'fail-on-overlap', 'shell',
+ 'ref-storage', 'sandbox', 'mirrors'
+ ])
#
# Now all YAML composition is done, from here on we just load
@@ -333,66 +469,6 @@ class Project():
# Load artifacts pull/push configuration for this project
self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)
- # Plugin origins and versions
- origins = _yaml.node_get(config, list, 'plugins', default_value=[])
- for origin in origins:
- allowed_origin_fields = [
- 'origin', 'sources', 'elements',
- 'package-name', 'path',
- ]
- allowed_origins = ['core', 'local', 'pip']
- _yaml.node_validate(origin, allowed_origin_fields)
-
- if origin['origin'] not in allowed_origins:
- raise LoadError(
- LoadErrorReason.INVALID_YAML,
- "Origin '{}' is not one of the allowed types"
- .format(origin['origin']))
-
- # Store source versions for checking later
- source_versions = _yaml.node_get(origin, Mapping, 'sources', default_value={})
- for key, _ in _yaml.node_items(source_versions):
- if key in self._source_format_versions:
- raise LoadError(
- LoadErrorReason.INVALID_YAML,
- "Duplicate listing of source '{}'".format(key))
- self._source_format_versions[key] = _yaml.node_get(source_versions, int, key)
-
- # Store element versions for checking later
- element_versions = _yaml.node_get(origin, Mapping, 'elements', default_value={})
- for key, _ in _yaml.node_items(element_versions):
- if key in self._element_format_versions:
- raise LoadError(
- LoadErrorReason.INVALID_YAML,
- "Duplicate listing of element '{}'".format(key))
- self._element_format_versions[key] = _yaml.node_get(element_versions, int, key)
-
- # Store the origins if they're not 'core'.
- # core elements are loaded by default, so storing is unnecessary.
- if _yaml.node_get(origin, str, 'origin') != 'core':
- self._store_origin(origin, 'sources', self._plugin_source_origins)
- self._store_origin(origin, 'elements', self._plugin_element_origins)
-
- pluginbase = PluginBase(package='buildstream.plugins')
- self._element_factory = ElementFactory(pluginbase, self._plugin_element_origins)
- self._source_factory = SourceFactory(pluginbase, self._plugin_source_origins)
-
- # Source url aliases
- self._aliases = _yaml.node_get(config, Mapping, 'aliases', default_value={})
-
- # Load base variables
- self.base_variables = _yaml.node_get(config, Mapping, 'variables')
-
- # Add the project name as a default variable
- self.base_variables['project-name'] = self.name
-
- # Extend variables with automatic variables and option exports
- # Initialize it as a string as all variables are processed as strings.
- self.base_variables['max-jobs'] = str(multiprocessing.cpu_count())
-
- # Export options into variables, if that was requested
- self.options.export_variables(self.base_variables)
-
# Load sandbox environment variables
self.base_environment = _yaml.node_get(config, Mapping, 'environment')
self.base_env_nocache = _yaml.node_get(config, list, 'environment-nocache')
@@ -406,18 +482,9 @@ class Project():
# Fail on overlap
self.fail_on_overlap = _yaml.node_get(config, bool, 'fail-on-overlap')
- # Use separate file for storing source references
- self.ref_storage = _yaml.node_get(config, str, 'ref-storage')
- if self.ref_storage not in [ProjectRefStorage.INLINE, ProjectRefStorage.PROJECT_REFS]:
- p = _yaml.node_get_provenance(config, 'ref-storage')
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Invalid value '{}' specified for ref-storage"
- .format(p, self.ref_storage))
-
# Load project.refs if it exists, this may be ignored.
if self.ref_storage == ProjectRefStorage.PROJECT_REFS:
self.refs.load(self.options)
- self.junction_refs.load(self.options)
# Parse shell options
shell_options = _yaml.node_get(config, Mapping, 'shell')
@@ -449,6 +516,71 @@ class Project():
self._shell_host_files.append(mount)
+ # _load_pass():
+ #
+ # Loads parts of the project configuration that are different
+ # for first and second pass configurations.
+ #
+ # Args:
+ # config (dict) - YAML node of the configuration file.
+ # output (ProjectConfig) - ProjectConfig to load configuration onto.
+ # ignore_unknown (bool) - Whether option loader should ignore unknown options.
+ #
+ def _load_pass(self, config, output, *,
+ ignore_unknown=False):
+
+ # Element and Source type configurations will be composited later onto
+ # element/source types, so we delete it from here and run our final
+ # assertion after.
+ output.element_overrides = _yaml.node_get(config, Mapping, 'elements', default_value={})
+ output.source_overrides = _yaml.node_get(config, Mapping, 'sources', default_value={})
+ config.pop('elements', None)
+ config.pop('sources', None)
+ _yaml.node_final_assertions(config)
+
+ self._load_plugin_factories(config, output)
+
+ # Load project options
+ options_node = _yaml.node_get(config, Mapping, 'options', default_value={})
+ output.options.load(options_node)
+ if self.junction:
+ # load before user configuration
+ output.options.load_yaml_values(self.junction.options, transform=self.junction._subst_string)
+
+ # Collect option values specified in the user configuration
+ overrides = self._context.get_overrides(self.name)
+ override_options = _yaml.node_get(overrides, Mapping, 'options', default_value={})
+ output.options.load_yaml_values(override_options)
+ if self._cli_options:
+ output.options.load_cli_values(self._cli_options, ignore_unknown=ignore_unknown)
+
+ # We're done modifying options, now we can use them for substitutions
+ output.options.resolve()
+
+ #
+ # Now resolve any conditionals in the remaining configuration,
+ # any conditionals specified for project option declarations,
+ # or conditionally specifying the project name; will be ignored.
+ #
+ output.options.process_node(config)
+
+ # Load base variables
+ output.base_variables = _yaml.node_get(config, Mapping, 'variables')
+
+ # Add the project name as a default variable
+ output.base_variables['project-name'] = self.name
+
+ # Extend variables with automatic variables and option exports
+ # Initialize it as a string as all variables are processed as strings.
+ output.base_variables['max-jobs'] = str(multiprocessing.cpu_count())
+
+ # Export options into variables, if that was requested
+ output.options.export_variables(output.base_variables)
+
+ # Override default_mirror if not set by command-line
+ output.default_mirror = self._default_mirror or _yaml.node_get(overrides, str,
+ 'default-mirror', default_value=None)
+
mirrors = _yaml.node_get(config, list, 'mirrors', default_value=[])
for mirror in mirrors:
allowed_mirror_fields = [
@@ -460,20 +592,90 @@ class Project():
for alias_mapping, uris in _yaml.node_items(mirror['aliases']):
assert isinstance(uris, list)
alias_mappings[alias_mapping] = list(uris)
- self.mirrors[mirror_name] = alias_mappings
- if not self.default_mirror:
- self.default_mirror = mirror_name
+ output.mirrors[mirror_name] = alias_mappings
+ if not output.default_mirror:
+ output.default_mirror = mirror_name
+
+ # Source url aliases
+ output._aliases = _yaml.node_get(config, Mapping, 'aliases', default_value={})
- # _assert_plugin_format()
+ # _ensure_project_dir()
#
- # Helper to raise a PluginError if the loaded plugin is of a lesser version then
- # the required version for this plugin
+ # Returns path of the project directory, if a configuration file is found
+ # in given directory or any of its parent directories.
#
- def _assert_plugin_format(self, plugin, version):
- if plugin.BST_FORMAT_VERSION < version:
- raise LoadError(LoadErrorReason.UNSUPPORTED_PLUGIN,
- "{}: Format version {} is too old for requested version {}"
- .format(plugin, plugin.BST_FORMAT_VERSION, version))
+ # Args:
+ # directory (str) - directory from where the command was invoked
+ #
+ # Raises:
+ # LoadError if project.conf is not found
+ #
+ def _ensure_project_dir(self, directory):
+ directory = os.path.abspath(directory)
+ while not os.path.isfile(os.path.join(directory, _PROJECT_CONF_FILE)):
+ parent_dir = os.path.dirname(directory)
+ if directory == parent_dir:
+ raise LoadError(
+ LoadErrorReason.MISSING_PROJECT_CONF,
+ '{} not found in current directory or any of its parent directories'
+ .format(_PROJECT_CONF_FILE))
+ directory = parent_dir
+
+ return directory
+
+ def _load_plugin_factories(self, config, output):
+ plugin_source_origins = [] # Origins of custom sources
+ plugin_element_origins = [] # Origins of custom elements
+
+ # Plugin origins and versions
+ origins = _yaml.node_get(config, list, 'plugins', default_value=[])
+ source_format_versions = {}
+ element_format_versions = {}
+ for origin in origins:
+ allowed_origin_fields = [
+ 'origin', 'sources', 'elements',
+ 'package-name', 'path',
+ ]
+ allowed_origins = ['core', 'local', 'pip']
+ _yaml.node_validate(origin, allowed_origin_fields)
+
+ if origin['origin'] not in allowed_origins:
+ raise LoadError(
+ LoadErrorReason.INVALID_YAML,
+ "Origin '{}' is not one of the allowed types"
+ .format(origin['origin']))
+
+ # Store source versions for checking later
+ source_versions = _yaml.node_get(origin, Mapping, 'sources', default_value={})
+ for key, _ in _yaml.node_items(source_versions):
+ if key in source_format_versions:
+ raise LoadError(
+ LoadErrorReason.INVALID_YAML,
+ "Duplicate listing of source '{}'".format(key))
+ source_format_versions[key] = _yaml.node_get(source_versions, int, key)
+
+ # Store element versions for checking later
+ element_versions = _yaml.node_get(origin, Mapping, 'elements', default_value={})
+ for key, _ in _yaml.node_items(element_versions):
+ if key in element_format_versions:
+ raise LoadError(
+ LoadErrorReason.INVALID_YAML,
+ "Duplicate listing of element '{}'".format(key))
+ element_format_versions[key] = _yaml.node_get(element_versions, int, key)
+
+ # Store the origins if they're not 'core'.
+ # core elements are loaded by default, so storing is unnecessary.
+ if _yaml.node_get(origin, str, 'origin') != 'core':
+ self._store_origin(origin, 'sources', plugin_source_origins)
+ self._store_origin(origin, 'elements', plugin_element_origins)
+
+ pluginbase = PluginBase(package='buildstream.plugins')
+ output.element_factory = ElementFactory(pluginbase,
+ plugin_origins=plugin_element_origins,
+ format_versions=element_format_versions)
+ output.source_factory = SourceFactory(pluginbase,
+ plugin_origins=plugin_source_origins,
+ format_versions=source_format_versions)
# _store_origin()
#
@@ -508,27 +710,3 @@ class Project():
# paths are passed in relative to the project, but must be absolute
origin_dict['path'] = os.path.join(self.directory, path)
destination.append(origin_dict)
-
- # _ensure_project_dir()
- #
- # Returns path of the project directory, if a configuration file is found
- # in given directory or any of its parent directories.
- #
- # Args:
- # directory (str) - directory from where the command was invoked
- #
- # Raises:
- # LoadError if project.conf is not found
- #
- def _ensure_project_dir(self, directory):
- directory = os.path.abspath(directory)
- while not os.path.isfile(os.path.join(directory, _PROJECT_CONF_FILE)):
- parent_dir = os.path.dirname(directory)
- if directory == parent_dir:
- raise LoadError(
- LoadErrorReason.MISSING_PROJECT_CONF,
- '{} not found in current directory or any of its parent directories'
- .format(_PROJECT_CONF_FILE))
- directory = parent_dir
-
- return directory
diff --git a/buildstream/_sourcefactory.py b/buildstream/_sourcefactory.py
index 88a130e10..1d959a140 100644
--- a/buildstream/_sourcefactory.py
+++ b/buildstream/_sourcefactory.py
@@ -31,9 +31,13 @@ from .source import Source
#
class SourceFactory(PluginContext):
- def __init__(self, plugin_base, plugin_origins=None):
+ def __init__(self, plugin_base, *,
+ format_versions={},
+ plugin_origins=None):
- super().__init__(plugin_base, Source, [_site.source_plugins], plugin_origins)
+ super().__init__(plugin_base, Source, [_site.source_plugins],
+ format_versions=format_versions,
+ plugin_origins=plugin_origins)
# create():
#
@@ -54,4 +58,7 @@ class SourceFactory(PluginContext):
#
def create(self, context, project, meta):
source_type, _ = self.lookup(meta.kind)
- return source_type(context, project, meta)
+ source = source_type(context, project, meta)
+ version = self._format_versions.get(meta.kind, 0)
+ self._assert_plugin_format(source, version)
+ return source
diff --git a/buildstream/_stream.py b/buildstream/_stream.py
index ad4e1066c..f17d641de 100644
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -71,7 +71,6 @@ class Stream():
#
# Private members
#
- Platform.create_instance(context, project)
self._platform = Platform.get_platform()
self._artifacts = self._platform.artifactcache
self._context = context
@@ -90,8 +89,8 @@ class Stream():
# Cleans up application state
#
def cleanup(self):
- if self._pipeline:
- self._pipeline.cleanup()
+ if self._project:
+ self._project.cleanup()
# load_selection()
#
@@ -906,6 +905,10 @@ class Stream():
for element in track_selected:
element._schedule_tracking()
+ # ArtifactCache.setup_remotes expects all projects to be fully loaded
+ for project in self._context.get_projects():
+ project.ensure_fully_loaded()
+
# Connect to remote caches, this needs to be done before resolving element state
self._artifacts.setup_remotes(use_config=use_artifact_config, remote_url=artifact_remote_url)
diff --git a/buildstream/_versions.py b/buildstream/_versions.py
index 4531d9a72..39ff30fc3 100644
--- a/buildstream/_versions.py
+++ b/buildstream/_versions.py
@@ -23,7 +23,7 @@
# This version is bumped whenever enhancements are made
# to the `project.conf` format or the core element format.
#
-BST_FORMAT_VERSION = 11
+BST_FORMAT_VERSION = 12
# The base BuildStream artifact version
diff --git a/buildstream/_yaml.py b/buildstream/_yaml.py
index 33ee444aa..66500fbad 100644
--- a/buildstream/_yaml.py
+++ b/buildstream/_yaml.py
@@ -38,6 +38,19 @@ RoundTripConstructor.add_constructor(u'tag:yaml.org,2002:float', RoundTripConstr
PROVENANCE_KEY = '__bst_provenance_info'
+# Provides information about a file for provenance
+#
+# Args:
+# name (str): Full path to the file
+# shortname (str): Relative path to the file
+# project (Project): Project where the shortname is relative from
+class ProvenanceFile():
+ def __init__(self, name, shortname, project):
+ self.name = name
+ self.shortname = shortname
+ self.project = project
+
+
# Provenance tracks the origin of a given node in the parsed dictionary.
#
# Args:
@@ -57,7 +70,7 @@ class Provenance():
# Convert a Provenance to a string for error reporting
def __str__(self):
- return "{} [line {:d} column {:d}]".format(self.filename, self.line, self.col)
+ return "{} [line {:d} column {:d}]".format(self.filename.shortname, self.line, self.col)
# Abstract method
def clone(self):
@@ -175,13 +188,15 @@ class CompositeTypeError(CompositeError):
#
# Raises: LoadError
#
-def load(filename, shortname=None, copy_tree=False):
+def load(filename, shortname=None, copy_tree=False, *, project=None):
if not shortname:
shortname = filename
+ file = ProvenanceFile(filename, shortname, project)
+
try:
with open(filename) as f:
- return load_data(f, shortname=shortname, copy_tree=copy_tree)
+ return load_data(f, file, copy_tree=copy_tree)
except FileNotFoundError as e:
raise LoadError(LoadErrorReason.MISSING_FILE,
"Could not find file at {}".format(filename)) from e
@@ -193,7 +208,7 @@ def load(filename, shortname=None, copy_tree=False):
# Like load(), but doesnt require the data to be in a file
#
-def load_data(data, shortname=None, copy_tree=False):
+def load_data(data, file=None, copy_tree=False):
try:
contents = yaml.load(data, yaml.loader.RoundTripLoader, preserve_quotes=True)
@@ -208,9 +223,9 @@ def load_data(data, shortname=None, copy_tree=False):
else:
raise LoadError(LoadErrorReason.INVALID_YAML,
"YAML file has content of type '{}' instead of expected type 'dict': {}"
- .format(type(contents).__name__, shortname))
+ .format(type(contents).__name__, file.name))
- return node_decorated_copy(shortname, contents, copy_tree=copy_tree)
+ return node_decorated_copy(file, contents, copy_tree=copy_tree)
# Dumps a previously loaded YAML node to a file
@@ -506,7 +521,7 @@ def node_items(node):
def ensure_provenance(node):
provenance = node.get(PROVENANCE_KEY)
if not provenance:
- provenance = DictProvenance('', node, node)
+ provenance = DictProvenance(ProvenanceFile('', '', None), node, node)
node[PROVENANCE_KEY] = provenance
return provenance
diff --git a/buildstream/element.py b/buildstream/element.py
index bec87815c..e2a032197 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -211,6 +211,11 @@ class Element(Plugin):
super().__init__(meta.name, context, project, meta.provenance, "element")
+ self.__is_junction = meta.kind == "junction"
+
+ if not self.__is_junction:
+ project.ensure_fully_loaded()
+
self.normal_name = os.path.splitext(self.name.replace(os.sep, '-'))[0]
"""A normalized element name
@@ -903,16 +908,20 @@ class Element(Plugin):
@classmethod
def _new_from_meta(cls, meta, artifacts):
+ if not meta.first_pass:
+ meta.project.ensure_fully_loaded()
+
if meta in cls.__instantiated_elements:
return cls.__instantiated_elements[meta]
- project = meta.project
- element = project.create_element(artifacts, meta)
+ element = meta.project.create_element(artifacts, meta, first_pass=meta.first_pass)
cls.__instantiated_elements[meta] = element
# Instantiate sources
for meta_source in meta.sources:
- source = project.create_source(meta_source)
+ meta_source.first_pass = meta.kind == "junction"
+ source = meta.project.create_source(meta_source,
+ first_pass=meta.first_pass)
redundant_ref = source._load_ref()
element.__sources.append(source)
@@ -2166,16 +2175,21 @@ class Element(Plugin):
def __compose_default_splits(self, defaults):
project = self._get_project()
- project_splits = _yaml.node_chain_copy(project._splits)
element_public = _yaml.node_get(defaults, Mapping, 'public', default_value={})
element_bst = _yaml.node_get(element_public, Mapping, 'bst', default_value={})
element_splits = _yaml.node_get(element_bst, Mapping, 'split-rules', default_value={})
- # Extend project wide split rules with any split rules defined by the element
- _yaml.composite(project_splits, element_splits)
+ if self.__is_junction:
+ splits = _yaml.node_chain_copy(element_splits)
+ else:
+ assert project._splits is not None
+
+ splits = _yaml.node_chain_copy(project._splits)
+ # Extend project wide split rules with any split rules defined by the element
+ _yaml.composite(splits, element_splits)
- element_bst['split-rules'] = project_splits
+ element_bst['split-rules'] = splits
element_public['bst'] = element_bst
defaults['public'] = element_public
@@ -2199,7 +2213,11 @@ class Element(Plugin):
# Override the element's defaults with element specific
# overrides from the project.conf
project = self._get_project()
- elements = project.element_overrides
+ if self.__is_junction:
+ elements = project.first_pass_config.element_overrides
+ else:
+ elements = project.element_overrides
+
overrides = elements.get(self.get_kind())
if overrides:
_yaml.composite(defaults, overrides)
@@ -2212,10 +2230,14 @@ class Element(Plugin):
# creating sandboxes for this element
#
def __extract_environment(self, meta):
- project = self._get_project()
default_env = _yaml.node_get(self.__defaults, Mapping, 'environment', default_value={})
- environment = _yaml.node_chain_copy(project.base_environment)
+ if self.__is_junction:
+ environment = {}
+ else:
+ project = self._get_project()
+ environment = _yaml.node_chain_copy(project.base_environment)
+
_yaml.composite(environment, default_env)
_yaml.composite(environment, meta.environment)
_yaml.node_final_assertions(environment)
@@ -2228,8 +2250,13 @@ class Element(Plugin):
return final_env
def __extract_env_nocache(self, meta):
- project = self._get_project()
- project_nocache = project.base_env_nocache
+ if self.__is_junction:
+ project_nocache = []
+ else:
+ project = self._get_project()
+ project.ensure_fully_loaded()
+ project_nocache = project.base_env_nocache
+
default_nocache = _yaml.node_get(self.__defaults, list, 'environment-nocache', default_value=[])
element_nocache = meta.env_nocache
@@ -2244,10 +2271,15 @@ class Element(Plugin):
# substituting command strings to be run in the sandbox
#
def __extract_variables(self, meta):
- project = self._get_project()
default_vars = _yaml.node_get(self.__defaults, Mapping, 'variables', default_value={})
- variables = _yaml.node_chain_copy(project.base_variables)
+ project = self._get_project()
+ if self.__is_junction:
+ variables = _yaml.node_chain_copy(project.first_pass_config.base_variables)
+ else:
+ project.ensure_fully_loaded()
+ variables = _yaml.node_chain_copy(project.base_variables)
+
_yaml.composite(variables, default_vars)
_yaml.composite(variables, meta.variables)
_yaml.node_final_assertions(variables)
@@ -2271,13 +2303,18 @@ class Element(Plugin):
# Sandbox-specific configuration data, to be passed to the sandbox's constructor.
#
def __extract_sandbox_config(self, meta):
- project = self._get_project()
+ if self.__is_junction:
+ sandbox_config = {'build-uid': 0,
+ 'build-gid': 0}
+ else:
+ project = self._get_project()
+ project.ensure_fully_loaded()
+ sandbox_config = _yaml.node_chain_copy(project._sandbox)
# The default config is already composited with the project overrides
sandbox_defaults = _yaml.node_get(self.__defaults, Mapping, 'sandbox', default_value={})
sandbox_defaults = _yaml.node_chain_copy(sandbox_defaults)
- sandbox_config = _yaml.node_chain_copy(project._sandbox)
_yaml.composite(sandbox_config, sandbox_defaults)
_yaml.composite(sandbox_config, meta.sandbox)
_yaml.node_final_assertions(sandbox_config)
diff --git a/buildstream/source.py b/buildstream/source.py
index 2f3f1c281..d58bfe2a3 100644
--- a/buildstream/source.py
+++ b/buildstream/source.py
@@ -227,8 +227,10 @@ class Source(Plugin):
# Collect the composited element configuration and
# ask the element to configure itself.
- self.__init_defaults()
+ self.__init_defaults(meta)
self.__config = self.__extract_config(meta)
+ self.__first_pass = meta.first_pass
+
self.configure(self.__config)
COMMON_CONFIG_KEYS = ['kind', 'directory']
@@ -454,7 +456,7 @@ class Source(Plugin):
self.__expected_alias = url_alias
project = self._get_project()
- return project.translate_url(url)
+ return project.translate_url(url, first_pass=self.__first_pass)
def get_project_directory(self):
"""Fetch the project base directory
@@ -524,7 +526,7 @@ class Source(Plugin):
for fetcher in source_fetchers:
alias = fetcher._get_alias()
success = False
- for uri in project.get_alias_uris(alias):
+ for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
try:
fetcher.fetch(uri)
# FIXME: Need to consider temporary vs. permanent failures,
@@ -538,13 +540,17 @@ class Source(Plugin):
raise last_error
else:
alias = self._get_alias()
- if not project.mirrors or not alias:
+ if self.__first_pass:
+ mirrors = project.first_pass_config.mirrors
+ else:
+ mirrors = project.config.mirrors
+ if not mirrors or not alias:
self.fetch()
return
context = self._get_context()
source_kind = type(self)
- for uri in project.get_alias_uris(alias):
+ for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
new_source = source_kind(context, project, self.__meta,
alias_override=(alias, uri))
new_source._preflight()
@@ -739,24 +745,29 @@ class Source(Plugin):
#
# Step 3 - Apply the change in project data
#
- if project is toplevel:
- if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
- do_save_refs(toplevel_refs)
- else:
+ if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
+ do_save_refs(toplevel_refs)
+ else:
+ if provenance.filename.project is toplevel:
# Save the ref in the originating file
#
- fullname = os.path.join(toplevel.element_path, provenance.filename)
try:
- _yaml.dump(provenance.toplevel, fullname)
+ _yaml.dump(_yaml.node_sanitize(provenance.toplevel), provenance.filename.name)
except OSError as e:
raise SourceError("{}: Error saving source reference to '{}': {}"
- .format(self, provenance.filename, e),
+ .format(self, provenance.filename.name, e),
reason="save-ref-error") from e
- else:
- if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
- do_save_refs(toplevel_refs)
- else:
+ elif provenance.filename.project is project:
self.warn("{}: Not persisting new reference in junctioned project".format(self))
+ elif provenance.filename.project is None:
+ assert provenance.filename.name == ''
+ assert provenance.filename.shortname == ''
+ raise SourceError("{}: Error saving source reference to synthetic node."
+ .format(self))
+ else:
+ raise SourceError("{}: Cannot track source in a fragment from a junction"
+ .format(provenance.filename.shortname),
+ reason="tracking-junction-fragment")
return changed
@@ -779,7 +790,7 @@ class Source(Plugin):
def _get_alias(self):
alias = self.__expected_alias
project = self._get_project()
- if project.get_alias_uri(alias):
+ if project.get_alias_uri(alias, first_pass=self.__first_pass):
# The alias must already be defined in the project's aliases
# otherwise http://foo gets treated like it contains an alias
return alias
@@ -795,7 +806,11 @@ class Source(Plugin):
project = self._get_project()
# If there are no mirrors, or no aliases to replace, there's nothing to do here.
alias = self._get_alias()
- if not project.mirrors or not alias:
+ if self.__first_pass:
+ mirrors = project.first_pass_config.mirrors
+ else:
+ mirrors = project.config.mirrors
+ if not mirrors or not alias:
return self.track()
context = self._get_context()
@@ -803,7 +818,7 @@ class Source(Plugin):
# NOTE: We are assuming here that tracking only requires substituting the
# first alias used
- for uri in reversed(project.get_alias_uris(alias)):
+ for uri in reversed(project.get_alias_uris(alias, first_pass=self.__first_pass)):
new_source = source_kind(context, project, self.__meta,
alias_override=(alias, uri))
new_source._preflight()
@@ -831,10 +846,13 @@ class Source(Plugin):
reason="ensure-stage-dir-fail") from e
return directory
- def __init_defaults(self):
+ def __init_defaults(self, meta):
if not self.__defaults_set:
project = self._get_project()
- sources = project.source_overrides
+ if meta.first_pass:
+ sources = project.first_pass_config.source_overrides
+ else:
+ sources = project.source_overrides
type(self).__defaults = sources.get(self.get_kind(), {})
type(self).__defaults_set = True
diff --git a/doc/source/format_intro.rst b/doc/source/format_intro.rst
index b1780f9dc..23c37aeae 100644
--- a/doc/source/format_intro.rst
+++ b/doc/source/format_intro.rst
@@ -289,3 +289,47 @@ free form and not validated.
# This element's `make install` is broken, replace it.
(=):
- cp src/program %{bindir}
+
+(@) Include
+~~~~~~~~~~~
+Indicates that content should be loaded from files.
+
+This include directive expects a string, or a list of strings when
+including multiple files. Each of these strings represent a project
+relative filename to include. Files can be included from subprojects
+by prefixing the string with the locally defined :mod:`junction
+element <elements.junction>` and a colon (':').
+
+The include directive can be used in any dictionary declared in the
+:ref:`project.conf <projectconf>`, in any :ref:`.bst file
+<format_basics>`, or recursively included in another include file.
+
+The including YAML fragment has priority over the files it includes,
+and overrides any values introduced by the includes. When including
+multiple files, files are included in the order they are declared in
+the include list, and each subsequent include file takes priority over
+the previous one.
+
+.. important::
+
+ Cross junction include files are not processed when loading
+ :mod:`junction elements <elements.junction>`. Variables,
+ :ref:`element overrides <project_element_overrides>`, :ref:`source
+ overrides <project_source_overrides>` and :ref:`mirrors
+ <project_essentials_mirrors>` used in the declaration of a junction
+ must be declared in the :ref:`project.conf <projectconf>` or in
+ included files which are local to the project declaring the
+ junction itself.
+
+:mod:`Junction elements <elements.junction>` cannot use include directives.
+
+**Example:**
+
+.. code:: yaml
+
+ elements:
+ (@): junction.bst:includes/element-overrides.bst
+
+.. note::
+
+ The include directive is available since :ref:`format version 12 <project_format_version>`
diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
index 079e511ef..f59474708 100644
--- a/tests/artifactcache/config.py
+++ b/tests/artifactcache/config.py
@@ -98,6 +98,7 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user
context = Context()
context.load(config=user_config_file)
project = Project(str(project_dir), context)
+ project.ensure_fully_loaded()
# Use the helper from the artifactcache module to parse our configuration.
parsed_cache_specs = _configured_remote_artifact_cache_specs(context, project)
diff --git a/tests/format/include.py b/tests/format/include.py
new file mode 100644
index 000000000..36e723ed0
--- /dev/null
+++ b/tests/format/include.py
@@ -0,0 +1,263 @@
+import os
+import pytest
+from buildstream import _yaml
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
+from tests.testutils import cli, generate_junction, create_repo
+
+
+# Project directory
+DATA_DIR = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)),
+ 'include'
+)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_project_file(cli, datafiles):
+ project = os.path.join(str(datafiles), 'file')
+ result = cli.run(project=project, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{vars}',
+ 'element.bst'])
+ result.assert_success()
+ loaded = _yaml.load_data(result.output)
+ assert loaded['included'] == 'True'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_junction_file(cli, tmpdir, datafiles):
+ project = os.path.join(str(datafiles), 'junction')
+
+ generate_junction(tmpdir,
+ os.path.join(project, 'subproject'),
+ os.path.join(project, 'junction.bst'),
+ store_ref=True)
+
+ result = cli.run(project=project, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{vars}',
+ 'element.bst'])
+ result.assert_success()
+ loaded = _yaml.load_data(result.output)
+ assert loaded['included'] == 'True'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_junction_options(cli, tmpdir, datafiles):
+ project = os.path.join(str(datafiles), 'options')
+
+ result = cli.run(project=project, args=[
+ '-o', 'build_arch', 'x86_64',
+ 'show',
+ '--deps', 'none',
+ '--format', '%{vars}',
+ 'element.bst'])
+ result.assert_success()
+ loaded = _yaml.load_data(result.output)
+ assert loaded['build_arch'] == 'x86_64'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_junction_element_partial_project(cli, tmpdir, datafiles):
+ """
+    Junction elements never depend on the fully include-processed project.
+ """
+
+ project = os.path.join(str(datafiles), 'junction')
+
+ subproject_path = os.path.join(project, 'subproject')
+ junction_path = os.path.join(project, 'junction.bst')
+
+ repo = create_repo('git', str(tmpdir))
+
+ ref = repo.create(subproject_path)
+
+ element = {
+ 'kind': 'junction',
+ 'sources': [
+ repo.source_config(ref=ref)
+ ]
+ }
+ _yaml.dump(element, junction_path)
+
+ result = cli.run(project=project, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{vars}',
+ 'junction.bst'])
+ result.assert_success()
+ loaded = _yaml.load_data(result.output)
+ assert 'included' not in loaded
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_junction_element_not_partial_project_file(cli, tmpdir, datafiles):
+ """
+    Includes which are local to the project are still processed for junction elements.
+ """
+
+ project = os.path.join(str(datafiles), 'file_with_subproject')
+
+ subproject_path = os.path.join(project, 'subproject')
+ junction_path = os.path.join(project, 'junction.bst')
+
+ repo = create_repo('git', str(tmpdir))
+
+ ref = repo.create(subproject_path)
+
+ element = {
+ 'kind': 'junction',
+ 'sources': [
+ repo.source_config(ref=ref)
+ ]
+ }
+ _yaml.dump(element, junction_path)
+
+ result = cli.run(project=project, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{vars}',
+ 'junction.bst'])
+ result.assert_success()
+ loaded = _yaml.load_data(result.output)
+ assert 'included' in loaded
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_element_overrides(cli, tmpdir, datafiles):
+ project = os.path.join(str(datafiles), 'overrides')
+
+ result = cli.run(project=project, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{vars}',
+ 'element.bst'])
+ result.assert_success()
+ loaded = _yaml.load_data(result.output)
+ assert 'manual_main_override' in loaded
+ assert 'manual_included_override' in loaded
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_element_overrides_composition(cli, tmpdir, datafiles):
+ project = os.path.join(str(datafiles), 'overrides')
+
+ result = cli.run(project=project, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{config}',
+ 'element.bst'])
+ result.assert_success()
+ loaded = _yaml.load_data(result.output)
+ assert 'build-commands' in loaded
+ assert loaded['build-commands'] == ['first', 'second']
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_element_overrides_sub_include(cli, tmpdir, datafiles):
+ project = os.path.join(str(datafiles), 'sub-include')
+
+ result = cli.run(project=project, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{vars}',
+ 'element.bst'])
+ result.assert_success()
+ loaded = _yaml.load_data(result.output)
+ assert 'included' in loaded
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_junction_do_not_use_included_overrides(cli, tmpdir, datafiles):
+ project = os.path.join(str(datafiles), 'overrides-junction')
+
+ generate_junction(tmpdir,
+ os.path.join(project, 'subproject'),
+ os.path.join(project, 'junction.bst'),
+ store_ref=True)
+
+ result = cli.run(project=project, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{vars}',
+ 'junction.bst'])
+ result.assert_success()
+ loaded = _yaml.load_data(result.output)
+ assert 'main_override' in loaded
+ assert 'included_override' not in loaded
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_conditional_in_fragment(cli, tmpdir, datafiles):
+ project = os.path.join(str(datafiles), 'conditional')
+
+ result = cli.run(project=project, args=[
+ '-o', 'build_arch', 'x86_64',
+ 'show',
+ '--deps', 'none',
+ '--format', '%{vars}',
+ 'element.bst'])
+ result.assert_success()
+ loaded = _yaml.load_data(result.output)
+ assert 'size' in loaded
+ assert loaded['size'] == '8'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_inner(cli, datafiles):
+ project = os.path.join(str(datafiles), 'inner')
+ result = cli.run(project=project, args=[
+ '-o', 'build_arch', 'x86_64',
+ 'show',
+ '--deps', 'none',
+ '--format', '%{vars}',
+ 'element.bst'])
+ result.assert_success()
+ loaded = _yaml.load_data(result.output)
+ assert loaded['build_arch'] == 'x86_64'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_recursive_include(cli, tmpdir, datafiles):
+ project = os.path.join(str(datafiles), 'recursive')
+
+ result = cli.run(project=project, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{vars}',
+ 'element.bst'])
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.RECURSIVE_INCLUDE)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_local_to_junction(cli, tmpdir, datafiles):
+ project = os.path.join(str(datafiles), 'local_to_junction')
+
+ generate_junction(tmpdir,
+ os.path.join(project, 'subproject'),
+ os.path.join(project, 'junction.bst'),
+ store_ref=True)
+
+ result = cli.run(project=project, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{vars}',
+ 'element.bst'])
+ result.assert_success()
+ loaded = _yaml.load_data(result.output)
+ assert loaded['included'] == 'True'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_string_project_file(cli, datafiles):
+ project = os.path.join(str(datafiles), 'string')
+ result = cli.run(project=project, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{vars}',
+ 'element.bst'])
+ result.assert_success()
+ loaded = _yaml.load_data(result.output)
+ assert loaded['included'] == 'True'
diff --git a/tests/format/include/conditional/element.bst b/tests/format/include/conditional/element.bst
new file mode 100644
index 000000000..4d7f70266
--- /dev/null
+++ b/tests/format/include/conditional/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/conditional/extra_conf.yml b/tests/format/include/conditional/extra_conf.yml
new file mode 100644
index 000000000..dd58c9855
--- /dev/null
+++ b/tests/format/include/conditional/extra_conf.yml
@@ -0,0 +1,6 @@
+variables:
+ (?):
+ - build_arch == "i586":
+ size: "4"
+ - build_arch == "x86_64":
+ size: "8"
diff --git a/tests/format/include/conditional/project.conf b/tests/format/include/conditional/project.conf
new file mode 100644
index 000000000..cb54779d3
--- /dev/null
+++ b/tests/format/include/conditional/project.conf
@@ -0,0 +1,13 @@
+name: test
+
+options:
+ build_arch:
+ type: arch
+ description: Architecture
+ variable: build_arch
+ values:
+ - i586
+ - x86_64
+
+(@):
+ - extra_conf.yml
diff --git a/tests/format/include/file/element.bst b/tests/format/include/file/element.bst
new file mode 100644
index 000000000..4d7f70266
--- /dev/null
+++ b/tests/format/include/file/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/file/extra_conf.yml b/tests/format/include/file/extra_conf.yml
new file mode 100644
index 000000000..404ecd6dd
--- /dev/null
+++ b/tests/format/include/file/extra_conf.yml
@@ -0,0 +1,2 @@
+variables:
+ included: 'True'
diff --git a/tests/format/include/file/project.conf b/tests/format/include/file/project.conf
new file mode 100644
index 000000000..a7791a416
--- /dev/null
+++ b/tests/format/include/file/project.conf
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+ - extra_conf.yml
diff --git a/tests/format/include/file_with_subproject/element.bst b/tests/format/include/file_with_subproject/element.bst
new file mode 100644
index 000000000..4d7f70266
--- /dev/null
+++ b/tests/format/include/file_with_subproject/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/file_with_subproject/extra_conf.yml b/tests/format/include/file_with_subproject/extra_conf.yml
new file mode 100644
index 000000000..404ecd6dd
--- /dev/null
+++ b/tests/format/include/file_with_subproject/extra_conf.yml
@@ -0,0 +1,2 @@
+variables:
+ included: 'True'
diff --git a/tests/format/include/file_with_subproject/project.bst b/tests/format/include/file_with_subproject/project.bst
new file mode 100644
index 000000000..4836c5f8b
--- /dev/null
+++ b/tests/format/include/file_with_subproject/project.bst
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+ - junction.bst:extra_conf.yml
diff --git a/tests/format/include/file_with_subproject/project.conf b/tests/format/include/file_with_subproject/project.conf
new file mode 100644
index 000000000..a7791a416
--- /dev/null
+++ b/tests/format/include/file_with_subproject/project.conf
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+ - extra_conf.yml
diff --git a/tests/format/include/file_with_subproject/subproject/project.conf b/tests/format/include/file_with_subproject/subproject/project.conf
new file mode 100644
index 000000000..7a6655421
--- /dev/null
+++ b/tests/format/include/file_with_subproject/subproject/project.conf
@@ -0,0 +1 @@
+name: test-sub
diff --git a/tests/format/include/inner/element.bst b/tests/format/include/inner/element.bst
new file mode 100644
index 000000000..4d7f70266
--- /dev/null
+++ b/tests/format/include/inner/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/inner/extra_conf.yml b/tests/format/include/inner/extra_conf.yml
new file mode 100644
index 000000000..4c1847b5f
--- /dev/null
+++ b/tests/format/include/inner/extra_conf.yml
@@ -0,0 +1,7 @@
+build_arch:
+ type: arch
+ description: Architecture
+ variable: build_arch
+ values:
+ - i586
+ - x86_64
diff --git a/tests/format/include/inner/project.conf b/tests/format/include/inner/project.conf
new file mode 100644
index 000000000..8bdfc428a
--- /dev/null
+++ b/tests/format/include/inner/project.conf
@@ -0,0 +1,5 @@
+name: test
+
+options:
+ (@):
+ - extra_conf.yml
diff --git a/tests/format/include/junction/element.bst b/tests/format/include/junction/element.bst
new file mode 100644
index 000000000..4d7f70266
--- /dev/null
+++ b/tests/format/include/junction/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/junction/project.conf b/tests/format/include/junction/project.conf
new file mode 100644
index 000000000..4836c5f8b
--- /dev/null
+++ b/tests/format/include/junction/project.conf
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+ - junction.bst:extra_conf.yml
diff --git a/tests/format/include/junction/subproject/extra_conf.yml b/tests/format/include/junction/subproject/extra_conf.yml
new file mode 100644
index 000000000..404ecd6dd
--- /dev/null
+++ b/tests/format/include/junction/subproject/extra_conf.yml
@@ -0,0 +1,2 @@
+variables:
+ included: 'True'
diff --git a/tests/format/include/junction/subproject/project.conf b/tests/format/include/junction/subproject/project.conf
new file mode 100644
index 000000000..7a6655421
--- /dev/null
+++ b/tests/format/include/junction/subproject/project.conf
@@ -0,0 +1 @@
+name: test-sub
diff --git a/tests/format/include/local_to_junction/element.bst b/tests/format/include/local_to_junction/element.bst
new file mode 100644
index 000000000..4d7f70266
--- /dev/null
+++ b/tests/format/include/local_to_junction/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/local_to_junction/project.conf b/tests/format/include/local_to_junction/project.conf
new file mode 100644
index 000000000..4836c5f8b
--- /dev/null
+++ b/tests/format/include/local_to_junction/project.conf
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+ - junction.bst:extra_conf.yml
diff --git a/tests/format/include/local_to_junction/subproject/extra_conf.yml b/tests/format/include/local_to_junction/subproject/extra_conf.yml
new file mode 100644
index 000000000..1c0b8ccdd
--- /dev/null
+++ b/tests/format/include/local_to_junction/subproject/extra_conf.yml
@@ -0,0 +1,2 @@
+(@):
+ - internal.yml
diff --git a/tests/format/include/local_to_junction/subproject/internal.yml b/tests/format/include/local_to_junction/subproject/internal.yml
new file mode 100644
index 000000000..404ecd6dd
--- /dev/null
+++ b/tests/format/include/local_to_junction/subproject/internal.yml
@@ -0,0 +1,2 @@
+variables:
+ included: 'True'
diff --git a/tests/format/include/local_to_junction/subproject/project.conf b/tests/format/include/local_to_junction/subproject/project.conf
new file mode 100644
index 000000000..7a6655421
--- /dev/null
+++ b/tests/format/include/local_to_junction/subproject/project.conf
@@ -0,0 +1 @@
+name: test-sub
diff --git a/tests/format/include/options/element.bst b/tests/format/include/options/element.bst
new file mode 100644
index 000000000..4d7f70266
--- /dev/null
+++ b/tests/format/include/options/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/options/extra_conf.yml b/tests/format/include/options/extra_conf.yml
new file mode 100644
index 000000000..ad1401e0a
--- /dev/null
+++ b/tests/format/include/options/extra_conf.yml
@@ -0,0 +1,8 @@
+options:
+ build_arch:
+ type: arch
+ description: Architecture
+ variable: build_arch
+ values:
+ - i586
+ - x86_64
diff --git a/tests/format/include/options/project.conf b/tests/format/include/options/project.conf
new file mode 100644
index 000000000..a7791a416
--- /dev/null
+++ b/tests/format/include/options/project.conf
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+ - extra_conf.yml
diff --git a/tests/format/include/overrides-junction/element.bst b/tests/format/include/overrides-junction/element.bst
new file mode 100644
index 000000000..4d7f70266
--- /dev/null
+++ b/tests/format/include/overrides-junction/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/overrides-junction/project.conf b/tests/format/include/overrides-junction/project.conf
new file mode 100644
index 000000000..d03bec634
--- /dev/null
+++ b/tests/format/include/overrides-junction/project.conf
@@ -0,0 +1,20 @@
+name: test
+
+elements:
+ junction:
+ variables:
+ main_override: True
+ manual:
+ variables:
+ manual_main_override: True
+ config:
+ build-commands:
+ - "first"
+
+sources:
+ git:
+ variables:
+ from_main: True
+
+(@):
+ - junction.bst:extra_conf.yml
diff --git a/tests/format/include/overrides-junction/subproject/extra_conf.yml b/tests/format/include/overrides-junction/subproject/extra_conf.yml
new file mode 100644
index 000000000..3cd3530c5
--- /dev/null
+++ b/tests/format/include/overrides-junction/subproject/extra_conf.yml
@@ -0,0 +1,16 @@
+elements:
+ junction:
+ variables:
+ included_override: True
+ manual:
+ variables:
+ manual_included_override: True
+ config:
+ build-commands:
+ (>):
+ - "second"
+
+sources:
+ git:
+ variables:
+ from_included: True
diff --git a/tests/format/include/overrides-junction/subproject/project.conf b/tests/format/include/overrides-junction/subproject/project.conf
new file mode 100644
index 000000000..7a6655421
--- /dev/null
+++ b/tests/format/include/overrides-junction/subproject/project.conf
@@ -0,0 +1 @@
+name: test-sub
diff --git a/tests/format/include/overrides/element.bst b/tests/format/include/overrides/element.bst
new file mode 100644
index 000000000..4d7f70266
--- /dev/null
+++ b/tests/format/include/overrides/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/overrides/extra_conf.yml b/tests/format/include/overrides/extra_conf.yml
new file mode 100644
index 000000000..ccb874bd7
--- /dev/null
+++ b/tests/format/include/overrides/extra_conf.yml
@@ -0,0 +1,15 @@
+elements:
+ junction:
+ variables:
+ included_override: True
+ manual:
+ variables:
+ manual_included_override: True
+ config:
+ build-commands:
+ - "ignored"
+
+sources:
+ git:
+ variables:
+ from_included: True
diff --git a/tests/format/include/overrides/extra_conf2.yml b/tests/format/include/overrides/extra_conf2.yml
new file mode 100644
index 000000000..750abd725
--- /dev/null
+++ b/tests/format/include/overrides/extra_conf2.yml
@@ -0,0 +1,5 @@
+elements:
+ manual:
+ config:
+ build-commands:
+ - "first"
diff --git a/tests/format/include/overrides/project.conf b/tests/format/include/overrides/project.conf
new file mode 100644
index 000000000..fa3c75703
--- /dev/null
+++ b/tests/format/include/overrides/project.conf
@@ -0,0 +1,22 @@
+name: test
+
+elements:
+ junction:
+ variables:
+ main_override: True
+ manual:
+ variables:
+ manual_main_override: True
+ config:
+ build-commands:
+ (>):
+ - "second"
+
+sources:
+ git:
+ variables:
+ from_main: True
+
+(@):
+ - extra_conf.yml
+ - extra_conf2.yml
diff --git a/tests/format/include/overrides/subproject/project.conf b/tests/format/include/overrides/subproject/project.conf
new file mode 100644
index 000000000..7a6655421
--- /dev/null
+++ b/tests/format/include/overrides/subproject/project.conf
@@ -0,0 +1 @@
+name: test-sub
diff --git a/tests/format/include/recursive/element.bst b/tests/format/include/recursive/element.bst
new file mode 100644
index 000000000..4d7f70266
--- /dev/null
+++ b/tests/format/include/recursive/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/recursive/extra_conf.yml b/tests/format/include/recursive/extra_conf.yml
new file mode 100644
index 000000000..57db0d3c6
--- /dev/null
+++ b/tests/format/include/recursive/extra_conf.yml
@@ -0,0 +1,2 @@
+(@):
+ - extra_conf2.yml
diff --git a/tests/format/include/recursive/extra_conf2.yml b/tests/format/include/recursive/extra_conf2.yml
new file mode 100644
index 000000000..e8dd5e2ed
--- /dev/null
+++ b/tests/format/include/recursive/extra_conf2.yml
@@ -0,0 +1,2 @@
+(@):
+ - extra_conf.yml
diff --git a/tests/format/include/recursive/project.conf b/tests/format/include/recursive/project.conf
new file mode 100644
index 000000000..a7791a416
--- /dev/null
+++ b/tests/format/include/recursive/project.conf
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+ - extra_conf.yml
diff --git a/tests/format/include/string/element.bst b/tests/format/include/string/element.bst
new file mode 100644
index 000000000..4d7f70266
--- /dev/null
+++ b/tests/format/include/string/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/string/extra_conf.yml b/tests/format/include/string/extra_conf.yml
new file mode 100644
index 000000000..404ecd6dd
--- /dev/null
+++ b/tests/format/include/string/extra_conf.yml
@@ -0,0 +1,2 @@
+variables:
+ included: 'True'
diff --git a/tests/format/include/string/project.conf b/tests/format/include/string/project.conf
new file mode 100644
index 000000000..6ee9988e9
--- /dev/null
+++ b/tests/format/include/string/project.conf
@@ -0,0 +1,3 @@
+name: test
+
+(@): extra_conf.yml
diff --git a/tests/format/include/sub-include/element.bst b/tests/format/include/sub-include/element.bst
new file mode 100644
index 000000000..4d7f70266
--- /dev/null
+++ b/tests/format/include/sub-include/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/sub-include/manual_conf.yml b/tests/format/include/sub-include/manual_conf.yml
new file mode 100644
index 000000000..9c2c0dd34
--- /dev/null
+++ b/tests/format/include/sub-include/manual_conf.yml
@@ -0,0 +1,2 @@
+variables:
+ included: True
diff --git a/tests/format/include/sub-include/project.conf b/tests/format/include/sub-include/project.conf
new file mode 100644
index 000000000..7f7df84c8
--- /dev/null
+++ b/tests/format/include/sub-include/project.conf
@@ -0,0 +1,6 @@
+name: test
+
+elements:
+ manual:
+ (@):
+ - manual_conf.yml
diff --git a/tests/format/include_composition.py b/tests/format/include_composition.py
new file mode 100644
index 000000000..b73fca392
--- /dev/null
+++ b/tests/format/include_composition.py
@@ -0,0 +1,131 @@
+import os
+from buildstream._context import Context
+from buildstream._project import Project
+from buildstream._includes import Includes
+from buildstream import _yaml
+
+
+def make_includes(basedir):
+ _yaml.dump({'name': 'test'},
+ os.path.join(basedir, 'project.conf'))
+ context = Context()
+ project = Project(basedir, context)
+ loader = project.loader
+ return Includes(loader)
+
+
+def test_main_has_priority(tmpdir):
+ includes = make_includes(str(tmpdir))
+
+ _yaml.dump({'(@)': ['a.yml'],
+ 'test': ['main']},
+ str(tmpdir.join('main.yml')))
+
+ main = _yaml.load(str(tmpdir.join('main.yml')))
+
+ _yaml.dump({'test': ['a']},
+ str(tmpdir.join('a.yml')))
+
+ includes.process(main)
+
+ assert main['test'] == ['main']
+
+
+def test_include_cannot_append(tmpdir):
+ includes = make_includes(str(tmpdir))
+
+ _yaml.dump({'(@)': ['a.yml'],
+ 'test': ['main']},
+ str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
+
+ _yaml.dump({'test': {'(>)': ['a']}},
+ str(tmpdir.join('a.yml')))
+
+ includes.process(main)
+
+ assert main['test'] == ['main']
+
+
+def test_main_can_append(tmpdir):
+ includes = make_includes(str(tmpdir))
+
+ _yaml.dump({'(@)': ['a.yml'],
+ 'test': {'(>)': ['main']}},
+ str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
+
+ _yaml.dump({'test': ['a']},
+ str(tmpdir.join('a.yml')))
+
+ includes.process(main)
+
+ assert main['test'] == ['a', 'main']
+
+
+def test_sibling_cannot_append_backward(tmpdir):
+ includes = make_includes(str(tmpdir))
+
+ _yaml.dump({'(@)': ['a.yml', 'b.yml']},
+ str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
+
+ _yaml.dump({'test': {'(>)': ['a']}},
+ str(tmpdir.join('a.yml')))
+ _yaml.dump({'test': ['b']},
+ str(tmpdir.join('b.yml')))
+
+ includes.process(main)
+
+ assert main['test'] == ['b']
+
+
+def test_sibling_can_append_forward(tmpdir):
+ includes = make_includes(str(tmpdir))
+
+ _yaml.dump({'(@)': ['a.yml', 'b.yml']},
+ str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
+
+ _yaml.dump({'test': ['a']},
+ str(tmpdir.join('a.yml')))
+ _yaml.dump({'test': {'(>)': ['b']}},
+ str(tmpdir.join('b.yml')))
+
+ includes.process(main)
+
+ assert main['test'] == ['a', 'b']
+
+
+def test_latest_sibling_has_priority(tmpdir):
+ includes = make_includes(str(tmpdir))
+
+ _yaml.dump({'(@)': ['a.yml', 'b.yml']},
+ str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
+
+ _yaml.dump({'test': ['a']},
+ str(tmpdir.join('a.yml')))
+ _yaml.dump({'test': ['b']},
+ str(tmpdir.join('b.yml')))
+
+ includes.process(main)
+
+ assert main['test'] == ['b']
+
+
+def test_main_keeps_keys(tmpdir):
+ includes = make_includes(str(tmpdir))
+
+ _yaml.dump({'(@)': ['a.yml'],
+ 'something': 'else'},
+ str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
+
+ _yaml.dump({'test': ['a']},
+ str(tmpdir.join('a.yml')))
+
+ includes.process(main)
+
+ assert main['test'] == ['a']
+ assert main['something'] == 'else'
diff --git a/tests/frontend/__init__.py b/tests/frontend/__init__.py
index 2eadf1519..8cf7625a9 100644
--- a/tests/frontend/__init__.py
+++ b/tests/frontend/__init__.py
@@ -1,5 +1,4 @@
import os
-from tests.testutils import create_repo
from buildstream import _yaml
@@ -9,36 +8,3 @@ def configure_project(path, config):
config['name'] = 'test'
config['element-path'] = 'elements'
_yaml.dump(config, os.path.join(path, 'project.conf'))
-
-
-# generate_junction()
-#
-# Generates a junction element with a git repository
-#
-# Args:
-# tmpdir: The tmpdir fixture, for storing the generated git repo
-# subproject_path: The path for the subproject, to add to the git repo
-# junction_path: The location to store the generated junction element
-# store_ref: Whether to store the ref in the junction.bst file
-#
-# Returns:
-# (str): The ref
-#
-def generate_junction(tmpdir, subproject_path, junction_path, *, store_ref=True):
- # Create a repo to hold the subproject and generate
- # a junction element for it
- #
- repo = create_repo('git', str(tmpdir))
- source_ref = ref = repo.create(subproject_path)
- if not store_ref:
- source_ref = None
-
- element = {
- 'kind': 'junction',
- 'sources': [
- repo.source_config(ref=source_ref)
- ]
- }
- _yaml.dump(element, junction_path)
-
- return ref
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index f27005ada..d0f52d6a7 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -2,12 +2,12 @@ import os
import tarfile
import hashlib
import pytest
-from tests.testutils import cli, create_repo, ALL_REPO_KINDS
+from tests.testutils import cli, create_repo, ALL_REPO_KINDS, generate_junction
from buildstream import _yaml
from buildstream._exceptions import ErrorDomain, LoadErrorReason
-from . import configure_project, generate_junction
+from . import configure_project
# Project directory
DATA_DIR = os.path.join(
diff --git a/tests/frontend/fetch.py b/tests/frontend/fetch.py
index ee3a3c3d5..e896f4a67 100644
--- a/tests/frontend/fetch.py
+++ b/tests/frontend/fetch.py
@@ -1,11 +1,11 @@
import os
import pytest
-from tests.testutils import cli, create_repo, ALL_REPO_KINDS
+from tests.testutils import cli, create_repo, ALL_REPO_KINDS, generate_junction
from buildstream import _yaml
from buildstream._exceptions import ErrorDomain, LoadErrorReason
-from . import configure_project, generate_junction
+from . import configure_project
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
diff --git a/tests/frontend/mirror.py b/tests/frontend/mirror.py
index 62c796ab8..f37cc18af 100644
--- a/tests/frontend/mirror.py
+++ b/tests/frontend/mirror.py
@@ -1,10 +1,10 @@
import os
import pytest
-from tests.testutils import cli, create_repo, ALL_REPO_KINDS
+from tests.testutils import cli, create_repo, ALL_REPO_KINDS, generate_junction
from buildstream import _yaml
-
+from buildstream._exceptions import ErrorDomain
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
@@ -400,3 +400,162 @@ def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
source = new_element['sources'][0]
if 'ref' in source:
assert source['ref'] == mirror_ref
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_from_includes(cli, tmpdir, datafiles, kind):
+ if kind == 'git':
+ # FIXME: Mirroring fallback does not work with git because it tries to
+ # fetch submodules on upstream.
+ pytest.skip("Bug #537 - Mirror fallback does not work for git")
+ if kind == 'ostree':
+ # FIXME: Mirroring fallback fails with ostree
+ pytest.skip("Bug #538 - ostree mirror fallback breaks assertion")
+
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects of the upstream and mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_ref = upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ config_project_dir = str(tmpdir.join('config'))
+ os.makedirs(config_project_dir, exist_ok=True)
+ config_project = {
+ 'name': 'config'
+ }
+ _yaml.dump(config_project, os.path.join(config_project_dir, 'project.conf'))
+ extra_mirrors = {
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ }
+ }
+ ]
+ }
+ _yaml.dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
+ generate_junction(str(tmpdir.join('config_repo')),
+ config_project_dir,
+ os.path.join(element_dir, 'config.bst'))
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: upstream_map + "/"
+ },
+ '(@)': [
+ 'config.bst:mirrors.yml'
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ # Now make the upstream unavailable.
+ os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
+ result = cli.run(project=project_dir, args=['fetch', element_name])
+ result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects of the upstream and mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_ref = upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+
+ element = {
+ 'kind': 'junction',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ config_project_dir = str(tmpdir.join('config'))
+ os.makedirs(config_project_dir, exist_ok=True)
+ config_project = {
+ 'name': 'config'
+ }
+ _yaml.dump(config_project, os.path.join(config_project_dir, 'project.conf'))
+ extra_mirrors = {
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ }
+ }
+ ]
+ }
+ _yaml.dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
+ generate_junction(str(tmpdir.join('config_repo')),
+ config_project_dir,
+ os.path.join(element_dir, 'config.bst'))
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: upstream_map + "/"
+ },
+ '(@)': [
+ 'config.bst:mirrors.yml'
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ # Now make the upstream unavailable.
+ os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
+ result = cli.run(project=project_dir, args=['fetch', element_name])
+ result.assert_main_error(ErrorDomain.STREAM, None)
+ # Now make the upstream available again.
+ os.rename('{}.bak'.format(upstream_repo.repo), upstream_repo.repo)
+ result = cli.run(project=project_dir, args=['fetch', element_name])
+ result.assert_success()
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 388aa103b..9d2d5d1a2 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -1,9 +1,8 @@
import os
import shutil
import pytest
-from tests.testutils import cli, create_artifact_share
+from tests.testutils import cli, create_artifact_share, generate_junction
-from . import generate_junction
# Project directory
DATA_DIR = os.path.join(
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index be324ca53..6ee301e6d 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -3,7 +3,8 @@ import pytest
from buildstream._exceptions import ErrorDomain
from tests.testutils import cli, create_artifact_share, create_element_size
-from . import configure_project, generate_junction
+from tests.testutils import generate_junction
+from . import configure_project
# Project directory
diff --git a/tests/frontend/show.py b/tests/frontend/show.py
index 80c48381a..ac2e71407 100644
--- a/tests/frontend/show.py
+++ b/tests/frontend/show.py
@@ -3,11 +3,11 @@ import sys
import shutil
import itertools
import pytest
-from tests.testutils import cli
+from tests.testutils import cli, generate_junction
from buildstream import _yaml
from buildstream._exceptions import ErrorDomain, LoadErrorReason
-from . import configure_project, generate_junction
+from . import configure_project
# Project directory
DATA_DIR = os.path.join(
diff --git a/tests/frontend/track.py b/tests/frontend/track.py
index 51768d650..1cf962f88 100644
--- a/tests/frontend/track.py
+++ b/tests/frontend/track.py
@@ -1,11 +1,11 @@
import os
import pytest
-from tests.testutils import cli, create_repo, ALL_REPO_KINDS
+from tests.testutils import cli, create_repo, ALL_REPO_KINDS, generate_junction
from buildstream._exceptions import ErrorDomain, LoadErrorReason
from buildstream import _yaml
-from . import configure_project, generate_junction
+from . import configure_project
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
@@ -480,3 +480,135 @@ def test_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'buildable'
assert os.path.exists(os.path.join(project, 'project.refs'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+ element_name = 'track-test-{}.bst'.format(kind)
+
+ configure_project(project, {
+ 'ref-storage': ref_storage
+ })
+
+ # Create our repo object of the given source type with
+ # the dev files, and then collect the initial ref.
+ #
+ repo = create_repo(kind, str(tmpdir))
+ ref = repo.create(dev_files_path)
+
+ # Generate the element
+ element = {
+ 'kind': 'import',
+ '(@)': ['elements/sources.yml']
+ }
+ sources = {
+ 'sources': [
+ repo.source_config()
+ ]
+ }
+
+ _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.dump(sources, os.path.join(element_path, 'sources.yml'))
+
+ # Assert that a fetch is needed
+ assert cli.get_element_state(project, element_name) == 'no reference'
+
+ # Now first try to track it
+ result = cli.run(project=project, args=['track', element_name])
+ result.assert_success()
+
+ # And now fetch it: The Source has probably already cached the
+ # latest ref locally, but it is not required to have cached
+ # the associated content of the latest ref at track time, that
+ # is the job of fetch.
+ result = cli.run(project=project, args=['fetch', element_name])
+ result.assert_success()
+
+ # Assert that we are now buildable because the source is
+ # now cached.
+ assert cli.get_element_state(project, element_name) == 'buildable'
+
+ # Assert there was a project.refs created, depending on the configuration
+ if ref_storage == 'project.refs':
+ assert os.path.exists(os.path.join(project, 'project.refs'))
+ else:
+ assert not os.path.exists(os.path.join(project, 'project.refs'))
+ new_sources = _yaml.load(os.path.join(element_path, 'sources.yml'))
+ assert 'sources' in new_sources
+ assert len(new_sources['sources']) == 1
+ assert 'ref' in new_sources['sources'][0]
+ assert ref == new_sources['sources'][0]['ref']
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+ element_name = 'track-test-{}.bst'.format(kind)
+ subproject_path = os.path.join(project, 'files', 'sub-project')
+ sub_element_path = os.path.join(subproject_path, 'elements')
+ junction_path = os.path.join(element_path, 'junction.bst')
+
+ configure_project(project, {
+ 'ref-storage': ref_storage
+ })
+
+ # Create our repo object of the given source type with
+ # the dev files, and then collect the initial ref.
+ #
+ repo = create_repo(kind, str(tmpdir.join('element_repo')))
+ ref = repo.create(dev_files_path)
+
+ # Generate the element
+ element = {
+ 'kind': 'import',
+ '(@)': ['junction.bst:elements/sources.yml']
+ }
+ sources = {
+ 'sources': [
+ repo.source_config()
+ ]
+ }
+
+ _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.dump(sources, os.path.join(sub_element_path, 'sources.yml'))
+
+ generate_junction(str(tmpdir.join('junction_repo')),
+ subproject_path, junction_path, store_ref=True)
+
+ result = cli.run(project=project, args=['track', 'junction.bst'])
+ result.assert_success()
+
+ # Assert that a fetch is needed
+ assert cli.get_element_state(project, element_name) == 'no reference'
+
+ # Now first try to track it
+ result = cli.run(project=project, args=['track', element_name])
+
+ # Assert there was a project.refs created, depending on the configuration
+ if ref_storage == 'inline':
+ # FIXME: We should expect an error. But only a warning is emitted
+ # result.assert_main_error(ErrorDomain.SOURCE, 'tracking-junction-fragment')
+
+ assert 'junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction' in result.stderr
+ else:
+ assert os.path.exists(os.path.join(project, 'project.refs'))
+
+ # And now fetch it: The Source has probably already cached the
+ # latest ref locally, but it is not required to have cached
+ # the associated content of the latest ref at track time, that
+ # is the job of fetch.
+ result = cli.run(project=project, args=['fetch', element_name])
+ result.assert_success()
+
+ # Assert that we are now buildable because the source is
+ # now cached.
+ assert cli.get_element_state(project, element_name) == 'buildable'
diff --git a/tests/frontend/track_cross_junction.py b/tests/frontend/track_cross_junction.py
index 34c39ddd2..423edbdef 100644
--- a/tests/frontend/track_cross_junction.py
+++ b/tests/frontend/track_cross_junction.py
@@ -1,10 +1,8 @@
import os
import pytest
-from tests.testutils import cli, create_repo, ALL_REPO_KINDS
+from tests.testutils import cli, create_repo, ALL_REPO_KINDS, generate_junction
from buildstream import _yaml
-from . import generate_junction
-
def generate_element(repo, element_path, dep_name=None):
element = {
diff --git a/tests/loader/__init__.py b/tests/loader/__init__.py
index d64b776a4..fcefdacf5 100644
--- a/tests/loader/__init__.py
+++ b/tests/loader/__init__.py
@@ -8,7 +8,7 @@ from buildstream._loader import Loader
# be removed in favor of testing the functionality via
# the CLI like in the frontend tests anyway.
#
-def make_loader(basedir, targets):
+def make_loader(basedir):
context = Context()
project = Project(basedir, context)
- return Loader(context, project, targets)
+ return project.loader
diff --git a/tests/loader/basics.py b/tests/loader/basics.py
index 3526697c5..d7fc28f83 100644
--- a/tests/loader/basics.py
+++ b/tests/loader/basics.py
@@ -18,9 +18,9 @@ DATA_DIR = os.path.join(
def test_one_file(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/onefile.bst'])
+ loader = make_loader(basedir)
- element = loader.load()[0]
+ element = loader.load(['elements/onefile.bst'])[0]
assert(isinstance(element, MetaElement))
assert(element.kind == 'pony')
@@ -30,10 +30,10 @@ def test_one_file(datafiles):
def test_missing_file(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/missing.bst'])
+ loader = make_loader(basedir)
with pytest.raises(LoadError) as exc:
- element = loader.load()[0]
+ element = loader.load(['elements/missing.bst'])[0]
assert (exc.value.reason == LoadErrorReason.MISSING_FILE)
@@ -42,10 +42,10 @@ def test_missing_file(datafiles):
def test_invalid_reference(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/badreference.bst'])
+ loader = make_loader(basedir)
with pytest.raises(LoadError) as exc:
- element = loader.load()[0]
+ element = loader.load(['elements/badreference.bst'])[0]
assert (exc.value.reason == LoadErrorReason.INVALID_YAML)
@@ -54,10 +54,10 @@ def test_invalid_reference(datafiles):
def test_invalid_yaml(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/badfile.bst'])
+ loader = make_loader(basedir)
with pytest.raises(LoadError) as exc:
- element = loader.load()[0]
+ element = loader.load(['elements/badfile.bst'])[0]
assert (exc.value.reason == LoadErrorReason.INVALID_YAML)
@@ -69,7 +69,8 @@ def test_fail_fullpath_target(datafiles):
fullpath = os.path.join(basedir, 'elements', 'onefile.bst')
with pytest.raises(LoadError) as exc:
- loader = make_loader(basedir, [fullpath])
+ loader = make_loader(basedir)
+ loader.load([fullpath])
assert (exc.value.reason == LoadErrorReason.INVALID_DATA)
@@ -78,10 +79,10 @@ def test_fail_fullpath_target(datafiles):
def test_invalid_key(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/invalidkey.bst'])
+ loader = make_loader(basedir)
with pytest.raises(LoadError) as exc:
- element = loader.load()[0]
+ element = loader.load(['elements/invalidkey.bst'])[0]
assert (exc.value.reason == LoadErrorReason.INVALID_DATA)
@@ -90,9 +91,9 @@ def test_invalid_key(datafiles):
def test_invalid_directory_load(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/'])
+ loader = make_loader(basedir)
with pytest.raises(LoadError) as exc:
- element = loader.load()[0]
+ element = loader.load(['elements/'])[0]
assert (exc.value.reason == LoadErrorReason.LOADING_DIRECTORY)
diff --git a/tests/loader/dependencies.py b/tests/loader/dependencies.py
index 0816e3cfb..4bb13a380 100644
--- a/tests/loader/dependencies.py
+++ b/tests/loader/dependencies.py
@@ -18,8 +18,8 @@ DATA_DIR = os.path.join(
def test_two_files(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/target.bst'])
- element = loader.load()[0]
+ loader = make_loader(basedir)
+ element = loader.load(['elements/target.bst'])[0]
assert(isinstance(element, MetaElement))
assert(element.kind == 'pony')
@@ -34,8 +34,8 @@ def test_two_files(datafiles):
def test_shared_dependency(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/shareddeptarget.bst'])
- element = loader.load()[0]
+ loader = make_loader(basedir)
+ element = loader.load(['elements/shareddeptarget.bst'])[0]
# Toplevel is 'pony' with 2 dependencies
#
@@ -77,8 +77,8 @@ def test_shared_dependency(datafiles):
def test_dependency_dict(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/target-depdict.bst'])
- element = loader.load()[0]
+ loader = make_loader(basedir)
+ element = loader.load(['elements/target-depdict.bst'])[0]
assert(isinstance(element, MetaElement))
assert(element.kind == 'pony')
@@ -92,10 +92,10 @@ def test_dependency_dict(datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_dependency_declaration(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/invaliddep.bst'])
+ loader = make_loader(basedir)
with pytest.raises(LoadError) as exc:
- element = loader.load()[0]
+ element = loader.load(['elements/invaliddep.bst'])[0]
assert (exc.value.reason == LoadErrorReason.INVALID_DATA)
@@ -103,10 +103,10 @@ def test_invalid_dependency_declaration(datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_circular_dependency(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/circulartarget.bst'])
+ loader = make_loader(basedir)
with pytest.raises(LoadError) as exc:
- element = loader.load()[0]
+ element = loader.load(['elements/circulartarget.bst'])[0]
assert (exc.value.reason == LoadErrorReason.CIRCULAR_DEPENDENCY)
@@ -114,10 +114,10 @@ def test_circular_dependency(datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_dependency_type(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/invaliddeptype.bst'])
+ loader = make_loader(basedir)
with pytest.raises(LoadError) as exc:
- element = loader.load()[0]
+ element = loader.load(['elements/invaliddeptype.bst'])[0]
assert (exc.value.reason == LoadErrorReason.INVALID_DATA)
@@ -125,8 +125,8 @@ def test_invalid_dependency_type(datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_build_dependency(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/builddep.bst'])
- element = loader.load()[0]
+ loader = make_loader(basedir)
+ element = loader.load(['elements/builddep.bst'])[0]
assert(isinstance(element, MetaElement))
assert(element.kind == 'pony')
@@ -141,8 +141,8 @@ def test_build_dependency(datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_runtime_dependency(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/runtimedep.bst'])
- element = loader.load()[0]
+ loader = make_loader(basedir)
+ element = loader.load(['elements/runtimedep.bst'])[0]
assert(isinstance(element, MetaElement))
assert(element.kind == 'pony')
@@ -157,8 +157,8 @@ def test_runtime_dependency(datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_build_runtime_dependency(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/target.bst'])
- element = loader.load()[0]
+ loader = make_loader(basedir)
+ element = loader.load(['elements/target.bst'])[0]
assert(isinstance(element, MetaElement))
assert(element.kind == 'pony')
@@ -174,8 +174,8 @@ def test_build_runtime_dependency(datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_all_dependency(datafiles):
basedir = os.path.join(datafiles.dirname, datafiles.basename)
- loader = make_loader(basedir, ['elements/alldep.bst'])
- element = loader.load()[0]
+ loader = make_loader(basedir)
+ element = loader.load(['elements/alldep.bst'])[0]
assert(isinstance(element, MetaElement))
assert(element.kind == 'pony')
diff --git a/tests/plugins/basics.py b/tests/plugins/basics.py
index 740a20c66..4c9cbafe4 100644
--- a/tests/plugins/basics.py
+++ b/tests/plugins/basics.py
@@ -49,7 +49,7 @@ def test_custom_source(plugin_fixture, datafiles):
datafiles.basename),
'plugins': {'foo': 0}
}]
- factory = SourceFactory(plugin_fixture['base'], plugins)
+ factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
assert(isinstance(factory, SourceFactory))
foo_type, _ = factory.lookup('foo')
@@ -64,7 +64,7 @@ def test_custom_element(plugin_fixture, datafiles):
datafiles.basename),
'plugins': {'foo': 0}
}]
- factory = ElementFactory(plugin_fixture['base'], plugins)
+ factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
assert(isinstance(factory, ElementFactory))
foo_type, _ = factory.lookup('foo')
@@ -101,7 +101,7 @@ def test_source_notatype(plugin_fixture, datafiles):
datafiles.basename),
'plugins': {'foo': 0}
}]
- factory = SourceFactory(plugin_fixture['base'], plugins)
+ factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError) as exc:
foo_type = factory.lookup('foo')
@@ -115,7 +115,7 @@ def test_element_notatype(plugin_fixture, datafiles):
datafiles.basename),
'plugins': {'foo': 0}
}]
- factory = ElementFactory(plugin_fixture['base'], plugins)
+ factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError) as exc:
foo_type = factory.lookup('foo')
@@ -130,7 +130,7 @@ def test_source_wrongtype(plugin_fixture, datafiles):
datafiles.basename),
'plugins': {'foo': 0}
}]
- factory = SourceFactory(plugin_fixture['base'], plugins)
+ factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError) as exc:
foo_type = factory.lookup('foo')
@@ -145,7 +145,7 @@ def test_element_wrongtype(plugin_fixture, datafiles):
datafiles.basename),
'plugins': {'foo': 0}
}]
- factory = ElementFactory(plugin_fixture['base'], plugins)
+ factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError) as exc:
foo_type = factory.lookup('foo')
@@ -159,7 +159,7 @@ def test_source_missing_setup(plugin_fixture, datafiles):
datafiles.basename),
'plugins': {'foo': 0}
}]
- factory = SourceFactory(plugin_fixture['base'], plugins)
+ factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError) as exc:
foo_type = factory.lookup('foo')
@@ -173,7 +173,7 @@ def test_element_missing_setup(plugin_fixture, datafiles):
datafiles.basename),
'plugins': {'foo': 0}
}]
- factory = ElementFactory(plugin_fixture['base'], plugins)
+ factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError) as exc:
foo_type = factory.lookup('foo')
@@ -188,7 +188,7 @@ def test_source_bad_setup(plugin_fixture, datafiles):
datafiles.basename),
'plugins': {'foo': 0}
}]
- factory = SourceFactory(plugin_fixture['base'], plugins)
+ factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError) as exc:
foo_type = factory.lookup('foo')
@@ -203,7 +203,7 @@ def test_element_bad_setup(plugin_fixture, datafiles):
datafiles.basename),
'plugins': {'foo': 0}
}]
- factory = ElementFactory(plugin_fixture['base'], plugins)
+ factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError) as exc:
foo_type = factory.lookup('foo')
@@ -218,7 +218,7 @@ def test_source_badversion(plugin_fixture, datafiles):
datafiles.basename),
'plugins': {'foo': 0}
}]
- factory = SourceFactory(plugin_fixture['base'], plugins)
+ factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError) as exc:
foo_type = factory.lookup('foo')
@@ -233,7 +233,7 @@ def test_element_badversion(plugin_fixture, datafiles):
datafiles.basename),
'plugins': {'foo': 0}
}]
- factory = ElementFactory(plugin_fixture['base'], plugins)
+ factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError) as exc:
foo_type = factory.lookup('foo')
@@ -260,8 +260,8 @@ def test_source_multicontext(plugin_fixture, datafiles):
'plugins': {'foo': 0}
}
- factory1 = SourceFactory(plugin_fixture['base'], [plugins1])
- factory2 = SourceFactory(plugin_fixture['base'], [plugins2])
+ factory1 = SourceFactory(plugin_fixture['base'], plugin_origins=[plugins1])
+ factory2 = SourceFactory(plugin_fixture['base'], plugin_origins=[plugins2])
assert(isinstance(factory1, SourceFactory))
assert(isinstance(factory2, SourceFactory))
@@ -289,8 +289,8 @@ def test_element_multicontext(plugin_fixture, datafiles):
'plugins': {'foo': 0}
}
- factory1 = ElementFactory(plugin_fixture['base'], [plugins1])
- factory2 = ElementFactory(plugin_fixture['base'], [plugins2])
+ factory1 = ElementFactory(plugin_fixture['base'], plugin_origins=[plugins1])
+ factory2 = ElementFactory(plugin_fixture['base'], plugin_origins=[plugins2])
assert(isinstance(factory1, ElementFactory))
assert(isinstance(factory2, ElementFactory))
diff --git a/tests/plugins/third_party.py b/tests/plugins/third_party.py
index a786fc7c2..b7791a2e4 100644
--- a/tests/plugins/third_party.py
+++ b/tests/plugins/third_party.py
@@ -33,7 +33,8 @@ def test_custom_pip_element(plugin_fixture, entry_fixture, datafiles):
'path': str(datafiles),
'plugins': {'foop': 0}
}]
- factory = ElementFactory(plugin_fixture['base'], origin_data)
+ factory = ElementFactory(plugin_fixture['base'],
+ plugin_origins=origin_data)
assert(isinstance(factory, ElementFactory))
entry_fixture(datafiles, 'buildstream.plugins', 'third_party_element:foop')
@@ -50,7 +51,8 @@ def test_custom_pip_source(plugin_fixture, entry_fixture, datafiles):
'path': str(datafiles),
'plugins': {'foop': 0}
}]
- factory = SourceFactory(plugin_fixture['base'], origin_data)
+ factory = SourceFactory(plugin_fixture['base'],
+ plugin_origins=origin_data)
assert(isinstance(factory, SourceFactory))
entry_fixture(datafiles, 'buildstream.plugins', 'third_party_source:foop')
diff --git a/tests/testutils/__init__.py b/tests/testutils/__init__.py
index 93143b505..e9db94989 100644
--- a/tests/testutils/__init__.py
+++ b/tests/testutils/__init__.py
@@ -2,3 +2,4 @@ from .runcli import cli, cli_integration
from .repo import create_repo, ALL_REPO_KINDS
from .artifactshare import create_artifact_share
from .element_generators import create_element_size
+from .junction import generate_junction
diff --git a/tests/testutils/junction.py b/tests/testutils/junction.py
new file mode 100644
index 000000000..efc429ef6
--- /dev/null
+++ b/tests/testutils/junction.py
@@ -0,0 +1,36 @@
+import os
+from tests.testutils import create_repo
+from buildstream import _yaml
+
+
+# generate_junction()
+#
+# Generates a junction element with a git repository
+#
+# Args:
+# tmpdir: The tmpdir fixture, for storing the generated git repo
+# subproject_path: The path for the subproject, to add to the git repo
+# junction_path: The location to store the generated junction element
+# store_ref: Whether to store the ref in the junction.bst file
+#
+# Returns:
+# (str): The ref of the commit created in the subproject repo
+#
+def generate_junction(tmpdir, subproject_path, junction_path, *, store_ref=True):
+ # Create a repo to hold the subproject and generate
+ # a junction element for it
+ #
+ repo = create_repo('git', str(tmpdir))
+ source_ref = ref = repo.create(subproject_path)
+ if not store_ref:
+ source_ref = None
+
+ element = {
+ 'kind': 'junction',
+ 'sources': [
+ repo.source_config(ref=source_ref)
+ ]
+ }
+ _yaml.dump(element, junction_path)
+
+ return ref
diff --git a/tests/yaml/yaml.py b/tests/yaml/yaml.py
index 3b9f385ed..781763717 100644
--- a/tests/yaml/yaml.py
+++ b/tests/yaml/yaml.py
@@ -33,7 +33,7 @@ def assert_provenance(filename, line, col, node, key=None, indices=[]):
else:
assert(isinstance(provenance, _yaml.DictProvenance))
- assert(provenance.filename == filename)
+ assert(provenance.filename.shortname == filename)
assert(provenance.line == line)
assert(provenance.col == col)