author     Daniel Silverstone <daniel.silverstone@codethink.co.uk>  2019-03-25 15:08:37 +0000
committer  Daniel Silverstone <daniel.silverstone@codethink.co.uk>  2019-03-27 21:26:07 +0000
commit     3816dcf8901b06f2b9c0153e5b5fe394acf104a5 (patch)
tree       e380f6c5386d97bc03707ccf8f20871b683e3b0b
parent     1e698622caee2da202a2511bbf41f476224d4cb8 (diff)
download   buildstream-3816dcf8901b06f2b9c0153e5b5fe394acf104a5.tar.gz
The new YAML World Order
Replace YAML internals with a new Node type, and refactor everything to
use it cleanly.

This work was also by James Ennis <james.ennis@codethink.co.uk>

Signed-off-by: Daniel Silverstone <daniel.silverstone@codethink.co.uk>
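At a glance, the change replaces direct dict access on parsed YAML with typed
accessors from the _yaml module. A minimal before/after sketch, assuming a
node loaded via _yaml.load() (the key name here is taken from the cli.py hunk
below):

    # Before: parsed YAML nodes behaved like plain dicts
    element_directory = project.get('element-path')

    # After: typed access, with provenance-aware errors on type mismatch
    element_directory = _yaml.node_get(project, str, 'element-path', default_value='')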
-rw-r--r--  buildstream/_artifact.py  23
-rw-r--r--  buildstream/_artifactelement.py  2
-rw-r--r--  buildstream/_basecache.py  13
-rw-r--r--  buildstream/_context.py  10
-rw-r--r--  buildstream/_frontend/cli.py  2
-rw-r--r--  buildstream/_gitsourcebase.py  2
-rw-r--r--  buildstream/_includes.py  20
-rw-r--r--  buildstream/_loader/loadelement.py  2
-rw-r--r--  buildstream/_loader/loader.py  6
-rw-r--r--  buildstream/_loader/metaelement.py  12
-rw-r--r--  buildstream/_loader/types.py  4
-rw-r--r--  buildstream/_options/optionpool.py  13
-rw-r--r--  buildstream/_plugincontext.py  16
-rw-r--r--  buildstream/_project.py  60
-rw-r--r--  buildstream/_projectrefs.py  39
-rw-r--r--  buildstream/_variables.py  2
-rw-r--r--  buildstream/_workspaces.py  32
-rw-r--r--  buildstream/_yaml.py  1570
-rw-r--r--  buildstream/element.py  44
-rw-r--r--  buildstream/plugin.py  71
-rw-r--r--  buildstream/plugins/elements/filter.py  6
-rw-r--r--  buildstream/plugintestutils/runcli.py  1
-rw-r--r--  buildstream/sandbox/_sandboxremote.py  20
-rw-r--r--  buildstream/source.py  142
-rw-r--r--  tests/artifactcache/junctions.py  8
-rw-r--r--  tests/elements/filter.py  28
-rw-r--r--  tests/elements/filter/basic/element_plugins/dynamic.py  2
-rw-r--r--  tests/format/include.py  35
-rw-r--r--  tests/format/include_composition.py  20
-rw-r--r--  tests/format/optionarch.py  2
-rw-r--r--  tests/format/optionbool.py  4
-rw-r--r--  tests/format/optioneltmask.py  4
-rw-r--r--  tests/format/optionenum.py  4
-rw-r--r--  tests/format/optionexports.py  2
-rw-r--r--  tests/format/optionflags.py  4
-rw-r--r--  tests/format/optionos.py  2
-rw-r--r--  tests/format/optionoverrides.py  2
-rw-r--r--  tests/format/options.py  16
-rw-r--r--  tests/format/project.py  10
-rw-r--r--  tests/format/projectoverrides.py  2
-rw-r--r--  tests/format/variables.py  8
-rw-r--r--  tests/frontend/cross_junction_workspace.py  16
-rw-r--r--  tests/frontend/init.py  22
-rw-r--r--  tests/frontend/project/sources/fetch_source.py  6
-rw-r--r--  tests/frontend/workspace.py  18
-rw-r--r--  tests/integration/pullbuildtrees.py  2
-rw-r--r--  tests/internals/pluginfactory.py  97
-rw-r--r--  tests/internals/yaml.py  202
-rw-r--r--  tests/internals/yaml/roundtrip-test.yaml  55
-rw-r--r--  tests/internals/yaml/traversal.yaml  20
-rw-r--r--  tests/sources/generic/mirror.py  12
-rw-r--r--  tests/sources/generic/track.py  15
-rw-r--r--  tests/sources/git.py  55
-rw-r--r--  tests/testutils/yaml.py  16
54 files changed, 1774 insertions, 1027 deletions
diff --git a/buildstream/_artifact.py b/buildstream/_artifact.py
index 9cb6f5744..71b3c6f5c 100644
--- a/buildstream/_artifact.py
+++ b/buildstream/_artifact.py
@@ -279,7 +279,12 @@ class Artifact():
return build_result
data = _yaml.load(meta_file, shortname='meta/build-result.yaml')
- build_result = (data["success"], data.get("description"), data.get("detail"))
+
+ success = _yaml.node_get(data, bool, 'success')
+ description = _yaml.node_get(data, str, 'description', default_value=None)
+ detail = _yaml.node_get(data, str, 'detail', default_value=None)
+
+ build_result = (success, description, detail)
return build_result
@@ -310,13 +315,13 @@ class Artifact():
# Parse the expensive yaml now and cache the result
meta_file = artifact_vdir._objpath('meta', 'keys.yaml')
meta = _yaml.load(meta_file, shortname='meta/keys.yaml')
- strong_key = meta['strong']
- weak_key = meta['weak']
+ strong_key = _yaml.node_get(meta, str, 'strong')
+ weak_key = _yaml.node_get(meta, str, 'weak')
assert key in (strong_key, weak_key)
- metadata_keys[strong_key] = meta
- metadata_keys[weak_key] = meta
+ metadata_keys[strong_key] = _yaml.node_sanitize(meta)
+ metadata_keys[weak_key] = _yaml.node_sanitize(meta)
return (strong_key, weak_key, metadata_keys)
@@ -351,8 +356,8 @@ class Artifact():
# Cache it under both strong and weak keys
strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
- metadata_dependencies[strong_key] = meta
- metadata_dependencies[weak_key] = meta
+ metadata_dependencies[strong_key] = _yaml.node_sanitize(meta)
+ metadata_dependencies[weak_key] = _yaml.node_sanitize(meta)
return (meta, metadata_dependencies, metadata_keys)
@@ -385,7 +390,7 @@ class Artifact():
# Parse the expensive yaml now and cache the result
meta_file = artifact_vdir._objpath('meta', 'workspaced.yaml')
meta = _yaml.load(meta_file, shortname='meta/workspaced.yaml')
- workspaced = meta['workspaced']
+ workspaced = _yaml.node_get(meta, bool, 'workspaced')
# Cache it under both strong and weak keys
strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
@@ -424,7 +429,7 @@ class Artifact():
# Parse the expensive yaml now and cache the result
meta_file = artifact_vdir._objpath('meta', 'workspaced-dependencies.yaml')
meta = _yaml.load(meta_file, shortname='meta/workspaced-dependencies.yaml')
- workspaced = meta['workspaced-dependencies']
+ workspaced = _yaml.node_sanitize(_yaml.node_get(meta, list, 'workspaced-dependencies'))
# Cache it under both strong and weak keys
strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
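The pattern adopted throughout this file: required keys are fetched with an
expected type, optional keys pass default_value, and nodes handed to plain
Python consumers go through node_sanitize() first. A hedged sketch combining
the calls from the hunks above:

    meta = _yaml.load(meta_file, shortname='meta/build-result.yaml')
    success = _yaml.node_get(meta, bool, 'success')                    # required, type-checked
    detail = _yaml.node_get(meta, str, 'detail', default_value=None)   # optional key
    metadata_keys[strong_key] = _yaml.node_sanitize(meta)              # plain data for caching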
diff --git a/buildstream/_artifactelement.py b/buildstream/_artifactelement.py
index a7915eb28..8210b4d97 100644
--- a/buildstream/_artifactelement.py
+++ b/buildstream/_artifactelement.py
@@ -84,7 +84,7 @@ def verify_artifact_ref(ref):
try:
project, element, key = ref.split('/', 2) # This will raise a Value error if unable to split
# Explicitly raise a ValueError if the key length is not as expected
- if len(key) != len(_cachekey.generate_key({})):
+ if len(key) != len(_cachekey.generate_key(_yaml.new_empty_node())):
raise ValueError
except ValueError:
raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
diff --git a/buildstream/_basecache.py b/buildstream/_basecache.py
index 696cbf9c1..1ca03cbf5 100644
--- a/buildstream/_basecache.py
+++ b/buildstream/_basecache.py
@@ -16,13 +16,13 @@
# Authors:
# Raoul Hidalgo Charman <raoul.hidalgocharman@codethink.co.uk>
#
-from collections.abc import Mapping
import multiprocessing
from . import utils
from . import _yaml
from ._cas import CASRemote
from ._message import Message, MessageType
+from ._exceptions import LoadError
# Base Cache for Caches to derive from
@@ -70,11 +70,12 @@ class BaseCache():
def specs_from_config_node(cls, config_node, basedir=None):
cache_specs = []
- artifacts = config_node.get(cls.config_node_name, [])
- if isinstance(artifacts, Mapping):
- # pylint: disable=not-callable
- cache_specs.append(cls.spec_class._new_from_config_node(artifacts, basedir))
- elif isinstance(artifacts, list):
+ try:
+ artifacts = [_yaml.node_get(config_node, dict, cls.config_node_name)]
+ except LoadError:
+ artifacts = _yaml.node_get(config_node, list, cls.config_node_name, default_value=[])
+
+ if isinstance(artifacts, list):
for spec_node in artifacts:
cache_specs.append(cls.spec_class._new_from_config_node(spec_node, basedir))
else:
diff --git a/buildstream/_context.py b/buildstream/_context.py
index d14577842..afbf69726 100644
--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -157,7 +157,7 @@ class Context():
self._artifactcache = None
self._sourcecache = None
self._projects = []
- self._project_overrides = {}
+ self._project_overrides = _yaml.new_empty_node()
self._workspaces = None
self._workspace_project_cache = WorkspaceProjectCache()
self._log_handle = None
@@ -203,11 +203,11 @@ class Context():
_yaml.composite(defaults, user_config)
# Give obsoletion warnings
- if defaults.get('builddir'):
+ if _yaml.node_contains(defaults, 'builddir'):
raise LoadError(LoadErrorReason.INVALID_DATA,
"builddir is obsolete, use cachedir")
- if defaults.get('artifactdir'):
+ if _yaml.node_contains(defaults, 'artifactdir'):
raise LoadError(LoadErrorReason.INVALID_DATA,
"artifactdir is obsolete")
@@ -306,7 +306,7 @@ class Context():
self.sched_network_retries = _yaml.node_get(scheduler, int, 'network-retries')
# Load per-projects overrides
- self._project_overrides = _yaml.node_get(defaults, Mapping, 'projects', default_value={})
+ self._project_overrides = _yaml.node_get(defaults, dict, 'projects', default_value={})
# Shallow validation of overrides, parts of buildstream which rely
# on the overrides are expected to validate elsewhere.
@@ -441,7 +441,7 @@ class Context():
if self._cache_key is None:
# Anything that alters the build goes into the unique key
- self._cache_key = _cachekey.generate_key({})
+ self._cache_key = _cachekey.generate_key(_yaml.new_empty_node())
return self._cache_key
diff --git a/buildstream/_frontend/cli.py b/buildstream/_frontend/cli.py
index a1a780cc4..cc8cd5e54 100644
--- a/buildstream/_frontend/cli.py
+++ b/buildstream/_frontend/cli.py
@@ -100,7 +100,7 @@ def complete_target(args, incomplete):
return []
# The project is not required to have an element-path
- element_directory = project.get('element-path')
+ element_directory = _yaml.node_get(project, str, 'element-path', default_value='')
# If a project was loaded, use its element-path to
# adjust our completion's base directory
diff --git a/buildstream/_gitsourcebase.py b/buildstream/_gitsourcebase.py
index 8c640da8a..d4c54fd89 100644
--- a/buildstream/_gitsourcebase.py
+++ b/buildstream/_gitsourcebase.py
@@ -422,7 +422,7 @@ class _GitSourceBase(Source):
self.mark_download_url(url, primary=False)
self.submodule_overrides[path] = url
- if 'checkout' in submodule:
+ if self.node_has_member(submodule, 'checkout'):
checkout = self.node_get_member(submodule, bool, 'checkout')
self.submodule_checkout_overrides[path] = checkout
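Note the move away from `'checkout' in submodule`: the new Node type
deliberately asserts in __contains__ (see the Node class in the _yaml.py hunk
below), so membership tests must go through helpers. A sketch of the
resulting plugin-side pattern:

    # `'checkout' in submodule` now trips an assertion on Node;
    # use the Plugin helpers instead:
    if self.node_has_member(submodule, 'checkout'):
        checkout = self.node_get_member(submodule, bool, 'checkout')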
diff --git a/buildstream/_includes.py b/buildstream/_includes.py
index 8db12bde8..f792b7716 100644
--- a/buildstream/_includes.py
+++ b/buildstream/_includes.py
@@ -1,5 +1,4 @@
import os
-from collections.abc import Mapping
from . import _yaml
from ._exceptions import LoadError, LoadErrorReason
@@ -36,17 +35,20 @@ class Includes:
if current_loader is None:
current_loader = self._loader
- if isinstance(node.get('(@)'), str):
- includes = [_yaml.node_get(node, str, '(@)')]
- else:
- includes = _yaml.node_get(node, list, '(@)', default_value=None)
+ includes = _yaml.node_get(node, None, '(@)', default_value=None)
+ if isinstance(includes, str):
+ includes = [includes]
+
+ if not isinstance(includes, list) and includes is not None:
+ provenance = _yaml.node_get_provenance(node, key='(@)')
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: {} must either be list or str".format(provenance, includes))
include_provenance = None
- if '(@)' in node:
+ if includes:
include_provenance = _yaml.node_get_provenance(node, key='(@)')
- del node['(@)']
+ _yaml.node_del(node, '(@)')
- if includes:
for include in reversed(includes):
if only_local and ':' in include:
continue
@@ -130,7 +132,7 @@ class Includes:
included=set(),
current_loader=None,
only_local=False):
- if isinstance(value, Mapping):
+ if _yaml.is_node(value):
self.process(value,
included=included,
current_loader=current_loader,
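For reference, the '(@)' include directive that this code normalizes accepts
either a single string or a list of strings in project YAML; a sketch of the
accepted spellings and the promotion step (the include paths are
illustrative):

    # Both YAML spellings are accepted for includes:
    #   (@): includes/base.yml
    #   (@): [includes/base.yml, junction.bst:includes/extra.yml]
    includes = _yaml.node_get(node, None, '(@)', default_value=None)
    if isinstance(includes, str):
        includes = [includes]   # promote the single-string form to a list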
diff --git a/buildstream/_loader/loadelement.py b/buildstream/_loader/loadelement.py
index 677335404..684c32554 100644
--- a/buildstream/_loader/loadelement.py
+++ b/buildstream/_loader/loadelement.py
@@ -176,6 +176,6 @@ def _extract_depends_from_node(node, *, key=None):
output_deps.append(dependency)
# Now delete the field, we don't want it anymore
- del node[key]
+ _yaml.node_del(node, key, safe=True)
return output_deps
diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index aaa6f0019..4c2bfec77 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -121,7 +121,7 @@ class Loader():
# Set up a dummy element that depends on all top-level targets
# to resolve potential circular dependencies between them
- dummy_target = LoadElement("", "", self)
+ dummy_target = LoadElement(_yaml.new_empty_node(), "", self)
dummy_target.dependencies.extend(
LoadElement.Dependency(element, Symbol.RUNTIME)
for element in target_elements
@@ -420,12 +420,12 @@ class Loader():
for i in range(len(sources)):
source = _yaml.node_get(node, Mapping, Symbol.SOURCES, indices=[i])
kind = _yaml.node_get(source, str, Symbol.KIND)
- del source[Symbol.KIND]
+ _yaml.node_del(source, Symbol.KIND)
# Directory is optional
directory = _yaml.node_get(source, str, Symbol.DIRECTORY, default_value=None)
if directory:
- del source[Symbol.DIRECTORY]
+ _yaml.node_del(source, Symbol.DIRECTORY)
index = sources.index(source)
meta_source = MetaSource(element.name, index, element_kind, kind, source, directory)
diff --git a/buildstream/_loader/metaelement.py b/buildstream/_loader/metaelement.py
index 943b925ff..da963512b 100644
--- a/buildstream/_loader/metaelement.py
+++ b/buildstream/_loader/metaelement.py
@@ -17,6 +17,8 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
+from .. import _yaml
+
class MetaElement():
@@ -46,12 +48,12 @@ class MetaElement():
self.kind = kind
self.provenance = provenance
self.sources = sources
- self.config = config or {}
- self.variables = variables or {}
- self.environment = environment or {}
+ self.config = config or _yaml.new_empty_node()
+ self.variables = variables or _yaml.new_empty_node()
+ self.environment = environment or _yaml.new_empty_node()
self.env_nocache = env_nocache or []
- self.public = public or {}
- self.sandbox = sandbox or {}
+ self.public = public or _yaml.new_empty_node()
+ self.sandbox = sandbox or _yaml.new_empty_node()
self.build_dependencies = []
self.dependencies = []
self.first_pass = first_pass
diff --git a/buildstream/_loader/types.py b/buildstream/_loader/types.py
index eb6932b0b..f9dd38ca0 100644
--- a/buildstream/_loader/types.py
+++ b/buildstream/_loader/types.py
@@ -17,8 +17,6 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-from collections.abc import Mapping
-
from .._exceptions import LoadError, LoadErrorReason
from .. import _yaml
@@ -69,7 +67,7 @@ class Dependency():
self.dep_type = default_dep_type
self.junction = None
- elif isinstance(dep, Mapping):
+ elif _yaml.is_node(dep):
if default_dep_type:
_yaml.node_validate(dep, ['filename', 'junction'])
dep_type = default_dep_type
diff --git a/buildstream/_options/optionpool.py b/buildstream/_options/optionpool.py
index 3132af564..5b248111f 100644
--- a/buildstream/_options/optionpool.py
+++ b/buildstream/_options/optionpool.py
@@ -18,7 +18,6 @@
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
#
-from collections.abc import Mapping
import jinja2
from .. import _yaml
@@ -153,7 +152,7 @@ class OptionPool():
def export_variables(self, variables):
for _, option in self._options.items():
if option.variable:
- variables[option.variable] = option.get_value()
+ _yaml.node_set(variables, option.variable, option.get_value())
# printable_variables()
#
@@ -170,7 +169,7 @@ class OptionPool():
# process_node()
#
# Args:
- # node (Mapping): A YAML Loaded dictionary
+ # node (node): A YAML Loaded dictionary
#
def process_node(self, node):
@@ -187,7 +186,7 @@ class OptionPool():
# and process any indirectly nested conditionals.
#
for _, value in _yaml.node_items(node):
- if isinstance(value, Mapping):
+ if _yaml.is_node(value):
self.process_node(value)
elif isinstance(value, list):
self._process_list(value)
@@ -238,7 +237,7 @@ class OptionPool():
#
def _process_list(self, values):
for value in values:
- if isinstance(value, Mapping):
+ if _yaml.is_node(value):
self.process_node(value)
elif isinstance(value, list):
self._process_list(value)
@@ -268,7 +267,7 @@ class OptionPool():
_yaml.node_get_provenance(node, '(?)', indices=[i])
for i in range(len(conditions))
]
- del node['(?)']
+ _yaml.node_del(node, '(?)')
for condition, p in zip(conditions, provenance):
tuples = list(_yaml.node_items(condition))
@@ -283,7 +282,7 @@ class OptionPool():
# Prepend the provenance of the error
raise LoadError(e.reason, "{}: {}".format(p, e)) from e
- if not hasattr(value, 'get'):
+ if not _yaml.is_node(value):
raise LoadError(LoadErrorReason.ILLEGAL_COMPOSITE,
"{}: Only values of type 'dict' can be composed.".format(p))
diff --git a/buildstream/_plugincontext.py b/buildstream/_plugincontext.py
index 5a7097485..7a5407cf6 100644
--- a/buildstream/_plugincontext.py
+++ b/buildstream/_plugincontext.py
@@ -22,6 +22,7 @@ import inspect
from ._exceptions import PluginError, LoadError, LoadErrorReason
from . import utils
+from . import _yaml
# A Context for loading plugin types
@@ -135,18 +136,21 @@ class PluginContext():
source = None
defaults = None
loaded_dependency = False
+
for origin in self._plugin_origins:
- if kind not in origin['plugins']:
+ if kind not in _yaml.node_get(origin, list, 'plugins'):
continue
- if origin['origin'] == 'local':
- source = self._get_local_plugin_source(origin['path'])
- elif origin['origin'] == 'pip':
- source, defaults = self._get_pip_plugin_source(origin['package-name'], kind)
+ if _yaml.node_get(origin, str, 'origin') == 'local':
+ local_path = _yaml.node_get(origin, str, 'path')
+ source = self._get_local_plugin_source(local_path)
+ elif _yaml.node_get(origin, str, 'origin') == 'pip':
+ package_name = _yaml.node_get(origin, str, 'package-name')
+ source, defaults = self._get_pip_plugin_source(package_name, kind)
else:
raise PluginError("Failed to load plugin '{}': "
"Unexpected plugin origin '{}'"
- .format(kind, origin['origin']))
+ .format(kind, _yaml.node_get(origin, str, 'origin')))
loaded_dependency = True
break
diff --git a/buildstream/_project.py b/buildstream/_project.py
index b6a8727f5..f5da960cf 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -201,7 +201,7 @@ class Project():
if url and utils._ALIAS_SEPARATOR in url:
url_alias, url_body = url.split(utils._ALIAS_SEPARATOR, 1)
- alias_url = config._aliases.get(url_alias)
+ alias_url = _yaml.node_get(config._aliases, str, url_alias, default_value=None)
if alias_url:
url = alias_url + url_body
@@ -231,7 +231,7 @@ class Project():
# Anything that alters the build goes into the unique key
# (currently nothing here)
- self._cache_key = _cachekey.generate_key({})
+ self._cache_key = _cachekey.generate_key(_yaml.new_empty_node())
return self._cache_key
@@ -406,7 +406,7 @@ class Project():
else:
config = self.config
- return config._aliases.get(alias)
+ return _yaml.node_get(config._aliases, str, alias, default_value=None)
# get_alias_uris()
#
@@ -421,7 +421,7 @@ class Project():
else:
config = self.config
- if not alias or alias not in config._aliases:
+ if not alias or not _yaml.node_contains(config._aliases, alias):
return [None]
mirror_list = []
@@ -431,7 +431,7 @@ class Project():
mirror_list = alias_mapping[alias] + mirror_list
else:
mirror_list += alias_mapping[alias]
- mirror_list.append(config._aliases[alias])
+ mirror_list.append(_yaml.node_get(config._aliases, str, alias))
return mirror_list
# load_elements()
@@ -589,20 +589,9 @@ class Project():
self._validate_node(pre_config_node)
- # FIXME:
- #
- # Performing this check manually in the absense
- # of proper support from _yaml.node_get(), this should
- # be removed in favor of a proper accessor function
- # from the _yaml module when #591 is fixed.
- #
- if self._project_conf.get('name') is None:
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: project.conf does not contain expected key '{}'".format(projectfile, 'name'))
-
# The project name, element path and option declarations
# are constant and cannot be overridden by option conditional statements
- self.name = _yaml.node_get(pre_config_node, str, 'name')
+ self.name = _yaml.node_get(self._project_conf, str, 'name')
# Validate that project name is a valid symbol name
_yaml.assert_symbol_name(_yaml.node_get_provenance(pre_config_node, 'name'),
@@ -772,8 +761,8 @@ class Project():
# assertion after.
output.element_overrides = _yaml.node_get(config, Mapping, 'elements', default_value={})
output.source_overrides = _yaml.node_get(config, Mapping, 'sources', default_value={})
- config.pop('elements', None)
- config.pop('sources', None)
+ _yaml.node_del(config, 'elements', safe=True)
+ _yaml.node_del(config, 'sources', safe=True)
_yaml.node_final_assertions(config)
self._load_plugin_factories(config, output)
@@ -809,7 +798,7 @@ class Project():
output.base_variables = _yaml.node_get(config, Mapping, 'variables')
# Add the project name as a default variable
- output.base_variables['project-name'] = self.name
+ _yaml.node_set(output.base_variables, 'project-name', self.name)
# Extend variables with automatic variables and option exports
# Initialize it as a string as all variables are processed as strings.
@@ -817,7 +806,7 @@ class Project():
# max-jobs value seems to be around 8-10 if we have enough cores
# users should set values based on workload and build infrastructure
platform = Platform.get_platform()
- output.base_variables['max-jobs'] = str(platform.get_cpu_count(8))
+ _yaml.node_set(output.base_variables, 'max-jobs', str(platform.get_cpu_count(8)))
# Export options into variables, if that was requested
output.options.export_variables(output.base_variables)
@@ -834,7 +823,7 @@ class Project():
_yaml.node_validate(mirror, allowed_mirror_fields)
mirror_name = _yaml.node_get(mirror, str, 'name')
alias_mappings = {}
- for alias_mapping, uris in _yaml.node_items(mirror['aliases']):
+ for alias_mapping, uris in _yaml.node_items(_yaml.node_get(mirror, Mapping, 'aliases')):
assert isinstance(uris, list)
alias_mappings[alias_mapping] = list(uris)
output.mirrors[mirror_name] = alias_mappings
@@ -897,11 +886,12 @@ class Project():
allowed_origins = ['core', 'local', 'pip']
_yaml.node_validate(origin, allowed_origin_fields)
- if origin['origin'] not in allowed_origins:
+ origin_value = _yaml.node_get(origin, str, 'origin')
+ if origin_value not in allowed_origins:
raise LoadError(
LoadErrorReason.INVALID_YAML,
"Origin '{}' is not one of the allowed types"
- .format(origin['origin']))
+ .format(origin_value))
# Store source versions for checking later
source_versions = _yaml.node_get(origin, Mapping, 'sources', default_value={})
@@ -940,11 +930,11 @@ class Project():
# Helper function to store plugin origins
#
# Args:
- # origin (dict) - a dictionary indicating the origin of a group of
+ # origin (node) - a node indicating the origin of a group of
# plugins.
# plugin_group (str) - The name of the type of plugin that is being
# loaded
- # destination (list) - A list of dicts to store the origins in
+ # destination (list) - A list of nodes to store the origins in
#
# Raises:
# LoadError if 'origin' is an unexpected value
@@ -954,19 +944,21 @@ class Project():
raise LoadError(LoadErrorReason.INVALID_DATA,
"Unexpected plugin group: {}, expecting {}"
.format(plugin_group, expected_groups))
- if plugin_group in origin:
- origin_dict = _yaml.node_copy(origin)
+ node_keys = [key for key, _ in _yaml.node_items(origin)]
+ if plugin_group in node_keys:
+ origin_node = _yaml.node_copy(origin)
plugins = _yaml.node_get(origin, Mapping, plugin_group, default_value={})
- origin_dict['plugins'] = [k for k, _ in _yaml.node_items(plugins)]
+ _yaml.node_set(origin_node, 'plugins', [k for k, _ in _yaml.node_items(plugins)])
for group in expected_groups:
- if group in origin_dict:
- del origin_dict[group]
- if origin_dict['origin'] == 'local':
+ if _yaml.node_contains(origin_node, group):
+ _yaml.node_del(origin_node, group)
+
+ if _yaml.node_get(origin_node, str, 'origin') == 'local':
path = self.get_path_from_node(origin, 'path',
check_is_dir=True)
# paths are passed in relative to the project, but must be absolute
- origin_dict['path'] = os.path.join(self.directory, path)
- destination.append(origin_dict)
+ _yaml.node_set(origin_node, 'path', os.path.join(self.directory, path))
+ destination.append(origin_node)
# _warning_is_fatal():
#
diff --git a/buildstream/_projectrefs.py b/buildstream/_projectrefs.py
index 4009d7449..b1443ef32 100644
--- a/buildstream/_projectrefs.py
+++ b/buildstream/_projectrefs.py
@@ -61,7 +61,6 @@ class ProjectRefs():
# options (OptionPool): To resolve conditional statements
#
def load(self, options):
-
try:
self._toplevel_node = _yaml.load(self._fullpath, shortname=self._base_name, copy_tree=True)
provenance = _yaml.node_get_provenance(self._toplevel_node)
@@ -80,22 +79,15 @@ class ProjectRefs():
# Ignore failure if the file doesn't exist, it'll be created and
# for now just assumed to be empty
- self._toplevel_node = {}
+ self._toplevel_node = _yaml.new_synthetic_file(self._fullpath)
self._toplevel_save = self._toplevel_node
_yaml.node_validate(self._toplevel_node, ['projects'])
# Ensure we create our toplevel entry point on the fly here
for node in [self._toplevel_node, self._toplevel_save]:
- if 'projects' not in node:
- node['projects'] = {}
-
- # save()
- #
- # Save the project.refs file with any local changes
- #
- def save(self):
- _yaml.dump(self._toplevel_save, self._fullpath)
+ if not _yaml.node_contains(node, 'projects'):
+ _yaml.node_set(node, 'projects', _yaml.new_empty_node(ref_node=node))
# lookup_ref()
#
@@ -117,11 +109,6 @@ class ProjectRefs():
if write:
- if node is not None:
- provenance = _yaml.node_get_provenance(node)
- if provenance:
- node = provenance.node
-
# If we couldn't find the original, create a new one.
#
if node is None:
@@ -134,22 +121,24 @@ class ProjectRefs():
# Looks up a ref node in the project.refs file, creates one if ensure is True.
#
def _lookup(self, toplevel, project, element, source_index, *, ensure=False):
-
# Fetch the project
try:
- project_node = toplevel['projects'][project]
- except KeyError:
+ projects = _yaml.node_get(toplevel, dict, 'projects')
+ project_node = _yaml.node_get(projects, dict, project)
+ except LoadError:
if not ensure:
return None
- project_node = toplevel['projects'][project] = {}
+ project_node = _yaml.new_empty_node(ref_node=projects)
+ _yaml.node_set(projects, project, project_node)
# Fetch the element
try:
- element_list = project_node[element]
- except KeyError:
+ element_list = _yaml.node_get(project_node, list, element)
+ except LoadError:
if not ensure:
return None
- element_list = project_node[element] = []
+ element_list = []
+ _yaml.node_set(project_node, element, element_list)
# Fetch the source index
try:
@@ -159,8 +148,8 @@ class ProjectRefs():
return None
# Pad the list with empty newly created dictionaries
- element_list.extend({} for _ in range(len(element_list), source_index + 1))
+ _yaml.node_extend_list(project_node, element, source_index + 1, {})
- node = element_list[source_index]
+ node = _yaml.node_get(project_node, dict, element, indices=[source_index])
return node
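The rewritten _lookup() builds missing structure with synthetic nodes:
new_empty_node(ref_node=...) ties the new mapping's provenance to the file it
will eventually be saved into, and node_extend_list() pads lists so that an
indexed node_get() is guaranteed to find an entry. A condensed sketch of that
flow, using the names from the hunk above:

    # Create-on-demand, inheriting provenance from the parent node
    project_node = _yaml.new_empty_node(ref_node=projects)
    _yaml.node_set(projects, project, project_node)

    # Pad the per-element list with empty mappings, then index into it
    _yaml.node_extend_list(project_node, element, source_index + 1, {})
    node = _yaml.node_get(project_node, dict, element, indices=[source_index])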
diff --git a/buildstream/_variables.py b/buildstream/_variables.py
index 436b80962..74314cf1f 100644
--- a/buildstream/_variables.py
+++ b/buildstream/_variables.py
@@ -118,7 +118,7 @@ class Variables():
# Initialize it as a string as all variables are processed as strings.
#
if _yaml.node_get(node, bool, 'notparallel', default_value=False):
- node['max-jobs'] = str(1)
+ _yaml.node_set(node, 'max-jobs', str(1))
ret = {}
for key, value in _yaml.node_items(node):
diff --git a/buildstream/_workspaces.py b/buildstream/_workspaces.py
index 24a3cc8d3..9fbfb7e63 100644
--- a/buildstream/_workspaces.py
+++ b/buildstream/_workspaces.py
@@ -114,7 +114,7 @@ class WorkspaceProject():
def load(cls, directory):
workspace_file = os.path.join(directory, WORKSPACE_PROJECT_FILE)
if os.path.exists(workspace_file):
- data_dict = _yaml.load(workspace_file)
+ data_dict = _yaml.node_sanitize(_yaml.roundtrip_load(workspace_file), dict_type=dict)
return cls.from_dict(directory, data_dict)
else:
return None
@@ -417,7 +417,7 @@ class Workspaces():
# A tuple in the following format: (str, Workspace), where the
# first element is the name of the workspaced element.
def list(self):
- for element, _ in _yaml.node_items(self._workspaces):
+ for element in self._workspaces.keys():
yield (element, self._workspaces[element])
# create_workspace()
@@ -526,12 +526,11 @@ class Workspaces():
'format-version': BST_WORKSPACE_FORMAT_VERSION,
'workspaces': {
element: workspace.to_dict()
- for element, workspace in _yaml.node_items(self._workspaces)
+ for element, workspace in self._workspaces.items()
}
}
os.makedirs(self._bst_directory, exist_ok=True)
- _yaml.dump(_yaml.node_sanitize(config),
- self._get_filename())
+ _yaml.dump(config, self._get_filename())
# _load_config()
#
@@ -570,16 +569,24 @@ class Workspaces():
# Raises: LoadError if there was a problem with the workspace config
#
def _parse_workspace_config(self, workspaces):
- version = _yaml.node_get(workspaces, int, "format-version", default_value=0)
+ try:
+ version = _yaml.node_get(workspaces, int, 'format-version', default_value=0)
+ except ValueError:
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "Format version is not an integer in workspace configuration")
if version == 0:
# Pre-versioning format can be of two forms
for element, config in _yaml.node_items(workspaces):
+ if _yaml.is_node(config):
+ # Get a dict
+ config = _yaml.node_sanitize(config, dict_type=dict)
+
if isinstance(config, str):
pass
elif isinstance(config, dict):
- sources = list(_yaml.node_items(config))
+ sources = list(config.items())
if len(sources) > 1:
detail = "There are multiple workspaces open for '{}'.\n" + \
"This is not supported anymore.\n" + \
@@ -587,7 +594,7 @@ class Workspaces():
raise LoadError(LoadErrorReason.INVALID_DATA,
detail.format(element, self._get_filename()))
- workspaces[element] = sources[0][1]
+ _yaml.node_set(workspaces, element, sources[0][1])
else:
raise LoadError(LoadErrorReason.INVALID_DATA,
@@ -599,7 +606,8 @@ class Workspaces():
}
elif 1 <= version <= BST_WORKSPACE_FORMAT_VERSION:
- workspaces = _yaml.node_get(workspaces, dict, "workspaces", default_value={})
+ workspaces = _yaml.node_get(workspaces, dict, "workspaces",
+ default_value=_yaml.new_empty_node())
res = {element: self._load_workspace(node)
for element, node in _yaml.node_items(workspaces)}
@@ -616,7 +624,7 @@ class Workspaces():
# Loads a new workspace from a YAML node
#
# Args:
- # node: A YAML Node
+ # node: A YAML dict
#
# Returns:
# (Workspace): A newly instantiated Workspace
@@ -626,7 +634,9 @@ class Workspaces():
'prepared': _yaml.node_get(node, bool, 'prepared', default_value=False),
'path': _yaml.node_get(node, str, 'path'),
'last_successful': _yaml.node_get(node, str, 'last_successful', default_value=None),
- 'running_files': _yaml.node_get(node, dict, 'running_files', default_value=None),
+ 'running_files': _yaml.node_sanitize(
+ _yaml.node_get(node, dict, 'running_files', default_value=None),
+ dict_type=dict),
}
return Workspace.from_dict(self._toplevel_project, dictionary)
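Two distinct loaders now coexist: _yaml.load() produces provenance-carrying
Nodes, while roundtrip_load() (paired with roundtrip_dump()) preserves the
file for faithful re-serialization; node_sanitize(..., dict_type=dict)
converts either into plain Python containers. A sketch of the
WorkspaceProject pattern above:

    data = _yaml.roundtrip_load(workspace_file)               # format-preserving load
    data_dict = _yaml.node_sanitize(data, dict_type=dict)     # plain dicts for from_dict()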
diff --git a/buildstream/_yaml.py b/buildstream/_yaml.py
index 4c2ae2b4d..1512a3cb8 100644
--- a/buildstream/_yaml.py
+++ b/buildstream/_yaml.py
@@ -1,5 +1,6 @@
#
# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg LLP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
@@ -16,147 +17,111 @@
#
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
+# Daniel Silverstone <daniel.silverstone@codethink.co.uk>
+# James Ennis <james.ennis@codethink.co.uk>
import sys
-import collections
import string
-from copy import deepcopy
from contextlib import ExitStack
+from collections import OrderedDict, namedtuple
+from collections.abc import Mapping, Sequence
+from copy import deepcopy
+from itertools import count
from ruamel import yaml
-from ruamel.yaml.representer import SafeRepresenter, RoundTripRepresenter
-from ruamel.yaml.constructor import RoundTripConstructor
from ._exceptions import LoadError, LoadErrorReason
-# This overrides the ruamel constructor to treat everything as a string
-RoundTripConstructor.add_constructor(u'tag:yaml.org,2002:int', RoundTripConstructor.construct_yaml_str)
-RoundTripConstructor.add_constructor(u'tag:yaml.org,2002:float', RoundTripConstructor.construct_yaml_str)
-
-# We store information in the loaded yaml on a DictProvenance
-# stored in all dictionaries under this key
-PROVENANCE_KEY = '__bst_provenance_info'
-
-# Provides information about file for provenance
+# Without this, pylint complains about all the `type(foo) is blah` checks
+# because it feels isinstance() is more idiomatic. Sadly, it is much slower to
+# do `isinstance(foo, blah)` for reasons I am unable to fathom. As such, we
+# blanket disable the check for this module.
#
-# Args:
-# name (str): Full path to the file
-# shortname (str): Relative path to the file
-# project (Project): Project where the shortname is relative from
-class ProvenanceFile():
- def __init__(self, name, shortname, project):
- self.name = name
- self.shortname = shortname
- self.project = project
+# pylint: disable=unidiomatic-typecheck
-# Provenance tracks the origin of a given node in the parsed dictionary.
+# Node()
#
-# Args:
-# node (dict, list, value): A binding to the originally parsed value
-# filename (str): The filename the node was loaded from
-# toplevel (dict): The toplevel of the loaded file, suitable for later dumps
-# line (int): The line number where node was parsed
-# col (int): The column number where node was parsed
-#
-class Provenance():
- def __init__(self, filename, node, toplevel, line=0, col=0):
- self.filename = filename
- self.node = node
- self.toplevel = toplevel
- self.line = line
- self.col = col
-
- # Convert a Provenance to a string for error reporting
- def __str__(self):
- filename = self.filename.shortname
- if self.filename.project and self.filename.project.junction:
- filename = "{}:{}".format(self.filename.project.junction.name, self.filename.shortname)
-
- return "{} [line {:d} column {:d}]".format(filename, self.line, self.col)
-
- # Abstract method
- def clone(self):
- pass # pragma: nocover
-
-
-# A Provenance for dictionaries, these are stored in the copy of the
-# loaded YAML tree and track the provenance of all members
+# Container for YAML loaded data and its provenance
#
-class DictProvenance(Provenance):
- def __init__(self, filename, node, toplevel, line=None, col=None):
-
- if line is None or col is None:
- # Special case for loading an empty dict
- if hasattr(node, 'lc'):
- line = node.lc.line + 1
- col = node.lc.col
- else:
- line = 1
- col = 0
-
- super(DictProvenance, self).__init__(filename, node, toplevel, line=line, col=col)
-
- self.members = {}
-
- def clone(self):
- provenance = DictProvenance(self.filename, self.node, self.toplevel,
- line=self.line, col=self.col)
-
- provenance.members = {
- member_name: member.clone()
- for member_name, member in self.members.items()
- }
- return provenance
-
-
-# A Provenance for dict members
+# All nodes returned (and all internal lists/strings) have this type (rather
+# than a plain tuple, to distinguish them in things like node_sanitize)
#
-class MemberProvenance(Provenance):
- def __init__(self, filename, parent_dict, member_name, toplevel,
- node=None, line=None, col=None):
-
- if parent_dict is not None:
- node = parent_dict[member_name]
- line, col = parent_dict.lc.value(member_name)
- line += 1
-
- super(MemberProvenance, self).__init__(
- filename, node, toplevel, line=line, col=col)
+# Members:
+# value (str/list/dict): The loaded value.
+# file_index (int): Index within _FILE_LIST (a list of loaded file paths).
+# Negative indices indicate synthetic nodes so that
+# they can be referenced.
+# line (int): The line number within the file where the value appears.
+# col (int): The column number within the file where the value appears.
+#
+# For efficiency, each field should be accessed by its integer index:
+# value = Node[0]
+# file_index = Node[1]
+# line = Node[2]
+# column = Node[3]
+#
+class Node(namedtuple('Node', ['value', 'file_index', 'line', 'column'])):
+ def __contains__(self, what):
+ assert False, \
+ "BUG: Attempt to do `{} in {}` test".format(what, self)
- # Only used if member is a list
- self.elements = []
- def clone(self):
- provenance = MemberProvenance(self.filename, None, None, self.toplevel,
- node=self.node, line=self.line, col=self.col)
- provenance.elements = [e.clone() for e in self.elements]
- return provenance
+# File name handling
+_FILE_LIST = []
-# A Provenance for list elements
-#
-class ElementProvenance(Provenance):
- def __init__(self, filename, parent_list, index, toplevel,
- node=None, line=None, col=None):
+# A purely synthetic node will have None for the file number, have line number
+# zero, and a negative column number which comes from inverting the next value
+# out of this counter. Synthetic nodes created with a reference node will
+# have a file number from the reference node, some unknown line number, and
+# a negative column number from this counter.
+_SYNTHETIC_COUNTER = count(start=-1, step=-1)
- if parent_list is not None:
- node = parent_list[index]
- line, col = parent_list.lc.item(index)
- line += 1
- super(ElementProvenance, self).__init__(
- filename, node, toplevel, line=line, col=col)
+# Returned from node_get_provenance
+class ProvenanceInformation:
- # Only used if element is a list
- self.elements = []
+ __slots__ = (
+ "filename",
+ "shortname",
+ "displayname",
+ "line",
+ "col",
+ "toplevel",
+ "node",
+ "project",
+ "is_synthetic",
+ )
- def clone(self):
- provenance = ElementProvenance(self.filename, None, None, self.toplevel,
- node=self.node, line=self.line, col=self.col)
+ def __init__(self, nodeish):
+ self.node = nodeish
+ if (nodeish is None) or (nodeish[1] is None):
+ self.filename = ""
+ self.shortname = ""
+ self.displayname = ""
+ self.line = 1
+ self.col = 0
+ self.toplevel = None
+ self.project = None
+ else:
+ fileinfo = _FILE_LIST[nodeish[1]]
+ self.filename = fileinfo[0]
+ self.shortname = fileinfo[1]
+ self.displayname = fileinfo[2]
+ # We add 1 here to convert from computerish to humanish
+ self.line = nodeish[2] + 1
+ self.col = nodeish[3]
+ self.toplevel = fileinfo[3]
+ self.project = fileinfo[4]
+ self.is_synthetic = (self.filename == '') or (self.col < 0)
- provenance.elements = [e.clone for e in self.elements]
- return provenance
+ # Convert a Provenance to a string for error reporting
+ def __str__(self):
+ if self.is_synthetic:
+ return "{} [synthetic node]".format(self.displayname)
+ else:
+ return "{} [line {:d} column {:d}]".format(self.displayname, self.line, self.col)
# These exceptions are intended to be caught entirely within
@@ -166,17 +131,165 @@ class CompositeError(Exception):
def __init__(self, path, message):
super(CompositeError, self).__init__(message)
self.path = path
+ self.message = message
+
+
+class YAMLLoadError(Exception):
+ pass
+
+
+# Representer for YAML events comprising input to the BuildStream format.
+#
+# All streams MUST represent a single document which must be a Mapping.
+# Anything else is considered an error.
+#
+# Mappings must only have string keys, values are always represented as
+# strings if they are scalar, or else as simple dictionaries and lists.
+#
+class Representer:
+ __slots__ = (
+ "_file_index",
+ "state",
+ "output",
+ "keys",
+ )
+
+ # Initialise a new representer
+ #
+ # The file index is used to store into the Node instances so that the
+ # provenance of the YAML can be tracked.
+ #
+ # Args:
+ # file_index (int): The index of this YAML file
+ def __init__(self, file_index):
+ self._file_index = file_index
+ self.state = "init"
+ self.output = []
+ self.keys = []
+
+ # Handle a YAML parse event
+ #
+ # Args:
+ # event (YAML Event): The event to be handled
+ #
+ # Raises:
+ # YAMLLoadError: Something went wrong.
+ def handle_event(self, event):
+ if getattr(event, "anchor", None) is not None:
+ raise YAMLLoadError("Anchors are disallowed in BuildStream at line {} column {}"
+ .format(event.start_mark.line, event.start_mark.column))
+
+ if event.__class__.__name__ == "ScalarEvent":
+ if event.tag is not None:
+ if not event.tag.startswith("tag:yaml.org,2002:"):
+ raise YAMLLoadError(
+ "Non-core tag expressed in input. " +
+ "This is disallowed in BuildStream. At line {} column {}"
+ .format(event.start_mark.line, event.start_mark.column))
+
+ handler = "_handle_{}_{}".format(self.state, event.__class__.__name__)
+ handler = getattr(self, handler, None)
+ if handler is None:
+ raise YAMLLoadError(
+ "Invalid input detected. No handler for {} in state {} at line {} column {}"
+ .format(event, self.state, event.start_mark.line, event.start_mark.column))
+
+ self.state = handler(event) # pylint: disable=not-callable
+
+ # Get the output of the YAML parse
+ #
+ # Returns:
+ # (Node or None): Return the Node instance of the top level mapping or
+ # None if there wasn't one.
+ def get_output(self):
+ try:
+ return self.output[0]
+ except IndexError:
+ return None
+
+ def _handle_init_StreamStartEvent(self, ev):
+ return "stream"
+
+ def _handle_stream_DocumentStartEvent(self, ev):
+ return "doc"
+
+ def _handle_doc_MappingStartEvent(self, ev):
+ newmap = Node({}, self._file_index, ev.start_mark.line, ev.start_mark.column)
+ self.output.append(newmap)
+ return "wait_key"
+
+ def _handle_wait_key_ScalarEvent(self, ev):
+ self.keys.append(ev.value)
+ return "wait_value"
+
+ def _handle_wait_value_ScalarEvent(self, ev):
+ key = self.keys.pop()
+ self.output[-1][0][key] = \
+ Node(ev.value, self._file_index, ev.start_mark.line, ev.start_mark.column)
+ return "wait_key"
+
+ def _handle_wait_value_MappingStartEvent(self, ev):
+ new_state = self._handle_doc_MappingStartEvent(ev)
+ key = self.keys.pop()
+ self.output[-2][0][key] = self.output[-1]
+ return new_state
+
+ def _handle_wait_key_MappingEndEvent(self, ev):
+ # We've finished a mapping, so pop it off the output stack
+ # unless it's the last one in which case we leave it
+ if len(self.output) > 1:
+ self.output.pop()
+ if type(self.output[-1][0]) is list:
+ return "wait_list_item"
+ else:
+ return "wait_key"
+ else:
+ return "doc"
+
+ def _handle_wait_value_SequenceStartEvent(self, ev):
+ self.output.append(Node([], self._file_index, ev.start_mark.line, ev.start_mark.column))
+ self.output[-2][0][self.keys[-1]] = self.output[-1]
+ return "wait_list_item"
+
+ def _handle_wait_list_item_SequenceStartEvent(self, ev):
+ self.keys.append(len(self.output[-1][0]))
+ self.output.append(Node([], self._file_index, ev.start_mark.line, ev.start_mark.column))
+ self.output[-2][0].append(self.output[-1])
+ return "wait_list_item"
+
+ def _handle_wait_list_item_SequenceEndEvent(self, ev):
+ # When ending a sequence, we need to pop a key because we retain the
+ # key until the end so that if we need to mutate the underlying entry
+ # we can.
+ key = self.keys.pop()
+ self.output.pop()
+ if type(key) is int:
+ return "wait_list_item"
+ else:
+ return "wait_key"
+
+ def _handle_wait_list_item_ScalarEvent(self, ev):
+ self.output[-1][0].append(
+ Node(ev.value, self._file_index, ev.start_mark.line, ev.start_mark.column))
+ return "wait_list_item"
+
+ def _handle_wait_list_item_MappingStartEvent(self, ev):
+ new_state = self._handle_doc_MappingStartEvent(ev)
+ self.output[-2][0].append(self.output[-1])
+ return new_state
+ def _handle_doc_DocumentEndEvent(self, ev):
+ if len(self.output) != 1:
+ raise YAMLLoadError("Zero, or more than one document found in YAML stream")
+ return "stream"
-class CompositeTypeError(CompositeError):
- def __init__(self, path, expected_type, actual_type):
- super(CompositeTypeError, self).__init__(
- path,
- "Error compositing dictionary key '{}', expected source type '{}' "
- "but received type '{}'"
- .format(path, expected_type.__name__, actual_type.__name__))
- self.expected_type = expected_type
- self.actual_type = actual_type
+ def _handle_stream_StreamEndEvent(self, ev):
+ return "init"
# Loads a dictionary from some YAML
@@ -186,6 +299,7 @@ class CompositeTypeError(CompositeError):
# shortname (str): The filename in shorthand for error reporting (or None)
# copy_tree (bool): Whether to make a copy, preserving the original toplevels
# for later serialization
+# project (Project): The (optional) project to associate the parsed YAML with
#
# Returns (dict): A loaded copy of the YAML file with provenance information
#
@@ -195,14 +309,22 @@ def load(filename, shortname=None, copy_tree=False, *, project=None):
if not shortname:
shortname = filename
- file = ProvenanceFile(filename, shortname, project)
+ if (project is not None) and (project.junction is not None):
+ displayname = "{}:{}".format(project.junction.name, shortname)
+ else:
+ displayname = shortname
+
+ file_number = len(_FILE_LIST)
+ _FILE_LIST.append((filename, shortname, displayname, None, project))
try:
- data = None
with open(filename) as f:
contents = f.read()
- data = load_data(contents, file, copy_tree=copy_tree)
+ data = load_data(contents,
+ file_index=file_number,
+ file_name=filename,
+ copy_tree=copy_tree)
return data
except FileNotFoundError as e:
@@ -216,97 +338,57 @@ def load(filename, shortname=None, copy_tree=False, *, project=None):
# Like load(), but doesnt require the data to be in a file
#
-def load_data(data, file=None, copy_tree=False):
+def load_data(data, file_index=None, file_name=None, copy_tree=False):
try:
- contents = yaml.load(data, yaml.loader.RoundTripLoader, preserve_quotes=True)
- except (yaml.scanner.ScannerError, yaml.composer.ComposerError, yaml.parser.ParserError) as e:
+ rep = Representer(file_index)
+ for event in yaml.parse(data, Loader=yaml.CBaseLoader):
+ rep.handle_event(event)
+ contents = rep.get_output()
+ except YAMLLoadError as e:
raise LoadError(LoadErrorReason.INVALID_YAML,
- "Malformed YAML:\n\n{}\n\n{}\n".format(e.problem, e.problem_mark)) from e
+ "Malformed YAML:\n\n{}\n\n".format(e)) from e
+ except Exception as e:
+ raise LoadError(LoadErrorReason.INVALID_YAML,
+ "Severely malformed YAML:\n\n{}\n\n".format(e)) from e
- if not isinstance(contents, dict):
+ if not isinstance(contents, tuple) or not isinstance(contents[0], dict):
# Special case allowance for None, when the loaded file has only comments in it.
if contents is None:
- contents = {}
+ contents = Node({}, file_index, 0, 0)
else:
raise LoadError(LoadErrorReason.INVALID_YAML,
"YAML file has content of type '{}' instead of expected type 'dict': {}"
- .format(type(contents).__name__, file.name))
+ .format(type(contents[0]).__name__, file_name))
+
+ # Store this away because we'll use it later for "top level" provenance
+ if file_index is not None:
+ _FILE_LIST[file_index] = (
+ _FILE_LIST[file_index][0], # Filename
+ _FILE_LIST[file_index][1], # Shortname
+ _FILE_LIST[file_index][2], # Displayname
+ contents,
+ _FILE_LIST[file_index][4], # Project
+ )
- return node_decorated_copy(file, contents, copy_tree=copy_tree)
+ if copy_tree:
+ contents = node_copy(contents)
+ return contents
-# Dumps a previously loaded YAML node to a file
+# dump()
#
-# Args:
-# node (dict): A node previously loaded with _yaml.load() above
-# filename (str): The YAML file to load
+# Write a YAML node structure out to disk.
#
-def dump(node, filename=None):
- with ExitStack() as stack:
- if filename:
- from . import utils
- f = stack.enter_context(utils.save_file_atomic(filename, 'w'))
- else:
- f = sys.stdout
- yaml.round_trip_dump(node, f)
-
-
-# node_decorated_copy()
-#
-# Create a copy of a loaded dict tree decorated with Provenance
-# information, used directly after loading yaml
+# This will always call `node_sanitize` on its input, so if you wanted
+# to output something close to what you read in, consider using the
+# `roundtrip_load` and `roundtrip_dump` function pair instead.
#
# Args:
-# filename (str): The filename
-# toplevel (node): The toplevel dictionary node
-# copy_tree (bool): Whether to load a copy and preserve the original
-#
-# Returns: A copy of the toplevel decorated with Provinance
-#
-def node_decorated_copy(filename, toplevel, copy_tree=False):
- if copy_tree:
- result = deepcopy(toplevel)
- else:
- result = toplevel
-
- node_decorate_dict(filename, result, toplevel, toplevel)
-
- return result
-
-
-def node_decorate_dict(filename, target, source, toplevel):
- provenance = DictProvenance(filename, source, toplevel)
- target[PROVENANCE_KEY] = provenance
-
- for key, value in node_items(source):
- member = MemberProvenance(filename, source, key, toplevel)
- provenance.members[key] = member
-
- target_value = target.get(key)
- if isinstance(value, collections.abc.Mapping):
- node_decorate_dict(filename, target_value, value, toplevel)
- elif isinstance(value, list):
- member.elements = node_decorate_list(filename, target_value, value, toplevel)
-
-
-def node_decorate_list(filename, target, source, toplevel):
-
- elements = []
-
- for item in source:
- idx = source.index(item)
- target_item = target[idx]
- element = ElementProvenance(filename, source, idx, toplevel)
-
- if isinstance(item, collections.abc.Mapping):
- node_decorate_dict(filename, target_item, item, toplevel)
- elif isinstance(item, list):
- element.elements = node_decorate_list(filename, target_item, item, toplevel)
-
- elements.append(element)
-
- return elements
+# contents (any): Content to write out
+# filename (str): The (optional) file name to write out to
+def dump(contents, filename=None):
+ roundtrip_dump(node_sanitize(contents), file=filename)
# node_get_provenance()
@@ -321,15 +403,20 @@ def node_decorate_list(filename, target, source, toplevel):
# Returns: The Provenance of the dict, member or list element
#
def node_get_provenance(node, key=None, indices=None):
+ assert is_node(node)
+
+ if key is None:
+ # Retrieving the provenance for this node directly
+ return ProvenanceInformation(node)
- provenance = node.get(PROVENANCE_KEY)
- if provenance and key:
- provenance = provenance.members.get(key)
- if provenance and indices is not None:
- for index in indices:
- provenance = provenance.elements[index]
+ if key and not indices:
+ return ProvenanceInformation(node[0].get(key))
- return provenance
+ nodeish = node[0].get(key)
+ for idx in indices:
+ nodeish = nodeish[0][idx]
+
+ return ProvenanceInformation(nodeish)
# A sentinel to be used as a default argument for functions that need
@@ -361,41 +448,51 @@ _sentinel = object()
# Returned strings are stripped of leading and trailing whitespace
#
def node_get(node, expected_type, key, indices=None, *, default_value=_sentinel, allow_none=False):
- value = node.get(key, default_value)
- if value is _sentinel:
- provenance = node_get_provenance(node)
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Dictionary did not contain expected key '{}'".format(provenance, key))
+ assert type(node) is Node
path = key
- if indices is not None:
+ if indices is None:
+ if default_value is _sentinel:
+ value = node[0].get(key, Node(default_value, None, 0, 0))
+ else:
+ value = node[0].get(key, Node(default_value, None, 0, next(_SYNTHETIC_COUNTER)))
+
+ if value[0] is _sentinel:
+ provenance = node_get_provenance(node)
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Dictionary did not contain expected key '{}'".format(provenance, key))
+ else:
# Implied type check of the element itself
- value = node_get(node, list, key)
+ # No need to synthesise useful node content as we destructure it immediately
+ value = Node(node_get(node, list, key), None, 0, 0)
for index in indices:
- value = value[index]
+ value = value[0][index]
+ if type(value) is not Node:
+ value = (value,)
path += '[{:d}]'.format(index)
# Optionally allow None as a valid value for any type
- if value is None and (allow_none or default_value is None):
+ if value[0] is None and (allow_none or default_value is None):
return None
- if not isinstance(value, expected_type):
+ if (expected_type is not None) and (not isinstance(value[0], expected_type)):
# Attempt basic conversions if possible, typically we want to
# be able to specify numeric values and convert them to strings,
# but we dont want to try converting dicts/lists
try:
- if (expected_type == bool and isinstance(value, str)):
+ if (expected_type == bool and isinstance(value[0], str)):
# Dont coerce booleans to string, this makes "False" strings evaluate to True
- if value in ('True', 'true'):
- value = True
- elif value in ('False', 'false'):
- value = False
+ # We don't structure into full nodes since there's no need.
+ if value[0] in ('True', 'true'):
+ value = (True, None, 0, 0)
+ elif value[0] in ('False', 'false'):
+ value = (False, None, 0, 0)
else:
raise ValueError()
elif not (expected_type == list or
expected_type == dict or
- isinstance(value, (list, dict))):
- value = expected_type(value)
+ isinstance(value[0], (list, dict))):
+ value = (expected_type(value[0]), None, 0, 0)
else:
raise ValueError()
except (ValueError, TypeError):
@@ -404,13 +501,115 @@ def node_get(node, expected_type, key, indices=None, *, default_value=_sentinel,
"{}: Value of '{}' is not of the expected type '{}'"
.format(provenance, path, expected_type.__name__))
+ # Now collapse lists, and scalars, to their value, leaving nodes as-is
+ if type(value[0]) is not dict:
+ value = value[0]
+
# Trim it at the bud, let all loaded strings from yaml be stripped of whitespace
- if isinstance(value, str):
+ if type(value) is str:
value = value.strip()
+ elif type(value) is list:
+ # Now we create a fresh list which unwraps the str and list types
+ # semi-recursively.
+ value = __trim_list_provenance(value)
+
return value
+def __trim_list_provenance(value):
+ ret = []
+ for entry in value:
+ if type(entry) is not Node:
+ entry = (entry, None, 0, 0)
+ if type(entry[0]) is list:
+ ret.append(__trim_list_provenance(entry[0]))
+ elif type(entry[0]) is dict:
+ ret.append(entry)
+ else:
+ ret.append(entry[0])
+ return ret
+
+
+# node_set()
+#
+# Set an item within the node. If using `indices` be aware that the entry must
+# already exist, or else a KeyError will be raised. Use `node_extend_list` to
+# create entries before using `node_set`
+#
+# Args:
+# node (tuple): The node
+# key (str): The key name
+# value: The value
+# indices: Any indices to index into the list referenced by key, like in
+# `node_get` (must be a list of integers)
+#
+def node_set(node, key, value, indices=None):
+ if indices:
+ node = node[0][key]
+ key = indices.pop()
+ for idx in indices:
+ node = node[0][idx]
+ if type(value) is Node:
+ node[0][key] = value
+ else:
+ try:
+ # Need to do this just in case we're modifying a list
+ old_value = node[0][key]
+ except KeyError:
+ old_value = None
+ if old_value is None:
+ node[0][key] = Node(value, node[1], node[2], next(_SYNTHETIC_COUNTER))
+ else:
+ node[0][key] = Node(value, old_value[1], old_value[2], old_value[3])
+
+
+# node_extend_list()
+#
+# Extend a list inside a node to a given length, using the passed
+# default value to fill it out.
+#
+# Valid default values are:
+# Any string
+# An empty dict
+# An empty list
+#
+# Args:
+# node (node): The node
+# key (str): The list name in the node
+# length (int): The length to extend the list to
+# default (any): The default value to extend with.
+def node_extend_list(node, key, length, default):
+ assert type(default) is str or default in ([], {})
+
+ list_node = node[0].get(key)
+ if list_node is None:
+ list_node = node[0][key] = Node([], node[1], node[2], next(_SYNTHETIC_COUNTER))
+
+ assert type(list_node[0]) is list
+
+ the_list = list_node[0]
+ def_type = type(default)
+
+ file_index = node[1]
+ if the_list:
+ line_num = the_list[-1][2]
+ else:
+ line_num = list_node[2]
+
+ while length > len(the_list):
+ if def_type is str:
+ value = default
+ elif def_type is list:
+ value = []
+ else:
+ value = {}
+
+ line_num += 1
+
+ the_list.append(Node(value, file_index, line_num, next(_SYNTHETIC_COUNTER)))
+
+
# node_items()
#
# A convenience generator for iterating over loaded key/value
@@ -424,44 +623,147 @@ def node_get(node, expected_type, key, indices=None, *, default_value=_sentinel,
# (anything): The value for the key
#
def node_items(node):
- for key, value in node.items():
- if key == PROVENANCE_KEY:
- continue
- yield (key, value)
+ if type(node) is not Node:
+ node = Node(node, None, 0, 0)
+ for key, value in node[0].items():
+ if type(value) is not Node:
+ value = Node(value, None, 0, 0)
+ if type(value[0]) is dict:
+ yield (key, value)
+ elif type(value[0]) is list:
+ yield (key, __trim_list_provenance(value[0]))
+ else:
+ yield (key, value[0])
+
+
+# node_del()
+#
+# Deletes the given key and its value from a node loaded
+# from project YAML.
+#
+# Args:
+# node (dict): The dictionary node
+# key (str): The key we want to remove
+# safe (bool): Whether to raise a KeyError if unable
+#
+def node_del(node, key, safe=False):
+ try:
+ del node[0][key]
+ except KeyError:
+ if not safe:
+ raise
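
# Sketch: with safe=True a missing key is tolerated, mirroring
# dict.pop(key, None) rather than a bare del.
node_del(project_config, 'obsolete-key', safe=True)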
+
+# is_node()
+#
+# A test method which returns whether or not the passed-in value
+# is a valid YAML node. It is not valid to call this on a Node
+# object which is not a Mapping.
+#
+# Args:
+# maybenode (any): The object to test for nodeness
+#
+# Returns:
+# (bool): Whether or not maybenode was a Node
+#
+def is_node(maybenode):
+ # It's a programming error to give this a Node which isn't a mapping
+ # so assert that.
+ assert (type(maybenode) is not Node) or (type(maybenode[0]) is dict)
+ # Now return the type check
+ return type(maybenode) is Node
-# Gives a node a dummy provenance, in case of compositing dictionaries
-# where the target is an empty {}
-def ensure_provenance(node):
- provenance = node.get(PROVENANCE_KEY)
- if not provenance:
- provenance = DictProvenance(ProvenanceFile('', '', None), node, node)
- node[PROVENANCE_KEY] = provenance
- return provenance
+# new_synthetic_file()
+#
+# Create a new synthetic mapping node, with an associated file entry
+# (in _FILE_LIST) such that later tracking can correctly determine which
+# file needs writing to in order to persist the changes.
+#
+# Args:
+# filename (str): The name of the synthetic file to create
+# project (Project): The optional project to associate this synthetic file with
+#
+# Returns:
+# (Node): An empty YAML mapping node, whose provenance is to this new
+# synthetic file
+#
+def new_synthetic_file(filename, project=None):
+ file_index = len(_FILE_LIST)
+ node = Node({}, file_index, 0, 0)
+ _FILE_LIST.append((filename,
+ filename,
+ "<synthetic {}>".format(filename),
+ node,
+ project))
+ return node
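
# Sketch: synthetic files back nodes that never came from disk, such as
# generated workspace state; the filename here is illustrative.
workspaces = new_synthetic_file('workspaces.yml', project)
node_set(workspaces, 'format-version', '4')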
-# is_ruamel_str():
+# new_empty_node()
#
# Args:
-# value: A value loaded from ruamel
+# ref_node (Node): Optional node whose provenance should be referenced
#
-# This returns if the value is "stringish", since ruamel
-# has some complex types to represent strings, this is needed
-# to avoid compositing exceptions in order to allow various
-# string types to be interchangable and acceptable
+# Returns:
+# (Node): A new empty YAML mapping node
#
-def is_ruamel_str(value):
+def new_empty_node(ref_node=None):
+ if ref_node is not None:
+ return Node({}, ref_node[1], ref_node[2], next(_SYNTHETIC_COUNTER))
+ else:
+ return Node({}, None, 0, 0)
- if isinstance(value, str):
- return True
- elif isinstance(value, yaml.scalarstring.ScalarString):
- return True
- return False
+# new_node_from_dict()
+#
+# Args:
+# indict (dict): The input dictionary
+#
+# Returns:
+# (Node): A new synthetic YAML tree which represents this dictionary
+#
+def new_node_from_dict(indict):
+ ret = {}
+ for k, v in indict.items():
+ vtype = type(v)
+ if vtype is dict:
+ ret[k] = new_node_from_dict(v)
+ elif vtype is list:
+ ret[k] = __new_node_from_list(v)
+ else:
+ ret[k] = Node(str(v), None, 0, next(_SYNTHETIC_COUNTER))
+ return Node(ret, None, 0, next(_SYNTHETIC_COUNTER))
+
+
+# Internal function to help new_node_from_dict() to handle lists
+def __new_node_from_list(inlist):
+ ret = []
+ for v in inlist:
+ vtype = type(v)
+ if vtype is dict:
+ ret.append(new_node_from_dict(v))
+ elif vtype is list:
+ ret.append(__new_node_from_list(v))
+ else:
+ ret.append(Node(str(v), None, 0, next(_SYNTHETIC_COUNTER)))
+ return Node(ret, None, 0, next(_SYNTHETIC_COUNTER))
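
# Sketch: lifting plain Python data into a provenance-carrying tree.
# Note that scalars are stringified, matching the str(v) calls above.
sandbox_config = new_node_from_dict({'build-uid': 0, 'build-gid': 0})
assert node_get(sandbox_config, str, 'build-uid') == '0'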
+
+
+# node_contains()
+#
+# Args:
+# node (Node): The mapping node to query the contents of
+# entry (str): The key to look for in the mapping node
+#
+# Returns:
+# (bool): Whether entry is in the mapping in node.
+#
+def node_contains(node, entry):
+ assert type(node) is Node
+ return entry in node[0]
-# is_composite_list
+# _is_composite_list
#
# Checks if the given node is a Mapping with array composition
# directives.
@@ -477,9 +779,9 @@ def is_ruamel_str(value):
# (LoadError): If node was a mapping and contained a mix of
# list composition directives and other keys
#
-def is_composite_list(node):
+def _is_composite_list(node):
- if isinstance(node, collections.abc.Mapping):
+ if type(node[0]) is dict:
has_directives = False
has_keys = False
@@ -499,309 +801,156 @@ def is_composite_list(node):
return False
-# composite_list_prepend
-#
-# Internal helper for list composition
-#
-# Args:
-# target_node (dict): A simple dictionary
-# target_key (dict): The key indicating a literal array to prepend to
-# source_node (dict): Another simple dictionary
-# source_key (str): The key indicating an array to prepend to the target
-#
-# Returns:
-# (bool): True if a source list was found and compositing occurred
-#
-def composite_list_prepend(target_node, target_key, source_node, source_key):
-
- source_list = node_get(source_node, list, source_key, default_value=[])
- if not source_list:
- return False
-
- target_provenance = node_get_provenance(target_node)
- source_provenance = node_get_provenance(source_node)
-
- if target_node.get(target_key) is None:
- target_node[target_key] = []
-
- source_list = list_copy(source_list)
- target_list = target_node[target_key]
-
- for element in reversed(source_list):
- target_list.insert(0, element)
-
- if not target_provenance.members.get(target_key):
- target_provenance.members[target_key] = source_provenance.members[source_key].clone()
- else:
- for p in reversed(source_provenance.members[source_key].elements):
- target_provenance.members[target_key].elements.insert(0, p.clone())
-
- return True
-
-
-# composite_list_append
+# _compose_composite_list()
#
-# Internal helper for list composition
+# Composes a composite list (i.e. a dict with list composition directives)
+# on top of a target list which is a composite list itself.
#
# Args:
-# target_node (dict): A simple dictionary
-# target_key (dict): The key indicating a literal array to append to
-# source_node (dict): Another simple dictionary
-# source_key (str): The key indicating an array to append to the target
-#
-# Returns:
-# (bool): True if a source list was found and compositing occurred
-#
-def composite_list_append(target_node, target_key, source_node, source_key):
-
- source_list = node_get(source_node, list, source_key, default_value=[])
- if not source_list:
- return False
-
- target_provenance = node_get_provenance(target_node)
- source_provenance = node_get_provenance(source_node)
-
- if target_node.get(target_key) is None:
- target_node[target_key] = []
-
- source_list = list_copy(source_list)
- target_list = target_node[target_key]
-
- target_list.extend(source_list)
-
- if not target_provenance.members.get(target_key):
- target_provenance.members[target_key] = source_provenance.members[source_key].clone()
+# target (Node): A composite list
+# source (Node): A composite list
+#
+def _compose_composite_list(target, source):
+ clobber = source[0].get("(=)")
+ prefix = source[0].get("(<)")
+ suffix = source[0].get("(>)")
+ if clobber is not None:
+ # Clobbering the target list means replacing its
+ # list directives entirely with our own
+ target[0]["(=)"] = clobber
+ if prefix is not None:
+ target[0]["(<)"] = prefix
+ elif "(<)" in target[0]:
+ target[0]["(<)"][0].clear()
+ if suffix is not None:
+ target[0]["(>)"] = suffix
+ elif "(>)" in target[0]:
+ target[0]["(>)"][0].clear()
else:
- target_provenance.members[target_key].elements.extend([
- p.clone() for p in source_provenance.members[source_key].elements
- ])
-
- return True
-
-
-# composite_list_overwrite
-#
-# Internal helper for list composition
-#
-# Args:
-# target_node (dict): A simple dictionary
-# target_key (dict): The key indicating a literal array to overwrite
-# source_node (dict): Another simple dictionary
-# source_key (str): The key indicating an array to overwrite the target with
-#
-# Returns:
-# (bool): True if a source list was found and compositing occurred
-#
-def composite_list_overwrite(target_node, target_key, source_node, source_key):
-
- # We need to handle the legitimate case of overwriting a list with an empty
- # list, hence the slightly odd default_value of [None] rather than [].
- source_list = node_get(source_node, list, source_key, default_value=[None])
- if source_list == [None]:
- return False
-
- target_provenance = node_get_provenance(target_node)
- source_provenance = node_get_provenance(source_node)
-
- target_node[target_key] = list_copy(source_list)
- target_provenance.members[target_key] = source_provenance.members[source_key].clone()
-
- return True
+ # Not clobbering, so prefix the prefix and suffix the suffix
+ if prefix is not None:
+ if "(<)" in target[0]:
+ for v in reversed(prefix[0]):
+ target[0]["(<)"][0].insert(0, v)
+ else:
+ target[0]["(<)"] = prefix
+ if suffix is not None:
+ if "(>)" in target[0]:
+ target[0]["(>)"][0].extend(suffix[0])
+ else:
+ target[0]["(>)"] = suffix
-# composite_list():
+# _compose_list()
#
-# Composite the source value onto the target value, if either
-# sides are lists, or dictionaries containing list compositing directives
+# Compose a composite list (a dict with composition directives) on top of a
+# simple list.
#
# Args:
-# target_node (dict): A simple dictionary
-# source_node (dict): Another simple dictionary
-# key (str): The key to compose on
+# target (Node): The target list to be composed into
+# source (Node): The composition list to be composed from
#
-# Returns:
-# (bool): True if both sides were logical lists
-#
-# Raises:
-# (LoadError): If one side was a logical list and the other was not
-#
-def composite_list(target_node, source_node, key):
- target_value = target_node.get(key)
- source_value = source_node[key]
-
- target_key_provenance = node_get_provenance(target_node, key)
- source_key_provenance = node_get_provenance(source_node, key)
-
- # Whenever a literal list is encountered in the source, it
- # overwrites the target values and provenance completely.
- #
- if isinstance(source_value, list):
-
- source_provenance = node_get_provenance(source_node)
- target_provenance = node_get_provenance(target_node)
-
- # Assert target type
- if not (target_value is None or
- isinstance(target_value, list) or
- is_composite_list(target_value)):
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: List cannot overwrite value at: {}"
- .format(source_key_provenance, target_key_provenance))
-
- composite_list_overwrite(target_node, key, source_node, key)
- return True
-
- # When a composite list is encountered in the source, then
- # multiple outcomes can occur...
- #
- elif is_composite_list(source_value):
-
- # If there is nothing there, then the composite list
- # is copied in it's entirety as is, and preserved
- # for later composition
- #
- if target_value is None:
- source_provenance = node_get_provenance(source_node)
- target_provenance = node_get_provenance(target_node)
-
- target_node[key] = node_copy(source_value)
- target_provenance.members[key] = source_provenance.members[key].clone()
-
- # If the target is a literal list, then composition
- # occurs directly onto that target, leaving the target
- # as a literal list to overwrite anything in later composition
- #
- elif isinstance(target_value, list):
- composite_list_overwrite(target_node, key, source_value, '(=)')
- composite_list_prepend(target_node, key, source_value, '(<)')
- composite_list_append(target_node, key, source_value, '(>)')
-
- # If the target is a composite list, then composition
- # occurs in the target composite list, and the composite
- # target list is preserved in dictionary form for further
- # composition.
- #
- elif is_composite_list(target_value):
-
- if composite_list_overwrite(target_value, '(=)', source_value, '(=)'):
-
- # When overwriting a target with composition directives, remove any
- # existing prepend/append directives in the target before adding our own
- target_provenance = node_get_provenance(target_value)
+def _compose_list(target, source):
+ clobber = source[0].get("(=)")
+ prefix = source[0].get("(<)")
+ suffix = source[0].get("(>)")
+ if clobber is not None:
+ target[0].clear()
+ target[0].extend(clobber[0])
+ if prefix is not None:
+ for v in reversed(prefix[0]):
+ target[0].insert(0, v)
+ if suffix is not None:
+ target[0].extend(suffix[0])
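
# Worked sketch: with target == [m, n] and source directives
# (<): [a], (>): [z], the target becomes [a, m, n, z] in place;
# a (=) directive would first replace [m, n] entirely.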
- for directive in ['(<)', '(>)']:
- try:
- del target_value[directive]
- del target_provenance.members[directive]
- except KeyError:
- # Ignore errors from deletion of non-existing keys
- pass
- # Prepend to the target prepend array, and append to the append array
- composite_list_prepend(target_value, '(<)', source_value, '(<)')
- composite_list_append(target_value, '(>)', source_value, '(>)')
-
- else:
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: List cannot overwrite value at: {}"
- .format(source_key_provenance, target_key_provenance))
-
- # We handled list composition in some way
- return True
-
- # Source value was not a logical list
- return False
-
-
-# composite_dict():
+# composite_dict()
#
-# Composites values in target with values from source
+# Compose one mapping node onto another
#
# Args:
-# target (dict): A simple dictionary
-# source (dict): Another simple dictionary
+# target (Node): The target to compose into
+# source (Node): The source to compose from
+# path (list): The path to the current composition node
#
# Raises: CompositeError
#
-# Unlike the dictionary update() method, nested values in source
-# will not obsolete entire subdictionaries in target, instead both
-# dictionaries will be recursed and a composition of both will result
-#
-# This is useful for overriding configuration files and element
-# configurations.
-#
def composite_dict(target, source, path=None):
- target_provenance = ensure_provenance(target)
- source_provenance = ensure_provenance(source)
-
- for key, source_value in node_items(source):
-
- # Track the full path of keys, only for raising CompositeError
- if path:
- thispath = path + '.' + key
- else:
- thispath = key
-
- # Handle list composition separately
- if composite_list(target, source, key):
- continue
-
- target_value = target.get(key)
-
- if isinstance(source_value, collections.abc.Mapping):
-
- # Handle creating new dicts on target side
- if target_value is None:
- target_value = {}
- target[key] = target_value
-
- # Give the new dict provenance
- value_provenance = source_value.get(PROVENANCE_KEY)
- if value_provenance:
- target_value[PROVENANCE_KEY] = value_provenance.clone()
-
- # Add a new provenance member element to the containing dict
- target_provenance.members[key] = source_provenance.members[key]
-
- if not isinstance(target_value, collections.abc.Mapping):
- raise CompositeTypeError(thispath, type(target_value), type(source_value))
-
- # Recurse into matching dictionary
- composite_dict(target_value, source_value, path=thispath)
-
+ if path is None:
+ path = []
+ for k, v in source[0].items():
+ path.append(k)
+ if type(v[0]) is list:
+ # List clobbers anything list-like
+ target_value = target[0].get(k)
+ if not (target_value is None or
+ type(target_value[0]) is list or
+ _is_composite_list(target_value)):
+ raise CompositeError(path,
+ "{}: List cannot overwrite {} at: {}"
+ .format(node_get_provenance(source, k),
+ k,
+ node_get_provenance(target, k)))
+ # Looks good, clobber it
+ target[0][k] = v
+ elif _is_composite_list(v):
+ if k not in target[0]:
+ # Composite list clobbers empty space
+ target[0][k] = v
+ elif type(target[0][k][0]) is list:
+ # Composite list composes into a list
+ _compose_list(target[0][k], v)
+ elif _is_composite_list(target[0][k]):
+ # Composite list merges into composite list
+ _compose_composite_list(target[0][k], v)
+ else:
+ # Else composing on top of normal dict or a scalar, so raise...
+ raise CompositeError(path,
+ "{}: Cannot compose lists onto {}".format(
+ node_get_provenance(v),
+ node_get_provenance(target[0][k])))
+ elif type(v[0]) is dict:
+ # We're composing a dict into target now
+ if k not in target[0]:
+ # Target lacks a dict at that point, make a fresh one with
+ # the same provenance as the incoming dict
+ target[0][k] = Node({}, v[1], v[2], v[3])
+ if type(target[0][k][0]) is not dict:
+ raise CompositeError(path,
+ "{}: Cannot compose dictionary onto {}".format(
+ node_get_provenance(v),
+ node_get_provenance(target[0][k])))
+ composite_dict(target[0][k], v, path)
else:
-
- if target_value is not None:
-
- # Exception here: depending on how strings were declared ruamel may
- # use a different type, but for our purposes, any stringish type will do.
- if not (is_ruamel_str(source_value) and is_ruamel_str(target_value)) \
- and not isinstance(source_value, type(target_value)):
- raise CompositeTypeError(thispath, type(target_value), type(source_value))
-
- # Overwrite simple values, lists and mappings have already been handled
- target_provenance.members[key] = source_provenance.members[key].clone()
- target[key] = source_value
+ target_value = target[0].get(k)
+ if target_value is not None and type(target_value[0]) is not str:
+ raise CompositeError(path,
+ "{}: Cannot compose scalar on non-scalar at {}".format(
+ node_get_provenance(v),
+ node_get_provenance(target[0][k])))
+ target[0][k] = v
+ path.pop()
# Like composite_dict(), but raises an all purpose LoadError for convenience
#
def composite(target, source):
- assert hasattr(source, 'get')
+ assert type(source[0]) is dict
+ assert type(target[0]) is dict
- source_provenance = node_get_provenance(source)
try:
composite_dict(target, source)
- except CompositeTypeError as e:
+ except CompositeError as e:
+ source_provenance = node_get_provenance(source)
error_prefix = ""
if source_provenance:
error_prefix = "{}: ".format(source_provenance)
raise LoadError(LoadErrorReason.ILLEGAL_COMPOSITE,
- "{}Expected '{}' type for configuration '{}', instead received '{}'"
+ "{}Failure composing {}: {}"
.format(error_prefix,
- e.expected_type.__name__,
e.path,
- e.actual_type.__name__)) from e
+ e.message)) from e
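
# Hedged end-to-end sketch of composition; both node names are
# assumptions, and any two mapping Nodes behave the same way.
defaults = new_node_from_dict({'variables': {'prefix': '/usr'}})
overrides = new_node_from_dict({'variables': {'prefix': '/opt'}})
composite(defaults, overrides)
assert node_get(node_get(defaults, dict, 'variables'), str, 'prefix') == '/opt'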
# Like composite(target, source), but where target overrides source instead.
@@ -809,64 +958,60 @@ def composite(target, source):
def composite_and_move(target, source):
composite(source, target)
- to_delete = [key for key, _ in node_items(target) if key not in source]
- for key, value in source.items():
- target[key] = value
+ to_delete = [key for key in target[0].keys() if key not in source[0]]
+ for key, value in source[0].items():
+ target[0][key] = value
for key in to_delete:
- del target[key]
-
-
-# SanitizedDict is an OrderedDict that is dumped as unordered mapping.
-# This provides deterministic output for unordered mappings.
-#
-class SanitizedDict(collections.OrderedDict):
- pass
-
-
-RoundTripRepresenter.add_representer(SanitizedDict,
- SafeRepresenter.represent_dict)
+ del target[0][key]
# Types we can short-circuit in node_sanitize for speed.
-__SANITIZE_SHORT_CIRCUIT_TYPES = (int, float, str, bool, tuple)
+__SANITIZE_SHORT_CIRCUIT_TYPES = (int, float, str, bool)
# node_sanitize()
#
-# Returnes an alphabetically ordered recursive copy
+# Returns an alphabetically ordered recursive copy
# of the source node with internal provenance information stripped.
#
# Only dicts are ordered, list elements are left in order.
#
-def node_sanitize(node):
+def node_sanitize(node, *, dict_type=OrderedDict):
+ node_type = type(node)
+
+ # If we have an unwrappable node, unwrap it
+ if node_type is Node:
+ node = node[0]
+ node_type = type(node)
+
# Short-circuit None which occurs ca. twice per element
if node is None:
return node
- node_type = type(node)
# Next short-circuit integers, floats, strings, booleans, and tuples
if node_type in __SANITIZE_SHORT_CIRCUIT_TYPES:
return node
- # Now short-circuit lists. Note this is only for the raw list
- # type, CommentedSeq and others get caught later.
+
+ # Now short-circuit lists.
elif node_type is list:
- return [node_sanitize(elt) for elt in node]
+ return [node_sanitize(elt, dict_type=dict_type) for elt in node]
# Finally dict, and other Mappings need special handling
- if node_type is dict or isinstance(node, collections.abc.Mapping):
- result = SanitizedDict()
+ elif node_type is dict:
+ result = dict_type()
- key_list = [key for key, _ in node_items(node)]
+ key_list = [key for key, _ in node.items()]
for key in sorted(key_list):
- result[key] = node_sanitize(node[key])
+ result[key] = node_sanitize(node[key], dict_type=dict_type)
return result
- # Catch the case of CommentedSeq and friends. This is more rare and so
- # we keep complexity down by still using isinstance here.
- elif isinstance(node, list):
- return [node_sanitize(elt) for elt in node]
- # Everything else (such as commented scalars) just gets returned as-is.
+ # Sometimes we're handed tuples and we can't be sure what they contain
+ # so we have to sanitize into them
+ elif node_type is tuple:
+ return tuple((node_sanitize(v, dict_type=dict_type) for v in node))
+
+ # Everything else just gets returned as-is.
return node
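
# Sketch: stripping provenance for hashing or for handing data to code
# that expects plain Python types; dict_type=dict drops the ordering too.
plain = node_sanitize(project_config, dict_type=dict)
assert type(plain) is dict   # no Node wrappers anywhere in the result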
@@ -888,8 +1033,7 @@ def node_validate(node, valid_keys):
# Probably the fastest way to do this: https://stackoverflow.com/a/23062482
valid_keys = set(valid_keys)
- valid_keys.add(PROVENANCE_KEY)
- invalid = next((key for key in node if key not in valid_keys), None)
+ invalid = next((key for key in node[0] if key not in valid_keys), None)
if invalid:
provenance = node_get_provenance(node, key=invalid)
@@ -908,62 +1052,54 @@ def node_validate(node, valid_keys):
# These types just have their value copied
-__QUICK_TYPES = (str, bool,
- yaml.scalarstring.PreservedScalarString,
- yaml.scalarstring.SingleQuotedScalarString,
- yaml.scalarstring.DoubleQuotedScalarString)
-
-# These types have to be iterated like a dictionary
-__DICT_TYPES = (dict, yaml.comments.CommentedMap)
-
-# These types have to be iterated like a list
-__LIST_TYPES = (list, yaml.comments.CommentedSeq)
-
-# These are the provenance types, which have to be cloned rather than any other
-# copying tactic.
-__PROVENANCE_TYPES = (Provenance, DictProvenance, MemberProvenance, ElementProvenance)
+__QUICK_TYPES = (str, bool)
# These are the directives used to compose lists, we need this because it's
# slightly faster during the node_final_assertions checks
__NODE_ASSERT_COMPOSITION_DIRECTIVES = ('(>)', '(<)', '(=)')
+# node_copy()
+#
+# Make a deep copy of the given YAML node, preserving provenance.
+#
+# Args:
+# source (Node): The YAML node to copy
+#
+# Returns:
+# (Node): A deep copy of source with provenance preserved.
+#
def node_copy(source):
copy = {}
- for key, value in source.items():
- value_type = type(value)
- if value_type in __DICT_TYPES:
+ for key, value in source[0].items():
+ value_type = type(value[0])
+ if value_type is dict:
copy[key] = node_copy(value)
- elif value_type in __LIST_TYPES:
- copy[key] = list_copy(value)
- elif value_type in __PROVENANCE_TYPES:
- copy[key] = value.clone()
+ elif value_type is list:
+ copy[key] = _list_copy(value)
elif value_type in __QUICK_TYPES:
copy[key] = value
else:
raise ValueError("Unable to be quick about node_copy of {}".format(value_type))
- ensure_provenance(copy)
-
- return copy
+ return Node(copy, source[1], source[2], source[3])
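
# Sketch: taking a deep copy before destructive composition, so the
# shared defaults survive for the next consumer of this node. Both
# names here are illustrative.
scratch = node_copy(element_defaults)
composite(scratch, overrides)   # leaves 'element_defaults' untouched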
-def list_copy(source):
+# Internal function to help node_copy() but for lists.
+def _list_copy(source):
copy = []
- for item in source:
- item_type = type(item)
- if item_type in __DICT_TYPES:
+ for item in source[0]:
+ item_type = type(item[0])
+ if item_type is dict:
copy.append(node_copy(item))
- elif item_type in __LIST_TYPES:
- copy.append(list_copy(item))
- elif item_type in __PROVENANCE_TYPES:
- copy.append(item.clone())
+ elif item_type is list:
+ copy.append(_list_copy(item))
elif item_type in __QUICK_TYPES:
copy.append(item)
else:
raise ValueError("Unable to be quick about list_copy of {}".format(item_type))
- return copy
+ return Node(copy, source[1], source[2], source[3])
# node_final_assertions()
@@ -978,7 +1114,9 @@ def list_copy(source):
# (LoadError): If any assertions fail
#
def node_final_assertions(node):
- for key, value in node_items(node):
+ assert type(node) is Node
+
+ for key, value in node[0].items():
# Assert that list composition directives dont remain, this
# indicates that the user intended to override a list which
@@ -989,22 +1127,23 @@ def node_final_assertions(node):
raise LoadError(LoadErrorReason.TRAILING_LIST_DIRECTIVE,
"{}: Attempt to override non-existing list".format(provenance))
- value_type = type(value)
+ value_type = type(value[0])
- if value_type in __DICT_TYPES:
+ if value_type is dict:
node_final_assertions(value)
- elif value_type in __LIST_TYPES:
- list_final_assertions(value)
+ elif value_type is list:
+ _list_final_assertions(value)
-def list_final_assertions(values):
- for value in values:
- value_type = type(value)
+# Helper function for node_final_assertions(), but for lists.
+def _list_final_assertions(values):
+ for value in values[0]:
+ value_type = type(value[0])
- if value_type in __DICT_TYPES:
+ if value_type is dict:
node_final_assertions(value)
- elif value_type in __LIST_TYPES:
- list_final_assertions(value)
+ elif value_type is list:
+ _list_final_assertions(value)
# assert_symbol_name()
@@ -1025,7 +1164,6 @@ def list_final_assertions(values):
# Note that dashes are generally preferred for variable names and
# usage in YAML, but things such as option names which will be
# evaluated with jinja2 cannot use dashes.
-#
def assert_symbol_name(provenance, symbol_name, purpose, *, allow_dashes=True):
valid_chars = string.digits + string.ascii_letters + '_'
if allow_dashes:
@@ -1051,3 +1189,229 @@ def assert_symbol_name(provenance, symbol_name, purpose, *, allow_dashes=True):
raise LoadError(LoadErrorReason.INVALID_SYMBOL_NAME,
message, detail=detail)
+
+
+# node_find_target()
+#
+# Searches the given node tree for the given target node.
+#
+# This is typically used when trying to walk a path to a given node
+# for the purpose of then modifying a similar tree of objects elsewhere
+#
+# Args:
+# node (Node): The node at the root of the tree to search
+# target (Node): The node you are looking for in that tree
+#
+# Returns:
+# (list): A path from `node` to `target`, or None if `target` is not in the subtree
+#
+def node_find_target(node, target):
+ assert type(node) is Node
+ assert type(target) is Node
+
+ path = []
+ if _walk_find_target(node, path, target):
+ return path
+ return None
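
# Sketch: recovering the path to a node so the same steps can be replayed
# against a roundtrip-loaded copy of the file, as Source._save_ref() does
# below. Matching is by provenance (node[1:]), not object identity, and
# both node names here are assumptions.
path = node_find_target(toplevel_node, ref_node)   # e.g. ['sources', 0]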
+
+
+# Helper for node_find_target() which walks a value
+def _walk_find_target(node, path, target):
+ if node[1:] == target[1:]:
+ return True
+ elif type(node[0]) is dict:
+ return _walk_dict_node(node, path, target)
+ elif type(node[0]) is list:
+ return _walk_list_node(node, path, target)
+ return False
+
+
+# Helper for node_find_target() which walks a list
+def _walk_list_node(node, path, target):
+ for i, v in enumerate(node[0]):
+ path.append(i)
+ if _walk_find_target(v, path, target):
+ return True
+ del path[-1]
+ return False
+
+
+# Helper for node_find_target() which walks a mapping
+def _walk_dict_node(node, path, target):
+ for k, v in node[0].items():
+ path.append(k)
+ if _walk_find_target(v, path, target):
+ return True
+ del path[-1]
+ return False
+
+
+###############################################################################
+
+# Roundtrip code
+
+# Always represent things consistently:
+
+yaml.RoundTripRepresenter.add_representer(OrderedDict,
+ yaml.SafeRepresenter.represent_dict)
+
+# Always parse things consistently
+
+yaml.RoundTripConstructor.add_constructor(u'tag:yaml.org,2002:int',
+ yaml.RoundTripConstructor.construct_yaml_str)
+yaml.RoundTripConstructor.add_constructor(u'tag:yaml.org,2002:float',
+ yaml.RoundTripConstructor.construct_yaml_str)
+yaml.RoundTripConstructor.add_constructor(u'tag:yaml.org,2002:bool',
+ yaml.RoundTripConstructor.construct_yaml_str)
+yaml.RoundTripConstructor.add_constructor(u'tag:yaml.org,2002:null',
+ yaml.RoundTripConstructor.construct_yaml_str)
+yaml.RoundTripConstructor.add_constructor(u'tag:yaml.org,2002:timestamp',
+ yaml.RoundTripConstructor.construct_yaml_str)
+
+
+# HardlineDumper
+#
+# This is a dumper used during roundtrip_dump which forces every scalar to be
+# a plain string, in order to match the output format to the input format.
+#
+# If you discover something is broken, please add a test case to the roundtrip
+# test in tests/internals/yaml/roundtrip-test.yaml
+#
+class HardlineDumper(yaml.RoundTripDumper):
+ def __init__(self, *args, **kwargs):
+ yaml.RoundTripDumper.__init__(self, *args, **kwargs)
+ # For each of YAML 1.1 and 1.2, force everything to be a plain string
+ for version in [(1, 1), (1, 2), None]:
+ self.add_version_implicit_resolver(
+ version,
+ u'tag:yaml.org,2002:str',
+ yaml.util.RegExp(r'.*'),
+ None)
+
+
+# roundtrip_load()
+#
+# Load a YAML file into memory in a form which allows roundtripping as best
+# as ruamel permits.
+#
+# Note: the returned objects can be treated as Mappings, Lists and Strings,
+# but replacing content wholesale with plain dicts and lists may result
+# in a loss of comments and formatting.
+#
+# Args:
+# filename (str): The file to load in
+# allow_missing (bool): Optionally set this to True to allow missing files
+#
+# Returns:
+# (Mapping): The loaded YAML mapping.
+#
+# Raises:
+# (LoadError): If the file is missing, or a directory, this is raised.
+# Also if the YAML is malformed.
+#
+def roundtrip_load(filename, *, allow_missing=False):
+ try:
+ with open(filename, "r") as fh:
+ data = fh.read()
+ contents = roundtrip_load_data(data, filename=filename)
+ except FileNotFoundError as e:
+ if allow_missing:
+ # Missing files are always empty dictionaries
+ return {}
+ else:
+ raise LoadError(LoadErrorReason.MISSING_FILE,
+ "Could not find file at {}".format(filename)) from e
+ except IsADirectoryError as e:
+ raise LoadError(LoadErrorReason.LOADING_DIRECTORY,
+ "{} is a directory."
+ .format(filename)) from e
+ return contents
+
+
+# roundtrip_load_data()
+#
+# Parse the given contents as YAML, returning them as a roundtrippable data
+# structure.
+#
+# A lack of content will be returned as an empty mapping.
+#
+# Args:
+# contents (str): The contents to be parsed as YAML
+# filename (str): Optional filename to be used in error reports
+#
+# Returns:
+# (Mapping): The loaded YAML mapping
+#
+# Raises:
+# (LoadError): Raised on invalid YAML, or YAML which parses to something other
+# than a Mapping
+#
+def roundtrip_load_data(contents, *, filename=None):
+ try:
+ contents = yaml.load(contents, yaml.RoundTripLoader, preserve_quotes=True)
+ except (yaml.scanner.ScannerError, yaml.composer.ComposerError, yaml.parser.ParserError) as e:
+ raise LoadError(LoadErrorReason.INVALID_YAML,
+ "Malformed YAML:\n\n{}\n\n{}\n".format(e.problem, e.problem_mark)) from e
+
+ # Special case empty files at this point
+ if contents is None:
+ # We'll make them empty mappings like the main Node loader
+ contents = {}
+
+ if not isinstance(contents, Mapping):
+ raise LoadError(LoadErrorReason.INVALID_YAML,
+ "YAML file has content of type '{}' instead of expected type 'dict': {}"
+ .format(type(contents).__name__, filename))
+
+ return contents
+
+
+# roundtrip_dump()
+#
+# Dumps the given contents as a YAML file. Ideally the contents came from
+# parsing with `roundtrip_load` or `roundtrip_load_data` so that they will be
+# dumped in the same form as they came from.
+#
+# If `file` is a string, it is the filename to write to; if `file` has a
+# `write` method, it is treated as a stream; otherwise output is to stdout.
+#
+# Args:
+# contents (Mapping or list): The content to write out as YAML.
+# file (any): The file to write to
+#
+def roundtrip_dump(contents, file=None):
+ assert type(contents) is not Node
+
+ def stringify_dict(thing):
+ for k, v in thing.items():
+ if type(v) is str:
+ pass
+ elif isinstance(v, Mapping):
+ stringify_dict(v)
+ elif isinstance(v, Sequence):
+ stringify_list(v)
+ else:
+ thing[k] = str(v)
+
+ def stringify_list(thing):
+ for i, v in enumerate(thing):
+ if type(v) is str:
+ pass
+ elif isinstance(v, Mapping):
+ stringify_dict(v)
+ elif isinstance(v, Sequence):
+ stringify_list(v)
+ else:
+ thing[i] = str(v)
+
+ contents = deepcopy(contents)
+ stringify_dict(contents)
+
+ with ExitStack() as stack:
+ if type(file) is str:
+ from . import utils
+ f = stack.enter_context(utils.save_file_atomic(file, 'w'))
+ elif hasattr(file, 'write'):
+ f = file
+ else:
+ f = sys.stdout
+ yaml.round_trip_dump(contents, f, Dumper=HardlineDumper)
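
# Hedged roundtrip sketch: a load/modify/dump cycle should preserve the
# comments and quoting of the untouched parts of the file. The filename
# and key are illustrative.
data = roundtrip_load('project.refs', allow_missing=True)
data['projects'] = data.get('projects', {})
roundtrip_dump(data, 'project.refs')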
diff --git a/buildstream/element.py b/buildstream/element.py
index b27f3e7df..23127d125 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -246,7 +246,7 @@ class Element(Plugin):
# Collect the composited variables and resolve them
variables = self.__extract_variables(meta)
- variables['element-name'] = self.name
+ _yaml.node_set(variables, 'element-name', self.name)
self.__variables = Variables(variables)
# Collect the composited environment now that we have variables
@@ -737,10 +737,11 @@ class Element(Plugin):
ignored = {}
overlaps = OrderedDict()
files_written = {}
- old_dep_keys = {}
+ old_dep_keys = None
workspace = self._get_workspace()
if self.__can_build_incrementally() and workspace.last_successful:
+ # Workspaces do not need to work with the special node types
old_dep_keys = self.__get_artifact_metadata_dependencies(workspace.last_successful)
for dep in self.dependencies(scope):
@@ -752,9 +753,9 @@ class Element(Plugin):
if workspace and old_dep_keys:
dep.__assert_cached()
- if dep.name in old_dep_keys:
+ if _yaml.node_contains(old_dep_keys, dep.name):
key_new = dep._get_cache_key()
- key_old = old_dep_keys[dep.name]
+ key_old = _yaml.node_get(old_dep_keys, str, dep.name)
# We only need to worry about modified and added
# files, since removed files will be picked up by
@@ -875,7 +876,7 @@ class Element(Plugin):
if self.__dynamic_public is None:
self.__load_public_data()
- data = self.__dynamic_public.get(domain)
+ data = _yaml.node_get(self.__dynamic_public, Mapping, domain, default_value=None)
if data is not None:
data = _yaml.node_copy(data)
@@ -899,7 +900,7 @@ class Element(Plugin):
if data is not None:
data = _yaml.node_copy(data)
- self.__dynamic_public[domain] = data
+ _yaml.node_set(self.__dynamic_public, domain, data)
def get_environment(self):
"""Fetch the environment suitable for running in the sandbox
@@ -2172,7 +2173,7 @@ class Element(Plugin):
# Filter out nocache variables from the element's environment
cache_env = {
key: value
- for key, value in self.node_items(self.__environment)
+ for key, value in self.__environment.items()
if key not in self.__env_nocache
}
@@ -2434,15 +2435,15 @@ class Element(Plugin):
# Extend project wide split rules with any split rules defined by the element
_yaml.composite(splits, element_splits)
- element_bst['split-rules'] = splits
- element_public['bst'] = element_bst
- defaults['public'] = element_public
+ _yaml.node_set(element_bst, 'split-rules', splits)
+ _yaml.node_set(element_public, 'bst', element_bst)
+ _yaml.node_set(defaults, 'public', element_public)
def __init_defaults(self, plugin_conf):
# Defaults are loaded once per class and then reused
#
if self.__defaults is None:
- defaults = {}
+ defaults = _yaml.new_empty_node()
if plugin_conf is not None:
# Load the plugin's accompanying .yaml file if one was provided
@@ -2463,7 +2464,7 @@ class Element(Plugin):
else:
elements = project.element_overrides
- overrides = elements.get(self.get_kind())
+ overrides = _yaml.node_get(elements, Mapping, self.get_kind(), default_value=None)
if overrides:
_yaml.composite(defaults, overrides)
@@ -2477,7 +2478,7 @@ class Element(Plugin):
default_env = _yaml.node_get(self.__defaults, Mapping, 'environment', default_value={})
if self.__is_junction:
- environment = {}
+ environment = _yaml.new_empty_node()
else:
project = self._get_project()
environment = _yaml.node_copy(project.base_environment)
@@ -2531,7 +2532,7 @@ class Element(Plugin):
for var in ('project-name', 'element-name', 'max-jobs'):
provenance = _yaml.node_get_provenance(variables, var)
- if provenance and provenance.filename != '':
+ if provenance and not provenance.is_synthetic:
raise LoadError(LoadErrorReason.PROTECTED_VARIABLE_REDEFINED,
"{}: invalid redefinition of protected variable '{}'"
.format(provenance, var))
@@ -2556,8 +2557,10 @@ class Element(Plugin):
#
def __extract_sandbox_config(self, meta):
if self.__is_junction:
- sandbox_config = {'build-uid': 0,
- 'build-gid': 0}
+ sandbox_config = _yaml.new_node_from_dict({
+ 'build-uid': 0,
+ 'build-gid': 0
+ })
else:
project = self._get_project()
project.ensure_fully_loaded()
@@ -2609,23 +2612,24 @@ class Element(Plugin):
# element specific defaults
_yaml.composite(base_splits, element_splits)
- element_bst['split-rules'] = base_splits
- element_public['bst'] = element_bst
+ _yaml.node_set(element_bst, 'split-rules', base_splits)
+ _yaml.node_set(element_public, 'bst', element_bst)
_yaml.node_final_assertions(element_public)
# Also, resolve any variables in the public split rules directly
for domain, splits in self.node_items(base_splits):
- base_splits[domain] = [
+ splits = [
self.__variables.subst(split.strip())
for split in splits
]
+ _yaml.node_set(base_splits, domain, splits)
return element_public
def __init_splits(self):
bstdata = self.get_public_data('bst')
- splits = bstdata.get('split-rules')
+ splits = self.node_get_member(bstdata, dict, 'split-rules')
self.__splits = {
domain: re.compile('^(?:' + '|'.join([utils._glob2re(r) for r in rules]) + ')$')
for domain, rules in self.node_items(splits)
diff --git a/buildstream/plugin.py b/buildstream/plugin.py
index f9c1dd838..19d82769b 100644
--- a/buildstream/plugin.py
+++ b/buildstream/plugin.py
@@ -411,6 +411,69 @@ class Plugin():
"""
return _yaml.node_get(node, expected_type, member_name, default_value=default, allow_none=allow_none)
+ def node_set_member(self, node, key, value):
+ """Set the value of a node member
+ Args:
+ node (node): A dictionary loaded from YAML
+ key (str): The key name
+ value: The value
+
+ Returns:
+ None
+
+ Raises:
+ None
+
+ **Example:**
+
+ .. code:: python
+
+ # Set a string 'tomjon' in node[name]
+ self.node_set_member(node, 'name', 'tomjon')
+ """
+ _yaml.node_set(node, key, value)
+
+ def node_has_member(self, node, key):
+ """Essentially the `key in node` test
+ Args:
+ node (node): A dictionary loaded from YAML
+ key (str): The key name
+
+ Returns:
+ bool: Whether or not key is in node
+
+ Raises:
+ None
+
+ **Example:**
+
+ .. code:: python
+
+ # Check whether `name` is set in `node`
+ present = self.node_has_member(node, 'name')
+ """
+ return _yaml.node_contains(node, key)
+
+ def new_empty_node(self):
+ """Create an empty 'Node' object to be handled by BuildStream's core
+ Args:
+ None
+
+ Returns:
+ Node: An empty Node object
+
+ Raises:
+ None
+
+ **Example:**
+
+ .. code:: python
+
+ # Create an empty Node object to store metadata information
+ metadata = self.new_empty_node()
+ """
+ return _yaml.new_empty_node()
+
def node_get_project_path(self, node, key, *,
check_is_file=False, check_is_dir=False):
"""Fetches a project path from a dictionary node and validates it
@@ -841,10 +904,12 @@ class Plugin():
else:
silenced_warnings = set()
project = self.__project
- plugin_overrides = {**project.element_overrides, **project.source_overrides}
- for key, value in self.node_items(plugin_overrides):
- if value.get('suppress-deprecation-warnings', False):
+ for key, value in self.node_items(project.element_overrides):
+ if _yaml.node_get(value, bool, 'suppress-deprecation-warnings', default_value=False):
+ silenced_warnings.add(key)
+ for key, value in self.node_items(project.source_overrides):
+ if _yaml.node_get(value, bool, 'suppress-deprecation-warnings', default_value=False):
silenced_warnings.add(key)
return self.get_kind() in silenced_warnings
diff --git a/buildstream/plugins/elements/filter.py b/buildstream/plugins/elements/filter.py
index 232f4ccca..cbb4fe7af 100644
--- a/buildstream/plugins/elements/filter.py
+++ b/buildstream/plugins/elements/filter.py
@@ -217,14 +217,14 @@ class FilterElement(Element):
for dep in self.dependencies(Scope.BUILD, recurse=False):
# Check that all the included/excluded domains exist
pub_data = dep.get_public_data('bst')
- split_rules = pub_data.get('split-rules', {})
+ split_rules = self.node_get_member(pub_data, dict, 'split-rules', {})
unfound_includes = []
for domain in self.include:
- if domain not in split_rules:
+ if not self.node_has_member(split_rules, domain):
unfound_includes.append(domain)
unfound_excludes = []
for domain in self.exclude:
- if domain not in split_rules:
+ if not self.node_has_member(split_rules, domain):
unfound_excludes.append(domain)
detail = []
diff --git a/buildstream/plugintestutils/runcli.py b/buildstream/plugintestutils/runcli.py
index 2320189bd..1ddb95745 100644
--- a/buildstream/plugintestutils/runcli.py
+++ b/buildstream/plugintestutils/runcli.py
@@ -53,6 +53,7 @@ from _pytest.capture import MultiCapture, FDCapture, FDCaptureBinary
from buildstream._frontend import cli as bst_cli
from buildstream import _yaml
+
# Special private exception accessor, for test case purposes
from buildstream._exceptions import BstError, get_last_exception, get_last_task_error
diff --git a/buildstream/sandbox/_sandboxremote.py b/buildstream/sandbox/_sandboxremote.py
index ada8268c0..f7ee6471f 100644
--- a/buildstream/sandbox/_sandboxremote.py
+++ b/buildstream/sandbox/_sandboxremote.py
@@ -107,7 +107,7 @@ class SandboxRemote(Sandbox):
def specs_from_config_node(config_node, basedir=None):
def require_node(config, keyname):
- val = config.get(keyname)
+ val = _yaml.node_get(config, dict, keyname, default_value=None)
if val is None:
provenance = _yaml.node_get_provenance(remote_config, key=keyname)
raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
@@ -116,7 +116,7 @@ class SandboxRemote(Sandbox):
.format(str(provenance), keyname))
return val
- remote_config = config_node.get("remote-execution", None)
+ remote_config = _yaml.node_get(config_node, dict, 'remote-execution', default_value=None)
if remote_config is None:
return None
@@ -126,7 +126,7 @@ class SandboxRemote(Sandbox):
exec_config = require_node(remote_config, 'execution-service')
storage_config = require_node(remote_config, 'storage-service')
- action_config = remote_config.get('action-cache-service', {})
+ action_config = _yaml.node_get(remote_config, dict, 'action-cache-service', default_value={})
tls_keys = ['client-key', 'client-cert', 'server-cert']
@@ -137,9 +137,9 @@ class SandboxRemote(Sandbox):
# Maintain some backwards compatibility with older configs, in which
# 'url' was the only valid key for remote-execution:
- if 'url' in remote_config:
- if 'execution-service' not in remote_config:
- exec_config = {'url': remote_config['url']}
+ if _yaml.node_contains(remote_config, 'url'):
+ if not _yaml.node_contains(remote_config, 'execution-service'):
+ exec_config = _yaml.new_node_from_dict({'url': remote_config['url']})
else:
provenance = _yaml.node_get_provenance(remote_config, key='url')
raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
@@ -158,7 +158,7 @@ class SandboxRemote(Sandbox):
for config_key, config in zip(service_keys, service_configs):
# Either both or none of the TLS client key/cert pair must be specified:
- if ('client-key' in config) != ('client-cert' in config):
+ if _yaml.node_contains(config, 'client-key') != _yaml.node_contains(config, 'client-cert'):
provenance = _yaml.node_get_provenance(remote_config, key=config_key)
raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
"{}: TLS client key/cert pair is incomplete. "
@@ -167,10 +167,10 @@ class SandboxRemote(Sandbox):
.format(str(provenance)))
for tls_key in tls_keys:
- if tls_key in config:
- config[tls_key] = resolve_path(config[tls_key])
+ if _yaml.node_contains(config, tls_key):
+ _yaml.node_set(config, tls_key, resolve_path(_yaml.node_get(config, str, tls_key)))
- return RemoteExecutionSpec(*service_configs)
+ return RemoteExecutionSpec(*[_yaml.node_sanitize(conf) for conf in service_configs])
def run_remote_command(self, channel, action_digest):
# Sends an execution request to the remote execution server.
diff --git a/buildstream/source.py b/buildstream/source.py
index 9e1a8ef3e..36885ee2a 100644
--- a/buildstream/source.py
+++ b/buildstream/source.py
@@ -864,57 +864,117 @@ class Source(Plugin):
#
# Step 1 - Obtain the node
#
- if project is toplevel:
- if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
- node = toplevel_refs.lookup_ref(project.name, element_name, element_idx, write=True)
- else:
- node = provenance.node
- else:
- if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
- node = toplevel_refs.lookup_ref(project.name, element_name, element_idx, write=True)
+ node = {}
+ if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
+ node = toplevel_refs.lookup_ref(project.name, element_name, element_idx, write=True)
+
+ if project is toplevel and not node:
+ node = provenance.node
+
+ # Ensure the node is not from a junction
+ if not toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS and provenance.project is not toplevel:
+ if provenance.project is project:
+ self.warn("{}: Not persisting new reference in junctioned project".format(self))
+ elif provenance.project is None:
+ assert provenance.filename == ""
+ assert provenance.shortname == ""
+ raise SourceError("{}: Error saving source reference to synthetic node."
+ .format(self))
else:
- node = {}
+ raise SourceError("{}: Cannot track source in a fragment from a junction"
+ .format(provenance.shortname),
+ reason="tracking-junction-fragment")
#
# Step 2 - Set the ref in memory, and determine changed state
#
- if not self._set_ref(new_ref, node):
+ clean = _yaml.node_sanitize(node, dict_type=dict)
+ to_modify = _yaml.node_sanitize(node, dict_type=dict)
+ if not self._set_ref(new_ref, to_modify):
+ # Note: We do not look for and propagate changes at this point
+ # which might result in desync depending if something changes about
+ # tracking in the future. For now, this is quite safe.
return False
- def do_save_refs(refs):
- try:
- refs.save()
- except OSError as e:
- raise SourceError("{}: Error saving source reference to 'project.refs': {}"
- .format(self, e),
- reason="save-ref-error") from e
+ actions = {}
+ for k, v in clean.items():
+ if k not in to_modify:
+ actions[k] = 'del'
+ else:
+ if v != to_modify[k]:
+ actions[k] = 'mod'
+ for k in to_modify.keys():
+ if k not in clean:
+ actions[k] = 'add'
+
+ def walk_container(container, path):
+ # For each step along path, synthesise if we need to.
+ # If we're synthesising missing list entries, we know we're
+ # doing this for project.refs so synthesise empty dicts for the
+ # intervening entries too
+ lpath = [step for step in path]
+ lpath.append("") # We know the last step will be a string key
+ for step, next_step in zip(lpath, lpath[1:]):
+ if type(step) is str: # pylint: disable=unidiomatic-typecheck
+ # handle dict container
+ if step not in container:
+ if type(next_step) is str: # pylint: disable=unidiomatic-typecheck
+ container[step] = {}
+ else:
+ container[step] = []
+ container = container[step]
+ else:
+ # handle list container
+ if len(container) <= step:
+ while len(container) <= step:
+ container.append({})
+ container = container[step]
+ return container
+
+ def process_value(action, container, path, key, new_value):
+ container = walk_container(container, path)
+ if action == 'del':
+ del container[key]
+ elif action == 'mod':
+ container[key] = new_value
+ elif action == 'add':
+ container[key] = new_value
+ else:
+ assert False, \
+ "BUG: Unknown action: {}".format(action)
+
+ roundtrip_cache = {}
+ for key, action in actions.items():
+ # Obtain the top level node and its file
+ if action == 'add':
+ provenance = _yaml.node_get_provenance(node)
+ else:
+ provenance = _yaml.node_get_provenance(node, key=key)
+ toplevel_node = provenance.toplevel
+
+ # Get the path to whatever changed
+ path = _yaml.node_find_target(toplevel_node, node)
+ roundtrip_file = roundtrip_cache.get(provenance.filename)
+ if not roundtrip_file:
+ roundtrip_file = roundtrip_cache[provenance.filename] = _yaml.roundtrip_load(
+ provenance.filename,
+ allow_missing=True
+ )
+
+ # Get the value of the round trip file that we need to change
+ process_value(action, roundtrip_file, path, key, to_modify.get(key))
#
# Step 3 - Apply the change in project data
#
- if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
- do_save_refs(toplevel_refs)
- else:
- if provenance.filename.project is toplevel:
- # Save the ref in the originating file
- #
- try:
- _yaml.dump(provenance.toplevel, provenance.filename.name)
- except OSError as e:
- raise SourceError("{}: Error saving source reference to '{}': {}"
- .format(self, provenance.filename.name, e),
- reason="save-ref-error") from e
- elif provenance.filename.project is project:
- self.warn("{}: Not persisting new reference in junctioned project".format(self))
- elif provenance.filename.project is None:
- assert provenance.filename.name == ''
- assert provenance.filename.shortname == ''
- raise SourceError("{}: Error saving source reference to synthetic node."
- .format(self))
- else:
- raise SourceError("{}: Cannot track source in a fragment from a junction"
- .format(provenance.filename.shortname),
- reason="tracking-junction-fragment")
+ for filename, data in roundtrip_cache.items():
+ # This is our roundtrip dump from the track
+ try:
+ _yaml.roundtrip_dump(data, filename)
+ except OSError as e:
+ raise SourceError("{}: Error saving source reference to '{}': {}"
+ .format(self, filename, e),
+ reason="save-ref-error") from e
return True
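
# Worked sketch of the diff-and-replay scheme above: sanitize two copies,
# let _set_ref() mutate one, then derive per-key actions to apply to the
# roundtrip-loaded file so unrelated formatting survives. For example:
#   clean     == {'track': 'master'}
#   to_modify == {'track': 'master', 'ref': 'abc123'}
#   actions   == {'ref': 'add'}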
@@ -1164,7 +1224,7 @@ class Source(Plugin):
sources = project.first_pass_config.source_overrides
else:
sources = project.source_overrides
- type(self).__defaults = sources.get(self.get_kind(), {})
+ type(self).__defaults = _yaml.node_get(sources, Mapping, self.get_kind(), default_value={})
type(self).__defaults_set = True
# This will resolve the final configuration to be handed
diff --git a/tests/artifactcache/junctions.py b/tests/artifactcache/junctions.py
index 693a3eb1f..6655a4f1d 100644
--- a/tests/artifactcache/junctions.py
+++ b/tests/artifactcache/junctions.py
@@ -29,11 +29,9 @@ def assert_shared(cli, share, project_name, project, element_name):
def project_set_artifacts(project, url):
project_conf_file = os.path.join(project, 'project.conf')
project_config = _yaml.load(project_conf_file)
- project_config.update({
- 'artifacts': {
- 'url': url,
- 'push': True
- }
+ _yaml.node_set(project_config, 'artifacts', {
+ 'url': url,
+ 'push': True
})
_yaml.dump(_yaml.node_sanitize(project_config), filename=project_conf_file)
diff --git a/tests/elements/filter.py b/tests/elements/filter.py
index afc428641..614d1bc68 100644
--- a/tests/elements/filter.py
+++ b/tests/elements/filter.py
@@ -239,7 +239,9 @@ def test_filter_track(datafiles, cli, tmpdir):
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- assert new_input["sources"][0]["ref"] == ref
+ source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
+ new_input_ref = _yaml.node_get(source_node, str, 'ref')
+ assert new_input_ref == ref
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
@@ -292,7 +294,8 @@ def test_filter_track_excepted(datafiles, cli, tmpdir):
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- assert "ref" not in new_input["sources"][0]
+ source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
+ assert not _yaml.node_contains(source_node, 'ref')
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
@@ -345,7 +348,9 @@ def test_filter_track_multi_to_one(datafiles, cli, tmpdir):
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- assert new_input["sources"][0]["ref"] == ref
+ source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
+ new_ref = _yaml.node_get(source_node, str, 'ref')
+ assert new_ref == ref
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
@@ -408,9 +413,14 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- assert new_input["sources"][0]["ref"] == ref
+ source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
+ new_ref = _yaml.node_get(source_node, str, 'ref')
+ assert new_ref == ref
+
new_input2 = _yaml.load(input2_file)
- assert new_input2["sources"][0]["ref"] == ref
+ source_node2 = _yaml.node_get(new_input2, dict, 'sources', indices=[0])
+ new_ref2 = _yaml.node_get(source_node2, str, 'ref')
+ assert new_ref2 == ref
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
@@ -472,9 +482,13 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- assert "ref" not in new_input["sources"][0]
+ source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
+ assert not _yaml.node_contains(source_node, 'ref')
+
new_input2 = _yaml.load(input2_file)
- assert new_input2["sources"][0]["ref"] == ref
+ source_node2 = _yaml.node_get(new_input2, dict, 'sources', indices=[0])
+ new_ref2 = _yaml.node_get(source_node2, str, 'ref')
+ assert new_ref2 == ref
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
diff --git a/tests/elements/filter/basic/element_plugins/dynamic.py b/tests/elements/filter/basic/element_plugins/dynamic.py
index 1208a4a4d..c6d128b72 100644
--- a/tests/elements/filter/basic/element_plugins/dynamic.py
+++ b/tests/elements/filter/basic/element_plugins/dynamic.py
@@ -25,7 +25,7 @@ class DynamicElement(Element):
dep.stage_artifact(sandbox)
bstdata = self.get_public_data("bst")
- bstdata["split-rules"] = self.split_rules
+ self.node_set_member(bstdata, "split-rules", self.split_rules)
self.set_public_data("bst", bstdata)
return ""
diff --git a/tests/format/include.py b/tests/format/include.py
index 455ccd47d..308e1751b 100644
--- a/tests/format/include.py
+++ b/tests/format/include.py
@@ -28,7 +28,7 @@ def test_include_project_file(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['included'] == 'True'
+ assert _yaml.node_get(loaded, bool, 'included')
def test_include_missing_file(cli, tmpdir):
@@ -87,7 +87,7 @@ def test_include_junction_file(cli, tmpdir, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['included'] == 'True'
+ assert _yaml.node_get(loaded, bool, 'included')
@pytest.mark.datafiles(DATA_DIR)
@@ -102,7 +102,7 @@ def test_include_junction_options(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['build_arch'] == 'x86_64'
+ assert _yaml.node_get(loaded, str, 'build_arch') == 'x86_64'
@pytest.mark.datafiles(DATA_DIR)
@@ -135,7 +135,7 @@ def test_junction_element_partial_project_project(cli, tmpdir, datafiles):
'junction.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert 'included' not in loaded
+ assert _yaml.node_get(loaded, str, 'included', default_value=None) is None
@pytest.mark.datafiles(DATA_DIR)
@@ -168,7 +168,7 @@ def test_junction_element_not_partial_project_file(cli, tmpdir, datafiles):
'junction.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert 'included' in loaded
+ assert _yaml.node_get(loaded, str, 'included', default_value=None) is not None
@pytest.mark.datafiles(DATA_DIR)
@@ -182,8 +182,8 @@ def test_include_element_overrides(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert 'manual_main_override' in loaded
- assert 'manual_included_override' in loaded
+ assert _yaml.node_get(loaded, str, 'manual_main_override', default_value=None) is not None
+ assert _yaml.node_get(loaded, str, 'manual_included_override', default_value=None) is not None
@pytest.mark.datafiles(DATA_DIR)
@@ -197,8 +197,7 @@ def test_include_element_overrides_composition(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert 'build-commands' in loaded
- assert loaded['build-commands'] == ['first', 'second']
+ assert _yaml.node_get(loaded, list, 'build-commands') == ['first', 'second']
@pytest.mark.datafiles(DATA_DIR)
@@ -214,8 +213,9 @@ def test_list_overide_does_not_fail_upon_first_composition(cli, datafiles):
loaded = _yaml.load_data(result.output)
# Assert that the explicitly overwritten public data is present
- assert 'foo-commands' in loaded['bst']
- assert loaded['bst']['foo-commands'] == ['need', 'this']
+ bst = _yaml.node_get(loaded, dict, 'bst')
+ assert _yaml.node_contains(bst, 'foo-commands')
+ assert _yaml.node_get(bst, list, 'foo-commands') == ['need', 'this']
@pytest.mark.datafiles(DATA_DIR)
@@ -229,7 +229,7 @@ def test_include_element_overrides_sub_include(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert 'included' in loaded
+ assert _yaml.node_get(loaded, str, 'included', default_value=None) is not None
@pytest.mark.datafiles(DATA_DIR)
@@ -248,8 +248,8 @@ def test_junction_do_not_use_included_overrides(cli, tmpdir, datafiles):
'junction.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert 'main_override' in loaded
- assert 'included_override' not in loaded
+ assert _yaml.node_get(loaded, str, 'main_override', default_value=None) is not None
+ assert _yaml.node_get(loaded, str, 'included_override', default_value=None) is None
@pytest.mark.datafiles(DATA_DIR)
@@ -264,8 +264,7 @@ def test_conditional_in_fragment(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert 'size' in loaded
- assert loaded['size'] == '8'
+ assert _yaml.node_get(loaded, str, 'size') == '8'
@pytest.mark.datafiles(DATA_DIR)
@@ -279,7 +278,7 @@ def test_inner(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['build_arch'] == 'x86_64'
+ assert _yaml.node_get(loaded, str, 'build_arch') == 'x86_64'
@pytest.mark.datafiles(DATA_DIR)
@@ -311,4 +310,4 @@ def test_local_to_junction(cli, tmpdir, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['included'] == 'True'
+ assert _yaml.node_get(loaded, bool, 'included')
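
Every hunk in tests/format/include.py above follows the same migration pattern: the loaded document is now an opaque Node, so plain membership tests and subscripting give way to typed lookups, with default_value acting as the sentinel that used to be spelled `in`. A minimal sketch of the shape, assuming `loaded` is any Node returned by _yaml.load_data() and 'missing-key' is a placeholder:

from buildstream import _yaml

# Old world: nodes behaved like plain dicts
#   assert 'included' in loaded
#   assert loaded['included'] == 'True'

# New world: typed accessors; scalars are stored as strings, so asking
# for bool coerces the 'True'/'False' text, and default_value=None turns
# a missing key into None instead of a LoadError
assert _yaml.node_get(loaded, bool, 'included')
assert _yaml.node_get(loaded, str, 'missing-key', default_value=None) is None
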
diff --git a/tests/format/include_composition.py b/tests/format/include_composition.py
index 5a060132a..4afde817d 100644
--- a/tests/format/include_composition.py
+++ b/tests/format/include_composition.py
@@ -28,7 +28,7 @@ def test_main_has_prority(tmpdir):
includes.process(main)
- assert main['test'] == ['main']
+ assert _yaml.node_get(main, list, 'test') == ['main']
def test_include_cannot_append(tmpdir):
@@ -44,7 +44,7 @@ def test_include_cannot_append(tmpdir):
includes.process(main)
- assert main['test'] == ['main']
+ assert _yaml.node_get(main, list, 'test') == ['main']
def test_main_can_append(tmpdir):
@@ -60,7 +60,7 @@ def test_main_can_append(tmpdir):
includes.process(main)
- assert main['test'] == ['a', 'main']
+ assert _yaml.node_get(main, list, 'test') == ['a', 'main']
def test_sibling_cannot_append_backward(tmpdir):
@@ -77,7 +77,7 @@ def test_sibling_cannot_append_backward(tmpdir):
includes.process(main)
- assert main['test'] == ['b']
+ assert _yaml.node_get(main, list, 'test') == ['b']
def test_sibling_can_append_forward(tmpdir):
@@ -94,7 +94,7 @@ def test_sibling_can_append_forward(tmpdir):
includes.process(main)
- assert main['test'] == ['a', 'b']
+ assert _yaml.node_get(main, list, 'test') == ['a', 'b']
def test_lastest_sibling_has_priority(tmpdir):
@@ -111,7 +111,7 @@ def test_lastest_sibling_has_priority(tmpdir):
includes.process(main)
- assert main['test'] == ['b']
+ assert _yaml.node_get(main, list, 'test') == ['b']
def test_main_keeps_keys(tmpdir):
@@ -127,8 +127,8 @@ def test_main_keeps_keys(tmpdir):
includes.process(main)
- assert main['test'] == ['a']
- assert main['something'] == 'else'
+ assert _yaml.node_get(main, list, 'test') == ['a']
+ assert _yaml.node_get(main, str, 'something') == 'else'
def test_overwrite_directive_on_later_composite(tmpdir):
@@ -151,5 +151,5 @@ def test_overwrite_directive_on_later_composite(tmpdir):
includes.process(main)
- assert main['test'] == ['Overwritten']
- assert main['foo'] == 'should be present'
+ assert _yaml.node_get(main, list, 'test') == ['Overwritten']
+ assert _yaml.node_get(main, str, 'foo') == 'should be present'
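
node_get() is also where type checking now lives: asking for a type the stored value cannot satisfy raises LoadError with reason INVALID_DATA rather than silently handing the value back (the internals/yaml.py tests further down pin this behaviour). A short sketch, assuming `main` is a Node whose 'test' key holds the list ['main']:

from buildstream import _yaml
from buildstream._exceptions import LoadError, LoadErrorReason

assert _yaml.node_get(main, list, 'test') == ['main']
try:
    _yaml.node_get(main, int, 'test')   # a list cannot satisfy int
except LoadError as e:
    assert e.reason == LoadErrorReason.INVALID_DATA
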
diff --git a/tests/format/optionarch.py b/tests/format/optionarch.py
index 8358526b1..86363b834 100644
--- a/tests/format/optionarch.py
+++ b/tests/format/optionarch.py
@@ -64,7 +64,7 @@ def test_conditional(cli, datafiles, uname, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['result'] == expected
+ assert _yaml.node_get(loaded, str, 'result') == expected
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/format/optionbool.py b/tests/format/optionbool.py
index 74b0910bf..40f8dfb33 100644
--- a/tests/format/optionbool.py
+++ b/tests/format/optionbool.py
@@ -42,7 +42,7 @@ def test_conditional_cli(cli, datafiles, target, option, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['thepony'] == expected
+ assert _yaml.node_get(loaded, str, 'thepony') == expected
# Test configuration of boolean option in the config file
@@ -71,7 +71,7 @@ def test_conditional_config(cli, datafiles, target, option, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['thepony'] == expected
+ assert _yaml.node_get(loaded, str, 'thepony') == expected
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/format/optioneltmask.py b/tests/format/optioneltmask.py
index 6305636ee..08f002b58 100644
--- a/tests/format/optioneltmask.py
+++ b/tests/format/optioneltmask.py
@@ -28,7 +28,7 @@ def test_conditional_cli(cli, datafiles, target, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['debug'] == expected
+ assert _yaml.node_get(loaded, str, 'debug') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -56,7 +56,7 @@ def test_conditional_config(cli, datafiles, target, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['debug'] == expected
+ assert _yaml.node_get(loaded, str, 'debug') == expected
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/format/optionenum.py b/tests/format/optionenum.py
index f33423433..d729cbfee 100644
--- a/tests/format/optionenum.py
+++ b/tests/format/optionenum.py
@@ -33,7 +33,7 @@ def test_conditional_cli(cli, datafiles, target, option, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['result'] == expected
+ assert _yaml.node_get(loaded, str, 'result') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -66,7 +66,7 @@ def test_conditional_config(cli, datafiles, target, option, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['result'] == expected
+ assert _yaml.node_get(loaded, str, 'result') == expected
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/format/optionexports.py b/tests/format/optionexports.py
index f7cce0c92..8ad063e74 100644
--- a/tests/format/optionexports.py
+++ b/tests/format/optionexports.py
@@ -36,4 +36,4 @@ def test_export(cli, datafiles, option_name, option_value, var_name, var_value):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded[var_name] == var_value
+ assert _yaml.node_get(loaded, str, var_name) == var_value
diff --git a/tests/format/optionflags.py b/tests/format/optionflags.py
index e18f6c960..fe61870d7 100644
--- a/tests/format/optionflags.py
+++ b/tests/format/optionflags.py
@@ -39,7 +39,7 @@ def test_conditional_cli(cli, datafiles, target, option, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['result'] == expected
+ assert _yaml.node_get(loaded, str, 'result') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -69,7 +69,7 @@ def test_conditional_config(cli, datafiles, target, option, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['result'] == expected
+ assert _yaml.node_get(loaded, str, 'result') == expected
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/format/optionos.py b/tests/format/optionos.py
index 8e464ae5f..989989a99 100644
--- a/tests/format/optionos.py
+++ b/tests/format/optionos.py
@@ -62,7 +62,7 @@ def test_conditionals(cli, datafiles, uname, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['result'] == expected
+ assert _yaml.node_get(loaded, str, 'result') == expected
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/format/optionoverrides.py b/tests/format/optionoverrides.py
index dc4e7c3a1..2e4d26a16 100644
--- a/tests/format/optionoverrides.py
+++ b/tests/format/optionoverrides.py
@@ -29,4 +29,4 @@ def test_override(cli, datafiles, arch):
expected_value = '--host={}-unknown-linux-gnu'.format(arch)
loaded = _yaml.load_data(result.output)
- assert loaded['conf-global'] == expected_value
+ assert _yaml.node_get(loaded, str, 'conf-global') == expected_value
diff --git a/tests/format/options.py b/tests/format/options.py
index 7be140105..c9f29d64d 100644
--- a/tests/format/options.py
+++ b/tests/format/options.py
@@ -136,7 +136,7 @@ def test_simple_conditional(cli, datafiles, opt_option, expected_prefix):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['prefix'] == expected_prefix
+ assert _yaml.node_get(loaded, str, 'prefix') == expected_prefix
@pytest.mark.datafiles(DATA_DIR)
@@ -159,7 +159,7 @@ def test_nested_conditional(cli, datafiles, debug, logging, expected):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['debug'] == expected
+ assert _yaml.node_get(loaded, str, 'debug') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -182,7 +182,7 @@ def test_compound_and_conditional(cli, datafiles, debug, logging, expected):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['debug'] == expected
+ assert _yaml.node_get(loaded, str, 'debug') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -205,7 +205,7 @@ def test_compound_or_conditional(cli, datafiles, debug, logging, expected):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded['logging'] == expected
+ assert _yaml.node_get(loaded, str, 'logging') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -223,10 +223,10 @@ def test_deep_nesting_level1(cli, datafiles, option, expected):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- shallow_list = loaded['shallow-nest']
+ shallow_list = _yaml.node_get(loaded, list, 'shallow-nest')
first_dict = shallow_list[0]
- assert first_dict['animal'] == expected
+ assert _yaml.node_get(first_dict, str, 'animal') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -244,8 +244,8 @@ def test_deep_nesting_level2(cli, datafiles, option, expected):
'element-deeper.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- shallow_list = loaded['deep-nest']
+ shallow_list = _yaml.node_get(loaded, list, 'deep-nest')
deeper_list = shallow_list[0]
first_dict = deeper_list[0]
- assert first_dict['animal'] == expected
+ assert _yaml.node_get(first_dict, str, 'animal') == expected
diff --git a/tests/format/project.py b/tests/format/project.py
index b9b5898be..5813bb389 100644
--- a/tests/format/project.py
+++ b/tests/format/project.py
@@ -82,8 +82,8 @@ def test_load_default_project(cli, datafiles):
# Read back some of our project defaults from the env
env = _yaml.load_data(result.output)
- assert env['USER'] == "tomjon"
- assert env['TERM'] == "dumb"
+ assert _yaml.node_get(env, str, 'USER') == "tomjon"
+ assert _yaml.node_get(env, str, 'TERM') == "dumb"
@pytest.mark.datafiles(os.path.join(DATA_DIR))
@@ -97,8 +97,8 @@ def test_load_project_from_subdir(cli, datafiles):
# Read back some of our project defaults from the env
env = _yaml.load_data(result.output)
- assert env['USER'] == "tomjon"
- assert env['TERM'] == "dumb"
+ assert _yaml.node_get(env, str, 'USER') == "tomjon"
+ assert _yaml.node_get(env, str, 'TERM') == "dumb"
@pytest.mark.datafiles(os.path.join(DATA_DIR))
@@ -111,7 +111,7 @@ def test_override_project_path(cli, datafiles):
# Read back the overridden path
env = _yaml.load_data(result.output)
- assert env['PATH'] == "/bin:/sbin"
+ assert _yaml.node_get(env, str, 'PATH') == "/bin:/sbin"
@pytest.mark.datafiles(os.path.join(DATA_DIR))
diff --git a/tests/format/projectoverrides.py b/tests/format/projectoverrides.py
index 25360e1e5..60851b9d5 100644
--- a/tests/format/projectoverrides.py
+++ b/tests/format/projectoverrides.py
@@ -24,6 +24,6 @@ def test_prepend_configure_commands(cli, datafiles):
result.assert_success()
loaded = _yaml.load_data(result.output)
- config_commands = loaded['configure-commands']
+ config_commands = _yaml.node_get(loaded, list, 'configure-commands')
assert len(config_commands) == 3
assert config_commands[0] == 'echo "Hello World!"'
diff --git a/tests/format/variables.py b/tests/format/variables.py
index fe8d8ae8a..b62de7460 100644
--- a/tests/format/variables.py
+++ b/tests/format/variables.py
@@ -33,7 +33,7 @@ def print_warning(msg):
('autotools.bst', 'make-install', "make -j1 DESTDIR=\"/buildstream-install\" install"),
('cmake.bst', 'cmake',
"cmake -B_builddir -H\".\" -G\"Unix Makefiles\" " + "-DCMAKE_INSTALL_PREFIX:PATH=\"/usr\" \\\n" +
- "-DCMAKE_INSTALL_LIBDIR:PATH=\"lib\" "),
+ "-DCMAKE_INSTALL_LIBDIR:PATH=\"lib\""),
('distutils.bst', 'python-install',
"python3 ./setup.py install --prefix \"/usr\" \\\n" +
"--root \"/buildstream-install\""),
@@ -49,7 +49,7 @@ def test_defaults(cli, datafiles, target, varname, expected):
])
result.assert_success()
result_vars = _yaml.load_data(result.output)
- assert result_vars[varname] == expected
+ assert _yaml.node_get(result_vars, str, varname) == expected
################################################################
@@ -59,7 +59,7 @@ def test_defaults(cli, datafiles, target, varname, expected):
('autotools.bst', 'make-install', "make -j1 DESTDIR=\"/custom/install/root\" install"),
('cmake.bst', 'cmake',
"cmake -B_builddir -H\".\" -G\"Ninja\" " + "-DCMAKE_INSTALL_PREFIX:PATH=\"/opt\" \\\n" +
- "-DCMAKE_INSTALL_LIBDIR:PATH=\"lib\" "),
+ "-DCMAKE_INSTALL_LIBDIR:PATH=\"lib\""),
('distutils.bst', 'python-install',
"python3 ./setup.py install --prefix \"/opt\" \\\n" +
"--root \"/custom/install/root\""),
@@ -75,7 +75,7 @@ def test_overrides(cli, datafiles, target, varname, expected):
])
result.assert_success()
result_vars = _yaml.load_data(result.output)
- assert result_vars[varname] == expected
+ assert _yaml.node_get(result_vars, str, varname) == expected
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'missing_variables'))
diff --git a/tests/frontend/cross_junction_workspace.py b/tests/frontend/cross_junction_workspace.py
index 7b1fc7f8d..325c39264 100644
--- a/tests/frontend/cross_junction_workspace.py
+++ b/tests/frontend/cross_junction_workspace.py
@@ -76,11 +76,11 @@ def test_list_cross_junction(cli, tmpdir):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert isinstance(loaded.get('workspaces'), list)
- workspaces = loaded['workspaces']
+ assert isinstance(_yaml.node_get(loaded, None, 'workspaces'), list)
+ workspaces = _yaml.node_get(loaded, list, 'workspaces')
assert len(workspaces) == 1
- assert 'element' in workspaces[0]
- assert workspaces[0]['element'] == element
+ assert _yaml.node_contains(workspaces[0], 'element')
+ assert _yaml.node_get(workspaces[0], str, 'element') == element
def test_close_cross_junction(cli, tmpdir):
@@ -98,8 +98,8 @@ def test_close_cross_junction(cli, tmpdir):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert isinstance(loaded.get('workspaces'), list)
- workspaces = loaded['workspaces']
+ assert isinstance(_yaml.node_get(loaded, None, 'workspaces'), list)
+ workspaces = _yaml.node_get(loaded, list, 'workspaces')
assert not workspaces
@@ -117,8 +117,8 @@ def test_close_all_cross_junction(cli, tmpdir):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert isinstance(loaded.get('workspaces'), list)
- workspaces = loaded['workspaces']
+ assert isinstance(_yaml.node_get(loaded, None, 'workspaces'), list)
+ workspaces = _yaml.node_get(loaded, list, 'workspaces')
assert not workspaces
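
One detail worth flagging in these workspace-listing tests: the first lookup passes None as the expected type. Judging from its use here, that form appears to return the stored value without coercion, which is what keeps the following isinstance() assertion a real check; treat that reading as an assumption rather than documented API. A sketch under that assumption, with `loaded` being the Node parsed from the command output:

from buildstream import _yaml

raw = _yaml.node_get(loaded, None, 'workspaces')          # uncoerced fetch (assumed semantics)
assert isinstance(raw, list)
workspaces = _yaml.node_get(loaded, list, 'workspaces')   # type-checked fetch of the same key
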
diff --git a/tests/frontend/init.py b/tests/frontend/init.py
index 304833824..06babce43 100644
--- a/tests/frontend/init.py
+++ b/tests/frontend/init.py
@@ -19,9 +19,9 @@ def test_defaults(cli, tmpdir):
result.assert_success()
project_conf = _yaml.load(project_path)
- assert project_conf['name'] == 'foo'
- assert project_conf['format-version'] == str(BST_FORMAT_VERSION)
- assert project_conf['element-path'] == 'elements'
+ assert _yaml.node_get(project_conf, str, 'name') == 'foo'
+ assert _yaml.node_get(project_conf, str, 'format-version') == str(BST_FORMAT_VERSION)
+ assert _yaml.node_get(project_conf, str, 'element-path') == 'elements'
def test_all_options(cli, tmpdir):
@@ -37,9 +37,9 @@ def test_all_options(cli, tmpdir):
result.assert_success()
project_conf = _yaml.load(project_path)
- assert project_conf['name'] == 'foo'
- assert project_conf['format-version'] == str(2)
- assert project_conf['element-path'] == 'ponies'
+ assert _yaml.node_get(project_conf, str, 'name') == 'foo'
+ assert _yaml.node_get(project_conf, str, 'format-version') == str(2)
+ assert _yaml.node_get(project_conf, str, 'element-path') == 'ponies'
elements_dir = os.path.join(project, 'ponies')
assert os.path.isdir(elements_dir)
@@ -70,8 +70,8 @@ def test_force_overwrite_project(cli, tmpdir):
result.assert_success()
project_conf = _yaml.load(project_path)
- assert project_conf['name'] == 'foo'
- assert project_conf['format-version'] == str(BST_FORMAT_VERSION)
+ assert _yaml.node_get(project_conf, str, 'name') == 'foo'
+ assert _yaml.node_get(project_conf, str, 'format-version') == str(BST_FORMAT_VERSION)
@pytest.mark.parametrize("project_name", [('Micheal Jackson'), ('one+one')])
@@ -122,6 +122,6 @@ def test_element_path_interactive(cli, tmp_path, monkeypatch, element_path):
assert full_element_path.exists()
project_conf = _yaml.load(str(project_conf_path))
- assert project_conf['name'] == 'project_name'
- assert project_conf['format-version'] == '0'
- assert project_conf['element-path'] == element_path
+ assert _yaml.node_get(project_conf, str, 'name') == 'project_name'
+ assert _yaml.node_get(project_conf, str, 'format-version') == '0'
+ assert _yaml.node_get(project_conf, str, 'element-path') == element_path
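
The init.py hunks make a loader subtlety visible: scalars in a Node are carried as text, so even format-version, an integer in project.conf, reads back as a string, and the expected values are wrapped in str(). A sketch, with the path as a placeholder:

from buildstream import _yaml

project_conf = _yaml.load('project.conf')   # placeholder path
assert _yaml.node_get(project_conf, str, 'format-version') == str(2)
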
diff --git a/tests/frontend/project/sources/fetch_source.py b/tests/frontend/project/sources/fetch_source.py
index 06596607b..6bcaca028 100644
--- a/tests/frontend/project/sources/fetch_source.py
+++ b/tests/frontend/project/sources/fetch_source.py
@@ -41,8 +41,10 @@ class FetchSource(Source):
self.original_urls = self.node_get_member(node, list, 'urls')
self.output_file = self.node_get_member(node, str, 'output-text')
self.fetch_succeeds = {}
- if 'fetch-succeeds' in node:
- self.fetch_succeeds = {x[0]: x[1] for x in self.node_items(node['fetch-succeeds'])}
+ if self.node_has_member(node, 'fetch-succeeds'):
+ fetch_succeeds_node = self.node_get_member(node, dict, 'fetch-succeeds')
+ for key, value in self.node_items(fetch_succeeds_node):
+ self.fetch_succeeds[key] = value in ('True', 'true')
# First URL is the primary one for this test
#
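
Plugin code goes through the Plugin-level wrappers rather than _yaml directly: node_has_member(), node_get_member() and node_items() here. The same string-scalar rule applies, so a boolean-ish mapping value has to be interpreted by hand, which is why the hunk above compares against 'True'/'true'. A condensed sketch of that configure() pattern inside a hypothetical Source subclass:

# `node` is the source's configuration node, as handed to configure()
def configure(self, node):
    self.fetch_succeeds = {}
    if self.node_has_member(node, 'fetch-succeeds'):
        succeeds = self.node_get_member(node, dict, 'fetch-succeeds')
        for url, value in self.node_items(succeeds):
            # values arrive as strings in the new Node world
            self.fetch_succeeds[url] = value in ('True', 'true')
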
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index 204cfb7a8..c54022f36 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -184,11 +184,11 @@ def test_open_bzr_customize(cli, tmpdir, datafiles):
# Check that the correct origin branch is set
element_config = _yaml.load(os.path.join(project, "elements", element_name))
- source_config = element_config['sources'][0]
+ source_config = _yaml.node_get(element_config, dict, 'sources', [0])
output = subprocess.check_output(["bzr", "info"], cwd=workspace)
- stripped_url = source_config['url'].lstrip("file:///")
+ stripped_url = _yaml.node_get(source_config, str, 'url').lstrip("file:///")
expected_output_str = ("checkout of branch: /{}/{}"
- .format(stripped_url, source_config['track']))
+ .format(stripped_url, _yaml.node_get(source_config, str, 'track')))
assert expected_output_str in str(output)
@@ -608,13 +608,13 @@ def test_list(cli, tmpdir, datafiles):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert isinstance(loaded.get('workspaces'), list)
- workspaces = loaded['workspaces']
+ assert isinstance(_yaml.node_get(loaded, None, 'workspaces'), list)
+ workspaces = _yaml.node_get(loaded, list, 'workspaces')
assert len(workspaces) == 1
space = workspaces[0]
- assert space['element'] == element_name
- assert space['directory'] == workspace
+ assert _yaml.node_get(space, str, 'element') == element_name
+ assert _yaml.node_get(space, str, 'directory') == workspace
@pytest.mark.datafiles(DATA_DIR)
@@ -1133,7 +1133,7 @@ def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
# Delete the ref from the source so that we can detect if the
# element has been tracked
element_contents = _yaml.load(element_file)
- del element_contents['sources'][0]['ref']
+ _yaml.node_del(_yaml.node_get(element_contents, dict, 'sources', [0]), 'ref')
_yaml.dump(_yaml.node_sanitize(element_contents), element_file)
result = cli.run(project=project, args=['-C', workspace, 'source', 'track', *arg_elm])
@@ -1141,7 +1141,7 @@ def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
# Element is tracked now
element_contents = _yaml.load(element_file)
- assert 'ref' in element_contents['sources'][0]
+ assert _yaml.node_contains(_yaml.node_get(element_contents, dict, 'sources', [0]), 'ref')
@pytest.mark.datafiles(DATA_DIR)
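
The workspace.py hunks demonstrate how to reach inside nested structure without subscripting: node_get() accepts an indices path for list members, node_del() removes a key, and writing back to disk goes via node_sanitize() to strip provenance before dump(). A sketch, assuming element_file names an element with at least one source:

from buildstream import _yaml

element = _yaml.load(element_file)
source0 = _yaml.node_get(element, dict, 'sources', indices=[0])

if _yaml.node_contains(source0, 'ref'):
    _yaml.node_del(source0, 'ref')

# Nodes carry provenance, so reduce to plain data before dumping
_yaml.dump(_yaml.node_sanitize(element), element_file)
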
diff --git a/tests/integration/pullbuildtrees.py b/tests/integration/pullbuildtrees.py
index 308905690..e8f84d50d 100644
--- a/tests/integration/pullbuildtrees.py
+++ b/tests/integration/pullbuildtrees.py
@@ -192,4 +192,4 @@ def test_invalid_cache_pullbuildtrees(cli, datafiles, value, success):
if success:
res.assert_success()
else:
- res.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.ILLEGAL_COMPOSITE)
+ res.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/internals/pluginfactory.py b/tests/internals/pluginfactory.py
index d46c13e36..e9e63672f 100644
--- a/tests/internals/pluginfactory.py
+++ b/tests/internals/pluginfactory.py
@@ -8,6 +8,7 @@ from pluginbase import PluginBase
from buildstream._elementfactory import ElementFactory
from buildstream._sourcefactory import SourceFactory
from buildstream._exceptions import PluginError
+from buildstream import _yaml
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
@@ -46,12 +47,12 @@ def test_element_factory(plugin_fixture):
##############################################################
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'customsource'))
def test_custom_source(plugin_fixture, datafiles):
- plugins = [{
+ plugins = [_yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
- 'plugins': {'foo': 0}
- }]
+ 'plugins': ['foo']
+ })]
factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
assert isinstance(factory, SourceFactory)
@@ -61,12 +62,12 @@ def test_custom_source(plugin_fixture, datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'customelement'))
def test_custom_element(plugin_fixture, datafiles):
- plugins = [{
+ plugins = [_yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
- 'plugins': {'foo': 0}
- }]
+ 'plugins': ['foo']
+ })]
factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
assert isinstance(factory, ElementFactory)
@@ -98,12 +99,12 @@ def test_missing_element(plugin_fixture):
# Load a factory with a plugin that returns a value instead of Source subclass
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'notatype'))
def test_source_notatype(plugin_fixture, datafiles):
- plugins = [{
+ plugins = [_yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
- 'plugins': {'foo': 0}
- }]
+ 'plugins': ['foo']
+ })]
factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError):
factory.lookup('foo')
@@ -112,12 +113,12 @@ def test_source_notatype(plugin_fixture, datafiles):
# Load a factory with a plugin that returns a value instead of Element subclass
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'notatype'))
def test_element_notatype(plugin_fixture, datafiles):
- plugins = [{
+ plugins = [_yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
- 'plugins': {'foo': 0}
- }]
+ 'plugins': ['foo']
+ })]
factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError):
factory.lookup('foo')
@@ -127,12 +128,12 @@ def test_element_notatype(plugin_fixture, datafiles):
# which is not a Source subclass
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'wrongtype'))
def test_source_wrongtype(plugin_fixture, datafiles):
- plugins = [{
+ plugins = [_yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
- 'plugins': {'foo': 0}
- }]
+ 'plugins': ['foo']
+ })]
factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError):
factory.lookup('foo')
@@ -142,12 +143,12 @@ def test_source_wrongtype(plugin_fixture, datafiles):
# which is not a Element subclass
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'wrongtype'))
def test_element_wrongtype(plugin_fixture, datafiles):
- plugins = [{
+ plugins = [_yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
- 'plugins': {'foo': 0}
- }]
+ 'plugins': ['foo']
+ })]
factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError):
factory.lookup('foo')
@@ -156,12 +157,12 @@ def test_element_wrongtype(plugin_fixture, datafiles):
# Load a factory with a plugin which fails to provide a setup() function
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'nosetup'))
def test_source_missing_setup(plugin_fixture, datafiles):
- plugins = [{
+ plugins = [_yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
- 'plugins': {'foo': 0}
- }]
+ 'plugins': ['foo']
+ })]
factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError):
factory.lookup('foo')
@@ -170,12 +171,12 @@ def test_source_missing_setup(plugin_fixture, datafiles):
# Load a factory with a plugin which fails to provide a setup() function
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'nosetup'))
def test_element_missing_setup(plugin_fixture, datafiles):
- plugins = [{
+ plugins = [_yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
- 'plugins': {'foo': 0}
- }]
+ 'plugins': ['foo']
+ })]
factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError):
factory.lookup('foo')
@@ -185,12 +186,12 @@ def test_element_missing_setup(plugin_fixture, datafiles):
# that is not a function
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badsetup'))
def test_source_bad_setup(plugin_fixture, datafiles):
- plugins = [{
+ plugins = [_yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
- 'plugins': {'foo': 0}
- }]
+ 'plugins': ['foo']
+ })]
factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError):
factory.lookup('foo')
@@ -200,12 +201,12 @@ def test_source_bad_setup(plugin_fixture, datafiles):
# that is not a function
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badsetup'))
def test_element_bad_setup(plugin_fixture, datafiles):
- plugins = [{
+ plugins = [_yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
- 'plugins': {'foo': 0}
- }]
+ 'plugins': ['foo']
+ })]
factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError):
factory.lookup('foo')
@@ -215,12 +216,12 @@ def test_element_bad_setup(plugin_fixture, datafiles):
# high version of buildstream
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badversionsource'))
def test_source_badversion(plugin_fixture, datafiles):
- plugins = [{
+ plugins = [_yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
- 'plugins': {'foo': 0}
- }]
+ 'plugins': ['foo']
+ })]
factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError):
factory.lookup('foo')
@@ -230,12 +231,12 @@ def test_source_badversion(plugin_fixture, datafiles):
# high version of buildstream
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badversionelement'))
def test_element_badversion(plugin_fixture, datafiles):
- plugins = [{
+ plugins = [_yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
- 'plugins': {'foo': 0}
- }]
+ 'plugins': ['foo']
+ })]
factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
with pytest.raises(PluginError):
factory.lookup('foo')
@@ -248,20 +249,20 @@ def test_element_badversion(plugin_fixture, datafiles):
# Load two factories, both of which define a different 'foo' plugin
@pytest.mark.datafiles(DATA_DIR)
def test_source_multicontext(plugin_fixture, datafiles):
- plugins1 = {
+ plugins1 = _yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename,
'customsource'),
- 'plugins': {'foo': 0}
- }
- plugins2 = {
+ 'plugins': ['foo']
+ })
+ plugins2 = _yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename,
'anothersource'),
- 'plugins': {'foo': 0}
- }
+ 'plugins': ['foo']
+ })
factory1 = SourceFactory(plugin_fixture['base'], plugin_origins=[plugins1])
factory2 = SourceFactory(plugin_fixture['base'], plugin_origins=[plugins2])
@@ -277,20 +278,20 @@ def test_source_multicontext(plugin_fixture, datafiles):
# Load two factories, both of which define a different 'foo' plugin
@pytest.mark.datafiles(DATA_DIR)
def test_element_multicontext(plugin_fixture, datafiles):
- plugins1 = {
+ plugins1 = _yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename,
'customelement'),
- 'plugins': {'foo': 0}
- }
- plugins2 = {
+ 'plugins': ['foo']
+ })
+ plugins2 = _yaml.new_node_from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename,
'anotherelement'),
- 'plugins': {'foo': 0}
- }
+ 'plugins': ['foo']
+ })
factory1 = ElementFactory(plugin_fixture['base'], plugin_origins=[plugins1])
factory2 = ElementFactory(plugin_fixture['base'], plugin_origins=[plugins2])
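
Two coupled changes run through pluginfactory.py: plugin origins must now be real Nodes, hence new_node_from_dict(), and the 'plugins' field becomes a flat list of plugin names where it used to be a name-to-version mapping. A sketch of one origin, with the path as a placeholder and `base` standing for the fixture's PluginBase:

from buildstream import _yaml
from buildstream._sourcefactory import SourceFactory

origin = _yaml.new_node_from_dict({
    'origin': 'local',
    'path': '/path/to/plugin/dir',   # placeholder
    'plugins': ['foo'],              # names only, no version mapping
})
factory = SourceFactory(base, plugin_origins=[origin])
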
diff --git a/tests/internals/yaml.py b/tests/internals/yaml.py
index 86ae11d45..8d8494e4f 100644
--- a/tests/internals/yaml.py
+++ b/tests/internals/yaml.py
@@ -1,11 +1,13 @@
from collections.abc import Mapping
import os
+from io import StringIO
import pytest
from buildstream import _yaml
from buildstream._exceptions import LoadError, LoadErrorReason
+
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'yaml',
@@ -20,21 +22,15 @@ def test_load_yaml(datafiles):
'basics.yaml')
loaded = _yaml.load(filename)
- assert loaded.get('kind') == 'pony'
+ assert loaded[0].get('kind')[0] == 'pony'
def assert_provenance(filename, line, col, node, key=None, indices=None):
provenance = _yaml.node_get_provenance(node, key=key, indices=indices)
- if key:
- if indices:
- assert isinstance(provenance, _yaml.ElementProvenance)
- else:
- assert isinstance(provenance, _yaml.MemberProvenance)
- else:
- assert isinstance(provenance, _yaml.DictProvenance)
+ assert isinstance(provenance, _yaml.ProvenanceInformation)
- assert provenance.filename.shortname == filename
+ assert provenance.shortname == filename
assert provenance.line == line
assert provenance.col == col
@@ -47,7 +43,7 @@ def test_basic_provenance(datafiles):
'basics.yaml')
loaded = _yaml.load(filename)
- assert loaded.get('kind') == 'pony'
+ assert loaded[0].get('kind')[0] == 'pony'
assert_provenance(filename, 1, 0, loaded)
@@ -60,7 +56,7 @@ def test_member_provenance(datafiles):
'basics.yaml')
loaded = _yaml.load(filename)
- assert loaded.get('kind') == 'pony'
+ assert loaded[0].get('kind')[0] == 'pony'
assert_provenance(filename, 2, 13, loaded, 'description')
@@ -72,7 +68,7 @@ def test_element_provenance(datafiles):
'basics.yaml')
loaded = _yaml.load(filename)
- assert loaded.get('kind') == 'pony'
+ assert loaded[0].get('kind')[0] == 'pony'
assert_provenance(filename, 5, 2, loaded, 'moods', [1])
@@ -106,7 +102,7 @@ def test_node_get(datafiles):
'basics.yaml')
base = _yaml.load(filename)
- assert base.get('kind') == 'pony'
+ assert base[0].get('kind')[0] == 'pony'
children = _yaml.node_get(base, list, 'children')
assert isinstance(children, list)
@@ -122,6 +118,58 @@ def test_node_get(datafiles):
assert exc.value.reason == LoadErrorReason.INVALID_DATA
+@pytest.mark.datafiles(os.path.join(DATA_DIR))
+def test_node_set(datafiles):
+
+ filename = os.path.join(datafiles.dirname,
+ datafiles.basename,
+ 'basics.yaml')
+
+ base = _yaml.load(filename)
+
+ assert not _yaml.node_contains(base, 'mother')
+ _yaml.node_set(base, 'mother', 'snow white')
+ assert _yaml.node_get(base, str, 'mother') == 'snow white'
+
+
+@pytest.mark.datafiles(os.path.join(DATA_DIR))
+def test_node_set_overwrite(datafiles):
+
+ filename = os.path.join(datafiles.dirname,
+ datafiles.basename,
+ 'basics.yaml')
+
+ base = _yaml.load(filename)
+
+ # Overwrite a string
+ assert _yaml.node_get(base, str, 'kind') == 'pony'
+ _yaml.node_set(base, 'kind', 'cow')
+ assert _yaml.node_get(base, str, 'kind') == 'cow'
+
+ # Overwrite a list as a string
+ assert _yaml.node_get(base, list, 'moods') == ['happy', 'sad']
+ _yaml.node_set(base, 'moods', 'unemotional')
+ assert _yaml.node_get(base, str, 'moods') == 'unemotional'
+
+
+@pytest.mark.datafiles(os.path.join(DATA_DIR))
+def test_node_set_list_element(datafiles):
+
+ filename = os.path.join(datafiles.dirname,
+ datafiles.basename,
+ 'basics.yaml')
+
+ base = _yaml.load(filename)
+
+ assert _yaml.node_get(base, list, 'moods') == ['happy', 'sad']
+ assert _yaml.node_get(base, str, 'moods', indices=[0]) == 'happy'
+
+ _yaml.node_set(base, 'moods', 'confused', indices=[0])
+
+ assert _yaml.node_get(base, list, 'moods') == ['confused', 'sad']
+ assert _yaml.node_get(base, str, 'moods', indices=[0]) == 'confused'
+
+
# Really this is testing _yaml.node_copy(), we want to
# be sure that compositing values still preserves the original
# values in the copied dict.
@@ -166,7 +214,6 @@ def test_composite_preserve_originals(datafiles):
# prov_col: The expected provenance column of "mood"
#
@pytest.mark.datafiles(os.path.join(DATA_DIR))
-@pytest.mark.parametrize('caching', [('raw'), ('cached')])
@pytest.mark.parametrize("filename,index,length,mood,prov_file,prov_line,prov_col", [
# Test results of compositing with the (<) prepend directive
@@ -199,12 +246,12 @@ def test_composite_preserve_originals(datafiles):
])
def test_list_composition(datafiles, filename, tmpdir,
index, length, mood,
- prov_file, prov_line, prov_col, caching):
+ prov_file, prov_line, prov_col):
base_file = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml')
overlay_file = os.path.join(datafiles.dirname, datafiles.basename, filename)
base = _yaml.load(base_file, 'basics.yaml')
- overlay = _yaml.load(overlay_file, filename)
+ overlay = _yaml.load(overlay_file, shortname=filename)
_yaml.composite_dict(base, overlay)
@@ -212,7 +259,7 @@ def test_list_composition(datafiles, filename, tmpdir,
assert len(children) == length
child = children[index]
- assert child['mood'] == mood
+ assert _yaml.node_get(child, str, 'mood') == mood
assert_provenance(prov_file, prov_line, prov_col, child, 'mood')
@@ -230,6 +277,20 @@ def test_list_deletion(datafiles):
assert not children
+# Test that extending a non-existent list works as expected
+@pytest.mark.datafiles(os.path.join(DATA_DIR))
+def test_nonexistent_list_extension(datafiles):
+ base = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml')
+
+ base = _yaml.load(base, shortname='basics.yaml')
+ assert not _yaml.node_contains(base, 'todo')
+
+ _yaml.node_extend_list(base, 'todo', 3, 'empty')
+
+ assert len(_yaml.node_get(base, list, 'todo')) == 3
+ assert _yaml.node_get(base, list, 'todo') == ['empty', 'empty', 'empty']
+
+
# Tests for deep list composition
#
# Same as test_list_composition(), but adds an additional file
@@ -257,7 +318,6 @@ def test_list_deletion(datafiles):
# prov_col: The expected provenance column of "mood"
#
@pytest.mark.datafiles(os.path.join(DATA_DIR))
-@pytest.mark.parametrize('caching', [('raw'), ('cached')])
@pytest.mark.parametrize("filename1,filename2,index,length,mood,prov_file,prov_line,prov_col", [
# Test results of compositing literal list with (>) and then (<)
@@ -316,7 +376,7 @@ def test_list_deletion(datafiles):
])
def test_list_composition_twice(datafiles, tmpdir, filename1, filename2,
index, length, mood,
- prov_file, prov_line, prov_col, caching):
+ prov_file, prov_line, prov_col):
file_base = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml')
file1 = os.path.join(datafiles.dirname, datafiles.basename, filename1)
file2 = os.path.join(datafiles.dirname, datafiles.basename, filename2)
@@ -324,9 +384,9 @@ def test_list_composition_twice(datafiles, tmpdir, filename1, filename2,
#####################
# Round 1 - Fight !
#####################
- base = _yaml.load(file_base, 'basics.yaml')
- overlay1 = _yaml.load(file1, filename1)
- overlay2 = _yaml.load(file2, filename2)
+ base = _yaml.load(file_base, shortname='basics.yaml')
+ overlay1 = _yaml.load(file1, shortname=filename1)
+ overlay2 = _yaml.load(file2, shortname=filename2)
_yaml.composite_dict(base, overlay1)
_yaml.composite_dict(base, overlay2)
@@ -335,15 +395,15 @@ def test_list_composition_twice(datafiles, tmpdir, filename1, filename2,
assert len(children) == length
child = children[index]
- assert child['mood'] == mood
+ assert _yaml.node_get(child, str, 'mood') == mood
assert_provenance(prov_file, prov_line, prov_col, child, 'mood')
#####################
# Round 2 - Fight !
#####################
- base = _yaml.load(file_base, 'basics.yaml')
- overlay1 = _yaml.load(file1, filename1)
- overlay2 = _yaml.load(file2, filename2)
+ base = _yaml.load(file_base, shortname='basics.yaml')
+ overlay1 = _yaml.load(file1, shortname=filename1)
+ overlay2 = _yaml.load(file2, shortname=filename2)
_yaml.composite_dict(overlay1, overlay2)
_yaml.composite_dict(base, overlay1)
@@ -352,7 +412,7 @@ def test_list_composition_twice(datafiles, tmpdir, filename1, filename2,
assert len(children) == length
child = children[index]
- assert child['mood'] == mood
+ assert _yaml.node_get(child, str, 'mood') == mood
assert_provenance(prov_file, prov_line, prov_col, child, 'mood')
@@ -394,3 +454,91 @@ def test_value_doesnt_match_expected(datafiles):
with pytest.raises(LoadError) as exc:
_yaml.node_get(test_dict, int, "Test4")
assert exc.value.reason == LoadErrorReason.INVALID_DATA
+
+
+@pytest.mark.datafiles(os.path.join(DATA_DIR))
+@pytest.mark.parametrize('fromdisk', [(True), (False)])
+def test_roundtrip_dump(datafiles, fromdisk):
+ filename = os.path.join(datafiles.dirname,
+ datafiles.basename,
+ "roundtrip-test.yaml")
+ with open(filename, "r") as fh:
+ rt_raw = fh.read()
+ if fromdisk:
+ rt_loaded = _yaml.roundtrip_load(filename)
+ else:
+ rt_loaded = _yaml.roundtrip_load_data(rt_raw, filename=filename)
+
+ # Now walk the loaded data structure, checking for ints etc.
+ def walk_node(node):
+ for v in node.values():
+ if isinstance(v, list):
+ walk_list(v)
+ elif isinstance(v, Mapping):
+ walk_node(v)
+ else:
+ assert isinstance(v, str)
+
+ def walk_list(l):
+ for v in l:
+ if isinstance(v, list):
+ walk_list(v)
+ elif isinstance(v, Mapping):
+ walk_node(v)
+ else:
+ assert isinstance(v, str)
+
+ walk_node(rt_loaded)
+
+ outfile = StringIO()
+ _yaml.roundtrip_dump(rt_loaded, file=outfile)
+ rt_back = outfile.getvalue()
+
+ assert rt_raw == rt_back
+
+
+@pytest.mark.datafiles(os.path.join(DATA_DIR))
+@pytest.mark.parametrize('case', [
+ ['a', 'b', 'c'],
+ ['foo', 1],
+ ['stuff', 0, 'colour'],
+ ['bird', 0, 1],
+])
+def test_node_find_target(datafiles, case):
+ filename = os.path.join(datafiles.dirname,
+ datafiles.basename,
+ "traversal.yaml")
+ # We set copy_tree in order to ensure that the nodes in `loaded`
+ # are not the same nodes as in `prov.toplevel`
+ loaded = _yaml.load(filename, copy_tree=True)
+
+ prov = _yaml.node_get_provenance(loaded)
+
+ toplevel = prov.toplevel
+
+ assert toplevel is not loaded
+
+ # Walk down the node tree, with insider knowledge of how nodes are
+ # laid out. Client code should never do this.
+ def _walk(node, entry, rest):
+ if rest:
+ return _walk(node[0][entry], rest[0], rest[1:])
+ else:
+ return node[0][entry]
+
+ want = _walk(loaded, case[0], case[1:])
+ found_path = _yaml.node_find_target(toplevel, want)
+
+ assert case == found_path
+
+
+@pytest.mark.datafiles(os.path.join(DATA_DIR))
+def test_node_find_target_fails(datafiles):
+ filename = os.path.join(datafiles.dirname,
+ datafiles.basename,
+ "traversal.yaml")
+ loaded = _yaml.load(filename, copy_tree=True)
+
+ brand_new = _yaml.new_empty_node()
+
+ assert _yaml.node_find_target(loaded, brand_new) is None
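
The old DictProvenance/MemberProvenance/ElementProvenance split collapses into a single ProvenanceInformation carrying shortname, line and col directly, and node_get_provenance() returns one for a whole node, a member, or a list element; its toplevel member is the tree that node_find_target() searches. A sketch against the basics.yaml fixture used above:

from buildstream import _yaml

node = _yaml.load('basics.yaml')                          # fixture path assumed
prov = _yaml.node_get_provenance(node, key='description')
assert isinstance(prov, _yaml.ProvenanceInformation)
print('{}:{}:{}'.format(prov.shortname, prov.line, prov.col))
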
diff --git a/tests/internals/yaml/roundtrip-test.yaml b/tests/internals/yaml/roundtrip-test.yaml
new file mode 100644
index 000000000..2e3f5c721
--- /dev/null
+++ b/tests/internals/yaml/roundtrip-test.yaml
@@ -0,0 +1,55 @@
+# This is the roundtrip test YAML document
+# It contains comments such as this
+
+# The test which uses this YAML file will also ensure that
+# any interesting values we load (such as floats, ints, etc)
+# are actually loaded as strings.
+
+# Blank lines such as the above
+
+this:
+ is:
+ a:
+ # Double-quoted
+ - "nested"
+ - mapping
+ # Single-quoted
+ - 'with'
+ - a
+ - list
+
+# Comments can go with mappings too
+such:
+ as: this
+
+# We roundtrip integers and floats as strings, to prevent truncation.
+
+trunctest:
+ simple-int: 123
+ small-float: 123.456
+ int: 000000000000000000000001
+ float: 1.000000000000000000000
+
+# We also roundtrip booleans in various forms as strings to prevent
+# normalisation to 'True' and 'False'
+
+boolcheck:
+- True
+- False
+- true
+- false
+# These are valid YAML booleans, though BuildStream doesn't support them
+- TRUE
+- FALSE
+# The following are YAML 1.1 only
+- yes
+- no
+- on
+- off
+
+nullcheck: null
+
+timestamp: 2019-03-14
+
+# That is all
+
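
This fixture pins down the roundtrip guarantee: roundtrip_load() keeps every scalar, the zero-padded int, the long float, all the boolean spellings, the null and the timestamp, as the literal text from the file, and roundtrip_dump() writes the document back byte-for-byte, comments and blank lines included. The API pair in a minimal sketch:

from io import StringIO
from buildstream import _yaml

data = _yaml.roundtrip_load('tests/internals/yaml/roundtrip-test.yaml')
out = StringIO()
_yaml.roundtrip_dump(data, file=out)
# out.getvalue() reproduces the original file text exactly
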
diff --git a/tests/internals/yaml/traversal.yaml b/tests/internals/yaml/traversal.yaml
new file mode 100644
index 000000000..0321e5e53
--- /dev/null
+++ b/tests/internals/yaml/traversal.yaml
@@ -0,0 +1,20 @@
+# This document is used to test the target finding algorithm
+# employed during tracking.
+
+a:
+ b:
+ c: fish
+
+foo:
+ - bar
+ - baz
+ - meta
+
+stuff:
+ - kind: banana
+ colour: yellow
+ - kind: orange
+ colour: orange
+
+bird:
+ - [ 'a', 'b' ]
diff --git a/tests/sources/generic/mirror.py b/tests/sources/generic/mirror.py
index f5a11d2d7..b011872b1 100644
--- a/tests/sources/generic/mirror.py
+++ b/tests/sources/generic/mirror.py
@@ -357,9 +357,9 @@ def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
# Tracking tries upstream first. Check the ref is from upstream.
new_element = _yaml.load(element_path)
- source = new_element['sources'][0]
- if 'ref' in source:
- assert source['ref'] == upstream_ref
+ source = _yaml.node_get(new_element, dict, 'sources', [0])
+ if _yaml.node_contains(source, 'ref'):
+ assert _yaml.node_get(source, str, 'ref') == upstream_ref
@pytest.mark.datafiles(DATA_DIR)
@@ -422,6 +422,6 @@ def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
# Check that tracking fell back to the mirror
new_element = _yaml.load(element_path)
- source = new_element['sources'][0]
- if 'ref' in source:
- assert source['ref'] == mirror_ref
+ source = _yaml.node_get(new_element, dict, 'sources', [0])
+ if _yaml.node_contains(source, 'ref'):
+ assert _yaml.node_get(source, str, 'ref') == mirror_ref
diff --git a/tests/sources/generic/track.py b/tests/sources/generic/track.py
index 4726a453e..05519719b 100644
--- a/tests/sources/generic/track.py
+++ b/tests/sources/generic/track.py
@@ -318,11 +318,18 @@ def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
assert os.path.exists(os.path.join(project, 'project.refs'))
else:
assert not os.path.exists(os.path.join(project, 'project.refs'))
+
new_sources = _yaml.load(os.path.join(element_path, 'sources.yml'))
- assert 'sources' in new_sources
- assert len(new_sources['sources']) == 1
- assert 'ref' in new_sources['sources'][0]
- assert ref == new_sources['sources'][0]['ref']
+
+ # Get all of the sources
+ assert _yaml.node_contains(new_sources, 'sources')
+ sources_list = _yaml.node_get(new_sources, list, 'sources')
+ assert len(sources_list) == 1
+
+ # Get the first source from the sources list
+ new_source = _yaml.node_get(new_sources, dict, 'sources', indices=[0])
+ assert _yaml.node_contains(new_source, 'ref')
+ assert ref == _yaml.node_get(new_source, str, 'ref')
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/sources/git.py b/tests/sources/git.py
index 8922a47d7..c11b90c06 100644
--- a/tests/sources/git.py
+++ b/tests/sources/git.py
@@ -761,7 +761,7 @@ def test_track_fetch(cli, tmpdir, datafiles, ref_format, tag, extra_commit):
result.assert_success()
element = _yaml.load(element_path)
- new_ref = element['sources'][0]['ref']
+ new_ref = _yaml.node_get(_yaml.node_get(element, dict, 'sources', [0]), str, 'ref')
if ref_format == 'git-describe' and tag:
# Check and strip prefix
@@ -786,7 +786,7 @@ def test_git_describe(cli, tmpdir, datafiles, ref_storage, tag_type):
project = str(datafiles)
project_config = _yaml.load(os.path.join(project, 'project.conf'))
- project_config['ref-storage'] = ref_storage
+ _yaml.node_set(project_config, 'ref-storage', ref_storage)
_yaml.dump(_yaml.node_sanitize(project_config), os.path.join(project, 'project.conf'))
repofiles = os.path.join(str(tmpdir), 'repofiles')
@@ -850,16 +850,18 @@ def test_git_describe(cli, tmpdir, datafiles, ref_storage, tag_type):
if ref_storage == 'inline':
element = _yaml.load(element_path)
- tags = _yaml.node_sanitize(element['sources'][0]['tags'])
+ tags = _yaml.node_get(_yaml.node_get(element, dict, 'sources', [0]), list, 'tags')
assert len(tags) == 2
for tag in tags:
- assert 'tag' in tag
- assert 'commit' in tag
- assert 'annotated' in tag
- assert tag['annotated'] == (tag_type == 'annotated')
+ assert _yaml.node_contains(tag, 'tag')
+ assert _yaml.node_contains(tag, 'commit')
+ assert _yaml.node_contains(tag, 'annotated')
+ assert _yaml.node_get(tag, bool, 'annotated') == (tag_type == 'annotated')
- assert {(tag['tag'], tag['commit']) for tag in tags} == {('tag1', repo.rev_parse('tag1^{commit}')),
- ('tag2', repo.rev_parse('tag2^{commit}'))}
+ assert {(_yaml.node_get(tag, str, 'tag'),
+ _yaml.node_get(tag, str, 'commit'))
+ for tag in tags} == {('tag1', repo.rev_parse('tag1^{commit}')),
+ ('tag2', repo.rev_parse('tag2^{commit}'))}
checkout = os.path.join(str(tmpdir), 'checkout')
@@ -898,7 +900,7 @@ def test_git_describe_head_is_tagged(cli, tmpdir, datafiles, ref_storage, tag_ty
project = str(datafiles)
project_config = _yaml.load(os.path.join(project, 'project.conf'))
- project_config['ref-storage'] = ref_storage
+ _yaml.node_set(project_config, 'ref-storage', ref_storage)
_yaml.dump(_yaml.node_sanitize(project_config), os.path.join(project, 'project.conf'))
repofiles = os.path.join(str(tmpdir), 'repofiles')
@@ -961,15 +963,19 @@ def test_git_describe_head_is_tagged(cli, tmpdir, datafiles, ref_storage, tag_ty
if ref_storage == 'inline':
element = _yaml.load(element_path)
- tags = _yaml.node_sanitize(element['sources'][0]['tags'])
+ source = _yaml.node_get(element, dict, 'sources', indices=[0])
+ tags = _yaml.node_get(source, list, 'tags')
assert len(tags) == 1
- for tag in tags:
- assert 'tag' in tag
- assert 'commit' in tag
- assert 'annotated' in tag
- assert tag['annotated'] == (tag_type == 'annotated')
- assert {(tag['tag'], tag['commit']) for tag in tags} == {('tag', repo.rev_parse('tag^{commit}'))}
+ tag = _yaml.node_get(source, dict, 'tags', indices=[0])
+ assert _yaml.node_contains(tag, 'tag')
+ assert _yaml.node_contains(tag, 'commit')
+ assert _yaml.node_contains(tag, 'annotated')
+ assert _yaml.node_get(tag, bool, 'annotated') == (tag_type == 'annotated')
+
+ tag_name = _yaml.node_get(tag, str, 'tag')
+ commit = _yaml.node_get(tag, str, 'commit')
+ assert (tag_name, commit) == ('tag', repo.rev_parse('tag^{commit}'))
checkout = os.path.join(str(tmpdir), 'checkout')
@@ -1009,7 +1015,7 @@ def test_git_describe_relevant_history(cli, tmpdir, datafiles):
project = str(datafiles)
project_config = _yaml.load(os.path.join(project, 'project.conf'))
- project_config['ref-storage'] = 'project.refs'
+ _yaml.node_set(project_config, 'ref-storage', 'project.refs')
_yaml.dump(_yaml.node_sanitize(project_config), os.path.join(project, 'project.conf'))
repofiles = os.path.join(str(tmpdir), 'repofiles')
@@ -1089,7 +1095,7 @@ def test_default_do_not_track_tags(cli, tmpdir, datafiles):
project = str(datafiles)
project_config = _yaml.load(os.path.join(project, 'project.conf'))
- project_config['ref-storage'] = 'inline'
+ _yaml.node_set(project_config, 'ref-storage', 'inline')
_yaml.dump(_yaml.node_sanitize(project_config), os.path.join(project, 'project.conf'))
repofiles = os.path.join(str(tmpdir), 'repofiles')
@@ -1120,7 +1126,8 @@ def test_default_do_not_track_tags(cli, tmpdir, datafiles):
result.assert_success()
element = _yaml.load(element_path)
- assert 'tags' not in element['sources'][0]
+ source = _yaml.node_get(element, dict, 'sources', indices=[0])
+ assert not _yaml.node_contains(source, 'tags')
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
@@ -1145,17 +1152,17 @@ def test_overwrite_rogue_tag_multiple_remotes(cli, tmpdir, datafiles):
repodir, reponame = os.path.split(repo.repo)
project_config = _yaml.load(os.path.join(project, 'project.conf'))
- project_config['aliases'] = {
+ _yaml.node_set(project_config, 'aliases', _yaml.new_node_from_dict({
'repo': 'http://example.com/'
- }
- project_config['mirrors'] = [
+ }))
+ _yaml.node_set(project_config, 'mirrors', [
{
'name': 'middle-earth',
'aliases': {
'repo': ['file://{}/'.format(repodir)]
}
}
- ]
+ ])
_yaml.dump(_yaml.node_sanitize(project_config), os.path.join(project, 'project.conf'))
repo.add_annotated_tag('tag', 'tag')
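
git.py exercises node_set() in both flavours seen in this commit: scalar overwrites such as 'ref-storage', and structured assignment, where a mapping value is wrapped with new_node_from_dict() while a list of plain dicts is passed through as-is. The recurring project.conf rewrite, sketched with a placeholder path:

from buildstream import _yaml

project_config = _yaml.load('project.conf')   # placeholder path
_yaml.node_set(project_config, 'ref-storage', 'inline')
_yaml.node_set(project_config, 'aliases', _yaml.new_node_from_dict({
    'repo': 'http://example.com/',
}))
_yaml.dump(_yaml.node_sanitize(project_config), 'project.conf')
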
diff --git a/tests/testutils/yaml.py b/tests/testutils/yaml.py
index 60cb02ec6..a1dcb8646 100644
--- a/tests/testutils/yaml.py
+++ b/tests/testutils/yaml.py
@@ -22,7 +22,7 @@ from buildstream import _yaml
# yaml_file_get_provenance()
#
-# Load a yaml file and return it's _yaml.Provenance object.
+# Load a yaml file and return its _yaml.ProvenanceInformation object.
#
# This is useful for checking the provenance in BuildStream output is as
# expected.
@@ -34,14 +34,14 @@ from buildstream import _yaml
# indices (list of indexes): Optional index path, in the case of list values
#
# Returns:
-# The Provenance of the dict, member or list element
+# The ProvenanceInformation of the dict, member or list element
#
def yaml_file_get_provenance(path, shortname, key=None, indices=None):
- with open(path) as data:
- element_yaml = _yaml.load_data(
- data,
- _yaml.ProvenanceFile(path, shortname, project=None),
- )
- provenance = _yaml.node_get_provenance(element_yaml, key, indices)
+ file_node = _yaml.load(path, shortname)
+ if key:
+ required_node = _yaml.node_get(file_node, dict, key, indices=indices)
+ else:
+ required_node = file_node
+ provenance = _yaml.node_get_provenance(required_node)
assert provenance is not None
return provenance
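
After the rewrite the helper needs no ProvenanceFile plumbing: it loads the file, optionally narrows to a member with node_get(), and asks that node for its own provenance. Expected usage is unchanged; for some element file at `path`, something like:

prov = yaml_file_get_provenance(path, 'element.bst', key='sources', indices=[0])
assert prov.shortname == 'element.bst'
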