author | Benjamin Schubert <ben.c.schubert@gmail.com> | 2019-06-13 18:54:28 +0100
---|---|---
committer | bst-marge-bot <marge-bot@buildstream.build> | 2019-07-15 14:14:02 +0000
commit | 50335363cd49e7945567609cf8b1273576b27667 (patch) |
tree | 4dbe6507e8c39ce37f228bb1f007f075bca6e6cf |
parent | 95a6e2d03157da46f9056956111847989326e3db (diff) |
download | buildstream-50335363cd49e7945567609cf8b1273576b27667.tar.gz |
_yaml: Remove 'node_get' and migrate all remaining calls to new API
-rw-r--r-- | src/buildstream/_context.py | 7
-rw-r--r-- | src/buildstream/_loader/loader.py | 2
-rw-r--r-- | src/buildstream/_projectrefs.py | 2
-rw-r--r-- | src/buildstream/_yaml.pxd | 1
-rw-r--r-- | src/buildstream/_yaml.pyx | 99
-rw-r--r-- | src/buildstream/element.py | 9
-rw-r--r-- | src/buildstream/plugins/elements/compose.py | 4
-rw-r--r-- | src/buildstream/plugins/elements/filter.py | 4
-rw-r--r-- | src/buildstream/plugins/elements/script.py | 2
-rw-r--r-- | src/buildstream/plugins/sources/pip.py | 6
-rw-r--r-- | src/buildstream/sandbox/_sandboxremote.py | 2
-rw-r--r-- | src/buildstream/testing/_sourcetests/mirror.py | 8
-rw-r--r-- | src/buildstream/testing/_sourcetests/track.py | 6
-rw-r--r-- | tests/elements/filter.py | 14
-rw-r--r-- | tests/frontend/cross_junction_workspace.py | 12
-rw-r--r-- | tests/frontend/project/sources/fetch_source.py | 2
-rw-r--r-- | tests/frontend/workspace.py | 10
-rw-r--r-- | tests/internals/yaml.py | 32
-rw-r--r-- | tests/sources/git.py | 12
19 files changed, 67 insertions, 167 deletions
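
Every file below follows the same migration pattern: the untyped `_yaml.node_get(node, expected_type, key, indices=...)` helper is replaced with typed accessors on the node objects themselves (`get_str()`, `get_sequence()`, `mapping_at()`, `as_str_list()`, `get_scalar().as_str()`). The sketch below contrasts the two styles; the element filename and the exact keys are illustrative placeholders, while the accessor names are the ones used in the diff.

```python
# Illustrative sketch of the migration pattern, not code from this commit.
# 'element.bst' and the YAML keys are placeholders; the accessors
# (get_sequence, mapping_at, get_str, as_str_list) are the new node API.
from buildstream import _yaml

element = _yaml.load('element.bst')   # returns a mapping node

# Old API, removed by this commit:
#   source = _yaml.node_get(element, dict, 'sources', indices=[0])
#   ref = _yaml.node_get(source, str, 'ref')
#   tags = _yaml.node_get(source, list, 'tags', default_value=[])

# New API: typed accessors on the nodes themselves.
source = element.get_sequence('sources').mapping_at(0)    # MappingNode
ref = source.get_str('ref')                                # plain str
tags = source.get_sequence('tags', []).as_str_list()       # list of str
```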
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 331be7e3e..18257ea33 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -402,14 +402,13 @@ class Context():
     # get_overrides():
     #
     # Fetch the override dictionary for the active project. This returns
-    # a node loaded from YAML and as such, values loaded from the returned
-    # node should be loaded using the _yaml.node_get() family of functions.
+    # a node loaded from YAML.
     #
     # Args:
     #    project_name (str): The project name
     #
     # Returns:
-    #    (dict): The overrides dictionary for the specified project
+    #    (MappingNode): The overrides dictionary for the specified project
     #
     def get_overrides(self, project_name):
         return self._project_overrides.get_mapping(project_name, default={})
@@ -493,7 +492,7 @@ class Context():
 
 # _node_get_option_str()
 #
-# Like _yaml.node_get(), but also checks value is one of the allowed option
+# Like Node.get_scalar().as_str(), but also checks value is one of the allowed option
 # strings. Fetches a value from a dictionary node, and makes sure it's one of
 # the pre-defined options.
 #
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index 64e7fafd0..33a7762d1 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -469,8 +469,6 @@ class Loader():
         sources = node.get_sequence(Symbol.SOURCES, default=[])
         element_kind = node.get_str(Symbol.KIND)
 
-        # Safe loop calling into _yaml.node_get() for each element ensures
-        # we have good error reporting
         for index, source in enumerate(sources):
             kind = source.get_str(Symbol.KIND)
             _yaml.node_del(source, Symbol.KIND)
diff --git a/src/buildstream/_projectrefs.py b/src/buildstream/_projectrefs.py
index f296858cf..e72d4757e 100644
--- a/src/buildstream/_projectrefs.py
+++ b/src/buildstream/_projectrefs.py
@@ -150,6 +150,6 @@ class ProjectRefs():
         # Pad the list with empty newly created dictionaries
         _yaml.node_extend_list(project_node, element, source_index + 1, {})
 
-        node = _yaml.node_get(project_node, dict, element, indices=[source_index])
+        node = project_node.get_sequence(element).mapping_at(source_index)
 
         return node
diff --git a/src/buildstream/_yaml.pxd b/src/buildstream/_yaml.pxd
index 91fc1180d..3fffda426 100644
--- a/src/buildstream/_yaml.pxd
+++ b/src/buildstream/_yaml.pxd
@@ -62,7 +62,6 @@ cdef class ProvenanceInformation:
 
 
 cpdef void node_del(Node node, str key, bint safe=*) except *
-cpdef object node_get(Node node, object expected_type, str key, list indices=*, object default_value=*, bint allow_none=*)
cpdef void node_validate(Node node, list valid_keys) except *
 cpdef void node_set(Node node, object key, object value, list indices=*) except *
 cpdef list node_keys(Node node)
diff --git a/src/buildstream/_yaml.pyx b/src/buildstream/_yaml.pyx
index a2327dbc5..e69ff8ee9 100644
--- a/src/buildstream/_yaml.pyx
+++ b/src/buildstream/_yaml.pyx
@@ -206,7 +206,7 @@ cdef class SequenceNode(Node):
 
         if type(value) is not MappingNode:
             provenance = node_get_provenance(self)
-            path = ["[{}]".format(p) for p in node_find_target(provenance, self)] + ["[{}]".format(index)]
+            path = ["[{}]".format(p) for p in node_find_target(provenance.toplevel, self)] + ["[{}]".format(index)]
             raise LoadError(LoadErrorReason.INVALID_DATA,
                             "{}: Value of '{}' is not of the expected type '{}'"
                             .format(provenance, path, MappingNode.__name__))
@@ -217,7 +217,7 @@ cdef class SequenceNode(Node):
 
         if type(value) is not SequenceNode:
             provenance = node_get_provenance(self)
-            path = ["[{}]".format(p) for p in node_find_target(provenance, self)] + ["[{}]".format(index)]
+            path = ["[{}]".format(p) for p in node_find_target(provenance.toplevel, self)] + ["[{}]".format(index)]
             raise LoadError(LoadErrorReason.INVALID_DATA,
                             "{}: Value of '{}' is not of the expected type '{}'"
                             .format(provenance, path, SequenceNode.__name__))
@@ -670,101 +670,6 @@ cpdef ProvenanceInformation node_get_provenance(Node node, str key=None, list in
     return ProvenanceInformation(nodeish)
 
 
-# node_get()
-#
-# Fetches a value from a dictionary node and checks it for
-# an expected value. Use default_value when parsing a value
-# which is only optionally supplied.
-#
-# Args:
-#    node (dict): The dictionary node
-#    expected_type (type): The expected type for the value being searched
-#    key (str): The key to get a value for in node
-#    indices (list of ints): Optionally decend into lists of lists
-#    default_value: Optionally return this value if the key is not found
-#    allow_none: (bool): Allow None to be a valid value
-#
-# Returns:
-#    The value if found in node, otherwise default_value is returned
-#
-# Raises:
-#    LoadError, when the value found is not of the expected type
-#
-# Note:
-#    Returned strings are stripped of leading and trailing whitespace
-#
-cpdef object node_get(Node node, object expected_type, str key, list indices=None, object default_value=_sentinel, bint allow_none=False):
-    if indices is None:
-        value = node.value.get(key, _sentinel)
-
-        if value is _sentinel:
-            if default_value is _sentinel:
-                provenance = node_get_provenance(node)
-                raise LoadError(LoadErrorReason.INVALID_DATA,
-                                "{}: Dictionary did not contain expected key '{}'".format(provenance, key))
-
-            value = _create_node(default_value, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter())
-    else:
-        # Implied type check of the element itself
-        # No need to synthesise useful node content as we destructure it immediately
-        value = SequenceNode(node_get(node, list, key), _SYNTHETIC_FILE_INDEX, 0, 0)
-        for index in indices:
-            value = value.value[index]
-            # FIXME: this should always be nodes, we should be able to remove that
-            if type(value) not in [Node, MappingNode, SequenceNode]:
-                value = _create_node(value, _SYNTHETIC_FILE_INDEX, 0, 0)
-
-    # Optionally allow None as a valid value for any type
-    if value.value is None and (allow_none or default_value is None):
-        return None
-
-    if (expected_type is not None) and (type(value.value) is not expected_type):
-        # Attempt basic conversions if possible, typically we want to
-        # be able to specify numeric values and convert them to strings,
-        # but we dont want to try converting dicts/lists
-        try:
-            if expected_type == bool and type(value.value) is str:
-                # Dont coerce booleans to string, this makes "False" strings evaluate to True
-                # We don't structure into full nodes since there's no need.
-                if value.value in ('True', 'true'):
-                    value = ScalarNode(True, _SYNTHETIC_FILE_INDEX, 0, 0)
-                elif value.value in ('False', 'false'):
-                    value = ScalarNode(False, _SYNTHETIC_FILE_INDEX, 0, 0)
-                else:
-                    raise ValueError()
-            elif not (expected_type == list or
-                      expected_type == dict or
-                      isinstance(value.value, (list, dict))):
-                value = _create_node(expected_type(value.value), _SYNTHETIC_FILE_INDEX, 0, 0)
-            else:
-                raise ValueError()
-        except (ValueError, TypeError):
-            provenance = node_get_provenance(node, key=key, indices=indices)
-            if indices:
-                path = [key, *["[{:d}]".format(i) for i in indices]]
-                path = "".join(path)
-            else:
-                path = key
-            raise LoadError(LoadErrorReason.INVALID_DATA,
-                            "{}: Value of '{}' is not of the expected type '{}'"
-                            .format(provenance, path, expected_type.__name__))
-
-    # Now collapse lists, and scalars, to their value, leaving nodes as-is
-    if type(value.value) is not dict:
-        value = value.value
-
-    # Trim it at the bud, let all loaded strings from yaml be stripped of whitespace
-    if type(value) is str:
-        value = value.strip()
-
-    elif type(value) is list:
-        # Now we create a fresh list which unwraps the str and list types
-        # semi-recursively.
-        value = __trim_list_provenance(value)
-
-    return value
-
-
 cdef list __trim_list_provenance(list value):
     cdef list ret = []
     cdef Node entry
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index bd42c45b8..ed5ce97ba 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -531,13 +531,12 @@ class Element(Plugin):
           :class:`.LoadError`
 
         """
-        value = self.node_get_member(node, list, member_name)
         ret = []
-        for index, x in enumerate(value):
+        for value in node.get_sequence(member_name):
             try:
-                ret.append(self.__variables.subst(x))
+                ret.append(self.__variables.subst(value.as_str()))
             except LoadError as e:
-                provenance = _yaml.node_get_provenance(node, key=member_name, indices=[index])
+                provenance = _yaml.node_get_provenance(value)
                 raise LoadError(e.reason, '{}: {}'.format(provenance, e), detail=e.detail) from e
         return ret
 
@@ -2811,7 +2810,7 @@ class Element(Plugin):
         # If this ever changes, things will go wrong unexpectedly.
         if not self.__whitelist_regex:
             bstdata = self.get_public_data('bst')
-            whitelist = _yaml.node_get(bstdata, list, 'overlap-whitelist', default_value=[])
+            whitelist = bstdata.get_sequence('overlap-whitelist', default=[]).as_str_list()
             whitelist_expressions = [utils._glob2re(self.__variables.subst(exp.strip())) for exp in whitelist]
             expression = ('^(?:' + '|'.join(whitelist_expressions) + ')$')
             self.__whitelist_regex = re.compile(expression)
diff --git a/src/buildstream/plugins/elements/compose.py b/src/buildstream/plugins/elements/compose.py
index b2e52170e..a9f18875f 100644
--- a/src/buildstream/plugins/elements/compose.py
+++ b/src/buildstream/plugins/elements/compose.py
@@ -66,8 +66,8 @@ class ComposeElement(Element):
         # We name this variable 'integration' only to avoid
         # collision with the Element.integrate() method.
         self.integration = node.get_bool('integrate')
-        self.include = self.node_get_member(node, list, 'include')
-        self.exclude = self.node_get_member(node, list, 'exclude')
+        self.include = node.get_sequence('include').as_str_list()
+        self.exclude = node.get_sequence('exclude').as_str_list()
         self.include_orphans = node.get_bool('include-orphans')
 
     def preflight(self):
diff --git a/src/buildstream/plugins/elements/filter.py b/src/buildstream/plugins/elements/filter.py
index e62a9d49b..bba56f351 100644
--- a/src/buildstream/plugins/elements/filter.py
+++ b/src/buildstream/plugins/elements/filter.py
@@ -171,8 +171,8 @@ class FilterElement(Element):
             'include', 'exclude', 'include-orphans'
         ])
 
-        self.include = self.node_get_member(node, list, 'include')
-        self.exclude = self.node_get_member(node, list, 'exclude')
+        self.include = node.get_sequence('include').as_str_list()
+        self.exclude = node.get_sequence('exclude').as_str_list()
         self.include_orphans = node.get_bool('include-orphans')
         self.include_provenance = self.node_provenance(node, member_name='include')
         self.exclude_provenance = self.node_provenance(node, member_name='exclude')
diff --git a/src/buildstream/plugins/elements/script.py b/src/buildstream/plugins/elements/script.py
index df03bad17..1c694060c 100644
--- a/src/buildstream/plugins/elements/script.py
+++ b/src/buildstream/plugins/elements/script.py
@@ -46,7 +46,7 @@ class ScriptElement(buildstream.ScriptElement):
     BST_VIRTUAL_DIRECTORY = True
 
     def configure(self, node):
-        for n in self.node_get_member(node, list, 'layout', []):
+        for n in node.get_sequence('layout', []):
             dst = self.node_subst_member(n, 'destination')
             elm = self.node_subst_member(n, 'element', None)
             self.layout_add(elm, dst)
diff --git a/src/buildstream/plugins/sources/pip.py b/src/buildstream/plugins/sources/pip.py
index 816b9e95b..627f26397 100644
--- a/src/buildstream/plugins/sources/pip.py
+++ b/src/buildstream/plugins/sources/pip.py
@@ -114,14 +114,14 @@ class PipSource(Source):
         self.ref = node.get_str('ref', None)
         self.original_url = node.get_str('url', _PYPI_INDEX_URL)
         self.index_url = self.translate_url(self.original_url)
-        self.packages = self.node_get_member(node, list, 'packages', [])
-        self.requirements_files = self.node_get_member(node, list, 'requirements-files', [])
+        self.packages = node.get_sequence('packages', []).as_str_list()
+        self.requirements_files = node.get_sequence('requirements-files', []).as_str_list()
         if not (self.packages or self.requirements_files):
             raise SourceError("{}: Either 'packages' or 'requirements-files' must be specified".
                               format(self))
 
     def preflight(self):
-        # Try to find a pip version that spports download command
+        # Try to find a pip version that supports download command
         self.host_pip = None
         for python in reversed(_PYTHON_VERSIONS):
             try:
diff --git a/src/buildstream/sandbox/_sandboxremote.py b/src/buildstream/sandbox/_sandboxremote.py
index fbba17f3f..965aea155 100644
--- a/src/buildstream/sandbox/_sandboxremote.py
+++ b/src/buildstream/sandbox/_sandboxremote.py
@@ -173,7 +173,7 @@ class SandboxRemote(Sandbox):
 
         for tls_key in tls_keys:
             if tls_key in config:
-                _yaml.node_set(config, tls_key, resolve_path(_yaml.node_get(config, str, tls_key)))
+                _yaml.node_set(config, tls_key, resolve_path(config.get_str(tls_key)))
 
         return RemoteExecutionSpec(*[_yaml.node_sanitize(conf) for conf in service_configs])
 
diff --git a/src/buildstream/testing/_sourcetests/mirror.py b/src/buildstream/testing/_sourcetests/mirror.py
index d682bb2ef..afbadbef7 100644
--- a/src/buildstream/testing/_sourcetests/mirror.py
+++ b/src/buildstream/testing/_sourcetests/mirror.py
@@ -357,9 +357,9 @@ def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
 
     # Tracking tries upstream first. Check the ref is from upstream.
     new_element = _yaml.load(element_path)
-    source = _yaml.node_get(new_element, dict, 'sources', [0])
+    source = new_element.get_sequence('sources').mapping_at(0)
     if 'ref' in source:
-        assert _yaml.node_get(source, str, 'ref') == upstream_ref
+        assert source.get_str('ref') == upstream_ref
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -422,6 +422,6 @@ def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
 
     # Check that tracking fell back to the mirror
     new_element = _yaml.load(element_path)
-    source = _yaml.node_get(new_element, dict, 'sources', [0])
+    source = new_element.get_sequence('sources').mapping_at(0)
     if 'ref' in source:
-        assert _yaml.node_get(source, str, 'ref') == mirror_ref
+        assert source.get_str('ref') == mirror_ref
diff --git a/src/buildstream/testing/_sourcetests/track.py b/src/buildstream/testing/_sourcetests/track.py
index 668ea29e5..08173e79b 100644
--- a/src/buildstream/testing/_sourcetests/track.py
+++ b/src/buildstream/testing/_sourcetests/track.py
@@ -321,13 +321,13 @@ def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
 
     # Get all of the sources
     assert 'sources' in new_sources
-    sources_list = _yaml.node_get(new_sources, list, 'sources')
+    sources_list = new_sources.get_sequence('sources')
     assert len(sources_list) == 1
 
     # Get the first source from the sources list
-    new_source = _yaml.node_get(new_sources, dict, 'sources', indices=[0])
+    new_source = sources_list.mapping_at(0)
     assert 'ref' in new_source
-    assert ref == _yaml.node_get(new_source, str, 'ref')
+    assert ref == new_source.get_scalar('ref').as_str()
 
 
 @pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/elements/filter.py b/tests/elements/filter.py
index 8292c09bb..f9c80afff 100644
--- a/tests/elements/filter.py
+++ b/tests/elements/filter.py
@@ -239,7 +239,7 @@ def test_filter_track(datafiles, cli, tmpdir):
 
     # Now check that a ref field exists
     new_input = _yaml.load(input_file)
-    source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
+    source_node = new_input.get_sequence('sources').mapping_at(0)
     new_input_ref = source_node.get_str('ref')
     assert new_input_ref == ref
 
@@ -294,7 +294,7 @@ def test_filter_track_excepted(datafiles, cli, tmpdir):
 
     # Now check that a ref field exists
     new_input = _yaml.load(input_file)
-    source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
+    source_node = new_input.get_sequence('sources').mapping_at(0)
     assert 'ref' not in source_node
 
 
@@ -348,7 +348,7 @@ def test_filter_track_multi_to_one(datafiles, cli, tmpdir):
 
     # Now check that a ref field exists
    new_input = _yaml.load(input_file)
-    source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
+    source_node = new_input.get_sequence('sources').mapping_at(0)
     new_ref = source_node.get_str('ref')
     assert new_ref == ref
 
@@ -413,12 +413,12 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
 
     # Now check that a ref field exists
     new_input = _yaml.load(input_file)
-    source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
+    source_node = new_input.get_sequence('sources').mapping_at(0)
     new_ref = source_node.get_str('ref')
     assert new_ref == ref
 
     new_input2 = _yaml.load(input2_file)
-    source_node2 = _yaml.node_get(new_input2, dict, 'sources', indices=[0])
+    source_node2 = new_input2.get_sequence('sources').mapping_at(0)
     new_ref2 = source_node2.get_str('ref')
     assert new_ref2 == ref
 
@@ -482,11 +482,11 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
 
     # Now check that a ref field exists
     new_input = _yaml.load(input_file)
-    source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
+    source_node = new_input.get_sequence('sources').mapping_at(0)
     assert 'ref' not in source_node
 
     new_input2 = _yaml.load(input2_file)
-    source_node2 = _yaml.node_get(new_input2, dict, 'sources', indices=[0])
+    source_node2 = new_input2.get_sequence('sources').mapping_at(0)
     new_ref2 = source_node2.get_str('ref')
     assert new_ref2 == ref
 
diff --git a/tests/frontend/cross_junction_workspace.py b/tests/frontend/cross_junction_workspace.py
index 974aba4bd..38aafa558 100644
--- a/tests/frontend/cross_junction_workspace.py
+++ b/tests/frontend/cross_junction_workspace.py
@@ -75,10 +75,12 @@ def test_list_cross_junction(cli, tmpdir):
     result.assert_success()
 
     loaded = _yaml.load_data(result.output)
-    workspaces = _yaml.node_get(loaded, list, 'workspaces')
+    workspaces = loaded.get_sequence('workspaces')
     assert len(workspaces) == 1
-    assert 'element' in workspaces[0]
-    assert workspaces[0].get_str('element') == element
+    first_workspace = workspaces.mapping_at(0)
+
+    assert 'element' in first_workspace
+    assert first_workspace.get_str('element') == element
 
 
 def test_close_cross_junction(cli, tmpdir):
@@ -96,7 +98,7 @@ def test_close_cross_junction(cli, tmpdir):
     result.assert_success()
 
     loaded = _yaml.load_data(result.output)
-    workspaces = _yaml.node_get(loaded, list, 'workspaces')
+    workspaces = loaded.get_sequence('workspaces')
     assert not workspaces
 
 
@@ -114,7 +116,7 @@ def test_close_all_cross_junction(cli, tmpdir):
     result.assert_success()
 
     loaded = _yaml.load_data(result.output)
-    workspaces = _yaml.node_get(loaded, list, 'workspaces')
+    workspaces = loaded.get_sequence('workspaces')
     assert not workspaces
 
 
diff --git a/tests/frontend/project/sources/fetch_source.py b/tests/frontend/project/sources/fetch_source.py
index 9a873d1fe..7c0df4595 100644
--- a/tests/frontend/project/sources/fetch_source.py
+++ b/tests/frontend/project/sources/fetch_source.py
@@ -38,7 +38,7 @@ class FetchFetcher(SourceFetcher):
 class FetchSource(Source):
     # Read config to know which URLs to fetch
     def configure(self, node):
-        self.original_urls = self.node_get_member(node, list, 'urls')
+        self.original_urls = node.get_sequence('urls').as_str_list()
         self.output_file = node.get_str('output-text')
         self.fetch_succeeds = {}
         if 'fetch-succeeds' in node:
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index 339f0d496..b240e5728 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -184,7 +184,7 @@ def test_open_bzr_customize(cli, tmpdir, datafiles):
 
     # Check that the correct origin branch is set
     element_config = _yaml.load(os.path.join(project, "elements", element_name))
-    source_config = _yaml.node_get(element_config, dict, 'sources', [0])
+    source_config = element_config.get_sequence('sources').mapping_at(0)
     output = subprocess.check_output(["bzr", "info"], cwd=workspace)
     stripped_url = source_config.get_str('url').lstrip("file:///")
     expected_output_str = ("checkout of branch: /{}/{}"
@@ -608,10 +608,10 @@ def test_list(cli, tmpdir, datafiles):
     result.assert_success()
 
     loaded = _yaml.load_data(result.output)
-    workspaces = _yaml.node_get(loaded, list, 'workspaces')
+    workspaces = loaded.get_sequence('workspaces')
     assert len(workspaces) == 1
 
-    space = workspaces[0]
+    space = workspaces.mapping_at(0)
     assert space.get_str('element') == element_name
     assert space.get_str('directory') == workspace
 
@@ -1132,7 +1132,7 @@ def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
     # Delete the ref from the source so that we can detect if the
     # element has been tracked
     element_contents = _yaml.load(element_file)
-    _yaml.node_del(_yaml.node_get(element_contents, dict, 'sources', [0]), 'ref')
+    _yaml.node_del(element_contents.get_sequence('sources').mapping_at(0), 'ref')
     _yaml.dump(element_contents, element_file)
 
     result = cli.run(project=project, args=['-C', workspace, 'source', 'track', *arg_elm])
@@ -1140,7 +1140,7 @@
 
     # Element is tracked now
     element_contents = _yaml.load(element_file)
-    assert 'ref' in _yaml.node_get(element_contents, dict, 'sources', [0])
+    assert 'ref' in element_contents.get_sequence('sources').mapping_at(0)
 
 
 @pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/internals/yaml.py b/tests/internals/yaml.py
index 0e747fa48..aa83d949a 100644
--- a/tests/internals/yaml.py
+++ b/tests/internals/yaml.py
@@ -103,11 +103,11 @@ def test_node_get(datafiles):
 
     base = _yaml.load(filename)
     assert base.value.get('kind').value == 'pony'
-    children = _yaml.node_get(base, list, 'children')
-    assert isinstance(children, list)
+    children = base.get_sequence('children')
+    assert isinstance(children, _yaml.SequenceNode)
     assert len(children) == 7
 
-    child = _yaml.node_get(base, dict, 'children', indices=[6])
+    child = base.get_sequence('children').mapping_at(6)
     assert_provenance(filename, 20, 8, child, 'mood')
 
     extra = base.get_mapping('extra')
@@ -146,7 +146,7 @@ def test_node_set_overwrite(datafiles):
     assert base.get_str('kind') == 'cow'
 
     # Overwrite a list as a string
-    assert _yaml.node_get(base, list, 'moods') == ['happy', 'sad']
+    assert base.get_sequence('moods').as_str_list() == ['happy', 'sad']
     _yaml.node_set(base, 'moods', 'unemotional')
     assert base.get_str('moods') == 'unemotional'
 
@@ -160,13 +160,11 @@ def test_node_set_list_element(datafiles):
 
     base = _yaml.load(filename)
 
-    assert _yaml.node_get(base, list, 'moods') == ['happy', 'sad']
-    assert _yaml.node_get(base, str, 'moods', indices=[0]) == 'happy'
+    assert base.get_sequence('moods').as_str_list() == ['happy', 'sad']
 
     _yaml.node_set(base, 'moods', 'confused', indices=[0])
 
-    assert _yaml.node_get(base, list, 'moods') == ['confused', 'sad']
-    assert _yaml.node_get(base, str, 'moods', indices=[0]) == 'confused'
+    assert base.get_sequence('moods').as_str_list() == ['confused', 'sad']
 
 
 # Really this is testing _yaml.node_copy(), we want to
@@ -254,9 +252,9 @@ def test_list_composition(datafiles, filename, tmpdir,
 
     _yaml.composite_dict(base, overlay)
 
-    children = _yaml.node_get(base, list, 'children')
+    children = base.get_sequence('children')
     assert len(children) == length
-    child = children[index]
+    child = children.mapping_at(index)
     assert child.get_str('mood') == mood
     assert_provenance(prov_file, prov_line, prov_col, child, 'mood')
 
@@ -272,7 +270,7 @@ def test_list_deletion(datafiles):
     overlay = _yaml.load(overlay, shortname='listoverwriteempty.yaml')
     _yaml.composite_dict(base, overlay)
 
-    children = _yaml.node_get(base, list, 'children')
+    children = base.get_sequence('children')
     assert not children
 
 
@@ -286,8 +284,8 @@ def test_nonexistent_list_extension(datafiles):
 
     _yaml.node_extend_list(base, 'todo', 3, 'empty')
 
-    assert len(_yaml.node_get(base, list, 'todo')) == 3
-    assert _yaml.node_get(base, list, 'todo') == ['empty', 'empty', 'empty']
+    assert len(base.get_sequence('todo')) == 3
+    assert base.get_sequence('todo').as_str_list() == ['empty', 'empty', 'empty']
 
 
 # Tests for deep list composition
@@ -390,9 +388,9 @@ def test_list_composition_twice(datafiles, tmpdir, filename1, filename2,
     _yaml.composite_dict(base, overlay1)
     _yaml.composite_dict(base, overlay2)
 
-    children = _yaml.node_get(base, list, 'children')
+    children = base.get_sequence('children')
     assert len(children) == length
-    child = children[index]
+    child = children.mapping_at(index)
     assert child.get_str('mood') == mood
     assert_provenance(prov_file, prov_line, prov_col, child, 'mood')
 
@@ -407,9 +405,9 @@ def test_list_composition_twice(datafiles, tmpdir, filename1, filename2,
     _yaml.composite_dict(overlay1, overlay2)
     _yaml.composite_dict(base, overlay1)
 
-    children = _yaml.node_get(base, list, 'children')
+    children = base.get_sequence('children')
     assert len(children) == length
-    child = children[index]
+    child = children.mapping_at(index)
     assert child.get_str('mood') == mood
     assert_provenance(prov_file, prov_line, prov_col, child, 'mood')
 
diff --git a/tests/sources/git.py b/tests/sources/git.py
index e3f803469..69155e02b 100644
--- a/tests/sources/git.py
+++ b/tests/sources/git.py
@@ -760,7 +760,7 @@ def test_track_fetch(cli, tmpdir, datafiles, ref_format, tag, extra_commit):
     result.assert_success()
 
     element = _yaml.load(element_path)
-    new_ref = _yaml.node_get(element, dict, 'sources', [0]).get_str('ref')
+    new_ref = element.get_sequence('sources').mapping_at(0).get_str('ref')
 
     if ref_format == 'git-describe' and tag:
         # Check and strip prefix
@@ -849,7 +849,7 @@ def test_git_describe(cli, tmpdir, datafiles, ref_storage, tag_type):
 
     if ref_storage == 'inline':
         element = _yaml.load(element_path)
-        tags = _yaml.node_get(_yaml.node_get(element, dict, 'sources', [0]), list, 'tags')
+        tags = element.get_sequence('sources').mapping_at(0).get_sequence('tags')
         assert len(tags) == 2
         for tag in tags:
             assert 'tag' in tag
@@ -962,11 +962,11 @@ def test_git_describe_head_is_tagged(cli, tmpdir, datafiles, ref_storage, tag_ty
 
     if ref_storage == 'inline':
         element = _yaml.load(element_path)
-        source = _yaml.node_get(element, dict, 'sources', indices=[0])
-        tags = _yaml.node_get(source, list, 'tags')
+        source = element.get_sequence('sources').mapping_at(0)
+        tags = source.get_sequence('tags')
         assert len(tags) == 1
 
-        tag = _yaml.node_get(source, dict, 'tags', indices=[0])
+        tag = source.get_sequence('tags').mapping_at(0)
         assert 'tag' in tag
         assert 'commit' in tag
         assert 'annotated' in tag
@@ -1125,7 +1125,7 @@ def test_default_do_not_track_tags(cli, tmpdir, datafiles):
     result.assert_success()
 
     element = _yaml.load(element_path)
-    source = _yaml.node_get(element, dict, 'sources', indices=[0])
+    source = element.get_sequence('sources').mapping_at(0)
 
     assert 'tags' not in source