author     bst-marge-bot <marge-bot@buildstream.build>  2019-06-04 10:46:25 +0000
committer  bst-marge-bot <marge-bot@buildstream.build>  2019-06-04 10:46:25 +0000
commit     1233899f7b151447f3a820d1c41e162beddf8236
tree       2713e208c8f07adf0d3c7663759ef3e1bdbade35
parent     6b1f04769601328dfdd724103a4e08fc8623599c
parent     5fbd1b592db0d81d03aaff2599a2d4930306cee8
download   buildstream-1233899f7b151447f3a820d1c41e162beddf8236.tar.gz
Merge branch 'bschubert/optimize-node_get' into 'master'
Optimize _yaml.node_get

See merge request BuildStream/buildstream!1370
-rw-r--r--  NEWS                                         |  3
-rwxr-xr-x  doc/bst2html.py                              |  5
-rw-r--r--  src/buildstream/_context.py                  | 11
-rw-r--r--  src/buildstream/_gitsourcebase.py            |  5
-rw-r--r--  src/buildstream/_loader/loader.py            | 13
-rw-r--r--  src/buildstream/_project.py                  | 35
-rw-r--r--  src/buildstream/_yaml.pyx                    | 20
-rw-r--r--  src/buildstream/element.py                   | 33
-rw-r--r--  src/buildstream/plugins/elements/junction.py |  3
-rw-r--r--  src/buildstream/source.py                    |  5
-rw-r--r--  tests/internals/yaml.py                      | 15
11 files changed, 71 insertions, 77 deletions
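
The bulk of this merge is mechanical: every call site that passed collections.abc.Mapping as the expected_type of _yaml.node_get() now passes the concrete dict type, and the now-unused Mapping imports are dropped. A minimal sketch of the pattern, assuming a node already loaded via the internal _yaml module; the 'config' key and helper name are illustrative, not taken from the diff:

    from buildstream import _yaml  # internal module touched throughout this diff

    def get_config(node):
        # Before this merge:
        #   from collections.abc import Mapping
        #   return _yaml.node_get(node, Mapping, 'config', default_value={})
        # After: node_get() performs an exact type check, so the concrete
        # dict type must be requested instead of the Mapping ABC.
        return _yaml.node_get(node, dict, 'config', default_value={})
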
diff --git a/NEWS b/NEWS
index ee935711e..c37ad1b6a 100644
--- a/NEWS
+++ b/NEWS
@@ -2,6 +2,9 @@
buildstream 1.3.1
=================
+ o BREAKING CHANGE: `node_get_member` doesn't accept 'expected_type=Mapping'
+ anymore. Please use 'expected_type=dict' instead.
+
o BREAKING CHANGE: Artifact as a Proto. The caching of buildstream artifacts
has changed from a reference based implementation. Existing artifacts and
artifact servers are not compatible, as such remote artifact servers need to
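
The NEWS entry above is the plugin-facing side of the change: Plugin.node_get_member() no longer accepts the abstract Mapping type as expected_type. A minimal sketch of how a plugin's configure() migrates, modelled on the junction.py hunk further down; the element class itself is illustrative and not part of this commit:

    from buildstream import Element

    class MyElement(Element):
        # Illustrative element fragment, not part of this commit
        def configure(self, node):
            # Before: self.options = self.node_get_member(node, Mapping, 'options', default={})
            # After:  pass the concrete dict type instead of collections.abc.Mapping
            self.options = self.node_get_member(node, dict, 'options', default={})
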
diff --git a/doc/bst2html.py b/doc/bst2html.py
index 2f4012695..448ab1389 100755
--- a/doc/bst2html.py
+++ b/doc/bst2html.py
@@ -29,7 +29,6 @@ import sys
import re
import shlex
import subprocess
-from collections.abc import Mapping
from contextlib import contextmanager
from tempfile import TemporaryDirectory
@@ -348,7 +347,7 @@ def run_session(description, tempdir, source_cache, palette, config_file, force)
# tarball. This workaround lets us build docs from
# a source distribution tarball.
#
- symlinks = _yaml.node_get(desc, Mapping, 'workaround-symlinks', default_value={})
+ symlinks = _yaml.node_get(desc, dict, 'workaround-symlinks', default_value={})
for symlink, target in _yaml.node_items(symlinks):
# Resolve real path to where symlink should be
@@ -382,7 +381,7 @@ def run_session(description, tempdir, source_cache, palette, config_file, force)
#
commands = _yaml.node_get(desc, list, 'commands')
for c in commands:
- command = _yaml.node_get(desc, Mapping, 'commands', indices=[commands.index(c)])
+ command = _yaml.node_get(desc, dict, 'commands', indices=[commands.index(c)])
# Get the directory where this command should be run
directory = _yaml.node_get(command, str, 'directory')
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 151ea636a..3f6e6ac3c 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -21,7 +21,6 @@ import os
import shutil
import datetime
from collections import deque
-from collections.abc import Mapping
from contextlib import contextmanager
from . import utils
from . import _cachekey
@@ -252,7 +251,7 @@ class Context():
# Load quota configuration
# We need to find the first existing directory in the path of our
# cachedir - the cachedir may not have been created yet.
- cache = _yaml.node_get(defaults, Mapping, 'cache')
+ cache = _yaml.node_get(defaults, dict, 'cache')
_yaml.node_validate(cache, ['quota', 'pull-buildtrees', 'cache-buildtrees'])
self.config_cache_quota_string = _yaml.node_get(cache, str, 'quota')
@@ -281,7 +280,7 @@ class Context():
cache, 'cache-buildtrees', ['always', 'auto', 'never'])
# Load logging config
- logging = _yaml.node_get(defaults, Mapping, 'logging')
+ logging = _yaml.node_get(defaults, dict, 'logging')
_yaml.node_validate(logging, [
'key-length', 'verbose',
'error-lines', 'message-lines',
@@ -296,7 +295,7 @@ class Context():
self.log_message_format = _yaml.node_get(logging, str, 'message-format')
# Load scheduler config
- scheduler = _yaml.node_get(defaults, Mapping, 'scheduler')
+ scheduler = _yaml.node_get(defaults, dict, 'scheduler')
_yaml.node_validate(scheduler, [
'on-error', 'fetchers', 'builders',
'pushers', 'network-retries'
@@ -406,10 +405,10 @@ class Context():
# project_name (str): The project name
#
# Returns:
- # (Mapping): The overrides dictionary for the specified project
+ # (dict): The overrides dictionary for the specified project
#
def get_overrides(self, project_name):
- return _yaml.node_get(self._project_overrides, Mapping, project_name, default_value={})
+ return _yaml.node_get(self._project_overrides, dict, project_name, default_value={})
# get_strict():
#
diff --git a/src/buildstream/_gitsourcebase.py b/src/buildstream/_gitsourcebase.py
index 7d07c56cb..7f02af40d 100644
--- a/src/buildstream/_gitsourcebase.py
+++ b/src/buildstream/_gitsourcebase.py
@@ -24,7 +24,6 @@
import os
import re
import shutil
-from collections.abc import Mapping
from io import StringIO
from tempfile import TemporaryFile
@@ -413,9 +412,9 @@ class _GitSourceBase(Source):
# and submodule_checkout_overrides dictionaries.
self.submodule_overrides = {}
self.submodule_checkout_overrides = {}
- modules = self.node_get_member(node, Mapping, 'submodules', {})
+ modules = self.node_get_member(node, dict, 'submodules', {})
for path, _ in self.node_items(modules):
- submodule = self.node_get_member(modules, Mapping, path)
+ submodule = self.node_get_member(modules, dict, path)
url = self.node_get_member(submodule, str, 'url', None)
# Make sure to mark all URLs that are specified in the configuration
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index e279501ff..752883d96 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -19,7 +19,6 @@
import os
from functools import cmp_to_key
-from collections.abc import Mapping
from .._exceptions import LoadError, LoadErrorReason
from .. import Consistency
@@ -475,7 +474,7 @@ class Loader():
# Safe loop calling into _yaml.node_get() for each element ensures
# we have good error reporting
for i in range(len(sources)):
- source = _yaml.node_get(node, Mapping, Symbol.SOURCES, indices=[i])
+ source = _yaml.node_get(node, dict, Symbol.SOURCES, indices=[i])
kind = _yaml.node_get(source, str, Symbol.KIND)
_yaml.node_del(source, Symbol.KIND)
@@ -490,12 +489,12 @@ class Loader():
meta_element = MetaElement(self.project, element.name, element_kind,
elt_provenance, meta_sources,
- _yaml.node_get(node, Mapping, Symbol.CONFIG, default_value={}),
- _yaml.node_get(node, Mapping, Symbol.VARIABLES, default_value={}),
- _yaml.node_get(node, Mapping, Symbol.ENVIRONMENT, default_value={}),
+ _yaml.node_get(node, dict, Symbol.CONFIG, default_value={}),
+ _yaml.node_get(node, dict, Symbol.VARIABLES, default_value={}),
+ _yaml.node_get(node, dict, Symbol.ENVIRONMENT, default_value={}),
_yaml.node_get(node, list, Symbol.ENV_NOCACHE, default_value=[]),
- _yaml.node_get(node, Mapping, Symbol.PUBLIC, default_value={}),
- _yaml.node_get(node, Mapping, Symbol.SANDBOX, default_value={}),
+ _yaml.node_get(node, dict, Symbol.PUBLIC, default_value={}),
+ _yaml.node_get(node, dict, Symbol.SANDBOX, default_value={}),
element_kind == 'junction')
# Cache it now, make sure it's already there before recursing
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index c40321c66..1fdc84acb 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -21,7 +21,6 @@
import os
import sys
from collections import OrderedDict
-from collections.abc import Mapping
from pathlib import Path
from pluginbase import PluginBase
from . import utils
@@ -606,7 +605,7 @@ class Project():
self.config.options = OptionPool(self.element_path)
self.first_pass_config.options = OptionPool(self.element_path)
- defaults = _yaml.node_get(pre_config_node, Mapping, 'defaults')
+ defaults = _yaml.node_get(pre_config_node, dict, 'defaults')
_yaml.node_validate(defaults, ['targets'])
self._default_targets = _yaml.node_get(defaults, list, "targets")
@@ -683,14 +682,14 @@ class Project():
self.remote_execution_specs = self._context.remote_execution_specs
# Load sandbox environment variables
- self.base_environment = _yaml.node_get(config, Mapping, 'environment')
+ self.base_environment = _yaml.node_get(config, dict, 'environment')
self.base_env_nocache = _yaml.node_get(config, list, 'environment-nocache')
# Load sandbox configuration
- self._sandbox = _yaml.node_get(config, Mapping, 'sandbox')
+ self._sandbox = _yaml.node_get(config, dict, 'sandbox')
# Load project split rules
- self._splits = _yaml.node_get(config, Mapping, 'split-rules')
+ self._splits = _yaml.node_get(config, dict, 'split-rules')
# Support backwards compatibility for fail-on-overlap
fail_on_overlap = _yaml.node_get(config, bool, 'fail-on-overlap', default_value=None)
@@ -714,12 +713,12 @@ class Project():
self.refs.load(self.options)
# Parse shell options
- shell_options = _yaml.node_get(config, Mapping, 'shell')
+ shell_options = _yaml.node_get(config, dict, 'shell')
_yaml.node_validate(shell_options, ['command', 'environment', 'host-files'])
self._shell_command = _yaml.node_get(shell_options, list, 'command')
# Perform environment expansion right away
- shell_environment = _yaml.node_get(shell_options, Mapping, 'environment', default_value={})
+ shell_environment = _yaml.node_get(shell_options, dict, 'environment', default_value={})
for key in _yaml.node_keys(shell_environment):
value = _yaml.node_get(shell_environment, str, key)
self._shell_environment[key] = os.path.expandvars(value)
@@ -732,7 +731,7 @@ class Project():
else:
# Some validation
index = host_files.index(host_file)
- host_file_desc = _yaml.node_get(shell_options, Mapping, 'host-files', indices=[index])
+ host_file_desc = _yaml.node_get(shell_options, dict, 'host-files', indices=[index])
_yaml.node_validate(host_file_desc, ['path', 'host_path', 'optional'])
# Parse the host mount
@@ -759,8 +758,8 @@ class Project():
# Element and Source type configurations will be composited later onto
# element/source types, so we delete it from here and run our final
# assertion after.
- output.element_overrides = _yaml.node_get(config, Mapping, 'elements', default_value={})
- output.source_overrides = _yaml.node_get(config, Mapping, 'sources', default_value={})
+ output.element_overrides = _yaml.node_get(config, dict, 'elements', default_value={})
+ output.source_overrides = _yaml.node_get(config, dict, 'sources', default_value={})
_yaml.node_del(config, 'elements', safe=True)
_yaml.node_del(config, 'sources', safe=True)
_yaml.node_final_assertions(config)
@@ -768,7 +767,7 @@ class Project():
self._load_plugin_factories(config, output)
# Load project options
- options_node = _yaml.node_get(config, Mapping, 'options', default_value={})
+ options_node = _yaml.node_get(config, dict, 'options', default_value={})
output.options.load(options_node)
if self.junction:
# load before user configuration
@@ -776,7 +775,7 @@ class Project():
# Collect option values specified in the user configuration
overrides = self._context.get_overrides(self.name)
- override_options = _yaml.node_get(overrides, Mapping, 'options', default_value={})
+ override_options = _yaml.node_get(overrides, dict, 'options', default_value={})
output.options.load_yaml_values(override_options)
if self._cli_options:
output.options.load_cli_values(self._cli_options, ignore_unknown=ignore_unknown)
@@ -795,7 +794,7 @@ class Project():
output.options.process_node(output.source_overrides)
# Load base variables
- output.base_variables = _yaml.node_get(config, Mapping, 'variables')
+ output.base_variables = _yaml.node_get(config, dict, 'variables')
# Add the project name as a default variable
_yaml.node_set(output.base_variables, 'project-name', self.name)
@@ -823,7 +822,7 @@ class Project():
_yaml.node_validate(mirror, allowed_mirror_fields)
mirror_name = _yaml.node_get(mirror, str, 'name')
alias_mappings = {}
- for alias_mapping, uris in _yaml.node_items(_yaml.node_get(mirror, Mapping, 'aliases')):
+ for alias_mapping, uris in _yaml.node_items(_yaml.node_get(mirror, dict, 'aliases')):
assert isinstance(uris, list)
alias_mappings[alias_mapping] = list(uris)
output.mirrors[mirror_name] = alias_mappings
@@ -831,7 +830,7 @@ class Project():
output.default_mirror = mirror_name
# Source url aliases
- output._aliases = _yaml.node_get(config, Mapping, 'aliases', default_value={})
+ output._aliases = _yaml.node_get(config, dict, 'aliases', default_value={})
# _find_project_dir()
#
@@ -894,7 +893,7 @@ class Project():
.format(origin_value))
# Store source versions for checking later
- source_versions = _yaml.node_get(origin, Mapping, 'sources', default_value={})
+ source_versions = _yaml.node_get(origin, dict, 'sources', default_value={})
for key in _yaml.node_keys(source_versions):
if key in source_format_versions:
raise LoadError(
@@ -903,7 +902,7 @@ class Project():
source_format_versions[key] = _yaml.node_get(source_versions, int, key)
# Store element versions for checking later
- element_versions = _yaml.node_get(origin, Mapping, 'elements', default_value={})
+ element_versions = _yaml.node_get(origin, dict, 'elements', default_value={})
for key in _yaml.node_keys(element_versions):
if key in element_format_versions:
raise LoadError(
@@ -947,7 +946,7 @@ class Project():
node_keys = [key for key in _yaml.node_keys(origin)]
if plugin_group in node_keys:
origin_node = _yaml.node_copy(origin)
- plugins = _yaml.node_get(origin, Mapping, plugin_group, default_value={})
+ plugins = _yaml.node_get(origin, dict, plugin_group, default_value={})
_yaml.node_set(origin_node, 'plugins', [k for k in _yaml.node_keys(plugins)])
for group in expected_groups:
if group in origin_node:
diff --git a/src/buildstream/_yaml.pyx b/src/buildstream/_yaml.pyx
index 50f0c64a9..4505e2f95 100644
--- a/src/buildstream/_yaml.pyx
+++ b/src/buildstream/_yaml.pyx
@@ -525,15 +525,15 @@ _sentinel = object()
#
cpdef object node_get(Node node, object expected_type, str key, list indices=None, object default_value=_sentinel, bint allow_none=False):
if indices is None:
- if default_value is _sentinel:
- value = node.value.get(key, Node(default_value, _SYNTHETIC_FILE_INDEX, 0, 0))
- else:
- value = node.value.get(key, Node(default_value, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter()))
+ value = node.value.get(key, _sentinel)
- if value.value is _sentinel:
- provenance = node_get_provenance(node)
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Dictionary did not contain expected key '{}'".format(provenance, key))
+ if value is _sentinel:
+ if default_value is _sentinel:
+ provenance = node_get_provenance(node)
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Dictionary did not contain expected key '{}'".format(provenance, key))
+
+ value = Node(default_value, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter())
else:
# Implied type check of the element itself
# No need to synthesise useful node content as we destructure it immediately
@@ -547,12 +547,12 @@ cpdef object node_get(Node node, object expected_type, str key, list indices=Non
if value.value is None and (allow_none or default_value is None):
return None
- if (expected_type is not None) and (not isinstance(value.value, expected_type)):
+ if (expected_type is not None) and (type(value.value) is not expected_type):
# Attempt basic conversions if possible, typically we want to
# be able to specify numeric values and convert them to strings,
# but we dont want to try converting dicts/lists
try:
- if (expected_type == bool and isinstance(value.value, str)):
+ if expected_type == bool and type(value.value) is str:
# Dont coerce booleans to string, this makes "False" strings evaluate to True
# We don't structure into full nodes since there's no need.
if value.value in ('True', 'true'):
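
The _yaml.pyx hunks above carry the actual optimization: the missing-key path now does a single dict lookup against a sentinel instead of always constructing a synthetic Node, and the isinstance() check becomes an exact type() comparison, which is what forces callers to pass dict rather than the Mapping ABC. A rough pure-Python sketch of that control flow, with the Node wrapping and provenance reporting stripped out; this is not the real Cython implementation:

    _sentinel = object()

    def node_get_sketch(node_value, expected_type, key, default_value=_sentinel):
        # One lookup with a sentinel default, rather than synthesising a
        # placeholder Node just to detect that the key was missing.
        value = node_value.get(key, _sentinel)
        if value is _sentinel:
            if default_value is _sentinel:
                raise KeyError("Dictionary did not contain expected key '{}'".format(key))
            value = default_value

        # Exact type check: type(x) is dict is cheaper than isinstance(x, Mapping)
        # and rejects abstract base classes, hence the dict migration elsewhere.
        if expected_type is not None and type(value) is not expected_type:
            raise TypeError("Value of '{}' is not of expected type {}".format(key, expected_type))
        return value
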
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 8e006ea6b..909a0e851 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -77,7 +77,6 @@ import re
import stat
import copy
from collections import OrderedDict
-from collections.abc import Mapping
import contextlib
from contextlib import contextmanager
from functools import partial
@@ -886,7 +885,7 @@ class Element(Plugin):
if self.__dynamic_public is None:
self.__load_public_data()
- data = _yaml.node_get(self.__dynamic_public, Mapping, domain, default_value=None)
+ data = _yaml.node_get(self.__dynamic_public, dict, domain, default_value=None)
if data is not None:
data = _yaml.node_copy(data)
@@ -2434,9 +2433,9 @@ class Element(Plugin):
@classmethod
def __compose_default_splits(cls, project, defaults, is_junction):
- element_public = _yaml.node_get(defaults, Mapping, 'public', default_value={})
- element_bst = _yaml.node_get(element_public, Mapping, 'bst', default_value={})
- element_splits = _yaml.node_get(element_bst, Mapping, 'split-rules', default_value={})
+ element_public = _yaml.node_get(defaults, dict, 'public', default_value={})
+ element_bst = _yaml.node_get(element_public, dict, 'bst', default_value={})
+ element_splits = _yaml.node_get(element_bst, dict, 'split-rules', default_value={})
if is_junction:
splits = _yaml.node_copy(element_splits)
@@ -2476,7 +2475,7 @@ class Element(Plugin):
else:
elements = project.element_overrides
- overrides = _yaml.node_get(elements, Mapping, kind, default_value=None)
+ overrides = _yaml.node_get(elements, dict, kind, default_value=None)
if overrides:
_yaml.composite(defaults, overrides)
@@ -2488,7 +2487,7 @@ class Element(Plugin):
#
@classmethod
def __extract_environment(cls, project, meta):
- default_env = _yaml.node_get(cls.__defaults, Mapping, 'environment', default_value={})
+ default_env = _yaml.node_get(cls.__defaults, dict, 'environment', default_value={})
if meta.is_junction:
environment = _yaml.new_empty_node()
@@ -2534,7 +2533,7 @@ class Element(Plugin):
#
@classmethod
def __extract_variables(cls, project, meta):
- default_vars = _yaml.node_get(cls.__defaults, Mapping, 'variables',
+ default_vars = _yaml.node_get(cls.__defaults, dict, 'variables',
default_value={})
if meta.is_junction:
@@ -2562,7 +2561,7 @@ class Element(Plugin):
def __extract_config(cls, meta):
# The default config is already composited with the project overrides
- config = _yaml.node_get(cls.__defaults, Mapping, 'config', default_value={})
+ config = _yaml.node_get(cls.__defaults, dict, 'config', default_value={})
config = _yaml.node_copy(config)
_yaml.composite(config, meta.config)
@@ -2588,7 +2587,7 @@ class Element(Plugin):
host_os = platform.get_host_os()
# The default config is already composited with the project overrides
- sandbox_defaults = _yaml.node_get(cls.__defaults, Mapping, 'sandbox', default_value={})
+ sandbox_defaults = _yaml.node_get(cls.__defaults, dict, 'sandbox', default_value={})
sandbox_defaults = _yaml.node_copy(sandbox_defaults)
_yaml.composite(sandbox_config, sandbox_defaults)
@@ -2615,15 +2614,15 @@ class Element(Plugin):
#
@classmethod
def __extract_public(cls, meta):
- base_public = _yaml.node_get(cls.__defaults, Mapping, 'public', default_value={})
+ base_public = _yaml.node_get(cls.__defaults, dict, 'public', default_value={})
base_public = _yaml.node_copy(base_public)
- base_bst = _yaml.node_get(base_public, Mapping, 'bst', default_value={})
- base_splits = _yaml.node_get(base_bst, Mapping, 'split-rules', default_value={})
+ base_bst = _yaml.node_get(base_public, dict, 'bst', default_value={})
+ base_splits = _yaml.node_get(base_bst, dict, 'split-rules', default_value={})
element_public = _yaml.node_copy(meta.public)
- element_bst = _yaml.node_get(element_public, Mapping, 'bst', default_value={})
- element_splits = _yaml.node_get(element_bst, Mapping, 'split-rules', default_value={})
+ element_bst = _yaml.node_get(element_public, dict, 'bst', default_value={})
+ element_splits = _yaml.node_get(element_bst, dict, 'split-rules', default_value={})
# Allow elements to extend the default splits defined in their project or
# element specific defaults
@@ -2638,8 +2637,8 @@ class Element(Plugin):
# Expand the splits in the public data using the Variables in the element
def __expand_splits(self, element_public):
- element_bst = _yaml.node_get(element_public, Mapping, 'bst', default_value={})
- element_splits = _yaml.node_get(element_bst, Mapping, 'split-rules', default_value={})
+ element_bst = _yaml.node_get(element_public, dict, 'bst', default_value={})
+ element_splits = _yaml.node_get(element_bst, dict, 'split-rules', default_value={})
# Resolve any variables in the public split rules directly
for domain, splits in self.node_items(element_splits):
diff --git a/src/buildstream/plugins/elements/junction.py b/src/buildstream/plugins/elements/junction.py
index 15ef115d9..4222de360 100644
--- a/src/buildstream/plugins/elements/junction.py
+++ b/src/buildstream/plugins/elements/junction.py
@@ -162,7 +162,6 @@ Note that when targeting another junction, the names of the junction element
must not be the same as the name of the target.
"""
-from collections.abc import Mapping
from buildstream import Element, ElementError
from buildstream._pipeline import PipelineError
@@ -177,7 +176,7 @@ class JunctionElement(Element):
def configure(self, node):
self.path = self.node_get_member(node, str, 'path', default='')
- self.options = self.node_get_member(node, Mapping, 'options', default={})
+ self.options = self.node_get_member(node, dict, 'options', default={})
self.target = self.node_get_member(node, str, 'target', default=None)
self.target_element = None
self.target_junction = None
diff --git a/src/buildstream/source.py b/src/buildstream/source.py
index f6cefb386..9fc9cf17d 100644
--- a/src/buildstream/source.py
+++ b/src/buildstream/source.py
@@ -162,7 +162,6 @@ Class Reference
"""
import os
-from collections.abc import Mapping
from contextlib import contextmanager
from . import _yaml, utils
@@ -1280,7 +1279,7 @@ class Source(Plugin):
sources = project.first_pass_config.source_overrides
else:
sources = project.source_overrides
- cls.__defaults = _yaml.node_get(sources, Mapping, meta.kind, default_value={})
+ cls.__defaults = _yaml.node_get(sources, dict, meta.kind, default_value={})
cls.__defaults_set = True
# This will resolve the final configuration to be handed
@@ -1288,7 +1287,7 @@ class Source(Plugin):
#
@classmethod
def __extract_config(cls, meta):
- config = _yaml.node_get(cls.__defaults, Mapping, 'config', default_value={})
+ config = _yaml.node_get(cls.__defaults, dict, 'config', default_value={})
config = _yaml.node_copy(config)
_yaml.composite(config, meta.config)
diff --git a/tests/internals/yaml.py b/tests/internals/yaml.py
index 6b26fdfaf..cd3bdc535 100644
--- a/tests/internals/yaml.py
+++ b/tests/internals/yaml.py
@@ -1,4 +1,3 @@
-from collections.abc import Mapping
import os
from io import StringIO
@@ -108,12 +107,12 @@ def test_node_get(datafiles):
assert isinstance(children, list)
assert len(children) == 7
- child = _yaml.node_get(base, Mapping, 'children', indices=[6])
+ child = _yaml.node_get(base, dict, 'children', indices=[6])
assert_provenance(filename, 20, 8, child, 'mood')
- extra = _yaml.node_get(base, Mapping, 'extra')
+ extra = _yaml.node_get(base, dict, 'extra')
with pytest.raises(LoadError) as exc:
- _yaml.node_get(extra, Mapping, 'old')
+ _yaml.node_get(extra, dict, 'old')
assert exc.value.reason == LoadErrorReason.INVALID_DATA
@@ -189,8 +188,8 @@ def test_composite_preserve_originals(datafiles):
base_copy = _yaml.node_copy(base)
_yaml.composite_dict(base_copy, overlay)
- copy_extra = _yaml.node_get(base_copy, Mapping, 'extra')
- orig_extra = _yaml.node_get(base, Mapping, 'extra')
+ copy_extra = _yaml.node_get(base_copy, dict, 'extra')
+ orig_extra = _yaml.node_get(base, dict, 'extra')
# Test that the node copy has the overridden value...
assert _yaml.node_get(copy_extra, str, 'old') == 'override'
@@ -474,7 +473,7 @@ def test_roundtrip_dump(datafiles, fromdisk):
for v in node.values():
if isinstance(v, list):
walk_list(v)
- elif isinstance(v, Mapping):
+ elif isinstance(v, dict):
walk_node(v)
else:
assert isinstance(v, str)
@@ -483,7 +482,7 @@ def test_roundtrip_dump(datafiles, fromdisk):
for v in l:
if isinstance(v, list):
walk_list(v)
- elif isinstance(v, Mapping):
+ elif isinstance(v, dict):
walk_node(v)
else:
assert isinstance(v, str)