author     Benjamin Schubert <contact@benschubert.me>      2019-06-18 11:05:52 +0100
committer  Benjamin Schubert <ben.c.schubert@gmail.com>    2019-07-01 22:26:50 +0100
commit     7155ae81763cc67fdbfd119bdffe30d72afc82a5 (patch)
tree       a1b5c6e0b05a4ee171b1dddc4e891954e5e92cda /src
parent     7585f65696cf8cec4c587c85f8ca068f57e902c6 (diff)
_yaml: Remove 'node_set'. Now use __setitem__
- Implement __setitem__ on 'MappingNode'
- Implement __setitem__ on 'SequenceNode'
- Adapt all call sites to use the new syntax.
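
As a quick before/after illustration of the call-site change, here is a minimal sketch taken from the 'max-jobs' assignment in _project.py below (it assumes 'output.base_variables' is already a loaded MappingNode and 'platform' a Platform instance, as in the surrounding code):

    # Before this commit: values were set on YAML nodes via the module-level helper.
    _yaml.node_set(output.base_variables, 'max-jobs', str(platform.get_cpu_count(8)))

    # After this commit: MappingNode implements __setitem__, so plain item assignment
    # is used instead; bare Python values are wrapped into Node objects internally.
    output.base_variables['max-jobs'] = str(platform.get_cpu_count(8))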
Diffstat (limited to 'src')
-rw-r--r--  src/buildstream/_options/optionpool.py       2
-rw-r--r--  src/buildstream/_project.py                 10
-rw-r--r--  src/buildstream/_projectrefs.py              6
-rw-r--r--  src/buildstream/_variables.pyx               2
-rw-r--r--  src/buildstream/_workspaces.py               2
-rw-r--r--  src/buildstream/_yaml.pxd                    1
-rw-r--r--  src/buildstream/_yaml.pyx                  100
-rw-r--r--  src/buildstream/element.py                  16
-rw-r--r--  src/buildstream/plugin.py                   22
-rw-r--r--  src/buildstream/sandbox/_sandboxremote.py    2
10 files changed, 82 insertions, 81 deletions
diff --git a/src/buildstream/_options/optionpool.py b/src/buildstream/_options/optionpool.py
index 96cff898d..eae03f181 100644
--- a/src/buildstream/_options/optionpool.py
+++ b/src/buildstream/_options/optionpool.py
@@ -152,7 +152,7 @@ class OptionPool():
def export_variables(self, variables):
for _, option in self._options.items():
if option.variable:
- _yaml.node_set(variables, option.variable, option.get_value())
+ variables[option.variable] = option.get_value()
# printable_variables()
#
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 2793b161a..af15bdc45 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -585,6 +585,7 @@ class Project():
# The project name, element path and option declarations
# are constant and cannot be overridden by option conditional statements
+ # FIXME: we should be keeping node information for further composition here
self.name = self._project_conf.get_str('name')
# Validate that project name is a valid symbol name
@@ -790,7 +791,7 @@ class Project():
output.base_variables = config.get_mapping('variables')
# Add the project name as a default variable
- _yaml.node_set(output.base_variables, 'project-name', self.name)
+ output.base_variables['project-name'] = self.name
# Extend variables with automatic variables and option exports
# Initialize it as a string as all variables are processed as strings.
@@ -798,7 +799,7 @@ class Project():
# max-jobs value seems to be around 8-10 if we have enough cores
# users should set values based on workload and build infrastructure
platform = Platform.get_platform()
- _yaml.node_set(output.base_variables, 'max-jobs', str(platform.get_cpu_count(8)))
+ output.base_variables['max-jobs'] = str(platform.get_cpu_count(8))
# Export options into variables, if that was requested
output.options.export_variables(output.base_variables)
@@ -939,7 +940,8 @@ class Project():
if plugin_group in origin.keys():
origin_node = origin.copy()
plugins = origin.get_mapping(plugin_group, default={})
- _yaml.node_set(origin_node, 'plugins', plugins.keys())
+ origin_node['plugins'] = plugins.keys()
+
for group in expected_groups:
if group in origin_node:
del origin_node[group]
@@ -948,7 +950,7 @@ class Project():
path = self.get_path_from_node(origin, 'path',
check_is_dir=True)
# paths are passed in relative to the project, but must be absolute
- _yaml.node_set(origin_node, 'path', os.path.join(self.directory, path))
+ origin_node['path'] = os.path.join(self.directory, path)
destination.append(origin_node)
# _warning_is_fatal():
diff --git a/src/buildstream/_projectrefs.py b/src/buildstream/_projectrefs.py
index e72d4757e..f1796a16d 100644
--- a/src/buildstream/_projectrefs.py
+++ b/src/buildstream/_projectrefs.py
@@ -87,7 +87,7 @@ class ProjectRefs():
# Ensure we create our toplevel entry point on the fly here
for node in [self._toplevel_node, self._toplevel_save]:
if 'projects' not in node:
- _yaml.node_set(node, 'projects', _yaml.new_empty_node(ref_node=node))
+ node['projects'] = _yaml.new_empty_node(ref_node=node)
# lookup_ref()
#
@@ -129,7 +129,7 @@ class ProjectRefs():
if not ensure:
return None
project_node = _yaml.new_empty_node(ref_node=projects)
- _yaml.node_set(projects, project, project_node)
+ projects[project] = project_node
# Fetch the element
try:
@@ -138,7 +138,7 @@ class ProjectRefs():
if not ensure:
return None
element_list = _yaml.new_empty_list_node()
- _yaml.node_set(project_node, element, element_list)
+ project_node[element] = element_list
# Fetch the source index
try:
diff --git a/src/buildstream/_variables.pyx b/src/buildstream/_variables.pyx
index f1d3c2ab0..ba8338f23 100644
--- a/src/buildstream/_variables.pyx
+++ b/src/buildstream/_variables.pyx
@@ -121,7 +121,7 @@ cdef class Variables:
# Initialize it as a string as all variables are processed as strings.
#
if node.get_bool('notparallel', False):
- _yaml.node_set(node, 'max-jobs', str(1))
+ node['max-jobs'] = str(1)
cdef dict ret = {}
cdef str key
diff --git a/src/buildstream/_workspaces.py b/src/buildstream/_workspaces.py
index a674ce1f5..95c1817ce 100644
--- a/src/buildstream/_workspaces.py
+++ b/src/buildstream/_workspaces.py
@@ -592,7 +592,7 @@ class Workspaces():
raise LoadError(LoadErrorReason.INVALID_DATA,
detail.format(element, self._get_filename()))
- _yaml.node_set(workspaces, element, sources[0])
+ workspaces[element] = sources[0]
else:
raise LoadError(LoadErrorReason.INVALID_DATA,
diff --git a/src/buildstream/_yaml.pxd b/src/buildstream/_yaml.pxd
index 3e0ee86c2..98ea64b28 100644
--- a/src/buildstream/_yaml.pxd
+++ b/src/buildstream/_yaml.pxd
@@ -69,5 +69,4 @@ cdef class ProvenanceInformation:
cpdef void node_validate(Node node, list valid_keys) except *
-cpdef void node_set(Node node, object key, object value, list indices=*) except *
cpdef ProvenanceInformation node_get_provenance(Node node, str key=*, list indices=*)
diff --git a/src/buildstream/_yaml.pyx b/src/buildstream/_yaml.pyx
index ae00924bc..6d5ac9508 100644
--- a/src/buildstream/_yaml.pyx
+++ b/src/buildstream/_yaml.pyx
@@ -250,6 +250,36 @@ cdef class MappingNode(Node):
def __delitem__(self, str key):
del self.value[key]
+ def __setitem__(self, str key, object value):
+ if type(value) in [MappingNode, ScalarNode, SequenceNode]:
+ self.value[key] = value
+ else:
+ node = _create_node_recursive(value)
+
+ # FIXME: Do we really want to override provenance?
+ #
+ # Related to https://gitlab.com/BuildStream/buildstream/issues/1058
+ #
+ # There are only two cases where nodes are set in the code (hence without provenance):
+ # - When automatic variables are set by the core (e-g: max-jobs)
+ # - when plugins call Element.set_public_data
+ #
+ # The first case should never throw errors, so it is of limited interest.
+ #
+ # The second is more important. What should probably be done here is to have 'set_public_data'
+ # capable of creating a fake provenance with the name of the plugin, the project and probably the
+ # element name.
+ #
+ # We would therefore have much better error messages, and would be able to get rid of most synthetic
+ # nodes.
+ old_value = self.value.get(key)
+ if old_value:
+ node.file_index = old_value.file_index
+ node.line = old_value.line
+ node.column = old_value.column
+
+ self.value[key] = node
+
cdef class SequenceNode(Node):
def __init__(self, list value, int file_index, int line, int column):
@@ -302,6 +332,21 @@ cdef class SequenceNode(Node):
def __reversed__(self):
return reversed(self.value)
+ def __setitem__(self, int key, object value):
+ if type(value) in [MappingNode, ScalarNode, SequenceNode]:
+ self.value[key] = value
+ else:
+ node = _create_node_recursive(value)
+
+ # FIXME: Do we really want to override provenance?
+ # See __setitem__ on 'MappingNode' for more context
+ old_value = self.value[key]
+ if old_value:
+ node.file_index = old_value.file_index
+ node.line = old_value.line
+ node.column = old_value.column
+
+ self.value[key] = node
# Metadata container for a yaml toplevel node.
#
@@ -599,6 +644,22 @@ cdef Node _create_node(object value, int file_index, int line, int column):
"Node values can only be 'list', 'dict', 'bool', 'str', 'int' or None. Not {}".format(type_value))
+cdef Node _create_node_recursive(object value):
+ cdef value_type = type(value)
+
+ if value_type is list:
+ node = __new_node_from_list(value)
+ elif value_type is str:
+ node = ScalarNode(value, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter())
+ elif value_type is dict:
+ node = new_node_from_dict(value)
+ else:
+ raise ValueError(
+ "Unable to assign a value of type {} to a Node.".format(value_type))
+
+ return node
+
+
# Loads a dictionary from some YAML
#
# Args:
@@ -739,45 +800,6 @@ cpdef ProvenanceInformation node_get_provenance(Node node, str key=None, list in
return ProvenanceInformation(nodeish)
-# node_set()
-#
-# Set an item within the node. If using `indices` be aware that the entry must
-# already exist, or else a KeyError will be raised. Use `node_extend_list` to
-# create entries before using `node_set`
-#
-# Args:
-# node (Node): The node
-# key (str): The key name
-# value: The value
-# indices: Any indices to index into the list referenced by key, like in
-# `node_get` (must be a list of integers)
-#
-cpdef void node_set(Node node, object key, object value, list indices=None) except *:
- cdef int idx
-
- if type(value) is list:
- value = __new_node_from_list(value)
-
- if indices:
- node = <Node> (<dict> node.value)[key]
- key = indices.pop()
- for idx in indices:
- node = <Node> (<list> node.value)[idx]
- if type(value) in [Node, MappingNode, ScalarNode, SequenceNode]:
- node.value[key] = value
- else:
- try:
- # Need to do this just in case we're modifying a list
- old_value = <Node> node.value[key]
- except KeyError:
- old_value = None
-
- if old_value is None:
- node.value[key] = _create_node(value, node.file_index, node.line, next_synthetic_counter())
- else:
- node.value[key] = _create_node(value, old_value.file_index, old_value.line, old_value.column)
-
-
# node_extend_list()
#
# Extend a list inside a node to a given length, using the passed
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 13a4b88ef..1a5439fda 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -254,7 +254,7 @@ class Element(Plugin):
# Collect the composited variables and resolve them
variables = self.__extract_variables(project, meta)
- _yaml.node_set(variables, 'element-name', self.name)
+ variables['element-name'] = self.name
self.__variables = Variables(variables)
# Collect the composited environment now that we have variables
@@ -874,7 +874,7 @@ class Element(Plugin):
if data is not None:
data = data.copy()
- _yaml.node_set(self.__dynamic_public, domain, data)
+ self.__dynamic_public[domain] = data
def get_environment(self):
"""Fetch the environment suitable for running in the sandbox
@@ -2506,9 +2506,9 @@ class Element(Plugin):
# Extend project wide split rules with any split rules defined by the element
_yaml.composite(splits, element_splits)
- _yaml.node_set(element_bst, 'split-rules', splits)
- _yaml.node_set(element_public, 'bst', element_bst)
- _yaml.node_set(defaults, 'public', element_public)
+ element_bst['split-rules'] = splits
+ element_public['bst'] = element_bst
+ defaults['public'] = element_public
@classmethod
def __init_defaults(cls, project, plugin_conf, kind, is_junction):
@@ -2687,8 +2687,8 @@ class Element(Plugin):
# element specific defaults
_yaml.composite(base_splits, element_splits)
- _yaml.node_set(element_bst, 'split-rules', base_splits)
- _yaml.node_set(element_public, 'bst', element_bst)
+ element_bst['split-rules'] = base_splits
+ element_public['bst'] = element_bst
_yaml.node_final_assertions(element_public)
@@ -2705,7 +2705,7 @@ class Element(Plugin):
self.__variables.subst(split.strip())
for split in splits.as_str_list()
]
- _yaml.node_set(element_splits, domain, splits)
+ element_splits[domain] = splits
return element_public
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index 400b6802a..3a6312092 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -363,28 +363,6 @@ class Plugin():
provenance = _yaml.node_get_provenance(node, key=member_name)
return str(provenance)
- def node_set_member(self, node, key, value):
- """Set the value of a node member
- Args:
- node (node): A dictionary loaded from YAML
- key (str): The key name
- value: The value
-
- Returns:
- None
-
- Raises:
- None
-
- **Example:**
-
- .. code:: python
-
- # Set a string 'tomjon' in node[name]
- self.node_set_member(node, 'name', 'tomjon')
- """
- _yaml.node_set(node, key, value)
-
def new_empty_node(self):
"""Create an empty 'Node' object to be handled by BuildStream's core
Args:
diff --git a/src/buildstream/sandbox/_sandboxremote.py b/src/buildstream/sandbox/_sandboxremote.py
index 37ba8506b..38dfe3b09 100644
--- a/src/buildstream/sandbox/_sandboxremote.py
+++ b/src/buildstream/sandbox/_sandboxremote.py
@@ -173,7 +173,7 @@ class SandboxRemote(Sandbox):
for tls_key in tls_keys:
if tls_key in config:
- _yaml.node_set(config, tls_key, resolve_path(config.get_str(tls_key)))
+ config[tls_key] = resolve_path(config.get_str(tls_key))
return RemoteExecutionSpec(*[_yaml.node_sanitize(conf) for conf in service_configs])
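
For plugin authors, the removed Plugin.node_set_member() helper maps directly onto the new item assignment. A minimal sketch based on the docstring example deleted from plugin.py above ('node' is assumed to be a mapping node handed to the plugin by the core):

    # Before: the Plugin base class exposed a thin wrapper around _yaml.node_set():
    #     self.node_set_member(node, 'name', 'tomjon')
    #
    # After: assign directly on the node; MappingNode.__setitem__ wraps the string
    # into a node and preserves the provenance of any value it replaces.
    node['name'] = 'tomjon'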