author     Benjamin Schubert <contact@benschubert.me>   2019-07-04 09:09:23 +0000
committer  Benjamin Schubert <contact@benschubert.me>   2019-07-04 09:09:23 +0000
commit     c7a7e44838a0df64088924caecb9fea1ef9e91af (patch)
tree       043b285ebbbc4b3574c04ae590d56a28ec4fbd38 /src/buildstream
parent     b4b0f79f6e09a89f1eee785702f47e5975e41bf7 (diff)
parent     482221aaacf690b73d67a8c1ec4eb15686abfd5b (diff)
download   buildstream-c7a7e44838a0df64088924caecb9fea1ef9e91af.tar.gz
Merge branch 'bschubert/rework-node-creations' into 'bschubert/new-node-api'
Rework synthetic node creation

See merge request BuildStream/buildstream!1447
Diffstat (limited to 'src/buildstream')
-rw-r--r--  src/buildstream/_context.py               2
-rw-r--r--  src/buildstream/_loader/loader.py          2
-rw-r--r--  src/buildstream/_loader/metaelement.py    10
-rw-r--r--  src/buildstream/_projectrefs.py           19
-rw-r--r--  src/buildstream/_yaml.pxd                  1
-rw-r--r--  src/buildstream/_yaml.pyx                131
-rw-r--r--  src/buildstream/element.py                 6
7 files changed, 56 insertions(+), 115 deletions(-)
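The changes below replace the free functions _yaml.new_empty_node(), _yaml.new_empty_list_node() and _yaml.new_node_from_dict() with a single Node.from_dict() classmethod. A minimal sketch of the call-site change, assuming BuildStream's internal _yaml module as it stands at this commit:

    from buildstream import _yaml

    # Before this merge request, empty synthetic mappings needed a
    # dedicated helper:
    #     node = _yaml.new_empty_node()
    #
    # After, one entry point covers all synthetic node creation; an
    # empty dict short-circuits to an empty MappingNode.
    node = _yaml.Node.from_dict({})
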
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 524305177..9b0db39d7 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -159,7 +159,7 @@ class Context():
self._artifactcache = None
self._sourcecache = None
self._projects = []
- self._project_overrides = _yaml.new_empty_node()
+ self._project_overrides = _yaml.Node.from_dict({})
self._workspaces = None
self._workspace_project_cache = WorkspaceProjectCache()
self._log_handle = None
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index 73da5434a..da1e466c9 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -120,7 +120,7 @@ class Loader():
# Set up a dummy element that depends on all top-level targets
# to resolve potential circular dependencies between them
- dummy_target = LoadElement(_yaml.new_empty_node(), "", self)
+ dummy_target = LoadElement(_yaml.Node.from_dict({}), "", self)
dummy_target.dependencies.extend(
LoadElement.Dependency(element, Symbol.RUNTIME)
for element in target_elements
diff --git a/src/buildstream/_loader/metaelement.py b/src/buildstream/_loader/metaelement.py
index 45eb6f4d0..8214e303d 100644
--- a/src/buildstream/_loader/metaelement.py
+++ b/src/buildstream/_loader/metaelement.py
@@ -48,12 +48,12 @@ class MetaElement():
self.kind = kind
self.provenance = provenance
self.sources = sources
- self.config = config or _yaml.new_empty_node()
- self.variables = variables or _yaml.new_empty_node()
- self.environment = environment or _yaml.new_empty_node()
+ self.config = config or _yaml.Node.from_dict({})
+ self.variables = variables or _yaml.Node.from_dict({})
+ self.environment = environment or _yaml.Node.from_dict({})
self.env_nocache = env_nocache or []
- self.public = public or _yaml.new_empty_node()
- self.sandbox = sandbox or _yaml.new_empty_node()
+ self.public = public or _yaml.Node.from_dict({})
+ self.sandbox = sandbox or _yaml.Node.from_dict({})
self.build_dependencies = []
self.dependencies = []
self.first_pass = first_pass
diff --git a/src/buildstream/_projectrefs.py b/src/buildstream/_projectrefs.py
index d10456b14..a46765250 100644
--- a/src/buildstream/_projectrefs.py
+++ b/src/buildstream/_projectrefs.py
@@ -87,7 +87,7 @@ class ProjectRefs():
# Ensure we create our toplevel entry point on the fly here
for node in [self._toplevel_node, self._toplevel_save]:
if 'projects' not in node:
- node['projects'] = _yaml.new_empty_node(ref_node=node)
+ node['projects'] = {}
# lookup_ref()
#
@@ -121,15 +121,16 @@ class ProjectRefs():
# Looks up a ref node in the project.refs file, creates one if ensure is True.
#
def _lookup(self, toplevel, project, element, source_index, *, ensure=False):
+ projects = toplevel.get_mapping('projects')
+
# Fetch the project
try:
- projects = toplevel.get_mapping('projects')
project_node = projects.get_mapping(project)
except LoadError:
if not ensure:
return None
- project_node = _yaml.new_empty_node(ref_node=projects)
- projects[project] = project_node
+ projects[project] = {}
+ project_node = projects.get_mapping(project)
# Fetch the element
try:
@@ -137,8 +138,8 @@ class ProjectRefs():
except LoadError:
if not ensure:
return None
- element_list = _yaml.new_empty_list_node()
- project_node[element] = element_list
+ project_node[element] = []
+ element_list = project_node.get_sequence(element)
# Fetch the source index
try:
@@ -147,9 +148,7 @@ class ProjectRefs():
if not ensure:
return None
- # Pad the list with empty newly created dictionaries
- _yaml.node_extend_list(project_node, element, source_index + 1, {})
-
- node = project_node.get_sequence(element).mapping_at(source_index)
+ element_list.append({})
+ node = element_list.mapping_at(source_index)
return node
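
The rewritten _lookup() above leans on MappingNode.__setitem__ and the new SequenceNode.append() to convert plain Python dicts and lists into synthetic nodes on assignment, so callers no longer build empty nodes explicitly. A rough sketch of the pattern, given a toplevel mapping node and using 'myproject' and 'element.bst' as hypothetical keys:

    projects = toplevel.get_mapping('projects')

    projects['myproject'] = {}                        # plain dict becomes a MappingNode
    project_node = projects.get_mapping('myproject')

    project_node['element.bst'] = []                  # plain list becomes a SequenceNode
    element_list = project_node.get_sequence('element.bst')

    element_list.append({})                           # append() wraps the dict in a MappingNode
    node = element_list.mapping_at(0)
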
diff --git a/src/buildstream/_yaml.pxd b/src/buildstream/_yaml.pxd
index 32a39dfd4..d7109bba5 100644
--- a/src/buildstream/_yaml.pxd
+++ b/src/buildstream/_yaml.pxd
@@ -67,6 +67,7 @@ cdef class ScalarNode(Node):
cdef class SequenceNode(Node):
+ cpdef void append(self, object value)
cpdef MappingNode mapping_at(self, int index)
cpdef SequenceNode sequence_at(self, int index)
cpdef list as_str_list(self)
diff --git a/src/buildstream/_yaml.pyx b/src/buildstream/_yaml.pyx
index 6420474eb..b67227d7b 100644
--- a/src/buildstream/_yaml.pyx
+++ b/src/buildstream/_yaml.pyx
@@ -69,6 +69,14 @@ cdef class Node:
self.line = line
self.column = column
+ @classmethod
+ def from_dict(cls, dict value):
+ if value:
+ return _new_node_from_dict(value, Node(None, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter()))
+ else:
+ # We got an empty dict, we can shortcut
+ return MappingNode({}, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter())
+
cdef bint _walk_find(self, Node target, list path) except *:
raise NotImplementedError()
@@ -488,7 +496,7 @@ cdef class MappingNode(Node):
if type(value) in [MappingNode, ScalarNode, SequenceNode]:
self.value[key] = value
else:
- node = _create_node_recursive(value)
+ node = _create_node_recursive(value, self)
# FIXME: Do we really want to override provenance?
#
@@ -554,6 +562,13 @@ cdef class SequenceNode(Node):
self.line = line
self.column = column
+ cpdef void append(self, object value):
+ if type(value) in [MappingNode, ScalarNode, SequenceNode]:
+ self.value.append(value)
+ else:
+ node = _create_node_recursive(value, self)
+ self.value.append(node)
+
cpdef SequenceNode copy(self):
cdef list copy = []
cdef Node entry
@@ -644,7 +659,7 @@ cdef class SequenceNode(Node):
if type(value) in [MappingNode, ScalarNode, SequenceNode]:
self.value[key] = value
else:
- node = _create_node_recursive(value)
+ node = _create_node_recursive(value, self)
# FIXME: Do we really want to override provenance?
# See __setitem__ on 'MappingNode' for more context
@@ -939,28 +954,15 @@ cdef class Representer:
return RepresenterState.init
-cdef Node _create_node(object value, int file_index, int line, int column):
- cdef type_value = type(value)
-
- if type_value in [bool, str, type(None), int]:
- return ScalarNode(value, file_index, line, column)
- elif type_value is dict:
- return MappingNode(value, file_index, line, column)
- elif type_value is list:
- return SequenceNode(value, file_index, line, column)
- raise ValueError(
- "Node values can only be 'list', 'dict', 'bool', 'str', 'int' or None. Not {}".format(type_value))
-
-
-cdef Node _create_node_recursive(object value):
+cdef Node _create_node_recursive(object value, Node ref_node):
cdef value_type = type(value)
if value_type is list:
- node = __new_node_from_list(value)
+ node = _new_node_from_list(value, ref_node)
elif value_type is str:
- node = ScalarNode(value, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter())
+ node = ScalarNode(value, ref_node.file_index, ref_node.line, next_synthetic_counter())
elif value_type is dict:
- node = new_node_from_dict(value)
+ node = _new_node_from_dict(value, ref_node)
else:
raise ValueError(
"Unable to assign a value of type {} to a Node.".format(value_type))
@@ -1093,50 +1095,6 @@ cpdef ProvenanceInformation node_get_provenance(Node node, str key=None, list in
return ProvenanceInformation(nodeish)
-# node_extend_list()
-#
-# Extend a list inside a node to a given length, using the passed
-# default value to fill it out.
-#
-# Valid default values are:
-# Any string
-# An empty dict
-# An empty list
-#
-# Args:
-# node (node): The node
-# key (str): The list name in the node
-# length (int): The length to extend the list to
-# default (any): The default value to extend with.
-def node_extend_list(Node node, str key, Py_ssize_t length, object default):
- assert type(default) is str or default in ([], {})
-
- cdef Node list_node = <Node> node.value.get(key)
- if list_node is None:
- list_node = node.value[key] = SequenceNode([], node.file_index, node.line, next_synthetic_counter())
-
- cdef list the_list = list_node.value
- def_type = type(default)
-
- file_index = node.file_index
- if the_list:
- line_num = the_list[-1][2]
- else:
- line_num = list_node.line
-
- while length > len(the_list):
- if def_type is str:
- value = default
- elif def_type is list:
- value = []
- else:
- value = {}
-
- line_num += 1
-
- the_list.append(_create_node(value, file_index, line_num, next_synthetic_counter()))
-
-
# is_node()
#
# A test method which returns whether or not the passed in value
@@ -1183,26 +1141,6 @@ def new_synthetic_file(str filename, object project=None):
return node
-# new_empty_node()
-#
-# Args:
-# ref_node (Node): Optional node whose provenance should be referenced
-#
-# Returns
-# (Node): A new empty YAML mapping node
-#
-def new_empty_node(Node ref_node=None):
- if ref_node is not None:
- return MappingNode({}, ref_node.file_index, ref_node.line, next_synthetic_counter())
- else:
- return MappingNode({}, _SYNTHETIC_FILE_INDEX, 0, 0)
-
-
-# FIXME: we should never need that
-def new_empty_list_node():
- return SequenceNode([], _SYNTHETIC_FILE_INDEX, 0, 0)
-
-
# new_node_from_dict()
#
# Args:
@@ -1211,32 +1149,35 @@ def new_empty_list_node():
# Returns:
# (Node): A new synthetic YAML tree which represents this dictionary
#
-cpdef Node new_node_from_dict(dict indict):
- cdef dict ret = {}
+cdef Node _new_node_from_dict(dict indict, Node ref_node):
+ cdef MappingNode ret = MappingNode({}, ref_node.file_index, ref_node.line, next_synthetic_counter())
cdef str k
+
for k, v in indict.items():
vtype = type(v)
if vtype is dict:
- ret[k] = new_node_from_dict(v)
+ ret.value[k] = _new_node_from_dict(v, ref_node)
elif vtype is list:
- ret[k] = __new_node_from_list(v)
+ ret.value[k] = _new_node_from_list(v, ref_node)
else:
- ret[k] = ScalarNode(str(v), _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter())
- return MappingNode(ret, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter())
+ ret.value[k] = ScalarNode(str(v), ref_node.file_index, ref_node.line, next_synthetic_counter())
+ return ret
# Internal function to help new_node_from_dict() to handle lists
-cdef Node __new_node_from_list(list inlist):
- cdef list ret = []
+cdef Node _new_node_from_list(list inlist, Node ref_node):
+ cdef SequenceNode ret = SequenceNode([], ref_node.file_index, ref_node.line, next_synthetic_counter())
+
for v in inlist:
vtype = type(v)
if vtype is dict:
- ret.append(new_node_from_dict(v))
+ ret.value.append(_new_node_from_dict(v, ref_node))
elif vtype is list:
- ret.append(__new_node_from_list(v))
+ ret.value.append(_new_node_from_list(v, ref_node))
else:
- ret.append(ScalarNode(str(v), _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter()))
- return SequenceNode(ret, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter())
+ ret.value.append(ScalarNode(str(v), ref_node.file_index, ref_node.line, next_synthetic_counter()))
+
+ return ret
# node_validate()
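
With _create_node_recursive() and the _new_node_from_* helpers now taking a ref_node, synthetic values created through assignment or append() inherit the file index and line of the node they are attached to, rather than always pointing at the generic synthetic file. A small sketch of the intended behaviour, assuming the API shown above:

    from buildstream import _yaml

    node = _yaml.Node.from_dict({'depends': []})
    depends = node.get_sequence('depends')

    # The appended string is wrapped in a ScalarNode whose provenance
    # (file index and line) is taken from the 'depends' sequence itself.
    depends.append('base.bst')
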
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index a4496e192..280eeb8f1 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -2515,7 +2515,7 @@ class Element(Plugin):
# Defaults are loaded once per class and then reused
#
if cls.__defaults is None:
- defaults = _yaml.new_empty_node()
+ defaults = _yaml.Node.from_dict({})
if plugin_conf is not None:
# Load the plugin's accompanying .yaml file if one was provided
@@ -2550,7 +2550,7 @@ class Element(Plugin):
default_env = cls.__defaults.get_mapping("environment", default={})
if meta.is_junction:
- environment = _yaml.new_empty_node()
+ environment = _yaml.Node.from_dict({})
else:
environment = project.base_environment.copy()
@@ -2633,7 +2633,7 @@ class Element(Plugin):
@classmethod
def __extract_sandbox_config(cls, project, meta):
if meta.is_junction:
- sandbox_config = _yaml.new_node_from_dict({
+ sandbox_config = _yaml.Node.from_dict({
'build-uid': 0,
'build-gid': 0
})
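
__extract_sandbox_config() now builds its synthetic configuration through the same Node.from_dict() entry point. Note that, per _new_node_from_dict() above, scalar values are stored as ScalarNode wrapping str(value). A hedged sketch of the equivalent standalone call:

    from buildstream import _yaml

    # Nested dicts and lists are converted recursively; the integers here
    # end up as ScalarNodes holding "0", as in _new_node_from_dict().
    sandbox_config = _yaml.Node.from_dict({
        'build-uid': 0,
        'build-gid': 0,
    })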