Diffstat (limited to 'src/buildstream')
-rw-r--r--  src/buildstream/_options/optionpool.py |  2
-rw-r--r--  src/buildstream/_project.py             |  8
-rw-r--r--  src/buildstream/_projectrefs.py         |  6
-rw-r--r--  src/buildstream/_workspaces.py          |  2
-rw-r--r--  src/buildstream/_yaml.pyx               | 39
-rw-r--r--  src/buildstream/element.py              | 16
6 files changed, 50 insertions, 23 deletions
diff --git a/src/buildstream/_options/optionpool.py b/src/buildstream/_options/optionpool.py
index 1b8683186..1d676e54a 100644
--- a/src/buildstream/_options/optionpool.py
+++ b/src/buildstream/_options/optionpool.py
@@ -152,7 +152,7 @@ class OptionPool():
def export_variables(self, variables):
for _, option in self._options.items():
if option.variable:
- _yaml.node_set(variables, option.variable, option.get_value())
+ variables[option.variable] = option.get_value()
# printable_variables()
#
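Across the call sites in this patch the migration is mechanical: writes that previously went through the module-level helper _yaml.node_set(node, key, value) now use plain subscript assignment on the mapping node. A minimal pure-Python stand-in (not the real Cython MappingNode) showing why both spellings land in the same place:

    # Illustrative stand-in only; the real MappingNode lives in _yaml.pyx and
    # additionally wraps bare values in provenance-carrying nodes.
    class FakeMappingNode:
        def __init__(self):
            self.value = {}

        def __setitem__(self, key, value):
            # The old helper, _yaml.node_set(node, key, value), ended with the
            # same store; subscript assignment simply dispatches here directly.
            self.value[key] = value

    variables = FakeMappingNode()
    variables['project-name'] = 'myproject'   # new spelling, as in _project.py below
    assert variables.value['project-name'] == 'myproject'

The real node type also records where each value came from, which is what the _yaml.pyx hunks further down implement.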
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 43bdc73c9..70ebb6e63 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -797,7 +797,7 @@ class Project():
output.base_variables = config.get_mapping('variables')
# Add the project name as a default variable
- _yaml.node_set(output.base_variables, 'project-name', self.name)
+ output.base_variables['project-name'] = self.name
# Extend variables with automatic variables and option exports
# Initialize it as a string as all variables are processed as strings.
@@ -805,7 +805,7 @@ class Project():
# max-jobs value seems to be around 8-10 if we have enough cores
# users should set values based on workload and build infrastructure
platform = Platform.get_platform()
- _yaml.node_set(output.base_variables, 'max-jobs', str(platform.get_cpu_count(8)))
+ output.base_variables['max-jobs'] = platform.get_cpu_count(8)
# Export options into variables, if that was requested
output.options.export_variables(output.base_variables)
@@ -947,7 +947,7 @@ class Project():
if plugin_group in node_keys:
origin_node = origin.copy()
plugins = origin.get_mapping(plugin_group, default={})
- _yaml.node_set(origin_node, 'plugins', [k for k in _yaml.node_keys(plugins)])
+ origin_node['plugins'] = [k for k in _yaml.node_keys(plugins)]
for group in expected_groups:
if group in origin_node:
_yaml.node_del(origin_node, group)
@@ -956,7 +956,7 @@ class Project():
path = self.get_path_from_node(origin, 'path',
check_is_dir=True)
# paths are passed in relative to the project, but must be absolute
- _yaml.node_set(origin_node, 'path', os.path.join(self.directory, path))
+ origin_node['path'] = os.path.join(self.directory, path)
destination.append(origin_node)
# _warning_is_fatal():
diff --git a/src/buildstream/_projectrefs.py b/src/buildstream/_projectrefs.py
index f296858cf..a0f930ecf 100644
--- a/src/buildstream/_projectrefs.py
+++ b/src/buildstream/_projectrefs.py
@@ -87,7 +87,7 @@ class ProjectRefs():
# Ensure we create our toplevel entry point on the fly here
for node in [self._toplevel_node, self._toplevel_save]:
if 'projects' not in node:
- _yaml.node_set(node, 'projects', _yaml.new_empty_node(ref_node=node))
+ node['projects'] = _yaml.new_empty_node(ref_node=node)
# lookup_ref()
#
@@ -129,7 +129,7 @@ class ProjectRefs():
if not ensure:
return None
project_node = _yaml.new_empty_node(ref_node=projects)
- _yaml.node_set(projects, project, project_node)
+ projects[project] = project_node
# Fetch the element
try:
@@ -138,7 +138,7 @@ class ProjectRefs():
if not ensure:
return None
element_list = _yaml.new_empty_list_node()
- _yaml.node_set(project_node, element, element_list)
+ project_node[element] = element_list
# Fetch the source index
try:
diff --git a/src/buildstream/_workspaces.py b/src/buildstream/_workspaces.py
index 21944c799..2095ce755 100644
--- a/src/buildstream/_workspaces.py
+++ b/src/buildstream/_workspaces.py
@@ -594,7 +594,7 @@ class Workspaces():
raise LoadError(LoadErrorReason.INVALID_DATA,
detail.format(element, self._get_filename()))
- _yaml.node_set(workspaces, element, sources[0][1])
+ workspaces[element] = sources[0][1]
else:
raise LoadError(LoadErrorReason.INVALID_DATA,
diff --git a/src/buildstream/_yaml.pyx b/src/buildstream/_yaml.pyx
index 6030e285e..ae2347255 100644
--- a/src/buildstream/_yaml.pyx
+++ b/src/buildstream/_yaml.pyx
@@ -83,6 +83,14 @@ cdef class ScalarNode(Node):
def __init__(self, object value, int file_index, int line, int column):
if type(value) is str:
value = value.strip()
+ elif type(value) is bool:
+ if value:
+ value = "True"
+ else:
+ value = "False"
+ elif type(value) is int:
+ value = str(value)
+
self.value = value
self.file_index = file_index
self.line = line
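These new ScalarNode branches normalize non-string scalars up front, which is why the max-jobs assignment in _project.py above can now pass the integer from get_cpu_count() directly instead of calling str() at the call site. A plain-Python sketch of the coercion, assuming it mirrors the branches added here:

    def coerce_scalar(value):
        # Assumed to mirror the new ScalarNode.__init__ branches: strings are
        # stripped, bools become canonical "True"/"False", ints are stringified.
        if type(value) is str:
            return value.strip()
        elif type(value) is bool:
            return "True" if value else "False"
        elif type(value) is int:
            return str(value)
        return value

    assert coerce_scalar(8) == "8"          # e.g. max-jobs from get_cpu_count(8)
    assert coerce_scalar(True) == "True"
    assert coerce_scalar("  4 ") == "4"

Note that the bool check must come before the int check, since bool is a subclass of int in Python; the added Cython branches keep the same ordering.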
@@ -132,6 +140,17 @@ cdef class MappingNode(Node):
self.line = line
self.column = column
+ def __setitem__(self, str key, object value):
+ if type(value) in [MappingNode, ScalarNode, SequenceNode]:
+ self.value[key] = value
+ else:
+ old_value = self.value.get(key, None)
+
+ if old_value is None:
+ self.value[key] = _create_node(value, self.file_index, self.line, next_synthetic_counter())
+ else:
+ self.value[key] = _create_node(value, old_value.file_index, old_value.line, old_value.column)
+
cdef Node get(self, str key, default, default_constructor):
value = self.value.get(key, _sentinel)
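This __setitem__ is what backs the subscript assignments in the other files: values that are already nodes are stored as-is, while bare Python values get wrapped in a new node, reusing the provenance of any node they replace so diagnostics keep pointing at the original YAML location. A rough pure-Python rendering of that logic (SketchNode and the counter are stand-ins for the real node types and next_synthetic_counter()):

    from itertools import count

    _synthetic = count()          # stand-in for next_synthetic_counter()

    class SketchNode:
        def __init__(self, value, file_index, line, column):
            self.value = value
            self.file_index = file_index
            self.line = line
            self.column = column

    class SketchMappingNode(SketchNode):
        def __setitem__(self, key, value):
            if isinstance(value, SketchNode):
                # Already a node: store directly, its provenance travels with it
                self.value[key] = value
                return
            old = self.value.get(key)
            if old is None:
                # New key: provenance is synthesized from the parent mapping
                self.value[key] = SketchNode(value, self.file_index, self.line,
                                             next(_synthetic))
            else:
                # Overwriting: reuse the provenance of the node being replaced
                self.value[key] = SketchNode(value, old.file_index, old.line,
                                             old.column)

    variables = SketchMappingNode({}, file_index=0, line=10, column=2)
    variables['max-jobs'] = 8     # wrapped; provenance taken from the mapping
    variables['max-jobs'] = 4     # rewrapped; provenance of the replaced node kept
    assert variables.value['max-jobs'].line == 10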
@@ -532,13 +551,21 @@ cdef class Representer:
cdef Node _create_node(object value, int file_index, int line, int column):
- if type(value) in [bool, str, type(None), int]:
+ cdef value_type = type(value)
+
+ if value_type in [bool, str, type(None), int]:
return ScalarNode(value, file_index, line, column)
- elif type(value) is dict:
- return MappingNode(value, file_index, line, column)
- elif type(value) is list:
- return SequenceNode(value, file_index, line, column)
- return Node(value, file_index, line, column)
+ elif value_type is dict:
+ new_node = MappingNode({}, file_index, line, column)
+ for key, entry in (<dict> value).items():
+ (<MappingNode> new_node)[key] = entry
+ return new_node
+ elif value_type is list:
+ value = __new_node_from_list(value).value
+ new_node = SequenceNode(value, file_index, line, column)
+ return new_node
+
+ raise ValueError("Can't create a new node for type {}".format(value_type))
# Loads a dictionary from some YAML
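The reworked _create_node above now builds nested structures recursively: dicts are rebuilt key by key through the new MappingNode.__setitem__, lists are converted via __new_node_from_list(), and unknown types are rejected instead of falling back to a bare Node. A simplified, self-contained Python sketch of that dispatch (the namedtuple is an illustrative stand-in, not the real node classes):

    from collections import namedtuple

    Node = namedtuple("Node", "kind value file_index line column")

    def create_node(value, file_index, line, column):
        # Scalars (bool/int/None/str) wrap directly; ScalarNode stringifies them
        if type(value) in (bool, str, type(None), int):
            return Node("scalar", value, file_index, line, column)
        if type(value) is dict:
            # Recurse so every nested entry becomes a node of its own
            children = {k: create_node(v, file_index, line, column)
                        for k, v in value.items()}
            return Node("mapping", children, file_index, line, column)
        if type(value) is list:
            children = [create_node(v, file_index, line, column) for v in value]
            return Node("sequence", children, file_index, line, column)
        # Unknown types are now an error instead of an opaque Node wrapper
        raise ValueError("Can't create a new node for type {}".format(type(value)))

    public = create_node({'bst': {'split-rules': ['/usr/bin/*']}}, 0, 1, 1)
    assert public.value['bst'].value['split-rules'].kind == "sequence"

This recursion is what lets the element.py hunks below assign plain nested dicts and lists straight into public data without any manual node construction.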
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 006d42d6b..7509914e4 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -251,7 +251,7 @@ class Element(Plugin):
# Collect the composited variables and resolve them
variables = self.__extract_variables(project, meta)
- _yaml.node_set(variables, 'element-name', self.name)
+ variables['element-name'] = self.name
self.__variables = Variables(variables)
# Collect the composited environment now that we have variables
@@ -920,7 +920,7 @@ class Element(Plugin):
if data is not None:
data = data.copy()
- _yaml.node_set(self.__dynamic_public, domain, data)
+ self.__dynamic_public[domain] = data
def get_environment(self):
"""Fetch the environment suitable for running in the sandbox
@@ -2536,9 +2536,9 @@ class Element(Plugin):
# Extend project wide split rules with any split rules defined by the element
_yaml.composite(splits, element_splits)
- _yaml.node_set(element_bst, 'split-rules', splits)
- _yaml.node_set(element_public, 'bst', element_bst)
- _yaml.node_set(defaults, 'public', element_public)
+ element_bst['split-rules'] = splits
+ element_public['bst'] = element_bst
+ defaults['public'] = element_public
@classmethod
def __init_defaults(cls, project, plugin_conf, kind, is_junction):
@@ -2717,8 +2717,8 @@ class Element(Plugin):
# element specific defaults
_yaml.composite(base_splits, element_splits)
- _yaml.node_set(element_bst, 'split-rules', base_splits)
- _yaml.node_set(element_public, 'bst', element_bst)
+ element_bst['split-rules'] = base_splits
+ element_public['bst'] = element_bst
_yaml.node_final_assertions(element_public)
@@ -2735,7 +2735,7 @@ class Element(Plugin):
self.__variables.subst(split.strip())
for split in splits
]
- _yaml.node_set(element_splits, domain, splits)
+ element_splits[domain] = splits
return element_public