author     Tom Pollard <tom.pollard@codethink.co.uk>   2019-02-28 17:45:03 +0000
committer  bst-marge-bot <marge-bot@buildstream.build> 2019-03-12 18:11:28 +0000
commit     28080d88da7ddb351aa18e7a8e18759c5de2747f (patch)
tree       61cf19a37d4928995f2c1654df678ddf7fb2b651
parent     21c4633481575a96139500f919d8bb54a4b7861c (diff)
download   buildstream-28080d88da7ddb351aa18e7a8e18759c5de2747f.tar.gz
_artifact.py: Transition element.py get_artifact_metadata*() methods
This includes the methods that interact with the metadata stored within the artifact YAML: keys, dependencies, workspaced, and workspaced dependencies.
-rw-r--r--  buildstream/_artifact.py | 149
-rw-r--r--  buildstream/element.py   |  87
2 files changed, 180 insertions(+), 56 deletions(-)
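Each of the four Artifact methods added below follows the same shape: resolve the key, check the element's in-memory cache, and otherwise parse the relevant meta/*.yaml exactly once and store the result under both the strong and the weak key so a later lookup by either key is a hit. A minimal sketch of that pattern, for orientation only (lookup_cached and parse are hypothetical names, not identifiers from this commit):

    def lookup_cached(key, strong_key, weak_key, cache, parse):
        # Warm cache: return the value and None, signalling that the
        # caller's cache dict needs no write-back.
        if key in cache:
            return cache[key], None

        # Cold cache: parse the expensive yaml exactly once...
        value = parse()

        # ...and store it under both keys, so a later lookup by either
        # the strong or the weak key avoids re-parsing.
        cache[strong_key] = value
        cache[weak_key] = value
        return value, cache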
diff --git a/buildstream/_artifact.py b/buildstream/_artifact.py
index 9c21d8863..45ea53408 100644
--- a/buildstream/_artifact.py
+++ b/buildstream/_artifact.py
@@ -273,6 +273,155 @@ class Artifact():
return build_result
+ # get_metadata_keys():
+ #
+ # Retrieve the strong and weak keys from the given artifact.
+ #
+ # Args:
+ # key (str): The artifact key, or None for the default key
+ # metadata_keys (dict): The element's cached strong/weak
+ # metadata keys, empty if not yet cached
+ #
+ # Returns:
+ # (str): The strong key
+ # (str): The weak key
+ # (dict): The key dict, None if not updated
+ #
+ def get_metadata_keys(self, key, metadata_keys):
+
+ # Now extract it and possibly derive the key
+ artifact_vdir, key = self._get_directory(key)
+
+ # Now try the cache, once we're sure about the key
+ if key in metadata_keys:
+ return (metadata_keys[key]['strong'],
+ metadata_keys[key]['weak'], None)
+
+ # Parse the expensive yaml now and cache the result
+ meta_file = artifact_vdir._objpath(['meta', 'keys.yaml'])
+ meta = _yaml.load(meta_file, shortname='meta/keys.yaml')
+ strong_key = meta['strong']
+ weak_key = meta['weak']
+
+ assert key in (strong_key, weak_key)
+
+ metadata_keys[strong_key] = meta
+ metadata_keys[weak_key] = meta
+
+ return (strong_key, weak_key, metadata_keys)
+
+ # get_metadata_dependencies():
+ #
+ # Retrieve the hash of dependency keys from the given artifact.
+ #
+ # Args:
+ # key (str): The artifact key, or None for the default key
+ # metadata_dependencies (dict): The element's cached dependency metadata keys,
+ # empty if not yet cached
+ # metadata_keys (dict): The element's cached strong/weak
+ # metadata keys, empty if not yet cached
+ #
+ # Returns:
+ # (dict): A dictionary of element names and their keys
+ # (dict): The dependencies key dict, None if not updated
+ # (dict): The element's key dict, None if not updated
+ #
+ def get_metadata_dependencies(self, key, metadata_dependencies, metadata_keys):
+
+ # Extract it and possibly derive the key
+ artifact_vdir, key = self._get_directory(key)
+
+ # Now try the cache, once we're sure about the key
+ if key in metadata_dependencies:
+ return (metadata_dependencies[key], None, None)
+
+ # Parse the expensive yaml now and cache the result
+ meta_file = artifact_vdir._objpath(['meta', 'dependencies.yaml'])
+ meta = _yaml.load(meta_file, shortname='meta/dependencies.yaml')
+
+ # Cache it under both strong and weak keys
+ strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
+ metadata_dependencies[strong_key] = meta
+ metadata_dependencies[weak_key] = meta
+
+ return (meta, metadata_dependencies, metadata_keys)
+
+ # get_metadata_workspaced():
+ #
+ # Retrieve whether the given artifact was workspaced.
+ #
+ # Args:
+ # key (str): The artifact key, or None for the default key
+ # metadata_workspaced (dict): The element's cached boolean metadata
+ # of whether it's workspaced, empty if
+ # not yet cached
+ # metadata_keys (dict): The element's cached strong/weak
+ # metadata keys, empty if not yet cached
+ #
+ # Returns:
+ # (bool): Whether the given artifact was workspaced
+ # (dict): The workspaced key dict, None if not updated
+ # (dict): The element's key dict, None if not updated
+ #
+ def get_metadata_workspaced(self, key, metadata_workspaced, metadata_keys):
+
+ # Extract it and possibly derive the key
+ artifact_vdir, key = self._get_directory(key)
+
+ # Now try the cache, once we're sure about the key
+ if key in metadata_workspaced:
+ return (metadata_workspaced[key], None, None)
+
+ # Parse the expensive yaml now and cache the result
+ meta_file = artifact_vdir._objpath(['meta', 'workspaced.yaml'])
+ meta = _yaml.load(meta_file, shortname='meta/workspaced.yaml')
+ workspaced = meta['workspaced']
+
+ # Cache it under both strong and weak keys
+ strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
+ metadata_workspaced[strong_key] = workspaced
+ metadata_workspaced[weak_key] = workspaced
+
+ return (workspaced, metadata_workspaced, metadata_keys)
+
+ # get_metadata_workspaced_dependencies():
+ #
+ # Retrieve the list of workspaced dependencies from the given artifact.
+ #
+ # Args:
+ # key (str): The artifact key, or None for the default key
+ # metadata_workspaced_dependencies (dict): The element's cached metadata of
+ # which dependencies are workspaced,
+ # empty if not yet cached
+ # metadata_keys (dict): The element's cached strong/weak
+ # metadata keys, empty if not yet cached
+ #
+ # Returns:
+ # (list): List of which dependencies are workspaced
+ # (dict): The workspaced dependencies key dict, None if not updated
+ # (dict): The element's key dict, None if not updated
+ #
+ def get_metadata_workspaced_dependencies(self, key, metadata_workspaced_dependencies,
+ metadata_keys):
+
+ # Extract it and possibly derive the key
+ artifact_vdir, key = self._get_directory(key)
+
+ # Now try the cache, once we're sure about the key
+ if key in metadata_workspaced_dependencies:
+ return (metadata_workspaced_dependencies[key], None, None)
+
+ # Parse the expensive yaml now and cache the result
+ meta_file = artifact_vdir._objpath(['meta', 'workspaced-dependencies.yaml'])
+ meta = _yaml.load(meta_file, shortname='meta/workspaced-dependencies.yaml')
+ workspaced = meta['workspaced-dependencies']
+
+ # Cache it under both strong and weak keys
+ strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
+ metadata_workspaced_dependencies[strong_key] = workspaced
+ metadata_workspaced_dependencies[weak_key] = workspaced
+ return (workspaced, metadata_workspaced_dependencies, metadata_keys)
+
# _get_directory():
#
# Get a virtual directory for the artifact contents
diff --git a/buildstream/element.py b/buildstream/element.py
index ce04f8163..982fd894f 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -2652,24 +2652,14 @@ class Element(Plugin):
#
def __get_artifact_metadata_keys(self, key=None):
- # Now extract it and possibly derive the key
- artifact_vdir, key = self.__get_artifact_directory(key)
+ metadata_keys = self.__metadata_keys
- # Now try the cache, once we're sure about the key
- if key in self.__metadata_keys:
- return (self.__metadata_keys[key]['strong'],
- self.__metadata_keys[key]['weak'])
+ strong_key, weak_key, metadata_keys = self.__artifact.get_metadata_keys(key, metadata_keys)
- # Parse the expensive yaml now and cache the result
- meta_file = artifact_vdir._objpath(['meta', 'keys.yaml'])
- meta = _yaml.load(meta_file, shortname='meta/keys.yaml')
- strong_key = meta['strong']
- weak_key = meta['weak']
+ # Update keys if needed
+ if metadata_keys:
+ self.__metadata_keys = metadata_keys
- assert key in (strong_key, weak_key)
-
- self.__metadata_keys[strong_key] = meta
- self.__metadata_keys[weak_key] = meta
return (strong_key, weak_key)
# __get_artifact_metadata_dependencies():
@@ -2684,21 +2674,16 @@ class Element(Plugin):
#
def __get_artifact_metadata_dependencies(self, key=None):
- # Extract it and possibly derive the key
- artifact_vdir, key = self.__get_artifact_directory(key)
-
- # Now try the cache, once we're sure about the key
- if key in self.__metadata_dependencies:
- return self.__metadata_dependencies[key]
+ metadata = [self.__metadata_dependencies, self.__metadata_keys]
+ meta, meta_deps, meta_keys = self.__artifact.get_metadata_dependencies(key, *metadata)
- # Parse the expensive yaml now and cache the result
- meta_file = artifact_vdir._objpath(['meta', 'dependencies.yaml'])
- meta = _yaml.load(meta_file, shortname='meta/dependencies.yaml')
+ # Update deps if needed
+ if meta_deps:
+ self.__metadata_dependencies = meta_deps
+ # Update keys if needed; no need to check when deps were not updated
+ if meta_keys:
+ self.__metadata_keys = meta_keys
- # Cache it under both strong and weak keys
- strong_key, weak_key = self.__get_artifact_metadata_keys(key)
- self.__metadata_dependencies[strong_key] = meta
- self.__metadata_dependencies[weak_key] = meta
return meta
# __get_artifact_metadata_workspaced():
@@ -2711,24 +2696,19 @@ class Element(Plugin):
# Returns:
# (bool): Whether the given artifact was workspaced
#
- def __get_artifact_metadata_workspaced(self, key=None):
- # Extract it and possibly derive the key
- artifact_vdir, key = self.__get_artifact_directory(key)
+ def __get_artifact_metadata_workspaced(self, key=None):
- # Now try the cache, once we're sure about the key
- if key in self.__metadata_workspaced:
- return self.__metadata_workspaced[key]
+ metadata = [self.__metadata_workspaced, self.__metadata_keys]
+ workspaced, meta_workspaced, meta_keys = self.__artifact.get_metadata_workspaced(key, *metadata)
- # Parse the expensive yaml now and cache the result
- meta_file = artifact_vdir._objpath(['meta', 'workspaced.yaml'])
- meta = _yaml.load(meta_file, shortname='meta/workspaced.yaml')
- workspaced = meta['workspaced']
+ # Update workspaced if needed
+ if meta_workspaced:
+ self.__metadata_workspaced = meta_workspaced
+ # Update keys if needed; no need to check when workspaced was not updated
+ if meta_keys:
+ self.__metadata_keys = meta_keys
- # Cache it under both strong and weak keys
- strong_key, weak_key = self.__get_artifact_metadata_keys(key)
- self.__metadata_workspaced[strong_key] = workspaced
- self.__metadata_workspaced[weak_key] = workspaced
return workspaced
# __get_artifact_metadata_workspaced_dependencies():
@@ -2743,22 +2723,17 @@ class Element(Plugin):
#
def __get_artifact_metadata_workspaced_dependencies(self, key=None):
- # Extract it and possibly derive the key
- artifact_vdir, key = self.__get_artifact_directory(key)
-
- # Now try the cache, once we're sure about the key
- if key in self.__metadata_workspaced_dependencies:
- return self.__metadata_workspaced_dependencies[key]
+ metadata = [self.__metadata_workspaced_dependencies, self.__metadata_keys]
+ workspaced, meta_workspaced_deps,\
+ meta_keys = self.__artifact.get_metadata_workspaced_dependencies(key, *metadata)
- # Parse the expensive yaml now and cache the result
- meta_file = artifact_vdir._objpath(['meta', 'workspaced-dependencies.yaml'])
- meta = _yaml.load(meta_file, shortname='meta/workspaced-dependencies.yaml')
- workspaced = meta['workspaced-dependencies']
+ # Update workspaced if needed
+ if meta_workspaced_deps:
+ self.__metadata_workspaced_dependencies = meta_workspaced_deps
+ # Update keys if needed; no need to check when workspaced was not updated
+ if meta_keys:
+ self.__metadata_keys = meta_keys
- # Cache it under both strong and weak keys
- strong_key, weak_key = self.__get_artifact_metadata_keys(key)
- self.__metadata_workspaced_dependencies[strong_key] = workspaced
- self.__metadata_workspaced_dependencies[weak_key] = workspaced
return workspaced
# __load_public_data():
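On the element.py side, each wrapper now reduces to the same three steps: hand the relevant caches to the Artifact method, write back whichever caches were actually updated (a None return means that cache was already warm), and return the value. A hedged sketch of that calling convention, with the self.__* attributes abridged to locals (illustrative, not code from this commit):

    workspaced, new_workspaced, new_keys = artifact.get_metadata_workspaced(
        key, metadata_workspaced, metadata_keys)

    if new_workspaced:  # None means the workspaced cache was already warm
        metadata_workspaced = new_workspaced
    if new_keys:        # the keys cache may have been filled as a side effect
        metadata_keys = new_keys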