diff options

field | value | note
---|---|---
author | bst-marge-bot &lt;marge-bot@buildstream.build&gt; | 2019-04-26 11:22:39 +0000
committer | bst-marge-bot &lt;marge-bot@buildstream.build&gt; | 2019-04-26 11:22:39 +0000
commit | 140a9eb9f4ce2c3547e9f61262460dcc34f758c2 | (patch)
tree | 7eebf2583e2fd8198e5692167774b96373963f9c |
parent | a41b50ba81aebae2e32c92ca5a310b293738273d | (diff)
parent | 39f86533ee1251c744137bad4c8685855126d271 | (diff)
download | buildstream-140a9eb9f4ce2c3547e9f61262460dcc34f758c2.tar.gz |
Merge branch 'tpollard/artifactmetadata' into 'master'
Simplify metadata within Artifact class
See merge request BuildStream/buildstream!1314
mode | file | changes
---|---|---
-rw-r--r-- | buildstream/_artifact.py | 105
-rw-r--r-- | buildstream/element.py | 6

2 files changed, 42 insertions, 69 deletions
diff --git a/buildstream/_artifact.py b/buildstream/_artifact.py index 4d9e4bf08..6cf51ee2d 100644 --- a/buildstream/_artifact.py +++ b/buildstream/_artifact.py @@ -56,11 +56,10 @@ class Artifact(): self._cache_key = strong_key self._weak_cache_key = weak_key - # hash tables of loaded artifact metadata, hashed by key - self._metadata_keys = {} # Strong and weak keys for this key - self._metadata_dependencies = {} # Dictionary of dependency strong keys - self._metadata_workspaced = {} # Boolean of whether it's workspaced - self._metadata_workspaced_dependencies = {} # List of which dependencies are workspaced + self._metadata_keys = None # Strong and weak key tuple extracted from the artifact + self._metadata_dependencies = None # Dictionary of dependency strong keys from the artifact + self._metadata_workspaced = None # Boolean of whether it's a workspaced artifact + self._metadata_workspaced_dependencies = None # List of which dependencies are workspaced from the artifact # get_files(): # @@ -68,12 +67,9 @@ class Artifact(): # # Returns: # (Directory): The virtual directory object - # (str): The chosen key # def get_files(self): - subdir = "files" - - return self._get_subdirectory(subdir) + return self._get_subdirectory("files") # get_buildtree(): # @@ -81,12 +77,9 @@ class Artifact(): # # Returns: # (Directory): The virtual directory object - # (str): The chosen key # def get_buildtree(self): - subdir = "buildtree" - - return self._get_subdirectory(subdir) + return self._get_subdirectory("buildtree") # get_extract_key(): # @@ -218,7 +211,7 @@ class Artifact(): # def buildtree_exists(self): - artifact_vdir, _ = self._get_directory() + artifact_vdir = self._get_directory() return artifact_vdir._exists('buildtree') # load_public_data(): @@ -231,7 +224,7 @@ class Artifact(): def load_public_data(self): # Load the public data from the artifact - meta_vdir, _ = self._get_subdirectory('meta') + meta_vdir = self._get_subdirectory('meta') meta_file = 
meta_vdir._objpath('public.yaml') data = _yaml.load(meta_file, shortname='public.yaml') @@ -248,7 +241,7 @@ class Artifact(): # def load_build_result(self): - meta_vdir, _ = self._get_subdirectory('meta') + meta_vdir = self._get_subdirectory('meta') meta_file = meta_vdir._objpath('build-result.yaml') if not os.path.exists(meta_file): @@ -275,12 +268,11 @@ class Artifact(): # def get_metadata_keys(self): - # Now extract it and possibly derive the key - meta_vdir, key = self._get_subdirectory('meta') + if self._metadata_keys is not None: + return self._metadata_keys - # Now try the cache, once we're sure about the key - if key in self._metadata_keys: - return (self._metadata_keys[key]['strong'], self._metadata_keys[key]['weak']) + # Extract the metadata dir + meta_vdir = self._get_subdirectory('meta') # Parse the expensive yaml now and cache the result meta_file = meta_vdir._objpath('keys.yaml') @@ -288,12 +280,9 @@ class Artifact(): strong_key = _yaml.node_get(meta, str, 'strong') weak_key = _yaml.node_get(meta, str, 'weak') - assert key in (strong_key, weak_key) + self._metadata_keys = (strong_key, weak_key) - self._metadata_keys[strong_key] = _yaml.node_sanitize(meta) - self._metadata_keys[weak_key] = _yaml.node_sanitize(meta) - - return (strong_key, weak_key) + return self._metadata_keys # get_metadata_dependencies(): # @@ -304,23 +293,19 @@ class Artifact(): # def get_metadata_dependencies(self): - # Extract it and possibly derive the key - meta_vdir, key = self._get_subdirectory('meta') + if self._metadata_dependencies is not None: + return self._metadata_dependencies - # Now try the cache, once we're sure about the key - if key in self._metadata_dependencies: - return self._metadata_dependencies[key] + # Extract the metadata dir + meta_vdir = self._get_subdirectory('meta') # Parse the expensive yaml now and cache the result meta_file = meta_vdir._objpath('dependencies.yaml') meta = _yaml.load(meta_file, shortname='dependencies.yaml') - # Cache it under both 
strong and weak keys - strong_key, weak_key = self.get_metadata_keys() - self._metadata_dependencies[strong_key] = _yaml.node_sanitize(meta) - self._metadata_dependencies[weak_key] = _yaml.node_sanitize(meta) + self._metadata_dependencies = meta - return meta + return self._metadata_dependencies # get_metadata_workspaced(): # @@ -331,25 +316,19 @@ class Artifact(): # def get_metadata_workspaced(self): - # Extract it and possibly derive the key - meta_vdir, key = self._get_subdirectory('meta') + if self._metadata_workspaced is not None: + return self._metadata_workspaced - # Now try the cache, once we're sure about the key - if key in self._metadata_workspaced: - return self._metadata_workspaced[key] + # Extract the metadata dir + meta_vdir = self._get_subdirectory('meta') # Parse the expensive yaml now and cache the result meta_file = meta_vdir._objpath('workspaced.yaml') meta = _yaml.load(meta_file, shortname='workspaced.yaml') - workspaced = _yaml.node_get(meta, bool, 'workspaced') - - # Cache it under both strong and weak keys - strong_key, weak_key = self.get_metadata_keys() - self._metadata_workspaced[strong_key] = workspaced - self._metadata_workspaced[weak_key] = workspaced + self._metadata_workspaced = _yaml.node_get(meta, bool, 'workspaced') - return workspaced + return self._metadata_workspaced # get_metadata_workspaced_dependencies(): # @@ -360,24 +339,20 @@ class Artifact(): # def get_metadata_workspaced_dependencies(self): - # Extract it and possibly derive the key - meta_vdir, key = self._get_subdirectory('meta') + if self._metadata_workspaced_dependencies is not None: + return self._metadata_workspaced_dependencies - # Now try the cache, once we're sure about the key - if key in self._metadata_workspaced_dependencies: - return self._metadata_workspaced_dependencies[key] + # Extract the metadata dir + meta_vdir = self._get_subdirectory('meta') # Parse the expensive yaml now and cache the result meta_file = 
meta_vdir._objpath('workspaced-dependencies.yaml') meta = _yaml.load(meta_file, shortname='workspaced-dependencies.yaml') - workspaced = _yaml.node_sanitize(_yaml.node_get(meta, list, 'workspaced-dependencies')) - # Cache it under both strong and weak keys - strong_key, weak_key = self.get_metadata_keys() - self._metadata_workspaced_dependencies[strong_key] = workspaced - self._metadata_workspaced_dependencies[weak_key] = workspaced + self._metadata_workspaced_dependencies = _yaml.node_sanitize(_yaml.node_get(meta, list, + 'workspaced-dependencies')) - return workspaced + return self._metadata_workspaced_dependencies # cached(): # @@ -395,7 +370,7 @@ class Artifact(): context = self._context try: - vdir, _ = self._get_directory() + vdir = self._get_directory() except ArtifactError: # Either ref or top-level artifact directory missing return False @@ -433,7 +408,7 @@ class Artifact(): if not self._element._cached(): return False - log_vdir, _ = self._get_subdirectory('logs') + log_vdir = self._get_subdirectory('logs') logsdigest = log_vdir._get_digest() return self._artifacts.cas.contains_directory(logsdigest, with_files=True) @@ -448,7 +423,6 @@ class Artifact(): # # Returns: # (Directory): The virtual directory object - # (str): The chosen key # def _get_directory(self, key=None): @@ -457,7 +431,7 @@ class Artifact(): if key is None: key = self.get_extract_key() - return (self._artifacts.get_artifact_directory(element, key), key) + return self._artifacts.get_artifact_directory(element, key) # _get_subdirectory(): # @@ -470,11 +444,10 @@ class Artifact(): # # Returns: # (Directory): The virtual subdirectory object - # (str): The chosen key # def _get_subdirectory(self, subdir, key=None): - artifact_vdir, key = self._get_directory(key) + artifact_vdir = self._get_directory(key) sub_vdir = artifact_vdir.descend(subdir) - return (sub_vdir, key) + return sub_vdir diff --git a/buildstream/element.py b/buildstream/element.py index 95081b940..05884c008 100644 --- 
a/buildstream/element.py +++ b/buildstream/element.py @@ -675,7 +675,7 @@ class Element(Plugin): self.__assert_cached() with self.timed_activity("Staging {}/{}".format(self.name, self._get_brief_display_key())): - files_vdir, _ = self.__artifact.get_files() + files_vdir = self.__artifact.get_files() # Hard link it into the staging area # @@ -1525,7 +1525,7 @@ class Element(Plugin): # Check if we have a cached buildtree to use elif usebuildtree: - import_dir, _ = self.__artifact.get_buildtree() + import_dir = self.__artifact.get_buildtree() if import_dir.is_empty(): detail = "Element type either does not expect a buildtree or it was explictily cached without one." self.warn("WARNING: {} Artifact contains an empty buildtree".format(self.name), detail=detail) @@ -2774,7 +2774,7 @@ class Element(Plugin): def __compute_splits(self, include=None, exclude=None, orphans=True): filter_func = self.__split_filter_func(include=include, exclude=exclude, orphans=orphans) - files_vdir, _ = self.__artifact.get_files() + files_vdir = self.__artifact.get_files() element_files = files_vdir.list_relative_paths() |