summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJames Ennis <james.ennis@codethink.com>2019-02-13 10:48:21 +0000
committerJames Ennis <james.ennis@codethink.com>2019-02-13 10:48:21 +0000
commit8c6baf2360a756562291a9b3f398aaa6d5f06491 (patch)
tree5618cc5bf3d3148a3560f2b6f72fc8aea4b8ac19
parent6de65306882ebbbcb0cb791cc123645b0a756069 (diff)
parent6ae41474c175c454f850ac1ccdbfe663d60214bf (diff)
downloadbuildstream-8c6baf2360a756562291a9b3f398aaa6d5f06491.tar.gz
Merge branch 'jennis/refactor_artifact_log' into 'master'
Refactor artifact log command See merge request BuildStream/buildstream!1101
-rw-r--r--buildstream/_artifactcache.py84
-rw-r--r--buildstream/_artifactelement.py88
-rw-r--r--buildstream/_cas/cascache.py24
-rw-r--r--buildstream/_exceptions.py9
-rw-r--r--buildstream/_frontend/cli.py98
-rw-r--r--buildstream/_loader/metaelement.py18
-rw-r--r--buildstream/_project.py14
-rw-r--r--buildstream/_stream.py122
-rw-r--r--buildstream/element.py114
-rw-r--r--tests/artifactcache/pull.py2
-rw-r--r--tests/artifactcache/push.py2
11 files changed, 375 insertions, 200 deletions
diff --git a/buildstream/_artifactcache.py b/buildstream/_artifactcache.py
index 5404dc12e..bc0032bec 100644
--- a/buildstream/_artifactcache.py
+++ b/buildstream/_artifactcache.py
@@ -19,7 +19,6 @@
import multiprocessing
import os
-import string
from collections.abc import Mapping
from .types import _KeyStrength
@@ -29,6 +28,7 @@ from . import utils
from . import _yaml
from ._cas import CASRemote, CASRemoteSpec
+from .storage._casbaseddirectory import CasBasedDirectory
CACHE_SIZE_FILE = "cache_size"
@@ -112,37 +112,6 @@ class ArtifactCache():
self._calculate_cache_quota()
- # get_artifact_fullname()
- #
- # Generate a full name for an artifact, including the
- # project namespace, element name and cache key.
- #
- # This can also be used as a relative path safely, and
- # will normalize parts of the element name such that only
- # digits, letters and some select characters are allowed.
- #
- # Args:
- # element (Element): The Element object
- # key (str): The element's cache key
- #
- # Returns:
- # (str): The relative path for the artifact
- #
- def get_artifact_fullname(self, element, key):
- project = element._get_project()
-
- # Normalize ostree ref unsupported chars
- valid_chars = string.digits + string.ascii_letters + '-._'
- element_name = ''.join([
- x if x in valid_chars else '_'
- for x in element.normal_name
- ])
-
- assert key is not None
-
- # assume project and element names are not allowed to contain slashes
- return '{0}/{1}/{2}'.format(project.name, element_name, key)
-
# setup_remotes():
#
# Sets up which remotes to use
@@ -241,7 +210,7 @@ class ArtifactCache():
for key in (strong_key, weak_key):
if key:
try:
- ref = self.get_artifact_fullname(element, key)
+ ref = element.get_artifact_name(key)
self.cas.update_mtime(ref)
except CASError:
@@ -521,7 +490,7 @@ class ArtifactCache():
# Returns: True if the artifact is in the cache, False otherwise
#
def contains(self, element, key):
- ref = self.get_artifact_fullname(element, key)
+ ref = element.get_artifact_name(key)
return self.cas.contains(ref)
@@ -538,19 +507,21 @@ class ArtifactCache():
# Returns: True if the subdir exists & is populated in the cache, False otherwise
#
def contains_subdir_artifact(self, element, key, subdir):
- ref = self.get_artifact_fullname(element, key)
+ ref = element.get_artifact_name(key)
return self.cas.contains_subdir_artifact(ref, subdir)
# list_artifacts():
#
# List artifacts in this cache in LRU order.
#
+ # Args:
+    #     glob (str): An optional glob expression to be used to list artifacts satisfying the glob
+ #
# Returns:
- # ([str]) - A list of artifact names as generated by
- # `ArtifactCache.get_artifact_fullname` in LRU order
+ # ([str]) - A list of artifact names as generated in LRU order
#
- def list_artifacts(self):
- return self.cas.list_refs()
+ def list_artifacts(self, *, glob=None):
+ return self.cas.list_refs(glob=glob)
# remove():
#
@@ -559,8 +530,7 @@ class ArtifactCache():
#
# Args:
# ref (artifact_name): The name of the artifact to remove (as
- # generated by
- # `ArtifactCache.get_artifact_fullname`)
+ # generated by `Element.get_artifact_name`)
#
# Returns:
# (int): The amount of space recovered in the cache, in bytes
@@ -606,7 +576,7 @@ class ArtifactCache():
# Returns: path to extracted artifact
#
def extract(self, element, key, subdir=None):
- ref = self.get_artifact_fullname(element, key)
+ ref = element.get_artifact_name(key)
path = os.path.join(self.extractdir, element._get_project().name, element.normal_name)
@@ -622,7 +592,7 @@ class ArtifactCache():
# keys (list): The cache keys to use
#
def commit(self, element, content, keys):
- refs = [self.get_artifact_fullname(element, key) for key in keys]
+ refs = [element.get_artifact_name(key) for key in keys]
self.cas.commit(refs, content)
@@ -638,8 +608,8 @@ class ArtifactCache():
# subdir (str): A subdirectory to limit the comparison to
#
def diff(self, element, key_a, key_b, *, subdir=None):
- ref_a = self.get_artifact_fullname(element, key_a)
- ref_b = self.get_artifact_fullname(element, key_b)
+ ref_a = element.get_artifact_name(key_a)
+ ref_b = element.get_artifact_name(key_b)
return self.cas.diff(ref_a, ref_b, subdir=subdir)
@@ -700,7 +670,7 @@ class ArtifactCache():
# (ArtifactError): if there was an error
#
def push(self, element, keys):
- refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
+ refs = [element.get_artifact_name(key) for key in list(keys)]
project = element._get_project()
@@ -738,7 +708,7 @@ class ArtifactCache():
# (bool): True if pull was successful, False if artifact was not available
#
def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=None):
- ref = self.get_artifact_fullname(element, key)
+ ref = element.get_artifact_name(key)
project = element._get_project()
@@ -850,11 +820,27 @@ class ArtifactCache():
# newkey (str): A new cache key for the artifact
#
def link_key(self, element, oldkey, newkey):
- oldref = self.get_artifact_fullname(element, oldkey)
- newref = self.get_artifact_fullname(element, newkey)
+ oldref = element.get_artifact_name(oldkey)
+ newref = element.get_artifact_name(newkey)
self.cas.link_ref(oldref, newref)
+ # get_artifact_logs():
+ #
+ # Get the logs of an existing artifact
+ #
+ # Args:
+ # ref (str): The ref of the artifact
+ #
+ # Returns:
+ # logsdir (CasBasedDirectory): A CasBasedDirectory containing the artifact's logs
+ #
+ def get_artifact_logs(self, ref):
+ descend = ["logs"]
+ cache_id = self.cas.resolve_ref(ref, update_mtime=True)
+ vdir = CasBasedDirectory(self.cas, cache_id).descend(descend)
+ return vdir
+
################################################
# Local Private Methods #
################################################
diff --git a/buildstream/_artifactelement.py b/buildstream/_artifactelement.py
new file mode 100644
index 000000000..a88e83aab
--- /dev/null
+++ b/buildstream/_artifactelement.py
@@ -0,0 +1,88 @@
+#
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+# Authors:
+# James Ennis <james.ennis@codethink.co.uk>
+from . import Element
+from . import _cachekey
+from ._exceptions import ArtifactElementError
+from ._loader.metaelement import MetaElement
+
+
+# ArtifactElement()
+#
+# Object to be used for directly processing an artifact
+#
+# Args:
+# context (Context): The Context object
+# ref (str): The artifact ref
+#
+class ArtifactElement(Element):
+ def __init__(self, context, ref):
+ _, element, key = verify_artifact_ref(ref)
+
+ self._ref = ref
+ self._key = key
+
+ project = context.get_toplevel_project()
+ meta = MetaElement(project, element) # NOTE element has no .bst suffix
+ plugin_conf = None
+
+ super().__init__(context, project, meta, plugin_conf)
+
+ # Override Element.get_artifact_name()
+ def get_artifact_name(self, key=None):
+ return self._ref
+
+ # Dummy configure method
+ def configure(self, node):
+ pass
+
+ # Dummy preflight method
+ def preflight(self):
+ pass
+
+ # Override Element._calculate_cache_key
+ def _calculate_cache_key(self, dependencies=None):
+ return self._key
+
+
+# verify_artifact_ref()
+#
+# Verify that a ref string matches the format of an artifact
+#
+# Args:
+# ref (str): The artifact ref
+#
+# Returns:
+# project (str): The project's name
+# element (str): The element's name
+# key (str): The cache key
+#
+# Raises:
+# ArtifactElementError if the ref string does not match
+# the expected format
+#
+def verify_artifact_ref(ref):
+ try:
+        project, element, key = ref.split('/', 2)  # This will raise a ValueError if unable to split
+        # Explicitly raise a ValueError if the key length is not as expected
+ if len(key) != len(_cachekey.generate_key({})):
+ raise ValueError
+ except ValueError:
+ raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
+
+ return project, element, key
diff --git a/buildstream/_cas/cascache.py b/buildstream/_cas/cascache.py
index 560587055..792bf3eb9 100644
--- a/buildstream/_cas/cascache.py
+++ b/buildstream/_cas/cascache.py
@@ -24,6 +24,7 @@ import stat
import errno
import uuid
import contextlib
+from fnmatch import fnmatch
import grpc
@@ -472,22 +473,35 @@ class CASCache():
#
# List refs in Least Recently Modified (LRM) order.
#
+ # Args:
+ # glob (str) - An optional glob expression to be used to list refs satisfying the glob
+ #
# Returns:
# (list) - A list of refs in LRM order
#
- def list_refs(self):
+ def list_refs(self, *, glob=None):
# string of: /path/to/repo/refs/heads
ref_heads = os.path.join(self.casdir, 'refs', 'heads')
+ path = ref_heads
+
+ if glob is not None:
+ globdir = os.path.dirname(glob)
+ if not any(c in "*?[" for c in globdir):
+ # path prefix contains no globbing characters so
+ # append the glob to optimise the os.walk()
+ path = os.path.join(ref_heads, globdir)
refs = []
mtimes = []
- for root, _, files in os.walk(ref_heads):
+ for root, _, files in os.walk(path):
for filename in files:
ref_path = os.path.join(root, filename)
- refs.append(os.path.relpath(ref_path, ref_heads))
- # Obtain the mtime (the time a file was last modified)
- mtimes.append(os.path.getmtime(ref_path))
+ relative_path = os.path.relpath(ref_path, ref_heads) # Relative to refs head
+ if not glob or fnmatch(relative_path, glob):
+ refs.append(relative_path)
+ # Obtain the mtime (the time a file was last modified)
+ mtimes.append(os.path.getmtime(ref_path))
# NOTE: Sorted will sort from earliest to latest, thus the
# first ref of this list will be the file modified earliest.
diff --git a/buildstream/_exceptions.py b/buildstream/_exceptions.py
index 6d8ea6d38..0797e7207 100644
--- a/buildstream/_exceptions.py
+++ b/buildstream/_exceptions.py
@@ -344,3 +344,12 @@ class AppError(BstError):
#
class SkipJob(Exception):
pass
+
+
+# ArtifactElementError
+#
+# Raised when errors are encountered by artifact elements
+#
+class ArtifactElementError(BstError):
+ def __init__(self, message, *, detail=None, reason=None):
+ super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason)
diff --git a/buildstream/_frontend/cli.py b/buildstream/_frontend/cli.py
index b3c4634a9..e3c5059b9 100644
--- a/buildstream/_frontend/cli.py
+++ b/buildstream/_frontend/cli.py
@@ -1,7 +1,6 @@
import os
import sys
from contextlib import ExitStack
-from fnmatch import fnmatch
from functools import partial
from tempfile import TemporaryDirectory
@@ -901,38 +900,6 @@ def workspace_list(app):
#############################################################
# Artifact Commands #
#############################################################
-def _classify_artifacts(names, cas, project_directory):
- element_targets = []
- artifact_refs = []
- element_globs = []
- artifact_globs = []
-
- for name in names:
- if name.endswith('.bst'):
- if any(c in "*?[" for c in name):
- element_globs.append(name)
- else:
- element_targets.append(name)
- else:
- if any(c in "*?[" for c in name):
- artifact_globs.append(name)
- else:
- artifact_refs.append(name)
-
- if element_globs:
- for dirpath, _, filenames in os.walk(project_directory):
- for filename in filenames:
- element_path = os.path.join(dirpath, filename).lstrip(project_directory).lstrip('/')
- if any(fnmatch(element_path, glob) for glob in element_globs):
- element_targets.append(element_path)
-
- if artifact_globs:
- artifact_refs.extend(ref for ref in cas.list_refs()
- if any(fnmatch(ref, glob) for glob in artifact_globs))
-
- return element_targets, artifact_refs
-
-
@cli.group(short_help="Manipulate cached artifacts")
def artifact():
"""Manipulate cached artifacts"""
@@ -1111,53 +1078,24 @@ def artifact_push(app, elements, deps, remote):
@click.pass_obj
def artifact_log(app, artifacts):
"""Show logs of all artifacts"""
- from .._exceptions import CASError
- from .._message import MessageType
- from .._pipeline import PipelineSelection
- from ..storage._casbaseddirectory import CasBasedDirectory
-
- with ExitStack() as stack:
- stack.enter_context(app.initialized())
- cache = app.context.artifactcache
-
- elements, artifacts = _classify_artifacts(artifacts, cache.cas,
- app.project.directory)
-
- vdirs = []
- extractdirs = []
- if artifacts:
- for ref in artifacts:
- try:
- cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
- vdir = CasBasedDirectory(cache.cas, cache_id)
- vdirs.append(vdir)
- except CASError as e:
- app._message(MessageType.WARN, "Artifact {} is not cached".format(ref), detail=str(e))
- continue
- if elements:
- elements = app.stream.load_selection(elements, selection=PipelineSelection.NONE)
- for element in elements:
- if not element._cached():
- app._message(MessageType.WARN, "Element {} is not cached".format(element))
- continue
- ref = cache.get_artifact_fullname(element, element._get_cache_key())
- cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
- vdir = CasBasedDirectory(cache.cas, cache_id)
- vdirs.append(vdir)
-
- for vdir in vdirs:
- # NOTE: If reading the logs feels unresponsive, here would be a good place to provide progress information.
- logsdir = vdir.descend(["logs"])
- td = stack.enter_context(TemporaryDirectory())
- logsdir.export_files(td, can_link=True)
- extractdirs.append(td)
-
- for extractdir in extractdirs:
- for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
- # NOTE: Should click gain the ability to pass files to the pager this can be optimised.
- with open(log) as f:
- data = f.read()
- click.echo_via_pager(data)
+ with app.initialized():
+ logsdirs = app.stream.artifact_log(artifacts)
+
+ with ExitStack() as stack:
+ extractdirs = []
+ for logsdir in logsdirs:
+ # NOTE: If reading the logs feels unresponsive, here would be a good place
+ # to provide progress information.
+ td = stack.enter_context(TemporaryDirectory())
+ logsdir.export_files(td, can_link=True)
+ extractdirs.append(td)
+
+ for extractdir in extractdirs:
+ for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
+ # NOTE: Should click gain the ability to pass files to the pager this can be optimised.
+ with open(log) as f:
+ data = f.read()
+ click.echo_via_pager(data)
##################################################################
diff --git a/buildstream/_loader/metaelement.py b/buildstream/_loader/metaelement.py
index c13d5591e..943b925ff 100644
--- a/buildstream/_loader/metaelement.py
+++ b/buildstream/_loader/metaelement.py
@@ -38,20 +38,20 @@ class MetaElement():
# sandbox: Configuration specific to the sandbox environment
# first_pass: The element is to be loaded with first pass configuration (junction)
#
- def __init__(self, project, name, kind, provenance, sources, config,
- variables, environment, env_nocache, public, sandbox,
- first_pass):
+ def __init__(self, project, name, kind=None, provenance=None, sources=None, config=None,
+ variables=None, environment=None, env_nocache=None, public=None,
+ sandbox=None, first_pass=False):
self.project = project
self.name = name
self.kind = kind
self.provenance = provenance
self.sources = sources
- self.config = config
- self.variables = variables
- self.environment = environment
- self.env_nocache = env_nocache
- self.public = public
- self.sandbox = sandbox
+ self.config = config or {}
+ self.variables = variables or {}
+ self.environment = environment or {}
+ self.env_nocache = env_nocache or []
+ self.public = public or {}
+ self.sandbox = sandbox or {}
self.build_dependencies = []
self.dependencies = []
self.first_pass = first_pass
diff --git a/buildstream/_project.py b/buildstream/_project.py
index 51cdd5e2b..028bdcc9f 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -26,6 +26,7 @@ from . import utils
from . import _cachekey
from . import _site
from . import _yaml
+from ._artifactelement import ArtifactElement
from ._profile import Topics, profile_start, profile_end
from ._exceptions import LoadError, LoadErrorReason
from ._options import OptionPool
@@ -255,6 +256,19 @@ class Project():
else:
return self.config.element_factory.create(self._context, self, meta)
+ # create_artifact_element()
+ #
+ # Instantiate and return an ArtifactElement
+ #
+ # Args:
+ # ref (str): A string of the artifact ref
+ #
+ # Returns:
+ # (ArtifactElement): A newly created ArtifactElement object of the appropriate kind
+ #
+ def create_artifact_element(self, ref):
+ return ArtifactElement(self._context, ref)
+
# create_source()
#
# Instantiate and return a Source
diff --git a/buildstream/_stream.py b/buildstream/_stream.py
index 588780558..caaa48908 100644
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -27,8 +27,10 @@ import shutil
import tarfile
import tempfile
from contextlib import contextmanager, suppress
+from fnmatch import fnmatch
-from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
+from ._artifactelement import verify_artifact_ref
+from ._exceptions import StreamError, ImplError, BstError, ArtifactElementError, set_last_task_error
from ._message import Message, MessageType
from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
from ._pipeline import Pipeline, PipelineSelection
@@ -108,19 +110,21 @@ class Stream():
def load_selection(self, targets, *,
selection=PipelineSelection.NONE,
except_targets=(),
- use_artifact_config=False):
+ use_artifact_config=False,
+ load_refs=False):
profile_start(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))
- elements, _ = self._load(targets, (),
- selection=selection,
- except_targets=except_targets,
- fetch_subprojects=False,
- use_artifact_config=use_artifact_config)
+ target_objects, _ = self._load(targets, (),
+ selection=selection,
+ except_targets=except_targets,
+ fetch_subprojects=False,
+ use_artifact_config=use_artifact_config,
+ load_refs=load_refs)
profile_end(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))
- return elements
+ return target_objects
# shell()
#
@@ -491,6 +495,31 @@ class Stream():
raise StreamError("Error while staging dependencies into a sandbox"
": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+ # artifact_log()
+ #
+ # Show the full log of an artifact
+ #
+ # Args:
+ # targets (str): Targets to view the logs of
+ #
+ # Returns:
+ # logsdir (list): A list of CasBasedDirectory objects containing artifact logs
+ #
+ def artifact_log(self, targets):
+ # Return list of Element and/or ArtifactElement objects
+ target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True)
+
+ logsdirs = []
+ for obj in target_objects:
+ ref = obj.get_artifact_name()
+ if not obj._cached():
+ self._message(MessageType.WARN, "{} is not cached".format(ref))
+ continue
+
+ logsdirs.append(self._artifacts.get_artifact_logs(ref))
+
+ return logsdirs
+
# source_checkout()
#
# Checkout sources of the target element to the specified location
@@ -922,25 +951,36 @@ class Stream():
use_artifact_config=False,
artifact_remote_url=None,
fetch_subprojects=False,
- dynamic_plan=False):
+ dynamic_plan=False,
+ load_refs=False):
+
+ # Classify element and artifact strings
+ target_elements, target_artifacts = self._classify_artifacts(targets)
+
+ if target_artifacts and not load_refs:
+ detail = '\n'.join(target_artifacts)
+ raise ArtifactElementError("Cannot perform this operation with artifact refs:", detail=detail)
# Load rewritable if we have any tracking selection to make
rewritable = False
if track_targets:
rewritable = True
- # Load all targets
+ # Load all target elements
elements, except_elements, track_elements, track_except_elements = \
- self._pipeline.load([targets, except_targets, track_targets, track_except_targets],
+ self._pipeline.load([target_elements, except_targets, track_targets, track_except_targets],
rewritable=rewritable,
fetch_subprojects=fetch_subprojects)
+ # Obtain the ArtifactElement objects
+ artifacts = [self._project.create_artifact_element(ref) for ref in target_artifacts]
+
# Optionally filter out junction elements
if ignore_junction_targets:
elements = [e for e in elements if e.get_kind() != 'junction']
# Hold on to the targets
- self.targets = elements
+ self.targets = elements + artifacts
# Here we should raise an error if the track_elements targets
# are not dependencies of the primary targets, this is not
@@ -997,9 +1037,9 @@ class Stream():
# Now move on to loading primary selection.
#
- self._pipeline.resolve_elements(elements)
- selected = self._pipeline.get_selection(elements, selection, silent=False)
- selected = self._pipeline.except_elements(elements,
+ self._pipeline.resolve_elements(self.targets)
+ selected = self._pipeline.get_selection(self.targets, selection, silent=False)
+ selected = self._pipeline.except_elements(self.targets,
selected,
except_elements)
@@ -1331,3 +1371,55 @@ class Stream():
required_list.append(element)
return required_list
+
+ # _classify_artifacts()
+ #
+ # Split up a list of targets into element names and artifact refs
+ #
+ # Args:
+ # targets (list): A list of targets
+ #
+ # Returns:
+ # (list): element names present in the targets
+ # (list): artifact refs present in the targets
+ #
+ def _classify_artifacts(self, targets):
+ element_targets = []
+ artifact_refs = []
+ element_globs = []
+ artifact_globs = []
+
+ for target in targets:
+ if target.endswith('.bst'):
+ if any(c in "*?[" for c in target):
+ element_globs.append(target)
+ else:
+ element_targets.append(target)
+ else:
+ if any(c in "*?[" for c in target):
+ artifact_globs.append(target)
+ else:
+ try:
+ verify_artifact_ref(target)
+ except ArtifactElementError:
+ element_targets.append(target)
+ continue
+ artifact_refs.append(target)
+
+ if element_globs:
+ for dirpath, _, filenames in os.walk(self._project.element_path):
+ for filename in filenames:
+ element_path = os.path.join(dirpath, filename)
+ length = len(self._project.element_path) + 1
+ element_path = element_path[length:] # Strip out the element_path
+
+ if any(fnmatch(element_path, glob) for glob in element_globs):
+ element_targets.append(element_path)
+
+ if artifact_globs:
+ for glob in artifact_globs:
+ artifact_refs.extend(self._artifacts.list_artifacts(glob=glob))
+ if not artifact_refs:
+ self._message(MessageType.WARN, "No artifacts found for globs: {}".format(', '.join(artifact_globs)))
+
+ return element_targets, artifact_refs
diff --git a/buildstream/element.py b/buildstream/element.py
index a243826ed..e03f1e171 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -82,6 +82,7 @@ import contextlib
from contextlib import contextmanager
import tempfile
import shutil
+import string
from . import _yaml
from ._variables import Variables
@@ -577,6 +578,38 @@ class Element(Plugin):
self.__assert_cached()
return self.__compute_splits(include, exclude, orphans)
+ def get_artifact_name(self, key=None):
+ """Compute and return this element's full artifact name
+
+ Generate a full name for an artifact, including the project
+ namespace, element name and cache key.
+
+ This can also be used as a relative path safely, and
+ will normalize parts of the element name such that only
+ digits, letters and some select characters are allowed.
+
+ Args:
+ key (str): The element's cache key. Defaults to None
+
+ Returns:
+ (str): The relative path for the artifact
+ """
+ project = self._get_project()
+ if key is None:
+ key = self._get_cache_key()
+
+ assert key is not None
+
+ valid_chars = string.digits + string.ascii_letters + '-._'
+ element_name = ''.join([
+ x if x in valid_chars else '_'
+ for x in self.normal_name
+ ])
+
+ # Note that project names are not allowed to contain slashes. Element names containing
+ # a '/' will have this replaced with a '-' upon Element object instantiation.
+ return '{0}/{1}/{2}'.format(project.name, element_name, key)
+
def stage_artifact(self, sandbox, *, path=None, include=None, exclude=None, orphans=True, update_mtimes=None):
"""Stage this element's output artifact in the sandbox
@@ -1118,7 +1151,7 @@ class Element(Plugin):
e.name for e in self.dependencies(Scope.BUILD, recurse=False)
]
- self.__weak_cache_key = self.__calculate_cache_key(dependencies)
+ self.__weak_cache_key = self._calculate_cache_key(dependencies)
if self.__weak_cache_key is None:
# Weak cache key could not be calculated yet
@@ -1147,8 +1180,7 @@ class Element(Plugin):
dependencies = [
e.__strict_cache_key for e in self.dependencies(Scope.BUILD)
]
- self.__strict_cache_key = self.__calculate_cache_key(dependencies)
-
+ self.__strict_cache_key = self._calculate_cache_key(dependencies)
if self.__strict_cache_key is None:
# Strict cache key could not be calculated yet
return
@@ -1190,7 +1222,7 @@ class Element(Plugin):
dependencies = [
e._get_cache_key() for e in self.dependencies(Scope.BUILD)
]
- self.__cache_key = self.__calculate_cache_key(dependencies)
+ self.__cache_key = self._calculate_cache_key(dependencies)
if self.__cache_key is None:
# Strong cache key could not be calculated yet
@@ -2032,41 +2064,7 @@ class Element(Plugin):
source._fetch(previous_sources)
previous_sources.append(source)
- #############################################################
- # Private Local Methods #
- #############################################################
-
- # __update_source_state()
- #
- # Updates source consistency state
- #
- def __update_source_state(self):
-
- # Cannot resolve source state until tracked
- if self.__tracking_scheduled:
- return
-
- self.__consistency = Consistency.CACHED
- workspace = self._get_workspace()
-
- # Special case for workspaces
- if workspace:
-
- # A workspace is considered inconsistent in the case
- # that its directory went missing
- #
- fullpath = workspace.get_absolute_path()
- if not os.path.exists(fullpath):
- self.__consistency = Consistency.INCONSISTENT
- else:
-
- # Determine overall consistency of the element
- for source in self.__sources:
- source._update_state()
- source_consistency = source._get_consistency()
- self.__consistency = min(self.__consistency, source_consistency)
-
- # __calculate_cache_key():
+ # _calculate_cache_key():
#
# Calculates the cache key
#
@@ -2075,7 +2073,7 @@ class Element(Plugin):
#
# None is returned if information for the cache key is missing.
#
- def __calculate_cache_key(self, dependencies):
+ def _calculate_cache_key(self, dependencies):
# No cache keys for dependencies which have no cache keys
if None in dependencies:
return None
@@ -2114,6 +2112,40 @@ class Element(Plugin):
return _cachekey.generate_key(cache_key_dict)
+ #############################################################
+ # Private Local Methods #
+ #############################################################
+
+ # __update_source_state()
+ #
+ # Updates source consistency state
+ #
+ def __update_source_state(self):
+
+ # Cannot resolve source state until tracked
+ if self.__tracking_scheduled:
+ return
+
+ self.__consistency = Consistency.CACHED
+ workspace = self._get_workspace()
+
+ # Special case for workspaces
+ if workspace:
+
+ # A workspace is considered inconsistent in the case
+ # that its directory went missing
+ #
+ fullpath = workspace.get_absolute_path()
+ if not os.path.exists(fullpath):
+ self.__consistency = Consistency.INCONSISTENT
+ else:
+
+ # Determine overall consistency of the element
+ for source in self.__sources:
+ source._update_state()
+ source_consistency = source._get_consistency()
+ self.__consistency = min(self.__consistency, source_consistency)
+
# __can_build_incrementally()
#
# Check if the element can be built incrementally, this
@@ -2297,6 +2329,8 @@ class Element(Plugin):
defaults['public'] = element_public
def __init_defaults(self, plugin_conf):
+ if plugin_conf is None:
+ return
# Defaults are loaded once per class and then reused
#
diff --git a/tests/artifactcache/pull.py b/tests/artifactcache/pull.py
index edd5a93ba..4ab35f066 100644
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -211,7 +211,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
assert artifactcache.contains(element, element_key)
# Retrieve the Directory object from the cached artifact
- artifact_ref = artifactcache.get_artifact_fullname(element, element_key)
+ artifact_ref = element.get_artifact_name(element_key)
artifact_digest = cas.resolve_ref(artifact_ref)
queue = multiprocessing.Queue()
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index ed2a140e7..3b2e1be09 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -190,7 +190,7 @@ def test_push_directory(cli, tmpdir, datafiles):
assert artifactcache.has_push_remotes(element=element)
# Recreate the CasBasedDirectory object from the cached artifact
- artifact_ref = artifactcache.get_artifact_fullname(element, element_key)
+ artifact_ref = element.get_artifact_name(element_key)
artifact_digest = cas.resolve_ref(artifact_ref)
queue = multiprocessing.Queue()