author     Benjamin Schubert <contact@benschubert.me>   2020-01-16 18:49:54 +0000
committer  Benjamin Schubert <contact@benschubert.me>   2020-01-16 18:49:54 +0000
commit     a5b2396539e1621eef75a035b12c1c4266b5c9fe (patch)
tree       174524d511845a01449ce4be73982f2c0abfafb8
parent     b41f5dd977d13937f338d3737733353d613a2245 (diff)
parent     3554f36185d4ce507fbc76d7a694849ae717d1a8 (diff)
Merge branch 'bschubert/optimize-consistency' into 'master'
Optimize consistency and state handling

Closes #1126

See merge request BuildStream/buildstream!1739
-rw-r--r--  NEWS | 21
-rw-r--r--  src/buildstream/__init__.py | 2
-rw-r--r--  src/buildstream/_frontend/widget.py | 7
-rw-r--r--  src/buildstream/_gitsourcebase.py | 13
-rw-r--r--  src/buildstream/_loader/loader.py | 17
-rw-r--r--  src/buildstream/_pipeline.py | 10
-rw-r--r--  src/buildstream/_scheduler/queues/fetchqueue.py | 11
-rw-r--r--  src/buildstream/_stream.py | 2
-rw-r--r--  src/buildstream/element.py | 121
-rw-r--r--  src/buildstream/plugin.py | 4
-rw-r--r--  src/buildstream/plugins/sources/_downloadablefilesource.py | 17
-rw-r--r--  src/buildstream/plugins/sources/bzr.py | 13
-rw-r--r--  src/buildstream/plugins/sources/local.py | 9
-rw-r--r--  src/buildstream/plugins/sources/patch.py | 9
-rw-r--r--  src/buildstream/plugins/sources/pip.py | 10
-rw-r--r--  src/buildstream/plugins/sources/workspace.py | 16
-rw-r--r--  src/buildstream/source.py | 135
-rw-r--r--  src/buildstream/types.py | 35
-rw-r--r--  tests/format/project/plugin-no-load-ref/plugins/noloadref.py | 5
-rw-r--r--  tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py | 5
-rw-r--r--  tests/frontend/consistencyerror/bug.bst | 2
-rw-r--r--  tests/frontend/consistencyerror/error.bst | 2
-rw-r--r--  tests/frontend/consistencyerror/plugins/consistencybug.py | 5
-rw-r--r--  tests/frontend/consistencyerror/plugins/consistencyerror.py | 5
-rw-r--r--  tests/frontend/project/sources/fetch_source.py | 13
-rw-r--r--  tests/internals/pluginloading/badversionsource/customsources/foo.py | 5
-rw-r--r--  tests/internals/pluginloading/customsource/pluginsources/foo.py | 5
-rw-r--r--  tests/sourcecache/fetch.py | 12
-rw-r--r--  tests/sourcecache/push.py | 4
-rw-r--r--  tests/sourcecache/staging.py | 6
-rw-r--r--  tests/sources/git.py | 10
-rw-r--r--  tests/sources/no-fetch-cached/plugins/sources/always_cached.py | 9
-rw-r--r--  tests/sources/previous_source_access/plugins/sources/foo_transform.py | 15
-rw-r--r--  tests/sources/project_key_test/plugins/sources/key-test.py | 14
-rw-r--r--  tox.ini | 4
35 files changed, 288 insertions(+), 285 deletions(-)
diff --git a/NEWS b/NEWS
index 293085cfe..a4002ff50 100644
--- a/NEWS
+++ b/NEWS
@@ -20,6 +20,27 @@ API
Plugin authors that do this and believe BuildStream should be testing that
part of their plugins should open an issue on BuildStream.
+ o BREAKING CHANGE: `Consistency` has been removed and
+ `Source.get_consistency()` has been replaced by `Source.is_resolved()`
+ and `Source.is_cached()`.
+
+ `Source.is_resolved()` defaults to `self.get_ref() is not None`, so if
+ having a valid ref is all your plugin needs in order to be considered
+ resolved, there is nothing more to do there.
+
+ `Source.is_cached()` replaces a `Consistency.CACHED` implementation and
+ must be implemented by plugin authors.
+
+Core
+----
+
+ o BREAKING CHANGE: Once a source has been put in the internal BuildStream
+ cache, its content will no longer be checked for validity unless a
+ workspace is opened on it. If a non-fatal warning was emitted while
+ fetching a source and you want to re-check it without changing its ref
+ (most plugins handle that correctly), you will need to delete the
+ internal source cache first.
+
Miscellaneous
-------------
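
For plugin authors migrating away from `get_consistency()`, the following is a minimal sketch of the before/after shape of a download-style source. The `MyFileSource` name, its `ref` handling and the `self._mirror_file()` helper are illustrative assumptions rather than part of this commit; the pattern mirrors the `_downloadablefilesource.py` change further down.

    import os

    from buildstream import Source


    class MyFileSource(Source):
        # Removed API -- this plugin would previously have reported one of
        # the three Consistency states:
        #
        #     def get_consistency(self):
        #         if self.ref is None:
        #             return Consistency.INCONSISTENT
        #         if os.path.isfile(self._mirror_file()):
        #             return Consistency.CACHED
        #         return Consistency.RESOLVED

        # New API -- the default Source.is_resolved() already returns
        # `self.get_ref() is not None`, so only is_cached() needs implementing.
        def is_cached(self):
            # Cached when the downloaded file exists in the local mirror
            # (self._mirror_file() is a hypothetical helper, not BuildStream API)
            return os.path.isfile(self._mirror_file())
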
diff --git a/src/buildstream/__init__.py b/src/buildstream/__init__.py
index c78fcbbf6..c68c63e39 100644
--- a/src/buildstream/__init__.py
+++ b/src/buildstream/__init__.py
@@ -30,7 +30,7 @@ if "_BST_COMPLETION" not in os.environ:
from .utils import UtilError, ProgramNotFoundError
from .sandbox import Sandbox, SandboxFlags, SandboxCommandError
- from .types import Scope, Consistency, CoreWarnings
+ from .types import Scope, CoreWarnings
from .node import MappingNode, Node, ProvenanceInformation, ScalarNode, SequenceNode
from .plugin import Plugin
from .source import Source, SourceError, SourceFetcher
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py
index 98ebf31f3..fcc951d00 100644
--- a/src/buildstream/_frontend/widget.py
+++ b/src/buildstream/_frontend/widget.py
@@ -27,7 +27,7 @@ from ruamel import yaml
import click
from .profile import Profile
-from .. import Consistency, Scope
+from .. import Scope
from .. import __version__ as bst_version
from .._exceptions import ImplError
from .._message import MessageType
@@ -346,8 +346,7 @@ class LogLine(Widget):
line = p.fmt_subst(line, "key", cache_key, fg="yellow", dim=dim_keys)
line = p.fmt_subst(line, "full-key", full_key, fg="yellow", dim=dim_keys)
- consistency = element._get_consistency()
- if consistency == Consistency.INCONSISTENT:
+ if not element._has_all_sources_resolved():
line = p.fmt_subst(line, "state", "no reference", fg="red")
else:
if element.get_kind() == "junction":
@@ -356,7 +355,7 @@ class LogLine(Widget):
line = p.fmt_subst(line, "state", "failed", fg="red")
elif element._cached_success():
line = p.fmt_subst(line, "state", "cached", fg="magenta")
- elif consistency == Consistency.RESOLVED and not element._source_cached():
+ elif not element._has_all_sources_in_source_cache() and not element._has_all_sources_cached():
line = p.fmt_subst(line, "state", "fetch needed", fg="red")
elif element._buildable():
line = p.fmt_subst(line, "state", "buildable", fg="green")
diff --git a/src/buildstream/_gitsourcebase.py b/src/buildstream/_gitsourcebase.py
index 5b7f051b9..a8253e36f 100644
--- a/src/buildstream/_gitsourcebase.py
+++ b/src/buildstream/_gitsourcebase.py
@@ -30,7 +30,7 @@ from tempfile import TemporaryFile
from configparser import RawConfigParser
from .source import Source, SourceError, SourceFetcher
-from .types import Consistency, CoreWarnings
+from .types import CoreWarnings
from . import utils
from .types import FastEnum
from .utils import move_atomic, DirectoryExistsError
@@ -517,12 +517,11 @@ class _GitSourceBase(Source):
return key
- def get_consistency(self):
- if self._have_all_refs():
- return Consistency.CACHED
- elif self.mirror.ref is not None:
- return Consistency.RESOLVED
- return Consistency.INCONSISTENT
+ def is_resolved(self):
+ return self.mirror.ref is not None
+
+ def is_cached(self):
+ return self._have_all_refs()
def load_ref(self, node):
self.mirror.ref = node.get_str("ref", None)
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index 0c6c725c7..531655c09 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -20,7 +20,6 @@
import os
from .._exceptions import LoadError, LoadErrorReason
-from .. import Consistency
from .. import _yaml
from ..element import Element
from ..node import Node
@@ -651,16 +650,9 @@ class Loader:
self._loaders[filename] = loader
return loader
- # Handle the case where a subproject needs to be fetched
- #
- if element._get_consistency() >= Consistency.RESOLVED and not element._source_cached():
- if ticker:
- ticker(filename, "Fetching subproject")
- self._fetch_subprojects([element])
-
# Handle the case where a subproject has no ref
#
- elif element._get_consistency() == Consistency.INCONSISTENT:
+ if not element._has_all_sources_resolved():
detail = "Try tracking the junction element with `bst source track {}`".format(filename)
raise LoadError(
"{}Subproject has no ref for junction: {}".format(provenance_str, filename),
@@ -668,6 +660,13 @@ class Loader:
detail=detail,
)
+ # Handle the case where a subproject needs to be fetched
+ #
+ if not element._has_all_sources_in_source_cache():
+ if ticker:
+ ticker(filename, "Fetching subproject")
+ self._fetch_subprojects([element])
+
sources = list(element.sources())
if len(sources) == 1 and sources[0]._get_local_path():
# Optimization for junctions with a single local source
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index 353a9c3a0..8de97bea6 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -29,7 +29,7 @@ from pyroaring import BitMap # pylint: disable=no-name-in-module
from ._exceptions import PipelineError
from ._message import Message, MessageType
from ._profile import Topics, PROFILER
-from . import Scope, Consistency
+from . import Scope
from ._project import ProjectRefStorage
from .types import _PipelineSelection
@@ -340,7 +340,7 @@ class Pipeline:
inconsistent_workspaced = []
with self._context.messenger.timed_activity("Checking sources"):
for element in elements:
- if element._get_consistency() == Consistency.INCONSISTENT:
+ if not element._has_all_sources_resolved():
if element._get_workspace():
inconsistent_workspaced.append(element)
else:
@@ -351,7 +351,7 @@ class Pipeline:
for element in inconsistent:
detail += " Element: {} is inconsistent\n".format(element._get_full_name())
for source in element.sources():
- if source._get_consistency() == Consistency.INCONSISTENT:
+ if not source.is_resolved():
detail += " {} is missing ref\n".format(source)
detail += "\n"
detail += "Try tracking these elements first with `bst source track`\n"
@@ -375,7 +375,7 @@ class Pipeline:
uncached = []
with self._context.messenger.timed_activity("Checking sources"):
for element in elements:
- if element._get_consistency() < Consistency.CACHED and not element._source_cached():
+ if not element._has_all_sources_in_source_cache() and not element._has_all_sources_cached():
uncached.append(element)
if uncached:
@@ -383,7 +383,7 @@ class Pipeline:
for element in uncached:
detail += " Following sources for element: {} are not cached:\n".format(element._get_full_name())
for source in element.sources():
- if source._get_consistency() < Consistency.CACHED:
+ if not source._is_cached():
detail += " {}\n".format(source)
detail += "\n"
detail += (
diff --git a/src/buildstream/_scheduler/queues/fetchqueue.py b/src/buildstream/_scheduler/queues/fetchqueue.py
index 4f38f377a..18bf392d3 100644
--- a/src/buildstream/_scheduler/queues/fetchqueue.py
+++ b/src/buildstream/_scheduler/queues/fetchqueue.py
@@ -18,9 +18,6 @@
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
# Jürg Billeter <juerg.billeter@codethink.co.uk>
-# BuildStream toplevel imports
-from ... import Consistency
-
# Local imports
from . import Queue, QueueStatus
from ..resources import ResourceType
@@ -69,13 +66,7 @@ class FetchQueue(Queue):
if status is JobStatus.FAIL:
return
- element._fetch_done()
-
- # Successful fetch, we must be CACHED or in the sourcecache
- if self._should_fetch_original:
- assert element._get_consistency() == Consistency.CACHED
- else:
- assert element._source_cached()
+ element._fetch_done(self._should_fetch_original)
def register_pending_element(self, element):
# Set a "can_query_cache" callback for an element not yet ready
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index db9794c45..18297c2e4 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -212,7 +212,7 @@ class Stream:
# definitions to control the execution environment only.
if directory is None:
- if not element._source_cached():
+ if not element._has_all_sources_in_source_cache():
raise StreamError(
"Sources for element {} are not cached."
"Element must be fetched.".format(element._get_full_name())
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 595724fcf..d7ede31f1 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -100,7 +100,7 @@ from .plugin import Plugin
from .sandbox import SandboxFlags, SandboxCommandError
from .sandbox._config import SandboxConfig
from .sandbox._sandboxremote import SandboxRemote
-from .types import Consistency, CoreWarnings, Scope, _CacheBuildTrees, _KeyStrength
+from .types import CoreWarnings, Scope, _CacheBuildTrees, _KeyStrength
from ._artifact import Artifact
from .storage.directory import Directory
@@ -257,12 +257,13 @@ class Element(Plugin):
self.__strict_cache_key = None # Our cached cache key for strict builds
self.__artifacts = context.artifactcache # Artifact cache
self.__sourcecache = context.sourcecache # Source cache
- self.__consistency = Consistency.INCONSISTENT # Cached overall consistency state
+ self.__is_resolved = False # Whether the element's sources are all resolved or not
self.__assemble_scheduled = False # Element is scheduled to be assembled
self.__assemble_done = False # Element is assembled
self.__pull_done = False # Whether pull was attempted
self.__cached_successfully = None # If the Element is known to be successfully cached
- self.__source_cached = None # If the sources are known to be successfully cached
+ self.__has_all_sources_in_source_cache = None # If the sources are known to be successfully cached
+ self.__has_all_sources_cached = False # Whether all sources have a cached local copy of their data
self.__splits = None # Resolved regex objects for computing split domains
self.__whitelist_regex = None # Resolved regex object to check if file is allowed to overlap
self.__tainted = None # Whether the artifact is tainted and should not be shared
@@ -1072,13 +1073,6 @@ class Element(Plugin):
cls.__instantiated_elements = {}
cls.__redundant_source_refs = []
- # _get_consistency()
- #
- # Returns cached consistency state
- #
- def _get_consistency(self):
- return self.__consistency
-
# _cached():
#
# Returns:
@@ -1169,7 +1163,7 @@ class Element(Plugin):
# (bool): Whether this element can currently be built
#
def _buildable(self):
- if self._get_consistency() < Consistency.CACHED and not self._source_cached():
+ if not (self._has_all_sources_in_source_cache() or self._has_all_sources_cached()):
return False
if not self.__assemble_scheduled:
@@ -1230,7 +1224,7 @@ class Element(Plugin):
# notably jobs executed in sub-processes. Changes are performed by
# invocations of the following methods:
#
- # - __update_source_state()
+ # - __update_resolved_state()
# - Computes the state of all sources of the element.
# - __update_cache_keys()
# - Computes the strong and weak cache keys.
@@ -1252,7 +1246,7 @@ class Element(Plugin):
# side effects.
#
# *This* method starts the process by invoking
- # `__update_source_state()`, which will cause all necessary state
+ # `__update_resolved_state()`, which will cause all necessary state
# changes. Other functions should use the appropriate methods and
# only update what they expect to change - this will ensure that
# the minimum amount of work is done.
@@ -1273,7 +1267,7 @@ class Element(Plugin):
# pull/build, however should not occur during initialization
# (since we will eventually visit reverse dependencies during
# our initialization anyway).
- self.__update_source_state()
+ self.__update_resolved_state()
# _get_display_key():
#
@@ -1322,7 +1316,14 @@ class Element(Plugin):
def _tracking_done(self):
# Tracking may change the sources' refs, and therefore the
# source state. We need to update source state.
- self.__update_source_state()
+ self.__update_resolved_state()
+
+ # Check whether sources are now cached.
+ # This is done here so that we don't throw an exception trying to show the pipeline at the end.
+ # It also has the side effect of caching that fact, which changes the object's state.
+ # Doing it here rather than later lets us validate the sources locally as early as possible.
+ self._has_all_sources_in_source_cache()
+ self._has_all_sources_cached()
# _track():
#
@@ -1442,7 +1443,7 @@ class Element(Plugin):
else:
# Assert sources are cached
- assert self._source_cached()
+ assert self._has_all_sources_in_source_cache()
if self.__sources:
@@ -1734,15 +1735,16 @@ class Element(Plugin):
#
# Indicates that fetching the sources for this element has been done.
#
- def _fetch_done(self):
- # We are not updating the state recursively here since fetching can
- # never end up in updating them.
+ # Args:
+ # fetched_original (bool): Whether the original sources were requested (and fetched) or not
+ #
+ def _fetch_done(self, fetched_original):
+ self.__has_all_sources_in_source_cache = True
+ if fetched_original:
+ self.__has_all_sources_cached = True
- # Fetching changes the source state from RESOLVED to CACHED
- # Fetching cannot change the source state from INCONSISTENT to CACHED because
- # we prevent fetching when it's INCONSISTENT.
- # Therefore, only the source state will change.
- self.__update_source_state()
+ for source in self.__sources:
+ source._fetch_done(fetched_original)
# _pull_pending()
#
@@ -1830,11 +1832,11 @@ class Element(Plugin):
def _skip_source_push(self):
if not self.__sources or self._get_workspace():
return True
- return not (self.__sourcecache.has_push_remotes(plugin=self) and self._source_cached())
+ return not (self.__sourcecache.has_push_remotes(plugin=self) and self._has_all_sources_in_source_cache())
def _source_push(self):
# try and push sources if we've got them
- if self.__sourcecache.has_push_remotes(plugin=self) and self._source_cached():
+ if self.__sourcecache.has_push_remotes(plugin=self) and self._has_all_sources_in_source_cache():
for source in self.sources():
if not self.__sourcecache.push(source):
return False
@@ -2094,7 +2096,7 @@ class Element(Plugin):
continue
# try and fetch from source cache
- if source._get_consistency() < Consistency.CACHED and self.__sourcecache.has_fetch_remotes():
+ if not source._is_cached() and self.__sourcecache.has_fetch_remotes():
if self.__sourcecache.pull(source):
continue
@@ -2103,8 +2105,7 @@ class Element(Plugin):
# We need to fetch original sources
if fetch_needed or fetch_original:
for source in self.sources():
- source_consistency = source._get_consistency()
- if source_consistency != Consistency.CACHED:
+ if not source._is_cached():
source._fetch(previous_sources)
previous_sources.append(source)
@@ -2156,9 +2157,9 @@ class Element(Plugin):
return _cachekey.generate_key(cache_key_dict)
# Check if sources are cached, generating the source key if it hasn't been
- def _source_cached(self):
- if self.__source_cached is not None:
- return self.__source_cached
+ def _has_all_sources_in_source_cache(self):
+ if self.__has_all_sources_in_source_cache is not None:
+ return self.__has_all_sources_in_source_cache
if self.__sources:
sourcecache = self._get_context().sourcecache
@@ -2176,21 +2177,35 @@ class Element(Plugin):
if not sourcecache.contains(source):
return False
- self.__source_cached = True
+ self.__has_all_sources_in_source_cache = True
return True
+ # _has_all_sources_resolved()
+ #
+ # Get whether all sources of the element are resolved
+ #
+ def _has_all_sources_resolved(self):
+ return self.__is_resolved
+
+ # _has_all_sources_cached()
+ #
+ # Get whether all the sources of the element have a cached local
+ # copy of their data.
+ #
+ def _has_all_sources_cached(self):
+ if not self.__has_all_sources_cached:
+ self.__has_all_sources_cached = all(source._is_cached() for source in self.__sources)
+ return self.__has_all_sources_cached
+
def _should_fetch(self, fetch_original=False):
""" return bool of if we need to run the fetch stage for this element
Args:
fetch_original (bool): whether we need to original unstaged source
"""
- if (self._get_consistency() == Consistency.CACHED and fetch_original) or (
- self._source_cached() and not fetch_original
- ):
- return False
- else:
- return True
+ if fetch_original:
+ return not self._has_all_sources_cached()
+ return not self._has_all_sources_in_source_cache()
# _set_can_query_cache_callback()
#
@@ -2330,28 +2345,20 @@ class Element(Plugin):
# Private Local Methods #
#############################################################
- # __update_source_state()
+ # __update_resolved_state()
#
- # Updates source consistency state
+ # Updates the element's resolved state based on its sources
#
# An element's source state must be resolved before it may compute
# cache keys, because the source's ref, whether defined in yaml or
# from the workspace, is a component of the element's cache keys.
#
- def __update_source_state(self):
-
- old_consistency = self.__consistency
- self.__consistency = Consistency.CACHED
-
- # Determine overall consistency of the element
+ def __update_resolved_state(self):
for source in self.__sources:
- # FIXME: It'd be nice to remove this eventually
- source._update_state()
- self.__consistency = min(self.__consistency, source._get_consistency())
-
- # If the source state changes, our cache key must also change,
- # since it contains the source's key.
- if old_consistency != self.__consistency:
+ if not source.is_resolved():
+ break
+ else:
+ self.__is_resolved = True
self.__update_cache_keys()
# __can_build_incrementally()
@@ -2963,7 +2970,7 @@ class Element(Plugin):
# Caches the sources into the local CAS
#
def __cache_sources(self):
- if self.__sources and not self._source_cached():
+ if self.__sources and not self._has_all_sources_in_source_cache():
last_requires_previous = 0
# commit all other sources by themselves
for ix, source in enumerate(self.__sources):
@@ -2997,7 +3004,7 @@ class Element(Plugin):
# Note that it does not update *all* cache keys - In non-strict mode, the
# strong cache key is updated in __update_cache_key_non_strict()
#
- # If the element's consistency is Consistency.INCONSISTENT this is
+ # If the element is not resolved, this is
# a no-op (since inconsistent elements cannot have cache keys).
#
# The weak and strict cache keys will be calculated if not already
@@ -3013,7 +3020,7 @@ class Element(Plugin):
# dependency has changed.
#
def __update_cache_keys(self):
- if self._get_consistency() == Consistency.INCONSISTENT:
+ if not self._has_all_sources_resolved():
# Tracking may still be pending
return
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index 0cbd72e27..9794b9bba 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -353,8 +353,8 @@ class Plugin:
which could possibly affect the output and return a dictionary of these settings.
For Sources, this is guaranteed to only be called if
- :func:`Source.get_consistency() <buildstream.source.Source.get_consistency>`
- has not returned :func:`Consistency.INCONSISTENT <buildstream.source.Consistency.INCONSISTENT>`
+ :func:`Source.is_resolved() <buildstream.source.Source.is_resolved>`
+ has returned `True`,
which is to say that the Source is expected to have an exact *ref* indicating
exactly what source is going to be staged.
"""
diff --git a/src/buildstream/plugins/sources/_downloadablefilesource.py b/src/buildstream/plugins/sources/_downloadablefilesource.py
index 4e43ee3e3..50f8561fb 100644
--- a/src/buildstream/plugins/sources/_downloadablefilesource.py
+++ b/src/buildstream/plugins/sources/_downloadablefilesource.py
@@ -7,7 +7,7 @@ import contextlib
import shutil
import netrc
-from buildstream import Source, SourceError, Consistency
+from buildstream import Source, SourceError
from buildstream import utils
@@ -88,15 +88,8 @@ class DownloadableFileSource(Source):
def get_unique_key(self):
return [self.original_url, self.ref]
- def get_consistency(self):
- if self.ref is None:
- return Consistency.INCONSISTENT
-
- if os.path.isfile(self._get_mirror_file()):
- return Consistency.CACHED
-
- else:
- return Consistency.RESOLVED
+ def is_cached(self) -> bool:
+ return os.path.isfile(self._get_mirror_file())
def load_ref(self, node):
self.ref = node.get_str("ref", None)
@@ -130,7 +123,7 @@ class DownloadableFileSource(Source):
# Just a defensive check, it is impossible for the
# file to be already cached because Source.fetch() will
- # not be called if the source is already Consistency.CACHED.
+ # not be called if the source is already cached.
#
if os.path.isfile(self._get_mirror_file()):
return # pragma: nocover
@@ -178,7 +171,7 @@ class DownloadableFileSource(Source):
etag = self._get_etag(self.ref)
# Do not re-download the file if the ETag matches.
- if etag and self.get_consistency() == Consistency.CACHED:
+ if etag and self.is_cached():
request.add_header("If-None-Match", etag)
opener = self.__get_urlopener()
diff --git a/src/buildstream/plugins/sources/bzr.py b/src/buildstream/plugins/sources/bzr.py
index 30ce55585..8a02eff95 100644
--- a/src/buildstream/plugins/sources/bzr.py
+++ b/src/buildstream/plugins/sources/bzr.py
@@ -59,7 +59,7 @@ import shutil
import fcntl
from contextlib import contextmanager
-from buildstream import Source, SourceError, Consistency
+from buildstream import Source, SourceError
from buildstream import utils
@@ -81,16 +81,9 @@ class BzrSource(Source):
def get_unique_key(self):
return [self.original_url, self.tracking, self.ref]
- def get_consistency(self):
- if self.ref is None or self.tracking is None:
- return Consistency.INCONSISTENT
-
- # Lock for the _check_ref()
+ def is_cached(self):
with self._locked():
- if self._check_ref():
- return Consistency.CACHED
- else:
- return Consistency.RESOLVED
+ return self._check_ref()
def load_ref(self, node):
self.ref = node.get_str("ref", None)
diff --git a/src/buildstream/plugins/sources/local.py b/src/buildstream/plugins/sources/local.py
index 90d8a8f6f..57bcf14df 100644
--- a/src/buildstream/plugins/sources/local.py
+++ b/src/buildstream/plugins/sources/local.py
@@ -38,7 +38,7 @@ details on common configuration options for sources.
import os
from buildstream.storage.directory import Directory
-from buildstream import Source, SourceError, Consistency
+from buildstream import Source, SourceError
class LocalSource(Source):
@@ -61,8 +61,11 @@ class LocalSource(Source):
def preflight(self):
pass
- def get_consistency(self):
- return Consistency.CACHED
+ def is_resolved(self):
+ return True
+
+ def is_cached(self):
+ return True
# We dont have a ref, we're a local file...
def load_ref(self, node):
diff --git a/src/buildstream/plugins/sources/patch.py b/src/buildstream/plugins/sources/patch.py
index 082983023..f33dfedec 100644
--- a/src/buildstream/plugins/sources/patch.py
+++ b/src/buildstream/plugins/sources/patch.py
@@ -45,7 +45,7 @@ details on common configuration options for sources.
"""
import os
-from buildstream import Source, SourceError, Consistency
+from buildstream import Source, SourceError
from buildstream import utils
@@ -67,8 +67,11 @@ class PatchSource(Source):
def get_unique_key(self):
return [self.path, utils.sha256sum(self.fullpath), self.strip_level]
- def get_consistency(self):
- return Consistency.CACHED
+ def is_resolved(self):
+ return True
+
+ def is_cached(self):
+ return True
def load_ref(self, node):
pass
diff --git a/src/buildstream/plugins/sources/pip.py b/src/buildstream/plugins/sources/pip.py
index 2c9773787..eac2f3d01 100644
--- a/src/buildstream/plugins/sources/pip.py
+++ b/src/buildstream/plugins/sources/pip.py
@@ -72,7 +72,7 @@ import hashlib
import os
import re
-from buildstream import Consistency, Source, SourceError, utils
+from buildstream import Source, SourceError, utils
_OUTPUT_DIRNAME = ".bst_pip_downloads"
_PYPI_INDEX_URL = "https://pypi.org/simple/"
@@ -136,12 +136,8 @@ class PipSource(Source):
def get_unique_key(self):
return [self.original_url, self.ref]
- def get_consistency(self):
- if not self.ref:
- return Consistency.INCONSISTENT
- if os.path.exists(self._mirror) and os.listdir(self._mirror):
- return Consistency.CACHED
- return Consistency.RESOLVED
+ def is_cached(self):
+ return os.path.exists(self._mirror) and os.listdir(self._mirror)
def get_ref(self):
return self.ref
diff --git a/src/buildstream/plugins/sources/workspace.py b/src/buildstream/plugins/sources/workspace.py
index 3d4c93b5c..ce62f3aff 100644
--- a/src/buildstream/plugins/sources/workspace.py
+++ b/src/buildstream/plugins/sources/workspace.py
@@ -38,7 +38,7 @@ workspace. The node constructed would be specified as follows:
import os
from buildstream.storage.directory import Directory
-from buildstream import Source, SourceError, Consistency
+from buildstream import Source, SourceError
from buildstream.types import SourceRef
from buildstream.node import MappingNode
@@ -69,6 +69,12 @@ class WorkspaceSource(Source):
def preflight(self) -> None:
pass # pragma: nocover
+ def is_cached(self):
+ return True
+
+ def is_resolved(self):
+ return os.path.exists(self._get_local_path())
+
def get_ref(self) -> None:
return None
@@ -84,14 +90,6 @@ class WorkspaceSource(Source):
def init_workspace(self, directory: Directory) -> None:
raise AssertionError("Attempting to re-open an existing workspace")
- def get_consistency(self) -> Consistency:
- if not os.path.exists(self._get_local_path()):
- # A workspace is considered inconsistent in the case that
- # its directory went missing
- return Consistency.INCONSISTENT
- else:
- return Consistency.CACHED
-
def fetch(self) -> None: # pylint: disable=arguments-differ
pass # pragma: nocover
diff --git a/src/buildstream/source.py b/src/buildstream/source.py
index f49cdb493..4839cf0fe 100644
--- a/src/buildstream/source.py
+++ b/src/buildstream/source.py
@@ -59,10 +59,6 @@ For loading and configuration purposes, Sources must implement the
Sources expose the following abstract methods. Unless explicitly mentioned,
these methods are mandatory to implement.
-* :func:`Source.get_consistency() <buildstream.source.Source.get_consistency>`
-
- Report the sources consistency state.
-
* :func:`Source.load_ref() <buildstream.source.Source.load_ref>`
Load the ref from a specific YAML node
@@ -169,7 +165,7 @@ from typing import Iterable, Iterator, Optional, Tuple, TYPE_CHECKING
from . import _yaml, utils
from .node import MappingNode
from .plugin import Plugin
-from .types import Consistency, SourceRef, Union, List
+from .types import SourceRef, Union, List
from ._exceptions import BstError, ImplError, PluginError, ErrorDomain
from ._loader.metasource import MetaSource
from ._projectrefs import ProjectRefStorage
@@ -356,7 +352,6 @@ class Source(Plugin):
self.__element_index = meta.element_index # The index of the source in the owning element's source list
self.__element_kind = meta.element_kind # The kind of the element owning this source
self.__directory = meta.directory # Staging relative directory
- self.__consistency = Consistency.INCONSISTENT # Cached consistency state
self.__meta_kind = meta.kind # The kind of this source, required for unpickling
self.__key = None # Cache key for source
@@ -379,6 +374,8 @@ class Source(Plugin):
self._configure(self.__config)
self.__digest = None
+ self.__is_cached = None
+
COMMON_CONFIG_KEYS = ["kind", "directory"]
"""Common source config keys
@@ -389,13 +386,6 @@ class Source(Plugin):
#############################################################
# Abstract Methods #
#############################################################
- def get_consistency(self) -> int:
- """Report whether the source has a resolved reference
-
- Returns:
- (:class:`.Consistency`): The source consistency
- """
- raise ImplError("Source plugin '{}' does not implement get_consistency()".format(self.get_kind()))
def load_ref(self, node: MappingNode) -> None:
"""Loads the *ref* for this Source from the specified *node*.
@@ -560,15 +550,27 @@ class Source(Plugin):
"""Implement any validations once we know the sources are cached
This is guaranteed to be called only once for a given session
- once the sources are known to be
- :attr:`Consistency.CACHED <buildstream.types.Consistency.CACHED>`,
- if source tracking is enabled in the session for this source,
+ once the sources are known to be cached.
+ If source tracking is enabled in the session for this source,
then this will only be called if the sources become cached after
tracking completes.
*Since: 1.4*
"""
+ def is_cached(self) -> bool:
+ """Get whether the source has a local copy of its data.
+
+ This method is guaranteed to only be called whenever
+ :func:`Source.is_resolved() <buildstream.source.Source.is_resolved>`
+ returns `True`.
+
+ Returns: whether the source is cached locally or not.
+
+ *Since: 1.93.0*
+ """
+ raise ImplError("Source plugin '{}' does not implement is_cached()".format(self.get_kind()))
+
#############################################################
# Public Methods #
#############################################################
@@ -705,6 +707,22 @@ class Source(Plugin):
with utils._tempdir(dir=mirrordir) as tempdir:
yield tempdir
+ def is_resolved(self) -> bool:
+ """Get whether the source is resolved.
+
+ This has a default implementation that checks whether the source
+ has a ref or not. If it has a ref, it is assumed to be resolved.
+
+ Sources that never have a ref or have uncommon requirements can
+ override this method to specify when they should be considered
+ resolved.
+
+ Returns: whether the source is fully resolved or not
+
+ *Since: 1.93.0*
+ """
+ return self.get_ref() is not None
+
#############################################################
# Private Abstract Methods used in BuildStream #
#############################################################
@@ -752,41 +770,39 @@ class Source(Plugin):
# Prepend provenance to the error
raise SourceError("{}: {}".format(self, e), reason=e.reason) from e
- # Update cached consistency for a source
- #
- # This must be called whenever the state of a source may have changed.
+ # Get whether the source is cached by the source plugin
#
- def _update_state(self):
+ def _is_cached(self):
+ if self.__is_cached is None:
+ # We guarantee we only ever call this when we are resolved.
+ assert self.is_resolved()
- if self.__consistency < Consistency.CACHED:
+ # Set to 'False' before the first call; this prevents throwing multiple errors if the
+ # plugin throws an exception when we display the resulting pipeline.
+ # Otherwise, the summary would throw a second exception and we would not
+ # get a clean error report.
+ self.__is_cached = False
- # Source consistency interrogations are silent.
- context = self._get_context()
- with context.messenger.silence():
- try:
- self.__consistency = self.get_consistency() # pylint: disable=assignment-from-no-return
- except SourceError:
- # SourceErrors should be preserved so that the
- # plugin can communicate real error cases.
- raise
- except Exception as err: # pylint: disable=broad-except
- # Generic errors point to bugs in the plugin, so
- # we need to catch them and make sure they do not
- # cause stacktraces
- raise PluginError(
- "Source plugin '{}' failed to compute source consistency: {}".format(self.get_kind(), err),
- reason="source-bug",
- )
-
- # Give the Source an opportunity to validate the cached
- # sources as soon as the Source becomes Consistency.CACHED.
- if self.__consistency == Consistency.CACHED:
- self.validate_cache()
-
- # Return cached consistency
- #
- def _get_consistency(self):
- return self.__consistency
+ try:
+ self.__is_cached = self.is_cached() # pylint: disable=assignment-from-no-return
+ except SourceError:
+ # SourceErrors should be preserved so that the
+ # plugin can communicate real error cases.
+ raise
+ except Exception as err: # pylint: disable=broad-except
+ # Generic errors point to bugs in the plugin, so
+ # we need to catch them and make sure they do not
+ # cause stacktraces
+
+ raise PluginError(
+ "Source plugin '{}' failed to check its cached state: {}".format(self.get_kind(), err),
+ reason="source-bug",
+ )
+
+ if self.__is_cached:
+ self.validate_cache()
+
+ return self.__is_cached
# Wrapper function around plugin provided fetch method
#
@@ -803,6 +819,24 @@ class Source(Plugin):
else:
self.__do_fetch()
+ self.validate_cache()
+
+ # _fetch_done()
+ #
+ # Indicates that fetching the source has been done.
+ #
+ # Args:
+ # fetched_original (bool): Whether the original sources were requested (and fetched) or not
+ #
+ def _fetch_done(self, fetched_original):
+ if fetched_original:
+ # The original was fetched, we know we are cached
+ self.__is_cached = True
+ else:
+ # The original was not requested, we might or might not be cached
+ # Don't recompute, but allow recomputation later if needed
+ self.__is_cached = None
+
# Wrapper for stage() api which gives the source
# plugin a fully constructed path considering the
# 'directory' option
@@ -1234,7 +1268,6 @@ class Source(Plugin):
#
clone._preflight()
clone._load_ref()
- clone._update_state()
return clone
@@ -1411,9 +1444,9 @@ class Source(Plugin):
for index, src in enumerate(previous_sources):
# BuildStream should track sources in the order they appear so
# previous sources should never be in an inconsistent state
- assert src.get_consistency() != Consistency.INCONSISTENT
+ assert src.is_resolved()
- if src.get_consistency() == Consistency.RESOLVED:
+ if not src._is_cached():
src._fetch(previous_sources[0:index])
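
Sources that never carry a ref (such as the `local`, `patch` and `workspace` plugins updated in this commit) cannot rely on the default `is_resolved()`, because their `get_ref()` returns None. Below is a minimal sketch of that pattern; the `TreeSource` name and its `path` configuration key are illustrative assumptions only.

    import os

    from buildstream import Source


    class TreeSource(Source):
        def configure(self, node):
            # "path" is an illustrative configuration key for this sketch
            self.path = node.get_str("path")

        # This kind of source has no ref, so the default is_resolved()
        # (`self.get_ref() is not None`) would always report it as unresolved.
        def get_ref(self):
            return None

        def load_ref(self, node):
            pass

        def is_resolved(self):
            # Resolved as long as the configured directory still exists on disk
            return os.path.exists(self.path)

        def is_cached(self):
            # The data lives directly in the project; there is nothing to fetch
            return True

The `local` and `workspace` changes in this commit follow the same split: `local` hard-codes both methods to True, while `workspace` reports itself resolved only if its directory still exists.
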
diff --git a/src/buildstream/types.py b/src/buildstream/types.py
index 0e596bddb..6e217e1a2 100644
--- a/src/buildstream/types.py
+++ b/src/buildstream/types.py
@@ -116,41 +116,6 @@ class Scope(FastEnum):
"""
-class Consistency(FastEnum):
- """Defines the various consistency states of a :class:`.Source`.
- """
-
- INCONSISTENT = 0
- """Inconsistent
-
- Inconsistent sources have no explicit reference set. They cannot
- produce a cache key, be fetched or staged. They can only be tracked.
- """
-
- RESOLVED = 1
- """Resolved
-
- Resolved sources have a reference and can produce a cache key and
- be fetched, however they cannot be staged.
- """
-
- CACHED = 2
- """Cached
-
- Sources have a cached unstaged copy in the source directory.
- """
-
- def __ge__(self, other):
- if self.__class__ is not other.__class__:
- raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
- return self.value >= other.value
-
- def __lt__(self, other):
- if self.__class__ is not other.__class__:
- raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
- return self.value < other.value
-
-
class CoreWarnings:
"""CoreWarnings()
diff --git a/tests/format/project/plugin-no-load-ref/plugins/noloadref.py b/tests/format/project/plugin-no-load-ref/plugins/noloadref.py
index 5d44cd5c7..e2fe0ac46 100644
--- a/tests/format/project/plugin-no-load-ref/plugins/noloadref.py
+++ b/tests/format/project/plugin-no-load-ref/plugins/noloadref.py
@@ -1,4 +1,4 @@
-from buildstream import Source, Consistency
+from buildstream import Source
# Just a dummy plugin which does not support the new load_ref() method.
@@ -15,9 +15,6 @@ class NoLoadRefSource(Source):
def get_unique_key(self):
return {}
- def get_consistency(self):
- return Consistency.CACHED
-
def get_ref(self):
return None
diff --git a/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py b/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py
index 3fa8afb93..db2895f8b 100644
--- a/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py
+++ b/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py
@@ -1,4 +1,4 @@
-from buildstream import Source, SourceError, Consistency
+from buildstream import Source, SourceError
class PreflightErrorSource(Source):
@@ -13,9 +13,6 @@ class PreflightErrorSource(Source):
def get_unique_key(self):
return {}
- def get_consistency(self):
- return Consistency.CACHED
-
def get_ref(self):
return None
diff --git a/tests/frontend/consistencyerror/bug.bst b/tests/frontend/consistencyerror/bug.bst
index a66002046..5639abee2 100644
--- a/tests/frontend/consistencyerror/bug.bst
+++ b/tests/frontend/consistencyerror/bug.bst
@@ -1,4 +1,4 @@
kind: import
-description: An element with an unhandled exception at get_consistency time
+description: An element with an unhandled exception when checking whether it is cached
sources:
- kind: consistencybug
diff --git a/tests/frontend/consistencyerror/error.bst b/tests/frontend/consistencyerror/error.bst
index ccf11c942..e377aa97a 100644
--- a/tests/frontend/consistencyerror/error.bst
+++ b/tests/frontend/consistencyerror/error.bst
@@ -1,4 +1,4 @@
kind: import
-description: An element with a failing source at get_consistency time
+description: An element with a failing source when checking whether it is cached or not
sources:
- kind: consistencyerror
diff --git a/tests/frontend/consistencyerror/plugins/consistencybug.py b/tests/frontend/consistencyerror/plugins/consistencybug.py
index c442d883a..abcbbc997 100644
--- a/tests/frontend/consistencyerror/plugins/consistencybug.py
+++ b/tests/frontend/consistencyerror/plugins/consistencybug.py
@@ -11,7 +11,10 @@ class ConsistencyBugSource(Source):
def get_unique_key(self):
return {}
- def get_consistency(self):
+ def is_resolved(self):
+ return True
+
+ def is_cached(self):
# Raise an unhandled exception (not a BstError)
raise Exception("Something went terribly wrong")
diff --git a/tests/frontend/consistencyerror/plugins/consistencyerror.py b/tests/frontend/consistencyerror/plugins/consistencyerror.py
index 125baf39c..2e30d4842 100644
--- a/tests/frontend/consistencyerror/plugins/consistencyerror.py
+++ b/tests/frontend/consistencyerror/plugins/consistencyerror.py
@@ -11,7 +11,10 @@ class ConsistencyErrorSource(Source):
def get_unique_key(self):
return {}
- def get_consistency(self):
+ def is_resolved(self):
+ return True
+
+ def is_cached(self):
# Raise an error unconditionally
raise SourceError("Something went terribly wrong", reason="the-consistency-error")
diff --git a/tests/frontend/project/sources/fetch_source.py b/tests/frontend/project/sources/fetch_source.py
index 51bfe1049..c62d1d29a 100644
--- a/tests/frontend/project/sources/fetch_source.py
+++ b/tests/frontend/project/sources/fetch_source.py
@@ -1,6 +1,6 @@
import os
-from buildstream import Source, Consistency, SourceError, SourceFetcher
+from buildstream import Source, SourceError, SourceFetcher
# Expected config
# sources:
@@ -66,17 +66,20 @@ class FetchSource(Source):
def get_unique_key(self):
return {"urls": self.original_urls, "output_file": self.output_file}
- def get_consistency(self):
+ def is_resolved(self):
+ return True
+
+ def is_cached(self) -> bool:
if not os.path.exists(self.output_file):
- return Consistency.RESOLVED
+ return False
with open(self.output_file, "r") as f:
contents = f.read()
for url in self.original_urls:
if url not in contents:
- return Consistency.RESOLVED
+ return False
- return Consistency.CACHED
+ return True
# We dont have a ref, we're a local file...
def load_ref(self, node):
diff --git a/tests/internals/pluginloading/badversionsource/customsources/foo.py b/tests/internals/pluginloading/badversionsource/customsources/foo.py
index f50855fd1..628f99b29 100644
--- a/tests/internals/pluginloading/badversionsource/customsources/foo.py
+++ b/tests/internals/pluginloading/badversionsource/customsources/foo.py
@@ -1,4 +1,4 @@
-from buildstream import Source, Consistency
+from buildstream import Source
class BarSource(Source):
@@ -11,9 +11,6 @@ class BarSource(Source):
def configure(self, node):
pass
- def get_consistency(self):
- return Consistency.INCONSISTENT
-
def setup():
return BarSource
diff --git a/tests/internals/pluginloading/customsource/pluginsources/foo.py b/tests/internals/pluginloading/customsource/pluginsources/foo.py
index 706c96f3b..fce5239b1 100644
--- a/tests/internals/pluginloading/customsource/pluginsources/foo.py
+++ b/tests/internals/pluginloading/customsource/pluginsources/foo.py
@@ -1,4 +1,4 @@
-from buildstream import Source, Consistency
+from buildstream import Source
class FooSource(Source):
@@ -11,9 +11,6 @@ class FooSource(Source):
def get_unique_key(self):
pass
- def get_consistency(self):
- return Consistency.INCONSISTENT
-
def setup():
return FooSource
diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index 4096b56b8..e21f84c89 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -83,7 +83,7 @@ def test_source_fetch(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
element = project.load_elements([element_name])[0]
- assert not element._source_cached()
+ assert not element._has_all_sources_in_source_cache()
source = list(element.sources())[0]
assert not share.get_source_proto(source._get_source_name())
@@ -112,7 +112,7 @@ def test_source_fetch(cli, tmpdir, datafiles):
assert "Pulled source" in res.stderr
# check that we have the source in the cas now and it's not fetched
- assert element._source_cached()
+ assert element._has_all_sources_in_source_cache()
assert os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) == []
@@ -129,7 +129,7 @@ def test_fetch_fallback(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
element = project.load_elements([element_name])[0]
- assert not element._source_cached()
+ assert not element._has_all_sources_in_source_cache()
source = list(element.sources())[0]
assert not share.get_source_proto(source._get_source_name())
@@ -145,7 +145,7 @@ def test_fetch_fallback(cli, tmpdir, datafiles):
assert ("SUCCESS Fetching from {}".format(repo.source_config(ref=ref)["url"])) in res.stderr
# Check that the source in both in the source dir and the local CAS
- assert element._source_cached()
+ assert element._has_all_sources_in_source_cache()
@pytest.mark.datafiles(DATA_DIR)
@@ -160,7 +160,7 @@ def test_pull_fail(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
element = project.load_elements([element_name])[0]
- assert not element._source_cached()
+ assert not element._has_all_sources_in_source_cache()
source = list(element.sources())[0]
# remove files and check that it doesn't build
@@ -191,7 +191,7 @@ def test_source_pull_partial_fallback_fetch(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
element = project.load_elements([element_name])[0]
- assert not element._source_cached()
+ assert not element._has_all_sources_in_source_cache()
source = list(element.sources())[0]
assert not share.get_artifact_proto(source._get_source_name())
diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py
index 771a94ca1..210bbfcff 100644
--- a/tests/sourcecache/push.py
+++ b/tests/sourcecache/push.py
@@ -84,7 +84,7 @@ def test_source_push_split(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
element = project.load_elements(["push.bst"])[0]
- assert not element._source_cached()
+ assert not element._has_all_sources_in_source_cache()
source = list(element.sources())[0]
# check we don't have it in the current cache
@@ -133,7 +133,7 @@ def test_source_push(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
element = project.load_elements(["push.bst"])[0]
- assert not element._source_cached()
+ assert not element._has_all_sources_in_source_cache()
source = list(element.sources())[0]
# check we don't have it in the current cache
diff --git a/tests/sourcecache/staging.py b/tests/sourcecache/staging.py
index 994adb32a..dbfc028f2 100644
--- a/tests/sourcecache/staging.py
+++ b/tests/sourcecache/staging.py
@@ -65,7 +65,7 @@ def test_source_staged(tmpdir, cli, datafiles):
# seems to be the only way to get the sources?
element = project.load_elements(["import-bin.bst"])[0]
source = list(element.sources())[0]
- assert element._source_cached()
+ assert element._has_all_sources_in_source_cache()
assert sourcecache.contains(source)
# Extract the file and check it's the same as the one we imported
@@ -99,7 +99,7 @@ def test_source_fetch(tmpdir, cli, datafiles):
element = project.load_elements(["import-dev.bst"])[0]
source = list(element.sources())[0]
- assert element._source_cached()
+ assert element._has_all_sources_in_source_cache()
# check that the directory structures are identical
digest = sourcecache.export(source)._get_digest()
@@ -132,7 +132,7 @@ def test_staged_source_build(tmpdir, datafiles, cli):
element = project.load_elements(["import-dev.bst"])[0]
# check consistency of the source
- assert not element._source_cached()
+ assert not element._has_all_sources_in_source_cache()
res = cli.run(project=project_dir, args=["build", "target.bst"])
res.assert_success()
diff --git a/tests/sources/git.py b/tests/sources/git.py
index 096338bbe..25976ffca 100644
--- a/tests/sources/git.py
+++ b/tests/sources/git.py
@@ -499,7 +499,9 @@ def test_unlisted_submodule(cli, tmpdir, datafiles, fail):
result.assert_main_error(ErrorDomain.PLUGIN, "git:unlisted-submodule")
else:
result.assert_success()
- assert "git:unlisted-submodule" in result.stderr
+ # The sources have been cached internally and successfully, so the plugin
+ # is no longer consulted to check whether the cache is correct.
+ assert "git:unlisted-submodule" not in result.stderr
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
@@ -541,7 +543,7 @@ def test_track_unlisted_submodule(cli, tmpdir, datafiles, fail):
assert "git:unlisted-submodule" not in result.stderr
# We won't get a warning/error when tracking either, the source
- # has not become Consistency.CACHED so the opportunity to check
+ # has not become cached so the opportunity to check
# for the warning has not yet arisen.
result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
@@ -613,7 +615,9 @@ def test_invalid_submodule(cli, tmpdir, datafiles, fail):
result.assert_main_error(ErrorDomain.PLUGIN, "git:invalid-submodule")
else:
result.assert_success()
- assert "git:invalid-submodule" in result.stderr
+ # The sources have been cached internally and successfully, so the plugin
+ # is no longer consulted to check whether the cache is correct.
+ assert "git:invalid-submodule" not in result.stderr
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
diff --git a/tests/sources/no-fetch-cached/plugins/sources/always_cached.py b/tests/sources/no-fetch-cached/plugins/sources/always_cached.py
index 623ab19ab..aef13279b 100644
--- a/tests/sources/no-fetch-cached/plugins/sources/always_cached.py
+++ b/tests/sources/no-fetch-cached/plugins/sources/always_cached.py
@@ -7,7 +7,7 @@ Used to test that BuildStream core does not call fetch() for cached sources.
"""
-from buildstream import Consistency, Source
+from buildstream import Source
class AlwaysCachedSource(Source):
@@ -20,8 +20,11 @@ class AlwaysCachedSource(Source):
def get_unique_key(self):
return None
- def get_consistency(self):
- return Consistency.CACHED
+ def is_resolved(self):
+ return True
+
+ def is_cached(self):
+ return True
def load_ref(self, node):
pass
diff --git a/tests/sources/previous_source_access/plugins/sources/foo_transform.py b/tests/sources/previous_source_access/plugins/sources/foo_transform.py
index 906a8f6be..dbc44c8aa 100644
--- a/tests/sources/previous_source_access/plugins/sources/foo_transform.py
+++ b/tests/sources/previous_source_access/plugins/sources/foo_transform.py
@@ -10,7 +10,7 @@ previous sources, and copies its contents to a file called "filetransform".
import os
import hashlib
-from buildstream import Consistency, Source, SourceError, utils
+from buildstream import Source, SourceError, utils
class FooTransformSource(Source):
@@ -39,19 +39,18 @@ class FooTransformSource(Source):
def get_unique_key(self):
return (self.ref,)
- def get_consistency(self):
- if self.ref is None:
- return Consistency.INCONSISTENT
+ def is_cached(self):
# If we have a file called "filetransform", verify that its checksum
- # matches our ref. Otherwise, it resolved but not cached.
+ # matches our ref. Otherwise, it is not cached.
fpath = os.path.join(self.mirror, "filetransform")
try:
with open(fpath, "rb") as f:
if hashlib.sha256(f.read()).hexdigest() == self.ref.strip():
- return Consistency.CACHED
- except Exception:
+ return True
+ except FileNotFoundError:
pass
- return Consistency.RESOLVED
+
+ return False
def get_ref(self):
return self.ref
diff --git a/tests/sources/project_key_test/plugins/sources/key-test.py b/tests/sources/project_key_test/plugins/sources/key-test.py
index 428846a3e..88a211738 100644
--- a/tests/sources/project_key_test/plugins/sources/key-test.py
+++ b/tests/sources/project_key_test/plugins/sources/key-test.py
@@ -1,6 +1,6 @@
import os
-from buildstream import Source, Consistency
+from buildstream import Source
class KeyTest(Source):
@@ -11,17 +11,17 @@ class KeyTest(Source):
pass
def configure(self, node):
- self.ref = node.get_bool("ref", False)
+ if node.get_scalar("ref", None).is_none():
+ self.ref = None
+ else:
+ self.ref = True
def get_unique_key(self):
assert self.ref
return "abcdefg"
- def get_consistency(self):
- if self.ref:
- return Consistency.RESOLVED
- else:
- return Consistency.INCONSISTENT
+ def is_cached(self):
+ return False
def load_ref(self, node):
pass
diff --git a/tox.ini b/tox.ini
index ea45c1acc..8eb16a61a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -6,9 +6,9 @@ envlist = py{35,36,37,38}
skip_missing_interpreters = true
isolated_build = true
-# Configuration variables to share accross environments
+# Configuration variables to share across environments
[config]
-BST_PLUGINS_EXPERIMENTAL_VERSION = 0.13.0
+BST_PLUGINS_EXPERIMENTAL_VERSION = 0.14.0
#
# Defaults for all environments