author    Raoul Hidalgo Charman <raoul.hidalgocharman@codethink.co.uk>  2019-02-28 15:13:38 +0000
committer Jürg Billeter <j@bitron.ch>  2019-03-14 07:12:34 +0000
commit    6a1e74619e0009ca5611bba025b8dcf08bec353e (patch)
tree      c75f062f36ebf71556cc5ed3dbe78d1f0b4693ec
parent    5de8de8ec76609c3eb3a75443d2566b55c29b440 (diff)
download  buildstream-6a1e74619e0009ca5611bba025b8dcf08bec353e.tar.gz
Integrate source cache with rest of buildstream
This involves introducing the new Consistency states `STAGED` and `BOTH`,
which represent a source that is only in the local CAS, and one that is
both in the local CAS and unstaged in the source directory, respectively.
Sources are staged into the local CAS for each element during the fetch
stage. If the sources are in the local consistency state `STAGED` when a
workspace is opened, the original sources are fetched.

Relevant tests affected by this have been updated.

Part of #440
-rw-r--r--  buildstream/_artifactcache.py                                   |  6
-rw-r--r--  buildstream/_basecache.py                                       |  2
-rw-r--r--  buildstream/_cas/cascache.py                                    |  2
-rw-r--r--  buildstream/_context.py                                         | 24
-rw-r--r--  buildstream/_frontend/cli.py                                    |  2
-rw-r--r--  buildstream/_frontend/widget.py                                 |  2
-rw-r--r--  buildstream/_loader/loader.py                                   | 41
-rw-r--r--  buildstream/_pipeline.py                                        |  5
-rw-r--r--  buildstream/_project.py                                         |  7
-rw-r--r--  buildstream/_scheduler/queues/fetchqueue.py                     |  8
-rw-r--r--  buildstream/_stream.py                                          | 30
-rw-r--r--  buildstream/element.py                                          | 93
-rw-r--r--  buildstream/source.py                                           |  7
-rw-r--r--  buildstream/types.py                                            |  3
-rw-r--r--  tests/artifactcache/expiry.py                                   |  6
-rw-r--r--  tests/frontend/buildtrack.py                                    |  2
-rw-r--r--  tests/frontend/project/sources/fetch_source.py                  |  3
-rw-r--r--  tests/internals/pluginloading/customsource/pluginsources/foo.py |  3
18 files changed, 187 insertions, 59 deletions
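
The core of the change is a fetch/commit/export cycle between element
sources and the local CAS, driven by the SourceCache calls that appear
throughout the diff below (contains(), commit(), export()). A minimal
sketch of that cycle, assuming simplified stand-in classes rather than
the real BuildStream types:

    # InMemorySourceCache and FakeSource are hypothetical stand-ins for
    # buildstream._sourcecache.SourceCache and buildstream.source.Source;
    # only the call pattern mirrors the diff below.
    class InMemorySourceCache:
        def __init__(self):
            self._store = {}                      # cache key -> staged tree

        def contains(self, source):
            return source.key in self._store

        def commit(self, source, previous_sources):
            # The real cache stages each previous source, then this one,
            # and stores the result in CAS under an '@sources/...' ref.
            self._store[source.key] = 'staged-tree-for-' + source.key

        def export(self, source):
            # Staging an element now imports this exported tree instead
            # of staging every source individually.
            return self._store[source.key]

    class FakeSource:
        def __init__(self, key):
            self.key = key

    cache = InMemorySourceCache()
    source = FakeSource('abc123')
    if not cache.contains(source):                # fetch stage
        cache.commit(source, previous_sources=[])
    assert cache.export(source) == 'staged-tree-for-abc123'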
diff --git a/buildstream/_artifactcache.py b/buildstream/_artifactcache.py
index e04648bee..49f07cb50 100644
--- a/buildstream/_artifactcache.py
+++ b/buildstream/_artifactcache.py
@@ -58,7 +58,7 @@ class ArtifactCache(BaseCache):
self._required_elements = set() # The elements required for this session
self.casquota.add_ref_callbacks(self.required_artifacts())
- self.casquota.add_remove_callbacks((lambda x: not x.startswith('sources/'), self.remove))
+ self.casquota.add_remove_callbacks((lambda x: not x.startswith('@'), self.remove))
# mark_required_elements():
#
@@ -178,7 +178,9 @@ class ArtifactCache(BaseCache):
# ([str]) - A list of artifact names as generated in LRU order
#
def list_artifacts(self, *, glob=None):
- return self.cas.list_refs(glob=glob)
+ return list(filter(
+ lambda x: not x.startswith('@'),
+ self.cas.list_refs(glob=glob)))
# remove():
#
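
Source cache refs share the CAS ref store with artifact refs, so artifact
listing and the expiry callbacks above now filter on the '@' namespace
prefix (the buildtrack test change further down shows the concrete
layout, refs/heads/@sources). A tiny self-contained illustration of the
predicate:

    # Illustration only: artifact and source refs live in one ref store;
    # everything under the '@' namespace is excluded from artifact listings.
    refs = ['project/element/key1', '@sources/key2', 'project/other/key3']
    artifacts = [ref for ref in refs if not ref.startswith('@')]
    assert artifacts == ['project/element/key1', 'project/other/key3']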
diff --git a/buildstream/_basecache.py b/buildstream/_basecache.py
index 56cb1d7a0..a8c58e48f 100644
--- a/buildstream/_basecache.py
+++ b/buildstream/_basecache.py
@@ -78,7 +78,7 @@ class BaseCache():
for spec_node in artifacts:
cache_specs.append(cls.spec_class._new_from_config_node(spec_node, basedir))
else:
- provenance = _yaml.node_get_provenance(config_node, key='artifacts')
+ provenance = _yaml.node_get_provenance(config_node, key=cls.config_node_name)
raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
"%s: 'artifacts' must be a single 'url:' mapping, or a list of mappings" %
(str(provenance)))
diff --git a/buildstream/_cas/cascache.py b/buildstream/_cas/cascache.py
index bb65269ad..04a09299a 100644
--- a/buildstream/_cas/cascache.py
+++ b/buildstream/_cas/cascache.py
@@ -1341,7 +1341,7 @@ class CASQuota:
default_conf = os.path.join(os.environ['XDG_CONFIG_HOME'],
'buildstream.conf')
detail = ("Aborted after removing {} refs and saving {} disk space.\n"
- "The remaining {} in the cache is required by the {} elements in your build plan\n\n"
+ "The remaining {} in the cache is required by the {} references in your build plan\n\n"
"There is not enough space to complete the build.\n"
"Please increase the cache-quota in {} and/or make more disk space."
.format(removed_ref_count,
diff --git a/buildstream/_context.py b/buildstream/_context.py
index 8a9f485be..286e2d223 100644
--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -32,6 +32,7 @@ from ._exceptions import LoadError, LoadErrorReason, BstError
from ._message import Message, MessageType
from ._profile import Topics, profile_start, profile_end
from ._artifactcache import ArtifactCache
+from ._sourcecache import SourceCache
from ._cas import CASCache, CASQuota, CASCacheUsage
from ._workspaces import Workspaces, WorkspaceProjectCache
from .plugin import _plugin_lookup
@@ -65,6 +66,9 @@ class Context():
# The directory where various sources are stored
self.sourcedir = None
+ # specs for source cache remotes
+ self.source_cache_specs = None
+
# The directory where build sandboxes will be created
self.builddir = None
@@ -145,6 +149,7 @@ class Context():
self._message_handler = None
self._message_depth = deque()
self._artifactcache = None
+ self._sourcecache = None
self._projects = []
self._project_overrides = {}
self._workspaces = None
@@ -162,6 +167,7 @@ class Context():
# Args:
# config (filename): The user specified configuration file, if any
#
+
# Raises:
# LoadError
#
@@ -201,7 +207,7 @@ class Context():
_yaml.node_validate(defaults, [
'cachedir', 'sourcedir', 'builddir', 'logdir', 'scheduler',
- 'artifacts', 'logging', 'projects', 'cache', 'prompt',
+ 'artifacts', 'source-caches', 'logging', 'projects', 'cache', 'prompt',
'workspacedir', 'remote-execution',
])
@@ -253,6 +259,9 @@ class Context():
# Load artifact share configuration
self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
+ # Load source cache config
+ self.source_cache_specs = SourceCache.specs_from_config_node(defaults)
+
self.remote_execution_specs = SandboxRemote.specs_from_config_node(defaults)
# Load pull build trees configuration
@@ -296,8 +305,10 @@ class Context():
# Shallow validation of overrides, parts of buildstream which rely
# on the overrides are expected to validate elsewhere.
for _, overrides in _yaml.node_items(self._project_overrides):
- _yaml.node_validate(overrides, ['artifacts', 'options', 'strict', 'default-mirror',
- 'remote-execution'])
+ _yaml.node_validate(overrides,
+ ['artifacts', 'source-caches', 'options',
+ 'strict', 'default-mirror',
+ 'remote-execution'])
profile_end(Topics.LOAD_CONTEXT, 'load')
@@ -318,6 +329,13 @@ class Context():
def get_cache_usage(self):
return CASCacheUsage(self.get_casquota())
+ @property
+ def sourcecache(self):
+ if not self._sourcecache:
+ self._sourcecache = SourceCache(self)
+
+ return self._sourcecache
+
# add_project():
#
# Add a project to the context.
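
The new sourcecache property uses the same lazily-constructed pattern as
the other Context caches: nothing is built until first access, and every
later access reuses the one instance. The pattern in isolation, with a
hypothetical stand-in for SourceCache:

    # ExpensiveCache is a hypothetical stand-in for SourceCache(self).
    class ExpensiveCache:
        def __init__(self, context):
            self.context = context    # imagine remote spec setup here

    class Context:
        def __init__(self):
            self._sourcecache = None

        @property
        def sourcecache(self):
            if not self._sourcecache:
                self._sourcecache = ExpensiveCache(self)
            return self._sourcecache

    ctx = Context()
    assert ctx.sourcecache is ctx.sourcecache    # built once, then reused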
diff --git a/buildstream/_frontend/cli.py b/buildstream/_frontend/cli.py
index 398bd85fc..5681fd103 100644
--- a/buildstream/_frontend/cli.py
+++ b/buildstream/_frontend/cli.py
@@ -137,7 +137,7 @@ def complete_artifact(orig_args, args, incomplete):
# element targets are valid artifact names
complete_list = complete_target(args, incomplete)
- complete_list.extend(ref for ref in ctx.artifactcache.cas.list_refs() if ref.startswith(incomplete))
+ complete_list.extend(ref for ref in ctx.artifactcache.list_artifacts() if ref.startswith(incomplete))
return complete_list
diff --git a/buildstream/_frontend/widget.py b/buildstream/_frontend/widget.py
index d1df06284..15bd9cf79 100644
--- a/buildstream/_frontend/widget.py
+++ b/buildstream/_frontend/widget.py
@@ -382,7 +382,7 @@ class LogLine(Widget):
line = p.fmt_subst(line, 'state', "failed", fg='red')
elif element._cached_success():
line = p.fmt_subst(line, 'state', "cached", fg='magenta')
- elif consistency == Consistency.RESOLVED:
+ elif consistency == Consistency.RESOLVED and not element._source_cached():
line = p.fmt_subst(line, 'state', "fetch needed", fg='red')
elif element._buildable():
line = p.fmt_subst(line, 'state', "buildable", fg='green')
diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index 1607c5b5e..9b91e91fe 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -523,28 +523,29 @@ class Loader():
element._preflight()
sources = list(element.sources())
- for idx, source in enumerate(sources):
- # Handle the case where a subproject needs to be fetched
- #
- if source.get_consistency() == Consistency.RESOLVED:
- if fetch_subprojects:
- if ticker:
- ticker(filename, 'Fetching subproject from {} source'.format(source.get_kind()))
- source._fetch(sources[0:idx])
- else:
- detail = "Try fetching the project with `bst source fetch {}`".format(filename)
- raise LoadError(LoadErrorReason.SUBPROJECT_FETCH_NEEDED,
- "Subproject fetch needed for junction: {}".format(filename),
+ if not element._source_cached():
+ for idx, source in enumerate(sources):
+ # Handle the case where a subproject needs to be fetched
+ #
+ if source.get_consistency() == Consistency.RESOLVED:
+ if fetch_subprojects:
+ if ticker:
+ ticker(filename, 'Fetching subproject from {} source'.format(source.get_kind()))
+ source._fetch(sources[0:idx])
+ else:
+ detail = "Try fetching the project with `bst source fetch {}`".format(filename)
+ raise LoadError(LoadErrorReason.SUBPROJECT_FETCH_NEEDED,
+ "Subproject fetch needed for junction: {}".format(filename),
+ detail=detail)
+
+ # Handle the case where a subproject has no ref
+ #
+ elif source.get_consistency() == Consistency.INCONSISTENT:
+ detail = "Try tracking the junction element with `bst source track {}`".format(filename)
+ raise LoadError(LoadErrorReason.SUBPROJECT_INCONSISTENT,
+ "Subproject has no ref for junction: {}".format(filename),
detail=detail)
- # Handle the case where a subproject has no ref
- #
- elif source.get_consistency() == Consistency.INCONSISTENT:
- detail = "Try tracking the junction element with `bst source track {}`".format(filename)
- raise LoadError(LoadErrorReason.SUBPROJECT_INCONSISTENT,
- "Subproject has no ref for junction: {}".format(filename),
- detail=detail)
-
workspace = element._get_workspace()
if workspace:
# If a workspace is open, load it from there instead
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index 7cf36f5a7..004776293 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -395,7 +395,8 @@ class Pipeline():
uncached = []
with self._context.timed_activity("Checking sources"):
for element in elements:
- if element._get_consistency() != Consistency.CACHED:
+ if element._get_consistency() < Consistency.CACHED and \
+ not element._source_cached():
uncached.append(element)
if uncached:
@@ -403,7 +404,7 @@ class Pipeline():
for element in uncached:
detail += " Following sources for element: {} are not cached:\n".format(element._get_full_name())
for source in element.sources():
- if source._get_consistency() != Consistency.CACHED:
+ if source._get_consistency() < Consistency.CACHED:
detail += " {}\n".format(source)
detail += '\n'
detail += "Try fetching these elements first with `bst source fetch`,\n" + \
diff --git a/buildstream/_project.py b/buildstream/_project.py
index 6cbba497f..21ea91683 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -34,6 +34,7 @@ from ._profile import Topics, profile_start, profile_end
from ._exceptions import LoadError, LoadErrorReason
from ._options import OptionPool
from ._artifactcache import ArtifactCache
+from ._sourcecache import SourceCache
from .sandbox import SandboxRemote
from ._elementfactory import ElementFactory
from ._sourcefactory import SourceFactory
@@ -140,6 +141,7 @@ class Project():
self._shell_host_files = [] # A list of HostMount objects
self.artifact_cache_specs = None
+ self.source_cache_specs = None
self.remote_execution_specs = None
self._sandbox = None
self._splits = None
@@ -333,7 +335,7 @@ class Project():
'artifacts', 'options',
'fail-on-overlap', 'shell', 'fatal-warnings',
'ref-storage', 'sandbox', 'mirrors', 'remote-execution',
- 'sources', '(@)'
+ 'sources', 'source-caches', '(@)'
])
# create_element()
@@ -672,6 +674,9 @@ class Project():
parent = self.junction._get_project()
self.artifact_cache_specs = parent.artifact_cache_specs + self.artifact_cache_specs
+ # Load source caches with pull/push config
+ self.source_cache_specs = SourceCache.specs_from_config_node(config, self.directory)
+
# Load remote-execution configuration for this project
project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
override_specs = SandboxRemote.specs_from_config_node(
diff --git a/buildstream/_scheduler/queues/fetchqueue.py b/buildstream/_scheduler/queues/fetchqueue.py
index fc11fd1d1..db5e470f9 100644
--- a/buildstream/_scheduler/queues/fetchqueue.py
+++ b/buildstream/_scheduler/queues/fetchqueue.py
@@ -35,13 +35,14 @@ class FetchQueue(Queue):
complete_name = "Fetched"
resources = [ResourceType.DOWNLOAD]
- def __init__(self, scheduler, skip_cached=False):
+ def __init__(self, scheduler, skip_cached=False, fetch_original=False):
super().__init__(scheduler)
self._skip_cached = skip_cached
+ self._fetch_original = fetch_original
def process(self, element):
- element._fetch()
+ element._fetch(fetch_original=self._fetch_original)
def status(self, element):
# state of dependencies may have changed, recalculate element state
@@ -62,7 +63,8 @@ class FetchQueue(Queue):
# This will automatically skip elements which
# have no sources.
- if element._get_consistency() == Consistency.CACHED:
+
+ if not element._should_fetch(self._fetch_original):
return QueueStatus.SKIP
return QueueStatus.READY
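
The queue's skip decision is delegated to element._should_fetch(),
defined in the element.py hunk below. Restated as a standalone truth
table:

    # Standalone restatement of Element._should_fetch() from this commit:
    # with fetch_original, only a CACHED on-disk copy lets us skip;
    # without it, a copy in the local CAS is enough.
    def should_fetch(consistency_is_cached, source_in_cas, fetch_original):
        if fetch_original:
            return not consistency_is_cached
        return not source_in_cas

    assert should_fetch(False, False, fetch_original=False)     # nothing cached yet
    assert not should_fetch(False, True, fetch_original=False)  # CAS copy suffices
    assert should_fetch(False, True, fetch_original=True)       # CAS copy not enough
    assert not should_fetch(True, False, fetch_original=True)   # on-disk copy suffices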
diff --git a/buildstream/_stream.py b/buildstream/_stream.py
index 5c880427c..f1600a8e9 100644
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -19,6 +19,8 @@
# Jürg Billeter <juerg.billeter@codethink.co.uk>
# Tristan Maat <tristan.maat@codethink.co.uk>
+import itertools
+import functools
import os
import sys
import stat
@@ -587,9 +589,9 @@ class Stream():
except_targets=except_targets,
fetch_subprojects=True)
- # Assert all sources are cached
+ # Assert all sources are cached in the source dir
if fetch:
- self._fetch(elements)
+ self._fetch(elements, fetch_original=True)
self._pipeline.assert_sources_cached(elements)
# Stage all sources determined by scope
@@ -636,7 +638,7 @@ class Stream():
track_elements = []
if track_first:
track_elements = elements
- self._fetch(elements, track_elements=track_elements)
+ self._fetch(elements, track_elements=track_elements, fetch_original=True)
expanded_directories = []
# To try to be more atomic, loop through the elements and raise any errors we can early
@@ -656,7 +658,9 @@ class Stream():
raise StreamError("Element '{}' already has workspace defined at: {}"
.format(target.name, workspace.get_absolute_path()))
- if not no_checkout and target._get_consistency() != Consistency.CACHED:
+ target_consistency = target._get_consistency()
+ if not no_checkout and target_consistency < Consistency.CACHED and \
+ not target._source_cached():
raise StreamError("Could not stage uncached source. For {} ".format(target.name) +
"Use `--track` to track and " +
"fetch the latest version of the " +
@@ -771,7 +775,7 @@ class Stream():
# Do the tracking first
if track_first:
- self._fetch(elements, track_elements=track_elements)
+ self._fetch(elements, track_elements=track_elements, fetch_original=True)
workspaces = self._context.get_workspaces()
@@ -1090,7 +1094,13 @@ class Stream():
# It must include all the artifacts which are required by the
# final product. Note that this is a superset of the build plan.
#
- self._artifacts.mark_required_elements(self._pipeline.dependencies(elements, Scope.ALL))
+ # use partial as we send this to both Artifact and Source caches
+ required_elements = functools.partial(self._pipeline.dependencies, elements, Scope.ALL)
+ self._artifacts.mark_required_elements(required_elements())
+
+ self._context.sourcecache.mark_required_sources(
+ itertools.chain.from_iterable(
+ [element.sources() for element in required_elements()]))
if selection == PipelineSelection.PLAN and dynamic_plan:
# We use a dynamic build plan, only request artifacts of top-level targets,
@@ -1181,8 +1191,9 @@ class Stream():
# Args:
# elements (list of Element): Elements to fetch
# track_elements (list of Element): Elements to track
+ # fetch_original (bool): Whether to fetch the original unstaged sources
#
- def _fetch(self, elements, *, track_elements=None):
+ def _fetch(self, elements, *, track_elements=None, fetch_original=False):
if track_elements is None:
track_elements = []
@@ -1195,7 +1206,8 @@ class Stream():
# Filter out elements with cached sources, only from the fetch plan
# let the track plan resolve new refs.
- cached = [elt for elt in fetch_plan if elt._get_consistency() == Consistency.CACHED]
+ cached = [elt for elt in fetch_plan
+ if not elt._should_fetch(fetch_original)]
fetch_plan = self._pipeline.subtract_elements(fetch_plan, cached)
# Construct queues, enqueue and run
@@ -1204,7 +1216,7 @@ class Stream():
if track_elements:
track_queue = TrackQueue(self._scheduler)
self._add_queue(track_queue, track=True)
- self._add_queue(FetchQueue(self._scheduler))
+ self._add_queue(FetchQueue(self._scheduler, fetch_original=fetch_original))
if track_elements:
self._enqueue_plan(track_elements, queue=track_queue)
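
mark_required_sources() wants one flat iterator over every source of
every required element; itertools.chain.from_iterable does the
flattening of the per-element source lists built above:

    import itertools

    # Each element contributes its own list of sources; the cache needs
    # a single flat iterator over all of them.
    sources_per_element = [['tar-source', 'patch-source'], ['git-source']]
    flat = itertools.chain.from_iterable(sources_per_element)
    assert list(flat) == ['tar-source', 'patch-source', 'git-source']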
diff --git a/buildstream/element.py b/buildstream/element.py
index a77f7e6dc..901a9507f 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -88,7 +88,7 @@ from . import _yaml
from ._variables import Variables
from ._versions import BST_CORE_ARTIFACT_VERSION
from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, \
- ErrorDomain
+ ErrorDomain, SourceCacheError
from .utils import UtilError
from . import Plugin, Consistency, Scope
from . import SandboxFlags, SandboxCommandError
@@ -956,11 +956,16 @@ class Element(Plugin):
element = meta.project.create_element(meta, first_pass=meta.first_pass)
cls.__instantiated_elements[meta] = element
- # Instantiate sources
+ # Instantiate sources and generate their keys
+ previous_sources = []
for meta_source in meta.sources:
meta_source.first_pass = meta.kind == "junction"
source = meta.project.create_source(meta_source,
first_pass=meta.first_pass)
+
+ source._generate_key(previous_sources)
+ previous_sources.append(source)
+
redundant_ref = source._load_ref()
element.__sources.append(source)
@@ -1080,7 +1085,8 @@ class Element(Plugin):
# (bool): Whether this element can currently be built
#
def _buildable(self):
- if self._get_consistency() != Consistency.CACHED:
+ if self._get_consistency() < Consistency.CACHED and \
+ not self._source_cached():
return False
for dependency in self.dependencies(Scope.BUILD):
@@ -1363,6 +1369,12 @@ class Element(Plugin):
self.__tracking_scheduled = False
self.__tracking_done = True
+ # update keys
+ sources = list(self.sources())
+ if sources:
+ source = sources.pop()
+ source._generate_key(sources)
+
self._update_state()
# _track():
@@ -1457,6 +1469,7 @@ class Element(Plugin):
# usebuildtree (bool): use the element's build tree as its source.
#
def _stage_sources_at(self, vdirectory, mount_workspaces=True, usebuildtree=False):
+
context = self._get_context()
# It's advantageous to have this temporary directory on
@@ -1486,10 +1499,20 @@ class Element(Plugin):
if import_dir.is_empty():
detail = "Element type either does not expect a buildtree or it was explictily cached without one."
self.warn("WARNING: {} Artifact contains an empty buildtree".format(self.name), detail=detail)
+
+ # No workspace or cached buildtree, stage source from source cache
else:
- # No workspace or cached buildtree, stage source directly
- for source in self.sources():
- source._stage(import_dir)
+ # Ensure sources are cached
+ self.__cache_sources()
+
+ if list(self.sources()):
+
+ sourcecache = self._get_context().sourcecache
+ try:
+ import_dir = sourcecache.export(list(self.sources())[-1])
+ except SourceCacheError as e:
+ raise ElementError("Error trying to export source for {}: {}"
+ .format(self.name, e))
with utils._deterministic_umask():
vdirectory.import_files(import_dir)
@@ -1946,8 +1969,12 @@ class Element(Plugin):
os.makedirs(context.builddir, exist_ok=True)
with utils._tempdir(dir=context.builddir, prefix='workspace-{}'
.format(self.normal_name)) as temp:
+ last_source = None
for source in self.sources():
- source._init_workspace(temp)
+ last_source = source
+
+ if last_source:
+ last_source._init_workspace(temp)
# Now hardlink the files into the workspace target.
utils.link_files(temp, workspace.get_absolute_path())
@@ -2038,13 +2065,26 @@ class Element(Plugin):
# Raises:
# SourceError: If one of the element sources has an error
#
- def _fetch(self):
+ def _fetch(self, fetch_original=False):
previous_sources = []
+ source = None
+ sourcecache = self._get_context().sourcecache
+
+ # check whether the final source is cached
+ for source in self.sources():
+ pass
+
+ if source and not fetch_original and sourcecache.contains(source):
+ return
+
for source in self.sources():
- if source._get_consistency() < Consistency.CACHED:
+ source_consistency = source._get_consistency()
+ if source_consistency != Consistency.CACHED:
source._fetch(previous_sources)
previous_sources.append(source)
+ self.__cache_sources()
+
# _calculate_cache_key():
#
# Calculates the cache key
@@ -2093,6 +2133,27 @@ class Element(Plugin):
return _cachekey.generate_key(cache_key_dict)
+ def _source_cached(self):
+ source = None
+ for source in self.sources():
+ pass
+ if source:
+ return self._get_context().sourcecache.contains(source)
+ else:
+ return True
+
+ def _should_fetch(self, fetch_original=False):
+ """ return bool of if we need to run the fetch stage for this element
+
+ Args:
+ fetch_original (bool): whether we need the original unstaged source
+ """
+ if (self._get_consistency() == Consistency.CACHED and fetch_original) or \
+ (self._source_cached() and not fetch_original):
+ return False
+ else:
+ return True
+
#############################################################
# Private Local Methods #
#############################################################
@@ -2124,8 +2185,7 @@ class Element(Plugin):
# Determine overall consistency of the element
for source in self.__sources:
source._update_state()
- source_consistency = source._get_consistency()
- self.__consistency = min(self.__consistency, source_consistency)
+ self.__consistency = min(self.__consistency, source._get_consistency())
# __can_build_incrementally()
#
@@ -2832,6 +2892,17 @@ class Element(Plugin):
return (subdir, excluded_subdirs)
+ # __cache_sources():
+ #
+ # Caches the sources into the local CAS
+ #
+ def __cache_sources(self):
+ sources = list(self.sources())
+ if sources:
+ sourcecache = self._get_context().sourcecache
+ if not sourcecache.contains(sources[-1]):
+ sources[-1]._cache(sources[:-1])
+
def _overlap_error_detail(f, forbidden_overlap_elements, elements):
if forbidden_overlap_elements:
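
Throughout element.py only the final source is checked, committed, or
exported (sources[-1], list(self.sources())[-1]). That is sound because
_generate_key(previous_sources) folds every preceding source into each
source's key, so the last key identifies the complete staged sequence.
A sketch of that chaining, using a hypothetical hash helper rather than
BuildStream's real key generator:

    import hashlib

    # Hypothetical sketch: each key commits to all previous keys, so the
    # final key identifies the whole, fully staged series of sources.
    def generate_key(own_unique_key, previous_keys):
        payload = '/'.join(previous_keys + [own_unique_key])
        return hashlib.sha256(payload.encode('utf-8')).hexdigest()

    keys = []
    for unique_key in ['tarball-ref-1', 'patch-ref-2']:
        keys.append(generate_key(unique_key, keys))

    # Checking the cache for keys[-1] is therefore enough to know whether
    # the fully staged source sequence is present in the local CAS.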
diff --git a/buildstream/source.py b/buildstream/source.py
index abbf758c9..b5c38335b 100644
--- a/buildstream/source.py
+++ b/buildstream/source.py
@@ -290,6 +290,8 @@ class Source(Plugin):
super().__init__("{}-{}".format(meta.element_name, meta.element_index),
context, project, provenance, "source")
+ self.__source_cache = context.sourcecache
+
self.__element_name = meta.element_name # The name of the element owning this source
self.__element_index = meta.element_index # The index of the source in the owning element's source list
self.__element_kind = meta.element_kind # The kind of the element owning this source
@@ -691,6 +693,7 @@ class Source(Plugin):
#
# Args:
# previous_sources (list): List of Sources listed prior to this source
+ # fetch_original (bool): whether to fetch full source, or use local CAS
#
def _fetch(self, previous_sources):
@@ -703,6 +706,10 @@ class Source(Plugin):
else:
self.__do_fetch()
+ def _cache(self, previous_sources):
+ # stage the source into the source cache
+ self.__source_cache.commit(self, previous_sources)
+
# Wrapper for stage() api which gives the source
# plugin a fully constructed path considering the
# 'directory' option
diff --git a/buildstream/types.py b/buildstream/types.py
index 23d78b08c..ba4b99eb7 100644
--- a/buildstream/types.py
+++ b/buildstream/types.py
@@ -76,8 +76,7 @@ class Consistency():
CACHED = 2
"""Cached
- Cached sources have a reference which is present in the local
- source cache. Only cached sources can be staged.
+ Sources have a cached unstaged copy in the source directory.
"""
diff --git a/tests/artifactcache/expiry.py b/tests/artifactcache/expiry.py
index 23dc61d0f..e39357534 100644
--- a/tests/artifactcache/expiry.py
+++ b/tests/artifactcache/expiry.py
@@ -297,11 +297,13 @@ def test_never_delete_required_track(cli, datafiles):
res.assert_main_error(ErrorDomain.STREAM, None)
res.assert_task_error(ErrorDomain.CAS, 'cache-too-full')
- # Expect the same result that we did in test_never_delete_required()
+ # Expect almost the same result as in test_never_delete_required()
+ # As the source will be downloaded first, we will be over the limit once
+ # the source for dep2.bst is downloaded
#
states = cli.get_element_states(project, ['target.bst'])
assert states['dep1.bst'] == 'cached'
- assert states['dep2.bst'] == 'cached'
+ assert states['dep2.bst'] == 'buildable'
assert states['dep3.bst'] != 'cached'
assert states['target.bst'] != 'cached'
diff --git a/tests/frontend/buildtrack.py b/tests/frontend/buildtrack.py
index 9c56fb4a0..9c3efadd8 100644
--- a/tests/frontend/buildtrack.py
+++ b/tests/frontend/buildtrack.py
@@ -125,6 +125,8 @@ def test_build_track(cli, datafiles, tmpdir, ref_storage,
# Delete element sources
source_dir = os.path.join(project, 'cache', 'sources')
shutil.rmtree(source_dir)
+ source_refs = os.path.join(project, 'cache', 'cas', 'refs', 'heads', '@sources')
+ shutil.rmtree(source_refs)
# Delete artifacts one by one and assert element states
for target in set(tracked):
diff --git a/tests/frontend/project/sources/fetch_source.py b/tests/frontend/project/sources/fetch_source.py
index d454f69e0..06596607b 100644
--- a/tests/frontend/project/sources/fetch_source.py
+++ b/tests/frontend/project/sources/fetch_source.py
@@ -62,6 +62,9 @@ class FetchSource(Source):
if not os.path.exists(output_dir):
raise SourceError("Directory '{}' does not exist".format(output_dir))
+ def stage(self, directory):
+ pass
+
def fetch(self):
for fetcher in self.fetchers:
fetcher.fetch()
diff --git a/tests/internals/pluginloading/customsource/pluginsources/foo.py b/tests/internals/pluginloading/customsource/pluginsources/foo.py
index d2b0d9c6d..8dd16801c 100644
--- a/tests/internals/pluginloading/customsource/pluginsources/foo.py
+++ b/tests/internals/pluginloading/customsource/pluginsources/foo.py
@@ -9,6 +9,9 @@ class FooSource(Source):
def configure(self, node):
pass
+ def get_unique_key(self):
+ pass
+
def get_consistency(self):
return Consistency.INCONSISTENT