From 4b62b9362870045a6203d01f2abf3e437bf23ae6 Mon Sep 17 00:00:00 2001 From: Raoul Hidalgo Charman Date: Mon, 11 Feb 2019 12:07:55 +0000 Subject: context: remove artifactdir Will check and move old artifact directory if it exists, and create symlink linking old directory to new. Part of #870 --- buildstream/_artifactcache.py | 14 +++++++------- buildstream/_context.py | 33 +++++++++++++++++++-------------- buildstream/_frontend/widget.py | 2 +- buildstream/element.py | 2 +- tests/artifactcache/cache_size.py | 3 +-- tests/artifactcache/expiry.py | 2 +- tests/frontend/pull.py | 4 ++-- tests/integration/artifact.py | 27 ++++++++++++++++----------- tests/integration/pullbuildtrees.py | 4 ++-- tests/integration/shellbuildtrees.py | 12 ++++-------- tests/internals/context.py | 6 +++--- tests/internals/pluginloading.py | 2 +- 12 files changed, 58 insertions(+), 53 deletions(-) diff --git a/buildstream/_artifactcache.py b/buildstream/_artifactcache.py index b72b20fda..cab9bff3c 100644 --- a/buildstream/_artifactcache.py +++ b/buildstream/_artifactcache.py @@ -87,7 +87,7 @@ class ArtifactCacheUsage(): class ArtifactCache(): def __init__(self, context): self.context = context - self.extractdir = os.path.join(context.artifactdir, 'extract') + self.extractdir = context.extractdir self.cas = context.get_cascache() @@ -894,7 +894,7 @@ class ArtifactCache(): # def _write_cache_size(self, size): assert isinstance(size, int) - size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE) + size_file_path = os.path.join(self.context.casdir, CACHE_SIZE_FILE) with utils.save_file_atomic(size_file_path, "w") as f: f.write(str(size)) @@ -907,7 +907,7 @@ class ArtifactCache(): # (int): The size of the artifact cache, as recorded in the file # def _read_cache_size(self): - size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE) + size_file_path = os.path.join(self.context.casdir, CACHE_SIZE_FILE) if not os.path.exists(size_file_path): return None @@ -942,7 +942,7 @@ class ArtifactCache(): try: cache_quota = utils._parse_size(self.context.config_cache_quota, - self.context.artifactdir) + self.context.casdir) except utils.UtilError as e: raise LoadError(LoadErrorReason.INVALID_DATA, "{}\nPlease specify the value in bytes or as a % of full disk space.\n" @@ -973,7 +973,7 @@ class ArtifactCache(): "has {total_size} total disk space.") .format( quota=self.context.config_cache_quota, - local_cache_path=self.context.artifactdir, + local_cache_path=self.context.casdir, total_size=utils._pretty_size(total_size)), reason='insufficient-storage-for-quota') elif cache_quota > cache_size + available_space: @@ -991,7 +991,7 @@ class ArtifactCache(): "The filesystem containing {local_cache_path} only " + "has {available_size} available.") .format(quota=self.context.config_cache_quota, - local_cache_path=self.context.artifactdir, + local_cache_path=self.context.casdir, available_size=available)) # Place a slight headroom (2e9 (2GB) on the cache_quota) into @@ -1019,7 +1019,7 @@ class ArtifactCache(): # about it's disk size and available bytes. 
# def _get_cache_volume_size(self): - return utils._get_volume_size(self.context.artifactdir) + return utils._get_volume_size(self.context.casdir) # _configured_remote_artifact_cache_specs(): diff --git a/buildstream/_context.py b/buildstream/_context.py index cb537cf60..f7f298f3b 100644 --- a/buildstream/_context.py +++ b/buildstream/_context.py @@ -70,15 +70,15 @@ class Context(): # The directory for CAS self.casdir = None + # Extract directory + self.extractdir = None + # The directory for temporary files self.tmpdir = None # Default root location for workspaces self.workspacedir = None - # The local binary artifact cache directory - self.artifactdir = None - # The locations from which to push and pull prebuilt artifacts self.artifact_cache_specs = None @@ -194,18 +194,16 @@ class Context(): "builddir is obsolete, use cachedir") if defaults.get('artifactdir'): - print("artifactdir is deprecated, use cachedir") - else: - defaults['artifactdir'] = os.path.join(defaults['cachedir'], 'artifacts') + raise LoadError(LoadErrorReason.INVALID_DATA, + "artifactdir is obsolete") _yaml.node_validate(defaults, [ - 'cachedir', 'sourcedir', 'builddir', 'artifactdir', 'logdir', - 'scheduler', 'artifacts', 'logging', 'projects', - 'cache', 'prompt', 'workspacedir', 'remote-execution', + 'cachedir', 'sourcedir', 'builddir', 'logdir', 'scheduler', + 'artifacts', 'logging', 'projects', 'cache', 'prompt', + 'workspacedir', 'remote-execution', ]) - for directory in ['cachedir', 'sourcedir', 'artifactdir', 'logdir', - 'workspacedir']: + for directory in ['cachedir', 'sourcedir', 'logdir', 'workspacedir']: # Allow the ~ tilde expansion and any environment variables in # path specification in the config files. # @@ -216,14 +214,21 @@ class Context(): setattr(self, directory, path) # add directories not set by users + self.extractdir = os.path.join(self.cachedir, 'extract') self.tmpdir = os.path.join(self.cachedir, 'tmp') self.casdir = os.path.join(self.cachedir, 'cas') self.builddir = os.path.join(self.cachedir, 'build') + # Move old artifact cas to cas if it exists and create symlink + old_casdir = os.path.join(self.cachedir, 'artifacts', 'cas') + if (os.path.exists(old_casdir) and not os.path.islink(old_casdir) and + not os.path.exists(self.casdir)): + os.rename(old_casdir, self.casdir) + os.symlink(self.casdir, old_casdir) + # Load quota configuration - # We need to find the first existing directory in the path of - # our artifactdir - the artifactdir may not have been created - # yet. + # We need to find the first existing directory in the path of our + # cachedir - the cachedir may not have been created yet. 
cache = _yaml.node_get(defaults, Mapping, 'cache') _yaml.node_validate(cache, ['quota', 'pull-buildtrees', 'cache-buildtrees']) diff --git a/buildstream/_frontend/widget.py b/buildstream/_frontend/widget.py index 2920d657d..9d1467178 100644 --- a/buildstream/_frontend/widget.py +++ b/buildstream/_frontend/widget.py @@ -495,10 +495,10 @@ class LogLine(Widget): values = OrderedDict() values["Configuration File"] = \ "Default Configuration" if not context.config_origin else context.config_origin + values["Cache Directory"] = context.cachedir values["Log Files"] = context.logdir values["Source Mirrors"] = context.sourcedir values["Build Area"] = context.builddir - values["Artifact Cache"] = context.artifactdir values["Strict Build Plan"] = "Yes" if context.get_strict() else "No" values["Maximum Fetch Tasks"] = context.sched_fetchers values["Maximum Build Tasks"] = context.sched_builders diff --git a/buildstream/element.py b/buildstream/element.py index d5ec5c436..5c06065b4 100644 --- a/buildstream/element.py +++ b/buildstream/element.py @@ -1448,7 +1448,7 @@ class Element(Plugin): # It's advantageous to have this temporary directory on # the same file system as the rest of our cache. with self.timed_activity("Staging sources", silent_nested=True), \ - utils._tempdir(dir=context.artifactdir, prefix='staging-temp') as temp_staging_directory: + utils._tempdir(dir=context.tmpdir, prefix='staging-temp') as temp_staging_directory: import_dir = temp_staging_directory diff --git a/tests/artifactcache/cache_size.py b/tests/artifactcache/cache_size.py index 980289157..be0f1989b 100644 --- a/tests/artifactcache/cache_size.py +++ b/tests/artifactcache/cache_size.py @@ -50,7 +50,6 @@ def test_cache_size_write(cli, tmpdir): create_project(project_dir) # Artifact cache must be in a known place - artifactdir = os.path.join(project_dir, "artifacts") casdir = os.path.join(project_dir, "cas") cli.configure({"cachedir": project_dir}) @@ -59,7 +58,7 @@ def test_cache_size_write(cli, tmpdir): res.assert_success() # Inspect the artifact cache - sizefile = os.path.join(artifactdir, CACHE_SIZE_FILE) + sizefile = os.path.join(casdir, CACHE_SIZE_FILE) assert os.path.isfile(sizefile) with open(sizefile, "r") as f: size_data = f.read() diff --git a/tests/artifactcache/expiry.py b/tests/artifactcache/expiry.py index ac122eb19..d67362dff 100644 --- a/tests/artifactcache/expiry.py +++ b/tests/artifactcache/expiry.py @@ -417,7 +417,7 @@ def test_extract_expiry(cli, datafiles, tmpdir): res.assert_success() # Get a snapshot of the extracts in advance - extractdir = os.path.join(project, 'cache', 'artifacts', 'extract', 'test', 'target') + extractdir = os.path.join(project, 'cache', 'extract', 'test', 'target') extracts = os.listdir(extractdir) assert(len(extracts) == 1) extract = os.path.join(extractdir, extracts[0]) diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py index 072b76e69..1326beccc 100644 --- a/tests/frontend/pull.py +++ b/tests/frontend/pull.py @@ -510,8 +510,8 @@ def test_pull_access_rights(caplog, cli, tmpdir, datafiles): shutil.rmtree(checkout) - artifacts = os.path.join(cli.directory, 'artifacts') - shutil.rmtree(artifacts) + casdir = os.path.join(cli.directory, 'cas') + shutil.rmtree(casdir) result = cli.run(project=project, args=['artifact', 'pull', 'compose-all.bst']) result.assert_success() diff --git a/tests/integration/artifact.py b/tests/integration/artifact.py index c36bc6aa9..35cad2599 100644 --- a/tests/integration/artifact.py +++ b/tests/integration/artifact.py @@ -53,7 +53,7 @@ def 
test_cache_buildtrees(cli, tmpdir, datafiles): create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3: cli.configure({ 'artifacts': {'url': share1.repo, 'push': True}, - 'artifactdir': os.path.join(str(tmpdir), 'artifacts') + 'cachedir': str(tmpdir) }) # Build autotools element with cache-buildtrees set via the @@ -69,20 +69,22 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): # to not cache buildtrees cache_key = cli.get_element_key(project, element_name) elementdigest = share1.has_artifact('test', element_name, cache_key) - buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello', + buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello', elementdigest.hash, 'buildtree') assert os.path.isdir(buildtreedir) assert not os.listdir(buildtreedir) # Delete the local cached artifacts, and assert the when pulled with --pull-buildtrees # that is was cached in share1 as expected with an empty buildtree dir - shutil.rmtree(os.path.join(str(tmpdir), 'artifacts')) + shutil.rmtree(os.path.join(str(tmpdir), 'cas')) + shutil.rmtree(os.path.join(str(tmpdir), 'extract')) assert cli.get_element_state(project, element_name) != 'cached' result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name]) assert element_name in result.get_pulled_elements() assert os.path.isdir(buildtreedir) assert not os.listdir(buildtreedir) - shutil.rmtree(os.path.join(str(tmpdir), 'artifacts')) + shutil.rmtree(os.path.join(str(tmpdir), 'cas')) + shutil.rmtree(os.path.join(str(tmpdir), 'extract')) # Assert that the default behaviour of pull to not include buildtrees on the artifact # in share1 which was purposely cached with an empty one behaves as expected. As such the @@ -91,13 +93,14 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): result = cli.run(project=project, args=['artifact', 'pull', element_name]) assert element_name in result.get_pulled_elements() assert not os.path.isdir(buildtreedir) - shutil.rmtree(os.path.join(str(tmpdir), 'artifacts')) + shutil.rmtree(os.path.join(str(tmpdir), 'cas')) + shutil.rmtree(os.path.join(str(tmpdir), 'extract')) # Repeat building the artifacts, this time with the default behaviour of caching buildtrees, # as such the buildtree dir should not be empty cli.configure({ 'artifacts': {'url': share2.repo, 'push': True}, - 'artifactdir': os.path.join(str(tmpdir), 'artifacts') + 'cachedir': str(tmpdir) }) result = cli.run(project=project, args=['build', element_name]) assert result.exit_code == 0 @@ -106,27 +109,29 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): # Cache key will be the same however the digest hash will have changed as expected, so reconstruct paths elementdigest = share2.has_artifact('test', element_name, cache_key) - buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello', + buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello', elementdigest.hash, 'buildtree') assert os.path.isdir(buildtreedir) assert os.listdir(buildtreedir) is not None # Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees # that it was cached in share2 as expected with a populated buildtree dir - shutil.rmtree(os.path.join(str(tmpdir), 'artifacts')) + shutil.rmtree(os.path.join(str(tmpdir), 'cas')) + shutil.rmtree(os.path.join(str(tmpdir), 'extract')) assert cli.get_element_state(project, element_name) != 'cached' result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', 
element_name]) assert element_name in result.get_pulled_elements() assert os.path.isdir(buildtreedir) assert os.listdir(buildtreedir) is not None - shutil.rmtree(os.path.join(str(tmpdir), 'artifacts')) + shutil.rmtree(os.path.join(str(tmpdir), 'cas')) + shutil.rmtree(os.path.join(str(tmpdir), 'extract')) # Clarify that the user config option for cache-buildtrees works as the cli # main option does. Point to share3 which does not have the artifacts cached to force # a build cli.configure({ 'artifacts': {'url': share3.repo, 'push': True}, - 'artifactdir': os.path.join(str(tmpdir), 'artifacts'), + 'cachedir': str(tmpdir), 'cache': {'cache-buildtrees': 'never'} }) result = cli.run(project=project, args=['build', element_name]) @@ -134,7 +139,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): assert cli.get_element_state(project, element_name) == 'cached' cache_key = cli.get_element_key(project, element_name) elementdigest = share3.has_artifact('test', element_name, cache_key) - buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello', + buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello', elementdigest.hash, 'buildtree') assert os.path.isdir(buildtreedir) assert not os.listdir(buildtreedir) diff --git a/tests/integration/pullbuildtrees.py b/tests/integration/pullbuildtrees.py index 7beb515e7..538ed8c37 100644 --- a/tests/integration/pullbuildtrees.py +++ b/tests/integration/pullbuildtrees.py @@ -21,8 +21,8 @@ DATA_DIR = os.path.join( # to false, which is the default user context. The cache has to be # cleared as just forcefully removing the refpath leaves dangling objects. def default_state(cli, tmpdir, share): - shutil.rmtree(os.path.join(str(tmpdir), 'artifacts')) shutil.rmtree(os.path.join(str(tmpdir), 'cas')) + shutil.rmtree(os.path.join(str(tmpdir), 'extract')) cli.configure({ 'artifacts': {'url': share.repo, 'push': False}, 'cachedir': str(tmpdir), @@ -75,7 +75,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles): result = cli2.run(project=project, args=['artifact', 'pull', element_name]) assert element_name in result.get_pulled_elements() elementdigest = share1.has_artifact('test', element_name, cli2.get_element_key(project, element_name)) - buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello', + buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello', elementdigest.hash, 'buildtree') assert not os.path.isdir(buildtreedir) result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name]) diff --git a/tests/integration/shellbuildtrees.py b/tests/integration/shellbuildtrees.py index e6b50c098..3fd761f05 100644 --- a/tests/integration/shellbuildtrees.py +++ b/tests/integration/shellbuildtrees.py @@ -62,7 +62,7 @@ def test_buildtree_staged_warn_empty_cached(cli_integration, tmpdir, datafiles): # Switch to a temp artifact cache dir to ensure the artifact is rebuilt, # caching an empty buildtree cli_integration.configure({ - 'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts')) + 'cachedir': str(tmpdir) }) res = cli_integration.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name]) @@ -139,7 +139,7 @@ def test_buildtree_from_failure_option_never(cli_integration, tmpdir, datafiles) # Switch to a temp artifact cache dir to ensure the artifact is rebuilt, # caching an empty buildtree cli_integration.configure({ - 'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts')) + 'cachedir': 
str(tmpdir) }) res = cli_integration.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name]) @@ -163,7 +163,7 @@ def test_buildtree_from_failure_option_failure(cli_integration, tmpdir, datafile # default behaviour (which is always) as the buildtree will explicitly have been # cached with content. cli_integration.configure({ - 'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts')) + 'cachedir': str(tmpdir) }) res = cli_integration.run(project=project, args=['--cache-buildtrees', 'failure', 'build', element_name]) @@ -195,10 +195,7 @@ def test_buildtree_pulled(cli, tmpdir, datafiles): assert cli.get_element_state(project, element_name) == 'cached' # Discard the cache - cli.configure({ - 'artifacts': {'url': share.repo, 'push': True}, - 'artifactdir': os.path.join(cli.directory, 'artifacts2') - }) + shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas'))) assert cli.get_element_state(project, element_name) != 'cached' # Pull from cache, ensuring cli options is set to pull the buildtree @@ -231,7 +228,6 @@ def test_buildtree_options(cli, tmpdir, datafiles): assert share.has_artifact('test', element_name, cli.get_element_key(project, element_name)) # Discard the cache - shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'artifacts'))) shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas'))) assert cli.get_element_state(project, element_name) != 'cached' diff --git a/tests/internals/context.py b/tests/internals/context.py index 5a4b37ac5..17d950f85 100644 --- a/tests/internals/context.py +++ b/tests/internals/context.py @@ -43,7 +43,7 @@ def test_context_load(context_fixture): context.load(config=os.devnull) assert(context.sourcedir == os.path.join(cache_home, 'buildstream', 'sources')) assert(context.builddir == os.path.join(cache_home, 'buildstream', 'build')) - assert(context.artifactdir == os.path.join(cache_home, 'buildstream', 'artifacts')) + assert(context.cachedir == os.path.join(cache_home, 'buildstream')) assert(context.logdir == os.path.join(cache_home, 'buildstream', 'logs')) @@ -57,7 +57,7 @@ def test_context_load_envvar(context_fixture): context.load(config=os.devnull) assert(context.sourcedir == os.path.join('/', 'some', 'path', 'buildstream', 'sources')) assert(context.builddir == os.path.join('/', 'some', 'path', 'buildstream', 'build')) - assert(context.artifactdir == os.path.join('/', 'some', 'path', 'buildstream', 'artifacts')) + assert(context.cachedir == os.path.join('/', 'some', 'path', 'buildstream')) assert(context.logdir == os.path.join('/', 'some', 'path', 'buildstream', 'logs')) # Reset the environment variable @@ -79,7 +79,7 @@ def test_context_load_user_config(context_fixture, datafiles): assert(context.sourcedir == os.path.expanduser('~/pony')) assert(context.builddir == os.path.join(cache_home, 'buildstream', 'build')) - assert(context.artifactdir == os.path.join(cache_home, 'buildstream', 'artifacts')) + assert(context.cachedir == os.path.join(cache_home, 'buildstream')) assert(context.logdir == os.path.join(cache_home, 'buildstream', 'logs')) diff --git a/tests/internals/pluginloading.py b/tests/internals/pluginloading.py index 574e59f12..607d253ee 100644 --- a/tests/internals/pluginloading.py +++ b/tests/internals/pluginloading.py @@ -16,7 +16,7 @@ def create_pipeline(tmpdir, basedir, target): context = Context() context.load(config=os.devnull) context.deploydir = os.path.join(str(tmpdir), 'deploy') - context.artifactdir = os.path.join(str(tmpdir), 'artifact') + context.casdir = os.path.join(str(tmpdir), 'cas') 
project = Project(basedir, context) def dummy_handler(message, context): -- cgit v1.2.1
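
For reference, the one-time migration this patch adds to Context.load() amounts to the
standalone sketch below. The helper name migrate_old_casdir and its cachedir argument are
illustrative only (the patch inlines this logic directly in _context.py): the old CAS under
<cachedir>/artifacts/cas is renamed to <cachedir>/cas exactly once, and a symlink is left at
the old location, presumably so anything still referring to the old path keeps resolving.

    import os

    def migrate_old_casdir(cachedir):
        # Hypothetical helper mirroring the logic added in _context.py:
        # move <cachedir>/artifacts/cas to <cachedir>/cas if the old
        # directory exists, is not already a symlink, and the new
        # location has not been created yet, then symlink old -> new.
        old_casdir = os.path.join(cachedir, 'artifacts', 'cas')
        new_casdir = os.path.join(cachedir, 'cas')
        if (os.path.exists(old_casdir) and not os.path.islink(old_casdir) and
                not os.path.exists(new_casdir)):
            os.rename(old_casdir, new_casdir)
            os.symlink(new_casdir, old_casdir)
        return new_casdir

Because the rename never runs when the new cas directory already exists, repeated loads are
a no-op, and an already-migrated cache (where the old path is just the symlink) is skipped
by the islink() check.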