summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorChandan Singh <csingh43@bloomberg.net>2018-09-22 00:32:04 +0100
committerChandan Singh <csingh43@bloomberg.net>2018-10-26 16:21:29 +0100
commit06a29a600cebc9226f1c524b60f5028f7d7ec5d5 (patch)
tree84e310a7ecea45c3fda8a2610d7575d7d895ee7b
parentebcc348d9eca81f3fd8405b0eb89942ef13db7e6 (diff)
downloadbuildstream-chandan/source-checkout.tar.gz
Add `bst source-checkout` commandchandan/source-checkout
As discussed in https://mail.gnome.org/archives/buildstream-list/2018-September/msg00064.html, add `bst source-checkout` command. This will allow users to checkout sources for a given target. * _frontend/cli.py: Add source-checkout command * _pipeline.py: Add assert_sources_cached() method * _stream.py: Add source_checkout method, abstract out __check_location_writable() method that used to be part of checkout()
-rw-r--r--buildstream/_frontend/cli.py27
-rw-r--r--buildstream/_pipeline.py27
-rw-r--r--buildstream/_stream.py124
-rw-r--r--tests/completions/completions.py1
-rw-r--r--tests/frontend/project/elements/checkout-deps.bst10
-rw-r--r--tests/frontend/project/files/etc-files/etc/buildstream/config1
-rw-r--r--tests/frontend/source_checkout.py121
7 files changed, 284 insertions, 27 deletions
diff --git a/buildstream/_frontend/cli.py b/buildstream/_frontend/cli.py
index 85632959f..53edc0b71 100644
--- a/buildstream/_frontend/cli.py
+++ b/buildstream/_frontend/cli.py
@@ -663,6 +663,33 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
##################################################################
+# Source Checkout Command #
+##################################################################
+@cli.command(name='source-checkout', short_help='Checkout sources for an element')
+@click.option('--except', 'except_', multiple=True,
+ type=click.Path(readable=False),
+ help="Except certain dependencies")
+@click.option('--deps', '-d', default='none',
+ type=click.Choice(['build', 'none', 'run', 'all']),
+ help='The dependencies whose sources to checkout (default: none)')
+@click.option('--fetch', 'fetch_', default=False, is_flag=True,
+ help='Fetch elements if they are not fetched')
+@click.argument('element',
+ type=click.Path(readable=False))
+@click.argument('location', type=click.Path())
+@click.pass_obj
+def source_checkout(app, element, location, deps, fetch_, except_):
+ """Checkout sources of an element to the specified location
+ """
+ with app.initialized():
+ app.stream.source_checkout(element,
+ location=location,
+ deps=deps,
+ fetch=fetch_,
+ except_targets=except_)
+
+
+##################################################################
# Workspace Command #
##################################################################
@cli.group(short_help="Manipulate developer workspaces")
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index 91f176caf..343a6d9c8 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -383,6 +383,33 @@ class Pipeline():
detail += " " + element._get_full_name() + "\n"
raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced")
+ # assert_sources_cached()
+ #
+ # Asserts that sources for the given list of elements are cached.
+ #
+ # Args:
+ # elements (list): The list of elements
+ #
+ def assert_sources_cached(self, elements):
+ uncached = []
+ with self._context.timed_activity("Checking sources"):
+ for element in elements:
+ if element._get_consistency() != Consistency.CACHED:
+ uncached.append(element)
+
+ if uncached:
+ detail = "Sources are not cached for the following elements:\n\n"
+ for element in uncached:
+ detail += " Following sources for element: {} are not cached:\n".format(element._get_full_name())
+ for source in element.sources():
+ if source._get_consistency() != Consistency.CACHED:
+ detail += " {}\n".format(source)
+ detail += '\n'
+ detail += "Try fetching these elements first with `bst fetch`,\n" + \
+ "or run this command with `--fetch` option\n"
+
+ raise PipelineError("Uncached sources", detail=detail, reason="uncached-sources")
+
#############################################################
# Private Methods #
#############################################################
diff --git a/buildstream/_stream.py b/buildstream/_stream.py
index e7a71978b..82de8fda3 100644
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -379,27 +379,7 @@ class Stream():
elements, _ = self._load((target,), (), fetch_subprojects=True)
target = elements[0]
- if not tar:
- try:
- os.makedirs(location, exist_ok=True)
- except OSError as e:
- raise StreamError("Failed to create checkout directory: '{}'"
- .format(e)) from e
-
- if not tar:
- if not os.access(location, os.W_OK):
- raise StreamError("Checkout directory '{}' not writable"
- .format(location))
- if not force and os.listdir(location):
- raise StreamError("Checkout directory '{}' not empty"
- .format(location))
- elif os.path.exists(location) and location != '-':
- if not os.access(location, os.W_OK):
- raise StreamError("Output file '{}' not writable"
- .format(location))
- if not force and os.path.exists(location):
- raise StreamError("Output file '{}' already exists"
- .format(location))
+ self._check_location_writable(location, force=force, tar=tar)
# Stage deps into a temporary sandbox first
try:
@@ -436,6 +416,42 @@ class Stream():
raise StreamError("Error while staging dependencies into a sandbox"
": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+ # source_checkout()
+ #
+ # Checkout sources of the target element to the specified location
+ #
+ # Args:
+ # target (str): The target element whose sources to checkout
+ # location (str): Location to checkout the sources to
+ # deps (str): The dependencies to checkout
+ # fetch (bool): Whether to fetch missing sources
+ # except_targets (list): List of targets to except from staging
+ #
+ def source_checkout(self, target, *,
+ location=None,
+ deps='none',
+ fetch=False,
+ except_targets=()):
+
+ self._check_location_writable(location)
+
+ elements, _ = self._load((target,), (),
+ selection=deps,
+ except_targets=except_targets,
+ fetch_subprojects=True)
+
+ # Assert all sources are cached
+ if fetch:
+ self._fetch(elements)
+ self._pipeline.assert_sources_cached(elements)
+
+ # Stage all sources determined by scope
+ try:
+ self._write_element_sources(location, elements)
+ except BstError as e:
+ raise StreamError("Error while writing sources"
+ ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+
# workspace_open
#
# Open a project workspace
@@ -719,7 +735,7 @@ class Stream():
if self._write_element_script(source_directory, element)
]
- self._write_element_sources(tempdir, elements)
+ self._write_element_sources(os.path.join(tempdir, "source"), elements)
self._write_build_script(tempdir, elements)
self._collect_sources(tempdir, tar_location,
target.normal_name, compression)
@@ -1061,6 +1077,39 @@ class Stream():
self._enqueue_plan(fetch_plan)
self._run()
+ # _check_location_writable()
+ #
+ # Check if given location is writable.
+ #
+ # Args:
+ # location (str): Destination path
+ # force (bool): Allow files to be overwritten
+ # tar (bool): Whether destination is a tarball
+ #
+ # Raises:
+ # (StreamError): If the destination is not writable
+ #
+ def _check_location_writable(self, location, force=False, tar=False):
+ if not tar:
+ try:
+ os.makedirs(location, exist_ok=True)
+ except OSError as e:
+ raise StreamError("Failed to create destination directory: '{}'"
+ .format(e)) from e
+ if not os.access(location, os.W_OK):
+ raise StreamError("Destination directory '{}' not writable"
+ .format(location))
+ if not force and os.listdir(location):
+ raise StreamError("Destination directory '{}' not empty"
+ .format(location))
+ elif os.path.exists(location) and location != '-':
+ if not os.access(location, os.W_OK):
+ raise StreamError("Output file '{}' not writable"
+ .format(location))
+ if not force and os.path.exists(location):
+ raise StreamError("Output file '{}' already exists"
+ .format(location))
+
# Helper function for checkout()
#
def _checkout_hardlinks(self, sandbox_vroot, directory):
@@ -1082,11 +1131,10 @@ class Stream():
# Write all source elements to the given directory
def _write_element_sources(self, directory, elements):
for element in elements:
- source_dir = os.path.join(directory, "source")
- element_source_dir = os.path.join(source_dir, element.normal_name)
- os.makedirs(element_source_dir)
-
- element._stage_sources_at(element_source_dir)
+ element_source_dir = self._get_element_dirname(directory, element)
+ if list(element.sources()):
+ os.makedirs(element_source_dir)
+ element._stage_sources_at(element_source_dir)
# Write a master build script to the sandbox
def _write_build_script(self, directory, elements):
@@ -1115,3 +1163,25 @@ class Stream():
with tarfile.open(tar_name, permissions) as tar:
tar.add(directory, arcname=element_name)
+
+ # _get_element_dirname()
+ #
+ # Get path to directory for an element based on its normal name.
+ #
+ # For cross-junction elements, the path will be prefixed with the name
+ # of the junction element.
+ #
+ # Args:
+ # directory (str): path to base directory
+ # element (Element): the element
+ #
+ # Returns:
+ # (str): Path to directory for this element
+ #
+ def _get_element_dirname(self, directory, element):
+ parts = [element.normal_name]
+ while element._get_project() != self._project:
+ element = element._get_project().junction
+ parts.append(element.normal_name)
+
+ return os.path.join(directory, *reversed(parts))
diff --git a/tests/completions/completions.py b/tests/completions/completions.py
index e6d15e68a..5f7ce7415 100644
--- a/tests/completions/completions.py
+++ b/tests/completions/completions.py
@@ -15,6 +15,7 @@ MAIN_COMMANDS = [
'push ',
'shell ',
'show ',
+ 'source-checkout ',
'source-bundle ',
'track ',
'workspace '
diff --git a/tests/frontend/project/elements/checkout-deps.bst b/tests/frontend/project/elements/checkout-deps.bst
new file mode 100644
index 000000000..a2c1d93cc
--- /dev/null
+++ b/tests/frontend/project/elements/checkout-deps.bst
@@ -0,0 +1,10 @@
+kind: import
+description: It is important for this element to have both build and runtime dependencies
+sources:
+- kind: local
+ path: files/etc-files
+depends:
+- filename: import-dev.bst
+ type: build
+- filename: import-bin.bst
+ type: runtime
diff --git a/tests/frontend/project/files/etc-files/etc/buildstream/config b/tests/frontend/project/files/etc-files/etc/buildstream/config
new file mode 100644
index 000000000..04204c7c9
--- /dev/null
+++ b/tests/frontend/project/files/etc-files/etc/buildstream/config
@@ -0,0 +1 @@
+config
diff --git a/tests/frontend/source_checkout.py b/tests/frontend/source_checkout.py
new file mode 100644
index 000000000..58dfdbd42
--- /dev/null
+++ b/tests/frontend/source_checkout.py
@@ -0,0 +1,121 @@
+import os
+import pytest
+
+from tests.testutils import cli
+
+from buildstream import utils, _yaml
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
+
+# Project directory
+DATA_DIR = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)),
+ 'project',
+)
+
+
+def generate_remote_import_element(input_path, output_path):
+ return {
+ 'kind': 'import',
+ 'sources': [
+ {
+ 'kind': 'remote',
+ 'url': 'file://{}'.format(input_path),
+ 'filename': output_path,
+ 'ref': utils.sha256sum(input_path),
+ }
+ ]
+ }
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_source_checkout(datafiles, cli):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ checkout = os.path.join(cli.directory, 'source-checkout')
+ target = 'checkout-deps.bst'
+
+ result = cli.run(project=project, args=['source-checkout', target, '--deps', 'none', checkout])
+ result.assert_success()
+
+ assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize('deps', [('build'), ('none'), ('run'), ('all')])
+def test_source_checkout_deps(datafiles, cli, deps):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ checkout = os.path.join(cli.directory, 'source-checkout')
+ target = 'checkout-deps.bst'
+
+ result = cli.run(project=project, args=['source-checkout', target, '--deps', deps, checkout])
+ result.assert_success()
+
+ # Sources of the target
+ if deps == 'build':
+ assert not os.path.exists(os.path.join(checkout, 'checkout-deps'))
+ else:
+ assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+
+ # Sources of the target's build dependencies
+ if deps in ('build', 'all'):
+ assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
+ else:
+ assert not os.path.exists(os.path.join(checkout, 'import-dev'))
+
+ # Sources of the target's runtime dependencies
+ if deps in ('run', 'all'):
+ assert os.path.exists(os.path.join(checkout, 'import-bin', 'usr', 'bin', 'hello'))
+ else:
+ assert not os.path.exists(os.path.join(checkout, 'import-bin'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_source_checkout_except(datafiles, cli):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ checkout = os.path.join(cli.directory, 'source-checkout')
+ target = 'checkout-deps.bst'
+
+ result = cli.run(project=project, args=['source-checkout', target,
+ '--deps', 'all',
+ '--except', 'import-bin.bst',
+ checkout])
+ result.assert_success()
+
+ # Sources for the target should be present
+ assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+
+ # Sources for import-bin.bst should not be present
+ assert not os.path.exists(os.path.join(checkout, 'import-bin'))
+
+ # Sources for other dependencies should be present
+ assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize('fetch', [(False), (True)])
+def test_source_checkout_fetch(datafiles, cli, fetch):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ checkout = os.path.join(cli.directory, 'source-checkout')
+ target = 'remote-import-dev.bst'
+ target_path = os.path.join(project, 'elements', target)
+
+ # Create an element with remote source
+ element = generate_remote_import_element(
+ os.path.join(project, 'files', 'dev-files', 'usr', 'include', 'pony.h'),
+ 'pony.h')
+ _yaml.dump(element, target_path)
+
+ # Testing --fetch option requires that we do not have the sources
+ # cached already
+ assert cli.get_element_state(project, target) == 'fetch needed'
+
+ args = ['source-checkout']
+ if fetch:
+ args += ['--fetch']
+ args += [target, checkout]
+ result = cli.run(project=project, args=args)
+
+ if fetch:
+ result.assert_success()
+ assert os.path.exists(os.path.join(checkout, 'remote-import-dev', 'pony.h'))
+ else:
+ result.assert_main_error(ErrorDomain.PIPELINE, 'uncached-sources')