summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSam Thursfield <sam.thursfield@codethink.co.uk>2014-07-24 16:32:21 +0100
committerSam Thursfield <sam.thursfield@codethink.co.uk>2014-09-11 13:25:16 +0100
commit199d843c09f1436a3e418ebe9cc572764d75978a (patch)
tree334d13948553d0cb52da976a5db8fbb2b8df037e
parent7eb8cc2d54f435f6a5d52c089282fc46380c043a (diff)
downloadmorph-199d843c09f1436a3e418ebe9cc572764d75978a.tar.gz
Add import/ tools
This is a generic tool which allows using metadata from foreign packaging systems to create morphologies. So far it supports RubyGems, but it should be extendable to other packaging systems. It is not complete and lacks many things.
-rw-r--r--import/main.py618
-rwxr-xr-ximport/rubygem.to_chunk430
-rwxr-xr-ximport/rubygem.to_lorry188
-rwxr-xr-xscripts/import-rubygem189
4 files changed, 1425 insertions, 0 deletions
diff --git a/import/main.py b/import/main.py
new file mode 100644
index 00000000..3ef66fe1
--- /dev/null
+++ b/import/main.py
@@ -0,0 +1,618 @@
+#!/usr/bin/python
+# Import foreign packaging systems into Baserock
+#
+# Copyright (C) 2014 Codethink Limited
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+
+import cliapp
+import morphlib
+import networkx
+
+import contextlib
+import copy
+import json
+import logging
+import os
+import sys
+
+from logging import debug
+
+
+@contextlib.contextmanager
+def cwd(path):
+    '''Context manager: temporarily change working directory to 'path'.
+
+    The previous working directory is restored on exit, even if the
+    body raises an exception.
+
+    '''
+    old_cwd = os.getcwd()
+    try:
+        os.chdir(path)
+        yield
+    finally:
+        os.chdir(old_cwd)
+
+
+class LorrySet(object):
+    '''Manages a set of .lorry files.
+
+    The structure of .lorry files makes the code a little more confusing than
+    I would like. A lorry "entry" is a dict of one entry mapping name to info.
+    A lorry "file" is a dict of one or more of these entries merged together.
+    If it were a list of entries with 'name' fields, the code would be neater.
+
+    '''
+    def __init__(self, lorries_path):
+        # Parse any existing .lorry files below 'lorries_path' into
+        # self.data (entry name -> entry info); create the directory if
+        # it does not exist yet.
+        self.path = lorries_path
+
+        if os.path.exists(lorries_path):
+            self.data = self.parse_all_lorries()
+        else:
+            os.makedirs(lorries_path)
+            self.data = {}
+
+    def all_lorry_files(self):
+        '''Yield the path of every .lorry file below self.path.'''
+        for dirpath, dirnames, filenames in os.walk(self.path):
+            for filename in filenames:
+                if filename.endswith('.lorry'):
+                    yield os.path.join(dirpath, filename)
+
+    def parse_all_lorries(self):
+        '''Parse every .lorry file into one combined dict of entries.
+
+        Raises if two files define an entry with the same name.
+
+        '''
+        lorry_set = {}
+        for lorry_file in self.all_lorry_files():
+            with open(lorry_file, 'r') as f:
+                lorry = json.load(f)
+
+            lorry_items = lorry.items()
+
+            for key, value in lorry_items:
+                if key in lorry_set:
+                    raise Exception(
+                        '%s: duplicates existing lorry %s' % (lorry_file, key))
+
+            lorry_set.update(lorry_items)
+
+        return lorry_set
+
+    def get_lorry(self, name):
+        '''Return the single-entry lorry dict for entry 'name'.'''
+        return {name: self.data[name]}
+
+    def find_lorry_for_package(self, kind, package_name):
+        '''Find the entry whose 'x-products-<kind>' field lists the package.
+
+        Returns a single-entry lorry dict, or None if nothing matches.
+
+        '''
+        key = 'x-products-%s' % kind
+        for name, lorry in self.data.iteritems():
+            products = lorry.get(key, [])
+            for entry in products:
+                if entry == package_name:
+                    return {name: lorry}
+
+        return None
+
+    def _check_for_conflicts_in_standard_fields(self, existing, new):
+        '''Ensure that two lorries for the same project do actually match.'''
+        for field, value in existing.iteritems():
+            if field.startswith('x-'):
+                # Extension fields (x-products-*, etc.) are merged, not
+                # compared.
+                continue
+            if field == 'url':
+                # FIXME: need a much better way of detecting whether the URLs
+                # are equivalent ... right now HTTP vs. HTTPS will cause an
+                # error, for example!
+                matches = (value.rstrip('/') == new[field].rstrip('/'))
+            else:
+                matches = (value == new[field])
+            if not matches:
+                raise Exception(
+                    'Lorry %s conflicts with existing entry %s at field %s' %
+                    (new, existing, field))
+
+    def _merge_products_fields(self, existing, new):
+        '''Merge the x-products- fields from new lorry into an existing one.'''
+        is_product_field = lambda x: x.startswith('x-products-')
+
+        existing_fields = [f for f in existing.iterkeys() if
+                           is_product_field(f)]
+        new_fields = [f for f in new.iterkeys() if f not in existing_fields and
+                      is_product_field(f)]
+
+        for field in existing_fields:
+            # Extend then de-duplicate; note the list order is not
+            # preserved after the set() round-trip.
+            existing[field].extend(new[field])
+            existing[field] = list(set(existing[field]))
+
+        for field in new_fields:
+            existing[field] = new[field]
+
+    def add(self, filename, lorry_entry):
+        '''Add a single-entry lorry dict to the set and write it to disk.
+
+        If an entry with the same project name already exists it must
+        agree on all standard fields; the x-products- fields of the two
+        are then merged. The result is saved as '<filename>.lorry'
+        below self.path.
+
+        '''
+        logging.debug('Adding %s to lorryset', filename)
+
+        filename = os.path.join(self.path, '%s.lorry' % filename)
+
+        assert len(lorry_entry) == 1
+
+        project_name = lorry_entry.keys()[0]
+        info = lorry_entry.values()[0]
+
+        if len(project_name) == 0:
+            raise cliapp.AppException(
+                'Invalid lorry %s: %s' % (filename, lorry_entry))
+
+        if project_name in self.data:
+            stored_lorry = self.get_lorry(project_name)
+
+            self._check_for_conflicts_in_standard_fields(
+                stored_lorry[project_name], lorry_entry[project_name])
+            self._merge_products_fields(
+                stored_lorry[project_name], lorry_entry[project_name])
+            lorry_entry = stored_lorry
+        else:
+            self.data[project_name] = info
+
+        with morphlib.savefile.SaveFile(filename, 'w') as f:
+            json.dump(lorry_entry, f, indent=4)
+
+
+# FIXME: this tool extends the morphology format to store
+# packaging-system-specific dependency information. Here is a hack to make that
+# work. Long term, we must either make 'dependency' field an official thing, or
+# communicate the dependency information in a separate way (which would be a
+# bit more code than this, I think).
+class MorphologyLoader(morphlib.morphloader.MorphologyLoader):
+    pass
+# Register the two extension fields with a default of [] so the loader
+# treats them like any other optional chunk field.
+MorphologyLoader._static_defaults['chunk']['x-build-dependencies-rubygem'] = []
+MorphologyLoader._static_defaults['chunk']['x-runtime-dependencies-rubygem'] = []
+
+
+class MorphologySet(morphlib.morphset.MorphologySet):
+    '''A set of morphologies backed by a directory of .morph files.'''
+
+    def __init__(self, path):
+        # Load any .morph files already under 'path'; create the
+        # directory if it does not exist yet.
+        super(MorphologySet, self).__init__()
+
+        self.path = path
+        self.loader = MorphologyLoader()
+
+        if os.path.exists(path):
+            self.load_all_morphologies()
+        else:
+            os.makedirs(path)
+
+    def load_all_morphologies(self):
+        '''Parse every .morph file under self.path into the set.'''
+        logging.info('Loading all .morph files under %s', self.path)
+
+        class FakeGitDir(morphlib.gitdir.GitDirectory):
+            '''Ugh
+
+            This is here because the default constructor will search up the
+            directory hierarchy until it finds a '.git' directory, but that
+            may be totally the wrong place for our purpose: we don't have a
+            Git directory at all.
+
+            '''
+            def __init__(self, path):
+                self.dirname = path
+                self._config = {}
+
+        gitdir = FakeGitDir(self.path)
+        finder = morphlib.morphologyfinder.MorphologyFinder(gitdir)
+        loader = MorphologyLoader()
+        for filename in (f for f in finder.list_morphologies()
+                         if not gitdir.is_symlink(f)):
+            text = finder.read_morphology(filename)
+            morph = loader.load_from_string(text, filename=filename)
+            # repo/ref are not stored in chunk morph files; they are
+            # filled in later (see find_or_create_chunk_morph).
+            morph.repo_url = None  # self.root_repository_url
+            morph.ref = None  # self.system_branch_name
+            self.add_morphology(morph)
+
+    def get_morphology(self, repo_url, ref, filename):
+        '''Look up a morphology by (repo_url, ref, filename); None if absent.'''
+        return self._get_morphology(repo_url, ref, filename)
+
+    def save_morphology(self, filename, morphology):
+        '''Add 'morphology' to the set and write it below self.path.'''
+        self.add_morphology(morphology)
+        morphology_to_save = copy.copy(morphology)
+        # Strip default-valued fields so the file on disk stays minimal.
+        self.loader.unset_defaults(morphology_to_save)
+        filename = os.path.join(self.path, filename)
+        self.loader.save_to_file(filename, morphology_to_save)
+
+
+class GitDirectory(morphlib.gitdir.GitDirectory):
+    def has_ref(self, ref):
+        '''Return True if 'ref' resolves in this repository, else False.'''
+        try:
+            self._rev_parse(ref)
+            return True
+        except morphlib.gitdir.InvalidRefError:
+            return False
+
+
+class BaserockImportException(cliapp.AppException):
+    '''Raised when part of the import process fails for a package.'''
+    pass
+
+
+class Package(object):
+    '''A package in the processing queue.
+
+    In order to provide helpful errors, this item keeps track of what
+    packages depend on it, and hence of why it was added to the queue.
+
+    '''
+    def __init__(self, name, version):
+        self.name = name
+        self.version = version
+        # 'name-version' strings of the packages that pulled this one
+        # into the queue; used in error messages via __str__.
+        self.required_by = []
+        self.morphology = None
+        self.is_build_dep = False
+        # The version actually checked out, which can differ from the
+        # requested one (e.g. when no matching tag was found).
+        self.version_in_use = version
+
+    def __str__(self):
+        if len(self.required_by) > 0:
+            required_msg = ', '.join(self.required_by)
+            required_msg = ', required by: ' + required_msg
+        else:
+            required_msg = ''
+        return '%s-%s%s' % (self.name, self.version, required_msg)
+
+    def add_required_by(self, item):
+        '''Record that package 'item' depends on this one.'''
+        self.required_by.append('%s-%s' % (item.name, item.version))
+
+    def match(self, name, version):
+        '''Return True if this package is exactly 'name' at 'version'.'''
+        return (self.name==name and self.version==version)
+
+    def set_morphology(self, morphology):
+        self.morphology = morphology
+
+    def set_is_build_dep(self, is_build_dep):
+        self.is_build_dep = is_build_dep
+
+    def set_version_in_use(self, version_in_use):
+        self.version_in_use = version_in_use
+
+
+def find(iterable, match):
+    '''Return the first item of 'iterable' for which match(item) is true.
+
+    Returns None when no item matches.
+
+    '''
+    return next((x for x in iterable if match(x)), None)
+
+
+class BaserockImportApplication(cliapp.Application):
+    '''Command-line driver for importing foreign packages into Baserock.
+
+    Subcommands (cmd_*) delegate the packaging-system-specific work to
+    external '<kind>.to_lorry' and '<kind>.to_chunk' tools.
+
+    '''
+    def add_settings(self):
+        '''Declare the --lorries-dir/--definitions-dir/--checkouts-dir options.'''
+        self.settings.string(['lorries-dir'],
+                             'location for Lorry files',
+                             metavar='PATH',
+                             default=os.path.abspath('./lorries'))
+        self.settings.string(['definitions-dir'],
+                             'location for morphology files',
+                             metavar='PATH',
+                             default=os.path.abspath('./definitions'))
+        self.settings.string(['checkouts-dir'],
+                             'location for Git checkouts',
+                             metavar='PATH',
+                             default=os.path.abspath('./checkouts'))
+
+    def setup_logging_format(self):
+        # FIXME: due to a bug in cliapp, this method is actually
+        # never called! :(
+        return "main: %(levelname)s: %(message)s"
+
+    def setup_logging_for_import_plugins(self):
+        '''Pass log destination/level to the import plugins via environment.'''
+        log = self.settings['log']
+
+        if log == '/dev/stdout':
+            # The plugins output results on /dev/stdout, logs would interfere
+            debug('Redirecting import plugin logs to /dev/stderr')
+            log = '/dev/stderr'
+
+        os.environ['BASEROCK_IMPORT_LOG'] = log
+        os.environ['BASEROCK_IMPORT_LOG_LEVEL'] = self.settings['log-level']
+
+    def process_args(self, *args):
+        self.setup_logging_for_import_plugins()
+        super(BaserockImportApplication, self).process_args(*args)
+
+    def status(self, msg, *args):
+        '''Print a %-formatted status message to stdout and the log.'''
+        # NOTE(review): 'msg % args' is applied even when args is empty,
+        # so a literal '%' in a pre-formatted message would crash here --
+        # callers currently pass either style; verify.
+        print msg % args
+        logging.info(msg % args)
+
+    def run_import_plugin(self, command, **kwargs):
+        # NOTE(review): unfinished and apparently unused -- it duplicates
+        # setup_logging_for_import_plugins() and the actual runcmd call
+        # below is commented out.
+        log = self.settings['log']
+
+        if log == '/dev/stdout':
+            # The plugins output results on /dev/stdout, logs would interfere
+            debug('Redirecting import plugin logs to /dev/stderr')
+            log = '/dev/stderr'
+
+        extra_env = kwargs.get('extra_env', {})
+        extra_env['BASEROCK_IMPORT_LOG'] = log
+        extra_env['BASEROCK_IMPORT_LOG_LEVEL'] = self.settings['log-level']
+        kwargs['extra_env'] = extra_env
+
+        #cliapp.runcmd(
+
+    def cmd_rubygem(self, args):
+        '''Subcommand: import the named RubyGem and all its dependencies.'''
+        if len(args) != 1:
+            raise cliapp.AppException(
+                'Please pass the name of a RubyGem on the commandline.')
+
+        #try:
+        self.import_package_and_all_dependencies('rubygem', args[0])
+        #except:
+            #import pdb, traceback
+            #print sys.format_exc
+            #print traceback.print_tb(sys.exc_traceback)
+            #pdb.post_mortem(sys.exc_traceback)
+
+    def process_dependency_list(self, current_item, deps, to_process,
+                                processed, these_are_build_deps):
+        '''Queue unseen deps of 'current_item' and record graph edges.
+
+        'deps' maps dependency name -> version; 'processed' is the
+        networkx.DiGraph of finished packages; 'to_process' is the
+        pending-work list, mutated in place.
+
+        '''
+        # All deps are added as nodes to the 'processed' graph. Runtime
+        # dependencies only need to appear in the stratum, but build
+        # dependencies have ordering constraints, so we add edges in
+        # the graph for build-dependencies too.
+
+        for dep_name, dep_version in deps.iteritems():
+            dep_package = find(
+                processed, lambda i: i.match(dep_name, dep_version))
+
+            if dep_package is None:
+                # Not yet processed
+                queue_item = find(
+                    to_process, lambda i: i.match(dep_name, dep_version))
+                if queue_item is None:
+                    queue_item = Package(dep_name, dep_version)
+                    to_process.append(queue_item)
+                dep_package = queue_item
+
+            dep_package.add_required_by(current_item)
+
+            if these_are_build_deps or current_item.is_build_dep:
+                # A runtime dep of a build dep becomes a build dep
+                # itself.
+                dep_package.set_is_build_dep(True)
+                processed.add_edge(dep_package, current_item)
+
+    def import_package_and_all_dependencies(self, kind, goal_name,
+                                            goal_version='master'):
+        '''Main loop: import 'goal_name' plus its whole dependency graph.
+
+        For each queued package this creates/merges a .lorry file,
+        clones the source, checks out the right version, generates or
+        loads a chunk morphology, and queues the chunk's declared
+        dependencies. Finally a stratum morph is generated.
+
+        '''
+        lorry_set = LorrySet(self.settings['lorries-dir'])
+        morph_set = MorphologySet(self.settings['definitions-dir'])
+
+        chunk_dir = os.path.join(morph_set.path, 'strata', goal_name)
+        if not os.path.exists(chunk_dir):
+            os.makedirs(chunk_dir)
+
+        to_process = [Package(goal_name, goal_version)]
+        processed = networkx.DiGraph()
+
+        # Currently always empty: the ignore-and-continue path below is
+        # commented out and errors are re-raised instead.
+        ignored_errors = []
+
+        while len(to_process) > 0:
+            current_item = to_process.pop()
+            name = current_item.name
+            version = current_item.version
+
+            try:
+                lorry = self.find_or_create_lorry_file(lorry_set, kind, name)
+
+                source_repo, url = self.fetch_or_update_source(lorry)
+
+                try:
+                    checked_out_version, ref = self.checkout_source_version(
+                        source_repo, name, version)
+                    current_item.set_version_in_use(checked_out_version)
+                    chunk_morph = self.find_or_create_chunk_morph(
+                        morph_set, goal_name, kind, name, checked_out_version,
+                        source_repo, url, ref)
+                except BaserockImportException as e:
+                    #logging.warning('Ignoring error %r and continuing!', e)
+                    #ignored_errors.append(name)
+                    sys.stderr.write(
+                        "Couldn't auto-generate a chunk morphology for %s, "
+                        "please provide one manually and continue.\n" % name)
+                    raise
+
+                current_item.set_morphology(chunk_morph)
+
+                build_deps = chunk_morph['x-build-dependencies-%s' % kind]
+                runtime_deps = chunk_morph['x-runtime-dependencies-%s' % kind]
+
+                processed.add_node(current_item)
+
+                self.process_dependency_list(
+                    current_item, build_deps, to_process, processed, True)
+                self.process_dependency_list(
+                    current_item, runtime_deps, to_process, processed, False)
+            except BaserockImportException:
+                sys.stderr.write('Error processing package %s\n' %
+                                 current_item)
+                raise
+
+        if len(ignored_errors) > 0:
+            sys.stderr.write('Ignored errors in %i packages: %s\n' %
+                             (len(ignored_errors), ', '.join(ignored_errors)))
+
+        self.maybe_generate_stratum_morph(processed, goal_name)
+
+    def generate_lorry_for_package(self, kind, name):
+        '''Run the external '<kind>.to_lorry' tool and parse its JSON output.'''
+        tool = '%s.to_lorry' % kind
+        self.status('Calling %s to generate lorry for %s', tool, name)
+        # NOTE(review): os.path.abspath resolves the tool relative to the
+        # current working directory -- verify this is the intended lookup.
+        lorry_text = cliapp.runcmd([os.path.abspath(tool), name])
+        lorry = json.loads(lorry_text)
+        return lorry
+
+    def find_or_create_lorry_file(self, lorry_set, kind, name):
+        '''Return the lorry entry for 'name', generating one if needed.'''
+        # Note that the lorry file may already exist for 'name', but lorry
+        # files are named for project name rather than package name. In this
+        # case we will generate the lorry, and try to add it to the set, at
+        # which point LorrySet will notice the existing one and merge the two.
+        lorry = lorry_set.find_lorry_for_package(kind, name)
+
+        if lorry is None:
+            lorry = self.generate_lorry_for_package(kind, name)
+
+            if len(lorry) != 1:
+                raise Exception(
+                    'Expected generated lorry file with one entry.')
+
+            lorry_filename = lorry.keys()[0]
+
+            if lorry_filename == '':
+                raise cliapp.AppException(
+                    'Invalid lorry data for %s: %s' % (name, lorry))
+
+            lorry_set.add(lorry_filename, lorry)
+
+        return lorry
+
+    def fetch_or_update_source(self, lorry):
+        '''Clone (or reuse) the Git repo named in 'lorry'.
+
+        Returns (GitDirectory, url).
+
+        '''
+        assert len(lorry) == 1
+        lorry_entry = lorry.values()[0]
+
+        url = lorry_entry['url']
+        reponame = os.path.basename(url.rstrip('/'))
+        repopath = os.path.join(self.settings['checkouts-dir'], reponame)
+
+        # FIXME: we should use Lorry here, so that we can import other VCSes.
+        # But for now, this hack is fine!
+        if os.path.exists(repopath):
+            self.status('Updating repo %s', url)
+
+            # FIXME: doesn't update the source right now, to save time.
+            #cliapp.runcmd(['git', 'remote', 'update', 'origin'],
+            #              cwd=repopath)
+        else:
+            self.status('Cloning repo %s', url)
+            cliapp.runcmd(['git', 'clone', url, repopath])
+
+        repo = GitDirectory(repopath)
+        if repo.dirname != repopath:
+            # Work around strange/unintentional behaviour in GitDirectory class
+            # when 'repopath' isn't actually a Git repo at all.
+            logging.error(
+                'Got git directory %s for %s!', repo.dirname, repopath)
+            raise cliapp.AppException(
+                '%s exists but is not the root of a Git repository' % repopath)
+        return repo, url
+
+    def checkout_source_version(self, source_repo, name, version):
+        '''Check out the best-guess tag for 'version' in 'source_repo'.
+
+        Returns (version, ref); both are rewritten to 'master' when no
+        matching tag is found.
+
+        '''
+        # FIXME: we need to be a bit smarter than this. Right now we assume
+        # that 'version' is a valid Git ref.
+
+        possible_names = [
+            version,
+            'v%s' % version,
+            '%s-%s' % (name, version)
+        ]
+
+        for tag_name in possible_names:
+            if source_repo.has_ref(tag_name):
+                source_repo.checkout(tag_name)
+                ref = tag_name
+                break
+        else:
+            #raise BaserockImportException(
+            #    'Could not find ref for %s version %s.' % (name, version))
+            logging.error(
+                "Couldn't find tag %s in repo %s. I'm going to cheat and "
+                "use 'master' for now.", tag_name, source_repo)
+            source_repo.checkout('master')
+            ref = version = 'master'
+
+        return version, ref
+
+    def generate_chunk_morph_for_package(self, kind, source_repo, name,
+                                         filename):
+        '''Run '<kind>.to_chunk' on the checkout and parse the morphology.'''
+        tool = '%s.to_chunk' % kind
+        self.status('Calling %s to generate chunk morph for %s', tool, name)
+        try:
+            text = cliapp.runcmd(
+                [os.path.abspath(tool), source_repo.dirname, name])
+        except cliapp.AppException as e:
+            # Wrap so the caller can offer a "provide one manually" hint.
+            raise BaserockImportException(e.message)
+
+        loader = MorphologyLoader()
+        return loader.load_from_string(text, filename)
+
+    def find_or_create_chunk_morph(self, morph_set, goal_name, kind, name,
+                                   version, source_repo, repo_url, named_ref):
+        '''Return the chunk morphology for the package, generating if needed.'''
+        morphology_filename = 'strata/%s/%s-%s.morph' % (goal_name, name, version)
+        sha1 = source_repo.resolve_ref_to_commit(named_ref)
+        morphology = morph_set.get_morphology(repo_url, sha1, morphology_filename)
+
+        if morphology is None:
+            # Existing chunk morphologies loaded from disk don't contain the repo
+            # and ref information. That's stored in the stratum morph. So the
+            # first time we touch a chunk morph we need to set this info.
+            logging.debug("Didn't find morphology for %s|%s|%s", repo_url, sha1,
+                          morphology_filename)
+            morphology = morph_set.get_morphology(None, None, morphology_filename)
+
+            if morphology is None:
+                logging.debug("Didn't find morphology for None|None|%s",
+                              morphology_filename)
+                morphology = self.generate_chunk_morph_for_package(
+                    kind, source_repo, name, morphology_filename)
+                morph_set.save_morphology(morphology_filename, morphology)
+
+        morphology.repo_url = repo_url
+        morphology.ref = sha1
+        morphology.named_ref = named_ref
+
+        return morphology
+
+    def maybe_generate_stratum_morph(self, graph, goal_name):
+        '''Write a stratum morph for 'goal_name' unless one already exists.
+
+        'graph' is the networkx.DiGraph of processed Packages; a
+        topological sort gives a valid build order for the chunks.
+
+        '''
+        filename = os.path.join(
+            self.settings['definitions-dir'], 'strata', '%s.morph' % goal_name)
+
+        if os.path.exists(filename):
+            self.status(msg='Found stratum morph for %s at %s, not overwriting'
+                        % (goal_name, filename))
+            return
+
+        self.status(msg='Generating stratum morph for %s' % goal_name)
+
+        chunk_packages = networkx.topological_sort(graph)
+        chunk_entries = []
+
+        for package in chunk_packages:
+            m = package.morphology
+            if m is None:
+                raise cliapp.AppException('No morphology for %s' % package)
+
+            def format_build_dep(name, version):
+                # Use the version actually checked out, which may differ
+                # from the requested one.
+                dep_package = find(graph, lambda p: p.match(name, version))
+                return '%s-%s' % (name, dep_package.version_in_use)
+
+            build_depends = [
+                format_build_dep(name, version) for name, version in
+                m['x-build-dependencies-rubygem'].iteritems()
+            ]
+
+            entry = {
+                'name': m['name'],
+                'repo': m.repo_url,
+                'ref': m.ref,
+                'unpetrify-ref': m.named_ref,
+                'morph': m.filename,
+                'build-depends': build_depends,
+            }
+            chunk_entries.append(entry)
+
+        stratum_name = goal_name
+        stratum = {
+            'name': stratum_name,
+            'kind': 'stratum',
+            'description': 'Autogenerated by Baserock import tool',
+            'build-depends': [
+                {'morph': 'strata/ruby.morph'}
+            ],
+            'chunks': chunk_entries,
+        }
+
+        loader = morphlib.morphloader.MorphologyLoader()
+        morphology = loader.load_from_string(json.dumps(stratum),
+                                             filename=filename)
+
+        loader.unset_defaults(morphology)
+        loader.save_to_file(filename, morphology)
+
+
+# Script entry point: cliapp dispatches to the cmd_* subcommands above.
+app = BaserockImportApplication(progname='import')
+app.run()
diff --git a/import/rubygem.to_chunk b/import/rubygem.to_chunk
new file mode 100755
index 00000000..f1cc0377
--- /dev/null
+++ b/import/rubygem.to_chunk
@@ -0,0 +1,430 @@
+#!/usr/bin/env ruby
+#
+# Create a chunk morphology to integrate a RubyGem in Baserock
+#
+# Copyright (C) 2014 Codethink Limited
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+require 'bundler'
+require 'logger'
+require 'optparse'
+require 'yaml'
+
+BASEROCK_RUBY_VERSION = '2.0.0'
+
+# I'm no longer convinced about 'ignoring' Gems. My thinking is that it is
+# much easier to add a missing dependency than it is to detect and remove
+# unneeded dependencies. Therefore, a whitelist is perhaps the way forwards
+# instead.
+
+BUILD_DEPENDENCY_WHITELIST = [
+ 'hoe',
+ # rake is bundled with Ruby, so it is not included in the whitelist.
+]
+
+#IGNORED_GROUPS = [:compat_testing, :test]
+#
+# Users of traditional distros seem to find it useful to override the versions
+# of these Gems that come bundled with the MRI Ruby interpreter with newer
+# versions from rubygems.org. In Baserock it should be just as easy to update
+# MRI. We should avoid building components from two places.
+#BUNDLED_GEMS = [
+# 'rake',
+#]
+
+# Ignoring the :test group isn't enough for these Gems, they are often in the
+# :development group too and thus we need to explicitly ignore them.
+#TEST_GEMS = [
+# 'rspec',
+# 'rspec_junit_formatter',
+# 'rspec-core',
+# 'rspec-expectations',
+# 'rspec-mocks',
+# 'simplecov',
+#]
+#
+#IGNORED_GEMS = BUNDLED_GEMS + TEST_GEMS
+
+# Log information was passed in from the main import process, probably.
+# This global constant approach seems a little ugly, but it seems to be
+# recommended here:
+# <https://stackoverflow.com/questions/1681745/share-global-logger-among-module-classes>
+#
+log_file = ENV['BASEROCK_IMPORT_LOG'] || '/dev/null'
+
+if log_file.length == 0 then log_file = '/dev/null' end
+
+Log = Logger.new(log_file)
+
+Log.level = case ENV['BASEROCK_IMPORT_LOG_LEVEL']
+ when 'debug' then Logger::DEBUG
+ when 'warning' then Logger::WARN
+ when 'error' then Logger::ERROR
+ when 'critical', 'fatal' then Logger::FATAL
+ else Logger::INFO
+ end
+
+Log.formatter = proc do |severity, datetime, progname, msg|
+ "rubygem.to_chunk: #{severity}: #{msg}\n"
+end
+
+# Monkey-patch Bundler's singleton class: default_gemfile normally
+# raises when no Gemfile can be found, which we don't want here.
+class << Bundler
+  def default_gemfile
+    # This is a hack to make things not crash when there's no Gemfile
+    Pathname.new('.')
+  end
+end
+
+# True if 'spec' comes from the source tree being imported (a Bundler
+# path source pointing at '.'), rather than e.g. from rubygems.org.
+def spec_is_from_current_source_tree(spec)
+  spec.source.instance_of? Bundler::Source::Path and
+    spec.source.path.fnmatch?('.')
+end
+
+class Dsl < Bundler::Dsl
+  # The Bundler::Dsl class parses the Gemfile. We override it so that we can
+  # extend the class of the Bundler::Definition instance that is created, and
+  # so we can filter the results down to a specific Gem from the repo rather
+  # than the top-level one.
+
+  # Mirrors Bundler::Dsl.evaluate, threading 'target_gem_name' through
+  # to to_definition().
+  def self.evaluate(gemfile, lockfile, unlock, target_gem_name)
+    builder = new
+    builder.eval_gemfile(gemfile)
+    builder.to_definition(lockfile, unlock, target_gem_name)
+  end
+
+  # Build our Definition subclass. Note the dependency filtering is
+  # currently disabled (commented out below).
+  def to_definition(lockfile, unlock, target_gem_name)
+    @sources << rubygems_source unless @sources.include?(rubygems_source)
+
+    #@dependencies = filter_dependencies_for_target_gem(@dependencies,
+    #                                                   target_gem_name)
+    #Log.debug "The modified list of dependencies is: #{@dependencies}"
+
+    Definition.new(lockfile, @dependencies, @sources, unlock, @ruby_version)
+  end
+
+  def filter_dependencies_for_target_gem(dependencies, target_gem_name)
+    # Find the local Bundler::Source object, remove everything from that
+    # source except the Gem we actually care about. This is necessary
+    # because Bundler is designed for people who want to develop or deploy
+    # all Gems from a given repo, but in this case we only care about *one*
+    # Gem from the repo, which may not be the top level one.
+
+    # Note that this doesn't solve all our problems!!!! For Rails, for
+    # example, the top-level Gemfile lists a bunch of stuff that isn't
+    # needed for all the Gems. For example some databases, which are not at
+    # all necessary for activesupport! And jquery-rails, which brings in
+    # railties, which brings in actionpack, which is just not needed!
+    #
+    # To be honest, I have no idea what to do about this right now. Maybe
+    # a blacklist for certain nested Gems?
+    #
+    # One possible solution is to ignore everything the Gemfile says except
+    # for the target gemspec. So ditch @dependencies altogether except for
+    # the one Gem we want. Will need to test this with the whole dependency
+    # graph of Chef and see if it works ....
+    local_source = nil
+    new_deps = []
+    have_target = false
+    dependencies.each do |dep|
+      Log.debug "  - #{dep} #{dep.source} #{dep.groups}"
+      if spec_is_from_current_source_tree(dep)
+        local_source = local_source || dep.source
+        if dep.name == target_gem_name
+          new_deps << dep
+          have_target = true
+        end
+      else
+        new_deps << dep
+      end
+    end
+
+    if not local_source
+      # While Bundler recommends using 'gemspec' in the Gemfile[1] it's not
+      # required, and some Gems are old enough to not have a .gemspec anyway.
+      # In this case the code will fail later on at get_spec_for_gem(), right
+      # now :) We need to manually search for Gemspecs.
+      Log.info "No gemspecs were included in the Gemfile, so the full " +
+               "list of specified dependencies will be used."
+      return dependencies
+    end
+
+    if not have_target
+      # The target Gem was filtered out above; re-add it explicitly.
+      target_dep = Bundler::Dependency.new(
+        target_gem_name, '>= 0',
+        {"type" => :runtime, "source" => local_source}
+      )
+      new_deps << target_dep
+      Log.debug "The target gem #{target_dep} was not found in the " +
+                "dependencies list, so I have added it."
+      Log.debug "Its source is: #{target_dep.source.inspect}"
+    end
+    new_deps
+  end
+end
+
+class Definition < Bundler::Definition
+  # The Bundler::Definition class holds the dependency info we need.
+
+  def self.build(gemfile, lockfile, unlock, target_gem_name)
+    # Overridden so that our subclassed Dsl is used.
+    unlock ||= {}
+    gemfile = Pathname.new(gemfile).expand_path
+
+    unless gemfile.file?
+      raise Bundler::GemfileNotFound, "#{gemfile} not found"
+    end
+
+    Dsl.evaluate(gemfile, lockfile, unlock, target_gem_name)
+  end
+
+  def requested_dependencies
+    # Overridden to remove more stuff from the list: excluding certain
+    # groups using Bundler.settings.without is a good first step, but some
+    # test tools seem to be in the generic :development group and thus
+    # need to be explicitly removed from the list.
+    #result = super.reject { |d| IGNORED_GEMS.member? d.name }
+    #removed = dependencies - result
+    #Log.info "Removed dependencies: #{removed.collect {|d| d.name}}"
+
+    #result
+    super
+  end
+
+  # Returns [build_deps, runtime_deps] as two spec lists, resolving
+  # against both remote and cached sources.
+  def resolve_dependencies
+    # The term "build dependencies" is my own. RubyGems seem to mostly care
+    # about "needed at runtime" (:runtime) vs. "useful during development"
+    # (:development). We actually want "needed at runtime or during `rake
+    # install`" but we have to work this out for ourselves.
+
+    # Note you can set ENV['DEBUG_RESOLVER'] for more debug info.
+
+    # Here we do the equivalent of resolve_remotely! and resolve_cached!
+    # combined. In the hope that they work OK together. Ideally we'd
+    # cache the specs after fetching them the first time so that on the
+    # next run we only needed to fetch the ones we didn't already have. Not
+    # sure the Bundler code makes this at all easy though. Probably
+    # extending Source::Rubygems would be the way forwards.
+    @remote = true
+    @sources.each { |s| s.remote! }
+    @sources.each { |s| s.cached! }
+
+    build_deps = specs_for([:development])
+    # FIXME: this list seems to always just contain 'bundler'.
+    # not what I want, I think. Any value achieves the same thing so
+    # I guess ':runtime' is not right. Maybe Bundler doesn't track
+    # runtime deps at all?
+    runtime_deps = specs_for([:runtime])
+    STDERR.puts "Build deps: "
+    build_deps.each { |s| STDERR.puts "  - #{s.name}" }
+    STDERR.puts "Runtime deps:"
+    runtime_deps.each { |s| STDERR.puts "  - #{s.name}" }
+    return [build_deps, runtime_deps]
+  end
+end
+
+# Drives the whole rubygem.to_chunk process: parse arguments, resolve
+# the Gem's dependencies with Bundler, and emit a chunk morphology
+# (YAML) on stdout.
+class RubyGemChunkMorphologyGenerator
+  def parse_options(arguments)
+    # No options so far ..
+    opts = OptionParser.new
+
+    opts.banner = "Usage: rubygem.import SOURCE_DIR GEM_NAME"
+    opts.separator ""
+    opts.separator "This tool reads the Gemfile and optionally the " +
+      "Gemfile.lock from a Ruby project "
+    opts.separator "source tree in SOURCE_DIR. It outputs a chunk " +
+      "morphology for GEM_NAME on stdout."
+    opts.separator ""
+    opts.separator "It is intended for use with the `baserock-import` tool."
+
+    parsed_arguments = opts.parse!(arguments)
+
+    if parsed_arguments.length != 2
+      STDERR.puts opts.help
+      exit 1
+    end
+
+    parsed_arguments
+  end
+
+  # Report a fatal problem both to the log and to the user on stderr.
+  def error(message)
+    Log.error(message)
+    STDERR.puts(message)
+  end
+
+  def load_local_gemspecs()
+    # Look for .gemspec files in the source repo.
+    #
+    # If there is no .gemspec, but you set 'name' and 'version' then
+    # inside Bundler::Source::Path.load_spec_files this call will create a
+    # fake gemspec matching that name and version. That's probably not useful.
+
+    dir = '.'
+
+    source = Bundler::Source::Path.new({
+      'path' => dir,
+    })
+
+    Log.info "Loaded #{source.specs.count} specs from source dir."
+    source.specs.each do |spec|
+      Log.debug "  * #{spec.inspect} #{spec.dependencies.inspect}"
+    end
+
+    source
+  end
+
+  def load_definition(target_gem_name)
+    # Load and parse the Gemfile and, if found, the Gemfile.lock file.
+    Log.info("Loading Gemfile and Gemfile.lock for gem #{target_gem_name}")
+    definition = Definition.build(
+      'Gemfile', 'Gemfile.lock', update=false, target_gem_name)
+  end
+
+  # Return the single spec for 'gem_name' from 'specs', restricted to
+  # specs matching the current platform; raise if there are zero or
+  # several candidates.
+  def get_spec_for_gem(specs, gem_name)
+    found = specs[gem_name].select {|s| Gem::Platform.match(s.platform)}
+    if found.empty?
+      raise Exception,
+        "No Gemspecs found matching '#{gem_name}'"
+    elsif found.length != 1
+      raise Exception,
+        "Unsure which Gem to use for #{gem_name}, got #{found}"
+    end
+    found[0]
+  end
+
+  def chunk_name_for_gemspec(spec)
+    # Chunk names are the Gem's "full name" (name + version number), so
+    # that we don't break in the rare but possible case that two different
+    # versions of the same Gem are required for something to work. It'd be
+    # nicer to only use the full_name if we detect such a conflict.
+    spec.full_name
+  end
+
+  # Build the chunk morphology (as a plain Hash, later YAML-dumped)
+  # for one Gem: `gem build` then `gem install` into $DESTDIR.
+  def generate_chunk_morph_for_gem(spec)
+    description = 'Automatically generated by rubygem.import'
+
+    bin_dir = "\"$DESTDIR/$PREFIX/bin\""
+    gem_dir = "\"$DESTDIR/$PREFIX/lib/ruby/gems/#{BASEROCK_RUBY_VERSION}\""
+
+    # There's more splitting to be done, but putting the docs in the
+    # correct artifact is the single biggest win for enabling smaller
+    # system images.
+    split_rules = [
+      {
+        'artifact' => "#{spec.full_name}-doc",
+        'include' => [
+          "usr/lib/ruby/gems/#{BASEROCK_RUBY_VERSION}/doc/.*"
+        ]
+      }
+    ]
+
+    build_commands = [
+      "gem build #{spec.name}.gemspec",
+    ]
+
+    install_commands = [
+      "mkdir -p #{gem_dir}",
+      "gem install --install-dir #{gem_dir} --bindir #{bin_dir} " +
+      "--ignore-dependencies --local ./#{spec.full_name}.gem"
+    ]
+
+    {
+      'name' => chunk_name_for_gemspec(spec),
+      'kind' => 'chunk',
+      'description' => description,
+      'build-system' => 'manual',
+      ##'gem-url' => "http://rubygems.org/downloads/#{spec.full_name}.gem",
+      'products' => split_rules,
+      'build-commands' => build_commands,
+      'install-commands' => install_commands,
+    }
+  end
+
+  # Development-type dependencies, filtered by the whitelist at the top
+  # of this file.
+  def build_deps_for_gem(spec)
+    deps = spec.dependencies.select do |d|
+      d.type == :development && BUILD_DEPENDENCY_WHITELIST.member?(d.name)
+    end
+  end
+
+  def runtime_deps_for_gem(spec)
+    spec.dependencies.select {|d| d.type == :runtime}
+  end
+
+  # Serialize the morphology Hash as YAML to 'file'.
+  def write_morph(file, morph)
+    file.write(YAML.dump(morph))
+  end
+
+  def run
+    source_dir_name, gem_name = parse_options(ARGV)
+
+    Log.info("Creating chunk morph for #{gem_name} based on " +
+             "source code in #{source_dir_name}")
+
+    Dir.chdir(source_dir_name)
+
+    ## Find the .gemspec file in the project repo corresponding to the Gem
+    ## requested on the commandline.
+    #local_source = load_local_gemspecs
+    #local_specset = Bundler::SpecSet.new(local_source.local_specs)
+    #spec = get_spec_for_gem(local_specset, gem_name)
+
+    # Instead of reading the real Gemfile, invent one that simply includes the
+    # chosen .gemspec. If present, the Gemfile.lock will be honoured.
+    fake_gemfile = Bundler::Dsl.new
+    fake_gemfile.source('https://rubygems.org')
+    fake_gemfile.gemspec({:name => gem_name})
+
+    definition = fake_gemfile.to_definition('Gemfile.lock', true)
+    resolved_specs = definition.resolve_remotely!
+
+    #build_specs, runtime_specs = definition.resolve_dependencies
+
+    spec = get_spec_for_gem(resolved_specs, gem_name)
+
+    if not spec_is_from_current_source_tree(spec)
+      error "Specified gem '#{spec.name}' doesn't live in the source in " +
+            "'#{source_dir_name}'"
+      Log.debug "SPEC: #{spec.inspect} #{spec.source}"
+      exit 1
+    end
+
+    morph = generate_chunk_morph_for_gem(spec)
+
+    # One might think that you could use the Bundler::Dependency.groups
+    # field to filter but it doesn't seem to be useful. Instead we go back to
+    # the Gem::Specification of the target Gem and use the dependencies field
+    # there. We look up each dependency in the resolved_specset to find out
+    # what version Bundler has chosen of it.
+
+    def format_deps_for_morphology(specset, dep_list)
+      info = dep_list.collect do |dep|
+        spec = specset[dep][0]
+        [spec.name, spec.version.to_s]
+      end
+      Hash[info]
+    end
+
+    build_deps = format_deps_for_morphology(
+      resolved_specs, build_deps_for_gem(spec))
+    runtime_deps = format_deps_for_morphology(
+      resolved_specs, runtime_deps_for_gem(spec))
+
+    morph['x-build-dependencies-rubygem'] = build_deps
+    morph['x-runtime-dependencies-rubygem'] = runtime_deps
+
+    write_morph(STDOUT, morph)
+  end
+end
+
+RubyGemChunkMorphologyGenerator.new.run
diff --git a/import/rubygem.to_lorry b/import/rubygem.to_lorry
new file mode 100755
index 00000000..60e9d925
--- /dev/null
+++ b/import/rubygem.to_lorry
@@ -0,0 +1,188 @@
+#!/usr/bin/python
+#
+# Create a Baserock .lorry file for a given RubyGem
+#
+# Copyright (C) 2014 Codethink Limited
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+
+import requests
+import requests_cache
+
+import logging
+import json
+import os
+import sys
+import urlparse
+
+
# Hand-maintained map of Gem name -> upstream source repository URL, for
# Gems whose rubygems.org metadata lacks a usable 'source_code_uri'.
# Entries here take precedence over whatever the rubygems.org API reports
# (a mismatch is warned about, but the hardcoded URI wins).
known_source_uris = {
    'ast': 'https://github.com/openSUSE/ast',
    'brass': 'https://github.com/rubyworks/brass',
    'coveralls': 'https://github.com/lemurheavy/coveralls-ruby',
    'diff-lcs': 'https://github.com/halostatue/diff-lcs',
    'erubis': 'https://github.com/kwatch/erubis',
    'fog-brightbox': 'https://github.com/brightbox/fog-brightbox',
    'highline': 'https://github.com/JEG2/highline',
    'hoe': 'https://github.com/seattlerb/hoe',
    'indexer': 'https://github.com/rubyworks/indexer',
    'json': 'https://github.com/flori/json',
    'method_source': 'https://github.com/banister/method_source',
    'mixlib-authentication': 'https://github.com/opscode/mixlib-authentication',
    'mixlib-cli': 'https://github.com/opscode/mixlib-cli',
    'mixlib-log': 'https://github.com/opscode/mixlib-log',
    'mixlib-shellout': 'http://github.com/opscode/mixlib-shellout',
    'ohai': 'http://github.com/opscode/ohai',
    'rack-cache': 'https://github.com/rtomayko/rack-cache',
    # The rails repo produces many separate Gems.
    'actionmailer': 'https://github.com/rails/rails',
    'actionpack': 'https://github.com/rails/rails',
    'actionview': 'https://github.com/rails/rails',
    'activemodel': 'https://github.com/rails/rails',
    'activerecord': 'https://github.com/rails/rails',
    'activesupport': 'https://github.com/rails/rails',
    'rails': 'https://github.com/rails/rails',
    'railties': 'https://github.com/rails/rails',
    'pg': 'https://github.com/ged/ruby-pg',
    'sigar': 'https://github.com/hyperic/sigar',
    'sprockets': 'https://github.com/sstephenson/sprockets',
    'tins': 'https://github.com/flori/tins',
}
+
+
class RubyGemsWebServiceClient(object):
    '''Thin client for the rubygems.org HTTP API, with transparent caching.'''

    def __init__(self):
        # Save hammering the rubygems.org API: 'requests' API calls are
        # transparently cached in an SQLite database, instead.
        requests_cache.install_cache('rubygems_api_cache')

    def _request(self, url):
        '''GET 'url' and return the response body parsed as JSON.

        Raises an Exception if the server returns an error status.
        '''
        r = requests.get(url)
        if r.ok:
            return json.loads(r.text)
        else:
            raise Exception('Request to %s failed: %s' % (r.url, r.reason))

    def get_gem_info(self, gem_name):
        '''Fetch the rubygems.org metadata record for 'gem_name'.'''
        info = self._request(
            'http://rubygems.org/api/v1/gems/%s.json' % gem_name)

        if info['name'] != gem_name:
            # Sanity check. NOTE: the format arguments must be a tuple;
            # without the parentheses '%' received only info['name'] and
            # this raise crashed with a TypeError instead.
            raise Exception('Received info for Gem "%s", requested "%s"' %
                            (info['name'], gem_name))

        return info
+
+
class RubyGemLorryGenerator(object):
    '''Generates a Baserock .lorry definition for a given RubyGem.'''

    def find_upstream_repo_for_gem(self, gem_name, gem_info):
        '''Work out the source repository URL for 'gem_name'.

        'gem_info' is the metadata dict returned by the rubygems.org API.
        Raises an Exception when no plausible source URL can be found.
        '''
        source_code_uri = gem_info['source_code_uri']

        # Hardcoded URIs beat the Gem's own metadata; warn (but don't
        # fail) when the two disagree.
        if gem_name in known_source_uris:
            logging.debug('Found %s in known_source_uris', gem_name)
            known_uri = known_source_uris[gem_name]
            if source_code_uri is not None and known_uri != source_code_uri:
                sys.stderr.write(
                    '%s: Hardcoded source URI %s doesn\'t match spec URI %s\n' %
                    (gem_name, known_uri, source_code_uri))
            return known_uri

        if source_code_uri is not None and len(source_code_uri) > 0:
            logging.debug('Got source_code_uri %s', source_code_uri)
            # Some Gems point at a GitHub '/tree' browse URL, not the repo.
            if source_code_uri.endswith('/tree'):
                source_code_uri = source_code_uri[:-len('/tree')]

            return source_code_uri

        homepage_uri = gem_info['homepage_uri']
        if homepage_uri is not None and len(homepage_uri) > 0:
            # FIX: this used to log source_code_uri, which is None or empty
            # whenever this branch is reached.
            logging.debug('Got homepage_uri %s', homepage_uri)
            # Only trust the homepage as a source URL when it points at
            # github.com, where homepage and repo usually coincide.
            netloc = urlparse.urlsplit(homepage_uri)[1]
            if netloc == 'github.com':
                return homepage_uri

        # Further possible leads on locating source code.
        # http://ruby-toolbox.com/projects/$gemname -> sometimes contains an
        # upstream link, even if the gem info does not.
        # https://github.com/search?q=$gemname -> often the first result is
        # the correct one, but you can never know.

        raise Exception('Did not manage to automatically find the upstream '
                        'source URL for Gem %s.' % gem_name)

    def project_name_from_repo(self, repo_url):
        '''Derive a project name from a repository URL (its last component).'''
        if repo_url.endswith('/tree/master'):
            repo_url = repo_url[:-len('/tree/master')]
        if repo_url.endswith('/'):
            repo_url = repo_url[:-1]
        if repo_url.endswith('.git'):
            repo_url = repo_url[:-len('.git')]
        return os.path.basename(repo_url)

    def generate_lorry_for_gem(self, gem_name):
        '''Return a .lorry dict describing the upstream repo of 'gem_name'.'''
        rubygems_client = RubyGemsWebServiceClient()

        gem_info = rubygems_client.get_gem_info(gem_name)

        gem_source_url = self.find_upstream_repo_for_gem(gem_name, gem_info)
        logging.info('Got URL <%s> for %s', gem_source_url, gem_name)

        project_name = self.project_name_from_repo(gem_source_url)

        # One repo may produce multiple Gems. It's up to the caller to merge
        # multiple .lorry files that get generated for the same repo.

        lorry = {
            project_name: {
                'type': 'git',
                'url': gem_source_url,
                'x-products-rubygem': [gem_name]
            }
        }

        return lorry
+
+
def setup_logging():
    '''Configure logging from the BASEROCK_IMPORT_LOG* environment variables.

    BASEROCK_IMPORT_LOG names a file to append log messages to; when it is
    unset or empty, logging stays unconfigured. BASEROCK_IMPORT_LOG_LEVEL
    names the log level (default 'info').
    '''
    stream_name = os.environ.get('BASEROCK_IMPORT_LOG', '')
    level = os.environ.get('BASEROCK_IMPORT_LOG_LEVEL', 'info')

    # Look the level name up as a public attribute of the logging module
    # (logging.DEBUG, logging.INFO, ...) rather than via the private
    # logging._levelNames dict, which does not exist in Python 3.
    level_id = getattr(logging, level.upper(), logging.INFO)
    if not isinstance(level_id, int):
        # Guard against a level name that happens to match some other
        # attribute of the logging module.
        level_id = logging.INFO

    if len(stream_name) > 0:
        logging.basicConfig(stream=open(stream_name, 'a'), level=level_id)
+
def write_lorry(stream, lorry):
    '''Serialise the 'lorry' dict to 'stream' as indented JSON.'''
    text = json.dumps(lorry, indent=4)
    stream.write(text)
+
+
def main():
    '''Entry point: print a .lorry for the Gem named on the command line.'''
    args = sys.argv[1:]
    if len(args) != 1:
        sys.stderr.write(
            'Please call me with the name of a RubyGem as an argument.\n')
        sys.exit(1)

    setup_logging()

    generator = RubyGemLorryGenerator()
    write_lorry(sys.stdout, generator.generate_lorry_for_gem(args[0]))


if __name__ == '__main__':
    main()
diff --git a/scripts/import-rubygem b/scripts/import-rubygem
new file mode 100755
index 00000000..6b869a7d
--- /dev/null
+++ b/scripts/import-rubygem
@@ -0,0 +1,189 @@
+#!/usr/bin/env ruby
+#
+# Create a stratum to integrate a Ruby project in Baserock, using RubyGems
+#
+# Copyright (C) 2014 Codethink Limited
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
require 'bundler'
require 'fileutils'
require 'optparse'
require 'yaml'
+
+BASEROCK_RUBY_VERSION = '2.0.0'
+
# Parse commandline arguments, returning [project_dir, output_dir].
# Prints the usage text and exits when the argument count is wrong.
def parse_options(arguments)
  # No options so far ..
  opts = OptionParser.new

  # FIX: the usage line previously said 'import-ruby', but this script is
  # installed as 'import-rubygem'.
  opts.banner = "Usage: import-rubygem PROJECT_DIR OUTPUT_DIR"
  opts.separator ""
  opts.separator "This tool reads the Gemfile and optionally the " +
                 "Gemfile.lock from a Ruby project "
  opts.separator "source tree in PROJECT_DIR. It outputs a stratum " +
                 "morphology and a set of chunk "
  opts.separator "morphology files to OUTPUT_DIR."

  parsed_arguments = opts.parse!(arguments)

  if parsed_arguments.length != 2 then
    STDERR.puts opts.help
    exit 1
  end

  parsed_arguments
end
+
# One Git repo can produce any number of Gems, or none, so it's hard to
# work out a project name that way. Instead, use the repo name :)
def get_project_name(project_dir_name)
  File.basename(project_dir_name)
end
+
# Load and parse the Gemfile and, if found, the Gemfile.lock file from the
# current working directory, returning a Bundler::Definition.
def load_gemfile()
  # The third argument is 'unlock'; false means honour Gemfile.lock as-is.
  Bundler::Definition.build('Gemfile', 'Gemfile.lock', false)
end
+
# Return the resolved Gem specs for the Ruby project in 'dir_name'.
# Falls back to remote resolution when local Gem info is incomplete, and
# exits cleanly when 'dir_name' contains no Gemfile at all.
def get_all_specs_for_project(dir_name)
  Dir.chdir(dir_name) { begin
    load_gemfile.specs
  rescue Bundler::GemNotFound
    # If we're missing some Gem info, try remotely resolving. This is very
    # slow so it's nice if it can be avoided. Perhaps setting up a local
    # mirror of the necessary specs would avoid this problem. There seems
    # to be no way to "reset" the Definition instance after the exception,
    # so we have to call load_gemfile again.
    STDERR.puts "Resolving definitions remotely (this may take a while!)"
    load_gemfile.resolve_remotely!
  rescue Bundler::GemfileNotFound
    STDERR.puts "Did not find a Gemfile in #{dir_name}."
    exit
  end }
end
+
# Build the full list of morphologies for 'specs': one stratum morph for
# the whole project followed by one chunk morph per resolved Gem.
def generate_morphs_for_specset(project_name, specs)
  # Chunk names are the Gem's "full name" (name + version number), so that we
  # don't break in the rare but possible case that two different versions of
  # the same Gem are required for something to work. It'd be nicer to only
  # use the full_name if we detect such a conflict. If we do, at least this
  # function below can be removed with the much simpler:
  # spec.deps.collect |dep| dep.name
  runtime_depends = proc do |spec|
    result = []
    spec.dependencies.each do |dep|
      # Development dependencies are only needed to hack on the Gem itself.
      next if dep.type == :development
      found = specs[dep]
      if found.length != 1
        raise Exception,
            "Unsure which Gem to use for #{dep}, got #{found}"
      end
      result << found[0].full_name
    end
    result
  end

  description = 'Automatically generated by import-ruby. This is a ' +
                'prototype of a method for integrating RubyGems into ' +
                'Baserock.'

  # Quoted so the variables survive into the generated shell commands.
  bin_dir = "\"$DESTDIR/$PREFIX/bin\""
  gem_dir = "\"$DESTDIR/$PREFIX/lib/ruby/gems/#{BASEROCK_RUBY_VERSION}\""

  chunk_morphs = specs.collect do |spec|
    # There's more splitting to be done, but putting the docs in the
    # correct artifact is the single biggest win for enabling smaller
    # system images.
    split_rules = [
      {
        'artifact' => "#{spec.full_name}-doc",
        'include' => [
          "usr/lib/ruby/gems/#{BASEROCK_RUBY_VERSION}/doc/.*"
        ]
      }
    ]

    install_commands = [
      "mkdir -p #{gem_dir}",
      "gem install --install-dir #{gem_dir} --bindir #{bin_dir} " +
          "--ignore-dependencies --local #{spec.full_name}.gem"
    ]

    {
      'name' => spec.full_name,
      'kind' => 'chunk',
      'description' => description,
      'build-system' => 'manual',
      # FIXME: this is not how we should calculate the URL field!
      'gem-url' => "http://rubygems.org/downloads/#{spec.full_name}.gem",
      'products' => split_rules,
      'install-commands' => install_commands
    }
  end

  # Stratum 'chunks' entries referencing the chunk morphs generated above.
  chunks = specs.collect do |spec|
    {
      'name' => spec.full_name,
      'description' => description,
      # This is a dummy value; there is no repo for these chunks.
      # The 'repo' field should perhaps become optional!
      'repo' => 'baserock:baserock/definitions',
      'ref' => 'master',
      'morph' => File.join(project_name, spec.full_name + '.morph'),
      # Runtime depends must be present at "build" (Gem install) time.
      'build-depends' => runtime_depends.call(spec),
      # This feature is not in morph.git master yet
      'build-mode' => 'rubygem',
    }
  end

  stratum_morph = {
    'name' => project_name,
    'kind' => 'stratum',
    'description' => description,
    'build-depends' => [
      { 'morph' => 'ruby' }
    ],
    'chunks' => chunks,
  }

  return [stratum_morph] + chunk_morphs
end
+
# Write each morphology in 'morphs' as a YAML .morph file, named after the
# morph, inside TARGET_DIR_NAME/PROJECT_NAME/ (created when missing).
def write_morphs(morphs, project_name, target_dir_name)
  out_dir = File.join(target_dir_name, project_name)
  FileUtils.makedirs(out_dir)
  Dir.chdir(out_dir) do
    morphs.each do |morph|
      File.open(morph['name'] + '.morph', 'w') do |file|
        file.write(YAML.dump(morph))
      end
    end
  end
end
+
# Program entry point: read a Ruby project tree, emit a stratum morphology
# plus chunk morphologies describing its Gem dependencies.
def run
  project_dir_name, target_dir_name = parse_options(ARGV)

  project_name = get_project_name(project_dir_name)
  specset = get_all_specs_for_project(project_dir_name)

  write_morphs(generate_morphs_for_specset(project_name, specset),
               project_name, target_dir_name)
end

run