author     Richard Maw <richard.maw@gmail.com>   2014-10-01 22:11:42 +0000
committer  Richard Maw <richard.maw@gmail.com>   2014-10-08 12:13:56 +0000
commit     2689f61a305ec576f229f1d9f0929d7eb11d1acc (patch)
tree       85e4e0259360d8f824b1ee775f0511f939284d50
parent     f814cf6c4a8cae6b5a28349897626d42f5fd9f9e (diff)
download   morph-2689f61a305ec576f229f1d9f0929d7eb11d1acc.tar.gz
Fix issues with distbuild caused by moving to building per-source
-rw-r--r--  distbuild/build_controller.py              11
-rw-r--r--  distbuild/initiator_connection.py           2
-rw-r--r--  distbuild/serialise.py                      8
-rw-r--r--  distbuild/worker_build_scheduler.py        27
-rw-r--r--  morphlib/plugins/distbuild_plugin.py        2
-rw-r--r--  morphlib/plugins/list_artifacts_plugin.py   6
6 files changed, 31 insertions, 25 deletions
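
The hunks below touch many call sites but mostly follow one pattern: with morph now building per-source, the cache key and build-dependencies hang off an artifact's Source rather than the Artifact itself, so callers must go through artifact.source. A minimal sketch of that relationship follows (simplified stand-in classes, not the real morphlib.source/morphlib.artifact definitions; everything beyond the cache_key, dependencies, name and source attributes seen in the diff is an assumption):

    # Illustrative sketch only -- simplified stand-ins for morphlib's Source
    # and Artifact, showing the access pattern this commit converts callers to.

    class Source(object):
        def __init__(self, cache_key, dependencies):
            self.cache_key = cache_key          # cache key now computed per source
            self.dependencies = dependencies    # build-depends now tracked per source

    class Artifact(object):
        def __init__(self, name, source):
            self.name = name
            self.source = source                # each artifact knows its owning source

    def step_filename(artifact):
        # Old code read artifact.cache_key directly; after this change the
        # key is reached through the owning source, as in build_controller.py.
        return '%s.%s' % (artifact.source.cache_key, artifact.name)

    def ready_to_build(artifact, built):
        # Dependencies likewise move to the source, mirroring the
        # _find_artifacts_that_are_ready_to_build() hunk below.
        return all(dep in built for dep in artifact.source.dependencies)

Beyond that mechanical substitution, the diff also guards the split-rule lookup in serialise.py for non-chunk/stratum kinds, drops the '-rootfs'/'-kernel' hack in worker_build_scheduler.py, and switches the worker-build plugin to bc.build_source().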
diff --git a/distbuild/build_controller.py b/distbuild/build_controller.py
index e8a8dc37..93f97fac 100644
--- a/distbuild/build_controller.py
+++ b/distbuild/build_controller.py
@@ -142,7 +142,7 @@ def map_build_graph(artifact, callback):
a = queue.pop()
if a not in done:
result.append(callback(a))
- queue.extend(a.dependencies)
+ queue.extend(a.source.dependencies)
done.add(a)
return result
@@ -388,7 +388,8 @@ class BuildController(distbuild.StateMachine):
def _find_artifacts_that_are_ready_to_build(self):
def is_ready_to_build(artifact):
return (artifact.state == UNBUILT and
- all(a.state == BUILT for a in artifact.dependencies))
+ all(a.state == BUILT
+ for a in artifact.source.dependencies))
return [a
for a in map_build_graph(self._artifact, lambda a: a)
@@ -424,7 +425,7 @@ class BuildController(distbuild.StateMachine):
logging.debug(
'Requesting worker-build of %s (%s)' %
- (artifact.name, artifact.cache_key))
+ (artifact.name, artifact.source.cache_key))
request = distbuild.WorkerBuildRequest(artifact,
self._request['id'])
self.mainloop.queue_event(distbuild.WorkerBuildQueuer, request)
@@ -540,7 +541,7 @@ class BuildController(distbuild.StateMachine):
def _find_artifact(self, cache_key):
artifacts = map_build_graph(self._artifact, lambda a: a)
- wanted = [a for a in artifacts if a.cache_key == cache_key]
+ wanted = [a for a in artifacts if a.source.cache_key == cache_key]
if wanted:
return wanted[0]
else:
@@ -637,7 +638,7 @@ class BuildController(distbuild.StateMachine):
baseurl = urlparse.urljoin(
self._artifact_cache_server, '/1.0/artifacts')
filename = ('%s.%s.%s' %
- (self._artifact.cache_key,
+ (self._artifact.source.cache_key,
self._artifact.source.morphology['kind'],
self._artifact.name))
url = '%s?filename=%s' % (baseurl, urllib.quote(filename))
diff --git a/distbuild/initiator_connection.py b/distbuild/initiator_connection.py
index 0f009fcc..db982230 100644
--- a/distbuild/initiator_connection.py
+++ b/distbuild/initiator_connection.py
@@ -171,7 +171,7 @@ class InitiatorConnection(distbuild.StateMachine):
'name': distbuild.build_step_name(artifact),
'build-depends': [
distbuild.build_step_name(x)
- for x in artifact.dependencies
+ for x in artifact.source.dependencies
]
}
diff --git a/distbuild/serialise.py b/distbuild/serialise.py
index 0a60b0c2..2b39000e 100644
--- a/distbuild/serialise.py
+++ b/distbuild/serialise.py
@@ -78,7 +78,8 @@ def serialise_artifact(artifact):
for (_, sa) in a.source.artifacts.iteritems():
if id(sa) not in encoded_artifacts:
encoded_artifacts[id(sa)] = encode_artifact(sa)
- encoded_morphologies[id(a.source.morphology)] = encode_morphology(a.source.morphology)
+ encoded_morphologies[id(a.source.morphology)] = \
+ encode_morphology(a.source.morphology)
encoded_sources[id(a.source)] = encode_source(a.source)
if id(a) not in encoded_artifacts: # pragma: no cover
@@ -164,7 +165,10 @@ def deserialise_artifact(encoded):
morphology = morphologies[source_dict['morphology']]
kind = morphology['kind']
ruler = getattr(morphlib.artifactsplitrule, 'unify_%s_matches' % kind)
- rules = ruler(morphology, le_dicts['default_split_rules'][kind])
+ if kind in ('chunk', 'stratum'):
+ rules = ruler(morphology, le_dicts['default_split_rules'][kind])
+ else: # pragma: no cover
+ rules = ruler(morphology)
sources[source_id] = decode_source(source_dict, morphology, rules)
# clear the source artifacts that get automatically generated
diff --git a/distbuild/worker_build_scheduler.py b/distbuild/worker_build_scheduler.py
index 6cda5972..be732153 100644
--- a/distbuild/worker_build_scheduler.py
+++ b/distbuild/worker_build_scheduler.py
@@ -262,13 +262,13 @@ class WorkerBuildQueuer(distbuild.StateMachine):
logging.debug('Worker build step already started: %s' %
event.artifact.basename())
progress = WorkerBuildStepAlreadyStarted(event.initiator_id,
- event.artifact.cache_key, job.who.name())
+ event.artifact.source.cache_key, job.who.name())
else:
logging.debug('Job created but not building yet '
'(waiting for a worker to become available): %s' %
event.artifact.basename())
progress = WorkerBuildWaiting(event.initiator_id,
- event.artifact.cache_key)
+ event.artifact.source.cache_key)
self.mainloop.queue_event(WorkerConnection, progress)
else:
@@ -279,7 +279,7 @@ class WorkerBuildQueuer(distbuild.StateMachine):
self._give_job(job)
else:
progress = WorkerBuildWaiting(event.initiator_id,
- event.artifact.cache_key)
+ event.artifact.source.cache_key)
self.mainloop.queue_event(WorkerConnection, progress)
def _handle_cancel(self, event_source, event):
@@ -483,7 +483,7 @@ class WorkerConnection(distbuild.StateMachine):
% (self._worker_name, msg))
started = WorkerBuildStepStarted(self._job.initiators,
- self._job.artifact.cache_key, self.name())
+ self._job.artifact.source.cache_key, self.name())
self.mainloop.queue_event(WorkerConnection, _JobStarted(self._job))
self.mainloop.queue_event(WorkerConnection, started)
@@ -510,7 +510,7 @@ class WorkerConnection(distbuild.StateMachine):
logging.debug('WC: emitting: %s', repr(new))
self.mainloop.queue_event(
WorkerConnection,
- WorkerBuildOutput(new, self._job.artifact.cache_key))
+ WorkerBuildOutput(new, self._job.artifact.source.cache_key))
def _handle_exec_response(self, msg):
logging.debug('WC: finished building: %s' % self._job.artifact.name)
@@ -522,7 +522,8 @@ class WorkerConnection(distbuild.StateMachine):
if new['exit'] != 0:
# Build failed.
- new_event = WorkerBuildFailed(new, self._job.artifact.cache_key)
+ new_event = WorkerBuildFailed(new,
+ self._job.artifact.source.cache_key)
self.mainloop.queue_event(WorkerConnection, new_event)
self.mainloop.queue_event(WorkerConnection, _JobFailed(self._job))
self.mainloop.queue_event(self, _BuildFailed())
@@ -556,10 +557,6 @@ class WorkerConnection(distbuild.StateMachine):
if kind == 'stratum':
suffixes.append(filename + '.meta')
- elif kind == 'system':
- # FIXME: This is a really ugly hack.
- if filename.endswith('-rootfs'):
- suffixes.append(filename[:-len('-rootfs')] + '-kernel')
suffixes = [urllib.quote(x) for x in suffixes]
suffixes = ','.join(suffixes)
@@ -571,7 +568,7 @@ class WorkerConnection(distbuild.StateMachine):
'/1.0/fetch?host=%s:%d&cacheid=%s&artifacts=%s' %
(urllib.quote(worker_host),
self._worker_cache_server_port,
- urllib.quote(self._job.artifact.cache_key),
+ urllib.quote(self._job.artifact.source.cache_key),
suffixes))
msg = distbuild.message(
@@ -582,7 +579,7 @@ class WorkerConnection(distbuild.StateMachine):
self.mainloop.queue_event(distbuild.HelperRouter, req)
progress = WorkerBuildCaching(self._job.initiators,
- self._job.artifact.cache_key)
+ self._job.artifact.source.cache_key)
self.mainloop.queue_event(WorkerConnection, progress)
def _maybe_handle_helper_result(self, event_source, event):
@@ -594,7 +591,8 @@ class WorkerConnection(distbuild.StateMachine):
logging.debug('Shared artifact cache population done')
new_event = WorkerBuildFinished(
- self._exec_response_msg, self._job.artifact.cache_key)
+ self._exec_response_msg,
+ self._job.artifact.source.cache_key)
self.mainloop.queue_event(WorkerConnection, new_event)
self.mainloop.queue_event(self, _Cached())
else:
@@ -612,7 +610,8 @@ class WorkerConnection(distbuild.StateMachine):
_JobFailed(self._job))
new_event = WorkerBuildFailed(
- self._exec_response_msg, self._job.artifact.cache_key)
+ self._exec_response_msg,
+ self._job.artifact.source.cache_key)
self.mainloop.queue_event(WorkerConnection, new_event)
self.mainloop.queue_event(self, _BuildFailed())
diff --git a/morphlib/plugins/distbuild_plugin.py b/morphlib/plugins/distbuild_plugin.py
index 26c26498..1858a9ba 100644
--- a/morphlib/plugins/distbuild_plugin.py
+++ b/morphlib/plugins/distbuild_plugin.py
@@ -103,7 +103,7 @@ class WorkerBuild(cliapp.Plugin):
self.app.subcommands['gc']([])
arch = artifact.arch
- bc.build_artifact(artifact, bc.new_build_env(arch))
+ bc.build_source(artifact.source, bc.new_build_env(arch))
def is_system_artifact(self, filename):
return re.match(r'^[0-9a-fA-F]{64}\.system\.', filename)
diff --git a/morphlib/plugins/list_artifacts_plugin.py b/morphlib/plugins/list_artifacts_plugin.py
index 8074206b..61c8d160 100644
--- a/morphlib/plugins/list_artifacts_plugin.py
+++ b/morphlib/plugins/list_artifacts_plugin.py
@@ -105,11 +105,13 @@ class ListArtifactsPlugin(cliapp.Plugin):
self.app.settings, system_artifact.source.morphology['arch'])
ckc = morphlib.cachekeycomputer.CacheKeyComputer(build_env)
- artifact_files = set()
- for artifact in system_artifact.walk():
+ for source in set(a.source for a in system_artifact.walk()):
artifact.cache_key = ckc.compute_key(artifact)
artifact.cache_id = ckc.get_cache_id(artifact)
+ artifact_files = set()
+ for artifact in system_artifact.walk():
+
artifact_files.add(artifact.basename())
if artifact.source.morphology.needs_artifact_metadata_cached: