Diffstat (limited to 'distbuild')
-rw-r--r--  distbuild/build_controller.py        |  11
-rw-r--r--  distbuild/initiator.py               |   8
-rw-r--r--  distbuild/initiator_connection.py    |   2
-rw-r--r--  distbuild/jm.py                      |  10
-rw-r--r--  distbuild/serialise.py               | 223
-rw-r--r--  distbuild/serialise_tests.py         | 102
-rw-r--r--  distbuild/sockserv.py                |   7
-rw-r--r--  distbuild/worker_build_scheduler.py  |  27
8 files changed, 210 insertions(+), 180 deletions(-)
diff --git a/distbuild/build_controller.py b/distbuild/build_controller.py
index e8a8dc37..93f97fac 100644
--- a/distbuild/build_controller.py
+++ b/distbuild/build_controller.py
@@ -142,7 +142,7 @@ def map_build_graph(artifact, callback):
a = queue.pop()
if a not in done:
result.append(callback(a))
- queue.extend(a.dependencies)
+ queue.extend(a.source.dependencies)
done.add(a)
return result
@@ -388,7 +388,8 @@ class BuildController(distbuild.StateMachine):
def _find_artifacts_that_are_ready_to_build(self):
def is_ready_to_build(artifact):
return (artifact.state == UNBUILT and
- all(a.state == BUILT for a in artifact.dependencies))
+ all(a.state == BUILT
+ for a in artifact.source.dependencies))
return [a
for a in map_build_graph(self._artifact, lambda a: a)
@@ -424,7 +425,7 @@ class BuildController(distbuild.StateMachine):
logging.debug(
'Requesting worker-build of %s (%s)' %
- (artifact.name, artifact.cache_key))
+ (artifact.name, artifact.source.cache_key))
request = distbuild.WorkerBuildRequest(artifact,
self._request['id'])
self.mainloop.queue_event(distbuild.WorkerBuildQueuer, request)
@@ -540,7 +541,7 @@ class BuildController(distbuild.StateMachine):
def _find_artifact(self, cache_key):
artifacts = map_build_graph(self._artifact, lambda a: a)
- wanted = [a for a in artifacts if a.cache_key == cache_key]
+ wanted = [a for a in artifacts if a.source.cache_key == cache_key]
if wanted:
return wanted[0]
else:
@@ -637,7 +638,7 @@ class BuildController(distbuild.StateMachine):
baseurl = urlparse.urljoin(
self._artifact_cache_server, '/1.0/artifacts')
filename = ('%s.%s.%s' %
- (self._artifact.cache_key,
+ (self._artifact.source.cache_key,
self._artifact.source.morphology['kind'],
self._artifact.name))
url = '%s?filename=%s' % (baseurl, urllib.quote(filename))
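Every hunk in this file is the same move: `dependencies` and `cache_key` now
live on Source rather than Artifact, so the controller reaches them through
`artifact.source`. A minimal sketch of the traversal pattern the code above
now relies on (these classes are illustrative stand-ins, not morphlib's real
types):

    # Stand-ins modelling only the attributes map_build_graph() uses.
    class Source(object):
        def __init__(self, cache_key, dependencies=None):
            self.cache_key = cache_key
            self.dependencies = dependencies or []  # list of Artifact

    class Artifact(object):
        def __init__(self, name, source):
            self.name = name
            self.source = source

    def map_build_graph(artifact, callback):
        '''Walk the graph through artifact.source.dependencies.'''
        result = []
        done = set()
        queue = [artifact]
        while queue:
            a = queue.pop()
            if a not in done:
                result.append(callback(a))
                queue.extend(a.source.dependencies)
                done.add(a)
        return result

    dep = Artifact('dep', Source('key1'))
    root = Artifact('root', Source('key2', [dep]))
    assert map_build_graph(root, lambda a: a.name) == ['root', 'dep']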
diff --git a/distbuild/initiator.py b/distbuild/initiator.py
index b60700fd..b0993aa3 100644
--- a/distbuild/initiator.py
+++ b/distbuild/initiator.py
@@ -18,6 +18,7 @@
import cliapp
import logging
+import os
import random
import sys
@@ -48,6 +49,7 @@ class Initiator(distbuild.StateMachine):
self._morphology = morphology
self._steps = None
self._step_outputs = {}
+ self._step_output_dir = app.settings['initiator-step-output-dir']
self.debug_transitions = False
def setup(self):
@@ -120,7 +122,11 @@ class Initiator(distbuild.StateMachine):
def _open_output(self, msg):
assert msg['step_name'] not in self._step_outputs
- filename = 'build-step-%s.log' % msg['step_name']
+ if self._step_output_dir:
+ filename = os.path.join(self._step_output_dir,
+ 'build-step-%s.log' % msg['step_name'])
+ else:
+ filename = '/dev/null'
f = open(filename, 'a')
self._step_outputs[msg['step_name']] = f
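The initiator now writes per-step logs under a configurable directory
(`initiator-step-output-dir`) and discards them when the setting is empty. A
small sketch of just that path logic, pulled out of _open_output() above:

    import os

    def step_log_path(step_output_dir, step_name):
        # With no output directory configured, step output is
        # thrown away via /dev/null.
        if step_output_dir:
            return os.path.join(step_output_dir,
                                'build-step-%s.log' % step_name)
        return '/dev/null'

    assert step_log_path('', 'gcc') == '/dev/null'
    assert step_log_path('/tmp/logs', 'gcc') == '/tmp/logs/build-step-gcc.log'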
diff --git a/distbuild/initiator_connection.py b/distbuild/initiator_connection.py
index 0f009fcc..db982230 100644
--- a/distbuild/initiator_connection.py
+++ b/distbuild/initiator_connection.py
@@ -171,7 +171,7 @@ class InitiatorConnection(distbuild.StateMachine):
'name': distbuild.build_step_name(artifact),
'build-depends': [
distbuild.build_step_name(x)
- for x in artifact.dependencies
+ for x in artifact.source.dependencies
]
}
diff --git a/distbuild/jm.py b/distbuild/jm.py
index 513c69fa..615100e4 100644
--- a/distbuild/jm.py
+++ b/distbuild/jm.py
@@ -22,6 +22,7 @@ import logging
import os
import socket
import sys
+import yaml
from sm import StateMachine
from stringbuffer import StringBuffer
@@ -79,7 +80,12 @@ class JsonMachine(StateMachine):
def send(self, msg):
'''Send a message to the other side.'''
- self.sockbuf.write('%s\n' % json.dumps(msg))
+ if self.debug_json:
+ logging.debug('JsonMachine: Sending message %s' % repr(msg))
+ s = json.dumps(yaml.safe_dump(msg))
+ if self.debug_json:
+ logging.debug('JsonMachine: As %s' % repr(s))
+ self.sockbuf.write('%s\n' % s)
def close(self):
'''Tell state machine it should shut down.
@@ -103,7 +109,7 @@ class JsonMachine(StateMachine):
line = line.rstrip()
if self.debug_json:
logging.debug('JsonMachine: line: %s' % repr(line))
- msg = json.loads(line)
+ msg = yaml.load(json.loads(line))
self.mainloop.queue_event(self, JsonNewMessage(msg))
def _send_eof(self, event_source, event):
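The wire format becomes a YAML document wrapped inside a JSON string, one
message per line, presumably so that integer dict keys (such as the id()
values used by serialise.py below) survive the round trip; plain JSON coerces
dict keys to strings. A round-trip sketch of the framing, using
yaml.safe_load/safe_dump where the code above calls yaml.load/yaml.safe_dump:

    import json
    import yaml

    def encode(msg):
        # As in JsonMachine.send(): YAML-dump the message, then JSON-wrap
        # the string so it stays a single newline-terminated line.
        return '%s\n' % json.dumps(yaml.safe_dump(msg))

    def decode(line):
        # The receive path in reverse: unwrap the JSON, then parse the YAML.
        return yaml.safe_load(json.loads(line.rstrip()))

    msg = {42: 'exec-request'}   # the integer key survives the round trip
    assert decode(encode(msg)) == msg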
diff --git a/distbuild/serialise.py b/distbuild/serialise.py
index d410b6cf..a7c6c4b9 100644
--- a/distbuild/serialise.py
+++ b/distbuild/serialise.py
@@ -17,16 +17,12 @@
import json
+import yaml
import morphlib
import logging
-morphology_attributes = [
- 'needs_artifact_metadata_cached',
-]
-
-
def serialise_artifact(artifact):
'''Serialise an Artifact object and its dependencies into string form.'''
@@ -34,93 +30,88 @@ def serialise_artifact(artifact):
result = {}
for key in morphology.keys():
result[key] = morphology[key]
- for x in morphology_attributes:
- result['__%s' % x] = getattr(morphology, x)
return result
- def encode_source(source):
+ def encode_source(source, prune_leaf=False):
source_dic = {
+ 'name': source.name,
'repo': None,
'repo_name': source.repo_name,
'original_ref': source.original_ref,
'sha1': source.sha1,
'tree': source.tree,
- 'morphology': encode_morphology(source.morphology),
+ 'morphology': id(source.morphology),
'filename': source.filename,
-
- # dict keys are converted to strings by json
- # so we encode the artifact ids as strings
- 'artifact_ids': [str(id(artifact)) for (_, artifact)
- in source.artifacts.iteritems()],
+ 'artifact_ids': [],
+ 'cache_id': source.cache_id,
+ 'cache_key': source.cache_key,
+ 'dependencies': [],
}
+ if not prune_leaf:
+ source_dic['artifact_ids'].extend(id(artifact) for (_, artifact)
+ in source.artifacts.iteritems())
+ source_dic['dependencies'].extend(id(d)
+ for d in source.dependencies)
if source.morphology['kind'] == 'chunk':
source_dic['build_mode'] = source.build_mode
source_dic['prefix'] = source.prefix
return source_dic
- def encode_artifact(a, artifacts, source_id):
- if artifact.source.morphology['kind'] == 'system':
+ def encode_artifact(a):
+ if artifact.source.morphology['kind'] == 'system': # pragma: no cover
arch = artifact.source.morphology['arch']
else:
arch = artifact.arch
return {
- 'source_id': source_id,
+ 'source_id': id(a.source),
'name': a.name,
- 'cache_id': a.cache_id,
- 'cache_key': a.cache_key,
- 'dependencies': [str(id(artifacts[id(d)]))
- for d in a.dependencies],
- 'arch': arch
+ 'arch': arch,
+ 'dependents': [id(d)
+ for d in a.dependents],
}
- visited = set()
- def traverse(a):
- visited.add(a)
- for dep in a.dependencies:
- if dep in visited:
- continue
- for ret in traverse(dep):
- yield ret
- yield a
-
-
- artifacts = {}
encoded_artifacts = {}
encoded_sources = {}
+ encoded_morphologies = {}
+ visited_artifacts = {}
- for a in traverse(artifact):
+ for a in artifact.walk():
if id(a.source) not in encoded_sources:
- if a.source.morphology['kind'] == 'chunk':
- for (_, sa) in a.source.artifacts.iteritems():
- if id(sa) not in artifacts:
- artifacts[id(sa)] = sa
- encoded_artifacts[id(sa)] = encode_artifact(sa,
- artifacts, id(a.source))
- else:
- # We create separate sources for strata and systems,
- # this is a bit of a hack, but needed to allow
- # us to build strata and systems independently
-
- s = a.source
- t = morphlib.source.Source(s.repo_name, s.original_ref,
- s.sha1, s.tree, s.morphology, s.filename)
-
- t.artifacts = {a.name: a}
- a.source = t
-
+ for sa in a.source.artifacts.itervalues():
+ if id(sa) not in encoded_artifacts:
+ visited_artifacts[id(sa)] = sa
+ encoded_artifacts[id(sa)] = encode_artifact(sa)
+ encoded_morphologies[id(a.source.morphology)] = \
+ encode_morphology(a.source.morphology)
encoded_sources[id(a.source)] = encode_source(a.source)
- if id(a) not in artifacts:
- artifacts[id(a)] = a
- encoded_artifacts[id(a)] = encode_artifact(a, artifacts,
- id(a.source))
-
- encoded_artifacts['_root'] = str(id(artifact))
-
- return json.dumps({'sources': encoded_sources,
- 'artifacts': encoded_artifacts})
+ if id(a) not in encoded_artifacts: # pragma: no cover
+ visited_artifacts[id(a)] = a
+ encoded_artifacts[id(a)] = encode_artifact(a)
+
+ # Include one level of dependents above encoded artifacts, as we need
+ # them to be able to tell whether two sources are in the same stratum.
+ for a in visited_artifacts.itervalues():
+ for source in a.dependents: # pragma: no cover
+ if id(source) not in encoded_sources:
+ encoded_morphologies[id(source.morphology)] = \
+ encode_morphology(source.morphology)
+ encoded_sources[id(source)] = \
+ encode_source(source, prune_leaf=True)
+
+ content = {
+ 'sources': encoded_sources,
+ 'artifacts': encoded_artifacts,
+ 'morphologies': encoded_morphologies,
+ 'root_artifact': id(artifact),
+ 'default_split_rules': {
+ 'chunk': morphlib.artifactsplitrule.DEFAULT_CHUNK_RULES,
+ 'stratum': morphlib.artifactsplitrule.DEFAULT_STRATUM_RULES,
+ },
+ }
+ return json.dumps(yaml.dump(content))
def deserialise_artifact(encoded):
@@ -141,38 +132,25 @@ def deserialise_artifact(encoded):
'''
- class FakeMorphology(dict):
-
- def get_commands(self, which):
- '''Get commands to run from a morphology or build system'''
- if self[which] is None:
- attr = '_'.join(which.split('-'))
- bs = morphlib.buildsystem.lookup_build_system(
- self['build-system'])
- return getattr(bs, attr)
- else:
- return self[which]
-
- morphology = FakeMorphology(le_dict)
- for x in morphology_attributes:
- setattr(morphology, x, le_dict['__%s' % x])
- del morphology['__%s' % x]
- return morphology
-
- def decode_source(le_dict):
+ return morphlib.morphology.Morphology(le_dict)
+
+ def decode_source(le_dict, morphology, split_rules):
'''Convert a dict into a Source object.'''
- morphology = decode_morphology(le_dict['morphology'])
- source = morphlib.source.Source(le_dict['repo_name'],
+ source = morphlib.source.Source(le_dict['name'],
+ le_dict['repo_name'],
le_dict['original_ref'],
le_dict['sha1'],
le_dict['tree'],
morphology,
- le_dict['filename'])
+ le_dict['filename'],
+ split_rules)
if morphology['kind'] == 'chunk':
source.build_mode = le_dict['build_mode']
source.prefix = le_dict['prefix']
+ source.cache_id = le_dict['cache_id']
+ source.cache_key = le_dict['cache_key']
return source
def decode_artifact(artifact_dict, source):
@@ -183,48 +161,57 @@ def deserialise_artifact(encoded):
'''
artifact = morphlib.artifact.Artifact(source, artifact_dict['name'])
- artifact.cache_id = artifact_dict['cache_id']
- artifact.cache_key = artifact_dict['cache_key']
artifact.arch = artifact_dict['arch']
artifact.source = source
return artifact
- le_dicts = json.loads(encoded)
+ le_dicts = yaml.load(json.loads(encoded))
artifacts_dict = le_dicts['artifacts']
sources_dict = le_dicts['sources']
-
- artifact_ids = ([artifacts_dict['_root']] +
- filter(lambda k: k != '_root', artifacts_dict.keys()))
-
- source_ids = [sid for sid in sources_dict.keys()]
+ morphologies_dict = le_dicts['morphologies']
+ root_artifact = le_dicts['root_artifact']
+ assert root_artifact in artifacts_dict
artifacts = {}
sources = {}
-
- for source_id in source_ids:
+ morphologies = {id: decode_morphology(d)
+ for (id, d) in morphologies_dict.iteritems()}
+
+ # Decode sources
+ for source_id, source_dict in sources_dict.iteritems():
+ morphology = morphologies[source_dict['morphology']]
+ kind = morphology['kind']
+ ruler = getattr(morphlib.artifactsplitrule, 'unify_%s_matches' % kind)
+ if kind in ('chunk', 'stratum'):
+ rules = ruler(morphology, le_dicts['default_split_rules'][kind])
+ else: # pragma: no cover
+ rules = ruler(morphology)
+ sources[source_id] = decode_source(source_dict, morphology, rules)
+
+ # decode artifacts
+ for artifact_id, artifact_dict in artifacts_dict.iteritems():
+ source_id = artifact_dict['source_id']
+ source = sources[source_id]
+ artifact = decode_artifact(artifact_dict, source)
+ artifacts[artifact_id] = artifact
+
+ # add source artifacts reference
+ for source_id, source in sources.iteritems():
source_dict = sources_dict[source_id]
- sources[source_id] = decode_source(source_dict)
-
- # clear the source artifacts that get automatically generated
- # we want to add the ones that were sent to us
- sources[source_id].artifacts = {}
- source_artifacts = source_dict['artifact_ids']
-
- for artifact_id in source_artifacts:
- if artifact_id not in artifacts:
- artifact_dict = artifacts_dict[artifact_id]
- artifact = decode_artifact(artifact_dict, sources[source_id])
-
- artifacts[artifact_id] = artifact
-
- key = artifacts[artifact_id].name
- sources[source_id].artifacts[key] = artifacts[artifact_id]
-
- # now add the dependencies
- for artifact_id in artifact_ids:
- artifact = artifacts[artifact_id]
- artifact.dependencies = [artifacts[aid] for aid in
- artifacts_dict[artifact_id]['dependencies']]
-
- return artifacts[artifacts_dict['_root']]
+ source.artifacts = {artifacts[a].name: artifacts[a]
+ for a in source_dict['artifact_ids']}
+
+ # add source dependencies
+ for source_id, source_dict in sources_dict.iteritems():
+ source = sources[source_id]
+ source.dependencies = [artifacts[aid]
+ for aid in source_dict['dependencies']]
+
+ # add artifact dependents
+ for artifact_id, artifact in artifacts.iteritems():
+ artifact_dict = artifacts_dict[artifact_id]
+ artifact.dependents = [sources[sid]
+ for sid in artifact_dict['dependents']]
+
+ return artifacts[root_artifact]
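The rewritten serialiser flattens the whole build graph into id()-keyed
tables (sources, artifacts, morphologies), and the deserialiser reconnects
the cross-references in separate passes once every object exists; that is
what lets a shared dependency come back as a single shared object. A toy
sketch of the pattern, with plain dicts standing in for morphlib objects:

    import yaml

    def encode_graph(root):
        # Key every node by id(), as serialise_artifact() does above;
        # references are stored as integer ids.
        nodes = {}
        def visit(node):
            if id(node) not in nodes:
                nodes[id(node)] = {'name': node['name'],
                                   'deps': [id(d) for d in node['deps']]}
                for d in node['deps']:
                    visit(d)
        visit(root)
        return yaml.safe_dump({'nodes': nodes, 'root': id(root)})

    def decode_graph(text):
        data = yaml.safe_load(text)
        # First pass: materialise every node without its references.
        nodes = {k: {'name': v['name'], 'deps': []}
                 for k, v in data['nodes'].items()}
        # Second pass: re-link references through the id table.
        for k, v in data['nodes'].items():
            nodes[k]['deps'] = [nodes[i] for i in v['deps']]
        return nodes[data['root']]

    leaf = {'name': 'leaf', 'deps': []}
    root = {'name': 'root', 'deps': [leaf, leaf]}   # shared reference
    decoded = decode_graph(encode_graph(root))
    assert decoded['deps'][0] is decoded['deps'][1]  # sharing preserved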
diff --git a/distbuild/serialise_tests.py b/distbuild/serialise_tests.py
index 2ad3a384..d80c3dd7 100644
--- a/distbuild/serialise_tests.py
+++ b/distbuild/serialise_tests.py
@@ -23,13 +23,22 @@ import distbuild
class MockMorphology(object):
- def __init__(self, name):
+ def __init__(self, name, kind):
self.dict = {
'name': '%s.morphology.name' % name,
- 'kind': '%s.morphology.kind' % name,
+ 'kind': kind,
+ 'chunks': [],
+ 'products': [
+ {
+ 'artifact': name,
+ 'include': [r'.*'],
+ },
+ ],
}
- self.needs_staging_area = None
- self.needs_artifact_metadata_cached = None
+
+ @property
+ def needs_artifact_metadata_cached(self):
+ return self.dict['kind'] == 'stratum'
def keys(self):
return self.dict.keys()
@@ -40,36 +49,57 @@ class MockMorphology(object):
class MockSource(object):
- def __init__(self, name):
+ build_mode = 'staging'
+ prefix = '/usr'
+ def __init__(self, name, kind):
+ self.name = name
self.repo = None
self.repo_name = '%s.source.repo_name' % name
self.original_ref = '%s.source.original_ref' % name
self.sha1 = '%s.source.sha1' % name
self.tree = '%s.source.tree' % name
- self.morphology = MockMorphology(name)
+ self.morphology = MockMorphology(name, kind)
self.filename = '%s.source.filename' % name
-
-
-class MockArtifact(object):
-
- def __init__(self, name):
- self.source = MockSource(name)
- self.name = name
+ self.dependencies = []
self.cache_id = {
'blip': '%s.blip' % name,
'integer': 42,
}
self.cache_key = '%s.cache_key' % name
- self.dependencies = []
+ self.artifacts = {}
+
+
+class MockArtifact(object):
+
+ arch = 'testarch'
+
+ def __init__(self, name, kind):
+ self.source = MockSource(name, kind)
+ self.source.artifacts = {name: self}
+ self.name = name
+ self.dependents = []
+
+ def walk(self): # pragma: no cover
+ done = set()
+
+ def depth_first(a):
+ if a not in done:
+ done.add(a)
+ for dep in a.source.dependencies:
+ for ret in depth_first(dep):
+ yield ret
+ yield a
+
+ return list(depth_first(self))
class SerialisationTests(unittest.TestCase):
def setUp(self):
- self.art1 = MockArtifact('name1')
- self.art2 = MockArtifact('name2')
- self.art3 = MockArtifact('name3')
- self.art4 = MockArtifact('name4')
+ self.art1 = MockArtifact('name1', 'stratum')
+ self.art2 = MockArtifact('name2', 'chunk')
+ self.art3 = MockArtifact('name3', 'chunk')
+ self.art4 = MockArtifact('name4', 'chunk')
def assertEqualMorphologies(self, a, b):
self.assertEqual(sorted(a.keys()), sorted(b.keys()))
@@ -77,11 +107,8 @@ class SerialisationTests(unittest.TestCase):
a_values = [a[k] for k in keys]
b_values = [b[k] for k in keys]
self.assertEqual(a_values, b_values)
- self.assertEqual(a.needs_staging_area, b.needs_staging_area)
self.assertEqual(a.needs_artifact_metadata_cached,
b.needs_artifact_metadata_cached)
- self.assertEqual(a.needs_staging_area,
- b.needs_staging_area)
def assertEqualSources(self, a, b):
self.assertEqual(a.repo, b.repo)
@@ -95,30 +122,29 @@ class SerialisationTests(unittest.TestCase):
def assertEqualArtifacts(self, a, b):
self.assertEqualSources(a.source, b.source)
self.assertEqual(a.name, b.name)
- self.assertEqual(a.cache_id, b.cache_id)
- self.assertEqual(a.cache_key, b.cache_key)
- self.assertEqual(len(a.dependencies), len(b.dependencies))
- for i in range(len(a.dependencies)):
- self.assertEqualArtifacts(a.dependencies[i], b.dependencies[i])
+ self.assertEqual(a.source.cache_id, b.source.cache_id)
+ self.assertEqual(a.source.cache_key, b.source.cache_key)
+ self.assertEqual(len(a.source.dependencies),
+ len(b.source.dependencies))
+ for i in range(len(a.source.dependencies)):
+ self.assertEqualArtifacts(a.source.dependencies[i],
+ b.source.dependencies[i])
def verify_round_trip(self, artifact):
encoded = distbuild.serialise_artifact(artifact)
decoded = distbuild.deserialise_artifact(encoded)
self.assertEqualArtifacts(artifact, decoded)
- def key(a):
- return a.cache_key
-
objs = {}
queue = [decoded]
while queue:
obj = queue.pop()
- k = key(obj)
+ k = obj.source.cache_key
if k in objs:
self.assertTrue(obj is objs[k])
else:
objs[k] = obj
- queue.extend(obj.dependencies)
+ queue.extend(obj.source.dependencies)
def test_returns_string(self):
encoded = distbuild.serialise_artifact(self.art1)
@@ -128,21 +154,21 @@ class SerialisationTests(unittest.TestCase):
self.verify_round_trip(self.art1)
def test_works_with_single_dependency(self):
- self.art1.dependencies = [self.art2]
+ self.art1.source.dependencies = [self.art2]
self.verify_round_trip(self.art1)
def test_works_with_two_dependencies(self):
- self.art1.dependencies = [self.art2, self.art3]
+ self.art1.source.dependencies = [self.art2, self.art3]
self.verify_round_trip(self.art1)
def test_works_with_two_levels_of_dependencies(self):
- self.art2.dependencies = [self.art4]
- self.art1.dependencies = [self.art2, self.art3]
+ self.art2.source.dependencies = [self.art4]
+ self.art1.source.dependencies = [self.art2, self.art3]
self.verify_round_trip(self.art1)
def test_works_with_dag(self):
- self.art2.dependencies = [self.art4]
- self.art3.dependencies = [self.art4]
- self.art1.dependencies = [self.art2, self.art3]
+ self.art2.source.dependencies = [self.art4]
+ self.art3.source.dependencies = [self.art4]
+ self.art1.source.dependencies = [self.art2, self.art3]
self.verify_round_trip(self.art1)
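A quick sanity check of the walk() helper added above, using this file's own
MockArtifact class: dependencies are yielded before their dependents, so the
root artifact comes out last.

    a4 = MockArtifact('name4', 'chunk')
    a2 = MockArtifact('name2', 'chunk')
    a2.source.dependencies = [a4]
    a1 = MockArtifact('name1', 'stratum')
    a1.source.dependencies = [a2]
    assert [a.name for a in a1.walk()] == ['name4', 'name2', 'name1']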
diff --git a/distbuild/sockserv.py b/distbuild/sockserv.py
index a5215e79..156394e2 100644
--- a/distbuild/sockserv.py
+++ b/distbuild/sockserv.py
@@ -26,15 +26,20 @@ class ListenServer(StateMachine):
'''Listen for new connections on a port, send events for them.'''
- def __init__(self, addr, port, machine, extra_args=None):
+ def __init__(self, addr, port, machine, extra_args=None, port_file=''):
StateMachine.__init__(self, 'listening')
self._addr = addr
self._port = port
self._machine = machine
self._extra_args = extra_args or []
+ self._port_file = port_file
def setup(self):
src = ListeningSocketEventSource(self._addr, self._port)
+ if self._port_file:
+ host, port = src.sock.getsockname()
+ with open(self._port_file, 'w') as f:
+ f.write('%s\n' % port)
self.mainloop.add_event_source(src)
spec = [
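The new port_file argument lets a caller bind port 0, have the kernel pick a
free port, and then read the assigned port back from a file. The core of that
trick in isolation (the file path here is illustrative):

    import socket

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.bind(('127.0.0.1', 0))     # port 0: kernel assigns a free port
    sock.listen(1)
    host, port = sock.getsockname()
    with open('/tmp/listener.port', 'w') as f:
        f.write('%s\n' % port)      # as ListenServer.setup() does above
    sock.close()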
diff --git a/distbuild/worker_build_scheduler.py b/distbuild/worker_build_scheduler.py
index 6cda5972..be732153 100644
--- a/distbuild/worker_build_scheduler.py
+++ b/distbuild/worker_build_scheduler.py
@@ -262,13 +262,13 @@ class WorkerBuildQueuer(distbuild.StateMachine):
logging.debug('Worker build step already started: %s' %
event.artifact.basename())
progress = WorkerBuildStepAlreadyStarted(event.initiator_id,
- event.artifact.cache_key, job.who.name())
+ event.artifact.source.cache_key, job.who.name())
else:
logging.debug('Job created but not building yet '
'(waiting for a worker to become available): %s' %
event.artifact.basename())
progress = WorkerBuildWaiting(event.initiator_id,
- event.artifact.cache_key)
+ event.artifact.source.cache_key)
self.mainloop.queue_event(WorkerConnection, progress)
else:
@@ -279,7 +279,7 @@ class WorkerBuildQueuer(distbuild.StateMachine):
self._give_job(job)
else:
progress = WorkerBuildWaiting(event.initiator_id,
- event.artifact.cache_key)
+ event.artifact.source.cache_key)
self.mainloop.queue_event(WorkerConnection, progress)
def _handle_cancel(self, event_source, event):
@@ -483,7 +483,7 @@ class WorkerConnection(distbuild.StateMachine):
% (self._worker_name, msg))
started = WorkerBuildStepStarted(self._job.initiators,
- self._job.artifact.cache_key, self.name())
+ self._job.artifact.source.cache_key, self.name())
self.mainloop.queue_event(WorkerConnection, _JobStarted(self._job))
self.mainloop.queue_event(WorkerConnection, started)
@@ -510,7 +510,7 @@ class WorkerConnection(distbuild.StateMachine):
logging.debug('WC: emitting: %s', repr(new))
self.mainloop.queue_event(
WorkerConnection,
- WorkerBuildOutput(new, self._job.artifact.cache_key))
+ WorkerBuildOutput(new, self._job.artifact.source.cache_key))
def _handle_exec_response(self, msg):
logging.debug('WC: finished building: %s' % self._job.artifact.name)
@@ -522,7 +522,8 @@ class WorkerConnection(distbuild.StateMachine):
if new['exit'] != 0:
# Build failed.
- new_event = WorkerBuildFailed(new, self._job.artifact.cache_key)
+ new_event = WorkerBuildFailed(new,
+ self._job.artifact.source.cache_key)
self.mainloop.queue_event(WorkerConnection, new_event)
self.mainloop.queue_event(WorkerConnection, _JobFailed(self._job))
self.mainloop.queue_event(self, _BuildFailed())
@@ -556,10 +557,6 @@ class WorkerConnection(distbuild.StateMachine):
if kind == 'stratum':
suffixes.append(filename + '.meta')
- elif kind == 'system':
- # FIXME: This is a really ugly hack.
- if filename.endswith('-rootfs'):
- suffixes.append(filename[:-len('-rootfs')] + '-kernel')
suffixes = [urllib.quote(x) for x in suffixes]
suffixes = ','.join(suffixes)
@@ -571,7 +568,7 @@ class WorkerConnection(distbuild.StateMachine):
'/1.0/fetch?host=%s:%d&cacheid=%s&artifacts=%s' %
(urllib.quote(worker_host),
self._worker_cache_server_port,
- urllib.quote(self._job.artifact.cache_key),
+ urllib.quote(self._job.artifact.source.cache_key),
suffixes))
msg = distbuild.message(
@@ -582,7 +579,7 @@ class WorkerConnection(distbuild.StateMachine):
self.mainloop.queue_event(distbuild.HelperRouter, req)
progress = WorkerBuildCaching(self._job.initiators,
- self._job.artifact.cache_key)
+ self._job.artifact.source.cache_key)
self.mainloop.queue_event(WorkerConnection, progress)
def _maybe_handle_helper_result(self, event_source, event):
@@ -594,7 +591,8 @@ class WorkerConnection(distbuild.StateMachine):
logging.debug('Shared artifact cache population done')
new_event = WorkerBuildFinished(
- self._exec_response_msg, self._job.artifact.cache_key)
+ self._exec_response_msg,
+ self._job.artifact.source.cache_key)
self.mainloop.queue_event(WorkerConnection, new_event)
self.mainloop.queue_event(self, _Cached())
else:
@@ -612,7 +610,8 @@ class WorkerConnection(distbuild.StateMachine):
_JobFailed(self._job))
new_event = WorkerBuildFailed(
- self._exec_response_msg, self._job.artifact.cache_key)
+ self._exec_response_msg,
+ self._job.artifact.source.cache_key)
self.mainloop.queue_event(WorkerConnection, new_event)
self.mainloop.queue_event(self, _BuildFailed())
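Every hunk in this file is the same mechanical substitution as in
build_controller.py: the cache key now belongs to the Source, so all
artifacts split out of one source share it. Illustrative classes (not
morphlib's) showing the invariant the scheduler now depends on:

    class Source(object):
        def __init__(self, cache_key):
            self.cache_key = cache_key
            self.artifacts = {}

    class Artifact(object):
        def __init__(self, name, source):
            self.name = name
            self.source = source
            source.artifacts[name] = self

    s = Source('0123abcd')                    # hypothetical cache key
    devel = Artifact('foo-devel', s)
    runtime = Artifact('foo-runtime', s)
    assert devel.source.cache_key == runtime.source.cache_key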