author    | Sam Thursfield <sam.thursfield@codethink.co.uk> | 2015-04-21 17:20:17 +0000
committer | Sam Thursfield <sam.thursfield@codethink.co.uk> | 2015-04-21 17:20:17 +0000
commit    | 9fd7ef6fe83282139f7f636a955bcf069e8fb79c (patch)
tree      | 7f2c96b97ab60e1280752a6a41797e04f217a0ef
parent    | 006096c5c8b8bdcf3fe89d9c4d7585aac5981d33 (diff)
parent    | 0e03798a5a57b877297f22382bc28ed5cd488082 (diff)
download  | morph-9fd7ef6fe83282139f7f636a955bcf069e8fb79c.tar.gz
Merge branch 'review/adam_coldrick/speed-up-artifact-serialisation' into sam/distbuild-megamix
Change-Id: I8ce95cb111c2c84f7774a37abcfea4a85262c32a
-rw-r--r-- | distbuild/build_controller.py        |  29
-rw-r--r-- | distbuild/serialise.py               | 273
-rw-r--r-- | distbuild/serialise_tests.py         |  77
-rw-r--r-- | distbuild/worker_build_scheduler.py  |  29
-rw-r--r-- | morphlib/plugins/distbuild_plugin.py |  30
5 files changed, 186 insertions, 252 deletions
diff --git a/distbuild/build_controller.py b/distbuild/build_controller.py
index 3971fe68..35b231f9 100644
--- a/distbuild/build_controller.py
+++ b/distbuild/build_controller.py
@@ -113,7 +113,7 @@ class _Abort(object):
 def build_step_name(artifact):
     '''Return user-comprehensible name for a given artifact.'''

-    return artifact.source.name
+    return artifact.source_name


 def map_build_graph(artifact, callback, components=[]):
@@ -135,7 +135,7 @@ def map_build_graph(artifact, callback, components=[]):
         a = queue.pop()
         if a not in done:
             result.append(callback(a))
-            queue.extend(a.source.dependencies)
+            queue.extend(a.dependencies)
             done.add(a)
             if a in components:
                 mapped_components.append(a)
@@ -145,8 +145,7 @@ def find_artifacts(components, artifact):
     found = []
     for a in artifact.walk():
-        name = a.source.morphology['name']
-        if name in components:
+        if a.name in components:
             found.append(a)
     return found


@@ -428,7 +427,7 @@ class BuildController(distbuild.StateMachine):
         def is_ready_to_build(artifact):
             return (artifact.state == UNBUILT and
                     all(a.state == BUILT
-                        for a in artifact.source.dependencies))
+                        for a in artifact.dependencies))

         artifacts, _ = map_build_graph(self._artifact, lambda a: a,
                                        self._components)
@@ -472,19 +471,19 @@ class BuildController(distbuild.StateMachine):

         logging.debug(
             'Requesting worker-build of %s (%s)' %
-                (artifact.name, artifact.source.cache_key))
+                (artifact.name, artifact.cache_key))
         request = distbuild.WorkerBuildRequest(artifact,
                                                self._request['id'])
         self.mainloop.queue_event(distbuild.WorkerBuildQueuer, request)

         artifact.state = BUILDING
-        if artifact.source.morphology['kind'] == 'chunk':
+        if artifact.kind == 'chunk':
             # Chunk artifacts are not built independently
             # so when we're building any chunk artifact
             # we're also building all the chunk artifacts
             # in this source
             for a in ready:
-                if a.source == artifact.source:
+                if a.cache_key == artifact.cache_key:
                     a.state = BUILDING

     def _maybe_notify_initiator_disconnected(self, event_source, event):
@@ -588,7 +587,7 @@ class BuildController(distbuild.StateMachine):
     def _find_artifact(self, cache_key):
         artifacts, _ = map_build_graph(self._artifact, lambda a: a,
                                        self._components)
-        wanted = [a for a in artifacts if a.source.cache_key == cache_key]
+        wanted = [a for a in artifacts if a.cache_key == cache_key]
         if wanted:
             return wanted[0]
         else:
@@ -614,10 +613,10 @@
         artifact.state = BUILT

         def set_state(a):
-            if a.source == artifact.source:
+            if a.cache_key == artifact.cache_key:
                 a.state = BUILT

-        if artifact.source.morphology['kind'] == 'chunk':
+        if artifact.kind == 'chunk':
             # Building a single chunk artifact
             # yields all chunk artifacts for the given source
             # so we set the state of this source's artifacts
@@ -677,14 +676,14 @@ class BuildController(distbuild.StateMachine):
         urls = []
         for c in self._components:
             name = ('%s.%s.%s' %
-                    (c.source.cache_key,
-                     c.source.morphology['kind'],
+                    (c.cache_key,
+                     c.kind,
                      c.name))
             urls.append('%s?filename=%s' % (baseurl, urllib.quote(name)))

         if not self._components:
             name = ('%s.%s.%s' %
-                    (self._artifact.source.cache_key,
-                     self._artifact.source.morphology['kind'],
+                    (self._artifact.cache_key,
+                     self._artifact.kind,
                      self._artifact.name))
             urls.append('%s?filename=%s' % (baseurl, urllib.quote(name)))
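The pattern above repeats throughout the controller: every `artifact.source.<attr>` lookup becomes a flat attribute on the artifact itself, because the graph the controller walks is now made of lightweight references rather than full Source objects. A minimal, self-contained sketch of that traversal follows; `FakeRef` is an invented stand-in (not part of morphlib), and the `components` bookkeeping of the real `map_build_graph` is dropped.

```python
# Illustrative stand-in for the flattened artifact references used after this
# merge: dependencies hang directly off the object, with no .source indirection.
class FakeRef(object):
    def __init__(self, name, cache_key, kind, dependencies=None):
        self.name = name
        self.cache_key = cache_key
        self.kind = kind
        self.dependencies = dependencies or []


def map_build_graph(artifact, callback):
    '''Visit each node of the dependency graph exactly once.'''
    result = []
    done = set()
    queue = [artifact]
    while queue:
        a = queue.pop()
        if a not in done:
            result.append(callback(a))
            queue.extend(a.dependencies)   # note: a.dependencies, not a.source.dependencies
            done.add(a)
    return result


libc = FakeRef('glibc-bins', 'cafe01', 'chunk')
system = FakeRef('base-system', 'def456', 'system', [libc])
print(map_build_graph(system, lambda a: a.name))   # ['base-system', 'glibc-bins']
```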
diff --git a/distbuild/serialise.py b/distbuild/serialise.py
index 3e39e684..5f8872a6 100644
--- a/distbuild/serialise.py
+++ b/distbuild/serialise.py
@@ -16,46 +16,68 @@

 import json
+import logging
 import yaml

 import morphlib
-import logging


-def serialise_artifact(artifact):
+class ArtifactReference(object): # pragma: no cover
+
+    '''Container for some basic information about an artifact.'''
+
+    def __init__(self, basename, encoded):
+        self._basename = basename
+        self._dict = encoded
+
+    def __getattr__(self, name):
+        if not name.startswith('_'):
+            return self._dict.get(name)
+        else:
+            super(ArtifactReference, self).__getattr(name)
+
+    def __setattr__(self, name, val):
+        if not name.startswith('_'):
+            self._dict[name] = val
+        else:
+            super(ArtifactReference, self).__setattr__(name, val)
+
+    def basename(self):
+        return self._basename
+
+    def walk(self):
+        done = set()
+
+        def depth_first(a):
+            if a not in done:
+                done.add(a)
+                for dep in a.dependencies:
+                    for ret in depth_first(dep):
+                        yield ret
+                yield a
+
+        return list(depth_first(self))
+
+
+def serialise_artifact(artifact, repo, ref):
     '''Serialise an Artifact object and its dependencies into string form.'''

-    def encode_morphology(morphology):
-        result = {}
-        for key in morphology.keys():
-            result[key] = morphology[key]
-        return result
-
-    def encode_source(source, prune_leaf=False):
-        source_dic = {
-            'name': source.name,
-            'repo': None,
-            'repo_name': source.repo_name,
-            'original_ref': source.original_ref,
-            'sha1': source.sha1,
-            'tree': source.tree,
-            'morphology': id(source.morphology),
+    def encode_source(source):
+        s_dict = {
             'filename': source.filename,
-            'artifact_ids': [],
-            'cache_id': source.cache_id,
-            'cache_key': source.cache_key,
-            'dependencies': [],
+            'kind': source.morphology['kind'],
+            'source_name': source.name,
+            'source_repo': source.repo_name,
+            'source_ref': source.original_ref,
+            'source_sha1': source.sha1,
+            'source_artifacts': [],
+            'dependencies': []
         }
-        if not prune_leaf:
-            source_dic['artifact_ids'].extend(id(artifact) for (_, artifact)
-                in source.artifacts.iteritems())
-            source_dic['dependencies'].extend(id(d)
-                for d in source.dependencies)
-
-        if source.morphology['kind'] == 'chunk':
-            source_dic['build_mode'] = source.build_mode
-            source_dic['prefix'] = source.prefix
-        return source_dic
+        for dep in source.dependencies:
+            s_dict['dependencies'].append(dep.basename())
+        for sa in source.artifacts:
+            s_dict['source_artifacts'].append(sa)
+        return s_dict

     def encode_artifact(a):
         if artifact.source.morphology['kind'] == 'system': # pragma: no cover
@@ -63,53 +85,61 @@ def serialise_artifact(artifact):
         else:
             arch = artifact.arch

-        return {
-            'source_id': id(a.source),
-            'name': a.name,
+        a_dict = {
             'arch': arch,
-            'dependents': [id(d)
-                for d in a.dependents],
+            'cache_key': a.source.cache_key,
+            'name': a.name,
+            'repo': repo,
+            'ref': ref,
+        }
+        return a_dict
+
+    def encode_artifact_reference(a): # pragma: no cover
+        a_dict = {
+            'arch': a.arch,
+            'cache_key': a.cache_key,
+            'name': a.name,
+            'repo': a.repo,
+            'ref': a.ref
+        }
+        s_dict = {
+            'filename': a.filename,
+            'kind': a.kind,
+            'source_name': a.source_name,
+            'source_repo': a.source_repo,
+            'source_ref': a.source_ref,
+            'source_sha1': a.source_sha1,
+            'source_artifacts': [],
+            'dependencies': []
         }
+        for dep in a.dependencies:
+            s_dict['dependencies'].append(dep.basename())
+        for sa in a.source_artifacts:
+            s_dict['source_artifacts'].append(sa)
+        return a_dict, s_dict

     encoded_artifacts = {}
     encoded_sources = {}
-    encoded_morphologies = {}
-    visited_artifacts = {}
-
-    for a in artifact.walk():
-        if id(a.source) not in encoded_sources:
-            for sa in a.source.artifacts.itervalues():
-                if id(sa) not in encoded_artifacts:
-                    visited_artifacts[id(sa)] = sa
-                    encoded_artifacts[id(sa)] = encode_artifact(sa)
-            encoded_morphologies[id(a.source.morphology)] = \
-                encode_morphology(a.source.morphology)
-            encoded_sources[id(a.source)] = encode_source(a.source)
-
-        if id(a) not in encoded_artifacts: # pragma: no cover
-            visited_artifacts[id(a)] = a
-            encoded_artifacts[id(a)] = encode_artifact(a)
-
-    # Include one level of dependents above encoded artifacts, as we need
-    # them to be able to tell whether two sources are in the same stratum.
-    for a in visited_artifacts.itervalues():
-        for source in a.dependents: # pragma: no cover
-            if id(source) not in encoded_sources:
-                encoded_morphologies[id(source.morphology)] = \
-                    encode_morphology(source.morphology)
-                encoded_sources[id(source)] = \
-                    encode_source(source, prune_leaf=True)
+
+    if isinstance(artifact, ArtifactReference): # pragma: no cover
+        root_filename = artifact.root_filename
+        a_dict, s_dict = encode_artifact_reference(artifact)
+        encoded_artifacts[artifact.basename()] = a_dict
+        encoded_sources[artifact.cache_key] = s_dict
+    else:
+        root_filename = artifact.source.filename
+        for a in artifact.walk():
+            if a.basename() not in encoded_artifacts: # pragma: no cover
+                encoded_artifacts[a.basename()] = encode_artifact(a)
+                encoded_sources[a.source.cache_key] = encode_source(a.source)

     content = {
-        'sources': encoded_sources,
+        'root-artifact': artifact.basename(),
+        'root-filename': root_filename,
         'artifacts': encoded_artifacts,
-        'morphologies': encoded_morphologies,
-        'root_artifact': id(artifact),
-        'default_split_rules': {
-            'chunk': morphlib.artifactsplitrule.DEFAULT_CHUNK_RULES,
-            'stratum': morphlib.artifactsplitrule.DEFAULT_STRATUM_RULES,
-        },
+        'sources': encoded_sources
     }
+
     return json.dumps(yaml.dump(content))


@@ -122,95 +152,24 @@ def deserialise_artifact(encoded):
     purposes, by Morph.

     '''
-
-    def decode_morphology(le_dict):
-        '''Convert a dict into something that kinda acts like a Morphology.
-
-        As it happens, we don't need the full Morphology so we cheat.
-        Cheating is good.
-
-        '''
-
-        return morphlib.morphology.Morphology(le_dict)
-
-    def decode_source(le_dict, morphology, split_rules):
-        '''Convert a dict into a Source object.'''
-
-        source = morphlib.source.Source(le_dict['name'],
-                                        le_dict['repo_name'],
-                                        le_dict['original_ref'],
-                                        le_dict['sha1'],
-                                        le_dict['tree'],
-                                        morphology,
-                                        le_dict['filename'],
-                                        split_rules)
-
-        if morphology['kind'] == 'chunk':
-            source.build_mode = le_dict['build_mode']
-            source.prefix = le_dict['prefix']
-        source.cache_id = le_dict['cache_id']
-        source.cache_key = le_dict['cache_key']
-        return source
-
-    def decode_artifact(artifact_dict, source):
-        '''Convert dict into an Artifact object.
-
-        Do not set dependencies, that will be dealt with later.
-
-        '''
-
-        artifact = morphlib.artifact.Artifact(source, artifact_dict['name'])
-        artifact.arch = artifact_dict['arch']
-        artifact.source = source
-
-        return artifact
-
-    le_dicts = yaml.load(json.loads(encoded))
-    artifacts_dict = le_dicts['artifacts']
-    sources_dict = le_dicts['sources']
-    morphologies_dict = le_dicts['morphologies']
-    root_artifact = le_dicts['root_artifact']
-
-    assert root_artifact in artifacts_dict
+    content = yaml.load(json.loads(encoded))
+    root = content['root-artifact']
+    encoded_artifacts = content['artifacts']
+    encoded_sources = content['sources']

     artifacts = {}
-    sources = {}
-    morphologies = {id: decode_morphology(d)
-                    for (id, d) in morphologies_dict.iteritems()}
-
-    # Decode sources
-    for source_id, source_dict in sources_dict.iteritems():
-        morphology = morphologies[source_dict['morphology']]
-        kind = morphology['kind']
-        ruler = getattr(morphlib.artifactsplitrule, 'unify_%s_matches' % kind)
-        if kind in ('chunk', 'stratum'):
-            rules = ruler(morphology, le_dicts['default_split_rules'][kind])
-        else: # pragma: no cover
-            rules = ruler(morphology)
-        sources[source_id] = decode_source(source_dict, morphology, rules)

     # decode artifacts
-    for artifact_id, artifact_dict in artifacts_dict.iteritems():
-        source_id = artifact_dict['source_id']
-        source = sources[source_id]
-        artifact = decode_artifact(artifact_dict, source)
-        artifacts[artifact_id] = artifact
-
-    # add source artifacts reference
-    for source_id, source in sources.iteritems():
-        source_dict = sources_dict[source_id]
-        source.artifacts = {artifacts[a].name: artifacts[a]
-                            for a in source_dict['artifact_ids']}
-
-    # add source dependencies
-    for source_id, source_dict in sources_dict.iteritems():
-        source = sources[source_id]
-        source.dependencies = [artifacts[aid]
-                               for aid in source_dict['dependencies']]
-
-    # add artifact dependents
-    for artifact_id, artifact in artifacts.iteritems():
-        artifact_dict = artifacts_dict[artifact_id]
-        artifact.dependents = [sources[sid]
-                               for sid in artifact_dict['dependents']]
-
-    return artifacts[root_artifact]
+    for basename, artifact_dict in encoded_artifacts.iteritems():
+        artifact_dict.update(encoded_sources[artifact_dict['cache_key']])
+        artifact = ArtifactReference(basename, artifact_dict)
+        artifact.root_filename = content['root-filename']
+        artifacts[basename] = artifact
+
+    # add dependencies
+    for basename, a_dict in encoded_artifacts.iteritems():
+        artifact = artifacts[basename]
+        artifact.dependencies = [artifacts.get(dep)
                                 for dep in artifact.dependencies]
+
+    return artifacts[root]
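The payload that falls out of the new code is deliberately flat: one mapping of artifact basenames and one mapping of source records keyed by cache key, double-wrapped as YAML inside JSON, with no id()-keyed object graph. The following is a rough sketch of the round trip using a cut-down ArtifactReference and entirely invented values; the real class also overrides __setattr__ and lives in distbuild/serialise.py, and the real code calls yaml.load rather than yaml.safe_load.

```python
import json
import yaml


class ArtifactReference(object):
    '''Cut-down illustration of the class added in distbuild/serialise.py.'''

    def __init__(self, basename, encoded):
        self._basename = basename
        self._dict = encoded

    def __getattr__(self, name):
        # Unknown attributes are looked up in the encoded dict.
        if not name.startswith('_'):
            return self._dict.get(name)
        raise AttributeError(name)

    def basename(self):
        return self._basename


# Shape of the encoded payload; every value here is made up for illustration.
content = {
    'root-artifact': 'cafe01.chunk.glibc-bins',
    'root-filename': 'strata/core/glibc.morph',
    'artifacts': {
        'cafe01.chunk.glibc-bins': {
            'arch': 'x86_64', 'cache_key': 'cafe01', 'name': 'glibc-bins',
            'repo': 'baserock/definitions', 'ref': 'master',
        },
    },
    'sources': {
        'cafe01': {
            'filename': 'strata/core/glibc.morph', 'kind': 'chunk',
            'source_name': 'glibc', 'source_repo': 'upstream/glibc',
            'source_ref': 'master', 'source_sha1': 'deadbeef',
            'source_artifacts': ['glibc-bins'], 'dependencies': [],
        },
    },
}

encoded = json.dumps(yaml.dump(content))        # same double wrapping as serialise_artifact()

decoded = yaml.safe_load(json.loads(encoded))
basename = decoded['root-artifact']
a_dict = dict(decoded['artifacts'][basename])
a_dict.update(decoded['sources'][a_dict['cache_key']])   # merge artifact + source records
ref = ArtifactReference(basename, a_dict)
print(ref.cache_key, ref.kind, ref.source_repo)          # attributes read through the dict
```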
diff --git a/distbuild/serialise_tests.py b/distbuild/serialise_tests.py
index a0ad78f8..2de3ab85 100644
--- a/distbuild/serialise_tests.py
+++ b/distbuild/serialise_tests.py
@@ -20,32 +20,6 @@ import unittest
 import distbuild


-class MockMorphology(object):
-
-    def __init__(self, name, kind):
-        self.dict = {
-            'name': '%s.morphology.name' % name,
-            'kind': kind,
-            'chunks': [],
-            'products': [
-                {
-                    'artifact': name,
-                    'include': [r'.*'],
-                },
-            ],
-        }
-
-    @property
-    def needs_artifact_metadata_cached(self):
-        return self.dict['kind'] == 'stratum'
-
-    def keys(self):
-        return self.dict.keys()
-
-    def __getitem__(self, key):
-        return self.dict[key]
-
-
 class MockSource(object):

     build_mode = 'staging'
@@ -57,7 +31,7 @@ class MockSource(object):
         self.original_ref = '%s.source.original_ref' % name
         self.sha1 = '%s.source.sha1' % name
         self.tree = '%s.source.tree' % name
-        self.morphology = MockMorphology(name, kind)
+        self.morphology = {'kind': kind}
         self.filename = '%s.source.filename' % name
         self.dependencies = []
         self.cache_id = {
@@ -78,6 +52,11 @@ class MockArtifact(object):
         self.name = name
         self.dependents = []

+    def basename(self):
+        return '%s.%s.%s' % (self.source.cache_key,
+                             self.source.morphology['kind'],
+                             self.name)
+
     def walk(self): # pragma: no cover
         done = set()

@@ -100,53 +79,28 @@ class SerialisationTests(unittest.TestCase):
         self.art3 = MockArtifact('name3', 'chunk')
         self.art4 = MockArtifact('name4', 'chunk')

-    def assertEqualMorphologies(self, a, b):
-        self.assertEqual(sorted(a.keys()), sorted(b.keys()))
-        keys = sorted(a.keys())
-        a_values = [a[k] for k in keys]
-        b_values = [b[k] for k in keys]
-        self.assertEqual(a_values, b_values)
-        self.assertEqual(a.needs_artifact_metadata_cached,
-                         b.needs_artifact_metadata_cached)
-
-    def assertEqualSources(self, a, b):
-        self.assertEqual(a.repo, b.repo)
-        self.assertEqual(a.repo_name, b.repo_name)
-        self.assertEqual(a.original_ref, b.original_ref)
-        self.assertEqual(a.sha1, b.sha1)
-        self.assertEqual(a.tree, b.tree)
-        self.assertEqualMorphologies(a.morphology, b.morphology)
-        self.assertEqual(a.filename, b.filename)
-
-    def assertEqualArtifacts(self, a, b):
-        self.assertEqualSources(a.source, b.source)
-        self.assertEqual(a.name, b.name)
-        self.assertEqual(a.source.cache_id, b.source.cache_id)
-        self.assertEqual(a.source.cache_key, b.source.cache_key)
-        self.assertEqual(len(a.source.dependencies),
-                         len(b.source.dependencies))
-        for i in range(len(a.source.dependencies)):
-            self.assertEqualArtifacts(a.source.dependencies[i],
-                                      b.source.dependencies[i])
-
     def verify_round_trip(self, artifact):
-        encoded = distbuild.serialise_artifact(artifact)
+        encoded = distbuild.serialise_artifact(artifact,
+                                               artifact.source.repo_name,
+                                               artifact.source.sha1)
         decoded = distbuild.deserialise_artifact(encoded)
-        self.assertEqualArtifacts(artifact, decoded)
+        self.assertEqual(artifact.basename(), decoded.basename())

         objs = {}
         queue = [decoded]
         while queue:
             obj = queue.pop()
-            k = obj.source.cache_key
+            k = obj.cache_key
             if k in objs:
                 self.assertTrue(obj is objs[k])
             else:
                 objs[k] = obj
-            queue.extend(obj.source.dependencies)
+            queue.extend(obj.dependencies)

     def test_returns_string(self):
-        encoded = distbuild.serialise_artifact(self.art1)
+        encoded = distbuild.serialise_artifact(self.art1,
+                                               self.art1.source.repo_name,
+                                               self.art1.source.sha1)
         self.assertEqual(type(encoded), str)

     def test_works_without_dependencies(self):
@@ -170,4 +124,3 @@ class SerialisationTests(unittest.TestCase):
         self.art3.source.dependencies = [self.art4]
         self.art1.source.dependencies = [self.art2, self.art3]
         self.verify_round_trip(self.art1)
-
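The tests now compare artifacts by basename rather than by deep structural equality. For reference, the basename convention used throughout the distbuild code after this merge is shown below, assuming the cache key is the usual 64-character hex digest (as implied by the is_system_artifact regex further down).

```python
# '<cache_key>.<kind>.<name>' -- the identifier both MockArtifact.basename()
# and ArtifactReference.basename() produce. Values here are invented.
cache_key = 'f' * 64
kind = 'chunk'
name = 'glibc-bins'

basename = '%s.%s.%s' % (cache_key, kind, name)
print(basename)   # ffff....chunk.glibc-bins
```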
diff --git a/distbuild/worker_build_scheduler.py b/distbuild/worker_build_scheduler.py
index 8b581172..07962e15 100644
--- a/distbuild/worker_build_scheduler.py
+++ b/distbuild/worker_build_scheduler.py
@@ -276,13 +276,13 @@ class WorkerBuildQueuer(distbuild.StateMachine):
                 logging.debug('Worker build step already started: %s' %
                     event.artifact.basename())
                 progress = WorkerBuildStepAlreadyStarted(event.initiator_id,
-                    event.artifact.source.cache_key, job.who.name())
+                    event.artifact.cache_key, job.who.name())
             else:
                 logging.debug('Job created but not building yet '
                     '(waiting for a worker to become available): %s' %
                     event.artifact.basename())
                 progress = WorkerBuildWaiting(event.initiator_id,
-                    event.artifact.source.cache_key)
+                    event.artifact.cache_key)

             self.mainloop.queue_event(WorkerConnection, progress)
         else:
@@ -293,7 +293,7 @@ class WorkerBuildQueuer(distbuild.StateMachine):
                 self._give_job(job)
             else:
                 progress = WorkerBuildWaiting(event.initiator_id,
-                    event.artifact.source.cache_key)
+                    event.artifact.cache_key)
                 self.mainloop.queue_event(WorkerConnection, progress)

     def _handle_cancel(self, event_source, event):
@@ -513,15 +513,18 @@ class WorkerConnection(distbuild.StateMachine):
             '--build-log-on-stdout',
             job.artifact.name,
         ]
+
         msg = distbuild.message('exec-request',
             id=job.id,
            argv=argv,
-            stdin_contents=distbuild.serialise_artifact(job.artifact),
+            stdin_contents=distbuild.serialise_artifact(job.artifact,
+                                                        job.artifact.repo,
+                                                        job.artifact.ref),
         )
         self._jm.send(msg)

         started = WorkerBuildStepStarted(job.initiators,
-            job.artifact.source.cache_key, self.name())
+            job.artifact.cache_key, self.name())

         self.mainloop.queue_event(WorkerConnection, _JobStarted(job))
         self.mainloop.queue_event(WorkerConnection, started)
@@ -557,7 +560,7 @@ class WorkerConnection(distbuild.StateMachine):
         logging.debug('WC: emitting: %s', repr(new))
         self.mainloop.queue_event(
             WorkerConnection,
-            WorkerBuildOutput(new, job.artifact.source.cache_key))
+            WorkerBuildOutput(new, job.artifact.cache_key))

     def _handle_exec_response(self, msg, job):
         '''Handle completion of a job that the worker is or was running.'''
@@ -570,7 +573,7 @@ class WorkerConnection(distbuild.StateMachine):

         if new['exit'] != 0:
             # Build failed.
-            new_event = WorkerBuildFailed(new, job.artifact.source.cache_key)
+            new_event = WorkerBuildFailed(new, job.artifact.cache_key)
             self.mainloop.queue_event(WorkerConnection, new_event)
             self.mainloop.queue_event(WorkerConnection, _JobFailed(job))
             self.mainloop.queue_event(self, _BuildFailed())
@@ -596,10 +599,10 @@ class WorkerConnection(distbuild.StateMachine):
         logging.debug('Requesting shared artifact cache to get artifacts')

         job = self._current_job
-        kind = job.artifact.source.morphology['kind']
+        kind = job.artifact.kind

         if kind == 'chunk':
-            source_artifacts = job.artifact.source.artifacts
+            source_artifacts = job.artifact.source_artifacts

             suffixes = ['%s.%s' % (kind, name) for name in source_artifacts]
             suffixes.append('build-log')
@@ -620,7 +623,7 @@ class WorkerConnection(distbuild.StateMachine):
             '/1.0/fetch?host=%s:%d&cacheid=%s&artifacts=%s' %
             (urllib.quote(worker_host),
              self._worker_cache_server_port,
-             urllib.quote(job.artifact.source.cache_key),
+             urllib.quote(job.artifact.cache_key),
              suffixes))

         msg = distbuild.message(
@@ -631,7 +634,7 @@ class WorkerConnection(distbuild.StateMachine):
         self.mainloop.queue_event(distbuild.HelperRouter, req)

         progress = WorkerBuildCaching(job.initiators,
-            job.artifact.source.cache_key)
+            job.artifact.cache_key)
         self.mainloop.queue_event(WorkerConnection, progress)

     def _maybe_handle_helper_result(self, event_source, event):
@@ -644,7 +647,7 @@ class WorkerConnection(distbuild.StateMachine):

             new_event = WorkerBuildFinished(
                 self._current_job_exec_response,
-                self._current_job.artifact.source.cache_key)
+                self._current_job.artifact.cache_key)
             self.mainloop.queue_event(WorkerConnection, new_event)
             self.mainloop.queue_event(self, _Cached())
         else:
@@ -663,7 +666,7 @@ class WorkerConnection(distbuild.StateMachine):

             new_event = WorkerBuildFailed(
                 self._current_job_exec_response,
-                self._current_job.artifact.source.cache_key)
+                self._current_job.artifact.cache_key)
             self.mainloop.queue_event(WorkerConnection, new_event)
             self.mainloop.queue_event(self, _BuildFailed())

diff --git a/morphlib/plugins/distbuild_plugin.py b/morphlib/plugins/distbuild_plugin.py
index 8aaead10..09669988 100644
--- a/morphlib/plugins/distbuild_plugin.py
+++ b/morphlib/plugins/distbuild_plugin.py
@@ -113,7 +113,9 @@ class SerialiseArtifactPlugin(cliapp.Plugin):
         srcpool = build_command.create_source_pool(
             repo_name, ref, filename, original_ref=original_ref)
         artifact = build_command.resolve_artifacts(srcpool)
-        self.app.output.write(distbuild.serialise_artifact(artifact))
+        self.app.output.write(distbuild.serialise_artifact(artifact,
+                                                           repo_name,
+                                                           ref))
         self.app.output.write('\n')


@@ -137,9 +139,14 @@ class WorkerBuild(cliapp.Plugin):
         distbuild.add_crash_conditions(self.app.settings['crash-condition'])

         serialized = sys.stdin.readline()
-        artifact = distbuild.deserialise_artifact(serialized)
-
+        artifact_reference = distbuild.deserialise_artifact(serialized)
+        bc = morphlib.buildcommand.BuildCommand(self.app)
+        source_pool = bc.create_source_pool(artifact_reference.repo,
+                                            artifact_reference.ref,
+                                            artifact_reference.root_filename)
+
+        root = bc.resolve_artifacts(source_pool)

         # Now, before we start the build, we garbage collect the caches
         # to ensure we have room. First we remove all system artifacts
@@ -152,8 +159,21 @@ class WorkerBuild(cliapp.Plugin):

         self.app.subcommands['gc']([])

-        arch = artifact.arch
-        bc.build_source(artifact.source, bc.new_build_env(arch))
+        source = self.find_source(source_pool, artifact_reference)
+        build_env = bc.new_build_env(artifact_reference.arch)
+        bc.build_source(source, build_env)
+
+    def find_source(self, source_pool, artifact_reference):
+        for s in source_pool.lookup(artifact_reference.source_repo,
+                                    artifact_reference.source_ref,
+                                    artifact_reference.filename):
+            if s.cache_key == artifact_reference.cache_key:
+                return s
+        for s in source_pool.lookup(artifact_reference.source_repo,
+                                    artifact_reference.source_sha1,
+                                    artifact_reference.filename):
+            if s.cache_key == artifact_reference.cache_key:
+                return s

     def is_system_artifact(self, filename):
         return re.match(r'^[0-9a-fA-F]{64}\.system\.', filename)
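On the worker side, the plugin no longer receives full Source objects over stdin: it rebuilds the source pool locally from the reference's repo, ref and root filename, then picks out the matching Source by cache key, trying the symbolic ref first and the resolved SHA1 second. The sketch below mirrors that matching step with invented stand-ins; FakeSource and FakeSourcePool are not morphlib classes, and the real lookup is morphlib's SourcePool.lookup.

```python
# Stand-ins that imitate just enough of morphlib's SourcePool/Source to show
# the cache-key matching performed by WorkerBuild.find_source().
class FakeSource(object):
    def __init__(self, repo, ref, filename, cache_key):
        self.repo, self.ref, self.filename = repo, ref, filename
        self.cache_key = cache_key


class FakeSourcePool(object):
    def __init__(self, sources):
        self._sources = sources

    def lookup(self, repo, ref, filename):
        return [s for s in self._sources
                if (s.repo, s.ref, s.filename) == (repo, ref, filename)]


def find_source(source_pool, ref_dict):
    # Try the original ref first, then the resolved SHA1, as the plugin does.
    for tref in (ref_dict['source_ref'], ref_dict['source_sha1']):
        for s in source_pool.lookup(ref_dict['source_repo'], tref,
                                    ref_dict['filename']):
            if s.cache_key == ref_dict['cache_key']:
                return s


pool = FakeSourcePool([
    FakeSource('upstream/glibc', 'master', 'glibc.morph', 'cafe01'),
    FakeSource('upstream/glibc', 'deadbeef', 'glibc.morph', 'cafe01'),
])
ref_dict = {'source_repo': 'upstream/glibc', 'source_ref': 'master',
            'source_sha1': 'deadbeef', 'filename': 'glibc.morph',
            'cache_key': 'cafe01'}
print(find_source(pool, ref_dict).cache_key)   # cafe01
```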