1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
|
# Copyright (C) 2014-2015 Codethink Limited
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import collections
import cPickle
import logging
import os

import cliapp
import pylru

import morphlib
class PickleCacheManager(object):

    '''Cache manager for PyLRU that reads and writes to Pickle files.

    The 'pickle' format is less than ideal in many ways and is actually
    slower than JSON in Python. However, the data we need to cache is keyed
    by tuples and in JSON a dict can only be keyed with strings. For now,
    using 'pickle' seems to be the least worst option.

    '''

    def __init__(self, filename, size):
        # filename: path of the on-disk pickle file backing the cache.
        # size: maximum number of entries held by the LRU cache.
        self.filename = filename
        self.size = size

    def _populate_cache_from_file(self, filename, cache):
        '''Fill `cache` with the key/value pairs pickled in `filename`.

        Failure to read the file (missing, truncated or corrupt) is not
        fatal: the cache simply starts out empty, and a warning is logged.

        '''
        try:
            # Pickle data must be read in binary mode; text mode can
            # corrupt the stream on platforms that translate newlines.
            with open(filename, 'rb') as f:
                data = cPickle.load(f)
            for key, value in data.iteritems():
                cache[key] = value
        except (EOFError, IOError, cPickle.PickleError) as e:
            logging.warning('Failed to load cache %s: %s', self.filename, e)

    def load_cache(self):
        '''Create a pylru.lrucache object prepopulated with saved data.'''
        cache = pylru.lrucache(self.size)
        # There should be a more efficient way to do this, by hooking into
        # the cPickle module directly.
        self._populate_cache_from_file(self.filename, cache)
        return cache

    def save_cache(self, cache):
        '''Save the data from a pylru.lrucache object to disk.

        Any changes that have been made by other instances or processes since
        load_cache() was called will be overwritten.

        '''
        data = {}
        for key, value in cache.items():
            data[key] = value
        try:
            # Write in binary mode for the same reason we read in binary
            # mode above. SaveFile writes atomically via a temporary file.
            with morphlib.savefile.SaveFile(self.filename, 'wb') as f:
                cPickle.dump(data, f)
        except (IOError, cPickle.PickleError) as e:
            logging.warning('Failed to save cache to %s: %s', self.filename, e)
class SourceResolverError(cliapp.AppException):

    '''Base class for errors raised while resolving sources.'''
class MorphologyNotFoundError(SourceResolverError):

    '''Raised when a requested morphology file cannot be located.'''

    def __init__(self, filename):
        message = "Couldn't find morphology: %s" % filename
        SourceResolverError.__init__(self, message)
class NotcachedError(SourceResolverError):

    '''Raised when a repo is in neither the local nor the remote cache.'''

    def __init__(self, repo_name):
        message = ("Repository %s is not cached locally and there is no "
                   "remote cache specified" % repo_name)
        SourceResolverError.__init__(self, message)
class SourceResolver(object):

    '''Provides a way of resolving the set of sources for a given system.

    There are two levels of caching involved in resolving the sources to
    build.

    The canonical source for each source is specified in the build-command
    (for strata and systems) or in the stratum morphology (for chunks). It
    will be either a normal URL, or a keyed URL using a repo-alias like
    'baserock:baserock/definitions'.

    The 'remote repo cache' is a Baserock Trove system. It functions as a
    normal Git server, but in addition it runs a service on port 8080 called
    'morph-cache-server' which can resolve refs, list their contents and read
    specific files from the repos it holds. This allows the SourceResolver to
    work out how to build something without cloning the whole repo. (If a
    local build of that source ends up being necessary then it will get
    cloned into the local cache later on).

    The second layer of caching is the local repository cache, which mirrors
    entire repositories in $cachedir/gits. If a repo is not in the remote
    repo cache then it must be present in the local repo cache.

    '''

    def __init__(self, local_repo_cache, remote_repo_cache, update_repos,
                 status_cb=None):
        self.lrc = local_repo_cache
        self.rrc = remote_repo_cache
        self.update = update_repos
        # Fall back to a no-op so that methods can report status without
        # checking for None everywhere.
        self.status = status_cb or (lambda **kwargs: None)

        # Memoises _get_morphology(), keyed by (reponame, sha1, filename).
        self._resolved_morphologies = {}

    def resolve_ref(self, reponame, ref):
        '''Resolves commit and tree sha1s of the ref in a repo and returns it.

        If update is True then this has the side-effect of updating
        or cloning the repository into the local repo cache.

        Returns a tuple (commit sha1, tree sha1).

        '''
        absref = None
        if self.lrc.has_repo(reponame):
            repo = self.lrc.get_repo(reponame)
            if self.update and repo.requires_update_for_ref(ref):
                self.status(msg='Updating cached git repository %(reponame)s '
                            'for ref %(ref)s', reponame=reponame, ref=ref)
                repo.update()
            # If the user passed --no-git-update, and the ref is a SHA1 not
            # available locally, this call will raise an exception.
            absref = repo.resolve_ref_to_commit(ref)
            tree = repo.resolve_ref_to_tree(absref)
        elif self.rrc is not None:
            try:
                absref, tree = self.rrc.resolve_ref(reponame, ref)
                if absref is not None:
                    self.status(msg='Resolved %(reponame)s %(ref)s via remote '
                                'repo cache',
                                reponame=reponame,
                                ref=ref,
                                chatty=True)
            except BaseException as e:
                # Deliberately best-effort: fall through to cloning the repo
                # locally below if the remote cache cannot resolve the ref.
                logging.warning('Caught (and ignored) exception: %s', str(e))

        if absref is None:
            # The remote cache failed (or was absent); we must have the repo
            # locally to make progress.
            if self.update:
                self.status(msg='Caching git repository %(reponame)s',
                            reponame=reponame)
                repo = self.lrc.cache_repo(reponame)
                repo.update()
            else:
                repo = self.lrc.get_repo(reponame)
            absref = repo.resolve_ref_to_commit(ref)
            tree = repo.resolve_ref_to_tree(absref)
        return absref, tree

    def _get_morphology(self, reponame, sha1, filename):
        '''Read the morphology at the specified location.

        If the file does not exist, attempt to infer a chunk morphology from
        the repo's build system. Raises MorphologyNotFoundError if neither
        works, and NotcachedError if the repo is in neither cache.

        Results are memoised in self._resolved_morphologies.

        '''
        key = (reponame, sha1, filename)
        if key in self._resolved_morphologies:
            return self._resolved_morphologies[key]

        morph_name = os.path.splitext(os.path.basename(filename))[0]
        loader = morphlib.morphloader.MorphologyLoader()
        # BUG FIX: these previously read self._lrc / self._rrc, which are
        # never assigned (__init__ sets self.lrc / self.rrc).
        if self.lrc.has_repo(reponame):
            self.status(msg="Looking for %s in local repo cache" % filename,
                        chatty=True)
            try:
                repo = self.lrc.get_repo(reponame)
                text = repo.read_file(filename, sha1)
                morph = loader.load_from_string(text)
            except IOError:
                morph = None
                file_list = repo.list_files(ref=sha1, recurse=False)
        elif self.rrc is not None:
            self.status(msg="Retrieving %(reponame)s %(sha1)s %(filename)s"
                        " from the remote git cache.",
                        reponame=reponame, sha1=sha1, filename=filename,
                        chatty=True)
            try:
                text = self.rrc.cat_file(reponame, sha1, filename)
                morph = loader.load_from_string(text)
            except morphlib.remoterepocache.CatFileError:
                morph = None
                file_list = self.rrc.ls_tree(reponame, sha1)
        else:
            raise NotcachedError(reponame)

        if morph is None:
            self.status(msg="File %s doesn't exist: attempting to infer "
                            "chunk morph from repo's build system"
                        % filename, chatty=True)
            bs = morphlib.buildsystem.detect_build_system(file_list)
            if bs is None:
                raise MorphologyNotFoundError(filename)
            morph = bs.get_morphology(morph_name)
            loader.validate(morph)
            loader.set_commands(morph)
            loader.set_defaults(morph)

        # BUG FIX: previously stored under the morphology object itself
        # instead of the lookup key, so the memoisation never hit.
        self._resolved_morphologies[key] = morph
        return morph

    def traverse_morphs(self, definitions_repo, definitions_ref,
                        system_filenames,
                        visit=lambda rn, rf, fn, arf, tree, m: None,
                        definitions_original_ref=None):
        '''Traverse a system's morphologies, calling visit() for each one.

        visit() is called as visit(reponame, ref, filename, absref, tree,
        morphology) for the system, each stratum, and each chunk.

        '''
        definitions_queue = collections.deque(system_filenames)
        chunk_in_definitions_repo_queue = []
        chunk_in_source_repo_queue = []

        # Memoise (repo, ref) -> commit and (repo, commit) -> tree lookups
        # so each chunk repo ref is resolved at most once.
        resolved_commits = {}
        resolved_trees = {}

        # Resolve the (repo, ref) pair for the definitions repo, cache result.
        definitions_absref, definitions_tree = self.resolve_ref(
            definitions_repo, definitions_ref)

        if definitions_original_ref:
            definitions_ref = definitions_original_ref

        # First, process the system and its stratum morphologies. These will
        # all live in the same Git repository, and will point to various chunk
        # morphologies.
        while definitions_queue:
            filename = definitions_queue.popleft()

            morphology = self._get_morphology(
                definitions_repo, definitions_absref, filename)

            visit(definitions_repo, definitions_ref, filename,
                  definitions_absref, definitions_tree, morphology)

            if morphology['kind'] == 'cluster':
                raise cliapp.AppException(
                    "Cannot build a morphology of type 'cluster'.")
            elif morphology['kind'] == 'system':
                definitions_queue.extend(
                    morphlib.util.sanitise_morphology_path(s['morph'])
                    for s in morphology['strata'])
            elif morphology['kind'] == 'stratum':
                if morphology['build-depends']:
                    definitions_queue.extend(
                        morphlib.util.sanitise_morphology_path(s['morph'])
                        for s in morphology['build-depends'])
                for c in morphology['chunks']:
                    if 'morph' not in c:
                        # Legacy style: the morphology (or the build system
                        # to infer one from) lives in the chunk's own repo.
                        path = morphlib.util.sanitise_morphology_path(
                            c.get('morph', c['name']))
                        chunk_in_source_repo_queue.append(
                            (c['repo'], c['ref'], path))
                        continue
                    chunk_in_definitions_repo_queue.append(
                        (c['repo'], c['ref'], c['morph']))

        # Now process all the chunks involved in the build. First those with
        # morphologies in definitions.git, and then (for compatibility reasons
        # only) those with the morphology in the chunk's source repository.
        def process_chunk(repo, ref, filename, in_definitions_repo):
            # BUG FIX: the memo check previously tested (repo, ref) against
            # resolved_trees, which is keyed by (repo, commit), so it never
            # matched and every chunk ref was re-resolved.
            if (repo, ref) not in resolved_commits:
                commit_sha1, tree_sha1 = self.resolve_ref(repo, ref)
                resolved_commits[repo, ref] = commit_sha1
                resolved_trees[repo, commit_sha1] = tree_sha1
            absref = resolved_commits[repo, ref]
            tree = resolved_trees[repo, absref]
            if in_definitions_repo:
                morphology = self._get_morphology(
                    definitions_repo, definitions_absref, filename)
            else:
                # BUG FIX: morphologies for chunks in the source-repo queue
                # live in the chunk's own repo, not in definitions.git.
                morphology = self._get_morphology(repo, absref, filename)
            visit(repo, ref, filename, absref, tree, morphology)

        # BUG FIX: these loops previously called the undefined name
        # 'process_chunk_repo', raising NameError.
        for repo, ref, filename in chunk_in_definitions_repo_queue:
            process_chunk(repo, ref, filename, True)

        for repo, ref, filename in chunk_in_source_repo_queue:
            process_chunk(repo, ref, filename, False)
def create_source_pool(lrc, rrc, repo, ref, filename,
                       original_ref=None, update_repos=True,
                       status_cb=None):
    '''Find all the sources involved in building a given system.

    Given a system morphology, this function will traverse the tree of
    stratum and chunk morphologies that the system points to and create
    appropriate Source objects. These are added to a new SourcePool object,
    which is returned.

    Note that Git submodules are not considered 'sources' in the current
    implementation, and so they must be handled separately.

    The 'lrc' and 'rrc' parameters specify the local and remote Git
    repository caches used for resolving the sources.

    '''
    source_pool = morphlib.sourcepool.SourcePool()

    def collect_sources(reponame, ref, filename, absref, tree, morphology):
        # One morphology may yield several Source objects; add them all.
        new_sources = morphlib.source.make_sources(reponame, ref, filename,
                                                   absref, tree, morphology)
        for new_source in new_sources:
            source_pool.add(new_source)

    resolver = SourceResolver(lrc, rrc, update_repos, status_cb)
    resolver.traverse_morphs(repo, ref, [filename],
                             visit=collect_sources,
                             definitions_original_ref=original_ref)

    return source_pool
|