import multiprocessing
import os
import signal
import pytest
from buildstream import _yaml, _signals, utils
from buildstream._context import Context
from buildstream._project import Project
from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
from buildstream.testing import cli
from tests.testutils import create_artifact_share


# Project directory
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"project",
)


# Message handler which discards pipeline messages; the tests in this
# module do not need to inspect any of the status output.
def message_handler(message, context):
    pass


# Since the parent process blocks reading from the queue, the child
# must always put something on it, even when the target function
# raises an exception.
def _queue_wrapper(target, queue, *args):
try:
target(*args, queue=queue)
except Exception as e:
queue.put(str(e))
raise
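

# Recursively flatten a Directory hierarchy from the local CAS into a
# single Tree message: the first call sets tree.root, and every nested
# Directory object is appended to tree.children.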
def tree_maker(cas, tree, directory):
if tree.root.ByteSize() == 0:
tree.root.CopyFrom(directory)
for directory_node in directory.directories:
child_directory = tree.children.add()
with open(cas.objpath(directory_node.digest), 'rb') as f:
child_directory.ParseFromString(f.read())
tree_maker(cas, tree, child_directory)


@pytest.mark.datafiles(DATA_DIR)
def test_pull(cli, tmpdir, datafiles):
project_dir = str(datafiles)
# Set up an artifact cache.
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
# Configure artifact share
cache_dir = os.path.join(str(tmpdir), 'cache')
user_config_file = str(tmpdir.join('buildstream.conf'))
user_config = {
'scheduler': {
'pushers': 1
},
'artifacts': {
'url': share.repo,
'push': True,
},
'cachedir': cache_dir
}
        # Write out the user configuration file
_yaml.dump(_yaml.node_sanitize(user_config), filename=user_config_file)
# Ensure CLI calls will use it
cli.configure(user_config)
# First build the project with the artifact cache configured
result = cli.run(project=project_dir, args=['build', 'target.bst'])
result.assert_success()
# Assert that we are now cached locally
assert cli.get_element_state(project_dir, 'target.bst') == 'cached'
# Assert that we shared/pushed the cached artifact
assert share.has_artifact(cli.get_artifact_name(project_dir, 'test', 'target.bst'))
# Delete the artifact locally
cli.remove_artifact_from_cache(project_dir, 'target.bst')
# Assert that we are not cached locally anymore
assert cli.get_element_state(project_dir, 'target.bst') != 'cached'
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.set_message_handler(message_handler)
# Load the project
project = Project(project_dir, context)
project.ensure_fully_loaded()
# Assert that the element's artifact is **not** cached
element = project.load_elements(['target.bst'])[0]
element_key = cli.get_element_key(project_dir, 'target.bst')
assert not cli.artifact.is_cached(cache_dir, element, element_key)
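
        # Queue for the child process to report its result back on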
queue = multiprocessing.Queue()
        # Use a child process to avoid creating gRPC threads in the main
        # BuildStream process.
        # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_pull, queue, user_config_file, project_dir,
cache_dir, 'target.bst', element_key))
try:
            # Keep SIGINT blocked in the child process; the parent handles
            # interrupts and kills the whole process tree itself
with _signals.blocked([signal.SIGINT], ignore=False):
process.start()
error = queue.get()
process.join()
except KeyboardInterrupt:
utils._kill_process_tree(process.pid)
raise
assert not error
assert cli.artifact.is_cached(cache_dir, element, element_key)
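

# Child-process half of test_pull(): loads the project in a fresh
# context, pulls the artifact from the configured remote and reports
# the outcome back over the queue.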
def _test_pull(user_config_file, project_dir, cache_dir,
element_name, element_key, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.cachedir = cache_dir
context.casdir = os.path.join(cache_dir, 'cas')
context.tmpdir = os.path.join(cache_dir, 'tmp')
context.set_message_handler(message_handler)
# Load the project manually
project = Project(project_dir, context)
project.ensure_fully_loaded()
# Create a local artifact cache handle
artifactcache = context.artifactcache
# Load the target element
element = project.load_elements([element_name])[0]
# Manually setup the CAS remote
artifactcache.setup_remotes(use_config=True)
if artifactcache.has_push_remotes(plugin=element):
        # Pull the element's artifact
if not artifactcache.pull(element, element_key):
queue.put("Pull operation failed")
else:
queue.put(None)
else:
queue.put("No remote configured for element {}".format(element_name))
@pytest.mark.datafiles(DATA_DIR)
def test_pull_tree(cli, tmpdir, datafiles):
project_dir = str(datafiles)
# Set up an artifact cache.
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
# Configure artifact share
rootcache_dir = os.path.join(str(tmpdir), 'cache')
user_config_file = str(tmpdir.join('buildstream.conf'))
user_config = {
'scheduler': {
'pushers': 1
},
'artifacts': {
'url': share.repo,
'push': True,
},
'cachedir': rootcache_dir
}
        # Write out the user configuration file
_yaml.dump(_yaml.node_sanitize(user_config), filename=user_config_file)
# Ensure CLI calls will use it
cli.configure(user_config)
# First build the project with the artifact cache configured
result = cli.run(project=project_dir, args=['build', 'target.bst'])
result.assert_success()
# Assert that we are now cached locally
assert cli.get_element_state(project_dir, 'target.bst') == 'cached'
# Assert that we shared/pushed the cached artifact
assert share.has_artifact(cli.get_artifact_name(project_dir, 'test', 'target.bst'))
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.set_message_handler(message_handler)
# Load the project and CAS cache
project = Project(project_dir, context)
project.ensure_fully_loaded()
artifactcache = context.artifactcache
cas = context.get_cascache()
# Assert that the element's artifact is cached
element = project.load_elements(['target.bst'])[0]
element_key = cli.get_element_key(project_dir, 'target.bst')
assert cli.artifact.is_cached(rootcache_dir, element, element_key)
# Retrieve the Directory object from the cached artifact
artifact_digest = cli.artifact.get_digest(rootcache_dir, element, element_key)
queue = multiprocessing.Queue()
        # Use a child process to avoid creating gRPC threads in the main
        # BuildStream process.
        # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_push_tree, queue, user_config_file, project_dir,
artifact_digest))
try:
# Keep SIGINT blocked in the child process
with _signals.blocked([signal.SIGINT], ignore=False):
process.start()
tree_hash, tree_size = queue.get()
process.join()
except KeyboardInterrupt:
utils._kill_process_tree(process.pid)
raise
assert tree_hash and tree_size
# Now delete the artifact locally
cli.remove_artifact_from_cache(project_dir, 'target.bst')
# Assert that we are not cached locally anymore
assert cli.get_element_state(project_dir, 'target.bst') != 'cached'
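
        # Reconstruct the digest of the Tree that the child process pushed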
tree_digest = remote_execution_pb2.Digest(hash=tree_hash,
size_bytes=tree_size)
queue = multiprocessing.Queue()
        # Use a child process to avoid creating gRPC threads in the main
        # BuildStream process
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_pull_tree, queue, user_config_file, project_dir,
tree_digest))
try:
# Keep SIGINT blocked in the child process
with _signals.blocked([signal.SIGINT], ignore=False):
process.start()
directory_hash, directory_size = queue.get()
process.join()
except KeyboardInterrupt:
utils._kill_process_tree(process.pid)
raise
# Directory size now zero with AaaP and stack element commit #1cbc5e63dc
assert directory_hash and not directory_size
directory_digest = remote_execution_pb2.Digest(hash=directory_hash,
size_bytes=directory_size)
        # Ensure the entire Tree structure has been pulled
assert os.path.exists(cas.objpath(directory_digest))
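

# Child-process half of test_pull_tree(): reads the artifact's root
# Directory from the local CAS, flattens it into a Tree message with
# tree_maker() and pushes it to the remote, reporting the Tree digest
# back over the queue.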
def _test_push_tree(user_config_file, project_dir, artifact_digest, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.set_message_handler(message_handler)
# Load the project manually
project = Project(project_dir, context)
project.ensure_fully_loaded()
# Create a local artifact cache and cas handle
artifactcache = context.artifactcache
cas = context.get_cascache()
# Manually setup the CAS remote
artifactcache.setup_remotes(use_config=True)
if artifactcache.has_push_remotes():
directory = remote_execution_pb2.Directory()
with open(cas.objpath(artifact_digest), 'rb') as f:
directory.ParseFromString(f.read())
# Build the Tree object while we are still cached
tree = remote_execution_pb2.Tree()
tree_maker(cas, tree, directory)
# Push the Tree as a regular message
tree_digest = artifactcache.push_message(project, tree)
queue.put((tree_digest.hash, tree_digest.size_bytes))
else:
queue.put("No remote configured")
def _test_pull_tree(user_config_file, project_dir, artifact_digest, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.set_message_handler(message_handler)
# Load the project manually
project = Project(project_dir, context)
project.ensure_fully_loaded()
# Create a local artifact cache handle
artifactcache = context.artifactcache
# Manually setup the CAS remote
artifactcache.setup_remotes(use_config=True)
if artifactcache.has_push_remotes():
# Pull the artifact using the Tree object
directory_digest = artifactcache.pull_tree(project, artifact_digest)
queue.put((directory_digest.hash, directory_digest.size_bytes))
else:
queue.put("No remote configured")