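# Tests for `bst push`: pushing locally cached artifacts to one or
# more configured remote artifact shares.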
import os
import shutil
import pytest
from buildstream._exceptions import ErrorDomain
from tests.testutils import cli, create_artifact_share
from tests.testutils.site import IS_LINUX

# Project directory
DATA_DIR = os.path.join(
    os.path.dirname(os.path.realpath(__file__)),
    "project",
)


# Assert that a given artifact is in the share
#
def assert_shared(cli, share, project, element_name):
    # NOTE: 'test' here is the name of the project
    # specified in the project.conf we are testing with.
    #
    cache_key = cli.get_element_key(project, element_name)
    if not share.has_artifact('test', element_name, cache_key):
        raise AssertionError("Artifact share at {} does not contain the expected element {}"
                             .format(share.repo, element_name))


# Assert that a given artifact is NOT in the share
#
def assert_not_shared(cli, share, project, element_name):
    # NOTE: 'test' here is the name of the project
    # specified in the project.conf we are testing with.
    #
    cache_key = cli.get_element_key(project, element_name)
    if share.has_artifact('test', element_name, cache_key):
        raise AssertionError("Artifact share at {} unexpectedly contains the element {}"
                             .format(share.repo, element_name))


@pytest.mark.datafiles(DATA_DIR)
def test_push(cli, tmpdir, datafiles):
    project = str(datafiles)

    # First build the project without the artifact cache configured
    result = cli.run(project=project, args=['build', 'target.bst'])
    result.assert_success()

    # Assert that we are now cached locally
    assert cli.get_element_state(project, 'target.bst') == 'cached'

    # Set up two artifact shares.
    share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
    share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))

    # Try pushing with no remotes configured. This should fail.
    result = cli.run(project=project, args=['push', 'target.bst'])
    result.assert_main_error(ErrorDomain.PIPELINE, None)
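    # (With no push-capable remote configured there is nowhere to push to,
    # so the error is reported at the pipeline level rather than against
    # any individual element.)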

    # Configure bst to pull but not push from a cache and run `bst push`.
    # This should also fail.
    cli.configure({
        'artifacts': {'url': share1.repo, 'push': False},
    })
    result = cli.run(project=project, args=['push', 'target.bst'])
    result.assert_main_error(ErrorDomain.PIPELINE, None)

    # Configure bst to push to one of the caches and run `bst push`. This works.
    cli.configure({
        'artifacts': [
            {'url': share1.repo, 'push': False},
            {'url': share2.repo, 'push': True},
        ]
    })
    result = cli.run(project=project, args=['push', 'target.bst'])
    assert_not_shared(cli, share1, project, 'target.bst')
    assert_shared(cli, share2, project, 'target.bst')

    # Now try pushing to both (making sure to empty the cache we just pushed
    # to).
    shutil.rmtree(share2.directory)
    share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
    cli.configure({
        'artifacts': [
            {'url': share1.repo, 'push': True},
            {'url': share2.repo, 'push': True},
        ]
    })
    result = cli.run(project=project, args=['push', 'target.bst'])
    assert_shared(cli, share1, project, 'target.bst')
    assert_shared(cli, share2, project, 'target.bst')


@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_push_all(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))

    # First build it without the artifact cache configured
    result = cli.run(project=project, args=['build', 'target.bst'])
    result.assert_success()

    # Assert that we are now cached locally
    assert cli.get_element_state(project, 'target.bst') == 'cached'

    # Configure artifact share
    cli.configure({
        #
        # FIXME: This test hangs "sometimes" if we allow
        # concurrent push.
        #
        # It's not too bad to ignore since we're
        # using the local artifact cache functionality
        # only, but it should probably be fixed.
        #
        'scheduler': {
            'pushers': 1
        },
        'artifacts': {
            'url': share.repo,
            'push': True,
        }
    })

    # Now try bst push all the deps
    result = cli.run(project=project, args=[
        'push', 'target.bst',
        '--deps', 'all'
    ])
    result.assert_success()

    # And finally assert that all the artifacts are in the share
    assert_shared(cli, share, project, 'target.bst')
    assert_shared(cli, share, project, 'import-bin.bst')
    assert_shared(cli, share, project, 'import-dev.bst')
    assert_shared(cli, share, project, 'compose-all.bst')