#
#  Copyright (C) 2019-2020 Bloomberg Finance LP
#
#  This program is free software; you can redistribute it and/or
#  modify it under the terms of the GNU Lesser General Public
#  License as published by the Free Software Foundation; either
#  version 2 of the License, or (at your option) any later version.
#
#  This library is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
#  Lesser General Public License for more details.
#
#  You should have received a copy of the GNU Lesser General Public
#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
import os

import grpc

from ._cas.casremote import BlobNotFound
from ._assetcache import AssetCache
from ._exceptions import AssetCacheError, CASError, CASRemoteError, SourceCacheError
from . import utils
from ._protos.buildstream.v2 import source_pb2

REMOTE_ASSET_SOURCE_URN_TEMPLATE = "urn:fdc:buildstream.build:2020:source:{}"
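# A hypothetical cache key such as "8f2a..." maps via this template to
# "urn:fdc:buildstream.build:2020:source:8f2a...".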


# Class that keeps the configuration of source cache remotes and handles
# caching of element sources.
#
# Args:
#    context (Context): The BuildStream context
#
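# A minimal usage sketch (hedged; ``context``, ``element_sources`` and
# ``plugin`` are hypothetical, pre-configured objects):
#
#    cache = ElementSourcesCache(context)
#    proto = cache.load_proto(element_sources)
#    if proto is None and cache.pull(element_sources, plugin):
#        proto = cache.load_proto(element_sources)
#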
class ElementSourcesCache(AssetCache):

    spec_name = "source_cache_specs"
    config_node_name = "source-caches"

    def __init__(self, context):
        super().__init__(context)

        self._basedir = os.path.join(context.cachedir, "elementsources")
        os.makedirs(self._basedir, exist_ok=True)

    # load_proto():
    #
    # Load the source proto from the local cache.
    #
    # Args:
    #    sources (ElementSources): The sources whose proto we want to load
    #
    # Returns:
    #    (source_pb2.Source): The source proto, or None if not cached
    #
    def load_proto(self, sources):
        ref = sources.get_cache_key()
        path = self._source_path(ref)

        if not os.path.exists(path):
            return None

        source_proto = source_pb2.Source()
        with open(path, "rb") as f:
            source_proto.ParseFromString(f.read())
            return source_proto

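    # store_proto():
    #
    # Store the source proto in the local cache.
    #
    # Args:
    #    sources (ElementSources): The sources whose proto we want to store
    #    proto (source_pb2.Source): The source proto to store
    #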
    def store_proto(self, sources, proto):
        ref = sources.get_cache_key()
        path = self._source_path(ref)

        with utils.save_file_atomic(path, "w+b") as f:
            f.write(proto.SerializeToString())

    # pull():
    #
    # Attempts to pull sources from configured remote source caches.
    #
    # Args:
    #    sources (ElementSources): The sources we want to fetch
    #    plugin (Plugin): The plugin used to emit status messages
    #
    # Returns:
    #    (bool): True if pull successful, False if not
    #
    def pull(self, sources, plugin):
        project = sources.get_project()

        ref = sources.get_cache_key()
        display_key = sources.get_brief_display_key()

        uri = REMOTE_ASSET_SOURCE_URN_TEMPLATE.format(ref)

        source_digest = None
        errors = []
        # Start by pulling our source proto, so that we know which
        # blobs to pull
        for remote in self._index_remotes[project]:
            remote.init()
            try:
                plugin.status("Pulling source {} <- {}".format(display_key, remote))
                response = remote.fetch_blob([uri])
                if response:
                    source_digest = response.blob_digest
                    break

                plugin.info("Remote ({}) does not have source {} cached".format(remote, display_key))
            except AssetCacheError as e:
                plugin.warn("Could not pull from remote {}: {}".format(remote, e))
                errors.append(e)

        if errors and not source_digest:
            raise SourceCacheError(
                "Failed to pull source {}".format(display_key), detail="\n".join(str(e) for e in errors)
            )

        # If we don't have a source proto, we can't pull source files
        if not source_digest:
            return False

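        # We have the source proto; now fetch the file blobs it references
        # from the storage remotes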
        errors = []
        for remote in self._storage_remotes[project]:
            remote.init()
            try:
                plugin.status("Pulling data for source {} <- {}".format(display_key, remote))

                if self._pull_source_storage(ref, source_digest, remote):
                    plugin.info("Pulled source {} <- {}".format(display_key, remote))
                    return True

                plugin.info("Remote ({}) does not have source {} cached".format(remote, display_key))
            except BlobNotFound as e:
                # Not all blobs are available on this remote
                plugin.info("Remote cas ({}) does not have blob {} cached".format(remote, e.blob))
                continue
            except CASError as e:
                plugin.warn("Could not pull from remote {}: {}".format(remote, e))
                errors.append(e)

        if errors:
            raise SourceCacheError(
                "Failed to pull source {}".format(display_key), detail="\n".join(str(e) for e in errors)
            )

        return False

    # push():
    #
    # Push sources to the configured remote source caches.
    #
    # Args:
    #    sources (ElementSources): The sources to be pushed
    #    plugin (Plugin): The plugin used to emit status messages
    #
    # Returns:
    #   (bool): True if any remote was updated, False if no pushes were required
    #
    # Raises:
    #   (SourceCacheError): if there was an error
    #
    def push(self, sources, plugin):
        project = sources.get_project()

        ref = sources.get_cache_key()
        display_key = sources.get_brief_display_key()

        uri = REMOTE_ASSET_SOURCE_URN_TEMPLATE.format(ref)

        index_remotes = [r for r in self._index_remotes[project] if r.push]
        storage_remotes = [r for r in self._storage_remotes[project] if r.push]

        source_proto = self.load_proto(sources)
        source_digest = self.cas.add_object(buffer=source_proto.SerializeToString())

        pushed = False

        # First push our files to all storage remotes, so that they
        # can perform file checks on their end
        for remote in storage_remotes:
            remote.init()
            plugin.status("Pushing data from source {} -> {}".format(display_key, remote))

            if self._push_source_blobs(source_proto, source_digest, remote):
                plugin.info("Pushed data from source {} -> {}".format(display_key, remote))
            else:
                plugin.info("Remote ({}) already has all data of source {} cached".format(remote, display_key()))

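        # Now that the blobs are on the storage remotes, push the source
        # proto to the index remotes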
        for remote in index_remotes:
            remote.init()
            plugin.status("Pushing source {} -> {}".format(display_key, remote))

            if self._push_source_proto(uri, source_proto, source_digest, remote):
                plugin.info("Pushed source {} -> {}".format(display_key, remote))
                pushed = True
            else:
                plugin.info("Remote ({}) already has source {} cached".format(remote, display_key))

        return pushed

    def _get_source(self, ref):
        path = self._source_path(ref)
        source_proto = source_pb2.Source()
        try:
            with open(path, "rb") as f:
                source_proto.ParseFromString(f.read())
                return source_proto
        except FileNotFoundError as e:
            raise SourceCacheError("Attempted to access unavailable source: {}".format(e)) from e

    def _source_path(self, ref):
        return os.path.join(self._basedir, ref)

    # _push_source_blobs()
    #
    # Push the blobs that make up a source to the remote server.
    #
    # Args:
    #    source_proto: The source proto whose blobs to push.
    #    source_digest: The digest of the source proto.
    #    remote (CASRemote): The remote to push the blobs to.
    #
    # Returns:
    #    (bool) - True if we uploaded anything, False otherwise.
    #
    # Raises:
    #    SourceCacheError: If we fail to push blobs (*unless* they're
    #    already there or we run out of space on the server).
    #
    def _push_source_blobs(self, source_proto, source_digest, remote):
        try:
            # Push source files
            self.cas._send_directory(remote, source_proto.files)
            # Push source proto
            self.cas.send_blobs(remote, [source_digest])

        except CASRemoteError as cas_error:
            if cas_error.reason != "cache-too-full":
                raise SourceCacheError("Failed to push source blobs: {}".format(cas_error))
            return False
        except grpc.RpcError as e:
            if e.code() != grpc.StatusCode.RESOURCE_EXHAUSTED:
                raise SourceCacheError(
                    "Failed to push source blobs with status {}: {}".format(e.code().name, e.details())
                )
            return False

        return True

    # _push_source_proto()
    #
    # Pushes the source proto to remote.
    #
    # Args:
    #    uri (str): The Remote Asset URN identifying the source
    #    source_proto: The source proto.
    #    source_digest: The digest of the source proto.
    #    remote (AssetRemote): Remote to push to
    #
    # Returns:
    #    (bool): Whether we pushed the source.
    #
    # Raises:
    #    SourceCacheError: If the push fails for any reason except the
    #    source already existing.
    #
    def _push_source_proto(self, uri, source_proto, source_digest, remote):
        try:
            response = remote.fetch_blob([uri])
            # Skip push if source is already on the server
            if response and response.blob_digest == source_digest:
                return False
        except grpc.RpcError as e:
            if e.code() != grpc.StatusCode.NOT_FOUND:
                raise SourceCacheError(
                    "Error checking source cache with status {}: {}".format(e.code().name, e.details())
                )

        referenced_directories = [source_proto.files]

        try:
            remote.push_blob(
                [uri], source_digest, references_directories=referenced_directories,
            )
        except grpc.RpcError as e:
            raise SourceCacheError("Failed to push source with status {}: {}".format(e.code().name, e.details()))

        return True

    # _pull_source_storage():
    #
    # Pull source blobs from the given remote.
    #
    # Args:
    #    key (str): The specific key for the source to pull
    #    source_digest (Digest): The digest of the source proto to pull
    #    remote (CASRemote): remote to pull from
    #
    # Returns:
    #    (bool): True if we pulled any blobs.
    #
    # Raises:
    #    SourceCacheError: If the pull failed for any reason except the
    #    blobs not existing on the server.
    #
    def _pull_source_storage(self, key, source_digest, remote):
        try:
            # Fetch and parse source proto
            self.cas.fetch_blobs(remote, [source_digest])
            source = source_pb2.Source()
            with self.cas.open(source_digest, "rb") as f:
                source.ParseFromString(f.read())

            # Write the source proto to cache
            source_path = self._source_path(key)
            with utils.save_file_atomic(source_path, mode="wb") as f:
                f.write(source.SerializeToString())

            self.cas._fetch_directory(remote, source.files)
        except grpc.RpcError as e:
            if e.code() != grpc.StatusCode.NOT_FOUND:
                raise SourceCacheError("Failed to pull source with status {}: {}".format(e.code().name, e.details()))
            return False

        return True

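    # _push_source():
    #
    # Push the files of a cached source to the given remote.
    #
    # Args:
    #    source_ref (str): The cache key of the source to push
    #    remote (AssetRemote): The remote to push to
    #
    # Returns:
    #    (bool): True if the push succeeded, False if the server is out of space
    #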
    def _push_source(self, source_ref, remote):
        uri = REMOTE_ASSET_SOURCE_URN_TEMPLATE.format(source_ref)

        try:
            remote.init()
            source_proto = self._get_source(source_ref)
            remote.push_directory([uri], source_proto.files)
            return True

        except grpc.RpcError as e:
            if e.code() != grpc.StatusCode.RESOURCE_EXHAUSTED:
                raise SourceCacheError("Failed to push source with status {}: {}".format(e.code().name, e.details()))
            return False