From b823a616c555a7d4c0cb93416d78db6053a06667 Mon Sep 17 00:00:00 2001
From: James Ennis
Date: Wed, 30 May 2018 15:56:41 +0100
Subject: pushreceive.py: Ensure huge artifacts are not pushed

---
 buildstream/_artifactcache/pushreceive.py | 37 ++++++++++++++++++++++++++-----
 tests/frontend/push.py                    |  1 -
 2 files changed, 32 insertions(+), 6 deletions(-)

diff --git a/buildstream/_artifactcache/pushreceive.py b/buildstream/_artifactcache/pushreceive.py
index e17e1de9a..ab74e80ea 100644
--- a/buildstream/_artifactcache/pushreceive.py
+++ b/buildstream/_artifactcache/pushreceive.py
@@ -56,6 +56,11 @@ class PushExistsException(Exception):
     pass
 
 
+# Trying to push an artifact that is too large
+class ArtifactTooLargeException(Exception):
+    pass
+
+
 class PushCommandType(Enum):
     info = 0
     update = 1
@@ -285,6 +290,7 @@ class PushMessageReader(object):
         # which mounts the repo
         stats = os.statvfs(repopath)
         free_disk_space = stats.f_bfree * stats.f_bsize
+        total_disk_space = stats.f_blocks * stats.f_bsize
 
         # Open a TarFile for reading uncompressed tar from a stream
         tar = tarfile.TarFile.open(mode='r|', fileobj=self.file)
@@ -309,6 +315,11 @@ class PushMessageReader(object):
                 # obtain size of tar object in bytes
                 artifact_size = tar_info.size
 
+                if artifact_size > total_disk_space - buffer_:
+                    raise ArtifactTooLargeException("Artifact of size: {} is too large for "
+                                                    "the filesystem which mounts the remote "
+                                                    "cache".format(artifact_size))
+
                 if artifact_size > free_disk_space - buffer_:
                     # Clean up the cache with a buffer of 2GB
                     removed_size = clean_up_cache(repo, artifact_size, free_disk_space, buffer_)
@@ -579,7 +590,13 @@ class OSTreePusher(object):
         objects = self.needed_objects(commits)
 
         # Send all the objects to receiver, checking status after each
-        self.writer.send_putobjects(self.repo, objects)
+        try:
+            self.writer.send_putobjects(self.repo, objects)
+        except BrokenPipeError:
+            # If the remote closes, we receive a BrokenPipeError
+            # Return 1 to notify the frontend that something went
+            # wrong on the server.
+            return 1
 
         # Inform receiver that all objects have been sent
         self.writer.send_done()
@@ -626,8 +643,11 @@ class OSTreeReceiver(object):
     def run(self):
         try:
             exit_code = self.do_run()
-            self.close()
-            return exit_code
+        except ArtifactTooLargeException:
+            logging.warning("The artifact was too large for the filesystem which mounts "
+                            "the remote cache.")
+            exit_code = 0
+
         except:
             # BLIND EXCEPT - Just abort if we receive any exception, this
             # can be a broken pipe, a tarfile read error when the remote
@@ -635,6 +655,9 @@ class OSTreeReceiver(object):
             self.close()
             raise
 
+        self.close()
+        return exit_code
+
     def do_run(self):
         # Receive remote info
         args = self.reader.receive_info()
@@ -844,8 +867,12 @@ def clean_up_cache(repo, artifact_size, free_disk_space, buffer_):
         try:
             to_remove = LRP_artifacts.pop(0)  # The first element in the list is the LRP artifact
         except IndexError:
-            logging.info("There are no more artifacts left in the cache. Adding artifact...")
-            break
+            # This exception is caught if there are no more artifacts in the list
+            # LRP_artifacts. This means the artifact is too large for the filesystem
+            # so we abort the process
+            raise ArtifactTooLargeException("Artifact of size {} is too large for "
+                                            "the filesystem which mounts the remote "
+                                            "cache".format(artifact_size))
 
         removed_size += _ostree.remove(repo, to_remove, defer_prune=False)
 
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 3111a04f3..3d40c7deb 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -267,7 +267,6 @@ def test_artifact_expires(cli, datafiles, tmpdir):
 
 # Test that a large artifact, whose size exceeds the quota, is not pushed
 # to the remote share
-@pytest.mark.xfail
 @pytest.mark.datafiles(DATA_DIR)
 def test_artifact_too_large(cli, datafiles, tmpdir):
     project = os.path.join(datafiles.dirname, datafiles.basename)
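For readers following the change, the admission logic this patch adds to receive_objects() boils down to the sketch below. It is a simplified illustration, not the patched code itself: the helper name check_artifact_fits() and the clean_up callback are hypothetical, while the statvfs arithmetic, the buffer_ margin (2GB per the patch comments) and ArtifactTooLargeException mirror the diff above.

    import os


    class ArtifactTooLargeException(Exception):
        pass


    def check_artifact_fits(repopath, artifact_size, buffer_=2 * 1024 ** 3, clean_up=None):
        # Query the filesystem which mounts the artifact repository
        stats = os.statvfs(repopath)
        free_disk_space = stats.f_bfree * stats.f_bsize
        total_disk_space = stats.f_blocks * stats.f_bsize

        # The artifact could never fit, even into an empty cache: refuse it outright
        if artifact_size > total_disk_space - buffer_:
            raise ArtifactTooLargeException("Artifact of size: {} is too large for "
                                            "the filesystem which mounts the remote "
                                            "cache".format(artifact_size))

        # The artifact fits in principle but not right now: evict artifacts
        # (clean_up_cache() in the patch) until enough space is available; that
        # routine raises ArtifactTooLargeException itself if the cache empties
        # before enough space has been freed
        if artifact_size > free_disk_space - buffer_ and clean_up is not None:
            clean_up(artifact_size, free_disk_space, buffer_)

Checking against total_disk_space first is what stops the receiver from evicting its entire cache for an artifact that could never fit; only artifacts that would fit on an empty filesystem fall through to the clean_up_cache() path.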