Diffstat (limited to 'spec/requests/api/runner_spec.rb')
-rw-r--r-- | spec/requests/api/runner_spec.rb | 123 |
1 files changed, 98 insertions, 25 deletions
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 5084b36c761..28d51ac86c6 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -950,12 +950,53 @@ describe API::Runner do
   describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
     context 'when using token as parameter' do
-      it 'authorizes posting artifacts to running job' do
-        authorize_artifacts_with_token_in_params
+      context 'posting artifacts to running job' do
+        subject do
+          authorize_artifacts_with_token_in_params
+        end
 
-        expect(response).to have_gitlab_http_status(200)
-        expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
-        expect(json_response['TempPath']).not_to be_nil
+        shared_examples 'authorizes local file' do
+          it 'succeeds' do
+            subject
+
+            expect(response).to have_gitlab_http_status(200)
+            expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+            expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
+            expect(json_response['RemoteObject']).to be_nil
+          end
+        end
+
+        context 'when using local storage' do
+          it_behaves_like 'authorizes local file'
+        end
+
+        context 'when using remote storage' do
+          context 'when direct upload is enabled' do
+            before do
+              stub_artifacts_object_storage(enabled: true, direct_upload: true)
+            end
+
+            it 'succeeds' do
+              subject
+
+              expect(response).to have_gitlab_http_status(200)
+              expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+              expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
+              expect(json_response['RemoteObject']).to have_key('ID')
+              expect(json_response['RemoteObject']).to have_key('GetURL')
+              expect(json_response['RemoteObject']).to have_key('StoreURL')
+              expect(json_response['RemoteObject']).to have_key('DeleteURL')
+            end
+          end
+
+          context 'when direct upload is disabled' do
+            before do
+              stub_artifacts_object_storage(enabled: true, direct_upload: false)
+            end
+
+            it_behaves_like 'authorizes local file'
+          end
+        end
       end
     end
 
     it 'fails to post too large artifact' do
@@ -1051,20 +1092,45 @@ describe API::Runner do
           end
         end
 
-        context 'when uses regular file post' do
-          before do
-            upload_artifacts(file_upload, headers_with_token, false)
+        context 'when uses accelerated file post' do
+          context 'for file stored locally' do
+            before do
+              upload_artifacts(file_upload, headers_with_token)
+            end
+
+            it_behaves_like 'successful artifacts upload'
           end
 
-          it_behaves_like 'successful artifacts upload'
-        end
+          context 'for file stored remotelly' do
+            let!(:fog_connection) do
+              stub_artifacts_object_storage(direct_upload: true)
+            end
 
-        context 'when uses accelerated file post' do
-          before do
-            upload_artifacts(file_upload, headers_with_token, true)
-          end
+            before do
+              fog_connection.directories.get('artifacts').files.create(
+                key: 'tmp/upload/12312300',
+                body: 'content'
+              )
 
-          it_behaves_like 'successful artifacts upload'
+              upload_artifacts(file_upload, headers_with_token,
+                               { 'file.remote_id' => remote_id })
+            end
+
+            context 'when valid remote_id is used' do
+              let(:remote_id) { '12312300' }
+
+              it_behaves_like 'successful artifacts upload'
+            end
+
+            context 'when invalid remote_id is used' do
+              let(:remote_id) { 'invalid id' }
+
+              it 'responds with bad request' do
+                expect(response).to have_gitlab_http_status(500)
+                expect(json_response['message']).to eq("Missing file")
+              end
+            end
+          end
+        end
       end
 
       context 'when using runners token' do
@@ -1159,11 +1225,13 @@ describe API::Runner do
         let!(:artifacts) { file_upload }
         let!(:artifacts_sha256) { Digest::SHA256.file(artifacts.path).hexdigest }
         let!(:metadata) { file_upload2 }
+        let!(:metadata_sha256) { Digest::SHA256.file(metadata.path).hexdigest }
 
         let(:stored_artifacts_file) { job.reload.artifacts_file.file }
         let(:stored_metadata_file) { job.reload.artifacts_metadata.file }
         let(:stored_artifacts_size) { job.reload.artifacts_size }
         let(:stored_artifacts_sha256) { job.reload.job_artifacts_archive.file_sha256 }
+        let(:stored_metadata_sha256) { job.reload.job_artifacts_metadata.file_sha256 }
 
         before do
           post(api("/jobs/#{job.id}/artifacts"), post_data, headers_with_token)
@@ -1175,7 +1243,8 @@ describe API::Runner do
             'file.name' => artifacts.original_filename,
             'file.sha256' => artifacts_sha256,
             'metadata.path' => metadata.path,
-            'metadata.name' => metadata.original_filename }
+            'metadata.name' => metadata.original_filename,
+            'metadata.sha256' => metadata_sha256 }
         end
 
         it 'stores artifacts and artifacts metadata' do
@@ -1184,6 +1253,7 @@ describe API::Runner do
           expect(stored_metadata_file.original_filename).to eq(metadata.original_filename)
          expect(stored_artifacts_size).to eq(72821)
           expect(stored_artifacts_sha256).to eq(artifacts_sha256)
+          expect(stored_metadata_sha256).to eq(metadata_sha256)
         end
       end
 
@@ -1204,15 +1274,19 @@ describe API::Runner do
       end
 
       context 'when artifacts are being stored outside of tmp path' do
+        let(:new_tmpdir) { Dir.mktmpdir }
+
         before do
+          # init before overwriting tmp dir
+          file_upload
+
           # by configuring this path we allow to pass file from @tmpdir only
           # but all temporary files are stored in system tmp directory
-          @tmpdir = Dir.mktmpdir
-          allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(@tmpdir)
+          allow(Dir).to receive(:tmpdir).and_return(new_tmpdir)
        end
 
         after do
-          FileUtils.remove_entry @tmpdir
+          FileUtils.remove_entry(new_tmpdir)
         end
 
         it' "fails to post artifacts for outside of tmp path"' do
@@ -1222,12 +1296,11 @@ describe API::Runner do
         end
       end
 
-      def upload_artifacts(file, headers = {}, accelerated = true)
-        params = if accelerated
-                   { 'file.path' => file.path, 'file.name' => file.original_filename }
-                 else
-                   { 'file' => file }
-                 end
+      def upload_artifacts(file, headers = {}, params = {})
+        params = params.merge({
+          'file.path' => file.path,
+          'file.name' => file.original_filename
+        })
 
         post api("/jobs/#{job.id}/artifacts"), params, headers
       end
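
A note on the reworked upload_artifacts helper at the bottom of the diff: instead of switching between a plain 'file' field and the accelerated form, it now always sends the Workhorse-style 'file.path'/'file.name' fields and merges in any extra multipart fields the caller passes (the direct-upload specs above pass 'file.remote_id'). The stand-alone Ruby sketch below illustrates only that parameter merging; build_artifact_params and the OpenStruct file stand-in are illustrative assumptions and are not part of the spec file.

# Stand-alone sketch (not part of the spec): build_artifact_params is a
# hypothetical helper mirroring the merge done in upload_artifacts above,
# and the OpenStruct stands in for the uploaded file fixture.
require 'ostruct'

def build_artifact_params(file, extra = {})
  # Extra multipart fields (e.g. 'file.remote_id') are kept, while the
  # Workhorse-style path/name fields are always present.
  extra.merge(
    'file.path' => file.path,
    'file.name' => file.original_filename
  )
end

file = OpenStruct.new(path: '/tmp/ci_build_artifacts.zip',
                      original_filename: 'ci_build_artifacts.zip')

# Local storage case: only the accelerated path/name fields are sent.
p build_artifact_params(file)

# Direct-upload case: the caller adds the object-storage remote id,
# matching the 'file.remote_id' => remote_id params used in the specs above.
p build_artifact_params(file, 'file.remote_id' => '12312300')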