diff options
author | Toon Claes <toon@gitlab.com> | 2019-02-28 19:57:34 +0100 |
---|---|---|
committer | Toon Claes <toon@gitlab.com> | 2019-02-28 19:57:34 +0100 |
commit | 62d7990b9bb30cf33ed87017c5c633d1cccc75c2 (patch) | |
tree | c3e1b69c58a412ba1c6f50a0337a23d9f9d6e1a4 /spec/requests/api/runner_spec.rb | |
parent | f6453eca992a9c142268e78ac782cef98110d183 (diff) | |
download | gitlab-ce-tc-standard-gem.tar.gz |
Ran standardrb --fix on the whole codebase (branch: tc-standard-gem)
Inspired by https://twitter.com/searls/status/1101137953743613952 I
decided to try https://github.com/testdouble/standard on our codebase.
It's opinionated, but at least it's a _standard_.
Diffstat (limited to 'spec/requests/api/runner_spec.rb')
-rw-r--r-- | spec/requests/api/runner_spec.rb | 1162 |
1 files changed, 583 insertions, 579 deletions
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb index 43c06f7c973..7f87a1a4f60 100644 --- a/spec/requests/api/runner_spec.rb +++ b/spec/requests/api/runner_spec.rb @@ -1,10 +1,10 @@ -require 'spec_helper' +require "spec_helper" describe API::Runner, :clean_gitlab_redis_shared_state do include StubGitlabCalls include RedisHelpers - let(:registration_token) { 'abcdefg123456' } + let(:registration_token) { "abcdefg123456" } before do stub_feature_flags(ci_enable_live_trace: true) @@ -13,44 +13,44 @@ describe API::Runner, :clean_gitlab_redis_shared_state do allow_any_instance_of(Ci::Runner).to receive(:cache_attributes) end - describe '/api/v4/runners' do - describe 'POST /api/v4/runners' do - context 'when no token is provided' do - it 'returns 400 error' do - post api('/runners') + describe "/api/v4/runners" do + describe "POST /api/v4/runners" do + context "when no token is provided" do + it "returns 400 error" do + post api("/runners") expect(response).to have_gitlab_http_status 400 end end - context 'when invalid token is provided' do - it 'returns 403 error' do - post api('/runners'), params: { token: 'invalid' } + context "when invalid token is provided" do + it "returns 403 error" do + post api("/runners"), params: {token: "invalid"} expect(response).to have_gitlab_http_status 403 end end - context 'when valid token is provided' do - it 'creates runner with default values' do - post api('/runners'), params: { token: registration_token } + context "when valid token is provided" do + it "creates runner with default values" do + post api("/runners"), params: {token: registration_token} runner = Ci::Runner.first expect(response).to have_gitlab_http_status 201 - expect(json_response['id']).to eq(runner.id) - expect(json_response['token']).to eq(runner.token) + expect(json_response["id"]).to eq(runner.id) + expect(json_response["token"]).to eq(runner.token) expect(runner.run_untagged).to be true expect(runner.active).to be true 
expect(runner.token).not_to eq(registration_token) expect(runner).to be_instance_type end - context 'when project token is used' do + context "when project token is used" do let(:project) { create(:project) } - it 'creates project runner' do - post api('/runners'), params: { token: project.runners_token } + it "creates project runner" do + post api("/runners"), params: {token: project.runners_token} expect(response).to have_gitlab_http_status 201 expect(project.runners.size).to eq(1) @@ -61,11 +61,11 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'when group token is used' do + context "when group token is used" do let(:group) { create(:group) } - it 'creates a group runner' do - post api('/runners'), params: { token: group.runners_token } + it "creates a group runner" do + post api("/runners"), params: {token: group.runners_token} expect(response).to have_http_status 201 expect(group.runners.size).to eq(1) @@ -77,90 +77,91 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'when runner description is provided' do - it 'creates runner' do - post api('/runners'), params: { - token: registration_token, - description: 'server.hostname' - } + context "when runner description is provided" do + it "creates runner" do + post api("/runners"), params: { + token: registration_token, + description: "server.hostname", + } expect(response).to have_gitlab_http_status 201 - expect(Ci::Runner.first.description).to eq('server.hostname') + expect(Ci::Runner.first.description).to eq("server.hostname") end end - context 'when runner tags are provided' do - it 'creates runner' do - post api('/runners'), params: { - token: registration_token, - tag_list: 'tag1, tag2' - } + context "when runner tags are provided" do + it "creates runner" do + post api("/runners"), params: { + token: registration_token, + tag_list: "tag1, tag2", + } expect(response).to have_gitlab_http_status 201 - expect(Ci::Runner.first.tag_list.sort).to 
eq(%w(tag1 tag2)) + expect(Ci::Runner.first.tag_list.sort).to eq(%w[tag1 tag2]) end end - context 'when option for running untagged jobs is provided' do - context 'when tags are provided' do - it 'creates runner' do - post api('/runners'), params: { - token: registration_token, - run_untagged: false, - tag_list: ['tag'] - } + context "when option for running untagged jobs is provided" do + context "when tags are provided" do + it "creates runner" do + post api("/runners"), params: { + token: registration_token, + run_untagged: false, + tag_list: ["tag"], + } expect(response).to have_gitlab_http_status 201 expect(Ci::Runner.first.run_untagged).to be false - expect(Ci::Runner.first.tag_list.sort).to eq(['tag']) + expect(Ci::Runner.first.tag_list.sort).to eq(["tag"]) end end - context 'when tags are not provided' do - it 'returns 400 error' do - post api('/runners'), params: { - token: registration_token, - run_untagged: false - } + context "when tags are not provided" do + it "returns 400 error" do + post api("/runners"), params: { + token: registration_token, + run_untagged: false, + } expect(response).to have_gitlab_http_status 400 - expect(json_response['message']).to include( - 'tags_list' => ['can not be empty when runner is not allowed to pick untagged jobs']) + expect(json_response["message"]).to include( + "tags_list" => ["can not be empty when runner is not allowed to pick untagged jobs"] + ) end end end - context 'when option for locking Runner is provided' do - it 'creates runner' do - post api('/runners'), params: { - token: registration_token, - locked: true - } + context "when option for locking Runner is provided" do + it "creates runner" do + post api("/runners"), params: { + token: registration_token, + locked: true, + } expect(response).to have_gitlab_http_status 201 expect(Ci::Runner.first.locked).to be true end end - context 'when option for activating a Runner is provided' do - context 'when active is set to true' do - it 'creates runner' do - 
post api('/runners'), params: { - token: registration_token, - active: true - } + context "when option for activating a Runner is provided" do + context "when active is set to true" do + it "creates runner" do + post api("/runners"), params: { + token: registration_token, + active: true, + } expect(response).to have_gitlab_http_status 201 expect(Ci::Runner.first.active).to be true end end - context 'when active is set to false' do - it 'creates runner' do - post api('/runners'), params: { - token: registration_token, - active: false - } + context "when active is set to false" do + it "creates runner" do + post api("/runners"), params: { + token: registration_token, + active: false, + } expect(response).to have_gitlab_http_status 201 expect(Ci::Runner.first.active).to be false @@ -168,23 +169,23 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'when maximum job timeout is specified' do - it 'creates runner' do - post api('/runners'), params: { - token: registration_token, - maximum_timeout: 9000 - } + context "when maximum job timeout is specified" do + it "creates runner" do + post api("/runners"), params: { + token: registration_token, + maximum_timeout: 9000, + } expect(response).to have_gitlab_http_status 201 expect(Ci::Runner.first.maximum_timeout).to eq(9000) end - context 'when maximum job timeout is empty' do - it 'creates runner' do - post api('/runners'), params: { - token: registration_token, - maximum_timeout: '' - } + context "when maximum job timeout is empty" do + it "creates runner" do + post api("/runners"), params: { + token: registration_token, + maximum_timeout: "", + } expect(response).to have_gitlab_http_status 201 expect(Ci::Runner.first.maximum_timeout).to be_nil @@ -192,15 +193,15 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - %w(name version revision platform architecture).each do |param| + %w[name version revision platform architecture].each do |param| context "when info parameter 
'#{param}' info is present" do let(:value) { "#{param}_value" } it "updates provided Runner's parameter" do - post api('/runners'), params: { - token: registration_token, - info: { param => value } - } + post api("/runners"), params: { + token: registration_token, + info: {param => value}, + } expect(response).to have_gitlab_http_status 201 expect(Ci::Runner.first.read_attribute(param.to_sym)).to eq(value) @@ -209,71 +210,71 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end it "sets the runner's ip_address" do - post api('/runners'), - params: { token: registration_token }, - headers: { 'X-Forwarded-For' => '123.111.123.111' } + post api("/runners"), + params: {token: registration_token}, + headers: {"X-Forwarded-For" => "123.111.123.111"} expect(response).to have_gitlab_http_status 201 - expect(Ci::Runner.first.ip_address).to eq('123.111.123.111') + expect(Ci::Runner.first.ip_address).to eq("123.111.123.111") end end - describe 'DELETE /api/v4/runners' do - context 'when no token is provided' do - it 'returns 400 error' do - delete api('/runners') + describe "DELETE /api/v4/runners" do + context "when no token is provided" do + it "returns 400 error" do + delete api("/runners") expect(response).to have_gitlab_http_status 400 end end - context 'when invalid token is provided' do - it 'returns 403 error' do - delete api('/runners'), params: { token: 'invalid' } + context "when invalid token is provided" do + it "returns 403 error" do + delete api("/runners"), params: {token: "invalid"} expect(response).to have_gitlab_http_status 403 end end - context 'when valid token is provided' do + context "when valid token is provided" do let(:runner) { create(:ci_runner) } - it 'deletes Runner' do - delete api('/runners'), params: { token: runner.token } + it "deletes Runner" do + delete api("/runners"), params: {token: runner.token} expect(response).to have_gitlab_http_status 204 expect(Ci::Runner.count).to eq(0) end - it_behaves_like '412 response' do - 
let(:request) { api('/runners') } - let(:params) { { token: runner.token } } + it_behaves_like "412 response" do + let(:request) { api("/runners") } + let(:params) { {token: runner.token} } end end end - describe 'POST /api/v4/runners/verify' do + describe "POST /api/v4/runners/verify" do let(:runner) { create(:ci_runner) } - context 'when no token is provided' do - it 'returns 400 error' do - post api('/runners/verify') + context "when no token is provided" do + it "returns 400 error" do + post api("/runners/verify") expect(response).to have_gitlab_http_status :bad_request end end - context 'when invalid token is provided' do - it 'returns 403 error' do - post api('/runners/verify'), params: { token: 'invalid-token' } + context "when invalid token is provided" do + it "returns 403 error" do + post api("/runners/verify"), params: {token: "invalid-token"} expect(response).to have_gitlab_http_status 403 end end - context 'when valid token is provided' do - it 'verifies Runner credentials' do - post api('/runners/verify'), params: { token: runner.token } + context "when valid token is provided" do + it "verifies Runner credentials" do + post api("/runners/verify"), params: {token: runner.token} expect(response).to have_gitlab_http_status 200 end @@ -281,309 +282,309 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - describe '/api/v4/jobs' do + describe "/api/v4/jobs" do let(:project) { create(:project, shared_runners_enabled: false) } - let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') } + let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: "master") } let(:runner) { create(:ci_runner, :project, projects: [project]) } let(:job) do create(:ci_build, :artifacts, :extended_options, - pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) + pipeline: pipeline, name: "spinach", stage: "test", stage_idx: 0) end - describe 'POST /api/v4/jobs/request' do + describe "POST 
/api/v4/jobs/request" do let!(:last_update) {} let!(:new_update) { } - let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' } + let(:user_agent) { "gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)" } before do job stub_container_registry_config(enabled: false) end - shared_examples 'no jobs available' do + shared_examples "no jobs available" do before do request_job end - context 'when runner sends version in User-Agent' do - context 'for stable version' do - it 'gives 204 and set X-GitLab-Last-Update' do + context "when runner sends version in User-Agent" do + context "for stable version" do + it "gives 204 and set X-GitLab-Last-Update" do expect(response).to have_gitlab_http_status(204) - expect(response.header).to have_key('X-GitLab-Last-Update') + expect(response.header).to have_key("X-GitLab-Last-Update") end end - context 'when last_update is up-to-date' do + context "when last_update is up-to-date" do let(:last_update) { runner.ensure_runner_queue_value } - it 'gives 204 and set the same X-GitLab-Last-Update' do + it "gives 204 and set the same X-GitLab-Last-Update" do expect(response).to have_gitlab_http_status(204) - expect(response.header['X-GitLab-Last-Update']).to eq(last_update) + expect(response.header["X-GitLab-Last-Update"]).to eq(last_update) end end - context 'when last_update is outdated' do + context "when last_update is outdated" do let(:last_update) { runner.ensure_runner_queue_value } let(:new_update) { runner.tick_runner_queue } - it 'gives 204 and set a new X-GitLab-Last-Update' do + it "gives 204 and set a new X-GitLab-Last-Update" do expect(response).to have_gitlab_http_status(204) - expect(response.header['X-GitLab-Last-Update']).to eq(new_update) + expect(response.header["X-GitLab-Last-Update"]).to eq(new_update) end end - context 'when beta version is sent' do - let(:user_agent) { 'gitlab-runner 9.0.0~beta.167.g2b2bacc (master; go1.7.4; linux/amd64)' } + context "when beta version is sent" do + 
let(:user_agent) { "gitlab-runner 9.0.0~beta.167.g2b2bacc (master; go1.7.4; linux/amd64)" } it { expect(response).to have_gitlab_http_status(204) } end - context 'when pre-9-0 version is sent' do - let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0 (1-6-stable; go1.6.3; linux/amd64)' } + context "when pre-9-0 version is sent" do + let(:user_agent) { "gitlab-ci-multi-runner 1.6.0 (1-6-stable; go1.6.3; linux/amd64)" } it { expect(response).to have_gitlab_http_status(204) } end - context 'when pre-9-0 beta version is sent' do - let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0~beta.167.g2b2bacc (master; go1.6.3; linux/amd64)' } + context "when pre-9-0 beta version is sent" do + let(:user_agent) { "gitlab-ci-multi-runner 1.6.0~beta.167.g2b2bacc (master; go1.6.3; linux/amd64)" } it { expect(response).to have_gitlab_http_status(204) } end end end - context 'when no token is provided' do - it 'returns 400 error' do - post api('/jobs/request') + context "when no token is provided" do + it "returns 400 error" do + post api("/jobs/request") expect(response).to have_gitlab_http_status 400 end end - context 'when invalid token is provided' do - it 'returns 403 error' do - post api('/jobs/request'), params: { token: 'invalid' } + context "when invalid token is provided" do + it "returns 403 error" do + post api("/jobs/request"), params: {token: "invalid"} expect(response).to have_gitlab_http_status 403 end end - context 'when valid token is provided' do - context 'when Runner is not active' do + context "when valid token is provided" do + context "when Runner is not active" do let(:runner) { create(:ci_runner, :inactive) } let(:update_value) { runner.ensure_runner_queue_value } - it 'returns 204 error' do + it "returns 204 error" do request_job expect(response).to have_gitlab_http_status(204) - expect(response.header['X-GitLab-Last-Update']).to eq(update_value) + expect(response.header["X-GitLab-Last-Update"]).to eq(update_value) end end - context 'when jobs are finished' do + 
context "when jobs are finished" do before do job.success end - it_behaves_like 'no jobs available' + it_behaves_like "no jobs available" end - context 'when other projects have pending jobs' do + context "when other projects have pending jobs" do before do job.success create(:ci_build, :pending) end - it_behaves_like 'no jobs available' + it_behaves_like "no jobs available" end - context 'when shared runner requests job for project without shared_runners_enabled' do + context "when shared runner requests job for project without shared_runners_enabled" do let(:runner) { create(:ci_runner, :instance) } - it_behaves_like 'no jobs available' + it_behaves_like "no jobs available" end - context 'when there is a pending job' do + context "when there is a pending job" do let(:expected_job_info) do - { 'name' => job.name, - 'stage' => job.stage, - 'project_id' => job.project.id, - 'project_name' => job.project.name } + {"name" => job.name, + "stage" => job.stage, + "project_id" => job.project.id, + "project_name" => job.project.name,} end let(:expected_git_info) do - { 'repo_url' => job.repo_url, - 'ref' => job.ref, - 'sha' => job.sha, - 'before_sha' => job.before_sha, - 'ref_type' => 'branch', - 'refspecs' => %w[+refs/heads/*:refs/remotes/origin/* +refs/tags/*:refs/tags/*], - 'depth' => 0 } + {"repo_url" => job.repo_url, + "ref" => job.ref, + "sha" => job.sha, + "before_sha" => job.before_sha, + "ref_type" => "branch", + "refspecs" => %w[+refs/heads/*:refs/remotes/origin/* +refs/tags/*:refs/tags/*], + "depth" => 0,} end let(:expected_steps) do - [{ 'name' => 'script', - 'script' => %w(echo), - 'timeout' => job.metadata_timeout, - 'when' => 'on_success', - 'allow_failure' => false }, - { 'name' => 'after_script', - 'script' => %w(ls date), - 'timeout' => job.metadata_timeout, - 'when' => 'always', - 'allow_failure' => true }] + [{"name" => "script", + "script" => %w[echo], + "timeout" => job.metadata_timeout, + "when" => "on_success", + "allow_failure" => false,}, + 
{"name" => "after_script", + "script" => %w[ls date], + "timeout" => job.metadata_timeout, + "when" => "always", + "allow_failure" => true,},] end let(:expected_variables) do - [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false }, - { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false }, - { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false }] + [{"key" => "CI_JOB_NAME", "value" => "spinach", "public" => true, "masked" => false}, + {"key" => "CI_JOB_STAGE", "value" => "test", "public" => true, "masked" => false}, + {"key" => "DB_NAME", "value" => "postgres", "public" => true, "masked" => false},] end let(:expected_artifacts) do - [{ 'name' => 'artifacts_file', - 'untracked' => false, - 'paths' => %w(out/), - 'when' => 'always', - 'expire_in' => '7d', - "artifact_type" => "archive", - "artifact_format" => "zip" }] + [{"name" => "artifacts_file", + "untracked" => false, + "paths" => %w[out/], + "when" => "always", + "expire_in" => "7d", + "artifact_type" => "archive", + "artifact_format" => "zip",}] end let(:expected_cache) do - [{ 'key' => 'cache_key', - 'untracked' => false, - 'paths' => ['vendor/*'], - 'policy' => 'pull-push' }] + [{"key" => "cache_key", + "untracked" => false, + "paths" => ["vendor/*"], + "policy" => "pull-push",}] end - let(:expected_features) { { 'trace_sections' => true } } + let(:expected_features) { {"trace_sections" => true} } - it 'picks a job' do - request_job info: { platform: :darwin } + it "picks a job" do + request_job info: {platform: :darwin} expect(response).to have_gitlab_http_status(201) - expect(response.headers).not_to have_key('X-GitLab-Last-Update') - expect(runner.reload.platform).to eq('darwin') - expect(json_response['id']).to eq(job.id) - expect(json_response['token']).to eq(job.token) - expect(json_response['job_info']).to eq(expected_job_info) - expect(json_response['git_info']).to eq(expected_git_info) - 
expect(json_response['image']).to eq({ 'name' => 'ruby:2.1', 'entrypoint' => '/bin/sh' }) - expect(json_response['services']).to eq([{ 'name' => 'postgres', 'entrypoint' => nil, - 'alias' => nil, 'command' => nil }, - { 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh', - 'alias' => 'docker', 'command' => 'sleep 30' }]) - expect(json_response['steps']).to eq(expected_steps) - expect(json_response['artifacts']).to eq(expected_artifacts) - expect(json_response['cache']).to eq(expected_cache) - expect(json_response['variables']).to include(*expected_variables) - expect(json_response['features']).to eq(expected_features) - end - - context 'when job is made for tag' do - let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } - - it 'sets branch as ref_type' do + expect(response.headers).not_to have_key("X-GitLab-Last-Update") + expect(runner.reload.platform).to eq("darwin") + expect(json_response["id"]).to eq(job.id) + expect(json_response["token"]).to eq(job.token) + expect(json_response["job_info"]).to eq(expected_job_info) + expect(json_response["git_info"]).to eq(expected_git_info) + expect(json_response["image"]).to eq({"name" => "ruby:2.1", "entrypoint" => "/bin/sh"}) + expect(json_response["services"]).to eq([{"name" => "postgres", "entrypoint" => nil, + "alias" => nil, "command" => nil,}, + {"name" => "docker:stable-dind", "entrypoint" => "/bin/sh", + "alias" => "docker", "command" => "sleep 30",},]) + expect(json_response["steps"]).to eq(expected_steps) + expect(json_response["artifacts"]).to eq(expected_artifacts) + expect(json_response["cache"]).to eq(expected_cache) + expect(json_response["variables"]).to include(*expected_variables) + expect(json_response["features"]).to eq(expected_features) + end + + context "when job is made for tag" do + let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: "spinach", stage: "test", stage_idx: 0) } + + it "sets branch as ref_type" do request_job 
expect(response).to have_gitlab_http_status(201) - expect(json_response['git_info']['ref_type']).to eq('tag') + expect(json_response["git_info"]["ref_type"]).to eq("tag") end - context 'when GIT_DEPTH is specified' do + context "when GIT_DEPTH is specified" do before do - create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline) + create(:ci_pipeline_variable, key: "GIT_DEPTH", value: 1, pipeline: pipeline) end - it 'specifies refspecs' do + it "specifies refspecs" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['git_info']['refspecs']).to include("+refs/tags/#{job.ref}:refs/tags/#{job.ref}") + expect(json_response["git_info"]["refspecs"]).to include("+refs/tags/#{job.ref}:refs/tags/#{job.ref}") end end - context 'when GIT_DEPTH is not specified' do - it 'specifies refspecs' do + context "when GIT_DEPTH is not specified" do + it "specifies refspecs" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['git_info']['refspecs']) - .to contain_exactly('+refs/tags/*:refs/tags/*', '+refs/heads/*:refs/remotes/origin/*') + expect(json_response["git_info"]["refspecs"]) + .to contain_exactly("+refs/tags/*:refs/tags/*", "+refs/heads/*:refs/remotes/origin/*") end end end - context 'when job is made for branch' do - it 'sets tag as ref_type' do + context "when job is made for branch" do + it "sets tag as ref_type" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['git_info']['ref_type']).to eq('branch') + expect(json_response["git_info"]["ref_type"]).to eq("branch") end - context 'when GIT_DEPTH is specified' do + context "when GIT_DEPTH is specified" do before do - create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline) + create(:ci_pipeline_variable, key: "GIT_DEPTH", value: 1, pipeline: pipeline) end - it 'specifies refspecs' do + it "specifies refspecs" do request_job expect(response).to have_gitlab_http_status(201) - 
expect(json_response['git_info']['refspecs']).to include("+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}") + expect(json_response["git_info"]["refspecs"]).to include("+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}") end end - context 'when GIT_DEPTH is not specified' do - it 'specifies refspecs' do + context "when GIT_DEPTH is not specified" do + it "specifies refspecs" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['git_info']['refspecs']) - .to contain_exactly('+refs/tags/*:refs/tags/*', '+refs/heads/*:refs/remotes/origin/*') + expect(json_response["git_info"]["refspecs"]) + .to contain_exactly("+refs/tags/*:refs/tags/*", "+refs/heads/*:refs/remotes/origin/*") end end end - context 'when job is made for merge request' do - let(:pipeline) { create(:ci_pipeline_without_jobs, source: :merge_request, project: project, ref: 'feature', merge_request: merge_request) } - let!(:job) { create(:ci_build, pipeline: pipeline, name: 'spinach', ref: 'feature', stage: 'test', stage_idx: 0) } + context "when job is made for merge request" do + let(:pipeline) { create(:ci_pipeline_without_jobs, source: :merge_request, project: project, ref: "feature", merge_request: merge_request) } + let!(:job) { create(:ci_build, pipeline: pipeline, name: "spinach", ref: "feature", stage: "test", stage_idx: 0) } let(:merge_request) { create(:merge_request) } - it 'sets branch as ref_type' do + it "sets branch as ref_type" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['git_info']['ref_type']).to eq('branch') + expect(json_response["git_info"]["ref_type"]).to eq("branch") end - context 'when GIT_DEPTH is specified' do + context "when GIT_DEPTH is specified" do before do - create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline) + create(:ci_pipeline_variable, key: "GIT_DEPTH", value: 1, pipeline: pipeline) end - it 'returns the overwritten git depth for merge request refspecs' do + 
it "returns the overwritten git depth for merge request refspecs" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['git_info']['depth']).to eq(1) + expect(json_response["git_info"]["depth"]).to eq(1) end end end - it 'updates runner info' do + it "updates runner info" do expect { request_job }.to change { runner.reload.contacted_at } end - %w(version revision platform architecture).each do |param| + %w[version revision platform architecture].each do |param| context "when info parameter '#{param}' is present" do let(:value) { "#{param}_value" } it "updates provided Runner's parameter" do - request_job info: { param => value } + request_job info: {param => value} expect(response).to have_gitlab_http_status(201) expect(runner.reload.read_attribute(param.to_sym)).to eq(value) @@ -592,86 +593,88 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end it "sets the runner's ip_address" do - post api('/jobs/request'), - params: { token: runner.token }, - headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222' } + post api("/jobs/request"), + params: {token: runner.token}, + headers: {"User-Agent" => user_agent, "X-Forwarded-For" => "123.222.123.222"} expect(response).to have_gitlab_http_status 201 - expect(runner.reload.ip_address).to eq('123.222.123.222') + expect(runner.reload.ip_address).to eq("123.222.123.222") end it "handles multiple X-Forwarded-For addresses" do - post api('/jobs/request'), - params: { token: runner.token }, - headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222, 127.0.0.1' } + post api("/jobs/request"), + params: {token: runner.token}, + headers: {"User-Agent" => user_agent, "X-Forwarded-For" => "123.222.123.222, 127.0.0.1"} expect(response).to have_gitlab_http_status 201 - expect(runner.reload.ip_address).to eq('123.222.123.222') + expect(runner.reload.ip_address).to eq("123.222.123.222") end - context 'when concurrently updating a job' do + context "when 
concurrently updating a job" do before do expect_any_instance_of(Ci::Build).to receive(:run!) - .and_raise(ActiveRecord::StaleObjectError.new(nil, nil)) + .and_raise(ActiveRecord::StaleObjectError.new(nil, nil)) end - it 'returns a conflict' do + it "returns a conflict" do request_job expect(response).to have_gitlab_http_status(409) - expect(response.headers).not_to have_key('X-GitLab-Last-Update') + expect(response.headers).not_to have_key("X-GitLab-Last-Update") end end - context 'when project and pipeline have multiple jobs' do - let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } - let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) } - let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) } + context "when project and pipeline have multiple jobs" do + let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: "spinach", stage: "test", stage_idx: 0) } + let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: "rubocop", stage: "test", stage_idx: 0) } + let!(:test_job) { create(:ci_build, pipeline: pipeline, name: "deploy", stage: "deploy", stage_idx: 1) } before do job.success job2.success end - it 'returns dependent jobs' do + it "returns dependent jobs" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['id']).to eq(test_job.id) - expect(json_response['dependencies'].count).to eq(2) - expect(json_response['dependencies']).to include( - { 'id' => job.id, 'name' => job.name, 'token' => job.token }, - { 'id' => job2.id, 'name' => job2.name, 'token' => job2.token }) + expect(json_response["id"]).to eq(test_job.id) + expect(json_response["dependencies"].count).to eq(2) + expect(json_response["dependencies"]).to include( + {"id" => job.id, "name" => job.name, "token" => job.token}, + {"id" => job2.id, "name" => job2.name, "token" => job2.token} + ) end end - context 
'when pipeline have jobs with artifacts' do - let!(:job) { create(:ci_build, :tag, :artifacts, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } - let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) } + context "when pipeline have jobs with artifacts" do + let!(:job) { create(:ci_build, :tag, :artifacts, pipeline: pipeline, name: "spinach", stage: "test", stage_idx: 0) } + let!(:test_job) { create(:ci_build, pipeline: pipeline, name: "deploy", stage: "deploy", stage_idx: 1) } before do job.success end - it 'returns dependent jobs' do + it "returns dependent jobs" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['id']).to eq(test_job.id) - expect(json_response['dependencies'].count).to eq(1) - expect(json_response['dependencies']).to include( - { 'id' => job.id, 'name' => job.name, 'token' => job.token, - 'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 106365 } }) + expect(json_response["id"]).to eq(test_job.id) + expect(json_response["dependencies"].count).to eq(1) + expect(json_response["dependencies"]).to include( + {"id" => job.id, "name" => job.name, "token" => job.token, + "artifacts_file" => {"filename" => "ci_build_artifacts.zip", "size" => 106365},} + ) end end - context 'when explicit dependencies are defined' do - let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } - let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) } + context "when explicit dependencies are defined" do + let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: "spinach", stage: "test", stage_idx: 0) } + let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: "rubocop", stage: "test", stage_idx: 0) } let!(:test_job) do - create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'deploy', - stage: 'deploy', stage_idx: 1, - 
options: { script: ['bash'], dependencies: [job2.name] }) + create(:ci_build, pipeline: pipeline, token: "test-job-token", name: "deploy", + stage: "deploy", stage_idx: 1, + options: {script: ["bash"], dependencies: [job2.name]}) end before do @@ -679,23 +682,23 @@ describe API::Runner, :clean_gitlab_redis_shared_state do job2.success end - it 'returns dependent jobs' do + it "returns dependent jobs" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['id']).to eq(test_job.id) - expect(json_response['dependencies'].count).to eq(1) - expect(json_response['dependencies'][0]).to include('id' => job2.id, 'name' => job2.name, 'token' => job2.token) + expect(json_response["id"]).to eq(test_job.id) + expect(json_response["dependencies"].count).to eq(1) + expect(json_response["dependencies"][0]).to include("id" => job2.id, "name" => job2.name, "token" => job2.token) end end - context 'when dependencies is an empty array' do - let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } - let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) } + context "when dependencies is an empty array" do + let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: "spinach", stage: "test", stage_idx: 0) } + let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: "rubocop", stage: "test", stage_idx: 0) } let!(:empty_dependencies_job) do - create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'empty_dependencies_job', - stage: 'deploy', stage_idx: 1, - options: { script: ['bash'], dependencies: [] }) + create(:ci_build, pipeline: pipeline, token: "test-job-token", name: "empty_dependencies_job", + stage: "deploy", stage_idx: 1, + options: {script: ["bash"], dependencies: []}) end before do @@ -703,150 +706,150 @@ describe API::Runner, :clean_gitlab_redis_shared_state do job2.success end - it 'returns an empty array' do + it "returns 
an empty array" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['id']).to eq(empty_dependencies_job.id) - expect(json_response['dependencies'].count).to eq(0) + expect(json_response["id"]).to eq(empty_dependencies_job.id) + expect(json_response["dependencies"].count).to eq(0) end end - context 'when job has no tags' do + context "when job has no tags" do before do job.update(tags: []) end - context 'when runner is allowed to pick untagged jobs' do + context "when runner is allowed to pick untagged jobs" do before do runner.update_column(:run_untagged, true) end - it 'picks job' do + it "picks job" do request_job expect(response).to have_gitlab_http_status 201 end end - context 'when runner is not allowed to pick untagged jobs' do + context "when runner is not allowed to pick untagged jobs" do before do runner.update_column(:run_untagged, false) end - it_behaves_like 'no jobs available' + it_behaves_like "no jobs available" end end - context 'when triggered job is available' do + context "when triggered job is available" do let(:expected_variables) do - [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false }, - { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false }, - { 'key' => 'CI_PIPELINE_TRIGGERED', 'value' => 'true', 'public' => true, 'masked' => false }, - { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false }, - { 'key' => 'SECRET_KEY', 'value' => 'secret_value', 'public' => false, 'masked' => false }, - { 'key' => 'TRIGGER_KEY_1', 'value' => 'TRIGGER_VALUE_1', 'public' => false, 'masked' => false }] + [{"key" => "CI_JOB_NAME", "value" => "spinach", "public" => true, "masked" => false}, + {"key" => "CI_JOB_STAGE", "value" => "test", "public" => true, "masked" => false}, + {"key" => "CI_PIPELINE_TRIGGERED", "value" => "true", "public" => true, "masked" => false}, + {"key" => "DB_NAME", "value" => "postgres", "public" => true, "masked" 
=> false}, + {"key" => "SECRET_KEY", "value" => "secret_value", "public" => false, "masked" => false}, + {"key" => "TRIGGER_KEY_1", "value" => "TRIGGER_VALUE_1", "public" => false, "masked" => false},] end let(:trigger) { create(:ci_trigger, project: project) } let!(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, builds: [job], trigger: trigger) } before do - project.variables << Ci::Variable.new(key: 'SECRET_KEY', value: 'secret_value') + project.variables << Ci::Variable.new(key: "SECRET_KEY", value: "secret_value") end - shared_examples 'expected variables behavior' do - it 'returns variables for triggers' do + shared_examples "expected variables behavior" do + it "returns variables for triggers" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['variables']).to include(*expected_variables) + expect(json_response["variables"]).to include(*expected_variables) end end - context 'when variables are stored in trigger_request' do + context "when variables are stored in trigger_request" do before do - trigger_request.update_attribute(:variables, { TRIGGER_KEY_1: 'TRIGGER_VALUE_1' } ) + trigger_request.update_attribute(:variables, {TRIGGER_KEY_1: "TRIGGER_VALUE_1"}) end - it_behaves_like 'expected variables behavior' + it_behaves_like "expected variables behavior" end - context 'when variables are stored in pipeline_variables' do + context "when variables are stored in pipeline_variables" do before do - create(:ci_pipeline_variable, pipeline: pipeline, key: :TRIGGER_KEY_1, value: 'TRIGGER_VALUE_1') + create(:ci_pipeline_variable, pipeline: pipeline, key: :TRIGGER_KEY_1, value: "TRIGGER_VALUE_1") end - it_behaves_like 'expected variables behavior' + it_behaves_like "expected variables behavior" end end - describe 'registry credentials support' do - let(:registry_url) { 'registry.example.com:5005' } + describe "registry credentials support" do + let(:registry_url) { "registry.example.com:5005" } 
let(:registry_credentials) do - { 'type' => 'registry', - 'url' => registry_url, - 'username' => 'gitlab-ci-token', - 'password' => job.token } + {"type" => "registry", + "url" => registry_url, + "username" => "gitlab-ci-token", + "password" => job.token,} end - context 'when registry is enabled' do + context "when registry is enabled" do before do stub_container_registry_config(enabled: true, host_port: registry_url) end - it 'sends registry credentials key' do + it "sends registry credentials key" do request_job - expect(json_response).to have_key('credentials') - expect(json_response['credentials']).to include(registry_credentials) + expect(json_response).to have_key("credentials") + expect(json_response["credentials"]).to include(registry_credentials) end end - context 'when registry is disabled' do + context "when registry is disabled" do before do stub_container_registry_config(enabled: false, host_port: registry_url) end - it 'does not send registry credentials' do + it "does not send registry credentials" do request_job - expect(json_response).to have_key('credentials') - expect(json_response['credentials']).not_to include(registry_credentials) + expect(json_response).to have_key("credentials") + expect(json_response["credentials"]).not_to include(registry_credentials) end end end - describe 'timeout support' do - context 'when project specifies job timeout' do + describe "timeout support" do + context "when project specifies job timeout" do let(:project) { create(:project, shared_runners_enabled: false, build_timeout: 1234) } - it 'contains info about timeout taken from project' do + it "contains info about timeout taken from project" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['runner_info']).to include({ 'timeout' => 1234 }) + expect(json_response["runner_info"]).to include({"timeout" => 1234}) end - context 'when runner specifies lower timeout' do + context "when runner specifies lower timeout" do let(:runner) 
{ create(:ci_runner, :project, maximum_timeout: 1000, projects: [project]) } - it 'contains info about timeout overridden by runner' do + it "contains info about timeout overridden by runner" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['runner_info']).to include({ 'timeout' => 1000 }) + expect(json_response["runner_info"]).to include({"timeout" => 1000}) end end - context 'when runner specifies bigger timeout' do + context "when runner specifies bigger timeout" do let(:runner) { create(:ci_runner, :project, maximum_timeout: 2000, projects: [project]) } - it 'contains info about timeout not overridden by runner' do + it "contains info about timeout not overridden by runner" do request_job expect(response).to have_gitlab_http_status(201) - expect(json_response['runner_info']).to include({ 'timeout' => 1234 }) + expect(json_response["runner_info"]).to include({"timeout" => 1234}) end end end @@ -855,64 +858,64 @@ describe API::Runner, :clean_gitlab_redis_shared_state do def request_job(token = runner.token, **params) new_params = params.merge(token: token, last_update: last_update) - post api('/jobs/request'), params: new_params, headers: { 'User-Agent' => user_agent } + post api("/jobs/request"), params: new_params, headers: {"User-Agent" => user_agent} end end end - describe 'PUT /api/v4/jobs/:id' do + describe "PUT /api/v4/jobs/:id" do let(:job) { create(:ci_build, :pending, :trace_live, pipeline: pipeline, runner_id: runner.id) } before do job.run! 
end - context 'when status is given' do - it 'mark job as succeeded' do - update_job(state: 'success') + context "when status is given" do + it "mark job as succeeded" do + update_job(state: "success") job.reload expect(job).to be_success end - it 'mark job as failed' do - update_job(state: 'failed') + it "mark job as failed" do + update_job(state: "failed") job.reload expect(job).to be_failed expect(job).to be_unknown_failure end - context 'when failure_reason is script_failure' do + context "when failure_reason is script_failure" do before do - update_job(state: 'failed', failure_reason: 'script_failure') + update_job(state: "failed", failure_reason: "script_failure") job.reload end it { expect(job).to be_script_failure } end - context 'when failure_reason is runner_system_failure' do + context "when failure_reason is runner_system_failure" do before do - update_job(state: 'failed', failure_reason: 'runner_system_failure') + update_job(state: "failed", failure_reason: "runner_system_failure") job.reload end it { expect(job).to be_runner_system_failure } end - context 'when failure_reason is unrecognized value' do + context "when failure_reason is unrecognized value" do before do - update_job(state: 'failed', failure_reason: 'what_is_this') + update_job(state: "failed", failure_reason: "what_is_this") job.reload end it { expect(job).to be_unknown_failure } end - context 'when failure_reason is job_execution_timeout' do + context "when failure_reason is job_execution_timeout" do before do - update_job(state: 'failed', failure_reason: 'job_execution_timeout') + update_job(state: "failed", failure_reason: "job_execution_timeout") job.reload end @@ -920,76 +923,76 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'when trace is given' do - it 'creates a trace artifact' do + context "when trace is given" do + it "creates a trace artifact" do allow(BuildFinishedWorker).to receive(:perform_async).with(job.id) do 
ArchiveTraceWorker.new.perform(job.id) end - update_job(state: 'success', trace: 'BUILD TRACE UPDATED') + update_job(state: "success", trace: "BUILD TRACE UPDATED") job.reload expect(response).to have_gitlab_http_status(200) - expect(job.trace.raw).to eq 'BUILD TRACE UPDATED' - expect(job.job_artifacts_trace.open.read).to eq 'BUILD TRACE UPDATED' + expect(job.trace.raw).to eq "BUILD TRACE UPDATED" + expect(job.job_artifacts_trace.open.read).to eq "BUILD TRACE UPDATED" end - context 'when concurrent update of trace is happening' do + context "when concurrent update of trace is happening" do before do - job.trace.write('wb') do - update_job(state: 'success', trace: 'BUILD TRACE UPDATED') + job.trace.write("wb") do + update_job(state: "success", trace: "BUILD TRACE UPDATED") end end - it 'returns that operation conflicts' do + it "returns that operation conflicts" do expect(response.status).to eq(409) end end end - context 'when no trace is given' do - it 'does not override trace information' do + context "when no trace is given" do + it "does not override trace information" do update_job - expect(job.reload.trace.raw).to eq 'BUILD TRACE' + expect(job.reload.trace.raw).to eq "BUILD TRACE" end - context 'when running state is sent' do - it 'updates update_at value' do + context "when running state is sent" do + it "updates update_at value" do expect { update_job_after_time }.to change { job.reload.updated_at } end end - context 'when other state is sent' do + context "when other state is sent" do it "doesn't update update_at value" do - expect { update_job_after_time(20.minutes, state: 'success') }.not_to change { job.reload.updated_at } + expect { update_job_after_time(20.minutes, state: "success") }.not_to change { job.reload.updated_at } end end end - context 'when job has been erased' do + context "when job has been erased" do let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) } - it 'responds with forbidden' do + it "responds with forbidden" 
do update_job expect(response).to have_gitlab_http_status(403) end end - context 'when job has already been finished' do + context "when job has already been finished" do before do - job.trace.set('Job failed') + job.trace.set("Job failed") job.drop!(:script_failure) end - it 'does not update job status and job trace' do - update_job(state: 'success', trace: 'BUILD TRACE UPDATED') + it "does not update job status and job trace" do + update_job(state: "success", trace: "BUILD TRACE UPDATED") job.reload expect(response).to have_gitlab_http_status(403) - expect(response.header['Job-Status']).to eq 'failed' - expect(job.trace.raw).to eq 'Job failed' + expect(response.header["Job-Status"]).to eq "failed" + expect(job.trace.raw).to eq "Job failed" expect(job).to be_failed end end @@ -999,219 +1002,219 @@ describe API::Runner, :clean_gitlab_redis_shared_state do put api("/jobs/#{job.id}"), params: new_params end - def update_job_after_time(update_interval = 20.minutes, state = 'running') + def update_job_after_time(update_interval = 20.minutes, state = "running") Timecop.travel(job.updated_at + update_interval) do update_job(job.token, state: state) end end end - describe 'PATCH /api/v4/jobs/:id/trace' do + describe "PATCH /api/v4/jobs/:id/trace" do let(:job) { create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) } - let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } } - let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) } + let(:headers) { {API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, "Content-Type" => "text/plain"} } + let(:headers_with_range) { headers.merge({"Content-Range" => "11-20"}) } let(:update_interval) { 10.seconds.to_i } before do initial_patch_the_trace end - context 'when request is valid' do - it 'gets correct response' do + context "when request is valid" do + it "gets correct response" do expect(response.status).to eq 202 - 
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended' - expect(response.header).to have_key 'Range' - expect(response.header).to have_key 'Job-Status' + expect(job.reload.trace.raw).to eq "BUILD TRACE appended" + expect(response.header).to have_key "Range" + expect(response.header).to have_key "Job-Status" end - context 'when job has been updated recently' do + context "when job has been updated recently" do it { expect { patch_the_trace }.not_to change { job.updated_at }} it "changes the job's trace" do patch_the_trace - expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended' + expect(job.reload.trace.raw).to eq "BUILD TRACE appended appended" end - context 'when Runner makes a force-patch' do + context "when Runner makes a force-patch" do it { expect { force_patch_the_trace }.not_to change { job.updated_at }} it "doesn't change the build.trace" do force_patch_the_trace - expect(job.reload.trace.raw).to eq 'BUILD TRACE appended' + expect(job.reload.trace.raw).to eq "BUILD TRACE appended" end end end - context 'when job was not updated recently' do + context "when job was not updated recently" do let(:update_interval) { 15.minutes.to_i } it { expect { patch_the_trace }.to change { job.updated_at } } - it 'changes the job.trace' do + it "changes the job.trace" do patch_the_trace - expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended' + expect(job.reload.trace.raw).to eq "BUILD TRACE appended appended" end - context 'when Runner makes a force-patch' do + context "when Runner makes a force-patch" do it { expect { force_patch_the_trace }.to change { job.updated_at } } it "doesn't change the job.trace" do force_patch_the_trace - expect(job.reload.trace.raw).to eq 'BUILD TRACE appended' + expect(job.reload.trace.raw).to eq "BUILD TRACE appended" end end end - context 'when project for the build has been deleted' do + context "when project for the build has been deleted" do let(:job) do create(:ci_build, :running, :trace_live, runner_id: 
runner.id, pipeline: pipeline) do |job| job.project.update(pending_delete: true) end end - it 'responds with forbidden' do + it "responds with forbidden" do expect(response.status).to eq(403) end end - context 'when trace is patched' do + context "when trace is patched" do before do patch_the_trace end - it 'has valid trace' do + it "has valid trace" do expect(response.status).to eq(202) - expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended' + expect(job.reload.trace.raw).to eq "BUILD TRACE appended appended" end - context 'when job is cancelled' do + context "when job is cancelled" do before do job.cancel end - context 'when trace is patched' do + context "when trace is patched" do before do patch_the_trace end - it 'returns Forbidden ' do + it "returns Forbidden " do expect(response.status).to eq(403) end end end - context 'when redis data are flushed' do + context "when redis data are flushed" do before do redis_shared_state_cleanup! end - it 'has empty trace' do - expect(job.reload.trace.raw).to eq '' + it "has empty trace" do + expect(job.reload.trace.raw).to eq "" end - context 'when we perform partial patch' do + context "when we perform partial patch" do before do - patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32/5" })) + patch_the_trace("hello", headers.merge({"Content-Range" => "28-32/5"})) end - it 'returns an error' do + it "returns an error" do expect(response.status).to eq(416) - expect(response.header['Range']).to eq('0-0') + expect(response.header["Range"]).to eq("0-0") end end - context 'when we resend full trace' do + context "when we resend full trace" do before do - patch_the_trace('BUILD TRACE appended appended hello', headers.merge({ 'Content-Range' => "0-34/35" })) + patch_the_trace("BUILD TRACE appended appended hello", headers.merge({"Content-Range" => "0-34/35"})) end - it 'succeeds with updating trace' do + it "succeeds with updating trace" do expect(response.status).to eq(202) - 
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended hello' + expect(job.reload.trace.raw).to eq "BUILD TRACE appended appended hello" end end end end - context 'when concurrent update of trace is happening' do + context "when concurrent update of trace is happening" do before do - job.trace.write('wb') do + job.trace.write("wb") do patch_the_trace end end - it 'returns that operation conflicts' do + it "returns that operation conflicts" do expect(response.status).to eq(409) end end - context 'when the job is canceled' do + context "when the job is canceled" do before do job.cancel patch_the_trace end - it 'receives status in header' do - expect(response.header['Job-Status']).to eq 'canceled' + it "receives status in header" do + expect(response.header["Job-Status"]).to eq "canceled" end end end - context 'when Runner makes a force-patch' do + context "when Runner makes a force-patch" do before do force_patch_the_trace end - it 'gets correct response' do + it "gets correct response" do expect(response.status).to eq 202 - expect(job.reload.trace.raw).to eq 'BUILD TRACE appended' - expect(response.header).to have_key 'Range' - expect(response.header).to have_key 'Job-Status' + expect(job.reload.trace.raw).to eq "BUILD TRACE appended" + expect(response.header).to have_key "Range" + expect(response.header).to have_key "Job-Status" end end - context 'when content-range start is too big' do - let(:headers_with_range) { headers.merge({ 'Content-Range' => '15-20/6' }) } + context "when content-range start is too big" do + let(:headers_with_range) { headers.merge({"Content-Range" => "15-20/6"}) } - it 'gets 416 error response with range headers' do + it "gets 416 error response with range headers" do expect(response.status).to eq 416 - expect(response.header).to have_key 'Range' - expect(response.header['Range']).to eq '0-11' + expect(response.header).to have_key "Range" + expect(response.header["Range"]).to eq "0-11" end end - context 'when content-range start 
is too small' do - let(:headers_with_range) { headers.merge({ 'Content-Range' => '8-20/13' }) } + context "when content-range start is too small" do + let(:headers_with_range) { headers.merge({"Content-Range" => "8-20/13"}) } - it 'gets 416 error response with range headers' do + it "gets 416 error response with range headers" do expect(response.status).to eq 416 - expect(response.header).to have_key 'Range' - expect(response.header['Range']).to eq '0-11' + expect(response.header).to have_key "Range" + expect(response.header["Range"]).to eq "0-11" end end - context 'when Content-Range header is missing' do + context "when Content-Range header is missing" do let(:headers_with_range) { headers } it { expect(response.status).to eq 400 } end - context 'when job has been errased' do + context "when job has been errased" do let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) } it { expect(response.status).to eq 403 } end - def patch_the_trace(content = ' appended', request_headers = nil) + def patch_the_trace(content = " appended", request_headers = nil) unless request_headers job.trace.read do |stream| offset = stream.size limit = offset + content.length - 1 - request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" }) + request_headers = headers.merge({"Content-Range" => "#{offset}-#{limit}"}) end end @@ -1222,80 +1225,80 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end def initial_patch_the_trace - patch_the_trace(' appended', headers_with_range) + patch_the_trace(" appended", headers_with_range) end def force_patch_the_trace - 2.times { patch_the_trace('') } + 2.times { patch_the_trace("") } end end - describe 'artifacts' do + describe "artifacts" do let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner_id: runner.id) } - let(:jwt_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') } - let(:headers) { { 'GitLab-Workhorse' => '1.0', 
Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt_token } } + let(:jwt_token) { JWT.encode({"iss" => "gitlab-workhorse"}, Gitlab::Workhorse.secret, "HS256") } + let(:headers) { {"GitLab-Workhorse" => "1.0", Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt_token} } let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) } - let(:file_upload) { fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif') } - let(:file_upload2) { fixture_file_upload('spec/fixtures/dk.png', 'image/gif') } + let(:file_upload) { fixture_file_upload("spec/fixtures/banana_sample.gif", "image/gif") } + let(:file_upload2) { fixture_file_upload("spec/fixtures/dk.png", "image/gif") } before do stub_artifacts_object_storage job.run! end - describe 'POST /api/v4/jobs/:id/artifacts/authorize' do - context 'when using token as parameter' do - context 'posting artifacts to running job' do + describe "POST /api/v4/jobs/:id/artifacts/authorize" do + context "when using token as parameter" do + context "posting artifacts to running job" do subject do authorize_artifacts_with_token_in_params end - shared_examples 'authorizes local file' do - it 'succeeds' do + shared_examples "authorizes local file" do + it "succeeds" do subject expect(response).to have_gitlab_http_status(200) expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) - expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path) - expect(json_response['RemoteObject']).to be_nil + expect(json_response["TempPath"]).to eq(JobArtifactUploader.workhorse_local_upload_path) + expect(json_response["RemoteObject"]).to be_nil end end - context 'when using local storage' do - it_behaves_like 'authorizes local file' + context "when using local storage" do + it_behaves_like "authorizes local file" end - context 'when using remote storage' do - context 'when direct upload is enabled' do + context "when using remote storage" do + context 
"when direct upload is enabled" do before do stub_artifacts_object_storage(enabled: true, direct_upload: true) end - it 'succeeds' do + it "succeeds" do subject expect(response).to have_gitlab_http_status(200) expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) - expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path) - expect(json_response['RemoteObject']).to have_key('ID') - expect(json_response['RemoteObject']).to have_key('GetURL') - expect(json_response['RemoteObject']).to have_key('StoreURL') - expect(json_response['RemoteObject']).to have_key('DeleteURL') - expect(json_response['RemoteObject']).to have_key('MultipartUpload') + expect(json_response["TempPath"]).to eq(JobArtifactUploader.workhorse_local_upload_path) + expect(json_response["RemoteObject"]).to have_key("ID") + expect(json_response["RemoteObject"]).to have_key("GetURL") + expect(json_response["RemoteObject"]).to have_key("StoreURL") + expect(json_response["RemoteObject"]).to have_key("DeleteURL") + expect(json_response["RemoteObject"]).to have_key("MultipartUpload") end end - context 'when direct upload is disabled' do + context "when direct upload is disabled" do before do stub_artifacts_object_storage(enabled: true, direct_upload: false) end - it_behaves_like 'authorizes local file' + it_behaves_like "authorizes local file" end end end - it 'fails to post too large artifact' do + it "fails to post too large artifact" do stub_application_setting(max_artifacts_size: 0) authorize_artifacts_with_token_in_params(filesize: 100) @@ -1304,16 +1307,16 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'when using token as header' do - it 'authorizes posting artifacts to running job' do + context "when using token as header" do + it "authorizes posting artifacts to running job" do authorize_artifacts_with_token_in_headers expect(response).to have_gitlab_http_status(200) expect(response.content_type.to_s).to 
eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) - expect(json_response['TempPath']).not_to be_nil + expect(json_response["TempPath"]).not_to be_nil end - it 'fails to post too large artifact' do + it "fails to post too large artifact" do stub_application_setting(max_artifacts_size: 0) authorize_artifacts_with_token_in_headers(filesize: 100) @@ -1322,15 +1325,15 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'when using runners token' do - it 'fails to authorize artifacts posting' do + context "when using runners token" do + it "fails to authorize artifacts posting" do authorize_artifacts(token: job.project.runners_token) expect(response).to have_gitlab_http_status(403) end end - it 'reject requests that did not go through gitlab-workhorse' do + it "reject requests that did not go through gitlab-workhorse" do headers.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER) authorize_artifacts @@ -1338,9 +1341,9 @@ describe API::Runner, :clean_gitlab_redis_shared_state do expect(response).to have_gitlab_http_status(500) end - context 'authorization token is invalid' do - it 'responds with forbidden' do - authorize_artifacts(token: 'invalid', filesize: 100 ) + context "authorization token is invalid" do + it "responds with forbidden" do + authorize_artifacts(token: "invalid", filesize: 100) expect(response).to have_gitlab_http_status(403) end @@ -1360,77 +1363,77 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - describe 'POST /api/v4/jobs/:id/artifacts' do - context 'when artifacts are being stored inside of tmp path' do + describe "POST /api/v4/jobs/:id/artifacts" do + context "when artifacts are being stored inside of tmp path" do before do # by configuring this path we allow to pass temp file from any path - allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/') + allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return("/") end - context 'when job has been erased' do + 
context "when job has been erased" do let(:job) { create(:ci_build, erased_at: Time.now) } before do upload_artifacts(file_upload, headers_with_token) end - it 'responds with forbidden' do + it "responds with forbidden" do upload_artifacts(file_upload, headers_with_token) expect(response).to have_gitlab_http_status(403) end end - context 'when job is running' do - shared_examples 'successful artifacts upload' do - it 'updates successfully' do + context "when job is running" do + shared_examples "successful artifacts upload" do + it "updates successfully" do expect(response).to have_gitlab_http_status(201) end end - context 'when uses accelerated file post' do - context 'for file stored locally' do + context "when uses accelerated file post" do + context "for file stored locally" do before do upload_artifacts(file_upload, headers_with_token) end - it_behaves_like 'successful artifacts upload' + it_behaves_like "successful artifacts upload" end - context 'for file stored remotelly' do + context "for file stored remotelly" do let!(:fog_connection) do stub_artifacts_object_storage(direct_upload: true) end before do - fog_connection.directories.new(key: 'artifacts').files.create( - key: 'tmp/uploads/12312300', - body: 'content' + fog_connection.directories.new(key: "artifacts").files.create( + key: "tmp/uploads/12312300", + body: "content" ) upload_artifacts(file_upload, headers_with_token, - { 'file.remote_id' => remote_id }) + {"file.remote_id" => remote_id}) end - context 'when valid remote_id is used' do - let(:remote_id) { '12312300' } + context "when valid remote_id is used" do + let(:remote_id) { "12312300" } - it_behaves_like 'successful artifacts upload' + it_behaves_like "successful artifacts upload" end - context 'when invalid remote_id is used' do - let(:remote_id) { 'invalid id' } + context "when invalid remote_id is used" do + let(:remote_id) { "invalid id" } - it 'responds with bad request' do + it "responds with bad request" do expect(response).to 
have_gitlab_http_status(500) - expect(json_response['message']).to eq("Missing file") + expect(json_response["message"]).to eq("Missing file") end end end end - context 'when using runners token' do - it 'responds with forbidden' do + context "when using runners token" do + it "responds with forbidden" do upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token)) expect(response).to have_gitlab_http_status(403) @@ -1438,8 +1441,8 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'when artifacts file is too large' do - it 'fails to post too large artifact' do + context "when artifacts file is too large" do + it "fails to post too large artifact" do stub_application_setting(max_artifacts_size: 0) upload_artifacts(file_upload, headers_with_token) @@ -1448,28 +1451,28 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'when artifacts post request does not contain file' do - it 'fails to post artifacts without file' do + context "when artifacts post request does not contain file" do + it "fails to post artifacts without file" do post api("/jobs/#{job.id}/artifacts"), params: {}, headers: headers_with_token expect(response).to have_gitlab_http_status(400) end end - context 'GitLab Workhorse is not configured' do - it 'fails to post artifacts without GitLab-Workhorse' do - post api("/jobs/#{job.id}/artifacts"), params: { token: job.token }, headers: {} + context "GitLab Workhorse is not configured" do + it "fails to post artifacts without GitLab-Workhorse" do + post api("/jobs/#{job.id}/artifacts"), params: {token: job.token}, headers: {} expect(response).to have_gitlab_http_status(403) end end - context 'when setting an expire date' do + context "when setting an expire date" do let(:default_artifacts_expire_in) {} let(:post_data) do - { 'file.path' => file_upload.path, - 'file.name' => file_upload.original_filename, - 'expire_in' => expire_in } + {"file.path" 
=> file_upload.path, + "file.name" => file_upload.original_filename, + "expire_in" => expire_in,} end before do @@ -1478,37 +1481,37 @@ describe API::Runner, :clean_gitlab_redis_shared_state do post(api("/jobs/#{job.id}/artifacts"), params: post_data, headers: headers_with_token) end - context 'when an expire_in is given' do - let(:expire_in) { '7 days' } + context "when an expire_in is given" do + let(:expire_in) { "7 days" } - it 'updates when specified' do + it "updates when specified" do expect(response).to have_gitlab_http_status(201) expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(7.days.from_now) end end - context 'when no expire_in is given' do + context "when no expire_in is given" do let(:expire_in) { nil } - it 'ignores if not specified' do + it "ignores if not specified" do expect(response).to have_gitlab_http_status(201) expect(job.reload.artifacts_expire_at).to be_nil end - context 'with application default' do - context 'when default is 5 days' do - let(:default_artifacts_expire_in) { '5 days' } + context "with application default" do + context "when default is 5 days" do + let(:default_artifacts_expire_in) { "5 days" } - it 'sets to application default' do + it "sets to application default" do expect(response).to have_gitlab_http_status(201) expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(5.days.from_now) end end - context 'when default is 0' do - let(:default_artifacts_expire_in) { '0' } + context "when default is 0" do + let(:default_artifacts_expire_in) { "0" } - it 'does not set expire_in' do + it "does not set expire_in" do expect(response).to have_gitlab_http_status(201) expect(job.reload.artifacts_expire_at).to be_nil end @@ -1517,7 +1520,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'posts artifacts file and metadata file' do + context "posts artifacts file and metadata file" do let!(:artifacts) { file_upload } let!(:artifacts_sha256) { 
Digest::SHA256.file(artifacts.path).hexdigest } let!(:metadata) { file_upload2 } @@ -1533,17 +1536,17 @@ describe API::Runner, :clean_gitlab_redis_shared_state do post(api("/jobs/#{job.id}/artifacts"), params: post_data, headers: headers_with_token) end - context 'when posts data accelerated by workhorse is correct' do + context "when posts data accelerated by workhorse is correct" do let(:post_data) do - { 'file.path' => artifacts.path, - 'file.name' => artifacts.original_filename, - 'file.sha256' => artifacts_sha256, - 'metadata.path' => metadata.path, - 'metadata.name' => metadata.original_filename, - 'metadata.sha256' => metadata_sha256 } + {"file.path" => artifacts.path, + "file.name" => artifacts.original_filename, + "file.sha256" => artifacts_sha256, + "metadata.path" => metadata.path, + "metadata.name" => metadata.original_filename, + "metadata.sha256" => metadata_sha256,} end - it 'stores artifacts and artifacts metadata' do + it "stores artifacts and artifacts metadata" do expect(response).to have_gitlab_http_status(201) expect(stored_artifacts_file.original_filename).to eq(artifacts.original_filename) expect(stored_metadata_file.original_filename).to eq(metadata.original_filename) @@ -1553,26 +1556,26 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'when there is no artifacts file in post data' do + context "when there is no artifacts file in post data" do let(:post_data) do - { 'metadata' => metadata } + {"metadata" => metadata} end - it 'is expected to respond with bad request' do + it "is expected to respond with bad request" do expect(response).to have_gitlab_http_status(400) end - it 'does not store metadata' do + it "does not store metadata" do expect(stored_metadata_file).to be_nil end end end - context 'when artifact_type is archive' do - context 'when artifact_format is zip' do - let(:params) { { artifact_type: :archive, artifact_format: :zip } } + context "when artifact_type is archive" do + context "when 
artifact_format is zip" do + let(:params) { {artifact_type: :archive, artifact_format: :zip} } - it 'stores junit test report' do + it "stores junit test report" do upload_artifacts(file_upload, headers_with_token, params) expect(response).to have_gitlab_http_status(201) @@ -1580,10 +1583,10 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'when artifact_format is gzip' do - let(:params) { { artifact_type: :archive, artifact_format: :gzip } } + context "when artifact_format is gzip" do + let(:params) { {artifact_type: :archive, artifact_format: :gzip} } - it 'returns an error' do + it "returns an error" do upload_artifacts(file_upload, headers_with_token, params) expect(response).to have_gitlab_http_status(400) @@ -1592,12 +1595,12 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'when artifact_type is junit' do - context 'when artifact_format is gzip' do - let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') } - let(:params) { { artifact_type: :junit, artifact_format: :gzip } } + context "when artifact_type is junit" do + context "when artifact_format is gzip" do + let(:file_upload) { fixture_file_upload("spec/fixtures/junit/junit.xml.gz") } + let(:params) { {artifact_type: :junit, artifact_format: :gzip} } - it 'stores junit test report' do + it "stores junit test report" do upload_artifacts(file_upload, headers_with_token, params) expect(response).to have_gitlab_http_status(201) @@ -1605,11 +1608,11 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'when artifact_format is raw' do - let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') } - let(:params) { { artifact_type: :junit, artifact_format: :raw } } + context "when artifact_format is raw" do + let(:file_upload) { fixture_file_upload("spec/fixtures/junit/junit.xml.gz") } + let(:params) { {artifact_type: :junit, artifact_format: :raw} } - it 'returns an error' do + it "returns 
an error" do upload_artifacts(file_upload, headers_with_token, params) expect(response).to have_gitlab_http_status(400) @@ -1619,7 +1622,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do end end - context 'when artifacts are being stored outside of tmp path' do + context "when artifacts are being stored outside of tmp path" do let(:new_tmpdir) { Dir.mktmpdir } before do @@ -1644,18 +1647,18 @@ describe API::Runner, :clean_gitlab_redis_shared_state do def upload_artifacts(file, headers = {}, params = {}) params = params.merge({ - 'file.path' => file.path, - 'file.name' => file.original_filename + "file.path" => file.path, + "file.name" => file.original_filename, }) post api("/jobs/#{job.id}/artifacts"), params: params, headers: headers end end - describe 'GET /api/v4/jobs/:id/artifacts' do + describe "GET /api/v4/jobs/:id/artifacts" do let(:token) { job.token } - context 'when job has artifacts' do + context "when job has artifacts" do let(:job) { create(:ci_build) } let(:store) { JobArtifactUploader::Store::LOCAL } @@ -1663,67 +1666,68 @@ describe API::Runner, :clean_gitlab_redis_shared_state do create(:ci_job_artifact, :archive, file_store: store, job: job) end - context 'when using job token' do - context 'when artifacts are stored locally' do + context "when using job token" do + context "when artifacts are stored locally" do let(:download_headers) do - { 'Content-Transfer-Encoding' => 'binary', - 'Content-Disposition' => %q(attachment; filename="ci_build_artifacts.zip"; filename*=UTF-8''ci_build_artifacts.zip) } + {"Content-Transfer-Encoding" => "binary", + "Content-Disposition" => %q(attachment; filename="ci_build_artifacts.zip"; filename*=UTF-8''ci_build_artifacts.zip),} end before do download_artifact end - it 'download artifacts' do + it "download artifacts" do expect(response).to have_http_status(200) expect(response.headers.to_h).to include download_headers end end - context 'when artifacts are stored remotely' do + context "when artifacts 
are stored remotely" do let(:store) { JobArtifactUploader::Store::REMOTE } let!(:job) { create(:ci_build) } - context 'when proxy download is being used' do + context "when proxy download is being used" do before do download_artifact(direct_download: false) end - it 'uses workhorse send-url' do + it "uses workhorse send-url" do expect(response).to have_gitlab_http_status(200) expect(response.headers.to_h).to include( - 'Gitlab-Workhorse-Send-Data' => /send-url:/) + "Gitlab-Workhorse-Send-Data" => /send-url:/ + ) end end - context 'when direct download is being used' do + context "when direct download is being used" do before do download_artifact(direct_download: true) end - it 'receive redirect for downloading artifacts' do + it "receive redirect for downloading artifacts" do expect(response).to have_gitlab_http_status(302) - expect(response.headers).to include('Location') + expect(response.headers).to include("Location") end end end end - context 'when using runnners token' do + context "when using runnners token" do let(:token) { job.project.runners_token } before do download_artifact end - it 'responds with forbidden' do + it "responds with forbidden" do expect(response).to have_gitlab_http_status(403) end end end - context 'when job does not has artifacts' do - it 'responds with not found' do + context "when job does not has artifacts" do + it "responds with not found" do download_artifact expect(response).to have_gitlab_http_status(404) |