author | Matija Čupić <matteeyah@gmail.com> | 2018-04-20 15:53:40 +0200 |
---|---|---|
committer | Matija Čupić <matteeyah@gmail.com> | 2018-04-20 15:53:40 +0200 |
commit | d1052289139c3be664908e0266c9389f7c797bd5 (patch) | |
tree | 0109aef97381949ca7877ad1c3b6fabed46a4cf7 /spec | |
parent | e911a0f896c3699c69b27509b1b38d173f96c81a (diff) | |
parent | 87e592dc0a81d94efac157dfd7382df4e5b2a223 (diff) | |
download | gitlab-ce-d1052289139c3be664908e0266c9389f7c797bd5.tar.gz | |
Merge branch 'master' into 38759-fetch-available-parameters-directly-from-gke-when-creating-a-cluster
Diffstat (limited to 'spec')
778 files changed, 40600 insertions, 15592 deletions
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb index cc1b1e5039e..b4fc2aa326f 100644 --- a/spec/controllers/admin/application_settings_controller_spec.rb +++ b/spec/controllers/admin/application_settings_controller_spec.rb @@ -72,11 +72,10 @@ describe Admin::ApplicationSettingsController do expect(ApplicationSetting.current.restricted_visibility_levels).to eq([10, 20]) end - it 'falls back to defaults when settings are omitted' do - put :update, application_setting: {} + it 'updates the restricted_visibility_levels when empty array is passed' do + put :update, application_setting: { restricted_visibility_levels: [] } expect(response).to redirect_to(admin_application_settings_path) - expect(ApplicationSetting.current.default_project_visibility).to eq(Gitlab::VisibilityLevel::PRIVATE) expect(ApplicationSetting.current.restricted_visibility_levels).to be_empty end end diff --git a/spec/controllers/admin/projects_controller_spec.rb b/spec/controllers/admin/projects_controller_spec.rb index d5a3c250f31..cc200b9fed9 100644 --- a/spec/controllers/admin/projects_controller_spec.rb +++ b/spec/controllers/admin/projects_controller_spec.rb @@ -31,5 +31,15 @@ describe Admin::ProjectsController do expect(response.body).not_to match(pending_delete_project.name) expect(response.body).to match(project.name) end + + it 'does not have N+1 queries', :use_clean_rails_memory_store_caching, :request_store do + get :index + + control_count = ActiveRecord::QueryRecorder.new { get :index }.count + + create(:project) + + expect { get :index }.not_to exceed_query_limit(control_count) + end end end diff --git a/spec/controllers/concerns/checks_collaboration_spec.rb b/spec/controllers/concerns/checks_collaboration_spec.rb new file mode 100644 index 00000000000..1bd764290ae --- /dev/null +++ b/spec/controllers/concerns/checks_collaboration_spec.rb @@ -0,0 +1,55 @@ +require 'spec_helper' + +describe ChecksCollaboration do + include ProjectForksHelper + + let(:helper) do + fake_class = Class.new(ApplicationController) do + include ChecksCollaboration + end + + fake_class.new + end + + describe '#can_collaborate_with_project?' do + let(:user) { create(:user) } + let(:project) { create(:project, :public) } + + before do + allow(helper).to receive(:current_user).and_return(user) + allow(helper).to receive(:can?) 
do |user, ability, subject| + Ability.allowed?(user, ability, subject) + end + end + + it 'is true if the user can push to the project' do + project.add_developer(user) + + expect(helper.can_collaborate_with_project?(project)).to be_truthy + end + + it 'is true when the user can push to a branch of the project' do + fake_access = double('Gitlab::UserAccess') + expect(fake_access).to receive(:can_push_to_branch?).with('a-branch').and_return(true) + expect(Gitlab::UserAccess).to receive(:new).with(user, project: project).and_return(fake_access) + + expect(helper.can_collaborate_with_project?(project, ref: 'a-branch')).to be_truthy + end + + context 'when the user has forked the project' do + before do + fork_project(project, user, namespace: user.namespace) + end + + it 'is true' do + expect(helper.can_collaborate_with_project?(project)).to be_truthy + end + + it 'is false when the project is archived' do + project.archived = true + + expect(helper.can_collaborate_with_project?(project)).to be_falsy + end + end + end +end diff --git a/spec/controllers/concerns/send_file_upload_spec.rb b/spec/controllers/concerns/send_file_upload_spec.rb new file mode 100644 index 00000000000..f4c99ea4064 --- /dev/null +++ b/spec/controllers/concerns/send_file_upload_spec.rb @@ -0,0 +1,89 @@ +require 'spec_helper' + +describe SendFileUpload do + let(:uploader_class) do + Class.new(GitlabUploader) do + include ObjectStorage::Concern + + storage_options Gitlab.config.uploads + + private + + # user/:id + def dynamic_segment + File.join(model.class.to_s.underscore, model.id.to_s) + end + end + end + + let(:controller_class) do + Class.new do + include SendFileUpload + end + end + + let(:object) { build_stubbed(:user) } + let(:uploader) { uploader_class.new(object, :file) } + + describe '#send_upload' do + let(:controller) { controller_class.new } + let(:temp_file) { Tempfile.new('test') } + + subject { controller.send_upload(uploader) } + + before do + FileUtils.touch(temp_file) + end + + after do + FileUtils.rm_f(temp_file) + end + + context 'when local file is used' do + before do + uploader.store!(temp_file) + end + + it 'sends a file' do + expect(controller).to receive(:send_file).with(uploader.path, anything) + + subject + end + end + + context 'when remote file is used' do + before do + stub_uploads_object_storage(uploader: uploader_class) + uploader.object_store = ObjectStorage::Store::REMOTE + uploader.store!(temp_file) + end + + context 'and proxying is enabled' do + before do + allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { true } + end + + it 'sends a file' do + headers = double + expect(headers).to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-url:/) + expect(controller).to receive(:headers) { headers } + expect(controller).to receive(:head).with(:ok) + + subject + end + end + + context 'and proxying is disabled' do + before do + allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { false } + end + + it 'sends a file' do + expect(controller).to receive(:redirect_to).with(/#{uploader.path}/) + + subject + end + end + end + end +end diff --git a/spec/controllers/dashboard_controller_spec.rb b/spec/controllers/dashboard_controller_spec.rb index 97c2c3fb940..3458d679107 100644 --- a/spec/controllers/dashboard_controller_spec.rb +++ b/spec/controllers/dashboard_controller_spec.rb @@ -11,9 +11,11 @@ describe DashboardController do describe 'GET issues' do it_behaves_like 'issuables list meta-data', :issue, :issues + it_behaves_like 'issuables 
requiring filter', :issues end describe 'GET merge requests' do it_behaves_like 'issuables list meta-data', :merge_request, :merge_requests + it_behaves_like 'issuables requiring filter', :merge_requests end end diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb index c639ad32ec6..5f0e8c5eca9 100644 --- a/spec/controllers/omniauth_callbacks_controller_spec.rb +++ b/spec/controllers/omniauth_callbacks_controller_spec.rb @@ -3,72 +3,125 @@ require 'spec_helper' describe OmniauthCallbacksController do include LoginHelpers - let(:user) { create(:omniauth_user, extern_uid: 'my-uid', provider: provider) } - let(:provider) { :github } + let(:user) { create(:omniauth_user, extern_uid: extern_uid, provider: provider) } before do - mock_auth_hash(provider.to_s, 'my-uid', user.email) + mock_auth_hash(provider.to_s, extern_uid, user.email) stub_omniauth_provider(provider, context: request) end - it 'allows sign in' do - post provider + context 'when the user is on the last sign in attempt' do + let(:extern_uid) { 'my-uid' } - expect(request.env['warden']).to be_authenticated - end + before do + user.update(failed_attempts: User.maximum_attempts.pred) + subject.response = ActionDispatch::Response.new + end - shared_context 'sign_up' do - let(:user) { double(email: 'new@example.com') } + context 'when using a form based provider' do + let(:provider) { :ldap } - before do - stub_omniauth_setting(block_auto_created_users: false) + it 'locks the user when sign in fails' do + allow(subject).to receive(:params).and_return(ActionController::Parameters.new(username: user.username)) + request.env['omniauth.error.strategy'] = OmniAuth::Strategies::LDAP.new(nil) + + subject.send(:failure) + + expect(user.reload).to be_access_locked + end end - end - context 'sign up' do - include_context 'sign_up' + context 'when using a button based provider' do + let(:provider) { :github } - it 'is allowed' do - post provider + it 'does not lock the user when sign in fails' do + request.env['omniauth.error.strategy'] = OmniAuth::Strategies::GitHub.new(nil) - expect(request.env['warden']).to be_authenticated + subject.send(:failure) + + expect(user.reload).not_to be_access_locked + end end end - context 'when OAuth is disabled' do - before do - stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') - settings = Gitlab::CurrentSettings.current_application_settings - settings.update(disabled_oauth_sign_in_sources: [provider.to_s]) - end + context 'strategies' do + context 'github' do + let(:extern_uid) { 'my-uid' } + let(:provider) { :github } - it 'prevents login via POST' do - post provider + it 'allows sign in' do + post provider - expect(request.env['warden']).not_to be_authenticated - end + expect(request.env['warden']).to be_authenticated + end - it 'shows warning when attempting login' do - post provider + shared_context 'sign_up' do + let(:user) { double(email: 'new@example.com') } - expect(response).to redirect_to new_user_session_path - expect(flash[:alert]).to eq('Signing in using GitHub has been disabled') - end + before do + stub_omniauth_setting(block_auto_created_users: false) + end + end + + context 'sign up' do + include_context 'sign_up' + + it 'is allowed' do + post provider + + expect(request.env['warden']).to be_authenticated + end + end + + context 'when OAuth is disabled' do + before do + stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') + settings = Gitlab::CurrentSettings.current_application_settings + 
settings.update(disabled_oauth_sign_in_sources: [provider.to_s]) + end + + it 'prevents login via POST' do + post provider + + expect(request.env['warden']).not_to be_authenticated + end - it 'allows linking the disabled provider' do - user.identities.destroy_all - sign_in(user) + it 'shows warning when attempting login' do + post provider - expect { post provider }.to change { user.reload.identities.count }.by(1) + expect(response).to redirect_to new_user_session_path + expect(flash[:alert]).to eq('Signing in using GitHub has been disabled') + end + + it 'allows linking the disabled provider' do + user.identities.destroy_all + sign_in(user) + + expect { post provider }.to change { user.reload.identities.count }.by(1) + end + + context 'sign up' do + include_context 'sign_up' + + it 'is prevented' do + post provider + + expect(request.env['warden']).not_to be_authenticated + end + end + end end - context 'sign up' do - include_context 'sign_up' + context 'auth0' do + let(:extern_uid) { '' } + let(:provider) { :auth0 } - it 'is prevented' do - post provider + it 'does not allow sign in without extern_uid' do + post 'auth0' expect(request.env['warden']).not_to be_authenticated + expect(response.status).to eq(302) + expect(controller).to set_flash[:alert].to('Wrong extern UID provided. Make sure Auth0 is configured correctly.') end end end diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb index 03cbbb21e62..de6ef919221 100644 --- a/spec/controllers/profiles_controller_spec.rb +++ b/spec/controllers/profiles_controller_spec.rb @@ -84,6 +84,35 @@ describe ProfilesController, :request_store do expect(user.username).to eq(new_username) end + it 'updates a username using JSON request' do + sign_in(user) + + put :update_username, + user: { username: new_username }, + format: :json + + expect(response.status).to eq(200) + expect(json_response['message']).to eq('Username successfully changed') + end + + it 'renders an error message when the username was not updated' do + sign_in(user) + + put :update_username, + user: { username: 'invalid username.git' }, + format: :json + + expect(response.status).to eq(422) + expect(json_response['message']).to match(/Username change failed/) + end + + it 'raises a correct error when the username is missing' do + sign_in(user) + + expect { put :update_username, user: { gandalf: 'you shall not pass' } } + .to raise_error(ActionController::ParameterMissing) + end + context 'with legacy storage' do it 'moves dependent projects to new namespace' do project = create(:project_empty_repo, :legacy_storage, namespace: namespace) diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb index 25a2e13fe1a..4ea6f869aa3 100644 --- a/spec/controllers/projects/artifacts_controller_spec.rb +++ b/spec/controllers/projects/artifacts_controller_spec.rb @@ -145,9 +145,23 @@ describe Projects::ArtifactsController do context 'when using local file storage' do it_behaves_like 'a valid file' do let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) } + let(:store) { ObjectStorage::Store::LOCAL } let(:archive_path) { JobArtifactUploader.root } end end + + context 'when using remote file storage' do + before do + stub_artifacts_object_storage + end + + it_behaves_like 'a valid file' do + let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) } + let!(:job) { create(:ci_build, :success, pipeline: pipeline) } + let(:store) { 
ObjectStorage::Store::REMOTE } + let(:archive_path) { 'https://' } + end + end end end diff --git a/spec/controllers/projects/branches_controller_spec.rb b/spec/controllers/projects/branches_controller_spec.rb index 3b9e06cb5ad..16fb377b002 100644 --- a/spec/controllers/projects/branches_controller_spec.rb +++ b/spec/controllers/projects/branches_controller_spec.rb @@ -398,6 +398,22 @@ describe Projects::BranchesController do end end + # We need :request_store because Gitaly only counts the queries whenever + # `RequestStore.active?` in GitalyClient.enforce_gitaly_request_limits + # And the main goal of this test is making sure TooManyInvocationsError + # was not raised whenever the cache is enabled yet cold. + context 'when cache is enabled yet cold', :request_store do + it 'return with a status 200' do + get :index, + namespace_id: project.namespace, + project_id: project, + state: 'all', + format: :html + + expect(response).to have_gitlab_http_status(200) + end + end + context 'when branch contains an invalid UTF-8 sequence' do before do project.repository.create_branch("wrong-\xE5-utf8-sequence") @@ -414,7 +430,7 @@ describe Projects::BranchesController do end end - context 'when depreated sort/search/page parameters are specified' do + context 'when deprecated sort/search/page parameters are specified' do it 'returns with a status 301 when sort specified' do get :index, namespace_id: project.namespace, diff --git a/spec/controllers/projects/ci/lints_controller_spec.rb b/spec/controllers/projects/ci/lints_controller_spec.rb new file mode 100644 index 00000000000..1249a5528a9 --- /dev/null +++ b/spec/controllers/projects/ci/lints_controller_spec.rb @@ -0,0 +1,123 @@ +require 'spec_helper' + +describe Projects::Ci::LintsController do + let(:project) { create(:project, :repository) } + let(:user) { create(:user) } + + before do + sign_in(user) + end + + describe 'GET #show' do + context 'with enough privileges' do + before do + project.add_developer(user) + + get :show, namespace_id: project.namespace, project_id: project + end + + it 'should be success' do + expect(response).to be_success + end + + it 'should render show page' do + expect(response).to render_template :show + end + + it 'should retrieve project' do + expect(assigns(:project)).to eq(project) + end + end + + context 'without enough privileges' do + before do + project.add_guest(user) + + get :show, namespace_id: project.namespace, project_id: project + end + + it 'should respond with 404' do + expect(response).to have_gitlab_http_status(404) + end + end + end + + describe 'POST #create' do + let(:remote_file_path) { 'https://gitlab.com/gitlab-org/gitlab-ce/blob/1234/.gitlab-ci-1.yml' } + + let(:remote_file_content) do + <<~HEREDOC + before_script: + - apt-get update -qq && apt-get install -y -qq sqlite3 libsqlite3-dev nodejs + - ruby -v + - which ruby + - gem install bundler --no-ri --no-rdoc + - bundle install --jobs $(nproc) "${FLAGS[@]}" + HEREDOC + end + + let(:content) do + <<~HEREDOC + include: + - #{remote_file_path} + + rubocop: + script: + - bundle exec rubocop + HEREDOC + end + + context 'with a valid gitlab-ci.yml' do + before do + WebMock.stub_request(:get, remote_file_path).to_return(body: remote_file_content) + project.add_developer(user) + + post :create, namespace_id: project.namespace, project_id: project, content: content + end + + it 'should be success' do + expect(response).to be_success + end + + it 'render show page' do + expect(response).to render_template :show + end + + it 'should retrieve project' 
do + expect(assigns(:project)).to eq(project) + end + end + + context 'with an invalid gitlab-ci.yml' do + let(:content) do + <<~HEREDOC + rubocop: + scriptt: + - bundle exec rubocop + HEREDOC + end + + before do + project.add_developer(user) + + post :create, namespace_id: project.namespace, project_id: project, content: content + end + + it 'should assign errors' do + expect(assigns[:error]).to eq('jobs:rubocop config contains unknown keys: scriptt') + end + end + + context 'without enough privileges' do + before do + project.add_guest(user) + + post :create, namespace_id: project.namespace, project_id: project, content: content + end + + it 'should respond with 404' do + expect(response).to have_gitlab_http_status(404) + end + end + end +end diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb index 15ce418d0d6..82b20e12850 100644 --- a/spec/controllers/projects/clusters_controller_spec.rb +++ b/spec/controllers/projects/clusters_controller_spec.rb @@ -18,7 +18,7 @@ describe Projects::ClustersController do context 'when project has one or more clusters' do let(:project) { create(:project) } let!(:enabled_cluster) { create(:cluster, :provided_by_gcp, projects: [project]) } - let!(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, projects: [project]) } + let!(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, :production_environment, projects: [project]) } it 'lists available clusters' do go @@ -32,7 +32,7 @@ describe Projects::ClustersController do before do allow(Clusters::Cluster).to receive(:paginates_per).and_return(1) - create_list(:cluster, 2, :provided_by_gcp, projects: [project]) + create_list(:cluster, 2, :provided_by_gcp, :production_environment, projects: [project]) get :index, namespace_id: project.namespace, project_id: project, page: last_page end diff --git a/spec/controllers/projects/discussions_controller_spec.rb b/spec/controllers/projects/discussions_controller_spec.rb index fcb0c2f28c8..53647749a60 100644 --- a/spec/controllers/projects/discussions_controller_spec.rb +++ b/spec/controllers/projects/discussions_controller_spec.rb @@ -16,6 +16,53 @@ describe Projects::DiscussionsController do } end + describe 'GET show' do + before do + sign_in user + end + + context 'when user is not authorized to read the MR' do + it 'returns 404' do + get :show, request_params, format: :json + + expect(response).to have_gitlab_http_status(404) + end + end + + context 'when user is authorized to read the MR' do + before do + project.add_reporter(user) + end + + it 'returns status 200' do + get :show, request_params, format: :json + + expect(response).to have_gitlab_http_status(200) + end + + it 'returns status 404 if MR does not exists' do + merge_request.destroy! 
+ + get :show, request_params, format: :json + + expect(response).to have_gitlab_http_status(404) + end + end + + context 'when user is authorized but note is LegacyDiffNote' do + before do + project.add_developer(user) + note.update!(type: 'LegacyDiffNote') + end + + it 'returns status 200' do + get :show, request_params, format: :json + + expect(response).to have_gitlab_http_status(200) + end + end + end + describe 'POST resolve' do before do sign_in user diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb index 9918d52e402..ca86b0bc737 100644 --- a/spec/controllers/projects/issues_controller_spec.rb +++ b/spec/controllers/projects/issues_controller_spec.rb @@ -938,7 +938,7 @@ describe Projects::IssuesController do end describe 'POST create_merge_request' do - let(:project) { create(:project, :repository) } + let(:project) { create(:project, :repository, :public) } before do project.add_developer(user) @@ -955,6 +955,22 @@ describe Projects::IssuesController do expect(response).to match_response_schema('merge_request') end + it 'is not available when the project is archived' do + project.update!(archived: true) + + create_merge_request + + expect(response).to have_gitlab_http_status(404) + end + + it 'is not available for users who cannot create merge requests' do + sign_in(create(:user)) + + create_merge_request + + expect(response).to have_gitlab_http_status(404) + end + def create_merge_request post :create_merge_request, namespace_id: project.namespace.to_param, project_id: project.to_param, @@ -974,7 +990,7 @@ describe Projects::IssuesController do it 'returns discussion json' do get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid - expect(json_response.first.keys).to match_array(%w[id reply_id expanded notes diff_discussion individual_note resolvable resolve_with_issue_path resolved]) + expect(json_response.first.keys).to match_array(%w[id reply_id expanded notes diff_discussion individual_note resolvable resolved]) end context 'with cross-reference system note', :request_store do diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb index f3e303bb0fe..f677cec3408 100644 --- a/spec/controllers/projects/jobs_controller_spec.rb +++ b/spec/controllers/projects/jobs_controller_spec.rb @@ -1,7 +1,9 @@ +# coding: utf-8 require 'spec_helper' describe Projects::JobsController do include ApiHelpers + include HttpIOHelpers let(:project) { create(:project, :public) } let(:pipeline) { create(:ci_pipeline, project: project) } @@ -188,7 +190,10 @@ describe Projects::JobsController do expect(response).to have_gitlab_http_status(:ok) expect(json_response['id']).to eq job.id expect(json_response['status']).to eq job.status - expect(json_response['html']).to be_nil + end + + it 'returns no job log message' do + expect(json_response['html']).to eq('No job log') end end @@ -203,6 +208,41 @@ describe Projects::JobsController do end end + context 'when trace artifact is in ObjectStorage' do + let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) } + + before do + allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) 
{ false } + allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url } + allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size } + end + + context 'when there are no network issues' do + before do + stub_remote_trace_206 + + get_trace + end + + it 'returns a trace' do + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['id']).to eq job.id + expect(json_response['status']).to eq job.status + expect(json_response['html']).to eq(job.trace.html) + end + end + + context 'when there is a network issue' do + before do + stub_remote_trace_500 + end + + it 'returns a trace' do + expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError) + end + end + end + def get_trace get :trace, namespace_id: project.namespace, project_id: project, @@ -446,14 +486,18 @@ describe Projects::JobsController do end describe 'GET raw' do - before do - get_raw + subject do + post :raw, namespace_id: project.namespace, + project_id: project, + id: job.id end context 'when job has a trace artifact' do let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) } it 'returns a trace' do + response = subject + expect(response).to have_gitlab_http_status(:ok) expect(response.content_type).to eq 'text/plain; charset=utf-8' expect(response.body).to eq job.job_artifacts_trace.open.read @@ -464,24 +508,51 @@ describe Projects::JobsController do let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) } it 'send a trace file' do + response = subject + expect(response).to have_gitlab_http_status(:ok) expect(response.content_type).to eq 'text/plain; charset=utf-8' expect(response.body).to eq 'BUILD TRACE' end end + context 'when job has a trace in database' do + let(:job) { create(:ci_build, pipeline: pipeline) } + + before do + job.update_column(:trace, 'Sample trace') + end + + it 'send a trace file' do + response = subject + + expect(response).to have_gitlab_http_status(:ok) + expect(response.content_type).to eq 'text/plain; charset=utf-8' + expect(response.body).to eq 'Sample trace' + end + end + context 'when job does not have a trace file' do let(:job) { create(:ci_build, pipeline: pipeline) } it 'returns not_found' do - expect(response).to have_gitlab_http_status(:not_found) + response = subject + + expect(response).to have_gitlab_http_status(:ok) + expect(response.body).to eq '' end end - def get_raw - post :raw, namespace_id: project.namespace, - project_id: project, - id: job.id + context 'when the trace artifact is in ObjectStorage' do + let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) } + + before do + allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) 
{ false } + end + + it 'redirect to the trace file url' do + expect(subject).to redirect_to(job.job_artifacts_trace.file.url) + end end end end diff --git a/spec/controllers/projects/milestones_controller_spec.rb b/spec/controllers/projects/milestones_controller_spec.rb index 306094f7ffb..548c5ef36e7 100644 --- a/spec/controllers/projects/milestones_controller_spec.rb +++ b/spec/controllers/projects/milestones_controller_spec.rb @@ -20,14 +20,23 @@ describe Projects::MilestonesController do describe "#show" do render_views - def view_milestone - get :show, namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid + def view_milestone(options = {}) + params = { namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid } + get :show, params.merge(options) end it 'shows milestone page' do view_milestone expect(response).to have_gitlab_http_status(200) + expect(response.content_type).to eq 'text/html' + end + + it 'returns milestone json' do + view_milestone format: :json + + expect(response).to have_http_status(404) + expect(response.content_type).to eq 'application/json' end end @@ -98,7 +107,7 @@ describe Projects::MilestonesController do it 'shows group milestone' do post :promote, namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid - expect(flash[:notice]).to eq("#{milestone.title} promoted to group milestone") + expect(flash[:notice]).to eq("#{milestone.title} promoted to <a href=\"#{group_milestone_path(project.group, milestone.iid)}\">group milestone</a>.") expect(response).to redirect_to(project_milestones_path(project)) end end diff --git a/spec/controllers/projects/pages_controller_spec.rb b/spec/controllers/projects/pages_controller_spec.rb index 4705c50de7e..11f54eef531 100644 --- a/spec/controllers/projects/pages_controller_spec.rb +++ b/spec/controllers/projects/pages_controller_spec.rb @@ -65,4 +65,41 @@ describe Projects::PagesController do end end end + + describe 'PATCH update' do + let(:request_params) do + { + namespace_id: project.namespace, + project_id: project, + project: { pages_https_only: false } + } + end + + let(:update_service) { double(execute: { status: :success }) } + + before do + allow(Projects::UpdateService).to receive(:new) { update_service } + end + + it 'returns 302 status' do + patch :update, request_params + + expect(response).to have_gitlab_http_status(:found) + end + + it 'redirects back to the pages settings' do + patch :update, request_params + + expect(response).to redirect_to(project_pages_path(project)) + end + + it 'calls the update service' do + expect(Projects::UpdateService) + .to receive(:new) + .with(project, user, request_params[:project]) + .and_return(update_service) + + patch :update, request_params + end + end end diff --git a/spec/controllers/projects/pages_domains_controller_spec.rb b/spec/controllers/projects/pages_domains_controller_spec.rb index 83a3799e883..d4058a5c515 100644 --- a/spec/controllers/projects/pages_domains_controller_spec.rb +++ b/spec/controllers/projects/pages_domains_controller_spec.rb @@ -13,7 +13,7 @@ describe Projects::PagesDomainsController do end let(:pages_domain_params) do - build(:pages_domain, :with_certificate, :with_key, domain: 'my.otherdomain.com').slice(:key, :certificate, :domain) + build(:pages_domain, domain: 'my.otherdomain.com').slice(:key, :certificate, :domain) end before do @@ -68,7 +68,7 @@ describe Projects::PagesDomainsController do end let(:pages_domain_params) do - attributes_for(:pages_domain, :with_certificate, 
:with_key).slice(:key, :certificate) + attributes_for(:pages_domain).slice(:key, :certificate) end let(:params) do diff --git a/spec/controllers/projects/pipeline_schedules_controller_spec.rb b/spec/controllers/projects/pipeline_schedules_controller_spec.rb index 966ffdf6996..3506305f755 100644 --- a/spec/controllers/projects/pipeline_schedules_controller_spec.rb +++ b/spec/controllers/projects/pipeline_schedules_controller_spec.rb @@ -80,7 +80,7 @@ describe Projects::PipelineSchedulesController do context 'when variables_attributes has one variable' do let(:schedule) do basic_param.merge({ - variables_attributes: [{ key: 'AAA', value: 'AAA123' }] + variables_attributes: [{ key: 'AAA', secret_value: 'AAA123' }] }) end @@ -101,7 +101,8 @@ describe Projects::PipelineSchedulesController do context 'when variables_attributes has two variables and duplicated' do let(:schedule) do basic_param.merge({ - variables_attributes: [{ key: 'AAA', value: 'AAA123' }, { key: 'AAA', value: 'BBB123' }] + variables_attributes: [{ key: 'AAA', secret_value: 'AAA123' }, + { key: 'AAA', secret_value: 'BBB123' }] }) end @@ -152,7 +153,7 @@ describe Projects::PipelineSchedulesController do context 'when params include one variable' do let(:schedule) do basic_param.merge({ - variables_attributes: [{ key: 'AAA', value: 'AAA123' }] + variables_attributes: [{ key: 'AAA', secret_value: 'AAA123' }] }) end @@ -169,7 +170,8 @@ describe Projects::PipelineSchedulesController do context 'when params include two duplicated variables' do let(:schedule) do basic_param.merge({ - variables_attributes: [{ key: 'AAA', value: 'AAA123' }, { key: 'AAA', value: 'BBB123' }] + variables_attributes: [{ key: 'AAA', secret_value: 'AAA123' }, + { key: 'AAA', secret_value: 'BBB123' }] }) end @@ -194,7 +196,7 @@ describe Projects::PipelineSchedulesController do context 'when adds a new variable' do let(:schedule) do basic_param.merge({ - variables_attributes: [{ key: 'AAA', value: 'AAA123' }] + variables_attributes: [{ key: 'AAA', secret_value: 'AAA123' }] }) end @@ -209,7 +211,7 @@ describe Projects::PipelineSchedulesController do context 'when adds a new duplicated variable' do let(:schedule) do basic_param.merge({ - variables_attributes: [{ key: 'CCC', value: 'AAA123' }] + variables_attributes: [{ key: 'CCC', secret_value: 'AAA123' }] }) end @@ -224,7 +226,7 @@ describe Projects::PipelineSchedulesController do context 'when updates a variable' do let(:schedule) do basic_param.merge({ - variables_attributes: [{ id: pipeline_schedule_variable.id, value: 'new_value' }] + variables_attributes: [{ id: pipeline_schedule_variable.id, secret_value: 'new_value' }] }) end @@ -252,7 +254,7 @@ describe Projects::PipelineSchedulesController do let(:schedule) do basic_param.merge({ variables_attributes: [{ id: pipeline_schedule_variable.id, _destroy: true }, - { key: 'CCC', value: 'CCC123' }] + { key: 'CCC', secret_value: 'CCC123' }] }) end diff --git a/spec/controllers/projects/pipelines_settings_controller_spec.rb b/spec/controllers/projects/pipelines_settings_controller_spec.rb index 1cc488bef32..694896b6bcf 100644 --- a/spec/controllers/projects/pipelines_settings_controller_spec.rb +++ b/spec/controllers/projects/pipelines_settings_controller_spec.rb @@ -11,60 +11,11 @@ describe Projects::PipelinesSettingsController do sign_in(user) end - describe 'PATCH update' do - subject do - patch :update, - namespace_id: project.namespace.to_param, - project_id: project, - project: { - auto_devops_attributes: params - } - end - - context 'when updating the 
auto_devops settings' do - let(:params) { { enabled: '', domain: 'mepmep.md' } } - - it 'redirects to the settings page' do - subject - - expect(response).to have_gitlab_http_status(302) - expect(flash[:notice]).to eq("Pipelines settings for '#{project.name}' were successfully updated.") - end - - context 'following the instance default' do - let(:params) { { enabled: '' } } - - it 'allows enabled to be set to nil' do - subject - project_auto_devops.reload - - expect(project_auto_devops.enabled).to be_nil - end - end - - context 'when run_auto_devops_pipeline is true' do - before do - expect_any_instance_of(Projects::UpdateService).to receive(:run_auto_devops_pipeline?).and_return(true) - end - - it 'queues a CreatePipelineWorker' do - expect(CreatePipelineWorker).to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args) - - subject - end - end - - context 'when run_auto_devops_pipeline is not true' do - before do - expect_any_instance_of(Projects::UpdateService).to receive(:run_auto_devops_pipeline?).and_return(false) - end - - it 'does not queue a CreatePipelineWorker' do - expect(CreatePipelineWorker).not_to receive(:perform_async).with(project.id, user.id, :web, any_args) + describe 'GET show' do + it 'redirects with 302 status code' do + get :show, namespace_id: project.namespace, project_id: project - subject - end - end + expect(response).to have_gitlab_http_status(302) end end end diff --git a/spec/controllers/projects/protected_branches_controller_spec.rb b/spec/controllers/projects/protected_branches_controller_spec.rb index 80be135b5d8..096e29bc39f 100644 --- a/spec/controllers/projects/protected_branches_controller_spec.rb +++ b/spec/controllers/projects/protected_branches_controller_spec.rb @@ -1,6 +1,16 @@ require('spec_helper') describe Projects::ProtectedBranchesController do + let(:project) { create(:project, :repository) } + let(:protected_branch) { create(:protected_branch, project: project) } + let(:project_params) { { namespace_id: project.namespace.to_param, project_id: project } } + let(:base_params) { project_params.merge(id: protected_branch.id) } + let(:user) { create(:user) } + + before do + project.add_master(user) + end + describe "GET #index" do let(:project) { create(:project_empty_repo, :public) } @@ -8,4 +18,91 @@ describe Projects::ProtectedBranchesController do get(:index, namespace_id: project.namespace.to_param, project_id: project) end end + + describe "POST #create" do + let(:master_access_level) { [{ access_level: Gitlab::Access::MASTER }] } + let(:access_level_params) do + { merge_access_levels_attributes: master_access_level, + push_access_levels_attributes: master_access_level } + end + let(:create_params) { attributes_for(:protected_branch).merge(access_level_params) } + + before do + sign_in(user) + end + + it 'creates the protected branch rule' do + expect do + post(:create, project_params.merge(protected_branch: create_params)) + end.to change(ProtectedBranch, :count).by(1) + end + + context 'when a policy restricts rule deletion' do + before do + policy = instance_double(ProtectedBranchPolicy, can?: false) + allow(ProtectedBranchPolicy).to receive(:new).and_return(policy) + end + + it "prevents creation of the protected branch rule" do + post(:create, project_params.merge(protected_branch: create_params)) + + expect(ProtectedBranch.count).to eq 0 + end + end + end + + describe "PUT #update" do + let(:update_params) { { name: 'new_name' } } + + before do + sign_in(user) + end + + it 'updates the protected 
branch rule' do + put(:update, base_params.merge(protected_branch: update_params)) + + expect(protected_branch.reload.name).to eq('new_name') + expect(json_response["name"]).to eq('new_name') + end + + context 'when a policy restricts rule deletion' do + before do + policy = instance_double(ProtectedBranchPolicy, can?: false) + allow(ProtectedBranchPolicy).to receive(:new).and_return(policy) + end + + it "prevents update of the protected branch rule" do + old_name = protected_branch.name + + put(:update, base_params.merge(protected_branch: update_params)) + + expect(protected_branch.reload.name).to eq(old_name) + end + end + end + + describe "DELETE #destroy" do + before do + sign_in(user) + end + + it "deletes the protected branch rule" do + delete(:destroy, base_params) + + expect { ProtectedBranch.find(protected_branch.id) }.to raise_error(ActiveRecord::RecordNotFound) + end + + context 'when a policy restricts rule deletion' do + before do + policy = instance_double(ProtectedBranchPolicy, can?: false) + allow(ProtectedBranchPolicy).to receive(:new).and_return(policy) + end + + it "prevents deletion of the protected branch rule" do + delete(:destroy, base_params) + + expect(response.status).to eq(403) + end + end + end end diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb index b7df42168e0..08e2ccf893a 100644 --- a/spec/controllers/projects/raw_controller_spec.rb +++ b/spec/controllers/projects/raw_controller_spec.rb @@ -8,10 +8,7 @@ describe Projects::RawController do let(:id) { 'master/README.md' } it 'delivers ASCII file' do - get(:show, - namespace_id: public_project.namespace.to_param, - project_id: public_project, - id: id) + get_show(public_project, id) expect(response).to have_gitlab_http_status(200) expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8') @@ -25,10 +22,7 @@ describe Projects::RawController do let(:id) { 'master/files/images/6049019_460s.jpg' } it 'sets image content type header' do - get(:show, - namespace_id: public_project.namespace.to_param, - project_id: public_project, - id: id) + get_show(public_project, id) expect(response).to have_gitlab_http_status(200) expect(response.header['Content-Type']).to eq('image/jpeg') @@ -54,21 +48,40 @@ describe Projects::RawController do it 'serves the file' do expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment') - get(:show, - namespace_id: public_project.namespace.to_param, - project_id: public_project, - id: id) + get_show(public_project, id) expect(response).to have_gitlab_http_status(200) end + + context 'and lfs uses object storage' do + before do + lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png") + lfs_object.save! 
+ stub_lfs_object_storage + lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE) + end + + it 'responds with redirect to file' do + get_show(public_project, id) + + expect(response).to have_gitlab_http_status(302) + expect(response.location).to include(lfs_object.reload.file.path) + end + + it 'sets content disposition' do + get_show(public_project, id) + + file_uri = URI.parse(response.location) + params = CGI.parse(file_uri.query) + + expect(params["response-content-disposition"].first).to eq 'attachment;filename="lfs_object.iso"' + end + end end context 'when project does not have access' do it 'does not serve the file' do - get(:show, - namespace_id: public_project.namespace.to_param, - project_id: public_project, - id: id) + get_show(public_project, id) expect(response).to have_gitlab_http_status(404) end @@ -81,10 +94,7 @@ describe Projects::RawController do end it 'delivers ASCII file' do - get(:show, - namespace_id: public_project.namespace.to_param, - project_id: public_project, - id: id) + get_show(public_project, id) expect(response).to have_gitlab_http_status(200) expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8') @@ -95,4 +105,10 @@ describe Projects::RawController do end end end + + def get_show(project, id) + get(:show, namespace_id: project.namespace.to_param, + project_id: project, + id: id) + end end diff --git a/spec/controllers/projects/repositories_controller_spec.rb b/spec/controllers/projects/repositories_controller_spec.rb index 04d16e98913..a102a3a3c8c 100644 --- a/spec/controllers/projects/repositories_controller_spec.rb +++ b/spec/controllers/projects/repositories_controller_spec.rb @@ -6,7 +6,7 @@ describe Projects::RepositoriesController do describe "GET archive" do context 'as a guest' do it 'responds with redirect in correct format' do - get :archive, namespace_id: project.namespace, project_id: project, format: "zip", ref: 'master' + get :archive, namespace_id: project.namespace, project_id: project, id: "master", format: "zip" expect(response.header["Content-Type"]).to start_with('text/html') expect(response).to be_redirect @@ -22,18 +22,55 @@ describe Projects::RepositoriesController do end it "uses Gitlab::Workhorse" do - get :archive, namespace_id: project.namespace, project_id: project, ref: "master", format: "zip" + get :archive, namespace_id: project.namespace, project_id: project, id: "master", format: "zip" expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:") end + it 'responds with redirect to the short name archive if fully qualified' do + get :archive, namespace_id: project.namespace, project_id: project, id: "master/#{project.path}-master", format: "zip" + + expect(assigns(:ref)).to eq("master") + expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:") + end + + it 'handles legacy queries with no ref' do + get :archive, namespace_id: project.namespace, project_id: project, format: "zip" + + expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:") + end + + it 'handles legacy queries with the ref specified as ref in params' do + get :archive, namespace_id: project.namespace, project_id: project, ref: 'feature', format: 'zip' + + expect(response).to have_gitlab_http_status(200) + expect(assigns(:ref)).to eq('feature') + expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:") + end + + it 'handles legacy queries with the ref specified as id in params' do + get :archive, 
namespace_id: project.namespace, project_id: project, id: 'feature', format: 'zip' + + expect(response).to have_gitlab_http_status(200) + expect(assigns(:ref)).to eq('feature') + expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:") + end + + it 'prioritizes the id param over the ref param when both are specified' do + get :archive, namespace_id: project.namespace, project_id: project, id: 'feature', ref: 'feature_conflict', format: 'zip' + + expect(response).to have_gitlab_http_status(200) + expect(assigns(:ref)).to eq('feature') + expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:") + end + context "when the service raises an error" do before do allow(Gitlab::Workhorse).to receive(:send_git_archive).and_raise("Archive failed") end it "renders Not Found" do - get :archive, namespace_id: project.namespace, project_id: project, ref: "master", format: "zip" + get :archive, namespace_id: project.namespace, project_id: project, id: "master", format: "zip" expect(response).to have_gitlab_http_status(404) end diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb index 293e76798ae..7dae9b85d78 100644 --- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb +++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb @@ -1,8 +1,9 @@ require('spec_helper') describe Projects::Settings::CiCdController do - let(:project) { create(:project, :public, :access_requestable) } - let(:user) { create(:user) } + set(:user) { create(:user) } + set(:project_auto_devops) { create(:project_auto_devops) } + let(:project) { project_auto_devops.project } before do project.add_master(user) @@ -55,4 +56,107 @@ describe Projects::Settings::CiCdController do end end end + + describe 'PATCH update' do + let(:params) { { ci_config_path: '' } } + + subject do + patch :update, + namespace_id: project.namespace.to_param, + project_id: project, + project: params + end + + it 'redirects to the settings page' do + subject + + expect(response).to have_gitlab_http_status(302) + expect(flash[:notice]).to eq("Pipelines settings for '#{project.name}' were successfully updated.") + end + + context 'when updating the auto_devops settings' do + let(:params) { { auto_devops_attributes: { enabled: '', domain: 'mepmep.md' } } } + + context 'following the instance default' do + let(:params) { { auto_devops_attributes: { enabled: '' } } } + + it 'allows enabled to be set to nil' do + subject + project_auto_devops.reload + + expect(project_auto_devops.enabled).to be_nil + end + end + + context 'when run_auto_devops_pipeline is true' do + before do + expect_any_instance_of(Projects::UpdateService).to receive(:run_auto_devops_pipeline?).and_return(true) + end + + context 'when the project repository is empty' do + it 'sets a warning flash' do + expect(subject).to set_flash[:warning] + end + + it 'does not queue a CreatePipelineWorker' do + expect(CreatePipelineWorker).not_to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args) + + subject + end + end + + context 'when the project repository is not empty' do + let(:project) { create(:project, :repository) } + + it 'sets a success flash' do + allow(CreatePipelineWorker).to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args) + + expect(subject).to set_flash[:success] + end + + it 'queues a CreatePipelineWorker' do + expect(CreatePipelineWorker).to 
receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args) + + subject + end + end + end + + context 'when run_auto_devops_pipeline is not true' do + before do + expect_any_instance_of(Projects::UpdateService).to receive(:run_auto_devops_pipeline?).and_return(false) + end + + it 'does not queue a CreatePipelineWorker' do + expect(CreatePipelineWorker).not_to receive(:perform_async).with(project.id, user.id, :web, any_args) + + subject + end + end + end + + context 'when updating general settings' do + context 'when build_timeout_human_readable is not specified' do + let(:params) { { build_timeout_human_readable: '' } } + + it 'set default timeout' do + subject + + project.reload + expect(project.build_timeout).to eq(3600) + end + end + + context 'when build_timeout_human_readable is specified' do + let(:params) { { build_timeout_human_readable: '1h 30m' } } + + it 'set specified timeout' do + subject + + project.reload + expect(project.build_timeout).to eq(5400) + end + end + end + end end diff --git a/spec/controllers/root_controller_spec.rb b/spec/controllers/root_controller_spec.rb index b32eb39b1fb..7688538a468 100644 --- a/spec/controllers/root_controller_spec.rb +++ b/spec/controllers/root_controller_spec.rb @@ -90,6 +90,30 @@ describe RootController do end end + context 'who has customized their dashboard setting for assigned issues' do + before do + user.dashboard = 'issues' + end + + it 'redirects to their assigned issues' do + get :index + + expect(response).to redirect_to issues_dashboard_path(assignee_id: user.id) + end + end + + context 'who has customized their dashboard setting for assigned merge requests' do + before do + user.dashboard = 'merge_requests' + end + + it 'redirects to their assigned merge requests' do + get :index + + expect(response).to redirect_to merge_requests_dashboard_path(assignee_id: user.id) + end + end + context 'who uses the default dashboard setting' do it 'renders the default dashboard' do get :index diff --git a/spec/db/production/settings_spec.rb b/spec/db/production/settings_spec.rb index 79e67330854..c8d016070f5 100644 --- a/spec/db/production/settings_spec.rb +++ b/spec/db/production/settings_spec.rb @@ -2,10 +2,15 @@ require 'spec_helper' require 'rainbow/ext/string' describe 'seed production settings' do - include StubENV let(:settings_file) { Rails.root.join('db/fixtures/production/010_settings.rb') } let(:settings) { Gitlab::CurrentSettings.current_application_settings } + before do + # It's important to set this variable so that we don't save a memoized + # (supposed to be) in-memory record in `Gitlab::CurrentSettings.in_memory_application_settings` + stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') + end + context 'GITLAB_SHARED_RUNNERS_REGISTRATION_TOKEN is set in the environment' do before do stub_env('GITLAB_SHARED_RUNNERS_REGISTRATION_TOKEN', '013456789') diff --git a/spec/factories/appearances.rb b/spec/factories/appearances.rb index 5f9c57c0c8d..18c7453bd1b 100644 --- a/spec/factories/appearances.rb +++ b/spec/factories/appearances.rb @@ -2,8 +2,21 @@ FactoryBot.define do factory :appearance do - title "MepMep" - description "This is my Community Edition instance" + title "GitLab Community Edition" + description "Open source software to collaborate on code" new_project_guidelines "Custom project guidelines" end + + trait :with_logo do + logo { fixture_file_upload('spec/fixtures/dk.png') } + end + + trait :with_header_logo do + header_logo { fixture_file_upload('spec/fixtures/dk.png') } + end + + 
trait :with_logos do + with_logo + with_header_logo + end end diff --git a/spec/factories/award_emoji.rb b/spec/factories/award_emoji.rb index a0abbbce686..d37e2bf511e 100644 --- a/spec/factories/award_emoji.rb +++ b/spec/factories/award_emoji.rb @@ -4,6 +4,10 @@ FactoryBot.define do user awardable factory: :issue + after(:create) do |award, evaluator| + award.awardable.project.add_guest(evaluator.user) + end + trait :upvote trait :downvote do name "thumbsdown" diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb index f6ba3a581ca..4acc008ed38 100644 --- a/spec/factories/ci/builds.rb +++ b/spec/factories/ci/builds.rb @@ -62,6 +62,7 @@ FactoryBot.define do end trait :pending do + queued_at 'Di 29. Okt 09:50:59 CET 2013' status 'pending' end @@ -206,7 +207,7 @@ FactoryBot.define do options do { image: { name: 'ruby:2.1', entrypoint: '/bin/sh' }, - services: ['postgres', { name: 'docker:dind', entrypoint: '/bin/sh', command: 'sleep 30', alias: 'docker' }], + services: ['postgres', { name: 'docker:stable-dind', entrypoint: '/bin/sh', command: 'sleep 30', alias: 'docker' }], after_script: %w(ls date), artifacts: { name: 'artifacts_file', @@ -237,5 +238,15 @@ FactoryBot.define do trait :protected do protected true end + + trait :script_failure do + failed + failure_reason 1 + end + + trait :api_failure do + failed + failure_reason 2 + end end end diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb index 8544d54ccaa..3d3287d8168 100644 --- a/spec/factories/ci/job_artifacts.rb +++ b/spec/factories/ci/job_artifacts.rb @@ -5,6 +5,10 @@ FactoryBot.define do job factory: :ci_build file_type :archive + trait :remote_store do + file_store JobArtifactUploader::Store::REMOTE + end + after :build do |artifact| artifact.project ||= artifact.job.project end diff --git a/spec/factories/clusters/clusters.rb b/spec/factories/clusters/clusters.rb index 20d5580f0c2..98566f907f9 100644 --- a/spec/factories/clusters/clusters.rb +++ b/spec/factories/clusters/clusters.rb @@ -32,5 +32,9 @@ FactoryBot.define do trait :disabled do enabled false end + + trait :production_environment do + sequence(:environment_scope) { |n| "production#{n}/*" } + end end end diff --git a/spec/factories/deploy_tokens.rb b/spec/factories/deploy_tokens.rb new file mode 100644 index 00000000000..5fea4a9d5a6 --- /dev/null +++ b/spec/factories/deploy_tokens.rb @@ -0,0 +1,14 @@ +FactoryBot.define do + factory :deploy_token do + token { SecureRandom.hex(50) } + sequence(:name) { |n| "PDT #{n}" } + read_repository true + read_registry true + revoked false + expires_at { 5.days.from_now } + + trait :revoked do + revoked true + end + end +end diff --git a/spec/factories/internal_ids.rb b/spec/factories/internal_ids.rb new file mode 100644 index 00000000000..fbde07a391a --- /dev/null +++ b/spec/factories/internal_ids.rb @@ -0,0 +1,7 @@ +FactoryBot.define do + factory :internal_id do + project + usage :issues + last_value { project.issues.maximum(:iid) || 0 } + end +end diff --git a/spec/factories/lfs_objects.rb b/spec/factories/lfs_objects.rb index caaed4d5246..eaf3a4ed497 100644 --- a/spec/factories/lfs_objects.rb +++ b/spec/factories/lfs_objects.rb @@ -15,4 +15,8 @@ FactoryBot.define do trait :correct_oid do oid 'b804383982bb89b00e828e3f44c038cc991d3d1768009fc39ba8e2c081b9fb75' end + + trait :object_storage do + file_store { LfsObjectUploader::Store::REMOTE } + end end diff --git a/spec/factories/pages_domains.rb b/spec/factories/pages_domains.rb index 35b44e1c52e..20671da016e 100644 --- 
a/spec/factories/pages_domains.rb +++ b/spec/factories/pages_domains.rb @@ -4,25 +4,7 @@ FactoryBot.define do verified_at { Time.now } enabled_until { 1.week.from_now } - trait :disabled do - verified_at nil - enabled_until nil - end - - trait :unverified do - verified_at nil - end - - trait :reverify do - enabled_until { 1.hour.from_now } - end - - trait :expired do - enabled_until { 1.hour.ago } - end - - trait :with_certificate do - certificate '-----BEGIN CERTIFICATE----- + certificate '-----BEGIN CERTIFICATE----- MIICGzCCAYSgAwIBAgIBATANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExB0ZXN0 LWNlcnRpZmljYXRlMB4XDTE2MDIxMjE0MzIwMFoXDTIwMDQxMjE0MzIwMFowGzEZ MBcGA1UEAxMQdGVzdC1jZXJ0aWZpY2F0ZTCBnzANBgkqhkiG9w0BAQEFAAOBjQAw @@ -36,10 +18,8 @@ joZp2JHYvNlTPkRJ/J4TcXxBTJmArcQgTIuNoBtC+0A/SwdK4MfTCUY4vNWNdese 5A4K65Nb7Oh1AdQieTBHNXXCdyFsva9/ScfQGEl7p55a52jOPs0StPd7g64uvjlg YHi2yesCrOvVXt+lgPTd -----END CERTIFICATE-----' - end - trait :with_key do - key '-----BEGIN PRIVATE KEY----- + key '-----BEGIN PRIVATE KEY----- MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAKS+CfS9GcRSdYSN SzyH5QJQBr5umRL6E+KilOV39iYFO/9oHjUdapTRWkrwnNPCp7qaeck4Jr8iv14t PVNDfNr76eGb6/3YknOAP0QOjLWunoC8kjU+N/JHU52NrUeX3qEy8EKV9LeCDJcB @@ -55,6 +35,30 @@ EPjGlXIT+aW2XiPmK3ZlCDcWIenE+lmtbOpI159Wpk8BGXs/s/xBAkEAlAY3ymgx 63BDJEwvOb2IaP8lDDxNsXx9XJNVvQbv5n15vNsLHbjslHfAhAbxnLQ1fLhUPqSi nNp/xedE1YxutQ== -----END PRIVATE KEY-----' + + trait :disabled do + verified_at nil + enabled_until nil + end + + trait :unverified do + verified_at nil + end + + trait :reverify do + enabled_until { 1.hour.from_now } + end + + trait :expired do + enabled_until { 1.hour.ago } + end + + trait :without_certificate do + certificate nil + end + + trait :without_key do + key nil end trait :with_missing_chain do diff --git a/spec/factories/project_deploy_tokens.rb b/spec/factories/project_deploy_tokens.rb new file mode 100644 index 00000000000..4866cb58d88 --- /dev/null +++ b/spec/factories/project_deploy_tokens.rb @@ -0,0 +1,6 @@ +FactoryBot.define do + factory :project_deploy_token do + project + deploy_token + end +end diff --git a/spec/factories/project_hooks.rb b/spec/factories/project_hooks.rb index 493b7bc021c..a448d565e4b 100644 --- a/spec/factories/project_hooks.rb +++ b/spec/factories/project_hooks.rb @@ -15,6 +15,7 @@ FactoryBot.define do issues_events true confidential_issues_events true note_events true + confidential_note_events true job_events true pipeline_events true wiki_page_events true diff --git a/spec/factories/redirect_routes.rb b/spec/factories/redirect_routes.rb index c29c81c5df9..774232d0b34 100644 --- a/spec/factories/redirect_routes.rb +++ b/spec/factories/redirect_routes.rb @@ -2,14 +2,5 @@ FactoryBot.define do factory :redirect_route do sequence(:path) { |n| "redirect#{n}" } source factory: :group - permanent false - - trait :permanent do - permanent true - end - - trait :temporary do - permanent false - end end end diff --git a/spec/factories/uploads.rb b/spec/factories/uploads.rb index ff3a2a76acc..b45f6f30e40 100644 --- a/spec/factories/uploads.rb +++ b/spec/factories/uploads.rb @@ -5,6 +5,7 @@ FactoryBot.define do uploader "AvatarUploader" mount_point :avatar secret nil + store ObjectStorage::Store::LOCAL # we should build a mount agnostic upload by default transient do @@ -27,6 +28,10 @@ FactoryBot.define do secret SecureRandom.hex end + trait :object_storage do + store ObjectStorage::Store::REMOTE + end + trait :namespace_upload do model { build(:group) } path { File.join(secret, filename) } diff --git 
a/spec/factories/users_star_projects.rb b/spec/factories/users_star_projects.rb new file mode 100644 index 00000000000..6afd08a2084 --- /dev/null +++ b/spec/factories/users_star_projects.rb @@ -0,0 +1,6 @@ +FactoryBot.define do + factory :users_star_project do + project + user + end +end diff --git a/spec/features/admin/admin_broadcast_messages_spec.rb b/spec/features/admin/admin_broadcast_messages_spec.rb index 9cb351282a0..430a8d22b0f 100644 --- a/spec/features/admin/admin_broadcast_messages_spec.rb +++ b/spec/features/admin/admin_broadcast_messages_spec.rb @@ -45,7 +45,7 @@ feature 'Admin Broadcast Messages' do page.within('.broadcast-message-preview') do expect(page).to have_selector('strong', text: 'Markdown') - expect(page).to have_selector('gl-emoji[data-name="tada"]') + expect(page).to have_emoji('tada') end end end diff --git a/spec/features/admin/admin_disables_git_access_protocol_spec.rb b/spec/features/admin/admin_disables_git_access_protocol_spec.rb index 9ea3cfa72c6..9946cc77d1d 100644 --- a/spec/features/admin/admin_disables_git_access_protocol_spec.rb +++ b/spec/features/admin/admin_disables_git_access_protocol_spec.rb @@ -55,14 +55,19 @@ feature 'Admin disables Git access protocol' do end def disable_http_protocol - visit admin_application_settings_path - find('#application_setting_enabled_git_access_protocol').find(:xpath, 'option[2]').select_option - click_on 'Save' + switch_git_protocol(2) end def disable_ssh_protocol + switch_git_protocol(3) + end + + def switch_git_protocol(value) visit admin_application_settings_path - find('#application_setting_enabled_git_access_protocol').find(:xpath, 'option[3]').select_option - click_on 'Save' + + page.within('.as-visibility-access') do + find('#application_setting_enabled_git_access_protocol').find(:xpath, "option[#{value}]").select_option + click_on 'Save' + end end end diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb index 39b213988f0..7853d2952ea 100644 --- a/spec/features/admin/admin_settings_spec.rb +++ b/spec/features/admin/admin_settings_spec.rb @@ -10,18 +10,21 @@ feature 'Admin updates settings' do end scenario 'Change visibility settings' do - choose "application_setting_default_project_visibility_20" - click_button 'Save' + page.within('.as-visibility-access') do + choose "application_setting_default_project_visibility_20" + click_button 'Save changes' + end expect(page).to have_content "Application settings saved successfully" end scenario 'Uncheck all restricted visibility levels' do - find('#application_setting_visibility_level_0').set(false) - find('#application_setting_visibility_level_10').set(false) - find('#application_setting_visibility_level_20').set(false) - - click_button 'Save' + page.within('.as-visibility-access') do + find('#application_setting_visibility_level_0').set(false) + find('#application_setting_visibility_level_10').set(false) + find('#application_setting_visibility_level_20').set(false) + click_button 'Save changes' + end expect(page).to have_content "Application settings saved successfully" expect(find('#application_setting_visibility_level_0')).not_to be_checked @@ -29,34 +32,204 @@ feature 'Admin updates settings' do expect(find('#application_setting_visibility_level_20')).not_to be_checked end - scenario 'Change application settings' do - uncheck 'Gravatar enabled' - fill_in 'Home page URL', with: 'https://about.gitlab.com/' - fill_in 'Help page text', with: 'Example text' - check 'Hide marketing-related entries from help' - fill_in 
'Support page URL', with: 'http://example.com/help' - uncheck 'Project export enabled' - click_button 'Save' + scenario 'Modify import sources' do + expect(Gitlab::CurrentSettings.import_sources).not_to be_empty + + page.within('.as-visibility-access') do + Gitlab::ImportSources.options.map do |name, _| + uncheck name + end + + click_button 'Save changes' + end + + expect(page).to have_content "Application settings saved successfully" + expect(Gitlab::CurrentSettings.import_sources).to be_empty + + page.within('.as-visibility-access') do + check "Repo by URL" + click_button 'Save changes' + end + + expect(page).to have_content "Application settings saved successfully" + expect(Gitlab::CurrentSettings.import_sources).to eq(['git']) + end + + scenario 'Change Visibility and Access Controls' do + page.within('.as-visibility-access') do + uncheck 'Project export enabled' + click_button 'Save changes' + end + + expect(Gitlab::CurrentSettings.project_export_enabled).to be_falsey + expect(page).to have_content "Application settings saved successfully" + end + + scenario 'Change Account and Limit Settings' do + page.within('.as-account-limit') do + uncheck 'Gravatar enabled' + click_button 'Save changes' + end expect(Gitlab::CurrentSettings.gravatar_enabled).to be_falsey + expect(page).to have_content "Application settings saved successfully" + end + + scenario 'Change Sign-in restrictions' do + page.within('.as-signin') do + fill_in 'Home page URL', with: 'https://about.gitlab.com/' + click_button 'Save changes' + end + expect(Gitlab::CurrentSettings.home_page_url).to eq "https://about.gitlab.com/" + expect(page).to have_content "Application settings saved successfully" + end + + scenario 'Modify oauth providers' do + expect(Gitlab::CurrentSettings.disabled_oauth_sign_in_sources).to be_empty + + page.within('.as-signin') do + uncheck 'Google' + click_button 'Save changes' + end + + expect(page).to have_content "Application settings saved successfully" + expect(Gitlab::CurrentSettings.disabled_oauth_sign_in_sources).to include('google_oauth2') + + page.within('.as-signin') do + check "Google" + click_button 'Save changes' + end + + expect(page).to have_content "Application settings saved successfully" + expect(Gitlab::CurrentSettings.disabled_oauth_sign_in_sources).not_to include('google_oauth2') + end + + scenario 'Change Help page' do + page.within('.as-help-page') do + fill_in 'Help page text', with: 'Example text' + check 'Hide marketing-related entries from help' + fill_in 'Support page URL', with: 'http://example.com/help' + click_button 'Save changes' + end + expect(Gitlab::CurrentSettings.help_page_text).to eq "Example text" expect(Gitlab::CurrentSettings.help_page_hide_commercial_content).to be_truthy expect(Gitlab::CurrentSettings.help_page_support_url).to eq "http://example.com/help" - expect(Gitlab::CurrentSettings.project_export_enabled).to be_falsey expect(page).to have_content "Application settings saved successfully" end - scenario 'Change AutoDevOps settings' do - check 'Enabled Auto DevOps (Beta) for projects by default' - fill_in 'Auto devops domain', with: 'domain.com' - click_button 'Save' + scenario 'Change Pages settings' do + page.within('.as-pages') do + fill_in 'Maximum size of pages (MB)', with: 15 + check 'Require users to prove ownership of custom domains' + click_button 'Save changes' + end + + expect(Gitlab::CurrentSettings.max_pages_size).to eq 15 + expect(Gitlab::CurrentSettings.pages_domain_verification_enabled?).to be_truthy + expect(page).to have_content 
"Application settings saved successfully" + end + + scenario 'Change CI/CD settings' do + page.within('.as-ci-cd') do + check 'Enabled Auto DevOps (Beta) for projects by default' + fill_in 'Auto devops domain', with: 'domain.com' + click_button 'Save changes' + end expect(Gitlab::CurrentSettings.auto_devops_enabled?).to be true expect(Gitlab::CurrentSettings.auto_devops_domain).to eq('domain.com') expect(page).to have_content "Application settings saved successfully" end + scenario 'Change Influx settings' do + page.within('.as-influx') do + check 'Enable InfluxDB Metrics' + click_button 'Save changes' + end + + expect(Gitlab::CurrentSettings.metrics_enabled?).to be true + expect(page).to have_content "Application settings saved successfully" + end + + scenario 'Change Prometheus settings' do + page.within('.as-prometheus') do + check 'Enable Prometheus Metrics' + click_button 'Save changes' + end + + expect(Gitlab::CurrentSettings.prometheus_metrics_enabled?).to be true + expect(page).to have_content "Application settings saved successfully" + end + + scenario 'Change Performance bar settings' do + group = create(:group) + + page.within('.as-performance-bar') do + check 'Enable the Performance Bar' + fill_in 'Allowed group', with: group.path + click_on 'Save changes' + end + + expect(page).to have_content "Application settings saved successfully" + expect(find_field('Enable the Performance Bar')).to be_checked + expect(find_field('Allowed group').value).to eq group.path + + page.within('.as-performance-bar') do + uncheck 'Enable the Performance Bar' + click_on 'Save changes' + end + + expect(page).to have_content 'Application settings saved successfully' + expect(find_field('Enable the Performance Bar')).not_to be_checked + expect(find_field('Allowed group').value).to be_nil + end + + scenario 'Change Background jobs settings' do + page.within('.as-background') do + fill_in 'Throttling Factor', with: 1 + click_button 'Save changes' + end + + expect(Gitlab::CurrentSettings.sidekiq_throttling_factor).to eq(1) + expect(page).to have_content "Application settings saved successfully" + end + + scenario 'Change Spam settings' do + page.within('.as-spam') do + check 'Enable reCAPTCHA' + fill_in 'reCAPTCHA Site Key', with: 'key' + fill_in 'reCAPTCHA Private Key', with: 'key' + fill_in 'IPs per user', with: 15 + click_button 'Save changes' + end + + expect(page).to have_content "Application settings saved successfully" + expect(Gitlab::CurrentSettings.recaptcha_enabled).to be true + expect(Gitlab::CurrentSettings.unique_ips_limit_per_user).to eq(15) + end + + scenario 'Configure web terminal' do + page.within('.as-terminal') do + fill_in 'Max session time', with: 15 + click_button 'Save changes' + end + + expect(page).to have_content "Application settings saved successfully" + expect(Gitlab::CurrentSettings.terminal_max_session_time).to eq(15) + end + + scenario 'Enable outbound requests' do + page.within('.as-outbound') do + check 'Allow requests to the local network from hooks and services' + click_button 'Save changes' + end + + expect(page).to have_content "Application settings saved successfully" + expect(Gitlab::CurrentSettings.allow_local_requests_from_hooks_and_services).to be true + end + scenario 'Change Slack Notifications Service template settings' do first(:link, 'Service Templates').click click_link 'Slack notifications' @@ -81,20 +254,14 @@ feature 'Admin updates settings' do expect(find('#service_push_channel').value).to eq '#test_channel' end - context 'sign-in restrictions', :js 
do - it 'de-activates oauth sign-in source' do - find('input#application_setting_enabled_oauth_sign_in_sources_[value=gitlab]').send_keys(:return) - - expect(find('.btn', text: 'GitLab.com')).not_to have_css('.active') - end - end - scenario 'Change Keys settings' do - select 'Are forbidden', from: 'RSA SSH keys' - select 'Are allowed', from: 'DSA SSH keys' - select 'Must be at least 384 bits', from: 'ECDSA SSH keys' - select 'Are forbidden', from: 'ED25519 SSH keys' - click_on 'Save' + page.within('.as-visibility-access') do + select 'Are forbidden', from: 'RSA SSH keys' + select 'Are allowed', from: 'DSA SSH keys' + select 'Must be at least 384 bits', from: 'ECDSA SSH keys' + select 'Are forbidden', from: 'ED25519 SSH keys' + click_on 'Save changes' + end forbidden = ApplicationSetting::FORBIDDEN_KEY_VALUE.to_s @@ -105,29 +272,6 @@ feature 'Admin updates settings' do expect(find_field('ED25519 SSH keys').value).to eq(forbidden) end - scenario 'Change Performance Bar settings' do - group = create(:group) - - check 'Enable the Performance Bar' - fill_in 'Allowed group', with: group.path - - click_on 'Save' - - expect(page).to have_content 'Application settings saved successfully' - - expect(find_field('Enable the Performance Bar')).to be_checked - expect(find_field('Allowed group').value).to eq group.path - - uncheck 'Enable the Performance Bar' - - click_on 'Save' - - expect(page).to have_content 'Application settings saved successfully' - - expect(find_field('Enable the Performance Bar')).not_to be_checked - expect(find_field('Allowed group').value).to be_nil - end - def check_all_events page.check('Active') page.check('Push') diff --git a/spec/features/atom/dashboard_issues_spec.rb b/spec/features/atom/dashboard_issues_spec.rb index d673bac4995..fb6c71ce997 100644 --- a/spec/features/atom/dashboard_issues_spec.rb +++ b/spec/features/atom/dashboard_issues_spec.rb @@ -13,17 +13,26 @@ describe "Dashboard Issues Feed" do end describe "atom feed" do - it "renders atom feed via personal access token" do + it "returns 400 if no filter is used" do personal_access_token = create(:personal_access_token, user: user) visit issues_dashboard_path(:atom, private_token: personal_access_token.token) expect(response_headers['Content-Type']).to have_content('application/atom+xml') + expect(page.status_code).to eq(400) + end + + it "renders atom feed via personal access token" do + personal_access_token = create(:personal_access_token, user: user) + + visit issues_dashboard_path(:atom, private_token: personal_access_token.token, assignee_id: user.id) + + expect(response_headers['Content-Type']).to have_content('application/atom+xml') expect(body).to have_selector('title', text: "#{user.name} issues") end it "renders atom feed via RSS token" do - visit issues_dashboard_path(:atom, rss_token: user.rss_token) + visit issues_dashboard_path(:atom, rss_token: user.rss_token, assignee_id: user.id) expect(response_headers['Content-Type']).to have_content('application/atom+xml') expect(body).to have_selector('title', text: "#{user.name} issues") @@ -44,7 +53,7 @@ describe "Dashboard Issues Feed" do let!(:issue2) { create(:issue, author: user, assignees: [assignee], project: project2, description: 'test desc') } it "renders issue fields" do - visit issues_dashboard_path(:atom, rss_token: user.rss_token) + visit issues_dashboard_path(:atom, rss_token: user.rss_token, assignee_id: assignee.id) entry = find(:xpath, "//feed/entry[contains(summary/text(),'#{issue2.title}')]") @@ -67,7 +76,7 @@ describe "Dashboard Issues 
Feed" do end it "renders issue label and milestone info" do - visit issues_dashboard_path(:atom, rss_token: user.rss_token) + visit issues_dashboard_path(:atom, rss_token: user.rss_token, assignee_id: assignee.id) entry = find(:xpath, "//feed/entry[contains(summary/text(),'#{issue1.title}')]") diff --git a/spec/features/boards/new_issue_spec.rb b/spec/features/boards/new_issue_spec.rb index 6769acb7c9c..e880f0096c1 100644 --- a/spec/features/boards/new_issue_spec.rb +++ b/spec/features/boards/new_issue_spec.rb @@ -63,6 +63,13 @@ describe 'Issue Boards new issue', :js do page.within(first('.board .issue-count-badge-count')) do expect(page).to have_content('1') end + + page.within(first('.card')) do + issue = project.issues.find_by_title('bug') + + expect(page).to have_content(issue.to_reference) + expect(page).to have_link(issue.title, href: issue_path(issue)) + end end it 'shows sidebar when creating new issue' do diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb index d4c44c1adf9..4d31123a699 100644 --- a/spec/features/boards/sidebar_spec.rb +++ b/spec/features/boards/sidebar_spec.rb @@ -237,6 +237,22 @@ describe 'Issue Boards', :js do end context 'labels' do + it 'shows current labels when editing' do + click_card(card) + + page.within('.labels') do + click_link 'Edit' + + wait_for_requests + + page.within('.value') do + expect(page).to have_selector('.label', count: 2) + expect(page).to have_content(development.title) + expect(page).to have_content(stretch.title) + end + end + end + it 'adds a single label' do click_card(card) @@ -296,7 +312,9 @@ describe 'Issue Boards', :js do wait_for_requests - click_link stretch.title + within('.dropdown-menu-labels') do + click_link stretch.title + end wait_for_requests diff --git a/spec/features/dashboard/issues_filter_spec.rb b/spec/features/dashboard/issues_filter_spec.rb index 8759950e013..bab34ac9346 100644 --- a/spec/features/dashboard/issues_filter_spec.rb +++ b/spec/features/dashboard/issues_filter_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' feature 'Dashboard Issues filtering', :js do - include SortingHelper + include Spec::Support::Helpers::Features::SortingHelpers let(:user) { create(:user) } let(:project) { create(:project) } @@ -17,6 +17,12 @@ feature 'Dashboard Issues filtering', :js do visit_issues end + context 'without any filter' do + it 'shows error message' do + expect(page).to have_content 'Please select at least one filter to see results' + end + end + context 'filtering by milestone' do it 'shows all issues with no milestone' do show_milestone_dropdown @@ -27,15 +33,6 @@ feature 'Dashboard Issues filtering', :js do expect(page).to have_selector('.issue', count: 1) end - it 'shows all issues with any milestone' do - show_milestone_dropdown - - click_link 'Any Milestone' - - expect(page).to have_issuable_counts(open: 2, closed: 0, all: 2) - expect(page).to have_selector('.issue', count: 2) - end - it 'shows all issues with the selected milestone' do show_milestone_dropdown @@ -68,13 +65,6 @@ feature 'Dashboard Issues filtering', :js do let(:label) { create(:label, project: project) } let!(:label_link) { create(:label_link, label: label, target: issue) } - it 'shows all issues without filter' do - page.within 'ul.content-list' do - expect(page).to have_content issue.title - expect(page).to have_content issue2.title - end - end - it 'shows all issues with the selected label' do page.within '.labels-filter' do find('.dropdown').click @@ -89,15 +79,19 @@ feature 'Dashboard Issues filtering', 
:js do end context 'sorting' do - it 'shows sorted issues' do - sorting_by('Created date') - visit_issues + before do + visit_issues(assignee_id: user.id) + end + + it 'remembers last sorting value' do + sort_by('Created date') + visit_issues(assignee_id: user.id) expect(find('.issues-filters')).to have_content('Created date') end it 'keeps sorting issues after visiting Projects Issues page' do - sorting_by('Created date') + sort_by('Created date') visit project_issues_path(project) expect(find('.issues-filters')).to have_content('Created date') diff --git a/spec/features/dashboard/issues_spec.rb b/spec/features/dashboard/issues_spec.rb index 8d1d5a51750..e41a2e4ce09 100644 --- a/spec/features/dashboard/issues_spec.rb +++ b/spec/features/dashboard/issues_spec.rb @@ -51,15 +51,6 @@ RSpec.describe 'Dashboard Issues' do expect(page).not_to have_content(other_issue.title) end - it 'shows all issues' do - click_link('Reset filters') - - expect(page).to have_content(authored_issue.title) - expect(page).to have_content(authored_issue_on_public_project.title) - expect(page).to have_content(assigned_issue.title) - expect(page).to have_content(other_issue.title) - end - it 'state filter tabs work' do find('#state-closed').click expect(page).to have_current_path(issues_dashboard_url(assignee_id: current_user.id, state: 'closed'), url: true) diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb index c8f3a8449f5..0965b745c03 100644 --- a/spec/features/dashboard/merge_requests_spec.rb +++ b/spec/features/dashboard/merge_requests_spec.rb @@ -1,8 +1,8 @@ require 'spec_helper' feature 'Dashboard Merge Requests' do + include Spec::Support::Helpers::Features::SortingHelpers include FilterItemSelectHelper - include SortingHelper include ProjectForksHelper let(:current_user) { create :user } @@ -103,19 +103,15 @@ feature 'Dashboard Merge Requests' do expect(page).not_to have_content(other_merge_request.title) end - it 'shows all merge requests', :js do + it 'shows error message without filter', :js do filter_item_select('Any Assignee', '.js-assignee-search') filter_item_select('Any Author', '.js-author-search') - expect(page).to have_content(authored_merge_request.title) - expect(page).to have_content(authored_merge_request_from_fork.title) - expect(page).to have_content(assigned_merge_request.title) - expect(page).to have_content(assigned_merge_request_from_fork.title) - expect(page).to have_content(other_merge_request.title) + expect(page).to have_content('Please select at least one filter to see results') end it 'shows sorted merge requests' do - sorting_by('Created date') + sort_by('Created date') visit merge_requests_dashboard_path(assignee_id: current_user.id) @@ -123,7 +119,7 @@ feature 'Dashboard Merge Requests' do end it 'keeps sorting merge requests after visiting Projects MR page' do - sorting_by('Created date') + sort_by('Created date') visit project_merge_requests_path(project) diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb index 986f864f0b5..257a3822503 100644 --- a/spec/features/dashboard/projects_spec.rb +++ b/spec/features/dashboard/projects_spec.rb @@ -89,7 +89,7 @@ feature 'Dashboard Projects' do end describe 'with a pipeline', :clean_gitlab_redis_shared_state do - let(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit.sha) } + let(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit.sha, ref: project.default_branch) } before do # Since the cache 
isn't updated when a new pipeline is created @@ -102,7 +102,7 @@ feature 'Dashboard Projects' do visit dashboard_projects_path page.within('.controls') do - expect(page).to have_xpath("//a[@href='#{pipelines_project_commit_path(project, project.commit)}']") + expect(page).to have_xpath("//a[@href='#{pipelines_project_commit_path(project, project.commit, ref: pipeline.ref)}']") expect(page).to have_css('.ci-status-link') expect(page).to have_css('.ci-status-icon-success') expect(page).to have_link('Commit: passed') diff --git a/spec/features/groups/activity_spec.rb b/spec/features/groups/activity_spec.rb index d3b25ec3d6c..7bc809b3104 100644 --- a/spec/features/groups/activity_spec.rb +++ b/spec/features/groups/activity_spec.rb @@ -8,11 +8,30 @@ feature 'Group activity page' do context 'when signed in' do before do sign_in(user) - visit path end - it_behaves_like "it has an RSS button with current_user's RSS token" - it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token" + describe 'RSS' do + before do + visit path + end + + it_behaves_like "it has an RSS button with current_user's RSS token" + it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token" + end + + context 'when project is in the group', :js do + let(:project) { create(:project, :public, namespace: group) } + + before do + project.add_master(user) + + visit path + end + + it 'renders user joined to project event' do + expect(page).to have_content 'joined project' + end + end end context 'when signed out' do diff --git a/spec/features/groups/group_settings_spec.rb b/spec/features/groups/group_settings_spec.rb index b83bad3befb..1ce30015e81 100644 --- a/spec/features/groups/group_settings_spec.rb +++ b/spec/features/groups/group_settings_spec.rb @@ -76,6 +76,27 @@ feature 'Edit group settings' do end end end + + describe 'edit group avatar' do + before do + visit edit_group_path(group) + + attach_file(:group_avatar, Rails.root.join('spec', 'fixtures', 'banana_sample.gif')) + + expect { click_button 'Save group' }.to change { group.reload.avatar? }.to(true) + end + + it 'uploads new group avatar' do + expect(group.avatar).to be_instance_of AvatarUploader + expect(group.avatar.url).to eq "/uploads/-/system/group/avatar/#{group.id}/banana_sample.gif" + expect(page).to have_link('Remove avatar') + end + + it 'removes group avatar' do + expect { click_link 'Remove avatar' }.to change { group.reload.avatar? 
}.to(false) + expect(page).not_to have_link('Remove avatar') + end + end end def update_path(new_group_path) diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb index 450bc0ff8cf..90bf7ba49f6 100644 --- a/spec/features/groups/issues_spec.rb +++ b/spec/features/groups/issues_spec.rb @@ -3,8 +3,11 @@ require 'spec_helper' feature 'Group issues page' do include FilteredSearchHelpers + let(:group) { create(:group) } + let(:project) { create(:project, :public, group: group)} + let(:path) { issues_group_path(group) } + context 'with shared examples' do - let(:path) { issues_group_path(group) } let(:issuable) { create(:issue, project: project, title: "this is my created issuable")} include_examples 'project features apply to issuables', Issue @@ -31,7 +34,6 @@ feature 'Group issues page' do let(:access_level) { ProjectFeature::ENABLED } let(:user) { user_in_group } let(:user2) { user_outside_group } - let(:path) { issues_group_path(group) } it 'filters by only group users' do filtered_search.set('assignee:') @@ -43,9 +45,7 @@ feature 'Group issues page' do end context 'issues list', :nested_groups do - let(:group) { create(:group)} let(:subgroup) { create(:group, parent: group) } - let(:project) { create(:project, :public, group: group)} let(:subgroup_project) { create(:project, :public, group: subgroup)} let!(:issue) { create(:issue, project: project, title: 'root group issue') } let!(:subgroup_issue) { create(:issue, project: subgroup_project, title: 'subgroup issue') } @@ -59,5 +59,17 @@ feature 'Group issues page' do expect(page).to have_content('subgroup issue') end end + + context 'when project is archived' do + before do + project.archive! + end + + it 'does not render issue' do + visit path + + expect(page).not_to have_content issue.title[0..80] + end + end end end diff --git a/spec/features/groups/merge_requests_spec.rb b/spec/features/groups/merge_requests_spec.rb index 7ce6a61d50c..672ae785c2d 100644 --- a/spec/features/groups/merge_requests_spec.rb +++ b/spec/features/groups/merge_requests_spec.rb @@ -5,14 +5,14 @@ feature 'Group merge requests page' do let(:path) { merge_requests_group_path(group) } let(:issuable) { create(:merge_request, source_project: project, target_project: project, title: 'this is my created issuable') } + let(:access_level) { ProjectFeature::ENABLED } + let(:user) { user_in_group } include_examples 'project features apply to issuables', MergeRequest context 'archived issuable' do let(:project_archived) { create(:project, :archived, :merge_requests_enabled, :repository, group: group) } let(:issuable_archived) { create(:merge_request, source_project: project_archived, target_project: project_archived, title: 'issuable of an archived project') } - let(:access_level) { ProjectFeature::ENABLED } - let(:user) { user_in_group } before do issuable_archived @@ -36,9 +36,17 @@ feature 'Group merge requests page' do end end + context 'when merge request assignee to user' do + before do + issuable.update!(assignee: user) + + visit path + end + + it { expect(page).to have_content issuable.title[0..80] } + end + context 'group filtered search', :js do - let(:access_level) { ProjectFeature::ENABLED } - let(:user) { user_in_group } let(:user2) { user_outside_group } it 'filters by assignee only group users' do diff --git a/spec/features/groups/settings/group_badges_spec.rb b/spec/features/groups/settings/group_badges_spec.rb new file mode 100644 index 00000000000..92217294446 --- /dev/null +++ 
b/spec/features/groups/settings/group_badges_spec.rb @@ -0,0 +1,124 @@ +require 'spec_helper' + +feature 'Group Badges' do + include WaitForRequests + + let(:user) { create(:user) } + let(:group) { create(:group) } + let(:badge_link_url) { 'https://gitlab.com/gitlab-org/gitlab-ee/commits/master'} + let(:badge_image_url) { 'https://gitlab.com/gitlab-org/gitlab-ee/badges/master/build.svg'} + let!(:badge_1) { create(:group_badge, group: group) } + let!(:badge_2) { create(:group_badge, group: group) } + + before do + group.add_owner(user) + sign_in(user) + + visit(group_settings_badges_path(group)) + end + + it 'shows a list of badges', :js do + page.within '.badge-settings' do + wait_for_requests + + rows = all('.panel-body > div') + expect(rows.length).to eq 2 + expect(rows[0]).to have_content badge_1.link_url + expect(rows[1]).to have_content badge_2.link_url + end + end + + context 'adding a badge', :js do + it 'user can preview a badge' do + page.within '.badge-settings form' do + fill_in 'badge-link-url', with: badge_link_url + fill_in 'badge-image-url', with: badge_image_url + within '#badge-preview' do + expect(find('a')[:href]).to eq badge_link_url + expect(find('a img')[:src]).to eq badge_image_url + end + end + end + + it do + page.within '.badge-settings' do + fill_in 'badge-link-url', with: badge_link_url + fill_in 'badge-image-url', with: badge_image_url + + click_button 'Add badge' + wait_for_requests + + within '.panel-body' do + expect(find('a')[:href]).to eq badge_link_url + expect(find('a img')[:src]).to eq badge_image_url + end + end + end + end + + context 'editing a badge', :js do + it 'form is shown when clicking edit button in list' do + page.within '.badge-settings' do + wait_for_requests + rows = all('.panel-body > div') + expect(rows.length).to eq 2 + rows[1].find('[aria-label="Edit"]').click + + within 'form' do + expect(find('#badge-link-url').value).to eq badge_2.link_url + expect(find('#badge-image-url').value).to eq badge_2.image_url + end + end + end + + it 'updates a badge when submitting the edit form' do + page.within '.badge-settings' do + wait_for_requests + rows = all('.panel-body > div') + expect(rows.length).to eq 2 + rows[1].find('[aria-label="Edit"]').click + within 'form' do + fill_in 'badge-link-url', with: badge_link_url + fill_in 'badge-image-url', with: badge_image_url + + click_button 'Save changes' + wait_for_requests + end + + rows = all('.panel-body > div') + expect(rows.length).to eq 2 + expect(rows[1]).to have_content badge_link_url + end + end + end + + context 'deleting a badge', :js do + def click_delete_button(badge_row) + badge_row.find('[aria-label="Delete"]').click + end + + it 'shows a modal when deleting a badge' do + wait_for_requests + rows = all('.panel-body > div') + expect(rows.length).to eq 2 + + click_delete_button(rows[1]) + + expect(find('.modal .modal-title')).to have_content 'Delete badge?' 
+ end + + it 'deletes a badge when confirming the modal' do + wait_for_requests + rows = all('.panel-body > div') + expect(rows.length).to eq 2 + click_delete_button(rows[1]) + + find('.modal .btn-danger').click + wait_for_requests + + rows = all('.panel-body > div') + expect(rows.length).to eq 1 + expect(rows[0]).to have_content badge_1.link_url + end + end +end diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb index ceccc471405..3a0424d60f8 100644 --- a/spec/features/groups/show_spec.rb +++ b/spec/features/groups/show_spec.rb @@ -15,14 +15,44 @@ feature 'Group show page' do end it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token" + + context 'when group does not exist' do + let(:path) { group_path('not-exist') } + + it { expect(status_code).to eq(404) } + end end context 'when signed out' do - before do - visit path + describe 'RSS' do + before do + visit path + end + + it_behaves_like "an autodiscoverable RSS feed without an RSS token" + end + + context 'when group has a public project', :js do + let!(:project) { create(:project, :public, namespace: group) } + + it 'renders public project' do + visit path + + expect(page).to have_link group.name + expect(page).to have_link project.name + end end - it_behaves_like "an autodiscoverable RSS feed without an RSS token" + context 'when group has a private project', :js do + let!(:project) { create(:project, :private, namespace: group) } + + it 'does not render private project' do + visit path + + expect(page).to have_link group.name + expect(page).not_to have_link project.name + end + end end context 'subgroup support' do @@ -68,7 +98,7 @@ feature 'Group show page' do it 'shows the project info' do expect(page).to have_content(project.title) - expect(page).to have_selector('gl-emoji[data-name="smile"]') + expect(page).to have_emoji('smile') end end end diff --git a/spec/features/groups/user_browse_projects_group_page_spec.rb b/spec/features/groups/user_browse_projects_group_page_spec.rb new file mode 100644 index 00000000000..e81c3180e78 --- /dev/null +++ b/spec/features/groups/user_browse_projects_group_page_spec.rb @@ -0,0 +1,29 @@ +require 'rails_helper' + +describe 'User browse group projects page' do + let(:user) { create :user } + let(:group) { create :group } + + context 'when user is owner' do + before do + group.add_owner(user) + end + + context 'when user signed in' do + before do + sign_in(user) + end + + context 'when group has archived project', :js do + let!(:project) { create :project, :archived, namespace: group } + + it 'renders projects list' do + visit projects_group_path(group) + + expect(page).to have_link project.name + expect(page).to have_xpath("//span[@class='label label-warning']", text: 'archived') + end + end + end + end +end diff --git a/spec/features/ide_spec.rb b/spec/features/ide_spec.rb new file mode 100644 index 00000000000..b3f24c2966d --- /dev/null +++ b/spec/features/ide_spec.rb @@ -0,0 +1,25 @@ +require 'spec_helper' + +describe 'IDE', :js do + describe 'sub-groups' do + let(:user) { create(:user) } + let(:group) { create(:group) } + let(:subgroup) { create(:group, parent: group) } + let(:subgroup_project) { create(:project, :repository, namespace: subgroup) } + + before do + subgroup_project.add_master(user) + sign_in(user) + + visit project_path(subgroup_project) + + click_link('Web IDE') + + wait_for_requests + end + + it 'loads project in web IDE' do + expect(page).to have_selector('.context-header', text: subgroup_project.name) + end + end +end 
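The group badges spec above drives the badge settings UI end to end (list, preview, add, edit, delete) and leans on a `:group_badge` factory for its fixtures, reading `link_url` and `image_url` off the created records. As an illustrative sketch only — the factory name and the `link_url`/`image_url` attributes come from that spec, while the class wiring and the sequences below are assumptions rather than the factory actually introduced by this change — such a factory could be defined as:

    # Hypothetical sketch of the factory the badge spec relies on; attribute names are
    # taken from the spec above, the URL sequences are assumed placeholders.
    FactoryBot.define do
      factory :group_badge do
        group
        sequence(:link_url)  { |n| "https://example.com/commits/master?badge=#{n}" }
        sequence(:image_url) { |n| "https://example.com/badges/master/build-#{n}.svg" }
      end
    end

    # create(:group_badge, group: group) then exposes badge.link_url / badge.image_url,
    # which the list, edit and delete examples above assert against.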
diff --git a/spec/features/issuables/discussion_lock_spec.rb b/spec/features/issuables/discussion_lock_spec.rb index ecbe51a7bc2..7ea29ff252b 100644 --- a/spec/features/issuables/discussion_lock_spec.rb +++ b/spec/features/issuables/discussion_lock_spec.rb @@ -14,7 +14,7 @@ describe 'Discussion Lock', :js do project.add_developer(user) end - context 'when the discussion is unlocked' do + context 'when the discussion is unlocked' do it 'the user can lock the issue' do visit project_issue_path(project, issue) diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb index b3c50964810..08ba91a2682 100644 --- a/spec/features/issues/filtered_search/filter_issues_spec.rb +++ b/spec/features/issues/filtered_search/filter_issues_spec.rb @@ -22,15 +22,6 @@ describe 'Filter issues', :js do end end - def expect_issues_list_count(open_count, closed_count = 0) - all_count = open_count + closed_count - - expect(page).to have_issuable_counts(open: open_count, closed: closed_count, all: all_count) - page.within '.issues-list' do - expect(page).to have_selector('.issue', count: open_count) - end - end - before do project.add_master(user) diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb index 38c618d300e..4625a50b8d9 100644 --- a/spec/features/issues/form_spec.rb +++ b/spec/features/issues/form_spec.rb @@ -226,6 +226,23 @@ describe 'New/edit issue', :js do expect(page).to have_selector('.atwho-view') end + + describe 'milestone' do + let!(:milestone) { create(:milestone, title: '"><img src=x onerror=alert(document.domain)>', project: project) } + + it 'escapes milestone' do + click_button 'Milestone' + + page.within '.issue-milestone' do + click_link milestone.title + end + + page.within '.js-milestone-select' do + expect(page).to have_content milestone.title + expect(page).not_to have_selector 'img' + end + end + end end context 'edit issue' do diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb index b835558b142..830c794376d 100644 --- a/spec/features/issues/issue_sidebar_spec.rb +++ b/spec/features/issues/issue_sidebar_spec.rb @@ -5,9 +5,9 @@ feature 'Issue Sidebar' do let(:group) { create(:group, :nested) } let(:project) { create(:project, :public, namespace: group) } - let(:issue) { create(:issue, project: project) } let!(:user) { create(:user)} let!(:label) { create(:label, project: project, title: 'bug') } + let(:issue) { create(:labeled_issue, project: project, labels: [label]) } let!(:xss_label) { create(:label, project: project, title: '<script>alert("xss");</script>') } before do @@ -112,11 +112,18 @@ feature 'Issue Sidebar' do context 'editing issue labels', :js do before do + issue.update_attributes(labels: [label]) page.within('.block.labels') do find('.edit-link').click end end + it 'shows the current set of labels' do + page.within('.issuable-show-labels') do + expect(page).to have_content label.title + end + end + it 'shows option to create a project label' do page.within('.block.labels') do expect(page).to have_content 'Create project' @@ -161,6 +168,50 @@ feature 'Issue Sidebar' do end end end + + context 'interacting with collapsed sidebar', :js do + collapsed_sidebar_selector = 'aside.right-sidebar.right-sidebar-collapsed' + expanded_sidebar_selector = 'aside.right-sidebar.right-sidebar-expanded' + confidentiality_sidebar_block = '.block.confidentiality' + lock_sidebar_block = '.block.lock' + collapsed_sidebar_block_icon = 
'.sidebar-collapsed-icon' + + before do + resize_screen_sm + end + + it 'confidentiality block expands then collapses sidebar' do + expect(page).to have_css(collapsed_sidebar_selector) + + page.within(confidentiality_sidebar_block) do + find(collapsed_sidebar_block_icon).click + end + + expect(page).to have_css(expanded_sidebar_selector) + + page.within(confidentiality_sidebar_block) do + page.find('button', text: 'Cancel').click + end + + expect(page).to have_css(collapsed_sidebar_selector) + end + + it 'lock block expands then collapses sidebar' do + expect(page).to have_css(collapsed_sidebar_selector) + + page.within(lock_sidebar_block) do + find(collapsed_sidebar_block_icon).click + end + + expect(page).to have_css(expanded_sidebar_selector) + + page.within(lock_sidebar_block) do + page.find('button', text: 'Cancel').click + end + + expect(page).to have_css(collapsed_sidebar_selector) + end + end end context 'as a guest' do diff --git a/spec/features/issues/spam_issues_spec.rb b/spec/features/issues/spam_issues_spec.rb index a75ca1d42b3..73022afbda2 100644 --- a/spec/features/issues/spam_issues_spec.rb +++ b/spec/features/issues/spam_issues_spec.rb @@ -34,9 +34,6 @@ describe 'New issue', :js do click_button 'Submit issue' - # reCAPTCHA alerts when it can't contact the server, so just accept it and move on - page.driver.browser.switch_to.alert.accept - # it is impossible to test recaptcha automatically and there is no possibility to fill in recaptcha # recaptcha verification is skipped in test environment and it always returns true expect(page).not_to have_content('issue title') diff --git a/spec/features/issues/todo_spec.rb b/spec/features/issues/todo_spec.rb index 8e6493bbd93..4a44ec302fc 100644 --- a/spec/features/issues/todo_spec.rb +++ b/spec/features/issues/todo_spec.rb @@ -14,7 +14,7 @@ feature 'Manually create a todo item from issue', :js do it 'creates todo when clicking button' do page.within '.issuable-sidebar' do click_button 'Add todo' - expect(page).to have_content 'Mark done' + expect(page).to have_content 'Mark todo as done' end page.within '.header-content .todos-count' do @@ -31,7 +31,7 @@ feature 'Manually create a todo item from issue', :js do it 'marks a todo as done' do page.within '.issuable-sidebar' do click_button 'Add todo' - click_button 'Mark done' + click_button 'Mark todo as done' end expect(page).to have_selector('.todos-count', visible: false) diff --git a/spec/features/issues/user_uses_slash_commands_spec.rb b/spec/features/issues/user_uses_slash_commands_spec.rb index ea7a97d02a0..ff2a0e15719 100644 --- a/spec/features/issues/user_uses_slash_commands_spec.rb +++ b/spec/features/issues/user_uses_slash_commands_spec.rb @@ -1,7 +1,7 @@ require 'rails_helper' feature 'Issues > User uses quick actions', :js do - include QuickActionsHelpers + include Spec::Support::Helpers::Features::NotesHelpers it_behaves_like 'issuable record that supports quick actions in its description and notes', :issue do let(:issuable) { create(:issue, project: project) } @@ -36,7 +36,7 @@ feature 'Issues > User uses quick actions', :js do context 'when the current user can update the due date' do it 'does not create a note, and sets the due date accordingly' do - write_note("/due 2016-08-28") + add_note("/due 2016-08-28") expect(page).not_to have_content '/due 2016-08-28' expect(page).to have_content 'Commands applied' @@ -57,7 +57,7 @@ feature 'Issues > User uses quick actions', :js do end it 'does not create a note, and sets the due date accordingly' do - write_note("/due 
2016-08-28") + add_note("/due 2016-08-28") expect(page).not_to have_content 'Commands applied' @@ -75,7 +75,7 @@ feature 'Issues > User uses quick actions', :js do it 'does not create a note, and removes the due date accordingly' do expect(issue.due_date).to eq Date.new(2016, 8, 28) - write_note("/remove_due_date") + add_note("/remove_due_date") expect(page).not_to have_content '/remove_due_date' expect(page).to have_content 'Commands applied' @@ -96,7 +96,7 @@ feature 'Issues > User uses quick actions', :js do end it 'does not create a note, and sets the due date accordingly' do - write_note("/remove_due_date") + add_note("/remove_due_date") expect(page).not_to have_content 'Commands applied' @@ -111,7 +111,7 @@ feature 'Issues > User uses quick actions', :js do let(:issue) { create(:issue, project: project) } it 'does not recognize the command nor create a note' do - write_note("/wip") + add_note("/wip") expect(page).not_to have_content '/wip' end @@ -123,7 +123,7 @@ feature 'Issues > User uses quick actions', :js do context 'when the current user can update issues' do it 'does not create a note, and marks the issue as a duplicate' do - write_note("/duplicate ##{original_issue.to_reference}") + add_note("/duplicate ##{original_issue.to_reference}") expect(page).not_to have_content "/duplicate #{original_issue.to_reference}" expect(page).to have_content 'Commands applied' @@ -143,7 +143,7 @@ feature 'Issues > User uses quick actions', :js do end it 'does not create a note, and does not mark the issue as a duplicate' do - write_note("/duplicate ##{original_issue.to_reference}") + add_note("/duplicate ##{original_issue.to_reference}") expect(page).not_to have_content 'Commands applied' expect(page).not_to have_content "marked this issue as a duplicate of #{original_issue.to_reference}" @@ -166,7 +166,7 @@ feature 'Issues > User uses quick actions', :js do end it 'moves the issue' do - write_note("/move #{target_project.full_path}") + add_note("/move #{target_project.full_path}") expect(page).to have_content 'Commands applied' expect(issue.reload).to be_closed @@ -186,7 +186,7 @@ feature 'Issues > User uses quick actions', :js do end it 'does not move the issue' do - write_note("/move #{project_unauthorized.full_path}") + add_note("/move #{project_unauthorized.full_path}") expect(page).not_to have_content 'Commands applied' expect(issue.reload).to be_open @@ -200,7 +200,7 @@ feature 'Issues > User uses quick actions', :js do end it 'does not move the issue' do - write_note("/move not/valid") + add_note("/move not/valid") expect(page).not_to have_content 'Commands applied' expect(issue.reload).to be_open @@ -223,7 +223,7 @@ feature 'Issues > User uses quick actions', :js do end it 'applies the commands to both issues and moves the issue' do - write_note("/label ~#{bug.title} ~#{wontfix.title}\n\n/milestone %\"#{milestone.title}\"\n\n/move #{target_project.full_path}") + add_note("/label ~#{bug.title} ~#{wontfix.title}\n\n/milestone %\"#{milestone.title}\"\n\n/move #{target_project.full_path}") expect(page).to have_content 'Commands applied' expect(issue.reload).to be_closed @@ -242,7 +242,7 @@ feature 'Issues > User uses quick actions', :js do end it 'moves the issue and applies the commands to both issues' do - write_note("/move #{target_project.full_path}\n\n/label ~#{bug.title} ~#{wontfix.title}\n\n/milestone %\"#{milestone.title}\"") + add_note("/move #{target_project.full_path}\n\n/label ~#{bug.title} ~#{wontfix.title}\n\n/milestone %\"#{milestone.title}\"") expect(page).to have_content 
'Commands applied' expect(issue.reload).to be_closed diff --git a/spec/features/labels_hierarchy_spec.rb b/spec/features/labels_hierarchy_spec.rb new file mode 100644 index 00000000000..3e05e7b7f38 --- /dev/null +++ b/spec/features/labels_hierarchy_spec.rb @@ -0,0 +1,305 @@ +require 'spec_helper' + +feature 'Labels Hierarchy', :js, :nested_groups do + include FilteredSearchHelpers + + let!(:user) { create(:user) } + let!(:grandparent) { create(:group) } + let!(:parent) { create(:group, parent: grandparent) } + let!(:child) { create(:group, parent: parent) } + let!(:project_1) { create(:project, namespace: parent) } + + let!(:grandparent_group_label) { create(:group_label, group: grandparent, title: 'Label_1') } + let!(:parent_group_label) { create(:group_label, group: parent, title: 'Label_2') } + let!(:child_group_label) { create(:group_label, group: child, title: 'Label_3') } + let!(:project_label_1) { create(:label, project: project_1, title: 'Label_4') } + + before do + grandparent.add_owner(user) + + sign_in(user) + end + + shared_examples 'assigning labels from sidebar' do + it 'can assign all ancestors labels' do + [grandparent_group_label, parent_group_label, project_label_1].each do |label| + page.within('.block.labels') do + find('.edit-link').click + end + + wait_for_requests + + find('a.label-item', text: label.title).click + find('.dropdown-menu-close-icon').click + + wait_for_requests + + expect(page).to have_selector('span.label', text: label.title) + end + end + + it 'does not find child group labels on dropdown' do + page.within('.block.labels') do + find('.edit-link').click + end + + wait_for_requests + + expect(page).not_to have_selector('span.label', text: child_group_label.title) + end + end + + shared_examples 'filtering by ancestor labels for projects' do |board = false| + it 'filters by ancestor labels' do + [grandparent_group_label, parent_group_label, project_label_1].each do |label| + select_label_on_dropdown(label.title) + + wait_for_requests + + if board + expect(page).to have_selector('.card-title') do |card| + expect(card).to have_selector('a', text: labeled_issue.title) + end + else + expect_issues_list_count(1) + expect(page).to have_selector('span.issue-title-text', text: labeled_issue.title) + end + end + end + + it 'does not filter by descendant group labels' do + filtered_search.set("label:") + + wait_for_requests + + expect(page).not_to have_selector('.btn-link', text: child_group_label.title) + end + end + + shared_examples 'filtering by ancestor labels for groups' do |board = false| + let(:project_2) { create(:project, namespace: parent) } + let!(:project_label_2) { create(:label, project: project_2, title: 'Label_4') } + + let(:project_3) { create(:project, namespace: child) } + let!(:group_label_3) { create(:group_label, group: child, title: 'Label_5') } + let!(:project_label_3) { create(:label, project: project_3, title: 'Label_6') } + + let!(:labeled_issue_2) { create(:labeled_issue, project: project_2, labels: [grandparent_group_label, parent_group_label, project_label_2]) } + let!(:labeled_issue_3) { create(:labeled_issue, project: project_3, labels: [grandparent_group_label, parent_group_label, group_label_3]) } + + let!(:issue_2) { create(:issue, project: project_2) } + + it 'filters by ancestors and current group labels' do + [grandparent_group_label, parent_group_label].each do |label| + select_label_on_dropdown(label.title) + + wait_for_requests + + if board + expect(page).to have_selector('.card-title') do |card| + expect(card).to 
have_selector('a', text: labeled_issue.title) + end + + expect(page).to have_selector('.card-title') do |card| + expect(card).to have_selector('a', text: labeled_issue_2.title) + end + else + expect_issues_list_count(3) + expect(page).to have_selector('span.issue-title-text', text: labeled_issue.title) + expect(page).to have_selector('span.issue-title-text', text: labeled_issue_2.title) + expect(page).to have_selector('span.issue-title-text', text: labeled_issue_3.title) + end + end + end + + it 'filters by descendant group labels' do + wait_for_requests + + select_label_on_dropdown(group_label_3.title) + + if board + expect(page).to have_selector('.card-title') do |card| + expect(card).not_to have_selector('a', text: labeled_issue_2.title) + end + + expect(page).to have_selector('.card-title') do |card| + expect(card).to have_selector('a', text: labeled_issue_3.title) + end + else + expect_issues_list_count(1) + expect(page).to have_selector('span.issue-title-text', text: labeled_issue_3.title) + end + end + + it 'does not filter by descendant group project labels' do + filtered_search.set("label:") + + wait_for_requests + + expect(page).not_to have_selector('.btn-link', text: project_label_3.title) + end + end + + context 'when creating new issuable' do + before do + visit new_project_issue_path(project_1) + end + + it 'should be able to assign ancestor group labels' do + fill_in 'issue_title', with: 'new created issue' + fill_in 'issue_description', with: 'new issue description' + + find(".js-label-select").click + wait_for_requests + + find('a.label-item', text: grandparent_group_label.title).click + find('a.label-item', text: parent_group_label.title).click + find('a.label-item', text: project_label_1.title).click + + find('.btn-create').click + + expect(page.find('.issue-details h2.title')).to have_content('new created issue') + expect(page).to have_selector('span.label', text: grandparent_group_label.title) + expect(page).to have_selector('span.label', text: parent_group_label.title) + expect(page).to have_selector('span.label', text: project_label_1.title) + end + end + + context 'issuable sidebar' do + let!(:issue) { create(:issue, project: project_1) } + + context 'on issue sidebar' do + before do + visit project_issue_path(project_1, issue) + end + + it_behaves_like 'assigning labels from sidebar' + end + + context 'on project board issue sidebar' do + let(:board) { create(:board, project: project_1) } + + before do + visit project_board_path(project_1, board) + + wait_for_requests + + find('.card').click + end + + it_behaves_like 'assigning labels from sidebar' + end + + context 'on group board issue sidebar' do + let(:board) { create(:board, group: parent) } + + before do + visit group_board_path(parent, board) + + wait_for_requests + + find('.card').click + end + + it_behaves_like 'assigning labels from sidebar' + end + end + + context 'issuable filtering' do + let!(:labeled_issue) { create(:labeled_issue, project: project_1, labels: [grandparent_group_label, parent_group_label, project_label_1]) } + let!(:issue) { create(:issue, project: project_1) } + + context 'on project issuable list' do + before do + visit project_issues_path(project_1) + end + + it_behaves_like 'filtering by ancestor labels for projects' + + it 'does not filter by descendant group labels' do + filtered_search.set("label:") + + wait_for_requests + + expect(page).not_to have_selector('.btn-link', text: child_group_label.title) + end + end + + context 'on group issuable list' do + before do + visit 
issues_group_path(parent) + end + + it_behaves_like 'filtering by ancestor labels for groups' + end + + context 'on project boards filter' do + let(:board) { create(:board, project: project_1) } + + before do + visit project_board_path(project_1, board) + end + + it_behaves_like 'filtering by ancestor labels for projects', true + end + + context 'on group boards filter' do + let(:board) { create(:board, group: parent) } + + before do + visit group_board_path(parent, board) + end + + it_behaves_like 'filtering by ancestor labels for groups', true + end + end + + context 'creating boards lists' do + context 'on project boards' do + let(:board) { create(:board, project: project_1) } + + before do + visit project_board_path(project_1, board) + find('.js-new-board-list').click + wait_for_requests + end + + it 'creates lists from all ancestor labels' do + [grandparent_group_label, parent_group_label, project_label_1].each do |label| + find('a', text: label.title).click + end + + wait_for_requests + + expect(page).to have_selector('.board-title-text', text: grandparent_group_label.title) + expect(page).to have_selector('.board-title-text', text: parent_group_label.title) + expect(page).to have_selector('.board-title-text', text: project_label_1.title) + end + end + + context 'on group boards' do + let(:board) { create(:board, group: parent) } + + before do + visit group_board_path(parent, board) + find('.js-new-board-list').click + wait_for_requests + end + + it 'creates lists from all ancestor group labels' do + [grandparent_group_label, parent_group_label].each do |label| + find('a', text: label.title).click + end + + wait_for_requests + + expect(page).to have_selector('.board-title-text', text: grandparent_group_label.title) + expect(page).to have_selector('.board-title-text', text: parent_group_label.title) + end + + it 'does not create lists from descendant groups' do + expect(page).not_to have_selector('a', text: child_group_label.title) + end + end + end +end diff --git a/spec/features/markdown/copy_as_gfm_spec.rb b/spec/features/markdown/copy_as_gfm_spec.rb index f82ed6300cc..4d897f09b57 100644 --- a/spec/features/markdown/copy_as_gfm_spec.rb +++ b/spec/features/markdown/copy_as_gfm_spec.rb @@ -20,7 +20,7 @@ describe 'Copy as GFM', :js do end # The filters referenced in lib/banzai/pipeline/gfm_pipeline.rb convert GitLab Flavored Markdown (GFM) to HTML. - # The handlers defined in app/assets/javascripts/copy_as_gfm.js consequently convert that same HTML to GFM. + # The handlers defined in app/assets/javascripts/behaviors/markdown/copy_as_gfm.js consequently convert that same HTML to GFM. # To make sure these filters and handlers are properly aligned, this spec tests the GFM-to-HTML-to-GFM cycle # by verifying (`html_to_gfm(gfm_to_html(gfm)) == gfm`) for a number of examples of GFM for every filter, using the `verify` helper. 
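The comment above states the round-trip contract that copy_as_gfm_spec.rb enforces: every GFM example must survive conversion to HTML and back unchanged. As a rough sketch of that check only — `gfm_to_html` and `html_to_gfm` are the helper names referenced in the comment, while the spec's actual `verify` helper additionally drives the copy-as-GFM JavaScript handlers through the browser via Capybara, which is omitted here — the core assertion has roughly this shape:

    # Simplified sketch of the round-trip check; not the spec's real helper.
    def verify(label, *gfms)
      aggregate_failures(label) do
        gfms.each do |gfm|
          html = gfm_to_html(gfm)               # GFM -> HTML via the Banzai pipeline
          expect(html_to_gfm(html)).to eq(gfm)  # HTML -> GFM must restore the original markup
        end
      end
    end

    # e.g. verify('SanitizationFilter', '<sub>subscript</sub>')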
diff --git a/spec/features/merge_request/user_awards_emoji_spec.rb b/spec/features/merge_request/user_awards_emoji_spec.rb index 2f24cfbd9e3..859a4c65562 100644 --- a/spec/features/merge_request/user_awards_emoji_spec.rb +++ b/spec/features/merge_request/user_awards_emoji_spec.rb @@ -35,6 +35,14 @@ describe 'Merge request > User awards emoji', :js do expect(page).to have_selector('.emoji-menu', count: 1) end + + describe 'the project is archived' do + let(:project) { create(:project, :public, :repository, :archived) } + + it 'does not see award menu button' do + expect(page).not_to have_selector('.js-award-holder') + end + end end describe 'logged out' do diff --git a/spec/features/merge_request/user_cherry_picks_spec.rb b/spec/features/merge_request/user_cherry_picks_spec.rb index 494096b21c0..61d1bdaa95a 100644 --- a/spec/features/merge_request/user_cherry_picks_spec.rb +++ b/spec/features/merge_request/user_cherry_picks_spec.rb @@ -40,6 +40,14 @@ describe 'Merge request > User cherry-picks', :js do expect(page).to have_link 'Cherry-pick' end + + it 'hides the cherry pick button for an archived project' do + project.update!(archived: true) + + visit project_merge_request_path(project, merge_request) + + expect(page).not_to have_link 'Cherry-pick' + end end end end diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb index b4ad4b64d8e..0fd2840c426 100644 --- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb +++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb @@ -5,7 +5,7 @@ describe 'Merge request > User resolves diff notes and discussions', :js do let(:user) { project.creator } let(:guest) { create(:user) } let(:merge_request) { create(:merge_request_with_diffs, source_project: project, author: user, title: "Bug NS-04") } - let!(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request) } + let!(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, note: "| Markdown | Table |\n|-------|---------|\n| first | second |") } let(:path) { "files/ruby/popen.rb" } let(:position) do Gitlab::Diff::Position.new( @@ -111,6 +111,15 @@ describe 'Merge request > User resolves diff notes and discussions', :js do expect(page.find(".line-holder-placeholder")).to be_visible expect(page.find(".timeline-content #note_#{note.id}")).to be_visible end + + it 'renders tables in lazy-loaded resolved diff dicussions' do + find(".timeline-content .discussion[data-discussion-id='#{note.discussion_id}'] .discussion-toggle-button").click + + wait_for_requests + + expect(page.find(".timeline-content #note_#{note.id}")).not_to have_css(".line_holder") + expect(page.find(".timeline-content #note_#{note.id}")).to have_css("tr", count: 2) + end end describe 'side-by-side view' do diff --git a/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb b/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb index 565e375600b..3b6fffb7abd 100644 --- a/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb +++ b/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb @@ -27,6 +27,23 @@ describe 'Merge request > User scrolls to note on load', :js do expect(fragment_position_top).to be < (page_scroll_y + page_height) end + it 'renders un-collapsed notes with diff' do + page.current_window.resize_to(1000, 1000) + + visit 
"#{project_merge_request_path(project, merge_request)}#{fragment_id}" + + page.execute_script "window.scrollTo(0,0)" + + note_element = find(fragment_id) + note_container = note_element.ancestor('.js-toggle-container') + + expect(note_element.visible?).to eq true + + page.within note_container do + expect(page).not_to have_selector('.js-error-lazy-load-diff') + end + end + it 'expands collapsed notes' do visit "#{project_merge_request_path(project, merge_request)}#{collapsed_fragment_id}" note_element = find(collapsed_fragment_id) diff --git a/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb b/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb index a43ba05c64c..fd1629746ef 100644 --- a/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb +++ b/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb @@ -9,6 +9,7 @@ describe 'Merge request < User sees mini pipeline graph', :js do before do build.run + build.trace.set('hello') sign_in(user) visit_merge_request end @@ -26,15 +27,15 @@ describe 'Merge request < User sees mini pipeline graph', :js do let(:artifacts_file2) { fixture_file_upload(Rails.root.join('spec/fixtures/dk.png'), 'image/png') } before do - create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file1) - create(:ci_build, pipeline: pipeline, when: 'manual') + create(:ci_build, :success, :trace_artifact, pipeline: pipeline, legacy_artifacts_file: artifacts_file1) + create(:ci_build, :manual, pipeline: pipeline, when: 'manual') end it 'avoids repeated database queries' do before = ActiveRecord::QueryRecorder.new { visit_merge_request(format: :json, serializer: 'widget') } - create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file2) - create(:ci_build, pipeline: pipeline, when: 'manual') + create(:ci_build, :success, :trace_artifact, pipeline: pipeline, legacy_artifacts_file: artifacts_file2) + create(:ci_build, :manual, pipeline: pipeline, when: 'manual') after = ActiveRecord::QueryRecorder.new { visit_merge_request(format: :json, serializer: 'widget') } diff --git a/spec/features/merge_request/user_uses_slash_commands_spec.rb b/spec/features/merge_request/user_uses_slash_commands_spec.rb index bd739e69d6c..7f261b580f7 100644 --- a/spec/features/merge_request/user_uses_slash_commands_spec.rb +++ b/spec/features/merge_request/user_uses_slash_commands_spec.rb @@ -1,7 +1,7 @@ require 'rails_helper' describe 'Merge request > User uses quick actions', :js do - include QuickActionsHelpers + include Spec::Support::Helpers::Features::NotesHelpers let(:project) { create(:project, :public, :repository) } let(:user) { project.creator } @@ -33,7 +33,7 @@ describe 'Merge request > User uses quick actions', :js do describe 'toggling the WIP prefix in the title from note' do context 'when the current user can toggle the WIP prefix' do it 'adds the WIP: prefix to the title' do - write_note("/wip") + add_note("/wip") expect(page).not_to have_content '/wip' expect(page).to have_content 'Commands applied' @@ -44,7 +44,7 @@ describe 'Merge request > User uses quick actions', :js do it 'removes the WIP: prefix from the title' do merge_request.title = merge_request.wip_title merge_request.save - write_note("/wip") + add_note("/wip") expect(page).not_to have_content '/wip' expect(page).to have_content 'Commands applied' @@ -62,7 +62,7 @@ describe 'Merge request > User uses quick actions', :js do end it 'does not change the WIP prefix' do - write_note("/wip") + add_note("/wip") expect(page).not_to have_content 
'/wip' expect(page).not_to have_content 'Commands applied' @@ -75,7 +75,7 @@ describe 'Merge request > User uses quick actions', :js do describe 'merging the MR from the note' do context 'when the current user can merge the MR' do it 'merges the MR' do - write_note("/merge") + add_note("/merge") expect(page).to have_content 'Commands applied' @@ -90,7 +90,7 @@ describe 'Merge request > User uses quick actions', :js do end it 'does not merge the MR' do - write_note("/merge") + add_note("/merge") expect(page).not_to have_content 'Your commands have been executed!' @@ -107,7 +107,7 @@ describe 'Merge request > User uses quick actions', :js do end it 'does not merge the MR' do - write_note("/merge") + add_note("/merge") expect(page).not_to have_content 'Your commands have been executed!' @@ -118,7 +118,7 @@ describe 'Merge request > User uses quick actions', :js do describe 'adding a due date from note' do it 'does not recognize the command nor create a note' do - write_note('/due 2016-08-28') + add_note('/due 2016-08-28') expect(page).not_to have_content '/due 2016-08-28' end @@ -162,7 +162,7 @@ describe 'Merge request > User uses quick actions', :js do describe '/target_branch command from note' do context 'when the current user can change target branch' do it 'changes target branch from a note' do - write_note("message start \n/target_branch merge-test\n message end.") + add_note("message start \n/target_branch merge-test\n message end.") wait_for_requests expect(page).not_to have_content('/target_branch') @@ -173,7 +173,7 @@ describe 'Merge request > User uses quick actions', :js do end it 'does not fail when target branch does not exists' do - write_note('/target_branch totally_not_existing_branch') + add_note('/target_branch totally_not_existing_branch') expect(page).not_to have_content('/target_branch') @@ -190,7 +190,7 @@ describe 'Merge request > User uses quick actions', :js do end it 'does not change target branch' do - write_note('/target_branch merge-test') + add_note('/target_branch merge-test') expect(page).not_to have_content '/target_branch merge-test' diff --git a/spec/features/milestone_spec.rb b/spec/features/milestone_spec.rb index 19152bf1f0f..6c51e4bbe26 100644 --- a/spec/features/milestone_spec.rb +++ b/spec/features/milestone_spec.rb @@ -108,4 +108,18 @@ feature 'Milestone' do expect(page).to have_selector('.js-delete-milestone-button', count: 0) end end + + feature 'deprecation popover', :js do + it 'opens deprecation popover' do + milestone = create(:milestone, project: project) + + visit group_milestone_path(group, milestone, title: milestone.title) + + expect(page).to have_selector('.milestone-deprecation-message') + + find('.milestone-deprecation-message .js-popover-link').click + + expect(page).to have_selector('.milestone-deprecation-message .popover') + end + end end diff --git a/spec/features/milestones/show_spec.rb b/spec/features/milestones/show_spec.rb deleted file mode 100644 index 50c5e0bb65f..00000000000 --- a/spec/features/milestones/show_spec.rb +++ /dev/null @@ -1,26 +0,0 @@ -require 'rails_helper' - -describe 'Milestone show' do - let(:user) { create(:user) } - let(:project) { create(:project) } - let(:milestone) { create(:milestone, project: project) } - let(:labels) { create_list(:label, 2, project: project) } - let(:issue_params) { { project: project, assignees: [user], author: user, milestone: milestone, labels: labels } } - - before do - project.add_user(user, :developer) - sign_in(user) - end - - def visit_milestone - visit 
project_milestone_path(project, milestone) - end - - it 'avoids N+1 database queries' do - create(:labeled_issue, issue_params) - control = ActiveRecord::QueryRecorder.new { visit_milestone } - create_list(:labeled_issue, 10, issue_params) - - expect { visit_milestone }.not_to exceed_query_limit(control) - end -end diff --git a/spec/features/milestones/user_creates_milestone_spec.rb b/spec/features/milestones/user_creates_milestone_spec.rb new file mode 100644 index 00000000000..8fd057d587c --- /dev/null +++ b/spec/features/milestones/user_creates_milestone_spec.rb @@ -0,0 +1,29 @@ +require "rails_helper" + +describe "User creates milestone", :js do + set(:user) { create(:user) } + set(:project) { create(:project) } + + before do + project.add_developer(user) + sign_in(user) + + visit(new_project_milestone_path(project)) + end + + it "creates milestone" do + TITLE = "v2.3".freeze + + fill_in("Title", with: TITLE) + fill_in("Description", with: "# Description header") + click_button("Create milestone") + + expect(page).to have_content(TITLE) + .and have_content("Issues") + .and have_header_with_correct_id_and_link(1, "Description header", "description-header") + + visit(activity_project_path(project)) + + expect(page).to have_content("#{user.name} opened milestone") + end +end diff --git a/spec/features/milestones/user_deletes_milestone_spec.rb b/spec/features/milestones/user_deletes_milestone_spec.rb new file mode 100644 index 00000000000..414702daba4 --- /dev/null +++ b/spec/features/milestones/user_deletes_milestone_spec.rb @@ -0,0 +1,25 @@ +require "rails_helper" + +describe "User deletes milestone", :js do + set(:user) { create(:user) } + set(:project) { create(:project) } + set(:milestone) { create(:milestone, project: project) } + + before do + project.add_developer(user) + sign_in(user) + + visit(project_milestones_path(project)) + end + + it "deletes milestone" do + click_button("Delete") + click_button("Delete milestone") + + expect(page).to have_content("No milestones to show") + + visit(activity_project_path(project)) + + expect(page).to have_content("#{user.name} destroyed milestone") + end +end diff --git a/spec/features/milestones/user_views_milestone_spec.rb b/spec/features/milestones/user_views_milestone_spec.rb new file mode 100644 index 00000000000..83d8e2ff9e9 --- /dev/null +++ b/spec/features/milestones/user_views_milestone_spec.rb @@ -0,0 +1,31 @@ +require "rails_helper" + +describe "User views milestone" do + set(:user) { create(:user) } + set(:project) { create(:project) } + set(:milestone) { create(:milestone, project: project) } + set(:labels) { create_list(:label, 2, project: project) } + + before do + project.add_developer(user) + sign_in(user) + end + + it "avoids N+1 database queries" do + ISSUE_PARAMS = { project: project, assignees: [user], author: user, milestone: milestone, labels: labels }.freeze + + create(:labeled_issue, ISSUE_PARAMS) + + control = ActiveRecord::QueryRecorder.new { visit_milestone } + + create(:labeled_issue, ISSUE_PARAMS) + + expect { visit_milestone }.not_to exceed_query_limit(control) + end + + private + + def visit_milestone + visit(project_milestone_path(project, milestone)) + end +end diff --git a/spec/features/milestones/user_views_milestones_spec.rb b/spec/features/milestones/user_views_milestones_spec.rb new file mode 100644 index 00000000000..bebe40f73fd --- /dev/null +++ b/spec/features/milestones/user_views_milestones_spec.rb @@ -0,0 +1,35 @@ +require "rails_helper" + +describe "User views milestones" do + set(:user) { 
create(:user) } + set(:project) { create(:project) } + set(:milestone) { create(:milestone, project: project) } + + before do + project.add_developer(user) + sign_in(user) + + visit(project_milestones_path(project)) + end + + it "shows milestone" do + expect(page).to have_content(milestone.title) + .and have_content(milestone.expires_at) + .and have_content("Issues") + end + + context "with issues" do + set(:issue) { create(:issue, project: project, milestone: milestone) } + set(:closed_issue) { create(:closed_issue, project: project, milestone: milestone) } + + it "opens milestone" do + click_link(milestone.title) + + expect(current_path).to eq(project_milestone_path(project, milestone)) + expect(page).to have_content(milestone.title) + .and have_selector("#tab-issues li.issuable-row", count: 2) + .and have_content(issue.title) + .and have_content(closed_issue.title) + end + end +end diff --git a/spec/features/profile_spec.rb b/spec/features/profile_spec.rb index 0848857ed1e..15dcb30cbdd 100644 --- a/spec/features/profile_spec.rb +++ b/spec/features/profile_spec.rb @@ -97,9 +97,13 @@ describe 'Profile account page', :js do end it 'changes my username' do - fill_in 'user_username', with: 'new-username' + fill_in 'username-change-input', with: 'new-username' - click_button('Update username') + page.find('[data-target="#username-change-confirmation-modal"]').click + + page.within('.modal') do + find('.js-modal-primary-action').click + end expect(page).to have_content('new-username') end diff --git a/spec/features/profiles/account_spec.rb b/spec/features/profiles/account_spec.rb index 171e061e60e..215b658eb7b 100644 --- a/spec/features/profiles/account_spec.rb +++ b/spec/features/profiles/account_spec.rb @@ -1,6 +1,6 @@ require 'rails_helper' -feature 'Profile > Account' do +feature 'Profile > Account', :js do given(:user) { create(:user, username: 'foo') } before do @@ -43,14 +43,14 @@ feature 'Profile > Account' do update_username(new_username) visit new_project_path expect(current_path).to eq(new_project_path) - expect(find('.breadcrumbs-sub-title')).to have_content(project.path) + expect(find('.breadcrumbs-sub-title')).to have_content('Details') end scenario 'the old project path redirects to the new path' do update_username(new_username) visit old_project_path expect(current_path).to eq(new_project_path) - expect(find('.breadcrumbs-sub-title')).to have_content(project.path) + expect(find('.breadcrumbs-sub-title')).to have_content('Details') end end end @@ -59,6 +59,12 @@ end def update_username(new_username) allow(user.namespace).to receive(:move_dir) visit profile_account_path - fill_in 'user_username', with: new_username - click_button 'Update username' + + fill_in 'username-change-input', with: new_username + + page.find('[data-target="#username-change-confirmation-modal"]').click + + page.within('.modal') do + find('.js-modal-primary-action').click + end end diff --git a/spec/features/profiles/user_visits_notifications_tab_spec.rb b/spec/features/profiles/user_visits_notifications_tab_spec.rb index 1952fdae798..95953fbcfac 100644 --- a/spec/features/profiles/user_visits_notifications_tab_spec.rb +++ b/spec/features/profiles/user_visits_notifications_tab_spec.rb @@ -16,6 +16,6 @@ feature 'User visits the notifications tab', :js do first('#notifications-button').click click_link('On mention') - expect(page).to have_content('On mention') + expect(page).to have_selector('#notifications-button', text: 'On mention') end end diff --git a/spec/features/projects/activity/rss_spec.rb 
b/spec/features/projects/activity/rss_spec.rb index 2693e539268..cd1cfe07998 100644 --- a/spec/features/projects/activity/rss_spec.rb +++ b/spec/features/projects/activity/rss_spec.rb @@ -1,8 +1,8 @@ require 'spec_helper' feature 'Project Activity RSS' do - let(:user) { create(:user) } - let(:project) { create(:project, visibility_level: Gitlab::VisibilityLevel::PUBLIC) } + let(:project) { create(:project, :public) } + let(:user) { project.owner } let(:path) { activity_project_path(project) } before do @@ -11,8 +11,7 @@ feature 'Project Activity RSS' do context 'when signed in' do before do - project.add_developer(user) - sign_in(user) + sign_in(project.owner) visit path end diff --git a/spec/features/projects/activity/user_sees_activity_spec.rb b/spec/features/projects/activity/user_sees_activity_spec.rb new file mode 100644 index 00000000000..644a837dc14 --- /dev/null +++ b/spec/features/projects/activity/user_sees_activity_spec.rb @@ -0,0 +1,21 @@ +require 'spec_helper' + +feature 'Projects > Activity > User sees activity' do + let(:project) { create(:project, :repository, :public) } + let(:user) { project.creator } + + before do + event = create(:push_event, project: project, author: user) + create(:push_event_payload, + event: event, + action: :created, + commit_to: '6d394385cf567f80a8fd85055db1ab4c5295806f', + ref: 'fix', + commit_count: 1) + visit activity_project_path(project) + end + + it 'shows the last push in the activity page', :js do + expect(page).to have_content "#{user.name} pushed new branch fix" + end +end diff --git a/spec/features/projects/awards/user_interacts_with_awards_in_issue_spec.rb b/spec/features/projects/awards/user_interacts_with_awards_in_issue_spec.rb index adff0a10f0e..12e07647ecd 100644 --- a/spec/features/projects/awards/user_interacts_with_awards_in_issue_spec.rb +++ b/spec/features/projects/awards/user_interacts_with_awards_in_issue_spec.rb @@ -99,6 +99,74 @@ describe 'User interacts with awards in an issue', :js do click_button('Comment') end - expect(page).to have_selector('gl-emoji[data-name="smile"]') + expect(page).to have_emoji('smile') + end + + context 'when a project is archived' do + let(:project) { create(:project, :archived) } + + it 'hides the add award button' do + page.within('.awards') do + expect(page).not_to have_css('.js-add-award') + end + end + end + + context 'awards on a note' do + let!(:note) { create(:note, noteable: issue, project: issue.project) } + let!(:award_emoji) { create(:award_emoji, awardable: note, name: '100') } + + it 'shows the award on the note' do + page.within('.note-awards') do + expect(page).to have_emoji('100') + end + end + + it 'allows adding a vote to an award' do + page.within('.note-awards') do + find('gl-emoji[data-name="100"]').click + end + wait_for_requests + + expect(note.reload.award_emoji.size).to eq(2) + end + + it 'allows adding a new emoji' do + page.within('.note-actions') do + find('a.js-add-award').click + end + page.within('.emoji-menu-content') do + find('gl-emoji[data-name="8ball"]').click + end + wait_for_requests + + page.within('.note-awards') do + expect(page).to have_emoji('8ball') + end + expect(note.reload.award_emoji.size).to eq(2) + end + + context 'when the project is archived' do + let(:project) { create(:project, :archived) } + + it 'hides the buttons for adding new emoji' do + page.within('.note-awards') do + expect(page).not_to have_css('.award-menu-holder') + end + + page.within('.note-actions') do + expect(page).not_to have_css('a.js-add-award') + end + end + + it 'does 
not allow toggling existing emoji' do + page.within('.note-awards') do + find('gl-emoji[data-name="100"]').click + end + wait_for_requests + + expect(note.reload.award_emoji.size).to eq(1) + end + end end end diff --git a/spec/features/projects/badges/list_spec.rb b/spec/features/projects/badges/list_spec.rb index c705e479690..0abef4bc447 100644 --- a/spec/features/projects/badges/list_spec.rb +++ b/spec/features/projects/badges/list_spec.rb @@ -6,7 +6,7 @@ feature 'list of badges' do project = create(:project, :repository) project.add_master(user) sign_in(user) - visit project_pipelines_settings_path(project) + visit project_settings_ci_cd_path(project) end scenario 'user wants to see build status badge' do diff --git a/spec/features/projects/branches/user_creates_branch_spec.rb b/spec/features/projects/branches/user_creates_branch_spec.rb new file mode 100644 index 00000000000..b706ad64954 --- /dev/null +++ b/spec/features/projects/branches/user_creates_branch_spec.rb @@ -0,0 +1,46 @@ +require "spec_helper" + +describe "User creates branch", :js do + include Spec::Support::Helpers::Features::BranchesHelpers + + let(:user) { create(:user) } + let(:project) { create(:project, :repository) } + + before do + project.add_developer(user) + sign_in(user) + + visit(new_project_branch_path(project)) + end + + it "creates new branch" do + BRANCH_NAME = "deploy_keys".freeze + + create_branch(BRANCH_NAME) + + expect(page).to have_content(BRANCH_NAME) + end + + context "when branch name is invalid" do + it "does not create new branch" do + INVALID_BRANCH_NAME = "1.0 stable".freeze + + fill_in("branch_name", with: INVALID_BRANCH_NAME) + page.find("body").click # defocus the branch_name input + + select_branch("master") + click_button("Create branch") + + expect(page).to have_content("Branch name is invalid") + expect(page).to have_content("can't contain spaces") + end + end + + context "when branch name already exists" do + it "does not create new branch" do + create_branch("master") + + expect(page).to have_content("Branch already exists") + end + end +end diff --git a/spec/features/projects/branches/user_deletes_branch_spec.rb b/spec/features/projects/branches/user_deletes_branch_spec.rb new file mode 100644 index 00000000000..96f215e1606 --- /dev/null +++ b/spec/features/projects/branches/user_deletes_branch_spec.rb @@ -0,0 +1,23 @@ +require "spec_helper" + +describe "User deletes branch", :js do + set(:user) { create(:user) } + set(:project) { create(:project, :repository) } + + before do + project.add_developer(user) + sign_in(user) + + visit(project_branches_path(project)) + end + + it "deletes branch" do + fill_in("branch-search", with: "improve/awesome").native.send_keys(:enter) + + page.within(".js-branch-improve\\/awesome") do + accept_alert { find(".btn-remove").click } + end + + expect(page).to have_css(".js-branch-improve\\/awesome", visible: :hidden) + end +end diff --git a/spec/features/projects/branches/user_views_branches_spec.rb b/spec/features/projects/branches/user_views_branches_spec.rb new file mode 100644 index 00000000000..62ae793151c --- /dev/null +++ b/spec/features/projects/branches/user_views_branches_spec.rb @@ -0,0 +1,34 @@ +require "spec_helper" + +describe "User views branches" do + set(:project) { create(:project, :repository) } + set(:user) { project.owner } + + before do + sign_in(user) + end + + context "all branches" do + before do + visit(project_branches_path(project)) + end + + it "shows branches" do + expect(page).to have_content("Branches").and 
have_content("master") + end + end + + context "protected branches" do + set(:protected_branch) { create(:protected_branch, project: project) } + + before do + visit(project_protected_branches_path(project)) + end + + it "shows branches" do + page.within(".protected-branches-list") do + expect(page).to have_content(protected_branch.name).and have_no_content("master") + end + end + end +end diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb index 2a9d9e6416c..b7ce1b9993a 100644 --- a/spec/features/projects/branches_spec.rb +++ b/spec/features/projects/branches_spec.rb @@ -195,6 +195,26 @@ describe 'Branches' do expect(page).to have_content("Protected branches can be managed in project settings") end end + + it 'shows the merge request button' do + visit project_branches_path(project) + + page.within first('.all-branches li') do + expect(page).to have_content 'Merge request' + end + end + + context 'when the project is archived' do + let(:project) { create(:project, :public, :repository, :archived) } + + it 'does not show the merge request button when the project is archived' do + visit project_branches_path(project) + + page.within first('.all-branches li') do + expect(page).not_to have_content 'Merge request' + end + end + end end context 'logged out' do @@ -204,7 +224,7 @@ describe 'Branches' do it 'does not show merge request button' do page.within first('.all-branches li') do - expect(page).not_to have_content 'Merge Request' + expect(page).not_to have_content 'Merge request' end end end diff --git a/spec/features/ci_lint_spec.rb b/spec/features/projects/ci/lint_spec.rb index 220b934154e..313950072e7 100644 --- a/spec/features/ci_lint_spec.rb +++ b/spec/features/projects/ci/lint_spec.rb @@ -1,10 +1,14 @@ require 'spec_helper' describe 'CI Lint', :js do + let(:project) { create(:project, :repository) } + let(:user) { create(:user) } + before do - sign_in(create(:user)) + project.add_developer(user) + sign_in(user) - visit ci_lint_path + visit project_ci_lint_path(project) find('#ci-editor') execute_script("ace.edit('ci-editor').setValue(#{yaml_content.to_json});") diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb index 4d47cdb500c..dfe8e02dce0 100644 --- a/spec/features/projects/clusters/gcp_spec.rb +++ b/spec/features/projects/clusters/gcp_spec.rb @@ -33,7 +33,7 @@ feature 'Gcp Cluster', :js do visit project_clusters_path(project) click_link 'Add Kubernetes cluster' - click_link 'Create on GKE' + click_link 'Create on Google Kubernetes Engine' end context 'when user filled form with valid parameters' do @@ -139,7 +139,7 @@ feature 'Gcp Cluster', :js do visit project_clusters_path(project) click_link 'Add Kubernetes cluster' - click_link 'Create on GKE' + click_link 'Create on Google Kubernetes Engine' fill_in 'cluster_provider_gcp_attributes_gcp_project_id', with: 'gcp-project-123' fill_in 'cluster_name', with: 'dev-cluster' @@ -159,7 +159,7 @@ feature 'Gcp Cluster', :js do visit project_clusters_path(project) click_link 'Add Kubernetes cluster' - click_link 'Create on GKE' + click_link 'Create on Google Kubernetes Engine' fill_in 'cluster_provider_gcp_attributes_gcp_project_id', with: 'gcp-project-123' fill_in 'cluster_name', with: 'dev-cluster' @@ -177,7 +177,7 @@ feature 'Gcp Cluster', :js do visit project_clusters_path(project) click_link 'Add Kubernetes cluster' - click_link 'Create on GKE' + click_link 'Create on Google Kubernetes Engine' end it 'user sees a login page' do diff --git 
a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb index bd9f7745cf8..a251a2f4e52 100644 --- a/spec/features/projects/clusters_spec.rb +++ b/spec/features/projects/clusters_spec.rb @@ -83,7 +83,7 @@ feature 'Clusters', :js do visit project_clusters_path(project) click_link 'Add Kubernetes cluster' - click_link 'Create on GKE' + click_link 'Create on Google Kubernetes Engine' end it 'user sees a login page' do diff --git a/spec/features/projects/commit/cherry_pick_spec.rb b/spec/features/projects/commit/cherry_pick_spec.rb index c4c399e3058..1df45865d6f 100644 --- a/spec/features/projects/commit/cherry_pick_spec.rb +++ b/spec/features/projects/commit/cherry_pick_spec.rb @@ -89,4 +89,15 @@ describe 'Cherry-pick Commits' do expect(page).to have_content('The commit has been successfully cherry-picked.') end end + + context 'when the project is archived' do + let(:project) { create(:project, :repository, :archived, namespace: group) } + + it 'does not show the cherry-pick link' do + find('.header-action-buttons a.dropdown-toggle').click + + expect(page).not_to have_text("Cherry-pick") + expect(page).not_to have_css("a[href='#modal-cherry-pick-commit']") + end + end end diff --git a/spec/features/projects/commit/user_comments_on_commit_spec.rb b/spec/features/projects/commit/user_comments_on_commit_spec.rb new file mode 100644 index 00000000000..5174f793367 --- /dev/null +++ b/spec/features/projects/commit/user_comments_on_commit_spec.rb @@ -0,0 +1,110 @@ +require "spec_helper" + +describe "User comments on commit", :js do + include Spec::Support::Helpers::Features::NotesHelpers + include RepoHelpers + + let(:project) { create(:project, :repository) } + let(:user) { create(:user) } + + COMMENT_TEXT = "XML attached".freeze + + before do + sign_in(user) + project.add_developer(user) + + visit(project_commit_path(project, sample_commit.id)) + end + + context "when adding new comment" do + it "adds comment" do + EMOJI = ":+1:".freeze + + page.within(".js-main-target-form") do + expect(page).not_to have_link("Cancel") + + fill_in("note[note]", with: "#{COMMENT_TEXT} #{EMOJI}") + + # Check on `Preview` tab + click_link("Preview") + + expect(find(".js-md-preview")).to have_content(COMMENT_TEXT).and have_css("gl-emoji") + expect(page).not_to have_css(".js-note-text") + + # Check on `Write` tab + click_link("Write") + + expect(page).to have_field("note[note]", with: "#{COMMENT_TEXT} #{EMOJI}") + + # Submit comment from the `Preview` tab to get rid of a separate `it` block + # which would specially tests if everything gets cleared from the note form. 
+ click_link("Preview") + click_button("Comment") + end + + wait_for_requests + + page.within(".note") do + expect(page).to have_content(COMMENT_TEXT).and have_css("gl-emoji") + end + + page.within(".js-main-target-form") do + expect(page).to have_field("note[note]", with: "").and have_no_css(".js-md-preview") + end + end + end + + context "when editing comment" do + before do + add_note(COMMENT_TEXT) + end + + it "edits comment" do + NEW_COMMENT_TEXT = "+1 Awesome!".freeze + + page.within(".main-notes-list") do + note = find(".note") + note.hover + + note.find(".js-note-edit").click + end + + page.find(".current-note-edit-form textarea") + + page.within(".current-note-edit-form") do + fill_in("note[note]", with: NEW_COMMENT_TEXT) + click_button("Save comment") + end + + wait_for_requests + + page.within(".note") do + expect(page).to have_content(NEW_COMMENT_TEXT) + end + end + end + + context "when deleting comment" do + before do + add_note(COMMENT_TEXT) + end + + it "deletes comment" do + page.within(".note") do + expect(page).to have_content(COMMENT_TEXT) + end + + page.within(".main-notes-list") do + note = find(".note") + note.hover + + find(".more-actions").click + find(".more-actions .dropdown-menu li", match: :first) + + accept_confirm { find(".js-note-delete").click } + end + + expect(page).not_to have_css(".note") + end + end +end diff --git a/spec/features/projects/commit/user_reverts_commit_spec.rb b/spec/features/projects/commit/user_reverts_commit_spec.rb index 221f1d7757e..42844a03ea6 100644 --- a/spec/features/projects/commit/user_reverts_commit_spec.rb +++ b/spec/features/projects/commit/user_reverts_commit_spec.rb @@ -10,13 +10,16 @@ describe 'User reverts a commit', :js do sign_in(user) visit(project_commit_path(project, sample_commit.id)) + end + def click_revert find('.header-action-buttons .dropdown').click find('a[href="#modal-revert-commit"]').click end context 'without creating a new merge request' do before do + click_revert page.within('#modal-revert-commit') do uncheck('create_merge_request') click_button('Revert') @@ -44,6 +47,10 @@ describe 'User reverts a commit', :js do end context 'with creating a new merge request' do + before do + click_revert + end + it 'reverts a commit' do page.within('#modal-revert-commit') do click_button('Revert') @@ -53,4 +60,14 @@ describe 'User reverts a commit', :js do expect(page).to have_content("From revert-#{Commit.truncate_sha(sample_commit.id)} into master") end end + + context 'when the project is archived' do + let(:project) { create(:project, :repository, :archived, namespace: user.namespace) } + + it 'does not show the revert link' do + find('.header-action-buttons .dropdown').click + + expect(page).not_to have_link('Revert') + end + end end diff --git a/spec/features/projects/edit_spec.rb b/spec/features/projects/edit_spec.rb deleted file mode 100644 index 1d4b4d0fdca..00000000000 --- a/spec/features/projects/edit_spec.rb +++ /dev/null @@ -1,62 +0,0 @@ -require 'rails_helper' - -feature 'Project edit', :js do - let(:admin) { create(:admin) } - let(:user) { create(:user) } - let(:project) { create(:project) } - - context 'feature visibility' do - before do - project.add_master(user) - sign_in(user) - - visit edit_project_path(project) - end - - context 'merge requests select' do - it 'hides merge requests section' do - find('.project-feature-controls[data-for="project[project_feature_attributes][merge_requests_access_level]"] .project-feature-toggle').click - - expect(page).to have_selector('.merge-requests-feature', 
visible: false) - end - - context 'given project with merge_requests_disabled access level' do - let(:project) { create(:project, :merge_requests_disabled) } - - it 'hides merge requests section' do - expect(page).to have_selector('.merge-requests-feature', visible: false) - end - end - end - - context 'builds select' do - it 'hides builds select section' do - find('.project-feature-controls[data-for="project[project_feature_attributes][builds_access_level]"] .project-feature-toggle').click - - expect(page).to have_selector('.builds-feature', visible: false) - end - - context 'given project with builds_disabled access level' do - let(:project) { create(:project, :builds_disabled) } - - it 'hides builds select section' do - expect(page).to have_selector('.builds-feature', visible: false) - end - end - end - end - - context 'LFS enabled setting' do - before do - sign_in(admin) - end - - it 'displays the correct elements' do - allow(Gitlab.config.lfs).to receive(:enabled).and_return(true) - visit edit_project_path(project) - - expect(page).to have_content('Git Large File Storage') - expect(page).to have_selector('input[name="project[lfs_enabled]"] + button', visible: true) - end - end -end diff --git a/spec/features/projects/files/browse_files_spec.rb b/spec/features/projects/files/browse_files_spec.rb deleted file mode 100644 index 2c38c380d9d..00000000000 --- a/spec/features/projects/files/browse_files_spec.rb +++ /dev/null @@ -1,46 +0,0 @@ -require 'spec_helper' - -feature 'user browses project', :js do - let(:project) { create(:project, :repository) } - let(:user) { create(:user) } - - before do - project.add_master(user) - sign_in(user) - visit project_tree_path(project, project.default_branch) - end - - scenario "can see blame of '.gitignore'" do - click_link ".gitignore" - click_link 'Blame' - - expect(page).to have_content "*.rb" - expect(page).to have_content "Dmitriy Zaporozhets" - expect(page).to have_content "Initial commit" - end - - scenario 'can see raw content of LFS pointer with LFS disabled' do - allow_any_instance_of(Project).to receive(:lfs_enabled?).and_return(false) - click_link 'files' - click_link 'lfs' - click_link 'lfs_object.iso' - wait_for_requests - - expect(page).not_to have_content 'Download (1.5 MB)' - expect(page).to have_content 'version https://git-lfs.github.com/spec/v1' - expect(page).to have_content 'oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897' - expect(page).to have_content 'size 1575078' - end - - scenario 'can see last commit for current directory' do - last_commit = project.repository.last_commit_for_path(project.default_branch, 'files') - - click_link 'files' - wait_for_requests - - page.within('.blob-commit-info') do - expect(page).to have_content last_commit.short_id - expect(page).to have_content last_commit.author_name - end - end -end diff --git a/spec/features/projects/files/creating_a_file_spec.rb b/spec/features/projects/files/creating_a_file_spec.rb deleted file mode 100644 index 8d982636525..00000000000 --- a/spec/features/projects/files/creating_a_file_spec.rb +++ /dev/null @@ -1,37 +0,0 @@ -require 'spec_helper' - -feature 'User wants to create a file' do - let(:project) { create(:project, :repository) } - let(:user) { create(:user) } - - background do - project.add_master(user) - sign_in user - visit project_new_blob_path(project, project.default_branch) - end - - def submit_new_file(options) - file_name = find('#file_name') - file_name.set options[:file_name] || 'README.md' - - file_content = 
find('#file-content', visible: false) - file_content.set options[:file_content] || 'Some content' - - click_button 'Commit changes' - end - - scenario 'file name contains Chinese characters' do - submit_new_file(file_name: '测试.md') - expect(page).to have_content 'The file has been successfully created.' - end - - scenario 'directory name contains Chinese characters' do - submit_new_file(file_name: '中文/测试.md') - expect(page).to have_content 'The file has been successfully created' - end - - scenario 'file name contains directory traversal' do - submit_new_file(file_name: '../README.md') - expect(page).to have_content 'Path cannot include directory traversal' - end -end diff --git a/spec/features/projects/files/dockerfile_dropdown_spec.rb b/spec/features/projects/files/dockerfile_dropdown_spec.rb index f4a39e331fd..004585f7c9e 100644 --- a/spec/features/projects/files/dockerfile_dropdown_spec.rb +++ b/spec/features/projects/files/dockerfile_dropdown_spec.rb @@ -1,22 +1,15 @@ require 'spec_helper' -require 'fileutils' -feature 'User wants to add a Dockerfile file' do +describe 'Projects > Files > User wants to add a Dockerfile file' do before do - user = create(:user) project = create(:project, :repository) - project.add_master(user) - - sign_in user - + sign_in project.owner visit project_new_blob_path(project, 'master', file_name: 'Dockerfile') end - scenario 'user can see Dockerfile dropdown' do + it 'user can pick a Dockerfile file from the dropdown', :js do expect(page).to have_css('.dockerfile-selector') - end - scenario 'user can pick a Dockerfile file from the dropdown', :js do find('.js-dockerfile-selector').click wait_for_requests diff --git a/spec/features/projects/files/download_buttons_spec.rb b/spec/features/projects/files/download_buttons_spec.rb index 2101627f324..03cb3530e2b 100644 --- a/spec/features/projects/files/download_buttons_spec.rb +++ b/spec/features/projects/files/download_buttons_spec.rb @@ -1,42 +1,36 @@ require 'spec_helper' -feature 'Download buttons in files tree' do - given(:user) { create(:user) } - given(:role) { :developer } - given(:status) { 'success' } - given(:project) { create(:project, :repository) } +describe 'Projects > Files > Download buttons in files tree' do + let(:project) { create(:project, :repository) } + let(:user) { project.creator } - given(:pipeline) do + let(:pipeline) do create(:ci_pipeline, project: project, sha: project.commit.sha, ref: project.default_branch, - status: status) + status: 'success') end - given!(:build) do + let!(:build) do create(:ci_build, :success, :artifacts, pipeline: pipeline, status: pipeline.status, name: 'build') end - background do + before do sign_in(user) - project.add_role(user, role) - end + project.add_developer(user) - describe 'when files tree' do - context 'with artifacts' do - before do - visit project_tree_path(project, project.default_branch) - end + visit project_tree_path(project, project.default_branch) + end - scenario 'shows download artifacts button' do - href = latest_succeeded_project_artifacts_path(project, "#{project.default_branch}/download", job: 'build') + context 'with artifacts' do + it 'shows download artifacts button' do + href = latest_succeeded_project_artifacts_path(project, "#{project.default_branch}/download", job: 'build') - expect(page).to have_link "Download '#{build.name}'", href: href - end + expect(page).to have_link "Download '#{build.name}'", href: href end end end diff --git a/spec/features/projects/files/edit_file_soft_wrap_spec.rb 
b/spec/features/projects/files/edit_file_soft_wrap_spec.rb index 8d32ada5795..41af70d8ebc 100644 --- a/spec/features/projects/files/edit_file_soft_wrap_spec.rb +++ b/spec/features/projects/files/edit_file_soft_wrap_spec.rb @@ -1,10 +1,9 @@ require 'spec_helper' -feature 'User uses soft wrap whilst editing file', :js do +describe 'Projects > Files > User uses soft wrap whilst editing file', :js do before do - user = create(:user) project = create(:project, :repository) - project.add_master(user) + user = project.owner sign_in user visit project_new_blob_path(project, 'master', file_name: 'test_file-name') page.within('.file-editor.code') do @@ -23,7 +22,7 @@ feature 'User uses soft wrap whilst editing file', :js do let(:toggle_button) { find('.soft-wrap-toggle') } - scenario 'user clicks the "Soft wrap" button and then "No wrap" button' do + it 'user clicks the "Soft wrap" button and then "No wrap" button' do wrapped_content_width = get_content_width toggle_button.click expect(toggle_button).to have_content 'No wrap' diff --git a/spec/features/projects/files/editing_a_file_spec.rb b/spec/features/projects/files/editing_a_file_spec.rb index d874cdbff8d..4074e67e2d2 100644 --- a/spec/features/projects/files/editing_a_file_spec.rb +++ b/spec/features/projects/files/editing_a_file_spec.rb @@ -1,8 +1,8 @@ require 'spec_helper' -feature 'User wants to edit a file' do +describe 'Projects > Files > User wants to edit a file' do let(:project) { create(:project, :repository) } - let(:user) { create(:user) } + let(:user) { project.owner } let(:commit_params) do { start_branch: project.default_branch, @@ -15,14 +15,13 @@ feature 'User wants to edit a file' do } end - background do - project.add_master(user) + before do sign_in user visit project_edit_blob_path(project, File.join(project.default_branch, '.gitignore')) end - scenario 'file has been updated since the user opened the edit page' do + it 'file has been updated since the user opened the edit page' do Files::UpdateService.new(project, user, commit_params).execute click_button 'Commit changes' diff --git a/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb b/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb index ead9f7e9168..b6dbf76bc9b 100644 --- a/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb +++ b/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb @@ -1,16 +1,15 @@ require 'spec_helper' -feature 'User views files page' do - let(:user) { create(:user) } +describe 'Projects > Files > User views files page' do let(:project) { create(:forked_project_with_submodules) } + let(:user) { project.owner } before do - project.add_master(user) sign_in user visit project_tree_path(project, project.repository.root_ref) end - scenario 'user sees folders and submodules sorted together, followed by files' do + it 'user sees folders and submodules sorted together, followed by files' do rows = all('td.tree-item-file-name').map(&:text) tree = project.repository.tree diff --git a/spec/features/projects/files/find_file_keyboard_spec.rb b/spec/features/projects/files/find_file_keyboard_spec.rb index e9ff06c72d8..cd0235f2b9e 100644 --- a/spec/features/projects/files/find_file_keyboard_spec.rb +++ b/spec/features/projects/files/find_file_keyboard_spec.rb @@ -1,11 +1,10 @@ require 'spec_helper' -feature 'Find file keyboard shortcuts', :js do - let(:user) { create(:user) } +describe 'Projects > Files > Find file keyboard shortcuts', :js do let(:project) { create(:project, 
:repository) } + let(:user) { project.owner } before do - project.add_master(user) sign_in user visit project_find_file_path(project, project.repository.root_ref) diff --git a/spec/features/projects/files/gitignore_dropdown_spec.rb b/spec/features/projects/files/gitignore_dropdown_spec.rb index 79f3fd09b48..9fa4c053a40 100644 --- a/spec/features/projects/files/gitignore_dropdown_spec.rb +++ b/spec/features/projects/files/gitignore_dropdown_spec.rb @@ -1,25 +1,24 @@ require 'spec_helper' -feature 'User wants to add a .gitignore file' do +describe 'Projects > Files > User wants to add a .gitignore file' do before do - user = create(:user) project = create(:project, :repository) - project.add_master(user) - sign_in user + sign_in project.owner visit project_new_blob_path(project, 'master', file_name: '.gitignore') end - scenario 'user can see .gitignore dropdown' do + it 'user can pick a .gitignore file from the dropdown', :js do expect(page).to have_css('.gitignore-selector') - end - scenario 'user can pick a .gitignore file from the dropdown', :js do find('.js-gitignore-selector').click + wait_for_requests + within '.gitignore-selector' do find('.dropdown-input-field').set('rails') find('.dropdown-content li', text: 'Rails').click end + wait_for_requests expect(page).to have_css('.gitignore-selector .dropdown-toggle-text', text: 'Rails') diff --git a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb index db6c67b802e..53aff183562 100644 --- a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb +++ b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb @@ -1,25 +1,24 @@ require 'spec_helper' -feature 'User wants to add a .gitlab-ci.yml file' do +describe 'Projects > Files > User wants to add a .gitlab-ci.yml file' do before do - user = create(:user) project = create(:project, :repository) - project.add_master(user) - sign_in user + sign_in project.owner visit project_new_blob_path(project, 'master', file_name: '.gitlab-ci.yml') end - scenario 'user can see .gitlab-ci.yml dropdown' do + it 'user can pick a template from the dropdown', :js do expect(page).to have_css('.gitlab-ci-yml-selector') - end - scenario 'user can pick a template from the dropdown', :js do find('.js-gitlab-ci-yml-selector').click + wait_for_requests + within '.gitlab-ci-yml-selector' do find('.dropdown-input-field').set('Jekyll') find('.dropdown-content li', text: 'Jekyll').click end + wait_for_requests expect(page).to have_css('.gitlab-ci-yml-selector .dropdown-toggle-text', text: 'Jekyll') diff --git a/spec/features/projects/files/project_owner_creates_license_file_spec.rb b/spec/features/projects/files/project_owner_creates_license_file_spec.rb index 07599600876..b410199fd1f 100644 --- a/spec/features/projects/files/project_owner_creates_license_file_spec.rb +++ b/spec/features/projects/files/project_owner_creates_license_file_spec.rb @@ -1,17 +1,17 @@ require 'spec_helper' -feature 'project owner creates a license file', :js do - let(:project_master) { create(:user) } +describe 'Projects > Files > Project owner creates a license file', :js do let(:project) { create(:project, :repository) } - background do + let(:project_master) { project.owner } + + before do project.repository.delete_file(project_master, 'LICENSE', message: 'Remove LICENSE', branch_name: 'master') - project.add_master(project_master) sign_in(project_master) visit project_path(project) end - scenario 'project master creates a license file manually from a template' 
do + it 'project master creates a license file manually from a template' do visit project_tree_path(project, project.repository.root_ref) find('.add-to-tree').click click_link 'New file' @@ -35,7 +35,7 @@ feature 'project owner creates a license file', :js do expect(page).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}") end - scenario 'project master creates a license file from the "Add license" link' do + it 'project master creates a license file from the "Add license" link' do click_link 'Add License' expect(page).to have_content('New file') diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb index 7f1d1934103..53d8ace7c94 100644 --- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb +++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb @@ -1,15 +1,14 @@ require 'spec_helper' -feature 'project owner sees a link to create a license file in empty project', :js do - let(:project_master) { create(:user) } +describe 'Projects > Files > Project owner sees a link to create a license file in empty project', :js do let(:project) { create(:project_empty_repo) } + let(:project_master) { project.owner } - background do - project.add_master(project_master) + before do sign_in(project_master) end - scenario 'project master creates a license file from a template' do + it 'project master creates a license file from a template' do visit project_path(project) click_on 'Add License' expect(page).to have_content('New file') diff --git a/spec/features/projects/files/template_selector_menu_spec.rb b/spec/features/projects/files/template_selector_menu_spec.rb new file mode 100644 index 00000000000..b549a69ddf3 --- /dev/null +++ b/spec/features/projects/files/template_selector_menu_spec.rb @@ -0,0 +1,66 @@ +require 'spec_helper' + +feature 'Template selector menu', :js do + let(:project) { create(:project, :repository) } + let(:user) { create(:user) } + + before do + project.add_master(user) + sign_in user + end + + context 'editing a non-matching file' do + before do + create_and_edit_file('README.md') + end + + scenario 'is not displayed' do + check_template_selector_menu_display(false) + end + + context 'user toggles preview' do + before do + click_link 'Preview' + end + + scenario 'template selector menu is not displayed' do + check_template_selector_menu_display(false) + click_link 'Write' + check_template_selector_menu_display(false) + end + end + end + + context 'editing a matching file' do + before do + visit project_edit_blob_path(project, File.join(project.default_branch, 'LICENSE')) + end + + scenario 'is displayed' do + check_template_selector_menu_display(true) + end + + context 'user toggles preview' do + before do + click_link 'Preview' + end + + scenario 'template selector menu is hidden and shown correctly' do + check_template_selector_menu_display(false) + click_link 'Write' + check_template_selector_menu_display(true) + end + end + end +end + +def check_template_selector_menu_display(is_visible) + count = is_visible ? 
1 : 0 + expect(page).to have_css('.template-selectors-menu', count: count) +end + +def create_and_edit_file(file_name) + visit project_new_blob_path(project, 'master', file_name: file_name) + click_button "Commit changes" + visit project_edit_blob_path(project, File.join(project.default_branch, file_name)) +end diff --git a/spec/features/projects/files/template_type_dropdown_spec.rb b/spec/features/projects/files/template_type_dropdown_spec.rb index 97408a9c41e..342a93b328f 100644 --- a/spec/features/projects/files/template_type_dropdown_spec.rb +++ b/spec/features/projects/files/template_type_dropdown_spec.rb @@ -1,11 +1,10 @@ require 'spec_helper' -feature 'Template type dropdown selector', :js do +describe 'Projects > Files > Template type dropdown selector', :js do let(:project) { create(:project, :repository) } - let(:user) { create(:user) } + let(:user) { project.owner } before do - project.add_master(user) sign_in user end @@ -14,16 +13,16 @@ feature 'Template type dropdown selector', :js do create_and_edit_file('.random-file.js') end - scenario 'not displayed' do + it 'not displayed' do check_type_selector_display(false) end - scenario 'selects every template type correctly' do + it 'selects every template type correctly' do fill_in 'file_path', with: '.gitignore' try_selecting_all_types end - scenario 'updates toggle value when input matches' do + it 'updates toggle value when input matches' do fill_in 'file_path', with: '.gitignore' check_type_selector_toggle_text('.gitignore') end @@ -34,15 +33,15 @@ feature 'Template type dropdown selector', :js do visit project_edit_blob_path(project, File.join(project.default_branch, 'LICENSE')) end - scenario 'displayed' do + it 'displayed' do check_type_selector_display(true) end - scenario 'is displayed when input matches' do + it 'is displayed when input matches' do check_type_selector_display(true) end - scenario 'selects every template type correctly' do + it 'selects every template type correctly' do try_selecting_all_types end @@ -51,7 +50,7 @@ feature 'Template type dropdown selector', :js do click_link 'Preview changes' end - scenario 'type selector is hidden and shown correctly' do + it 'type selector is hidden and shown correctly' do check_type_selector_display(false) click_link 'Write' check_type_selector_display(true) @@ -64,15 +63,15 @@ feature 'Template type dropdown selector', :js do visit project_new_blob_path(project, 'master', file_name: '.gitignore') end - scenario 'is displayed' do + it 'is displayed' do check_type_selector_display(true) end - scenario 'toggle is set to the correct value' do + it 'toggle is set to the correct value' do check_type_selector_toggle_text('.gitignore') end - scenario 'selects every template type correctly' do + it 'selects every template type correctly' do try_selecting_all_types end end @@ -82,15 +81,15 @@ feature 'Template type dropdown selector', :js do visit project_new_blob_path(project, project.default_branch) end - scenario 'type selector is shown' do + it 'type selector is shown' do check_type_selector_display(true) end - scenario 'toggle is set to the proper value' do + it 'toggle is set to the proper value' do check_type_selector_toggle_text('Choose type') end - scenario 'selects every template type correctly' do + it 'selects every template type correctly' do try_selecting_all_types end end diff --git a/spec/features/projects/files/undo_template_spec.rb b/spec/features/projects/files/undo_template_spec.rb index fbf35fb4e1c..5de0bc009fb 100644 --- 
a/spec/features/projects/files/undo_template_spec.rb +++ b/spec/features/projects/files/undo_template_spec.rb @@ -1,11 +1,10 @@ require 'spec_helper' -feature 'Template Undo Button', :js do +describe 'Projects > Files > Template Undo Button', :js do let(:project) { create(:project, :repository) } - let(:user) { create(:user) } + let(:user) { project.owner } before do - project.add_master(user) sign_in user end @@ -15,7 +14,7 @@ feature 'Template Undo Button', :js do select_file_template('.js-license-selector', 'Apache License 2.0') end - scenario 'reverts template application' do + it 'reverts template application' do try_template_undo('http://www.apache.org/licenses/', 'Apply a license template') end end @@ -27,7 +26,7 @@ feature 'Template Undo Button', :js do select_file_template('.js-license-selector', 'Apache License 2.0') end - scenario 'reverts template application' do + it 'reverts template application' do try_template_undo('http://www.apache.org/licenses/', 'Apply a license template') end end diff --git a/spec/features/projects/user_browses_a_tree_with_a_folder_containing_only_a_folder.rb b/spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder.rb index a17e65cc5b9..2d67837763c 100644 --- a/spec/features/projects/user_browses_a_tree_with_a_folder_containing_only_a_folder.rb +++ b/spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder.rb @@ -1,9 +1,9 @@ require 'spec_helper' # This is a regression test for https://gitlab.com/gitlab-org/gitlab-ce/issues/37569 -describe 'User browses a tree with a folder containing only a folder' do +describe 'Projects > Files > User browses a tree with a folder containing only a folder' do let(:project) { create(:project, :empty_repo) } - let(:user) { project.creator } + let(:user) { project.owner } before do # We need to disable the tree.flat_path provided by Gitaly to reproduce the issue diff --git a/spec/features/projects/user_browses_files_spec.rb b/spec/features/projects/files/user_browses_files_spec.rb index 62e6419cc42..9c1f11f4c12 100644 --- a/spec/features/projects/user_browses_files_spec.rb +++ b/spec/features/projects/files/user_browses_files_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe 'User browses files' do - include DropzoneHelper - +describe 'Projects > Files > User browses files' do let(:fork_message) do "You're not allowed to make changes to this project directly. "\ "A fork of this project has been created that you can make changes in, so you can submit a merge request." 
@@ -12,13 +10,24 @@ describe 'User browses files' do let(:project2_tree_path_root_ref) { project_tree_path(project2, project2.repository.root_ref) } let(:tree_path_ref_6d39438) { project_tree_path(project, '6d39438') } let(:tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) } - let(:user) { create(:user) } + let(:user) { project.owner } before do - project.add_master(user) sign_in(user) end + it 'shows last commit for current directory' do + visit(tree_path_root_ref) + + click_link 'files' + + last_commit = project.repository.last_commit_for_path(project.default_branch, 'files') + page.within('.blob-commit-info') do + expect(page).to have_content last_commit.short_id + expect(page).to have_content last_commit.author_name + end + end + context 'when browsing the master branch' do before do visit(tree_path_root_ref) @@ -48,7 +57,7 @@ describe 'User browses files' do expect(page).not_to have_link('Browse Files') end - it 'shows the "Browse Code" link' do + it 'shows the "Browse Files" link' do click_link('History') expect(page).to have_link('Browse Files') @@ -121,6 +130,14 @@ describe 'User browses files' do wait_for_requests expect(page).to have_content('*.rbc') end + + it 'is possible to blame' do + click_link 'Blame' + + expect(page).to have_content "*.rb" + expect(page).to have_content "Dmitriy Zaporozhets" + expect(page).to have_content "Initial commit" + end end context 'when browsing a raw file' do @@ -133,57 +150,4 @@ describe 'User browses files' do expect(source).to eq('') # Body is filled in by gitlab-workhorse end end - - context 'when browsing an LFS object' do - before do - allow_any_instance_of(Project).to receive(:lfs_enabled?).and_return(true) - visit(project_tree_path(project, 'lfs')) - end - - it 'shows an LFS object' do - click_link('files') - click_link('lfs') - click_link('lfs_object.iso') - - expect(page).to have_content('Download (1.5 MB)') - expect(page).not_to have_content('version https://git-lfs.github.com/spec/v1') - expect(page).not_to have_content('oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897') - expect(page).not_to have_content('size 1575078') - - page.within('.content') do - expect(page).to have_content('Delete') - expect(page).to have_content('History') - expect(page).to have_content('Permalink') - expect(page).to have_content('Replace') - expect(page).not_to have_content('Annotate') - expect(page).not_to have_content('Blame') - expect(page).not_to have_content('Edit') - expect(page).to have_link('Download') - end - end - end - - context 'when previewing a file content' do - before do - visit(tree_path_root_ref) - end - - it 'shows a preview of a file content', :js do - find('.add-to-tree').click - click_link('Upload file') - drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'logo_sample.svg')) - - page.within('#modal-upload-blob') do - fill_in(:commit_message, with: 'New commit message') - fill_in(:branch_name, with: 'new_branch_name', visible: true) - click_button('Upload file') - end - - wait_for_all_requests - - visit(project_blob_path(project, 'new_branch_name/logo_sample.svg')) - - expect(page).to have_css('.file-content img') - end - end end diff --git a/spec/features/projects/files/user_browses_lfs_files_spec.rb b/spec/features/projects/files/user_browses_lfs_files_spec.rb new file mode 100644 index 00000000000..c559a301ca1 --- /dev/null +++ b/spec/features/projects/files/user_browses_lfs_files_spec.rb @@ -0,0 +1,57 @@ +require 'spec_helper' + +describe 'Projects > Files > User 
browses LFS files' do + let(:project) { create(:project, :repository) } + let(:user) { project.owner } + + before do + sign_in(user) + end + + context 'when LFS is disabled', :js do + before do + allow_any_instance_of(Project).to receive(:lfs_enabled?).and_return(false) + visit project_tree_path(project, 'lfs') + end + + it 'is possible to see raw content of LFS pointer' do + click_link 'files' + click_link 'lfs' + click_link 'lfs_object.iso' + + expect(page).to have_content 'version https://git-lfs.github.com/spec/v1' + expect(page).to have_content 'oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897' + expect(page).to have_content 'size 1575078' + expect(page).not_to have_content 'Download (1.5 MB)' + end + end + + context 'when LFS is enabled' do + before do + allow_any_instance_of(Project).to receive(:lfs_enabled?).and_return(true) + visit project_tree_path(project, 'lfs') + end + + it 'shows an LFS object' do + click_link('files') + click_link('lfs') + click_link('lfs_object.iso') + + expect(page).to have_content('Download (1.5 MB)') + expect(page).not_to have_content('version https://git-lfs.github.com/spec/v1') + expect(page).not_to have_content('oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897') + expect(page).not_to have_content('size 1575078') + + page.within('.content') do + expect(page).to have_content('Delete') + expect(page).to have_content('History') + expect(page).to have_content('Permalink') + expect(page).to have_content('Replace') + expect(page).not_to have_content('Annotate') + expect(page).not_to have_content('Blame') + expect(page).not_to have_content('Edit') + expect(page).to have_link('Download') + end + end + end +end diff --git a/spec/features/projects/user_creates_directory_spec.rb b/spec/features/projects/files/user_creates_directory_spec.rb index 00e48f6fabd..847b5f0860f 100644 --- a/spec/features/projects/user_creates_directory_spec.rb +++ b/spec/features/projects/files/user_creates_directory_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -feature 'User creates a directory', :js do +describe 'Projects > Files > User creates a directory', :js do let(:fork_message) do "You're not allowed to make changes to this project directly. "\ "A fork of this project has been created that you can make changes in, so you can submit a merge request." diff --git a/spec/features/projects/user_creates_files_spec.rb b/spec/features/projects/files/user_creates_files_spec.rb index 8993533676b..208cc8d81f7 100644 --- a/spec/features/projects/user_creates_files_spec.rb +++ b/spec/features/projects/files/user_creates_files_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'User creates files' do +describe 'Projects > Files > User creates files' do let(:fork_message) do "You're not allowed to make changes to this project directly. "\ "A fork of this project has been created that you can make changes in, so you can submit a merge request." @@ -59,6 +59,31 @@ describe 'User creates files' do expect(page).to have_selector('.file-editor') end + def submit_new_file(options) + file_name = find('#file_name') + file_name.set options[:file_name] || 'README.md' + + file_content = find('#file-content', visible: false) + file_content.set options[:file_content] || 'Some content' + + click_button 'Commit changes' + end + + it 'allows Chinese characters in file name' do + submit_new_file(file_name: '测试.md') + expect(page).to have_content 'The file has been successfully created.' 
+ end + + it 'allows Chinese characters in directory name' do + submit_new_file(file_name: '中文/测试.md') + expect(page).to have_content 'The file has been successfully created' + end + + it 'does not allow directory traversal in file name' do + submit_new_file(file_name: '../README.md') + expect(page).to have_content 'Path cannot include directory traversal' + end + it 'creates and commit a new file', :js do find('#editor') execute_script("ace.edit('editor').setValue('*.rbca')") diff --git a/spec/features/projects/user_deletes_files_spec.rb b/spec/features/projects/files/user_deletes_files_spec.rb index 9d55197e719..36d3e001a64 100644 --- a/spec/features/projects/user_deletes_files_spec.rb +++ b/spec/features/projects/files/user_deletes_files_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'User deletes files' do +describe 'Projects > Files > User deletes files' do let(:fork_message) do "You're not allowed to make changes to this project directly. "\ "A fork of this project has been created that you can make changes in, so you can submit a merge request." diff --git a/spec/features/projects/user_edits_files_spec.rb b/spec/features/projects/files/user_edits_files_spec.rb index 05c2be473da..dc6e4fd27cb 100644 --- a/spec/features/projects/user_edits_files_spec.rb +++ b/spec/features/projects/files/user_edits_files_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'User edits files' do +describe 'Projects > Files > User edits files' do include ProjectForksHelper let(:project) { create(:project, :repository, name: 'Shop') } let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') } @@ -12,6 +12,23 @@ describe 'User edits files' do sign_in(user) end + shared_examples 'unavailable for an archived project' do + it 'does not show the edit link for an archived project', :js do + project.update!(archived: true) + visit project_tree_path(project, project.repository.root_ref) + + click_link('.gitignore') + + aggregate_failures 'available edit buttons' do + expect(page).not_to have_text('Edit') + expect(page).not_to have_text('Web IDE') + + expect(page).not_to have_text('Replace') + expect(page).not_to have_text('Delete') + end + end + end + context 'when an user has write access' do before do project.add_master(user) @@ -85,6 +102,8 @@ describe 'User edits files' do expect(page).to have_css('.line_holder.new') end + + it_behaves_like 'unavailable for an archived project' end context 'when an user does not have write access' do @@ -168,6 +187,10 @@ describe 'User edits files' do expect(page).to have_content("From #{forked_project.full_path}") expect(page).to have_content("into #{project2.full_path}") end + + it_behaves_like 'unavailable for an archived project' do + let(:project) { project2 } + end end end end diff --git a/spec/features/projects/files/user_reads_pipeline_status_spec.rb b/spec/features/projects/files/user_reads_pipeline_status_spec.rb new file mode 100644 index 00000000000..2fb9da2f0a2 --- /dev/null +++ b/spec/features/projects/files/user_reads_pipeline_status_spec.rb @@ -0,0 +1,46 @@ +require 'spec_helper' + +describe 'user reads pipeline status', :js do + let(:project) { create(:project, :repository) } + let(:user) { create(:user) } + let(:v110_pipeline) { create_pipeline('v1.1.0', 'success') } + let(:x110_pipeline) { create_pipeline('x1.1.0', 'failed') } + + before do + project.add_master(user) + + project.repository.add_tag(user, 'x1.1.0', 'v1.1.0') + v110_pipeline + x110_pipeline + + sign_in(user) + end + + shared_examples 
'visiting project tree' do + scenario 'sees the correct pipeline status' do + visit project_tree_path(project, expected_pipeline.ref) + wait_for_requests + + page.within('.blob-commit-info') do + expect(page).to have_link('', href: project_pipeline_path(project, expected_pipeline)) + expect(page).to have_selector(".ci-status-icon-#{expected_pipeline.status}") + end + end + end + + it_behaves_like 'visiting project tree' do + let(:expected_pipeline) { v110_pipeline } + end + + it_behaves_like 'visiting project tree' do + let(:expected_pipeline) { x110_pipeline } + end + + def create_pipeline(ref, status) + create(:ci_pipeline, + project: project, + ref: ref, + sha: project.commit(ref).sha, + status: status) + end +end diff --git a/spec/features/projects/user_replaces_files_spec.rb b/spec/features/projects/files/user_replaces_files_spec.rb index 74872403b35..9ac3417b671 100644 --- a/spec/features/projects/user_replaces_files_spec.rb +++ b/spec/features/projects/files/user_replaces_files_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'User replaces files' do +describe 'Projects > Files > User replaces files' do include DropzoneHelper let(:fork_message) do diff --git a/spec/features/projects/files/user_searches_for_files_spec.rb b/spec/features/projects/files/user_searches_for_files_spec.rb index a105685bca7..a90e4918fb1 100644 --- a/spec/features/projects/files/user_searches_for_files_spec.rb +++ b/spec/features/projects/files/user_searches_for_files_spec.rb @@ -1,8 +1,7 @@ require 'spec_helper' -describe 'User searches for files' do - let(:user) { create(:user) } - let(:project) { create(:project, :repository) } +describe 'Projects > Files > User searches for files' do + let(:user) { project.owner } before do sign_in(user) @@ -10,11 +9,10 @@ describe 'User searches for files' do describe 'project main screen' do context 'when project is empty' do - let(:empty_project) { create(:project) } + let(:project) { create(:project) } before do - empty_project.add_developer(user) - visit project_path(empty_project) + visit project_path(project) end it 'does not show any result' do @@ -26,6 +24,8 @@ describe 'User searches for files' do end context 'when project is not empty' do + let(:project) { create(:project, :repository) } + before do project.add_developer(user) visit project_path(project) @@ -38,16 +38,16 @@ describe 'User searches for files' do end describe 'project tree screen' do + let(:project) { create(:project, :repository) } + before do project.add_developer(user) visit project_tree_path(project, project.default_branch) end - it 'shows "Find file" button' do + it 'shows found files' do expect(page).to have_selector('.tree-controls .shortcuts-find-file') - end - it 'shows found files' do fill_in('search', with: 'coffee') click_button('Go') diff --git a/spec/features/projects/user_uploads_files_spec.rb b/spec/features/projects/files/user_uploads_files_spec.rb index 75898afcda9..8b212faa29d 100644 --- a/spec/features/projects/user_uploads_files_spec.rb +++ b/spec/features/projects/files/user_uploads_files_spec.rb @@ -1,17 +1,17 @@ require 'spec_helper' -describe 'User uploads files' do +describe 'Projects > Files > User uploads files' do include DropzoneHelper let(:fork_message) do "You're not allowed to make changes to this project directly. "\ "A fork of this project has been created that you can make changes in, so you can submit a merge request." 
end - let(:project) { create(:project, :repository, name: 'Shop') } + let(:user) { create(:user) } + let(:project) { create(:project, :repository, name: 'Shop', creator: user) } let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') } let(:project_tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) } let(:project2_tree_path_root_ref) { project_tree_path(project2, project2.repository.root_ref) } - let(:user) { create(:user) } before do project.add_master(user) @@ -23,7 +23,7 @@ describe 'User uploads files' do visit(project_tree_path_root_ref) end - it 'uploads and commit a new file', :js do + it 'uploads and commits a new text file', :js do find('.add-to-tree').click click_link('Upload file') drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt')) @@ -46,6 +46,24 @@ describe 'User uploads files' do expect(page).to have_content('Lorem ipsum dolor sit amet') expect(page).to have_content('Sed ut perspiciatis unde omnis') end + + it 'uploads and commits a new image file', :js do + find('.add-to-tree').click + click_link('Upload file') + drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'logo_sample.svg')) + + page.within('#modal-upload-blob') do + fill_in(:commit_message, with: 'New commit message') + fill_in(:branch_name, with: 'new_branch_name', visible: true) + click_button('Upload file') + end + + wait_for_all_requests + + visit(project_blob_path(project, 'new_branch_name/logo_sample.svg')) + + expect(page).to have_css('.file-content img') + end end context 'when an user does not have write access' do diff --git a/spec/features/projects/guest_navigation_menu_spec.rb b/spec/features/projects/guest_navigation_menu_spec.rb deleted file mode 100644 index 199682b943c..00000000000 --- a/spec/features/projects/guest_navigation_menu_spec.rb +++ /dev/null @@ -1,82 +0,0 @@ -require 'spec_helper' - -describe 'Guest navigation menu' do - let(:project) { create(:project, :private, public_builds: false) } - let(:guest) { create(:user) } - - before do - project.add_guest(guest) - - sign_in(guest) - end - - it 'shows allowed tabs only' do - visit project_path(project) - - within('.nav-sidebar') do - expect(page).to have_content 'Overview' - expect(page).to have_content 'Issues' - expect(page).to have_content 'Wiki' - - expect(page).not_to have_content 'Repository' - expect(page).not_to have_content 'Pipelines' - expect(page).not_to have_content 'Merge Requests' - end - end - - it 'does not show fork button' do - visit project_path(project) - - within('.count-buttons') do - expect(page).not_to have_link 'Fork' - end - end - - it 'does not show clone path' do - visit project_path(project) - - within('.project-repo-buttons') do - expect(page).not_to have_selector '.project-clone-holder' - end - end - - describe 'project landing page' do - before do - project.project_feature.update!( - issues_access_level: ProjectFeature::DISABLED, - wiki_access_level: ProjectFeature::DISABLED - ) - end - - it 'does not show the project file list landing page' do - visit project_path(project) - - expect(page).not_to have_selector '.project-stats' - expect(page).not_to have_selector '.project-last-commit' - expect(page).not_to have_selector '.project-show-files' - expect(page).to have_selector '.project-show-customize_workflow' - end - - it 'shows the customize workflow when issues and wiki are disabled' do - visit project_path(project) - - expect(page).to have_selector '.project-show-customize_workflow' - end - - it 'shows the 
wiki when enabled' do - project.project_feature.update!(wiki_access_level: ProjectFeature::PRIVATE) - - visit project_path(project) - - expect(page).to have_selector '.project-show-wiki' - end - - it 'shows the issues when enabled' do - project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE) - - visit project_path(project) - - expect(page).to have_selector '.issues-list' - end - end -end diff --git a/spec/features/projects/hook_logs/user_reads_log_spec.rb b/spec/features/projects/hook_logs/user_reads_log_spec.rb new file mode 100644 index 00000000000..18e975fa653 --- /dev/null +++ b/spec/features/projects/hook_logs/user_reads_log_spec.rb @@ -0,0 +1,21 @@ +require 'spec_helper' + +feature 'Hook logs' do + given(:web_hook_log) { create(:web_hook_log, response_body: '<script>') } + given(:project) { web_hook_log.web_hook.project } + given(:user) { create(:user) } + + before do + project.add_master(user) + + sign_in(user) + end + + scenario 'user reads log without getting XSS' do + visit( + project_hook_hook_log_path( + project, web_hook_log.web_hook, web_hook_log)) + + expect(page).to have_content('<script>') + end +end diff --git a/spec/features/projects/issues/user_comments_on_issue_spec.rb b/spec/features/projects/issues/user_comments_on_issue_spec.rb new file mode 100644 index 00000000000..c45fdc7642f --- /dev/null +++ b/spec/features/projects/issues/user_comments_on_issue_spec.rb @@ -0,0 +1,73 @@ +require "spec_helper" + +describe "User comments on issue", :js do + include Spec::Support::Helpers::Features::NotesHelpers + + let(:project) { create(:project_empty_repo, :public) } + let(:issue) { create(:issue, project: project) } + let(:user) { create(:user) } + + before do + project.add_guest(user) + sign_in(user) + + visit(project_issue_path(project, issue)) + end + + context "when adding comments" do + it "adds comment" do + content = "XML attached" + target_form = ".js-main-target-form" + + add_note(content) + + page.within(".note") do + expect(page).to have_content(content) + end + + page.within(target_form) do + find(".error-alert", visible: false) + end + end + + it "adds comment with code block" do + comment = "```\nCommand [1]: /usr/local/bin/git , see [text](doc/text)\n```" + + add_note(comment) + + expect(page).to have_content(comment) + end + end + + context "when editing comments" do + it "edits comment" do + add_note("# Comment with a header") + + page.within(".note-body > .note-text") do + expect(page).to have_content("Comment with a header").and have_no_css("#comment-with-a-header") + end + + page.within(".main-notes-list") do + note = find(".note") + + note.hover + note.find(".js-note-edit").click + end + + expect(page).to have_css(".current-note-edit-form textarea") + + comment = "+1 Awesome!" 
+ + page.within(".current-note-edit-form") do + fill_in("note[note]", with: comment) + click_button("Save comment") + end + + wait_for_requests + + page.within(".note") do + expect(page).to have_content(comment) + end + end + end +end diff --git a/spec/features/projects/issues/user_creates_issue_spec.rb b/spec/features/projects/issues/user_creates_issue_spec.rb new file mode 100644 index 00000000000..e76f7c5589d --- /dev/null +++ b/spec/features/projects/issues/user_creates_issue_spec.rb @@ -0,0 +1,87 @@ +require "spec_helper" + +describe "User creates issue" do + let(:project) { create(:project_empty_repo, :public) } + let(:user) { create(:user) } + + context "when signed in as guest" do + before do + project.add_guest(user) + sign_in(user) + + visit(new_project_issue_path(project)) + end + + it "creates issue" do + page.within(".issue-form") do + expect(page).to have_no_content("Assign to") + .and have_no_content("Labels") + .and have_no_content("Milestone") + end + + issue_title = "500 error on profile" + + fill_in("Title", with: issue_title) + click_button("Submit issue") + + expect(page).to have_content(issue_title) + .and have_content(user.name) + .and have_content(project.name) + end + end + + context "when signed in as developer", :js do + before do + project.add_developer(user) + sign_in(user) + + visit(new_project_issue_path(project)) + end + + context "when previewing" do + it "previews content" do + form = first(".gfm-form") + textarea = first(".gfm-form textarea") + + page.within(form) do + click_link("Preview") + + preview = find(".js-md-preview") # this element is findable only when the "Preview" link is clicked. + + expect(preview).to have_content("Nothing to preview.") + + click_link("Write") + fill_in("Description", with: "Bug fixed :smile:") + click_link("Preview") + + expect(preview).to have_css("gl-emoji") + expect(textarea).not_to be_visible + end + end + end + + context "with labels" do + LABEL_TITLES = %w(bug feature enhancement).freeze + + before do + LABEL_TITLES.each do |title| + create(:label, project: project, title: title) + end + end + + it "creates issue" do + issue_title = "500 error on profile" + + fill_in("Title", with: issue_title) + click_button("Label") + click_link(LABEL_TITLES.first) + click_button("Submit issue") + + expect(page).to have_content(issue_title) + .and have_content(user.name) + .and have_content(project.name) + .and have_content(LABEL_TITLES.first) + end + end + end +end diff --git a/spec/features/projects/issues/user_edits_issue_spec.rb b/spec/features/projects/issues/user_edits_issue_spec.rb new file mode 100644 index 00000000000..1d9c3abc20f --- /dev/null +++ b/spec/features/projects/issues/user_edits_issue_spec.rb @@ -0,0 +1,25 @@ +require "spec_helper" + +describe "User edits issue", :js do + set(:project) { create(:project_empty_repo, :public) } + set(:user) { create(:user) } + set(:issue) { create(:issue, project: project, author: user) } + + before do + project.add_developer(user) + sign_in(user) + + visit(edit_project_issue_path(project, issue)) + end + + it "previews content" do + form = first(".gfm-form") + + page.within(form) do + fill_in("Description", with: "Bug fixed :smile:") + click_link("Preview") + end + + expect(form).to have_link("Write") + end +end diff --git a/spec/features/projects/issues/user_sorts_issues_spec.rb b/spec/features/projects/issues/user_sorts_issues_spec.rb new file mode 100644 index 00000000000..c3d63000dac --- /dev/null +++ b/spec/features/projects/issues/user_sorts_issues_spec.rb @@ -0,0 +1,39 @@ 
+require "spec_helper" + +describe "User sorts issues" do + set(:project) { create(:project_empty_repo, :public) } + set(:issue1) { create(:issue, project: project) } + set(:issue2) { create(:issue, project: project) } + set(:issue3) { create(:issue, project: project) } + + before do + create_list(:award_emoji, 2, :upvote, awardable: issue1) + create_list(:award_emoji, 2, :downvote, awardable: issue2) + create(:award_emoji, :downvote, awardable: issue1) + create(:award_emoji, :upvote, awardable: issue2) + + visit(project_issues_path(project)) + end + + it "sorts by popularity" do + find("button.dropdown-toggle").click + + page.within(".content ul.dropdown-menu.dropdown-menu-align-right li") do + click_link("Popularity") + end + + page.within(".issues-list") do + page.within("li.issue:nth-child(1)") do + expect(page).to have_content(issue1.title) + end + + page.within("li.issue:nth-child(2)") do + expect(page).to have_content(issue2.title) + end + + page.within("li.issue:nth-child(3)") do + expect(page).to have_content(issue3.title) + end + end + end +end diff --git a/spec/features/projects/issues/user_toggles_subscription_spec.rb b/spec/features/projects/issues/user_toggles_subscription_spec.rb new file mode 100644 index 00000000000..117a614b980 --- /dev/null +++ b/spec/features/projects/issues/user_toggles_subscription_spec.rb @@ -0,0 +1,28 @@ +require "spec_helper" + +describe "User toggles subscription", :js do + set(:project) { create(:project_empty_repo, :public) } + set(:user) { create(:user) } + set(:issue) { create(:issue, project: project, author: user) } + + before do + project.add_developer(user) + sign_in(user) + + visit(project_issue_path(project, issue)) + end + + it "unsubscribes from issue" do + subscription_button = find(".js-issuable-subscribe-button") + + # Check we're subscribed. + expect(subscription_button).to have_css("button.is-checked") + + # Toggle subscription. + find(".js-issuable-subscribe-button button").click + wait_for_requests + + # Check we're unsubscribed. 
+ expect(subscription_button).to have_css("button:not(.is-checked)") + end +end diff --git a/spec/features/projects/issues/user_views_issue_spec.rb b/spec/features/projects/issues/user_views_issue_spec.rb new file mode 100644 index 00000000000..4093876c289 --- /dev/null +++ b/spec/features/projects/issues/user_views_issue_spec.rb @@ -0,0 +1,32 @@ +require "spec_helper" + +describe "User views issue" do + set(:project) { create(:project_empty_repo, :public) } + set(:user) { create(:user) } + set(:issue) { create(:issue, project: project, description: "# Description header", author: user) } + + before do + project.add_developer(user) + sign_in(user) + + visit(project_issue_path(project, issue)) + end + + it { expect(page).to have_header_with_correct_id_and_link(1, "Description header", "description-header") } + + it 'shows the merge request and issue actions', :aggregate_failures do + expect(page).to have_link('New issue') + expect(page).to have_button('Create merge request') + expect(page).to have_link('Close issue') + end + + context 'when the project is archived' do + let(:project) { create(:project, :public, :archived) } + + it 'hides the merge request and issue actions', :aggregate_failures do + expect(page).not_to have_link('New issue') + expect(page).not_to have_button('Create merge request') + expect(page).not_to have_link('Close issue') + end + end +end diff --git a/spec/features/projects/issues/user_views_issues_spec.rb b/spec/features/projects/issues/user_views_issues_spec.rb index d35009b8974..58afb4efb86 100644 --- a/spec/features/projects/issues/user_views_issues_spec.rb +++ b/spec/features/projects/issues/user_views_issues_spec.rb @@ -1,56 +1,116 @@ -require 'spec_helper' +require "spec_helper" -describe 'User views issues' do +describe "User views issues" do + let!(:closed_issue) { create(:closed_issue, project: project) } + let!(:open_issue1) { create(:issue, project: project) } + let!(:open_issue2) { create(:issue, project: project) } set(:user) { create(:user) } - shared_examples_for 'shows issues' do - it 'shows issues' do - expect(page).to have_content(project.name) - .and have_content(issue1.title) - .and have_content(issue2.title) - .and have_no_selector('.js-new-board-list') + shared_examples "opens issue from list" do + it "opens issue" do + click_link(issue.title) + + expect(page).to have_content(issue.title) end end - context 'when project is public' do - set(:project) { create(:project_empty_repo, :public) } - set(:issue1) { create(:issue, project: project) } - set(:issue2) { create(:issue, project: project) } + shared_examples "open issues" do + context "open issues" do + let(:label) { create(:label, project: project, title: "bug") } - context 'when signed in' do before do - project.add_developer(user) - sign_in(user) + open_issue1.labels << label + + visit(project_issues_path(project, state: :opened)) + end - visit(project_issues_path(project)) + it "shows open issues" do + expect(page).to have_content(project.name) + .and have_content(open_issue1.title) + .and have_content(open_issue2.title) + .and have_no_content(closed_issue.title) + .and have_no_selector(".js-new-board-list") end - include_examples 'shows issues' + it "opens issues by label" do + page.within(".issues-list") do + click_link(label.title) + end + + expect(page).to have_content(open_issue1.title) + .and have_no_content(open_issue2.title) + .and have_no_content(closed_issue.title) + end + + include_examples "opens issue from list" do + let(:issue) { open_issue1 } + end end + end - context 'when 
not signed in' do + shared_examples "closed issues" do + context "closed issues" do before do - visit(project_issues_path(project)) + visit(project_issues_path(project, state: :closed)) + end + + it "shows closed issues" do + expect(page).to have_content(project.name) + .and have_content(closed_issue.title) + .and have_no_content(open_issue1.title) + .and have_no_content(open_issue2.title) + .and have_no_selector(".js-new-board-list") end - include_examples 'shows issues' + include_examples "opens issue from list" do + let(:issue) { closed_issue } + end end end - context 'when project is internal' do - set(:project) { create(:project_empty_repo, :internal) } - set(:issue1) { create(:issue, project: project) } - set(:issue2) { create(:issue, project: project) } - - context 'when signed in' do + shared_examples "all issues" do + context "all issues" do before do - project.add_developer(user) - sign_in(user) + visit(project_issues_path(project, state: :all)) + end - visit(project_issues_path(project)) + it "shows all issues" do + expect(page).to have_content(project.name) + .and have_content(closed_issue.title) + .and have_content(open_issue1.title) + .and have_content(open_issue2.title) + .and have_no_selector(".js-new-board-list") end - include_examples 'shows issues' + include_examples "opens issue from list" do + let(:issue) { closed_issue } + end + end + end + + %w[internal public].each do |visibility| + shared_examples "#{visibility} project" do + context "when project is #{visibility}" do + let(:project) { create(:project_empty_repo, :"#{visibility}") } + + include_examples "open issues" + include_examples "closed issues" + include_examples "all issues" + end end end + + context "when signed in as developer" do + before do + project.add_developer(user) + sign_in(user) + end + + include_examples "public project" + include_examples "internal project" + end + + context "when not signed in" do + include_examples "public project" + end end diff --git a/spec/features/projects/jobs/permissions_spec.rb b/spec/features/projects/jobs/permissions_spec.rb new file mode 100644 index 00000000000..31abadf9bd6 --- /dev/null +++ b/spec/features/projects/jobs/permissions_spec.rb @@ -0,0 +1,130 @@ +require 'spec_helper' + +describe 'Project Jobs Permissions' do + let(:user) { create(:user) } + let(:group) { create(:group, name: 'some group') } + let(:project) { create(:project, :repository, namespace: group) } + let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.sha, ref: 'master') } + let!(:job) { create(:ci_build, :running, :coverage, :trace_artifact, pipeline: pipeline) } + + before do + sign_in(user) + + project.enable_ci + end + + describe 'jobs pages' do + shared_examples 'recent job page details responds with status' do |status| + before do + visit project_job_path(project, job) + end + + it { expect(status_code).to eq(status) } + end + + shared_examples 'project jobs page responds with status' do |status| + before do + visit project_jobs_path(project) + end + + it { expect(status_code).to eq(status) } + end + + context 'when public access for jobs is disabled' do + before do + project.update(public_builds: false) + end + + context 'when user is a guest' do + before do + project.add_guest(user) + end + + it_behaves_like 'recent job page details responds with status', 404 + it_behaves_like 'project jobs page responds with status', 404 + end + + context 'when project is internal' do + before do + project.update(visibility_level: Gitlab::VisibilityLevel::INTERNAL) + end 
+ + it_behaves_like 'recent job page details responds with status', 404 + it_behaves_like 'project jobs page responds with status', 404 + end + end + + context 'when public access for jobs is enabled' do + before do + project.update(public_builds: true) + end + + context 'when project is internal' do + before do + project.update(visibility_level: Gitlab::VisibilityLevel::INTERNAL) + end + + it_behaves_like 'recent job page details responds with status', 200 do + it 'renders job details', :js do + expect(page).to have_content "Job ##{job.id}" + expect(page).to have_css '#build-trace' + end + end + + it_behaves_like 'project jobs page responds with status', 200 do + it 'renders job' do + page.within('.build') do + expect(page).to have_content("##{job.id}") + .and have_content(job.sha[0..7]) + .and have_content(job.ref) + .and have_content(job.name) + end + end + end + end + end + end + + describe 'artifacts page' do + context 'when recent job has artifacts available' do + before do + artifacts = Rails.root.join('spec/fixtures/ci_build_artifacts.zip') + archive = fixture_file_upload(artifacts, 'application/zip') + + job.update_attributes(legacy_artifacts_file: archive) + end + + context 'when public access for jobs is disabled' do + before do + project.update(public_builds: false) + end + + context 'when user with guest role' do + before do + project.add_guest(user) + end + + it 'responds with 404 status' do + visit download_project_job_artifacts_path(project, job) + + expect(status_code).to eq(404) + end + end + + context 'when user with reporter role' do + before do + project.add_reporter(user) + end + + it 'starts download artifact' do + visit download_project_job_artifacts_path(project, job) + + expect(status_code).to eq(200) + expect(page.response_headers['Content-Type']).to eq 'application/zip' + expect(page.response_headers['Content-Transfer-Encoding']).to eq 'binary' + end + end + end + end + end +end diff --git a/spec/features/projects/jobs/user_browses_job_spec.rb b/spec/features/projects/jobs/user_browses_job_spec.rb index 4c49cff30d4..bff5bbe99af 100644 --- a/spec/features/projects/jobs/user_browses_job_spec.rb +++ b/spec/features/projects/jobs/user_browses_job_spec.rb @@ -1,16 +1,15 @@ require 'spec_helper' describe 'User browses a job', :js do - let!(:build) { create(:ci_build, :running, :coverage, pipeline: pipeline) } - let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.sha, ref: 'master') } - let(:project) { create(:project, :repository, namespace: user.namespace) } let(:user) { create(:user) } + let(:user_access_level) { :developer } + let(:project) { create(:project, :repository, namespace: user.namespace) } + let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.sha, ref: 'master') } + let!(:build) { create(:ci_build, :success, :trace_artifact, :coverage, pipeline: pipeline) } before do project.add_master(user) project.enable_ci - build.success - build.trace.set('job trace') sign_in(user) @@ -21,7 +20,9 @@ describe 'User browses a job', :js do expect(page).to have_content("Job ##{build.id}") expect(page).to have_css('#build-trace') - accept_confirm { click_link('Erase') } + # scroll to the top of the page first + execute_script "window.scrollTo(0,0)" + accept_confirm { find('.js-erase-link').click } expect(page).to have_no_css('.artifacts') expect(build).not_to have_trace @@ -34,4 +35,26 @@ describe 'User browses a job', :js do expect(build.project.running_or_pending_build_count).to 
eq(build.project.builds.running_or_pending.count(:all)) end + + context 'with a failed job' do + let!(:build) { create(:ci_build, :failed, :trace_artifact, pipeline: pipeline) } + + it 'displays the failure reason' do + within('.builds-container') do + build_link = first('.build-job > a') + expect(build_link['data-title']).to eq('test - failed <br> (unknown failure)') + end + end + end + + context 'when a failed job has been retried' do + let!(:build) { create(:ci_build, :failed, :retried, :trace_artifact, pipeline: pipeline) } + + it 'displays the failure reason and retried label' do + within('.builds-container') do + build_link = first('.build-job > a') + expect(build_link['data-title']).to eq('test - failed <br> (unknown failure) (retried)') + end + end + end end diff --git a/spec/features/projects/jobs/user_browses_jobs_spec.rb b/spec/features/projects/jobs/user_browses_jobs_spec.rb index 767777f3bf9..36ebbeadd4a 100644 --- a/spec/features/projects/jobs/user_browses_jobs_spec.rb +++ b/spec/features/projects/jobs/user_browses_jobs_spec.rb @@ -29,4 +29,15 @@ describe 'User browses jobs' do expect(ci_lint_tool_link[:href]).to end_with(ci_lint_path) end end + + context 'with a failed job' do + let!(:build) { create(:ci_build, :coverage, :failed, pipeline: pipeline) } + + it 'displays a tooltip with the failure reason' do + page.within('.ci-table') do + failed_job_link = page.find('.ci-failed') + expect(failed_job_link[:title]).to eq('Failed <br> (unknown failure)') + end + end + end end diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb index 5d311f2dde3..a00db6dd161 100644 --- a/spec/features/projects/jobs_spec.rb +++ b/spec/features/projects/jobs_spec.rb @@ -113,7 +113,7 @@ feature 'Jobs' do describe "GET /:project/jobs/:id" do context "Job from project" do - let(:job) { create(:ci_build, :success, pipeline: pipeline) } + let(:job) { create(:ci_build, :success, :trace_live, pipeline: pipeline) } before do visit project_job_path(project, job) @@ -136,7 +136,7 @@ feature 'Jobs' do end context 'when job is not running', :js do - let(:job) { create(:ci_build, :success, pipeline: pipeline) } + let(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) } before do visit project_job_path(project, job) @@ -153,7 +153,7 @@ feature 'Jobs' do end context 'if job failed' do - let(:job) { create(:ci_build, :failed, pipeline: pipeline) } + let(:job) { create(:ci_build, :failed, :trace_artifact, pipeline: pipeline) } before do visit project_job_path(project, job) @@ -339,7 +339,7 @@ feature 'Jobs' do context 'job is successfull and has deployment' do let(:deployment) { create(:deployment) } - let(:job) { create(:ci_build, :success, environment: environment.name, deployments: [deployment], pipeline: pipeline) } + let(:job) { create(:ci_build, :success, :trace_artifact, environment: environment.name, deployments: [deployment], pipeline: pipeline) } it 'shows a link for the job' do visit project_job_path(project, job) @@ -349,7 +349,7 @@ feature 'Jobs' do end context 'job is complete and not successful' do - let(:job) { create(:ci_build, :failed, environment: environment.name, pipeline: pipeline) } + let(:job) { create(:ci_build, :failed, :trace_artifact, environment: environment.name, pipeline: pipeline) } it 'shows a link for the job' do visit project_job_path(project, job) @@ -360,7 +360,7 @@ feature 'Jobs' do context 'job creates a new deployment' do let!(:deployment) { create(:deployment, environment: environment, sha: project.commit.id) } - 
let(:job) { create(:ci_build, :success, environment: environment.name, pipeline: pipeline) } + let(:job) { create(:ci_build, :success, :trace_artifact, environment: environment.name, pipeline: pipeline) } it 'shows a link to latest deployment' do visit project_job_path(project, job) @@ -379,6 +379,7 @@ feature 'Jobs' do end it 'shows manual action empty state' do + expect(page).to have_content(job.detailed_status(user).illustration[:title]) expect(page).to have_content('This job requires a manual action') expect(page).to have_content('This job depends on a user to trigger its process. Often they are used to deploy code to production environments') expect(page).to have_link('Trigger this manual action') @@ -402,6 +403,7 @@ feature 'Jobs' do end it 'shows empty state' do + expect(page).to have_content(job.detailed_status(user).illustration[:title]) expect(page).to have_content('This job has not been triggered yet') expect(page).to have_content('This job depends on upstream jobs that need to succeed in order for this job to be triggered') end @@ -415,10 +417,64 @@ feature 'Jobs' do end it 'shows pending empty state' do + expect(page).to have_content(job.detailed_status(user).illustration[:title]) expect(page).to have_content('This job has not started yet') expect(page).to have_content('This job is in pending state and is waiting to be picked by a runner') end end + + context 'Canceled job' do + context 'with log' do + let(:job) { create(:ci_build, :canceled, :trace_artifact, pipeline: pipeline) } + + before do + visit project_job_path(project, job) + end + + it 'renders job log' do + expect(page).to have_selector('.js-build-output') + end + end + + context 'without log' do + let(:job) { create(:ci_build, :canceled, pipeline: pipeline) } + + before do + visit project_job_path(project, job) + end + + it 'renders empty state' do + expect(page).to have_content(job.detailed_status(user).illustration[:title]) + expect(page).not_to have_selector('.js-build-output') + expect(page).to have_content('This job has been canceled') + end + end + end + + context 'Skipped job' do + let(:job) { create(:ci_build, :skipped, pipeline: pipeline) } + + before do + visit project_job_path(project, job) + end + + it 'renders empty state' do + expect(page).to have_content(job.detailed_status(user).illustration[:title]) + expect(page).not_to have_selector('.js-build-output') + expect(page).to have_content('This job has been skipped') + end + end + + context 'when job is failed but has no trace' do + let(:job) { create(:ci_build, :failed, pipeline: pipeline) } + + it 'renders empty state' do + visit project_job_path(project, job) + + expect(job).not_to have_trace + expect(page).to have_content('This job does not have a trace.') + end + end end describe "POST /:project/jobs/:id/cancel", :js do @@ -435,16 +491,18 @@ feature 'Jobs' do end end - describe "POST /:project/jobs/:id/retry" do + describe "POST /:project/jobs/:id/retry", :js do context "Job from project", :js do before do job.run! + job.cancel! 
visit project_job_path(project, job) - find('.js-cancel-job').click() + wait_for_requests + find('.js-retry-button').click end - it 'shows the right status and buttons', :js do + it 'shows the right status and buttons' do page.within('aside.right-sidebar') do expect(page).to have_content 'Cancel' end diff --git a/spec/features/projects/labels/user_creates_labels_spec.rb b/spec/features/projects/labels/user_creates_labels_spec.rb new file mode 100644 index 00000000000..9fd7f3ee775 --- /dev/null +++ b/spec/features/projects/labels/user_creates_labels_spec.rb @@ -0,0 +1,88 @@ +require "spec_helper" + +describe "User creates labels" do + set(:project) { create(:project_empty_repo, :public) } + set(:user) { create(:user) } + + shared_examples_for "label creation" do + it "creates new label" do + title = "bug" + + create_label(title) + + page.within(".other-labels .manage-labels-list") do + expect(page).to have_content(title) + end + end + end + + context "in project" do + before do + project.add_master(user) + sign_in(user) + + visit(new_project_label_path(project)) + end + + context "when data is valid" do + include_examples "label creation" + end + + context "when data is invalid" do + context "when title is invalid" do + it "shows error message" do + create_label("") + + page.within(".label-form") do + expect(page).to have_content("Title can't be blank") + end + end + end + + context "when color is invalid" do + it "shows error message" do + create_label("feature", "#12") + + page.within(".label-form") do + expect(page).to have_content("Color must be a valid color code") + end + end + end + end + + context "when label already exists" do + let!(:label) { create(:label, project: project) } + + it "shows error message" do + create_label(label.title) + + page.within(".label-form") do + expect(page).to have_content("Title has already been taken") + end + end + end + end + + context "in another project" do + set(:another_project) { create(:project_empty_repo, :public) } + + before do + create(:label, project: project, title: "bug") # Create label for `project` (not `another_project`) project. 
+ + another_project.add_master(user) + sign_in(user) + + visit(new_project_label_path(another_project)) + end + + include_examples "label creation" + end + + private + + def create_label(title, color = "#F95610") + fill_in("Title", with: title) + fill_in("Background color", with: color) + click_button("Create label") + end +end diff --git a/spec/features/projects/labels/user_edits_labels_spec.rb b/spec/features/projects/labels/user_edits_labels_spec.rb new file mode 100644 index 00000000000..d1041ff5c1e --- /dev/null +++ b/spec/features/projects/labels/user_edits_labels_spec.rb @@ -0,0 +1,25 @@ +require "spec_helper" + +describe "User edits labels" do + set(:project) { create(:project_empty_repo, :public) } + set(:label) { create(:label, project: project) } + set(:user) { create(:user) } + + before do + project.add_master(user) + sign_in(user) + + visit(edit_project_label_path(project, label)) + end + + it "updates label's title" do + new_title = "fix" + + fill_in("Title", with: new_title) + click_button("Save changes") + + page.within(".other-labels .manage-labels-list") do + expect(page).to have_content(new_title).and have_no_content(label.title) + end + end +end diff --git a/spec/features/projects/labels/user_removes_labels_spec.rb b/spec/features/projects/labels/user_removes_labels_spec.rb new file mode 100644 index 00000000000..f4fda6de465 --- /dev/null +++ b/spec/features/projects/labels/user_removes_labels_spec.rb @@ -0,0 +1,52 @@ +require "spec_helper" + +describe "User removes labels" do + let(:project) { create(:project_empty_repo, :public) } + let(:user) { create(:user) } + + before do + project.add_master(user) + sign_in(user) + end + + context "when one label" do + let!(:label) { create(:label, project: project) } + + before do + visit(project_labels_path(project)) + end + + it "removes label" do + page.within(".labels") do + page.first(".label-list-item") do + first(".remove-row").click + first(:link, "Delete label").click + end + end + + expect(page).to have_content("Label was removed").and have_no_content(label.title) + end + end + + context "when many labels", :js do + before do + create_list(:label, 3, project: project) + + visit(project_labels_path(project)) + end + + it "removes all labels" do + page.within(".labels") do + loop do + li = page.first(".label-list-item") + break unless li + + li.click_link("Delete") + click_link("Delete label") + end + + expect(page).to have_content("Generate a default set of labels").and have_content("New label") + end + end + end +end diff --git a/spec/features/projects/labels/user_views_labels_spec.rb b/spec/features/projects/labels/user_views_labels_spec.rb new file mode 100644 index 00000000000..0cbeca4e392 --- /dev/null +++ b/spec/features/projects/labels/user_views_labels_spec.rb @@ -0,0 +1,23 @@ +require "spec_helper" + +describe "User views labels" do + set(:project) { create(:project_empty_repo, :public) } + set(:user) { create(:user) } + + LABEL_TITLES = %w[bug enhancement feature].freeze + + before do + LABEL_TITLES.each { |title| create(:label, project: project, title: title) } + + project.add_guest(user) + sign_in(user) + + visit(project_labels_path(project)) + end + + it "shows all labels" do + page.within('.other-labels .manage-labels-list') do + LABEL_TITLES.each { |title| expect(page).to have_content(title) } + end + end +end diff --git a/spec/features/projects/merge_request_button_spec.rb b/spec/features/projects/merge_request_button_spec.rb index 40689964b91..b571d5a0e26 100644 --- 
a/spec/features/projects/merge_request_button_spec.rb +++ b/spec/features/projects/merge_request_button_spec.rb @@ -45,6 +45,18 @@ feature 'Merge Request button' do end end end + + context 'when the project is archived' do + it 'hides the link' do + project.update!(archived: true) + + visit url + + within("#content-body") do + expect(page).not_to have_link(label) + end + end + end end context 'logged in as non-member' do diff --git a/spec/features/projects/merge_requests/user_reverts_merge_request_spec.rb b/spec/features/projects/merge_requests/user_reverts_merge_request_spec.rb index a41d683dbbb..f3e97bc9eb2 100644 --- a/spec/features/projects/merge_requests/user_reverts_merge_request_spec.rb +++ b/spec/features/projects/merge_requests/user_reverts_merge_request_spec.rb @@ -56,4 +56,12 @@ describe 'User reverts a merge request', :js do expect(page).to have_content('The merge request has been successfully reverted. You can now submit a merge request to get this change into the original branch.') end + + it 'cannot revert a merge requests for an archived project' do + project.update!(archived: true) + + visit(merge_request_path(merge_request)) + + expect(page).not_to have_link('Revert') + end end diff --git a/spec/features/projects/merge_requests/user_views_open_merge_requests_spec.rb b/spec/features/projects/merge_requests/user_views_open_merge_requests_spec.rb index bf95dbb7d09..115e548b691 100644 --- a/spec/features/projects/merge_requests/user_views_open_merge_requests_spec.rb +++ b/spec/features/projects/merge_requests/user_views_open_merge_requests_spec.rb @@ -94,6 +94,18 @@ describe 'User views open merge requests' do end include_examples 'shows merge requests' + + it 'shows the new merge request button' do + expect(page).to have_link('New merge request') + end + + context 'when the project is archived' do + let(:project) { create(:project, :public, :repository, :archived) } + + it 'hides the new merge request button' do + expect(page).not_to have_link('New merge request') + end + end end end diff --git a/spec/features/projects/milestones/milestones_sorting_spec.rb b/spec/features/projects/milestones/milestones_sorting_spec.rb index c531b81e04d..b64786d4eec 100644 --- a/spec/features/projects/milestones/milestones_sorting_spec.rb +++ b/spec/features/projects/milestones/milestones_sorting_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' feature 'Milestones sorting', :js do - include SortingHelper let(:user) { create(:user) } let(:project) { create(:project, name: 'test', namespace: user.namespace) } diff --git a/spec/features/projects/pages_spec.rb b/spec/features/projects/pages_spec.rb index 233d2e67b9d..bdd49f731c7 100644 --- a/spec/features/projects/pages_spec.rb +++ b/spec/features/projects/pages_spec.rb @@ -40,11 +40,6 @@ feature 'Pages' do end context 'when support for external domains is disabled' do - before do - allow(Gitlab.config.pages).to receive(:external_http).and_return(nil) - allow(Gitlab.config.pages).to receive(:external_https).and_return(nil) - end - it 'renders message that support is disabled' do visit project_pages_path(project) @@ -52,7 +47,9 @@ feature 'Pages' do end end - context 'when pages are exposed on external HTTP address' do + context 'when pages are exposed on external HTTP address', :http_pages_enabled do + given(:project) { create(:project, pages_https_only: false) } + shared_examples 'adds new domain' do it 'adds new domain' do visit new_project_pages_domain_path(project) @@ -64,11 +61,6 @@ feature 'Pages' do end end - before do - 
allow(Gitlab.config.pages).to receive(:external_http).and_return(['1.1.1.1:80']) - allow(Gitlab.config.pages).to receive(:external_https).and_return(nil) - end - it 'allows to add new domain' do visit project_pages_path(project) @@ -80,13 +72,13 @@ feature 'Pages' do context 'when project in group namespace' do it_behaves_like 'adds new domain' do let(:group) { create :group } - let(:project) { create :project, namespace: group } + let(:project) { create(:project, namespace: group, pages_https_only: false) } end end context 'when pages domain is added' do before do - project.pages_domains.create!(domain: 'my.test.domain.com') + create(:pages_domain, project: project, domain: 'my.test.domain.com') visit new_project_pages_domain_path(project) end @@ -104,7 +96,7 @@ feature 'Pages' do end end - context 'when pages are exposed on external HTTPS address' do + context 'when pages are exposed on external HTTPS address', :https_pages_enabled do let(:certificate_pem) do <<~PEM -----BEGIN CERTIFICATE----- @@ -145,11 +137,6 @@ feature 'Pages' do KEY end - before do - allow(Gitlab.config.pages).to receive(:external_http).and_return(['1.1.1.1:80']) - allow(Gitlab.config.pages).to receive(:external_https).and_return(['1.1.1.1:443']) - end - it 'adds new domain with certificate' do visit new_project_pages_domain_path(project) @@ -163,7 +150,7 @@ feature 'Pages' do describe 'updating the certificate for an existing domain' do let!(:domain) do - create(:pages_domain, :with_key, :with_certificate, project: project) + create(:pages_domain, project: project) end it 'allows the certificate to be updated' do @@ -237,6 +224,70 @@ feature 'Pages' do it_behaves_like 'no pages deployed' end + describe 'HTTPS settings', :js, :https_pages_enabled do + background do + project.namespace.update(owner: user) + + allow_any_instance_of(Project).to receive(:pages_deployed?) 
{ true } + end + + scenario 'tries to change the setting' do + visit project_pages_path(project) + expect(page).to have_content("Force domains with SSL certificates to use HTTPS") + + uncheck :project_pages_https_only + + click_button 'Save' + + expect(page).to have_text('Your changes have been saved') + expect(page).not_to have_checked_field('project_pages_https_only') + end + + context 'setting could not be updated' do + let(:service) { instance_double('Projects::UpdateService') } + + before do + allow(Projects::UpdateService).to receive(:new).and_return(service) + allow(service).to receive(:execute).and_return(status: :error) + end + + scenario 'tries to change the setting' do + visit project_pages_path(project) + + uncheck :project_pages_https_only + + click_button 'Save' + + expect(page).to have_text('Something went wrong on our end') + end + end + + context 'non-HTTPS domain exists' do + given(:project) { create(:project, pages_https_only: false) } + + before do + create(:pages_domain, :without_key, :without_certificate, project: project) + end + + scenario 'the setting is disabled' do + visit project_pages_path(project) + + expect(page).to have_field(:project_pages_https_only, disabled: true) + expect(page).not_to have_button('Save') + end + end + + context 'HTTPS pages are disabled', :https_pages_disabled do + scenario 'the setting is unavailable' do + visit project_pages_path(project) + + expect(page).not_to have_field(:project_pages_https_only) + expect(page).not_to have_content('Force domains with SSL certificates to use HTTPS') + expect(page).not_to have_button('Save') + end + end + end + describe 'Remove page' do context 'when user is the owner' do let(:project) { create :project, :repository } diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb index 65e24862d43..065d00d51d4 100644 --- a/spec/features/projects/pipeline_schedules_spec.rb +++ b/spec/features/projects/pipeline_schedules_spec.rb @@ -160,9 +160,9 @@ feature 'Pipeline Schedules', :js do click_link 'New schedule' fill_in_schedule_form all('[name="schedule[variables_attributes][][key]"]')[0].set('AAA') - all('[name="schedule[variables_attributes][][value]"]')[0].set('AAA123') + all('[name="schedule[variables_attributes][][secret_value]"]')[0].set('AAA123') all('[name="schedule[variables_attributes][][key]"]')[1].set('BBB') - all('[name="schedule[variables_attributes][][value]"]')[1].set('BBB123') + all('[name="schedule[variables_attributes][][secret_value]"]')[1].set('BBB123') save_pipeline_schedule end diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb index 266ef693d0b..990e5c4d9df 100644 --- a/spec/features/projects/pipelines/pipeline_spec.rb +++ b/spec/features/projects/pipelines/pipeline_spec.rb @@ -115,6 +115,13 @@ describe 'Pipeline', :js do expect(page).not_to have_content('Retry job') end + + it 'should include the failure reason' do + page.within('#ci-badge-test') do + build_link = page.find('.js-pipeline-graph-job-link') + expect(build_link['data-original-title']).to eq('test - failed <br> (unknown failure)') + end + end end context 'when pipeline has manual jobs' do @@ -289,6 +296,15 @@ describe 'Pipeline', :js do it { expect(build_manual.reload).to be_pending } end + + context 'failed jobs' do + it 'displays a tooltip with the failure reason' do + page.within('.ci-table') do + failed_job_link = page.find('.ci-failed') + expect(failed_job_link[:title]).to eq('Failed <br> (unknown 
failure)') + end + end + end end describe 'GET /:project/pipelines/:id/failures' do diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb index 33ad59abfdf..705ba78a0b7 100644 --- a/spec/features/projects/pipelines/pipelines_spec.rb +++ b/spec/features/projects/pipelines/pipelines_spec.rb @@ -349,6 +349,18 @@ describe 'Pipelines', :js do it { expect(page).not_to have_selector('.build-artifacts') } end + + context 'with trace artifact' do + before do + create(:ci_build, :success, :trace_artifact, pipeline: pipeline) + + visit_project_pipelines + end + + it 'does not show trace artifact as artifacts' do + expect(page).not_to have_selector('.build-artifacts') + end + end end context 'mini pipeline graph' do @@ -382,6 +394,23 @@ describe 'Pipelines', :js do expect(build.reload).to be_canceled end end + + context 'for a failed pipeline' do + let!(:build) do + create(:ci_build, :failed, pipeline: pipeline, + stage: 'build', + name: 'build') + end + + it 'should display the failure reason' do + find('.js-builds-dropdown-button').click + + within('.js-builds-dropdown-list') do + build_element = page.find('.mini-pipeline-graph-dropdown-item') + expect(build_element['data-title']).to eq('build - failed <br> (unknown failure)') + end + end + end end context 'with pagination' do @@ -488,7 +517,7 @@ describe 'Pipelines', :js do end it 'creates a new pipeline' do - expect { click_on 'Create pipeline' } + expect { click_on 'Run pipeline' } .to change { Ci::Pipeline.count }.by(1) expect(Ci::Pipeline.last).to be_web @@ -497,7 +526,7 @@ describe 'Pipelines', :js do context 'without gitlab-ci.yml' do before do - click_on 'Create pipeline' + click_on 'Run pipeline' end it { expect(page).to have_content('Missing .gitlab-ci.yml file') } @@ -510,7 +539,7 @@ describe 'Pipelines', :js do click_link 'master' end - expect { click_on 'Create pipeline' } + expect { click_on 'Run pipeline' } .to change { Ci::Pipeline.count }.by(1) end end @@ -528,7 +557,7 @@ describe 'Pipelines', :js do it 'has field to add a new pipeline' do expect(page).to have_selector('.js-branch-select') expect(find('.js-branch-select')).to have_content project.default_branch - expect(page).to have_content('Create for') + expect(page).to have_content('Run on') end end diff --git a/spec/features/projects/project_settings_spec.rb b/spec/features/projects/project_settings_spec.rb deleted file mode 100644 index a3ea778d401..00000000000 --- a/spec/features/projects/project_settings_spec.rb +++ /dev/null @@ -1,205 +0,0 @@ -require 'spec_helper' - -describe 'Edit Project Settings' do - include Select2Helper - - let(:user) { create(:user) } - let(:project) { create(:project, namespace: user.namespace, path: 'gitlab', name: 'sample') } - - before do - sign_in(user) - end - - describe 'Project settings section', :js do - it 'shows errors for invalid project name' do - visit edit_project_path(project) - fill_in 'project_name_edit', with: 'foo&bar' - page.within('.general-settings') do - click_button 'Save changes' - end - expect(page).to have_field 'project_name_edit', with: 'foo&bar' - expect(page).to have_content "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'." 
- expect(page).to have_button 'Save changes' - end - - it 'shows a successful notice when the project is updated' do - visit edit_project_path(project) - fill_in 'project_name_edit', with: 'hello world' - page.within('.general-settings') do - click_button 'Save changes' - end - expect(page).to have_content "Project 'hello world' was successfully updated." - end - end - - describe 'Merge request settings section' do - it 'shows "Merge commit" strategy' do - visit edit_project_path(project) - - page.within '.merge-requests-feature' do - expect(page).to have_content 'Merge commit' - end - end - - it 'shows "Merge commit with semi-linear history " strategy' do - visit edit_project_path(project) - - page.within '.merge-requests-feature' do - expect(page).to have_content 'Merge commit with semi-linear history' - end - end - - it 'shows "Fast-forward merge" strategy' do - visit edit_project_path(project) - - page.within '.merge-requests-feature' do - expect(page).to have_content 'Fast-forward merge' - end - end - end - - describe 'Rename repository section' do - context 'with invalid characters' do - it 'shows errors for invalid project path/name' do - rename_project(project, name: 'foo&bar', path: 'foo&bar') - expect(page).to have_field 'Project name', with: 'foo&bar' - expect(page).to have_field 'Path', with: 'foo&bar' - expect(page).to have_content "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'." - expect(page).to have_content "Path can contain only letters, digits, '_', '-' and '.'. Cannot start with '-', end in '.git' or end in '.atom'" - end - end - - context 'when changing project name' do - it 'renames the repository' do - rename_project(project, name: 'bar') - expect(find('.breadcrumbs')).to have_content(project.name) - end - - context 'with emojis' do - it 'shows error for invalid project name' do - rename_project(project, name: '🚀 foo bar ☁️') - expect(page).to have_field 'Project name', with: '🚀 foo bar ☁️' - expect(page).not_to have_content "Name can contain only letters, digits, emojis '_', '.', dash and space. It must start with letter, digit, emoji or '_'." 
- end - end - end - - context 'when changing project path' do - let(:project) { create(:project, :repository, namespace: user.namespace, name: 'gitlabhq') } - - before(:context) do - TestEnv.clean_test_path - end - - after do - TestEnv.clean_test_path - end - - specify 'the project is accessible via the new path' do - rename_project(project, path: 'bar') - new_path = namespace_project_path(project.namespace, 'bar') - visit new_path - expect(current_path).to eq(new_path) - expect(find('.breadcrumbs')).to have_content(project.name) - end - - specify 'the project is accessible via a redirect from the old path' do - old_path = project_path(project) - rename_project(project, path: 'bar') - new_path = namespace_project_path(project.namespace, 'bar') - visit old_path - expect(current_path).to eq(new_path) - expect(find('.breadcrumbs')).to have_content(project.name) - end - - context 'and a new project is added with the same path' do - it 'overrides the redirect' do - old_path = project_path(project) - rename_project(project, path: 'bar') - new_project = create(:project, namespace: user.namespace, path: 'gitlabhq', name: 'quz') - visit old_path - expect(current_path).to eq(old_path) - expect(find('.breadcrumbs')).to have_content(new_project.name) - end - end - end - end - - describe 'Transfer project section', :js do - let!(:project) { create(:project, :repository, namespace: user.namespace, name: 'gitlabhq') } - let!(:group) { create(:group) } - - before(:context) do - TestEnv.clean_test_path - end - - before do - group.add_owner(user) - end - - after do - TestEnv.clean_test_path - end - - specify 'the project is accessible via the new path' do - transfer_project(project, group) - new_path = namespace_project_path(group, project) - - visit new_path - wait_for_requests - - expect(current_path).to eq(new_path) - expect(find('.breadcrumbs')).to have_content(project.name) - end - - specify 'the project is accessible via a redirect from the old path' do - old_path = project_path(project) - transfer_project(project, group) - new_path = namespace_project_path(group, project) - - visit old_path - wait_for_requests - - expect(current_path).to eq(new_path) - expect(find('.breadcrumbs')).to have_content(project.name) - end - - context 'and a new project is added with the same path' do - it 'overrides the redirect' do - old_path = project_path(project) - transfer_project(project, group) - new_project = create(:project, namespace: user.namespace, path: 'gitlabhq', name: 'quz') - visit old_path - expect(current_path).to eq(old_path) - expect(find('.breadcrumbs')).to have_content(new_project.name) - end - end - end -end - -def rename_project(project, name: nil, path: nil) - visit edit_project_path(project) - fill_in('project_name', with: name) if name - fill_in('Path', with: path) if path - click_button('Rename project') - wait_for_edit_project_page_reload - project.reload -end - -def transfer_project(project, namespace) - visit edit_project_path(project) - select2(namespace.id, from: '#new_namespace_id') - click_button('Transfer project') - confirm_transfer_modal - wait_for_edit_project_page_reload - project.reload -end - -def confirm_transfer_modal - fill_in('confirm_name_input', with: project.path) - click_button 'Confirm' -end - -def wait_for_edit_project_page_reload - expect(find('.project-edit-container')).to have_content('Rename repository') -end diff --git a/spec/features/projects/settings/forked_project_settings_spec.rb b/spec/features/projects/settings/forked_project_settings_spec.rb index 
28954a4fb40..a4d1b78b83b 100644 --- a/spec/features/projects/settings/forked_project_settings_spec.rb +++ b/spec/features/projects/settings/forked_project_settings_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -feature 'Settings for a forked project', :js do +describe 'Projects > Settings > For a forked project', :js do include ProjectForksHelper let(:user) { create(:user) } let(:original_project) { create(:project) } diff --git a/spec/features/projects/settings/integration_settings_spec.rb b/spec/features/projects/settings/integration_settings_spec.rb index f6a1a46df11..5178d63050e 100644 --- a/spec/features/projects/settings/integration_settings_spec.rb +++ b/spec/features/projects/settings/integration_settings_spec.rb @@ -1,20 +1,20 @@ require 'spec_helper' -feature 'Integration settings' do +describe 'Projects > Settings > Integration settings' do let(:project) { create(:project) } let(:user) { create(:user) } let(:role) { :developer } let(:integrations_path) { project_settings_integrations_path(project) } - background do + before do sign_in(user) project.add_role(user, role) end context 'for developer' do - given(:role) { :developer } + let(:role) { :developer } - scenario 'to be disallowed to view' do + it 'to be disallowed to view' do visit integrations_path expect(page.status_code).to eq(404) @@ -22,13 +22,13 @@ feature 'Integration settings' do end context 'for master' do - given(:role) { :master } + let(:role) { :master } context 'Webhooks' do let(:hook) { create(:project_hook, :all_events_enabled, enable_ssl_verification: true, project: project) } let(:url) { generate(:url) } - scenario 'show list of webhooks' do + it 'show list of webhooks' do hook visit integrations_path @@ -46,7 +46,7 @@ feature 'Integration settings' do expect(page).to have_content('Wiki page events') end - scenario 'create webhook' do + it 'create webhook' do visit integrations_path fill_in 'hook_url', with: url @@ -63,7 +63,7 @@ feature 'Integration settings' do expect(page).to have_content('Job events') end - scenario 'edit existing webhook' do + it 'edit existing webhook' do hook visit integrations_path @@ -76,7 +76,7 @@ feature 'Integration settings' do expect(page).to have_content(url) end - scenario 'test existing webhook', :js do + it 'test existing webhook', :js do WebMock.stub_request(:post, hook.url) visit integrations_path @@ -87,14 +87,14 @@ feature 'Integration settings' do end context 'remove existing webhook' do - scenario 'from webhooks list page' do + it 'from webhooks list page' do hook visit integrations_path expect { click_link 'Remove' }.to change(ProjectHook, :count).by(-1) end - scenario 'from webhook edit page' do + it 'from webhook edit page' do hook visit integrations_path click_link 'Edit' @@ -108,7 +108,7 @@ feature 'Integration settings' do let(:hook) { create(:project_hook, project: project) } let(:hook_log) { create(:web_hook_log, web_hook: hook, internal_error_message: 'some error') } - scenario 'show list of hook logs' do + it 'show list of hook logs' do hook_log visit edit_project_hook_path(project, hook) @@ -116,7 +116,7 @@ feature 'Integration settings' do expect(page).to have_content(hook_log.url) end - scenario 'show hook log details' do + it 'show hook log details' do hook_log visit edit_project_hook_path(project, hook) click_link 'View details' @@ -126,7 +126,7 @@ feature 'Integration settings' do expect(page).to have_content('Resend Request') end - scenario 'retry hook log' do + it 'retry hook log' do WebMock.stub_request(:post, hook.url) hook_log diff --git 
a/spec/features/projects/settings/lfs_settings_spec.rb b/spec/features/projects/settings/lfs_settings_spec.rb new file mode 100644 index 00000000000..0fd28a5681c --- /dev/null +++ b/spec/features/projects/settings/lfs_settings_spec.rb @@ -0,0 +1,21 @@ +require 'rails_helper' + +describe 'Projects > Settings > LFS settings' do + let(:admin) { create(:admin) } + let(:project) { create(:project) } + + context 'LFS enabled setting' do + before do + allow(Gitlab.config.lfs).to receive(:enabled).and_return(true) + + sign_in(admin) + end + + it 'displays the correct elements', :js do + visit edit_project_path(project) + + expect(page).to have_content('Git Large File Storage') + expect(page).to have_selector('input[name="project[lfs_enabled]"] + button', visible: true) + end + end +end diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb index d0720855564..d9020333f28 100644 --- a/spec/features/projects/settings/pipelines_settings_spec.rb +++ b/spec/features/projects/settings/pipelines_settings_spec.rb @@ -1,19 +1,19 @@ require 'spec_helper' -feature "Pipelines settings" do +describe "Projects > Settings > Pipelines settings" do let(:project) { create(:project) } let(:user) { create(:user) } let(:role) { :developer } - background do + before do sign_in(user) project.add_role(user, role) end context 'for developer' do - given(:role) { :developer } + let(:role) { :developer } - scenario 'to be disallowed to view' do + it 'to be disallowed to view' do visit project_settings_ci_cd_path(project) expect(page.status_code).to eq(404) @@ -21,9 +21,9 @@ feature "Pipelines settings" do end context 'for master' do - given(:role) { :master } + let(:role) { :master } - scenario 'be allowed to change' do + it 'be allowed to change' do visit project_settings_ci_cd_path(project) fill_in('Test coverage parsing', with: 'coverage_regex') @@ -34,7 +34,7 @@ feature "Pipelines settings" do expect(page).to have_field('Test coverage parsing', with: 'coverage_regex') end - scenario 'updates auto_cancel_pending_pipelines' do + it 'updates auto_cancel_pending_pipelines' do visit project_settings_ci_cd_path(project) page.check('Auto-cancel redundant, pending pipelines') diff --git a/spec/features/projects/settings/project_badges_spec.rb b/spec/features/projects/settings/project_badges_spec.rb new file mode 100644 index 00000000000..cc3551a4c21 --- /dev/null +++ b/spec/features/projects/settings/project_badges_spec.rb @@ -0,0 +1,125 @@ +require 'spec_helper' + +feature 'Project Badges' do + include WaitForRequests + + let(:user) { create(:user) } + let(:group) { create(:group) } + let(:project) { create(:project, namespace: group) } + let(:badge_link_url) { 'https://gitlab.com/gitlab-org/gitlab-ee/commits/master'} + let(:badge_image_url) { 'https://gitlab.com/gitlab-org/gitlab-ee/badges/master/build.svg'} + let!(:project_badge) { create(:project_badge, project: project) } + let!(:group_badge) { create(:group_badge, group: group) } + + before do + group.add_master(user) + sign_in(user) + + visit(project_settings_badges_path(project)) + end + + it 'shows a list of badges', :js do + page.within '.badge-settings' do + wait_for_requests + + rows = all('.panel-body > div') + expect(rows.length).to eq 2 + expect(rows[0]).to have_content group_badge.link_url + expect(rows[1]).to have_content project_badge.link_url + end + end + + context 'adding a badge', :js do + it 'user can preview a badge' do + page.within '.badge-settings form' do + fill_in 
'badge-link-url', with: badge_link_url + fill_in 'badge-image-url', with: badge_image_url + within '#badge-preview' do + expect(find('a')[:href]).to eq badge_link_url + expect(find('a img')[:src]).to eq badge_image_url + end + end + end + + it do + page.within '.badge-settings' do + fill_in 'badge-link-url', with: badge_link_url + fill_in 'badge-image-url', with: badge_image_url + + click_button 'Add badge' + wait_for_requests + + within '.panel-body' do + expect(find('a')[:href]).to eq badge_link_url + expect(find('a img')[:src]).to eq badge_image_url + end + end + end + end + + context 'editing a badge', :js do + it 'form is shown when clicking edit button in list' do + page.within '.badge-settings' do + wait_for_requests + rows = all('.panel-body > div') + expect(rows.length).to eq 2 + rows[1].find('[aria-label="Edit"]').click + + within 'form' do + expect(find('#badge-link-url').value).to eq project_badge.link_url + expect(find('#badge-image-url').value).to eq project_badge.image_url + end + end + end + + it 'updates a badge when submitting the edit form' do + page.within '.badge-settings' do + wait_for_requests + rows = all('.panel-body > div') + expect(rows.length).to eq 2 + rows[1].find('[aria-label="Edit"]').click + within 'form' do + fill_in 'badge-link-url', with: badge_link_url + fill_in 'badge-image-url', with: badge_image_url + + click_button 'Save changes' + wait_for_requests + end + + rows = all('.panel-body > div') + expect(rows.length).to eq 2 + expect(rows[1]).to have_content badge_link_url + end + end + end + + context 'deleting a badge', :js do + def click_delete_button(badge_row) + badge_row.find('[aria-label="Delete"]').click + end + + it 'shows a modal when deleting a badge' do + wait_for_requests + rows = all('.panel-body > div') + expect(rows.length).to eq 2 + + click_delete_button(rows[1]) + + expect(find('.modal .modal-title')).to have_content 'Delete badge?' 
+ end + + it 'deletes a badge when confirming the modal' do + wait_for_requests + rows = all('.panel-body > div') + expect(rows.length).to eq 2 + click_delete_button(rows[1]) + + find('.modal .btn-danger').click + wait_for_requests + + rows = all('.panel-body > div') + expect(rows.length).to eq 1 + expect(rows[0]).to have_content group_badge.link_url + end + end +end diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb index 14670e91006..e1dfe617691 100644 --- a/spec/features/projects/settings/repository_settings_spec.rb +++ b/spec/features/projects/settings/repository_settings_spec.rb @@ -1,19 +1,19 @@ require 'spec_helper' -feature 'Repository settings' do +describe 'Projects > Settings > Repository settings' do let(:project) { create(:project_empty_repo) } let(:user) { create(:user) } let(:role) { :developer } - background do + before do project.add_role(user, role) sign_in(user) end context 'for developer' do - given(:role) { :developer } + let(:role) { :developer } - scenario 'is not allowed to view' do + it 'is not allowed to view' do visit project_settings_repository_path(project) expect(page.status_code).to eq(404) @@ -21,14 +21,14 @@ feature 'Repository settings' do end context 'for master' do - given(:role) { :master } + let(:role) { :master } context 'Deploy Keys', :js do let(:private_deploy_key) { create(:deploy_key, title: 'private_deploy_key', public: false) } let(:public_deploy_key) { create(:another_deploy_key, title: 'public_deploy_key', public: true) } let(:new_ssh_key) { attributes_for(:key)[:key] } - scenario 'get list of keys' do + it 'get list of keys' do project.deploy_keys << private_deploy_key project.deploy_keys << public_deploy_key @@ -38,7 +38,7 @@ feature 'Repository settings' do expect(page).to have_content('public_deploy_key') end - scenario 'add a new deploy key' do + it 'add a new deploy key' do visit project_settings_repository_path(project) fill_in 'deploy_key_title', with: 'new_deploy_key' @@ -50,7 +50,7 @@ feature 'Repository settings' do expect(page).to have_content('Write access allowed') end - scenario 'edit an existing deploy key' do + it 'edit an existing deploy key' do project.deploy_keys << private_deploy_key visit project_settings_repository_path(project) @@ -64,7 +64,7 @@ feature 'Repository settings' do expect(page).to have_content('Write access allowed') end - scenario 'edit a deploy key from projects user has access to' do + it 'edit a deploy key from projects user has access to' do project2 = create(:project_empty_repo) project2.add_role(user, role) project2.deploy_keys << private_deploy_key @@ -79,7 +79,7 @@ feature 'Repository settings' do expect(page).to have_content('updated_deploy_key') end - scenario 'remove an existing deploy key' do + it 'remove an existing deploy key' do project.deploy_keys << private_deploy_key visit project_settings_repository_path(project) @@ -88,5 +88,32 @@ feature 'Repository settings' do expect(page).not_to have_content(private_deploy_key.title) end end + + context 'Deploy tokens' do + let!(:deploy_token) { create(:deploy_token, projects: [project]) } + + before do + stub_container_registry_config(enabled: true) + visit project_settings_repository_path(project) + end + + scenario 'view deploy tokens' do + within('.deploy-tokens') do + expect(page).to have_content(deploy_token.name) + expect(page).to have_content('read_repository') + expect(page).to have_content('read_registry') + end + end + + scenario 'add a new deploy token' 
do + fill_in 'deploy_token_name', with: 'new_deploy_key' + fill_in 'deploy_token_expires_at', with: (Date.today + 1.month).to_s + check 'deploy_token_read_repository' + check 'deploy_token_read_registry' + click_button 'Create deploy token' + + expect(page).to have_content('Your new project deploy token has been created') + end + end end end diff --git a/spec/features/projects/user_archives_project_spec.rb b/spec/features/projects/settings/user_archives_project_spec.rb index 72063d13c2a..38c8a8c2468 100644 --- a/spec/features/projects/user_archives_project_spec.rb +++ b/spec/features/projects/settings/user_archives_project_spec.rb @@ -1,21 +1,19 @@ require 'spec_helper' -describe 'User archives a project' do +describe 'Projects > Settings > User archives a project' do let(:user) { create(:user) } before do project.add_master(user) sign_in(user) + + visit edit_project_path(project) end context 'when a project is archived' do let(:project) { create(:project, :archived, namespace: user.namespace) } - before do - visit(edit_project_path(project)) - end - it 'unarchives a project' do expect(page).to have_content('Unarchive project') @@ -28,10 +26,6 @@ describe 'User archives a project' do context 'when a project is unarchived' do let(:project) { create(:project, :repository, namespace: user.namespace) } - before do - visit(edit_project_path(project)) - end - it 'archives a project' do expect(page).to have_content('Archive project') diff --git a/spec/features/projects/settings/user_changes_avatar_spec.rb b/spec/features/projects/settings/user_changes_avatar_spec.rb new file mode 100644 index 00000000000..2dcc79d8a12 --- /dev/null +++ b/spec/features/projects/settings/user_changes_avatar_spec.rb @@ -0,0 +1,44 @@ +require 'spec_helper' + +describe 'Projects > Settings > User changes avatar' do + let(:project) { create(:project, :repository) } + let(:user) { project.creator } + + before do + project.add_master(user) + sign_in(user) + end + + it 'saves the new avatar' do + expect(project.reload.avatar.url).to be_nil + + save_avatar(project) + + expect(project.reload.avatar.url).to eq "/uploads/-/system/project/avatar/#{project.id}/banana_sample.gif" + end + + context 'with an avatar already set' do + before do + save_avatar(project) + end + + it 'is possible to remove the avatar' do + click_link 'Remove avatar' + + expect(page).not_to have_link('Remove avatar') + + expect(project.reload.avatar.url).to be_nil + end + end + + def save_avatar(project) + visit edit_project_path(project) + attach_file( + :project_avatar, + File.join(Rails.root, 'spec', 'fixtures', 'banana_sample.gif') + ) + page.within '.general-settings' do + click_button 'Save changes' + end + end +end diff --git a/spec/features/projects/settings/user_changes_default_branch_spec.rb b/spec/features/projects/settings/user_changes_default_branch_spec.rb new file mode 100644 index 00000000000..e925539351d --- /dev/null +++ b/spec/features/projects/settings/user_changes_default_branch_spec.rb @@ -0,0 +1,20 @@ +require 'spec_helper' + +describe 'Projects > Settings > User changes default branch' do + let(:user) { create(:user) } + let(:project) { create(:project, :repository, namespace: user.namespace) } + + before do + sign_in(user) + visit edit_project_path(project) + end + + it 'allows to change the default branch' do + select 'fix', from: 'project_default_branch' + page.within '.general-settings' do + click_button 'Save changes' + end + + expect(find(:css, 'select#project_default_branch').value).to eq 'fix' + end +end diff --git 
a/spec/features/projects/settings/user_manages_group_links_spec.rb b/spec/features/projects/settings/user_manages_group_links_spec.rb index 91e8059865c..fdf42797091 100644 --- a/spec/features/projects/settings/user_manages_group_links_spec.rb +++ b/spec/features/projects/settings/user_manages_group_links_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'User manages group links' do +describe 'Projects > Settings > User manages group links' do include Select2Helper let(:user) { create(:user) } diff --git a/spec/features/projects/settings/merge_requests_settings_spec.rb b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb index 015db603d33..b6e65fcbda1 100644 --- a/spec/features/projects/settings/merge_requests_settings_spec.rb +++ b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb @@ -1,21 +1,35 @@ require 'spec_helper' -feature 'Project settings > Merge Requests', :js do - let(:project) { create(:project, :public) } +describe 'Projects > Settings > User manages merge request settings' do let(:user) { create(:user) } + let(:project) { create(:project, :public, namespace: user.namespace, path: 'gitlab', name: 'sample') } - background do - project.add_master(user) + before do sign_in(user) + visit edit_project_path(project) end - context 'when Merge Request and Pipelines are initially enabled' do - context 'when Pipelines are initially enabled' do - before do - visit edit_project_path(project) - end + it 'shows "Merge commit" strategy' do + page.within '.merge-requests-feature' do + expect(page).to have_content 'Merge commit' + end + end + + it 'shows "Merge commit with semi-linear history " strategy' do + page.within '.merge-requests-feature' do + expect(page).to have_content 'Merge commit with semi-linear history' + end + end - scenario 'shows the Merge Requests settings' do + it 'shows "Fast-forward merge" strategy' do + page.within '.merge-requests-feature' do + expect(page).to have_content 'Fast-forward merge' + end + end + + context 'when Merge Request and Pipelines are initially enabled', :js do + context 'when Pipelines are initially enabled' do + it 'shows the Merge Requests settings' do expect(page).to have_content('Only allow merge requests to be merged if the pipeline succeeds') expect(page).to have_content('Only allow merge requests to be merged if all discussions are resolved') @@ -29,13 +43,13 @@ feature 'Project settings > Merge Requests', :js do end end - context 'when Pipelines are initially disabled' do + context 'when Pipelines are initially disabled', :js do before do project.project_feature.update_attribute('builds_access_level', ProjectFeature::DISABLED) visit edit_project_path(project) end - scenario 'shows the Merge Requests settings that do not depend on Builds feature' do + it 'shows the Merge Requests settings that do not depend on Builds feature' do expect(page).not_to have_content('Only allow merge requests to be merged if the pipeline succeeds') expect(page).to have_content('Only allow merge requests to be merged if all discussions are resolved') @@ -50,13 +64,13 @@ feature 'Project settings > Merge Requests', :js do end end - context 'when Merge Request are initially disabled' do + context 'when Merge Request are initially disabled', :js do before do project.project_feature.update_attribute('merge_requests_access_level', ProjectFeature::DISABLED) visit edit_project_path(project) end - scenario 'does not show the Merge Requests settings' do + it 'does not show the Merge Requests settings' do 
expect(page).not_to have_content('Only allow merge requests to be merged if the pipeline succeeds') expect(page).not_to have_content('Only allow merge requests to be merged if all discussions are resolved') @@ -70,17 +84,13 @@ feature 'Project settings > Merge Requests', :js do end end - describe 'Checkbox to enable merge request link' do - before do - visit edit_project_path(project) - end - - scenario 'is initially checked' do + describe 'Checkbox to enable merge request link', :js do + it 'is initially checked' do checkbox = find_field('project_printing_merge_request_link_enabled') expect(checkbox).to be_checked end - scenario 'when unchecked sets :printing_merge_request_link_enabled to false' do + it 'when unchecked sets :printing_merge_request_link_enabled to false' do uncheck('project_printing_merge_request_link_enabled') within('.merge-request-settings-form') do click_on('Save changes') diff --git a/spec/features/projects/settings/user_manages_project_members_spec.rb b/spec/features/projects/settings/user_manages_project_members_spec.rb index 0a4f57bcd21..8af95522165 100644 --- a/spec/features/projects/settings/user_manages_project_members_spec.rb +++ b/spec/features/projects/settings/user_manages_project_members_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'User manages project members' do +describe 'Projects > Settings > User manages project members' do let(:group) { create(:group, name: 'OpenSource') } let(:project) { create(:project) } let(:project2) { create(:project) } diff --git a/spec/features/projects/settings/user_renames_a_project_spec.rb b/spec/features/projects/settings/user_renames_a_project_spec.rb new file mode 100644 index 00000000000..64c9af4b706 --- /dev/null +++ b/spec/features/projects/settings/user_renames_a_project_spec.rb @@ -0,0 +1,100 @@ +require 'spec_helper' + +describe 'Projects > Settings > User renames a project' do + let(:user) { create(:user) } + let(:project) { create(:project, namespace: user.namespace, path: 'gitlab', name: 'sample') } + + before do + sign_in(user) + visit edit_project_path(project) + end + + def rename_project(project, name: nil, path: nil) + fill_in('project_name', with: name) if name + fill_in('Path', with: path) if path + click_button('Rename project') + wait_for_edit_project_page_reload + project.reload + end + + def wait_for_edit_project_page_reload + expect(find('.project-edit-container')).to have_content('Rename repository') + end + + context 'with invalid characters' do + it 'shows errors for invalid project path/name' do + rename_project(project, name: 'foo&bar', path: 'foo&bar') + expect(page).to have_field 'Project name', with: 'foo&bar' + expect(page).to have_field 'Path', with: 'foo&bar' + expect(page).to have_content "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'." + expect(page).to have_content "Path can contain only letters, digits, '_', '-' and '.'. Cannot start with '-', end in '.git' or end in '.atom'" + end + end + + it 'shows a successful notice when the project is updated' do + fill_in 'project_name_edit', with: 'hello world' + page.within('.general-settings') do + click_button 'Save changes' + end + + expect(page).to have_content "Project 'hello world' was successfully updated." 
+ end + + context 'when changing project name' do + it 'renames the repository' do + rename_project(project, name: 'bar') + expect(find('.breadcrumbs')).to have_content(project.name) + end + + context 'with emojis' do + it 'shows error for invalid project name' do + rename_project(project, name: '🚀 foo bar ☁️') + expect(page).to have_field 'Project name', with: '🚀 foo bar ☁️' + expect(page).not_to have_content "Name can contain only letters, digits, emojis '_', '.', dash and space. It must start with letter, digit, emoji or '_'." + end + end + end + + context 'when changing project path' do + let(:project) { create(:project, :repository, namespace: user.namespace, name: 'gitlabhq') } + + before(:context) do + TestEnv.clean_test_path + end + + after do + TestEnv.clean_test_path + end + + it 'the project is accessible via the new path' do + rename_project(project, path: 'bar') + new_path = namespace_project_path(project.namespace, 'bar') + visit new_path + + expect(current_path).to eq(new_path) + expect(find('.breadcrumbs')).to have_content(project.name) + end + + it 'the project is accessible via a redirect from the old path' do + old_path = project_path(project) + rename_project(project, path: 'bar') + new_path = namespace_project_path(project.namespace, 'bar') + visit old_path + + expect(current_path).to eq(new_path) + expect(find('.breadcrumbs')).to have_content(project.name) + end + + context 'and a new project is added with the same path' do + it 'overrides the redirect' do + old_path = project_path(project) + rename_project(project, path: 'bar') + new_project = create(:project, namespace: user.namespace, path: 'gitlabhq', name: 'quz') + visit old_path + + expect(current_path).to eq(old_path) + expect(find('.breadcrumbs')).to have_content(new_project.name) + end + end + end +end diff --git a/spec/features/projects/settings/user_tags_project_spec.rb b/spec/features/projects/settings/user_tags_project_spec.rb new file mode 100644 index 00000000000..57b4b1287fa --- /dev/null +++ b/spec/features/projects/settings/user_tags_project_spec.rb @@ -0,0 +1,23 @@ +require 'spec_helper' + +describe 'Projects > Settings > User tags a project' do + let(:user) { create(:user) } + let(:project) { create(:project, namespace: user.namespace) } + + before do + sign_in(user) + visit edit_project_path(project) + end + + context 'when a project is archived' do + it 'unarchives a project' do + fill_in 'Tags', with: 'tag1, tag2' + + page.within '.general-settings' do + click_button 'Save changes' + end + + expect(find_field('Tags').value).to eq 'tag1, tag2' + end + end +end diff --git a/spec/features/projects/settings/user_transfers_a_project_spec.rb b/spec/features/projects/settings/user_transfers_a_project_spec.rb new file mode 100644 index 00000000000..96b7cf1f93b --- /dev/null +++ b/spec/features/projects/settings/user_transfers_a_project_spec.rb @@ -0,0 +1,73 @@ +require 'spec_helper' + +describe 'Projects > Settings > User transfers a project', :js do + let(:user) { create(:user) } + let(:project) { create(:project, :repository, namespace: user.namespace) } + let(:group) { create(:group) } + + before do + group.add_owner(user) + sign_in(user) + end + + def transfer_project(project, group) + visit edit_project_path(project) + + page.within('.js-project-transfer-form') do + page.find('.select2-container').click + end + + page.find("div[role='option']", text: group.full_name).click + + click_button('Transfer project') + + fill_in 'confirm_name_input', with: project.name + + click_button 'Confirm' + + 
wait_for_requests + end + + it 'allows transferring a project to a group' do + old_path = project_path(project) + transfer_project(project, group) + new_path = namespace_project_path(group, project) + + expect(project.reload.namespace).to eq(group) + + visit new_path + wait_for_requests + + expect(current_path).to eq(new_path) + expect(find('.breadcrumbs')).to have_content(project.name) + + visit old_path + wait_for_requests + + expect(current_path).to eq(new_path) + expect(find('.breadcrumbs')).to have_content(project.name) + end + + context 'and a new project is added with the same path' do + it 'overrides the redirect' do + old_path = project_path(project) + project_path = project.path + transfer_project(project, group) + new_project = create(:project, namespace: user.namespace, path: project_path) + visit old_path + + expect(current_path).to eq(old_path) + expect(find('.breadcrumbs')).to have_content(new_project.name) + end + end + + context 'when nested groups are available', :nested_groups do + it 'allows transferring a project to a subgroup' do + subgroup = create(:group, parent: group) + + transfer_project(project, subgroup) + + expect(project.reload.namespace).to eq(subgroup) + end + end +end diff --git a/spec/features/projects/settings/visibility_settings_spec.rb b/spec/features/projects/settings/visibility_settings_spec.rb index 06f6702670b..2ec6990313f 100644 --- a/spec/features/projects/settings/visibility_settings_spec.rb +++ b/spec/features/projects/settings/visibility_settings_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -feature 'Visibility settings', :js do +describe 'Projects > Settings > Visibility settings', :js do let(:user) { create(:user) } let(:project) { create(:project, namespace: user.namespace, visibility_level: 20) } @@ -10,14 +10,14 @@ feature 'Visibility settings', :js do visit edit_project_path(project) end - scenario 'project visibility select is available' do + it 'project visibility select is available' do visibility_select_container = find('.project-visibility-setting') expect(visibility_select_container.find('select').value).to eq project.visibility_level.to_s expect(visibility_select_container).to have_content 'The project can be accessed by anyone, regardless of authentication.' end - scenario 'project visibility description updates on change' do + it 'project visibility description updates on change' do visibility_select_container = find('.project-visibility-setting') visibility_select = visibility_select_container.find('select') visibility_select.select('Private') @@ -25,6 +25,38 @@ feature 'Visibility settings', :js do expect(visibility_select.value).to eq '0' expect(visibility_select_container).to have_content 'Access must be granted explicitly to each user.' 
end + + context 'merge requests select' do + it 'hides merge requests section' do + find('.project-feature-controls[data-for="project[project_feature_attributes][merge_requests_access_level]"] .project-feature-toggle').click + + expect(page).to have_selector('.merge-requests-feature', visible: false) + end + + context 'given project with merge_requests_disabled access level' do + let(:project) { create(:project, :merge_requests_disabled, namespace: user.namespace) } + + it 'hides merge requests section' do + expect(page).to have_selector('.merge-requests-feature', visible: false) + end + end + end + + context 'builds select' do + it 'hides builds select section' do + find('.project-feature-controls[data-for="project[project_feature_attributes][builds_access_level]"] .project-feature-toggle').click + + expect(page).to have_selector('.builds-feature', visible: false) + end + + context 'given project with builds_disabled access level' do + let(:project) { create(:project, :builds_disabled, namespace: user.namespace) } + + it 'hides builds select section' do + expect(page).to have_selector('.builds-feature', visible: false) + end + end + end end context 'as master' do @@ -36,7 +68,7 @@ feature 'Visibility settings', :js do visit edit_project_path(project) end - scenario 'project visibility is locked' do + it 'project visibility is locked' do visibility_select_container = find('.project-visibility-setting') expect(visibility_select_container).to have_selector 'select[name="project[visibility_level]"]:disabled' diff --git a/spec/features/projects/developer_views_empty_project_instructions_spec.rb b/spec/features/projects/show/developer_views_empty_project_instructions_spec.rb index bf55917bf4c..8803b5222be 100644 --- a/spec/features/projects/developer_views_empty_project_instructions_spec.rb +++ b/spec/features/projects/show/developer_views_empty_project_instructions_spec.rb @@ -1,6 +1,6 @@ require 'rails_helper' -feature 'Developer views empty project instructions' do +feature 'Projects > Show > Developer views empty project instructions' do let(:project) { create(:project, :empty_repo) } let(:developer) { create(:user) } diff --git a/spec/features/projects/main/download_buttons_spec.rb b/spec/features/projects/show/download_buttons_spec.rb index 81f08e44cf3..254affd4a94 100644 --- a/spec/features/projects/main/download_buttons_spec.rb +++ b/spec/features/projects/show/download_buttons_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -feature 'Download buttons in project main page' do +feature 'Projects > Show > Download buttons' do given(:user) { create(:user) } given(:role) { :developer } given(:status) { 'success' } diff --git a/spec/features/projects/no_password_spec.rb b/spec/features/projects/show/no_password_spec.rb index b3b3212556c..b3b3212556c 100644 --- a/spec/features/projects/no_password_spec.rb +++ b/spec/features/projects/show/no_password_spec.rb diff --git a/spec/features/projects/redirects_spec.rb b/spec/features/projects/show/redirects_spec.rb index d1d8ca07035..8d41c547d77 100644 --- a/spec/features/projects/redirects_spec.rb +++ b/spec/features/projects/show/redirects_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'Project redirects' do +describe 'Projects > Show > Redirects' do let(:user) { create :user } let(:public_project) { create :project, :public } let(:private_project) { create :project, :private } diff --git a/spec/features/projects/main/rss_spec.rb b/spec/features/projects/show/rss_spec.rb index 3c98c11b490..d02eaf34533 100644 --- 
a/spec/features/projects/main/rss_spec.rb +++ b/spec/features/projects/show/rss_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -feature 'Project RSS' do +feature 'Projects > Show > RSS' do let(:user) { create(:user) } let(:project) { create(:project, :repository, visibility_level: Gitlab::VisibilityLevel::PUBLIC) } let(:path) { project_path(project) } diff --git a/spec/features/projects/user_interacts_with_stars_spec.rb b/spec/features/projects/show/user_interacts_with_stars_spec.rb index d9d2e0ab171..ba28c0e1b8a 100644 --- a/spec/features/projects/user_interacts_with_stars_spec.rb +++ b/spec/features/projects/show/user_interacts_with_stars_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'User interacts with project stars' do +describe 'Projects > Show > User interacts with project stars' do let(:project) { create(:project, :public, :repository) } context 'when user is signed in', :js do diff --git a/spec/features/projects/show/user_manages_notifications_spec.rb b/spec/features/projects/show/user_manages_notifications_spec.rb new file mode 100644 index 00000000000..31b105229be --- /dev/null +++ b/spec/features/projects/show/user_manages_notifications_spec.rb @@ -0,0 +1,19 @@ +require 'spec_helper' + +describe 'Projects > Show > User manages notifications', :js do + let(:project) { create(:project, :public, :repository) } + + before do + sign_in(project.owner) + visit project_path(project) + end + + it 'changes the notification setting' do + first('.notifications-btn').click + click_link 'On mention' + + page.within '#notifications-button' do + expect(page).to have_content 'On mention' + end + end +end diff --git a/spec/features/projects/show/user_sees_collaboration_links_spec.rb b/spec/features/projects/show/user_sees_collaboration_links_spec.rb new file mode 100644 index 00000000000..7b3711531c6 --- /dev/null +++ b/spec/features/projects/show/user_sees_collaboration_links_spec.rb @@ -0,0 +1,87 @@ +require 'spec_helper' + +describe 'Projects > Show > Collaboration links' do + let(:project) { create(:project, :repository) } + let(:user) { create(:user) } + + before do + project.add_developer(user) + sign_in(user) + end + + it 'shows all the expected links' do + visit project_path(project) + + # The navigation bar + page.within('.header-new') do + aggregate_failures 'dropdown links in the navigation bar' do + expect(page).to have_link('New issue') + expect(page).to have_link('New merge request') + expect(page).to have_link('New snippet', href: new_project_snippet_path(project)) + end + end + + # The project header + page.within('.project-home-panel') do + aggregate_failures 'dropdown links in the project home panel' do + expect(page).to have_link('New issue') + expect(page).to have_link('New merge request') + expect(page).to have_link('New snippet') + expect(page).to have_link('New file') + expect(page).to have_link('New branch') + expect(page).to have_link('New tag') + end + end + + # The dropdown above the tree + page.within('.repo-breadcrumb') do + aggregate_failures 'dropdown links above the repo tree' do + expect(page).to have_link('New file') + expect(page).to have_link('Upload file') + expect(page).to have_link('New directory') + expect(page).to have_link('New branch') + expect(page).to have_link('New tag') + end + end + + # The Web IDE + expect(page).to have_link('Web IDE') + end + + it 'hides the links when the project is archived' do + project.update!(archived: true) + + visit project_path(project) + + page.within('.header-new') do + aggregate_failures 'dropdown links' do + 
expect(page).not_to have_link('New issue') + expect(page).not_to have_link('New merge request') + expect(page).not_to have_link('New snippet', href: new_project_snippet_path(project)) + end + end + + page.within('.project-home-panel') do + aggregate_failures 'dropdown links' do + expect(page).not_to have_link('New issue') + expect(page).not_to have_link('New merge request') + expect(page).not_to have_link('New snippet') + expect(page).not_to have_link('New file') + expect(page).not_to have_link('New branch') + expect(page).not_to have_link('New tag') + end + end + + page.within('.repo-breadcrumb') do + aggregate_failures 'dropdown links' do + expect(page).not_to have_link('New file') + expect(page).not_to have_link('Upload file') + expect(page).not_to have_link('New directory') + expect(page).not_to have_link('New branch') + expect(page).not_to have_link('New tag') + end + end + + expect(page).not_to have_link('Web IDE') + end +end diff --git a/spec/features/projects/show/user_sees_deletion_failure_message_spec.rb b/spec/features/projects/show/user_sees_deletion_failure_message_spec.rb new file mode 100644 index 00000000000..aa23bef6fd8 --- /dev/null +++ b/spec/features/projects/show/user_sees_deletion_failure_message_spec.rb @@ -0,0 +1,18 @@ +require 'spec_helper' + +describe 'Projects > Show > User sees a deletion failure message' do + let(:project) { create(:project, :empty_repo, pending_delete: true) } + + before do + sign_in(project.owner) + end + + it 'shows error message if deletion for project fails' do + project.update_attributes(delete_error: "Something went wrong", pending_delete: false) + + visit project_path(project) + + expect(page).to have_selector('.project-deletion-failed-message') + expect(page).to have_content("This project was scheduled for deletion, but failed with the following message: #{project.delete_error}") + end +end diff --git a/spec/features/projects/user_views_details_spec.rb b/spec/features/projects/show/user_sees_git_instructions_spec.rb index ffc063654cd..9a82fee1b5d 100644 --- a/spec/features/projects/user_views_details_spec.rb +++ b/spec/features/projects/show/user_sees_git_instructions_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'User views details' do +describe 'Projects > Show > User sees Git instructions' do set(:user) { create(:user) } shared_examples_for 'redirects to the sign in page' do @@ -9,6 +9,16 @@ describe 'User views details' do end end + shared_examples_for 'shows details of empty project with no repo' do + it 'shows Git command line instructions' do + click_link 'Create empty repository' + + page.within '.empty_wrapper' do + expect(page).to have_content('Command line instructions') + end + end + end + shared_examples_for 'shows details of empty project' do let(:user_has_ssh_key) { false } @@ -36,6 +46,17 @@ describe 'User views details' do end context 'when project is public' do + context 'when project has no repo' do + set(:project) { create(:project, :public) } + + before do + sign_in(project.owner) + visit project_path(project) + end + + include_examples 'shows details of empty project with no repo' + end + context 'when project is empty' do set(:project) { create(:project_empty_repo, :public) } diff --git a/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb b/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb new file mode 100644 index 00000000000..e277bfb8011 --- /dev/null +++ b/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb @@ -0,0 +1,18 @@ +require 'spec_helper' + 
+describe 'Projects > Show > User sees last commit CI status' do + set(:project) { create(:project, :repository, :public) } + + it 'shows the project README', :js do + project.enable_ci + pipeline = create(:ci_pipeline, project: project, sha: project.commit.sha, ref: 'master') + pipeline.skip + + visit project_path(project) + + page.within '.blob-commit-info' do + expect(page).to have_content(project.commit.sha[0..6]) + expect(page).to have_link('Commit: skipped') + end + end +end diff --git a/spec/features/projects/show/user_sees_readme_spec.rb b/spec/features/projects/show/user_sees_readme_spec.rb new file mode 100644 index 00000000000..d80606c1c23 --- /dev/null +++ b/spec/features/projects/show/user_sees_readme_spec.rb @@ -0,0 +1,16 @@ +require 'spec_helper' + +describe 'Projects > Show > User sees README' do + set(:user) { create(:user) } + + set(:project) { create(:project, :repository, :public) } + + it 'shows the project README', :js do + visit project_path(project) + wait_for_requests + + page.within('.readme-holder') do + expect(page).to have_content 'testme' + end + end +end diff --git a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb new file mode 100644 index 00000000000..a906fa20233 --- /dev/null +++ b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb @@ -0,0 +1,318 @@ +require 'spec_helper' + +describe 'Projects > Show > User sees setup shortcut buttons' do + # For "New file", "Add License" functionality, + # see spec/features/projects/files/project_owner_creates_license_file_spec.rb + # see spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb + + let(:user) { create(:user) } + + describe 'empty project' do + let(:project) { create(:project, :public, :empty_repo) } + let(:presenter) { project.present(current_user: user) } + + describe 'as a normal user' do + before do + sign_in(user) + + visit project_path(project) + end + + it 'no Auto DevOps button if can not manage pipelines' do + page.within('.project-stats') do + expect(page).not_to have_link('Enable Auto DevOps') + expect(page).not_to have_link('Auto DevOps enabled') + end + end + + it '"Auto DevOps enabled" button not linked' do + project.create_auto_devops!(enabled: true) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_text('Auto DevOps enabled') + end + end + end + + describe 'as a master' do + before do + project.add_master(user) + sign_in(user) + + visit project_path(project) + end + + it '"New file" button linked to new file page' do + page.within('.project-stats') do + expect(page).to have_link('New file', href: project_new_blob_path(project, project.default_branch || 'master')) + end + end + + it '"Add Readme" button linked to new file populated for a readme' do + page.within('.project-stats') do + expect(page).to have_link('Add Readme', href: presenter.add_readme_path) + end + end + + it '"Add License" button linked to new file populated for a license' do + page.within('.project-stats') do + expect(page).to have_link('Add License', href: presenter.add_license_path) + end + end + + describe 'Auto DevOps button' do + it '"Enable Auto DevOps" button linked to settings page' do + page.within('.project-stats') do + expect(page).to have_link('Enable Auto DevOps', href: project_settings_ci_cd_path(project, anchor: 'js-general-pipeline-settings')) + end + end + + it '"Auto DevOps enabled" anchor linked to settings page' 
do + project.create_auto_devops!(enabled: true) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_link('Auto DevOps enabled', href: project_settings_ci_cd_path(project, anchor: 'js-general-pipeline-settings')) + end + end + end + + describe 'Kubernetes cluster button' do + it '"Add Kubernetes cluster" button linked to clusters page' do + page.within('.project-stats') do + expect(page).to have_link('Add Kubernetes cluster', href: new_project_cluster_path(project)) + end + end + + it '"Kubernetes cluster" anchor linked to cluster page' do + cluster = create(:cluster, :provided_by_gcp, projects: [project]) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_link('Kubernetes configured', href: project_cluster_path(project, cluster)) + end + end + end + end + end + + describe 'populated project' do + let(:project) { create(:project, :public, :repository) } + let(:presenter) { project.present(current_user: user) } + + describe 'as a normal user' do + before do + sign_in(user) + + visit project_path(project) + end + + it 'no Auto DevOps button if can not manage pipelines' do + page.within('.project-stats') do + expect(page).not_to have_link('Enable Auto DevOps') + expect(page).not_to have_link('Auto DevOps enabled') + end + end + + it '"Auto DevOps enabled" button not linked' do + project.create_auto_devops!(enabled: true) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_text('Auto DevOps enabled') + end + end + + it 'no Kubernetes cluster button if can not manage clusters' do + page.within('.project-stats') do + expect(page).not_to have_link('Add Kubernetes cluster') + expect(page).not_to have_link('Kubernetes configured') + end + end + end + + describe 'as a master' do + before do + allow_any_instance_of(AutoDevopsHelper).to receive(:show_auto_devops_callout?).and_return(false) + project.add_master(user) + sign_in(user) + + visit project_path(project) + end + + it 'no "Add Changelog" button if the project already has a changelog' do + expect(project.repository.changelog).not_to be_nil + + page.within('.project-stats') do + expect(page).not_to have_link('Add Changelog') + end + end + + it 'no "Add License" button if the project already has a license' do + expect(project.repository.license_blob).not_to be_nil + + page.within('.project-stats') do + expect(page).not_to have_link('Add License') + end + end + + it 'no "Add Contribution guide" button if the project already has a contribution guide' do + expect(project.repository.contribution_guide).not_to be_nil + + page.within('.project-stats') do + expect(page).not_to have_link('Add Contribution guide') + end + end + + describe 'GitLab CI configuration button' do + it '"Set up CI/CD" button linked to new file populated for a .gitlab-ci.yml' do + expect(project.repository.gitlab_ci_yml).to be_nil + + page.within('.project-stats') do + expect(page).to have_link('Set up CI/CD', href: presenter.add_ci_yml_path) + end + end + + it 'no "Set up CI/CD" button if the project already has a .gitlab-ci.yml' do + Files::CreateService.new( + project, + project.creator, + start_branch: 'master', + branch_name: 'master', + commit_message: "Add .gitlab-ci.yml", + file_path: '.gitlab-ci.yml', + file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) + ).execute + + expect(project.repository.gitlab_ci_yml).not_to be_nil + + visit project_path(project) + + page.within('.project-stats') do + expect(page).not_to 
have_link('Set up CI/CD') + end + end + + it 'no "Set up CI/CD" button if the project has Auto DevOps enabled' do + project.create_auto_devops!(enabled: true) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).not_to have_link('Set up CI/CD') + end + end + end + + describe 'Auto DevOps button' do + it '"Enable Auto DevOps" button linked to settings page' do + page.within('.project-stats') do + expect(page).to have_link('Enable Auto DevOps', href: project_settings_ci_cd_path(project, anchor: 'js-general-pipeline-settings')) + end + end + + it '"Enable Auto DevOps" button linked to settings page' do + project.create_auto_devops!(enabled: true) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_link('Auto DevOps enabled', href: project_settings_ci_cd_path(project, anchor: 'js-general-pipeline-settings')) + end + end + + it 'no Auto DevOps button if Auto DevOps callout is shown' do + allow_any_instance_of(AutoDevopsHelper).to receive(:show_auto_devops_callout?).and_return(true) + + visit project_path(project) + + expect(page).to have_selector('.js-autodevops-banner') + + page.within('.project-stats') do + expect(page).not_to have_link('Enable Auto DevOps') + expect(page).not_to have_link('Auto DevOps enabled') + end + end + + it 'no "Enable Auto DevOps" button when .gitlab-ci.yml already exists' do + Files::CreateService.new( + project, + project.creator, + start_branch: 'master', + branch_name: 'master', + commit_message: "Add .gitlab-ci.yml", + file_path: '.gitlab-ci.yml', + file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) + ).execute + + expect(project.repository.gitlab_ci_yml).not_to be_nil + + visit project_path(project) + + page.within('.project-stats') do + expect(page).not_to have_link('Enable Auto DevOps') + expect(page).not_to have_link('Auto DevOps enabled') + end + end + end + + describe 'Kubernetes cluster button' do + it '"Add Kubernetes cluster" button linked to clusters page' do + page.within('.project-stats') do + expect(page).to have_link('Add Kubernetes cluster', href: new_project_cluster_path(project)) + end + end + + it '"Kubernetes cluster" button linked to cluster page' do + cluster = create(:cluster, :provided_by_gcp, projects: [project]) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_link('Kubernetes configured', href: project_cluster_path(project, cluster)) + end + end + end + + describe '"Set up Koding" button' do + it 'no "Set up Koding" button if Koding disabled' do + stub_application_setting(koding_enabled?: false) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).not_to have_link('Set up Koding') + end + end + + it 'no "Set up Koding" button if the project already has a .koding.yml' do + stub_application_setting(koding_enabled?: true) + allow(Gitlab::CurrentSettings.current_application_settings).to receive(:koding_url).and_return('http://koding.example.com') + expect(project.repository.changelog).not_to be_nil + allow_any_instance_of(Repository).to receive(:koding_yml).and_return(project.repository.changelog) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).not_to have_link('Set up Koding') + end + end + + it '"Set up Koding" button linked to new file populated for a .koding.yml' do + stub_application_setting(koding_enabled?: true) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_link('Set up Koding', 
href: presenter.add_koding_stack_path) + end + end + end + end + end +end diff --git a/spec/features/projects/show_project_spec.rb b/spec/features/projects/show_project_spec.rb deleted file mode 100644 index 0a014e9f080..00000000000 --- a/spec/features/projects/show_project_spec.rb +++ /dev/null @@ -1,337 +0,0 @@ -require 'spec_helper' - -describe 'Project show page', :feature do - context 'when project pending delete' do - let(:project) { create(:project, :empty_repo, pending_delete: true) } - - before do - sign_in(project.owner) - end - - it 'shows error message if deletion for project fails' do - project.update_attributes(delete_error: "Something went wrong", pending_delete: false) - - visit project_path(project) - - expect(page).to have_selector('.project-deletion-failed-message') - expect(page).to have_content("This project was scheduled for deletion, but failed with the following message: #{project.delete_error}") - end - end - - describe 'stat button existence' do - # For "New file", "Add License" functionality, - # see spec/features/projects/files/project_owner_creates_license_file_spec.rb - # see spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb - - let(:user) { create(:user) } - - describe 'empty project' do - let(:project) { create(:project, :public, :empty_repo) } - let(:presenter) { project.present(current_user: user) } - - describe 'as a normal user' do - before do - sign_in(user) - - visit project_path(project) - end - - it 'no Auto DevOps button if can not manage pipelines' do - page.within('.project-stats') do - expect(page).not_to have_link('Enable Auto DevOps') - expect(page).not_to have_link('Auto DevOps enabled') - end - end - - it '"Auto DevOps enabled" button not linked' do - project.create_auto_devops!(enabled: true) - - visit project_path(project) - - page.within('.project-stats') do - expect(page).to have_text('Auto DevOps enabled') - end - end - end - - describe 'as a master' do - before do - project.add_master(user) - sign_in(user) - - visit project_path(project) - end - - it '"New file" button linked to new file page' do - page.within('.project-stats') do - expect(page).to have_link('New file', href: project_new_blob_path(project, project.default_branch || 'master')) - end - end - - it '"Add Readme" button linked to new file populated for a readme' do - page.within('.project-stats') do - expect(page).to have_link('Add Readme', href: presenter.add_readme_path) - end - end - - it '"Add License" button linked to new file populated for a license' do - page.within('.project-stats') do - expect(page).to have_link('Add License', href: presenter.add_license_path) - end - end - - describe 'Auto DevOps button' do - it '"Enable Auto DevOps" button linked to settings page' do - page.within('.project-stats') do - expect(page).to have_link('Enable Auto DevOps', href: project_settings_ci_cd_path(project, anchor: 'js-general-pipeline-settings')) - end - end - - it '"Auto DevOps enabled" anchor linked to settings page' do - project.create_auto_devops!(enabled: true) - - visit project_path(project) - - page.within('.project-stats') do - expect(page).to have_link('Auto DevOps enabled', href: project_settings_ci_cd_path(project, anchor: 'js-general-pipeline-settings')) - end - end - end - - describe 'Kubernetes cluster button' do - it '"Add Kubernetes cluster" button linked to clusters page' do - page.within('.project-stats') do - expect(page).to have_link('Add Kubernetes cluster', href: new_project_cluster_path(project)) - end - 
end - - it '"Kubernetes cluster" anchor linked to cluster page' do - cluster = create(:cluster, :provided_by_gcp, projects: [project]) - - visit project_path(project) - - page.within('.project-stats') do - expect(page).to have_link('Kubernetes configured', href: project_cluster_path(project, cluster)) - end - end - end - end - end - - describe 'populated project' do - let(:project) { create(:project, :public, :repository) } - let(:presenter) { project.present(current_user: user) } - - describe 'as a normal user' do - before do - sign_in(user) - - visit project_path(project) - end - - it 'no Auto DevOps button if can not manage pipelines' do - page.within('.project-stats') do - expect(page).not_to have_link('Enable Auto DevOps') - expect(page).not_to have_link('Auto DevOps enabled') - end - end - - it '"Auto DevOps enabled" button not linked' do - project.create_auto_devops!(enabled: true) - - visit project_path(project) - - page.within('.project-stats') do - expect(page).to have_text('Auto DevOps enabled') - end - end - - it 'no Kubernetes cluster button if can not manage clusters' do - page.within('.project-stats') do - expect(page).not_to have_link('Add Kubernetes cluster') - expect(page).not_to have_link('Kubernetes configured') - end - end - end - - describe 'as a master' do - before do - allow_any_instance_of(AutoDevopsHelper).to receive(:show_auto_devops_callout?).and_return(false) - project.add_master(user) - sign_in(user) - - visit project_path(project) - end - - it 'no "Add Changelog" button if the project already has a changelog' do - expect(project.repository.changelog).not_to be_nil - - page.within('.project-stats') do - expect(page).not_to have_link('Add Changelog') - end - end - - it 'no "Add License" button if the project already has a license' do - expect(project.repository.license_blob).not_to be_nil - - page.within('.project-stats') do - expect(page).not_to have_link('Add License') - end - end - - it 'no "Add Contribution guide" button if the project already has a contribution guide' do - expect(project.repository.contribution_guide).not_to be_nil - - page.within('.project-stats') do - expect(page).not_to have_link('Add Contribution guide') - end - end - - describe 'GitLab CI configuration button' do - it '"Set up CI/CD" button linked to new file populated for a .gitlab-ci.yml' do - expect(project.repository.gitlab_ci_yml).to be_nil - - page.within('.project-stats') do - expect(page).to have_link('Set up CI/CD', href: presenter.add_ci_yml_path) - end - end - - it 'no "Set up CI/CD" button if the project already has a .gitlab-ci.yml' do - Files::CreateService.new( - project, - project.creator, - start_branch: 'master', - branch_name: 'master', - commit_message: "Add .gitlab-ci.yml", - file_path: '.gitlab-ci.yml', - file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) - ).execute - - expect(project.repository.gitlab_ci_yml).not_to be_nil - - visit project_path(project) - - page.within('.project-stats') do - expect(page).not_to have_link('Set up CI/CD') - end - end - - it 'no "Set up CI/CD" button if the project has Auto DevOps enabled' do - project.create_auto_devops!(enabled: true) - - visit project_path(project) - - page.within('.project-stats') do - expect(page).not_to have_link('Set up CI/CD') - end - end - end - - describe 'Auto DevOps button' do - it '"Enable Auto DevOps" button linked to settings page' do - page.within('.project-stats') do - expect(page).to have_link('Enable Auto DevOps', href: project_settings_ci_cd_path(project, 
anchor: 'js-general-pipeline-settings')) - end - end - - it '"Enable Auto DevOps" button linked to settings page' do - project.create_auto_devops!(enabled: true) - - visit project_path(project) - - page.within('.project-stats') do - expect(page).to have_link('Auto DevOps enabled', href: project_settings_ci_cd_path(project, anchor: 'js-general-pipeline-settings')) - end - end - - it 'no Auto DevOps button if Auto DevOps callout is shown' do - allow_any_instance_of(AutoDevopsHelper).to receive(:show_auto_devops_callout?).and_return(true) - - visit project_path(project) - - expect(page).to have_selector('.js-autodevops-banner') - - page.within('.project-stats') do - expect(page).not_to have_link('Enable Auto DevOps') - expect(page).not_to have_link('Auto DevOps enabled') - end - end - - it 'no "Enable Auto DevOps" button when .gitlab-ci.yml already exists' do - Files::CreateService.new( - project, - project.creator, - start_branch: 'master', - branch_name: 'master', - commit_message: "Add .gitlab-ci.yml", - file_path: '.gitlab-ci.yml', - file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) - ).execute - - expect(project.repository.gitlab_ci_yml).not_to be_nil - - visit project_path(project) - - page.within('.project-stats') do - expect(page).not_to have_link('Enable Auto DevOps') - expect(page).not_to have_link('Auto DevOps enabled') - end - end - end - - describe 'Kubernetes cluster button' do - it '"Add Kubernetes cluster" button linked to clusters page' do - page.within('.project-stats') do - expect(page).to have_link('Add Kubernetes cluster', href: new_project_cluster_path(project)) - end - end - - it '"Kubernetes cluster" button linked to cluster page' do - cluster = create(:cluster, :provided_by_gcp, projects: [project]) - - visit project_path(project) - - page.within('.project-stats') do - expect(page).to have_link('Kubernetes configured', href: project_cluster_path(project, cluster)) - end - end - end - - describe '"Set up Koding" button' do - it 'no "Set up Koding" button if Koding disabled' do - stub_application_setting(koding_enabled?: false) - - visit project_path(project) - - page.within('.project-stats') do - expect(page).not_to have_link('Set up Koding') - end - end - - it 'no "Set up Koding" button if the project already has a .koding.yml' do - stub_application_setting(koding_enabled?: true) - allow(Gitlab::CurrentSettings.current_application_settings).to receive(:koding_url).and_return('http://koding.example.com') - expect(project.repository.changelog).not_to be_nil - allow_any_instance_of(Repository).to receive(:koding_yml).and_return(project.repository.changelog) - - visit project_path(project) - - page.within('.project-stats') do - expect(page).not_to have_link('Set up Koding') - end - end - - it '"Set up Koding" button linked to new file populated for a .koding.yml' do - stub_application_setting(koding_enabled?: true) - - visit project_path(project) - - page.within('.project-stats') do - expect(page).to have_link('Set up Koding', href: presenter.add_koding_stack_path) - end - end - end - end - end - end -end diff --git a/spec/features/projects/snippets/create_snippet_spec.rb b/spec/features/projects/snippets/create_snippet_spec.rb index 3466a3dfb77..2388feeb980 100644 --- a/spec/features/projects/snippets/create_snippet_spec.rb +++ b/spec/features/projects/snippets/create_snippet_spec.rb @@ -1,6 +1,6 @@ require 'rails_helper' -feature 'Create Snippet', :js do +describe 'Projects > Snippets > Create Snippet', :js do include DropzoneHelper 
let(:user) { create(:user) } diff --git a/spec/features/projects/snippets/show_spec.rb b/spec/features/projects/snippets/show_spec.rb index 216f2af7c88..004ac55b656 100644 --- a/spec/features/projects/snippets/show_spec.rb +++ b/spec/features/projects/snippets/show_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -feature 'Project snippet', :js do +describe 'Projects > Snippets > Project snippet', :js do let(:user) { create(:user) } let(:project) { create(:project, :repository) } let(:snippet) { create(:project_snippet, project: project, file_name: file_name, content: content) } diff --git a/spec/features/projects/snippets/user_comments_on_snippet_spec.rb b/spec/features/projects/snippets/user_comments_on_snippet_spec.rb index 1bd2098af6d..01cf9740d1f 100644 --- a/spec/features/projects/snippets/user_comments_on_snippet_spec.rb +++ b/spec/features/projects/snippets/user_comments_on_snippet_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'User comments on a snippet', :js do +describe 'Projects > Snippets > User comments on a snippet', :js do let(:project) { create(:project) } let!(:snippet) { create(:project_snippet, project: project, author: user) } let(:user) { create(:user) } @@ -22,4 +22,16 @@ describe 'User comments on a snippet', :js do expect(page).to have_content('Good snippet!') end + + it 'should have autocomplete' do + find('#note_note').native.send_keys('') + fill_in 'note[note]', with: '@' + + expect(page).to have_selector('.atwho-view') + end + + it 'should have zen mode' do + find('.js-zen-enter').click() + expect(page).to have_selector('.fullscreen') + end end diff --git a/spec/features/projects/snippets/user_deletes_snippet_spec.rb b/spec/features/projects/snippets/user_deletes_snippet_spec.rb index ca5f7981c33..e64837ad59e 100644 --- a/spec/features/projects/snippets/user_deletes_snippet_spec.rb +++ b/spec/features/projects/snippets/user_deletes_snippet_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'User deletes a snippet' do +describe 'Projects > Snippets > User deletes a snippet' do let(:project) { create(:project) } let!(:snippet) { create(:project_snippet, project: project, author: user) } let(:user) { create(:user) } diff --git a/spec/features/projects/snippets/user_updates_snippet_spec.rb b/spec/features/projects/snippets/user_updates_snippet_spec.rb index 09a390443cf..eaedbbf32b6 100644 --- a/spec/features/projects/snippets/user_updates_snippet_spec.rb +++ b/spec/features/projects/snippets/user_updates_snippet_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'User updates a snippet' do +describe 'Projects > Snippets > User updates a snippet' do let(:project) { create(:project) } let!(:snippet) { create(:project_snippet, project: project, author: user) } let(:user) { create(:user) } diff --git a/spec/features/projects/snippets/user_views_snippets_spec.rb b/spec/features/projects/snippets/user_views_snippets_spec.rb index e9992e00ca8..376b76e0001 100644 --- a/spec/features/projects/snippets/user_views_snippets_spec.rb +++ b/spec/features/projects/snippets/user_views_snippets_spec.rb @@ -1,9 +1,10 @@ require 'spec_helper' -describe 'User views snippets' do +describe 'Projects > Snippets > User views snippets' do let(:project) { create(:project) } let!(:project_snippet) { create(:project_snippet, project: project, author: user) } let!(:snippet) { create(:snippet, author: user) } + let(:snippets) { [project_snippet, snippet] } # Used by the shared examples let(:user) { create(:user) } before do @@ -13,6 +14,17 @@ describe 'User views snippets' do 
visit(project_snippets_path(project)) end + context 'pagination' do + before do + create(:project_snippet, project: project, author: user) + allow(Snippet).to receive(:default_per_page).and_return(1) + + visit project_snippets_path(project) + end + + it_behaves_like 'paginated snippets' + end + it 'shows snippets' do expect(page).to have_content(project_snippet.title) expect(page).not_to have_content(snippet.title) diff --git a/spec/features/projects/snippets_spec.rb b/spec/features/projects/snippets_spec.rb deleted file mode 100644 index 0fa7ca9afd4..00000000000 --- a/spec/features/projects/snippets_spec.rb +++ /dev/null @@ -1,49 +0,0 @@ -require 'spec_helper' - -describe 'Project snippets', :js do - context 'when the project has snippets' do - let(:project) { create(:project, :public) } - let!(:snippets) { create_list(:project_snippet, 2, :public, author: project.owner, project: project) } - let!(:other_snippet) { create(:project_snippet) } - - context 'pagination' do - before do - allow(Snippet).to receive(:default_per_page).and_return(1) - - visit project_snippets_path(project) - end - - it_behaves_like 'paginated snippets' - end - - context 'list content' do - it 'contains all project snippets' do - visit project_snippets_path(project) - - expect(page).to have_selector('.snippet-row', count: 2) - - expect(page).to have_content(snippets[0].title) - expect(page).to have_content(snippets[1].title) - end - end - - context 'when submitting a note' do - before do - sign_in(create(:admin)) - visit project_snippet_path(project, snippets[0]) - end - - it 'should have autocomplete' do - find('#note_note').native.send_keys('') - fill_in 'note[note]', with: '@' - - expect(page).to have_selector('.atwho-view') - end - - it 'should have zen mode' do - find('.js-zen-enter').click() - expect(page).to have_selector('.fullscreen') - end - end - end -end diff --git a/spec/features/projects/tree/create_directory_spec.rb b/spec/features/projects/tree/create_directory_spec.rb new file mode 100644 index 00000000000..b242e41df1c --- /dev/null +++ b/spec/features/projects/tree/create_directory_spec.rb @@ -0,0 +1,55 @@ +require 'spec_helper' + +feature 'Multi-file editor new directory', :js do + let(:user) { create(:user) } + let(:project) { create(:project, :repository) } + + before do + project.add_master(user) + sign_in(user) + + visit project_tree_path(project, :master) + + wait_for_requests + + click_link('Web IDE') + + wait_for_requests + end + + after do + set_cookie('new_repo', 'false') + end + + it 'creates directory in current directory' do + find('.add-to-tree').click + + click_link('New directory') + + page.within('.modal') do + find('.form-control').set('folder name') + + click_button('Create directory') + end + + find('.add-to-tree').click + + click_link('New file') + + page.within('.modal-dialog') do + find('.form-control').set('file name') + + click_button('Create file') + end + + wait_for_requests + + click_button 'Stage all' + + fill_in('commit-message', with: 'commit message ide') + + click_button('Commit') + + expect(page).to have_content('folder name') + end +end diff --git a/spec/features/projects/tree/create_file_spec.rb b/spec/features/projects/tree/create_file_spec.rb new file mode 100644 index 00000000000..7d65456e049 --- /dev/null +++ b/spec/features/projects/tree/create_file_spec.rb @@ -0,0 +1,45 @@ +require 'spec_helper' + +feature 'Multi-file editor new file', :js do + let(:user) { create(:user) } + let(:project) { create(:project, :repository) } + + before do + 
project.add_master(user) + sign_in(user) + + visit project_path(project) + + wait_for_requests + + click_link('Web IDE') + + wait_for_requests + end + + after do + set_cookie('new_repo', 'false') + end + + it 'creates file in current directory' do + find('.add-to-tree').click + + click_link('New file') + + page.within('.modal') do + find('.form-control').set('file name') + + click_button('Create file') + end + + wait_for_requests + + click_button 'Stage all' + + fill_in('commit-message', with: 'commit message ide') + + click_button('Commit') + + expect(page).to have_content('file name') + end +end diff --git a/spec/features/projects/tree/tree_show_spec.rb b/spec/features/projects/tree/tree_show_spec.rb index c8a17871508..c4b3fb9d171 100644 --- a/spec/features/projects/tree/tree_show_spec.rb +++ b/spec/features/projects/tree/tree_show_spec.rb @@ -25,4 +25,18 @@ feature 'Projects tree' do expect(page).to have_selector('.label-lfs', text: 'LFS') end end + + context 'web IDE', :js do + before do + visit project_tree_path(project, File.join('master', 'bar')) + + click_link 'Web IDE' + + find('.ide-file-list') + end + + it 'opens folder in IDE' do + expect(page).to have_selector('.is-open', text: 'bar') + end + end end diff --git a/spec/features/projects/tree/upload_file_spec.rb b/spec/features/projects/tree/upload_file_spec.rb new file mode 100644 index 00000000000..8e53ae15700 --- /dev/null +++ b/spec/features/projects/tree/upload_file_spec.rb @@ -0,0 +1,51 @@ +require 'spec_helper' + +feature 'Multi-file editor upload file', :js do + let(:user) { create(:user) } + let(:project) { create(:project, :repository) } + let(:txt_file) { File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt') } + let(:img_file) { File.join(Rails.root, 'spec', 'fixtures', 'dk.png') } + + before do + project.add_master(user) + sign_in(user) + + visit project_tree_path(project, :master) + + wait_for_requests + + click_link('Web IDE') + + wait_for_requests + end + + after do + set_cookie('new_repo', 'false') + end + + it 'uploads text file' do + find('.add-to-tree').click + + # make the field visible so capybara can use it + execute_script('document.querySelector("#file-upload").classList.remove("hidden")') + attach_file('file-upload', txt_file) + + find('.add-to-tree').click + + expect(page).to have_selector('.multi-file-tab', text: 'doc_sample.txt') + expect(find('.blob-editor-container .lines-content')['innerText']).to have_content(File.open(txt_file, &:readline)) + end + + it 'uploads image file' do + find('.add-to-tree').click + + # make the field visible so capybara can use it + execute_script('document.querySelector("#file-upload").classList.remove("hidden")') + attach_file('file-upload', img_file) + + find('.add-to-tree').click + + expect(page).to have_selector('.multi-file-tab', text: 'dk.png') + expect(page).not_to have_selector('.monaco-editor') + end +end diff --git a/spec/features/projects/user_sees_sidebar_spec.rb b/spec/features/projects/user_sees_sidebar_spec.rb new file mode 100644 index 00000000000..cf80517b934 --- /dev/null +++ b/spec/features/projects/user_sees_sidebar_spec.rb @@ -0,0 +1,106 @@ +require 'spec_helper' + +describe 'Projects > User sees sidebar' do + let(:user) { create(:user) } + let(:project) { create(:project, :private, public_builds: false, namespace: user.namespace) } + + context 'as owner' do + before do + sign_in(user) + end + + context 'when snippets are disabled' do + before do + project.project_feature.update_attribute('snippets_access_level', ProjectFeature::DISABLED) + 
end + + it 'does not display a "Snippets" link' do + visit project_path(project) + + within('.nav-sidebar') do + expect(page).not_to have_content 'Snippets' + end + end + end + end + + context 'as guest' do + let(:guest) { create(:user) } + + before do + project.add_guest(guest) + + sign_in(guest) + end + + it 'shows allowed tabs only' do + visit project_path(project) + + within('.nav-sidebar') do + expect(page).to have_content 'Overview' + expect(page).to have_content 'Issues' + expect(page).to have_content 'Wiki' + + expect(page).not_to have_content 'Repository' + expect(page).not_to have_content 'CI / CD' + expect(page).not_to have_content 'Merge Requests' + end + end + + it 'does not show fork button' do + visit project_path(project) + + within('.count-buttons') do + expect(page).not_to have_link 'Fork' + end + end + + it 'does not show clone path' do + visit project_path(project) + + within('.project-repo-buttons') do + expect(page).not_to have_selector '.project-clone-holder' + end + end + + describe 'project landing page' do + before do + project.project_feature.update!( + issues_access_level: ProjectFeature::DISABLED, + wiki_access_level: ProjectFeature::DISABLED + ) + end + + it 'does not show the project file list landing page' do + visit project_path(project) + + expect(page).not_to have_selector '.project-stats' + expect(page).not_to have_selector '.project-last-commit' + expect(page).not_to have_selector '.project-show-files' + expect(page).to have_selector '.project-show-customize_workflow' + end + + it 'shows the customize workflow when issues and wiki are disabled' do + visit project_path(project) + + expect(page).to have_selector '.project-show-customize_workflow' + end + + it 'shows the wiki when enabled' do + project.project_feature.update!(wiki_access_level: ProjectFeature::PRIVATE) + + visit project_path(project) + + expect(page).to have_selector '.project-show-wiki' + end + + it 'shows the issues when enabled' do + project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE) + + visit project_path(project) + + expect(page).to have_selector '.issues-list' + end + end + end +end diff --git a/spec/features/projects/user_transfers_a_project_spec.rb b/spec/features/projects/user_transfers_a_project_spec.rb deleted file mode 100644 index 78f72b644ff..00000000000 --- a/spec/features/projects/user_transfers_a_project_spec.rb +++ /dev/null @@ -1,49 +0,0 @@ -require 'spec_helper' - -feature 'User transfers a project', :js do - let(:user) { create(:user) } - let(:project) { create(:project, :repository, namespace: user.namespace) } - - before do - sign_in user - end - - def transfer_project(project, group) - visit edit_project_path(project) - - page.within('.js-project-transfer-form') do - page.find('.select2-container').click - end - - page.find("div[role='option']", text: group.full_name).click - - click_button('Transfer project') - - fill_in 'confirm_name_input', with: project.name - - click_button 'Confirm' - - wait_for_requests - end - - it 'allows transferring a project to a subgroup of a namespace' do - group = create(:group) - group.add_owner(user) - - transfer_project(project, group) - - expect(project.reload.namespace).to eq(group) - end - - context 'when nested groups are available', :nested_groups do - it 'allows transferring a project to a subgroup' do - parent = create(:group) - parent.add_owner(user) - subgroup = create(:group, parent: parent) - - transfer_project(project, subgroup) - - expect(project.reload.namespace).to eq(subgroup) - end - end 
-end diff --git a/spec/features/projects/user_uses_shortcuts_spec.rb b/spec/features/projects/user_uses_shortcuts_spec.rb index fb0d8c766fe..47c5a8161d9 100644 --- a/spec/features/projects/user_uses_shortcuts_spec.rb +++ b/spec/features/projects/user_uses_shortcuts_spec.rb @@ -11,12 +11,12 @@ describe 'User uses shortcuts', :js do visit(project_path(project)) end - context 'when navigating to the Overview pages' do + context 'when navigating to the Project pages' do it 'redirects to the details page' do find('body').native.send_key('g') find('body').native.send_key('p') - expect(page).to have_active_navigation('Overview') + expect(page).to have_active_navigation('Project') expect(page).to have_active_sub_navigation('Details') end @@ -24,7 +24,7 @@ describe 'User uses shortcuts', :js do find('body').native.send_key('g') find('body').native.send_key('e') - expect(page).to have_active_navigation('Overview') + expect(page).to have_active_navigation('Project') expect(page).to have_active_sub_navigation('Activity') end end diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb index a4084818284..43cabd3b9f2 100644 --- a/spec/features/protected_branches_spec.rb +++ b/spec/features/protected_branches_spec.rb @@ -142,7 +142,10 @@ feature 'Protected Branches', :js do set_protected_branch_name('*-stable') click_on "Protect" - within(".protected-branches-list") { expect(page).to have_content("2 matching branches") } + within(".protected-branches-list") do + expect(page).to have_content("Protected branch (2)") + expect(page).to have_content("2 matching branches") + end end it "displays all the branches matching the wildcard" do diff --git a/spec/features/protected_tags_spec.rb b/spec/features/protected_tags_spec.rb index 8cc6f17b8d9..efccaeaff6c 100644 --- a/spec/features/protected_tags_spec.rb +++ b/spec/features/protected_tags_spec.rb @@ -65,7 +65,10 @@ feature 'Protected Tags', :js do set_protected_tag_name('*-stable') click_on "Protect" - within(".protected-tags-list") { expect(page).to have_content("2 matching tags") } + within(".protected-tags-list") do + expect(page).to have_content("Protected tag (2)") + expect(page).to have_content("2 matching tags") + end end it "displays all the tags matching the wildcard" do diff --git a/spec/features/read_only_spec.rb b/spec/features/read_only_spec.rb new file mode 100644 index 00000000000..8bfaf558466 --- /dev/null +++ b/spec/features/read_only_spec.rb @@ -0,0 +1,25 @@ +require 'rails_helper' + +describe 'read-only message' do + set(:user) { create(:user) } + + before do + sign_in(user) + end + + it 'shows read-only banner when database is read-only' do + allow(Gitlab::Database).to receive(:read_only?).and_return(true) + + visit root_dashboard_path + + expect(page).to have_content('You are on a read-only GitLab instance.') + end + + it 'does not show read-only banner when database is able to read-write' do + allow(Gitlab::Database).to receive(:read_only?).and_return(false) + + visit root_dashboard_path + + expect(page).not_to have_content('You are on a read-only GitLab instance.') + end +end diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb index 5ddea36add5..a9128104b87 100644 --- a/spec/features/search/user_uses_header_search_field_spec.rb +++ b/spec/features/search/user_uses_header_search_field_spec.rb @@ -9,49 +9,25 @@ describe 'User uses header search field' do before do project.add_reporter(user) sign_in(user) - - 
visit(project_path(project)) - end - - it 'starts searching by pressing the enter key', :js do - fill_in('search', with: 'gitlab') - find('#search').native.send_keys(:enter) - - page.within('.breadcrumbs-sub-title') do - expect(page).to have_content('Search') - end end - it 'contains location badge' do - expect(page).to have_selector('.has-location-badge') - end - - context 'when clicking the search field', :js do + context 'when user is in a global scope', :js do before do + visit(root_path) page.find('#search').click end - it 'shows category search dropdown' do - expect(page).to have_selector('.dropdown-header', text: /#{project.name}/i) - end - context 'when clicking issues' do - let!(:issue) { create(:issue, project: project, author: user, assignees: [user]) } - it 'shows assigned issues' do - find('.dropdown-menu').click_link('Issues assigned to me') + find('.search-input-container .dropdown-menu').click_link('Issues assigned to me') - expect(page).to have_selector('.filtered-search') - expect_tokens([assignee_token(user.name)]) - expect_filtered_search_input_empty + expect(find('.js-assignee-search')).to have_content(user.name) end it 'shows created issues' do - find('.dropdown-menu').click_link("Issues I've created") + find('.search-input-container .dropdown-menu').click_link("Issues I've created") - expect(page).to have_selector('.filtered-search') - expect_tokens([author_token(user.name)]) - expect_filtered_search_input_empty + expect(find('.js-author-search')).to have_content(user.name) end end @@ -59,32 +35,97 @@ describe 'User uses header search field' do let!(:merge_request) { create(:merge_request, source_project: project, author: user, assignee: user) } it 'shows assigned merge requests' do - find('.dropdown-menu').click_link('Merge requests assigned to me') + find('.search-input-container .dropdown-menu').click_link('Merge requests assigned to me') - expect(page).to have_selector('.merge-requests-holder') - expect_tokens([assignee_token(user.name)]) - expect_filtered_search_input_empty + expect(find('.js-assignee-search')).to have_content(user.name) end it 'shows created merge requests' do - find('.dropdown-menu').click_link("Merge requests I've created") + find('.search-input-container .dropdown-menu').click_link("Merge requests I've created") - expect(page).to have_selector('.merge-requests-holder') - expect_tokens([author_token(user.name)]) - expect_filtered_search_input_empty + expect(find('.js-author-search')).to have_content(user.name) end end end - context 'when entering text into the search field', :js do + context 'when user is in a project scope' do before do - page.within('.search-input-wrap') do - fill_in('search', with: project.name[0..3]) + visit(project_path(project)) + end + + it 'starts searching by pressing the enter key', :js do + fill_in('search', with: 'gitlab') + find('#search').native.send_keys(:enter) + + page.within('.breadcrumbs-sub-title') do + expect(page).to have_content('Search') end end - it 'does not display the category search dropdown' do - expect(page).not_to have_selector('.dropdown-header', text: /#{project.name}/i) + it 'contains location badge' do + expect(page).to have_selector('.has-location-badge') + end + + context 'when clicking the search field', :js do + before do + page.find('#search').click + end + + it 'shows category search dropdown' do + expect(page).to have_selector('.dropdown-header', text: /#{project.name}/i) + end + + context 'when clicking issues' do + let!(:issue) { create(:issue, project: project, author: user, 
assignees: [user]) } + + it 'shows assigned issues' do + find('.dropdown-menu').click_link('Issues assigned to me') + + expect(page).to have_selector('.filtered-search') + expect_tokens([assignee_token(user.name)]) + expect_filtered_search_input_empty + end + + it 'shows created issues' do + find('.dropdown-menu').click_link("Issues I've created") + + expect(page).to have_selector('.filtered-search') + expect_tokens([author_token(user.name)]) + expect_filtered_search_input_empty + end + end + + context 'when clicking merge requests' do + let!(:merge_request) { create(:merge_request, source_project: project, author: user, assignee: user) } + + it 'shows assigned merge requests' do + find('.dropdown-menu').click_link('Merge requests assigned to me') + + expect(page).to have_selector('.merge-requests-holder') + expect_tokens([assignee_token(user.name)]) + expect_filtered_search_input_empty + end + + it 'shows created merge requests' do + find('.dropdown-menu').click_link("Merge requests I've created") + + expect(page).to have_selector('.merge-requests-holder') + expect_tokens([author_token(user.name)]) + expect_filtered_search_input_empty + end + end + end + + context 'when entering text into the search field', :js do + before do + page.within('.search-input-wrap') do + fill_in('search', with: project.name[0..3]) + end + end + + it 'does not display the category search dropdown' do + expect(page).not_to have_selector('.dropdown-header', text: /#{project.name}/i) + end end end end diff --git a/spec/features/snippets/embedded_snippet_spec.rb b/spec/features/snippets/embedded_snippet_spec.rb new file mode 100644 index 00000000000..ab661f6fc69 --- /dev/null +++ b/spec/features/snippets/embedded_snippet_spec.rb @@ -0,0 +1,25 @@ +require 'spec_helper' + +describe 'Embedded Snippets' do + let(:snippet) { create(:personal_snippet, :public, file_name: 'random_dir.rb', content: content) } + let(:content) { "require 'fileutils'\nFileUtils.mkdir_p 'some/random_dir'\n" } + + it 'loads snippet', :js do + script_url = "http://#{Capybara.current_session.server.host}:#{Capybara.current_session.server.port}/#{snippet_path(snippet, format: 'js')}" + embed_body = "<html><body><script src=\"#{script_url}\"></script></body></html>" + + rack_app = proc do + ['200', { 'Content-Type' => 'text/html' }, [embed_body]] + end + + server = Capybara::Server.new(rack_app) + server.boot + + visit("http://#{server.host}:#{server.port}/embedded_snippet.html") + + expect(page).to have_content("random_dir.rb") + expect(page).to have_content("require 'fileutils'") + expect(page).to have_link('Open raw') + expect(page).to have_link('Download') + end +end diff --git a/spec/features/user_can_display_performance_bar_spec.rb b/spec/features/user_can_display_performance_bar_spec.rb index 975c157bcf5..e069c2fddd1 100644 --- a/spec/features/user_can_display_performance_bar_spec.rb +++ b/spec/features/user_can_display_performance_bar_spec.rb @@ -3,7 +3,7 @@ require 'rails_helper' describe 'User can display performance bar', :js do shared_examples 'performance bar cannot be displayed' do it 'does not show the performance bar by default' do - expect(page).not_to have_css('#peek') + expect(page).not_to have_css('#js-peek') end context 'when user press `pb`' do @@ -12,14 +12,14 @@ describe 'User can display performance bar', :js do end it 'does not show the performance bar by default' do - expect(page).not_to have_css('#peek') + expect(page).not_to have_css('#js-peek') end end end shared_examples 'performance bar can be displayed' do it 'does 
not show the performance bar by default' do - expect(page).not_to have_css('#peek') + expect(page).not_to have_css('#js-peek') end context 'when user press `pb`' do @@ -28,7 +28,7 @@ describe 'User can display performance bar', :js do end it 'shows the performance bar' do - expect(page).to have_css('#peek') + expect(page).to have_css('#js-peek') end end end @@ -41,7 +41,7 @@ describe 'User can display performance bar', :js do it 'shows the performance bar by default' do refresh # Because we're stubbing Rails.env after the 1st visit to root_path - expect(page).to have_css('#peek') + expect(page).to have_css('#js-peek') end end diff --git a/spec/features/user_sorts_things_spec.rb b/spec/features/user_sorts_things_spec.rb new file mode 100644 index 00000000000..69ebdddaeec --- /dev/null +++ b/spec/features/user_sorts_things_spec.rb @@ -0,0 +1,57 @@ +require "spec_helper" + +# The main goal of this spec is not to check whether the sorting UI works, but +# to check if the sorting option set by user is being kept persisted while going through pages. +# The `it`s are named here by convention `starting point -> some pages -> final point`. +# All those specs are moved out to this spec intentionally to keep them all in one place. +describe "User sorts things" do + include Spec::Support::Helpers::Features::SortingHelpers + include Helpers::DashboardHelper + + set(:project) { create(:project_empty_repo, :public) } + set(:current_user) { create(:user) } # Using `current_user` instead of just `user` because of the hardoced call in `assigned_mrs_dashboard_path` which is used below. + set(:issue) { create(:issue, project: project, author: current_user) } + set(:merge_request) { create(:merge_request, target_project: project, source_project: project, author: current_user) } + + before do + project.add_developer(current_user) + sign_in(current_user) + end + + it "issues -> project home page -> issues" do + sort_option = "Last updated" + + visit(project_issues_path(project)) + + sort_by(sort_option) + + visit(project_path(project)) + visit(project_issues_path(project)) + + expect(find(".issues-filters")).to have_content(sort_option) + end + + it "issues -> merge requests" do + sort_option = "Last updated" + + visit(project_issues_path(project)) + + sort_by(sort_option) + + visit(project_merge_requests_path(project)) + + expect(find(".issues-filters")).to have_content(sort_option) + end + + it "merge requests -> dashboard merge requests" do + sort_option = "Last updated" + + visit(project_merge_requests_path(project)) + + sort_by(sort_option) + + visit(assigned_mrs_dashboard_path) + + expect(find(".issues-filters")).to have_content(sort_option) + end +end diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb index bc75dc5d19b..9e10bfb2adc 100644 --- a/spec/features/users/login_spec.rb +++ b/spec/features/users/login_spec.rb @@ -392,7 +392,7 @@ feature 'Login' do end def ensure_one_active_tab - expect(page).to have_selector('.nav-tabs > li.active', count: 1) + expect(page).to have_selector('ul.new-session-tabs > li.active', count: 1) end def ensure_one_active_pane diff --git a/spec/finders/clusters_finder_spec.rb b/spec/finders/clusters_finder_spec.rb index c10efac2432..da529e0670f 100644 --- a/spec/finders/clusters_finder_spec.rb +++ b/spec/finders/clusters_finder_spec.rb @@ -6,7 +6,7 @@ describe ClustersFinder do describe '#execute' do let(:enabled_cluster) { create(:cluster, :provided_by_gcp, projects: [project]) } - let(:disabled_cluster) { create(:cluster, :disabled, 
:provided_by_gcp, projects: [project]) } + let(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, :production_environment, projects: [project]) } subject { described_class.new(project, user, scope).execute } diff --git a/spec/finders/group_descendants_finder_spec.rb b/spec/finders/group_descendants_finder_spec.rb index 375bcc9087e..796d40cb625 100644 --- a/spec/finders/group_descendants_finder_spec.rb +++ b/spec/finders/group_descendants_finder_spec.rb @@ -35,15 +35,6 @@ describe GroupDescendantsFinder do expect(finder.execute).to contain_exactly(project) end - it 'does not include projects shared with the group' do - project = create(:project, namespace: group) - other_project = create(:project) - other_project.project_group_links.create(group: group, - group_access: ProjectGroupLink::MASTER) - - expect(finder.execute).to contain_exactly(project) - end - context 'when archived is `true`' do let(:params) { { archived: 'true' } } diff --git a/spec/finders/labels_finder_spec.rb b/spec/finders/labels_finder_spec.rb index d434c501110..899d0d22819 100644 --- a/spec/finders/labels_finder_spec.rb +++ b/spec/finders/labels_finder_spec.rb @@ -71,6 +71,24 @@ describe LabelsFinder do end end + context 'when group has no projects' do + let(:empty_group) { create(:group) } + let!(:empty_group_label_1) { create(:group_label, group: empty_group, title: 'Label 1 (empty group)') } + let!(:empty_group_label_2) { create(:group_label, group: empty_group, title: 'Label 2 (empty group)') } + + before do + empty_group.add_developer(user) + end + + context 'when only group labels is false' do + it 'returns group labels' do + finder = described_class.new(user, group_id: empty_group.id) + + expect(finder.execute).to eq [empty_group_label_1, empty_group_label_2] + end + end + end + context 'when including labels from group ancestors', :nested_groups do it 'returns labels from group and its ancestors' do private_group_1.add_developer(user) @@ -110,7 +128,21 @@ describe LabelsFinder do end end - context 'filtering by project_id' do + context 'filtering by project_id', :nested_groups do + context 'when include_ancestor_groups is true' do + let!(:sub_project) { create(:project, namespace: private_subgroup_1 ) } + let!(:project_label) { create(:label, project: sub_project, title: 'Label 5') } + let(:finder) { described_class.new(user, project_id: sub_project.id, include_ancestor_groups: true) } + + before do + private_group_1.add_developer(user) + end + + it 'returns all ancestor labels' do + expect(finder.execute).to match_array([private_subgroup_label_1, private_group_label_1, project_label]) + end + end + it 'returns labels available for the project' do finder = described_class.new(user, project_id: project_1.id) diff --git a/spec/finders/merge_request_target_project_finder_spec.rb b/spec/finders/merge_request_target_project_finder_spec.rb index c81bfd7932c..f302cf80ce8 100644 --- a/spec/finders/merge_request_target_project_finder_spec.rb +++ b/spec/finders/merge_request_target_project_finder_spec.rb @@ -19,6 +19,12 @@ describe MergeRequestTargetProjectFinder do expect(finder.execute).to contain_exactly(forked_project) end + + it 'does not contain archived projects' do + base_project.update!(archived: true) + + expect(finder.execute).to contain_exactly(other_fork, forked_project) + end end context 'public projects' do diff --git a/spec/fixtures/api/schemas/issue.json b/spec/fixtures/api/schemas/issue.json index b579e32c9aa..8833825e3fb 100644 --- a/spec/fixtures/api/schemas/issue.json +++ 
b/spec/fixtures/api/schemas/issue.json @@ -15,6 +15,8 @@ "relative_position": { "type": "integer" }, "issue_sidebar_endpoint": { "type": "string" }, "toggle_subscription_endpoint": { "type": "string" }, + "reference_path": { "type": "string" }, + "real_path": { "type": "string" }, "project": { "id": { "type": "integer" }, "path": { "type": "string" } diff --git a/spec/fixtures/api/schemas/public_api/v4/project/export_status.json b/spec/fixtures/api/schemas/public_api/v4/project/export_status.json index d24a6f93f4b..81c8815caf6 100644 --- a/spec/fixtures/api/schemas/public_api/v4/project/export_status.json +++ b/spec/fixtures/api/schemas/public_api/v4/project/export_status.json @@ -1,7 +1,9 @@ { "type": "object", "allOf": [ - { "$ref": "identity.json" }, + { + "$ref": "identity.json" + }, { "required": [ "export_status" @@ -9,7 +11,12 @@ "properties": { "export_status": { "type": "string", - "enum": ["none", "started", "finished"] + "enum": [ + "none", + "started", + "finished", + "after_export_action" + ] } } } diff --git a/spec/fixtures/api/schemas/public_api/v4/tag.json b/spec/fixtures/api/schemas/public_api/v4/tag.json index 52cfe86aeeb..10d4edb7ffb 100644 --- a/spec/fixtures/api/schemas/public_api/v4/tag.json +++ b/spec/fixtures/api/schemas/public_api/v4/tag.json @@ -10,6 +10,7 @@ "name": { "type": "string" }, "message": { "type": ["string", "null"] }, "commit": { "$ref": "commit/basic.json" }, + "target": { "type": "string" }, "release": { "oneOf": [ { "type": "null" }, diff --git a/spec/fixtures/big-image.png b/spec/fixtures/big-image.png Binary files differnew file mode 100644 index 00000000000..a333363ac36 --- /dev/null +++ b/spec/fixtures/big-image.png diff --git a/spec/fixtures/exported-project.gz b/spec/fixtures/exported-project.gz Binary files differnew file mode 100644 index 00000000000..352384f16c8 --- /dev/null +++ b/spec/fixtures/exported-project.gz diff --git a/spec/fixtures/trace/sample_trace b/spec/fixtures/trace/sample_trace index 55fcb9d2756..c65cf05d5ca 100644 --- a/spec/fixtures/trace/sample_trace +++ b/spec/fixtures/trace/sample_trace @@ -1,24 +1,24 @@ -[0KRunning with gitlab-runner 10.4.0 (857480b6) - on docker-auto-scale-com (9a6801bd) -[0;m[0KUsing Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ... -[0;m[0KStarting service postgres:9.2 ... -[0;m[0KPulling docker image postgres:9.2 ... -[0;m[0KUsing docker image postgres:9.2 ID=sha256:18cdbca56093c841d28e629eb8acd4224afe0aa4c57c839351fc181888b8a470 for postgres service... +[0KRunning with gitlab-runner 10.6.0 (a3543a27) +[0;m[0K on docker-auto-scale-com 30d62d59 +[0;m[0KUsing Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.16-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ... +[0;m[0KStarting service mysql:latest ... +[0;m[0KPulling docker image mysql:latest ... +[0;m[0KUsing docker image sha256:5195076672a7e30525705a18f7d352c920bbd07a5ae72b30e374081fe660a011 for mysql:latest ... [0;m[0KStarting service redis:alpine ... [0;m[0KPulling docker image redis:alpine ... -[0;m[0KUsing docker image redis:alpine ID=sha256:cb1ec54b370d4a91dff57d00f91fd880dc710160a58440adaa133e0f84ae999d for redis service... +[0;m[0KUsing docker image sha256:98bd7cfc43b8ef0ff130465e3d5427c0771002c2f35a6a9b62cb2d04602bed0a for redis:alpine ... [0;m[0KWaiting for services to be up and running... 
-[0;m[0KUsing docker image sha256:3006a02a5a6f0a116358a13bbc46ee46fb2471175efd5b7f9b1c22345ec2a8e9 for predefined container... -[0;m[0KPulling docker image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ... -[0;m[0KUsing docker image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ID=sha256:1f59be408f12738509ffe4177d65e9de6391f32461de83d9d45f58517b30af99 for build container... -[0;msection_start:1517486886:prepare_script -[0KRunning on runner-9a6801bd-project-13083-concurrent-0 via runner-9a6801bd-gsrm-1517484168-a8449153... -section_end:1517486887:prepare_script -[0Ksection_start:1517486887:get_sources -[0K[32;1mFetching changes for 42624-gitaly-bundle-isolation-not-working-in-ci with git depth set to 20...[0;m +[0;m[0KPulling docker image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.16-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ... +[0;m[0KUsing docker image sha256:1b06077bb03d9d42d801b53f45701bb6a7e862ca02e1e75f30ca7fcf1270eb02 for dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.16-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ... +[0;msection_start:1522927103:prepare_script +[0KRunning on runner-30d62d59-project-13083-concurrent-0 via runner-30d62d59-prm-1522922015-ddc29478... +section_end:1522927104:prepare_script +[0Ksection_start:1522927104:get_sources +[0K[32;1mFetching changes for master with git depth set to 20...[0;m Removing .gitlab_shell_secret Removing .gitlab_workhorse_secret Removing .yarn-cache/ +Removing builds/2018_04/ Removing config/database.yml Removing config/gitlab.yml Removing config/redis.cache.yml @@ -26,1160 +26,3420 @@ Removing config/redis.queues.yml Removing config/redis.shared_state.yml Removing config/resque.yml Removing config/secrets.yml -Removing coverage/ -Removing knapsack/ Removing log/api_json.log Removing log/application.log Removing log/gitaly-test.log -Removing log/githost.log Removing log/grpc.log Removing log/test_json.log -Removing node_modules/ -Removing public/assets/ -Removing rspec_flaky/ -Removing shared/tmp/ Removing tmp/tests/ Removing vendor/ruby/ -HEAD is now at 4cea24f Converted todos.js to axios +HEAD is now at b7cbff3d Add `direct_upload` setting for artifacts From https://gitlab.com/gitlab-org/gitlab-ce - * [new branch] 42624-gitaly-bundle-isolation-not-working-in-ci -> origin/42624-gitaly-bundle-isolation-not-working-in-ci -[32;1mChecking out f42a5e24 as 42624-gitaly-bundle-isolation-not-working-in-ci...[0;m + 2dbcb9cb..641bb13b master -> origin/master +[32;1mChecking out 21488c74 as master...[0;m [32;1mSkipping Git submodules setup[0;m -section_end:1517486896:get_sources -[0Ksection_start:1517486896:restore_cache +section_end:1522927113:get_sources +[0Ksection_start:1522927113:restore_cache [0K[32;1mChecking cache for ruby-2.3.6-with-yarn...[0;m Downloading cache.zip from http://runners-cache-5-internal.gitlab.com:444/runner/project/13083/ruby-2.3.6-with-yarn[0;m [32;1mSuccessfully extracted cache[0;m -section_end:1517486919:restore_cache -[0Ksection_start:1517486919:download_artifacts -[0K[32;1mDownloading artifacts for retrieve-tests-metadata (50551658)...[0;m -Downloading artifacts from coordinator... ok [0;m id[0;m=50551658 responseStatus[0;m=200 OK token[0;m=HhF7y_1X -[32;1mDownloading artifacts for compile-assets (50551659)...[0;m -Downloading artifacts from coordinator... 
ok [0;m id[0;m=50551659 responseStatus[0;m=200 OK token[0;m=wTz6JrCP -[32;1mDownloading artifacts for setup-test-env (50551660)...[0;m -Downloading artifacts from coordinator... ok [0;m id[0;m=50551660 responseStatus[0;m=200 OK token[0;m=DTGgeVF5 +section_end:1522927128:restore_cache +[0Ksection_start:1522927128:download_artifacts +[0K[32;1mDownloading artifacts for retrieve-tests-metadata (61303215)...[0;m +Downloading artifacts from coordinator... ok [0;m id[0;m=61303215 responseStatus[0;m=200 OK token[0;m=AdWPNg2R +[32;1mDownloading artifacts for compile-assets (61303216)...[0;m +Downloading artifacts from coordinator... ok [0;m id[0;m=61303216 responseStatus[0;m=200 OK token[0;m=iy2yYbq8 +[32;1mDownloading artifacts for setup-test-env (61303217)...[0;m +Downloading artifacts from coordinator... ok [0;m id[0;m=61303217 responseStatus[0;m=200 OK token[0;m=ur1g79-4 [0;33mWARNING: tmp/tests/gitlab-shell/.gitlab_shell_secret: chmod tmp/tests/gitlab-shell/.gitlab_shell_secret: no such file or directory (suppressing repeats)[0;m -section_end:1517486934:download_artifacts -[0Ksection_start:1517486934:build_script +section_end:1522927141:download_artifacts +[0Ksection_start:1522927141:build_script [0K[32;1m$ bundle --version[0;m Bundler version 1.16.1 +[32;1m$ date[0;m +Thu Apr 5 11:19:01 UTC 2018 [32;1m$ source scripts/utils.sh[0;m +[32;1m$ date[0;m +Thu Apr 5 11:19:01 UTC 2018 [32;1m$ source scripts/prepare_build.sh[0;m The Gemfile's dependencies are satisfied -Successfully installed knapsack-1.15.0 +Successfully installed knapsack-1.16.0 1 gem installed -NOTICE: database "gitlabhq_test" does not exist, skipping -DROP DATABASE -CREATE DATABASE -CREATE ROLE -GRANT -- enable_extension("plpgsql") - -> 0.0156s + -> 0.0010s -- enable_extension("pg_trgm") - -> 0.0156s + -> 0.0000s -- create_table("abuse_reports", {:force=>:cascade}) - -> 0.0119s + -> 0.0401s -- create_table("appearances", {:force=>:cascade}) - -> 0.0065s + -> 0.1035s -- create_table("application_settings", {:force=>:cascade}) - -> 0.0382s + -> 0.0871s -- create_table("audit_events", {:force=>:cascade}) - -> 0.0056s + -> 0.0539s -- add_index("audit_events", ["entity_id", "entity_type"], {:name=>"index_audit_events_on_entity_id_and_entity_type", :using=>:btree}) - -> 0.0040s + -> 0.0647s -- create_table("award_emoji", {:force=>:cascade}) - -> 0.0058s + -> 0.0134s -- add_index("award_emoji", ["awardable_type", "awardable_id"], {:name=>"index_award_emoji_on_awardable_type_and_awardable_id", :using=>:btree}) - -> 0.0068s + -> 0.0074s -- add_index("award_emoji", ["user_id", "name"], {:name=>"index_award_emoji_on_user_id_and_name", :using=>:btree}) - -> 0.0043s + -> 0.0072s +-- create_table("badges", {:force=>:cascade}) + -> 0.0122s +-- add_index("badges", ["group_id"], {:name=>"index_badges_on_group_id", :using=>:btree}) + -> 0.0086s +-- add_index("badges", ["project_id"], {:name=>"index_badges_on_project_id", :using=>:btree}) + -> 0.0069s -- create_table("boards", {:force=>:cascade}) - -> 0.0049s + -> 0.0075s +-- add_index("boards", ["group_id"], {:name=>"index_boards_on_group_id", :using=>:btree}) + -> 0.0050s -- add_index("boards", ["project_id"], {:name=>"index_boards_on_project_id", :using=>:btree}) - -> 0.0056s + -> 0.0051s -- create_table("broadcast_messages", {:force=>:cascade}) - -> 0.0056s + -> 0.0082s -- add_index("broadcast_messages", ["starts_at", "ends_at", "id"], {:name=>"index_broadcast_messages_on_starts_at_and_ends_at_and_id", :using=>:btree}) - -> 0.0041s + -> 0.0063s -- create_table("chat_names", {:force=>:cascade}) - 
-> 0.0056s + -> 0.0084s -- add_index("chat_names", ["service_id", "team_id", "chat_id"], {:name=>"index_chat_names_on_service_id_and_team_id_and_chat_id", :unique=>true, :using=>:btree}) - -> 0.0039s + -> 0.0088s -- add_index("chat_names", ["user_id", "service_id"], {:name=>"index_chat_names_on_user_id_and_service_id", :unique=>true, :using=>:btree}) - -> 0.0036s + -> 0.0077s -- create_table("chat_teams", {:force=>:cascade}) - -> 0.0068s + -> 0.0120s -- add_index("chat_teams", ["namespace_id"], {:name=>"index_chat_teams_on_namespace_id", :unique=>true, :using=>:btree}) - -> 0.0098s + -> 0.0135s -- create_table("ci_build_trace_section_names", {:force=>:cascade}) - -> 0.0048s + -> 0.0125s -- add_index("ci_build_trace_section_names", ["project_id", "name"], {:name=>"index_ci_build_trace_section_names_on_project_id_and_name", :unique=>true, :using=>:btree}) - -> 0.0035s + -> 0.0087s -- create_table("ci_build_trace_sections", {:force=>:cascade}) - -> 0.0040s + -> 0.0094s -- add_index("ci_build_trace_sections", ["build_id", "section_name_id"], {:name=>"index_ci_build_trace_sections_on_build_id_and_section_name_id", :unique=>true, :using=>:btree}) - -> 0.0035s + -> 0.0916s -- add_index("ci_build_trace_sections", ["project_id"], {:name=>"index_ci_build_trace_sections_on_project_id", :using=>:btree}) - -> 0.0033s + -> 0.0089s +-- add_index("ci_build_trace_sections", ["section_name_id"], {:name=>"index_ci_build_trace_sections_on_section_name_id", :using=>:btree}) + -> 0.0132s -- create_table("ci_builds", {:force=>:cascade}) - -> 0.0062s + -> 0.0140s +-- add_index("ci_builds", ["artifacts_expire_at"], {:name=>"index_ci_builds_on_artifacts_expire_at", :where=>"(artifacts_file <> ''::text)", :using=>:btree}) + -> 0.0325s -- add_index("ci_builds", ["auto_canceled_by_id"], {:name=>"index_ci_builds_on_auto_canceled_by_id", :using=>:btree}) - -> 0.0035s + -> 0.0081s -- add_index("ci_builds", ["commit_id", "stage_idx", "created_at"], {:name=>"index_ci_builds_on_commit_id_and_stage_idx_and_created_at", :using=>:btree}) - -> 0.0032s + -> 0.0114s -- add_index("ci_builds", ["commit_id", "status", "type"], {:name=>"index_ci_builds_on_commit_id_and_status_and_type", :using=>:btree}) - -> 0.0032s + -> 0.0119s -- add_index("ci_builds", ["commit_id", "type", "name", "ref"], {:name=>"index_ci_builds_on_commit_id_and_type_and_name_and_ref", :using=>:btree}) - -> 0.0035s + -> 0.0116s -- add_index("ci_builds", ["commit_id", "type", "ref"], {:name=>"index_ci_builds_on_commit_id_and_type_and_ref", :using=>:btree}) - -> 0.0042s + -> 0.0144s -- add_index("ci_builds", ["project_id", "id"], {:name=>"index_ci_builds_on_project_id_and_id", :using=>:btree}) - -> 0.0031s + -> 0.0136s -- add_index("ci_builds", ["protected"], {:name=>"index_ci_builds_on_protected", :using=>:btree}) - -> 0.0031s + -> 0.0113s -- add_index("ci_builds", ["runner_id"], {:name=>"index_ci_builds_on_runner_id", :using=>:btree}) - -> 0.0033s + -> 0.0082s -- add_index("ci_builds", ["stage_id"], {:name=>"index_ci_builds_on_stage_id", :using=>:btree}) - -> 0.0035s + -> 0.0086s -- add_index("ci_builds", ["status", "type", "runner_id"], {:name=>"index_ci_builds_on_status_and_type_and_runner_id", :using=>:btree}) - -> 0.0031s + -> 0.0091s -- add_index("ci_builds", ["status"], {:name=>"index_ci_builds_on_status", :using=>:btree}) - -> 0.0032s + -> 0.0081s -- add_index("ci_builds", ["token"], {:name=>"index_ci_builds_on_token", :unique=>true, :using=>:btree}) - -> 0.0028s + -> 0.0103s -- add_index("ci_builds", ["updated_at"], 
{:name=>"index_ci_builds_on_updated_at", :using=>:btree}) - -> 0.0047s + -> 0.0149s -- add_index("ci_builds", ["user_id"], {:name=>"index_ci_builds_on_user_id", :using=>:btree}) - -> 0.0029s + -> 0.0156s +-- create_table("ci_builds_metadata", {:force=>:cascade}) + -> 0.0134s +-- add_index("ci_builds_metadata", ["build_id"], {:name=>"index_ci_builds_metadata_on_build_id", :unique=>true, :using=>:btree}) + -> 0.0067s +-- add_index("ci_builds_metadata", ["project_id"], {:name=>"index_ci_builds_metadata_on_project_id", :using=>:btree}) + -> 0.0061s -- create_table("ci_group_variables", {:force=>:cascade}) - -> 0.0055s + -> 0.0088s -- add_index("ci_group_variables", ["group_id", "key"], {:name=>"index_ci_group_variables_on_group_id_and_key", :unique=>true, :using=>:btree}) - -> 0.0028s + -> 0.0073s -- create_table("ci_job_artifacts", {:force=>:cascade}) - -> 0.0048s + -> 0.0089s +-- add_index("ci_job_artifacts", ["expire_at", "job_id"], {:name=>"index_ci_job_artifacts_on_expire_at_and_job_id", :using=>:btree}) + -> 0.0061s -- add_index("ci_job_artifacts", ["job_id", "file_type"], {:name=>"index_ci_job_artifacts_on_job_id_and_file_type", :unique=>true, :using=>:btree}) - -> 0.0027s + -> 0.0077s -- add_index("ci_job_artifacts", ["project_id"], {:name=>"index_ci_job_artifacts_on_project_id", :using=>:btree}) - -> 0.0028s + -> 0.0071s -- create_table("ci_pipeline_schedule_variables", {:force=>:cascade}) - -> 0.0044s + -> 0.0512s -- add_index("ci_pipeline_schedule_variables", ["pipeline_schedule_id", "key"], {:name=>"index_ci_pipeline_schedule_variables_on_schedule_id_and_key", :unique=>true, :using=>:btree}) - -> 0.0032s + -> 0.0144s -- create_table("ci_pipeline_schedules", {:force=>:cascade}) - -> 0.0047s + -> 0.0603s -- add_index("ci_pipeline_schedules", ["next_run_at", "active"], {:name=>"index_ci_pipeline_schedules_on_next_run_at_and_active", :using=>:btree}) - -> 0.0029s + -> 0.0247s -- add_index("ci_pipeline_schedules", ["project_id"], {:name=>"index_ci_pipeline_schedules_on_project_id", :using=>:btree}) - -> 0.0028s + -> 0.0082s -- create_table("ci_pipeline_variables", {:force=>:cascade}) - -> 0.0045s + -> 0.0112s -- add_index("ci_pipeline_variables", ["pipeline_id", "key"], {:name=>"index_ci_pipeline_variables_on_pipeline_id_and_key", :unique=>true, :using=>:btree}) - -> 0.0030s + -> 0.0075s -- create_table("ci_pipelines", {:force=>:cascade}) - -> 0.0057s + -> 0.0111s -- add_index("ci_pipelines", ["auto_canceled_by_id"], {:name=>"index_ci_pipelines_on_auto_canceled_by_id", :using=>:btree}) - -> 0.0030s + -> 0.0074s -- add_index("ci_pipelines", ["pipeline_schedule_id"], {:name=>"index_ci_pipelines_on_pipeline_schedule_id", :using=>:btree}) - -> 0.0031s + -> 0.0086s -- add_index("ci_pipelines", ["project_id", "ref", "status", "id"], {:name=>"index_ci_pipelines_on_project_id_and_ref_and_status_and_id", :using=>:btree}) - -> 0.0032s + -> 0.0104s -- add_index("ci_pipelines", ["project_id", "sha"], {:name=>"index_ci_pipelines_on_project_id_and_sha", :using=>:btree}) - -> 0.0032s + -> 0.0107s -- add_index("ci_pipelines", ["project_id"], {:name=>"index_ci_pipelines_on_project_id", :using=>:btree}) - -> 0.0035s + -> 0.0084s -- add_index("ci_pipelines", ["status"], {:name=>"index_ci_pipelines_on_status", :using=>:btree}) - -> 0.0032s + -> 0.0065s -- add_index("ci_pipelines", ["user_id"], {:name=>"index_ci_pipelines_on_user_id", :using=>:btree}) - -> 0.0029s + -> 0.0071s -- create_table("ci_runner_projects", {:force=>:cascade}) - -> 0.0035s + -> 0.0077s -- add_index("ci_runner_projects", 
["project_id"], {:name=>"index_ci_runner_projects_on_project_id", :using=>:btree}) - -> 0.0029s + -> 0.0072s -- add_index("ci_runner_projects", ["runner_id"], {:name=>"index_ci_runner_projects_on_runner_id", :using=>:btree}) - -> 0.0028s + -> 0.0064s -- create_table("ci_runners", {:force=>:cascade}) - -> 0.0059s + -> 0.0090s -- add_index("ci_runners", ["contacted_at"], {:name=>"index_ci_runners_on_contacted_at", :using=>:btree}) - -> 0.0030s + -> 0.0078s -- add_index("ci_runners", ["is_shared"], {:name=>"index_ci_runners_on_is_shared", :using=>:btree}) - -> 0.0030s + -> 0.0054s -- add_index("ci_runners", ["locked"], {:name=>"index_ci_runners_on_locked", :using=>:btree}) - -> 0.0030s + -> 0.0052s -- add_index("ci_runners", ["token"], {:name=>"index_ci_runners_on_token", :using=>:btree}) - -> 0.0029s + -> 0.0057s -- create_table("ci_stages", {:force=>:cascade}) - -> 0.0046s --- add_index("ci_stages", ["pipeline_id", "name"], {:name=>"index_ci_stages_on_pipeline_id_and_name", :using=>:btree}) - -> 0.0031s + -> 0.0059s +-- add_index("ci_stages", ["pipeline_id", "name"], {:name=>"index_ci_stages_on_pipeline_id_and_name", :unique=>true, :using=>:btree}) + -> 0.0054s -- add_index("ci_stages", ["pipeline_id"], {:name=>"index_ci_stages_on_pipeline_id", :using=>:btree}) - -> 0.0030s + -> 0.0045s -- add_index("ci_stages", ["project_id"], {:name=>"index_ci_stages_on_project_id", :using=>:btree}) - -> 0.0028s + -> 0.0053s -- create_table("ci_trigger_requests", {:force=>:cascade}) - -> 0.0058s + -> 0.0079s -- add_index("ci_trigger_requests", ["commit_id"], {:name=>"index_ci_trigger_requests_on_commit_id", :using=>:btree}) - -> 0.0031s + -> 0.0059s -- create_table("ci_triggers", {:force=>:cascade}) - -> 0.0043s + -> 0.0100s -- add_index("ci_triggers", ["project_id"], {:name=>"index_ci_triggers_on_project_id", :using=>:btree}) - -> 0.0033s --- create_table("ci_variables", {:force=>:cascade}) -> 0.0059s +-- create_table("ci_variables", {:force=>:cascade}) + -> 0.0110s -- add_index("ci_variables", ["project_id", "key", "environment_scope"], {:name=>"index_ci_variables_on_project_id_and_key_and_environment_scope", :unique=>true, :using=>:btree}) - -> 0.0031s + -> 0.0066s -- create_table("cluster_platforms_kubernetes", {:force=>:cascade}) - -> 0.0053s + -> 0.0082s -- add_index("cluster_platforms_kubernetes", ["cluster_id"], {:name=>"index_cluster_platforms_kubernetes_on_cluster_id", :unique=>true, :using=>:btree}) - -> 0.0028s + -> 0.0047s -- create_table("cluster_projects", {:force=>:cascade}) - -> 0.0032s + -> 0.0079s -- add_index("cluster_projects", ["cluster_id"], {:name=>"index_cluster_projects_on_cluster_id", :using=>:btree}) - -> 0.0035s + -> 0.0045s -- add_index("cluster_projects", ["project_id"], {:name=>"index_cluster_projects_on_project_id", :using=>:btree}) - -> 0.0030s + -> 0.0044s -- create_table("cluster_providers_gcp", {:force=>:cascade}) - -> 0.0051s + -> 0.0247s -- add_index("cluster_providers_gcp", ["cluster_id"], {:name=>"index_cluster_providers_gcp_on_cluster_id", :unique=>true, :using=>:btree}) - -> 0.0034s + -> 0.0088s -- create_table("clusters", {:force=>:cascade}) - -> 0.0052s + -> 0.0767s -- add_index("clusters", ["enabled"], {:name=>"index_clusters_on_enabled", :using=>:btree}) - -> 0.0031s + -> 0.0162s -- add_index("clusters", ["user_id"], {:name=>"index_clusters_on_user_id", :using=>:btree}) - -> 0.0028s + -> 0.0216s -- create_table("clusters_applications_helm", {:force=>:cascade}) - -> 0.0045s + -> 0.0379s -- create_table("clusters_applications_ingress", {:force=>:cascade}) - -> 
0.0044s + -> 0.0409s -- create_table("clusters_applications_prometheus", {:force=>:cascade}) - -> 0.0047s + -> 0.0178s +-- create_table("clusters_applications_runners", {:force=>:cascade}) + -> 0.0471s +-- add_index("clusters_applications_runners", ["cluster_id"], {:name=>"index_clusters_applications_runners_on_cluster_id", :unique=>true, :using=>:btree}) + -> 0.0487s +-- add_index("clusters_applications_runners", ["runner_id"], {:name=>"index_clusters_applications_runners_on_runner_id", :using=>:btree}) + -> 0.0094s -- create_table("container_repositories", {:force=>:cascade}) - -> 0.0050s + -> 0.0142s -- add_index("container_repositories", ["project_id", "name"], {:name=>"index_container_repositories_on_project_id_and_name", :unique=>true, :using=>:btree}) - -> 0.0032s + -> 0.0080s -- add_index("container_repositories", ["project_id"], {:name=>"index_container_repositories_on_project_id", :using=>:btree}) - -> 0.0032s + -> 0.0070s -- create_table("conversational_development_index_metrics", {:force=>:cascade}) - -> 0.0076s + -> 0.0204s -- create_table("deploy_keys_projects", {:force=>:cascade}) - -> 0.0037s + -> 0.0154s -- add_index("deploy_keys_projects", ["project_id"], {:name=>"index_deploy_keys_projects_on_project_id", :using=>:btree}) - -> 0.0032s + -> 0.0471s -- create_table("deployments", {:force=>:cascade}) - -> 0.0049s + -> 0.0191s -- add_index("deployments", ["created_at"], {:name=>"index_deployments_on_created_at", :using=>:btree}) - -> 0.0034s + -> 0.0552s -- add_index("deployments", ["environment_id", "id"], {:name=>"index_deployments_on_environment_id_and_id", :using=>:btree}) - -> 0.0028s + -> 0.0294s -- add_index("deployments", ["environment_id", "iid", "project_id"], {:name=>"index_deployments_on_environment_id_and_iid_and_project_id", :using=>:btree}) - -> 0.0029s + -> 0.0408s -- add_index("deployments", ["project_id", "iid"], {:name=>"index_deployments_on_project_id_and_iid", :unique=>true, :using=>:btree}) - -> 0.0032s + -> 0.0094s -- create_table("emails", {:force=>:cascade}) - -> 0.0046s + -> 0.0127s -- add_index("emails", ["confirmation_token"], {:name=>"index_emails_on_confirmation_token", :unique=>true, :using=>:btree}) - -> 0.0030s + -> 0.0082s -- add_index("emails", ["email"], {:name=>"index_emails_on_email", :unique=>true, :using=>:btree}) - -> 0.0035s + -> 0.0110s -- add_index("emails", ["user_id"], {:name=>"index_emails_on_user_id", :using=>:btree}) - -> 0.0028s + -> 0.0079s -- create_table("environments", {:force=>:cascade}) - -> 0.0052s + -> 0.0106s -- add_index("environments", ["project_id", "name"], {:name=>"index_environments_on_project_id_and_name", :unique=>true, :using=>:btree}) - -> 0.0031s + -> 0.0086s -- add_index("environments", ["project_id", "slug"], {:name=>"index_environments_on_project_id_and_slug", :unique=>true, :using=>:btree}) - -> 0.0028s + -> 0.0076s -- create_table("events", {:force=>:cascade}) - -> 0.0046s + -> 0.0122s -- add_index("events", ["action"], {:name=>"index_events_on_action", :using=>:btree}) - -> 0.0032s --- add_index("events", ["author_id"], {:name=>"index_events_on_author_id", :using=>:btree}) - -> 0.0027s + -> 0.0068s +-- add_index("events", ["author_id", "project_id"], {:name=>"index_events_on_author_id_and_project_id", :using=>:btree}) + -> 0.0081s -- add_index("events", ["project_id", "id"], {:name=>"index_events_on_project_id_and_id", :using=>:btree}) - -> 0.0027s + -> 0.0064s -- add_index("events", ["target_type", "target_id"], {:name=>"index_events_on_target_type_and_target_id", :using=>:btree}) - -> 0.0027s + -> 
0.0087s -- create_table("feature_gates", {:force=>:cascade}) - -> 0.0046s + -> 0.0105s -- add_index("feature_gates", ["feature_key", "key", "value"], {:name=>"index_feature_gates_on_feature_key_and_key_and_value", :unique=>true, :using=>:btree}) - -> 0.0031s + -> 0.0080s -- create_table("features", {:force=>:cascade}) - -> 0.0041s + -> 0.0086s -- add_index("features", ["key"], {:name=>"index_features_on_key", :unique=>true, :using=>:btree}) - -> 0.0030s + -> 0.0058s -- create_table("fork_network_members", {:force=>:cascade}) - -> 0.0033s + -> 0.0081s -- add_index("fork_network_members", ["fork_network_id"], {:name=>"index_fork_network_members_on_fork_network_id", :using=>:btree}) - -> 0.0033s + -> 0.0056s -- add_index("fork_network_members", ["project_id"], {:name=>"index_fork_network_members_on_project_id", :unique=>true, :using=>:btree}) - -> 0.0029s + -> 0.0053s -- create_table("fork_networks", {:force=>:cascade}) - -> 0.0049s + -> 0.0081s -- add_index("fork_networks", ["root_project_id"], {:name=>"index_fork_networks_on_root_project_id", :unique=>true, :using=>:btree}) - -> 0.0029s + -> 0.0051s -- create_table("forked_project_links", {:force=>:cascade}) - -> 0.0032s + -> 0.0070s -- add_index("forked_project_links", ["forked_to_project_id"], {:name=>"index_forked_project_links_on_forked_to_project_id", :unique=>true, :using=>:btree}) - -> 0.0030s + -> 0.0061s -- create_table("gcp_clusters", {:force=>:cascade}) - -> 0.0074s + -> 0.0090s -- add_index("gcp_clusters", ["project_id"], {:name=>"index_gcp_clusters_on_project_id", :unique=>true, :using=>:btree}) - -> 0.0030s + -> 0.0073s -- create_table("gpg_key_subkeys", {:force=>:cascade}) - -> 0.0042s + -> 0.0092s -- add_index("gpg_key_subkeys", ["fingerprint"], {:name=>"index_gpg_key_subkeys_on_fingerprint", :unique=>true, :using=>:btree}) - -> 0.0029s + -> 0.0063s -- add_index("gpg_key_subkeys", ["gpg_key_id"], {:name=>"index_gpg_key_subkeys_on_gpg_key_id", :using=>:btree}) - -> 0.0032s + -> 0.0603s -- add_index("gpg_key_subkeys", ["keyid"], {:name=>"index_gpg_key_subkeys_on_keyid", :unique=>true, :using=>:btree}) - -> 0.0027s + -> 0.0705s -- create_table("gpg_keys", {:force=>:cascade}) - -> 0.0042s + -> 0.0235s -- add_index("gpg_keys", ["fingerprint"], {:name=>"index_gpg_keys_on_fingerprint", :unique=>true, :using=>:btree}) - -> 0.0032s + -> 0.0220s -- add_index("gpg_keys", ["primary_keyid"], {:name=>"index_gpg_keys_on_primary_keyid", :unique=>true, :using=>:btree}) - -> 0.0026s + -> 0.0329s -- add_index("gpg_keys", ["user_id"], {:name=>"index_gpg_keys_on_user_id", :using=>:btree}) - -> 0.0028s + -> 0.0087s -- create_table("gpg_signatures", {:force=>:cascade}) - -> 0.0054s + -> 0.0126s -- add_index("gpg_signatures", ["commit_sha"], {:name=>"index_gpg_signatures_on_commit_sha", :unique=>true, :using=>:btree}) - -> 0.0029s + -> 0.0105s -- add_index("gpg_signatures", ["gpg_key_id"], {:name=>"index_gpg_signatures_on_gpg_key_id", :using=>:btree}) - -> 0.0026s + -> 0.0094s -- add_index("gpg_signatures", ["gpg_key_primary_keyid"], {:name=>"index_gpg_signatures_on_gpg_key_primary_keyid", :using=>:btree}) - -> 0.0029s + -> 0.0100s -- add_index("gpg_signatures", ["gpg_key_subkey_id"], {:name=>"index_gpg_signatures_on_gpg_key_subkey_id", :using=>:btree}) - -> 0.0032s + -> 0.0079s -- add_index("gpg_signatures", ["project_id"], {:name=>"index_gpg_signatures_on_project_id", :using=>:btree}) - -> 0.0028s + -> 0.0081s -- create_table("group_custom_attributes", {:force=>:cascade}) - -> 0.0044s + -> 0.0092s -- add_index("group_custom_attributes", 
["group_id", "key"], {:name=>"index_group_custom_attributes_on_group_id_and_key", :unique=>true, :using=>:btree}) - -> 0.0032s + -> 0.0086s -- add_index("group_custom_attributes", ["key", "value"], {:name=>"index_group_custom_attributes_on_key_and_value", :using=>:btree}) - -> 0.0028s + -> 0.0071s -- create_table("identities", {:force=>:cascade}) - -> 0.0043s + -> 0.0114s -- add_index("identities", ["user_id"], {:name=>"index_identities_on_user_id", :using=>:btree}) - -> 0.0034s + -> 0.0064s +-- create_table("internal_ids", {:id=>:bigserial, :force=>:cascade}) + -> 0.0097s +-- add_index("internal_ids", ["usage", "project_id"], {:name=>"index_internal_ids_on_usage_and_project_id", :unique=>true, :using=>:btree}) + -> 0.0073s -- create_table("issue_assignees", {:id=>false, :force=>:cascade}) - -> 0.0013s + -> 0.0127s -- add_index("issue_assignees", ["issue_id", "user_id"], {:name=>"index_issue_assignees_on_issue_id_and_user_id", :unique=>true, :using=>:btree}) - -> 0.0028s + -> 0.0110s -- add_index("issue_assignees", ["user_id"], {:name=>"index_issue_assignees_on_user_id", :using=>:btree}) - -> 0.0029s + -> 0.0079s -- create_table("issue_metrics", {:force=>:cascade}) - -> 0.0032s + -> 0.0098s -- add_index("issue_metrics", ["issue_id"], {:name=>"index_issue_metrics", :using=>:btree}) - -> 0.0029s + -> 0.0053s -- create_table("issues", {:force=>:cascade}) - -> 0.0051s + -> 0.0090s -- add_index("issues", ["author_id"], {:name=>"index_issues_on_author_id", :using=>:btree}) - -> 0.0028s + -> 0.0056s -- add_index("issues", ["confidential"], {:name=>"index_issues_on_confidential", :using=>:btree}) - -> 0.0029s + -> 0.0055s -- add_index("issues", ["description"], {:name=>"index_issues_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) - -> 0.0022s + -> 0.0006s -- add_index("issues", ["milestone_id"], {:name=>"index_issues_on_milestone_id", :using=>:btree}) - -> 0.0027s + -> 0.0061s -- add_index("issues", ["moved_to_id"], {:name=>"index_issues_on_moved_to_id", :where=>"(moved_to_id IS NOT NULL)", :using=>:btree}) - -> 0.0030s + -> 0.0051s -- add_index("issues", ["project_id", "created_at", "id", "state"], {:name=>"index_issues_on_project_id_and_created_at_and_id_and_state", :using=>:btree}) - -> 0.0039s + -> 0.0069s -- add_index("issues", ["project_id", "due_date", "id", "state"], {:name=>"idx_issues_on_project_id_and_due_date_and_id_and_state_partial", :where=>"(due_date IS NOT NULL)", :using=>:btree}) - -> 0.0031s + -> 0.0073s -- add_index("issues", ["project_id", "iid"], {:name=>"index_issues_on_project_id_and_iid", :unique=>true, :using=>:btree}) - -> 0.0032s + -> 0.0060s -- add_index("issues", ["project_id", "updated_at", "id", "state"], {:name=>"index_issues_on_project_id_and_updated_at_and_id_and_state", :using=>:btree}) - -> 0.0035s + -> 0.0094s -- add_index("issues", ["relative_position"], {:name=>"index_issues_on_relative_position", :using=>:btree}) - -> 0.0030s + -> 0.0070s -- add_index("issues", ["state"], {:name=>"index_issues_on_state", :using=>:btree}) - -> 0.0027s + -> 0.0078s -- add_index("issues", ["title"], {:name=>"index_issues_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) - -> 0.0021s + -> 0.0007s -- add_index("issues", ["updated_at"], {:name=>"index_issues_on_updated_at", :using=>:btree}) - -> 0.0030s + -> 0.0068s -- add_index("issues", ["updated_by_id"], {:name=>"index_issues_on_updated_by_id", :where=>"(updated_by_id IS NOT NULL)", :using=>:btree}) - -> 0.0028s + -> 0.0066s -- create_table("keys", {:force=>:cascade}) 
- -> 0.0048s + -> 0.0087s -- add_index("keys", ["fingerprint"], {:name=>"index_keys_on_fingerprint", :unique=>true, :using=>:btree}) - -> 0.0028s + -> 0.0066s -- add_index("keys", ["user_id"], {:name=>"index_keys_on_user_id", :using=>:btree}) - -> 0.0029s + -> 0.0063s -- create_table("label_links", {:force=>:cascade}) - -> 0.0041s + -> 0.0073s -- add_index("label_links", ["label_id"], {:name=>"index_label_links_on_label_id", :using=>:btree}) - -> 0.0027s + -> 0.0050s -- add_index("label_links", ["target_id", "target_type"], {:name=>"index_label_links_on_target_id_and_target_type", :using=>:btree}) - -> 0.0028s + -> 0.0062s -- create_table("label_priorities", {:force=>:cascade}) - -> 0.0031s + -> 0.0073s -- add_index("label_priorities", ["priority"], {:name=>"index_label_priorities_on_priority", :using=>:btree}) - -> 0.0028s + -> 0.0058s -- add_index("label_priorities", ["project_id", "label_id"], {:name=>"index_label_priorities_on_project_id_and_label_id", :unique=>true, :using=>:btree}) - -> 0.0027s + -> 0.0056s -- create_table("labels", {:force=>:cascade}) - -> 0.0046s + -> 0.0087s -- add_index("labels", ["group_id", "project_id", "title"], {:name=>"index_labels_on_group_id_and_project_id_and_title", :unique=>true, :using=>:btree}) - -> 0.0028s + -> 0.0074s -- add_index("labels", ["project_id"], {:name=>"index_labels_on_project_id", :using=>:btree}) - -> 0.0032s + -> 0.0061s -- add_index("labels", ["template"], {:name=>"index_labels_on_template", :where=>"template", :using=>:btree}) - -> 0.0027s + -> 0.0060s -- add_index("labels", ["title"], {:name=>"index_labels_on_title", :using=>:btree}) - -> 0.0030s + -> 0.0076s -- add_index("labels", ["type", "project_id"], {:name=>"index_labels_on_type_and_project_id", :using=>:btree}) - -> 0.0028s + -> 0.0061s +-- create_table("lfs_file_locks", {:force=>:cascade}) + -> 0.0078s +-- add_index("lfs_file_locks", ["project_id", "path"], {:name=>"index_lfs_file_locks_on_project_id_and_path", :unique=>true, :using=>:btree}) + -> 0.0067s +-- add_index("lfs_file_locks", ["user_id"], {:name=>"index_lfs_file_locks_on_user_id", :using=>:btree}) + -> 0.0060s -- create_table("lfs_objects", {:force=>:cascade}) - -> 0.0040s + -> 0.0109s -- add_index("lfs_objects", ["oid"], {:name=>"index_lfs_objects_on_oid", :unique=>true, :using=>:btree}) - -> 0.0032s + -> 0.0059s -- create_table("lfs_objects_projects", {:force=>:cascade}) - -> 0.0035s + -> 0.0091s -- add_index("lfs_objects_projects", ["project_id"], {:name=>"index_lfs_objects_projects_on_project_id", :using=>:btree}) - -> 0.0025s + -> 0.0060s -- create_table("lists", {:force=>:cascade}) - -> 0.0033s + -> 0.0115s -- add_index("lists", ["board_id", "label_id"], {:name=>"index_lists_on_board_id_and_label_id", :unique=>true, :using=>:btree}) - -> 0.0026s + -> 0.0055s -- add_index("lists", ["label_id"], {:name=>"index_lists_on_label_id", :using=>:btree}) - -> 0.0026s + -> 0.0055s -- create_table("members", {:force=>:cascade}) - -> 0.0046s + -> 0.0140s -- add_index("members", ["access_level"], {:name=>"index_members_on_access_level", :using=>:btree}) - -> 0.0028s + -> 0.0067s -- add_index("members", ["invite_token"], {:name=>"index_members_on_invite_token", :unique=>true, :using=>:btree}) - -> 0.0027s + -> 0.0069s -- add_index("members", ["requested_at"], {:name=>"index_members_on_requested_at", :using=>:btree}) - -> 0.0025s + -> 0.0057s -- add_index("members", ["source_id", "source_type"], {:name=>"index_members_on_source_id_and_source_type", :using=>:btree}) - -> 0.0027s + -> 0.0057s -- add_index("members", 
["user_id"], {:name=>"index_members_on_user_id", :using=>:btree}) - -> 0.0026s + -> 0.0073s -- create_table("merge_request_diff_commits", {:id=>false, :force=>:cascade}) - -> 0.0027s + -> 0.0087s -- add_index("merge_request_diff_commits", ["merge_request_diff_id", "relative_order"], {:name=>"index_merge_request_diff_commits_on_mr_diff_id_and_order", :unique=>true, :using=>:btree}) - -> 0.0032s + -> 0.0151s -- add_index("merge_request_diff_commits", ["sha"], {:name=>"index_merge_request_diff_commits_on_sha", :using=>:btree}) - -> 0.0029s + -> 0.0057s -- create_table("merge_request_diff_files", {:id=>false, :force=>:cascade}) - -> 0.0027s + -> 0.0094s -- add_index("merge_request_diff_files", ["merge_request_diff_id", "relative_order"], {:name=>"index_merge_request_diff_files_on_mr_diff_id_and_order", :unique=>true, :using=>:btree}) - -> 0.0027s + -> 0.0138s -- create_table("merge_request_diffs", {:force=>:cascade}) - -> 0.0042s + -> 0.0077s -- add_index("merge_request_diffs", ["merge_request_id", "id"], {:name=>"index_merge_request_diffs_on_merge_request_id_and_id", :using=>:btree}) - -> 0.0030s + -> 0.0060s -- create_table("merge_request_metrics", {:force=>:cascade}) - -> 0.0034s + -> 0.0098s -- add_index("merge_request_metrics", ["first_deployed_to_production_at"], {:name=>"index_merge_request_metrics_on_first_deployed_to_production_at", :using=>:btree}) - -> 0.0028s + -> 0.0060s -- add_index("merge_request_metrics", ["merge_request_id"], {:name=>"index_merge_request_metrics", :using=>:btree}) - -> 0.0025s + -> 0.0050s -- add_index("merge_request_metrics", ["pipeline_id"], {:name=>"index_merge_request_metrics_on_pipeline_id", :using=>:btree}) - -> 0.0026s + -> 0.0045s -- create_table("merge_requests", {:force=>:cascade}) -> 0.0066s -- add_index("merge_requests", ["assignee_id"], {:name=>"index_merge_requests_on_assignee_id", :using=>:btree}) - -> 0.0029s + -> 0.0072s -- add_index("merge_requests", ["author_id"], {:name=>"index_merge_requests_on_author_id", :using=>:btree}) - -> 0.0026s + -> 0.0050s -- add_index("merge_requests", ["created_at"], {:name=>"index_merge_requests_on_created_at", :using=>:btree}) - -> 0.0026s + -> 0.0053s -- add_index("merge_requests", ["description"], {:name=>"index_merge_requests_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) - -> 0.0020s + -> 0.0008s -- add_index("merge_requests", ["head_pipeline_id"], {:name=>"index_merge_requests_on_head_pipeline_id", :using=>:btree}) - -> 0.0027s + -> 0.0053s -- add_index("merge_requests", ["latest_merge_request_diff_id"], {:name=>"index_merge_requests_on_latest_merge_request_diff_id", :using=>:btree}) - -> 0.0025s + -> 0.0048s -- add_index("merge_requests", ["merge_user_id"], {:name=>"index_merge_requests_on_merge_user_id", :where=>"(merge_user_id IS NOT NULL)", :using=>:btree}) - -> 0.0029s + -> 0.0051s -- add_index("merge_requests", ["milestone_id"], {:name=>"index_merge_requests_on_milestone_id", :using=>:btree}) - -> 0.0030s + -> 0.0055s -- add_index("merge_requests", ["source_branch"], {:name=>"index_merge_requests_on_source_branch", :using=>:btree}) - -> 0.0026s + -> 0.0055s -- add_index("merge_requests", ["source_project_id", "source_branch"], {:name=>"index_merge_requests_on_source_project_and_branch_state_opened", :where=>"((state)::text = 'opened'::text)", :using=>:btree}) - -> 0.0029s + -> 0.0061s -- add_index("merge_requests", ["source_project_id", "source_branch"], {:name=>"index_merge_requests_on_source_project_id_and_source_branch", :using=>:btree}) - -> 0.0031s + -> 
0.0068s -- add_index("merge_requests", ["target_branch"], {:name=>"index_merge_requests_on_target_branch", :using=>:btree}) - -> 0.0028s + -> 0.0054s -- add_index("merge_requests", ["target_project_id", "iid"], {:name=>"index_merge_requests_on_target_project_id_and_iid", :unique=>true, :using=>:btree}) - -> 0.0027s + -> 0.0061s -- add_index("merge_requests", ["target_project_id", "merge_commit_sha", "id"], {:name=>"index_merge_requests_on_tp_id_and_merge_commit_sha_and_id", :using=>:btree}) - -> 0.0029s + -> 0.0077s -- add_index("merge_requests", ["title"], {:name=>"index_merge_requests_on_title", :using=>:btree}) - -> 0.0026s + -> 0.0105s -- add_index("merge_requests", ["title"], {:name=>"index_merge_requests_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) - -> 0.0020s + -> 0.0008s -- add_index("merge_requests", ["updated_by_id"], {:name=>"index_merge_requests_on_updated_by_id", :where=>"(updated_by_id IS NOT NULL)", :using=>:btree}) - -> 0.0029s + -> 0.0074s -- create_table("merge_requests_closing_issues", {:force=>:cascade}) - -> 0.0031s + -> 0.0125s -- add_index("merge_requests_closing_issues", ["issue_id"], {:name=>"index_merge_requests_closing_issues_on_issue_id", :using=>:btree}) - -> 0.0026s + -> 0.0064s -- add_index("merge_requests_closing_issues", ["merge_request_id"], {:name=>"index_merge_requests_closing_issues_on_merge_request_id", :using=>:btree}) - -> 0.0028s + -> 0.0061s -- create_table("milestones", {:force=>:cascade}) - -> 0.0044s + -> 0.0064s -- add_index("milestones", ["description"], {:name=>"index_milestones_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) - -> 0.0022s + -> 0.0007s -- add_index("milestones", ["due_date"], {:name=>"index_milestones_on_due_date", :using=>:btree}) - -> 0.0033s + -> 0.0053s -- add_index("milestones", ["group_id"], {:name=>"index_milestones_on_group_id", :using=>:btree}) - -> 0.0028s + -> 0.0068s -- add_index("milestones", ["project_id", "iid"], {:name=>"index_milestones_on_project_id_and_iid", :unique=>true, :using=>:btree}) - -> 0.0028s + -> 0.0057s -- add_index("milestones", ["title"], {:name=>"index_milestones_on_title", :using=>:btree}) - -> 0.0026s + -> 0.0051s -- add_index("milestones", ["title"], {:name=>"index_milestones_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) - -> 0.0021s + -> 0.0006s -- create_table("namespaces", {:force=>:cascade}) - -> 0.0068s + -> 0.0083s -- add_index("namespaces", ["created_at"], {:name=>"index_namespaces_on_created_at", :using=>:btree}) - -> 0.0030s + -> 0.0061s -- add_index("namespaces", ["name", "parent_id"], {:name=>"index_namespaces_on_name_and_parent_id", :unique=>true, :using=>:btree}) - -> 0.0030s + -> 0.0062s -- add_index("namespaces", ["name"], {:name=>"index_namespaces_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}}) - -> 0.0020s + -> 0.0006s -- add_index("namespaces", ["owner_id"], {:name=>"index_namespaces_on_owner_id", :using=>:btree}) - -> 0.0028s + -> 0.0061s -- add_index("namespaces", ["parent_id", "id"], {:name=>"index_namespaces_on_parent_id_and_id", :unique=>true, :using=>:btree}) - -> 0.0032s + -> 0.0072s -- add_index("namespaces", ["path"], {:name=>"index_namespaces_on_path", :using=>:btree}) - -> 0.0031s + -> 0.0056s -- add_index("namespaces", ["path"], {:name=>"index_namespaces_on_path_trigram", :using=>:gin, :opclasses=>{"path"=>"gin_trgm_ops"}}) - -> 0.0019s + -> 0.0006s -- add_index("namespaces", ["require_two_factor_authentication"], 
{:name=>"index_namespaces_on_require_two_factor_authentication", :using=>:btree}) - -> 0.0029s + -> 0.0061s -- add_index("namespaces", ["type"], {:name=>"index_namespaces_on_type", :using=>:btree}) - -> 0.0032s --- create_table("notes", {:force=>:cascade}) -> 0.0055s +-- create_table("notes", {:force=>:cascade}) + -> 0.0092s -- add_index("notes", ["author_id"], {:name=>"index_notes_on_author_id", :using=>:btree}) - -> 0.0029s + -> 0.0072s -- add_index("notes", ["commit_id"], {:name=>"index_notes_on_commit_id", :using=>:btree}) - -> 0.0028s + -> 0.0057s -- add_index("notes", ["created_at"], {:name=>"index_notes_on_created_at", :using=>:btree}) - -> 0.0029s + -> 0.0065s -- add_index("notes", ["discussion_id"], {:name=>"index_notes_on_discussion_id", :using=>:btree}) - -> 0.0029s + -> 0.0064s -- add_index("notes", ["line_code"], {:name=>"index_notes_on_line_code", :using=>:btree}) - -> 0.0029s + -> 0.0078s -- add_index("notes", ["note"], {:name=>"index_notes_on_note_trigram", :using=>:gin, :opclasses=>{"note"=>"gin_trgm_ops"}}) - -> 0.0024s + -> 0.0006s -- add_index("notes", ["noteable_id", "noteable_type"], {:name=>"index_notes_on_noteable_id_and_noteable_type", :using=>:btree}) - -> 0.0029s + -> 0.0102s -- add_index("notes", ["noteable_type"], {:name=>"index_notes_on_noteable_type", :using=>:btree}) - -> 0.0030s + -> 0.0092s -- add_index("notes", ["project_id", "noteable_type"], {:name=>"index_notes_on_project_id_and_noteable_type", :using=>:btree}) - -> 0.0027s + -> 0.0082s -- add_index("notes", ["updated_at"], {:name=>"index_notes_on_updated_at", :using=>:btree}) - -> 0.0026s + -> 0.0062s -- create_table("notification_settings", {:force=>:cascade}) - -> 0.0053s + -> 0.0088s -- add_index("notification_settings", ["source_id", "source_type"], {:name=>"index_notification_settings_on_source_id_and_source_type", :using=>:btree}) - -> 0.0028s + -> 0.0405s -- add_index("notification_settings", ["user_id", "source_id", "source_type"], {:name=>"index_notifications_on_user_id_and_source_id_and_source_type", :unique=>true, :using=>:btree}) - -> 0.0030s + -> 0.0677s -- add_index("notification_settings", ["user_id"], {:name=>"index_notification_settings_on_user_id", :using=>:btree}) - -> 0.0031s + -> 0.1199s -- create_table("oauth_access_grants", {:force=>:cascade}) - -> 0.0042s + -> 0.0140s -- add_index("oauth_access_grants", ["token"], {:name=>"index_oauth_access_grants_on_token", :unique=>true, :using=>:btree}) - -> 0.0031s + -> 0.0076s -- create_table("oauth_access_tokens", {:force=>:cascade}) - -> 0.0051s + -> 0.0167s -- add_index("oauth_access_tokens", ["refresh_token"], {:name=>"index_oauth_access_tokens_on_refresh_token", :unique=>true, :using=>:btree}) - -> 0.0030s + -> 0.0098s -- add_index("oauth_access_tokens", ["resource_owner_id"], {:name=>"index_oauth_access_tokens_on_resource_owner_id", :using=>:btree}) - -> 0.0025s + -> 0.0074s -- add_index("oauth_access_tokens", ["token"], {:name=>"index_oauth_access_tokens_on_token", :unique=>true, :using=>:btree}) - -> 0.0026s + -> 0.0078s -- create_table("oauth_applications", {:force=>:cascade}) - -> 0.0049s + -> 0.0112s -- add_index("oauth_applications", ["owner_id", "owner_type"], {:name=>"index_oauth_applications_on_owner_id_and_owner_type", :using=>:btree}) - -> 0.0030s + -> 0.0079s -- add_index("oauth_applications", ["uid"], {:name=>"index_oauth_applications_on_uid", :unique=>true, :using=>:btree}) - -> 0.0032s + -> 0.0114s -- create_table("oauth_openid_requests", {:force=>:cascade}) - -> 0.0048s + -> 0.0102s -- create_table("pages_domains", 
{:force=>:cascade}) - -> 0.0052s + -> 0.0102s -- add_index("pages_domains", ["domain"], {:name=>"index_pages_domains_on_domain", :unique=>true, :using=>:btree}) - -> 0.0027s + -> 0.0067s +-- add_index("pages_domains", ["project_id", "enabled_until"], {:name=>"index_pages_domains_on_project_id_and_enabled_until", :using=>:btree}) + -> 0.0114s -- add_index("pages_domains", ["project_id"], {:name=>"index_pages_domains_on_project_id", :using=>:btree}) - -> 0.0030s + -> 0.0066s +-- add_index("pages_domains", ["verified_at", "enabled_until"], {:name=>"index_pages_domains_on_verified_at_and_enabled_until", :using=>:btree}) + -> 0.0073s +-- add_index("pages_domains", ["verified_at"], {:name=>"index_pages_domains_on_verified_at", :using=>:btree}) + -> 0.0063s -- create_table("personal_access_tokens", {:force=>:cascade}) - -> 0.0056s + -> 0.0084s -- add_index("personal_access_tokens", ["token"], {:name=>"index_personal_access_tokens_on_token", :unique=>true, :using=>:btree}) - -> 0.0032s + -> 0.0075s -- add_index("personal_access_tokens", ["user_id"], {:name=>"index_personal_access_tokens_on_user_id", :using=>:btree}) - -> 0.0028s + -> 0.0066s -- create_table("project_authorizations", {:id=>false, :force=>:cascade}) - -> 0.0018s + -> 0.0087s -- add_index("project_authorizations", ["project_id"], {:name=>"index_project_authorizations_on_project_id", :using=>:btree}) - -> 0.0033s + -> 0.0056s -- add_index("project_authorizations", ["user_id", "project_id", "access_level"], {:name=>"index_project_authorizations_on_user_id_project_id_access_level", :unique=>true, :using=>:btree}) - -> 0.0029s + -> 0.0075s -- create_table("project_auto_devops", {:force=>:cascade}) - -> 0.0043s + -> 0.0079s -- add_index("project_auto_devops", ["project_id"], {:name=>"index_project_auto_devops_on_project_id", :unique=>true, :using=>:btree}) - -> 0.0029s + -> 0.0067s -- create_table("project_custom_attributes", {:force=>:cascade}) - -> 0.0047s + -> 0.0071s -- add_index("project_custom_attributes", ["key", "value"], {:name=>"index_project_custom_attributes_on_key_and_value", :using=>:btree}) - -> 0.0030s + -> 0.0060s -- add_index("project_custom_attributes", ["project_id", "key"], {:name=>"index_project_custom_attributes_on_project_id_and_key", :unique=>true, :using=>:btree}) - -> 0.0028s + -> 0.0069s -- create_table("project_features", {:force=>:cascade}) - -> 0.0038s + -> 0.0100s -- add_index("project_features", ["project_id"], {:name=>"index_project_features_on_project_id", :using=>:btree}) - -> 0.0029s + -> 0.0069s -- create_table("project_group_links", {:force=>:cascade}) - -> 0.0036s + -> 0.0117s -- add_index("project_group_links", ["group_id"], {:name=>"index_project_group_links_on_group_id", :using=>:btree}) - -> 0.0028s + -> 0.0121s -- add_index("project_group_links", ["project_id"], {:name=>"index_project_group_links_on_project_id", :using=>:btree}) - -> 0.0030s + -> 0.0076s -- create_table("project_import_data", {:force=>:cascade}) - -> 0.0049s + -> 0.0084s -- add_index("project_import_data", ["project_id"], {:name=>"index_project_import_data_on_project_id", :using=>:btree}) - -> 0.0027s + -> 0.0058s -- create_table("project_statistics", {:force=>:cascade}) - -> 0.0046s + -> 0.0075s -- add_index("project_statistics", ["namespace_id"], {:name=>"index_project_statistics_on_namespace_id", :using=>:btree}) - -> 0.0027s + -> 0.0054s -- add_index("project_statistics", ["project_id"], {:name=>"index_project_statistics_on_project_id", :unique=>true, :using=>:btree}) - -> 0.0029s + -> 0.0054s -- create_table("projects", 
{:force=>:cascade}) - -> 0.0090s + -> 0.0077s -- add_index("projects", ["ci_id"], {:name=>"index_projects_on_ci_id", :using=>:btree}) - -> 0.0033s + -> 0.0070s -- add_index("projects", ["created_at"], {:name=>"index_projects_on_created_at", :using=>:btree}) - -> 0.0030s + -> 0.0060s -- add_index("projects", ["creator_id"], {:name=>"index_projects_on_creator_id", :using=>:btree}) - -> 0.0028s + -> 0.0071s -- add_index("projects", ["description"], {:name=>"index_projects_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) - -> 0.0022s + -> 0.0009s +-- add_index("projects", ["id"], {:name=>"index_projects_on_id_partial_for_visibility", :unique=>true, :where=>"(visibility_level = ANY (ARRAY[10, 20]))", :using=>:btree}) + -> 0.0062s -- add_index("projects", ["last_activity_at"], {:name=>"index_projects_on_last_activity_at", :using=>:btree}) - -> 0.0032s + -> 0.0060s -- add_index("projects", ["last_repository_check_failed"], {:name=>"index_projects_on_last_repository_check_failed", :using=>:btree}) - -> 0.0030s + -> 0.0063s -- add_index("projects", ["last_repository_updated_at"], {:name=>"index_projects_on_last_repository_updated_at", :using=>:btree}) - -> 0.0031s + -> 0.0633s -- add_index("projects", ["name"], {:name=>"index_projects_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}}) - -> 0.0022s + -> 0.0012s -- add_index("projects", ["namespace_id"], {:name=>"index_projects_on_namespace_id", :using=>:btree}) - -> 0.0028s + -> 0.0167s -- add_index("projects", ["path"], {:name=>"index_projects_on_path", :using=>:btree}) - -> 0.0028s + -> 0.0222s -- add_index("projects", ["path"], {:name=>"index_projects_on_path_trigram", :using=>:gin, :opclasses=>{"path"=>"gin_trgm_ops"}}) - -> 0.0023s + -> 0.0010s -- add_index("projects", ["pending_delete"], {:name=>"index_projects_on_pending_delete", :using=>:btree}) - -> 0.0029s + -> 0.0229s -- add_index("projects", ["repository_storage"], {:name=>"index_projects_on_repository_storage", :using=>:btree}) - -> 0.0026s + -> 0.0173s -- add_index("projects", ["runners_token"], {:name=>"index_projects_on_runners_token", :using=>:btree}) - -> 0.0034s + -> 0.0167s -- add_index("projects", ["star_count"], {:name=>"index_projects_on_star_count", :using=>:btree}) - -> 0.0028s + -> 0.0491s -- add_index("projects", ["visibility_level"], {:name=>"index_projects_on_visibility_level", :using=>:btree}) - -> 0.0027s + -> 0.0598s -- create_table("protected_branch_merge_access_levels", {:force=>:cascade}) - -> 0.0042s + -> 0.1964s -- add_index("protected_branch_merge_access_levels", ["protected_branch_id"], {:name=>"index_protected_branch_merge_access", :using=>:btree}) - -> 0.0029s + -> 0.1112s -- create_table("protected_branch_push_access_levels", {:force=>:cascade}) - -> 0.0037s + -> 0.0195s -- add_index("protected_branch_push_access_levels", ["protected_branch_id"], {:name=>"index_protected_branch_push_access", :using=>:btree}) - -> 0.0030s + -> 0.0069s -- create_table("protected_branches", {:force=>:cascade}) - -> 0.0048s + -> 0.0113s -- add_index("protected_branches", ["project_id"], {:name=>"index_protected_branches_on_project_id", :using=>:btree}) - -> 0.0030s + -> 0.0071s -- create_table("protected_tag_create_access_levels", {:force=>:cascade}) - -> 0.0037s + -> 0.0180s -- add_index("protected_tag_create_access_levels", ["protected_tag_id"], {:name=>"index_protected_tag_create_access", :using=>:btree}) - -> 0.0029s + -> 0.0068s -- add_index("protected_tag_create_access_levels", ["user_id"], 
{:name=>"index_protected_tag_create_access_levels_on_user_id", :using=>:btree}) - -> 0.0029s + -> 0.0077s -- create_table("protected_tags", {:force=>:cascade}) - -> 0.0051s + -> 0.0115s -- add_index("protected_tags", ["project_id"], {:name=>"index_protected_tags_on_project_id", :using=>:btree}) - -> 0.0034s + -> 0.0081s -- create_table("push_event_payloads", {:id=>false, :force=>:cascade}) - -> 0.0030s + -> 0.0108s -- add_index("push_event_payloads", ["event_id"], {:name=>"index_push_event_payloads_on_event_id", :unique=>true, :using=>:btree}) - -> 0.0029s + -> 0.0189s -- create_table("redirect_routes", {:force=>:cascade}) - -> 0.0049s + -> 0.0106s -- add_index("redirect_routes", ["path"], {:name=>"index_redirect_routes_on_path", :unique=>true, :using=>:btree}) - -> 0.0031s + -> 0.0075s -- add_index("redirect_routes", ["source_type", "source_id"], {:name=>"index_redirect_routes_on_source_type_and_source_id", :using=>:btree}) - -> 0.0034s + -> 0.0099s -- create_table("releases", {:force=>:cascade}) - -> 0.0043s + -> 0.0126s -- add_index("releases", ["project_id", "tag"], {:name=>"index_releases_on_project_id_and_tag", :using=>:btree}) - -> 0.0032s + -> 0.0066s -- add_index("releases", ["project_id"], {:name=>"index_releases_on_project_id", :using=>:btree}) - -> 0.0030s + -> 0.0060s -- create_table("routes", {:force=>:cascade}) - -> 0.0055s + -> 0.0091s -- add_index("routes", ["path"], {:name=>"index_routes_on_path", :unique=>true, :using=>:btree}) - -> 0.0028s + -> 0.0073s -- add_index("routes", ["path"], {:name=>"index_routes_on_path_text_pattern_ops", :using=>:btree, :opclasses=>{"path"=>"varchar_pattern_ops"}}) - -> 0.0026s + -> 0.0004s -- add_index("routes", ["source_type", "source_id"], {:name=>"index_routes_on_source_type_and_source_id", :unique=>true, :using=>:btree}) - -> 0.0029s + -> 0.0111s -- create_table("sent_notifications", {:force=>:cascade}) - -> 0.0048s + -> 0.0093s -- add_index("sent_notifications", ["reply_key"], {:name=>"index_sent_notifications_on_reply_key", :unique=>true, :using=>:btree}) - -> 0.0029s + -> 0.0060s -- create_table("services", {:force=>:cascade}) - -> 0.0091s + -> 0.0099s -- add_index("services", ["project_id"], {:name=>"index_services_on_project_id", :using=>:btree}) - -> 0.0028s + -> 0.0068s -- add_index("services", ["template"], {:name=>"index_services_on_template", :using=>:btree}) - -> 0.0031s + -> 0.0076s -- create_table("snippets", {:force=>:cascade}) - -> 0.0050s + -> 0.0073s -- add_index("snippets", ["author_id"], {:name=>"index_snippets_on_author_id", :using=>:btree}) - -> 0.0030s + -> 0.0055s -- add_index("snippets", ["file_name"], {:name=>"index_snippets_on_file_name_trigram", :using=>:gin, :opclasses=>{"file_name"=>"gin_trgm_ops"}}) - -> 0.0020s + -> 0.0006s -- add_index("snippets", ["project_id"], {:name=>"index_snippets_on_project_id", :using=>:btree}) - -> 0.0028s + -> 0.0058s -- add_index("snippets", ["title"], {:name=>"index_snippets_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) - -> 0.0020s + -> 0.0005s -- add_index("snippets", ["updated_at"], {:name=>"index_snippets_on_updated_at", :using=>:btree}) - -> 0.0026s + -> 0.0100s -- add_index("snippets", ["visibility_level"], {:name=>"index_snippets_on_visibility_level", :using=>:btree}) - -> 0.0026s + -> 0.0091s -- create_table("spam_logs", {:force=>:cascade}) - -> 0.0048s + -> 0.0129s -- create_table("subscriptions", {:force=>:cascade}) - -> 0.0041s + -> 0.0094s -- add_index("subscriptions", ["subscribable_id", "subscribable_type", "user_id", "project_id"], 
{:name=>"index_subscriptions_on_subscribable_and_user_id_and_project_id", :unique=>true, :using=>:btree}) - -> 0.0030s + -> 0.0107s -- create_table("system_note_metadata", {:force=>:cascade}) - -> 0.0040s + -> 0.0138s -- add_index("system_note_metadata", ["note_id"], {:name=>"index_system_note_metadata_on_note_id", :unique=>true, :using=>:btree}) - -> 0.0029s + -> 0.0060s -- create_table("taggings", {:force=>:cascade}) - -> 0.0047s + -> 0.0121s -- add_index("taggings", ["tag_id", "taggable_id", "taggable_type", "context", "tagger_id", "tagger_type"], {:name=>"taggings_idx", :unique=>true, :using=>:btree}) - -> 0.0030s + -> 0.0078s +-- add_index("taggings", ["tag_id"], {:name=>"index_taggings_on_tag_id", :using=>:btree}) + -> 0.0058s -- add_index("taggings", ["taggable_id", "taggable_type", "context"], {:name=>"index_taggings_on_taggable_id_and_taggable_type_and_context", :using=>:btree}) - -> 0.0025s + -> 0.0059s +-- add_index("taggings", ["taggable_id", "taggable_type"], {:name=>"index_taggings_on_taggable_id_and_taggable_type", :using=>:btree}) + -> 0.0056s -- create_table("tags", {:force=>:cascade}) - -> 0.0044s + -> 0.0063s -- add_index("tags", ["name"], {:name=>"index_tags_on_name", :unique=>true, :using=>:btree}) - -> 0.0026s + -> 0.0055s -- create_table("timelogs", {:force=>:cascade}) - -> 0.0033s + -> 0.0061s -- add_index("timelogs", ["issue_id"], {:name=>"index_timelogs_on_issue_id", :using=>:btree}) - -> 0.0027s + -> 0.0063s -- add_index("timelogs", ["merge_request_id"], {:name=>"index_timelogs_on_merge_request_id", :using=>:btree}) - -> 0.0033s + -> 0.0052s -- add_index("timelogs", ["user_id"], {:name=>"index_timelogs_on_user_id", :using=>:btree}) - -> 0.0028s + -> 0.0055s -- create_table("todos", {:force=>:cascade}) - -> 0.0043s + -> 0.0065s -- add_index("todos", ["author_id"], {:name=>"index_todos_on_author_id", :using=>:btree}) - -> 0.0027s + -> 0.0081s -- add_index("todos", ["commit_id"], {:name=>"index_todos_on_commit_id", :using=>:btree}) - -> 0.0028s + -> 0.0085s -- add_index("todos", ["note_id"], {:name=>"index_todos_on_note_id", :using=>:btree}) - -> 0.0028s + -> 0.0083s -- add_index("todos", ["project_id"], {:name=>"index_todos_on_project_id", :using=>:btree}) - -> 0.0027s + -> 0.0094s -- add_index("todos", ["target_type", "target_id"], {:name=>"index_todos_on_target_type_and_target_id", :using=>:btree}) - -> 0.0028s + -> 0.0070s +-- add_index("todos", ["user_id", "id"], {:name=>"index_todos_on_user_id_and_id_done", :where=>"((state)::text = 'done'::text)", :using=>:btree}) + -> 0.0099s +-- add_index("todos", ["user_id", "id"], {:name=>"index_todos_on_user_id_and_id_pending", :where=>"((state)::text = 'pending'::text)", :using=>:btree}) + -> 0.0080s -- add_index("todos", ["user_id"], {:name=>"index_todos_on_user_id", :using=>:btree}) - -> 0.0026s + -> 0.0061s -- create_table("trending_projects", {:force=>:cascade}) - -> 0.0030s --- add_index("trending_projects", ["project_id"], {:name=>"index_trending_projects_on_project_id", :using=>:btree}) - -> 0.0027s + -> 0.0081s +-- add_index("trending_projects", ["project_id"], {:name=>"index_trending_projects_on_project_id", :unique=>true, :using=>:btree}) + -> 0.0046s -- create_table("u2f_registrations", {:force=>:cascade}) - -> 0.0048s + -> 0.0063s -- add_index("u2f_registrations", ["key_handle"], {:name=>"index_u2f_registrations_on_key_handle", :using=>:btree}) - -> 0.0029s + -> 0.0052s -- add_index("u2f_registrations", ["user_id"], {:name=>"index_u2f_registrations_on_user_id", :using=>:btree}) - -> 0.0028s + -> 0.0072s -- 
create_table("uploads", {:force=>:cascade}) - -> 0.0044s + -> 0.0067s -- add_index("uploads", ["checksum"], {:name=>"index_uploads_on_checksum", :using=>:btree}) - -> 0.0028s + -> 0.0046s -- add_index("uploads", ["model_id", "model_type"], {:name=>"index_uploads_on_model_id_and_model_type", :using=>:btree}) - -> 0.0027s --- add_index("uploads", ["path"], {:name=>"index_uploads_on_path", :using=>:btree}) - -> 0.0028s + -> 0.0049s +-- add_index("uploads", ["uploader", "path"], {:name=>"index_uploads_on_uploader_and_path", :using=>:btree}) + -> 0.0052s -- create_table("user_agent_details", {:force=>:cascade}) - -> 0.0051s + -> 0.0059s -- add_index("user_agent_details", ["subject_id", "subject_type"], {:name=>"index_user_agent_details_on_subject_id_and_subject_type", :using=>:btree}) - -> 0.0028s + -> 0.0052s +-- create_table("user_callouts", {:force=>:cascade}) + -> 0.0059s +-- add_index("user_callouts", ["user_id", "feature_name"], {:name=>"index_user_callouts_on_user_id_and_feature_name", :unique=>true, :using=>:btree}) + -> 0.0094s +-- add_index("user_callouts", ["user_id"], {:name=>"index_user_callouts_on_user_id", :using=>:btree}) + -> 0.0064s -- create_table("user_custom_attributes", {:force=>:cascade}) - -> 0.0044s + -> 0.0086s -- add_index("user_custom_attributes", ["key", "value"], {:name=>"index_user_custom_attributes_on_key_and_value", :using=>:btree}) - -> 0.0027s + -> 0.0080s -- add_index("user_custom_attributes", ["user_id", "key"], {:name=>"index_user_custom_attributes_on_user_id_and_key", :unique=>true, :using=>:btree}) - -> 0.0026s --- create_table("user_synced_attributes_metadata", {:force=>:cascade}) + -> 0.0066s +-- create_table("user_interacted_projects", {:id=>false, :force=>:cascade}) + -> 0.0108s +-- add_index("user_interacted_projects", ["project_id", "user_id"], {:name=>"index_user_interacted_projects_on_project_id_and_user_id", :unique=>true, :using=>:btree}) + -> 0.0114s +-- add_index("user_interacted_projects", ["user_id"], {:name=>"index_user_interacted_projects_on_user_id", :using=>:btree}) -> 0.0056s +-- create_table("user_synced_attributes_metadata", {:force=>:cascade}) + -> 0.0115s -- add_index("user_synced_attributes_metadata", ["user_id"], {:name=>"index_user_synced_attributes_metadata_on_user_id", :unique=>true, :using=>:btree}) - -> 0.0027s + -> 0.0054s -- create_table("users", {:force=>:cascade}) - -> 0.0134s + -> 0.0111s -- add_index("users", ["admin"], {:name=>"index_users_on_admin", :using=>:btree}) - -> 0.0030s + -> 0.0065s -- add_index("users", ["confirmation_token"], {:name=>"index_users_on_confirmation_token", :unique=>true, :using=>:btree}) - -> 0.0029s + -> 0.0065s -- add_index("users", ["created_at"], {:name=>"index_users_on_created_at", :using=>:btree}) - -> 0.0034s + -> 0.0068s -- add_index("users", ["email"], {:name=>"index_users_on_email", :unique=>true, :using=>:btree}) - -> 0.0030s + -> 0.0066s -- add_index("users", ["email"], {:name=>"index_users_on_email_trigram", :using=>:gin, :opclasses=>{"email"=>"gin_trgm_ops"}}) - -> 0.0431s + -> 0.0011s -- add_index("users", ["ghost"], {:name=>"index_users_on_ghost", :using=>:btree}) - -> 0.0051s + -> 0.0063s -- add_index("users", ["incoming_email_token"], {:name=>"index_users_on_incoming_email_token", :using=>:btree}) - -> 0.0044s + -> 0.0057s -- add_index("users", ["name"], {:name=>"index_users_on_name", :using=>:btree}) - -> 0.0044s + -> 0.0056s -- add_index("users", ["name"], {:name=>"index_users_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}}) - -> 0.0034s + -> 0.0011s 
-- add_index("users", ["reset_password_token"], {:name=>"index_users_on_reset_password_token", :unique=>true, :using=>:btree}) - -> 0.0044s + -> 0.0055s -- add_index("users", ["rss_token"], {:name=>"index_users_on_rss_token", :using=>:btree}) - -> 0.0046s + -> 0.0068s -- add_index("users", ["state"], {:name=>"index_users_on_state", :using=>:btree}) - -> 0.0040s + -> 0.0067s -- add_index("users", ["username"], {:name=>"index_users_on_username", :using=>:btree}) - -> 0.0046s + -> 0.0072s -- add_index("users", ["username"], {:name=>"index_users_on_username_trigram", :using=>:gin, :opclasses=>{"username"=>"gin_trgm_ops"}}) - -> 0.0044s + -> 0.0012s -- create_table("users_star_projects", {:force=>:cascade}) - -> 0.0055s + -> 0.0100s -- add_index("users_star_projects", ["project_id"], {:name=>"index_users_star_projects_on_project_id", :using=>:btree}) - -> 0.0037s + -> 0.0061s -- add_index("users_star_projects", ["user_id", "project_id"], {:name=>"index_users_star_projects_on_user_id_and_project_id", :unique=>true, :using=>:btree}) - -> 0.0044s + -> 0.0068s -- create_table("web_hook_logs", {:force=>:cascade}) - -> 0.0060s + -> 0.0097s -- add_index("web_hook_logs", ["web_hook_id"], {:name=>"index_web_hook_logs_on_web_hook_id", :using=>:btree}) - -> 0.0034s + -> 0.0057s -- create_table("web_hooks", {:force=>:cascade}) - -> 0.0120s + -> 0.0080s -- add_index("web_hooks", ["project_id"], {:name=>"index_web_hooks_on_project_id", :using=>:btree}) - -> 0.0038s + -> 0.0062s -- add_index("web_hooks", ["type"], {:name=>"index_web_hooks_on_type", :using=>:btree}) - -> 0.0036s + -> 0.0065s +-- add_foreign_key("badges", "namespaces", {:column=>"group_id", :on_delete=>:cascade}) + -> 0.0158s +-- add_foreign_key("badges", "projects", {:on_delete=>:cascade}) + -> 0.0140s +-- add_foreign_key("boards", "namespaces", {:column=>"group_id", :on_delete=>:cascade}) + -> 0.0138s -- add_foreign_key("boards", "projects", {:name=>"fk_f15266b5f9", :on_delete=>:cascade}) - -> 0.0030s + -> 0.0118s -- add_foreign_key("chat_teams", "namespaces", {:on_delete=>:cascade}) - -> 0.0021s + -> 0.0130s -- add_foreign_key("ci_build_trace_section_names", "projects", {:on_delete=>:cascade}) - -> 0.0022s + -> 0.0131s -- add_foreign_key("ci_build_trace_sections", "ci_build_trace_section_names", {:column=>"section_name_id", :name=>"fk_264e112c66", :on_delete=>:cascade}) - -> 0.0018s + -> 0.0210s -- add_foreign_key("ci_build_trace_sections", "ci_builds", {:column=>"build_id", :name=>"fk_4ebe41f502", :on_delete=>:cascade}) - -> 0.0024s + -> 0.0823s -- add_foreign_key("ci_build_trace_sections", "projects", {:on_delete=>:cascade}) - -> 0.0019s + -> 0.0942s -- add_foreign_key("ci_builds", "ci_pipelines", {:column=>"auto_canceled_by_id", :name=>"fk_a2141b1522", :on_delete=>:nullify}) - -> 0.0023s + -> 0.1346s -- add_foreign_key("ci_builds", "ci_stages", {:column=>"stage_id", :name=>"fk_3a9eaa254d", :on_delete=>:cascade}) - -> 0.0020s + -> 0.0506s -- add_foreign_key("ci_builds", "projects", {:name=>"fk_befce0568a", :on_delete=>:cascade}) - -> 0.0024s + -> 0.0403s +-- add_foreign_key("ci_builds_metadata", "ci_builds", {:column=>"build_id", :on_delete=>:cascade}) + -> 0.0160s +-- add_foreign_key("ci_builds_metadata", "projects", {:on_delete=>:cascade}) + -> 0.0165s -- add_foreign_key("ci_group_variables", "namespaces", {:column=>"group_id", :name=>"fk_33ae4d58d8", :on_delete=>:cascade}) - -> 0.0024s + -> 0.0153s -- add_foreign_key("ci_job_artifacts", "ci_builds", {:column=>"job_id", :on_delete=>:cascade}) - -> 0.0019s + -> 0.0160s -- 
add_foreign_key("ci_job_artifacts", "projects", {:on_delete=>:cascade}) - -> 0.0020s + -> 0.0278s -- add_foreign_key("ci_pipeline_schedule_variables", "ci_pipeline_schedules", {:column=>"pipeline_schedule_id", :name=>"fk_41c35fda51", :on_delete=>:cascade}) - -> 0.0027s + -> 0.0193s -- add_foreign_key("ci_pipeline_schedules", "projects", {:name=>"fk_8ead60fcc4", :on_delete=>:cascade}) - -> 0.0022s + -> 0.0184s -- add_foreign_key("ci_pipeline_schedules", "users", {:column=>"owner_id", :name=>"fk_9ea99f58d2", :on_delete=>:nullify}) - -> 0.0025s + -> 0.0158s -- add_foreign_key("ci_pipeline_variables", "ci_pipelines", {:column=>"pipeline_id", :name=>"fk_f29c5f4380", :on_delete=>:cascade}) - -> 0.0018s + -> 0.0097s -- add_foreign_key("ci_pipelines", "ci_pipeline_schedules", {:column=>"pipeline_schedule_id", :name=>"fk_3d34ab2e06", :on_delete=>:nullify}) - -> 0.0019s + -> 0.0693s -- add_foreign_key("ci_pipelines", "ci_pipelines", {:column=>"auto_canceled_by_id", :name=>"fk_262d4c2d19", :on_delete=>:nullify}) - -> 0.0029s + -> 0.1599s -- add_foreign_key("ci_pipelines", "projects", {:name=>"fk_86635dbd80", :on_delete=>:cascade}) - -> 0.0023s + -> 0.1505s -- add_foreign_key("ci_runner_projects", "projects", {:name=>"fk_4478a6f1e4", :on_delete=>:cascade}) - -> 0.0036s + -> 0.0984s -- add_foreign_key("ci_stages", "ci_pipelines", {:column=>"pipeline_id", :name=>"fk_fb57e6cc56", :on_delete=>:cascade}) - -> 0.0017s + -> 0.1152s -- add_foreign_key("ci_stages", "projects", {:name=>"fk_2360681d1d", :on_delete=>:cascade}) - -> 0.0020s + -> 0.1062s -- add_foreign_key("ci_trigger_requests", "ci_triggers", {:column=>"trigger_id", :name=>"fk_b8ec8b7245", :on_delete=>:cascade}) - -> 0.0016s + -> 0.0455s -- add_foreign_key("ci_triggers", "projects", {:name=>"fk_e3e63f966e", :on_delete=>:cascade}) - -> 0.0021s + -> 0.0725s -- add_foreign_key("ci_triggers", "users", {:column=>"owner_id", :name=>"fk_e8e10d1964", :on_delete=>:cascade}) - -> 0.0019s + -> 0.0774s -- add_foreign_key("ci_variables", "projects", {:name=>"fk_ada5eb64b3", :on_delete=>:cascade}) - -> 0.0021s + -> 0.0626s -- add_foreign_key("cluster_platforms_kubernetes", "clusters", {:on_delete=>:cascade}) - -> 0.0019s + -> 0.0529s -- add_foreign_key("cluster_projects", "clusters", {:on_delete=>:cascade}) - -> 0.0018s + -> 0.0678s -- add_foreign_key("cluster_projects", "projects", {:on_delete=>:cascade}) - -> 0.0020s + -> 0.0391s -- add_foreign_key("cluster_providers_gcp", "clusters", {:on_delete=>:cascade}) - -> 0.0017s + -> 0.0328s -- add_foreign_key("clusters", "users", {:on_delete=>:nullify}) - -> 0.0018s + -> 0.1266s -- add_foreign_key("clusters_applications_helm", "clusters", {:on_delete=>:cascade}) - -> 0.0019s + -> 0.0489s +-- add_foreign_key("clusters_applications_ingress", "clusters", {:name=>"fk_753a7b41c1", :on_delete=>:cascade}) + -> 0.0565s +-- add_foreign_key("clusters_applications_prometheus", "clusters", {:name=>"fk_557e773639", :on_delete=>:cascade}) + -> 0.0174s +-- add_foreign_key("clusters_applications_runners", "ci_runners", {:column=>"runner_id", :name=>"fk_02de2ded36", :on_delete=>:nullify}) + -> 0.0182s +-- add_foreign_key("clusters_applications_runners", "clusters", {:on_delete=>:cascade}) + -> 0.0208s -- add_foreign_key("container_repositories", "projects") - -> 0.0020s + -> 0.0186s -- add_foreign_key("deploy_keys_projects", "projects", {:name=>"fk_58a901ca7e", :on_delete=>:cascade}) - -> 0.0019s + -> 0.0140s -- add_foreign_key("deployments", "projects", {:name=>"fk_b9a3851b82", :on_delete=>:cascade}) - -> 0.0021s + -> 0.0328s -- 
add_foreign_key("environments", "projects", {:name=>"fk_d1c8c1da6a", :on_delete=>:cascade}) - -> 0.0019s + -> 0.0221s -- add_foreign_key("events", "projects", {:on_delete=>:cascade}) - -> 0.0020s + -> 0.0212s -- add_foreign_key("events", "users", {:column=>"author_id", :name=>"fk_edfd187b6f", :on_delete=>:cascade}) - -> 0.0020s + -> 0.0150s -- add_foreign_key("fork_network_members", "fork_networks", {:on_delete=>:cascade}) - -> 0.0016s + -> 0.0134s -- add_foreign_key("fork_network_members", "projects", {:column=>"forked_from_project_id", :name=>"fk_b01280dae4", :on_delete=>:nullify}) - -> 0.0019s + -> 0.0200s -- add_foreign_key("fork_network_members", "projects", {:on_delete=>:cascade}) - -> 0.0018s + -> 0.0162s -- add_foreign_key("fork_networks", "projects", {:column=>"root_project_id", :name=>"fk_e7b436b2b5", :on_delete=>:nullify}) - -> 0.0018s + -> 0.0138s -- add_foreign_key("forked_project_links", "projects", {:column=>"forked_to_project_id", :name=>"fk_434510edb0", :on_delete=>:cascade}) - -> 0.0018s + -> 0.0137s -- add_foreign_key("gcp_clusters", "projects", {:on_delete=>:cascade}) - -> 0.0029s + -> 0.0148s -- add_foreign_key("gcp_clusters", "services", {:on_delete=>:nullify}) - -> 0.0022s + -> 0.0216s -- add_foreign_key("gcp_clusters", "users", {:on_delete=>:nullify}) - -> 0.0019s + -> 0.0156s -- add_foreign_key("gpg_key_subkeys", "gpg_keys", {:on_delete=>:cascade}) - -> 0.0017s + -> 0.0139s -- add_foreign_key("gpg_keys", "users", {:on_delete=>:cascade}) - -> 0.0019s + -> 0.0142s -- add_foreign_key("gpg_signatures", "gpg_key_subkeys", {:on_delete=>:nullify}) - -> 0.0016s + -> 0.0216s -- add_foreign_key("gpg_signatures", "gpg_keys", {:on_delete=>:nullify}) - -> 0.0016s + -> 0.0211s -- add_foreign_key("gpg_signatures", "projects", {:on_delete=>:cascade}) - -> 0.0016s + -> 0.0215s -- add_foreign_key("group_custom_attributes", "namespaces", {:column=>"group_id", :on_delete=>:cascade}) - -> 0.0014s + -> 0.0174s +-- add_foreign_key("internal_ids", "projects", {:on_delete=>:cascade}) + -> 0.0143s -- add_foreign_key("issue_assignees", "issues", {:name=>"fk_b7d881734a", :on_delete=>:cascade}) - -> 0.0019s + -> 0.0139s -- add_foreign_key("issue_assignees", "users", {:name=>"fk_5e0c8d9154", :on_delete=>:cascade}) - -> 0.0015s + -> 0.0138s -- add_foreign_key("issue_metrics", "issues", {:on_delete=>:cascade}) - -> 0.0016s + -> 0.0106s -- add_foreign_key("issues", "issues", {:column=>"moved_to_id", :name=>"fk_a194299be1", :on_delete=>:nullify}) - -> 0.0014s + -> 0.0366s -- add_foreign_key("issues", "milestones", {:name=>"fk_96b1dd429c", :on_delete=>:nullify}) - -> 0.0016s + -> 0.0309s -- add_foreign_key("issues", "projects", {:name=>"fk_899c8f3231", :on_delete=>:cascade}) - -> 0.0016s + -> 0.0314s -- add_foreign_key("issues", "users", {:column=>"author_id", :name=>"fk_05f1e72feb", :on_delete=>:nullify}) - -> 0.0015s + -> 0.0504s +-- add_foreign_key("issues", "users", {:column=>"closed_by_id", :name=>"fk_c63cbf6c25", :on_delete=>:nullify}) + -> 0.0428s -- add_foreign_key("issues", "users", {:column=>"updated_by_id", :name=>"fk_ffed080f01", :on_delete=>:nullify}) - -> 0.0017s + -> 0.0333s -- add_foreign_key("label_priorities", "labels", {:on_delete=>:cascade}) - -> 0.0015s + -> 0.0143s -- add_foreign_key("label_priorities", "projects", {:on_delete=>:cascade}) - -> 0.0015s + -> 0.0160s -- add_foreign_key("labels", "namespaces", {:column=>"group_id", :on_delete=>:cascade}) - -> 0.0015s + -> 0.0176s -- add_foreign_key("labels", "projects", {:name=>"fk_7de4989a69", :on_delete=>:cascade}) - -> 0.0016s + 
-> 0.0216s +-- add_foreign_key("lfs_file_locks", "projects", {:on_delete=>:cascade}) + -> 0.0144s +-- add_foreign_key("lfs_file_locks", "users", {:on_delete=>:cascade}) + -> 0.0178s -- add_foreign_key("lists", "boards", {:name=>"fk_0d3f677137", :on_delete=>:cascade}) - -> 0.0015s + -> 0.0161s -- add_foreign_key("lists", "labels", {:name=>"fk_7a5553d60f", :on_delete=>:cascade}) - -> 0.0014s + -> 0.0137s -- add_foreign_key("members", "users", {:name=>"fk_2e88fb7ce9", :on_delete=>:cascade}) - -> 0.0016s + -> 0.0171s -- add_foreign_key("merge_request_diff_commits", "merge_request_diffs", {:on_delete=>:cascade}) - -> 0.0014s + -> 0.0143s -- add_foreign_key("merge_request_diff_files", "merge_request_diffs", {:on_delete=>:cascade}) - -> 0.0014s + -> 0.0106s -- add_foreign_key("merge_request_diffs", "merge_requests", {:name=>"fk_8483f3258f", :on_delete=>:cascade}) - -> 0.0019s + -> 0.0119s -- add_foreign_key("merge_request_metrics", "ci_pipelines", {:column=>"pipeline_id", :on_delete=>:cascade}) - -> 0.0017s + -> 0.0163s -- add_foreign_key("merge_request_metrics", "merge_requests", {:on_delete=>:cascade}) - -> 0.0016s + -> 0.0204s -- add_foreign_key("merge_request_metrics", "users", {:column=>"latest_closed_by_id", :name=>"fk_ae440388cc", :on_delete=>:nullify}) - -> 0.0015s + -> 0.0196s -- add_foreign_key("merge_request_metrics", "users", {:column=>"merged_by_id", :name=>"fk_7f28d925f3", :on_delete=>:nullify}) - -> 0.0015s + -> 0.0202s -- add_foreign_key("merge_requests", "ci_pipelines", {:column=>"head_pipeline_id", :name=>"fk_fd82eae0b9", :on_delete=>:nullify}) - -> 0.0014s + -> 0.0394s -- add_foreign_key("merge_requests", "merge_request_diffs", {:column=>"latest_merge_request_diff_id", :name=>"fk_06067f5644", :on_delete=>:nullify}) - -> 0.0014s + -> 0.0532s -- add_foreign_key("merge_requests", "milestones", {:name=>"fk_6a5165a692", :on_delete=>:nullify}) - -> 0.0015s + -> 0.0291s -- add_foreign_key("merge_requests", "projects", {:column=>"source_project_id", :name=>"fk_3308fe130c", :on_delete=>:nullify}) - -> 0.0017s + -> 0.0278s -- add_foreign_key("merge_requests", "projects", {:column=>"target_project_id", :name=>"fk_a6963e8447", :on_delete=>:cascade}) - -> 0.0016s + -> 0.0367s -- add_foreign_key("merge_requests", "users", {:column=>"assignee_id", :name=>"fk_6149611a04", :on_delete=>:nullify}) - -> 0.0016s + -> 0.0327s -- add_foreign_key("merge_requests", "users", {:column=>"author_id", :name=>"fk_e719a85f8a", :on_delete=>:nullify}) - -> 0.0017s + -> 0.0337s -- add_foreign_key("merge_requests", "users", {:column=>"merge_user_id", :name=>"fk_ad525e1f87", :on_delete=>:nullify}) - -> 0.0018s + -> 0.0517s -- add_foreign_key("merge_requests", "users", {:column=>"updated_by_id", :name=>"fk_641731faff", :on_delete=>:nullify}) - -> 0.0017s + -> 0.0335s -- add_foreign_key("merge_requests_closing_issues", "issues", {:on_delete=>:cascade}) - -> 0.0016s + -> 0.0167s -- add_foreign_key("merge_requests_closing_issues", "merge_requests", {:on_delete=>:cascade}) - -> 0.0014s + -> 0.0191s -- add_foreign_key("milestones", "namespaces", {:column=>"group_id", :name=>"fk_95650a40d4", :on_delete=>:cascade}) - -> 0.0014s + -> 0.0206s -- add_foreign_key("milestones", "projects", {:name=>"fk_9bd0a0c791", :on_delete=>:cascade}) - -> 0.0017s + -> 0.0221s -- add_foreign_key("notes", "projects", {:name=>"fk_99e097b079", :on_delete=>:cascade}) - -> 0.0019s + -> 0.0332s -- add_foreign_key("oauth_openid_requests", "oauth_access_grants", {:column=>"access_grant_id", 
:name=>"fk_oauth_openid_requests_oauth_access_grants_access_grant_id"}) - -> 0.0014s + -> 0.0128s -- add_foreign_key("pages_domains", "projects", {:name=>"fk_ea2f6dfc6f", :on_delete=>:cascade}) - -> 0.0021s + -> 0.0220s -- add_foreign_key("personal_access_tokens", "users") - -> 0.0016s + -> 0.0187s -- add_foreign_key("project_authorizations", "projects", {:on_delete=>:cascade}) - -> 0.0016s + -> 0.0149s -- add_foreign_key("project_authorizations", "users", {:on_delete=>:cascade}) - -> 0.0016s + -> 0.0167s -- add_foreign_key("project_auto_devops", "projects", {:on_delete=>:cascade}) - -> 0.0026s + -> 0.0142s -- add_foreign_key("project_custom_attributes", "projects", {:on_delete=>:cascade}) - -> 0.0016s + -> 0.0218s -- add_foreign_key("project_features", "projects", {:name=>"fk_18513d9b92", :on_delete=>:cascade}) - -> 0.0020s + -> 0.0204s -- add_foreign_key("project_group_links", "projects", {:name=>"fk_daa8cee94c", :on_delete=>:cascade}) - -> 0.0016s + -> 0.0174s -- add_foreign_key("project_import_data", "projects", {:name=>"fk_ffb9ee3a10", :on_delete=>:cascade}) - -> 0.0016s + -> 0.0138s -- add_foreign_key("project_statistics", "projects", {:on_delete=>:cascade}) - -> 0.0021s + -> 0.0125s -- add_foreign_key("protected_branch_merge_access_levels", "protected_branches", {:name=>"fk_8a3072ccb3", :on_delete=>:cascade}) - -> 0.0014s + -> 0.0157s -- add_foreign_key("protected_branch_push_access_levels", "protected_branches", {:name=>"fk_9ffc86a3d9", :on_delete=>:cascade}) - -> 0.0014s + -> 0.0112s -- add_foreign_key("protected_branches", "projects", {:name=>"fk_7a9c6d93e7", :on_delete=>:cascade}) - -> 0.0016s + -> 0.0122s -- add_foreign_key("protected_tag_create_access_levels", "namespaces", {:column=>"group_id"}) - -> 0.0016s + -> 0.0131s -- add_foreign_key("protected_tag_create_access_levels", "protected_tags", {:name=>"fk_f7dfda8c51", :on_delete=>:cascade}) - -> 0.0013s + -> 0.0168s -- add_foreign_key("protected_tag_create_access_levels", "users") - -> 0.0018s + -> 0.0221s -- add_foreign_key("protected_tags", "projects", {:name=>"fk_8e4af87648", :on_delete=>:cascade}) - -> 0.0015s + -> 0.0135s -- add_foreign_key("push_event_payloads", "events", {:name=>"fk_36c74129da", :on_delete=>:cascade}) - -> 0.0013s + -> 0.0107s -- add_foreign_key("releases", "projects", {:name=>"fk_47fe2a0596", :on_delete=>:cascade}) - -> 0.0015s + -> 0.0131s -- add_foreign_key("services", "projects", {:name=>"fk_71cce407f9", :on_delete=>:cascade}) - -> 0.0015s + -> 0.0142s -- add_foreign_key("snippets", "projects", {:name=>"fk_be41fd4bb7", :on_delete=>:cascade}) - -> 0.0017s + -> 0.0178s -- add_foreign_key("subscriptions", "projects", {:on_delete=>:cascade}) - -> 0.0018s + -> 0.0160s -- add_foreign_key("system_note_metadata", "notes", {:name=>"fk_d83a918cb1", :on_delete=>:cascade}) - -> 0.0015s + -> 0.0156s -- add_foreign_key("timelogs", "issues", {:name=>"fk_timelogs_issues_issue_id", :on_delete=>:cascade}) - -> 0.0015s + -> 0.0183s -- add_foreign_key("timelogs", "merge_requests", {:name=>"fk_timelogs_merge_requests_merge_request_id", :on_delete=>:cascade}) - -> 0.0016s + -> 0.0198s +-- add_foreign_key("todos", "notes", {:name=>"fk_91d1f47b13", :on_delete=>:cascade}) + -> 0.0276s -- add_foreign_key("todos", "projects", {:name=>"fk_45054f9c45", :on_delete=>:cascade}) - -> 0.0018s + -> 0.0175s +-- add_foreign_key("todos", "users", {:column=>"author_id", :name=>"fk_ccf0373936", :on_delete=>:cascade}) + -> 0.0182s +-- add_foreign_key("todos", "users", {:name=>"fk_d94154aa95", :on_delete=>:cascade}) + -> 0.0184s -- 
add_foreign_key("trending_projects", "projects", {:on_delete=>:cascade}) - -> 0.0015s + -> 0.0338s -- add_foreign_key("u2f_registrations", "users") - -> 0.0017s + -> 0.0176s +-- add_foreign_key("user_callouts", "users", {:on_delete=>:cascade}) + -> 0.0160s -- add_foreign_key("user_custom_attributes", "users", {:on_delete=>:cascade}) - -> 0.0019s + -> 0.0191s +-- add_foreign_key("user_interacted_projects", "projects", {:name=>"fk_722ceba4f7", :on_delete=>:cascade}) + -> 0.0171s +-- add_foreign_key("user_interacted_projects", "users", {:name=>"fk_0894651f08", :on_delete=>:cascade}) + -> 0.0155s -- add_foreign_key("user_synced_attributes_metadata", "users", {:on_delete=>:cascade}) - -> 0.0016s + -> 0.0164s -- add_foreign_key("users_star_projects", "projects", {:name=>"fk_22cd27ddfc", :on_delete=>:cascade}) - -> 0.0016s + -> 0.0180s -- add_foreign_key("web_hook_logs", "web_hooks", {:on_delete=>:cascade}) - -> 0.0014s + -> 0.0164s -- add_foreign_key("web_hooks", "projects", {:name=>"fk_0c8ca6d9d1", :on_delete=>:cascade}) - -> 0.0017s + -> 0.0172s -- initialize_schema_migrations_table() - -> 0.0112s + -> 0.0212s +Adding limits to schema.rb for mysql +-- column_exists?(:merge_request_diffs, :st_commits) + -> 0.0010s +-- column_exists?(:merge_request_diffs, :st_diffs) + -> 0.0006s +-- change_column(:snippets, :content, :text, {:limit=>2147483647}) + -> 0.0308s +-- change_column(:notes, :st_diff, :text, {:limit=>2147483647}) + -> 0.0366s +-- change_column(:snippets, :content_html, :text, {:limit=>2147483647}) + -> 0.0272s +-- change_column(:merge_request_diff_files, :diff, :text, {:limit=>2147483647}) + -> 0.0170s +[32;1m$ date[0;m +Thu Apr 5 11:19:41 UTC 2018 [32;1m$ JOB_NAME=( $CI_JOB_NAME )[0;m [32;1m$ export CI_NODE_INDEX=${JOB_NAME[-2]}[0;m [32;1m$ export CI_NODE_TOTAL=${JOB_NAME[-1]}[0;m [32;1m$ export KNAPSACK_REPORT_PATH=knapsack/${CI_PROJECT_NAME}/${JOB_NAME[0]}_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json[0;m [32;1m$ export KNAPSACK_GENERATE_REPORT=true[0;m +[32;1m$ export SUITE_FLAKY_RSPEC_REPORT_PATH=${FLAKY_RSPEC_SUITE_REPORT_PATH}[0;m +[32;1m$ export FLAKY_RSPEC_REPORT_PATH=rspec_flaky/all_${JOB_NAME[0]}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json[0;m +[32;1m$ export NEW_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/new_${JOB_NAME[0]}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json[0;m +[32;1m$ export FLAKY_RSPEC_GENERATE_REPORT=true[0;m [32;1m$ export CACHE_CLASSES=true[0;m -[32;1m$ cp ${KNAPSACK_SPINACH_SUITE_REPORT_PATH} ${KNAPSACK_REPORT_PATH}[0;m +[32;1m$ cp ${KNAPSACK_RSPEC_SUITE_REPORT_PATH} ${KNAPSACK_REPORT_PATH}[0;m +[32;1m$ [[ -f $FLAKY_RSPEC_REPORT_PATH ]] || echo "{}" > ${FLAKY_RSPEC_REPORT_PATH}[0;m +[32;1m$ [[ -f $NEW_FLAKY_RSPEC_REPORT_PATH ]] || echo "{}" > ${NEW_FLAKY_RSPEC_REPORT_PATH}[0;m [32;1m$ scripts/gitaly-test-spawn[0;m -Gem.path: ["/root/.gem/ruby/2.3.0", "/usr/local/lib/ruby/gems/2.3.0", "/usr/local/bundle"] -ENV['BUNDLE_GEMFILE']: nil -ENV['RUBYOPT']: nil -bundle config in /builds/gitlab-org/gitlab-ce -scripts/gitaly-test-spawn:10:in `<main>': undefined local variable or method `gitaly_dir' for main:Object (NameError) -Did you mean? gitaly_dir -Settings are listed in order of priority. The top value will be used. 
-retry -Set for your local app (/usr/local/bundle/config): 3 +59 +[32;1m$ knapsack rspec "--color --format documentation"[0;m + +Report specs: +spec/services/todo_service_spec.rb +spec/lib/gitlab/import_export/project_tree_saver_spec.rb +spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb +spec/controllers/projects/merge_requests_controller_spec.rb +spec/controllers/groups_controller_spec.rb +spec/features/projects/import_export/import_file_spec.rb +spec/lib/gitlab/middleware/go_spec.rb +spec/services/groups/transfer_service_spec.rb +spec/features/projects/blobs/edit_spec.rb +spec/services/boards/lists/move_service_spec.rb +spec/services/create_deployment_service_spec.rb +spec/controllers/groups/milestones_controller_spec.rb +spec/helpers/groups_helper_spec.rb +spec/requests/api/v3/todos_spec.rb +spec/models/project_services/teamcity_service_spec.rb +spec/lib/gitlab/conflict/file_spec.rb +spec/lib/banzai/filter/snippet_reference_filter_spec.rb +spec/finders/autocomplete_users_finder_spec.rb +spec/models/service_spec.rb +spec/services/test_hooks/project_service_spec.rb +spec/features/projects/merge_requests/user_views_open_merge_request_spec.rb +spec/finders/runner_jobs_finder_spec.rb +spec/features/projects/snippets_spec.rb +spec/requests/api/v3/environments_spec.rb +spec/requests/api/namespaces_spec.rb +spec/services/merge_requests/get_urls_service_spec.rb +spec/models/lfs_file_lock_spec.rb +spec/lib/gitlab/ci/config/entry/boolean_spec.rb + +Leftover specs: + +Knapsack report generator started! + +==> Setting up GitLab Shell... + GitLab Shell setup in 0.307428917 seconds... + +==> Setting up Gitaly... + Gitaly setup in 0.000135767 seconds... + +TodoService + updates cached counts when a todo is created + Issues + #new_issue + creates a todo if assigned + does not create a todo if unassigned + creates a todo if assignee is the current user + creates a todo for each valid mentioned user + creates a directly addressed todo for each valid addressed user + creates correct todos for each valid user based on the type of mention + does not create todo if user can not see the issue when issue is confidential + does not create directly addressed todo if user cannot see the issue when issue is confidential + when a private group is mentioned + creates a todo for group members + #update_issue + creates a todo for each valid mentioned user not included in skip_users + creates a todo for each valid user not included in skip_users based on the type of mention + creates a directly addressed todo for each valid addressed user not included in skip_users + does not create a todo if user was already mentioned and todo is pending + does not create a todo if user was already mentioned and todo is done + does not create a directly addressed todo if user was already mentioned or addressed and todo is pending + does not create a directly addressed todo if user was already mentioned or addressed and todo is done + does not create todo if user can not see the issue when issue is confidential + does not create a directly addressed todo if user can not see the issue when issue is confidential + issues with a task list + does not create todo when tasks are marked as completed + does not create directly addressed todo when tasks are marked as completed + does not raise an error when description not change + #close_issue + marks related pending todos to the target for the user as done + #destroy_target + refreshes the todos count cache for users with todos on the target + does not 
refresh the todos count cache for users with only done todos on the target + yields the target to the caller + #reassigned_issue + creates a pending todo for new assignee + does not create a todo if unassigned + creates a todo if new assignee is the current user + #mark_pending_todos_as_done + marks related pending todos to the target for the user as done + cached counts + updates when todos change + #mark_todos_as_done + behaves like updating todos state + updates related todos for the user with the new_state + returns the updated ids + cached counts + updates when todos change + #mark_todos_as_done_by_ids + behaves like updating todos state + updates related todos for the user with the new_state + returns the updated ids + cached counts + updates when todos change + #mark_todos_as_pending + behaves like updating todos state + updates related todos for the user with the new_state + returns the updated ids + cached counts + updates when todos change + #mark_todos_as_pending_by_ids + behaves like updating todos state + updates related todos for the user with the new_state + returns the updated ids + cached counts + updates when todos change + #new_note + mark related pending todos to the noteable for the note author as done + does not mark related pending todos it is a system note + creates a todo for each valid mentioned user + creates a todo for each valid user based on the type of mention + creates a directly addressed todo for each valid addressed user + does not create todo if user can not see the issue when leaving a note on a confidential issue + does not create a directly addressed todo if user can not see the issue when leaving a note on a confidential issue + does not create todo when leaving a note on snippet + on commit + creates a todo for each valid mentioned user when leaving a note on commit + creates a directly addressed todo for each valid mentioned user when leaving a note on commit + #mark_todo + creates a todo from a issue + #todo_exists? 
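As an aside for readers unfamiliar with these specs, a minimal hypothetical sketch of the shape of the TodoService examples listed above; the factory attributes and the exact method signature are assumptions for illustration, not copied from the real spec file:

require 'spec_helper'

# Hypothetical sketch only; mirrors the "#mark_pending_todos_as_done" examples above.
describe TodoService do
  let(:user)  { create(:user) }
  let(:issue) { create(:issue) }

  it 'marks related pending todos to the target for the user as done' do
    todo = create(:todo, user: user, target: issue) # assumed to start in the pending state

    described_class.new.mark_pending_todos_as_done(issue, user) # signature assumed

    expect(todo.reload.state).to eq('done')
  end
end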
+ returns false when no todo exist for the given issuable + returns true when a todo exist for the given issuable + Merge Requests + #new_merge_request + creates a pending todo if assigned + does not create a todo if unassigned + does not create a todo if assignee is the current user + creates a todo for each valid mentioned user + creates a todo for each valid user based on the type of mention + creates a directly addressed todo for each valid addressed user + #update_merge_request + creates a todo for each valid mentioned user not included in skip_users + creates a todo for each valid user not included in skip_users based on the type of mention + creates a directly addressed todo for each valid addressed user not included in skip_users + does not create a todo if user was already mentioned and todo is pending + does not create a todo if user was already mentioned and todo is done + does not create a directly addressed todo if user was already mentioned or addressed and todo is pending + does not create a directly addressed todo if user was already mentioned or addressed and todo is done + with a task list + does not create todo when tasks are marked as completed + does not create directly addressed todo when tasks are marked as completed + does not raise an error when description not change + #close_merge_request + marks related pending todos to the target for the user as done + #reassigned_merge_request + creates a pending todo for new assignee + does not create a todo if unassigned + creates a todo if new assignee is the current user + does not create a todo for guests + does not create a directly addressed todo for guests + #merge_merge_request + marks related pending todos to the target for the user as done + does not create todo for guests + does not create directly addressed todo for guests + #new_award_emoji + marks related pending todos to the target for the user as done + #merge_request_build_failed + creates a pending todo for the merge request author + creates a pending todo for merge_user + #merge_request_push + marks related pending todos to the target for the user as done + #merge_request_became_unmergeable + creates a pending todo for a merge_user + #mark_todo + creates a todo from a merge request + #new_note + creates a todo for mentioned user on new diff note + creates a directly addressed todo for addressed user on new diff note + creates a todo for mentioned user on legacy diff note + does not create todo for guests + #update_note + creates a todo for each valid mentioned user not included in skip_users + creates a todo for each valid user not included in skip_users based on the type of mention + creates a directly addressed todo for each valid addressed user not included in skip_users + does not create a todo if user was already mentioned and todo is pending + does not create a todo if user was already mentioned and todo is done + does not create a directly addressed todo if user was already mentioned or addressed and todo is pending + does not create a directly addressed todo if user was already mentioned or addressed and todo is done + #mark_todos_as_done + marks a relation of todos as done + marks an array of todos as done + returns the ids of updated todos + when some of the todos are done already + returns the ids of those still pending + returns an empty array if all are done + #mark_todos_as_done_by_ids + marks an array of todo ids as done + marks a single todo id as done + caches the number of todos of a user + +Gitlab::ImportExport::ProjectTreeSaver + saves 
the project tree into a json object + saves project successfully + JSON + saves the correct json + has milestones + has merge requests + has merge request's milestones + has merge request's source branch SHA + has merge request's target branch SHA + has events + has snippets + has snippet notes + has releases + has issues + has issue comments + has issue assignees + has author on issue comments + has project members + has merge requests diffs + has merge request diff files + has merge request diff commits + has merge requests comments + has author on merge requests comments + has pipeline stages + has pipeline statuses + has pipeline builds + has no when YML attributes but only the DB column + has pipeline commits + has ci pipeline notes + has labels with no associations + has labels associated to records + has project and group labels + has priorities associated to labels + saves the correct service type + saves the properties for a service + has project feature + has custom attributes + has badges + does not complain about non UTF-8 characters in MR diff files + with description override + overrides the project description + group members + does not export group members if it has no permission + does not export group members as master + exports group members as group owner + as admin + exports group members as admin + exports group members as project members + project attributes + contains the html description + does not contain the runners token + +Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits + #perform + when the diff IDs passed do not exist + does not raise + when the merge request diff has no serialised commits or diffs + does not raise + processing multiple merge request diffs + when BUFFER_ROWS is exceeded + inserts commit rows in chunks of BUFFER_ROWS + inserts diff rows in chunks of DIFF_FILE_BUFFER_ROWS + when BUFFER_ROWS is not exceeded + only updates once + when some rows were already inserted due to a previous failure + does not raise + logs a message + ends up with the correct rows + when the merge request diff update fails + raises an error + logs the error + still adds diff commits + still adds diff files + when the merge request diff has valid commits and diffs + creates correct entries in the merge_request_diff_commits table + creates correct entries in the merge_request_diff_files table + sets the st_commits and st_diffs columns to nil + when the merge request diff has diffs but no commits + creates correct entries in the merge_request_diff_commits table + creates correct entries in the merge_request_diff_files table + sets the st_commits and st_diffs columns to nil + when the merge request diffs do not have too_large set + creates correct entries in the merge_request_diff_commits table + creates correct entries in the merge_request_diff_files table + sets the st_commits and st_diffs columns to nil + when the merge request diffs do not have a_mode and b_mode set + creates correct entries in the merge_request_diff_commits table + creates correct entries in the merge_request_diff_files table + sets the st_commits and st_diffs columns to nil + when the merge request diffs have binary content + creates correct entries in the merge_request_diff_commits table + creates correct entries in the merge_request_diff_files table + sets the st_commits and st_diffs columns to nil + when the merge request diff has commits, but no diffs + creates correct entries in the merge_request_diff_commits table + creates correct entries in the merge_request_diff_files table 
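A minimal sketch, under stated assumptions, of the chunked-insert pattern the background-migration examples above describe ("inserts commit rows in chunks of BUFFER_ROWS"); the constant value, the row shape, and the use of the bulk_insert helper are assumptions for illustration, not the migration's actual code:

BUFFER_ROWS = 1000 # assumed value for illustration

def insert_commit_rows(rows)
  # one multi-row INSERT per slice keeps memory use and transaction size bounded
  rows.each_slice(BUFFER_ROWS) do |chunk|
    Gitlab::Database.bulk_insert('merge_request_diff_commits', chunk)
  end
end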
+ sets the st_commits and st_diffs columns to nil + when the merge request diffs have invalid content + creates correct entries in the merge_request_diff_commits table + creates correct entries in the merge_request_diff_files table + sets the st_commits and st_diffs columns to nil + when the merge request diffs are Rugged::Patch instances + creates correct entries in the merge_request_diff_commits table + creates correct entries in the merge_request_diff_files table + sets the st_commits and st_diffs columns to nil + when the merge request diffs are Rugged::Diff::Delta instances + creates correct entries in the merge_request_diff_commits table + creates correct entries in the merge_request_diff_files table + sets the st_commits and st_diffs columns to nil + +Projects::MergeRequestsController + GET commit_change_content + renders commit_change_content template + GET show + behaves like loads labels + loads labels into the @labels variable + as html + renders merge request page + loads notes + with special_role FIRST_TIME_CONTRIBUTOR + as json + with basic serializer param + renders basic MR entity as json + with widget serializer param + renders widget MR entity as json + when no serialiser was passed + renders widget MR entity as json + as diff + triggers workhorse to serve the request + as patch + triggers workhorse to serve the request + GET index + behaves like issuables list meta-data + creates indexed meta-data object for issuable notes and votes count + when given empty collection + doesn't execute any queries with false conditions + when page param + redirects to last_page if page number is larger than number of pages + redirects to specified page + does not redirect to external sites when provided a host field + when filtering by opened state + with opened merge requests + lists those merge requests + with reopened merge requests + lists those merge requests + PUT update + changing the assignee + limits the attributes exposed on the assignee + when user does not have access to update issue + responds with 404 + there is no source project + closes MR without errors + allows editing of a closed merge request + does not allow to update target branch closed merge request + behaves like update invalid issuable + when updating causes conflicts + renders edit when format is html + renders json error message when format is json + when updating an invalid issuable + renders edit when merge request is invalid + POST merge + when user cannot access + returns 404 + when the merge request is not mergeable + returns :failed + when the sha parameter does not match the source SHA + returns :sha_mismatch + when the sha parameter matches the source SHA + returns :success + starts the merge immediately + when the pipeline succeeds is passed + returns :merge_when_pipeline_succeeds + sets the MR to merge when the pipeline succeeds + when project.only_allow_merge_if_pipeline_succeeds? is true + returns :merge_when_pipeline_succeeds + and head pipeline is not the current one + returns :failed + only_allow_merge_if_all_discussions_are_resolved? 
setting + when enabled + with unresolved discussion + returns :failed + with all discussions resolved + returns :success + when disabled + with unresolved discussion + returns :success + with all discussions resolved + returns :success + DELETE destroy + denies access to users unless they're admin or project owner + when the user is owner + deletes the merge request + delegates the update of the todos count cache to TodoService + GET commits + renders the commits template to a string + GET pipelines + responds with serialized pipelines + POST remove_wip + removes the wip status + renders MergeRequest as JSON + POST cancel_merge_when_pipeline_succeeds + calls MergeRequests::MergeWhenPipelineSucceedsService + should respond with numeric status code success + renders MergeRequest as JSON + POST assign_related_issues + shows a flash message on success + correctly pluralizes flash message on success + calls MergeRequests::AssignIssuesService + is skipped when not signed in + GET ci_environments_status + the environment is from a forked project + links to the environment on that project + GET pipeline_status.json + when head_pipeline exists + return a detailed head_pipeline status in json + when head_pipeline does not exist + return empty + POST #rebase + successfully + enqeues a RebaseWorker + with a forked project + user cannot push to source branch + returns 404 + user can push to source branch + returns 200 + +GroupsController + GET #show + as html + assigns whether or not a group has children + as atom + assigns events for all the projects in the group + GET #new + when creating subgroups + and can_create_group is true + and logged in as Admin + behaves like member with ability to create subgroups + renders the new page (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + and logged in as Owner + behaves like member with ability to create subgroups + renders the new page (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + and logged in as Guest + behaves like member without ability to create subgroups + renders the 404 page (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + and logged in as Developer + behaves like member without ability to create subgroups + renders the 404 page (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + and logged in as Master + behaves like member without ability to create subgroups + renders the 404 page (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + and can_create_group is false + and logged in as Admin + behaves like member with ability to create subgroups + renders the new page (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + and logged in as Owner + behaves like member with ability to create subgroups + renders the new page (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + and logged in as Guest + behaves like member without ability to create subgroups + renders the 404 page (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + and logged in as Developer + behaves like member without ability to create subgroups + renders the 404 page (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + and logged in as Master + behaves like member without ability to create subgroups + renders the 404 page (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + GET #activity + 
as json + includes all projects in event feed + POST #create + when creating subgroups + and can_create_group is true + and logged in as Owner + creates the subgroup (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + and logged in as Developer + renders the new template (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + and can_create_group is false + and logged in as Owner + creates the subgroup (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + and logged in as Developer + renders the new template (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + when creating a top level group + and can_create_group is enabled + creates the Group + and can_create_group is disabled + does not create the Group + GET #index + as a user + redirects to Groups Dashboard + as a guest + redirects to Explore Groups + GET #issues + sorting by votes + sorts most popular issues + sorts least popular issues + GET #merge_requests + sorting by votes + sorts most popular merge requests + sorts least popular merge requests + DELETE #destroy + as another user + returns 404 + as the group owner + schedules a group destroy + redirects to the root path + PUT update + updates the path successfully + does not update the path on error + #ensure_canonical_path + for a GET request + when requesting groups at the root path + when requesting the canonical path with different casing + redirects to the correct casing + when requesting a redirected path + redirects to the canonical path + when the old group path is a substring of the scheme or host + does not modify the requested host + when the old group path is substring of groups + does not modify the /groups part of the path + when requesting groups under the /groups path + when requesting the canonical path + non-show path + with exactly matching casing + does not redirect + with different casing + redirects to the correct casing + show path + with exactly matching casing + does not redirect + with different casing + redirects to the correct casing at the root path + when requesting a redirected path + redirects to the canonical path + when the old group path is a substring of the scheme or host + does not modify the requested host + when the old group path is substring of groups + does not modify the /groups part of the path + when the old group path is substring of groups plus the new path + does not modify the /groups part of the path + for a POST request + when requesting the canonical path with different casing + does not 404 + does not redirect to the correct casing + when requesting a redirected path + returns not found + for a DELETE request + when requesting the canonical path with different casing + does not 404 + does not redirect to the correct casing + when requesting a redirected path + returns not found + PUT transfer + when transfering to a subgroup goes right + should return a notice (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should redirect to the new path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when converting to a root group goes right + should return a notice (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should redirect to the new path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + When the transfer goes wrong + should return an alert (PENDING: around hook at ./spec/spec_helper.rb:190 
did not execute the example) + should redirect to the current path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the user is not allowed to transfer the group + should be denied (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + +Import/Export - project import integration test +Starting the Capybara driver server... + invalid project + when selecting the namespace + prefilled the path + user imports an exported project successfully + path is not prefilled + user imports an exported project successfully + +Gitlab::Middleware::Go + #call + when go-get=0 + skips go-import generation + when go-get=1 + with SSH disabled + with simple 2-segment project path + with subpackages + returns the full project path + without subpackages + returns the full project path + with a nested project path + with subpackages + behaves like a nested project + when the project is public + returns the full project path + when the project is private + when not authenticated + behaves like unauthorized + returns the 2-segment group path + when authenticated + using warden + when active + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + when blocked + behaves like unauthorized + returns the 2-segment group path + using a personal access token + with api scope + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + with read_user scope + behaves like unauthorized + returns the 2-segment group path + with a subpackage that is not a valid project path + behaves like a nested project + when the project is public + returns the full project path + when the project is private + when not authenticated + behaves like unauthorized + returns the 2-segment group path + when authenticated + using warden + when active + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + when blocked + behaves like unauthorized + returns the 2-segment group path + using a personal access token + with api scope + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + with read_user scope + behaves like unauthorized + returns the 2-segment group path + without subpackages + behaves like a nested project + when the project is public + returns the full project path + when the project is private + when not authenticated + behaves like unauthorized + returns the 2-segment group path + when authenticated + using warden + when active + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + when blocked + behaves like unauthorized + returns the 2-segment group path + using a personal access token + with api scope + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + with read_user scope + behaves like unauthorized + returns the 2-segment group path + with a bogus path + skips go-import generation + with HTTP 
disabled + with simple 2-segment project path + with subpackages + returns the full project path + without subpackages + returns the full project path + with a nested project path + with subpackages + behaves like a nested project + when the project is public + returns the full project path + when the project is private + when not authenticated + behaves like unauthorized + returns the 2-segment group path + when authenticated + using warden + when active + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + when blocked + behaves like unauthorized + returns the 2-segment group path + using a personal access token + with api scope + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + with read_user scope + behaves like unauthorized + returns the 2-segment group path + with a subpackage that is not a valid project path + behaves like a nested project + when the project is public + returns the full project path + when the project is private + when not authenticated + behaves like unauthorized + returns the 2-segment group path + when authenticated + using warden + when active + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + when blocked + behaves like unauthorized + returns the 2-segment group path + using a personal access token + with api scope + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + with read_user scope + behaves like unauthorized + returns the 2-segment group path + without subpackages + behaves like a nested project + when the project is public + returns the full project path + when the project is private + when not authenticated + behaves like unauthorized + returns the 2-segment group path + when authenticated + using warden + when active + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + when blocked + behaves like unauthorized + returns the 2-segment group path + using a personal access token + with api scope + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + with read_user scope + behaves like unauthorized + returns the 2-segment group path + with a bogus path + skips go-import generation + with nothing disabled + with simple 2-segment project path + with subpackages + returns the full project path + without subpackages + returns the full project path + with a nested project path + with subpackages + behaves like a nested project + when the project is public + returns the full project path + when the project is private + when not authenticated + behaves like unauthorized + returns the 2-segment group path + when authenticated + using warden + when active + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + when blocked + behaves like 
unauthorized + returns the 2-segment group path + using a personal access token + with api scope + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + with read_user scope + behaves like unauthorized + returns the 2-segment group path + with a subpackage that is not a valid project path + behaves like a nested project + when the project is public + returns the full project path + when the project is private + when not authenticated + behaves like unauthorized + returns the 2-segment group path + when authenticated + using warden + when active + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + when blocked + behaves like unauthorized + returns the 2-segment group path + using a personal access token + with api scope + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + with read_user scope + behaves like unauthorized + returns the 2-segment group path + without subpackages + behaves like a nested project + when the project is public + returns the full project path + when the project is private + when not authenticated + behaves like unauthorized + returns the 2-segment group path + when authenticated + using warden + when active + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + when blocked + behaves like unauthorized + returns the 2-segment group path + using a personal access token + with api scope + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + with read_user scope + behaves like unauthorized + returns the 2-segment group path + with a bogus path + skips go-import generation + with nothing disabled (blank string) + with simple 2-segment project path + with subpackages + returns the full project path + without subpackages + returns the full project path + with a nested project path + with subpackages + behaves like a nested project + when the project is public + returns the full project path + when the project is private + when not authenticated + behaves like unauthorized + returns the 2-segment group path + when authenticated + using warden + when active + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + when blocked + behaves like unauthorized + returns the 2-segment group path + using a personal access token + with api scope + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + with read_user scope + behaves like unauthorized + returns the 2-segment group path + with a subpackage that is not a valid project path + behaves like a nested project + when the project is public + returns the full project path + when the project is private + when not authenticated + behaves like unauthorized + returns the 2-segment group path + when authenticated + using warden + 
when active + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + when blocked + behaves like unauthorized + returns the 2-segment group path + using a personal access token + with api scope + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + with read_user scope + behaves like unauthorized + returns the 2-segment group path + without subpackages + behaves like a nested project + when the project is public + returns the full project path + when the project is private + when not authenticated + behaves like unauthorized + returns the 2-segment group path + when authenticated + using warden + when active + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + when blocked + behaves like unauthorized + returns the 2-segment group path + using a personal access token + with api scope + behaves like authenticated + with access to the project + returns the full project path + without access to the project + behaves like unauthorized + returns the 2-segment group path + with read_user scope + behaves like unauthorized + returns the 2-segment group path + with a bogus path + skips go-import generation + +Groups::TransferService + #execute + when transforming a group into a root group + behaves like ensuring allowed transfer for a group + with other database than PostgreSQL + should return false (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should add an error on group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when there's an exception on Gitlab shell directories + should return false (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should add an error on group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the group is already a root group + should add an error on group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the user does not have the right policies + should return false (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should add an error on group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when there is a group with the same path + should return false (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should add an error on group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the group is a subgroup and the transfer is valid + should update group attributes (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should update group children path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should update group projects path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when transferring a subgroup into another group + behaves like ensuring allowed transfer for a group + with other database than PostgreSQL + should return false (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should add an error on group (PENDING: around hook at 
./spec/spec_helper.rb:190 did not execute the example) + when there's an exception on Gitlab shell directories + should return false (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should add an error on group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the new parent group is the same as the previous parent group + should return false (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should add an error on group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the user does not have the right policies + should return false (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should add an error on group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the parent has a group with the same path + should return false (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should add an error on group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the parent group has a project with the same path + should return false (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should add an error on group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the group is allowed to be transferred + should update visibility for the group based on the parent group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should update parent group to the new parent (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should return the group as children of the new parent (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should create a redirect for the group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the group has a lower visibility than the parent group + should not update the visibility for the group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the group has a higher visibility than the parent group + should update visibility level based on the parent group (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when transferring a group with group descendants + should update subgroups path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should create redirects for the subgroups (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the new parent has a higher visibility than the children + should not update the children visibility (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the new parent has a lower visibility than the children + should update children visibility to match the new parent (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when transferring a group with project descendants + should update projects path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should create permanent redirects for the projects (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the new parent has a higher visibility than the projects + should not update projects visibility (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when the new parent has a 
lower visibility than the projects + should update projects visibility to match the new parent (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when transferring a group with subgroups & projects descendants + should update subgroups path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should update projects path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should create redirect for the subgroups and projects (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when transfering a group with nested groups and projects + should update subgroups path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should update projects path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + should create redirect for the subgroups and projects (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + when updating the group goes wrong + should restore group and projects visibility (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example) + +Editing file blob + as a developer + from MR diff + returns me to the mr + from blob file path + updates content + previews content + visit blob edit + redirects to sign in and returns + as developer + redirects to sign in and returns + as guest + redirects to sign in and returns + as developer + on some branch + shows blob editor with same branch + with protected branch + shows blob editor with patch branch + as master + shows blob editor with same branch + +Boards::Lists::MoveService + #execute + when board parent is a project + behaves like lists move service + keeps position of lists when list type is closed + when list type is set to label + keeps position of lists when new position is nil + keeps position of lists when new positon is equal to old position + keeps position of lists when new positon is negative + keeps position of lists when new positon is equal to number of labels lists + keeps position of lists when new positon is greater than number of labels lists + increments position of intermediate lists when new positon is equal to first position + decrements position of intermediate lists when new positon is equal to last position + decrements position of intermediate lists when new position is greater than old position + increments position of intermediate lists when new position is lower than old position + when board parent is a group + behaves like lists move service + keeps position of lists when list type is closed + when list type is set to label + keeps position of lists when new position is nil + keeps position of lists when new positon is equal to old position + keeps position of lists when new positon is negative + keeps position of lists when new positon is equal to number of labels lists + keeps position of lists when new positon is greater than number of labels lists + increments position of intermediate lists when new positon is equal to first position + decrements position of intermediate lists when new positon is equal to last position + decrements position of intermediate lists when new position is greater than old position + increments position of intermediate lists when new position is lower than old position + +CreateDeploymentService + #execute + when environment exists + creates a deployment + when environment does not exist + does not create a deployment + when start action is defined + and 
environment is stopped + makes environment available + creates a deployment + when stop action is defined + and environment is available + makes environment stopped + does not create a deployment + when variables are used + creates a new deployment + does not create a new environment + updates external url + when project was removed + does not create deployment or environment + #expanded_environment_url + when yaml environment uses $CI_COMMIT_REF_NAME + should eq "http://review/master" + when yaml environment uses $CI_ENVIRONMENT_SLUG + should eq "http://review/prod-slug" + when yaml environment uses yaml_variables containing symbol keys + should eq "http://review/host" + when yaml environment does not have url + returns the external_url from persisted environment + processing of builds + without environment specified + behaves like does not create deployment + does not create a new deployment + does not call a service + when environment is specified + when job succeeds + behaves like creates deployment + creates a new deployment + calls a service + is set as deployable + updates environment URL + when job fails + behaves like does not create deployment + does not create a new deployment + does not call a service + when job is retried + behaves like creates deployment + creates a new deployment + calls a service + is set as deployable + updates environment URL + merge request metrics + while updating the 'first_deployed_to_production_at' time + for merge requests merged before the current deploy + sets the time if the deploy's environment is 'production' + doesn't set the time if the deploy's environment is not 'production' + does not raise errors if the merge request does not have a metrics record + for merge requests merged before the previous deploy + if the 'first_deployed_to_production_at' time is already set + does not overwrite the older 'first_deployed_to_production_at' time + if the 'first_deployed_to_production_at' time is not already set + does not overwrite the older 'first_deployed_to_production_at' time + +Groups::MilestonesController + #index + shows group milestones page + as JSON + lists legacy group milestones and group milestones + #show + when there is a title parameter + searchs for a legacy group milestone + when there is not a title parameter + searchs for a group milestone + behaves like milestone tabs + #merge_requests + as html + redirects to milestone#show + as json + renders the merge requests tab template to a string + #participants + as html + redirects to milestone#show + as json + renders the participants tab template to a string + #labels + as html + redirects to milestone#show + as json + renders the labels tab template to a string + #create + creates group milestone with Chinese title + #update + updates group milestone + legacy group milestones + updates only group milestones state + #ensure_canonical_path + for a GET request + when requesting the canonical path + non-show path + with exactly matching casing + does not redirect + with different casing + redirects to the correct casing + show path + with exactly matching casing + does not redirect + with different casing + redirects to the correct casing + when requesting a redirected path + redirects to the canonical path + when the old group path is a substring of the scheme or host + does not modify the requested host + when the old group path is substring of groups + does not modify the /groups part of the path + when the old group path is substring of groups plus the new path + does not modify the 
/groups part of the path + for a non-GET request + when requesting the canonical path with different casing + does not 404 + does not redirect to the correct casing + when requesting a redirected path + returns not found + +GroupsHelper + group_icon + returns an url for the avatar + group_icon_url + returns an url for the avatar + gives default avatar_icon when no avatar is present + group_lfs_status + only one project in group + returns all projects as enabled + returns all projects as disabled + more than one project in group + LFS enabled in group + returns both projects as enabled + returns only one as enabled + LFS disabled in group + returns both projects as disabled + returns only one as disabled + group_title + outputs the groups in the correct order (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + #share_with_group_lock_help_text + root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :root_owner, help_text: :default_help, linked_ancestor: nil + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :sub_owner, help_text: :default_help, linked_ancestor: nil + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :sub_sub_owner, help_text: :default_help, linked_ancestor: nil + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :root_owner, help_text: :default_help, linked_ancestor: nil + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :sub_owner, help_text: :default_help, linked_ancestor: nil + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :sub_sub_owner, help_text: :default_help, linked_ancestor: nil + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :root_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :subgroup + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :sub_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :subgroup + has the correct help text with correct ancestor links 
(PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :sub_sub_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :subgroup + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :root_owner, help_text: :ancestor_locked_but_you_can_override, linked_ancestor: :subgroup + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :sub_owner, help_text: :ancestor_locked_but_you_can_override, linked_ancestor: :subgroup + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :sub_sub_owner, help_text: :ancestor_locked_so_ask_the_owner, linked_ancestor: :subgroup + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :root_owner, help_text: :default_help, linked_ancestor: nil + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :sub_owner, help_text: :default_help, linked_ancestor: nil + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :sub_sub_owner, help_text: :default_help, linked_ancestor: nil + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :root_owner, help_text: :default_help, linked_ancestor: nil + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :sub_owner, help_text: :default_help, linked_ancestor: nil + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :sub_sub_owner, help_text: :default_help, linked_ancestor: nil + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + 
root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :root_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :root_group + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :sub_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :root_group + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :sub_sub_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :root_group + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :root_owner, help_text: :ancestor_locked_but_you_can_override, linked_ancestor: :root_group + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :sub_owner, help_text: :ancestor_locked_so_ask_the_owner, linked_ancestor: :root_group + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :sub_sub_owner, help_text: :ancestor_locked_so_ask_the_owner, linked_ancestor: :root_group + has the correct help text with correct ancestor links (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + #group_sidebar_links + returns all the expected links + includes settings when the user can admin the group + excludes cross project features when the user cannot read cross project + +API::V3::Todos + DELETE /todos/:id + when unauthenticated + returns authentication error + when authenticated + marks a todo as done + updates todos cache + returns 404 if the todo does not belong to the current user + DELETE /todos + when unauthenticated + returns authentication error + when authenticated + marks all todos as done + updates todos cache + +TeamcityService + Associations + should belong to project + should have one service_hook + Validations + when service is active + should validate that :build_type cannot be empty/falsy + should validate that :teamcity_url cannot be empty/falsy + behaves like issue tracker service URL attribute + should allow :teamcity_url to be ‹"https://example.com"› + should not allow :teamcity_url to be ‹"example.com"› + should not allow :teamcity_url to be ‹"ftp://example.com"› + should not allow :teamcity_url to be ‹"herp-and-derp"› + #username + does not validate the presence of username if password is nil + validates the presence of username if password is present + #password + does not validate the presence of password if username is nil + validates the presence of password if username is present + when service is 
inactive + should not validate that :build_type cannot be empty/falsy + should not validate that :teamcity_url cannot be empty/falsy + should not validate that :username cannot be empty/falsy + should not validate that :password cannot be empty/falsy + Callbacks + before_update :reset_password + saves password if new url is set together with password when no password was previously set + when a password was previously set + resets password if url changed + does not reset password if username changed + does not reset password if new url is set together with password, even if it's the same password + #build_page + returns the contents of the reactive cache + #commit_status + returns the contents of the reactive cache + #calculate_reactive_cache + build_page + returns a specific URL when status is 500 + returns a build URL when teamcity_url has no trailing slash + teamcity_url has trailing slash + returns a build URL + commit_status + sets commit status to :error when status is 500 + sets commit status to "pending" when status is 404 + sets commit status to "success" when build status contains SUCCESS + sets commit status to "failed" when build status contains FAILURE + sets commit status to "pending" when build status contains Pending + sets commit status to :error when build status is unknown + +Gitlab::Conflict::File + #resolve_lines + raises ResolutionError when passed a hash without resolutions for all sections + when resolving everything to the same side + has the correct number of lines + has content matching the chosen lines + with mixed resolutions + has the correct number of lines + returns a file containing only the chosen parts of the resolved sections + #highlight_lines! + modifies the existing lines + is called implicitly when rich_text is accessed on a line + sets the rich_text of the lines matching the text content + highlights the lines correctly + #sections + only inserts match lines when there is a gap between sections + sets conflict to false for sections with only unchanged lines + only includes a maximum of CONTEXT_LINES (plus an optional match line) in context sections + sets conflict to true for sections with only changed lines + adds unique IDs to conflict sections, and not to other sections + with an example file + sets the correct match line headers + does not add match lines where they are not needed + creates context sections of the correct length + #as_json + includes the blob path for the file + includes the blob icon for the file + with the full_content option passed + includes the full content of the conflict + includes the detected language of the conflict file + +Banzai::Filter::SnippetReferenceFilter + requires project context + ignores valid references contained inside 'pre' element + ignores valid references contained inside 'code' element + ignores valid references contained inside 'a' element + ignores valid references contained inside 'style' element + internal reference + links to a valid reference + links with adjacent text + ignores invalid snippet IDs + includes a title attribute + escapes the title attribute + includes default classes + includes a data-project attribute + includes a data-snippet attribute + supports an :only_path context + cross-project / cross-namespace complete reference + links to a valid reference + link has valid text + has valid text + ignores invalid snippet IDs on the referenced project + cross-project / same-namespace complete reference + links to a valid reference + link has valid text + has valid text + ignores invalid 
snippet IDs on the referenced project + cross-project shorthand reference + links to a valid reference + link has valid text + has valid text + ignores invalid snippet IDs on the referenced project + cross-project URL reference + links to a valid reference + links with adjacent text + ignores invalid snippet IDs on the referenced project + group context + links to a valid reference + +AutocompleteUsersFinder + #execute + should contain exactly #<User id:2126 @johndoe>, #<User id:2128 @user2119>, #<User id:2129 @user2120>, and #<User id:2130 @user2121> + when current_user not passed or nil + should contain exactly + when project passed + should contain exactly #<User id:2140 @user2127> + when author_id passed + should contain exactly #<User id:2146 @user2131> and #<User id:2142 @notsorandom> + when group passed and project not passed + should contain exactly #<User id:2147 @johndoe> + when passed a subgroup + includes users from parent groups as well (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example) + when filtered by search + should contain exactly #<User id:2152 @johndoe> + when filtered by skip_users + should contain exactly #<User id:2157 @johndoe> and #<User id:2159 @user2138> + when todos exist + when filtered by todo_filter without todo_state_filter + should contain exactly + when filtered by todo_filter with pending todo_state_filter + should contain exactly #<User id:2175 @johndoe> + when filtered by todo_filter with done todo_state_filter + should contain exactly #<User id:2190 @user2163> + when filtered by current_user + should contain exactly #<User id:2202 @notsorandom>, #<User id:2201 @johndoe>, #<User id:2203 @user2174>, and #<User id:2204 @user2175> + when filtered by author_id + should contain exactly #<User id:2206 @notsorandom>, #<User id:2205 @johndoe>, #<User id:2207 @user2176>, #<User id:2208 @user2177>, and #<User id:2209 @user2178> + +Service + Associations + should belong to project + should have one service_hook + Validations + should validate that :type cannot be empty/falsy + Scopes + .confidential_note_hooks + includes services where confidential_note_events is true + excludes services where confidential_note_events is false + Test Button + #can_test? + when repository is not empty + returns true + when repository is empty + returns true + #test + when repository is not empty + test runs execute + when repository is empty + test runs execute + Template + .build_from_template + when template is invalid + sets service template to inactive when template is invalid + for pushover service + is prefilled for projects pushover service + has all fields prefilled + {property}_changed? + returns false when the property has not been assigned a new value + returns true when the property has been assigned a different value + returns true when the property has been assigned a different value twice + returns false when the property has been re-assigned the same value + returns false when the property has been assigned a new value then saved + {property}_touched? 
+ returns false when the property has not been assigned a new value + returns true when the property has been assigned a different value + returns true when the property has been assigned a different value twice + returns true when the property has been re-assigned the same value + returns false when the property has been assigned a new value then saved + {property}_was + returns nil when the property has not been assigned a new value + returns the previous value when the property has been assigned a different value + returns initial value when the property has been re-assigned the same value + returns initial value when the property has been assigned multiple values + returns nil when the property has been assigned a new value then saved + initialize service with no properties + does not raise error + creates the properties + callbacks + on create + updates the has_external_issue_tracker boolean + on update + updates the has_external_issue_tracker boolean + #deprecated? + should return false by default + #deprecation_message + should be empty by default + .find_by_template + returns service template + #api_field_names + filters out sensitive fields + +TestHooks::ProjectService + #execute + hook with not implemented test + returns error message + push_events + returns error message if not enough data + executes hook + tag_push_events + returns error message if not enough data + executes hook + note_events + returns error message if not enough data + executes hook + issues_events + returns error message if not enough data + executes hook + confidential_issues_events + returns error message if not enough data + executes hook + merge_requests_events + returns error message if not enough data + executes hook + job_events + returns error message if not enough data + executes hook + pipeline_events + returns error message if not enough data + executes hook + wiki_page_events + returns error message if wiki disabled + returns error message if not enough data + executes hook + +User views an open merge request + when a merge request does not have repository + renders both the title and the description + when a merge request has repository + when rendering description preview + renders empty description preview + renders description preview + when the branch is rebased on the target + does not show diverged commits count + when the branch is diverged on the target + shows diverged commits count + +RunnerJobsFinder + #execute + when params is empty + returns all jobs assigned to Runner + when params contains status + when status is created + returns matched job + when status is pending + returns matched job + when status is running + returns matched job + when status is success + returns matched job + when status is failed + returns matched job + when status is canceled + returns matched job + when status is skipped + returns matched job + when status is manual + returns matched job + +Project snippets + when the project has snippets + pagination + behaves like paginated snippets + is limited to 20 items per page + clicking on the link to the second page + shows the remaining snippets + list content + contains all project snippets + when submitting a note + should have autocomplete + should have zen mode + +API::V3::Environments + GET /projects/:id/environments + as member of the project + returns project environments + behaves like a paginated resources + has pagination headers + as non member + returns a 404 status code + POST /projects/:id/environments + as a member + creates a environment with 
valid params + requires name to be passed + returns a 400 if environment already exists + returns a 400 if slug is specified + a non member + rejects the request + returns a 400 when the required params are missing + PUT /projects/:id/environments/:environment_id + returns a 200 if name and external_url are changed + won't allow slug to be changed + won't update the external_url if only the name is passed + returns a 404 if the environment does not exist + DELETE /projects/:id/environments/:environment_id + as a master + returns a 200 for an existing environment + returns a 404 for non existing id + a non member + rejects the request + +API::Namespaces + GET /namespaces + when unauthenticated + returns authentication error + when authenticated as admin + returns correct attributes + admin: returns an array of all namespaces + admin: returns an array of matched namespaces + when authenticated as a regular user + returns correct attributes when user can admin group + returns correct attributes when user cannot admin group + user: returns an array of namespaces + admin: returns an array of matched namespaces + GET /namespaces/:id + when unauthenticated + returns authentication error + when authenticated as regular user + when requested namespace is not owned by user + when requesting group + returns not-found + when requesting personal namespace + returns not-found + when requested namespace is owned by user + behaves like namespace reader + when namespace exists + when requested by ID + when requesting group + behaves like can access namespace + returns namespace details + when requesting personal namespace + behaves like can access namespace + returns namespace details + when requested by path + when requesting group + behaves like can access namespace + returns namespace details + when requesting personal namespace + behaves like can access namespace + returns namespace details + when namespace doesn't exist + returns not-found + when authenticated as admin + when requested namespace is not owned by user + when requesting group + behaves like can access namespace + returns namespace details + when requesting personal namespace + behaves like can access namespace + returns namespace details + when requested namespace is owned by user + behaves like namespace reader + when namespace exists + when requested by ID + when requesting group + behaves like can access namespace + returns namespace details + when requesting personal namespace + behaves like can access namespace + returns namespace details + when requested by path + when requesting group + behaves like can access namespace + returns namespace details + when requesting personal namespace + behaves like can access namespace + returns namespace details + when namespace doesn't exist + returns not-found + +MergeRequests::GetUrlsService + #execute + pushing to default branch + behaves like no_merge_request_url + returns no URL + pushing to project with MRs disabled + behaves like no_merge_request_url + returns no URL + pushing one completely new branch + behaves like new_merge_request_link + returns url to create new merge request + pushing to existing branch but no merge request + behaves like new_merge_request_link + returns url to create new merge request + pushing to deleted branch + behaves like no_merge_request_url + returns no URL + pushing to existing branch and merge request opened + behaves like show_merge_request_url + returns url to view merge request + pushing to existing branch and merge request is reopened + behaves like 
show_merge_request_url + returns url to view merge request + pushing to existing branch from forked project + behaves like show_merge_request_url + returns url to view merge request + pushing to existing branch and merge request is closed + behaves like new_merge_request_link + returns url to create new merge request + pushing to existing branch and merge request is merged + behaves like new_merge_request_link + returns url to create new merge request + pushing new branch and existing branch (with merge request created) at once + returns 2 urls for both creating new and showing merge request + when printing_merge_request_link_enabled is false + returns empty array + +LfsFileLock + should belong to project + should belong to user + should validate that :project_id cannot be empty/falsy + should validate that :user_id cannot be empty/falsy + should validate that :path cannot be empty/falsy + #can_be_unlocked_by? + when it's forced + can be unlocked by the author + can be unlocked by a master + can't be unlocked by other user + when it isn't forced + can be unlocked by the author + can't be unlocked by a master + can't be unlocked by other user + +Gitlab::Ci::Config::Entry::Boolean + validations + when entry config value is valid + #value + returns key value + #valid? + is valid + when entry value is not valid + #errors + saves errors +Knapsack report was generated. Preview: +{ + "spec/services/todo_service_spec.rb": 53.71851348876953, + "spec/lib/gitlab/import_export/project_tree_saver_spec.rb": 48.39624857902527, + "spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb": 35.17360734939575, + "spec/controllers/projects/merge_requests_controller_spec.rb": 25.50887441635132, + "spec/controllers/groups_controller_spec.rb": 13.007296323776245, + "spec/features/projects/import_export/import_file_spec.rb": 16.827879428863525, + "spec/lib/gitlab/middleware/go_spec.rb": 12.497276306152344, + "spec/features/projects/blobs/edit_spec.rb": 11.511932134628296, + "spec/services/boards/lists/move_service_spec.rb": 8.695446491241455, + "spec/services/create_deployment_service_spec.rb": 6.754847526550293, + "spec/controllers/groups/milestones_controller_spec.rb": 6.8740551471710205, + "spec/helpers/groups_helper_spec.rb": 0.9002459049224854, + "spec/requests/api/v3/todos_spec.rb": 6.5924904346466064, + "spec/models/project_services/teamcity_service_spec.rb": 2.9881808757781982, + "spec/lib/gitlab/conflict/file_spec.rb": 5.294132709503174, + "spec/lib/banzai/filter/snippet_reference_filter_spec.rb": 4.118850469589233, + "spec/finders/autocomplete_users_finder_spec.rb": 3.864232063293457, + "spec/models/service_spec.rb": 3.1697962284088135, + "spec/services/test_hooks/project_service_spec.rb": 4.167759656906128, + "spec/features/projects/merge_requests/user_views_open_merge_request_spec.rb": 4.707003355026245, + "spec/finders/runner_jobs_finder_spec.rb": 3.2137575149536133, + "spec/features/projects/snippets_spec.rb": 3.631467580795288, + "spec/requests/api/v3/environments_spec.rb": 2.314746856689453, + "spec/requests/api/namespaces_spec.rb": 2.352935314178467, + "spec/services/merge_requests/get_urls_service_spec.rb": 2.8039824962615967, + "spec/models/lfs_file_lock_spec.rb": 0.7295050621032715, + "spec/lib/gitlab/ci/config/entry/boolean_spec.rb": 0.007024049758911133 +} + +Knapsack global time execution for tests: 04m 49s + +Pending: (Failures listed here are expected and do not affect your suite's status) + + 1) GroupsController GET #new when creating subgroups and 
can_create_group is true and logged in as Admin behaves like member with ability to create subgroups renders the new page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:15 + + 2) GroupsController GET #new when creating subgroups and can_create_group is true and logged in as Owner behaves like member with ability to create subgroups renders the new page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:15 + + 3) GroupsController GET #new when creating subgroups and can_create_group is true and logged in as Guest behaves like member without ability to create subgroups renders the 404 page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:25 + + 4) GroupsController GET #new when creating subgroups and can_create_group is true and logged in as Developer behaves like member without ability to create subgroups renders the 404 page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:25 + + 5) GroupsController GET #new when creating subgroups and can_create_group is true and logged in as Master behaves like member without ability to create subgroups renders the 404 page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:25 + + 6) GroupsController GET #new when creating subgroups and can_create_group is false and logged in as Admin behaves like member with ability to create subgroups renders the new page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:15 + + 7) GroupsController GET #new when creating subgroups and can_create_group is false and logged in as Owner behaves like member with ability to create subgroups renders the new page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:15 + + 8) GroupsController GET #new when creating subgroups and can_create_group is false and logged in as Guest behaves like member without ability to create subgroups renders the 404 page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:25 + + 9) GroupsController GET #new when creating subgroups and can_create_group is false and logged in as Developer behaves like member without ability to create subgroups renders the 404 page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:25 + + 10) GroupsController GET #new when creating subgroups and can_create_group is false and logged in as Master behaves like member without ability to create subgroups renders the 404 page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:25 + + 11) GroupsController POST #create when creating subgroups and can_create_group is true and logged in as Owner creates the subgroup + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:117 + + 12) GroupsController POST #create when creating subgroups and can_create_group is true and logged in as Developer renders the new template + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # 
./spec/controllers/groups_controller_spec.rb:129 + + 13) GroupsController POST #create when creating subgroups and can_create_group is false and logged in as Owner creates the subgroup + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:117 + + 14) GroupsController POST #create when creating subgroups and can_create_group is false and logged in as Developer renders the new template + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:129 + + 15) GroupsController PUT transfer when transfering to a subgroup goes right should return a notice + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:516 + + 16) GroupsController PUT transfer when transfering to a subgroup goes right should redirect to the new path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:520 + + 17) GroupsController PUT transfer when converting to a root group goes right should return a notice + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:535 + + 18) GroupsController PUT transfer when converting to a root group goes right should redirect to the new path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:539 + + 19) GroupsController PUT transfer When the transfer goes wrong should return an alert + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:557 + + 20) GroupsController PUT transfer When the transfer goes wrong should redirect to the current path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:561 + + 21) GroupsController PUT transfer when the user is not allowed to transfer the group should be denied + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:577 + + 22) Groups::TransferService#execute when transforming a group into a root group behaves like ensuring allowed transfer for a group with other database than PostgreSQL should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:15 + + 23) Groups::TransferService#execute when transforming a group into a root group behaves like ensuring allowed transfer for a group with other database than PostgreSQL should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:19 + + 24) Groups::TransferService#execute when transforming a group into a root group behaves like ensuring allowed transfer for a group when there's an exception on Gitlab shell directories should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:33 + + 25) Groups::TransferService#execute when transforming a group into a root group behaves like ensuring allowed transfer for a group when there's an exception on Gitlab shell directories should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:37 + + 26) 
Groups::TransferService#execute when transforming a group into a root group when the group is already a root group should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:53 + + 27) Groups::TransferService#execute when transforming a group into a root group when the user does not have the right policies should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:62 + + 28) Groups::TransferService#execute when transforming a group into a root group when the user does not have the right policies should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:66 + + 29) Groups::TransferService#execute when transforming a group into a root group when there is a group with the same path should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:79 + + 30) Groups::TransferService#execute when transforming a group into a root group when there is a group with the same path should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:83 + + 31) Groups::TransferService#execute when transforming a group into a root group when the group is a subgroup and the transfer is valid should update group attributes + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:99 + + 32) Groups::TransferService#execute when transforming a group into a root group when the group is a subgroup and the transfer is valid should update group children path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:103 + + 33) Groups::TransferService#execute when transforming a group into a root group when the group is a subgroup and the transfer is valid should update group projects path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:109 + + 34) Groups::TransferService#execute when transferring a subgroup into another group behaves like ensuring allowed transfer for a group with other database than PostgreSQL should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:15 + + 35) Groups::TransferService#execute when transferring a subgroup into another group behaves like ensuring allowed transfer for a group with other database than PostgreSQL should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:19 + + 36) Groups::TransferService#execute when transferring a subgroup into another group behaves like ensuring allowed transfer for a group when there's an exception on Gitlab shell directories should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:33 + + 37) Groups::TransferService#execute when transferring a subgroup into another group behaves like ensuring allowed transfer for a group when there's an exception on Gitlab shell directories should add an error on group + # around hook at 
./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:37 + + 38) Groups::TransferService#execute when transferring a subgroup into another group when the new parent group is the same as the previous parent group should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:125 + + 39) Groups::TransferService#execute when transferring a subgroup into another group when the new parent group is the same as the previous parent group should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:129 + + 40) Groups::TransferService#execute when transferring a subgroup into another group when the user does not have the right policies should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:138 + + 41) Groups::TransferService#execute when transferring a subgroup into another group when the user does not have the right policies should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:142 + + 42) Groups::TransferService#execute when transferring a subgroup into another group when the parent has a group with the same path should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:155 + + 43) Groups::TransferService#execute when transferring a subgroup into another group when the parent has a group with the same path should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:159 + + 44) Groups::TransferService#execute when transferring a subgroup into another group when the parent group has a project with the same path should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:174 + + 45) Groups::TransferService#execute when transferring a subgroup into another group when the parent group has a project with the same path should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:178 + + 46) Groups::TransferService#execute when transferring a subgroup into another group when the group is allowed to be transferred should update visibility for the group based on the parent group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:212 + + 47) Groups::TransferService#execute when transferring a subgroup into another group when the group is allowed to be transferred should update parent group to the new parent + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:216 + + 48) Groups::TransferService#execute when transferring a subgroup into another group when the group is allowed to be transferred should return the group as children of the new parent + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:220 + + 49) Groups::TransferService#execute when transferring a subgroup into another group when the group is allowed to be transferred 
should create a redirect for the group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:225 + + 50) Groups::TransferService#execute when transferring a subgroup into another group when the group is allowed to be transferred when the group has a lower visibility than the parent group should not update the visibility for the group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:194 + + 51) Groups::TransferService#execute when transferring a subgroup into another group when the group is allowed to be transferred when the group has a higher visibility than the parent group should update visibility level based on the parent group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:205 + + 52) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with group descendants should update subgroups path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:239 + + 53) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with group descendants should create redirects for the subgroups + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:246 + + 54) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with group descendants when the new parent has a higher visibility than the children should not update the children visibility + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:253 + + 55) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with group descendants when the new parent has a lower visibility than the children should update children visibility to match the new parent + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:264 + + 56) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with project descendants should update projects path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:282 + + 57) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with project descendants should create permanent redirects for the projects + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:289 + + 58) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with project descendants when the new parent has a higher visibility than the projects should not update projects visibility + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:296 + + 59) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with project descendants when the new parent has a lower visibility than the projects should update projects visibility to match the new parent + # around hook at 
./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:307 + + 60) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with subgroups & projects descendants should update subgroups path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:327 + + 61) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with subgroups & projects descendants should update projects path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:334 + + 62) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with subgroups & projects descendants should create redirect for the subgroups and projects + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:341 + + 63) Groups::TransferService#execute when transferring a subgroup into another group when transfering a group with nested groups and projects should update subgroups path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:363 + + 64) Groups::TransferService#execute when transferring a subgroup into another group when transfering a group with nested groups and projects should update projects path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:375 + + 65) Groups::TransferService#execute when transferring a subgroup into another group when transfering a group with nested groups and projects should create redirect for the subgroups and projects + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:383 + + 66) Groups::TransferService#execute when transferring a subgroup into another group when updating the group goes wrong should restore group and projects visibility + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:405 + + 67) GroupsHelper group_title outputs the groups in the correct order + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:106 + + 68) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :root_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 69) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 70) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :sub_sub_owner, help_text: 
:default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 71) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :root_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 72) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 73) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :sub_sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 74) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :root_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :subgroup has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 75) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :sub_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :subgroup has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 76) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :sub_sub_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :subgroup has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 77) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :root_owner, help_text: :ancestor_locked_but_you_can_override, linked_ancestor: :subgroup has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 78) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :sub_owner, help_text: :ancestor_locked_but_you_can_override, linked_ancestor: 
:subgroup has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 79) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :sub_sub_owner, help_text: :ancestor_locked_so_ask_the_owner, linked_ancestor: :subgroup has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 80) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :root_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 81) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 82) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :sub_sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 83) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :root_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 84) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 -path -Set for your local app (/usr/local/bundle/config): "vendor" -Set via BUNDLE_PATH: "/usr/local/bundle" + 85) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :sub_sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 -jobs -Set for your local app (/usr/local/bundle/config): "2" + 86) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :root_owner, help_text: 
:ancestor_locked_and_has_been_overridden, linked_ancestor: :root_group has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 -clean -Set for your local app (/usr/local/bundle/config): "true" + 87) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :sub_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :root_group has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 -without -Set for your local app (/usr/local/bundle/config): [:production] + 88) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :sub_sub_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :root_group has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 -silence_root_warning -Set via BUNDLE_SILENCE_ROOT_WARNING: true + 89) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :root_owner, help_text: :ancestor_locked_but_you_can_override, linked_ancestor: :root_group has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 -app_config -Set via BUNDLE_APP_CONFIG: "/usr/local/bundle" + 90) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :sub_owner, help_text: :ancestor_locked_so_ask_the_owner, linked_ancestor: :root_group has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 -install_flags -Set via BUNDLE_INSTALL_FLAGS: "--without=production --jobs=2 --path=vendor --retry=3 --quiet" + 91) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :sub_sub_owner, help_text: :ancestor_locked_so_ask_the_owner, linked_ancestor: :root_group has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 -bin -Set via BUNDLE_BIN: "/usr/local/bundle/bin" + 92) AutocompleteUsersFinder#execute when passed a subgroup includes users from parent groups as well + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/finders/autocomplete_users_finder_spec.rb:55 -gemfile -Set via BUNDLE_GEMFILE: "/builds/gitlab-org/gitlab-ce/Gemfile" +Finished in 5 minutes 7 seconds (files took 16.6 seconds to load) +819 examples, 0 failures, 92 pending -section_end:1517486961:build_script -[0Ksection_start:1517486961:after_script -[0Ksection_end:1517486962:after_script -[0Ksection_start:1517486962:upload_artifacts +section_end:1522927514:build_script 
+[0Ksection_start:1522927514:after_script +[0K[32;1mRunning after script...[0;m +[32;1m$ date[0;m +Thu Apr 5 11:25:14 UTC 2018 +section_end:1522927515:after_script +[0Ksection_start:1522927515:archive_cache +[0K[32;1mNot uploading cache ruby-2.3.6-with-yarn due to policy[0;m +section_end:1522927516:archive_cache +[0Ksection_start:1522927516:upload_artifacts [0K[32;1mUploading artifacts...[0;m -[0;33mWARNING: coverage/: no matching files [0;m +coverage/: found 5 matching files [0;m knapsack/: found 5 matching files [0;m +rspec_flaky/: found 4 matching files [0;m [0;33mWARNING: tmp/capybara/: no matching files [0;m -Uploading artifacts to coordinator... ok [0;m id[0;m=50551722 responseStatus[0;m=201 Created token[0;m=XkN753rp -section_end:1517486963:upload_artifacts -[0K[31;1mERROR: Job failed: exit code 1 -[0;m
\ No newline at end of file +Uploading artifacts to coordinator... ok [0;m id[0;m=61303283 responseStatus[0;m=201 Created token[0;m=rusBKvxM +section_end:1522927520:upload_artifacts +[0K[32;1mJob succeeded +[0;m diff --git a/spec/helpers/diff_helper_spec.rb b/spec/helpers/diff_helper_spec.rb index 15cbe36ae76..53c010fa0db 100644 --- a/spec/helpers/diff_helper_spec.rb +++ b/spec/helpers/diff_helper_spec.rb @@ -135,11 +135,37 @@ describe DiffHelper do it "returns strings with marked inline diffs" do marked_old_line, marked_new_line = mark_inline_diffs(old_line, new_line) - expect(marked_old_line).to eq(%q{abc <span class="idiff left right deletion">'def'</span>}) + expect(marked_old_line).to eq(%q{abc <span class="idiff left right deletion">'def'</span>}) expect(marked_old_line).to be_html_safe - expect(marked_new_line).to eq(%q{abc <span class="idiff left right addition">"def"</span>}) + expect(marked_new_line).to eq(%q{abc <span class="idiff left right addition">"def"</span>}) expect(marked_new_line).to be_html_safe end + + context 'when given HTML' do + it 'sanitizes it' do + old_line = %{test.txt} + new_line = %{<img src=x onerror=alert(document.domain)>} + + marked_old_line, marked_new_line = mark_inline_diffs(old_line, new_line) + + expect(marked_old_line).to eq(%q{<span class="idiff left right deletion">test.txt</span>}) + expect(marked_old_line).to be_html_safe + expect(marked_new_line).to eq(%q{<span class="idiff left right addition"><img src=x onerror=alert(document.domain)></span>}) + expect(marked_new_line).to be_html_safe + end + + it 'sanitizes the entire line, not just the changes' do + old_line = %{<img src=x onerror=alert(document.domain)>} + new_line = %{<img src=y onerror=alert(document.domain)>} + + marked_old_line, marked_new_line = mark_inline_diffs(old_line, new_line) + + expect(marked_old_line).to eq(%q{<img src=<span class="idiff left right deletion">x</span> onerror=alert(document.domain)>}) + expect(marked_old_line).to be_html_safe + expect(marked_new_line).to eq(%q{<img src=<span class="idiff left right addition">y</span> onerror=alert(document.domain)>}) + expect(marked_new_line).to be_html_safe + end + end end describe '#parallel_diff_discussions' do diff --git a/spec/helpers/icons_helper_spec.rb b/spec/helpers/icons_helper_spec.rb index 2f23ed55d99..93d8e672f8c 100644 --- a/spec/helpers/icons_helper_spec.rb +++ b/spec/helpers/icons_helper_spec.rb @@ -162,4 +162,11 @@ describe IconsHelper do expect(file_type_icon_class('file', 0, 'CHANGELOG')).to eq 'file-text-o' end end + + describe '#external_snippet_icon' do + it 'returns external snippet icon' do + expect(external_snippet_icon('download').to_s) + .to eq("<span class=\"gl-snippet-icon gl-snippet-icon-download\"></span>") + end + end end diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb index 2fecd1a3d27..7b59fde999d 100644 --- a/spec/helpers/issuables_helper_spec.rb +++ b/spec/helpers/issuables_helper_spec.rb @@ -22,11 +22,15 @@ describe IssuablesHelper do end describe '#issuable_labels_tooltip' do - it 'returns label text' do + it 'returns label text with no labels' do + expect(issuable_labels_tooltip([])).to eq("Labels") + end + + it 'returns label text with labels within max limit' do expect(issuable_labels_tooltip([label])).to eq(label.title) end - it 'returns label text' do + it 'returns label text with labels exceeding max limit' do expect(issuable_labels_tooltip([label, label2], limit: 1)).to eq("#{label.title}, and 1 more") end end @@ -40,22 +44,22 @@ describe 
IssuablesHelper do end it 'returns "Open" when state is :opened' do - expect(helper.issuables_state_counter_text(:issues, :opened)) + expect(helper.issuables_state_counter_text(:issues, :opened, true)) .to eq('<span>Open</span> <span class="badge">42</span>') end it 'returns "Closed" when state is :closed' do - expect(helper.issuables_state_counter_text(:issues, :closed)) + expect(helper.issuables_state_counter_text(:issues, :closed, true)) .to eq('<span>Closed</span> <span class="badge">42</span>') end it 'returns "Merged" when state is :merged' do - expect(helper.issuables_state_counter_text(:merge_requests, :merged)) + expect(helper.issuables_state_counter_text(:merge_requests, :merged, true)) .to eq('<span>Merged</span> <span class="badge">42</span>') end it 'returns "All" when state is :all' do - expect(helper.issuables_state_counter_text(:merge_requests, :all)) + expect(helper.issuables_state_counter_text(:merge_requests, :all, true)) .to eq('<span>All</span> <span class="badge">42</span>') end end @@ -101,27 +105,6 @@ describe IssuablesHelper do end end - describe '#issuable_filter_present?' do - it 'returns true when any key is present' do - allow(helper).to receive(:params).and_return( - ActionController::Parameters.new(milestone_title: 'Velit consectetur asperiores natus delectus.', - project_id: 'gitlabhq', - scope: 'all') - ) - - expect(helper.issuable_filter_present?).to be_truthy - end - - it 'returns false when no key is present' do - allow(helper).to receive(:params).and_return( - ActionController::Parameters.new(project_id: 'gitlabhq', - scope: 'all') - ) - - expect(helper.issuable_filter_present?).to be_falsey - end - end - describe '#updated_at_by' do let(:user) { create(:user) } let(:unedited_issuable) { create(:issue) } diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb index aeef5352333..8bb2e234e9a 100644 --- a/spec/helpers/issues_helper_spec.rb +++ b/spec/helpers/issues_helper_spec.rb @@ -96,13 +96,32 @@ describe IssuesHelper do describe '#award_state_class' do let!(:upvote) { create(:award_emoji) } + let(:awardable) { upvote.awardable } + let(:user) { upvote.user } + + before do + allow(helper).to receive(:can?) do |*args| + Ability.allowed?(*args) + end + end it "returns disabled string for unauthenticated user" do - expect(award_state_class(AwardEmoji.all, nil)).to eq("disabled") + expect(helper.award_state_class(awardable, AwardEmoji.all, nil)).to eq("disabled") + end + + it "returns disabled for a user that does not have access to the awardable" do + expect(helper.award_state_class(awardable, AwardEmoji.all, build(:user))).to eq("disabled") end it "returns active string for author" do - expect(award_state_class(AwardEmoji.all, upvote.user)).to eq("active") + expect(helper.award_state_class(awardable, AwardEmoji.all, upvote.user)).to eq("active") + end + + it "is blank for a user that has access to the awardable" do + user = build(:user) + expect(helper).to receive(:can?).with(user, :award_emoji, awardable).and_return(true) + + expect(helper.award_state_class(awardable, AwardEmoji.all, user)).to be_blank end end @@ -144,4 +163,26 @@ describe IssuesHelper do end end end + + describe '#show_new_issue_link?' 
do + before do + allow(helper).to receive(:current_user) + end + + it 'is false when there is no project' do + expect(helper.show_new_issue_link?(nil)).to be_falsey + end + + it 'is true when there is a project and no logged in user' do + expect(helper.show_new_issue_link?(build(:project))).to be_truthy + end + + it 'is true when the current user can create issues in the project' do + project = build(:project) + allow(helper).to receive(:current_user).and_return(project.owner) + + expect(helper).to receive(:can?).with(project.owner, :create_issue, project).and_return(true) + expect(helper.show_new_issue_link?(project)).to be_truthy + end + end end diff --git a/spec/helpers/milestones_helper_spec.rb b/spec/helpers/milestones_helper_spec.rb index 70b4a89cb86..f5185cb2857 100644 --- a/spec/helpers/milestones_helper_spec.rb +++ b/spec/helpers/milestones_helper_spec.rb @@ -83,58 +83,4 @@ describe MilestonesHelper do end end end - - describe '#milestone_remaining_days' do - around do |example| - Timecop.freeze(Time.utc(2017, 3, 17)) { example.run } - end - - context 'when less than 31 days remaining' do - let(:milestone_remaining) { milestone_remaining_days(build_stubbed(:milestone, due_date: 12.days.from_now.utc)) } - - it 'returns days remaining' do - expect(milestone_remaining).to eq("<strong>12</strong> days remaining") - end - end - - context 'when less than 1 year and more than 30 days remaining' do - let(:milestone_remaining) { milestone_remaining_days(build_stubbed(:milestone, due_date: 2.months.from_now.utc)) } - - it 'returns months remaining' do - expect(milestone_remaining).to eq("<strong>2</strong> months remaining") - end - end - - context 'when more than 1 year remaining' do - let(:milestone_remaining) { milestone_remaining_days(build_stubbed(:milestone, due_date: (1.year.from_now + 2.days).utc)) } - - it 'returns years remaining' do - expect(milestone_remaining).to eq("<strong>1</strong> year remaining") - end - end - - context 'when milestone is expired' do - let(:milestone_remaining) { milestone_remaining_days(build_stubbed(:milestone, due_date: 2.days.ago.utc)) } - - it 'returns "Past due"' do - expect(milestone_remaining).to eq("<strong>Past due</strong>") - end - end - - context 'when milestone has start_date in the future' do - let(:milestone_remaining) { milestone_remaining_days(build_stubbed(:milestone, start_date: 2.days.from_now.utc)) } - - it 'returns "Upcoming"' do - expect(milestone_remaining).to eq("<strong>Upcoming</strong>") - end - end - - context 'when milestone has start_date in the past' do - let(:milestone_remaining) { milestone_remaining_days(build_stubbed(:milestone, start_date: 2.days.ago.utc)) } - - it 'returns days elapsed' do - expect(milestone_remaining).to eq("<strong>2</strong> days elapsed") - end - end - end end diff --git a/spec/helpers/page_layout_helper_spec.rb b/spec/helpers/page_layout_helper_spec.rb index baf927a9acc..b77114a8152 100644 --- a/spec/helpers/page_layout_helper_spec.rb +++ b/spec/helpers/page_layout_helper_spec.rb @@ -50,6 +50,11 @@ describe PageLayoutHelper do allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development')) expect(helper.favicon).to eq 'favicon-blue.ico' end + + it 'has yellow favicon for canary' do + stub_env('CANARY', 'true') + expect(helper.favicon).to eq 'favicon-yellow.ico' + end end describe 'page_image' do diff --git a/spec/helpers/preferences_helper_spec.rb b/spec/helpers/preferences_helper_spec.rb index e2a0c4322ff..c9d2ec8a4ae 100644 ---
a/spec/helpers/preferences_helper_spec.rb +++ b/spec/helpers/preferences_helper_spec.rb @@ -21,7 +21,9 @@ describe PreferencesHelper do ["Your Projects' Activity", 'project_activity'], ["Starred Projects' Activity", 'starred_project_activity'], ["Your Groups", 'groups'], - ["Your Todos", 'todos'] + ["Your Todos", 'todos'], + ["Assigned Issues", 'issues'], + ["Assigned Merge Requests", 'merge_requests'] ] end end diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb index ce96e90e2d7..46c55da24f8 100644 --- a/spec/helpers/projects_helper_spec.rb +++ b/spec/helpers/projects_helper_spec.rb @@ -322,74 +322,6 @@ describe ProjectsHelper do end end - describe "#project_feature_access_select" do - let(:project) { create(:project, :public) } - let(:user) { create(:user) } - - context "when project is internal or public" do - it "shows all options" do - helper.instance_variable_set(:@project, project) - result = helper.project_feature_access_select(:issues_access_level) - expect(result).to include("Disabled") - expect(result).to include("Only team members") - expect(result).to include("Everyone with access") - end - end - - context "when project is private" do - before do - project.update_attributes(visibility_level: Gitlab::VisibilityLevel::PRIVATE) - end - - it "shows only allowed options" do - helper.instance_variable_set(:@project, project) - result = helper.project_feature_access_select(:issues_access_level) - expect(result).to include("Disabled") - expect(result).to include("Only team members") - expect(result).to have_selector('option[disabled]', text: "Everyone with access") - end - end - - context "when project moves from public to private" do - before do - project.update_attributes(visibility_level: Gitlab::VisibilityLevel::PRIVATE) - end - - it "shows the highest allowed level selected" do - helper.instance_variable_set(:@project, project) - result = helper.project_feature_access_select(:issues_access_level) - - expect(result).to include("Disabled") - expect(result).to include("Only team members") - expect(result).to have_selector('option[disabled]', text: "Everyone with access") - expect(result).to have_selector('option[selected]', text: "Only team members") - end - end - end - - describe "#visibility_select_options" do - let(:project) { create(:project, :repository) } - let(:user) { create(:user) } - - before do - allow(helper).to receive(:current_user).and_return(user) - - stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC]) - end - - it "does not include the Public restricted level" do - expect(helper.send(:visibility_select_options, project, Gitlab::VisibilityLevel::PRIVATE)).not_to include('Public') - end - - it "includes the Internal level" do - expect(helper.send(:visibility_select_options, project, Gitlab::VisibilityLevel::PRIVATE)).to include('Internal') - end - - it "includes the Private level" do - expect(helper.send(:visibility_select_options, project, Gitlab::VisibilityLevel::PRIVATE)).to include('Private') - end - end - describe '#get_project_nav_tabs' do let(:project) { create(:project) } let(:user) { create(:user) } diff --git a/spec/helpers/snippets_helper_spec.rb b/spec/helpers/snippets_helper_spec.rb new file mode 100644 index 00000000000..0323ffb641c --- /dev/null +++ b/spec/helpers/snippets_helper_spec.rb @@ -0,0 +1,33 @@ +require 'spec_helper' + +describe SnippetsHelper do + include IconsHelper + + describe '#embedded_snippet_raw_button' do + it 'gives view raw button of embedded snippets for 
project snippets' do + @snippet = create(:project_snippet, :public) + + expect(embedded_snippet_raw_button.to_s).to eq("<a class=\"btn\" target=\"_blank\" rel=\"noopener noreferrer\" title=\"Open raw\" href=\"#{raw_project_snippet_url(@snippet.project, @snippet)}\">#{external_snippet_icon('doc_code')}</a>") + end + + it 'gives view raw button of embedded snippets for personal snippets' do + @snippet = create(:personal_snippet, :public) + + expect(embedded_snippet_raw_button.to_s).to eq("<a class=\"btn\" target=\"_blank\" rel=\"noopener noreferrer\" title=\"Open raw\" href=\"#{raw_snippet_url(@snippet)}\">#{external_snippet_icon('doc_code')}</a>") + end + end + + describe '#embedded_snippet_download_button' do + it 'gives download button of embedded snippets for project snippets' do + @snippet = create(:project_snippet, :public) + + expect(embedded_snippet_download_button.to_s).to eq("<a class=\"btn\" target=\"_blank\" title=\"Download\" rel=\"noopener noreferrer\" href=\"#{raw_project_snippet_url(@snippet.project, @snippet, inline: false)}\">#{external_snippet_icon('download')}</a>") + end + + it 'gives download button of embedded snippets for personal snippets' do + @snippet = create(:personal_snippet, :public) + + expect(embedded_snippet_download_button.to_s).to eq("<a class=\"btn\" target=\"_blank\" title=\"Download\" rel=\"noopener noreferrer\" href=\"#{raw_snippet_url(@snippet, inline: false)}\">#{external_snippet_icon('download')}</a>") + end + end +end diff --git a/spec/helpers/tree_helper_spec.rb b/spec/helpers/tree_helper_spec.rb index ccac6e29447..ffdf6561a53 100644 --- a/spec/helpers/tree_helper_spec.rb +++ b/spec/helpers/tree_helper_spec.rb @@ -8,6 +8,7 @@ describe TreeHelper do describe '.render_tree' do before do @id = sha + @path = "" @project = project @lfs_blob_ids = [] end @@ -61,6 +62,15 @@ describe TreeHelper do end end end + + context 'when the root path contains a plus character' do + let(:root_path) { 'gtk/C++' } + let(:tree_item) { double(flat_path: 'gtk/C++/glade') } + + it 'returns the flattened path' do + expect(subject).to eq('glade') + end + end end describe '#commit_in_single_accessible_branch' do diff --git a/spec/initializers/6_validations_spec.rb b/spec/initializers/6_validations_spec.rb index 83283f03940..1dc307ea922 100644 --- a/spec/initializers/6_validations_spec.rb +++ b/spec/initializers/6_validations_spec.rb @@ -15,7 +15,7 @@ describe '6_validations' do describe 'validate_storages_config' do context 'with correct settings' do before do - mock_storages('foo' => { 'path' => 'tmp/tests/paths/a/b/c' }, 'bar' => { 'path' => 'tmp/tests/paths/a/b/d' }) + mock_storages('foo' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/c'), 'bar' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/d')) end it 'passes through' do @@ -25,7 +25,7 @@ describe '6_validations' do context 'when one of the settings is incorrect' do before do - mock_storages('foo' => { 'path' => 'tmp/tests/paths/a/b/c', 'failure_count_threshold' => 'not a number' }) + mock_storages('foo' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/c', 'failure_count_threshold' => 'not a number')) end it 'throws an error' do @@ -35,7 +35,7 @@ describe '6_validations' do context 'with invalid storage names' do before do - mock_storages('name with spaces' => { 'path' => 'tmp/tests/paths/a/b/c' }) + mock_storages('name with spaces' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/c')) end it 'throws an error' 
do @@ -67,7 +67,7 @@ describe '6_validations' do describe 'validate_storages_paths' do context 'with correct settings' do before do - mock_storages('foo' => { 'path' => 'tmp/tests/paths/a/b/c' }, 'bar' => { 'path' => 'tmp/tests/paths/a/b/d' }) + mock_storages('foo' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/c'), 'bar' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/d')) end it 'passes through' do @@ -77,7 +77,7 @@ describe '6_validations' do context 'with nested storage paths' do before do - mock_storages('foo' => { 'path' => 'tmp/tests/paths/a/b/c' }, 'bar' => { 'path' => 'tmp/tests/paths/a/b/c/d' }) + mock_storages('foo' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/c'), 'bar' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/c/d')) end it 'throws an error' do @@ -87,7 +87,7 @@ describe '6_validations' do context 'with similar but un-nested storage paths' do before do - mock_storages('foo' => { 'path' => 'tmp/tests/paths/a/b/c' }, 'bar' => { 'path' => 'tmp/tests/paths/a/b/c2' }) + mock_storages('foo' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/c'), 'bar' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/c2')) end it 'passes through' do @@ -97,7 +97,7 @@ describe '6_validations' do describe 'inaccessible storage' do before do - mock_storages('foo' => { 'path' => 'tmp/tests/a/path/that/does/not/exist' }) + mock_storages('foo' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/a/path/that/does/not/exist')) end it 'passes through with a warning' do diff --git a/spec/initializers/artifacts_direct_upload_support_spec.rb b/spec/initializers/artifacts_direct_upload_support_spec.rb new file mode 100644 index 00000000000..bfb71da3388 --- /dev/null +++ b/spec/initializers/artifacts_direct_upload_support_spec.rb @@ -0,0 +1,71 @@ +require 'spec_helper' + +describe 'Artifacts direct upload support' do + subject do + load Rails.root.join('config/initializers/artifacts_direct_upload_support.rb') + end + + let(:connection) do + { provider: provider } + end + + before do + stub_artifacts_setting( + object_store: { + enabled: enabled, + direct_upload: direct_upload, + connection: connection + }) + end + + context 'when object storage is enabled' do + let(:enabled) { true } + + context 'when direct upload is enabled' do + let(:direct_upload) { true } + + context 'when provider is Google' do + let(:provider) { 'Google' } + + it 'succeeds' do + expect { subject }.not_to raise_error + end + end + + context 'when connection is empty' do + let(:connection) { nil } + + it 'raises an error' do + expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/ + end + end + + context 'when other provider is used' do + let(:provider) { 'AWS' } + + it 'raises an error' do + expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/ + end + end + end + + context 'when direct upload is disabled' do + let(:direct_upload) { false } + let(:provider) { 'AWS' } + + it 'succeeds' do + expect { subject }.not_to raise_error + end + end + end + + context 'when object storage is disabled' do + let(:enabled) { false } + let(:direct_upload) { false } + let(:provider) { 'AWS' } + + it 'succeeds' do + expect { subject }.not_to raise_error + end + end +end diff --git a/spec/initializers/fog_google_https_private_urls_spec.rb 
b/spec/initializers/fog_google_https_private_urls_spec.rb new file mode 100644 index 00000000000..de3c157ab7b --- /dev/null +++ b/spec/initializers/fog_google_https_private_urls_spec.rb @@ -0,0 +1,24 @@ +require 'spec_helper' + +describe 'Fog::Storage::GoogleXML::File' do + let(:storage) do + Fog.mock! + Fog::Storage.new({ + google_storage_access_key_id: "asdf", + google_storage_secret_access_key: "asdf", + provider: "Google" + }) + end + + let(:file) do + directory = storage.directories.create(key: 'data') + directory.files.create( + body: 'Hello World!', + key: 'hello_world.txt' + ) + end + + it 'delegates to #get_https_url' do + expect(file.url(Time.now)).to start_with("https://") + end +end diff --git a/spec/initializers/gollum_spec.rb b/spec/initializers/gollum_spec.rb deleted file mode 100644 index adf824a8947..00000000000 --- a/spec/initializers/gollum_spec.rb +++ /dev/null @@ -1,62 +0,0 @@ -require 'spec_helper' - -describe 'gollum' do - let(:project) { create(:project) } - let(:user) { project.owner } - let(:wiki) { ProjectWiki.new(project, user) } - let(:gollum_wiki) { Gollum::Wiki.new(wiki.repository.path) } - - before do - create_page(page_name, 'content1') - end - - after do - destroy_page(page_name) - end - - context 'with simple paths' do - let(:page_name) { 'page1' } - - it 'returns the entry hash if it matches the file name' do - expect(tree_entry(page_name)).not_to be_nil - end - - it 'returns nil if the path does not fit completely' do - expect(tree_entry("foo/#{page_name}")).to be_nil - end - end - - context 'with complex paths' do - let(:page_name) { '/foo/bar/page2' } - - it 'returns the entry hash if it matches the file name' do - expect(tree_entry(page_name)).not_to be_nil - end - - it 'returns nil if the path does not fit completely' do - expect(tree_entry("foo1/bar/page2")).to be_nil - expect(tree_entry("foo/bar1/page2")).to be_nil - end - end - - def tree_entry(name) - gollum_wiki.repo.git.tree_entry(wiki_commits[0].commit, name + '.md') - end - - def wiki_commits - gollum_wiki.repo.commits - end - - def commit_details - Gitlab::Git::Wiki::CommitDetails.new(user.name, user.email, "test commit") - end - - def create_page(name, content) - wiki.wiki.write_page(name, :markdown, content, commit_details) - end - - def destroy_page(name) - page = wiki.find_page(name).page - wiki.delete_page(page, "test commit") - end -end diff --git a/spec/initializers/settings_spec.rb b/spec/initializers/settings_spec.rb index 838ca9fabef..57f5adbbc40 100644 --- a/spec/initializers/settings_spec.rb +++ b/spec/initializers/settings_spec.rb @@ -1,5 +1,5 @@ require 'spec_helper' -require_relative '../../config/initializers/1_settings' +require_relative '../../config/initializers/1_settings' unless defined?(Settings) describe Settings do describe '#ldap' do diff --git a/spec/javascripts/api_spec.js b/spec/javascripts/api_spec.js index 5477581c1b9..3d7ccf432be 100644 --- a/spec/javascripts/api_spec.js +++ b/spec/javascripts/api_spec.js @@ -35,14 +35,14 @@ describe('Api', () => { }); describe('group', () => { - it('fetches a group', (done) => { + it('fetches a group', done => { const groupId = '123456'; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}`; mock.onGet(expectedUrl).reply(200, { name: 'test', }); - Api.group(groupId, (response) => { + Api.group(groupId, response => { expect(response.name).toBe('test'); done(); }); @@ -50,15 +50,17 @@ describe('Api', () => { }); describe('groups', () => { - it('fetches groups', (done) => { + it('fetches groups', done => 
{ const query = 'dummy query'; const options = { unused: 'option' }; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups.json`; - mock.onGet(expectedUrl).reply(200, [{ - name: 'test', - }]); + mock.onGet(expectedUrl).reply(200, [ + { + name: 'test', + }, + ]); - Api.groups(query, options, (response) => { + Api.groups(query, options, response => { expect(response.length).toBe(1); expect(response[0].name).toBe('test'); done(); @@ -67,14 +69,16 @@ describe('Api', () => { }); describe('namespaces', () => { - it('fetches namespaces', (done) => { + it('fetches namespaces', done => { const query = 'dummy query'; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/namespaces.json`; - mock.onGet(expectedUrl).reply(200, [{ - name: 'test', - }]); + mock.onGet(expectedUrl).reply(200, [ + { + name: 'test', + }, + ]); - Api.namespaces(query, (response) => { + Api.namespaces(query, response => { expect(response.length).toBe(1); expect(response[0].name).toBe('test'); done(); @@ -83,31 +87,35 @@ describe('Api', () => { }); describe('projects', () => { - it('fetches projects with membership when logged in', (done) => { + it('fetches projects with membership when logged in', done => { const query = 'dummy query'; const options = { unused: 'option' }; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`; window.gon.current_user_id = 1; - mock.onGet(expectedUrl).reply(200, [{ - name: 'test', - }]); + mock.onGet(expectedUrl).reply(200, [ + { + name: 'test', + }, + ]); - Api.projects(query, options, (response) => { + Api.projects(query, options, response => { expect(response.length).toBe(1); expect(response[0].name).toBe('test'); done(); }); }); - it('fetches projects without membership when not logged in', (done) => { + it('fetches projects without membership when not logged in', done => { const query = 'dummy query'; const options = { unused: 'option' }; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`; - mock.onGet(expectedUrl).reply(200, [{ - name: 'test', - }]); + mock.onGet(expectedUrl).reply(200, [ + { + name: 'test', + }, + ]); - Api.projects(query, options, (response) => { + Api.projects(query, options, response => { expect(response.length).toBe(1); expect(response[0].name).toBe('test'); done(); @@ -115,8 +123,65 @@ describe('Api', () => { }); }); + describe('mergerequest', () => { + it('fetches a merge request', done => { + const projectPath = 'abc'; + const mergeRequestId = '123456'; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}`; + mock.onGet(expectedUrl).reply(200, { + title: 'test', + }); + + Api.mergeRequest(projectPath, mergeRequestId) + .then(({ data }) => { + expect(data.title).toBe('test'); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('mergerequest changes', () => { + it('fetches the changes of a merge request', done => { + const projectPath = 'abc'; + const mergeRequestId = '123456'; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}/changes`; + mock.onGet(expectedUrl).reply(200, { + title: 'test', + }); + + Api.mergeRequestChanges(projectPath, mergeRequestId) + .then(({ data }) => { + expect(data.title).toBe('test'); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('mergerequest versions', () => { + it('fetches the versions of a merge request', done => { + const projectPath = 'abc'; + const mergeRequestId = '123456'; + const expectedUrl = 
`${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}/versions`; + mock.onGet(expectedUrl).reply(200, [ + { + id: 123, + }, + ]); + + Api.mergeRequestVersions(projectPath, mergeRequestId) + .then(({ data }) => { + expect(data.length).toBe(1); + expect(data[0].id).toBe(123); + }) + .then(done) + .catch(done.fail); + }); + }); + describe('newLabel', () => { - it('creates a new label', (done) => { + it('creates a new label', done => { const namespace = 'some namespace'; const project = 'some project'; const labelData = { some: 'data' }; @@ -124,36 +189,42 @@ describe('Api', () => { const expectedData = { label: labelData, }; - mock.onPost(expectedUrl).reply((config) => { + mock.onPost(expectedUrl).reply(config => { expect(config.data).toBe(JSON.stringify(expectedData)); - return [200, { - name: 'test', - }]; + return [ + 200, + { + name: 'test', + }, + ]; }); - Api.newLabel(namespace, project, labelData, (response) => { + Api.newLabel(namespace, project, labelData, response => { expect(response.name).toBe('test'); done(); }); }); - it('creates a group label', (done) => { + it('creates a group label', done => { const namespace = 'group/subgroup'; const labelData = { some: 'data' }; const expectedUrl = `${dummyUrlRoot}/groups/${namespace}/-/labels`; const expectedData = { label: labelData, }; - mock.onPost(expectedUrl).reply((config) => { + mock.onPost(expectedUrl).reply(config => { expect(config.data).toBe(JSON.stringify(expectedData)); - return [200, { - name: 'test', - }]; + return [ + 200, + { + name: 'test', + }, + ]; }); - Api.newLabel(namespace, undefined, labelData, (response) => { + Api.newLabel(namespace, undefined, labelData, response => { expect(response.name).toBe('test'); done(); }); @@ -161,15 +232,17 @@ describe('Api', () => { }); describe('groupProjects', () => { - it('fetches group projects', (done) => { + it('fetches group projects', done => { const groupId = '123456'; const query = 'dummy query'; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/projects.json`; - mock.onGet(expectedUrl).reply(200, [{ - name: 'test', - }]); + mock.onGet(expectedUrl).reply(200, [ + { + name: 'test', + }, + ]); - Api.groupProjects(groupId, query, (response) => { + Api.groupProjects(groupId, query, response => { expect(response.length).toBe(1); expect(response[0].name).toBe('test'); done(); @@ -178,13 +251,13 @@ describe('Api', () => { }); describe('licenseText', () => { - it('fetches a license text', (done) => { + it('fetches a license text', done => { const licenseKey = "driver's license"; const data = { unused: 'option' }; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/licenses/${licenseKey}`; mock.onGet(expectedUrl).reply(200, 'test'); - Api.licenseText(licenseKey, data, (response) => { + Api.licenseText(licenseKey, data, response => { expect(response).toBe('test'); done(); }); @@ -192,12 +265,12 @@ describe('Api', () => { }); describe('gitignoreText', () => { - it('fetches a gitignore text', (done) => { + it('fetches a gitignore text', done => { const gitignoreKey = 'ignore git'; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/gitignores/${gitignoreKey}`; mock.onGet(expectedUrl).reply(200, 'test'); - Api.gitignoreText(gitignoreKey, (response) => { + Api.gitignoreText(gitignoreKey, response => { expect(response).toBe('test'); done(); }); @@ -205,12 +278,12 @@ describe('Api', () => { }); describe('gitlabCiYml', () => { - it('fetches a .gitlab-ci.yml', (done) => { + it('fetches a 
.gitlab-ci.yml', done => { const gitlabCiYmlKey = 'Y CI ML'; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/gitlab_ci_ymls/${gitlabCiYmlKey}`; mock.onGet(expectedUrl).reply(200, 'test'); - Api.gitlabCiYml(gitlabCiYmlKey, (response) => { + Api.gitlabCiYml(gitlabCiYmlKey, response => { expect(response).toBe('test'); done(); }); @@ -218,12 +291,12 @@ describe('Api', () => { }); describe('dockerfileYml', () => { - it('fetches a Dockerfile', (done) => { + it('fetches a Dockerfile', done => { const dockerfileYmlKey = 'a giant whale'; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/dockerfiles/${dockerfileYmlKey}`; mock.onGet(expectedUrl).reply(200, 'test'); - Api.dockerfileYml(dockerfileYmlKey, (response) => { + Api.dockerfileYml(dockerfileYmlKey, response => { expect(response).toBe('test'); done(); }); @@ -231,12 +304,14 @@ describe('Api', () => { }); describe('issueTemplate', () => { - it('fetches an issue template', (done) => { + it('fetches an issue template', done => { const namespace = 'some namespace'; const project = 'some project'; const templateKey = ' template #%?.key '; const templateType = 'template type'; - const expectedUrl = `${dummyUrlRoot}/${namespace}/${project}/templates/${templateType}/${encodeURIComponent(templateKey)}`; + const expectedUrl = `${dummyUrlRoot}/${namespace}/${project}/templates/${templateType}/${encodeURIComponent( + templateKey, + )}`; mock.onGet(expectedUrl).reply(200, 'test'); Api.issueTemplate(namespace, project, templateKey, templateType, (error, response) => { @@ -247,13 +322,15 @@ describe('Api', () => { }); describe('users', () => { - it('fetches users', (done) => { + it('fetches users', done => { const query = 'dummy query'; const options = { unused: 'option' }; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users.json`; - mock.onGet(expectedUrl).reply(200, [{ - name: 'test', - }]); + mock.onGet(expectedUrl).reply(200, [ + { + name: 'test', + }, + ]); Api.users(query, options) .then(({ data }) => { diff --git a/spec/javascripts/badges/components/badge_form_spec.js b/spec/javascripts/badges/components/badge_form_spec.js new file mode 100644 index 00000000000..dd21ec279cb --- /dev/null +++ b/spec/javascripts/badges/components/badge_form_spec.js @@ -0,0 +1,171 @@ +import Vue from 'vue'; +import store from '~/badges/store'; +import BadgeForm from '~/badges/components/badge_form.vue'; +import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import { createDummyBadge } from '../dummy_badge'; + +describe('BadgeForm component', () => { + const Component = Vue.extend(BadgeForm); + let vm; + + beforeEach(() => { + setFixtures(` + <div id="dummy-element"></div> + `); + }); + + afterEach(() => { + vm.$destroy(); + }); + + describe('methods', () => { + beforeEach(() => { + vm = mountComponentWithStore(Component, { + el: '#dummy-element', + store, + props: { + isEditing: false, + }, + }); + }); + + describe('onCancel', () => { + it('calls stopEditing', () => { + spyOn(vm, 'stopEditing'); + + vm.onCancel(); + + expect(vm.stopEditing).toHaveBeenCalled(); + }); + }); + + describe('onSubmit', () => { + describe('if isEditing is true', () => { + beforeEach(() => { + spyOn(vm, 'saveBadge').and.returnValue(Promise.resolve()); + store.replaceState({ + ...store.state, + isSaving: false, + badgeInEditForm: createDummyBadge(), + }); + vm.isEditing = true; + }); + + it('returns immediately if imageUrl is empty', () => { + store.state.badgeInEditForm.imageUrl = ''; + + vm.onSubmit(); + + 
expect(vm.saveBadge).not.toHaveBeenCalled(); + }); + + it('returns immediately if linkUrl is empty', () => { + store.state.badgeInEditForm.linkUrl = ''; + + vm.onSubmit(); + + expect(vm.saveBadge).not.toHaveBeenCalled(); + }); + + it('returns immediately if isSaving is true', () => { + store.state.isSaving = true; + + vm.onSubmit(); + + expect(vm.saveBadge).not.toHaveBeenCalled(); + }); + + it('calls saveBadge', () => { + vm.onSubmit(); + + expect(vm.saveBadge).toHaveBeenCalled(); + }); + }); + + describe('if isEditing is false', () => { + beforeEach(() => { + spyOn(vm, 'addBadge').and.returnValue(Promise.resolve()); + store.replaceState({ + ...store.state, + isSaving: false, + badgeInAddForm: createDummyBadge(), + }); + vm.isEditing = false; + }); + + it('returns immediately if imageUrl is empty', () => { + store.state.badgeInAddForm.imageUrl = ''; + + vm.onSubmit(); + + expect(vm.addBadge).not.toHaveBeenCalled(); + }); + + it('returns immediately if linkUrl is empty', () => { + store.state.badgeInAddForm.linkUrl = ''; + + vm.onSubmit(); + + expect(vm.addBadge).not.toHaveBeenCalled(); + }); + + it('returns immediately if isSaving is true', () => { + store.state.isSaving = true; + + vm.onSubmit(); + + expect(vm.addBadge).not.toHaveBeenCalled(); + }); + + it('calls addBadge', () => { + vm.onSubmit(); + + expect(vm.addBadge).toHaveBeenCalled(); + }); + }); + }); + }); + + describe('if isEditing is false', () => { + beforeEach(() => { + vm = mountComponentWithStore(Component, { + el: '#dummy-element', + store, + props: { + isEditing: false, + }, + }); + }); + + it('renders one button', () => { + const buttons = vm.$el.querySelectorAll('.row-content-block button'); + expect(buttons.length).toBe(1); + const buttonAddElement = buttons[0]; + expect(buttonAddElement).toBeVisible(); + expect(buttonAddElement).toHaveText('Add badge'); + }); + }); + + describe('if isEditing is true', () => { + beforeEach(() => { + vm = mountComponentWithStore(Component, { + el: '#dummy-element', + store, + props: { + isEditing: true, + }, + }); + }); + + it('renders two buttons', () => { + const buttons = vm.$el.querySelectorAll('.row-content-block button'); + expect(buttons.length).toBe(2); + const buttonSaveElement = buttons[0]; + expect(buttonSaveElement).toBeVisible(); + expect(buttonSaveElement).toHaveText('Save changes'); + const buttonCancelElement = buttons[1]; + expect(buttonCancelElement).toBeVisible(); + expect(buttonCancelElement).toHaveText('Cancel'); + }); + }); +}); diff --git a/spec/javascripts/badges/components/badge_list_row_spec.js b/spec/javascripts/badges/components/badge_list_row_spec.js new file mode 100644 index 00000000000..21bd00d82f0 --- /dev/null +++ b/spec/javascripts/badges/components/badge_list_row_spec.js @@ -0,0 +1,97 @@ +import $ from 'jquery'; +import Vue from 'vue'; +import { GROUP_BADGE, PROJECT_BADGE } from '~/badges/constants'; +import store from '~/badges/store'; +import BadgeListRow from '~/badges/components/badge_list_row.vue'; +import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import { createDummyBadge } from '../dummy_badge'; + +describe('BadgeListRow component', () => { + const Component = Vue.extend(BadgeListRow); + let badge; + let vm; + + beforeEach(() => { + setFixtures(` + <div id="delete-badge-modal" class="modal"></div> + <div id="dummy-element"></div> + `); + store.replaceState({ + ...store.state, + kind: PROJECT_BADGE, + }); + badge = createDummyBadge(); + vm = mountComponentWithStore(Component, { + el: '#dummy-element', + store, 
+ props: { badge }, + }); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('renders the badge', () => { + const badgeElement = vm.$el.querySelector('.project-badge'); + expect(badgeElement).not.toBeNull(); + expect(badgeElement.getAttribute('src')).toBe(badge.renderedImageUrl); + }); + + it('renders the badge link', () => { + expect(vm.$el).toContainText(badge.linkUrl); + }); + + it('renders the badge kind', () => { + expect(vm.$el).toContainText('Project Badge'); + }); + + it('shows edit and delete buttons', () => { + const buttons = vm.$el.querySelectorAll('.table-button-footer button'); + expect(buttons).toHaveLength(2); + const buttonEditElement = buttons[0]; + expect(buttonEditElement).toBeVisible(); + expect(buttonEditElement).toHaveSpriteIcon('pencil'); + const buttonDeleteElement = buttons[1]; + expect(buttonDeleteElement).toBeVisible(); + expect(buttonDeleteElement).toHaveSpriteIcon('remove'); + }); + + it('calls editBadge when clicking the edit button', () => { + spyOn(vm, 'editBadge'); + + const editButton = vm.$el.querySelector('.table-button-footer button:first-of-type'); + editButton.click(); + + expect(vm.editBadge).toHaveBeenCalled(); + }); + + it('calls updateBadgeInModal and shows modal when clicking the delete button', done => { + spyOn(vm, 'updateBadgeInModal'); + $('#delete-badge-modal').on('shown.bs.modal', () => done()); + + const deleteButton = vm.$el.querySelector('.table-button-footer button:last-of-type'); + deleteButton.click(); + + expect(vm.updateBadgeInModal).toHaveBeenCalled(); + }); + + describe('for a group badge', () => { + beforeEach(done => { + badge.kind = GROUP_BADGE; + + Vue.nextTick() + .then(done) + .catch(done.fail); + }); + + it('renders the badge kind', () => { + expect(vm.$el).toContainText('Group Badge'); + }); + + it('hides edit and delete buttons', () => { + const buttons = vm.$el.querySelectorAll('.table-button-footer button'); + expect(buttons).toHaveLength(0); + }); + }); +}); diff --git a/spec/javascripts/badges/components/badge_list_spec.js b/spec/javascripts/badges/components/badge_list_spec.js new file mode 100644 index 00000000000..9439c578973 --- /dev/null +++ b/spec/javascripts/badges/components/badge_list_spec.js @@ -0,0 +1,88 @@ +import Vue from 'vue'; +import { GROUP_BADGE, PROJECT_BADGE } from '~/badges/constants'; +import store from '~/badges/store'; +import BadgeList from '~/badges/components/badge_list.vue'; +import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import { createDummyBadge } from '../dummy_badge'; + +describe('BadgeList component', () => { + const Component = Vue.extend(BadgeList); + const numberOfDummyBadges = 3; + let vm; + + beforeEach(() => { + setFixtures('<div id="dummy-element"></div>'); + const badges = []; + for (let id = 0; id < numberOfDummyBadges; id += 1) { + badges.push({ id, ...createDummyBadge() }); + } + store.replaceState({ + ...store.state, + badges, + kind: PROJECT_BADGE, + isLoading: false, + }); + vm = mountComponentWithStore(Component, { + el: '#dummy-element', + store, + }); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('renders a header with the badge count', () => { + const header = vm.$el.querySelector('.panel-heading'); + expect(header).toHaveText(new RegExp(`Your badges\\s+${numberOfDummyBadges}`)); + }); + + it('renders a row for each badge', () => { + const rows = vm.$el.querySelectorAll('.gl-responsive-table-row'); + expect(rows).toHaveLength(numberOfDummyBadges); + }); + + it('renders a message if no badges exist', done => {
+ store.state.badges = []; + + Vue.nextTick() + .then(() => { + expect(vm.$el).toContainText('This project has no badges'); + }) + .then(done) + .catch(done.fail); + }); + + it('shows a loading icon when loading', done => { + store.state.isLoading = true; + + Vue.nextTick() + .then(() => { + const loadingIcon = vm.$el.querySelector('.fa-spinner'); + expect(loadingIcon).toBeVisible(); + }) + .then(done) + .catch(done.fail); + }); + + describe('for group badges', () => { + beforeEach(done => { + store.state.kind = GROUP_BADGE; + + Vue.nextTick() + .then(done) + .catch(done.fail); + }); + + it('renders a message if no badges exist', done => { + store.state.badges = []; + + Vue.nextTick() + .then(() => { + expect(vm.$el).toContainText('This group has no badges'); + }) + .then(done) + .catch(done.fail); + }); + }); +}); diff --git a/spec/javascripts/badges/components/badge_settings_spec.js b/spec/javascripts/badges/components/badge_settings_spec.js new file mode 100644 index 00000000000..3db02982ad4 --- /dev/null +++ b/spec/javascripts/badges/components/badge_settings_spec.js @@ -0,0 +1,109 @@ +import $ from 'jquery'; +import Vue from 'vue'; +import store from '~/badges/store'; +import BadgeSettings from '~/badges/components/badge_settings.vue'; +import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import { createDummyBadge } from '../dummy_badge'; + +describe('BadgeSettings component', () => { + const Component = Vue.extend(BadgeSettings); + let vm; + + beforeEach(() => { + setFixtures(` + <div id="dummy-element"></div> + <button + id="dummy-modal-button" + type="button" + data-toggle="modal" + data-target="#delete-badge-modal" + >Show modal</button> + `); + vm = mountComponentWithStore(Component, { + el: '#dummy-element', + store, + }); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('displays modal if button is clicked', done => { + const badge = createDummyBadge(); + store.state.badgeInModal = badge; + const modal = vm.$el.querySelector('#delete-badge-modal'); + const button = document.getElementById('dummy-modal-button'); + + $(modal).on('shown.bs.modal', () => { + expect(modal).toContainText('Delete badge?'); + const badgeElement = modal.querySelector('img.project-badge'); + expect(badgeElement).not.toBe(null); + expect(badgeElement.getAttribute('src')).toBe(badge.renderedImageUrl); + + done(); + }); + + Vue.nextTick() + .then(() => { + button.click(); + }) + .catch(done.fail); + }); + + it('displays a form to add a badge', () => { + const form = vm.$el.querySelector('form:nth-of-type(2)'); + expect(form).not.toBe(null); + const button = form.querySelector('.btn-success'); + expect(button).not.toBe(null); + expect(button).toHaveText(/Add badge/); + }); + + it('displays badge list', () => { + const badgeListElement = vm.$el.querySelector('.panel'); + expect(badgeListElement).not.toBe(null); + expect(badgeListElement).toBeVisible(); + expect(badgeListElement).toContainText('Your badges'); + }); + + describe('when editing', () => { + beforeEach(done => { + store.state.isEditing = true; + + Vue.nextTick() + .then(done) + .catch(done.fail); + }); + + it('displays a form to edit a badge', () => { + const form = vm.$el.querySelector('form:nth-of-type(1)'); + expect(form).not.toBe(null); + const submitButton = form.querySelector('.btn-success'); + expect(submitButton).not.toBe(null); + expect(submitButton).toHaveText(/Save changes/); + const cancelButton = form.querySelector('.btn-cancel'); + expect(cancelButton).not.toBe(null); + 
expect(cancelButton).toHaveText(/Cancel/); + }); + + it('displays no badge list', () => { + const badgeListElement = vm.$el.querySelector('.panel'); + expect(badgeListElement).toBeHidden(); + }); + }); + + describe('methods', () => { + describe('onSubmitModal', () => { + it('calls deleteBadge with the badge in the modal', () => { + spyOn(vm, 'deleteBadge').and.callFake(() => Promise.resolve()); + const modal = vm.$el.querySelector('#delete-badge-modal'); + const deleteButton = modal.querySelector('.btn-danger'); + + deleteButton.click(); + + const badge = store.state.badgeInModal; + expect(vm.deleteBadge).toHaveBeenCalledWith(badge); + }); + }); + }); +}); diff --git a/spec/javascripts/badges/components/badge_spec.js b/spec/javascripts/badges/components/badge_spec.js new file mode 100644 index 00000000000..fd1ecc9cdd8 --- /dev/null +++ b/spec/javascripts/badges/components/badge_spec.js @@ -0,0 +1,147 @@ +import Vue from 'vue'; +import Badge from '~/badges/components/badge.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; +import { DUMMY_IMAGE_URL, TEST_HOST } from 'spec/test_constants'; + +describe('Badge component', () => { + const Component = Vue.extend(Badge); + const dummyProps = { + imageUrl: DUMMY_IMAGE_URL, + linkUrl: `${TEST_HOST}/badge/link/url`, + }; + let vm; + + const findElements = () => { + const buttons = vm.$el.querySelectorAll('button'); + return { + badgeImage: vm.$el.querySelector('img.project-badge'), + loadingIcon: vm.$el.querySelector('.fa-spinner'), + reloadButton: buttons[buttons.length - 1], + }; + }; + + const createComponent = (props, el = null) => { + vm = mountComponent(Component, props, el); + const { badgeImage } = findElements(); + return new Promise(resolve => badgeImage.addEventListener('load', resolve)).then(() => + Vue.nextTick(), + ); + }; + + afterEach(() => { + vm.$destroy(); + }); + + describe('watchers', () => { + describe('imageUrl', () => { + it('sets isLoading and resets numRetries and hasError', done => { + const props = { ...dummyProps }; + createComponent(props) + .then(() => { + expect(vm.isLoading).toBe(false); + vm.hasError = true; + vm.numRetries = 42; + + vm.imageUrl = `${props.imageUrl}#something/else`; + + return Vue.nextTick(); + }) + .then(() => { + expect(vm.isLoading).toBe(true); + expect(vm.numRetries).toBe(0); + expect(vm.hasError).toBe(false); + }) + .then(done) + .catch(done.fail); + }); + }); + }); + + describe('methods', () => { + beforeEach(done => { + createComponent({ ...dummyProps }) + .then(done) + .catch(done.fail); + }); + + it('onError resets isLoading and sets hasError', () => { + vm.hasError = false; + vm.isLoading = true; + + vm.onError(); + + expect(vm.hasError).toBe(true); + expect(vm.isLoading).toBe(false); + }); + + it('onLoad sets isLoading', () => { + vm.isLoading = true; + + vm.onLoad(); + + expect(vm.isLoading).toBe(false); + }); + + it('reloadImage resets isLoading and hasError and increases numRetries', () => { + vm.hasError = true; + vm.isLoading = false; + vm.numRetries = 0; + + vm.reloadImage(); + + expect(vm.hasError).toBe(false); + expect(vm.isLoading).toBe(true); + expect(vm.numRetries).toBe(1); + }); + }); + + describe('behavior', () => { + beforeEach(done => { + setFixtures('<div id="dummy-element"></div>'); + createComponent({ ...dummyProps }, '#dummy-element') + .then(done) + .catch(done.fail); + }); + + it('shows a badge image after loading', () => { + expect(vm.isLoading).toBe(false); + expect(vm.hasError).toBe(false); + const { badgeImage, loadingIcon, reloadButton } = findElements(); +
expect(badgeImage).toBeVisible(); + expect(loadingIcon).toBeHidden(); + expect(reloadButton).toBeHidden(); + expect(vm.$el.innerText).toBe(''); + }); + + it('shows a loading icon when loading', done => { + vm.isLoading = true; + + Vue.nextTick() + .then(() => { + const { badgeImage, loadingIcon, reloadButton } = findElements(); + expect(badgeImage).toBeHidden(); + expect(loadingIcon).toBeVisible(); + expect(reloadButton).toBeHidden(); + expect(vm.$el.innerText).toBe(''); + }) + .then(done) + .catch(done.fail); + }); + + it('shows an error and reload button if loading failed', done => { + vm.hasError = true; + + Vue.nextTick() + .then(() => { + const { badgeImage, loadingIcon, reloadButton } = findElements(); + expect(badgeImage).toBeHidden(); + expect(loadingIcon).toBeHidden(); + expect(reloadButton).toBeVisible(); + expect(reloadButton).toHaveSpriteIcon('retry'); + expect(vm.$el.innerText.trim()).toBe('No badge image'); + }) + .then(done) + .catch(done.fail); + }); + }); +}); diff --git a/spec/javascripts/badges/dummy_badge.js b/spec/javascripts/badges/dummy_badge.js new file mode 100644 index 00000000000..6aaff21c503 --- /dev/null +++ b/spec/javascripts/badges/dummy_badge.js @@ -0,0 +1,23 @@ +import { PROJECT_BADGE } from '~/badges/constants'; +import { DUMMY_IMAGE_URL, TEST_HOST } from 'spec/test_constants'; + +export const createDummyBadge = () => { + const id = Math.floor(1000 * Math.random()); + return { + id, + imageUrl: `${TEST_HOST}/badges/${id}/image/url`, + isDeleting: false, + linkUrl: `${TEST_HOST}/badges/${id}/link/url`, + kind: PROJECT_BADGE, + renderedImageUrl: `${DUMMY_IMAGE_URL}?id=${id}`, + renderedLinkUrl: `${TEST_HOST}/badges/${id}/rendered/link/url`, + }; +}; + +export const createDummyBadgeResponse = () => ({ + image_url: `${TEST_HOST}/badge/image/url`, + link_url: `${TEST_HOST}/badge/link/url`, + kind: PROJECT_BADGE, + rendered_image_url: DUMMY_IMAGE_URL, + rendered_link_url: `${TEST_HOST}/rendered/badge/link/url`, +}); diff --git a/spec/javascripts/badges/store/actions_spec.js b/spec/javascripts/badges/store/actions_spec.js new file mode 100644 index 00000000000..bb6263c6de4 --- /dev/null +++ b/spec/javascripts/badges/store/actions_spec.js @@ -0,0 +1,607 @@ +import axios from '~/lib/utils/axios_utils'; +import MockAdapter from 'axios-mock-adapter'; +import actions, { transformBackendBadge } from '~/badges/store/actions'; +import mutationTypes from '~/badges/store/mutation_types'; +import createState from '~/badges/store/state'; +import { TEST_HOST } from 'spec/test_constants'; +import testAction from 'spec/helpers/vuex_action_helper'; +import { createDummyBadge, createDummyBadgeResponse } from '../dummy_badge'; + +describe('Badges store actions', () => { + const dummyEndpointUrl = `${TEST_HOST}/badges/endpoint`; + const dummyBadges = [{ ...createDummyBadge(), id: 5 }, { ...createDummyBadge(), id: 6 }]; + + let axiosMock; + let badgeId; + let state; + + beforeEach(() => { + axiosMock = new MockAdapter(axios); + state = { + ...createState(), + apiEndpointUrl: dummyEndpointUrl, + badges: dummyBadges, + }; + badgeId = state.badges[0].id; + }); + + afterEach(() => { + axiosMock.restore(); + }); + + describe('requestNewBadge', () => { + it('commits REQUEST_NEW_BADGE', done => { + testAction( + actions.requestNewBadge, + null, + state, + [{ type: mutationTypes.REQUEST_NEW_BADGE }], + [], + done, + ); + }); + }); + + describe('receiveNewBadge', () => { + it('commits RECEIVE_NEW_BADGE', done => { + const newBadge = createDummyBadge(); + testAction( + 
actions.receiveNewBadge, + newBadge, + state, + [{ type: mutationTypes.RECEIVE_NEW_BADGE, payload: newBadge }], + [], + done, + ); + }); + }); + + describe('receiveNewBadgeError', () => { + it('commits RECEIVE_NEW_BADGE_ERROR', done => { + testAction( + actions.receiveNewBadgeError, + null, + state, + [{ type: mutationTypes.RECEIVE_NEW_BADGE_ERROR }], + [], + done, + ); + }); + }); + + describe('addBadge', () => { + let badgeInAddForm; + let dispatch; + let endpointMock; + + beforeEach(() => { + endpointMock = axiosMock.onPost(dummyEndpointUrl); + dispatch = jasmine.createSpy('dispatch'); + badgeInAddForm = createDummyBadge(); + state = { + ...state, + badgeInAddForm, + }; + }); + + it('dispatches requestNewBadge and receiveNewBadge for successful response', done => { + const dummyResponse = createDummyBadgeResponse(); + + endpointMock.replyOnce(req => { + expect(req.data).toBe( + JSON.stringify({ + image_url: badgeInAddForm.imageUrl, + link_url: badgeInAddForm.linkUrl, + }), + ); + expect(dispatch.calls.allArgs()).toEqual([['requestNewBadge']]); + dispatch.calls.reset(); + return [200, dummyResponse]; + }); + + const dummyBadge = transformBackendBadge(dummyResponse); + actions + .addBadge({ state, dispatch }) + .then(() => { + expect(dispatch.calls.allArgs()).toEqual([['receiveNewBadge', dummyBadge]]); + }) + .then(done) + .catch(done.fail); + }); + + it('dispatches requestNewBadge and receiveNewBadgeError for error response', done => { + endpointMock.replyOnce(req => { + expect(req.data).toBe( + JSON.stringify({ + image_url: badgeInAddForm.imageUrl, + link_url: badgeInAddForm.linkUrl, + }), + ); + expect(dispatch.calls.allArgs()).toEqual([['requestNewBadge']]); + dispatch.calls.reset(); + return [500, '']; + }); + + actions + .addBadge({ state, dispatch }) + .then(() => done.fail('Expected Ajax call to fail!')) + .catch(() => { + expect(dispatch.calls.allArgs()).toEqual([['receiveNewBadgeError']]); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('requestDeleteBadge', () => { + it('commits REQUEST_DELETE_BADGE', done => { + testAction( + actions.requestDeleteBadge, + badgeId, + state, + [{ type: mutationTypes.REQUEST_DELETE_BADGE, payload: badgeId }], + [], + done, + ); + }); + }); + + describe('receiveDeleteBadge', () => { + it('commits RECEIVE_DELETE_BADGE', done => { + testAction( + actions.receiveDeleteBadge, + badgeId, + state, + [{ type: mutationTypes.RECEIVE_DELETE_BADGE, payload: badgeId }], + [], + done, + ); + }); + }); + + describe('receiveDeleteBadgeError', () => { + it('commits RECEIVE_DELETE_BADGE_ERROR', done => { + testAction( + actions.receiveDeleteBadgeError, + badgeId, + state, + [{ type: mutationTypes.RECEIVE_DELETE_BADGE_ERROR, payload: badgeId }], + [], + done, + ); + }); + }); + + describe('deleteBadge', () => { + let dispatch; + let endpointMock; + + beforeEach(() => { + endpointMock = axiosMock.onDelete(`${dummyEndpointUrl}/${badgeId}`); + dispatch = jasmine.createSpy('dispatch'); + }); + + it('dispatches requestDeleteBadge and receiveDeleteBadge for successful response', done => { + endpointMock.replyOnce(() => { + expect(dispatch.calls.allArgs()).toEqual([['requestDeleteBadge', badgeId]]); + dispatch.calls.reset(); + return [200, '']; + }); + + actions + .deleteBadge({ state, dispatch }, { id: badgeId }) + .then(() => { + expect(dispatch.calls.allArgs()).toEqual([['receiveDeleteBadge', badgeId]]); + }) + .then(done) + .catch(done.fail); + }); + + it('dispatches requestDeleteBadge and receiveDeleteBadgeError for error response', done => { + 
endpointMock.replyOnce(() => { + expect(dispatch.calls.allArgs()).toEqual([['requestDeleteBadge', badgeId]]); + dispatch.calls.reset(); + return [500, '']; + }); + + actions + .deleteBadge({ state, dispatch }, { id: badgeId }) + .then(() => done.fail('Expected Ajax call to fail!')) + .catch(() => { + expect(dispatch.calls.allArgs()).toEqual([['receiveDeleteBadgeError', badgeId]]); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('editBadge', () => { + it('commits START_EDITING', done => { + const dummyBadge = createDummyBadge(); + testAction( + actions.editBadge, + dummyBadge, + state, + [{ type: mutationTypes.START_EDITING, payload: dummyBadge }], + [], + done, + ); + }); + }); + + describe('requestLoadBadges', () => { + it('commits REQUEST_LOAD_BADGES', done => { + const dummyData = 'this is not real data'; + testAction( + actions.requestLoadBadges, + dummyData, + state, + [{ type: mutationTypes.REQUEST_LOAD_BADGES, payload: dummyData }], + [], + done, + ); + }); + }); + + describe('receiveLoadBadges', () => { + it('commits RECEIVE_LOAD_BADGES', done => { + const badges = dummyBadges; + testAction( + actions.receiveLoadBadges, + badges, + state, + [{ type: mutationTypes.RECEIVE_LOAD_BADGES, payload: badges }], + [], + done, + ); + }); + }); + + describe('receiveLoadBadgesError', () => { + it('commits RECEIVE_LOAD_BADGES_ERROR', done => { + testAction( + actions.receiveLoadBadgesError, + null, + state, + [{ type: mutationTypes.RECEIVE_LOAD_BADGES_ERROR }], + [], + done, + ); + }); + }); + + describe('loadBadges', () => { + let dispatch; + let endpointMock; + + beforeEach(() => { + endpointMock = axiosMock.onGet(dummyEndpointUrl); + dispatch = jasmine.createSpy('dispatch'); + }); + + it('dispatches requestLoadBadges and receiveLoadBadges for successful response', done => { + const dummyData = 'this is just some data'; + const dummyReponse = [ + createDummyBadgeResponse(), + createDummyBadgeResponse(), + createDummyBadgeResponse(), + ]; + endpointMock.replyOnce(() => { + expect(dispatch.calls.allArgs()).toEqual([['requestLoadBadges', dummyData]]); + dispatch.calls.reset(); + return [200, dummyReponse]; + }); + + actions + .loadBadges({ state, dispatch }, dummyData) + .then(() => { + const badges = dummyReponse.map(transformBackendBadge); + expect(dispatch.calls.allArgs()).toEqual([['receiveLoadBadges', badges]]); + }) + .then(done) + .catch(done.fail); + }); + + it('dispatches requestLoadBadges and receiveLoadBadgesError for error response', done => { + const dummyData = 'this is just some data'; + endpointMock.replyOnce(() => { + expect(dispatch.calls.allArgs()).toEqual([['requestLoadBadges', dummyData]]); + dispatch.calls.reset(); + return [500, '']; + }); + + actions + .loadBadges({ state, dispatch }, dummyData) + .then(() => done.fail('Expected Ajax call to fail!')) + .catch(() => { + expect(dispatch.calls.allArgs()).toEqual([['receiveLoadBadgesError']]); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('requestRenderedBadge', () => { + it('commits REQUEST_RENDERED_BADGE', done => { + testAction( + actions.requestRenderedBadge, + null, + state, + [{ type: mutationTypes.REQUEST_RENDERED_BADGE }], + [], + done, + ); + }); + }); + + describe('receiveRenderedBadge', () => { + it('commits RECEIVE_RENDERED_BADGE', done => { + const dummyBadge = createDummyBadge(); + testAction( + actions.receiveRenderedBadge, + dummyBadge, + state, + [{ type: mutationTypes.RECEIVE_RENDERED_BADGE, payload: dummyBadge }], + [], + done, + ); + }); + }); + + 
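// Illustrative sketch, not part of the commit above: the specs in this file lean on the
// shared `testAction` helper imported from 'spec/helpers/vuex_action_helper'. A stripped-down
// version of that pattern is shown here purely for orientation; the name `testActionSketch`
// and its reduced signature are assumptions, and the real helper additionally verifies
// dispatched actions.
function testActionSketch(action, payload, state, expectedMutations, done) {
  const committed = [];
  // Record every mutation the action commits, mirroring the `{ type, payload }` shape
  // used in the expectations above.
  const commit = (type, mutationPayload) =>
    committed.push(mutationPayload === undefined ? { type } : { type, payload: mutationPayload });

  // Vuex hands actions a context object; only `commit` and `state` matter for this sketch.
  const result = action({ commit, state }, payload);

  // Works for synchronous actions as well as actions that return a promise.
  Promise.resolve(result)
    .then(() => {
      expect(committed).toEqual(expectedMutations);
      done();
    })
    .catch(done.fail);
}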
describe('receiveRenderedBadgeError', () => { + it('commits RECEIVE_RENDERED_BADGE_ERROR', done => { + testAction( + actions.receiveRenderedBadgeError, + null, + state, + [{ type: mutationTypes.RECEIVE_RENDERED_BADGE_ERROR }], + [], + done, + ); + }); + }); + + describe('renderBadge', () => { + let dispatch; + let endpointMock; + let badgeInForm; + + beforeEach(() => { + badgeInForm = createDummyBadge(); + state = { + ...state, + badgeInAddForm: badgeInForm, + }; + const urlParameters = [ + `link_url=${encodeURIComponent(badgeInForm.linkUrl)}`, + `image_url=${encodeURIComponent(badgeInForm.imageUrl)}`, + ].join('&'); + endpointMock = axiosMock.onGet(`${dummyEndpointUrl}/render?${urlParameters}`); + dispatch = jasmine.createSpy('dispatch'); + }); + + it('returns immediately if imageUrl is empty', done => { + spyOn(axios, 'get'); + badgeInForm.imageUrl = ''; + + actions + .renderBadge({ state, dispatch }) + .then(() => { + expect(axios.get).not.toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + + it('returns immediately if linkUrl is empty', done => { + spyOn(axios, 'get'); + badgeInForm.linkUrl = ''; + + actions + .renderBadge({ state, dispatch }) + .then(() => { + expect(axios.get).not.toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + + it('escapes user input', done => { + spyOn(axios, 'get').and.callFake(() => Promise.resolve({ data: createDummyBadgeResponse() })); + badgeInForm.imageUrl = '&make-sandwhich=true'; + badgeInForm.linkUrl = '<script>I am dangerous!</script>'; + + actions + .renderBadge({ state, dispatch }) + .then(() => { + expect(axios.get.calls.count()).toBe(1); + const url = axios.get.calls.argsFor(0)[0]; + expect(url).toMatch(`^${dummyEndpointUrl}/render?`); + expect(url).toMatch('\\?link_url=%3Cscript%3EI%20am%20dangerous!%3C%2Fscript%3E&'); + expect(url).toMatch('&image_url=%26make-sandwhich%3Dtrue$'); + }) + .then(done) + .catch(done.fail); + }); + + it('dispatches requestRenderedBadge and receiveRenderedBadge for successful response', done => { + const dummyReponse = createDummyBadgeResponse(); + endpointMock.replyOnce(() => { + expect(dispatch.calls.allArgs()).toEqual([['requestRenderedBadge']]); + dispatch.calls.reset(); + return [200, dummyReponse]; + }); + + actions + .renderBadge({ state, dispatch }) + .then(() => { + const renderedBadge = transformBackendBadge(dummyReponse); + expect(dispatch.calls.allArgs()).toEqual([['receiveRenderedBadge', renderedBadge]]); + }) + .then(done) + .catch(done.fail); + }); + + it('dispatches requestRenderedBadge and receiveRenderedBadgeError for error response', done => { + endpointMock.replyOnce(() => { + expect(dispatch.calls.allArgs()).toEqual([['requestRenderedBadge']]); + dispatch.calls.reset(); + return [500, '']; + }); + + actions + .renderBadge({ state, dispatch }) + .then(() => done.fail('Expected Ajax call to fail!')) + .catch(() => { + expect(dispatch.calls.allArgs()).toEqual([['receiveRenderedBadgeError']]); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('requestUpdatedBadge', () => { + it('commits REQUEST_UPDATED_BADGE', done => { + testAction( + actions.requestUpdatedBadge, + null, + state, + [{ type: mutationTypes.REQUEST_UPDATED_BADGE }], + [], + done, + ); + }); + }); + + describe('receiveUpdatedBadge', () => { + it('commits RECEIVE_UPDATED_BADGE', done => { + const updatedBadge = createDummyBadge(); + testAction( + actions.receiveUpdatedBadge, + updatedBadge, + state, + [{ type: mutationTypes.RECEIVE_UPDATED_BADGE, payload: updatedBadge }], + [], + done, + ); 
+ }); + }); + + describe('receiveUpdatedBadgeError', () => { + it('commits RECEIVE_UPDATED_BADGE_ERROR', done => { + testAction( + actions.receiveUpdatedBadgeError, + null, + state, + [{ type: mutationTypes.RECEIVE_UPDATED_BADGE_ERROR }], + [], + done, + ); + }); + }); + + describe('saveBadge', () => { + let badgeInEditForm; + let dispatch; + let endpointMock; + + beforeEach(() => { + badgeInEditForm = createDummyBadge(); + state = { + ...state, + badgeInEditForm, + }; + endpointMock = axiosMock.onPut(`${dummyEndpointUrl}/${badgeInEditForm.id}`); + dispatch = jasmine.createSpy('dispatch'); + }); + + it('dispatches requestUpdatedBadge and receiveUpdatedBadge for successful response', done => { + const dummyResponse = createDummyBadgeResponse(); + + endpointMock.replyOnce(req => { + expect(req.data).toBe( + JSON.stringify({ + image_url: badgeInEditForm.imageUrl, + link_url: badgeInEditForm.linkUrl, + }), + ); + expect(dispatch.calls.allArgs()).toEqual([['requestUpdatedBadge']]); + dispatch.calls.reset(); + return [200, dummyResponse]; + }); + + const updatedBadge = transformBackendBadge(dummyResponse); + actions + .saveBadge({ state, dispatch }) + .then(() => { + expect(dispatch.calls.allArgs()).toEqual([['receiveUpdatedBadge', updatedBadge]]); + }) + .then(done) + .catch(done.fail); + }); + + it('dispatches requestUpdatedBadge and receiveUpdatedBadgeError for error response', done => { + endpointMock.replyOnce(req => { + expect(req.data).toBe( + JSON.stringify({ + image_url: badgeInEditForm.imageUrl, + link_url: badgeInEditForm.linkUrl, + }), + ); + expect(dispatch.calls.allArgs()).toEqual([['requestUpdatedBadge']]); + dispatch.calls.reset(); + return [500, '']; + }); + + actions + .saveBadge({ state, dispatch }) + .then(() => done.fail('Expected Ajax call to fail!')) + .catch(() => { + expect(dispatch.calls.allArgs()).toEqual([['receiveUpdatedBadgeError']]); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('stopEditing', () => { + it('commits STOP_EDITING', done => { + testAction( + actions.stopEditing, + null, + state, + [{ type: mutationTypes.STOP_EDITING }], + [], + done, + ); + }); + }); + + describe('updateBadgeInForm', () => { + it('commits UPDATE_BADGE_IN_FORM', done => { + const dummyBadge = createDummyBadge(); + testAction( + actions.updateBadgeInForm, + dummyBadge, + state, + [{ type: mutationTypes.UPDATE_BADGE_IN_FORM, payload: dummyBadge }], + [], + done, + ); + }); + + describe('updateBadgeInModal', () => { + it('commits UPDATE_BADGE_IN_MODAL', done => { + const dummyBadge = createDummyBadge(); + testAction( + actions.updateBadgeInModal, + dummyBadge, + state, + [{ type: mutationTypes.UPDATE_BADGE_IN_MODAL, payload: dummyBadge }], + [], + done, + ); + }); + }); + }); +}); diff --git a/spec/javascripts/badges/store/mutations_spec.js b/spec/javascripts/badges/store/mutations_spec.js new file mode 100644 index 00000000000..8d26f83339d --- /dev/null +++ b/spec/javascripts/badges/store/mutations_spec.js @@ -0,0 +1,418 @@ +import { GROUP_BADGE, PROJECT_BADGE } from '~/badges/constants'; +import store from '~/badges/store'; +import types from '~/badges/store/mutation_types'; +import createState from '~/badges/store/state'; +import { createDummyBadge } from '../dummy_badge'; + +describe('Badges store mutations', () => { + let dummyBadge; + + beforeEach(() => { + dummyBadge = createDummyBadge(); + store.replaceState(createState()); + }); + + describe('RECEIVE_DELETE_BADGE', () => { + beforeEach(() => { + const badges = [ + { ...dummyBadge, id: dummyBadge.id - 1 }, + 
dummyBadge, + { ...dummyBadge, id: dummyBadge.id + 1 }, + ]; + + store.replaceState({ + ...store.state, + badges, + }); + }); + + it('removes deleted badge', () => { + const badgeCount = store.state.badges.length; + + store.commit(types.RECEIVE_DELETE_BADGE, dummyBadge.id); + + expect(store.state.badges.length).toBe(badgeCount - 1); + expect(store.state.badges.indexOf(dummyBadge)).toBe(-1); + }); + }); + + describe('RECEIVE_DELETE_BADGE_ERROR', () => { + beforeEach(() => { + const badges = [ + { ...dummyBadge, id: dummyBadge.id - 1, isDeleting: false }, + { ...dummyBadge, isDeleting: true }, + { ...dummyBadge, id: dummyBadge.id + 1, isDeleting: true }, + ]; + + store.replaceState({ + ...store.state, + badges, + }); + }); + + it('sets isDeleting to false', () => { + const badgeCount = store.state.badges.length; + + store.commit(types.RECEIVE_DELETE_BADGE_ERROR, dummyBadge.id); + + expect(store.state.badges.length).toBe(badgeCount); + expect(store.state.badges[0].isDeleting).toBe(false); + expect(store.state.badges[1].isDeleting).toBe(false); + expect(store.state.badges[2].isDeleting).toBe(true); + }); + }); + + describe('RECEIVE_LOAD_BADGES', () => { + beforeEach(() => { + store.replaceState({ + ...store.state, + isLoading: 'not false', + }); + }); + + it('sets badges and isLoading to false', () => { + const badges = [createDummyBadge()]; + store.commit(types.RECEIVE_LOAD_BADGES, badges); + + expect(store.state.isLoading).toBe(false); + expect(store.state.badges).toBe(badges); + }); + }); + + describe('RECEIVE_LOAD_BADGES_ERROR', () => { + beforeEach(() => { + store.replaceState({ + ...store.state, + isLoading: 'not false', + }); + }); + + it('sets isLoading to false', () => { + store.commit(types.RECEIVE_LOAD_BADGES_ERROR); + + expect(store.state.isLoading).toBe(false); + }); + }); + + describe('RECEIVE_NEW_BADGE', () => { + beforeEach(() => { + const badges = [ + { ...dummyBadge, id: dummyBadge.id - 1, kind: GROUP_BADGE }, + { ...dummyBadge, id: dummyBadge.id + 1, kind: GROUP_BADGE }, + { ...dummyBadge, id: dummyBadge.id - 1, kind: PROJECT_BADGE }, + { ...dummyBadge, id: dummyBadge.id + 1, kind: PROJECT_BADGE }, + ]; + store.replaceState({ + ...store.state, + badgeInAddForm: createDummyBadge(), + badges, + isSaving: 'dummy value', + renderedBadge: createDummyBadge(), + }); + }); + + it('resets the add form', () => { + store.commit(types.RECEIVE_NEW_BADGE, dummyBadge); + + expect(store.state.badgeInAddForm).toBe(null); + expect(store.state.isSaving).toBe(false); + expect(store.state.renderedBadge).toBe(null); + }); + + it('inserts group badge at correct position', () => { + const badgeCount = store.state.badges.length; + dummyBadge = { ...dummyBadge, kind: GROUP_BADGE }; + + store.commit(types.RECEIVE_NEW_BADGE, dummyBadge); + + expect(store.state.badges.length).toBe(badgeCount + 1); + expect(store.state.badges.indexOf(dummyBadge)).toBe(1); + }); + + it('inserts project badge at correct position', () => { + const badgeCount = store.state.badges.length; + dummyBadge = { ...dummyBadge, kind: PROJECT_BADGE }; + + store.commit(types.RECEIVE_NEW_BADGE, dummyBadge); + + expect(store.state.badges.length).toBe(badgeCount + 1); + expect(store.state.badges.indexOf(dummyBadge)).toBe(3); + }); + }); + + describe('RECEIVE_NEW_BADGE_ERROR', () => { + beforeEach(() => { + store.replaceState({ + ...store.state, + isSaving: 'dummy value', + }); + }); + + it('sets isSaving to false', () => { + store.commit(types.RECEIVE_NEW_BADGE_ERROR); + + expect(store.state.isSaving).toBe(false); + }); + }); + + 
describe('RECEIVE_RENDERED_BADGE', () => {
+    beforeEach(() => {
+      store.replaceState({
+        ...store.state,
+        isRendering: 'dummy value',
+        renderedBadge: 'dummy value',
+      });
+    });
+
+    it('sets renderedBadge', () => {
+      store.commit(types.RECEIVE_RENDERED_BADGE, dummyBadge);
+
+      expect(store.state.isRendering).toBe(false);
+      expect(store.state.renderedBadge).toBe(dummyBadge);
+    });
+  });
+
+  describe('RECEIVE_RENDERED_BADGE_ERROR', () => {
+    beforeEach(() => {
+      store.replaceState({
+        ...store.state,
+        isRendering: 'dummy value',
+      });
+    });
+
+    it('sets isRendering to false', () => {
+      store.commit(types.RECEIVE_RENDERED_BADGE_ERROR);
+
+      expect(store.state.isRendering).toBe(false);
+    });
+  });
+
+  describe('RECEIVE_UPDATED_BADGE', () => {
+    beforeEach(() => {
+      const badges = [
+        { ...dummyBadge, id: dummyBadge.id - 1 },
+        dummyBadge,
+        { ...dummyBadge, id: dummyBadge.id + 1 },
+      ];
+      store.replaceState({
+        ...store.state,
+        badgeInEditForm: createDummyBadge(),
+        badges,
+        isEditing: 'dummy value',
+        isSaving: 'dummy value',
+        renderedBadge: createDummyBadge(),
+      });
+    });
+
+    it('resets the edit form', () => {
+      store.commit(types.RECEIVE_UPDATED_BADGE, dummyBadge);
+
+      expect(store.state.badgeInEditForm).toBe(null);
+      expect(store.state.isSaving).toBe(false);
+      expect(store.state.renderedBadge).toBe(null);
+    });
+
+    it('replaces the updated badge', () => {
+      const badgeCount = store.state.badges.length;
+      const badgeIndex = store.state.badges.indexOf(dummyBadge);
+      const newBadge = { id: dummyBadge.id, dummy: 'value' };
+
+      store.commit(types.RECEIVE_UPDATED_BADGE, newBadge);
+
+      expect(store.state.badges.length).toBe(badgeCount);
+      expect(store.state.badges[badgeIndex]).toBe(newBadge);
+    });
+  });
+
+  describe('RECEIVE_UPDATED_BADGE_ERROR', () => {
+    beforeEach(() => {
+      store.replaceState({
+        ...store.state,
+        isSaving: 'dummy value',
+      });
+    });
+
+    it('sets isSaving to false', () => {
+      store.commit(types.RECEIVE_UPDATED_BADGE_ERROR);
+
+      expect(store.state.isSaving).toBe(false);
+    });
+  });
+
+  describe('REQUEST_DELETE_BADGE', () => {
+    beforeEach(() => {
+      const badges = [
+        { ...dummyBadge, id: dummyBadge.id - 1, isDeleting: false },
+        { ...dummyBadge, isDeleting: false },
+        { ...dummyBadge, id: dummyBadge.id + 1, isDeleting: true },
+      ];
+
+      store.replaceState({
+        ...store.state,
+        badges,
+      });
+    });
+
+    it('sets isDeleting to true', () => {
+      const badgeCount = store.state.badges.length;
+
+      store.commit(types.REQUEST_DELETE_BADGE, dummyBadge.id);
+
+      expect(store.state.badges.length).toBe(badgeCount);
+      expect(store.state.badges[0].isDeleting).toBe(false);
+      expect(store.state.badges[1].isDeleting).toBe(true);
+      expect(store.state.badges[2].isDeleting).toBe(true);
+    });
+  });
+
+  describe('REQUEST_LOAD_BADGES', () => {
+    beforeEach(() => {
+      store.replaceState({
+        ...store.state,
+        apiEndpointUrl: 'some endpoint',
+        docsUrl: 'some url',
+        isLoading: 'dummy value',
+        kind: 'some kind',
+      });
+    });
+
+    it('sets isLoading to true and initializes the store', () => {
+      const dummyData = {
+        apiEndpointUrl: 'dummy endpoint',
+        docsUrl: 'dummy url',
+        kind: 'dummy kind',
+      };
+
+      store.commit(types.REQUEST_LOAD_BADGES, dummyData);
+
+      expect(store.state.isLoading).toBe(true);
+      expect(store.state.apiEndpointUrl).toBe(dummyData.apiEndpointUrl);
+      expect(store.state.docsUrl).toBe(dummyData.docsUrl);
+      expect(store.state.kind).toBe(dummyData.kind);
+    });
+  });
+
+  describe('REQUEST_NEW_BADGE', () => {
+    beforeEach(() => {
+      store.replaceState({
+        ...store.state,
+        isSaving: 'dummy value',
+      });
+    });
+
+    it('sets isSaving to true', () => {
+      store.commit(types.REQUEST_NEW_BADGE);
+
+      expect(store.state.isSaving).toBe(true);
+    });
+  });
+
+  describe('REQUEST_RENDERED_BADGE', () => {
+    beforeEach(() => {
+      store.replaceState({
+        ...store.state,
+        isRendering: 'dummy value',
+      });
+    });
+
+    it('sets isRendering to true', () => {
+      store.commit(types.REQUEST_RENDERED_BADGE);
+
+      expect(store.state.isRendering).toBe(true);
+    });
+  });
+
+  describe('REQUEST_UPDATED_BADGE', () => {
+    beforeEach(() => {
+      store.replaceState({
+        ...store.state,
+        isSaving: 'dummy value',
+      });
+    });
+
+    it('sets isSaving to true', () => {
+      store.commit(types.REQUEST_UPDATED_BADGE);
+
+      expect(store.state.isSaving).toBe(true);
+    });
+  });
+
+  describe('START_EDITING', () => {
+    beforeEach(() => {
+      store.replaceState({
+        ...store.state,
+        badgeInEditForm: 'dummy value',
+        isEditing: 'dummy value',
+        renderedBadge: 'dummy value',
+      });
+    });
+
+    it('initializes the edit form', () => {
+      store.commit(types.START_EDITING, dummyBadge);
+
+      expect(store.state.isEditing).toBe(true);
+      expect(store.state.badgeInEditForm).toEqual(dummyBadge);
+      expect(store.state.renderedBadge).toEqual(dummyBadge);
+    });
+  });
+
+  describe('STOP_EDITING', () => {
+    beforeEach(() => {
+      store.replaceState({
+        ...store.state,
+        badgeInEditForm: 'dummy value',
+        isEditing: 'dummy value',
+        renderedBadge: 'dummy value',
+      });
+    });
+
+    it('resets the edit form', () => {
+      store.commit(types.STOP_EDITING);
+
+      expect(store.state.isEditing).toBe(false);
+      expect(store.state.badgeInEditForm).toBe(null);
+      expect(store.state.renderedBadge).toBe(null);
+    });
+  });
+
+  describe('UPDATE_BADGE_IN_FORM', () => {
+    beforeEach(() => {
+      store.replaceState({
+        ...store.state,
+        badgeInAddForm: 'dummy value',
+        badgeInEditForm: 'dummy value',
+      });
+    });
+
+    it('sets badgeInEditForm if isEditing is true', () => {
+      store.state.isEditing = true;
+
+      store.commit(types.UPDATE_BADGE_IN_FORM, dummyBadge);
+
+      expect(store.state.badgeInEditForm).toBe(dummyBadge);
+    });
+
+    it('sets badgeInAddForm if isEditing is false', () => {
+      store.state.isEditing = false;
+
+      store.commit(types.UPDATE_BADGE_IN_FORM, dummyBadge);
+
+      expect(store.state.badgeInAddForm).toBe(dummyBadge);
+    });
+  });
+
+  describe('UPDATE_BADGE_IN_MODAL', () => {
+    beforeEach(() => {
+      store.replaceState({
+        ...store.state,
+        badgeInModal: 'dummy value',
+      });
+    });
+
+    it('sets badgeInModal', () => {
+      store.commit(types.UPDATE_BADGE_IN_MODAL, dummyBadge);
+
+      expect(store.state.badgeInModal).toBe(dummyBadge);
+    });
+  });
+});
diff --git a/spec/javascripts/behaviors/copy_as_gfm_spec.js b/spec/javascripts/behaviors/copy_as_gfm_spec.js
index b8155144e2a..efbe09a10a2 100644
--- a/spec/javascripts/behaviors/copy_as_gfm_spec.js
+++ b/spec/javascripts/behaviors/copy_as_gfm_spec.js
@@ -1,4 +1,4 @@
-import { CopyAsGFM } from '~/behaviors/copy_as_gfm';
+import { CopyAsGFM } from '~/behaviors/markdown/copy_as_gfm';
 
 describe('CopyAsGFM', () => {
   describe('CopyAsGFM.pasteGFM', () => {
diff --git a/spec/javascripts/boards/board_blank_state_spec.js b/spec/javascripts/boards/board_blank_state_spec.js
index f757dadfada..664ea202e93 100644
--- a/spec/javascripts/boards/board_blank_state_spec.js
+++ b/spec/javascripts/boards/board_blank_state_spec.js
@@ -1,7 +1,7 @@
 /* global BoardService */
 import Vue from 'vue';
 import '~/boards/stores/boards_store';
-import boardBlankState from '~/boards/components/board_blank_state';
+import BoardBlankState from '~/boards/components/board_blank_state.vue';
 import
{ mockBoardService } from './mock_data'; describe('Boards blank state', () => { @@ -9,7 +9,7 @@ describe('Boards blank state', () => { let fail = false; beforeEach((done) => { - const Comp = Vue.extend(boardBlankState); + const Comp = Vue.extend(BoardBlankState); gl.issueBoards.BoardsStore.create(); gl.boardService = mockBoardService(); diff --git a/spec/javascripts/boards/issue_card_spec.js b/spec/javascripts/boards/issue_card_spec.js index 37088a6421c..be1ea0b57b4 100644 --- a/spec/javascripts/boards/issue_card_spec.js +++ b/spec/javascripts/boards/issue_card_spec.js @@ -41,6 +41,8 @@ describe('Issue card component', () => { confidential: false, labels: [list.label], assignees: [], + reference_path: '#1', + real_path: '/test/1', }); component = new Vue({ diff --git a/spec/javascripts/boards/mock_data.js b/spec/javascripts/boards/mock_data.js index 0671facb285..81f1a97112f 100644 --- a/spec/javascripts/boards/mock_data.js +++ b/spec/javascripts/boards/mock_data.js @@ -1,7 +1,4 @@ /* global BoardService */ -/* eslint-disable comma-dangle, no-unused-vars, quote-props */ -import _ from 'underscore'; - export const listObj = { id: 300, position: 0, @@ -11,8 +8,8 @@ export const listObj = { id: 5000, title: 'Testing', color: 'red', - description: 'testing;' - } + description: 'testing;', + }, }; export const listObjDuplicate = { @@ -24,35 +21,37 @@ export const listObjDuplicate = { id: listObj.label.id, title: 'Testing', color: 'red', - description: 'testing;' - } + description: 'testing;', + }, }; export const BoardsMockData = { - 'GET': { + GET: { '/test/-/boards/1/lists/300/issues?id=300&page=1&=': { - issues: [{ - title: 'Testing', - id: 1, - iid: 1, - confidential: false, - labels: [], - assignees: [], - }], - } + issues: [ + { + title: 'Testing', + id: 1, + iid: 1, + confidential: false, + labels: [], + assignees: [], + }, + ], + }, + }, + POST: { + '/test/-/boards/1/lists': listObj, }, - 'POST': { - '/test/-/boards/1/lists': listObj + PUT: { + '/test/issue-boards/board/1/lists{/id}': {}, }, - 'PUT': { - '/test/issue-boards/board/1/lists{/id}': {} + DELETE: { + '/test/issue-boards/board/1/lists{/id}': {}, }, - 'DELETE': { - '/test/issue-boards/board/1/lists{/id}': {} - } }; -export const boardsMockInterceptor = (config) => { +export const boardsMockInterceptor = config => { const body = BoardsMockData[config.method.toUpperCase()][config.url]; return [200, body]; }; diff --git a/spec/javascripts/boards/modal_store_spec.js b/spec/javascripts/boards/modal_store_spec.js index e9d77f035e3..797693a21aa 100644 --- a/spec/javascripts/boards/modal_store_spec.js +++ b/spec/javascripts/boards/modal_store_spec.js @@ -4,12 +4,11 @@ import '~/vue_shared/models/label'; import '~/boards/models/issue'; import '~/boards/models/list'; import '~/boards/models/assignee'; -import '~/boards/stores/modal_store'; +import Store from '~/boards/stores/modal_store'; describe('Modal store', () => { let issue; let issue2; - const Store = gl.issueBoards.ModalStore; beforeEach(() => { // Setup default state diff --git a/spec/javascripts/branches/branches_delete_modal_spec.js b/spec/javascripts/branches/branches_delete_modal_spec.js new file mode 100644 index 00000000000..b223b8e2c0a --- /dev/null +++ b/spec/javascripts/branches/branches_delete_modal_spec.js @@ -0,0 +1,40 @@ +import $ from 'jquery'; +import DeleteModal from '~/branches/branches_delete_modal'; + +describe('branches delete modal', () => { + describe('setDisableDeleteButton', () => { + let submitSpy; + let $deleteButton; + + beforeEach(() => { + 
setFixtures(` + <div id="modal-delete-branch"> + <form> + <button type="submit" class="js-delete-branch">Delete</button> + </form> + </div> + `); + $deleteButton = $('.js-delete-branch'); + submitSpy = jasmine.createSpy('submit').and.callFake(event => event.preventDefault()); + $('#modal-delete-branch form').on('submit', submitSpy); + // eslint-disable-next-line no-new + new DeleteModal(); + }); + + it('does not submit if button is disabled', () => { + $deleteButton.attr('disabled', true); + + $deleteButton.click(); + + expect(submitSpy).not.toHaveBeenCalled(); + }); + + it('submits if button is not disabled', () => { + $deleteButton.attr('disabled', false); + + $deleteButton.click(); + + expect(submitSpy).toHaveBeenCalled(); + }); + }); +}); diff --git a/spec/javascripts/ci_variable_list/native_form_variable_list_spec.js b/spec/javascripts/ci_variable_list/native_form_variable_list_spec.js index 1ea8d86cb7e..94a0c999d66 100644 --- a/spec/javascripts/ci_variable_list/native_form_variable_list_spec.js +++ b/spec/javascripts/ci_variable_list/native_form_variable_list_spec.js @@ -20,7 +20,7 @@ describe('NativeFormVariableList', () => { it('should clear out the `name` attribute on the inputs for the last empty row on form submission (avoid BE validation)', () => { const $row = $wrapper.find('.js-row'); expect($row.find('.js-ci-variable-input-key').attr('name')).toBe('schedule[variables_attributes][][key]'); - expect($row.find('.js-ci-variable-input-value').attr('name')).toBe('schedule[variables_attributes][][value]'); + expect($row.find('.js-ci-variable-input-value').attr('name')).toBe('schedule[variables_attributes][][secret_value]'); $wrapper.closest('form').trigger('trigger-submit'); diff --git a/spec/javascripts/collapsed_sidebar_todo_spec.js b/spec/javascripts/collapsed_sidebar_todo_spec.js index 2abf52a1676..8427e8a0ba7 100644 --- a/spec/javascripts/collapsed_sidebar_todo_spec.js +++ b/spec/javascripts/collapsed_sidebar_todo_spec.js @@ -85,7 +85,7 @@ describe('Issuable right sidebar collapsed todo toggle', () => { setTimeout(() => { expect( document.querySelector('.issuable-sidebar-header .js-issuable-todo').textContent.trim(), - ).toBe('Mark done'); + ).toBe('Mark todo as done'); done(); }); @@ -97,7 +97,7 @@ describe('Issuable right sidebar collapsed todo toggle', () => { setTimeout(() => { expect( document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').getAttribute('data-original-title'), - ).toBe('Mark done'); + ).toBe('Mark todo as done'); done(); }); @@ -128,13 +128,13 @@ describe('Issuable right sidebar collapsed todo toggle', () => { .catch(done.fail); }); - it('updates aria-label to mark done', (done) => { + it('updates aria-label to mark todo as done', (done) => { document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click(); setTimeout(() => { expect( document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').getAttribute('aria-label'), - ).toBe('Mark done'); + ).toBe('Mark todo as done'); done(); }); @@ -147,7 +147,7 @@ describe('Issuable right sidebar collapsed todo toggle', () => { .then(() => { expect( document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').getAttribute('aria-label'), - ).toBe('Mark done'); + ).toBe('Mark todo as done'); document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click(); }) diff --git a/spec/javascripts/commit/pipelines/pipelines_spec.js b/spec/javascripts/commit/pipelines/pipelines_spec.js index 0afe09d87bc..53820770f3f 100644 --- a/spec/javascripts/commit/pipelines/pipelines_spec.js +++ 
b/spec/javascripts/commit/pipelines/pipelines_spec.js @@ -1,113 +1,82 @@ -import _ from 'underscore'; import Vue from 'vue'; +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import pipelinesTable from '~/commit/pipelines/pipelines_table.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Pipelines table in Commits and Merge requests', () => { const jsonFixtureName = 'pipelines/pipelines.json'; let pipeline; let PipelinesTable; + let mock; + let vm; preloadFixtures(jsonFixtureName); beforeEach(() => { + mock = new MockAdapter(axios); + const pipelines = getJSONFixture(jsonFixtureName).pipelines; PipelinesTable = Vue.extend(pipelinesTable); pipeline = pipelines.find(p => p.user !== null && p.commit !== null); }); + afterEach(() => { + vm.$destroy(); + mock.restore(); + }); + describe('successful request', () => { describe('without pipelines', () => { - const pipelinesEmptyResponse = (request, next) => { - next(request.respondWith(JSON.stringify([]), { - status: 200, - })); - }; - beforeEach(function () { - Vue.http.interceptors.push(pipelinesEmptyResponse); - - this.component = new PipelinesTable({ - propsData: { - endpoint: 'endpoint', - helpPagePath: 'foo', - emptyStateSvgPath: 'foo', - errorStateSvgPath: 'foo', - autoDevopsHelpPath: 'foo', - }, - }).$mount(); - }); + mock.onGet('endpoint.json').reply(200, []); - afterEach(function () { - Vue.http.interceptors = _.without( - Vue.http.interceptors, pipelinesEmptyResponse, - ); - this.component.$destroy(); + vm = mountComponent(PipelinesTable, { + endpoint: 'endpoint.json', + helpPagePath: 'foo', + emptyStateSvgPath: 'foo', + errorStateSvgPath: 'foo', + autoDevopsHelpPath: 'foo', + }); }); it('should render the empty state', function (done) { setTimeout(() => { - expect(this.component.$el.querySelector('.empty-state')).toBeDefined(); - expect(this.component.$el.querySelector('.realtime-loading')).toBe(null); - expect(this.component.$el.querySelector('.js-pipelines-error-state')).toBe(null); + expect(vm.$el.querySelector('.empty-state')).toBeDefined(); + expect(vm.$el.querySelector('.realtime-loading')).toBe(null); + expect(vm.$el.querySelector('.js-pipelines-error-state')).toBe(null); done(); - }, 1); + }, 0); }); }); describe('with pipelines', () => { - const pipelinesResponse = (request, next) => { - next(request.respondWith(JSON.stringify([pipeline]), { - status: 200, - })); - }; - beforeEach(() => { - Vue.http.interceptors.push(pipelinesResponse); - - this.component = new PipelinesTable({ - propsData: { - endpoint: 'endpoint', - helpPagePath: 'foo', - emptyStateSvgPath: 'foo', - errorStateSvgPath: 'foo', - autoDevopsHelpPath: 'foo', - }, - }).$mount(); - }); - - afterEach(() => { - Vue.http.interceptors = _.without( - Vue.http.interceptors, pipelinesResponse, - ); - this.component.$destroy(); + mock.onGet('endpoint.json').reply(200, [pipeline]); + vm = mountComponent(PipelinesTable, { + endpoint: 'endpoint.json', + helpPagePath: 'foo', + emptyStateSvgPath: 'foo', + errorStateSvgPath: 'foo', + autoDevopsHelpPath: 'foo', + }); }); it('should render a table with the received pipelines', (done) => { setTimeout(() => { - expect(this.component.$el.querySelectorAll('.ci-table .commit').length).toEqual(1); - expect(this.component.$el.querySelector('.realtime-loading')).toBe(null); - expect(this.component.$el.querySelector('.empty-state')).toBe(null); - expect(this.component.$el.querySelector('.js-pipelines-error-state')).toBe(null); + 
expect(vm.$el.querySelectorAll('.ci-table .commit').length).toEqual(1); + expect(vm.$el.querySelector('.realtime-loading')).toBe(null); + expect(vm.$el.querySelector('.empty-state')).toBe(null); + expect(vm.$el.querySelector('.js-pipelines-error-state')).toBe(null); done(); }, 0); }); }); describe('pipeline badge counts', () => { - const pipelinesResponse = (request, next) => { - next(request.respondWith(JSON.stringify([pipeline]), { - status: 200, - })); - }; - beforeEach(() => { - Vue.http.interceptors.push(pipelinesResponse); - }); - - afterEach(() => { - Vue.http.interceptors = _.without(Vue.http.interceptors, pipelinesResponse); - this.component.$destroy(); + mock.onGet('endpoint.json').reply(200, [pipeline]); }); it('should receive update-pipelines-count event', (done) => { @@ -119,54 +88,38 @@ describe('Pipelines table in Commits and Merge requests', () => { done(); }); - this.component = new PipelinesTable({ - propsData: { - endpoint: 'endpoint', - helpPagePath: 'foo', - emptyStateSvgPath: 'foo', - errorStateSvgPath: 'foo', - autoDevopsHelpPath: 'foo', - }, - }).$mount(); - element.appendChild(this.component.$el); - }); - }); - }); - - describe('unsuccessfull request', () => { - const pipelinesErrorResponse = (request, next) => { - next(request.respondWith(JSON.stringify([]), { - status: 500, - })); - }; - - beforeEach(function () { - Vue.http.interceptors.push(pipelinesErrorResponse); - - this.component = new PipelinesTable({ - propsData: { - endpoint: 'endpoint', + vm = mountComponent(PipelinesTable, { + endpoint: 'endpoint.json', helpPagePath: 'foo', emptyStateSvgPath: 'foo', errorStateSvgPath: 'foo', autoDevopsHelpPath: 'foo', - }, - }).$mount(); + }); + + element.appendChild(vm.$el); + }); }); + }); - afterEach(function () { - Vue.http.interceptors = _.without( - Vue.http.interceptors, pipelinesErrorResponse, - ); - this.component.$destroy(); + describe('unsuccessfull request', () => { + beforeEach(() => { + mock.onGet('endpoint.json').reply(500, []); + + vm = mountComponent(PipelinesTable, { + endpoint: 'endpoint.json', + helpPagePath: 'foo', + emptyStateSvgPath: 'foo', + errorStateSvgPath: 'foo', + autoDevopsHelpPath: 'foo', + }); }); it('should render error state', function (done) { setTimeout(() => { - expect(this.component.$el.querySelector('.js-pipelines-error-state')).toBeDefined(); - expect(this.component.$el.querySelector('.realtime-loading')).toBe(null); - expect(this.component.$el.querySelector('.js-empty-state')).toBe(null); - expect(this.component.$el.querySelector('.ci-table')).toBe(null); + expect(vm.$el.querySelector('.js-pipelines-error-state')).toBeDefined(); + expect(vm.$el.querySelector('.realtime-loading')).toBe(null); + expect(vm.$el.querySelector('.js-empty-state')).toBe(null); + expect(vm.$el.querySelector('.ci-table')).toBe(null); done(); }, 0); }); diff --git a/spec/javascripts/droplab/constants_spec.js b/spec/javascripts/droplab/constants_spec.js index b9d28db74cc..23b69defec6 100644 --- a/spec/javascripts/droplab/constants_spec.js +++ b/spec/javascripts/droplab/constants_spec.js @@ -1,39 +1,37 @@ -/* eslint-disable */ - import * as constants from '~/droplab/constants'; -describe('constants', function () { - describe('DATA_TRIGGER', function () { +describe('constants', function() { + describe('DATA_TRIGGER', function() { it('should be `data-dropdown-trigger`', function() { expect(constants.DATA_TRIGGER).toBe('data-dropdown-trigger'); }); }); - describe('DATA_DROPDOWN', function () { + describe('DATA_DROPDOWN', function() { it('should be 
`data-dropdown`', function() { expect(constants.DATA_DROPDOWN).toBe('data-dropdown'); }); }); - describe('SELECTED_CLASS', function () { + describe('SELECTED_CLASS', function() { it('should be `droplab-item-selected`', function() { expect(constants.SELECTED_CLASS).toBe('droplab-item-selected'); }); }); - describe('ACTIVE_CLASS', function () { + describe('ACTIVE_CLASS', function() { it('should be `droplab-item-active`', function() { expect(constants.ACTIVE_CLASS).toBe('droplab-item-active'); }); }); - describe('TEMPLATE_REGEX', function () { + describe('TEMPLATE_REGEX', function() { it('should be a handlebars templating syntax regex', function() { expect(constants.TEMPLATE_REGEX).toEqual(/\{\{(.+?)\}\}/g); }); }); - describe('IGNORE_CLASS', function () { + describe('IGNORE_CLASS', function() { it('should be `droplab-item-ignore`', function() { expect(constants.IGNORE_CLASS).toBe('droplab-item-ignore'); }); diff --git a/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js b/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js index 480c138b9db..2ab6a0077b5 100644 --- a/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js +++ b/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js @@ -3,12 +3,11 @@ import MockAdapter from 'axios-mock-adapter'; import axios from '~/lib/utils/axios_utils'; import { getSelector, - togglePopover, dismiss, - mouseleave, - mouseenter, inserted, } from '~/feature_highlight/feature_highlight_helper'; +import { togglePopover } from '~/shared/popover'; + import getSetTimeoutPromise from 'spec/helpers/set_timeout_promise_helper'; describe('feature highlight helper', () => { @@ -19,110 +18,6 @@ describe('feature highlight helper', () => { }); }); - describe('togglePopover', () => { - describe('togglePopover(true)', () => { - it('returns true when popover is shown', () => { - const context = { - hasClass: () => false, - popover: () => {}, - toggleClass: () => {}, - }; - - expect(togglePopover.call(context, true)).toEqual(true); - }); - - it('returns false when popover is already shown', () => { - const context = { - hasClass: () => true, - }; - - expect(togglePopover.call(context, true)).toEqual(false); - }); - - it('shows popover', (done) => { - const context = { - hasClass: () => false, - popover: () => {}, - toggleClass: () => {}, - }; - - spyOn(context, 'popover').and.callFake((method) => { - expect(method).toEqual('show'); - done(); - }); - - togglePopover.call(context, true); - }); - - it('adds disable-animation and js-popover-show class', (done) => { - const context = { - hasClass: () => false, - popover: () => {}, - toggleClass: () => {}, - }; - - spyOn(context, 'toggleClass').and.callFake((classNames, show) => { - expect(classNames).toEqual('disable-animation js-popover-show'); - expect(show).toEqual(true); - done(); - }); - - togglePopover.call(context, true); - }); - }); - - describe('togglePopover(false)', () => { - it('returns true when popover is hidden', () => { - const context = { - hasClass: () => true, - popover: () => {}, - toggleClass: () => {}, - }; - - expect(togglePopover.call(context, false)).toEqual(true); - }); - - it('returns false when popover is already hidden', () => { - const context = { - hasClass: () => false, - }; - - expect(togglePopover.call(context, false)).toEqual(false); - }); - - it('hides popover', (done) => { - const context = { - hasClass: () => true, - popover: () => {}, - toggleClass: () => {}, - }; - - spyOn(context, 'popover').and.callFake((method) => { - 
expect(method).toEqual('hide'); - done(); - }); - - togglePopover.call(context, false); - }); - - it('removes disable-animation and js-popover-show class', (done) => { - const context = { - hasClass: () => true, - popover: () => {}, - toggleClass: () => {}, - }; - - spyOn(context, 'toggleClass').and.callFake((classNames, show) => { - expect(classNames).toEqual('disable-animation js-popover-show'); - expect(show).toEqual(false); - done(); - }); - - togglePopover.call(context, false); - }); - }); - }); - describe('dismiss', () => { let mock; const context = { @@ -163,56 +58,6 @@ describe('feature highlight helper', () => { }); }); - describe('mouseleave', () => { - it('calls hide popover if .popover:hover is false', () => { - const fakeJquery = { - length: 0, - }; - - spyOn($.fn, 'init').and.callFake(selector => (selector === '.popover:hover' ? fakeJquery : $.fn)); - spyOn(togglePopover, 'call'); - mouseleave(); - expect(togglePopover.call).toHaveBeenCalledWith(jasmine.any(Object), false); - }); - - it('does not call hide popover if .popover:hover is true', () => { - const fakeJquery = { - length: 1, - }; - - spyOn($.fn, 'init').and.callFake(selector => (selector === '.popover:hover' ? fakeJquery : $.fn)); - spyOn(togglePopover, 'call'); - mouseleave(); - expect(togglePopover.call).not.toHaveBeenCalledWith(false); - }); - }); - - describe('mouseenter', () => { - const context = {}; - - it('shows popover', () => { - spyOn(togglePopover, 'call').and.returnValue(false); - mouseenter.call(context); - expect(togglePopover.call).toHaveBeenCalledWith(jasmine.any(Object), true); - }); - - it('registers mouseleave event if popover is showed', (done) => { - spyOn(togglePopover, 'call').and.returnValue(true); - spyOn($.fn, 'on').and.callFake((eventName) => { - expect(eventName).toEqual('mouseleave'); - done(); - }); - mouseenter.call(context); - }); - - it('does not register mouseleave event if popover is not showed', () => { - spyOn(togglePopover, 'call').and.returnValue(false); - const spy = spyOn($.fn, 'on').and.callFake(() => {}); - mouseenter.call(context); - expect(spy).not.toHaveBeenCalled(); - }); - }); - describe('inserted', () => { it('registers click event callback', (done) => { const context = { diff --git a/spec/javascripts/feature_highlight/feature_highlight_spec.js b/spec/javascripts/feature_highlight/feature_highlight_spec.js index d2dd39d49d1..ec46d4f905a 100644 --- a/spec/javascripts/feature_highlight/feature_highlight_spec.js +++ b/spec/javascripts/feature_highlight/feature_highlight_spec.js @@ -1,6 +1,6 @@ import $ from 'jquery'; -import * as featureHighlightHelper from '~/feature_highlight/feature_highlight_helper'; import * as featureHighlight from '~/feature_highlight/feature_highlight'; +import * as popover from '~/shared/popover'; import axios from '~/lib/utils/axios_utils'; import MockAdapter from 'axios-mock-adapter'; @@ -29,7 +29,6 @@ describe('feature highlight', () => { mock = new MockAdapter(axios); mock.onGet('/test').reply(200); spyOn(window, 'addEventListener'); - spyOn(window, 'removeEventListener'); featureHighlight.setupFeatureHighlightPopover('test', 0); }); @@ -45,14 +44,14 @@ describe('feature highlight', () => { }); it('setup mouseenter', () => { - const toggleSpy = spyOn(featureHighlightHelper.togglePopover, 'call'); + const toggleSpy = spyOn(popover.togglePopover, 'call'); $(selector).trigger('mouseenter'); expect(toggleSpy).toHaveBeenCalledWith(jasmine.any(Object), true); }); it('setup debounced mouseleave', (done) => { - const toggleSpy = 
spyOn(featureHighlightHelper.togglePopover, 'call'); + const toggleSpy = spyOn(popover.togglePopover, 'call'); $(selector).trigger('mouseleave'); // Even though we've set the debounce to 0ms, setTimeout is needed for the debounce @@ -64,12 +63,7 @@ describe('feature highlight', () => { it('setup show.bs.popover', () => { $(selector).trigger('show.bs.popover'); - expect(window.addEventListener).toHaveBeenCalledWith('scroll', jasmine.any(Function)); - }); - - it('setup hide.bs.popover', () => { - $(selector).trigger('hide.bs.popover'); - expect(window.removeEventListener).toHaveBeenCalledWith('scroll', jasmine.any(Function)); + expect(window.addEventListener).toHaveBeenCalledWith('scroll', jasmine.any(Function), { once: true }); }); it('removes disabled attribute', () => { @@ -85,7 +79,7 @@ describe('feature highlight', () => { it('toggles when clicked', () => { $(selector).trigger('mouseenter'); const popoverId = $(selector).attr('aria-describedby'); - const toggleSpy = spyOn(featureHighlightHelper.togglePopover, 'call'); + const toggleSpy = spyOn(popover.togglePopover, 'call'); $(`#${popoverId} .dismiss-feature-highlight`).click(); diff --git a/spec/javascripts/fixtures/gl_dropdown.html.haml b/spec/javascripts/fixtures/gl_dropdown.html.haml index a20390c08ee..43d57c2c4dc 100644 --- a/spec/javascripts/fixtures/gl_dropdown.html.haml +++ b/spec/javascripts/fixtures/gl_dropdown.html.haml @@ -1,7 +1,8 @@ %div .dropdown.inline %button#js-project-dropdown.dropdown-menu-toggle{type: 'button', data: {toggle: 'dropdown'}} - Projects + .dropdown-toggle-text + Projects %i.fa.fa-chevron-down.dropdown-toggle-caret.js-projects-dropdown-toggle .dropdown-menu.dropdown-select.dropdown-menu-selectable .dropdown-title diff --git a/spec/javascripts/fixtures/linked_tabs.html.haml b/spec/javascripts/fixtures/linked_tabs.html.haml index 93c0cf97ff0..c38fe8b1f25 100644 --- a/spec/javascripts/fixtures/linked_tabs.html.haml +++ b/spec/javascripts/fixtures/linked_tabs.html.haml @@ -1,4 +1,4 @@ -%ul.nav.nav-tabs.linked-tabs +%ul.nav-links.new-session-tabs.linked-tabs %li %a{ href: 'foo/bar/1', data: { target: 'div#tab1', action: 'tab1', toggle: 'tab' } } Tab 1 diff --git a/spec/javascripts/fixtures/one_white_pixel.png b/spec/javascripts/fixtures/one_white_pixel.png Binary files differnew file mode 100644 index 00000000000..073fcf40a18 --- /dev/null +++ b/spec/javascripts/fixtures/one_white_pixel.png diff --git a/spec/javascripts/fixtures/projects.rb b/spec/javascripts/fixtures/projects.rb index b344b389241..e8865b04874 100644 --- a/spec/javascripts/fixtures/projects.rb +++ b/spec/javascripts/fixtures/projects.rb @@ -17,8 +17,6 @@ describe 'Projects (JavaScript fixtures)', type: :controller do end before do - # EE-specific start - # EE specific end project.add_master(admin) sign_in(admin) end diff --git a/spec/javascripts/fixtures/signin_tabs.html.haml b/spec/javascripts/fixtures/signin_tabs.html.haml index 12b8d423cbe..2e00fe7865e 100644 --- a/spec/javascripts/fixtures/signin_tabs.html.haml +++ b/spec/javascripts/fixtures/signin_tabs.html.haml @@ -1,5 +1,5 @@ -%ul.nav-tabs +%ul.nav-links.new-session-tabs + %li.active + %a{ href: '#ldap' } LDAP %li - %a.active{ id: 'standard', href: '#standard'} Standard - %li - %a{ id: 'ldap', href: '#ldap'} Ldap + %a{ href: '#login-pane'} Standard diff --git a/spec/javascripts/gfm_auto_complete_spec.js b/spec/javascripts/gfm_auto_complete_spec.js index dc0a5bc275c..1cb20a1e7ff 100644 --- a/spec/javascripts/gfm_auto_complete_spec.js +++ b/spec/javascripts/gfm_auto_complete_spec.js @@ 
-81,13 +81,21 @@ describe('GfmAutoComplete', function () { }); it('should quote if value contains any non-alphanumeric characters', () => { - expect(beforeInsert(atwhoInstance, '~label-20')).toBe('~"label-20"'); + expect(beforeInsert(atwhoInstance, '~label-20')).toBe('~"label\\-20"'); expect(beforeInsert(atwhoInstance, '~label 20')).toBe('~"label 20"'); }); it('should quote integer labels', () => { expect(beforeInsert(atwhoInstance, '~1234')).toBe('~"1234"'); }); + + it('should escape Markdown emphasis characters, except in the first character', () => { + expect(beforeInsert(atwhoInstance, '@_group')).toEqual('@\\_group'); + expect(beforeInsert(atwhoInstance, '~_bug')).toEqual('~\\_bug'); + expect(beforeInsert(atwhoInstance, '~a `bug`')).toEqual('~"a \\`bug\\`"'); + expect(beforeInsert(atwhoInstance, '~a ~bug')).toEqual('~"a \\~bug"'); + expect(beforeInsert(atwhoInstance, '~a **bug')).toEqual('~"a \\*\\*bug"'); + }); }); describe('DefaultOptions.matcher', function () { diff --git a/spec/javascripts/gl_dropdown_spec.js b/spec/javascripts/gl_dropdown_spec.js index 0e4a7017406..5393502196e 100644 --- a/spec/javascripts/gl_dropdown_spec.js +++ b/spec/javascripts/gl_dropdown_spec.js @@ -256,4 +256,29 @@ describe('glDropdown', function describeDropdown() { }); }); }); + + it('should keep selected item after selecting a second time', () => { + const options = { + isSelectable(item, $el) { + return !$el.hasClass('is-active'); + }, + toggleLabel(item) { + return item && item.id; + }, + }; + initDropDown.call(this, false, false, options); + const $item = $(`${ITEM_SELECTOR}:first() a`, this.$dropdownMenuElement); + + // select item the first time + this.dropdownButtonElement.click(); + $item.click(); + expect($item).toHaveClass('is-active'); + // select item the second time + this.dropdownButtonElement.click(); + $item.click(); + expect($item).toHaveClass('is-active'); + + expect($('.dropdown-toggle-text')).toHaveText(this.projectsData[0].id.toString()); + }); }); + diff --git a/spec/javascripts/helpers/vue_component_helper.js b/spec/javascripts/helpers/vue_component_helper.js new file mode 100644 index 00000000000..e0fe18e5560 --- /dev/null +++ b/spec/javascripts/helpers/vue_component_helper.js @@ -0,0 +1,18 @@ +/** + * Replaces line break with an empty space + * @param {*} data + */ +export const removeBreakLine = data => data.replace(/\r?\n|\r/g, ' '); + +/** + * Removes line breaks, spaces and trims the given text + * @param {String} str + * @returns {String} + */ +export const trimText = str => + str + .replace(/\r?\n|\r/g, '') + .replace(/\s\s+/g, ' ') + .trim(); + +export const removeWhitespace = str => str.replace(/\s\s+/g, ' '); diff --git a/spec/javascripts/helpers/vue_mount_component_helper.js b/spec/javascripts/helpers/vue_mount_component_helper.js index 34acdfbfba9..effacbcff4e 100644 --- a/spec/javascripts/helpers/vue_mount_component_helper.js +++ b/spec/javascripts/helpers/vue_mount_component_helper.js @@ -3,6 +3,12 @@ export const createComponentWithStore = (Component, store, propsData = {}) => ne propsData, }); +export const mountComponentWithStore = (Component, { el, props, store }) => + new Component({ + store, + propsData: props || { }, + }).$mount(el); + export default (Component, props = {}, el = null) => new Component({ propsData: props, }).$mount(el); diff --git a/spec/javascripts/helpers/vuex_action_helper.js b/spec/javascripts/helpers/vuex_action_helper.js index 2d386fe1da5..83f29d1b0c2 100644 --- a/spec/javascripts/helpers/vuex_action_helper.js +++ 
b/spec/javascripts/helpers/vuex_action_helper.js @@ -1,37 +1,71 @@ -/* eslint-disable */ - /** - * helper for testing action with expected mutations + * helper for testing action with expected mutations inspired in * https://vuex.vuejs.org/en/testing.html + * + * @example + * testAction( + * actions.actionName, // action + * { }, // mocked response + * state, // state + * [ + * { type: types.MUTATION} + * { type: types.MUTATION_1, payload: {}} + * ], // mutations + * [ + * { type: 'actionName', payload: {}}, + * { type: 'actionName1', payload: {}} + * ] //actions + * done, + * ); */ -export default (action, payload, state, expectedMutations, done) => { - let count = 0; +export default (action, payload, state, expectedMutations, expectedActions, done) => { + let mutationsCount = 0; + let actionsCount = 0; // mock commit - const commit = (type, payload) => { - const mutation = expectedMutations[count]; - - try { - expect(mutation.type).to.equal(type); - if (payload) { - expect(mutation.payload).to.deep.equal(payload); - } - } catch (error) { - done(error); + const commit = (type, mutationPayload) => { + const mutation = expectedMutations[mutationsCount]; + + expect(mutation.type).toEqual(type); + + if (mutation.payload) { + expect(mutation.payload).toEqual(mutationPayload); } - count++; - if (count >= expectedMutations.length) { + mutationsCount += 1; + if (mutationsCount >= expectedMutations.length) { + done(); + } + }; + + // mock dispatch + const dispatch = (type, actionPayload) => { + const actionExpected = expectedActions[actionsCount]; + + expect(actionExpected.type).toEqual(type); + + if (actionExpected.payload) { + expect(actionExpected.payload).toEqual(actionPayload); + } + + actionsCount += 1; + if (actionsCount >= expectedActions.length) { done(); } }; // call the action with mocked store and arguments - action({ commit, state }, payload); + action({ commit, state, dispatch }, payload); // check if no mutations should have been dispatched if (expectedMutations.length === 0) { - expect(count).to.equal(0); + expect(mutationsCount).toEqual(0); + done(); + } + + // check if no mutations should have been dispatched + if (expectedActions.length === 0) { + expect(actionsCount).toEqual(0); done(); } }; diff --git a/spec/javascripts/ide/components/changed_file_icon_spec.js b/spec/javascripts/ide/components/changed_file_icon_spec.js new file mode 100644 index 00000000000..541864e912e --- /dev/null +++ b/spec/javascripts/ide/components/changed_file_icon_spec.js @@ -0,0 +1,46 @@ +import Vue from 'vue'; +import changedFileIcon from '~/ide/components/changed_file_icon.vue'; +import createComponent from 'spec/helpers/vue_mount_component_helper'; + +describe('IDE changed file icon', () => { + let vm; + + beforeEach(() => { + const component = Vue.extend(changedFileIcon); + + vm = createComponent(component, { + file: { + tempFile: false, + changed: true, + }, + }); + }); + + afterEach(() => { + vm.$destroy(); + }); + + describe('changedIcon', () => { + it('equals file-modified when not a temp file and has changes', () => { + expect(vm.changedIcon).toBe('file-modified'); + }); + + it('equals file-addition when a temp file', () => { + vm.file.tempFile = true; + + expect(vm.changedIcon).toBe('file-addition'); + }); + }); + + describe('changedIconClass', () => { + it('includes multi-file-modified when not a temp file', () => { + expect(vm.changedIconClass).toContain('multi-file-modified'); + }); + + it('includes multi-file-addition when a temp file', () => { + vm.file.tempFile = true; + + 
expect(vm.changedIconClass).toContain('multi-file-addition'); + }); + }); +}); diff --git a/spec/javascripts/ide/components/commit_sidebar/actions_spec.js b/spec/javascripts/ide/components/commit_sidebar/actions_spec.js new file mode 100644 index 00000000000..144e78d14b5 --- /dev/null +++ b/spec/javascripts/ide/components/commit_sidebar/actions_spec.js @@ -0,0 +1,35 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import commitActions from '~/ide/components/commit_sidebar/actions.vue'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import { resetStore } from 'spec/ide/helpers'; + +describe('IDE commit sidebar actions', () => { + let vm; + + beforeEach(done => { + const Component = Vue.extend(commitActions); + + vm = createComponentWithStore(Component, store); + + vm.$store.state.currentBranchId = 'master'; + + vm.$mount(); + + Vue.nextTick(done); + }); + + afterEach(() => { + vm.$destroy(); + + resetStore(vm.$store); + }); + + it('renders 3 groups', () => { + expect(vm.$el.querySelectorAll('input[type="radio"]').length).toBe(3); + }); + + it('renders current branch text', () => { + expect(vm.$el.textContent).toContain('Commit to master branch'); + }); +}); diff --git a/spec/javascripts/ide/components/commit_sidebar/empty_state_spec.js b/spec/javascripts/ide/components/commit_sidebar/empty_state_spec.js new file mode 100644 index 00000000000..b80d08de7b1 --- /dev/null +++ b/spec/javascripts/ide/components/commit_sidebar/empty_state_spec.js @@ -0,0 +1,95 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import emptyState from '~/ide/components/commit_sidebar/empty_state.vue'; +import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper'; +import { resetStore } from '../../helpers'; + +describe('IDE commit panel empty state', () => { + let vm; + + beforeEach(() => { + const Component = Vue.extend(emptyState); + + vm = createComponentWithStore(Component, store, { + noChangesStateSvgPath: 'no-changes', + committedStateSvgPath: 'committed-state', + }); + + vm.$mount(); + }); + + afterEach(() => { + vm.$destroy(); + + resetStore(vm.$store); + }); + + describe('statusSvg', () => { + it('uses noChangesStateSvgPath when commit message is empty', () => { + expect(vm.statusSvg).toBe('no-changes'); + expect(vm.$el.querySelector('img').getAttribute('src')).toBe( + 'no-changes', + ); + }); + + it('uses committedStateSvgPath when commit message exists', done => { + vm.$store.state.lastCommitMsg = 'testing'; + + Vue.nextTick(() => { + expect(vm.statusSvg).toBe('committed-state'); + expect(vm.$el.querySelector('img').getAttribute('src')).toBe( + 'committed-state', + ); + + done(); + }); + }); + }); + + it('renders no changes text when last commit message is empty', () => { + expect(vm.$el.textContent).toContain('No changes'); + }); + + it('renders last commit message when it exists', done => { + vm.$store.state.lastCommitMsg = 'testing commit message'; + + Vue.nextTick(() => { + expect(vm.$el.textContent).toContain('testing commit message'); + + done(); + }); + }); + + describe('toggle button', () => { + it('calls store action', () => { + spyOn(vm, 'toggleRightPanelCollapsed'); + + vm.$el.querySelector('.multi-file-commit-panel-collapse-btn').click(); + + expect(vm.toggleRightPanelCollapsed).toHaveBeenCalled(); + }); + + it('renders collapsed class', done => { + vm.$el.querySelector('.multi-file-commit-panel-collapse-btn').click(); + + Vue.nextTick(() => { + 
expect(vm.$el.querySelector('.is-collapsed')).not.toBeNull(); + + done(); + }); + }); + }); + + describe('collapsed state', () => { + beforeEach(done => { + vm.$store.state.rightPanelCollapsed = true; + + Vue.nextTick(done); + }); + + it('does not render text & svg', () => { + expect(vm.$el.querySelector('img')).toBeNull(); + expect(vm.$el.textContent).not.toContain('No changes'); + }); + }); +}); diff --git a/spec/javascripts/ide/components/commit_sidebar/list_collapsed_spec.js b/spec/javascripts/ide/components/commit_sidebar/list_collapsed_spec.js new file mode 100644 index 00000000000..9af3c15a4e3 --- /dev/null +++ b/spec/javascripts/ide/components/commit_sidebar/list_collapsed_spec.js @@ -0,0 +1,72 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import listCollapsed from '~/ide/components/commit_sidebar/list_collapsed.vue'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import { file } from '../../helpers'; +import { removeWhitespace } from '../../../helpers/vue_component_helper'; + +describe('Multi-file editor commit sidebar list collapsed', () => { + let vm; + + beforeEach(() => { + const Component = Vue.extend(listCollapsed); + + vm = createComponentWithStore(Component, store, { + files: [ + { + ...file('file1'), + tempFile: true, + }, + file('file2'), + ], + iconName: 'staged', + title: 'Staged', + }); + + vm.$mount(); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('renders added & modified files count', () => { + expect(removeWhitespace(vm.$el.textContent).trim()).toBe('1 1'); + }); + + describe('addedFilesLength', () => { + it('returns an length of temp files', () => { + expect(vm.addedFilesLength).toBe(1); + }); + }); + + describe('modifiedFilesLength', () => { + it('returns an length of modified files', () => { + expect(vm.modifiedFilesLength).toBe(1); + }); + }); + + describe('addedFilesIconClass', () => { + it('includes multi-file-addition when addedFiles is not empty', () => { + expect(vm.addedFilesIconClass).toContain('multi-file-addition'); + }); + + it('excludes multi-file-addition when addedFiles is empty', () => { + vm.files = []; + + expect(vm.addedFilesIconClass).not.toContain('multi-file-addition'); + }); + }); + + describe('modifiedFilesClass', () => { + it('includes multi-file-modified when addedFiles is not empty', () => { + expect(vm.modifiedFilesClass).toContain('multi-file-modified'); + }); + + it('excludes multi-file-modified when addedFiles is empty', () => { + vm.files = []; + + expect(vm.modifiedFilesClass).not.toContain('multi-file-modified'); + }); + }); +}); diff --git a/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js b/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js new file mode 100644 index 00000000000..cc7e0a3f26d --- /dev/null +++ b/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js @@ -0,0 +1,92 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import listItem from '~/ide/components/commit_sidebar/list_item.vue'; +import router from '~/ide/ide_router'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import { file, resetStore } from '../../helpers'; + +describe('Multi-file editor commit sidebar list item', () => { + let vm; + let f; + + beforeEach(() => { + const Component = Vue.extend(listItem); + + f = file('test-file'); + + store.state.entries[f.path] = f; + + vm = createComponentWithStore(Component, store, { + file: f, + actionComponent: 'stage-button', + }).$mount(); + }); + + 
afterEach(() => {
+    vm.$destroy();
+
+    resetStore(store);
+  });
+
+  it('renders file path', () => {
+    expect(vm.$el.querySelector('.multi-file-commit-list-path').textContent.trim()).toBe(f.path);
+  });
+
+  it('renders action button', () => {
+    expect(vm.$el.querySelector('.multi-file-discard-btn')).not.toBeNull();
+  });
+
+  it('opens a closed file in the editor when clicking the file path', done => {
+    spyOn(vm, 'openPendingTab').and.callThrough();
+    spyOn(router, 'push');
+
+    vm.$el.querySelector('.multi-file-commit-list-path').click();
+
+    setTimeout(() => {
+      expect(vm.openPendingTab).toHaveBeenCalled();
+      expect(router.push).toHaveBeenCalled();
+
+      done();
+    });
+  });
+
+  it('calls updateViewer with diff when clicking file', done => {
+    spyOn(vm, 'openFileInEditor').and.callThrough();
+    spyOn(vm, 'updateViewer').and.callThrough();
+    spyOn(router, 'push');
+
+    vm.$el.querySelector('.multi-file-commit-list-path').click();
+
+    setTimeout(() => {
+      expect(vm.updateViewer).toHaveBeenCalledWith('diff');
+
+      done();
+    });
+  });
+
+  describe('computed', () => {
+    describe('iconName', () => {
+      it('returns modified when not a tempFile', () => {
+        expect(vm.iconName).toBe('file-modified');
+      });
+
+      it('returns addition when a tempFile', () => {
+        f.tempFile = true;
+
+        expect(vm.iconName).toBe('file-addition');
+      });
+    });
+
+    describe('iconClass', () => {
+      it('returns modified when not a tempFile', () => {
+        expect(vm.iconClass).toContain('multi-file-modified');
+      });
+
+      it('returns addition when a tempFile', () => {
+        f.tempFile = true;
+
+        expect(vm.iconClass).toContain('multi-file-addition');
+      });
+    });
+  });
+});
diff --git a/spec/javascripts/ide/components/commit_sidebar/list_spec.js b/spec/javascripts/ide/components/commit_sidebar/list_spec.js
new file mode 100644
index 00000000000..62fc3f90ad1
--- /dev/null
+++ b/spec/javascripts/ide/components/commit_sidebar/list_spec.js
@@ -0,0 +1,93 @@
+import Vue from 'vue';
+import store from '~/ide/stores';
+import commitSidebarList from '~/ide/components/commit_sidebar/list.vue';
+import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
+import { file, resetStore } from '../../helpers';
+
+describe('Multi-file editor commit sidebar list', () => {
+  let vm;
+
+  beforeEach(() => {
+    const Component = Vue.extend(commitSidebarList);
+
+    vm = createComponentWithStore(Component, store, {
+      title: 'Staged',
+      fileList: [],
+      iconName: 'staged',
+      action: 'stageAllChanges',
+      actionBtnText: 'stage all',
+      itemActionComponent: 'stage-button',
+    });
+
+    vm.$store.state.rightPanelCollapsed = false;
+
+    vm.$mount();
+  });
+
+  afterEach(() => {
+    vm.$destroy();
+
+    resetStore(vm.$store);
+  });
+
+  describe('with a list of files', () => {
+    beforeEach(done => {
+      const f = file('file name');
+      f.changed = true;
+      vm.fileList.push(f);
+
+      Vue.nextTick(done);
+    });
+
+    it('renders list', () => {
+      expect(vm.$el.querySelectorAll('li').length).toBe(1);
+    });
+  });
+
+  describe('empty files array', () => {
+    it('renders no changes text when empty', () => {
+      expect(vm.$el.textContent).toContain('No changes');
+    });
+  });
+
+  describe('collapsed', () => {
+    beforeEach(done => {
+      vm.$store.state.rightPanelCollapsed = true;
+
+      Vue.nextTick(done);
+    });
+
+    it('hides list', () => {
+      expect(vm.$el.querySelector('.list-unstyled')).toBeNull();
+      expect(vm.$el.querySelector('.help-block')).toBeNull();
+    });
+  });
+
+  describe('with toggle', () => {
+    beforeEach(done => {
+      spyOn(vm, 'toggleRightPanelCollapsed');
+
+      vm.showToggle =
true; + + Vue.nextTick(done); + }); + + it('calls setPanelCollapsedStatus when clickin toggle', () => { + vm.$el.querySelector('.multi-file-commit-panel-collapse-btn').click(); + + expect(vm.toggleRightPanelCollapsed).toHaveBeenCalled(); + }); + }); + + describe('action button', () => { + beforeEach(() => { + spyOn(vm, 'stageAllChanges'); + }); + + it('calls store action when clicked', () => { + vm.$el.querySelector('.ide-staged-action-btn').click(); + + expect(vm.stageAllChanges).toHaveBeenCalled(); + }); + }); +}); diff --git a/spec/javascripts/ide/components/commit_sidebar/message_field_spec.js b/spec/javascripts/ide/components/commit_sidebar/message_field_spec.js new file mode 100644 index 00000000000..d62d58101d6 --- /dev/null +++ b/spec/javascripts/ide/components/commit_sidebar/message_field_spec.js @@ -0,0 +1,174 @@ +import Vue from 'vue'; +import CommitMessageField from '~/ide/components/commit_sidebar/message_field.vue'; +import createComponent from 'spec/helpers/vue_mount_component_helper'; + +describe('IDE commit message field', () => { + const Component = Vue.extend(CommitMessageField); + let vm; + + beforeEach(() => { + setFixtures('<div id="app"></div>'); + + vm = createComponent( + Component, + { + text: '', + }, + '#app', + ); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('adds is-focused class on focus', done => { + vm.$el.querySelector('textarea').focus(); + + vm.$nextTick(() => { + expect(vm.$el.querySelector('.is-focused')).not.toBeNull(); + + done(); + }); + }); + + it('removed is-focused class on blur', done => { + vm.$el.querySelector('textarea').focus(); + + vm + .$nextTick() + .then(() => { + expect(vm.$el.querySelector('.is-focused')).not.toBeNull(); + + vm.$el.querySelector('textarea').blur(); + + return vm.$nextTick(); + }) + .then(() => { + expect(vm.$el.querySelector('.is-focused')).toBeNull(); + + done(); + }) + .then(done) + .catch(done.fail); + }); + + it('emits input event on input', () => { + spyOn(vm, '$emit'); + + const textarea = vm.$el.querySelector('textarea'); + textarea.value = 'testing'; + + textarea.dispatchEvent(new Event('input')); + + expect(vm.$emit).toHaveBeenCalledWith('input', 'testing'); + }); + + describe('highlights', () => { + describe('subject line', () => { + it('does not highlight less than 50 characters', done => { + vm.text = 'text less than 50 chars'; + + vm + .$nextTick() + .then(() => { + expect(vm.$el.querySelector('.highlights span').textContent).toContain( + 'text less than 50 chars', + ); + expect(vm.$el.querySelector('mark').style.display).toBe('none'); + }) + .then(done) + .catch(done.fail); + }); + + it('highlights characters over 50 length', done => { + vm.text = + 'text less than 50 chars that should not highlighted. text more than 50 should be highlighted'; + + vm + .$nextTick() + .then(() => { + expect(vm.$el.querySelector('.highlights span').textContent).toContain( + 'text less than 50 chars that should not highlighte', + ); + expect(vm.$el.querySelector('mark').style.display).not.toBe('none'); + expect(vm.$el.querySelector('mark').textContent).toBe( + 'd. 
text more than 50 should be highlighted', + ); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('body text', () => { + it('does not highlight body text less than 72 characters', done => { + vm.text = 'subject line\nbody content'; + + vm + .$nextTick() + .then(() => { + expect(vm.$el.querySelectorAll('.highlights span').length).toBe(2); + expect(vm.$el.querySelectorAll('mark')[1].style.display).toBe('none'); + }) + .then(done) + .catch(done.fail); + }); + + it('highlights body text more than 72 characters', done => { + vm.text = + 'subject line\nbody content that will be highlighted when it is more than 72 characters in length'; + + vm + .$nextTick() + .then(() => { + expect(vm.$el.querySelectorAll('.highlights span').length).toBe(2); + expect(vm.$el.querySelectorAll('mark')[1].style.display).not.toBe('none'); + expect(vm.$el.querySelectorAll('mark')[1].textContent).toBe(' in length'); + }) + .then(done) + .catch(done.fail); + }); + + it('highlights body text & subject line', done => { + vm.text = + 'text less than 50 chars that should not highlighted\nbody content that will be highlighted when it is more than 72 characters in length'; + + vm + .$nextTick() + .then(() => { + expect(vm.$el.querySelectorAll('.highlights span').length).toBe(2); + expect(vm.$el.querySelectorAll('mark').length).toBe(2); + + expect(vm.$el.querySelectorAll('mark')[0].textContent).toContain('d'); + expect(vm.$el.querySelectorAll('mark')[1].textContent).toBe(' in length'); + }) + .then(done) + .catch(done.fail); + }); + }); + }); + + describe('scrolling textarea', () => { + it('updates transform of highlights', done => { + vm.text = 'subject line\n\n\n\n\n\n\n\n\n\n\nbody content'; + + vm + .$nextTick() + .then(() => { + vm.$el.querySelector('textarea').scrollTo(0, 50); + + vm.handleScroll(); + }) + .then(vm.$nextTick) + .then(() => { + expect(vm.scrollTop).toBe(50); + expect(vm.$el.querySelector('.highlights').style.transform).toBe( + 'translate3d(0px, -50px, 0px)', + ); + }) + .then(done) + .catch(done.fail); + }); + }); +}); diff --git a/spec/javascripts/ide/components/commit_sidebar/radio_group_spec.js b/spec/javascripts/ide/components/commit_sidebar/radio_group_spec.js new file mode 100644 index 00000000000..21bfe4be52f --- /dev/null +++ b/spec/javascripts/ide/components/commit_sidebar/radio_group_spec.js @@ -0,0 +1,117 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import radioGroup from '~/ide/components/commit_sidebar/radio_group.vue'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import { resetStore } from 'spec/ide/helpers'; + +describe('IDE commit sidebar radio group', () => { + let vm; + + beforeEach(done => { + const Component = Vue.extend(radioGroup); + + store.state.commit.commitAction = '2'; + + vm = createComponentWithStore(Component, store, { + value: '1', + label: 'test', + checked: true, + }); + + vm.$mount(); + + Vue.nextTick(done); + }); + + afterEach(() => { + vm.$destroy(); + + resetStore(vm.$store); + }); + + it('uses label if present', () => { + expect(vm.$el.textContent).toContain('test'); + }); + + it('uses slot if label is not present', done => { + vm.$destroy(); + + vm = new Vue({ + components: { + radioGroup, + }, + store, + template: ` + <radio-group + value="1" + > + Testing slot + </radio-group> + `, + }); + + vm.$mount(); + + Vue.nextTick(() => { + expect(vm.$el.textContent).toContain('Testing slot'); + + done(); + }); + }); + + it('updates store when changing radio button', done => { + 
vm.$el.querySelector('input').dispatchEvent(new Event('change')); + + Vue.nextTick(() => { + expect(store.state.commit.commitAction).toBe('1'); + + done(); + }); + }); + + describe('with input', () => { + beforeEach(done => { + vm.$destroy(); + + const Component = Vue.extend(radioGroup); + + store.state.commit.commitAction = '1'; + + vm = createComponentWithStore(Component, store, { + value: '1', + label: 'test', + checked: true, + showInput: true, + }); + + vm.$mount(); + + Vue.nextTick(done); + }); + + it('renders input box when commitAction matches value', () => { + expect(vm.$el.querySelector('.form-control')).not.toBeNull(); + }); + + it('hides input when commitAction does not match value', done => { + store.state.commit.commitAction = '2'; + + Vue.nextTick(() => { + expect(vm.$el.querySelector('.form-control')).toBeNull(); + done(); + }); + }); + + it('updates branch name in store on input', done => { + const input = vm.$el.querySelector('.form-control'); + input.value = 'testing-123'; + input.dispatchEvent(new Event('input')); + + Vue.nextTick(() => { + expect(store.state.commit.newBranchName).toBe('testing-123'); + + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/ide/components/commit_sidebar/stage_button_spec.js b/spec/javascripts/ide/components/commit_sidebar/stage_button_spec.js new file mode 100644 index 00000000000..6bf8710bda7 --- /dev/null +++ b/spec/javascripts/ide/components/commit_sidebar/stage_button_spec.js @@ -0,0 +1,46 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import stageButton from '~/ide/components/commit_sidebar/stage_button.vue'; +import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper'; +import { file, resetStore } from '../../helpers'; + +describe('IDE stage file button', () => { + let vm; + let f; + + beforeEach(() => { + const Component = Vue.extend(stageButton); + f = file(); + + vm = createComponentWithStore(Component, store, { + path: f.path, + }); + + spyOn(vm, 'stageChange'); + spyOn(vm, 'discardFileChanges'); + + vm.$mount(); + }); + + afterEach(() => { + vm.$destroy(); + + resetStore(vm.$store); + }); + + it('renders button to discard & stage', () => { + expect(vm.$el.querySelectorAll('.btn').length).toBe(2); + }); + + it('calls store with stage button', () => { + vm.$el.querySelectorAll('.btn')[0].click(); + + expect(vm.stageChange).toHaveBeenCalledWith(f.path); + }); + + it('calls store with discard button', () => { + vm.$el.querySelectorAll('.btn')[1].click(); + + expect(vm.discardFileChanges).toHaveBeenCalledWith(f.path); + }); +}); diff --git a/spec/javascripts/ide/components/commit_sidebar/unstage_button_spec.js b/spec/javascripts/ide/components/commit_sidebar/unstage_button_spec.js new file mode 100644 index 00000000000..917bbb9fb46 --- /dev/null +++ b/spec/javascripts/ide/components/commit_sidebar/unstage_button_spec.js @@ -0,0 +1,39 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import unstageButton from '~/ide/components/commit_sidebar/unstage_button.vue'; +import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper'; +import { file, resetStore } from '../../helpers'; + +describe('IDE unstage file button', () => { + let vm; + let f; + + beforeEach(() => { + const Component = Vue.extend(unstageButton); + f = file(); + + vm = createComponentWithStore(Component, store, { + path: f.path, + }); + + spyOn(vm, 'unstageChange'); + + vm.$mount(); + }); + + afterEach(() => { + vm.$destroy(); + + resetStore(vm.$store); + }); + + it('renders button to 
unstage', () => { + expect(vm.$el.querySelectorAll('.btn').length).toBe(1); + }); + + it('calls store with unstage button', () => { + vm.$el.querySelector('.btn').click(); + + expect(vm.unstageChange).toHaveBeenCalledWith(f.path); + }); +}); diff --git a/spec/javascripts/ide/components/ide_context_bar_spec.js b/spec/javascripts/ide/components/ide_context_bar_spec.js new file mode 100644 index 00000000000..e17b051f137 --- /dev/null +++ b/spec/javascripts/ide/components/ide_context_bar_spec.js @@ -0,0 +1,37 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import ideContextBar from '~/ide/components/ide_context_bar.vue'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; + +describe('Multi-file editor right context bar', () => { + let vm; + + beforeEach(() => { + const Component = Vue.extend(ideContextBar); + + vm = createComponentWithStore(Component, store, { + noChangesStateSvgPath: 'svg', + committedStateSvgPath: 'svg', + }); + + vm.$store.state.rightPanelCollapsed = false; + + vm.$mount(); + }); + + afterEach(() => { + vm.$destroy(); + }); + + describe('collapsed', () => { + beforeEach(done => { + vm.$store.state.rightPanelCollapsed = true; + + Vue.nextTick(done); + }); + + it('adds collapsed class', () => { + expect(vm.$el.querySelector('.is-collapsed')).not.toBeNull(); + }); + }); +}); diff --git a/spec/javascripts/ide/components/ide_external_links_spec.js b/spec/javascripts/ide/components/ide_external_links_spec.js new file mode 100644 index 00000000000..9f6cb459f3b --- /dev/null +++ b/spec/javascripts/ide/components/ide_external_links_spec.js @@ -0,0 +1,43 @@ +import Vue from 'vue'; +import ideExternalLinks from '~/ide/components/ide_external_links.vue'; +import createComponent from 'spec/helpers/vue_mount_component_helper'; + +describe('ide external links component', () => { + let vm; + let fakeReferrer; + let Component; + + const fakeProjectUrl = '/project/'; + + beforeEach(() => { + Component = Vue.extend(ideExternalLinks); + }); + + afterEach(() => { + vm.$destroy(); + }); + + describe('goBackUrl', () => { + it('renders the Go Back link with the referrer when present', () => { + fakeReferrer = '/example/README.md'; + spyOnProperty(document, 'referrer').and.returnValue(fakeReferrer); + + vm = createComponent(Component, { + projectUrl: fakeProjectUrl, + }).$mount(); + + expect(vm.goBackUrl).toEqual(fakeReferrer); + }); + + it('renders the Go Back link with the project url when referrer is not present', () => { + fakeReferrer = ''; + spyOnProperty(document, 'referrer').and.returnValue(fakeReferrer); + + vm = createComponent(Component, { + projectUrl: fakeProjectUrl, + }).$mount(); + + expect(vm.goBackUrl).toEqual(fakeProjectUrl); + }); + }); +}); diff --git a/spec/javascripts/ide/components/ide_file_buttons_spec.js b/spec/javascripts/ide/components/ide_file_buttons_spec.js new file mode 100644 index 00000000000..8ac8d1b2acf --- /dev/null +++ b/spec/javascripts/ide/components/ide_file_buttons_spec.js @@ -0,0 +1,61 @@ +import Vue from 'vue'; +import repoFileButtons from '~/ide/components/ide_file_buttons.vue'; +import createVueComponent from '../../helpers/vue_mount_component_helper'; +import { file } from '../helpers'; + +describe('RepoFileButtons', () => { + const activeFile = file(); + let vm; + + function createComponent() { + const RepoFileButtons = Vue.extend(repoFileButtons); + + activeFile.rawPath = 'test'; + activeFile.blamePath = 'test'; + activeFile.commitsPath = 'test'; + + return createVueComponent(RepoFileButtons, { + file: 
activeFile, + }); + } + + afterEach(() => { + vm.$destroy(); + }); + + it('renders Raw, Blame, History and Permalink', done => { + vm = createComponent(); + + vm.$nextTick(() => { + const raw = vm.$el.querySelector('.raw'); + const blame = vm.$el.querySelector('.blame'); + const history = vm.$el.querySelector('.history'); + + expect(raw.href).toMatch(`/${activeFile.rawPath}`); + expect(raw.getAttribute('data-original-title')).toEqual('Raw'); + expect(blame.href).toMatch(`/${activeFile.blamePath}`); + expect(blame.getAttribute('data-original-title')).toEqual('Blame'); + expect(history.href).toMatch(`/${activeFile.commitsPath}`); + expect(history.getAttribute('data-original-title')).toEqual('History'); + expect(vm.$el.querySelector('.permalink').getAttribute('data-original-title')).toEqual( + 'Permalink', + ); + + done(); + }); + }); + + it('renders Download', done => { + activeFile.binary = true; + vm = createComponent(); + + vm.$nextTick(() => { + const raw = vm.$el.querySelector('.raw'); + + expect(raw.href).toMatch(`/${activeFile.rawPath}`); + expect(raw.getAttribute('data-original-title')).toEqual('Download'); + + done(); + }); + }); +}); diff --git a/spec/javascripts/ide/components/ide_project_tree_spec.js b/spec/javascripts/ide/components/ide_project_tree_spec.js new file mode 100644 index 00000000000..657682cb39c --- /dev/null +++ b/spec/javascripts/ide/components/ide_project_tree_spec.js @@ -0,0 +1,39 @@ +import Vue from 'vue'; +import ProjectTree from '~/ide/components/ide_project_tree.vue'; +import createComponent from 'spec/helpers/vue_mount_component_helper'; + +describe('IDE project tree', () => { + const Component = Vue.extend(ProjectTree); + let vm; + + beforeEach(() => { + vm = createComponent(Component, { + project: { + id: 1, + name: 'test', + web_url: gl.TEST_HOST, + avatar_url: '', + branches: [], + }, + }); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('renders identicon when project has no avatar', () => { + expect(vm.$el.querySelector('.identicon')).not.toBeNull(); + }); + + it('renders avatar image if project has avatar', done => { + vm.project.avatar_url = gl.TEST_HOST; + + vm.$nextTick(() => { + expect(vm.$el.querySelector('.identicon')).toBeNull(); + expect(vm.$el.querySelector('img.avatar')).not.toBeNull(); + + done(); + }); + }); +}); diff --git a/spec/javascripts/ide/components/ide_repo_tree_spec.js b/spec/javascripts/ide/components/ide_repo_tree_spec.js new file mode 100644 index 00000000000..e0fbc90ca61 --- /dev/null +++ b/spec/javascripts/ide/components/ide_repo_tree_spec.js @@ -0,0 +1,43 @@ +import Vue from 'vue'; +import ideRepoTree from '~/ide/components/ide_repo_tree.vue'; +import createComponent from '../../helpers/vue_mount_component_helper'; +import { file } from '../helpers'; + +describe('IdeRepoTree', () => { + let vm; + let tree; + + beforeEach(() => { + const IdeRepoTree = Vue.extend(ideRepoTree); + + tree = { + tree: [file()], + loading: false, + }; + + vm = createComponent(IdeRepoTree, { + tree, + }); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('renders a sidebar', () => { + expect(vm.$el.querySelector('.loading-file')).toBeNull(); + expect(vm.$el.querySelector('.file')).not.toBeNull(); + }); + + it('renders 3 loading files if tree is loading', done => { + tree.loading = true; + + vm.$nextTick(() => { + expect( + vm.$el.querySelectorAll('.multi-file-loading-container').length, + ).toEqual(3); + + done(); + }); + }); +}); diff --git a/spec/javascripts/ide/components/ide_side_bar_spec.js 
b/spec/javascripts/ide/components/ide_side_bar_spec.js new file mode 100644 index 00000000000..699dae1ce2f --- /dev/null +++ b/spec/javascripts/ide/components/ide_side_bar_spec.js @@ -0,0 +1,42 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import ideSidebar from '~/ide/components/ide_side_bar.vue'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import { resetStore } from '../helpers'; + +describe('IdeSidebar', () => { + let vm; + + beforeEach(() => { + const Component = Vue.extend(ideSidebar); + + vm = createComponentWithStore(Component, store).$mount(); + }); + + afterEach(() => { + vm.$destroy(); + + resetStore(vm.$store); + }); + + it('renders a sidebar', () => { + expect( + vm.$el.querySelector('.multi-file-commit-panel-inner'), + ).not.toBeNull(); + }); + + it('renders loading icon component', done => { + vm.$store.state.loading = true; + + vm.$nextTick(() => { + expect( + vm.$el.querySelector('.multi-file-loading-container'), + ).not.toBeNull(); + expect( + vm.$el.querySelectorAll('.multi-file-loading-container').length, + ).toBe(3); + + done(); + }); + }); +}); diff --git a/spec/javascripts/ide/components/ide_spec.js b/spec/javascripts/ide/components/ide_spec.js new file mode 100644 index 00000000000..5bd890094cc --- /dev/null +++ b/spec/javascripts/ide/components/ide_spec.js @@ -0,0 +1,41 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import ide from '~/ide/components/ide.vue'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import { file, resetStore } from '../helpers'; + +describe('ide component', () => { + let vm; + + beforeEach(() => { + const Component = Vue.extend(ide); + + vm = createComponentWithStore(Component, store, { + emptyStateSvgPath: 'svg', + noChangesStateSvgPath: 'svg', + committedStateSvgPath: 'svg', + }).$mount(); + }); + + afterEach(() => { + vm.$destroy(); + + resetStore(vm.$store); + }); + + it('does not render panel right when no files open', () => { + expect(vm.$el.querySelector('.panel-right')).toBeNull(); + }); + + it('renders panel right when files are open', done => { + vm.$store.state.trees['abcproject/mybranch'] = { + tree: [file()], + }; + + Vue.nextTick(() => { + expect(vm.$el.querySelector('.panel-right')).toBeNull(); + + done(); + }); + }); +}); diff --git a/spec/javascripts/ide/components/new_dropdown/index_spec.js b/spec/javascripts/ide/components/new_dropdown/index_spec.js new file mode 100644 index 00000000000..e08abe7d849 --- /dev/null +++ b/spec/javascripts/ide/components/new_dropdown/index_spec.js @@ -0,0 +1,84 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import newDropdown from '~/ide/components/new_dropdown/index.vue'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import { resetStore } from '../../helpers'; + +describe('new dropdown component', () => { + let vm; + + beforeEach(() => { + const component = Vue.extend(newDropdown); + + vm = createComponentWithStore(component, store, { + branch: 'master', + path: '', + }); + + vm.$store.state.currentProjectId = 'abcproject'; + vm.$store.state.path = ''; + vm.$store.state.trees['abcproject/mybranch'] = { + tree: [], + }; + + vm.$mount(); + }); + + afterEach(() => { + vm.$destroy(); + + resetStore(vm.$store); + }); + + it('renders new file, upload and new directory links', () => { + expect(vm.$el.querySelectorAll('a')[0].textContent.trim()).toBe('New file'); + expect(vm.$el.querySelectorAll('a')[1].textContent.trim()).toBe( + 
'Upload file', + ); + expect(vm.$el.querySelectorAll('a')[2].textContent.trim()).toBe( + 'New directory', + ); + }); + + describe('createNewItem', () => { + it('sets modalType to blob when new file is clicked', () => { + vm.$el.querySelectorAll('a')[0].click(); + + expect(vm.modalType).toBe('blob'); + }); + + it('sets modalType to tree when new directory is clicked', () => { + vm.$el.querySelectorAll('a')[2].click(); + + expect(vm.modalType).toBe('tree'); + }); + + it('opens modal when link is clicked', done => { + vm.$el.querySelectorAll('a')[0].click(); + + Vue.nextTick(() => { + expect(vm.$el.querySelector('.modal')).not.toBeNull(); + + done(); + }); + }); + }); + + describe('hideModal', () => { + beforeAll(done => { + vm.openModal = true; + Vue.nextTick(done); + }); + + it('closes modal after toggling', done => { + vm.hideModal(); + + Vue.nextTick() + .then(() => { + expect(vm.$el.querySelector('.modal')).toBeNull(); + }) + .then(done) + .catch(done.fail); + }); + }); +}); diff --git a/spec/javascripts/ide/components/new_dropdown/modal_spec.js b/spec/javascripts/ide/components/new_dropdown/modal_spec.js new file mode 100644 index 00000000000..a6e1e5a0d35 --- /dev/null +++ b/spec/javascripts/ide/components/new_dropdown/modal_spec.js @@ -0,0 +1,82 @@ +import Vue from 'vue'; +import modal from '~/ide/components/new_dropdown/modal.vue'; +import createComponent from 'spec/helpers/vue_mount_component_helper'; + +describe('new file modal component', () => { + const Component = Vue.extend(modal); + let vm; + + afterEach(() => { + vm.$destroy(); + }); + + ['tree', 'blob'].forEach(type => { + describe(type, () => { + beforeEach(() => { + vm = createComponent(Component, { + type, + branchId: 'master', + path: '', + }); + + vm.entryName = 'testing'; + }); + + it(`sets modal title as ${type}`, () => { + const title = type === 'tree' ? 'directory' : 'file'; + + expect(vm.$el.querySelector('.modal-title').textContent.trim()).toBe( + `Create new ${title}`, + ); + }); + + it(`sets button label as ${type}`, () => { + const title = type === 'tree' ? 'directory' : 'file'; + + expect(vm.$el.querySelector('.btn-success').textContent.trim()).toBe( + `Create ${title}`, + ); + }); + + it(`sets form label as ${type}`, () => { + const title = type === 'tree' ? 
'Directory' : 'File'; + + expect(vm.$el.querySelector('.label-light').textContent.trim()).toBe( + `${title} name`, + ); + }); + + describe('createEntryInStore', () => { + it('$emits create', () => { + spyOn(vm, '$emit'); + + vm.createEntryInStore(); + + expect(vm.$emit).toHaveBeenCalledWith('create', { + branchId: 'master', + name: 'testing', + type, + }); + }); + }); + }); + }); + + it('focuses field on mount', () => { + document.body.innerHTML += '<div class="js-test"></div>'; + + vm = createComponent( + Component, + { + type: 'tree', + branchId: 'master', + path: '', + }, + '.js-test', + ); + + expect(document.activeElement).toBe(vm.$refs.fieldName); + + vm.$el.remove(); + }); +}); diff --git a/spec/javascripts/ide/components/new_dropdown/upload_spec.js b/spec/javascripts/ide/components/new_dropdown/upload_spec.js new file mode 100644 index 00000000000..2bc5d701601 --- /dev/null +++ b/spec/javascripts/ide/components/new_dropdown/upload_spec.js @@ -0,0 +1,87 @@ +import Vue from 'vue'; +import upload from '~/ide/components/new_dropdown/upload.vue'; +import createComponent from 'spec/helpers/vue_mount_component_helper'; + +describe('new dropdown upload', () => { + let vm; + + beforeEach(() => { + const Component = Vue.extend(upload); + + vm = createComponent(Component, { + branchId: 'master', + path: '', + }); + + vm.entryName = 'testing'; + + spyOn(vm, '$emit'); + }); + + afterEach(() => { + vm.$destroy(); + }); + + describe('readFile', () => { + beforeEach(() => { + spyOn(FileReader.prototype, 'readAsText'); + spyOn(FileReader.prototype, 'readAsDataURL'); + }); + + it('calls readAsText for text files', () => { + const file = { + type: 'text/html', + }; + + vm.readFile(file); + + expect(FileReader.prototype.readAsText).toHaveBeenCalledWith(file); + }); + + it('calls readAsDataURL for non-text files', () => { + const file = { + type: 'images/png', + }; + + vm.readFile(file); + + expect(FileReader.prototype.readAsDataURL).toHaveBeenCalledWith(file); + }); + }); + + describe('createFile', () => { + const target = { + result: 'content', + }; + const binaryTarget = { + result: 'base64,base64content', + }; + const file = { + name: 'file', + }; + + it('creates new file', () => { + vm.createFile(target, file, true); + + expect(vm.$emit).toHaveBeenCalledWith('create', { + name: file.name, + branchId: 'master', + type: 'blob', + content: target.result, + base64: false, + }); + }); + + it('splits content on base64 if binary', () => { + vm.createFile(binaryTarget, file, false); + + expect(vm.$emit).toHaveBeenCalledWith('create', { + name: file.name, + branchId: 'master', + type: 'blob', + content: binaryTarget.result.split('base64,')[1], + base64: true, + }); + }); + }); +}); diff --git a/spec/javascripts/ide/components/repo_commit_section_spec.js b/spec/javascripts/ide/components/repo_commit_section_spec.js new file mode 100644 index 00000000000..768f6e99bf1 --- /dev/null +++ b/spec/javascripts/ide/components/repo_commit_section_spec.js @@ -0,0 +1,264 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import service from '~/ide/services'; +import repoCommitSection from '~/ide/components/repo_commit_section.vue'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import getSetTimeoutPromise from 'spec/helpers/set_timeout_promise_helper'; +import { file, resetStore } from '../helpers'; + +describe('RepoCommitSection', () => { + let vm; + + function createComponent() { + const Component = Vue.extend(repoCommitSection); + + vm = 
createComponentWithStore(Component, store, { + noChangesStateSvgPath: 'svg', + committedStateSvgPath: 'commitsvg', + }); + + vm.$store.state.currentProjectId = 'abcproject'; + vm.$store.state.currentBranchId = 'master'; + vm.$store.state.projects.abcproject = { + web_url: '', + branches: { + master: { + workingReference: '1', + }, + }, + }; + + const files = [file('file1'), file('file2')].map(f => + Object.assign(f, { + type: 'blob', + }), + ); + + vm.$store.state.rightPanelCollapsed = false; + vm.$store.state.currentBranch = 'master'; + vm.$store.state.changedFiles = [...files]; + vm.$store.state.changedFiles.forEach(f => + Object.assign(f, { + changed: true, + content: 'changedFile testing', + }), + ); + + vm.$store.state.stagedFiles = [{ ...files[0] }, { ...files[1] }]; + vm.$store.state.stagedFiles.forEach(f => + Object.assign(f, { + changed: true, + content: 'testing', + }), + ); + + vm.$store.state.changedFiles.forEach(f => { + vm.$store.state.entries[f.path] = f; + }); + + return vm.$mount(); + } + + beforeEach(done => { + vm = createComponent(); + + spyOn(service, 'getTreeData').and.returnValue( + Promise.resolve({ + headers: { + 'page-title': 'test', + }, + json: () => + Promise.resolve({ + last_commit_path: 'last_commit_path', + parent_tree_url: 'parent_tree_url', + path: '/', + trees: [{ name: 'tree' }], + blobs: [{ name: 'blob' }], + submodules: [{ name: 'submodule' }], + }), + }), + ); + + Vue.nextTick(done); + }); + + afterEach(() => { + vm.$destroy(); + + resetStore(vm.$store); + }); + + describe('empty Stage', () => { + it('renders no changes text', () => { + resetStore(vm.$store); + const Component = Vue.extend(repoCommitSection); + + vm = createComponentWithStore(Component, store, { + noChangesStateSvgPath: 'nochangessvg', + committedStateSvgPath: 'svg', + }).$mount(); + + expect( + vm.$el.querySelector('.js-empty-state').textContent.trim(), + ).toContain('No changes'); + expect( + vm.$el.querySelector('.js-empty-state img').getAttribute('src'), + ).toBe('nochangessvg'); + }); + }); + + it('renders a commit section', () => { + const changedFileElements = [ + ...vm.$el.querySelectorAll('.multi-file-commit-list li'), + ]; + const submitCommit = vm.$el.querySelector('form .btn'); + const allFiles = vm.$store.state.changedFiles.concat( + vm.$store.state.stagedFiles, + ); + + expect(vm.$el.querySelector('.multi-file-commit-form')).not.toBeNull(); + expect(changedFileElements.length).toEqual(4); + + changedFileElements.forEach((changedFile, i) => { + expect(changedFile.textContent.trim()).toContain(allFiles[i].path); + }); + + expect(submitCommit.disabled).toBeTruthy(); + expect(submitCommit.querySelector('.fa-spinner.fa-spin')).toBeNull(); + }); + + it('adds changed files into staged files', done => { + vm.$el.querySelector('.ide-staged-action-btn').click(); + + Vue.nextTick(() => { + expect( + vm.$el.querySelector('.ide-commit-list-container').textContent, + ).toContain('No changes'); + + done(); + }); + }); + + it('stages a single file', done => { + vm.$el.querySelector('.multi-file-discard-btn .btn').click(); + + Vue.nextTick(() => { + expect( + vm.$el + .querySelector('.ide-commit-list-container') + .querySelectorAll('li').length, + ).toBe(1); + + done(); + }); + }); + + it('discards a single file', done => { + vm.$el.querySelectorAll('.multi-file-discard-btn .btn')[1].click(); + + Vue.nextTick(() => { + expect( + vm.$el.querySelector('.ide-commit-list-container').textContent, + ).not.toContain('file1'); + expect( + vm.$el + .querySelector('.ide-commit-list-container') + 
.querySelectorAll('li').length, + ).toBe(1); + + done(); + }); + }); + + it('removes all staged files', done => { + vm.$el.querySelectorAll('.ide-staged-action-btn')[1].click(); + + Vue.nextTick(() => { + expect( + vm.$el.querySelectorAll('.ide-commit-list-container')[1].textContent, + ).toContain('No changes'); + + done(); + }); + }); + + it('unstages a single file', done => { + vm.$el + .querySelectorAll('.multi-file-discard-btn')[2] + .querySelector('.btn') + .click(); + + Vue.nextTick(() => { + expect( + vm.$el + .querySelectorAll('.ide-commit-list-container')[1] + .querySelectorAll('li').length, + ).toBe(1); + + done(); + }); + }); + + it('updates commitMessage in store on input', done => { + const textarea = vm.$el.querySelector('textarea'); + + textarea.value = 'testing commit message'; + + textarea.dispatchEvent(new Event('input')); + + getSetTimeoutPromise() + .then(() => { + expect(vm.$store.state.commit.commitMessage).toBe( + 'testing commit message', + ); + }) + .then(done) + .catch(done.fail); + }); + + describe('discard draft button', () => { + it('hidden when commitMessage is empty', () => { + expect( + vm.$el.querySelector('.multi-file-commit-form .btn-default'), + ).toBeNull(); + }); + + it('resets commitMessage when clicking discard button', done => { + vm.$store.state.commit.commitMessage = 'testing commit message'; + + getSetTimeoutPromise() + .then(() => { + vm.$el.querySelector('.multi-file-commit-form .btn-default').click(); + }) + .then(Vue.nextTick) + .then(() => { + expect(vm.$store.state.commit.commitMessage).not.toBe( + 'testing commit message', + ); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('when submitting', () => { + beforeEach(() => { + spyOn(vm, 'commitChanges'); + }); + + it('calls commitChanges', done => { + vm.$store.state.commit.commitMessage = 'testing commit message'; + + getSetTimeoutPromise() + .then(() => { + vm.$el.querySelector('.multi-file-commit-form .btn-success').click(); + }) + .then(Vue.nextTick) + .then(() => { + expect(vm.commitChanges).toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + }); +}); diff --git a/spec/javascripts/ide/components/repo_editor_spec.js b/spec/javascripts/ide/components/repo_editor_spec.js new file mode 100644 index 00000000000..b06a6c62a1c --- /dev/null +++ b/spec/javascripts/ide/components/repo_editor_spec.js @@ -0,0 +1,298 @@ +import Vue from 'vue'; +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; +import store from '~/ide/stores'; +import repoEditor from '~/ide/components/repo_editor.vue'; +import monacoLoader from '~/ide/monaco_loader'; +import Editor from '~/ide/lib/editor'; +import { createComponentWithStore } from '../../helpers/vue_mount_component_helper'; +import setTimeoutPromise from '../../helpers/set_timeout_promise_helper'; +import { file, resetStore } from '../helpers'; + +describe('RepoEditor', () => { + let vm; + + beforeEach(done => { + const f = file(); + const RepoEditor = Vue.extend(repoEditor); + + vm = createComponentWithStore(RepoEditor, store, { + file: f, + }); + + f.active = true; + f.tempFile = true; + vm.$store.state.openFiles.push(f); + vm.$store.state.entries[f.path] = f; + vm.monaco = true; + + vm.$mount(); + + monacoLoader(['vs/editor/editor.main'], () => { + setTimeout(done, 0); + }); + }); + + afterEach(() => { + vm.$destroy(); + + resetStore(vm.$store); + + Editor.editorInstance.dispose(); + }); + + it('renders an ide container', done => { + Vue.nextTick(() => { + 
expect(vm.shouldHideEditor).toBeFalsy(); + + done(); + }); + }); + + it('renders only an edit tab', done => { + Vue.nextTick(() => { + const tabs = vm.$el.querySelectorAll('.ide-mode-tabs .nav-links li'); + expect(tabs.length).toBe(1); + expect(tabs[0].textContent.trim()).toBe('Edit'); + + done(); + }); + }); + + describe('when file is markdown', () => { + beforeEach(done => { + vm.file.previewMode = { + id: 'markdown', + previewTitle: 'Preview Markdown', + }; + + vm.$nextTick(done); + }); + + it('renders an Edit and a Preview Tab', done => { + Vue.nextTick(() => { + const tabs = vm.$el.querySelectorAll('.ide-mode-tabs .nav-links li'); + expect(tabs.length).toBe(2); + expect(tabs[0].textContent.trim()).toBe('Edit'); + expect(tabs[1].textContent.trim()).toBe('Preview Markdown'); + + done(); + }); + }); + }); + + describe('when file is markdown and viewer mode is review', () => { + let mock; + + beforeEach(done => { + mock = new MockAdapter(axios); + + vm.file.projectId = 'namespace/project'; + vm.file.previewMode = { + id: 'markdown', + previewTitle: 'Preview Markdown', + }; + vm.file.content = 'testing 123'; + vm.$store.state.viewer = 'diff'; + + mock.onPost(/(.*)\/preview_markdown/).reply(200, { + body: '<p>testing 123</p>', + }); + + vm.$nextTick(done); + }); + + afterEach(() => { + mock.restore(); + }); + + it('renders an Edit and a Preview Tab', done => { + Vue.nextTick(() => { + const tabs = vm.$el.querySelectorAll('.ide-mode-tabs .nav-links li'); + expect(tabs.length).toBe(2); + expect(tabs[0].textContent.trim()).toBe('Review'); + expect(tabs[1].textContent.trim()).toBe('Preview Markdown'); + + done(); + }); + }); + + it('renders markdown for tempFile', done => { + vm.file.tempFile = true; + vm.file.path = `${vm.file.path}.md`; + vm.$store.state.entries[vm.file.path] = vm.file; + + vm + .$nextTick() + .then(() => { + vm.$el.querySelectorAll('.ide-mode-tabs .nav-links a')[1].click(); + }) + .then(setTimeoutPromise) + .then(() => { + expect(vm.$el.querySelector('.preview-container').innerHTML).toContain( + '<p>testing 123</p>', + ); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('when open file is binary and not raw', () => { + beforeEach(done => { + vm.file.binary = true; + + vm.$nextTick(done); + }); + + it('does not render the IDE', () => { + expect(vm.shouldHideEditor).toBeTruthy(); + }); + }); + + describe('createEditorInstance', () => { + it('calls createInstance when viewer is editor', done => { + spyOn(vm.editor, 'createInstance'); + + vm.createEditorInstance(); + + vm.$nextTick(() => { + expect(vm.editor.createInstance).toHaveBeenCalled(); + + done(); + }); + }); + + it('calls createDiffInstance when viewer is diff', done => { + vm.$store.state.viewer = 'diff'; + + spyOn(vm.editor, 'createDiffInstance'); + + vm.createEditorInstance(); + + vm.$nextTick(() => { + expect(vm.editor.createDiffInstance).toHaveBeenCalled(); + + done(); + }); + }); + + it('calls createDiffInstance when viewer is a merge request diff', done => { + vm.$store.state.viewer = 'mrdiff'; + + spyOn(vm.editor, 'createDiffInstance'); + + vm.createEditorInstance(); + + vm.$nextTick(() => { + expect(vm.editor.createDiffInstance).toHaveBeenCalled(); + + done(); + }); + }); + }); + + describe('setupEditor', () => { + it('creates new model', () => { + spyOn(vm.editor, 'createModel').and.callThrough(); + + Editor.editorInstance.modelManager.dispose(); + + vm.setupEditor(); + + expect(vm.editor.createModel).toHaveBeenCalledWith(vm.file, null); + expect(vm.model).not.toBeNull(); + }); + + it('attaches 
model to editor', () => { + spyOn(vm.editor, 'attachModel').and.callThrough(); + + Editor.editorInstance.modelManager.dispose(); + + vm.setupEditor(); + + expect(vm.editor.attachModel).toHaveBeenCalledWith(vm.model); + }); + + it('adds callback methods', () => { + spyOn(vm.editor, 'onPositionChange').and.callThrough(); + + Editor.editorInstance.modelManager.dispose(); + + vm.setupEditor(); + + expect(vm.editor.onPositionChange).toHaveBeenCalled(); + expect(vm.model.events.size).toBe(2); + }); + + it('updates state when model content changed', done => { + vm.model.setValue('testing 123'); + + setTimeout(() => { + expect(vm.file.content).toBe('testing 123'); + + done(); + }); + }); + + it('sets head model as staged file', () => { + spyOn(vm.editor, 'createModel').and.callThrough(); + + Editor.editorInstance.modelManager.dispose(); + + vm.$store.state.stagedFiles.push({ ...vm.file, key: 'staged' }); + vm.file.staged = true; + vm.file.key = `unstaged-${vm.file.key}`; + + vm.setupEditor(); + + expect(vm.editor.createModel).toHaveBeenCalledWith(vm.file, vm.$store.state.stagedFiles[0]); + }); + }); + + describe('editor updateDimensions', () => { + beforeEach(() => { + spyOn(vm.editor, 'updateDimensions').and.callThrough(); + spyOn(vm.editor, 'updateDiffView'); + }); + + it('calls updateDimensions when rightPanelCollapsed is changed', done => { + vm.$store.state.rightPanelCollapsed = true; + + vm.$nextTick(() => { + expect(vm.editor.updateDimensions).toHaveBeenCalled(); + expect(vm.editor.updateDiffView).toHaveBeenCalled(); + + done(); + }); + }); + + it('calls updateDimensions when panelResizing is false', done => { + vm.$store.state.panelResizing = true; + + vm + .$nextTick() + .then(() => { + vm.$store.state.panelResizing = false; + }) + .then(vm.$nextTick) + .then(() => { + expect(vm.editor.updateDimensions).toHaveBeenCalled(); + expect(vm.editor.updateDiffView).toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + + it('does not call updateDimensions when panelResizing is true', done => { + vm.$store.state.panelResizing = true; + + vm.$nextTick(() => { + expect(vm.editor.updateDimensions).not.toHaveBeenCalled(); + expect(vm.editor.updateDiffView).not.toHaveBeenCalled(); + + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/ide/components/repo_file_spec.js b/spec/javascripts/ide/components/repo_file_spec.js new file mode 100644 index 00000000000..ff391cb4351 --- /dev/null +++ b/spec/javascripts/ide/components/repo_file_spec.js @@ -0,0 +1,80 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import repoFile from '~/ide/components/repo_file.vue'; +import router from '~/ide/ide_router'; +import { createComponentWithStore } from '../../helpers/vue_mount_component_helper'; +import { file } from '../helpers'; + +describe('RepoFile', () => { + let vm; + + function createComponent(propsData) { + const RepoFile = Vue.extend(repoFile); + + vm = createComponentWithStore(RepoFile, store, propsData); + + vm.$mount(); + } + + afterEach(() => { + vm.$destroy(); + }); + + it('renders link, icon and name', () => { + createComponent({ + file: file('t4'), + level: 0, + }); + + const name = vm.$el.querySelector('.ide-file-name'); + + expect(name.href).toMatch(''); + expect(name.textContent.trim()).toEqual(vm.file.name); + }); + + it('fires clickFile when the link is clicked', done => { + spyOn(router, 'push'); + createComponent({ + file: file('t3'), + level: 0, + }); + + vm.$el.querySelector('.file-name').click(); + + setTimeout(() => { + 
expect(router.push).toHaveBeenCalledWith(`/project${vm.file.url}`); + + done(); + }); + }); + + describe('locked file', () => { + let f; + + beforeEach(() => { + f = file('locked file'); + f.file_lock = { + user: { + name: 'testuser', + updated_at: new Date(), + }, + }; + + createComponent({ + file: f, + level: 0, + }); + }); + + it('renders lock icon', () => { + expect(vm.$el.querySelector('.file-status-icon')).not.toBeNull(); + }); + + it('renders a tooltip', () => { + expect( + vm.$el.querySelector('.ide-file-name span:nth-child(2)').dataset + .originalTitle, + ).toContain('Locked by testuser'); + }); + }); +}); diff --git a/spec/javascripts/ide/components/repo_loading_file_spec.js b/spec/javascripts/ide/components/repo_loading_file_spec.js new file mode 100644 index 00000000000..7c20b8302f9 --- /dev/null +++ b/spec/javascripts/ide/components/repo_loading_file_spec.js @@ -0,0 +1,63 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import repoLoadingFile from '~/ide/components/repo_loading_file.vue'; +import { resetStore } from '../helpers'; + +describe('RepoLoadingFile', () => { + let vm; + + function createComponent() { + const RepoLoadingFile = Vue.extend(repoLoadingFile); + + return new RepoLoadingFile({ + store, + }).$mount(); + } + + function assertLines(lines) { + lines.forEach((line, n) => { + const index = n + 1; + expect(line.classList.contains(`skeleton-line-${index}`)).toBeTruthy(); + }); + } + + function assertColumns(columns) { + columns.forEach(column => { + const container = column.querySelector('.animation-container'); + const lines = [...container.querySelectorAll(':scope > div')]; + + expect(container).toBeTruthy(); + expect(lines.length).toEqual(3); + assertLines(lines); + }); + } + + afterEach(() => { + vm.$destroy(); + + resetStore(vm.$store); + }); + + it('renders 3 columns of animated LoC', () => { + vm = createComponent(); + const columns = [...vm.$el.querySelectorAll('td')]; + + expect(columns.length).toEqual(3); + assertColumns(columns); + }); + + it('renders 1 column of animated LoC if isMini', done => { + vm = createComponent(); + vm.$store.state.leftPanelCollapsed = true; + vm.$store.state.openFiles.push('test'); + + vm.$nextTick(() => { + const columns = [...vm.$el.querySelectorAll('td')]; + + expect(columns.length).toEqual(1); + assertColumns(columns); + + done(); + }); + }); +}); diff --git a/spec/javascripts/ide/components/repo_tab_spec.js b/spec/javascripts/ide/components/repo_tab_spec.js new file mode 100644 index 00000000000..8cabc6e8935 --- /dev/null +++ b/spec/javascripts/ide/components/repo_tab_spec.js @@ -0,0 +1,165 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import repoTab from '~/ide/components/repo_tab.vue'; +import router from '~/ide/ide_router'; +import { file, resetStore } from '../helpers'; + +describe('RepoTab', () => { + let vm; + + function createComponent(propsData) { + const RepoTab = Vue.extend(repoTab); + + return new RepoTab({ + store, + propsData, + }).$mount(); + } + + beforeEach(() => { + spyOn(router, 'push'); + }); + + afterEach(() => { + vm.$destroy(); + + resetStore(vm.$store); + }); + + it('renders a close link and a name link', () => { + vm = createComponent({ + tab: file(), + }); + vm.$store.state.openFiles.push(vm.tab); + const close = vm.$el.querySelector('.multi-file-tab-close'); + const name = vm.$el.querySelector(`[title="${vm.tab.url}"]`); + + expect(close.innerHTML).toContain('#close'); + expect(name.textContent.trim()).toEqual(vm.tab.name); + }); + + it('fires clickFile when the link 
is clicked', () => { + vm = createComponent({ + tab: file(), + }); + + spyOn(vm, 'clickFile'); + + vm.$el.click(); + + expect(vm.clickFile).toHaveBeenCalledWith(vm.tab); + }); + + it('calls closeFile when clicking close button', () => { + vm = createComponent({ + tab: file(), + }); + + spyOn(vm, 'closeFile'); + + vm.$el.querySelector('.multi-file-tab-close').click(); + + expect(vm.closeFile).toHaveBeenCalledWith(vm.tab); + }); + + it('changes icon on hover', done => { + const tab = file(); + tab.changed = true; + vm = createComponent({ + tab, + }); + + vm.$el.dispatchEvent(new Event('mouseover')); + + Vue.nextTick() + .then(() => { + expect(vm.$el.querySelector('.multi-file-modified')).toBeNull(); + + vm.$el.dispatchEvent(new Event('mouseout')); + }) + .then(Vue.nextTick) + .then(() => { + expect(vm.$el.querySelector('.multi-file-modified')).not.toBeNull(); + + done(); + }) + .catch(done.fail); + }); + + describe('locked file', () => { + let f; + + beforeEach(() => { + f = file('locked file'); + f.file_lock = { + user: { + name: 'testuser', + updated_at: new Date(), + }, + }; + + vm = createComponent({ + tab: f, + }); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('renders lock icon', () => { + expect(vm.$el.querySelector('.file-status-icon')).not.toBeNull(); + }); + + it('renders a tooltip', () => { + expect( + vm.$el.querySelector('span:nth-child(2)').dataset.originalTitle, + ).toContain('Locked by testuser'); + }); + }); + + describe('methods', () => { + describe('closeTab', () => { + it('closes tab if file has changed', done => { + const tab = file(); + tab.changed = true; + tab.opened = true; + vm = createComponent({ + tab, + }); + vm.$store.state.openFiles.push(tab); + vm.$store.state.changedFiles.push(tab); + vm.$store.state.entries[tab.path] = tab; + vm.$store.dispatch('setFileActive', tab.path); + + vm.$el.querySelector('.multi-file-tab-close').click(); + + vm.$nextTick(() => { + expect(tab.opened).toBeFalsy(); + expect(vm.$store.state.changedFiles.length).toBe(1); + + done(); + }); + }); + + it('closes tab when clicking close btn', done => { + const tab = file('lose'); + tab.opened = true; + vm = createComponent({ + tab, + }); + vm.$store.state.openFiles.push(tab); + vm.$store.state.entries[tab.path] = tab; + vm.$store.dispatch('setFileActive', tab.path); + + vm.$el.querySelector('.multi-file-tab-close').click(); + + vm.$nextTick(() => { + expect(tab.opened).toBeFalsy(); + + done(); + }); + }); + }); + }); +}); diff --git a/spec/javascripts/ide/components/repo_tabs_spec.js b/spec/javascripts/ide/components/repo_tabs_spec.js new file mode 100644 index 00000000000..cb785ba2cd3 --- /dev/null +++ b/spec/javascripts/ide/components/repo_tabs_spec.js @@ -0,0 +1,85 @@ +import Vue from 'vue'; +import repoTabs from '~/ide/components/repo_tabs.vue'; +import createComponent from '../../helpers/vue_mount_component_helper'; +import { file } from '../helpers'; + +describe('RepoTabs', () => { + const openedFiles = [file('open1'), file('open2')]; + const RepoTabs = Vue.extend(repoTabs); + let vm; + + afterEach(() => { + vm.$destroy(); + }); + + it('renders a list of tabs', done => { + vm = createComponent(RepoTabs, { + files: openedFiles, + viewer: 'editor', + hasChanges: false, + activeFile: file('activeFile'), + hasMergeRequest: false, + }); + openedFiles[0].active = true; + + vm.$nextTick(() => { + const tabs = [...vm.$el.querySelectorAll('.multi-file-tab')]; + + expect(tabs.length).toEqual(2); + expect(tabs[0].classList.contains('active')).toEqual(true); + 
expect(tabs[1].classList.contains('active')).toEqual(false); + + done(); + }); + }); + + describe('updated', () => { + it('sets showShadow as true when scroll width is larger than width', done => { + const el = document.createElement('div'); + el.innerHTML = '<div id="test-app"></div>'; + document.body.appendChild(el); + + const style = document.createElement('style'); + style.innerText = ` + .multi-file-tabs { + width: 100px; + } + + .multi-file-tabs .list-unstyled { + display: flex; + overflow-x: auto; + } + `; + document.head.appendChild(style); + + vm = createComponent( + RepoTabs, + { + files: [], + viewer: 'editor', + hasChanges: false, + activeFile: file('activeFile'), + hasMergeRequest: false, + }, + '#test-app', + ); + + vm + .$nextTick() + .then(() => { + expect(vm.showShadow).toEqual(false); + + vm.files = openedFiles; + }) + .then(vm.$nextTick) + .then(() => { + expect(vm.showShadow).toEqual(true); + + style.remove(); + el.remove(); + }) + .then(done) + .catch(done.fail); + }); + }); +}); diff --git a/spec/javascripts/ide/helpers.js b/spec/javascripts/ide/helpers.js new file mode 100644 index 00000000000..98db6defc7a --- /dev/null +++ b/spec/javascripts/ide/helpers.js @@ -0,0 +1,22 @@ +import { decorateData } from '~/ide/stores/utils'; +import state from '~/ide/stores/state'; +import commitState from '~/ide/stores/modules/commit/state'; + +export const resetStore = store => { + const newState = { + ...state(), + commit: commitState(), + }; + store.replaceState(newState); +}; + +export const file = (name = 'name', id = name, type = '') => + decorateData({ + id, + type, + icon: 'icon', + url: 'url', + name, + path: name, + lastCommit: {}, + }); diff --git a/spec/javascripts/ide/lib/common/disposable_spec.js b/spec/javascripts/ide/lib/common/disposable_spec.js new file mode 100644 index 00000000000..af12ca15369 --- /dev/null +++ b/spec/javascripts/ide/lib/common/disposable_spec.js @@ -0,0 +1,44 @@ +import Disposable from '~/ide/lib/common/disposable'; + +describe('Multi-file editor library disposable class', () => { + let instance; + let disposableClass; + + beforeEach(() => { + instance = new Disposable(); + + disposableClass = { + dispose: jasmine.createSpy('dispose'), + }; + }); + + afterEach(() => { + instance.dispose(); + }); + + describe('add', () => { + it('adds disposable classes', () => { + instance.add(disposableClass); + + expect(instance.disposers.size).toBe(1); + }); + }); + + describe('dispose', () => { + beforeEach(() => { + instance.add(disposableClass); + }); + + it('calls dispose on all cached disposers', () => { + instance.dispose(); + + expect(disposableClass.dispose).toHaveBeenCalled(); + }); + + it('clears cached disposers', () => { + instance.dispose(); + + expect(instance.disposers.size).toBe(0); + }); + }); +}); diff --git a/spec/javascripts/ide/lib/common/model_manager_spec.js b/spec/javascripts/ide/lib/common/model_manager_spec.js new file mode 100644 index 00000000000..c00d590c580 --- /dev/null +++ b/spec/javascripts/ide/lib/common/model_manager_spec.js @@ -0,0 +1,132 @@ +/* global monaco */ +import eventHub from '~/ide/eventhub'; +import monacoLoader from '~/ide/monaco_loader'; +import ModelManager from '~/ide/lib/common/model_manager'; +import { file } from '../../helpers'; + +describe('Multi-file editor library model manager', () => { + let instance; + + beforeEach(done => { + monacoLoader(['vs/editor/editor.main'], () => { + instance = new ModelManager(monaco); + + done(); + }); + }); + + afterEach(() => { + instance.dispose(); + }); + + 
describe('addModel', () => { + it('caches model', () => { + instance.addModel(file()); + + expect(instance.models.size).toBe(1); + }); + + it('caches model by file path', () => { + const f = file('path-name'); + instance.addModel(f); + + expect(instance.models.keys().next().value).toBe(f.key); + }); + + it('adds model into disposable', () => { + spyOn(instance.disposable, 'add').and.callThrough(); + + instance.addModel(file()); + + expect(instance.disposable.add).toHaveBeenCalled(); + }); + + it('returns cached model', () => { + spyOn(instance.models, 'get').and.callThrough(); + + instance.addModel(file()); + instance.addModel(file()); + + expect(instance.models.get).toHaveBeenCalled(); + }); + + it('adds eventHub listener', () => { + const f = file(); + spyOn(eventHub, '$on').and.callThrough(); + + instance.addModel(f); + + expect(eventHub.$on).toHaveBeenCalledWith( + `editor.update.model.dispose.${f.key}`, + jasmine.anything(), + ); + }); + }); + + describe('hasCachedModel', () => { + it('returns false when no models exist', () => { + expect(instance.hasCachedModel('path')).toBeFalsy(); + }); + + it('returns true when model exists', () => { + const f = file('path-name'); + + instance.addModel(f); + + expect(instance.hasCachedModel(f.key)).toBeTruthy(); + }); + }); + + describe('getModel', () => { + it('returns cached model', () => { + instance.addModel(file('path-name')); + + expect(instance.getModel('path-name')).not.toBeNull(); + }); + }); + + describe('removeCachedModel', () => { + let f; + + beforeEach(() => { + f = file(); + + instance.addModel(f); + }); + + it('clears cached model', () => { + instance.removeCachedModel(f); + + expect(instance.models.size).toBe(0); + }); + + it('removes eventHub listener', () => { + spyOn(eventHub, '$off').and.callThrough(); + + instance.removeCachedModel(f); + + expect(eventHub.$off).toHaveBeenCalledWith( + `editor.update.model.dispose.${f.key}`, + jasmine.anything(), + ); + }); + }); + + describe('dispose', () => { + it('clears cached models', () => { + instance.addModel(file()); + + instance.dispose(); + + expect(instance.models.size).toBe(0); + }); + + it('calls disposable dispose', () => { + spyOn(instance.disposable, 'dispose').and.callThrough(); + + instance.dispose(); + + expect(instance.disposable.dispose).toHaveBeenCalled(); + }); + }); +}); diff --git a/spec/javascripts/ide/lib/common/model_spec.js b/spec/javascripts/ide/lib/common/model_spec.js new file mode 100644 index 00000000000..7a6c22b6d27 --- /dev/null +++ b/spec/javascripts/ide/lib/common/model_spec.js @@ -0,0 +1,139 @@ +/* global monaco */ +import eventHub from '~/ide/eventhub'; +import monacoLoader from '~/ide/monaco_loader'; +import Model from '~/ide/lib/common/model'; +import { file } from '../../helpers'; + +describe('Multi-file editor library model', () => { + let model; + + beforeEach(done => { + spyOn(eventHub, '$on').and.callThrough(); + + monacoLoader(['vs/editor/editor.main'], () => { + const f = file('path'); + f.mrChange = { diff: 'ABC' }; + f.baseRaw = 'test'; + model = new Model(monaco, f); + + done(); + }); + }); + + afterEach(() => { + model.dispose(); + }); + + it('creates original model & base model & new model', () => { + expect(model.originalModel).not.toBeNull(); + expect(model.model).not.toBeNull(); + expect(model.baseModel).not.toBeNull(); + }); + + it('creates model with head file to compare against', () => { + const f = file('path'); + model.dispose(); + + model = new Model(monaco, f, { + ...f, + content: '123 testing', + }); + + 
expect(model.head).not.toBeNull(); + expect(model.getOriginalModel().getValue()).toBe('123 testing'); + }); + + it('adds eventHub listener', () => { + expect(eventHub.$on).toHaveBeenCalledWith( + `editor.update.model.dispose.${model.file.key}`, + jasmine.anything(), + ); + }); + + describe('path', () => { + it('returns file path', () => { + expect(model.path).toBe(model.file.key); + }); + }); + + describe('getModel', () => { + it('returns model', () => { + expect(model.getModel()).toBe(model.model); + }); + }); + + describe('getOriginalModel', () => { + it('returns original model', () => { + expect(model.getOriginalModel()).toBe(model.originalModel); + }); + }); + + describe('getBaseModel', () => { + it('returns base model', () => { + expect(model.getBaseModel()).toBe(model.baseModel); + }); + }); + + describe('setValue', () => { + it('updates models value', () => { + model.setValue('testing 123'); + + expect(model.getModel().getValue()).toBe('testing 123'); + }); + }); + + describe('onChange', () => { + it('calls callback on change', done => { + const spy = jasmine.createSpy(); + model.onChange(spy); + + model.getModel().setValue('123'); + + setTimeout(() => { + expect(spy).toHaveBeenCalledWith(model, jasmine.anything()); + done(); + }); + }); + }); + + describe('dispose', () => { + it('calls disposable dispose', () => { + spyOn(model.disposable, 'dispose').and.callThrough(); + + model.dispose(); + + expect(model.disposable.dispose).toHaveBeenCalled(); + }); + + it('clears events', () => { + model.onChange(() => {}); + + expect(model.events.size).toBe(1); + + model.dispose(); + + expect(model.events.size).toBe(0); + }); + + it('removes eventHub listener', () => { + spyOn(eventHub, '$off').and.callThrough(); + + model.dispose(); + + expect(eventHub.$off).toHaveBeenCalledWith( + `editor.update.model.dispose.${model.file.key}`, + jasmine.anything(), + ); + }); + + it('calls onDispose callback', () => { + const disposeSpy = jasmine.createSpy(); + + model.onDispose(disposeSpy); + + model.dispose(); + + expect(disposeSpy).toHaveBeenCalled(); + }); + }); +}); diff --git a/spec/javascripts/ide/lib/decorations/controller_spec.js b/spec/javascripts/ide/lib/decorations/controller_spec.js new file mode 100644 index 00000000000..e1c4ca570b6 --- /dev/null +++ b/spec/javascripts/ide/lib/decorations/controller_spec.js @@ -0,0 +1,149 @@ +/* global monaco */ +import monacoLoader from '~/ide/monaco_loader'; +import editor from '~/ide/lib/editor'; +import DecorationsController from '~/ide/lib/decorations/controller'; +import Model from '~/ide/lib/common/model'; +import { file } from '../../helpers'; + +describe('Multi-file editor library decorations controller', () => { + let editorInstance; + let controller; + let model; + + beforeEach(done => { + monacoLoader(['vs/editor/editor.main'], () => { + editorInstance = editor.create(monaco); + editorInstance.createInstance(document.createElement('div')); + + controller = new DecorationsController(editorInstance); + model = new Model(monaco, file('path')); + + done(); + }); + }); + + afterEach(() => { + model.dispose(); + editorInstance.dispose(); + controller.dispose(); + }); + + describe('getAllDecorationsForModel', () => { + it('returns empty array when no decorations exist for model', () => { + const decorations = controller.getAllDecorationsForModel(model); + + expect(decorations).toEqual([]); + }); + + it('returns decorations by model URL', () => { + controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]); + + const decorations = 
controller.getAllDecorationsForModel(model); + + expect(decorations[0]).toEqual({ decoration: 'decorationValue' }); + }); + }); + + describe('addDecorations', () => { + it('caches decorations in a new map', () => { + controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]); + + expect(controller.decorations.size).toBe(1); + }); + + it('does not create new cache model', () => { + controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]); + controller.addDecorations(model, 'key', [{ decoration: 'decorationValue2' }]); + + expect(controller.decorations.size).toBe(1); + }); + + it('caches decorations by model URL', () => { + controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]); + + expect(controller.decorations.size).toBe(1); + expect(controller.decorations.keys().next().value).toBe('path--path'); + }); + + it('calls decorate method', () => { + spyOn(controller, 'decorate'); + + controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]); + + expect(controller.decorate).toHaveBeenCalled(); + }); + }); + + describe('decorate', () => { + it('sets decorations on editor instance', () => { + spyOn(controller.editor.instance, 'deltaDecorations'); + + controller.decorate(model); + + expect(controller.editor.instance.deltaDecorations).toHaveBeenCalledWith([], []); + }); + + it('caches decorations', () => { + spyOn(controller.editor.instance, 'deltaDecorations').and.returnValue([]); + + controller.decorate(model); + + expect(controller.editorDecorations.size).toBe(1); + }); + + it('caches decorations by model URL', () => { + spyOn(controller.editor.instance, 'deltaDecorations').and.returnValue([]); + + controller.decorate(model); + + expect(controller.editorDecorations.keys().next().value).toBe('path--path'); + }); + }); + + describe('dispose', () => { + it('clears cached decorations', () => { + controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]); + + controller.dispose(); + + expect(controller.decorations.size).toBe(0); + }); + + it('clears cached editorDecorations', () => { + controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]); + + controller.dispose(); + + expect(controller.editorDecorations.size).toBe(0); + }); + }); + + describe('hasDecorations', () => { + it('returns true when decorations are cached', () => { + controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]); + + expect(controller.hasDecorations(model)).toBe(true); + }); + + it('returns false when no model decorations exist', () => { + expect(controller.hasDecorations(model)).toBe(false); + }); + }); + + describe('removeDecorations', () => { + beforeEach(() => { + controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]); + controller.decorate(model); + }); + + it('removes cached decorations', () => { + expect(controller.decorations.size).not.toBe(0); + expect(controller.editorDecorations.size).not.toBe(0); + + controller.removeDecorations(model); + + expect(controller.decorations.size).toBe(0); + expect(controller.editorDecorations.size).toBe(0); + }); + }); +}); diff --git a/spec/javascripts/ide/lib/diff/controller_spec.js b/spec/javascripts/ide/lib/diff/controller_spec.js new file mode 100644 index 00000000000..fd8ab3b4f1d --- /dev/null +++ b/spec/javascripts/ide/lib/diff/controller_spec.js @@ -0,0 +1,220 @@ +/* global monaco */ +import monacoLoader from '~/ide/monaco_loader'; +import editor from '~/ide/lib/editor'; +import ModelManager from 
'~/ide/lib/common/model_manager'; +import DecorationsController from '~/ide/lib/decorations/controller'; +import DirtyDiffController, { getDiffChangeType, getDecorator } from '~/ide/lib/diff/controller'; +import { computeDiff } from '~/ide/lib/diff/diff'; +import { file } from '../../helpers'; + +describe('Multi-file editor library dirty diff controller', () => { + let editorInstance; + let controller; + let modelManager; + let decorationsController; + let model; + + beforeEach(done => { + monacoLoader(['vs/editor/editor.main'], () => { + editorInstance = editor.create(monaco); + editorInstance.createInstance(document.createElement('div')); + + modelManager = new ModelManager(monaco); + decorationsController = new DecorationsController(editorInstance); + + model = modelManager.addModel(file('path')); + + controller = new DirtyDiffController(modelManager, decorationsController); + + done(); + }); + }); + + afterEach(() => { + controller.dispose(); + model.dispose(); + decorationsController.dispose(); + editorInstance.dispose(); + }); + + describe('getDiffChangeType', () => { + ['added', 'removed', 'modified'].forEach(type => { + it(`returns ${type}`, () => { + const change = { + [type]: true, + }; + + expect(getDiffChangeType(change)).toBe(type); + }); + }); + }); + + describe('getDecorator', () => { + ['added', 'removed', 'modified'].forEach(type => { + it(`returns with linesDecorationsClassName for ${type}`, () => { + const change = { + [type]: true, + }; + + expect(getDecorator(change).options.linesDecorationsClassName).toBe( + `dirty-diff dirty-diff-${type}`, + ); + }); + + it('returns with line numbers', () => { + const change = { + lineNumber: 1, + endLineNumber: 2, + [type]: true, + }; + + const range = getDecorator(change).range; + + expect(range.startLineNumber).toBe(1); + expect(range.endLineNumber).toBe(2); + expect(range.startColumn).toBe(1); + expect(range.endColumn).toBe(1); + }); + }); + }); + + describe('attachModel', () => { + it('adds change event callback', () => { + spyOn(model, 'onChange'); + + controller.attachModel(model); + + expect(model.onChange).toHaveBeenCalled(); + }); + + it('adds dispose event callback', () => { + spyOn(model, 'onDispose'); + + controller.attachModel(model); + + expect(model.onDispose).toHaveBeenCalled(); + }); + + it('calls throttledComputeDiff on change', () => { + spyOn(controller, 'throttledComputeDiff'); + + controller.attachModel(model); + + model.getModel().setValue('123'); + + expect(controller.throttledComputeDiff).toHaveBeenCalled(); + }); + + it('caches model', () => { + controller.attachModel(model); + + expect(controller.models.has(model.url)).toBe(true); + }); + }); + + describe('computeDiff', () => { + it('posts to worker', () => { + spyOn(controller.dirtyDiffWorker, 'postMessage'); + + controller.computeDiff(model); + + expect(controller.dirtyDiffWorker.postMessage).toHaveBeenCalledWith({ + path: model.path, + originalContent: '', + newContent: '', + }); + }); + }); + + describe('reDecorate', () => { + it('calls computeDiff when no decorations are cached', () => { + spyOn(controller, 'computeDiff'); + + controller.reDecorate(model); + + expect(controller.computeDiff).toHaveBeenCalledWith(model); + }); + + it('calls decorate when decorations are cached', () => { + spyOn(controller.decorationsController, 'decorate'); + + controller.decorationsController.decorations.set(model.url, 'test'); + + controller.reDecorate(model); + + expect(controller.decorationsController.decorate).toHaveBeenCalledWith(model); + }); + }); + + 
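The dirty-diff flow exercised by this spec is: the controller posts the model's original and current content to a web worker, gets back a list of line changes, and maps each change onto a Monaco gutter decoration. As a reading aid only, here is a minimal sketch of the two helpers the assertions above describe (getDiffChangeType and getDecorator); it is reconstructed from the expectations in this spec, not taken from the actual ~/ide/lib/diff/controller module, and it relies on the same global monaco object these specs load via monacoLoader.

// Illustrative sketch based on the assertions in this spec, not the shipped implementation.
export function getDiffChangeType(change) {
  // A modified line also carries `added`, so check `modified` first.
  if (change.modified) return 'modified';
  if (change.added) return 'added';
  if (change.removed) return 'removed';
  return '';
}

export function getDecorator(change) {
  return {
    // Whole-line decoration spanning the changed lines, with both columns fixed at 1.
    range: new monaco.Range(change.lineNumber, 1, change.endLineNumber, 1),
    options: {
      isWholeLine: true,
      linesDecorationsClassName: `dirty-diff dirty-diff-${getDiffChangeType(change)}`,
    },
  };
}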
describe('decorate', () => { + it('adds decorations into decorations controller', () => { + spyOn(controller.decorationsController, 'addDecorations'); + + controller.decorate({ data: { changes: [], path: model.path } }); + + expect(controller.decorationsController.addDecorations).toHaveBeenCalledWith( + model, + 'dirtyDiff', + jasmine.anything(), + ); + }); + + it('adds decorations into editor', () => { + const spy = spyOn(controller.decorationsController.editor.instance, 'deltaDecorations'); + + controller.decorate({ + data: { changes: computeDiff('123', '1234'), path: model.path }, + }); + + expect(spy).toHaveBeenCalledWith( + [], + [ + { + range: new monaco.Range(1, 1, 1, 1), + options: { + isWholeLine: true, + linesDecorationsClassName: 'dirty-diff dirty-diff-modified', + }, + }, + ], + ); + }); + }); + + describe('dispose', () => { + it('calls disposable dispose', () => { + spyOn(controller.disposable, 'dispose').and.callThrough(); + + controller.dispose(); + + expect(controller.disposable.dispose).toHaveBeenCalled(); + }); + + it('terminates worker', () => { + spyOn(controller.dirtyDiffWorker, 'terminate').and.callThrough(); + + controller.dispose(); + + expect(controller.dirtyDiffWorker.terminate).toHaveBeenCalled(); + }); + + it('removes worker event listener', () => { + spyOn(controller.dirtyDiffWorker, 'removeEventListener').and.callThrough(); + + controller.dispose(); + + expect(controller.dirtyDiffWorker.removeEventListener).toHaveBeenCalledWith( + 'message', + jasmine.anything(), + ); + }); + + it('clears cached models', () => { + controller.attachModel(model); + + model.dispose(); + + expect(controller.models.size).toBe(0); + }); + }); +}); diff --git a/spec/javascripts/ide/lib/diff/diff_spec.js b/spec/javascripts/ide/lib/diff/diff_spec.js new file mode 100644 index 00000000000..57f3ac3d365 --- /dev/null +++ b/spec/javascripts/ide/lib/diff/diff_spec.js @@ -0,0 +1,80 @@ +import { computeDiff } from '~/ide/lib/diff/diff'; + +describe('Multi-file editor library diff calculator', () => { + describe('computeDiff', () => { + it('returns empty array if no changes', () => { + const diff = computeDiff('123', '123'); + + expect(diff).toEqual([]); + }); + + describe('modified', () => { + it('', () => { + const diff = computeDiff('123', '1234')[0]; + + expect(diff.added).toBeTruthy(); + expect(diff.modified).toBeTruthy(); + expect(diff.removed).toBeUndefined(); + }); + + it('', () => { + const diff = computeDiff('123\n123\n123', '123\n1234\n123')[0]; + + expect(diff.added).toBeTruthy(); + expect(diff.modified).toBeTruthy(); + expect(diff.removed).toBeUndefined(); + expect(diff.lineNumber).toBe(2); + }); + }); + + describe('added', () => { + it('', () => { + const diff = computeDiff('123', '123\n123')[0]; + + expect(diff.added).toBeTruthy(); + expect(diff.modified).toBeUndefined(); + expect(diff.removed).toBeUndefined(); + }); + + it('', () => { + const diff = computeDiff('123\n123\n123', '123\n123\n1234\n123')[0]; + + expect(diff.added).toBeTruthy(); + expect(diff.modified).toBeUndefined(); + expect(diff.removed).toBeUndefined(); + expect(diff.lineNumber).toBe(3); + }); + }); + + describe('removed', () => { + it('', () => { + const diff = computeDiff('123', '')[0]; + + expect(diff.added).toBeUndefined(); + expect(diff.modified).toBeUndefined(); + expect(diff.removed).toBeTruthy(); + }); + + it('', () => { + const diff = computeDiff('123\n123\n123', '123\n123')[0]; + + expect(diff.added).toBeUndefined(); + expect(diff.modified).toBeTruthy(); + expect(diff.removed).toBeTruthy(); + 
expect(diff.lineNumber).toBe(2); + }); + }); + + it('includes line number of change', () => { + const diff = computeDiff('123', '')[0]; + + expect(diff.lineNumber).toBe(1); + }); + + it('includes end line number of change', () => { + const diff = computeDiff('123', '')[0]; + + expect(diff.endLineNumber).toBe(1); + }); + }); +}); diff --git a/spec/javascripts/ide/lib/editor_options_spec.js b/spec/javascripts/ide/lib/editor_options_spec.js new file mode 100644 index 00000000000..d149a883166 --- /dev/null +++ b/spec/javascripts/ide/lib/editor_options_spec.js @@ -0,0 +1,11 @@ +import editorOptions from '~/ide/lib/editor_options'; + +describe('Multi-file editor library editor options', () => { + it('returns an array', () => { + expect(editorOptions).toEqual(jasmine.any(Array)); + }); + + it('contains readOnly option', () => { + expect(editorOptions[0].readOnly).toBeDefined(); + }); +}); diff --git a/spec/javascripts/ide/lib/editor_spec.js b/spec/javascripts/ide/lib/editor_spec.js new file mode 100644 index 00000000000..530bdfa2759 --- /dev/null +++ b/spec/javascripts/ide/lib/editor_spec.js @@ -0,0 +1,271 @@ +/* global monaco */ +import monacoLoader from '~/ide/monaco_loader'; +import editor from '~/ide/lib/editor'; +import { file } from '../helpers'; + +describe('Multi-file editor library', () => { + let instance; + let el; + let holder; + + beforeEach(done => { + el = document.createElement('div'); + holder = document.createElement('div'); + el.appendChild(holder); + + document.body.appendChild(el); + + monacoLoader(['vs/editor/editor.main'], () => { + instance = editor.create(monaco); + + done(); + }); + }); + + afterEach(() => { + instance.dispose(); + + el.remove(); + }); + + it('creates instance of editor', () => { + expect(editor.editorInstance).not.toBeNull(); + }); + + it('creates instance returns cached instance', () => { + expect(editor.create(monaco)).toEqual(instance); + }); + + describe('createInstance', () => { + it('creates editor instance', () => { + spyOn(instance.monaco.editor, 'create').and.callThrough(); + + instance.createInstance(holder); + + expect(instance.monaco.editor.create).toHaveBeenCalled(); + }); + + it('creates dirty diff controller', () => { + instance.createInstance(holder); + + expect(instance.dirtyDiffController).not.toBeNull(); + }); + + it('creates model manager', () => { + instance.createInstance(holder); + + expect(instance.modelManager).not.toBeNull(); + }); + }); + + describe('createDiffInstance', () => { + it('creates editor instance', () => { + spyOn(instance.monaco.editor, 'createDiffEditor').and.callThrough(); + + instance.createDiffInstance(holder); + + expect(instance.monaco.editor.createDiffEditor).toHaveBeenCalledWith(holder, { + model: null, + contextmenu: true, + minimap: { + enabled: false, + }, + readOnly: true, + scrollBeyondLastLine: false, + quickSuggestions: false, + occurrencesHighlight: false, + renderLineHighlight: 'none', + hideCursorInOverviewRuler: true, + wordWrap: 'on', + renderSideBySide: true, + }); + }); + }); + + describe('createModel', () => { + it('calls model manager addModel', () => { + spyOn(instance.modelManager, 'addModel'); + + instance.createModel('FILE'); + + expect(instance.modelManager.addModel).toHaveBeenCalledWith('FILE', null); + }); + }); + + describe('attachModel', () => { + let model; + + beforeEach(() => { + instance.createInstance(document.createElement('div')); + + model = instance.createModel(file()); + }); + + it('sets the current model on the instance', () => { + instance.attachModel(model); + + 
expect(instance.currentModel).toBe(model); + }); + + it('attaches the model to the current instance', () => { + spyOn(instance.instance, 'setModel'); + + instance.attachModel(model); + + expect(instance.instance.setModel).toHaveBeenCalledWith(model.getModel()); + }); + + it('sets original & modified when diff editor', () => { + spyOn(instance.instance, 'getEditorType').and.returnValue('vs.editor.IDiffEditor'); + spyOn(instance.instance, 'setModel'); + + instance.attachModel(model); + + expect(instance.instance.setModel).toHaveBeenCalledWith({ + original: model.getOriginalModel(), + modified: model.getModel(), + }); + }); + + it('attaches the model to the dirty diff controller', () => { + spyOn(instance.dirtyDiffController, 'attachModel'); + + instance.attachModel(model); + + expect(instance.dirtyDiffController.attachModel).toHaveBeenCalledWith(model); + }); + + it('re-decorates with the dirty diff controller', () => { + spyOn(instance.dirtyDiffController, 'reDecorate'); + + instance.attachModel(model); + + expect(instance.dirtyDiffController.reDecorate).toHaveBeenCalledWith(model); + }); + }); + + describe('attachMergeRequestModel', () => { + let model; + + beforeEach(() => { + instance.createDiffInstance(document.createElement('div')); + + const f = file(); + f.mrChanges = { diff: 'ABC' }; + f.baseRaw = 'testing'; + + model = instance.createModel(f); + }); + + it('sets original & modified', () => { + spyOn(instance.instance, 'setModel'); + + instance.attachMergeRequestModel(model); + + expect(instance.instance.setModel).toHaveBeenCalledWith({ + original: model.getBaseModel(), + modified: model.getModel(), + }); + }); + }); + + describe('clearEditor', () => { + it('resets the editor model', () => { + instance.createInstance(document.createElement('div')); + + spyOn(instance.instance, 'setModel'); + + instance.clearEditor(); + + expect(instance.instance.setModel).toHaveBeenCalledWith(null); + }); + }); + + describe('dispose', () => { + it('calls disposble dispose method', () => { + spyOn(instance.disposable, 'dispose').and.callThrough(); + + instance.dispose(); + + expect(instance.disposable.dispose).toHaveBeenCalled(); + }); + + it('resets instance', () => { + instance.createInstance(document.createElement('div')); + + expect(instance.instance).not.toBeNull(); + + instance.dispose(); + + expect(instance.instance).toBeNull(); + }); + + it('does not dispose modelManager', () => { + spyOn(instance.modelManager, 'dispose'); + + instance.dispose(); + + expect(instance.modelManager.dispose).not.toHaveBeenCalled(); + }); + + it('does not dispose decorationsController', () => { + spyOn(instance.decorationsController, 'dispose'); + + instance.dispose(); + + expect(instance.decorationsController.dispose).not.toHaveBeenCalled(); + }); + }); + + describe('updateDiffView', () => { + describe('edit mode', () => { + it('does not update options', () => { + instance.createInstance(holder); + + spyOn(instance.instance, 'updateOptions'); + + instance.updateDiffView(); + + expect(instance.instance.updateOptions).not.toHaveBeenCalled(); + }); + }); + + describe('diff mode', () => { + beforeEach(() => { + instance.createDiffInstance(holder); + + spyOn(instance.instance, 'updateOptions').and.callThrough(); + }); + + it('sets renderSideBySide to false if el is less than 700 pixels', () => { + spyOnProperty(instance.instance.getDomNode(), 'offsetWidth').and.returnValue(600); + + expect(instance.instance.updateOptions).not.toHaveBeenCalledWith({ + renderSideBySide: false, + }); + }); + + it('sets renderSideBySide 
to false if el is more than 700 pixels', () => { + spyOnProperty(instance.instance.getDomNode(), 'offsetWidth').and.returnValue(800); + + expect(instance.instance.updateOptions).not.toHaveBeenCalledWith({ + renderSideBySide: true, + }); + }); + }); + }); + + describe('isDiffEditorType', () => { + it('returns true when diff editor', () => { + instance.createDiffInstance(holder); + + expect(instance.isDiffEditorType).toBe(true); + }); + + it('returns false when not diff editor', () => { + instance.createInstance(holder); + + expect(instance.isDiffEditorType).toBe(false); + }); + }); +}); diff --git a/spec/javascripts/ide/monaco_loader_spec.js b/spec/javascripts/ide/monaco_loader_spec.js new file mode 100644 index 00000000000..7ab315aa8c8 --- /dev/null +++ b/spec/javascripts/ide/monaco_loader_spec.js @@ -0,0 +1,15 @@ +import monacoContext from 'monaco-editor/dev/vs/loader'; +import monacoLoader from '~/ide/monaco_loader'; + +describe('MonacoLoader', () => { + it('calls require.config and exports require', () => { + expect(monacoContext.require.getConfig()).toEqual( + jasmine.objectContaining({ + paths: { + vs: `${__webpack_public_path__}monaco-editor/vs`, // eslint-disable-line camelcase + }, + }), + ); + expect(monacoLoader).toBe(monacoContext.require); + }); +}); diff --git a/spec/javascripts/ide/stores/actions/file_spec.js b/spec/javascripts/ide/stores/actions/file_spec.js new file mode 100644 index 00000000000..ce5c525bed7 --- /dev/null +++ b/spec/javascripts/ide/stores/actions/file_spec.js @@ -0,0 +1,628 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import * as actions from '~/ide/stores/actions/file'; +import * as types from '~/ide/stores/mutation_types'; +import service from '~/ide/services'; +import router from '~/ide/ide_router'; +import eventHub from '~/ide/eventhub'; +import { file, resetStore } from '../../helpers'; +import testAction from '../../../helpers/vuex_action_helper'; + +describe('IDE store file actions', () => { + beforeEach(() => { + spyOn(router, 'push'); + }); + + afterEach(() => { + resetStore(store); + }); + + describe('closeFile', () => { + let localFile; + + beforeEach(() => { + localFile = file('testFile'); + localFile.active = true; + localFile.opened = true; + localFile.parentTreeUrl = 'parentTreeUrl'; + + store.state.openFiles.push(localFile); + store.state.entries[localFile.path] = localFile; + }); + + it('closes open files', done => { + store + .dispatch('closeFile', localFile) + .then(() => { + expect(localFile.opened).toBeFalsy(); + expect(localFile.active).toBeFalsy(); + expect(store.state.openFiles.length).toBe(0); + + done(); + }) + .catch(done.fail); + }); + + it('closes file even if file has changes', done => { + store.state.changedFiles.push(localFile); + + store + .dispatch('closeFile', localFile) + .then(Vue.nextTick) + .then(() => { + expect(store.state.openFiles.length).toBe(0); + expect(store.state.changedFiles.length).toBe(1); + + done(); + }) + .catch(done.fail); + }); + + it('closes file & opens next available file', done => { + const f = { + ...file('newOpenFile'), + url: '/newOpenFile', + }; + + store.state.openFiles.push(f); + store.state.entries[f.path] = f; + + store + .dispatch('closeFile', localFile) + .then(Vue.nextTick) + .then(() => { + expect(router.push).toHaveBeenCalledWith(`/project${f.url}`); + + done(); + }) + .catch(done.fail); + }); + + it('removes file if it pending', done => { + store.state.openFiles.push({ + ...localFile, + pending: true, + }); + + store + .dispatch('closeFile', localFile) + .then(() => 
{ + expect(store.state.openFiles.length).toBe(0); + + done(); + }) + .catch(done.fail); + }); + }); + + describe('setFileActive', () => { + let localFile; + let scrollToTabSpy; + let oldScrollToTab; + + beforeEach(() => { + scrollToTabSpy = jasmine.createSpy('scrollToTab'); + oldScrollToTab = store._actions.scrollToTab; // eslint-disable-line + store._actions.scrollToTab = [scrollToTabSpy]; // eslint-disable-line + + localFile = file('setThisActive'); + + store.state.entries[localFile.path] = localFile; + }); + + afterEach(() => { + store._actions.scrollToTab = oldScrollToTab; // eslint-disable-line + }); + + it('calls scrollToTab', done => { + store + .dispatch('setFileActive', localFile.path) + .then(() => { + expect(scrollToTabSpy).toHaveBeenCalled(); + + done(); + }) + .catch(done.fail); + }); + + it('sets the file active', done => { + store + .dispatch('setFileActive', localFile.path) + .then(() => { + expect(localFile.active).toBeTruthy(); + + done(); + }) + .catch(done.fail); + }); + + it('returns early if file is already active', done => { + localFile.active = true; + + store + .dispatch('setFileActive', localFile.path) + .then(() => { + expect(scrollToTabSpy).not.toHaveBeenCalled(); + + done(); + }) + .catch(done.fail); + }); + + it('sets current active file to not active', done => { + const f = file('newActive'); + store.state.entries[f.path] = f; + localFile.active = true; + store.state.openFiles.push(localFile); + + store + .dispatch('setFileActive', f.path) + .then(() => { + expect(localFile.active).toBeFalsy(); + + done(); + }) + .catch(done.fail); + }); + + it('resets location.hash for line highlighting', done => { + location.hash = 'test'; + + store + .dispatch('setFileActive', localFile.path) + .then(() => { + expect(location.hash).not.toBe('test'); + + done(); + }) + .catch(done.fail); + }); + }); + + describe('getFileData', () => { + let localFile; + + beforeEach(() => { + spyOn(service, 'getFileData').and.returnValue( + Promise.resolve({ + headers: { + 'page-title': 'testing getFileData', + }, + json: () => + Promise.resolve({ + blame_path: 'blame_path', + commits_path: 'commits_path', + permalink: 'permalink', + raw_path: 'raw_path', + binary: false, + html: '123', + render_error: '', + }), + }), + ); + + localFile = file(`newCreate-${Math.random()}`); + localFile.url = 'getFileDataURL'; + store.state.entries[localFile.path] = localFile; + }); + + it('calls the service', done => { + store + .dispatch('getFileData', { path: localFile.path }) + .then(() => { + expect(service.getFileData).toHaveBeenCalledWith('getFileDataURL'); + + done(); + }) + .catch(done.fail); + }); + + it('sets the file data', done => { + store + .dispatch('getFileData', { path: localFile.path }) + .then(() => { + expect(localFile.blamePath).toBe('blame_path'); + + done(); + }) + .catch(done.fail); + }); + + it('sets document title', done => { + store + .dispatch('getFileData', { path: localFile.path }) + .then(() => { + expect(document.title).toBe('testing getFileData'); + + done(); + }) + .catch(done.fail); + }); + + it('sets the file as active', done => { + store + .dispatch('getFileData', { path: localFile.path }) + .then(() => { + expect(localFile.active).toBeTruthy(); + + done(); + }) + .catch(done.fail); + }); + + it('sets the file not as active if we pass makeFileActive false', done => { + store + .dispatch('getFileData', { path: localFile.path, makeFileActive: false }) + .then(() => { + expect(localFile.active).toBeFalsy(); + + done(); + }) + .catch(done.fail); + }); + + it('adds the file 
to open files', done => { + store + .dispatch('getFileData', { path: localFile.path }) + .then(() => { + expect(store.state.openFiles.length).toBe(1); + expect(store.state.openFiles[0].name).toBe(localFile.name); + + done(); + }) + .catch(done.fail); + }); + }); + + describe('getRawFileData', () => { + let tmpFile; + + beforeEach(() => { + spyOn(service, 'getRawFileData').and.returnValue(Promise.resolve('raw')); + + tmpFile = file('tmpFile'); + store.state.entries[tmpFile.path] = tmpFile; + }); + + it('calls getRawFileData service method', done => { + store + .dispatch('getRawFileData', { path: tmpFile.path }) + .then(() => { + expect(service.getRawFileData).toHaveBeenCalledWith(tmpFile); + + done(); + }) + .catch(done.fail); + }); + + it('updates file raw data', done => { + store + .dispatch('getRawFileData', { path: tmpFile.path }) + .then(() => { + expect(tmpFile.raw).toBe('raw'); + + done(); + }) + .catch(done.fail); + }); + + it('calls also getBaseRawFileData service method', done => { + spyOn(service, 'getBaseRawFileData').and.returnValue(Promise.resolve('baseraw')); + + tmpFile.mrChange = { new_file: false }; + + store + .dispatch('getRawFileData', { path: tmpFile.path, baseSha: 'SHA' }) + .then(() => { + expect(service.getBaseRawFileData).toHaveBeenCalledWith(tmpFile, 'SHA'); + expect(tmpFile.baseRaw).toBe('baseraw'); + + done(); + }) + .catch(done.fail); + }); + }); + + describe('changeFileContent', () => { + let tmpFile; + + beforeEach(() => { + tmpFile = file('tmpFile'); + store.state.entries[tmpFile.path] = tmpFile; + }); + + it('updates file content', done => { + store + .dispatch('changeFileContent', { + path: tmpFile.path, + content: 'content', + }) + .then(() => { + expect(tmpFile.content).toBe('content'); + + done(); + }) + .catch(done.fail); + }); + + it('adds file into changedFiles array', done => { + store + .dispatch('changeFileContent', { + path: tmpFile.path, + content: 'content', + }) + .then(() => { + expect(store.state.changedFiles.length).toBe(1); + + done(); + }) + .catch(done.fail); + }); + + it('adds file once into changedFiles array', done => { + store + .dispatch('changeFileContent', { + path: tmpFile.path, + content: 'content', + }) + .then(() => + store.dispatch('changeFileContent', { + path: tmpFile.path, + content: 'content 123', + }), + ) + .then(() => { + expect(store.state.changedFiles.length).toBe(1); + + done(); + }) + .catch(done.fail); + }); + + it('removes file from changedFiles array if not changed', done => { + store + .dispatch('changeFileContent', { + path: tmpFile.path, + content: 'content', + }) + .then(() => + store.dispatch('changeFileContent', { + path: tmpFile.path, + content: '', + }), + ) + .then(() => { + expect(store.state.changedFiles.length).toBe(0); + + done(); + }) + .catch(done.fail); + }); + }); + + describe('discardFileChanges', () => { + let tmpFile; + + beforeEach(() => { + spyOn(eventHub, '$on'); + spyOn(eventHub, '$emit'); + + tmpFile = file(); + tmpFile.content = 'testing'; + + store.state.changedFiles.push(tmpFile); + store.state.entries[tmpFile.path] = tmpFile; + }); + + it('resets file content', done => { + store + .dispatch('discardFileChanges', tmpFile.path) + .then(() => { + expect(tmpFile.content).not.toBe('testing'); + + done(); + }) + .catch(done.fail); + }); + + it('removes file from changedFiles array', done => { + store + .dispatch('discardFileChanges', tmpFile.path) + .then(() => { + expect(store.state.changedFiles.length).toBe(0); + + done(); + }) + .catch(done.fail); + }); + + it('closes temp file', done 
=> { + tmpFile.tempFile = true; + tmpFile.opened = true; + + store + .dispatch('discardFileChanges', tmpFile.path) + .then(() => { + expect(tmpFile.opened).toBeFalsy(); + + done(); + }) + .catch(done.fail); + }); + + it('does not re-open a closed temp file', done => { + tmpFile.tempFile = true; + + expect(tmpFile.opened).toBeFalsy(); + + store + .dispatch('discardFileChanges', tmpFile.path) + .then(() => { + expect(tmpFile.opened).toBeFalsy(); + + done(); + }) + .catch(done.fail); + }); + + it('pushes route for active file', done => { + tmpFile.active = true; + store.state.openFiles.push(tmpFile); + + store + .dispatch('discardFileChanges', tmpFile.path) + .then(() => { + expect(router.push).toHaveBeenCalledWith(`/project${tmpFile.url}`); + + done(); + }) + .catch(done.fail); + }); + + it('emits eventHub event to dispose cached model', done => { + store + .dispatch('discardFileChanges', tmpFile.path) + .then(() => { + expect(eventHub.$emit).toHaveBeenCalled(); + + done(); + }) + .catch(done.fail); + }); + }); + + describe('stageChange', () => { + it('calls STAGE_CHANGE with file path', done => { + testAction( + actions.stageChange, + 'path', + store.state, + [{ type: types.STAGE_CHANGE, payload: 'path' }], + [], + done, + ); + }); + }); + + describe('unstageChange', () => { + it('calls UNSTAGE_CHANGE with file path', done => { + testAction( + actions.unstageChange, + 'path', + store.state, + [{ type: types.UNSTAGE_CHANGE, payload: 'path' }], + [], + done, + ); + }); + }); + + describe('openPendingTab', () => { + let f; + + beforeEach(() => { + f = { + ...file(), + projectId: '123', + }; + + store.state.entries[f.path] = f; + }); + + it('makes file pending in openFiles', done => { + store + .dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }) + .then(() => { + expect(store.state.openFiles[0].pending).toBe(true); + }) + .then(done) + .catch(done.fail); + }); + + it('returns true when opened', done => { + store + .dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }) + .then(added => { + expect(added).toBe(true); + }) + .then(done) + .catch(done.fail); + }); + + it('pushes router URL when added', done => { + store.state.currentBranchId = 'master'; + + store + .dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }) + .then(() => { + expect(router.push).toHaveBeenCalledWith('/project/123/tree/master/'); + }) + .then(done) + .catch(done.fail); + }); + + it('calls scrollToTab', done => { + const scrollToTabSpy = jasmine.createSpy('scrollToTab'); + const oldScrollToTab = store._actions.scrollToTab; // eslint-disable-line + store._actions.scrollToTab = [scrollToTabSpy]; // eslint-disable-line + + store + .dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }) + .then(() => { + expect(scrollToTabSpy).toHaveBeenCalled(); + store._actions.scrollToTab = oldScrollToTab; // eslint-disable-line + }) + .then(done) + .catch(done.fail); + }); + + it('returns false when passed in file is active & viewer is diff', done => { + f.active = true; + store.state.openFiles.push(f); + store.state.viewer = 'diff'; + + store + .dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }) + .then(added => { + expect(added).toBe(false); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('removePendingTab', () => { + let f; + + beforeEach(() => { + spyOn(eventHub, '$emit'); + + f = { + ...file('pendingFile'), + pending: true, + }; + }); + + it('removes pending file from open files', done => { + store.state.openFiles.push(f); + + store + .dispatch('removePendingTab', f) + 
.then(() => { + expect(store.state.openFiles.length).toBe(0); + }) + .then(done) + .catch(done.fail); + }); + + it('emits event to dispose model', done => { + store + .dispatch('removePendingTab', f) + .then(() => { + expect(eventHub.$emit).toHaveBeenCalledWith(`editor.update.model.dispose.${f.key}`); + }) + .then(done) + .catch(done.fail); + }); + }); +}); diff --git a/spec/javascripts/ide/stores/actions/merge_request_spec.js b/spec/javascripts/ide/stores/actions/merge_request_spec.js new file mode 100644 index 00000000000..b4ec4a0b173 --- /dev/null +++ b/spec/javascripts/ide/stores/actions/merge_request_spec.js @@ -0,0 +1,110 @@ +import store from '~/ide/stores'; +import service from '~/ide/services'; +import { resetStore } from '../../helpers'; + +describe('IDE store merge request actions', () => { + beforeEach(() => { + store.state.projects.abcproject = { + mergeRequests: {}, + }; + }); + + afterEach(() => { + resetStore(store); + }); + + describe('getMergeRequestData', () => { + beforeEach(() => { + spyOn(service, 'getProjectMergeRequestData').and.returnValue( + Promise.resolve({ data: { title: 'mergerequest' } }), + ); + }); + + it('calls getProjectMergeRequestData service method', done => { + store + .dispatch('getMergeRequestData', { projectId: 'abcproject', mergeRequestId: 1 }) + .then(() => { + expect(service.getProjectMergeRequestData).toHaveBeenCalledWith('abcproject', 1); + + done(); + }) + .catch(done.fail); + }); + + it('sets the Merge Request Object', done => { + store + .dispatch('getMergeRequestData', { projectId: 'abcproject', mergeRequestId: 1 }) + .then(() => { + expect(store.state.projects.abcproject.mergeRequests['1'].title).toBe('mergerequest'); + expect(store.state.currentMergeRequestId).toBe(1); + + done(); + }) + .catch(done.fail); + }); + }); + + describe('getMergeRequestChanges', () => { + beforeEach(() => { + spyOn(service, 'getProjectMergeRequestChanges').and.returnValue( + Promise.resolve({ data: { title: 'mergerequest' } }), + ); + + store.state.projects.abcproject.mergeRequests['1'] = { changes: [] }; + }); + + it('calls getProjectMergeRequestChanges service method', done => { + store + .dispatch('getMergeRequestChanges', { projectId: 'abcproject', mergeRequestId: 1 }) + .then(() => { + expect(service.getProjectMergeRequestChanges).toHaveBeenCalledWith('abcproject', 1); + + done(); + }) + .catch(done.fail); + }); + + it('sets the Merge Request Changes Object', done => { + store + .dispatch('getMergeRequestChanges', { projectId: 'abcproject', mergeRequestId: 1 }) + .then(() => { + expect(store.state.projects.abcproject.mergeRequests['1'].changes.title).toBe( + 'mergerequest', + ); + done(); + }) + .catch(done.fail); + }); + }); + + describe('getMergeRequestVersions', () => { + beforeEach(() => { + spyOn(service, 'getProjectMergeRequestVersions').and.returnValue( + Promise.resolve({ data: [{ id: 789 }] }), + ); + + store.state.projects.abcproject.mergeRequests['1'] = { versions: [] }; + }); + + it('calls getProjectMergeRequestVersions service method', done => { + store + .dispatch('getMergeRequestVersions', { projectId: 'abcproject', mergeRequestId: 1 }) + .then(() => { + expect(service.getProjectMergeRequestVersions).toHaveBeenCalledWith('abcproject', 1); + + done(); + }) + .catch(done.fail); + }); + + it('sets the Merge Request Versions Object', done => { + store + .dispatch('getMergeRequestVersions', { projectId: 'abcproject', mergeRequestId: 1 }) + .then(() => { + expect(store.state.projects.abcproject.mergeRequests['1'].versions.length).toBe(1); + 
done(); + }) + .catch(done.fail); + }); + }); +}); diff --git a/spec/javascripts/ide/stores/actions/tree_spec.js b/spec/javascripts/ide/stores/actions/tree_spec.js new file mode 100644 index 00000000000..e0ef57a3966 --- /dev/null +++ b/spec/javascripts/ide/stores/actions/tree_spec.js @@ -0,0 +1,166 @@ +import Vue from 'vue'; +import store from '~/ide/stores'; +import service from '~/ide/services'; +import router from '~/ide/ide_router'; +import { file, resetStore } from '../../helpers'; + +describe('Multi-file store tree actions', () => { + let projectTree; + + const basicCallParameters = { + endpoint: 'rootEndpoint', + projectId: 'abcproject', + branch: 'master', + branchId: 'master', + }; + + beforeEach(() => { + spyOn(router, 'push'); + + store.state.currentProjectId = 'abcproject'; + store.state.currentBranchId = 'master'; + store.state.projects.abcproject = { + web_url: '', + branches: { + master: { + workingReference: '1', + }, + }, + }; + }); + + afterEach(() => { + resetStore(store); + }); + + describe('getFiles', () => { + beforeEach(() => { + spyOn(service, 'getFiles').and.returnValue( + Promise.resolve({ + json: () => + Promise.resolve([ + 'file.txt', + 'folder/fileinfolder.js', + 'folder/subfolder/fileinsubfolder.js', + ]), + }), + ); + }); + + it('calls service getFiles', done => { + store + .dispatch('getFiles', basicCallParameters) + .then(() => { + expect(service.getFiles).toHaveBeenCalledWith('', 'master'); + + done(); + }) + .catch(done.fail); + }); + + it('adds data into tree', done => { + store + .dispatch('getFiles', basicCallParameters) + .then(() => { + projectTree = store.state.trees['abcproject/master']; + expect(projectTree.tree.length).toBe(2); + expect(projectTree.tree[0].type).toBe('tree'); + expect(projectTree.tree[0].tree[1].name).toBe('fileinfolder.js'); + expect(projectTree.tree[1].type).toBe('blob'); + expect(projectTree.tree[0].tree[0].tree[0].type).toBe('blob'); + expect(projectTree.tree[0].tree[0].tree[0].name).toBe('fileinsubfolder.js'); + + done(); + }) + .catch(done.fail); + }); + }); + + describe('toggleTreeOpen', () => { + let tree; + + beforeEach(() => { + tree = file('testing', '1', 'tree'); + store.state.entries[tree.path] = tree; + }); + + it('toggles the tree open', done => { + store + .dispatch('toggleTreeOpen', tree.path) + .then(() => { + expect(tree.opened).toBeTruthy(); + + done(); + }) + .catch(done.fail); + }); + }); + + describe('getLastCommitData', () => { + beforeEach(() => { + spyOn(service, 'getTreeLastCommit').and.returnValue( + Promise.resolve({ + headers: { + 'more-logs-url': null, + }, + json: () => + Promise.resolve([ + { + type: 'tree', + file_name: 'testing', + commit: { + message: 'commit message', + authored_date: '123', + }, + }, + ]), + }), + ); + + store.state.trees['abcproject/mybranch'] = { + tree: [], + }; + + projectTree = store.state.trees['abcproject/mybranch']; + projectTree.tree.push(file('testing', '1', 'tree')); + projectTree.lastCommitPath = 'lastcommitpath'; + }); + + it('calls service with lastCommitPath', done => { + store + .dispatch('getLastCommitData', projectTree) + .then(() => { + expect(service.getTreeLastCommit).toHaveBeenCalledWith('lastcommitpath'); + + done(); + }) + .catch(done.fail); + }); + + it('updates trees last commit data', done => { + store + .dispatch('getLastCommitData', projectTree) + .then(Vue.nextTick) + .then(() => { + expect(projectTree.tree[0].lastCommit.message).toBe('commit message'); + + done(); + }) + .catch(done.fail); + }); + + it('does not update entry if not found', done 
=> { + projectTree.tree[0].name = 'a'; + + store + .dispatch('getLastCommitData', projectTree) + .then(Vue.nextTick) + .then(() => { + expect(projectTree.tree[0].lastCommit.message).not.toBe('commit message'); + + done(); + }) + .catch(done.fail); + }); + }); +}); diff --git a/spec/javascripts/ide/stores/actions_spec.js b/spec/javascripts/ide/stores/actions_spec.js new file mode 100644 index 00000000000..22a7441ba92 --- /dev/null +++ b/spec/javascripts/ide/stores/actions_spec.js @@ -0,0 +1,343 @@ +import * as urlUtils from '~/lib/utils/url_utility'; +import store from '~/ide/stores'; +import * as actions from '~/ide/stores/actions'; +import * as types from '~/ide/stores/mutation_types'; +import router from '~/ide/ide_router'; +import { resetStore, file } from '../helpers'; +import testAction from '../../helpers/vuex_action_helper'; + +describe('Multi-file store actions', () => { + beforeEach(() => { + spyOn(router, 'push'); + }); + + afterEach(() => { + resetStore(store); + }); + + describe('redirectToUrl', () => { + it('calls visitUrl', done => { + spyOn(urlUtils, 'visitUrl'); + + store + .dispatch('redirectToUrl', 'test') + .then(() => { + expect(urlUtils.visitUrl).toHaveBeenCalledWith('test'); + + done(); + }) + .catch(done.fail); + }); + }); + + describe('setInitialData', () => { + it('commits initial data', done => { + store + .dispatch('setInitialData', { canCommit: true }) + .then(() => { + expect(store.state.canCommit).toBeTruthy(); + done(); + }) + .catch(done.fail); + }); + }); + + describe('discardAllChanges', () => { + beforeEach(() => { + const f = file('discardAll'); + f.changed = true; + + store.state.openFiles.push(f); + store.state.changedFiles.push(f); + store.state.entries[f.path] = f; + }); + + it('discards changes in file', done => { + store + .dispatch('discardAllChanges') + .then(() => { + expect(store.state.openFiles.changed).toBeFalsy(); + }) + .then(done) + .catch(done.fail); + }); + + it('removes all files from changedFiles state', done => { + store + .dispatch('discardAllChanges') + .then(() => { + expect(store.state.changedFiles.length).toBe(0); + expect(store.state.openFiles.length).toBe(1); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('closeAllFiles', () => { + beforeEach(() => { + const f = file('closeAll'); + store.state.openFiles.push(f); + store.state.openFiles[0].opened = true; + store.state.entries[f.path] = f; + }); + + it('closes all open files', done => { + store + .dispatch('closeAllFiles') + .then(() => { + expect(store.state.openFiles.length).toBe(0); + + done(); + }) + .catch(done.fail); + }); + }); + + describe('createTempEntry', () => { + beforeEach(() => { + document.body.innerHTML += '<div class="flash-container"></div>'; + + store.state.currentProjectId = 'abcproject'; + store.state.currentBranchId = 'mybranch'; + + store.state.trees['abcproject/mybranch'] = { + tree: [], + }; + store.state.projects.abcproject = { + web_url: '', + }; + }); + + afterEach(() => { + document.querySelector('.flash-container').remove(); + }); + + describe('tree', () => { + it('creates temp tree', done => { + store + .dispatch('createTempEntry', { + branchId: store.state.currentBranchId, + name: 'test', + type: 'tree', + }) + .then(() => { + const entry = store.state.entries.test; + + expect(entry).not.toBeNull(); + expect(entry.type).toBe('tree'); + + done(); + }) + .catch(done.fail); + }); + + it('creates new folder inside another tree', done => { + const tree = { + type: 'tree', + name: 'testing', + path: 'testing', + tree: [], + }; + + 
store.state.entries[tree.path] = tree; + + store + .dispatch('createTempEntry', { + branchId: store.state.currentBranchId, + name: 'testing/test', + type: 'tree', + }) + .then(() => { + expect(tree.tree[0].tempFile).toBeTruthy(); + expect(tree.tree[0].name).toBe('test'); + expect(tree.tree[0].type).toBe('tree'); + + done(); + }) + .catch(done.fail); + }); + + it('does not create new tree if already exists', done => { + const tree = { + type: 'tree', + path: 'testing', + tempFile: false, + tree: [], + }; + + store.state.entries[tree.path] = tree; + + store + .dispatch('createTempEntry', { + branchId: store.state.currentBranchId, + name: 'testing', + type: 'tree', + }) + .then(() => { + expect(store.state.entries[tree.path].tempFile).toEqual(false); + expect(document.querySelector('.flash-alert')).not.toBeNull(); + + done(); + }) + .catch(done.fail); + }); + }); + + describe('blob', () => { + it('creates temp file', done => { + store + .dispatch('createTempEntry', { + name: 'test', + branchId: 'mybranch', + type: 'blob', + }) + .then(f => { + expect(f.tempFile).toBeTruthy(); + expect(store.state.trees['abcproject/mybranch'].tree.length).toBe(1); + + done(); + }) + .catch(done.fail); + }); + + it('adds tmp file to open files', done => { + store + .dispatch('createTempEntry', { + name: 'test', + branchId: 'mybranch', + type: 'blob', + }) + .then(f => { + expect(store.state.openFiles.length).toBe(1); + expect(store.state.openFiles[0].name).toBe(f.name); + + done(); + }) + .catch(done.fail); + }); + + it('adds tmp file to changed files', done => { + store + .dispatch('createTempEntry', { + name: 'test', + branchId: 'mybranch', + type: 'blob', + }) + .then(f => { + expect(store.state.changedFiles.length).toBe(1); + expect(store.state.changedFiles[0].name).toBe(f.name); + + done(); + }) + .catch(done.fail); + }); + + it('sets tmp file as active', done => { + store + .dispatch('createTempEntry', { + name: 'test', + branchId: 'mybranch', + type: 'blob', + }) + .then(f => { + expect(f.active).toBeTruthy(); + + done(); + }) + .catch(done.fail); + }); + + it('creates flash message if file already exists', done => { + const f = file('test', '1', 'blob'); + store.state.trees['abcproject/mybranch'].tree = [f]; + store.state.entries[f.path] = f; + + store + .dispatch('createTempEntry', { + name: 'test', + branchId: 'mybranch', + type: 'blob', + }) + .then(() => { + expect(document.querySelector('.flash-alert')).not.toBeNull(); + + done(); + }) + .catch(done.fail); + }); + }); + }); + + describe('popHistoryState', () => {}); + + describe('scrollToTab', () => { + it('focuses the current active element', done => { + document.body.innerHTML += + '<div id="tabs"><div class="active"><div class="repo-tab"></div></div></div>'; + const el = document.querySelector('.repo-tab'); + spyOn(el, 'focus'); + + store + .dispatch('scrollToTab') + .then(() => { + setTimeout(() => { + expect(el.focus).toHaveBeenCalled(); + + document.getElementById('tabs').remove(); + + done(); + }); + }) + .catch(done.fail); + }); + }); + + describe('stageAllChanges', () => { + it('adds all files from changedFiles to stagedFiles', done => { + store.state.changedFiles.push(file(), file('new')); + + testAction( + actions.stageAllChanges, + null, + store.state, + [ + { type: types.STAGE_CHANGE, payload: store.state.changedFiles[0].path }, + { type: types.STAGE_CHANGE, payload: store.state.changedFiles[1].path }, + ], + [], + done, + ); + }); + }); + + describe('unstageAllChanges', () => { + it('removes all files from stagedFiles after 
unstaging', done => { + store.state.stagedFiles.push(file(), file('new')); + + testAction( + actions.unstageAllChanges, + null, + store.state, + [ + { type: types.UNSTAGE_CHANGE, payload: store.state.stagedFiles[0].path }, + { type: types.UNSTAGE_CHANGE, payload: store.state.stagedFiles[1].path }, + ], + [], + done, + ); + }); + }); + + describe('updateViewer', () => { + it('updates viewer state', done => { + store + .dispatch('updateViewer', 'diff') + .then(() => { + expect(store.state.viewer).toBe('diff'); + }) + .then(done) + .catch(done.fail); + }); + }); +}); diff --git a/spec/javascripts/ide/stores/getters_spec.js b/spec/javascripts/ide/stores/getters_spec.js new file mode 100644 index 00000000000..8d04b83928c --- /dev/null +++ b/spec/javascripts/ide/stores/getters_spec.js @@ -0,0 +1,67 @@ +import * as getters from '~/ide/stores/getters'; +import state from '~/ide/stores/state'; +import { file } from '../helpers'; + +describe('IDE store getters', () => { + let localState; + + beforeEach(() => { + localState = state(); + }); + + describe('activeFile', () => { + it('returns the current active file', () => { + localState.openFiles.push(file()); + localState.openFiles.push(file('active')); + localState.openFiles[1].active = true; + + expect(getters.activeFile(localState).name).toBe('active'); + }); + + it('returns undefined if no active files are found', () => { + localState.openFiles.push(file()); + localState.openFiles.push(file('active')); + + expect(getters.activeFile(localState)).toBeNull(); + }); + }); + + describe('modifiedFiles', () => { + it('returns a list of modified files', () => { + localState.openFiles.push(file()); + localState.changedFiles.push(file('changed')); + localState.changedFiles[0].changed = true; + + const modifiedFiles = getters.modifiedFiles(localState); + + expect(modifiedFiles.length).toBe(1); + expect(modifiedFiles[0].name).toBe('changed'); + }); + + it('returns angle left when collapsed', () => { + localState.rightPanelCollapsed = true; + + expect(getters.collapseButtonIcon(localState)).toBe('angle-double-left'); + }); + }); + + describe('currentMergeRequest', () => { + it('returns Current Merge Request', () => { + localState.currentProjectId = 'abcproject'; + localState.currentMergeRequestId = 1; + localState.projects.abcproject = { + mergeRequests: { + 1: { mergeId: 1 }, + }, + }; + + expect(getters.currentMergeRequest(localState).mergeId).toBe(1); + }); + + it('returns null if no active Merge Request was found', () => { + localState.currentProjectId = 'otherproject'; + + expect(getters.currentMergeRequest(localState)).toBeNull(); + }); + }); +}); diff --git a/spec/javascripts/ide/stores/modules/commit/actions_spec.js b/spec/javascripts/ide/stores/modules/commit/actions_spec.js new file mode 100644 index 00000000000..116967208e0 --- /dev/null +++ b/spec/javascripts/ide/stores/modules/commit/actions_spec.js @@ -0,0 +1,517 @@ +import store from '~/ide/stores'; +import service from '~/ide/services'; +import router from '~/ide/ide_router'; +import * as urlUtils from '~/lib/utils/url_utility'; +import eventHub from '~/ide/eventhub'; +import * as consts from '~/ide/stores/modules/commit/constants'; +import { resetStore, file } from 'spec/ide/helpers'; + +describe('IDE commit module actions', () => { + beforeEach(() => { + spyOn(router, 'push'); + }); + + afterEach(() => { + resetStore(store); + }); + + describe('updateCommitMessage', () => { + it('updates store with new commit message', done => { + store + .dispatch('commit/updateCommitMessage', 'testing') + 
.then(() => { + expect(store.state.commit.commitMessage).toBe('testing'); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('discardDraft', () => { + it('resets commit message to blank', done => { + store.state.commit.commitMessage = 'testing'; + + store + .dispatch('commit/discardDraft') + .then(() => { + expect(store.state.commit.commitMessage).not.toBe('testing'); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('updateCommitAction', () => { + it('updates store with new commit action', done => { + store + .dispatch('commit/updateCommitAction', '1') + .then(() => { + expect(store.state.commit.commitAction).toBe('1'); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('updateBranchName', () => { + it('updates store with new branch name', done => { + store + .dispatch('commit/updateBranchName', 'branch-name') + .then(() => { + expect(store.state.commit.newBranchName).toBe('branch-name'); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('setLastCommitMessage', () => { + beforeEach(() => { + Object.assign(store.state, { + currentProjectId: 'abcproject', + projects: { + abcproject: { + web_url: 'http://testing', + }, + }, + }); + }); + + it('updates commit message with short_id', done => { + store + .dispatch('commit/setLastCommitMessage', { short_id: '123' }) + .then(() => { + expect(store.state.lastCommitMsg).toContain( + 'Your changes have been committed. Commit <a href="http://testing/commit/123" class="commit-sha">123</a>', + ); + }) + .then(done) + .catch(done.fail); + }); + + it('updates commit message with stats', done => { + store + .dispatch('commit/setLastCommitMessage', { + short_id: '123', + stats: { + additions: '1', + deletions: '2', + }, + }) + .then(() => { + expect(store.state.lastCommitMsg).toBe( + 'Your changes have been committed. 
Commit <a href="http://testing/commit/123" class="commit-sha">123</a> with 1 additions, 2 deletions.', + ); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('checkCommitStatus', () => { + beforeEach(() => { + store.state.currentProjectId = 'abcproject'; + store.state.currentBranchId = 'master'; + store.state.projects.abcproject = { + branches: { + master: { + workingReference: '1', + }, + }, + }; + }); + + it('calls service', done => { + spyOn(service, 'getBranchData').and.returnValue( + Promise.resolve({ + data: { + commit: { id: '123' }, + }, + }), + ); + + store + .dispatch('commit/checkCommitStatus') + .then(() => { + expect(service.getBranchData).toHaveBeenCalledWith('abcproject', 'master'); + + done(); + }) + .catch(done.fail); + }); + + it('returns true if current ref does not equal returned ID', done => { + spyOn(service, 'getBranchData').and.returnValue( + Promise.resolve({ + data: { + commit: { id: '123' }, + }, + }), + ); + + store + .dispatch('commit/checkCommitStatus') + .then(val => { + expect(val).toBeTruthy(); + + done(); + }) + .catch(done.fail); + }); + + it('returns false if current ref equals returned ID', done => { + spyOn(service, 'getBranchData').and.returnValue( + Promise.resolve({ + data: { + commit: { id: '1' }, + }, + }), + ); + + store + .dispatch('commit/checkCommitStatus') + .then(val => { + expect(val).toBeFalsy(); + + done(); + }) + .catch(done.fail); + }); + }); + + describe('updateFilesAfterCommit', () => { + const data = { + id: '123', + message: 'testing commit message', + committed_date: '123', + committer_name: 'root', + }; + const branch = 'master'; + let f; + + beforeEach(() => { + spyOn(eventHub, '$emit'); + + f = file('changedFile'); + Object.assign(f, { + active: true, + changed: true, + content: 'file content', + }); + + store.state.currentProjectId = 'abcproject'; + store.state.currentBranchId = 'master'; + store.state.projects.abcproject = { + web_url: 'web_url', + branches: { + master: { + workingReference: '', + }, + }, + }; + store.state.stagedFiles.push(f, { + ...file('changedFile2'), + changed: true, + }); + store.state.openFiles = store.state.stagedFiles; + + store.state.stagedFiles.forEach(stagedFile => { + store.state.entries[stagedFile.path] = stagedFile; + }); + }); + + it('updates stores working reference', done => { + store + .dispatch('commit/updateFilesAfterCommit', { + data, + branch, + }) + .then(() => { + expect(store.state.projects.abcproject.branches.master.workingReference).toBe(data.id); + }) + .then(done) + .catch(done.fail); + }); + + it('resets all files changed status', done => { + store + .dispatch('commit/updateFilesAfterCommit', { + data, + branch, + }) + .then(() => { + store.state.openFiles.forEach(entry => { + expect(entry.changed).toBeFalsy(); + }); + }) + .then(done) + .catch(done.fail); + }); + + it('sets files commit data', done => { + store + .dispatch('commit/updateFilesAfterCommit', { + data, + branch, + }) + .then(() => { + expect(f.lastCommit.message).toBe(data.message); + }) + .then(done) + .catch(done.fail); + }); + + it('updates raw content for changed file', done => { + store + .dispatch('commit/updateFilesAfterCommit', { + data, + branch, + }) + .then(() => { + expect(f.raw).toBe(f.content); + }) + .then(done) + .catch(done.fail); + }); + + it('emits changed event for file', done => { + store + .dispatch('commit/updateFilesAfterCommit', { + data, + branch, + }) + .then(() => { + expect(eventHub.$emit).toHaveBeenCalledWith(`editor.update.model.content.${f.key}`, { + content: f.content, 
+ changed: false, + }); + }) + .then(done) + .catch(done.fail); + }); + + it('pushes route to new branch if commitAction is new branch', done => { + store.state.commit.commitAction = consts.COMMIT_TO_NEW_BRANCH; + + store + .dispatch('commit/updateFilesAfterCommit', { + data, + branch, + }) + .then(() => { + expect(router.push).toHaveBeenCalledWith(`/project/abcproject/blob/master/${f.path}`); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('commitChanges', () => { + beforeEach(() => { + spyOn(urlUtils, 'visitUrl'); + + document.body.innerHTML += '<div class="flash-container"></div>'; + + store.state.currentProjectId = 'abcproject'; + store.state.currentBranchId = 'master'; + store.state.projects.abcproject = { + web_url: 'webUrl', + branches: { + master: { + workingReference: '1', + }, + }, + }; + + const f = { + ...file('changed'), + type: 'blob', + active: true, + }; + store.state.stagedFiles.push(f); + store.state.changedFiles = [ + { + ...f, + }, + ]; + store.state.openFiles = store.state.changedFiles; + + store.state.openFiles.forEach(localF => { + store.state.entries[localF.path] = localF; + }); + + store.state.commit.commitAction = '2'; + store.state.commit.commitMessage = 'testing 123'; + }); + + afterEach(() => { + document.querySelector('.flash-container').remove(); + }); + + describe('success', () => { + beforeEach(() => { + spyOn(service, 'commit').and.returnValue( + Promise.resolve({ + data: { + id: '123456', + short_id: '123', + message: 'test message', + committed_date: 'date', + stats: { + additions: '1', + deletions: '2', + }, + }, + }), + ); + }); + + it('calls service', done => { + store + .dispatch('commit/commitChanges') + .then(() => { + expect(service.commit).toHaveBeenCalledWith('abcproject', { + branch: jasmine.anything(), + commit_message: 'testing 123', + actions: [ + { + action: 'update', + file_path: jasmine.anything(), + content: jasmine.anything(), + encoding: jasmine.anything(), + }, + ], + start_branch: 'master', + }); + + done(); + }) + .catch(done.fail); + }); + + it('pushes router to new route', done => { + store + .dispatch('commit/commitChanges') + .then(() => { + expect(router.push).toHaveBeenCalledWith( + `/project/${store.state.currentProjectId}/blob/${ + store.getters['commit/newBranchName'] + }/changed`, + ); + + done(); + }) + .catch(done.fail); + }); + + it('sets last Commit Msg', done => { + store + .dispatch('commit/commitChanges') + .then(() => { + expect(store.state.lastCommitMsg).toBe( + 'Your changes have been committed. 
Commit <a href="webUrl/commit/123" class="commit-sha">123</a> with 1 additions, 2 deletions.', + ); + + done(); + }) + .catch(done.fail); + }); + + it('adds commit data to files', done => { + store + .dispatch('commit/commitChanges') + .then(() => { + expect(store.state.entries[store.state.openFiles[0].path].lastCommit.message).toBe( + 'test message', + ); + + done(); + }) + .catch(done.fail); + }); + + it('resets stores commit actions', done => { + store.state.commit.commitAction = consts.COMMIT_TO_NEW_BRANCH; + + store + .dispatch('commit/commitChanges') + .then(() => { + expect(store.state.commit.commitAction).not.toBe(consts.COMMIT_TO_NEW_BRANCH); + }) + .then(done) + .catch(done.fail); + }); + + it('removes all staged files', done => { + store + .dispatch('commit/commitChanges') + .then(() => { + expect(store.state.stagedFiles.length).toBe(0); + }) + .then(done) + .catch(done.fail); + }); + + describe('merge request', () => { + it('redirects to new merge request page', done => { + spyOn(eventHub, '$on'); + + store.state.commit.commitAction = '3'; + + store + .dispatch('commit/commitChanges') + .then(() => { + expect(urlUtils.visitUrl).toHaveBeenCalledWith( + `webUrl/merge_requests/new?merge_request[source_branch]=${ + store.getters['commit/newBranchName'] + }&merge_request[target_branch]=master`, + ); + + done(); + }) + .catch(done.fail); + }); + + it('resets changed files before redirecting', done => { + spyOn(eventHub, '$on'); + + store.state.commit.commitAction = '3'; + + store + .dispatch('commit/commitChanges') + .then(() => { + expect(store.state.stagedFiles.length).toBe(0); + + done(); + }) + .catch(done.fail); + }); + }); + }); + + describe('failed', () => { + beforeEach(() => { + spyOn(service, 'commit').and.returnValue( + Promise.resolve({ + data: { + message: 'failed message', + }, + }), + ); + }); + + it('shows failed message', done => { + store + .dispatch('commit/commitChanges') + .then(() => { + const alert = document.querySelector('.flash-container'); + + expect(alert.textContent.trim()).toBe('failed message'); + + done(); + }) + .catch(done.fail); + }); + }); + }); +}); diff --git a/spec/javascripts/ide/stores/modules/commit/getters_spec.js b/spec/javascripts/ide/stores/modules/commit/getters_spec.js new file mode 100644 index 00000000000..55580f046ad --- /dev/null +++ b/spec/javascripts/ide/stores/modules/commit/getters_spec.js @@ -0,0 +1,128 @@ +import commitState from '~/ide/stores/modules/commit/state'; +import * as consts from '~/ide/stores/modules/commit/constants'; +import * as getters from '~/ide/stores/modules/commit/getters'; + +describe('IDE commit module getters', () => { + let state; + + beforeEach(() => { + state = commitState(); + }); + + describe('discardDraftButtonDisabled', () => { + it('returns true when commitMessage is empty', () => { + expect(getters.discardDraftButtonDisabled(state)).toBeTruthy(); + }); + + it('returns false when commitMessage is not empty & loading is false', () => { + state.commitMessage = 'test'; + state.submitCommitLoading = false; + + expect(getters.discardDraftButtonDisabled(state)).toBeFalsy(); + }); + + it('returns true when commitMessage is not empty & loading is true', () => { + state.commitMessage = 'test'; + state.submitCommitLoading = true; + + expect(getters.discardDraftButtonDisabled(state)).toBeTruthy(); + }); + }); + + describe('commitButtonDisabled', () => { + const localGetters = { + discardDraftButtonDisabled: false, + }; + const rootState = { + stagedFiles: ['a'], + }; + + it('returns false when 
discardDraftButtonDisabled is false & stagedFiles is not empty', () => {
+      expect(
+        getters.commitButtonDisabled(state, localGetters, rootState),
+      ).toBeFalsy();
+    });
+
+    it('returns true when discardDraftButtonDisabled is false & stagedFiles is empty', () => {
+      rootState.stagedFiles.length = 0;
+
+      expect(
+        getters.commitButtonDisabled(state, localGetters, rootState),
+      ).toBeTruthy();
+    });
+
+    it('returns true when discardDraftButtonDisabled is true', () => {
+      localGetters.discardDraftButtonDisabled = true;
+
+      expect(
+        getters.commitButtonDisabled(state, localGetters, rootState),
+      ).toBeTruthy();
+    });
+
+    it('returns true when discardDraftButtonDisabled is false & changedFiles is not empty', () => {
+      localGetters.discardDraftButtonDisabled = false;
+      rootState.stagedFiles.length = 0;
+
+      expect(
+        getters.commitButtonDisabled(state, localGetters, rootState),
+      ).toBeTruthy();
+    });
+  });
+
+  describe('newBranchName', () => {
+    it('includes username, currentBranchId, patch & random number', () => {
+      gon.current_username = 'username';
+
+      const branch = getters.newBranchName(state, null, {
+        currentBranchId: 'testing',
+      });
+
+      expect(branch).toMatch(/username-testing-patch-\d{5}$/);
+    });
+  });
+
+  describe('branchName', () => {
+    const rootState = {
+      currentBranchId: 'master',
+    };
+    const localGetters = {
+      newBranchName: 'newBranchName',
+    };
+
+    beforeEach(() => {
+      Object.assign(state, {
+        newBranchName: 'state-newBranchName',
+      });
+    });
+
+    it('defaults to currentBranchId', () => {
+      expect(getters.branchName(state, null, rootState)).toBe('master');
+    });
+
+    ['COMMIT_TO_NEW_BRANCH', 'COMMIT_TO_NEW_BRANCH_MR'].forEach(type => {
+      describe(type, () => {
+        beforeEach(() => {
+          Object.assign(state, {
+            commitAction: consts[type],
+          });
+        });
+
+        it('uses newBranchName when not empty', () => {
+          expect(getters.branchName(state, localGetters, rootState)).toBe(
+            'state-newBranchName',
+          );
+        });
+
+        it('uses getters newBranchName when state newBranchName is empty', () => {
+          Object.assign(state, {
+            newBranchName: '',
+          });
+
+          expect(getters.branchName(state, localGetters, rootState)).toBe(
+            'newBranchName',
+          );
+        });
+      });
+    });
+  });
+});
diff --git a/spec/javascripts/ide/stores/modules/commit/mutations_spec.js b/spec/javascripts/ide/stores/modules/commit/mutations_spec.js new file mode 100644 index 00000000000..5de7a281d34 --- /dev/null +++ b/spec/javascripts/ide/stores/modules/commit/mutations_spec.js @@ -0,0 +1,42 @@
+import commitState from '~/ide/stores/modules/commit/state';
+import mutations from '~/ide/stores/modules/commit/mutations';
+
+describe('IDE commit module mutations', () => {
+  let state;
+
+  beforeEach(() => {
+    state = commitState();
+  });
+
+  describe('UPDATE_COMMIT_MESSAGE', () => {
+    it('updates commitMessage', () => {
+      mutations.UPDATE_COMMIT_MESSAGE(state, 'testing');
+
+      expect(state.commitMessage).toBe('testing');
+    });
+  });
+
+  describe('UPDATE_COMMIT_ACTION', () => {
+    it('updates commitAction', () => {
+      mutations.UPDATE_COMMIT_ACTION(state, 'testing');
+
+      expect(state.commitAction).toBe('testing');
+    });
+  });
+
+  describe('UPDATE_NEW_BRANCH_NAME', () => {
+    it('updates newBranchName', () => {
+      mutations.UPDATE_NEW_BRANCH_NAME(state, 'testing');
+
+      expect(state.newBranchName).toBe('testing');
+    });
+  });
+
+  describe('UPDATE_LOADING', () => {
+    it('updates submitCommitLoading', () => {
+      mutations.UPDATE_LOADING(state, true);
+
+      expect(state.submitCommitLoading).toBeTruthy();
+    });
+  });
+});
diff --git 
a/spec/javascripts/ide/stores/mutations/branch_spec.js b/spec/javascripts/ide/stores/mutations/branch_spec.js new file mode 100644 index 00000000000..a7167537ef2 --- /dev/null +++ b/spec/javascripts/ide/stores/mutations/branch_spec.js @@ -0,0 +1,18 @@ +import mutations from '~/ide/stores/mutations/branch'; +import state from '~/ide/stores/state'; + +describe('Multi-file store branch mutations', () => { + let localState; + + beforeEach(() => { + localState = state(); + }); + + describe('SET_CURRENT_BRANCH', () => { + it('sets currentBranch', () => { + mutations.SET_CURRENT_BRANCH(localState, 'master'); + + expect(localState.currentBranchId).toBe('master'); + }); + }); +}); diff --git a/spec/javascripts/ide/stores/mutations/file_spec.js b/spec/javascripts/ide/stores/mutations/file_spec.js new file mode 100644 index 00000000000..6fba934810d --- /dev/null +++ b/spec/javascripts/ide/stores/mutations/file_spec.js @@ -0,0 +1,318 @@ +import mutations from '~/ide/stores/mutations/file'; +import state from '~/ide/stores/state'; +import { file } from '../../helpers'; + +describe('IDE store file mutations', () => { + let localState; + let localFile; + + beforeEach(() => { + localState = state(); + localFile = { + ...file(), + type: 'blob', + }; + + localState.entries[localFile.path] = localFile; + }); + + describe('SET_FILE_ACTIVE', () => { + it('sets the file active', () => { + mutations.SET_FILE_ACTIVE(localState, { + path: localFile.path, + active: true, + }); + + expect(localFile.active).toBeTruthy(); + }); + + it('sets pending tab as not active', () => { + localState.openFiles.push({ + ...localFile, + pending: true, + active: true, + }); + + mutations.SET_FILE_ACTIVE(localState, { + path: localFile.path, + active: true, + }); + + expect(localState.openFiles[0].active).toBe(false); + }); + }); + + describe('TOGGLE_FILE_OPEN', () => { + beforeEach(() => { + mutations.TOGGLE_FILE_OPEN(localState, localFile.path); + }); + + it('adds into opened files', () => { + expect(localFile.opened).toBeTruthy(); + expect(localState.openFiles.length).toBe(1); + }); + + it('removes from opened files', () => { + mutations.TOGGLE_FILE_OPEN(localState, localFile.path); + + expect(localFile.opened).toBeFalsy(); + expect(localState.openFiles.length).toBe(0); + }); + }); + + describe('SET_FILE_DATA', () => { + it('sets extra file data', () => { + mutations.SET_FILE_DATA(localState, { + data: { + blame_path: 'blame', + commits_path: 'commits', + permalink: 'permalink', + raw_path: 'raw', + binary: true, + render_error: 'render_error', + }, + file: localFile, + }); + + expect(localFile.blamePath).toBe('blame'); + expect(localFile.commitsPath).toBe('commits'); + expect(localFile.permalink).toBe('permalink'); + expect(localFile.rawPath).toBe('raw'); + expect(localFile.binary).toBeTruthy(); + expect(localFile.renderError).toBe('render_error'); + expect(localFile.raw).toBeNull(); + expect(localFile.baseRaw).toBeNull(); + }); + }); + + describe('SET_FILE_RAW_DATA', () => { + it('sets raw data', () => { + mutations.SET_FILE_RAW_DATA(localState, { + file: localFile, + raw: 'testing', + }); + + expect(localFile.raw).toBe('testing'); + }); + }); + + describe('SET_FILE_BASE_RAW_DATA', () => { + it('sets raw data from base branch', () => { + mutations.SET_FILE_BASE_RAW_DATA(localState, { + file: localFile, + baseRaw: 'testing', + }); + + expect(localFile.baseRaw).toBe('testing'); + }); + }); + + describe('UPDATE_FILE_CONTENT', () => { + beforeEach(() => { + localFile.raw = 'test'; + }); + + it('sets content', () => { + 
mutations.UPDATE_FILE_CONTENT(localState, { + path: localFile.path, + content: 'test', + }); + + expect(localFile.content).toBe('test'); + }); + + it('sets changed if content does not match raw', () => { + mutations.UPDATE_FILE_CONTENT(localState, { + path: localFile.path, + content: 'testing', + }); + + expect(localFile.content).toBe('testing'); + expect(localFile.changed).toBeTruthy(); + }); + + it('sets changed if file is a temp file', () => { + localFile.tempFile = true; + + mutations.UPDATE_FILE_CONTENT(localState, { + path: localFile.path, + content: '', + }); + + expect(localFile.changed).toBeTruthy(); + }); + }); + + describe('SET_FILE_MERGE_REQUEST_CHANGE', () => { + it('sets file mr change', () => { + mutations.SET_FILE_MERGE_REQUEST_CHANGE(localState, { + file: localFile, + mrChange: { diff: 'ABC' }, + }); + + expect(localFile.mrChange.diff).toBe('ABC'); + }); + }); + + describe('DISCARD_FILE_CHANGES', () => { + beforeEach(() => { + localFile.content = 'test'; + localFile.changed = true; + }); + + it('resets content and changed', () => { + mutations.DISCARD_FILE_CHANGES(localState, localFile.path); + + expect(localFile.content).toBe(''); + expect(localFile.changed).toBeFalsy(); + }); + }); + + describe('ADD_FILE_TO_CHANGED', () => { + it('adds file into changed files array', () => { + mutations.ADD_FILE_TO_CHANGED(localState, localFile.path); + + expect(localState.changedFiles.length).toBe(1); + }); + }); + + describe('REMOVE_FILE_FROM_CHANGED', () => { + it('removes files from changed files array', () => { + localState.changedFiles.push(localFile); + + mutations.REMOVE_FILE_FROM_CHANGED(localState, localFile.path); + + expect(localState.changedFiles.length).toBe(0); + }); + }); + + describe('STAGE_CHANGE', () => { + it('adds file into stagedFiles array', () => { + mutations.STAGE_CHANGE(localState, localFile.path); + + expect(localState.stagedFiles.length).toBe(1); + expect(localState.stagedFiles[0]).toEqual(localFile); + }); + + it('updates stagedFile if it is already staged', () => { + mutations.STAGE_CHANGE(localState, localFile.path); + + localFile.raw = 'testing 123'; + + mutations.STAGE_CHANGE(localState, localFile.path); + + expect(localState.stagedFiles.length).toBe(1); + expect(localState.stagedFiles[0].raw).toEqual('testing 123'); + }); + }); + + describe('UNSTAGE_CHANGE', () => { + let f; + + beforeEach(() => { + f = { + ...file(), + type: 'blob', + staged: true, + }; + + localState.stagedFiles.push(f); + localState.changedFiles.push(f); + localState.entries[f.path] = f; + }); + + it('removes from stagedFiles array', () => { + mutations.UNSTAGE_CHANGE(localState, f.path); + + expect(localState.stagedFiles.length).toBe(0); + expect(localState.changedFiles.length).toBe(1); + }); + }); + + describe('TOGGLE_FILE_CHANGED', () => { + it('updates file changed status', () => { + mutations.TOGGLE_FILE_CHANGED(localState, { + file: localFile, + changed: true, + }); + + expect(localFile.changed).toBeTruthy(); + }); + }); + + describe('SET_FILE_VIEWMODE', () => { + it('updates file view mode', () => { + mutations.SET_FILE_VIEWMODE(localState, { + file: localFile, + viewMode: 'preview', + }); + + expect(localFile.viewMode).toBe('preview'); + }); + }); + + describe('ADD_PENDING_TAB', () => { + beforeEach(() => { + const f = { + ...file('openFile'), + path: 'openFile', + active: true, + opened: true, + }; + + localState.entries[f.path] = f; + localState.openFiles.push(f); + }); + + it('adds file into openFiles as pending', () => { + mutations.ADD_PENDING_TAB(localState, { file: 
localFile });
+
+      expect(localState.openFiles.length).toBe(2);
+      expect(localState.openFiles[1].pending).toBe(true);
+      expect(localState.openFiles[1].key).toBe(`pending-${localFile.key}`);
+    });
+
+    it('updates open file to pending', () => {
+      mutations.ADD_PENDING_TAB(localState, { file: localState.openFiles[0] });
+
+      expect(localState.openFiles.length).toBe(1);
+    });
+
+    it('updates pending open file to active', () => {
+      localState.openFiles.push({
+        ...localFile,
+        pending: true,
+      });
+
+      mutations.ADD_PENDING_TAB(localState, { file: localFile });
+
+      expect(localState.openFiles[1].pending).toBe(true);
+      expect(localState.openFiles[1].active).toBe(true);
+    });
+
+    it('sets all openFiles to not active', () => {
+      mutations.ADD_PENDING_TAB(localState, { file: localFile });
+
+      expect(localState.openFiles.length).toBe(2);
+
+      localState.openFiles.forEach(f => {
+        if (f.pending) {
+          expect(f.active).toBe(true);
+        } else {
+          expect(f.active).toBe(false);
+        }
+      });
+    });
+  });
+
+  describe('REMOVE_PENDING_TAB', () => {
+    it('removes pending tab from openFiles', () => {
+      localFile.key = 'testing';
+      localState.openFiles.push(localFile);
+
+      mutations.REMOVE_PENDING_TAB(localState, localFile);
+
+      expect(localState.openFiles.length).toBe(0);
+    });
+  });
+});
diff --git a/spec/javascripts/ide/stores/mutations/merge_request_spec.js b/spec/javascripts/ide/stores/mutations/merge_request_spec.js new file mode 100644 index 00000000000..f724bf464f5 --- /dev/null +++ b/spec/javascripts/ide/stores/mutations/merge_request_spec.js @@ -0,0 +1,65 @@
+import mutations from '~/ide/stores/mutations/merge_request';
+import state from '~/ide/stores/state';
+
+describe('IDE store merge request mutations', () => {
+  let localState;
+
+  beforeEach(() => {
+    localState = state();
+    localState.projects = { abcproject: { mergeRequests: {} } };
+
+    mutations.SET_MERGE_REQUEST(localState, {
+      projectPath: 'abcproject',
+      mergeRequestId: 1,
+      mergeRequest: {
+        title: 'mr',
+      },
+    });
+  });
+
+  describe('SET_CURRENT_MERGE_REQUEST', () => {
+    it('sets current merge request', () => {
+      mutations.SET_CURRENT_MERGE_REQUEST(localState, 2);
+
+      expect(localState.currentMergeRequestId).toBe(2);
+    });
+  });
+
+  describe('SET_MERGE_REQUEST', () => {
+    it('sets merge request data', () => {
+      const newMr = localState.projects.abcproject.mergeRequests[1];
+
+      expect(newMr.title).toBe('mr');
+      expect(newMr.active).toBeTruthy();
+    });
+  });
+
+  describe('SET_MERGE_REQUEST_CHANGES', () => {
+    it('sets merge request changes', () => {
+      mutations.SET_MERGE_REQUEST_CHANGES(localState, {
+        projectPath: 'abcproject',
+        mergeRequestId: 1,
+        changes: {
+          diff: 'abc',
+        },
+      });
+
+      const newMr = localState.projects.abcproject.mergeRequests[1];
+      expect(newMr.changes.diff).toBe('abc');
+    });
+  });
+
+  describe('SET_MERGE_REQUEST_VERSIONS', () => {
+    it('sets merge request versions', () => {
+      mutations.SET_MERGE_REQUEST_VERSIONS(localState, {
+        projectPath: 'abcproject',
+        mergeRequestId: 1,
+        versions: [{ id: 123 }],
+      });
+
+      const newMr = localState.projects.abcproject.mergeRequests[1];
+      expect(newMr.versions.length).toBe(1);
+      expect(newMr.versions[0].id).toBe(123);
+    });
+  });
+});
diff --git a/spec/javascripts/ide/stores/mutations/tree_spec.js b/spec/javascripts/ide/stores/mutations/tree_spec.js new file mode 100644 index 00000000000..67e9f7509da --- /dev/null +++ b/spec/javascripts/ide/stores/mutations/tree_spec.js @@ -0,0 +1,79 @@
+import mutations from '~/ide/stores/mutations/tree';
+import state from '~/ide/stores/state';
+import 
{ file } from '../../helpers'; + +describe('Multi-file store tree mutations', () => { + let localState; + let localTree; + + beforeEach(() => { + localState = state(); + localTree = file(); + + localState.entries[localTree.path] = localTree; + }); + + describe('TOGGLE_TREE_OPEN', () => { + it('toggles tree open', () => { + mutations.TOGGLE_TREE_OPEN(localState, localTree.path); + + expect(localTree.opened).toBeTruthy(); + + mutations.TOGGLE_TREE_OPEN(localState, localTree.path); + + expect(localTree.opened).toBeFalsy(); + }); + }); + + describe('SET_DIRECTORY_DATA', () => { + const data = [ + { + name: 'tree', + }, + { + name: 'submodule', + }, + { + name: 'blob', + }, + ]; + + it('adds directory data', () => { + localState.trees['project/master'] = { + tree: [], + }; + + mutations.SET_DIRECTORY_DATA(localState, { + data, + treePath: 'project/master', + }); + + const tree = localState.trees['project/master']; + + expect(tree.tree.length).toBe(3); + expect(tree.tree[0].name).toBe('tree'); + expect(tree.tree[1].name).toBe('submodule'); + expect(tree.tree[2].name).toBe('blob'); + }); + + it('keeps loading state', () => { + mutations.CREATE_TREE(localState, { treePath: 'project/master' }); + mutations.SET_DIRECTORY_DATA(localState, { + data, + treePath: 'project/master', + }); + + expect(localState.trees['project/master'].loading).toBe(true); + }); + }); + + describe('REMOVE_ALL_CHANGES_FILES', () => { + it('removes all files from changedFiles state', () => { + localState.changedFiles.push(file('REMOVE_ALL_CHANGES_FILES')); + + mutations.REMOVE_ALL_CHANGES_FILES(localState); + + expect(localState.changedFiles.length).toBe(0); + }); + }); +}); diff --git a/spec/javascripts/ide/stores/mutations_spec.js b/spec/javascripts/ide/stores/mutations_spec.js new file mode 100644 index 00000000000..26e7ed4535e --- /dev/null +++ b/spec/javascripts/ide/stores/mutations_spec.js @@ -0,0 +1,89 @@ +import mutations from '~/ide/stores/mutations'; +import state from '~/ide/stores/state'; +import { file } from '../helpers'; + +describe('Multi-file store mutations', () => { + let localState; + let entry; + + beforeEach(() => { + localState = state(); + entry = file(); + + localState.entries[entry.path] = entry; + }); + + describe('SET_INITIAL_DATA', () => { + it('sets all initial data', () => { + mutations.SET_INITIAL_DATA(localState, { + test: 'test', + }); + + expect(localState.test).toBe('test'); + }); + }); + + describe('TOGGLE_LOADING', () => { + it('toggles loading of entry', () => { + mutations.TOGGLE_LOADING(localState, { entry }); + + expect(entry.loading).toBeTruthy(); + + mutations.TOGGLE_LOADING(localState, { entry }); + + expect(entry.loading).toBeFalsy(); + }); + + it('toggles loading of entry and sets specific value', () => { + mutations.TOGGLE_LOADING(localState, { entry }); + + expect(entry.loading).toBeTruthy(); + + mutations.TOGGLE_LOADING(localState, { entry, forceValue: true }); + + expect(entry.loading).toBeTruthy(); + }); + }); + + describe('SET_LEFT_PANEL_COLLAPSED', () => { + it('sets left panel collapsed', () => { + mutations.SET_LEFT_PANEL_COLLAPSED(localState, true); + + expect(localState.leftPanelCollapsed).toBeTruthy(); + + mutations.SET_LEFT_PANEL_COLLAPSED(localState, false); + + expect(localState.leftPanelCollapsed).toBeFalsy(); + }); + }); + + describe('SET_RIGHT_PANEL_COLLAPSED', () => { + it('sets right panel collapsed', () => { + mutations.SET_RIGHT_PANEL_COLLAPSED(localState, true); + + expect(localState.rightPanelCollapsed).toBeTruthy(); + + 
mutations.SET_RIGHT_PANEL_COLLAPSED(localState, false); + + expect(localState.rightPanelCollapsed).toBeFalsy(); + }); + }); + + describe('CLEAR_STAGED_CHANGES', () => { + it('clears stagedFiles array', () => { + localState.stagedFiles.push('a'); + + mutations.CLEAR_STAGED_CHANGES(localState); + + expect(localState.stagedFiles.length).toBe(0); + }); + }); + + describe('UPDATE_VIEWER', () => { + it('sets viewer state', () => { + mutations.UPDATE_VIEWER(localState, 'diff'); + + expect(localState.viewer).toBe('diff'); + }); + }); +}); diff --git a/spec/javascripts/ide/stores/utils_spec.js b/spec/javascripts/ide/stores/utils_spec.js new file mode 100644 index 00000000000..f38ac6dd82f --- /dev/null +++ b/spec/javascripts/ide/stores/utils_spec.js @@ -0,0 +1,66 @@ +import * as utils from '~/ide/stores/utils'; + +describe('Multi-file store utils', () => { + describe('setPageTitle', () => { + it('sets the document page title', () => { + utils.setPageTitle('test'); + + expect(document.title).toBe('test'); + }); + }); + + describe('findIndexOfFile', () => { + let localState; + + beforeEach(() => { + localState = [ + { + path: '1', + }, + { + path: '2', + }, + ]; + }); + + it('finds in the index of an entry by path', () => { + const index = utils.findIndexOfFile(localState, { + path: '2', + }); + + expect(index).toBe(1); + }); + }); + + describe('findEntry', () => { + let localState; + + beforeEach(() => { + localState = { + tree: [ + { + type: 'tree', + name: 'test', + }, + { + type: 'blob', + name: 'file', + }, + ], + }; + }); + + it('returns an entry found by name', () => { + const foundEntry = utils.findEntry(localState.tree, 'tree', 'test'); + + expect(foundEntry.type).toBe('tree'); + expect(foundEntry.name).toBe('test'); + }); + + it('returns undefined when no entry found', () => { + const foundEntry = utils.findEntry(localState.tree, 'blob', 'test'); + + expect(foundEntry).toBeUndefined(); + }); + }); +}); diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js index 584db6c6632..d5a87b5ce20 100644 --- a/spec/javascripts/issue_show/components/app_spec.js +++ b/spec/javascripts/issue_show/components/app_spec.js @@ -1,8 +1,7 @@ import Vue from 'vue'; import MockAdapter from 'axios-mock-adapter'; import axios from '~/lib/utils/axios_utils'; -import '~/render_math'; -import '~/render_gfm'; +import '~/behaviors/markdown/render_gfm'; import * as urlUtils from '~/lib/utils/url_utility'; import issuableApp from '~/issue_show/components/app.vue'; import eventHub from '~/issue_show/event_hub'; diff --git a/spec/javascripts/issue_spec.js b/spec/javascripts/issue_spec.js index f37426a72d4..047ecab27db 100644 --- a/spec/javascripts/issue_spec.js +++ b/spec/javascripts/issue_spec.js @@ -92,6 +92,7 @@ describe('Issue', function() { function mockCanCreateBranch(canCreateBranch) { mock.onGet(/(.*)\/can_create_branch$/).reply(200, { can_create_branch: canCreateBranch, + suggested_branch_name: 'foo-99', }); } diff --git a/spec/javascripts/jobs/header_spec.js b/spec/javascripts/jobs/header_spec.js index 0961605ce5c..4f861c39d3f 100644 --- a/spec/javascripts/jobs/header_spec.js +++ b/spec/javascripts/jobs/header_spec.js @@ -36,14 +36,28 @@ describe('Job details header', () => { }, isLoading: false, }; - - vm = mountComponent(HeaderComponent, props); }); afterEach(() => { vm.$destroy(); }); + describe('job reason', () => { + it('should not render the reason when reason is absent', () => { + vm = mountComponent(HeaderComponent, props); + + 
expect(vm.shouldRenderReason).toBe(false); + }); + + it('should render the reason when reason is present', () => { + props.job.callout_message = 'There is an unknown failure, please try again'; + + vm = mountComponent(HeaderComponent, props); + + expect(vm.shouldRenderReason).toBe(true); + }); + }); + describe('triggered job', () => { beforeEach(() => { vm = mountComponent(HeaderComponent, props); @@ -51,14 +65,17 @@ describe('Job details header', () => { it('should render provided job information', () => { expect( - vm.$el.querySelector('.header-main-content').textContent.replace(/\s+/g, ' ').trim(), + vm.$el + .querySelector('.header-main-content') + .textContent.replace(/\s+/g, ' ') + .trim(), ).toEqual('failed Job #123 triggered 3 weeks ago by Foo'); }); it('should render new issue link', () => { - expect( - vm.$el.querySelector('.js-new-issue').getAttribute('href'), - ).toEqual(props.job.new_issue_path); + expect(vm.$el.querySelector('.js-new-issue').getAttribute('href')).toEqual( + props.job.new_issue_path, + ); }); }); @@ -68,7 +85,10 @@ describe('Job details header', () => { vm = mountComponent(HeaderComponent, props); expect( - vm.$el.querySelector('.header-main-content').textContent.replace(/\s+/g, ' ').trim(), + vm.$el + .querySelector('.header-main-content') + .textContent.replace(/\s+/g, ' ') + .trim(), ).toEqual('failed Job #123 created 3 weeks ago by Foo'); }); }); diff --git a/spec/javascripts/jobs/mock_data.js b/spec/javascripts/jobs/mock_data.js index 43589d54be4..25ca8eb6c0b 100644 --- a/spec/javascripts/jobs/mock_data.js +++ b/spec/javascripts/jobs/mock_data.js @@ -115,6 +115,10 @@ export default { commit_path: '/root/ci-mock/commit/c58647773a6b5faf066d4ad6ff2c9fbba5f180f6', }, }, + metadata: { + timeout_human_readable: '1m 40s', + timeout_source: 'runner', + }, merge_request: { iid: 2, path: '/root/ci-mock/merge_requests/2', diff --git a/spec/javascripts/jobs/sidebar_detail_row_spec.js b/spec/javascripts/jobs/sidebar_detail_row_spec.js index 3ac65709c4a..e6bfb0c4adc 100644 --- a/spec/javascripts/jobs/sidebar_detail_row_spec.js +++ b/spec/javascripts/jobs/sidebar_detail_row_spec.js @@ -37,4 +37,25 @@ describe('Sidebar detail row', () => { vm.$el.textContent.replace(/\s+/g, ' ').trim(), ).toEqual('this is the title: this is the value'); }); + + describe('when helpUrl not provided', () => { + it('should not render help', () => { + expect(vm.$el.querySelector('.help-button')).toBeNull(); + }); + }); + + describe('when helpUrl provided', () => { + beforeEach(() => { + vm = new SidebarDetailRow({ + propsData: { + helpUrl: 'help url', + value: 'foo', + }, + }).$mount(); + }); + + it('should render help', () => { + expect(vm.$el.querySelector('.help-button a').getAttribute('href')).toEqual('help url'); + }); + }); }); diff --git a/spec/javascripts/jobs/sidebar_details_block_spec.js b/spec/javascripts/jobs/sidebar_details_block_spec.js index 95532ef5382..6b397c22fb9 100644 --- a/spec/javascripts/jobs/sidebar_details_block_spec.js +++ b/spec/javascripts/jobs/sidebar_details_block_spec.js @@ -31,10 +31,25 @@ describe('Sidebar details block', () => { }); }); + describe("when user can't retry", () => { + it('should not render a retry button', () => { + vm = new SidebarComponent({ + propsData: { + job: {}, + canUserRetry: false, + isLoading: true, + }, + }).$mount(); + + expect(vm.$el.querySelector('.js-retry-job')).toBeNull(); + }); + }); + beforeEach(() => { vm = new SidebarComponent({ propsData: { job, + canUserRetry: true, isLoading: false, }, }).$mount(); @@ -42,7 +57,9 @@ 
describe('Sidebar details block', () => { describe('actions', () => { it('should render link to new issue', () => { - expect(vm.$el.querySelector('.js-new-issue').getAttribute('href')).toEqual(job.new_issue_path); + expect(vm.$el.querySelector('.js-new-issue').getAttribute('href')).toEqual( + job.new_issue_path, + ); expect(vm.$el.querySelector('.js-new-issue').textContent.trim()).toEqual('New issue'); }); @@ -57,55 +74,49 @@ describe('Sidebar details block', () => { describe('information', () => { it('should render merge request link', () => { - expect( - trimWhitespace(vm.$el.querySelector('.js-job-mr')), - ).toEqual('Merge Request: !2'); + expect(trimWhitespace(vm.$el.querySelector('.js-job-mr'))).toEqual('Merge Request: !2'); - expect( - vm.$el.querySelector('.js-job-mr a').getAttribute('href'), - ).toEqual(job.merge_request.path); + expect(vm.$el.querySelector('.js-job-mr a').getAttribute('href')).toEqual( + job.merge_request.path, + ); }); it('should render job duration', () => { - expect( - trimWhitespace(vm.$el.querySelector('.js-job-duration')), - ).toEqual('Duration: 6 seconds'); + expect(trimWhitespace(vm.$el.querySelector('.js-job-duration'))).toEqual( + 'Duration: 6 seconds', + ); }); it('should render erased date', () => { - expect( - trimWhitespace(vm.$el.querySelector('.js-job-erased')), - ).toEqual('Erased: 3 weeks ago'); + expect(trimWhitespace(vm.$el.querySelector('.js-job-erased'))).toEqual('Erased: 3 weeks ago'); }); it('should render finished date', () => { - expect( - trimWhitespace(vm.$el.querySelector('.js-job-finished')), - ).toEqual('Finished: 3 weeks ago'); + expect(trimWhitespace(vm.$el.querySelector('.js-job-finished'))).toEqual( + 'Finished: 3 weeks ago', + ); }); it('should render queued date', () => { - expect( - trimWhitespace(vm.$el.querySelector('.js-job-queued')), - ).toEqual('Queued: 9 seconds'); + expect(trimWhitespace(vm.$el.querySelector('.js-job-queued'))).toEqual('Queued: 9 seconds'); }); it('should render runner ID', () => { + expect(trimWhitespace(vm.$el.querySelector('.js-job-runner'))).toEqual('Runner: #1'); + }); + + it('should render timeout information', () => { expect( - trimWhitespace(vm.$el.querySelector('.js-job-runner')), - ).toEqual('Runner: #1'); + trimWhitespace(vm.$el.querySelector('.js-job-timeout')), + ).toEqual('Timeout: 1m 40s (from runner)'); }); it('should render coverage', () => { - expect( - trimWhitespace(vm.$el.querySelector('.js-job-coverage')), - ).toEqual('Coverage: 20%'); + expect(trimWhitespace(vm.$el.querySelector('.js-job-coverage'))).toEqual('Coverage: 20%'); }); it('should render tags', () => { - expect( - trimWhitespace(vm.$el.querySelector('.js-job-tags')), - ).toEqual('Tags: tag'); + expect(trimWhitespace(vm.$el.querySelector('.js-job-tags'))).toEqual('Tags: tag'); }); }); }); diff --git a/spec/javascripts/lib/utils/text_markdown_spec.js b/spec/javascripts/lib/utils/text_markdown_spec.js index a95a7e2a5be..ca0e7c395a0 100644 --- a/spec/javascripts/lib/utils/text_markdown_spec.js +++ b/spec/javascripts/lib/utils/text_markdown_spec.js @@ -1,4 +1,4 @@ -import textUtils from '~/lib/utils/text_markdown'; +import { insertMarkdownText } from '~/lib/utils/text_markdown'; describe('init markdown', () => { let textArea; @@ -21,7 +21,7 @@ describe('init markdown', () => { textArea.selectionStart = 0; textArea.selectionEnd = 0; - textUtils.insertText(textArea, textArea.value, '*', null, '', false); + insertMarkdownText(textArea, textArea.value, '*', null, '', false); expect(textArea.value).toEqual(`${initialValue}* `); 
}); @@ -32,7 +32,7 @@ describe('init markdown', () => { textArea.value = initialValue; textArea.setSelectionRange(initialValue.length, initialValue.length); - textUtils.insertText(textArea, textArea.value, '*', null, '', false); + insertMarkdownText(textArea, textArea.value, '*', null, '', false); expect(textArea.value).toEqual(`${initialValue}\n* `); }); @@ -43,7 +43,7 @@ describe('init markdown', () => { textArea.value = initialValue; textArea.setSelectionRange(initialValue.length, initialValue.length); - textUtils.insertText(textArea, textArea.value, '*', null, '', false); + insertMarkdownText(textArea, textArea.value, '*', null, '', false); expect(textArea.value).toEqual(`${initialValue}* `); }); @@ -54,7 +54,7 @@ describe('init markdown', () => { textArea.value = initialValue; textArea.setSelectionRange(initialValue.length, initialValue.length); - textUtils.insertText(textArea, textArea.value, '*', null, '', false); + insertMarkdownText(textArea, textArea.value, '*', null, '', false); expect(textArea.value).toEqual(`${initialValue}* `); }); diff --git a/spec/javascripts/lib/utils/text_utility_spec.js b/spec/javascripts/lib/utils/text_utility_spec.js index e57a55fa71a..ae00fb76714 100644 --- a/spec/javascripts/lib/utils/text_utility_spec.js +++ b/spec/javascripts/lib/utils/text_utility_spec.js @@ -65,11 +65,15 @@ describe('text_utility', () => { describe('stripHtml', () => { it('replaces html tag with the default replacement', () => { - expect(textUtils.stripHtml('This is a text with <p>html</p>.')).toEqual('This is a text with html.'); + expect(textUtils.stripHtml('This is a text with <p>html</p>.')).toEqual( + 'This is a text with html.', + ); }); it('replaces html tags with the provided replacement', () => { - expect(textUtils.stripHtml('This is a text with <p>html</p>.', ' ')).toEqual('This is a text with html .'); + expect(textUtils.stripHtml('This is a text with <p>html</p>.', ' ')).toEqual( + 'This is a text with html .', + ); }); }); @@ -78,4 +82,10 @@ describe('text_utility', () => { expect(textUtils.convertToCamelCase('snake_case')).toBe('snakeCase'); }); }); + + describe('convertToSentenceCase', () => { + it('converts Sentence Case to Sentence case', () => { + expect(textUtils.convertToSentenceCase('Hello World')).toBe('Hello world'); + }); + }); }); diff --git a/spec/javascripts/matchers.js b/spec/javascripts/matchers.js new file mode 100644 index 00000000000..7cc5e753c22 --- /dev/null +++ b/spec/javascripts/matchers.js @@ -0,0 +1,35 @@ +export default { + toHaveSpriteIcon: () => ({ + compare(element, iconName) { + if (!iconName) { + throw new Error('toHaveSpriteIcon is missing iconName argument!'); + } + + if (!(element instanceof HTMLElement)) { + throw new Error(`${element} is not a DOM element!`); + } + + const iconReferences = [].slice.apply(element.querySelectorAll('svg use')); + const matchingIcon = iconReferences.find(reference => reference.getAttribute('xlink:href').endsWith(`#${iconName}`)); + const result = { + pass: !!matchingIcon, + }; + + if (result.pass) { + result.message = `${element.outerHTML} contains the sprite icon "${iconName}"!`; + } else { + result.message = `${element.outerHTML} does not contain the sprite icon "${iconName}"!`; + + const existingIcons = iconReferences.map((reference) => { + const iconUrl = reference.getAttribute('xlink:href'); + return `"${iconUrl.replace(/^.+#/, '')}"`; + }); + if (existingIcons.length > 0) { + result.message += ` (only found ${existingIcons.join(',')})`; + } + } + + return result; + }, + }), +}; diff --git 
a/spec/javascripts/merge_request_notes_spec.js b/spec/javascripts/merge_request_notes_spec.js index eb644e698da..dc9dc4d4249 100644 --- a/spec/javascripts/merge_request_notes_spec.js +++ b/spec/javascripts/merge_request_notes_spec.js @@ -3,8 +3,7 @@ import _ from 'underscore'; import 'autosize'; import '~/gl_form'; import '~/lib/utils/text_utility'; -import '~/render_gfm'; -import '~/render_math'; +import '~/behaviors/markdown/render_gfm'; import Notes from '~/notes'; const upArrowKeyCode = 38; diff --git a/spec/javascripts/monitoring/dashboard_spec.js b/spec/javascripts/monitoring/dashboard_spec.js index 29b355307ef..eba6dcf47c5 100644 --- a/spec/javascripts/monitoring/dashboard_spec.js +++ b/spec/javascripts/monitoring/dashboard_spec.js @@ -18,6 +18,7 @@ describe('Dashboard', () => { deploymentEndpoint: null, emptyGettingStartedSvgPath: '/path/to/getting-started.svg', emptyLoadingSvgPath: '/path/to/loading.svg', + emptyNoDataSvgPath: '/path/to/no-data.svg', emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg', }; diff --git a/spec/javascripts/monitoring/dashboard_state_spec.js b/spec/javascripts/monitoring/dashboard_state_spec.js index df3198dd3e2..b4c5f4baa78 100644 --- a/spec/javascripts/monitoring/dashboard_state_spec.js +++ b/spec/javascripts/monitoring/dashboard_state_spec.js @@ -2,13 +2,22 @@ import Vue from 'vue'; import EmptyState from '~/monitoring/components/empty_state.vue'; import { statePaths } from './mock_data'; -const createComponent = (propsData) => { +function createComponent(props) { const Component = Vue.extend(EmptyState); return new Component({ - propsData, + propsData: { + ...props, + settingsPath: statePaths.settingsPath, + clustersPath: statePaths.clustersPath, + documentationPath: statePaths.documentationPath, + emptyGettingStartedSvgPath: '/path/to/getting-started.svg', + emptyLoadingSvgPath: '/path/to/loading.svg', + emptyNoDataSvgPath: '/path/to/no-data.svg', + emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg', + }, }).$mount(); -}; +} function getTextFromNode(component, selector) { return component.$el.querySelector(selector).firstChild.nodeValue.trim(); @@ -19,11 +28,6 @@ describe('EmptyState', () => { it('currentState', () => { const component = createComponent({ selectedState: 'gettingStarted', - settingsPath: statePaths.settingsPath, - documentationPath: statePaths.documentationPath, - emptyGettingStartedSvgPath: 'foo', - emptyLoadingSvgPath: 'foo', - emptyUnableToConnectSvgPath: 'foo', }); expect(component.currentState).toBe(component.states.gettingStarted); @@ -32,11 +36,6 @@ describe('EmptyState', () => { it('showButtonDescription returns a description with a link for the unableToConnect state', () => { const component = createComponent({ selectedState: 'unableToConnect', - settingsPath: statePaths.settingsPath, - documentationPath: statePaths.documentationPath, - emptyGettingStartedSvgPath: 'foo', - emptyLoadingSvgPath: 'foo', - emptyUnableToConnectSvgPath: 'foo', }); expect(component.showButtonDescription).toEqual(true); @@ -45,11 +44,6 @@ describe('EmptyState', () => { it('showButtonDescription returns the description without a link for any other state', () => { const component = createComponent({ selectedState: 'loading', - settingsPath: statePaths.settingsPath, - documentationPath: statePaths.documentationPath, - emptyGettingStartedSvgPath: 'foo', - emptyLoadingSvgPath: 'foo', - emptyUnableToConnectSvgPath: 'foo', }); expect(component.showButtonDescription).toEqual(false); @@ -59,12 +53,6 @@ describe('EmptyState', () => { 
it('should show the gettingStarted state', () => { const component = createComponent({ selectedState: 'gettingStarted', - settingsPath: statePaths.settingsPath, - clustersPath: statePaths.clustersPath, - documentationPath: statePaths.documentationPath, - emptyGettingStartedSvgPath: 'foo', - emptyLoadingSvgPath: 'foo', - emptyUnableToConnectSvgPath: 'foo', }); expect(component.$el.querySelector('svg')).toBeDefined(); @@ -76,11 +64,6 @@ describe('EmptyState', () => { it('should show the loading state', () => { const component = createComponent({ selectedState: 'loading', - settingsPath: statePaths.settingsPath, - documentationPath: statePaths.documentationPath, - emptyGettingStartedSvgPath: 'foo', - emptyLoadingSvgPath: 'foo', - emptyUnableToConnectSvgPath: 'foo', }); expect(component.$el.querySelector('svg')).toBeDefined(); @@ -92,11 +75,6 @@ describe('EmptyState', () => { it('should show the unableToConnect state', () => { const component = createComponent({ selectedState: 'unableToConnect', - settingsPath: statePaths.settingsPath, - documentationPath: statePaths.documentationPath, - emptyGettingStartedSvgPath: 'foo', - emptyLoadingSvgPath: 'foo', - emptyUnableToConnectSvgPath: 'foo', }); expect(component.$el.querySelector('svg')).toBeDefined(); diff --git a/spec/javascripts/monitoring/graph/axis_spec.js b/spec/javascripts/monitoring/graph/axis_spec.js new file mode 100644 index 00000000000..c7adba00637 --- /dev/null +++ b/spec/javascripts/monitoring/graph/axis_spec.js @@ -0,0 +1,65 @@ +import Vue from 'vue'; +import GraphAxis from '~/monitoring/components/graph/axis.vue'; +import measurements from '~/monitoring/utils/measurements'; + +const createComponent = propsData => { + const Component = Vue.extend(GraphAxis); + + return new Component({ + propsData, + }).$mount(); +}; + +const defaultValuesComponent = { + graphWidth: 500, + graphHeight: 300, + graphHeightOffset: 120, + margin: measurements.large.margin, + measurements: measurements.large, + yAxisLabel: 'Values', + unitOfDisplay: 'MB', +}; + +function getTextFromNode(component, selector) { + return component.$el.querySelector(selector).firstChild.nodeValue.trim(); +} + +describe('Axis', () => { + describe('Computed props', () => { + it('textTransform', () => { + const component = createComponent(defaultValuesComponent); + + expect(component.textTransform).toContain('translate(15, 120) rotate(-90)'); + }); + + it('xPosition', () => { + const component = createComponent(defaultValuesComponent); + + expect(component.xPosition).toEqual(180); + }); + + it('yPosition', () => { + const component = createComponent(defaultValuesComponent); + + expect(component.yPosition).toEqual(240); + }); + + it('rectTransform', () => { + const component = createComponent(defaultValuesComponent); + + expect(component.rectTransform).toContain('translate(0, 120) rotate(-90)'); + }); + }); + + it('has 2 rect-axis-text rect svg elements', () => { + const component = createComponent(defaultValuesComponent); + + expect(component.$el.querySelectorAll('.rect-axis-text').length).toEqual(2); + }); + + it('contains text to signal the usage, title and time with multiple time series', () => { + const component = createComponent(defaultValuesComponent); + + expect(getTextFromNode(component, '.y-label-text')).toEqual('Values (MB)'); + }); +}); diff --git a/spec/javascripts/monitoring/graph/legend_spec.js b/spec/javascripts/monitoring/graph/legend_spec.js index 145c8db28d5..abcc51aa077 100644 --- a/spec/javascripts/monitoring/graph/legend_spec.js +++ 
b/spec/javascripts/monitoring/graph/legend_spec.js @@ -1,106 +1,44 @@ import Vue from 'vue'; import GraphLegend from '~/monitoring/components/graph/legend.vue'; -import measurements from '~/monitoring/utils/measurements'; import createTimeSeries from '~/monitoring/utils/multiple_time_series'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { singleRowMetricsMultipleSeries, convertDatesMultipleSeries } from '../mock_data'; -const createComponent = (propsData) => { - const Component = Vue.extend(GraphLegend); - - return new Component({ - propsData, - }).$mount(); -}; - const convertedMetrics = convertDatesMultipleSeries(singleRowMetricsMultipleSeries); -const defaultValuesComponent = { - graphWidth: 500, - graphHeight: 300, - graphHeightOffset: 120, - margin: measurements.large.margin, - measurements: measurements.large, - areaColorRgb: '#f0f0f0', - legendTitle: 'Title', - yAxisLabel: 'Values', - metricUsage: 'Value', - unitOfDisplay: 'Req/Sec', - currentDataIndex: 0, -}; +const defaultValuesComponent = {}; -const timeSeries = createTimeSeries(convertedMetrics[0].queries, - defaultValuesComponent.graphWidth, defaultValuesComponent.graphHeight, - defaultValuesComponent.graphHeightOffset); +const timeSeries = createTimeSeries(convertedMetrics[0].queries, 500, 300, 120); defaultValuesComponent.timeSeries = timeSeries; -function getTextFromNode(component, selector) { - return component.$el.querySelector(selector).firstChild.nodeValue.trim(); -} - -describe('GraphLegend', () => { - describe('Computed props', () => { - it('textTransform', () => { - const component = createComponent(defaultValuesComponent); - - expect(component.textTransform).toContain('translate(15, 120) rotate(-90)'); - }); - - it('xPosition', () => { - const component = createComponent(defaultValuesComponent); - - expect(component.xPosition).toEqual(180); - }); - - it('yPosition', () => { - const component = createComponent(defaultValuesComponent); - - expect(component.yPosition).toEqual(240); - }); - - it('rectTransform', () => { - const component = createComponent(defaultValuesComponent); +describe('Legend Component', () => { + let vm; + let Legend; - expect(component.rectTransform).toContain('translate(0, 120) rotate(-90)'); - }); + beforeEach(() => { + Legend = Vue.extend(GraphLegend); }); - describe('methods', () => { - it('translateLegendGroup should only change Y direction', () => { - const component = createComponent(defaultValuesComponent); - - const translatedCoordinate = component.translateLegendGroup(1); - expect(translatedCoordinate.indexOf('translate(0, ')).not.toEqual(-1); + describe('View', () => { + beforeEach(() => { + vm = mountComponent(Legend, { + legendTitle: 'legend', + timeSeries, + currentDataIndex: 0, + unitOfDisplay: 'Req/Sec', + }); }); - it('formatMetricUsage should contain the unit of display and the current value selected via "currentDataIndex"', () => { - const component = createComponent(defaultValuesComponent); + it('should render the usage, title and time with multiple time series', () => { + const titles = vm.$el.querySelectorAll('.legend-metric-title'); - const formattedMetricUsage = component.formatMetricUsage(timeSeries[0]); - const valueFromSeries = timeSeries[0].values[component.currentDataIndex].value; - expect(formattedMetricUsage.indexOf(component.unitOfDisplay)).not.toEqual(-1); - expect(formattedMetricUsage.indexOf(valueFromSeries)).not.toEqual(-1); + expect(titles[0].textContent.indexOf('1xx')).not.toEqual(-1); + 
expect(titles[1].textContent.indexOf('2xx')).not.toEqual(-1);
    });
-  });
-
-  it('has 2 rect-axis-text rect svg elements', () => {
-    const component = createComponent(defaultValuesComponent);
-
-    expect(component.$el.querySelectorAll('.rect-axis-text').length).toEqual(2);
-  });
-  it('contains text to signal the usage, title and time with multiple time series', () => {
-    const component = createComponent(defaultValuesComponent);
-    const titles = component.$el.querySelectorAll('.legend-metric-title');
-
-    expect(titles[0].textContent.indexOf('1xx')).not.toEqual(-1);
-    expect(titles[1].textContent.indexOf('2xx')).not.toEqual(-1);
-    expect(getTextFromNode(component, '.y-label-text')).toEqual(component.yAxisLabel);
-  });
-
-  it('should contain the same number of legend groups as the timeSeries length', () => {
-    const component = createComponent(defaultValuesComponent);
-
-    expect(component.$el.querySelectorAll('.legend-group').length).toEqual(component.timeSeries.length);
+    it('should contain the same number of rows in the table as time series', () => {
+      expect(vm.$el.querySelectorAll('.prometheus-table tr').length).toEqual(vm.timeSeries.length);
+    });
  });
});
diff --git a/spec/javascripts/monitoring/graph/track_info_spec.js b/spec/javascripts/monitoring/graph/track_info_spec.js new file mode 100644 index 00000000000..d3121d553f9 --- /dev/null +++ b/spec/javascripts/monitoring/graph/track_info_spec.js @@ -0,0 +1,44 @@
+import Vue from 'vue';
+import TrackInfo from '~/monitoring/components/graph/track_info.vue';
+import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import createTimeSeries from '~/monitoring/utils/multiple_time_series';
+import { singleRowMetricsMultipleSeries, convertDatesMultipleSeries } from '../mock_data';
+
+const convertedMetrics = convertDatesMultipleSeries(singleRowMetricsMultipleSeries);
+const timeSeries = createTimeSeries(convertedMetrics[0].queries, 500, 300, 120);
+
+describe('TrackInfo component', () => {
+  let vm;
+  let Component;
+
+  beforeEach(() => {
+    Component = Vue.extend(TrackInfo);
+  });
+
+  afterEach(() => {
+    vm.$destroy();
+  });
+
+  describe('Computed props', () => {
+    beforeEach(() => {
+      vm = mountComponent(Component, { track: timeSeries[0] });
+    });
+
+    it('summaryMetrics', () => {
+      expect(vm.summaryMetrics).toEqual('Avg: 0.000 · Max: 0.000');
+    });
+  });
+
+  describe('Rendered output', () => {
+    beforeEach(() => {
+      vm = mountComponent(Component, { track: timeSeries[0] });
+    });
+
+    it('contains metric tag and the summary metrics', () => {
+      const metricTag = vm.$el.querySelector('strong');
+
+      expect(metricTag.textContent.trim()).toEqual(vm.track.metricTag);
+      expect(vm.$el.textContent).toContain('Avg: 0.000 · Max: 0.000');
+    });
+  });
+});
diff --git a/spec/javascripts/monitoring/graph/track_line_spec.js b/spec/javascripts/monitoring/graph/track_line_spec.js new file mode 100644 index 00000000000..45106830a67 --- /dev/null +++ b/spec/javascripts/monitoring/graph/track_line_spec.js @@ -0,0 +1,52 @@
+import Vue from 'vue';
+import TrackLine from '~/monitoring/components/graph/track_line.vue';
+import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import createTimeSeries from '~/monitoring/utils/multiple_time_series';
+import { singleRowMetricsMultipleSeries, convertDatesMultipleSeries } from '../mock_data';
+
+const convertedMetrics = convertDatesMultipleSeries(singleRowMetricsMultipleSeries);
+const timeSeries = createTimeSeries(convertedMetrics[0].queries, 500, 300, 120);
+
+describe('TrackLine component', () 
=> { + let vm; + let Component; + + beforeEach(() => { + Component = Vue.extend(TrackLine); + }); + + afterEach(() => { + vm.$destroy(); + }); + + describe('Computed props', () => { + it('stylizedLine for dashed lineStyles', () => { + vm = mountComponent(Component, { track: { ...timeSeries[0], lineStyle: 'dashed' } }); + + expect(vm.stylizedLine).toEqual('6, 3'); + }); + + it('stylizedLine for dotted lineStyles', () => { + vm = mountComponent(Component, { track: { ...timeSeries[0], lineStyle: 'dotted' } }); + + expect(vm.stylizedLine).toEqual('3, 3'); + }); + }); + + describe('Rendered output', () => { + it('has an svg with a line', () => { + vm = mountComponent(Component, { track: { ...timeSeries[0] } }); + const svgEl = vm.$el.querySelector('svg'); + const lineEl = vm.$el.querySelector('svg line'); + + expect(svgEl.getAttribute('width')).toEqual('15'); + expect(svgEl.getAttribute('height')).toEqual('6'); + + expect(lineEl.getAttribute('stroke-width')).toEqual('4'); + expect(lineEl.getAttribute('x1')).toEqual('0'); + expect(lineEl.getAttribute('x2')).toEqual('15'); + expect(lineEl.getAttribute('y1')).toEqual('2'); + expect(lineEl.getAttribute('y2')).toEqual('2'); + }); + }); +}); diff --git a/spec/javascripts/monitoring/graph_spec.js b/spec/javascripts/monitoring/graph_spec.js index b1d69752bad..1213c80ba3a 100644 --- a/spec/javascripts/monitoring/graph_spec.js +++ b/spec/javascripts/monitoring/graph_spec.js @@ -2,11 +2,15 @@ import Vue from 'vue'; import Graph from '~/monitoring/components/graph.vue'; import MonitoringMixins from '~/monitoring/mixins/monitoring_mixins'; import eventHub from '~/monitoring/event_hub'; -import { deploymentData, convertDatesMultipleSeries, singleRowMetricsMultipleSeries } from './mock_data'; +import { + deploymentData, + convertDatesMultipleSeries, + singleRowMetricsMultipleSeries, +} from './mock_data'; const tagsPath = 'http://test.host/frontend-fixtures/environments-project/tags'; const projectPath = 'http://test.host/frontend-fixtures/environments-project'; -const createComponent = (propsData) => { +const createComponent = propsData => { const Component = Vue.extend(Graph); return new Component({ @@ -14,7 +18,9 @@ const createComponent = (propsData) => { }).$mount(); }; -const convertedMetrics = convertDatesMultipleSeries(singleRowMetricsMultipleSeries); +const convertedMetrics = convertDatesMultipleSeries( + singleRowMetricsMultipleSeries, +); describe('Graph', () => { beforeEach(() => { @@ -31,7 +37,9 @@ describe('Graph', () => { projectPath, }); - expect(component.$el.querySelector('.text-center').innerText.trim()).toBe(component.graphData.title); + expect(component.$el.querySelector('.text-center').innerText.trim()).toBe( + component.graphData.title, + ); }); describe('Computed props', () => { @@ -46,8 +54,9 @@ describe('Graph', () => { }); const transformedHeight = `${component.graphHeight - 100}`; - expect(component.axisTransform.indexOf(transformedHeight)) - .not.toEqual(-1); + expect(component.axisTransform.indexOf(transformedHeight)).not.toEqual( + -1, + ); }); it('outerViewBox gets a width and height property based on the DOM size of the element', () => { @@ -63,11 +72,11 @@ describe('Graph', () => { const viewBoxArray = component.outerViewBox.split(' '); expect(typeof component.outerViewBox).toEqual('string'); expect(viewBoxArray[2]).toEqual(component.graphWidth.toString()); - expect(viewBoxArray[3]).toEqual(component.graphHeight.toString()); + expect(viewBoxArray[3]).toEqual((component.graphHeight - 50).toString()); }); }); - it('sends an 
event to the eventhub when it has finished resizing', (done) => { + it('sends an event to the eventhub when it has finished resizing', done => { const component = createComponent({ graphData: convertedMetrics[1], classType: 'col-md-6', diff --git a/spec/javascripts/monitoring/mock_data.js b/spec/javascripts/monitoring/mock_data.js index f30208b27b6..50da6da2e07 100644 --- a/spec/javascripts/monitoring/mock_data.js +++ b/spec/javascripts/monitoring/mock_data.js @@ -3,2426 +3,645 @@ export const mockApiEndpoint = `${gl.TEST_HOST}/monitoring/mock`; export const metricsGroupsAPIResponse = { - 'success': true, - 'data': [ + success: true, + data: [ { - 'group': 'Kubernetes', - 'priority': 1, - 'metrics': [ - { - 'title': 'Memory usage', - 'weight': 1, - 'queries': [ + group: 'Kubernetes', + priority: 1, + metrics: [ + { + title: 'Memory usage', + weight: 1, + queries: [ + { + query_range: 'avg(container_memory_usage_bytes{%{environment_filter}}) / 2^20', + y_label: 'Memory', + unit: 'MiB', + result: [ { - 'query_range': 'avg(container_memory_usage_bytes{%{environment_filter}}) / 2^20', - 'y_label': 'Memory', - 'unit': 'MiB', - 'result': [ - { - 'metric': {}, - 'values': [ - [ - 1495700554.925, - '8.0390625' - ], - [ - 1495700614.925, - '8.0390625' - ], - [ - 1495700674.925, - '8.0390625' - ], - [ - 1495700734.925, - '8.0390625' - ], - [ - 1495700794.925, - '8.0390625' - ], - [ - 1495700854.925, - '8.0390625' - ], - [ - 1495700914.925, - '8.0390625' - ], - [ - 1495700974.925, - '8.0390625' - ], - [ - 1495701034.925, - '8.0390625' - ], - [ - 1495701094.925, - '8.0390625' - ], - [ - 1495701154.925, - '8.0390625' - ], - [ - 1495701214.925, - '8.0390625' - ], - [ - 1495701274.925, - '8.0390625' - ], - [ - 1495701334.925, - '8.0390625' - ], - [ - 1495701394.925, - '8.0390625' - ], - [ - 1495701454.925, - '8.0390625' - ], - [ - 1495701514.925, - '8.0390625' - ], - [ - 1495701574.925, - '8.0390625' - ], - [ - 1495701634.925, - '8.0390625' - ], - [ - 1495701694.925, - '8.0390625' - ], - [ - 1495701754.925, - '8.0390625' - ], - [ - 1495701814.925, - '8.0390625' - ], - [ - 1495701874.925, - '8.0390625' - ], - [ - 1495701934.925, - '8.0390625' - ], - [ - 1495701994.925, - '8.0390625' - ], - [ - 1495702054.925, - '8.0390625' - ], - [ - 1495702114.925, - '8.0390625' - ], - [ - 1495702174.925, - '8.0390625' - ], - [ - 1495702234.925, - '8.0390625' - ], - [ - 1495702294.925, - '8.0390625' - ], - [ - 1495702354.925, - '8.0390625' - ], - [ - 1495702414.925, - '8.0390625' - ], - [ - 1495702474.925, - '8.0390625' - ], - [ - 1495702534.925, - '8.0390625' - ], - [ - 1495702594.925, - '8.0390625' - ], - [ - 1495702654.925, - '8.0390625' - ], - [ - 1495702714.925, - '8.0390625' - ], - [ - 1495702774.925, - '8.0390625' - ], - [ - 1495702834.925, - '8.0390625' - ], - [ - 1495702894.925, - '8.0390625' - ], - [ - 1495702954.925, - '8.0390625' - ], - [ - 1495703014.925, - '8.0390625' - ], - [ - 1495703074.925, - '8.0390625' - ], - [ - 1495703134.925, - '8.0390625' - ], - [ - 1495703194.925, - '8.0390625' - ], - [ - 1495703254.925, - '8.03515625' - ], - [ - 1495703314.925, - '8.03515625' - ], - [ - 1495703374.925, - '8.03515625' - ], - [ - 1495703434.925, - '8.03515625' - ], - [ - 1495703494.925, - '8.03515625' - ], - [ - 1495703554.925, - '8.03515625' - ], - [ - 1495703614.925, - '8.03515625' - ], - [ - 1495703674.925, - '8.03515625' - ], - [ - 1495703734.925, - '8.03515625' - ], - [ - 1495703794.925, - '8.03515625' - ], - [ - 1495703854.925, - '8.03515625' - ], - [ - 1495703914.925, - '8.03515625' - ], - [ - 
1495703974.925, - '8.03515625' - ], - [ - 1495704034.925, - '8.03515625' - ], - [ - 1495704094.925, - '8.03515625' - ], - [ - 1495704154.925, - '8.03515625' - ], - [ - 1495704214.925, - '7.9296875' - ], - [ - 1495704274.925, - '7.9296875' - ], - [ - 1495704334.925, - '7.9296875' - ], - [ - 1495704394.925, - '7.9296875' - ], - [ - 1495704454.925, - '7.9296875' - ], - [ - 1495704514.925, - '7.9296875' - ], - [ - 1495704574.925, - '7.9296875' - ], - [ - 1495704634.925, - '7.9296875' - ], - [ - 1495704694.925, - '7.9296875' - ], - [ - 1495704754.925, - '7.9296875' - ], - [ - 1495704814.925, - '7.9296875' - ], - [ - 1495704874.925, - '7.9296875' - ], - [ - 1495704934.925, - '7.9296875' - ], - [ - 1495704994.925, - '7.9296875' - ], - [ - 1495705054.925, - '7.9296875' - ], - [ - 1495705114.925, - '7.9296875' - ], - [ - 1495705174.925, - '7.9296875' - ], - [ - 1495705234.925, - '7.9296875' - ], - [ - 1495705294.925, - '7.9296875' - ], - [ - 1495705354.925, - '7.9296875' - ], - [ - 1495705414.925, - '7.9296875' - ], - [ - 1495705474.925, - '7.9296875' - ], - [ - 1495705534.925, - '7.9296875' - ], - [ - 1495705594.925, - '7.9296875' - ], - [ - 1495705654.925, - '7.9296875' - ], - [ - 1495705714.925, - '7.9296875' - ], - [ - 1495705774.925, - '7.9296875' - ], - [ - 1495705834.925, - '7.9296875' - ], - [ - 1495705894.925, - '7.9296875' - ], - [ - 1495705954.925, - '7.9296875' - ], - [ - 1495706014.925, - '7.9296875' - ], - [ - 1495706074.925, - '7.9296875' - ], - [ - 1495706134.925, - '7.9296875' - ], - [ - 1495706194.925, - '7.9296875' - ], - [ - 1495706254.925, - '7.9296875' - ], - [ - 1495706314.925, - '7.9296875' - ], - [ - 1495706374.925, - '7.9296875' - ], - [ - 1495706434.925, - '7.9296875' - ], - [ - 1495706494.925, - '7.9296875' - ], - [ - 1495706554.925, - '7.9296875' - ], - [ - 1495706614.925, - '7.9296875' - ], - [ - 1495706674.925, - '7.9296875' - ], - [ - 1495706734.925, - '7.9296875' - ], - [ - 1495706794.925, - '7.9296875' - ], - [ - 1495706854.925, - '7.9296875' - ], - [ - 1495706914.925, - '7.9296875' - ], - [ - 1495706974.925, - '7.9296875' - ], - [ - 1495707034.925, - '7.9296875' - ], - [ - 1495707094.925, - '7.9296875' - ], - [ - 1495707154.925, - '7.9296875' - ], - [ - 1495707214.925, - '7.9296875' - ], - [ - 1495707274.925, - '7.9296875' - ], - [ - 1495707334.925, - '7.9296875' - ], - [ - 1495707394.925, - '7.9296875' - ], - [ - 1495707454.925, - '7.9296875' - ], - [ - 1495707514.925, - '7.9296875' - ], - [ - 1495707574.925, - '7.9296875' - ], - [ - 1495707634.925, - '7.9296875' - ], - [ - 1495707694.925, - '7.9296875' - ], - [ - 1495707754.925, - '7.9296875' - ], - [ - 1495707814.925, - '7.9296875' - ], - [ - 1495707874.925, - '7.9296875' - ], - [ - 1495707934.925, - '7.9296875' - ], - [ - 1495707994.925, - '7.9296875' - ], - [ - 1495708054.925, - '7.9296875' - ], - [ - 1495708114.925, - '7.9296875' - ], - [ - 1495708174.925, - '7.9296875' - ], - [ - 1495708234.925, - '7.9296875' - ], - [ - 1495708294.925, - '7.9296875' - ], - [ - 1495708354.925, - '7.9296875' - ], - [ - 1495708414.925, - '7.9296875' - ], - [ - 1495708474.925, - '7.9296875' - ], - [ - 1495708534.925, - '7.9296875' - ], - [ - 1495708594.925, - '7.9296875' - ], - [ - 1495708654.925, - '7.9296875' - ], - [ - 1495708714.925, - '7.9296875' - ], - [ - 1495708774.925, - '7.9296875' - ], - [ - 1495708834.925, - '7.9296875' - ], - [ - 1495708894.925, - '7.9296875' - ], - [ - 1495708954.925, - '7.8984375' - ], - [ - 1495709014.925, - '7.8984375' - ], - [ - 1495709074.925, - '7.8984375' - ], - [ - 1495709134.925, - 
'7.8984375' - ], - [ - 1495709194.925, - '7.8984375' - ], - [ - 1495709254.925, - '7.89453125' - ], - [ - 1495709314.925, - '7.89453125' - ], - [ - 1495709374.925, - '7.89453125' - ], - [ - 1495709434.925, - '7.89453125' - ], - [ - 1495709494.925, - '7.89453125' - ], - [ - 1495709554.925, - '7.89453125' - ], - [ - 1495709614.925, - '7.89453125' - ], - [ - 1495709674.925, - '7.89453125' - ], - [ - 1495709734.925, - '7.89453125' - ], - [ - 1495709794.925, - '7.89453125' - ], - [ - 1495709854.925, - '7.89453125' - ], - [ - 1495709914.925, - '7.89453125' - ], - [ - 1495709974.925, - '7.89453125' - ], - [ - 1495710034.925, - '7.89453125' - ], - [ - 1495710094.925, - '7.89453125' - ], - [ - 1495710154.925, - '7.89453125' - ], - [ - 1495710214.925, - '7.89453125' - ], - [ - 1495710274.925, - '7.89453125' - ], - [ - 1495710334.925, - '7.89453125' - ], - [ - 1495710394.925, - '7.89453125' - ], - [ - 1495710454.925, - '7.89453125' - ], - [ - 1495710514.925, - '7.89453125' - ], - [ - 1495710574.925, - '7.89453125' - ], - [ - 1495710634.925, - '7.89453125' - ], - [ - 1495710694.925, - '7.89453125' - ], - [ - 1495710754.925, - '7.89453125' - ], - [ - 1495710814.925, - '7.89453125' - ], - [ - 1495710874.925, - '7.89453125' - ], - [ - 1495710934.925, - '7.89453125' - ], - [ - 1495710994.925, - '7.89453125' - ], - [ - 1495711054.925, - '7.89453125' - ], - [ - 1495711114.925, - '7.89453125' - ], - [ - 1495711174.925, - '7.8515625' - ], - [ - 1495711234.925, - '7.8515625' - ], - [ - 1495711294.925, - '7.8515625' - ], - [ - 1495711354.925, - '7.8515625' - ], - [ - 1495711414.925, - '7.8515625' - ], - [ - 1495711474.925, - '7.8515625' - ], - [ - 1495711534.925, - '7.8515625' - ], - [ - 1495711594.925, - '7.8515625' - ], - [ - 1495711654.925, - '7.8515625' - ], - [ - 1495711714.925, - '7.8515625' - ], - [ - 1495711774.925, - '7.8515625' - ], - [ - 1495711834.925, - '7.8515625' - ], - [ - 1495711894.925, - '7.8515625' - ], - [ - 1495711954.925, - '7.8515625' - ], - [ - 1495712014.925, - '7.8515625' - ], - [ - 1495712074.925, - '7.8515625' - ], - [ - 1495712134.925, - '7.8515625' - ], - [ - 1495712194.925, - '7.8515625' - ], - [ - 1495712254.925, - '7.8515625' - ], - [ - 1495712314.925, - '7.8515625' - ], - [ - 1495712374.925, - '7.8515625' - ], - [ - 1495712434.925, - '7.83203125' - ], - [ - 1495712494.925, - '7.83203125' - ], - [ - 1495712554.925, - '7.83203125' - ], - [ - 1495712614.925, - '7.83203125' - ], - [ - 1495712674.925, - '7.83203125' - ], - [ - 1495712734.925, - '7.83203125' - ], - [ - 1495712794.925, - '7.83203125' - ], - [ - 1495712854.925, - '7.83203125' - ], - [ - 1495712914.925, - '7.83203125' - ], - [ - 1495712974.925, - '7.83203125' - ], - [ - 1495713034.925, - '7.83203125' - ], - [ - 1495713094.925, - '7.83203125' - ], - [ - 1495713154.925, - '7.83203125' - ], - [ - 1495713214.925, - '7.83203125' - ], - [ - 1495713274.925, - '7.83203125' - ], - [ - 1495713334.925, - '7.83203125' - ], - [ - 1495713394.925, - '7.8125' - ], - [ - 1495713454.925, - '7.8125' - ], - [ - 1495713514.925, - '7.8125' - ], - [ - 1495713574.925, - '7.8125' - ], - [ - 1495713634.925, - '7.8125' - ], - [ - 1495713694.925, - '7.8125' - ], - [ - 1495713754.925, - '7.8125' - ], - [ - 1495713814.925, - '7.8125' - ], - [ - 1495713874.925, - '7.8125' - ], - [ - 1495713934.925, - '7.8125' - ], - [ - 1495713994.925, - '7.8125' - ], - [ - 1495714054.925, - '7.8125' - ], - [ - 1495714114.925, - '7.8125' - ], - [ - 1495714174.925, - '7.8125' - ], - [ - 1495714234.925, - '7.8125' - ], - [ - 1495714294.925, - '7.8125' - ], - [ - 
1495714354.925, - '7.80859375' - ], - [ - 1495714414.925, - '7.80859375' - ], - [ - 1495714474.925, - '7.80859375' - ], - [ - 1495714534.925, - '7.80859375' - ], - [ - 1495714594.925, - '7.80859375' - ], - [ - 1495714654.925, - '7.80859375' - ], - [ - 1495714714.925, - '7.80859375' - ], - [ - 1495714774.925, - '7.80859375' - ], - [ - 1495714834.925, - '7.80859375' - ], - [ - 1495714894.925, - '7.80859375' - ], - [ - 1495714954.925, - '7.80859375' - ], - [ - 1495715014.925, - '7.80859375' - ], - [ - 1495715074.925, - '7.80859375' - ], - [ - 1495715134.925, - '7.80859375' - ], - [ - 1495715194.925, - '7.80859375' - ], - [ - 1495715254.925, - '7.80859375' - ], - [ - 1495715314.925, - '7.80859375' - ], - [ - 1495715374.925, - '7.80859375' - ], - [ - 1495715434.925, - '7.80859375' - ], - [ - 1495715494.925, - '7.80859375' - ], - [ - 1495715554.925, - '7.80859375' - ], - [ - 1495715614.925, - '7.80859375' - ], - [ - 1495715674.925, - '7.80859375' - ], - [ - 1495715734.925, - '7.80859375' - ], - [ - 1495715794.925, - '7.80859375' - ], - [ - 1495715854.925, - '7.80859375' - ], - [ - 1495715914.925, - '7.80078125' - ], - [ - 1495715974.925, - '7.80078125' - ], - [ - 1495716034.925, - '7.80078125' - ], - [ - 1495716094.925, - '7.80078125' - ], - [ - 1495716154.925, - '7.80078125' - ], - [ - 1495716214.925, - '7.796875' - ], - [ - 1495716274.925, - '7.796875' - ], - [ - 1495716334.925, - '7.796875' - ], - [ - 1495716394.925, - '7.796875' - ], - [ - 1495716454.925, - '7.796875' - ], - [ - 1495716514.925, - '7.796875' - ], - [ - 1495716574.925, - '7.796875' - ], - [ - 1495716634.925, - '7.796875' - ], - [ - 1495716694.925, - '7.796875' - ], - [ - 1495716754.925, - '7.796875' - ], - [ - 1495716814.925, - '7.796875' - ], - [ - 1495716874.925, - '7.79296875' - ], - [ - 1495716934.925, - '7.79296875' - ], - [ - 1495716994.925, - '7.79296875' - ], - [ - 1495717054.925, - '7.79296875' - ], - [ - 1495717114.925, - '7.79296875' - ], - [ - 1495717174.925, - '7.7890625' - ], - [ - 1495717234.925, - '7.7890625' - ], - [ - 1495717294.925, - '7.7890625' - ], - [ - 1495717354.925, - '7.7890625' - ], - [ - 1495717414.925, - '7.7890625' - ], - [ - 1495717474.925, - '7.7890625' - ], - [ - 1495717534.925, - '7.7890625' - ], - [ - 1495717594.925, - '7.7890625' - ], - [ - 1495717654.925, - '7.7890625' - ], - [ - 1495717714.925, - '7.7890625' - ], - [ - 1495717774.925, - '7.7890625' - ], - [ - 1495717834.925, - '7.77734375' - ], - [ - 1495717894.925, - '7.77734375' - ], - [ - 1495717954.925, - '7.77734375' - ], - [ - 1495718014.925, - '7.77734375' - ], - [ - 1495718074.925, - '7.77734375' - ], - [ - 1495718134.925, - '7.7421875' - ], - [ - 1495718194.925, - '7.7421875' - ], - [ - 1495718254.925, - '7.7421875' - ], - [ - 1495718314.925, - '7.7421875' - ] - ] - } - ] - } - ] + metric: {}, + values: [ + [1495700554.925, '8.0390625'], + [1495700614.925, '8.0390625'], + [1495700674.925, '8.0390625'], + [1495700734.925, '8.0390625'], + [1495700794.925, '8.0390625'], + [1495700854.925, '8.0390625'], + [1495700914.925, '8.0390625'], + [1495700974.925, '8.0390625'], + [1495701034.925, '8.0390625'], + [1495701094.925, '8.0390625'], + [1495701154.925, '8.0390625'], + [1495701214.925, '8.0390625'], + [1495701274.925, '8.0390625'], + [1495701334.925, '8.0390625'], + [1495701394.925, '8.0390625'], + [1495701454.925, '8.0390625'], + [1495701514.925, '8.0390625'], + [1495701574.925, '8.0390625'], + [1495701634.925, '8.0390625'], + [1495701694.925, '8.0390625'], + [1495701754.925, '8.0390625'], + [1495701814.925, '8.0390625'], + 
[1495701874.925, '8.0390625'], + [1495701934.925, '8.0390625'], + [1495701994.925, '8.0390625'], + [1495702054.925, '8.0390625'], + [1495702114.925, '8.0390625'], + [1495702174.925, '8.0390625'], + [1495702234.925, '8.0390625'], + [1495702294.925, '8.0390625'], + [1495702354.925, '8.0390625'], + [1495702414.925, '8.0390625'], + [1495702474.925, '8.0390625'], + [1495702534.925, '8.0390625'], + [1495702594.925, '8.0390625'], + [1495702654.925, '8.0390625'], + [1495702714.925, '8.0390625'], + [1495702774.925, '8.0390625'], + [1495702834.925, '8.0390625'], + [1495702894.925, '8.0390625'], + [1495702954.925, '8.0390625'], + [1495703014.925, '8.0390625'], + [1495703074.925, '8.0390625'], + [1495703134.925, '8.0390625'], + [1495703194.925, '8.0390625'], + [1495703254.925, '8.03515625'], + [1495703314.925, '8.03515625'], + [1495703374.925, '8.03515625'], + [1495703434.925, '8.03515625'], + [1495703494.925, '8.03515625'], + [1495703554.925, '8.03515625'], + [1495703614.925, '8.03515625'], + [1495703674.925, '8.03515625'], + [1495703734.925, '8.03515625'], + [1495703794.925, '8.03515625'], + [1495703854.925, '8.03515625'], + [1495703914.925, '8.03515625'], + [1495703974.925, '8.03515625'], + [1495704034.925, '8.03515625'], + [1495704094.925, '8.03515625'], + [1495704154.925, '8.03515625'], + [1495704214.925, '7.9296875'], + [1495704274.925, '7.9296875'], + [1495704334.925, '7.9296875'], + [1495704394.925, '7.9296875'], + [1495704454.925, '7.9296875'], + [1495704514.925, '7.9296875'], + [1495704574.925, '7.9296875'], + [1495704634.925, '7.9296875'], + [1495704694.925, '7.9296875'], + [1495704754.925, '7.9296875'], + [1495704814.925, '7.9296875'], + [1495704874.925, '7.9296875'], + [1495704934.925, '7.9296875'], + [1495704994.925, '7.9296875'], + [1495705054.925, '7.9296875'], + [1495705114.925, '7.9296875'], + [1495705174.925, '7.9296875'], + [1495705234.925, '7.9296875'], + [1495705294.925, '7.9296875'], + [1495705354.925, '7.9296875'], + [1495705414.925, '7.9296875'], + [1495705474.925, '7.9296875'], + [1495705534.925, '7.9296875'], + [1495705594.925, '7.9296875'], + [1495705654.925, '7.9296875'], + [1495705714.925, '7.9296875'], + [1495705774.925, '7.9296875'], + [1495705834.925, '7.9296875'], + [1495705894.925, '7.9296875'], + [1495705954.925, '7.9296875'], + [1495706014.925, '7.9296875'], + [1495706074.925, '7.9296875'], + [1495706134.925, '7.9296875'], + [1495706194.925, '7.9296875'], + [1495706254.925, '7.9296875'], + [1495706314.925, '7.9296875'], + [1495706374.925, '7.9296875'], + [1495706434.925, '7.9296875'], + [1495706494.925, '7.9296875'], + [1495706554.925, '7.9296875'], + [1495706614.925, '7.9296875'], + [1495706674.925, '7.9296875'], + [1495706734.925, '7.9296875'], + [1495706794.925, '7.9296875'], + [1495706854.925, '7.9296875'], + [1495706914.925, '7.9296875'], + [1495706974.925, '7.9296875'], + [1495707034.925, '7.9296875'], + [1495707094.925, '7.9296875'], + [1495707154.925, '7.9296875'], + [1495707214.925, '7.9296875'], + [1495707274.925, '7.9296875'], + [1495707334.925, '7.9296875'], + [1495707394.925, '7.9296875'], + [1495707454.925, '7.9296875'], + [1495707514.925, '7.9296875'], + [1495707574.925, '7.9296875'], + [1495707634.925, '7.9296875'], + [1495707694.925, '7.9296875'], + [1495707754.925, '7.9296875'], + [1495707814.925, '7.9296875'], + [1495707874.925, '7.9296875'], + [1495707934.925, '7.9296875'], + [1495707994.925, '7.9296875'], + [1495708054.925, '7.9296875'], + [1495708114.925, '7.9296875'], + [1495708174.925, '7.9296875'], + [1495708234.925, '7.9296875'], + 
[1495708294.925, '7.9296875'], + [1495708354.925, '7.9296875'], + [1495708414.925, '7.9296875'], + [1495708474.925, '7.9296875'], + [1495708534.925, '7.9296875'], + [1495708594.925, '7.9296875'], + [1495708654.925, '7.9296875'], + [1495708714.925, '7.9296875'], + [1495708774.925, '7.9296875'], + [1495708834.925, '7.9296875'], + [1495708894.925, '7.9296875'], + [1495708954.925, '7.8984375'], + [1495709014.925, '7.8984375'], + [1495709074.925, '7.8984375'], + [1495709134.925, '7.8984375'], + [1495709194.925, '7.8984375'], + [1495709254.925, '7.89453125'], + [1495709314.925, '7.89453125'], + [1495709374.925, '7.89453125'], + [1495709434.925, '7.89453125'], + [1495709494.925, '7.89453125'], + [1495709554.925, '7.89453125'], + [1495709614.925, '7.89453125'], + [1495709674.925, '7.89453125'], + [1495709734.925, '7.89453125'], + [1495709794.925, '7.89453125'], + [1495709854.925, '7.89453125'], + [1495709914.925, '7.89453125'], + [1495709974.925, '7.89453125'], + [1495710034.925, '7.89453125'], + [1495710094.925, '7.89453125'], + [1495710154.925, '7.89453125'], + [1495710214.925, '7.89453125'], + [1495710274.925, '7.89453125'], + [1495710334.925, '7.89453125'], + [1495710394.925, '7.89453125'], + [1495710454.925, '7.89453125'], + [1495710514.925, '7.89453125'], + [1495710574.925, '7.89453125'], + [1495710634.925, '7.89453125'], + [1495710694.925, '7.89453125'], + [1495710754.925, '7.89453125'], + [1495710814.925, '7.89453125'], + [1495710874.925, '7.89453125'], + [1495710934.925, '7.89453125'], + [1495710994.925, '7.89453125'], + [1495711054.925, '7.89453125'], + [1495711114.925, '7.89453125'], + [1495711174.925, '7.8515625'], + [1495711234.925, '7.8515625'], + [1495711294.925, '7.8515625'], + [1495711354.925, '7.8515625'], + [1495711414.925, '7.8515625'], + [1495711474.925, '7.8515625'], + [1495711534.925, '7.8515625'], + [1495711594.925, '7.8515625'], + [1495711654.925, '7.8515625'], + [1495711714.925, '7.8515625'], + [1495711774.925, '7.8515625'], + [1495711834.925, '7.8515625'], + [1495711894.925, '7.8515625'], + [1495711954.925, '7.8515625'], + [1495712014.925, '7.8515625'], + [1495712074.925, '7.8515625'], + [1495712134.925, '7.8515625'], + [1495712194.925, '7.8515625'], + [1495712254.925, '7.8515625'], + [1495712314.925, '7.8515625'], + [1495712374.925, '7.8515625'], + [1495712434.925, '7.83203125'], + [1495712494.925, '7.83203125'], + [1495712554.925, '7.83203125'], + [1495712614.925, '7.83203125'], + [1495712674.925, '7.83203125'], + [1495712734.925, '7.83203125'], + [1495712794.925, '7.83203125'], + [1495712854.925, '7.83203125'], + [1495712914.925, '7.83203125'], + [1495712974.925, '7.83203125'], + [1495713034.925, '7.83203125'], + [1495713094.925, '7.83203125'], + [1495713154.925, '7.83203125'], + [1495713214.925, '7.83203125'], + [1495713274.925, '7.83203125'], + [1495713334.925, '7.83203125'], + [1495713394.925, '7.8125'], + [1495713454.925, '7.8125'], + [1495713514.925, '7.8125'], + [1495713574.925, '7.8125'], + [1495713634.925, '7.8125'], + [1495713694.925, '7.8125'], + [1495713754.925, '7.8125'], + [1495713814.925, '7.8125'], + [1495713874.925, '7.8125'], + [1495713934.925, '7.8125'], + [1495713994.925, '7.8125'], + [1495714054.925, '7.8125'], + [1495714114.925, '7.8125'], + [1495714174.925, '7.8125'], + [1495714234.925, '7.8125'], + [1495714294.925, '7.8125'], + [1495714354.925, '7.80859375'], + [1495714414.925, '7.80859375'], + [1495714474.925, '7.80859375'], + [1495714534.925, '7.80859375'], + [1495714594.925, '7.80859375'], + [1495714654.925, '7.80859375'], + [1495714714.925, 
'7.80859375'], + [1495714774.925, '7.80859375'], + [1495714834.925, '7.80859375'], + [1495714894.925, '7.80859375'], + [1495714954.925, '7.80859375'], + [1495715014.925, '7.80859375'], + [1495715074.925, '7.80859375'], + [1495715134.925, '7.80859375'], + [1495715194.925, '7.80859375'], + [1495715254.925, '7.80859375'], + [1495715314.925, '7.80859375'], + [1495715374.925, '7.80859375'], + [1495715434.925, '7.80859375'], + [1495715494.925, '7.80859375'], + [1495715554.925, '7.80859375'], + [1495715614.925, '7.80859375'], + [1495715674.925, '7.80859375'], + [1495715734.925, '7.80859375'], + [1495715794.925, '7.80859375'], + [1495715854.925, '7.80859375'], + [1495715914.925, '7.80078125'], + [1495715974.925, '7.80078125'], + [1495716034.925, '7.80078125'], + [1495716094.925, '7.80078125'], + [1495716154.925, '7.80078125'], + [1495716214.925, '7.796875'], + [1495716274.925, '7.796875'], + [1495716334.925, '7.796875'], + [1495716394.925, '7.796875'], + [1495716454.925, '7.796875'], + [1495716514.925, '7.796875'], + [1495716574.925, '7.796875'], + [1495716634.925, '7.796875'], + [1495716694.925, '7.796875'], + [1495716754.925, '7.796875'], + [1495716814.925, '7.796875'], + [1495716874.925, '7.79296875'], + [1495716934.925, '7.79296875'], + [1495716994.925, '7.79296875'], + [1495717054.925, '7.79296875'], + [1495717114.925, '7.79296875'], + [1495717174.925, '7.7890625'], + [1495717234.925, '7.7890625'], + [1495717294.925, '7.7890625'], + [1495717354.925, '7.7890625'], + [1495717414.925, '7.7890625'], + [1495717474.925, '7.7890625'], + [1495717534.925, '7.7890625'], + [1495717594.925, '7.7890625'], + [1495717654.925, '7.7890625'], + [1495717714.925, '7.7890625'], + [1495717774.925, '7.7890625'], + [1495717834.925, '7.77734375'], + [1495717894.925, '7.77734375'], + [1495717954.925, '7.77734375'], + [1495718014.925, '7.77734375'], + [1495718074.925, '7.77734375'], + [1495718134.925, '7.7421875'], + [1495718194.925, '7.7421875'], + [1495718254.925, '7.7421875'], + [1495718314.925, '7.7421875'], + ], + }, + ], + }, + ], }, { - 'title': 'CPU usage', - 'weight': 1, - 'queries': [ + title: 'CPU usage', + weight: 1, + queries: [ + { + query_range: + 'avg(rate(container_cpu_usage_seconds_total{%{environment_filter}}[2m])) * 100', + result: [ { - 'query_range': 'avg(rate(container_cpu_usage_seconds_total{%{environment_filter}}[2m])) * 100', - 'result': [ - { - 'metric': {}, - 'values': [ - [ - 1495700554.925, - '0.0010794445585559514' - ], - [ - 1495700614.925, - '0.003927214935433527' - ], - [ - 1495700674.925, - '0.0053045219047619975' - ], - [ - 1495700734.925, - '0.0048892095238097155' - ], - [ - 1495700794.925, - '0.005827140952381137' - ], - [ - 1495700854.925, - '0.00569846906219937' - ], - [ - 1495700914.925, - '0.004972616802849382' - ], - [ - 1495700974.925, - '0.005117509523809902' - ], - [ - 1495701034.925, - '0.00512389061919564' - ], - [ - 1495701094.925, - '0.005199100501890691' - ], - [ - 1495701154.925, - '0.005415746394885837' - ], - [ - 1495701214.925, - '0.005607682788146286' - ], - [ - 1495701274.925, - '0.005641300000000118' - ], - [ - 1495701334.925, - '0.0071166279368766495' - ], - [ - 1495701394.925, - '0.0063242138095234044' - ], - [ - 1495701454.925, - '0.005793314698235304' - ], - [ - 1495701514.925, - '0.00703934942237556' - ], - [ - 1495701574.925, - '0.006357007076123191' - ], - [ - 1495701634.925, - '0.003753167300126738' - ], - [ - 1495701694.925, - '0.005018469678430698' - ], - [ - 1495701754.925, - '0.0045217153371887' - ], - [ - 1495701814.925, - '0.006140104285714119' - 
], - [ - 1495701874.925, - '0.004818684285714102' - ], - [ - 1495701934.925, - '0.005079509718955242' - ], - [ - 1495701994.925, - '0.005059981142498263' - ], - [ - 1495702054.925, - '0.005269098389538773' - ], - [ - 1495702114.925, - '0.005269954285714175' - ], - [ - 1495702174.925, - '0.014199241435795856' - ], - [ - 1495702234.925, - '0.01511936843111017' - ], - [ - 1495702294.925, - '0.0060933692920682875' - ], - [ - 1495702354.925, - '0.004945682380952493' - ], - [ - 1495702414.925, - '0.005641266666666565' - ], - [ - 1495702474.925, - '0.005223752857142996' - ], - [ - 1495702534.925, - '0.005743098505699831' - ], - [ - 1495702594.925, - '0.00538493380952391' - ], - [ - 1495702654.925, - '0.005507793883751339' - ], - [ - 1495702714.925, - '0.005666705714285466' - ], - [ - 1495702774.925, - '0.006231530000000112' - ], - [ - 1495702834.925, - '0.006570768635394899' - ], - [ - 1495702894.925, - '0.005551146666666895' - ], - [ - 1495702954.925, - '0.005602604737098058' - ], - [ - 1495703014.925, - '0.00613993580402159' - ], - [ - 1495703074.925, - '0.004770258764368832' - ], - [ - 1495703134.925, - '0.005512376671364914' - ], - [ - 1495703194.925, - '0.005254436666666674' - ], - [ - 1495703254.925, - '0.0050109839141320505' - ], - [ - 1495703314.925, - '0.0049478019256960016' - ], - [ - 1495703374.925, - '0.0037666860965123463' - ], - [ - 1495703434.925, - '0.004813526061656314' - ], - [ - 1495703494.925, - '0.005047748095238278' - ], - [ - 1495703554.925, - '0.00386494081008772' - ], - [ - 1495703614.925, - '0.004304037408111405' - ], - [ - 1495703674.925, - '0.004999466661587168' - ], - [ - 1495703734.925, - '0.004689140476190834' - ], - [ - 1495703794.925, - '0.004746126153582475' - ], - [ - 1495703854.925, - '0.004482706382572302' - ], - [ - 1495703914.925, - '0.004032808931864524' - ], - [ - 1495703974.925, - '0.005728319047618988' - ], - [ - 1495704034.925, - '0.004436139179627006' - ], - [ - 1495704094.925, - '0.004553455714285617' - ], - [ - 1495704154.925, - '0.003455244285714341' - ], - [ - 1495704214.925, - '0.004742244761904621' - ], - [ - 1495704274.925, - '0.005366978571428422' - ], - [ - 1495704334.925, - '0.004257954837665058' - ], - [ - 1495704394.925, - '0.005431603259831257' - ], - [ - 1495704454.925, - '0.0052009214498621986' - ], - [ - 1495704514.925, - '0.004317201904761618' - ], - [ - 1495704574.925, - '0.004307384285714157' - ], - [ - 1495704634.925, - '0.004789801146644822' - ], - [ - 1495704694.925, - '0.0051429795906706485' - ], - [ - 1495704754.925, - '0.005322495714285479' - ], - [ - 1495704814.925, - '0.004512809333244233' - ], - [ - 1495704874.925, - '0.004953843582568726' - ], - [ - 1495704934.925, - '0.005812690120858119' - ], - [ - 1495704994.925, - '0.004997024285714838' - ], - [ - 1495705054.925, - '0.005246216154439592' - ], - [ - 1495705114.925, - '0.0063494966618726795' - ], - [ - 1495705174.925, - '0.005306004342898225' - ], - [ - 1495705234.925, - '0.005081412857142978' - ], - [ - 1495705294.925, - '0.00511409523809522' - ], - [ - 1495705354.925, - '0.0047861001481192' - ], - [ - 1495705414.925, - '0.005107688228042962' - ], - [ - 1495705474.925, - '0.005271929582294012' - ], - [ - 1495705534.925, - '0.004453254502681249' - ], - [ - 1495705594.925, - '0.005799134293959226' - ], - [ - 1495705654.925, - '0.005340865929502478' - ], - [ - 1495705714.925, - '0.004911654761904942' - ], - [ - 1495705774.925, - '0.005888234873953261' - ], - [ - 1495705834.925, - '0.005565283333332954' - ], - [ - 1495705894.925, - '0.005522869047618869' - ], - [ - 
1495705954.925, - '0.005177549737621646' - ], - [ - 1495706014.925, - '0.0053145810232096465' - ], - [ - 1495706074.925, - '0.004751095238095275' - ], - [ - 1495706134.925, - '0.006242077142856976' - ], - [ - 1495706194.925, - '0.00621034406957871' - ], - [ - 1495706254.925, - '0.006887592738978596' - ], - [ - 1495706314.925, - '0.006328128779726213' - ], - [ - 1495706374.925, - '0.007488363809523927' - ], - [ - 1495706434.925, - '0.006193758571428157' - ], - [ - 1495706494.925, - '0.0068798371839706935' - ], - [ - 1495706554.925, - '0.005757034340423128' - ], - [ - 1495706614.925, - '0.004571388497294698' - ], - [ - 1495706674.925, - '0.00620283044923395' - ], - [ - 1495706734.925, - '0.005607562380952455' - ], - [ - 1495706794.925, - '0.005506969933620308' - ], - [ - 1495706854.925, - '0.005621118095238131' - ], - [ - 1495706914.925, - '0.004876606098698849' - ], - [ - 1495706974.925, - '0.0047871205988517206' - ], - [ - 1495707034.925, - '0.00526405939458784' - ], - [ - 1495707094.925, - '0.005716323800605852' - ], - [ - 1495707154.925, - '0.005301459523809575' - ], - [ - 1495707214.925, - '0.0051613042857144905' - ], - [ - 1495707274.925, - '0.005384792857142714' - ], - [ - 1495707334.925, - '0.005259719047619222' - ], - [ - 1495707394.925, - '0.00584101142857182' - ], - [ - 1495707454.925, - '0.0060066121920326326' - ], - [ - 1495707514.925, - '0.006359978571428453' - ], - [ - 1495707574.925, - '0.006315876322151109' - ], - [ - 1495707634.925, - '0.005590012517198831' - ], - [ - 1495707694.925, - '0.005517419877137072' - ], - [ - 1495707754.925, - '0.006089813430348506' - ], - [ - 1495707814.925, - '0.00466754476190479' - ], - [ - 1495707874.925, - '0.006059954380517721' - ], - [ - 1495707934.925, - '0.005085657142856972' - ], - [ - 1495707994.925, - '0.005897665238095296' - ], - [ - 1495708054.925, - '0.0062282023199555885' - ], - [ - 1495708114.925, - '0.00526214553236979' - ], - [ - 1495708174.925, - '0.0044803300000000644' - ], - [ - 1495708234.925, - '0.005421443333333592' - ], - [ - 1495708294.925, - '0.005694326244512144' - ], - [ - 1495708354.925, - '0.005527721904761457' - ], - [ - 1495708414.925, - '0.005988819523809819' - ], - [ - 1495708474.925, - '0.005484704285714448' - ], - [ - 1495708534.925, - '0.005041123649230085' - ], - [ - 1495708594.925, - '0.005717767639612059' - ], - [ - 1495708654.925, - '0.005412954417342863' - ], - [ - 1495708714.925, - '0.005833343333333254' - ], - [ - 1495708774.925, - '0.005448135238094969' - ], - [ - 1495708834.925, - '0.005117341428571432' - ], - [ - 1495708894.925, - '0.005888345825277833' - ], - [ - 1495708954.925, - '0.005398543809524135' - ], - [ - 1495709014.925, - '0.005325611428571416' - ], - [ - 1495709074.925, - '0.005848668571428527' - ], - [ - 1495709134.925, - '0.005135003105145044' - ], - [ - 1495709194.925, - '0.0054551400000003' - ], - [ - 1495709254.925, - '0.005319472937322171' - ], - [ - 1495709314.925, - '0.00585677857142792' - ], - [ - 1495709374.925, - '0.0062146261904759215' - ], - [ - 1495709434.925, - '0.0067105060904182265' - ], - [ - 1495709494.925, - '0.005829691904762108' - ], - [ - 1495709554.925, - '0.005719280952381261' - ], - [ - 1495709614.925, - '0.005682603793416407' - ], - [ - 1495709674.925, - '0.0055272846277326934' - ], - [ - 1495709734.925, - '0.0057123680952386735' - ], - [ - 1495709794.925, - '0.00520597958075818' - ], - [ - 1495709854.925, - '0.005584358957263837' - ], - [ - 1495709914.925, - '0.005601104275197466' - ], - [ - 1495709974.925, - '0.005991657142857066' - ], - [ - 1495710034.925, - 
'0.00553722238095218' - ], - [ - 1495710094.925, - '0.005127883122696293' - ], - [ - 1495710154.925, - '0.005498111927534584' - ], - [ - 1495710214.925, - '0.005609934069084202' - ], - [ - 1495710274.925, - '0.00459206285714307' - ], - [ - 1495710334.925, - '0.0047910828571428084' - ], - [ - 1495710394.925, - '0.0056014671288845685' - ], - [ - 1495710454.925, - '0.005686936791078528' - ], - [ - 1495710514.925, - '0.00444480476190448' - ], - [ - 1495710574.925, - '0.005780394696738921' - ], - [ - 1495710634.925, - '0.0053107227550210365' - ], - [ - 1495710694.925, - '0.005096031495761817' - ], - [ - 1495710754.925, - '0.005451377979091524' - ], - [ - 1495710814.925, - '0.005328136666667083' - ], - [ - 1495710874.925, - '0.006020612857143043' - ], - [ - 1495710934.925, - '0.0061063585714285365' - ], - [ - 1495710994.925, - '0.006018346015752312' - ], - [ - 1495711054.925, - '0.005069130952381193' - ], - [ - 1495711114.925, - '0.005458406190476052' - ], - [ - 1495711174.925, - '0.00577219190476179' - ], - [ - 1495711234.925, - '0.005760814645658314' - ], - [ - 1495711294.925, - '0.005371875716579101' - ], - [ - 1495711354.925, - '0.0064232666666665834' - ], - [ - 1495711414.925, - '0.009369806836906667' - ], - [ - 1495711474.925, - '0.008956864761904692' - ], - [ - 1495711534.925, - '0.005266849368559271' - ], - [ - 1495711594.925, - '0.005335111364934262' - ], - [ - 1495711654.925, - '0.006461778319586945' - ], - [ - 1495711714.925, - '0.004687939890762393' - ], - [ - 1495711774.925, - '0.004438831245760684' - ], - [ - 1495711834.925, - '0.005142786666666613' - ], - [ - 1495711894.925, - '0.007257734212054963' - ], - [ - 1495711954.925, - '0.005621991904761494' - ], - [ - 1495712014.925, - '0.007868689999999862' - ], - [ - 1495712074.925, - '0.00910970215275738' - ], - [ - 1495712134.925, - '0.006151004285714278' - ], - [ - 1495712194.925, - '0.005447120924961522' - ], - [ - 1495712254.925, - '0.005150705153929503' - ], - [ - 1495712314.925, - '0.006358108714969314' - ], - [ - 1495712374.925, - '0.0057725354795696475' - ], - [ - 1495712434.925, - '0.005232139047619015' - ], - [ - 1495712494.925, - '0.004932809617949037' - ], - [ - 1495712554.925, - '0.004511607508499662' - ], - [ - 1495712614.925, - '0.00440487701522666' - ], - [ - 1495712674.925, - '0.005479113333333174' - ], - [ - 1495712734.925, - '0.004726317619047547' - ], - [ - 1495712794.925, - '0.005582041102958029' - ], - [ - 1495712854.925, - '0.006381481216082099' - ], - [ - 1495712914.925, - '0.005474260014095208' - ], - [ - 1495712974.925, - '0.00567597142857188' - ], - [ - 1495713034.925, - '0.0064741233333332985' - ], - [ - 1495713094.925, - '0.005467475714285271' - ], - [ - 1495713154.925, - '0.004868648393824457' - ], - [ - 1495713214.925, - '0.005254923286444893' - ], - [ - 1495713274.925, - '0.005599217150312865' - ], - [ - 1495713334.925, - '0.005105413720618919' - ], - [ - 1495713394.925, - '0.007246073333333279' - ], - [ - 1495713454.925, - '0.005990312380952272' - ], - [ - 1495713514.925, - '0.005594601853351101' - ], - [ - 1495713574.925, - '0.004739258673727054' - ], - [ - 1495713634.925, - '0.003932121428571783' - ], - [ - 1495713694.925, - '0.005018188268459395' - ], - [ - 1495713754.925, - '0.004538238095237985' - ], - [ - 1495713814.925, - '0.00561816643265435' - ], - [ - 1495713874.925, - '0.0063132584495033586' - ], - [ - 1495713934.925, - '0.00442385238095213' - ], - [ - 1495713994.925, - '0.004181795887658453' - ], - [ - 1495714054.925, - '0.004437759047619037' - ], - [ - 1495714114.925, - 
'0.006421748157178241' - ], - [ - 1495714174.925, - '0.006525143809523842' - ], - [ - 1495714234.925, - '0.004715904935144247' - ], - [ - 1495714294.925, - '0.005966040152763461' - ], - [ - 1495714354.925, - '0.005614535466921674' - ], - [ - 1495714414.925, - '0.004934375119415906' - ], - [ - 1495714474.925, - '0.0054122933333327385' - ], - [ - 1495714534.925, - '0.004926540699612279' - ], - [ - 1495714594.925, - '0.006124649517134237' - ], - [ - 1495714654.925, - '0.004629427092013995' - ], - [ - 1495714714.925, - '0.005117951257607005' - ], - [ - 1495714774.925, - '0.004868774512685422' - ], - [ - 1495714834.925, - '0.005310093333333399' - ], - [ - 1495714894.925, - '0.0054907752286127345' - ], - [ - 1495714954.925, - '0.004597678117351089' - ], - [ - 1495715014.925, - '0.0059622552380952' - ], - [ - 1495715074.925, - '0.005352457072655368' - ], - [ - 1495715134.925, - '0.005491630952381143' - ], - [ - 1495715194.925, - '0.006391770078379791' - ], - [ - 1495715254.925, - '0.005933472857142518' - ], - [ - 1495715314.925, - '0.005301314285714163' - ], - [ - 1495715374.925, - '0.0058352959724814165' - ], - [ - 1495715434.925, - '0.006154755147867044' - ], - [ - 1495715494.925, - '0.009391935637482038' - ], - [ - 1495715554.925, - '0.007846462857142592' - ], - [ - 1495715614.925, - '0.00477608215316353' - ], - [ - 1495715674.925, - '0.006132865238094998' - ], - [ - 1495715734.925, - '0.006159762457649516' - ], - [ - 1495715794.925, - '0.005957307073265968' - ], - [ - 1495715854.925, - '0.006652319091792501' - ], - [ - 1495715914.925, - '0.005493557402895287' - ], - [ - 1495715974.925, - '0.0058652434829145166' - ], - [ - 1495716034.925, - '0.005627400430468021' - ], - [ - 1495716094.925, - '0.006240656190475609' - ], - [ - 1495716154.925, - '0.006305997676168624' - ], - [ - 1495716214.925, - '0.005388057732783248' - ], - [ - 1495716274.925, - '0.0052814916048421244' - ], - [ - 1495716334.925, - '0.00699498614272497' - ], - [ - 1495716394.925, - '0.00627768693035141' - ], - [ - 1495716454.925, - '0.0042411487048161145' - ], - [ - 1495716514.925, - '0.005348647473627653' - ], - [ - 1495716574.925, - '0.0047176657142853975' - ], - [ - 1495716634.925, - '0.004437898571428686' - ], - [ - 1495716694.925, - '0.004923527366927261' - ], - [ - 1495716754.925, - '0.005131935066048421' - ], - [ - 1495716814.925, - '0.005046949523809611' - ], - [ - 1495716874.925, - '0.00547184095238092' - ], - [ - 1495716934.925, - '0.005224140016380444' - ], - [ - 1495716994.925, - '0.005297991171665292' - ], - [ - 1495717054.925, - '0.005492965995623498' - ], - [ - 1495717114.925, - '0.005754660000000403' - ], - [ - 1495717174.925, - '0.005949557138639285' - ], - [ - 1495717234.925, - '0.006091816112534666' - ], - [ - 1495717294.925, - '0.005554210080192063' - ], - [ - 1495717354.925, - '0.006411504395279871' - ], - [ - 1495717414.925, - '0.006319643996609606' - ], - [ - 1495717474.925, - '0.005539174405717675' - ], - [ - 1495717534.925, - '0.0053157078842772255' - ], - [ - 1495717594.925, - '0.005247480952381066' - ], - [ - 1495717654.925, - '0.004820141620396252' - ], - [ - 1495717714.925, - '0.005906173868322844' - ], - [ - 1495717774.925, - '0.006173117219570961' - ], - [ - 1495717834.925, - '0.005963340952380661' - ], - [ - 1495717894.925, - '0.005698976627681527' - ], - [ - 1495717954.925, - '0.004751279096346378' - ], - [ - 1495718014.925, - '0.005733142379359711' - ], - [ - 1495718074.925, - '0.004831689010348035' - ], - [ - 1495718134.925, - '0.005188370476191092' - ], - [ - 1495718194.925, - 
'0.004793227554547938' - ], - [ - 1495718254.925, - '0.003997442857142731' - ], - [ - 1495718314.925, - '0.004386040132951264' - ] - ] - } - ] - } - ] - } - ] - } + metric: {}, + values: [ + [1495700554.925, '0.0010794445585559514'], + [1495700614.925, '0.003927214935433527'], + [1495700674.925, '0.0053045219047619975'], + [1495700734.925, '0.0048892095238097155'], + [1495700794.925, '0.005827140952381137'], + [1495700854.925, '0.00569846906219937'], + [1495700914.925, '0.004972616802849382'], + [1495700974.925, '0.005117509523809902'], + [1495701034.925, '0.00512389061919564'], + [1495701094.925, '0.005199100501890691'], + [1495701154.925, '0.005415746394885837'], + [1495701214.925, '0.005607682788146286'], + [1495701274.925, '0.005641300000000118'], + [1495701334.925, '0.0071166279368766495'], + [1495701394.925, '0.0063242138095234044'], + [1495701454.925, '0.005793314698235304'], + [1495701514.925, '0.00703934942237556'], + [1495701574.925, '0.006357007076123191'], + [1495701634.925, '0.003753167300126738'], + [1495701694.925, '0.005018469678430698'], + [1495701754.925, '0.0045217153371887'], + [1495701814.925, '0.006140104285714119'], + [1495701874.925, '0.004818684285714102'], + [1495701934.925, '0.005079509718955242'], + [1495701994.925, '0.005059981142498263'], + [1495702054.925, '0.005269098389538773'], + [1495702114.925, '0.005269954285714175'], + [1495702174.925, '0.014199241435795856'], + [1495702234.925, '0.01511936843111017'], + [1495702294.925, '0.0060933692920682875'], + [1495702354.925, '0.004945682380952493'], + [1495702414.925, '0.005641266666666565'], + [1495702474.925, '0.005223752857142996'], + [1495702534.925, '0.005743098505699831'], + [1495702594.925, '0.00538493380952391'], + [1495702654.925, '0.005507793883751339'], + [1495702714.925, '0.005666705714285466'], + [1495702774.925, '0.006231530000000112'], + [1495702834.925, '0.006570768635394899'], + [1495702894.925, '0.005551146666666895'], + [1495702954.925, '0.005602604737098058'], + [1495703014.925, '0.00613993580402159'], + [1495703074.925, '0.004770258764368832'], + [1495703134.925, '0.005512376671364914'], + [1495703194.925, '0.005254436666666674'], + [1495703254.925, '0.0050109839141320505'], + [1495703314.925, '0.0049478019256960016'], + [1495703374.925, '0.0037666860965123463'], + [1495703434.925, '0.004813526061656314'], + [1495703494.925, '0.005047748095238278'], + [1495703554.925, '0.00386494081008772'], + [1495703614.925, '0.004304037408111405'], + [1495703674.925, '0.004999466661587168'], + [1495703734.925, '0.004689140476190834'], + [1495703794.925, '0.004746126153582475'], + [1495703854.925, '0.004482706382572302'], + [1495703914.925, '0.004032808931864524'], + [1495703974.925, '0.005728319047618988'], + [1495704034.925, '0.004436139179627006'], + [1495704094.925, '0.004553455714285617'], + [1495704154.925, '0.003455244285714341'], + [1495704214.925, '0.004742244761904621'], + [1495704274.925, '0.005366978571428422'], + [1495704334.925, '0.004257954837665058'], + [1495704394.925, '0.005431603259831257'], + [1495704454.925, '0.0052009214498621986'], + [1495704514.925, '0.004317201904761618'], + [1495704574.925, '0.004307384285714157'], + [1495704634.925, '0.004789801146644822'], + [1495704694.925, '0.0051429795906706485'], + [1495704754.925, '0.005322495714285479'], + [1495704814.925, '0.004512809333244233'], + [1495704874.925, '0.004953843582568726'], + [1495704934.925, '0.005812690120858119'], + [1495704994.925, '0.004997024285714838'], + [1495705054.925, '0.005246216154439592'], + [1495705114.925, 
'0.0063494966618726795'], + [1495705174.925, '0.005306004342898225'], + [1495705234.925, '0.005081412857142978'], + [1495705294.925, '0.00511409523809522'], + [1495705354.925, '0.0047861001481192'], + [1495705414.925, '0.005107688228042962'], + [1495705474.925, '0.005271929582294012'], + [1495705534.925, '0.004453254502681249'], + [1495705594.925, '0.005799134293959226'], + [1495705654.925, '0.005340865929502478'], + [1495705714.925, '0.004911654761904942'], + [1495705774.925, '0.005888234873953261'], + [1495705834.925, '0.005565283333332954'], + [1495705894.925, '0.005522869047618869'], + [1495705954.925, '0.005177549737621646'], + [1495706014.925, '0.0053145810232096465'], + [1495706074.925, '0.004751095238095275'], + [1495706134.925, '0.006242077142856976'], + [1495706194.925, '0.00621034406957871'], + [1495706254.925, '0.006887592738978596'], + [1495706314.925, '0.006328128779726213'], + [1495706374.925, '0.007488363809523927'], + [1495706434.925, '0.006193758571428157'], + [1495706494.925, '0.0068798371839706935'], + [1495706554.925, '0.005757034340423128'], + [1495706614.925, '0.004571388497294698'], + [1495706674.925, '0.00620283044923395'], + [1495706734.925, '0.005607562380952455'], + [1495706794.925, '0.005506969933620308'], + [1495706854.925, '0.005621118095238131'], + [1495706914.925, '0.004876606098698849'], + [1495706974.925, '0.0047871205988517206'], + [1495707034.925, '0.00526405939458784'], + [1495707094.925, '0.005716323800605852'], + [1495707154.925, '0.005301459523809575'], + [1495707214.925, '0.0051613042857144905'], + [1495707274.925, '0.005384792857142714'], + [1495707334.925, '0.005259719047619222'], + [1495707394.925, '0.00584101142857182'], + [1495707454.925, '0.0060066121920326326'], + [1495707514.925, '0.006359978571428453'], + [1495707574.925, '0.006315876322151109'], + [1495707634.925, '0.005590012517198831'], + [1495707694.925, '0.005517419877137072'], + [1495707754.925, '0.006089813430348506'], + [1495707814.925, '0.00466754476190479'], + [1495707874.925, '0.006059954380517721'], + [1495707934.925, '0.005085657142856972'], + [1495707994.925, '0.005897665238095296'], + [1495708054.925, '0.0062282023199555885'], + [1495708114.925, '0.00526214553236979'], + [1495708174.925, '0.0044803300000000644'], + [1495708234.925, '0.005421443333333592'], + [1495708294.925, '0.005694326244512144'], + [1495708354.925, '0.005527721904761457'], + [1495708414.925, '0.005988819523809819'], + [1495708474.925, '0.005484704285714448'], + [1495708534.925, '0.005041123649230085'], + [1495708594.925, '0.005717767639612059'], + [1495708654.925, '0.005412954417342863'], + [1495708714.925, '0.005833343333333254'], + [1495708774.925, '0.005448135238094969'], + [1495708834.925, '0.005117341428571432'], + [1495708894.925, '0.005888345825277833'], + [1495708954.925, '0.005398543809524135'], + [1495709014.925, '0.005325611428571416'], + [1495709074.925, '0.005848668571428527'], + [1495709134.925, '0.005135003105145044'], + [1495709194.925, '0.0054551400000003'], + [1495709254.925, '0.005319472937322171'], + [1495709314.925, '0.00585677857142792'], + [1495709374.925, '0.0062146261904759215'], + [1495709434.925, '0.0067105060904182265'], + [1495709494.925, '0.005829691904762108'], + [1495709554.925, '0.005719280952381261'], + [1495709614.925, '0.005682603793416407'], + [1495709674.925, '0.0055272846277326934'], + [1495709734.925, '0.0057123680952386735'], + [1495709794.925, '0.00520597958075818'], + [1495709854.925, '0.005584358957263837'], + [1495709914.925, '0.005601104275197466'], + 
[1495709974.925, '0.005991657142857066'], + [1495710034.925, '0.00553722238095218'], + [1495710094.925, '0.005127883122696293'], + [1495710154.925, '0.005498111927534584'], + [1495710214.925, '0.005609934069084202'], + [1495710274.925, '0.00459206285714307'], + [1495710334.925, '0.0047910828571428084'], + [1495710394.925, '0.0056014671288845685'], + [1495710454.925, '0.005686936791078528'], + [1495710514.925, '0.00444480476190448'], + [1495710574.925, '0.005780394696738921'], + [1495710634.925, '0.0053107227550210365'], + [1495710694.925, '0.005096031495761817'], + [1495710754.925, '0.005451377979091524'], + [1495710814.925, '0.005328136666667083'], + [1495710874.925, '0.006020612857143043'], + [1495710934.925, '0.0061063585714285365'], + [1495710994.925, '0.006018346015752312'], + [1495711054.925, '0.005069130952381193'], + [1495711114.925, '0.005458406190476052'], + [1495711174.925, '0.00577219190476179'], + [1495711234.925, '0.005760814645658314'], + [1495711294.925, '0.005371875716579101'], + [1495711354.925, '0.0064232666666665834'], + [1495711414.925, '0.009369806836906667'], + [1495711474.925, '0.008956864761904692'], + [1495711534.925, '0.005266849368559271'], + [1495711594.925, '0.005335111364934262'], + [1495711654.925, '0.006461778319586945'], + [1495711714.925, '0.004687939890762393'], + [1495711774.925, '0.004438831245760684'], + [1495711834.925, '0.005142786666666613'], + [1495711894.925, '0.007257734212054963'], + [1495711954.925, '0.005621991904761494'], + [1495712014.925, '0.007868689999999862'], + [1495712074.925, '0.00910970215275738'], + [1495712134.925, '0.006151004285714278'], + [1495712194.925, '0.005447120924961522'], + [1495712254.925, '0.005150705153929503'], + [1495712314.925, '0.006358108714969314'], + [1495712374.925, '0.0057725354795696475'], + [1495712434.925, '0.005232139047619015'], + [1495712494.925, '0.004932809617949037'], + [1495712554.925, '0.004511607508499662'], + [1495712614.925, '0.00440487701522666'], + [1495712674.925, '0.005479113333333174'], + [1495712734.925, '0.004726317619047547'], + [1495712794.925, '0.005582041102958029'], + [1495712854.925, '0.006381481216082099'], + [1495712914.925, '0.005474260014095208'], + [1495712974.925, '0.00567597142857188'], + [1495713034.925, '0.0064741233333332985'], + [1495713094.925, '0.005467475714285271'], + [1495713154.925, '0.004868648393824457'], + [1495713214.925, '0.005254923286444893'], + [1495713274.925, '0.005599217150312865'], + [1495713334.925, '0.005105413720618919'], + [1495713394.925, '0.007246073333333279'], + [1495713454.925, '0.005990312380952272'], + [1495713514.925, '0.005594601853351101'], + [1495713574.925, '0.004739258673727054'], + [1495713634.925, '0.003932121428571783'], + [1495713694.925, '0.005018188268459395'], + [1495713754.925, '0.004538238095237985'], + [1495713814.925, '0.00561816643265435'], + [1495713874.925, '0.0063132584495033586'], + [1495713934.925, '0.00442385238095213'], + [1495713994.925, '0.004181795887658453'], + [1495714054.925, '0.004437759047619037'], + [1495714114.925, '0.006421748157178241'], + [1495714174.925, '0.006525143809523842'], + [1495714234.925, '0.004715904935144247'], + [1495714294.925, '0.005966040152763461'], + [1495714354.925, '0.005614535466921674'], + [1495714414.925, '0.004934375119415906'], + [1495714474.925, '0.0054122933333327385'], + [1495714534.925, '0.004926540699612279'], + [1495714594.925, '0.006124649517134237'], + [1495714654.925, '0.004629427092013995'], + [1495714714.925, '0.005117951257607005'], + [1495714774.925, 
'0.004868774512685422'], + [1495714834.925, '0.005310093333333399'], + [1495714894.925, '0.0054907752286127345'], + [1495714954.925, '0.004597678117351089'], + [1495715014.925, '0.0059622552380952'], + [1495715074.925, '0.005352457072655368'], + [1495715134.925, '0.005491630952381143'], + [1495715194.925, '0.006391770078379791'], + [1495715254.925, '0.005933472857142518'], + [1495715314.925, '0.005301314285714163'], + [1495715374.925, '0.0058352959724814165'], + [1495715434.925, '0.006154755147867044'], + [1495715494.925, '0.009391935637482038'], + [1495715554.925, '0.007846462857142592'], + [1495715614.925, '0.00477608215316353'], + [1495715674.925, '0.006132865238094998'], + [1495715734.925, '0.006159762457649516'], + [1495715794.925, '0.005957307073265968'], + [1495715854.925, '0.006652319091792501'], + [1495715914.925, '0.005493557402895287'], + [1495715974.925, '0.0058652434829145166'], + [1495716034.925, '0.005627400430468021'], + [1495716094.925, '0.006240656190475609'], + [1495716154.925, '0.006305997676168624'], + [1495716214.925, '0.005388057732783248'], + [1495716274.925, '0.0052814916048421244'], + [1495716334.925, '0.00699498614272497'], + [1495716394.925, '0.00627768693035141'], + [1495716454.925, '0.0042411487048161145'], + [1495716514.925, '0.005348647473627653'], + [1495716574.925, '0.0047176657142853975'], + [1495716634.925, '0.004437898571428686'], + [1495716694.925, '0.004923527366927261'], + [1495716754.925, '0.005131935066048421'], + [1495716814.925, '0.005046949523809611'], + [1495716874.925, '0.00547184095238092'], + [1495716934.925, '0.005224140016380444'], + [1495716994.925, '0.005297991171665292'], + [1495717054.925, '0.005492965995623498'], + [1495717114.925, '0.005754660000000403'], + [1495717174.925, '0.005949557138639285'], + [1495717234.925, '0.006091816112534666'], + [1495717294.925, '0.005554210080192063'], + [1495717354.925, '0.006411504395279871'], + [1495717414.925, '0.006319643996609606'], + [1495717474.925, '0.005539174405717675'], + [1495717534.925, '0.0053157078842772255'], + [1495717594.925, '0.005247480952381066'], + [1495717654.925, '0.004820141620396252'], + [1495717714.925, '0.005906173868322844'], + [1495717774.925, '0.006173117219570961'], + [1495717834.925, '0.005963340952380661'], + [1495717894.925, '0.005698976627681527'], + [1495717954.925, '0.004751279096346378'], + [1495718014.925, '0.005733142379359711'], + [1495718074.925, '0.004831689010348035'], + [1495718134.925, '0.005188370476191092'], + [1495718194.925, '0.004793227554547938'], + [1495718254.925, '0.003997442857142731'], + [1495718314.925, '0.004386040132951264'], + ], + }, + ], + }, + ], + }, + ], + }, ], - 'last_update': '2017-05-25T13:18:34.949Z' + last_update: '2017-05-25T13:18:34.949Z', }; export default metricsGroupsAPIResponse; @@ -2432,41 +651,44 @@ export const deploymentData = [ id: 111, iid: 3, sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187', - commitUrl: 'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187', + commitUrl: + 'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187', ref: { - name: 'master' + name: 'master', }, created_at: '2017-05-31T21:23:37.881Z', tag: false, tagUrl: 'http://test.host/frontend-fixtures/environments-project/tags/false', - 'last?': true + 'last?': true, }, { id: 110, iid: 2, sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187', - commitUrl: 
'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187', + commitUrl: + 'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187', ref: { - name: 'master' + name: 'master', }, created_at: '2017-05-30T20:08:04.629Z', tag: false, - tagUrl: 'http://test.host/frontend-fixtures/environments-project/tags/false', - 'last?': false + tagUrl: 'http://test.host/frontend-fixtures/environments-project/tags/false', + 'last?': false, }, { id: 109, iid: 1, sha: '6511e58faafaa7ad2228990ec57f19d66f7db7c2', - commitUrl: 'http://test.host/frontend-fixtures/environments-project/commit/6511e58faafaa7ad2228990ec57f19d66f7db7c2', + commitUrl: + 'http://test.host/frontend-fixtures/environments-project/commit/6511e58faafaa7ad2228990ec57f19d66f7db7c2', ref: { - name: 'update2-readme' + name: 'update2-readme', }, created_at: '2017-05-30T17:42:38.409Z', tag: false, tagUrl: 'http://test.host/frontend-fixtures/environments-project/tags/false', - 'last?': false - } + 'last?': false, + }, ]; export const statePaths = { @@ -2476,5844 +698,5844 @@ export const statePaths = { }; export const singleRowMetricsMultipleSeries = [ - { - 'title': 'Multiple Time Series', - 'weight': 1, - 'y_label': 'Request Rates', - 'queries': [ - { - 'query_range': 'sum(rate(nginx_responses_total{environment="production"}[2m])) by (status_code)', - 'label': 'Requests', - 'unit': 'Req/sec', - 'result': [ - { - 'metric': { - 'status_code': '1xx' - }, - 'values': [ - { - 'time': '2017-08-27T11:01:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:02:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:03:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:04:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:05:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:06:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:07:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:08:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:09:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:10:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:11:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:12:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:13:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:14:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:15:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:16:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:17:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:18:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:19:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:20:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:21:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:22:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:23:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:24:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:25:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:26:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:27:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:28:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:29:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:30:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:31:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:32:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:33:51.462Z', - 
'value': '0' - }, - { - 'time': '2017-08-27T11:34:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:35:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:36:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:37:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:38:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:39:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:40:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:41:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:42:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:43:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:44:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:45:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:46:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:47:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:48:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:49:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:50:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:51:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:52:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:53:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:54:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:55:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:56:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:57:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:58:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T11:59:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:00:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:01:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:02:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:03:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:04:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:05:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:06:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:07:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:08:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:09:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:10:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:11:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:12:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:13:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:14:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:15:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:16:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:17:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:18:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:19:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:20:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:21:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:22:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:23:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:24:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:25:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:26:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:27:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:28:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:29:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:30:51.462Z', - 'value': '0' - }, - 
{ - 'time': '2017-08-27T12:31:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:32:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:33:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:34:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:35:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:36:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:37:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:38:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:39:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:40:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:41:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:42:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:43:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:44:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:45:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:46:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:47:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:48:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:49:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:50:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:51:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:52:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:53:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:54:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:55:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:56:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:57:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:58:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T12:59:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:00:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:01:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:02:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:03:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:04:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:05:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:06:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:07:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:08:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:09:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:10:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:11:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:12:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:13:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:14:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:15:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:16:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:17:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:18:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:19:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:20:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:21:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:22:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:23:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:24:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:25:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:26:51.462Z', - 'value': '0' - }, - { - 'time': '2017-08-27T13:27:51.462Z', - 'value': '0' - }, - { - 'time': 
'2017-08-27T13:28:51.462Z', 'value': '0' },
-            ... per-minute { 'time', 'value' } samples, all with 'value': '0', continue through '2017-08-27T19:01:51.462Z' ...
-          ]
-        },
-        {
-          'metric': {
-            'status_code': '2xx'
-          },
-          'values': [
-            { 'time': '2017-08-27T11:01:51.462Z', 'value': '1.295238095238095' },
-            ... per-minute samples, mostly between '1.2571428571428571' and '1.3333333333333333', with a brief rise to about '2.19' between 18:41 and 19:01, continue through '2017-08-27T19:01:51.462Z' ...
-          ]
-        },
-      ],
-      'when': [
-        {
-          'value': 'hundred(s)',
-          'color': 'green',
-        },
-      ],
-    }
-  ]
-},
-{
-  'title': 'Throughput',
-  'weight': 1,
-  'y_label': 'Requests / Sec',
-  'queries': [
-    {
-      'query_range': 'sum(rate(nginx_requests_total{server_zone!=\'*\', server_zone!=\'_\', container_name!=\'POD\',environment=\'production\'}[2m]))',
-      'label': 'Total',
-      'unit': 'req / sec',
-      'result': [
-        {
-          'metric': {
-
-          },
-          'values': [
-            { 'time': '2017-08-27T11:01:51.462Z', 'value': '0.47619047619047616' },
-            ... per-minute samples between roughly '0.45714285714285713' and '0.4952380952380952' continue ...
-            { 'time': '2017-08-27T13:31:51.462Z', 'value':
'0.47619047619047616' - }, - { - 'time': '2017-08-27T13:32:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T13:33:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T13:34:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T13:35:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T13:36:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T13:37:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T13:38:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T13:39:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T13:40:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T13:41:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T13:42:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T13:43:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T13:44:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T13:45:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T13:46:51.462Z', - 'value': '0.45714285714285713' - }, - { - 'time': '2017-08-27T13:47:51.462Z', - 'value': '0.4666666666666667' - }, - { - 'time': '2017-08-27T13:48:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T13:49:51.462Z', - 'value': '0.4761859410862754' - }, - { - 'time': '2017-08-27T13:50:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T13:51:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T13:52:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T13:53:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T13:54:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T13:55:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T13:56:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T13:57:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T13:58:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T13:59:51.462Z', - 'value': '0.4761859410862754' - }, - { - 'time': '2017-08-27T14:00:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T14:01:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T14:02:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:03:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:04:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:05:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T14:06:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:07:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:08:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:09:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:10:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:11:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:12:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:13:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:14:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:15:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': 
'2017-08-27T14:16:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:17:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T14:18:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:19:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T14:20:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:21:51.462Z', - 'value': '0.4952286623111941' - }, - { - 'time': '2017-08-27T14:22:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:23:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:24:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:25:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T14:26:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:27:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T14:28:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:29:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:30:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T14:31:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:32:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:33:51.462Z', - 'value': '0.45714285714285713' - }, - { - 'time': '2017-08-27T14:34:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:35:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:36:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:37:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:38:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T14:39:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:40:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:41:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:42:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:43:51.462Z', - 'value': '0.4666666666666667' - }, - { - 'time': '2017-08-27T14:44:51.462Z', - 'value': '0.45714285714285713' - }, - { - 'time': '2017-08-27T14:45:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:46:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:47:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:48:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:49:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:50:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:51:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T14:52:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:53:51.462Z', - 'value': '0.4952333787297264' - }, - { - 'time': '2017-08-27T14:54:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:55:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T14:56:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:57:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T14:58:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T14:59:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T15:00:51.462Z', - 'value': 
'0.47619047619047616' - }, - { - 'time': '2017-08-27T15:01:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:02:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:03:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:04:51.462Z', - 'value': '0.45714285714285713' - }, - { - 'time': '2017-08-27T15:05:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:06:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:07:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:08:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:09:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:10:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:11:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:12:51.462Z', - 'value': '0.4857096599080009' - }, - { - 'time': '2017-08-27T15:13:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:14:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:15:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:16:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T15:17:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:18:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:19:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:20:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T15:21:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:22:51.462Z', - 'value': '0.49524281183630325' - }, - { - 'time': '2017-08-27T15:23:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:24:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:25:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:26:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:27:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:28:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:29:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:30:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:31:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T15:32:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:33:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T15:34:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:35:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T15:36:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:37:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:38:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T15:39:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:40:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:41:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:42:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:43:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:44:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': 
'2017-08-27T15:45:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:46:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:47:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:48:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:49:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T15:50:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:51:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:52:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:53:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:54:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:55:51.462Z', - 'value': '0.49524752852435283' - }, - { - 'time': '2017-08-27T15:56:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T15:57:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:58:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T15:59:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:00:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:01:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:02:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:03:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:04:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:05:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:06:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:07:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:08:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:09:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:10:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:11:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:12:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:13:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:14:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:15:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:16:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:17:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:18:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:19:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:20:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:21:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:22:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:23:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:24:51.462Z', - 'value': '0.4761859410862754' - }, - { - 'time': '2017-08-27T16:25:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:26:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:27:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:28:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:29:51.462Z', - 'value': '0.47619047619047616' 
- }, - { - 'time': '2017-08-27T16:30:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:31:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:32:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:33:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:34:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:35:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:36:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:37:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:38:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:39:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:40:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:41:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:42:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:43:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:44:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:45:51.462Z', - 'value': '0.485718911608682' - }, - { - 'time': '2017-08-27T16:46:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:47:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:48:51.462Z', - 'value': '0.4952333787297264' - }, - { - 'time': '2017-08-27T16:49:51.462Z', - 'value': '0.4857096599080009' - }, - { - 'time': '2017-08-27T16:50:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:51:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:52:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:53:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:54:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:55:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T16:56:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:57:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T16:58:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T16:59:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:00:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:01:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:02:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:03:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:04:51.462Z', - 'value': '0.47619501138106085' - }, - { - 'time': '2017-08-27T17:05:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T17:06:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:07:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T17:08:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T17:09:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:10:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:11:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:12:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:13:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:14:51.462Z', - 'value': 
'0.4952380952380952' - }, - { - 'time': '2017-08-27T17:15:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:16:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:17:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:18:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:19:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:20:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:21:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:22:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:23:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:24:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T17:25:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:26:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:27:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:28:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:29:51.462Z', - 'value': '0.4761859410862754' - }, - { - 'time': '2017-08-27T17:30:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:31:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:32:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:33:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:34:51.462Z', - 'value': '0.4761859410862754' - }, - { - 'time': '2017-08-27T17:35:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:36:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T17:37:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:38:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:39:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:40:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:41:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:42:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:43:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:44:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T17:45:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:46:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:47:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:48:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T17:49:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:50:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T17:51:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:52:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:53:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:54:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:55:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T17:56:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T17:57:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T17:58:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': 
'2017-08-27T17:59:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T18:00:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:01:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:02:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:03:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:04:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:05:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:06:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:07:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:08:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:09:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:10:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:11:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:12:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T18:13:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:14:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:15:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:16:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:17:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:18:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:19:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:20:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:21:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:22:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:23:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:24:51.462Z', - 'value': '0.45714285714285713' - }, - { - 'time': '2017-08-27T18:25:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:26:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:27:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:28:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:29:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:30:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:31:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:32:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:33:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:34:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:35:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:36:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:37:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T18:38:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:39:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:40:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:41:51.462Z', - 'value': '0.6190476190476191' - }, - { - 'time': '2017-08-27T18:42:51.462Z', - 'value': '0.6952380952380952' - }, - { - 'time': '2017-08-27T18:43:51.462Z', - 'value': 
'0.857142857142857' - }, - { - 'time': '2017-08-27T18:44:51.462Z', - 'value': '0.9238095238095239' - }, - { - 'time': '2017-08-27T18:45:51.462Z', - 'value': '0.7428571428571429' - }, - { - 'time': '2017-08-27T18:46:51.462Z', - 'value': '0.8857142857142857' - }, - { - 'time': '2017-08-27T18:47:51.462Z', - 'value': '0.638095238095238' - }, - { - 'time': '2017-08-27T18:48:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:49:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:50:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T18:51:51.462Z', - 'value': '0.47619501138106085' - }, - { - 'time': '2017-08-27T18:52:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:53:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:54:51.462Z', - 'value': '0.4952380952380952' - }, - { - 'time': '2017-08-27T18:55:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:56:51.462Z', - 'value': '0.4857142857142857' - }, - { - 'time': '2017-08-27T18:57:51.462Z', - 'value': '0.47619047619047616' - }, - { - 'time': '2017-08-27T18:58:51.462Z', - 'value': '0.6857142857142856' - }, - { - 'time': '2017-08-27T18:59:51.462Z', - 'value': '0.6952380952380952' - }, - { - 'time': '2017-08-27T19:00:51.462Z', - 'value': '0.5238095238095237' - }, - { - 'time': '2017-08-27T19:01:51.462Z', - 'value': '0.5904761904761905' - } - ] - } - ] - } - ] - } + { + title: 'Multiple Time Series', + weight: 1, + y_label: 'Request Rates', + queries: [ + { + query_range: + 'sum(rate(nginx_responses_total{environment="production"}[2m])) by (status_code)', + label: 'Requests', + unit: 'Req/sec', + result: [ + { + metric: { + status_code: '1xx', + }, + values: [ + { + time: '2017-08-27T11:01:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:02:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:03:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:04:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:05:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:06:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:07:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:08:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:09:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:10:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:11:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:12:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:13:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:14:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:15:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:16:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:17:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:18:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:19:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:20:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:21:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:22:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:23:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:24:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:25:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:26:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:27:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:28:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:29:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:30:51.462Z', + value: '0', + }, + { + 
time: '2017-08-27T11:31:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:32:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:33:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:34:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:35:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:36:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:37:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:38:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:39:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:40:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:41:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:42:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:43:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:44:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:45:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:46:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:47:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:48:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:49:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:50:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:51:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:52:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:53:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:54:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:55:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:56:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:57:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:58:51.462Z', + value: '0', + }, + { + time: '2017-08-27T11:59:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:00:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:01:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:02:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:03:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:04:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:05:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:06:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:07:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:08:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:09:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:10:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:11:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:12:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:13:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:14:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:15:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:16:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:17:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:18:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:19:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:20:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:21:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:22:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:23:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:24:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:25:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:26:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:27:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:28:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:29:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:30:51.462Z', + value: '0', + }, + { + time: 
'2017-08-27T12:31:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:32:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:33:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:34:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:35:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:36:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:37:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:38:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:39:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:40:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:41:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:42:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:43:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:44:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:45:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:46:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:47:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:48:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:49:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:50:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:51:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:52:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:53:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:54:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:55:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:56:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:57:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:58:51.462Z', + value: '0', + }, + { + time: '2017-08-27T12:59:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:00:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:01:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:02:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:03:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:04:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:05:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:06:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:07:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:08:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:09:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:10:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:11:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:12:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:13:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:14:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:15:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:16:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:17:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:18:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:19:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:20:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:21:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:22:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:23:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:24:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:25:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:26:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:27:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:28:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:29:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:30:51.462Z', + value: '0', + }, + { + time: 
'2017-08-27T13:31:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:32:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:33:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:34:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:35:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:36:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:37:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:38:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:39:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:40:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:41:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:42:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:43:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:44:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:45:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:46:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:47:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:48:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:49:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:50:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:51:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:52:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:53:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:54:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:55:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:56:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:57:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:58:51.462Z', + value: '0', + }, + { + time: '2017-08-27T13:59:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:00:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:01:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:02:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:03:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:04:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:05:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:06:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:07:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:08:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:09:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:10:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:11:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:12:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:13:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:14:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:15:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:16:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:17:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:18:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:19:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:20:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:21:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:22:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:23:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:24:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:25:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:26:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:27:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:28:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:29:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:30:51.462Z', + value: '0', + }, + { + time: 
'2017-08-27T14:31:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:32:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:33:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:34:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:35:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:36:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:37:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:38:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:39:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:40:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:41:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:42:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:43:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:44:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:45:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:46:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:47:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:48:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:49:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:50:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:51:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:52:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:53:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:54:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:55:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:56:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:57:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:58:51.462Z', + value: '0', + }, + { + time: '2017-08-27T14:59:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:00:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:01:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:02:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:03:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:04:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:05:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:06:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:07:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:08:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:09:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:10:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:11:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:12:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:13:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:14:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:15:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:16:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:17:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:18:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:19:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:20:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:21:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:22:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:23:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:24:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:25:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:26:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:27:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:28:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:29:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:30:51.462Z', + value: '0', + }, + { + time: 
'2017-08-27T15:31:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:32:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:33:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:34:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:35:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:36:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:37:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:38:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:39:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:40:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:41:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:42:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:43:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:44:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:45:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:46:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:47:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:48:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:49:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:50:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:51:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:52:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:53:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:54:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:55:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:56:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:57:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:58:51.462Z', + value: '0', + }, + { + time: '2017-08-27T15:59:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:00:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:01:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:02:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:03:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:04:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:05:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:06:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:07:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:08:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:09:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:10:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:11:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:12:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:13:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:14:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:15:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:16:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:17:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:18:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:19:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:20:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:21:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:22:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:23:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:24:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:25:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:26:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:27:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:28:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:29:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:30:51.462Z', + value: '0', + }, + { + time: 
'2017-08-27T16:31:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:32:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:33:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:34:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:35:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:36:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:37:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:38:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:39:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:40:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:41:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:42:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:43:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:44:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:45:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:46:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:47:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:48:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:49:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:50:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:51:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:52:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:53:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:54:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:55:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:56:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:57:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:58:51.462Z', + value: '0', + }, + { + time: '2017-08-27T16:59:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:00:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:01:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:02:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:03:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:04:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:05:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:06:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:07:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:08:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:09:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:10:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:11:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:12:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:13:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:14:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:15:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:16:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:17:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:18:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:19:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:20:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:21:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:22:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:23:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:24:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:25:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:26:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:27:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:28:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:29:51.462Z', + value: '0', + }, + { + time: '2017-08-27T17:30:51.462Z', + value: '0', + }, + { + time: 
'2017-08-27T17:31:51.462Z', + value: '0', + },
+                  // … one-minute samples, all value '0', continue through 2017-08-27T19:01:51.462Z …
+                ],
+              },
+              {
+                metric: {
+                  status_code: '2xx',
+                },
+                values: [
+                  {
+                    time: '2017-08-27T11:01:51.462Z',
+                    value: '1.295238095238095',
+                  },
+                  // … one-minute samples (mostly between 1.26 and 1.33, with a brief rise to
+                  // ~2.19 around 18:41–18:47) continue through 2017-08-27T19:01:51.462Z …
+                ],
+              },
+            ],
+            when: [
+              {
+                value: 'hundred(s)',
+                color: 'green',
+              },
+            ],
+          },
+        ],
+      },
+      {
+        title: 'Throughput',
+        weight: 1,
+        y_label: 'Requests / Sec',
+        queries: [
+          {
+            query_range:
+              "sum(rate(nginx_requests_total{server_zone!='*', server_zone!='_', container_name!='POD',environment='production'}[2m]))",
+            label: 'Total',
+            unit: 'req / sec',
+            result: [
+              {
+                metric: {},
+                values: [
+                  {
+                    time: '2017-08-27T11:01:51.462Z',
+                    value: '0.47619047619047616',
+                  },
+                  // … one-minute samples (roughly 0.46–0.50 req / sec) continue …
+                  { + time: '2017-08-27T18:04:51.462Z', +
value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:05:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:06:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:07:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:08:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:09:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:10:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:11:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:12:51.462Z', + value: '0.4857142857142857', + }, + { + time: '2017-08-27T18:13:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:14:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:15:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:16:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:17:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:18:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:19:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:20:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:21:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:22:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:23:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:24:51.462Z', + value: '0.45714285714285713', + }, + { + time: '2017-08-27T18:25:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:26:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:27:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:28:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:29:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:30:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:31:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:32:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:33:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:34:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:35:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:36:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:37:51.462Z', + value: '0.4857142857142857', + }, + { + time: '2017-08-27T18:38:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:39:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:40:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:41:51.462Z', + value: '0.6190476190476191', + }, + { + time: '2017-08-27T18:42:51.462Z', + value: '0.6952380952380952', + }, + { + time: '2017-08-27T18:43:51.462Z', + value: '0.857142857142857', + }, + { + time: '2017-08-27T18:44:51.462Z', + value: '0.9238095238095239', + }, + { + time: '2017-08-27T18:45:51.462Z', + value: '0.7428571428571429', + }, + { + time: '2017-08-27T18:46:51.462Z', + value: '0.8857142857142857', + }, + { + time: '2017-08-27T18:47:51.462Z', + value: '0.638095238095238', + }, + { + time: '2017-08-27T18:48:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:49:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:50:51.462Z', + value: '0.4857142857142857', + }, + { + 
time: '2017-08-27T18:51:51.462Z', + value: '0.47619501138106085', + }, + { + time: '2017-08-27T18:52:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:53:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:54:51.462Z', + value: '0.4952380952380952', + }, + { + time: '2017-08-27T18:55:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:56:51.462Z', + value: '0.4857142857142857', + }, + { + time: '2017-08-27T18:57:51.462Z', + value: '0.47619047619047616', + }, + { + time: '2017-08-27T18:58:51.462Z', + value: '0.6857142857142856', + }, + { + time: '2017-08-27T18:59:51.462Z', + value: '0.6952380952380952', + }, + { + time: '2017-08-27T19:00:51.462Z', + value: '0.5238095238095237', + }, + { + time: '2017-08-27T19:01:51.462Z', + value: '0.5904761904761905', + }, + ], + }, + ], + }, + ], + }, ]; export function convertDatesMultipleSeries(multipleSeries) { const convertedMultiple = multipleSeries; multipleSeries.forEach((column, index) => { let convertedResult = []; - convertedResult = column.queries[0].result.map((resultObj) => { + convertedResult = column.queries[0].result.map(resultObj => { const convertedMetrics = {}; convertedMetrics.values = resultObj.values.map(val => ({ - time: new Date(val.time), - value: val.value, + time: new Date(val.time), + value: val.value, })); convertedMetrics.metric = resultObj.metric; return convertedMetrics; diff --git a/spec/javascripts/notes/components/diff_file_header_spec.js b/spec/javascripts/notes/components/diff_file_header_spec.js index aed30a087a6..ef6d513444a 100644 --- a/spec/javascripts/notes/components/diff_file_header_spec.js +++ b/spec/javascripts/notes/components/diff_file_header_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import DiffFileHeader from '~/notes/components/diff_file_header.vue'; import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const discussionFixture = 'merge_requests/diff_discussion.json'; diff --git a/spec/javascripts/notes/components/diff_with_note_spec.js b/spec/javascripts/notes/components/diff_with_note_spec.js index 7f1f4bf0bcd..f4ec7132dbd 100644 --- a/spec/javascripts/notes/components/diff_with_note_spec.js +++ b/spec/javascripts/notes/components/diff_with_note_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import DiffWithNote from '~/notes/components/diff_with_note.vue'; import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const discussionFixture = 'merge_requests/diff_discussion.json'; const imageDiscussionFixture = 'merge_requests/image_diff_discussion.json'; diff --git a/spec/javascripts/notes/components/note_actions_spec.js b/spec/javascripts/notes/components/note_actions_spec.js index ab81aabb992..1dfe890e05e 100644 --- a/spec/javascripts/notes/components/note_actions_spec.js +++ b/spec/javascripts/notes/components/note_actions_spec.js @@ -3,7 +3,7 @@ import store from '~/notes/stores'; import noteActions from '~/notes/components/note_actions.vue'; import { userDataMock } from '../mock_data'; -describe('issse_note_actions component', () => { +describe('issue_note_actions component', () => { let vm; let Component; @@ -24,6 +24,7 @@ describe('issse_note_actions component', () => { authorId: 26, canDelete: true, canEdit: true, 
+ canAwardEmoji: true, canReportAsAbuse: true, noteId: 539, reportAbusePath: '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F7%23note_539&user_id=26', @@ -70,6 +71,7 @@ describe('issse_note_actions component', () => { authorId: 26, canDelete: false, canEdit: false, + canAwardEmoji: false, canReportAsAbuse: false, noteId: 539, reportAbusePath: '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F7%23note_539&user_id=26', diff --git a/spec/javascripts/notes/components/note_app_spec.js b/spec/javascripts/notes/components/note_app_spec.js index ac39418c3e6..0e792eee5e9 100644 --- a/spec/javascripts/notes/components/note_app_spec.js +++ b/spec/javascripts/notes/components/note_app_spec.js @@ -3,7 +3,7 @@ import _ from 'underscore'; import Vue from 'vue'; import notesApp from '~/notes/components/notes_app.vue'; import service from '~/notes/services/notes_service'; -import '~/render_gfm'; +import '~/behaviors/markdown/render_gfm'; import * as mockData from '../mock_data'; const vueMatchers = { diff --git a/spec/javascripts/notes/components/note_awards_list_spec.js b/spec/javascripts/notes/components/note_awards_list_spec.js index 15995ec5a05..1c30d8691b1 100644 --- a/spec/javascripts/notes/components/note_awards_list_spec.js +++ b/spec/javascripts/notes/components/note_awards_list_spec.js @@ -29,6 +29,7 @@ describe('note_awards_list component', () => { awards: awardsMock, noteAuthorId: 2, noteId: 545, + canAwardEmoji: true, toggleAwardPath: '/gitlab-org/gitlab-ce/notes/545/toggle_award_emoji', }, }).$mount(); @@ -43,14 +44,45 @@ describe('note_awards_list component', () => { expect(vm.$el.querySelector('.js-awards-block button [data-name="cartwheel_tone3"]')).toBeDefined(); }); - it('should be possible to remove awareded emoji', () => { + it('should be possible to remove awarded emoji', () => { spyOn(vm, 'handleAward').and.callThrough(); + spyOn(vm, 'toggleAwardRequest').and.callThrough(); vm.$el.querySelector('.js-awards-block button').click(); expect(vm.handleAward).toHaveBeenCalledWith('flag_tz'); + expect(vm.toggleAwardRequest).toHaveBeenCalled(); }); it('should be possible to add new emoji', () => { expect(vm.$el.querySelector('.js-add-award')).toBeDefined(); }); + + describe('when the user cannot award emoji', () => { + beforeEach(() => { + const Component = Vue.extend(awardsNote); + + vm = new Component({ + store, + propsData: { + awards: awardsMock, + noteAuthorId: 2, + noteId: 545, + canAwardEmoji: false, + toggleAwardPath: '/gitlab-org/gitlab-ce/notes/545/toggle_award_emoji', + }, + }).$mount(); + }); + + it('should not be possible to remove awarded emoji', () => { + spyOn(vm, 'toggleAwardRequest').and.callThrough(); + + vm.$el.querySelector('.js-awards-block button').click(); + + expect(vm.toggleAwardRequest).not.toHaveBeenCalled(); + }); + + it('should not be possible to add new emoji', () => { + expect(vm.$el.querySelector('.js-add-award')).toBeNull(); + }); + }); }); diff --git a/spec/javascripts/notes/components/note_body_spec.js b/spec/javascripts/notes/components/note_body_spec.js index 0ff804f0e55..4e551496ff0 100644 --- a/spec/javascripts/notes/components/note_body_spec.js +++ b/spec/javascripts/notes/components/note_body_spec.js @@ -18,6 +18,7 @@ describe('issue_note_body component', () => { propsData: { note, canEdit: true, + canAwardEmoji: true, }, }).$mount(); }); diff --git a/spec/javascripts/notes/components/noteable_discussion_spec.js 
b/spec/javascripts/notes/components/noteable_discussion_spec.js index 19504e4f7c8..cda550760fe 100644 --- a/spec/javascripts/notes/components/noteable_discussion_spec.js +++ b/spec/javascripts/notes/components/noteable_discussion_spec.js @@ -25,26 +25,34 @@ describe('issue_discussion component', () => { }); it('should render user avatar', () => { - expect(vm.$el.querySelector('.user-avatar-link')).toBeDefined(); + expect(vm.$el.querySelector('.user-avatar-link')).not.toBeNull(); }); it('should render discussion header', () => { - expect(vm.$el.querySelector('.discussion-header')).toBeDefined(); + expect(vm.$el.querySelector('.discussion-header')).not.toBeNull(); expect(vm.$el.querySelector('.notes').children.length).toEqual(discussionMock.notes.length); }); describe('actions', () => { it('should render reply button', () => { - expect(vm.$el.querySelector('.js-vue-discussion-reply').textContent.trim()).toEqual('Reply...'); + expect(vm.$el.querySelector('.js-vue-discussion-reply').textContent.trim()).toEqual( + 'Reply...', + ); }); - it('should toggle reply form', (done) => { + it('should toggle reply form', done => { vm.$el.querySelector('.js-vue-discussion-reply').click(); Vue.nextTick(() => { - expect(vm.$refs.noteForm).toBeDefined(); + expect(vm.$refs.noteForm).not.toBeNull(); expect(vm.isReplying).toEqual(true); done(); }); }); + + it('does not render jump to discussion button', () => { + expect( + vm.$el.querySelector('*[data-original-title="Jump to next unresolved discussion"]'), + ).toBeNull(); + }); }); }); diff --git a/spec/javascripts/notes/mock_data.js b/spec/javascripts/notes/mock_data.js index 5be13ed0dfe..bfe3a65feee 100644 --- a/spec/javascripts/notes/mock_data.js +++ b/spec/javascripts/notes/mock_data.js @@ -1,4 +1,3 @@ -/* eslint-disable */ export const notesDataMock = { discussionsPath: '/gitlab-org/gitlab-ce/issues/26/discussions.json', lastFetchedAt: 1501862675, @@ -10,6 +9,7 @@ export const notesDataMock = { totalNotes: 1, closePath: '/twitter/flight/issues/9.json?issue%5Bstate_event%5D=close', reopenPath: '/twitter/flight/issues/9.json?issue%5Bstate_event%5D=reopen', + canAwardEmoji: true, }; export const userDataMock = { @@ -31,6 +31,7 @@ export const noteableDataMock = { current_user: { can_create_note: true, can_update: true, + can_award_emoji: true, }, description: '', due_date: null, @@ -43,7 +44,8 @@ export const noteableDataMock = { milestone: null, milestone_id: null, moved_to_id: null, - preview_note_path: '/gitlab-org/gitlab-ce/preview_markdown?quick_actions_target_id=98&quick_actions_target_type=Issue', + preview_note_path: + '/gitlab-org/gitlab-ce/preview_markdown?quick_actions_target_id=98&quick_actions_target_type=Issue', project_id: 2, state: 'opened', time_estimate: 0, @@ -52,6 +54,7 @@ export const noteableDataMock = { updated_at: '2017-08-04T09:53:01.226Z', updated_by_id: 1, web_url: '/gitlab-org/gitlab-ce/issues/26', + noteableType: 'issue', }; export const lastFetchedAt = '1501862675'; @@ -60,465 +63,515 @@ export const individualNote = { expanded: true, id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd', individual_note: true, - notes: [{ - id: 1390, - attachment: { - url: null, - filename: null, - image: false, - }, - author: { - id: 1, - name: 'Root', - username: 'root', - state: 'active', - avatar_url: 'test', - path: '/root', + notes: [ + { + id: 1390, + attachment: { + url: null, + filename: null, + image: false, + }, + author: { + id: 1, + name: 'Root', + username: 'root', + state: 'active', + avatar_url: 'test', + path: '/root', + }, + 
created_at: '2017-08-01T17: 09: 33.762Z', + updated_at: '2017-08-01T17: 09: 33.762Z', + system: false, + noteable_id: 98, + noteable_type: 'Issue', + type: null, + human_access: 'Owner', + note: 'sdfdsaf', + note_html: "<p dir='auto'>sdfdsaf</p>", + current_user: { + can_edit: true, + can_award_emoji: true, + }, + discussion_id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd', + emoji_awardable: true, + award_emoji: [ + { name: 'baseball', user: { id: 1, name: 'Root', username: 'root' } }, + { name: 'art', user: { id: 1, name: 'Root', username: 'root' } }, + ], + toggle_award_path: '/gitlab-org/gitlab-ce/notes/1390/toggle_award_emoji', + report_abuse_path: + '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1390&user_id=1', + path: '/gitlab-org/gitlab-ce/notes/1390', }, - created_at: '2017-08-01T17: 09: 33.762Z', - updated_at: '2017-08-01T17: 09: 33.762Z', - system: false, - noteable_id: 98, - noteable_type: 'Issue', - type: null, - human_access: 'Owner', - note: 'sdfdsaf', - note_html: '<p dir=\'auto\'>sdfdsaf</p>', - current_user: { can_edit: true }, - discussion_id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd', - emoji_awardable: true, - award_emoji: [ - { name: 'baseball', user: { id: 1, name: 'Root', username: 'root' } }, - { name: 'art', user: { id: 1, name: 'Root', username: 'root' } }, - ], - toggle_award_path: '/gitlab-org/gitlab-ce/notes/1390/toggle_award_emoji', - report_abuse_path: '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1390&user_id=1', - path: '/gitlab-org/gitlab-ce/notes/1390', - }], + ], reply_id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd', }; export const note = { - "id": 546, - "attachment": { - "url": null, - "filename": null, - "image": false + id: 546, + attachment: { + url: null, + filename: null, + image: false, }, - "author": { - "id": 1, - "name": "Administrator", - "username": "root", - "state": "active", - "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", - "path": "/root" + author: { + id: 1, + name: 'Administrator', + username: 'root', + state: 'active', + avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + path: '/root', }, - "created_at": "2017-08-10T15:24:03.087Z", - "updated_at": "2017-08-10T15:24:03.087Z", - "system": false, - "noteable_id": 67, - "noteable_type": "Issue", - "noteable_iid": 7, - "type": null, - "human_access": "Owner", - "note": "Vel id placeat reprehenderit sit numquam.", - "note_html": "<p dir=\"auto\">Vel id placeat reprehenderit sit numquam.</p>", - "current_user": { - "can_edit": true + created_at: '2017-08-10T15:24:03.087Z', + updated_at: '2017-08-10T15:24:03.087Z', + system: false, + noteable_id: 67, + noteable_type: 'Issue', + noteable_iid: 7, + type: null, + human_access: 'Owner', + note: 'Vel id placeat reprehenderit sit numquam.', + note_html: '<p dir="auto">Vel id placeat reprehenderit sit numquam.</p>', + current_user: { + can_edit: true, + can_award_emoji: true, }, - "discussion_id": "d3842a451b7f3d9a5dfce329515127b2d29a4cd0", - "emoji_awardable": true, - "award_emoji": [{ - "name": "baseball", - "user": { - "id": 1, - "name": "Administrator", - "username": "root" - } - }, { - "name": "bath_tone3", - "user": { - "id": 1, - "name": "Administrator", - "username": "root" - } - }], - "toggle_award_path": "/gitlab-org/gitlab-ce/notes/546/toggle_award_emoji", - "report_abuse_path": 
"/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F7%23note_546&user_id=1", - "path": "/gitlab-org/gitlab-ce/notes/546" - } + discussion_id: 'd3842a451b7f3d9a5dfce329515127b2d29a4cd0', + emoji_awardable: true, + award_emoji: [ + { + name: 'baseball', + user: { + id: 1, + name: 'Administrator', + username: 'root', + }, + }, + { + name: 'bath_tone3', + user: { + id: 1, + name: 'Administrator', + username: 'root', + }, + }, + ], + toggle_award_path: '/gitlab-org/gitlab-ce/notes/546/toggle_award_emoji', + report_abuse_path: + '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F7%23note_546&user_id=1', + path: '/gitlab-org/gitlab-ce/notes/546', +}; export const discussionMock = { id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1', reply_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1', expanded: true, - notes: [{ - id: 1395, - attachment: { - url: null, - filename: null, - image: false, - }, - author: { - id: 1, - name: 'Root', - username: 'root', - state: 'active', - avatar_url: null, - path: '/root', - }, - created_at: '2017-08-02T10:51:58.559Z', - updated_at: '2017-08-02T10:51:58.559Z', - system: false, - noteable_id: 98, - noteable_type: 'Issue', - type: 'DiscussionNote', - human_access: 'Owner', - note: 'THIS IS A DICUSSSION!', - note_html: '<p dir=\'auto\'>THIS IS A DICUSSSION!</p>', - current_user: { - can_edit: true, - }, - discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1', - emoji_awardable: true, - award_emoji: [], - toggle_award_path: '/gitlab-org/gitlab-ce/notes/1395/toggle_award_emoji', - report_abuse_path: '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1395&user_id=1', - path: '/gitlab-org/gitlab-ce/notes/1395', - }, { - id: 1396, - attachment: { - url: null, - filename: null, - image: false, - }, - author: { - id: 1, - name: 'Root', - username: 'root', - state: 'active', - avatar_url: null, - path: '/root', - }, - created_at: '2017-08-02T10:56:50.980Z', - updated_at: '2017-08-03T14:19:35.691Z', - system: false, - noteable_id: 98, - noteable_type: 'Issue', - type: 'DiscussionNote', - human_access: 'Owner', - note: 'sadfasdsdgdsf', - note_html: '<p dir=\'auto\'>sadfasdsdgdsf</p>', - last_edited_at: '2017-08-03T14:19:35.691Z', - last_edited_by: { - id: 1, - name: 'Root', - username: 'root', - state: 'active', - avatar_url: null, - path: '/root', - }, - current_user: { - can_edit: true, + notes: [ + { + id: 1395, + attachment: { + url: null, + filename: null, + image: false, + }, + author: { + id: 1, + name: 'Root', + username: 'root', + state: 'active', + avatar_url: null, + path: '/root', + }, + created_at: '2017-08-02T10:51:58.559Z', + updated_at: '2017-08-02T10:51:58.559Z', + system: false, + noteable_id: 98, + noteable_type: 'Issue', + type: 'DiscussionNote', + human_access: 'Owner', + note: 'THIS IS A DICUSSSION!', + note_html: "<p dir='auto'>THIS IS A DICUSSSION!</p>", + current_user: { + can_edit: true, + can_award_emoji: true, + }, + discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1', + emoji_awardable: true, + award_emoji: [], + toggle_award_path: '/gitlab-org/gitlab-ce/notes/1395/toggle_award_emoji', + report_abuse_path: + '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1395&user_id=1', + path: '/gitlab-org/gitlab-ce/notes/1395', }, - discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1', - emoji_awardable: true, - award_emoji: [], - toggle_award_path: 
'/gitlab-org/gitlab-ce/notes/1396/toggle_award_emoji', - report_abuse_path: '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1396&user_id=1', - path: '/gitlab-org/gitlab-ce/notes/1396', - }, { - id: 1437, - attachment: { - url: null, - filename: null, - image: false, + { + id: 1396, + attachment: { + url: null, + filename: null, + image: false, + }, + author: { + id: 1, + name: 'Root', + username: 'root', + state: 'active', + avatar_url: null, + path: '/root', + }, + created_at: '2017-08-02T10:56:50.980Z', + updated_at: '2017-08-03T14:19:35.691Z', + system: false, + noteable_id: 98, + noteable_type: 'Issue', + type: 'DiscussionNote', + human_access: 'Owner', + note: 'sadfasdsdgdsf', + note_html: "<p dir='auto'>sadfasdsdgdsf</p>", + last_edited_at: '2017-08-03T14:19:35.691Z', + last_edited_by: { + id: 1, + name: 'Root', + username: 'root', + state: 'active', + avatar_url: null, + path: '/root', + }, + current_user: { + can_edit: true, + can_award_emoji: true, + }, + discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1', + emoji_awardable: true, + award_emoji: [], + toggle_award_path: '/gitlab-org/gitlab-ce/notes/1396/toggle_award_emoji', + report_abuse_path: + '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1396&user_id=1', + path: '/gitlab-org/gitlab-ce/notes/1396', }, - author: { - id: 1, - name: 'Root', - username: 'root', - state: 'active', - avatar_url: null, - path: '/root', + { + id: 1437, + attachment: { + url: null, + filename: null, + image: false, + }, + author: { + id: 1, + name: 'Root', + username: 'root', + state: 'active', + avatar_url: null, + path: '/root', + }, + created_at: '2017-08-03T18:11:18.780Z', + updated_at: '2017-08-04T09:52:31.062Z', + system: false, + noteable_id: 98, + noteable_type: 'Issue', + type: 'DiscussionNote', + human_access: 'Owner', + note: 'adsfasf Should disappear', + note_html: "<p dir='auto'>adsfasf Should disappear</p>", + last_edited_at: '2017-08-04T09:52:31.062Z', + last_edited_by: { + id: 1, + name: 'Root', + username: 'root', + state: 'active', + avatar_url: null, + path: '/root', + }, + current_user: { + can_edit: true, + can_award_emoji: true, + }, + discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1', + emoji_awardable: true, + award_emoji: [], + toggle_award_path: '/gitlab-org/gitlab-ce/notes/1437/toggle_award_emoji', + report_abuse_path: + '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1437&user_id=1', + path: '/gitlab-org/gitlab-ce/notes/1437', }, - created_at: '2017-08-03T18:11:18.780Z', - updated_at: '2017-08-04T09:52:31.062Z', - system: false, - noteable_id: 98, - noteable_type: 'Issue', - type: 'DiscussionNote', - human_access: 'Owner', - note: 'adsfasf Should disappear', - note_html: '<p dir=\'auto\'>adsfasf Should disappear</p>', - last_edited_at: '2017-08-04T09:52:31.062Z', - last_edited_by: { + ], + individual_note: false, +}; + +export const loggedOutnoteableData = { + id: 98, + iid: 26, + author_id: 1, + description: '', + lock_version: 1, + milestone_id: null, + state: 'opened', + title: 'asdsa', + updated_by_id: 1, + created_at: '2017-02-07T10:11:18.395Z', + updated_at: '2017-08-08T10:22:51.564Z', + time_estimate: 0, + total_time_spent: 0, + human_time_estimate: null, + human_total_time_spent: null, + milestone: null, + labels: [], + branch_name: null, + confidential: false, + assignees: [ + { id: 1, name: 'Root', username: 'root', state: 'active', 
avatar_url: null, - path: '/root', + web_url: 'http://localhost:3000/root', }, - current_user: { - can_edit: true, - }, - discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1', - emoji_awardable: true, - award_emoji: [], - toggle_award_path: '/gitlab-org/gitlab-ce/notes/1437/toggle_award_emoji', - report_abuse_path: '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1437&user_id=1', - path: '/gitlab-org/gitlab-ce/notes/1437', - }], - individual_note: false, -}; - -export const loggedOutnoteableData = { - "id": 98, - "iid": 26, - "author_id": 1, - "description": "", - "lock_version": 1, - "milestone_id": null, - "state": "opened", - "title": "asdsa", - "updated_by_id": 1, - "created_at": "2017-02-07T10:11:18.395Z", - "updated_at": "2017-08-08T10:22:51.564Z", - "time_estimate": 0, - "total_time_spent": 0, - "human_time_estimate": null, - "human_total_time_spent": null, - "milestone": null, - "labels": [], - "branch_name": null, - "confidential": false, - "assignees": [{ - "id": 1, - "name": "Root", - "username": "root", - "state": "active", - "avatar_url": null, - "web_url": "http://localhost:3000/root" - }], - "due_date": null, - "moved_to_id": null, - "project_id": 2, - "web_url": "/gitlab-org/gitlab-ce/issues/26", - "current_user": { - "can_create_note": false, - "can_update": false + ], + due_date: null, + moved_to_id: null, + project_id: 2, + web_url: '/gitlab-org/gitlab-ce/issues/26', + current_user: { + can_create_note: false, + can_update: false, }, - "create_note_path": "/gitlab-org/gitlab-ce/notes?target_id=98&target_type=issue", - "preview_note_path": "/gitlab-org/gitlab-ce/preview_markdown?quick_actions_target_id=98&quick_actions_target_type=Issue" -} + create_note_path: '/gitlab-org/gitlab-ce/notes?target_id=98&target_type=issue', + preview_note_path: + '/gitlab-org/gitlab-ce/preview_markdown?quick_actions_target_id=98&quick_actions_target_type=Issue', +}; export const INDIVIDUAL_NOTE_RESPONSE_MAP = { - 'GET': { - '/gitlab-org/gitlab-ce/issues/26/discussions.json': [{ - "id": "0fb4e0e3f9276e55ff32eb4195add694aece4edd", - "reply_id": "0fb4e0e3f9276e55ff32eb4195add694aece4edd", - "expanded": true, - "notes": [{ - "id": 1390, - "attachment": { - "url": null, - "filename": null, - "image": false - }, - "author": { - "id": 1, - "name": "Root", - "username": "root", - "state": "active", - "avatar_url": null, - "path": "/root" - }, - "created_at": "2017-08-01T17:09:33.762Z", - "updated_at": "2017-08-01T17:09:33.762Z", - "system": false, - "noteable_id": 98, - "noteable_type": "Issue", - "type": null, - "human_access": "Owner", - "note": "sdfdsaf", - "note_html": "\u003cp dir=\"auto\"\u003esdfdsaf\u003c/p\u003e", - "current_user": { - "can_edit": true - }, - "discussion_id": "0fb4e0e3f9276e55ff32eb4195add694aece4edd", - "emoji_awardable": true, - "award_emoji": [{ - "name": "baseball", - "user": { - "id": 1, - "name": "Root", - "username": "root" - } - }, { - "name": "art", - "user": { - "id": 1, - "name": "Root", - "username": "root" - } - }], - "toggle_award_path": "/gitlab-org/gitlab-ce/notes/1390/toggle_award_emoji", - "report_abuse_path": "/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1390\u0026user_id=1", - "path": "/gitlab-org/gitlab-ce/notes/1390" - }], - "individual_note": true - }, { - "id": "70d5c92a4039a36c70100c6691c18c27e4b0a790", - "reply_id": "70d5c92a4039a36c70100c6691c18c27e4b0a790", - "expanded": true, - "notes": [{ - "id": 1391, - "attachment": { - "url": 
null, - "filename": null, - "image": false - }, - "author": { - "id": 1, - "name": "Root", - "username": "root", - "state": "active", - "avatar_url": null, - "path": "/root" - }, - "created_at": "2017-08-02T10:51:38.685Z", - "updated_at": "2017-08-02T10:51:38.685Z", - "system": false, - "noteable_id": 98, - "noteable_type": "Issue", - "type": null, - "human_access": "Owner", - "note": "New note!", - "note_html": "\u003cp dir=\"auto\"\u003eNew note!\u003c/p\u003e", - "current_user": { - "can_edit": true - }, - "discussion_id": "70d5c92a4039a36c70100c6691c18c27e4b0a790", - "emoji_awardable": true, - "award_emoji": [], - "toggle_award_path": "/gitlab-org/gitlab-ce/notes/1391/toggle_award_emoji", - "report_abuse_path": "/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1391\u0026user_id=1", - "path": "/gitlab-org/gitlab-ce/notes/1391" - }], - "individual_note": true - }], + GET: { + '/gitlab-org/gitlab-ce/issues/26/discussions.json': [ + { + id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd', + reply_id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd', + expanded: true, + notes: [ + { + id: 1390, + attachment: { + url: null, + filename: null, + image: false, + }, + author: { + id: 1, + name: 'Root', + username: 'root', + state: 'active', + avatar_url: null, + path: '/root', + }, + created_at: '2017-08-01T17:09:33.762Z', + updated_at: '2017-08-01T17:09:33.762Z', + system: false, + noteable_id: 98, + noteable_type: 'Issue', + type: null, + human_access: 'Owner', + note: 'sdfdsaf', + note_html: '\u003cp dir="auto"\u003esdfdsaf\u003c/p\u003e', + current_user: { + can_edit: true, + can_award_emoji: true, + }, + discussion_id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd', + emoji_awardable: true, + award_emoji: [ + { + name: 'baseball', + user: { + id: 1, + name: 'Root', + username: 'root', + }, + }, + { + name: 'art', + user: { + id: 1, + name: 'Root', + username: 'root', + }, + }, + ], + toggle_award_path: '/gitlab-org/gitlab-ce/notes/1390/toggle_award_emoji', + report_abuse_path: + '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1390\u0026user_id=1', + path: '/gitlab-org/gitlab-ce/notes/1390', + }, + ], + individual_note: true, + }, + { + id: '70d5c92a4039a36c70100c6691c18c27e4b0a790', + reply_id: '70d5c92a4039a36c70100c6691c18c27e4b0a790', + expanded: true, + notes: [ + { + id: 1391, + attachment: { + url: null, + filename: null, + image: false, + }, + author: { + id: 1, + name: 'Root', + username: 'root', + state: 'active', + avatar_url: null, + path: '/root', + }, + created_at: '2017-08-02T10:51:38.685Z', + updated_at: '2017-08-02T10:51:38.685Z', + system: false, + noteable_id: 98, + noteable_type: 'Issue', + type: null, + human_access: 'Owner', + note: 'New note!', + note_html: '\u003cp dir="auto"\u003eNew note!\u003c/p\u003e', + current_user: { + can_edit: true, + can_award_emoji: true, + }, + discussion_id: '70d5c92a4039a36c70100c6691c18c27e4b0a790', + emoji_awardable: true, + award_emoji: [], + toggle_award_path: '/gitlab-org/gitlab-ce/notes/1391/toggle_award_emoji', + report_abuse_path: + '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1391\u0026user_id=1', + path: '/gitlab-org/gitlab-ce/notes/1391', + }, + ], + individual_note: true, + }, + ], '/gitlab-org/gitlab-ce/noteable/issue/98/notes': { last_fetched_at: 1512900838, notes: [], }, }, - 'PUT': { + PUT: { '/gitlab-org/gitlab-ce/notes/1471': { - "commands_changes": null, - "valid": 
true, - "id": 1471, - "attachment": null, - "author": { - "id": 1, - "name": "Root", - "username": "root", - "state": "active", - "avatar_url": null, - "path": "/root" + commands_changes: null, + valid: true, + id: 1471, + attachment: null, + author: { + id: 1, + name: 'Root', + username: 'root', + state: 'active', + avatar_url: null, + path: '/root', }, - "created_at": "2017-08-08T16:53:00.666Z", - "updated_at": "2017-12-10T11:03:21.876Z", - "system": false, - "noteable_id": 124, - "noteable_type": "Issue", - "noteable_iid": 29, - "type": "DiscussionNote", - "human_access": "Owner", - "note": "Adding a comment", - "note_html": "\u003cp dir=\"auto\"\u003eAdding a comment\u003c/p\u003e", - "last_edited_at": "2017-12-10T11:03:21.876Z", - "last_edited_by": { - "id": 1, - "name": 'Root', - "username": 'root', - "state": 'active', - "avatar_url": null, - "path": '/root', + created_at: '2017-08-08T16:53:00.666Z', + updated_at: '2017-12-10T11:03:21.876Z', + system: false, + noteable_id: 124, + noteable_type: 'Issue', + noteable_iid: 29, + type: 'DiscussionNote', + human_access: 'Owner', + note: 'Adding a comment', + note_html: '\u003cp dir="auto"\u003eAdding a comment\u003c/p\u003e', + last_edited_at: '2017-12-10T11:03:21.876Z', + last_edited_by: { + id: 1, + name: 'Root', + username: 'root', + state: 'active', + avatar_url: null, + path: '/root', }, - "current_user": { - "can_edit": true + current_user: { + can_edit: true, + can_award_emoji: true, }, - "discussion_id": "a3ed36e29b1957efb3b68c53e2d7a2b24b1df052", - "emoji_awardable": true, - "award_emoji": [], - "toggle_award_path": "/gitlab-org/gitlab-ce/notes/1471/toggle_award_emoji", - "report_abuse_path": "/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F29%23note_1471\u0026user_id=1", - "path": "/gitlab-org/gitlab-ce/notes/1471" + discussion_id: 'a3ed36e29b1957efb3b68c53e2d7a2b24b1df052', + emoji_awardable: true, + award_emoji: [], + toggle_award_path: '/gitlab-org/gitlab-ce/notes/1471/toggle_award_emoji', + report_abuse_path: + '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F29%23note_1471\u0026user_id=1', + path: '/gitlab-org/gitlab-ce/notes/1471', }, - } + }, }; export const DISCUSSION_NOTE_RESPONSE_MAP = { ...INDIVIDUAL_NOTE_RESPONSE_MAP, - 'GET': { + GET: { ...INDIVIDUAL_NOTE_RESPONSE_MAP.GET, - '/gitlab-org/gitlab-ce/issues/26/discussions.json': [{ - "id": "a3ed36e29b1957efb3b68c53e2d7a2b24b1df052", - "reply_id": "a3ed36e29b1957efb3b68c53e2d7a2b24b1df052", - "expanded": true, - "notes": [{ - "id": 1471, - "attachment": { - "url": null, - "filename": null, - "image": false - }, - "author": { - "id": 1, - "name": "Root", - "username": "root", - "state": "active", - "avatar_url": null, - "path": "/root" - }, - "created_at": "2017-08-08T16:53:00.666Z", - "updated_at": "2017-08-08T16:53:00.666Z", - "system": false, - "noteable_id": 124, - "noteable_type": "Issue", - "noteable_iid": 29, - "type": "DiscussionNote", - "human_access": "Owner", - "note": "Adding a comment", - "note_html": "\u003cp dir=\"auto\"\u003eAdding a comment\u003c/p\u003e", - "current_user": { - "can_edit": true - }, - "discussion_id": "a3ed36e29b1957efb3b68c53e2d7a2b24b1df052", - "emoji_awardable": true, - "award_emoji": [], - "toggle_award_path": "/gitlab-org/gitlab-ce/notes/1471/toggle_award_emoji", - "report_abuse_path": "/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F29%23note_1471\u0026user_id=1", - "path": 
"/gitlab-org/gitlab-ce/notes/1471" - }], - "individual_note": false - }], + '/gitlab-org/gitlab-ce/issues/26/discussions.json': [ + { + id: 'a3ed36e29b1957efb3b68c53e2d7a2b24b1df052', + reply_id: 'a3ed36e29b1957efb3b68c53e2d7a2b24b1df052', + expanded: true, + notes: [ + { + id: 1471, + attachment: { + url: null, + filename: null, + image: false, + }, + author: { + id: 1, + name: 'Root', + username: 'root', + state: 'active', + avatar_url: null, + path: '/root', + }, + created_at: '2017-08-08T16:53:00.666Z', + updated_at: '2017-08-08T16:53:00.666Z', + system: false, + noteable_id: 124, + noteable_type: 'Issue', + noteable_iid: 29, + type: 'DiscussionNote', + human_access: 'Owner', + note: 'Adding a comment', + note_html: '\u003cp dir="auto"\u003eAdding a comment\u003c/p\u003e', + current_user: { + can_edit: true, + can_award_emoji: true, + }, + discussion_id: 'a3ed36e29b1957efb3b68c53e2d7a2b24b1df052', + emoji_awardable: true, + award_emoji: [], + toggle_award_path: '/gitlab-org/gitlab-ce/notes/1471/toggle_award_emoji', + report_abuse_path: + '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F29%23note_1471\u0026user_id=1', + path: '/gitlab-org/gitlab-ce/notes/1471', + }, + ], + individual_note: false, + }, + ], }, }; export function individualNoteInterceptor(request, next) { const body = INDIVIDUAL_NOTE_RESPONSE_MAP[request.method.toUpperCase()][request.url]; - next(request.respondWith(JSON.stringify(body), { - status: 200, - })); + next( + request.respondWith(JSON.stringify(body), { + status: 200, + }), + ); } export function discussionNoteInterceptor(request, next) { const body = DISCUSSION_NOTE_RESPONSE_MAP[request.method.toUpperCase()][request.url]; - next(request.respondWith(JSON.stringify(body), { - status: 200, - })); + next( + request.respondWith(JSON.stringify(body), { + status: 200, + }), + ); } diff --git a/spec/javascripts/notes/stores/actions_spec.js b/spec/javascripts/notes/stores/actions_spec.js index 91249b2c79e..520a25cc5c6 100644 --- a/spec/javascripts/notes/stores/actions_spec.js +++ b/spec/javascripts/notes/stores/actions_spec.js @@ -5,7 +5,13 @@ import * as actions from '~/notes/stores/actions'; import store from '~/notes/stores'; import testAction from '../../helpers/vuex_action_helper'; import { resetStore } from '../helpers'; -import { discussionMock, notesDataMock, userDataMock, noteableDataMock, individualNote } from '../mock_data'; +import { + discussionMock, + notesDataMock, + userDataMock, + noteableDataMock, + individualNote, +} from '../mock_data'; describe('Actions Notes Store', () => { afterEach(() => { @@ -13,66 +19,103 @@ describe('Actions Notes Store', () => { }); describe('setNotesData', () => { - it('should set received notes data', (done) => { - testAction(actions.setNotesData, null, { notesData: {} }, [ - { type: 'SET_NOTES_DATA', payload: notesDataMock }, - ], done); + it('should set received notes data', done => { + testAction( + actions.setNotesData, + notesDataMock, + { notesData: {} }, + [{ type: 'SET_NOTES_DATA', payload: notesDataMock }], + [], + done, + ); }); }); describe('setNoteableData', () => { - it('should set received issue data', (done) => { - testAction(actions.setNoteableData, null, { noteableData: {} }, [ - { type: 'SET_NOTEABLE_DATA', payload: noteableDataMock }, - ], done); + it('should set received issue data', done => { + testAction( + actions.setNoteableData, + noteableDataMock, + { noteableData: {} }, + [{ type: 'SET_NOTEABLE_DATA', payload: noteableDataMock }], + [], + done, + ); 
}); }); describe('setUserData', () => { - it('should set received user data', (done) => { - testAction(actions.setUserData, null, { userData: {} }, [ - { type: 'SET_USER_DATA', payload: userDataMock }, - ], done); + it('should set received user data', done => { + testAction( + actions.setUserData, + userDataMock, + { userData: {} }, + [{ type: 'SET_USER_DATA', payload: userDataMock }], + [], + done, + ); }); }); describe('setLastFetchedAt', () => { - it('should set received timestamp', (done) => { - testAction(actions.setLastFetchedAt, null, { lastFetchedAt: {} }, [ - { type: 'SET_LAST_FETCHED_AT', payload: 'timestamp' }, - ], done); + it('should set received timestamp', done => { + testAction( + actions.setLastFetchedAt, + 'timestamp', + { lastFetchedAt: {} }, + [{ type: 'SET_LAST_FETCHED_AT', payload: 'timestamp' }], + [], + done, + ); }); }); describe('setInitialNotes', () => { - it('should set initial notes', (done) => { - testAction(actions.setInitialNotes, null, { notes: [] }, [ - { type: 'SET_INITIAL_NOTES', payload: [individualNote] }, - ], done); + it('should set initial notes', done => { + testAction( + actions.setInitialNotes, + [individualNote], + { notes: [] }, + [{ type: 'SET_INITIAL_NOTES', payload: [individualNote] }], + [], + done, + ); }); }); describe('setTargetNoteHash', () => { - it('should set target note hash', (done) => { - testAction(actions.setTargetNoteHash, null, { notes: [] }, [ - { type: 'SET_TARGET_NOTE_HASH', payload: 'hash' }, - ], done); + it('should set target note hash', done => { + testAction( + actions.setTargetNoteHash, + 'hash', + { notes: [] }, + [{ type: 'SET_TARGET_NOTE_HASH', payload: 'hash' }], + [], + done, + ); }); }); describe('toggleDiscussion', () => { - it('should toggle discussion', (done) => { - testAction(actions.toggleDiscussion, null, { notes: [discussionMock] }, [ - { type: 'TOGGLE_DISCUSSION', payload: { discussionId: discussionMock.id } }, - ], done); + it('should toggle discussion', done => { + testAction( + actions.toggleDiscussion, + { discussionId: discussionMock.id }, + { notes: [discussionMock] }, + [{ type: 'TOGGLE_DISCUSSION', payload: { discussionId: discussionMock.id } }], + [], + done, + ); }); }); describe('async methods', () => { const interceptor = (request, next) => { - next(request.respondWith(JSON.stringify({}), { - status: 200, - })); + next( + request.respondWith(JSON.stringify({}), { + status: 200, + }), + ); }; beforeEach(() => { @@ -84,8 +127,9 @@ describe('Actions Notes Store', () => { }); describe('closeIssue', () => { - it('sets state as closed', (done) => { - store.dispatch('closeIssue', { notesData: { closeIssuePath: '' } }) + it('sets state as closed', done => { + store + .dispatch('closeIssue', { notesData: { closeIssuePath: '' } }) .then(() => { expect(store.state.noteableData.state).toEqual('closed'); expect(store.state.isToggleStateButtonLoading).toEqual(false); @@ -96,8 +140,9 @@ describe('Actions Notes Store', () => { }); describe('reopenIssue', () => { - it('sets state as reopened', (done) => { - store.dispatch('reopenIssue', { notesData: { reopenIssuePath: '' } }) + it('sets state as reopened', done => { + store + .dispatch('reopenIssue', { notesData: { reopenIssuePath: '' } }) .then(() => { expect(store.state.noteableData.state).toEqual('reopened'); expect(store.state.isToggleStateButtonLoading).toEqual(false); @@ -110,7 +155,7 @@ describe('Actions Notes Store', () => { describe('emitStateChangedEvent', () => { it('emits an event on the document', () => { - 
document.addEventListener('issuable_vue_app:change', (event) => { + document.addEventListener('issuable_vue_app:change', event => { expect(event.detail.data).toEqual({ id: '1', state: 'closed' }); expect(event.detail.isClosed).toEqual(false); }); @@ -120,40 +165,47 @@ describe('Actions Notes Store', () => { }); describe('toggleStateButtonLoading', () => { - it('should set loading as true', (done) => { - testAction(actions.toggleStateButtonLoading, true, {}, [ - { type: 'TOGGLE_STATE_BUTTON_LOADING', payload: true }, - ], done); + it('should set loading as true', done => { + testAction( + actions.toggleStateButtonLoading, + true, + {}, + [{ type: 'TOGGLE_STATE_BUTTON_LOADING', payload: true }], + [], + done, + ); }); - it('should set loading as false', (done) => { - testAction(actions.toggleStateButtonLoading, false, {}, [ - { type: 'TOGGLE_STATE_BUTTON_LOADING', payload: false }, - ], done); + it('should set loading as false', done => { + testAction( + actions.toggleStateButtonLoading, + false, + {}, + [{ type: 'TOGGLE_STATE_BUTTON_LOADING', payload: false }], + [], + done, + ); }); }); describe('toggleIssueLocalState', () => { - it('sets issue state as closed', (done) => { - testAction(actions.toggleIssueLocalState, 'closed', {}, [ - { type: 'CLOSE_ISSUE', payload: 'closed' }, - ], done); + it('sets issue state as closed', done => { + testAction(actions.toggleIssueLocalState, 'closed', {}, [{ type: 'CLOSE_ISSUE' }], [], done); }); - it('sets issue state as reopened', (done) => { - testAction(actions.toggleIssueLocalState, 'reopened', {}, [ - { type: 'REOPEN_ISSUE', payload: 'reopened' }, - ], done); + it('sets issue state as reopened', done => { + testAction(actions.toggleIssueLocalState, 'reopened', {}, [{ type: 'REOPEN_ISSUE' }], [], done); }); }); describe('poll', () => { - beforeEach((done) => { + beforeEach(done => { jasmine.clock().install(); spyOn(Vue.http, 'get').and.callThrough(); - store.dispatch('setNotesData', notesDataMock) + store + .dispatch('setNotesData', notesDataMock) .then(done) .catch(done.fail); }); @@ -162,23 +214,29 @@ describe('Actions Notes Store', () => { jasmine.clock().uninstall(); }); - it('calls service with last fetched state', (done) => { + it('calls service with last fetched state', done => { const interceptor = (request, next) => { - next(request.respondWith(JSON.stringify({ - notes: [], - last_fetched_at: '123456', - }), { - status: 200, - headers: { - 'poll-interval': '1000', - }, - })); + next( + request.respondWith( + JSON.stringify({ + notes: [], + last_fetched_at: '123456', + }), + { + status: 200, + headers: { + 'poll-interval': '1000', + }, + }, + ), + ); }; Vue.http.interceptors.push(interceptor); Vue.http.interceptors.push(headersInterceptor); - store.dispatch('poll') + store + .dispatch('poll') .then(() => new Promise(resolve => requestAnimationFrame(resolve))) .then(() => { expect(Vue.http.get).toHaveBeenCalledWith(jasmine.anything(), { @@ -192,9 +250,12 @@ describe('Actions Notes Store', () => { jasmine.clock().tick(1500); }) - .then(() => new Promise((resolve) => { - requestAnimationFrame(resolve); - })) + .then( + () => + new Promise(resolve => { + requestAnimationFrame(resolve); + }), + ) .then(() => { expect(Vue.http.get.calls.count()).toBe(2); expect(Vue.http.get.calls.mostRecent().args[1].headers).toEqual({ diff --git a/spec/javascripts/notes_spec.js b/spec/javascripts/notes_spec.js index ba0a70bed17..ec56ab0e2f0 100644 --- a/spec/javascripts/notes_spec.js +++ b/spec/javascripts/notes_spec.js @@ -7,7 +7,7 @@ import * as urlUtils from 
'~/lib/utils/url_utility'; import 'autosize'; import '~/gl_form'; import '~/lib/utils/text_utility'; -import '~/render_gfm'; +import '~/behaviors/markdown/render_gfm'; import Notes from '~/notes'; import timeoutPromise from './helpers/set_timeout_promise_helper'; @@ -16,15 +16,15 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; window.gl = window.gl || {}; gl.utils = gl.utils || {}; - const htmlEscape = (comment) => { - const escapedString = comment.replace(/["&'<>]/g, (a) => { + const htmlEscape = comment => { + const escapedString = comment.replace(/["&'<>]/g, a => { const escapedToken = { '&': '&', '<': '<', '>': '>', '"': '"', "'": ''', - '`': '`' + '`': '`', }[a]; return escapedToken; @@ -39,7 +39,7 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; var commentsTemplate = 'merge_requests/merge_request_with_comment.html.raw'; preloadFixtures(commentsTemplate); - beforeEach(function () { + beforeEach(function() { loadFixtures(commentsTemplate); gl.utils.disableButtonIfEmptyField = _.noop; window.project_uploads_path = 'http://test.host/uploads'; @@ -51,6 +51,17 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; $('body').removeAttr('data-page'); }); + describe('addBinding', () => { + it('calls postComment when comment button is clicked', () => { + spyOn(Notes.prototype, 'postComment'); + this.notes = new Notes('', []); + + $('.js-comment-button').click(); + + expect(Notes.prototype.postComment).toHaveBeenCalled(); + }); + }); + describe('task lists', function() { let mock; @@ -58,7 +69,13 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; spyOn(axios, 'patch').and.callThrough(); mock = new MockAdapter(axios); - mock.onPatch(`${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/merge_requests/1.json`).reply(200, {}); + mock + .onPatch( + `${ + gl.TEST_HOST + }/frontend-fixtures/merge-requests-project/merge_requests/1.json`, + ) + .reply(200, {}); $('.js-comment-button').on('click', function(e) { e.preventDefault(); @@ -73,18 +90,27 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; it('modifies the Markdown field', function() { const changeEvent = document.createEvent('HTMLEvents'); changeEvent.initEvent('change', true, true); - $('input[type=checkbox]').attr('checked', true)[1].dispatchEvent(changeEvent); + $('input[type=checkbox]') + .attr('checked', true)[1] + .dispatchEvent(changeEvent); - expect($('.js-task-list-field.original-task-list').val()).toBe('- [x] Task List Item'); + expect($('.js-task-list-field.original-task-list').val()).toBe( + '- [x] Task List Item', + ); }); it('submits an ajax request on tasklist:changed', function(done) { $('.js-task-list-container').trigger('tasklist:changed'); setTimeout(() => { - expect(axios.patch).toHaveBeenCalledWith(`${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/merge_requests/1.json`, { - note: { note: '' }, - }); + expect(axios.patch).toHaveBeenCalledWith( + `${ + gl.TEST_HOST + }/frontend-fixtures/merge-requests-project/merge_requests/1.json`, + { + note: { note: '' }, + }, + ); done(); }); }); @@ -100,10 +126,10 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; spyOn(this.notes, 'renderNote').and.stub(); $(textarea).data('autosave', { - reset: function() {} + reset: function() {}, }); - $('.js-comment-button').on('click', (e) => { + $('.js-comment-button').on('click', e => { const $form = $(this); e.preventDefault(); this.notes.addNote($form); @@ -149,7 +175,7 @@ import timeoutPromise from 
'./helpers/set_timeout_promise_helper'; <div class="note-text">${sampleComment}</div> </li>`, note: sampleComment, - valid: true + valid: true, }; $form = $('form.js-main-target-form'); $notesContainer = $('ul.main-notes-list'); @@ -163,7 +189,7 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; mock.restore(); }); - it('updates note and resets edit form', (done) => { + it('updates note and resets edit form', done => { spyOn(this.notes, 'revertNoteEditForm'); spyOn(this.notes, 'setupNewNote'); @@ -175,7 +201,9 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; updatedNote.note = 'bar'; this.notes.updateNote(updatedNote, $targetNote); - expect(this.notes.revertNoteEditForm).toHaveBeenCalledWith($targetNote); + expect(this.notes.revertNoteEditForm).toHaveBeenCalledWith( + $targetNote, + ); expect(this.notes.setupNewNote).toHaveBeenCalled(); done(); @@ -231,17 +259,14 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; note: 'heya', html: '<div>heya</div>', }; - $notesList = jasmine.createSpyObj('$notesList', [ - 'find', - 'append', - ]); + $notesList = jasmine.createSpyObj('$notesList', ['find', 'append']); notes = jasmine.createSpyObj('notes', [ 'setupNewNote', 'refresh', 'collapseLongCommitList', 'updateNotesCount', - 'putConflictEditWarningInPlace' + 'putConflictEditWarningInPlace', ]); notes.taskList = jasmine.createSpyObj('tasklist', ['init']); notes.note_ids = []; @@ -258,7 +283,10 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; Notes.isNewNote.and.returnValue(true); Notes.prototype.renderNote.call(notes, note, null, $notesList); - expect(Notes.animateAppendNote).toHaveBeenCalledWith(note.html, $notesList); + expect(Notes.animateAppendNote).toHaveBeenCalledWith( + note.html, + $notesList, + ); }); }); @@ -273,7 +301,10 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; Notes.prototype.renderNote.call(notes, note, null, $notesList); - expect(Notes.animateUpdateNote).toHaveBeenCalledWith(note.html, $note); + expect(Notes.animateUpdateNote).toHaveBeenCalledWith( + note.html, + $note, + ); expect(notes.setupNewNote).toHaveBeenCalledWith($newNote); }); @@ -301,7 +332,10 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; $notesList.find.and.returnValue($note); Notes.prototype.renderNote.call(notes, note, null, $notesList); - expect(notes.putConflictEditWarningInPlace).toHaveBeenCalledWith(note, $note); + expect(notes.putConflictEditWarningInPlace).toHaveBeenCalledWith( + note, + $note, + ); }); }); }); @@ -311,11 +345,11 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; it('should consider same note text as the same', () => { const result = Notes.isUpdatedNote( { - note: 'initial' + note: 'initial', }, $(`<div> <div class="original-note-content">initial</div> - </div>`) + </div>`), ); expect(result).toEqual(false); @@ -324,11 +358,11 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; it('should consider same note with trailing newline as the same', () => { const result = Notes.isUpdatedNote( { - note: 'initial\n' + note: 'initial\n', }, $(`<div> <div class="original-note-content">initial\n</div> - </div>`) + </div>`), ); expect(result).toEqual(false); @@ -337,11 +371,11 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; it('should consider different notes as different', () => { const result = Notes.isUpdatedNote( { - note: 'foo' + note: 'foo', }, $(`<div> <div class="original-note-content">bar</div> - </div>`) + 
</div>`), ); expect(result).toEqual(true); @@ -397,7 +431,10 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; it('should call Notes.animateAppendNote', () => { Notes.prototype.renderDiscussionNote.call(notes, note, $form); - expect(Notes.animateAppendNote).toHaveBeenCalledWith(note.discussion_html, $('.main-notes-list')); + expect(Notes.animateAppendNote).toHaveBeenCalledWith( + note.discussion_html, + $('.main-notes-list'), + ); }); it('should append to row selected with line_code', () => { @@ -428,7 +465,10 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; }); it('should call Notes.animateAppendNote', () => { - expect(Notes.animateAppendNote).toHaveBeenCalledWith(note.html, discussionContainer); + expect(Notes.animateAppendNote).toHaveBeenCalledWith( + note.html, + discussionContainer, + ); }); }); }); @@ -461,9 +501,7 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; beforeEach(() => { noteHTML = '<div></div>'; - $note = jasmine.createSpyObj('$note', [ - 'replaceWith' - ]); + $note = jasmine.createSpyObj('$note', ['replaceWith']); $updatedNote = Notes.animateUpdateNote(noteHTML, $note); }); @@ -501,7 +539,7 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; <div class="note-text">${sampleComment}</div> </li>`, note: sampleComment, - valid: true + valid: true, }; let $form; let $notesContainer; @@ -534,10 +572,12 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; mockNotesPost(); $('.js-comment-button').click(); - expect($notesContainer.find('.note.being-posted').length > 0).toEqual(true); + expect($notesContainer.find('.note.being-posted').length > 0).toEqual( + true, + ); }); - it('should remove placeholder note when new comment is done posting', (done) => { + it('should remove placeholder note when new comment is done posting', done => { mockNotesPost(); $('.js-comment-button').click(); @@ -549,19 +589,44 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; }); }); - it('should show actual note element when new comment is done posting', (done) => { + describe('postComment', () => { + it('disables the submit button', done => { + const $submitButton = $form.find('.js-comment-submit-button'); + expect($submitButton).not.toBeDisabled(); + const dummyEvent = { + preventDefault() {}, + target: $submitButton, + }; + mock.onPost(NOTES_POST_PATH).replyOnce(() => { + expect($submitButton).toBeDisabled(); + return [200, note]; + }); + + this.notes + .postComment(dummyEvent) + .then(() => { + expect($submitButton).not.toBeDisabled(); + }) + .then(done) + .catch(done.fail); + }); + }); + + it('should show actual note element when new comment is done posting', done => { mockNotesPost(); $('.js-comment-button').click(); setTimeout(() => { - expect($notesContainer.find(`#note_${note.id}`).length > 0).toEqual(true); + expect($notesContainer.find(`#note_${note.id}`).length > 0).toEqual( + true, + ); done(); }); }); - it('should reset Form when new comment is done posting', (done) => { + it('should reset Form when new comment is done posting', done => { mockNotesPost(); $('.js-comment-button').click(); @@ -573,19 +638,24 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; }); }); - it('should show flash error message when new comment failed to be posted', (done) => { + it('should show flash error message when new comment failed to be posted', done => { mockNotesPostError(); $('.js-comment-button').click(); setTimeout(() => { - 
expect($notesContainer.parent().find('.flash-container .flash-text').is(':visible')).toEqual(true); + expect( + $notesContainer + .parent() + .find('.flash-container .flash-text') + .is(':visible'), + ).toEqual(true); done(); }); }); - it('should show flash error message when comment failed to be updated', (done) => { + it('should show flash error message when comment failed to be updated', done => { mockNotesPost(); $('.js-comment-button').click(); @@ -606,7 +676,12 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; .then(() => { const $updatedNoteEl = $notesContainer.find(`#note_${note.id}`); expect($updatedNoteEl.hasClass('.being-posted')).toEqual(false); // Remove being-posted visuals - expect($updatedNoteEl.find('.note-text').text().trim()).toEqual(sampleComment); // See if comment reverted back to original + expect( + $updatedNoteEl + .find('.note-text') + .text() + .trim(), + ).toEqual(sampleComment); // See if comment reverted back to original expect($('.flash-container').is(':visible')).toEqual(true); // Flash error message shown done(); @@ -620,12 +695,12 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; const note = { commands_changes: { assignee_id: 1, - emoji_award: '100' + emoji_award: '100', }, errors: { - commands_only: ['Commands applied'] + commands_only: ['Commands applied'], }, - valid: false + valid: false, }; let $form; let $notesContainer; @@ -640,12 +715,12 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; window.gon.current_user_fullname = 'Administrator'; gl.awardsHandler = { addAwardToEmojiBar: () => {}, - scrollToAwards: () => {} + scrollToAwards: () => {}, }; gl.GfmAutoComplete = { dataSources: { - commands: '/root/test-project/autocomplete_sources/commands' - } + commands: '/root/test-project/autocomplete_sources/commands', + }, }; $form = $('form.js-main-target-form'); $notesContainer = $('ul.main-notes-list'); @@ -656,14 +731,18 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; mock.restore(); }); - it('should remove slash command placeholder when comment with slash commands is done posting', (done) => { + it('should remove slash command placeholder when comment with slash commands is done posting', done => { spyOn(gl.awardsHandler, 'addAwardToEmojiBar').and.callThrough(); $('.js-comment-button').click(); - expect($notesContainer.find('.system-note.being-posted').length).toEqual(1); // Placeholder shown + expect( + $notesContainer.find('.system-note.being-posted').length, + ).toEqual(1); // Placeholder shown setTimeout(() => { - expect($notesContainer.find('.system-note.being-posted').length).toEqual(0); // Placeholder removed + expect( + $notesContainer.find('.system-note.being-posted').length, + ).toEqual(0); // Placeholder removed done(); }); }); @@ -678,7 +757,7 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; <div class="note-text">${sampleComment}</div> </li>`, note: sampleComment, - valid: true + valid: true, }; let $form; let $notesContainer; @@ -700,7 +779,7 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; mock.restore(); }); - it('should not render a script tag', (done) => { + it('should not render a script tag', done => { $('.js-comment-button').click(); setTimeout(() => { @@ -709,8 +788,15 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; $noteEl.find('textarea.js-note-text').html(updatedComment); $noteEl.find('.js-comment-save-button').click(); - const $updatedNoteEl = 
$notesContainer.find(`#note_${note.id}`).find('.js-task-list-container'); - expect($updatedNoteEl.find('.note-text').text().trim()).toEqual(''); + const $updatedNoteEl = $notesContainer + .find(`#note_${note.id}`) + .find('.js-task-list-container'); + expect( + $updatedNoteEl + .find('.note-text') + .text() + .trim(), + ).toEqual(''); done(); }); @@ -730,7 +816,9 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; it('should return form metadata object from form reference', () => { $form.find('textarea.js-note-text').val(sampleComment); - const { formData, formContent, formAction } = this.notes.getFormData($form); + const { formData, formContent, formAction } = this.notes.getFormData( + $form, + ); expect(formData.indexOf(sampleComment) > -1).toBe(true); expect(formContent).toEqual(sampleComment); @@ -746,7 +834,9 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; const { formContent } = this.notes.getFormData($form); expect(_.escape).toHaveBeenCalledWith(sampleComment); - expect(formContent).toEqual('<script>alert("Boom!");</script>'); + expect(formContent).toEqual( + '<script>alert("Boom!");</script>', + ); }); }); @@ -756,7 +846,8 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; }); it('should return true when comment begins with a quick action', () => { - const sampleComment = '/wip\n/milestone %1.0\n/merge\n/unassign Merging this'; + const sampleComment = + '/wip\n/milestone %1.0\n/merge\n/unassign Merging this'; const hasQuickActions = this.notes.hasQuickActions(sampleComment); expect(hasQuickActions).toBeTruthy(); @@ -780,7 +871,8 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; describe('stripQuickActions', () => { it('should strip quick actions from the comment which begins with a quick action', () => { this.notes = new Notes(); - const sampleComment = '/wip\n/milestone %1.0\n/merge\n/unassign Merging this'; + const sampleComment = + '/wip\n/milestone %1.0\n/merge\n/unassign Merging this'; const stripedComment = this.notes.stripQuickActions(sampleComment); expect(stripedComment).toBe(''); @@ -788,7 +880,8 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; it('should strip quick actions from the comment but leaves plain comment if it is present', () => { this.notes = new Notes(); - const sampleComment = '/wip\n/milestone %1.0\n/merge\n/unassign\nMerging this'; + const sampleComment = + '/wip\n/milestone %1.0\n/merge\n/unassign\nMerging this'; const stripedComment = this.notes.stripQuickActions(sampleComment); expect(stripedComment).toBe('Merging this'); @@ -796,7 +889,8 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; it('should NOT strip string that has slashes within', () => { this.notes = new Notes(); - const sampleComment = 'http://127.0.0.1:3000/root/gitlab-shell/issues/1'; + const sampleComment = + 'http://127.0.0.1:3000/root/gitlab-shell/issues/1'; const stripedComment = this.notes.stripQuickActions(sampleComment); expect(stripedComment).toBe(sampleComment); @@ -807,7 +901,7 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; const availableQuickActions = [ { name: 'close', description: 'Close this issue', params: [] }, { name: 'title', description: 'Change title', params: [{}] }, - { name: 'estimate', description: 'Set time estimate', params: [{}] } + { name: 'estimate', description: 'Set time estimate', params: [{}] }, ]; beforeEach(() => { @@ -816,17 +910,29 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; it('should 
return executing quick action description when note has single quick action', () => { const sampleComment = '/close'; - expect(this.notes.getQuickActionDescription(sampleComment, availableQuickActions)).toBe('Applying command to close this issue'); + expect( + this.notes.getQuickActionDescription( + sampleComment, + availableQuickActions, + ), + ).toBe('Applying command to close this issue'); }); it('should return generic multiple quick action description when note has multiple quick actions', () => { const sampleComment = '/close\n/title [Duplicate] Issue foobar'; - expect(this.notes.getQuickActionDescription(sampleComment, availableQuickActions)).toBe('Applying multiple commands'); + expect( + this.notes.getQuickActionDescription( + sampleComment, + availableQuickActions, + ), + ).toBe('Applying multiple commands'); }); it('should return generic quick action description when available quick actions list is not populated', () => { const sampleComment = '/close\n/title [Duplicate] Issue foobar'; - expect(this.notes.getQuickActionDescription(sampleComment)).toBe('Applying command'); + expect(this.notes.getQuickActionDescription(sampleComment)).toBe( + 'Applying command', + ); }); }); @@ -856,14 +962,35 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; expect($tempNote.attr('id')).toEqual(uniqueId); expect($tempNote.hasClass('being-posted')).toBeTruthy(); expect($tempNote.hasClass('fade-in-half')).toBeTruthy(); - $tempNote.find('.timeline-icon > a, .note-header-info > a').each(function() { - expect($(this).attr('href')).toEqual(`/${currentUsername}`); - }); - expect($tempNote.find('.timeline-icon .avatar').attr('src')).toEqual(currentUserAvatar); - expect($tempNote.find('.timeline-content').hasClass('discussion')).toBeFalsy(); - expect($tempNoteHeader.find('.hidden-xs').text().trim()).toEqual(currentUserFullname); - expect($tempNoteHeader.find('.note-headline-light').text().trim()).toEqual(`@${currentUsername}`); - expect($tempNote.find('.note-body .note-text p').text().trim()).toEqual(sampleComment); + $tempNote + .find('.timeline-icon > a, .note-header-info > a') + .each(function() { + expect($(this).attr('href')).toEqual(`/${currentUsername}`); + }); + expect($tempNote.find('.timeline-icon .avatar').attr('src')).toEqual( + currentUserAvatar, + ); + expect( + $tempNote.find('.timeline-content').hasClass('discussion'), + ).toBeFalsy(); + expect( + $tempNoteHeader + .find('.hidden-xs') + .text() + .trim(), + ).toEqual(currentUserFullname); + expect( + $tempNoteHeader + .find('.note-headline-light') + .text() + .trim(), + ).toEqual(`@${currentUsername}`); + expect( + $tempNote + .find('.note-body .note-text p') + .text() + .trim(), + ).toEqual(sampleComment); }); it('should return constructed placeholder element for discussion note based on form contents', () => { @@ -872,11 +999,13 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; uniqueId, isDiscussionNote: true, currentUsername, - currentUserFullname + currentUserFullname, }); expect($tempNote.prop('nodeName')).toEqual('LI'); - expect($tempNote.find('.timeline-content').hasClass('discussion')).toBeTruthy(); + expect( + $tempNote.find('.timeline-content').hasClass('discussion'), + ).toBeTruthy(); }); it('should return a escaped user name', () => { @@ -890,7 +1019,12 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; currentUserAvatar, }); const $tempNoteHeader = $tempNote.find('.note-header'); - expect($tempNoteHeader.find('.hidden-xs').text().trim()).toEqual('Foo 
<script>alert("XSS")</script>'); + expect( + $tempNoteHeader + .find('.hidden-xs') + .text() + .trim(), + ).toEqual('Foo <script>alert("XSS")</script>'); }); }); @@ -913,7 +1047,12 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; expect($tempNote.attr('id')).toEqual(uniqueId); expect($tempNote.hasClass('being-posted')).toBeTruthy(); expect($tempNote.hasClass('fade-in-half')).toBeTruthy(); - expect($tempNote.find('.timeline-content i').text().trim()).toEqual(sampleCommandDescription); + expect( + $tempNote + .find('.timeline-content i') + .text() + .trim(), + ).toEqual(sampleCommandDescription); }); }); @@ -923,7 +1062,11 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; }); it('shows a flash message', () => { - this.notes.addFlash('Error message', FLASH_TYPE_ALERT, this.notes.parentTimeline.get(0)); + this.notes.addFlash( + 'Error message', + FLASH_TYPE_ALERT, + this.notes.parentTimeline.get(0), + ); expect($('.flash-alert').is(':visible')).toBeTruthy(); }); @@ -936,7 +1079,11 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; }); it('hides visible flash message', () => { - this.notes.addFlash('Error message 1', FLASH_TYPE_ALERT, this.notes.parentTimeline.get(0)); + this.notes.addFlash( + 'Error message 1', + FLASH_TYPE_ALERT, + this.notes.parentTimeline.get(0), + ); this.notes.clearFlash(); @@ -944,4 +1091,4 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; }); }); }); -}).call(window); +}.call(window)); diff --git a/spec/javascripts/pages/labels/components/promote_label_modal_spec.js b/spec/javascripts/pages/labels/components/promote_label_modal_spec.js index ba2e07f02f7..a24f8204fe1 100644 --- a/spec/javascripts/pages/labels/components/promote_label_modal_spec.js +++ b/spec/javascripts/pages/labels/components/promote_label_modal_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import promoteLabelModal from '~/pages/projects/labels/components/promote_label_modal.vue'; import eventHub from '~/pages/projects/labels/event_hub'; import axios from '~/lib/utils/axios_utils'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Promote label modal', () => { let vm; @@ -12,6 +12,7 @@ describe('Promote label modal', () => { labelColor: '#5cb85c', labelTextColor: '#ffffff', url: `${gl.TEST_HOST}/dummy/promote/labels`, + groupName: 'group', }; describe('Modal title and description', () => { @@ -24,7 +25,7 @@ describe('Promote label modal', () => { }); it('contains the proper description', () => { - expect(vm.text).toContain('Promoting this label will make it available for all projects inside the group'); + expect(vm.text).toContain(`Promoting ${labelMockData.labelTitle} will make it available for all projects inside ${labelMockData.groupName}`); }); it('contains a label span with the color', () => { diff --git a/spec/javascripts/pages/milestones/shared/components/promote_milestone_modal_spec.js b/spec/javascripts/pages/milestones/shared/components/promote_milestone_modal_spec.js index bf044fe8fb5..8b220423637 100644 --- a/spec/javascripts/pages/milestones/shared/components/promote_milestone_modal_spec.js +++ b/spec/javascripts/pages/milestones/shared/components/promote_milestone_modal_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import promoteMilestoneModal from '~/pages/milestones/shared/components/promote_milestone_modal.vue'; import eventHub from '~/pages/milestones/shared/event_hub'; import axios from 
'~/lib/utils/axios_utils'; -import mountComponent from '../../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Promote milestone modal', () => { let vm; @@ -10,6 +10,7 @@ describe('Promote milestone modal', () => { const milestoneMockData = { milestoneTitle: 'v1.0', url: `${gl.TEST_HOST}/dummy/promote/milestones`, + groupName: 'group', }; describe('Modal title and description', () => { @@ -22,7 +23,7 @@ describe('Promote milestone modal', () => { }); it('contains the proper description', () => { - expect(vm.text).toContain('Promoting this milestone will make it available for all projects inside the group.'); + expect(vm.text).toContain(`Promoting ${milestoneMockData.milestoneTitle} will make it available for all projects inside ${milestoneMockData.groupName}.`); }); it('contains the correct title', () => { diff --git a/spec/javascripts/performance_bar/components/detailed_metric_spec.js b/spec/javascripts/performance_bar/components/detailed_metric_spec.js new file mode 100644 index 00000000000..c4611dc7662 --- /dev/null +++ b/spec/javascripts/performance_bar/components/detailed_metric_spec.js @@ -0,0 +1,80 @@ +import Vue from 'vue'; +import detailedMetric from '~/performance_bar/components/detailed_metric.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; + +describe('detailedMetric', () => { + let vm; + + afterEach(() => { + vm.$destroy(); + }); + + describe('when the current request has no details', () => { + beforeEach(() => { + vm = mountComponent(Vue.extend(detailedMetric), { + currentRequest: {}, + metric: 'gitaly', + header: 'Gitaly calls', + details: 'details', + keys: ['feature', 'request'], + }); + }); + + it('does not render the element', () => { + expect(vm.$el.innerHTML).toEqual(undefined); + }); + }); + + describe('when the current request has details', () => { + const requestDetails = [ + { duration: '100', feature: 'find_commit', request: 'abcdef' }, + { duration: '23', feature: 'rebase_in_progress', request: '' }, + ]; + + beforeEach(() => { + vm = mountComponent(Vue.extend(detailedMetric), { + currentRequest: { + details: { + gitaly: { + duration: '123ms', + calls: '456', + details: requestDetails, + }, + }, + }, + metric: 'gitaly', + header: 'Gitaly calls', + details: 'details', + keys: ['feature', 'request'], + }); + }); + + it('diplays details', () => { + expect(vm.$el.innerText.replace(/\s+/g, ' ')).toContain('123ms / 456'); + }); + + it('adds a modal with a table of the details', () => { + vm.$el + .querySelectorAll('.performance-bar-modal td strong') + .forEach((duration, index) => { + expect(duration.innerText).toContain(requestDetails[index].duration); + }); + + vm.$el + .querySelectorAll('.performance-bar-modal td:nth-child(2)') + .forEach((feature, index) => { + expect(feature.innerText).toContain(requestDetails[index].feature); + }); + + vm.$el + .querySelectorAll('.performance-bar-modal td:nth-child(3)') + .forEach((request, index) => { + expect(request.innerText).toContain(requestDetails[index].request); + }); + }); + + it('displays the metric name', () => { + expect(vm.$el.innerText).toContain('gitaly'); + }); + }); +}); diff --git a/spec/javascripts/performance_bar/components/performance_bar_app_spec.js b/spec/javascripts/performance_bar/components/performance_bar_app_spec.js new file mode 100644 index 00000000000..9ab9ab1c9f4 --- /dev/null +++ b/spec/javascripts/performance_bar/components/performance_bar_app_spec.js @@ -0,0 +1,88 @@ +import Vue from 'vue'; 
+import axios from '~/lib/utils/axios_utils'; +import performanceBarApp from '~/performance_bar/components/performance_bar_app.vue'; +import PerformanceBarService from '~/performance_bar/services/performance_bar_service'; +import PerformanceBarStore from '~/performance_bar/stores/performance_bar_store'; + +import mountComponent from 'spec/helpers/vue_mount_component_helper'; +import MockAdapter from 'axios-mock-adapter'; + +describe('performance bar', () => { + let mock; + let vm; + + beforeEach(() => { + const store = new PerformanceBarStore(); + + mock = new MockAdapter(axios); + + mock.onGet('/-/peek/results').reply( + 200, + { + data: { + gc: { + invokes: 0, + invoke_time: '0.00', + use_size: 0, + total_size: 0, + total_object: 0, + gc_time: '0.00', + }, + host: { hostname: 'web-01' }, + }, + }, + {}, + ); + + vm = mountComponent(Vue.extend(performanceBarApp), { + store, + env: 'development', + requestId: '123', + peekUrl: '/-/peek/results', + profileUrl: '?lineprofiler=true', + }); + }); + + afterEach(() => { + vm.$destroy(); + mock.restore(); + }); + + it('sets the class to match the environment', () => { + expect(vm.$el.getAttribute('class')).toContain('development'); + }); + + describe('loadRequestDetails', () => { + beforeEach(() => { + spyOn(vm.store, 'addRequest').and.callThrough(); + }); + + it('does nothing if the request cannot be tracked', () => { + spyOn(vm.store, 'canTrackRequest').and.callFake(() => false); + + vm.loadRequestDetails('123', 'https://gitlab.com/'); + + expect(vm.store.addRequest).not.toHaveBeenCalled(); + }); + + it('adds the request immediately', () => { + vm.loadRequestDetails('123', 'https://gitlab.com/'); + + expect(vm.store.addRequest).toHaveBeenCalledWith( + '123', + 'https://gitlab.com/', + ); + }); + + it('makes an HTTP request for the request details', () => { + spyOn(PerformanceBarService, 'fetchRequestDetails').and.callThrough(); + + vm.loadRequestDetails('456', 'https://gitlab.com/'); + + expect(PerformanceBarService.fetchRequestDetails).toHaveBeenCalledWith( + '/-/peek/results', + '456', + ); + }); + }); +}); diff --git a/spec/javascripts/performance_bar/components/request_selector_spec.js b/spec/javascripts/performance_bar/components/request_selector_spec.js new file mode 100644 index 00000000000..6108a29f8c4 --- /dev/null +++ b/spec/javascripts/performance_bar/components/request_selector_spec.js @@ -0,0 +1,47 @@ +import Vue from 'vue'; +import requestSelector from '~/performance_bar/components/request_selector.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; + +describe('request selector', () => { + const requests = [ + { id: '123', url: 'https://gitlab.com/' }, + { + id: '456', + url: 'https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/1', + }, + { + id: '789', + url: + 'https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/1.json?serializer=widget', + }, + ]; + + let vm; + + beforeEach(() => { + vm = mountComponent(Vue.extend(requestSelector), { + requests, + currentRequest: requests[1], + }); + }); + + afterEach(() => { + vm.$destroy(); + }); + + function optionText(requestId) { + return vm.$el.querySelector(`[value='${requestId}']`).innerText.trim(); + } + + it('displays the last component of the path', () => { + expect(optionText(requests[2].id)).toEqual('1.json?serializer=widget'); + }); + + it('keeps the last two components of the path when the last component is numeric', () => { + expect(optionText(requests[1].id)).toEqual('merge_requests/1'); + }); + + it('ignores trailing slashes', () => { + 
expect(optionText(requests[0].id)).toEqual('gitlab.com'); + }); +}); diff --git a/spec/javascripts/performance_bar/components/simple_metric_spec.js b/spec/javascripts/performance_bar/components/simple_metric_spec.js new file mode 100644 index 00000000000..98b843e9711 --- /dev/null +++ b/spec/javascripts/performance_bar/components/simple_metric_spec.js @@ -0,0 +1,47 @@ +import Vue from 'vue'; +import simpleMetric from '~/performance_bar/components/simple_metric.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; + +describe('simpleMetric', () => { + let vm; + + afterEach(() => { + vm.$destroy(); + }); + + describe('when the current request has no details', () => { + beforeEach(() => { + vm = mountComponent(Vue.extend(simpleMetric), { + currentRequest: {}, + metric: 'gitaly', + }); + }); + + it('does not display details', () => { + expect(vm.$el.innerText).not.toContain('/'); + }); + + it('displays the metric name', () => { + expect(vm.$el.innerText).toContain('gitaly'); + }); + }); + + describe('when the current request has details', () => { + beforeEach(() => { + vm = mountComponent(Vue.extend(simpleMetric), { + currentRequest: { + details: { gitaly: { duration: '123ms', calls: '456' } }, + }, + metric: 'gitaly', + }); + }); + + it('diplays details', () => { + expect(vm.$el.innerText.replace(/\s+/g, ' ')).toContain('123ms / 456'); + }); + + it('displays the metric name', () => { + expect(vm.$el.innerText).toContain('gitaly'); + }); + }); +}); diff --git a/spec/javascripts/pipelines/graph/action_component_spec.js b/spec/javascripts/pipelines/graph/action_component_spec.js index e8fcd4b1a36..581209f215d 100644 --- a/spec/javascripts/pipelines/graph/action_component_spec.js +++ b/spec/javascripts/pipelines/graph/action_component_spec.js @@ -1,25 +1,30 @@ import Vue from 'vue'; import actionComponent from '~/pipelines/components/graph/action_component.vue'; +import eventHub from '~/pipelines/event_hub'; +import mountComponent from '../../helpers/vue_mount_component_helper'; describe('pipeline graph action component', () => { let component; beforeEach((done) => { const ActionComponent = Vue.extend(actionComponent); - component = new ActionComponent({ - propsData: { - tooltipText: 'bar', - link: 'foo', - actionMethod: 'post', - actionIcon: 'cancel', - }, - }).$mount(); + component = mountComponent(ActionComponent, { + tooltipText: 'bar', + link: 'foo', + actionIcon: 'cancel', + }); Vue.nextTick(done); }); - it('should render a link', () => { - expect(component.$el.getAttribute('href')).toEqual('foo'); + afterEach(() => { + component.$destroy(); + }); + + it('should emit an event with the provided link', () => { + eventHub.$on('graphAction', (link) => { + expect(link).toEqual('foo'); + }); }); it('should render the provided title as a bootstrap tooltip', () => { diff --git a/spec/javascripts/pipelines/graph/job_component_spec.js b/spec/javascripts/pipelines/graph/job_component_spec.js index ce181a1e515..c9677ae209a 100644 --- a/spec/javascripts/pipelines/graph/job_component_spec.js +++ b/spec/javascripts/pipelines/graph/job_component_spec.js @@ -13,6 +13,7 @@ describe('pipeline graph job component', () => { icon: 'icon_status_success', text: 'passed', label: 'passed', + tooltip: 'passed', group: 'success', details_path: '/root/ci-mock/builds/4256', has_details: true, @@ -137,6 +138,7 @@ describe('pipeline graph job component', () => { status: { icon: 'icon_status_success', label: 'success', + tooltip: 'success', }, }, }); diff --git 
a/spec/javascripts/pipelines/graph/mock_data.js b/spec/javascripts/pipelines/graph/mock_data.js index b9494f86d74..70eba98e939 100644 --- a/spec/javascripts/pipelines/graph/mock_data.js +++ b/spec/javascripts/pipelines/graph/mock_data.js @@ -1,232 +1,261 @@ -/* eslint-disable quote-props, quotes, comma-dangle */ export default { - "id": 123, - "user": { - "name": "Root", - "username": "root", - "id": 1, - "state": "active", - "avatar_url": null, - "web_url": "http://localhost:3000/root" + id: 123, + user: { + name: 'Root', + username: 'root', + id: 1, + state: 'active', + avatar_url: null, + web_url: 'http://localhost:3000/root', }, - "active": false, - "coverage": null, - "path": "/root/ci-mock/pipelines/123", - "details": { - "status": { - "icon": "icon_status_success", - "text": "passed", - "label": "passed", - "group": "success", - "has_details": true, - "details_path": "/root/ci-mock/pipelines/123", - "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico" + active: false, + coverage: null, + path: '/root/ci-mock/pipelines/123', + details: { + status: { + icon: 'icon_status_success', + text: 'passed', + label: 'passed', + group: 'success', + has_details: true, + details_path: '/root/ci-mock/pipelines/123', + favicon: + '/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico', }, - "duration": 9, - "finished_at": "2017-04-19T14:30:27.542Z", - "stages": [{ - "name": "test", - "title": "test: passed", - "groups": [{ - "name": "test", - "size": 1, - "status": { - "icon": "icon_status_success", - "text": "passed", - "label": "passed", - "group": "success", - "has_details": true, - "details_path": "/root/ci-mock/builds/4153", - "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico", - "action": { - "icon": "retry", - "title": "Retry", - "path": "/root/ci-mock/builds/4153/retry", - "method": "post" - } + duration: 9, + finished_at: '2017-04-19T14:30:27.542Z', + stages: [ + { + name: 'test', + title: 'test: passed', + groups: [ + { + name: 'test', + size: 1, + status: { + icon: 'icon_status_success', + text: 'passed', + label: 'passed', + group: 'success', + has_details: true, + details_path: '/root/ci-mock/builds/4153', + favicon: + '/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico', + action: { + icon: 'retry', + title: 'Retry', + path: '/root/ci-mock/builds/4153/retry', + method: 'post', + }, + }, + jobs: [ + { + id: 4153, + name: 'test', + build_path: '/root/ci-mock/builds/4153', + retry_path: '/root/ci-mock/builds/4153/retry', + playable: false, + created_at: '2017-04-13T09:25:18.959Z', + updated_at: '2017-04-13T09:25:23.118Z', + status: { + icon: 'icon_status_success', + text: 'passed', + label: 'passed', + group: 'success', + has_details: true, + details_path: '/root/ci-mock/builds/4153', + favicon: + '/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico', + action: { + icon: 'retry', + title: 'Retry', + path: '/root/ci-mock/builds/4153/retry', + method: 'post', + }, + }, + }, + ], + }, + ], + status: { + icon: 'icon_status_success', + text: 'passed', + label: 'passed', + group: 'success', + has_details: true, + details_path: '/root/ci-mock/pipelines/123#test', + favicon: + 
'/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico', }, - "jobs": [{ - "id": 4153, - "name": "test", - "build_path": "/root/ci-mock/builds/4153", - "retry_path": "/root/ci-mock/builds/4153/retry", - "playable": false, - "created_at": "2017-04-13T09:25:18.959Z", - "updated_at": "2017-04-13T09:25:23.118Z", - "status": { - "icon": "icon_status_success", - "text": "passed", - "label": "passed", - "group": "success", - "has_details": true, - "details_path": "/root/ci-mock/builds/4153", - "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico", - "action": { - "icon": "retry", - "title": "Retry", - "path": "/root/ci-mock/builds/4153/retry", - "method": "post" - } - } - }] - }], - "status": { - "icon": "icon_status_success", - "text": "passed", - "label": "passed", - "group": "success", - "has_details": true, - "details_path": "/root/ci-mock/pipelines/123#test", - "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico" + path: '/root/ci-mock/pipelines/123#test', + dropdown_path: '/root/ci-mock/pipelines/123/stage.json?stage=test', }, - "path": "/root/ci-mock/pipelines/123#test", - "dropdown_path": "/root/ci-mock/pipelines/123/stage.json?stage=test" - }, { - "name": "deploy", - "title": "deploy: passed", - "groups": [{ - "name": "deploy to production", - "size": 1, - "status": { - "icon": "icon_status_success", - "text": "passed", - "label": "passed", - "group": "success", - "has_details": true, - "details_path": "/root/ci-mock/builds/4166", - "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico", - "action": { - "icon": "retry", - "title": "Retry", - "path": "/root/ci-mock/builds/4166/retry", - "method": "post" - } + { + name: 'deploy', + title: 'deploy: passed', + groups: [ + { + name: 'deploy to production', + size: 1, + status: { + icon: 'icon_status_success', + text: 'passed', + label: 'passed', + group: 'success', + has_details: true, + details_path: '/root/ci-mock/builds/4166', + favicon: + '/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico', + action: { + icon: 'retry', + title: 'Retry', + path: '/root/ci-mock/builds/4166/retry', + method: 'post', + }, + }, + jobs: [ + { + id: 4166, + name: 'deploy to production', + build_path: '/root/ci-mock/builds/4166', + retry_path: '/root/ci-mock/builds/4166/retry', + playable: false, + created_at: '2017-04-19T14:29:46.463Z', + updated_at: '2017-04-19T14:30:27.498Z', + status: { + icon: 'icon_status_success', + text: 'passed', + label: 'passed', + group: 'success', + has_details: true, + details_path: '/root/ci-mock/builds/4166', + favicon: + '/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico', + action: { + icon: 'retry', + title: 'Retry', + path: '/root/ci-mock/builds/4166/retry', + method: 'post', + }, + }, + }, + ], + }, + { + name: 'deploy to staging', + size: 1, + status: { + icon: 'icon_status_success', + text: 'passed', + label: 'passed', + group: 'success', + has_details: true, + details_path: '/root/ci-mock/builds/4159', + favicon: + '/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico', + action: { + icon: 'retry', + title: 'Retry', + path: 
'/root/ci-mock/builds/4159/retry', + method: 'post', + }, + }, + jobs: [ + { + id: 4159, + name: 'deploy to staging', + build_path: '/root/ci-mock/builds/4159', + retry_path: '/root/ci-mock/builds/4159/retry', + playable: false, + created_at: '2017-04-18T16:32:08.420Z', + updated_at: '2017-04-18T16:32:12.631Z', + status: { + icon: 'icon_status_success', + text: 'passed', + label: 'passed', + group: 'success', + has_details: true, + details_path: '/root/ci-mock/builds/4159', + favicon: + '/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico', + action: { + icon: 'retry', + title: 'Retry', + path: '/root/ci-mock/builds/4159/retry', + method: 'post', + }, + }, + }, + ], + }, + ], + status: { + icon: 'icon_status_success', + text: 'passed', + label: 'passed', + group: 'success', + has_details: true, + details_path: '/root/ci-mock/pipelines/123#deploy', + favicon: + '/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico', }, - "jobs": [{ - "id": 4166, - "name": "deploy to production", - "build_path": "/root/ci-mock/builds/4166", - "retry_path": "/root/ci-mock/builds/4166/retry", - "playable": false, - "created_at": "2017-04-19T14:29:46.463Z", - "updated_at": "2017-04-19T14:30:27.498Z", - "status": { - "icon": "icon_status_success", - "text": "passed", - "label": "passed", - "group": "success", - "has_details": true, - "details_path": "/root/ci-mock/builds/4166", - "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico", - "action": { - "icon": "retry", - "title": "Retry", - "path": "/root/ci-mock/builds/4166/retry", - "method": "post" - } - } - }] - }, { - "name": "deploy to staging", - "size": 1, - "status": { - "icon": "icon_status_success", - "text": "passed", - "label": "passed", - "group": "success", - "has_details": true, - "details_path": "/root/ci-mock/builds/4159", - "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico", - "action": { - "icon": "retry", - "title": "Retry", - "path": "/root/ci-mock/builds/4159/retry", - "method": "post" - } - }, - "jobs": [{ - "id": 4159, - "name": "deploy to staging", - "build_path": "/root/ci-mock/builds/4159", - "retry_path": "/root/ci-mock/builds/4159/retry", - "playable": false, - "created_at": "2017-04-18T16:32:08.420Z", - "updated_at": "2017-04-18T16:32:12.631Z", - "status": { - "icon": "icon_status_success", - "text": "passed", - "label": "passed", - "group": "success", - "has_details": true, - "details_path": "/root/ci-mock/builds/4159", - "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico", - "action": { - "icon": "retry", - "title": "Retry", - "path": "/root/ci-mock/builds/4159/retry", - "method": "post" - } - } - }] - }], - "status": { - "icon": "icon_status_success", - "text": "passed", - "label": "passed", - "group": "success", - "has_details": true, - "details_path": "/root/ci-mock/pipelines/123#deploy", - "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico" + path: '/root/ci-mock/pipelines/123#deploy', + dropdown_path: '/root/ci-mock/pipelines/123/stage.json?stage=deploy', + }, + ], + artifacts: [], + manual_actions: [ + { + name: 'deploy to production', + path: '/root/ci-mock/builds/4166/play', + playable: false, }, - 
"path": "/root/ci-mock/pipelines/123#deploy", - "dropdown_path": "/root/ci-mock/pipelines/123/stage.json?stage=deploy" - }], - "artifacts": [], - "manual_actions": [{ - "name": "deploy to production", - "path": "/root/ci-mock/builds/4166/play", - "playable": false - }] + ], }, - "flags": { - "latest": true, - "triggered": false, - "stuck": false, - "yaml_errors": false, - "retryable": false, - "cancelable": false + flags: { + latest: true, + triggered: false, + stuck: false, + yaml_errors: false, + retryable: false, + cancelable: false, }, - "ref": { - "name": "master", - "path": "/root/ci-mock/tree/master", - "tag": false, - "branch": true + ref: { + name: 'master', + path: '/root/ci-mock/tree/master', + tag: false, + branch: true, }, - "commit": { - "id": "798e5f902592192afaba73f4668ae30e56eae492", - "short_id": "798e5f90", - "title": "Merge branch 'new-branch' into 'master'\r", - "created_at": "2017-04-13T10:25:17.000+01:00", - "parent_ids": ["54d483b1ed156fbbf618886ddf7ab023e24f8738", "c8e2d38a6c538822e81c57022a6e3a0cfedebbcc"], - "message": "Merge branch 'new-branch' into 'master'\r\n\r\nAdd new file\r\n\r\nSee merge request !1", - "author_name": "Root", - "author_email": "admin@example.com", - "authored_date": "2017-04-13T10:25:17.000+01:00", - "committer_name": "Root", - "committer_email": "admin@example.com", - "committed_date": "2017-04-13T10:25:17.000+01:00", - "author": { - "name": "Root", - "username": "root", - "id": 1, - "state": "active", - "avatar_url": null, - "web_url": "http://localhost:3000/root" + commit: { + id: '798e5f902592192afaba73f4668ae30e56eae492', + short_id: '798e5f90', + title: "Merge branch 'new-branch' into 'master'\r", + created_at: '2017-04-13T10:25:17.000+01:00', + parent_ids: [ + '54d483b1ed156fbbf618886ddf7ab023e24f8738', + 'c8e2d38a6c538822e81c57022a6e3a0cfedebbcc', + ], + message: + "Merge branch 'new-branch' into 'master'\r\n\r\nAdd new file\r\n\r\nSee merge request !1", + author_name: 'Root', + author_email: 'admin@example.com', + authored_date: '2017-04-13T10:25:17.000+01:00', + committer_name: 'Root', + committer_email: 'admin@example.com', + committed_date: '2017-04-13T10:25:17.000+01:00', + author: { + name: 'Root', + username: 'root', + id: 1, + state: 'active', + avatar_url: null, + web_url: 'http://localhost:3000/root', }, - "author_gravatar_url": null, - "commit_url": "http://localhost:3000/root/ci-mock/commit/798e5f902592192afaba73f4668ae30e56eae492", - "commit_path": "/root/ci-mock/commit/798e5f902592192afaba73f4668ae30e56eae492" + author_gravatar_url: null, + commit_url: + 'http://localhost:3000/root/ci-mock/commit/798e5f902592192afaba73f4668ae30e56eae492', + commit_path: '/root/ci-mock/commit/798e5f902592192afaba73f4668ae30e56eae492', }, - "created_at": "2017-04-13T09:25:18.881Z", - "updated_at": "2017-04-19T14:30:27.561Z" + created_at: '2017-04-13T09:25:18.881Z', + updated_at: '2017-04-19T14:30:27.561Z', }; diff --git a/spec/javascripts/pipelines/mock_data.js b/spec/javascripts/pipelines/mock_data.js new file mode 100644 index 00000000000..59092e0f041 --- /dev/null +++ b/spec/javascripts/pipelines/mock_data.js @@ -0,0 +1,326 @@ +export const pipelineWithStages = { + id: 20333396, + user: { + id: 128633, + name: 'Rémy Coutable', + username: 'rymai', + state: 'active', + avatar_url: + 'https://secure.gravatar.com/avatar/263da227929cc0035cb0eba512bcf81a?s=80\u0026d=identicon', + web_url: 'https://gitlab.com/rymai', + path: '/rymai', + }, + active: true, + coverage: '58.24', + source: 'push', + created_at: '2018-04-11T14:04:53.881Z', + 
updated_at: '2018-04-11T14:05:00.792Z', + path: '/gitlab-org/gitlab-ee/pipelines/20333396', + flags: { + latest: true, + stuck: false, + auto_devops: false, + yaml_errors: false, + retryable: false, + cancelable: true, + failure_reason: false, + }, + details: { + status: { + icon: 'status_running', + text: 'running', + label: 'running', + group: 'running', + has_details: true, + details_path: '/gitlab-org/gitlab-ee/pipelines/20333396', + favicon: + 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_running-2eb56be2871937954b2ba6d6f4ee9fdf7e5e1c146ac45f7be98119ccaca1aca9.ico', + }, + duration: null, + finished_at: null, + stages: [ + { + name: 'build', + title: 'build: skipped', + status: { + icon: 'status_skipped', + text: 'skipped', + label: 'skipped', + group: 'skipped', + has_details: true, + details_path: '/gitlab-org/gitlab-ee/pipelines/20333396#build', + favicon: + 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_skipped-a2eee568a5bffdb494050c7b62dde241de9189280836288ac8923d369f16222d.ico', + }, + path: '/gitlab-org/gitlab-ee/pipelines/20333396#build', + dropdown_path: '/gitlab-org/gitlab-ee/pipelines/20333396/stage.json?stage=build', + }, + { + name: 'prepare', + title: 'prepare: passed', + status: { + icon: 'status_success', + text: 'passed', + label: 'passed', + group: 'success', + has_details: true, + details_path: '/gitlab-org/gitlab-ee/pipelines/20333396#prepare', + favicon: + 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_success-26f59841becbef8c6fe414e9e74471d8bfd6a91b5855c19fe7f5923a40a7da47.ico', + }, + path: '/gitlab-org/gitlab-ee/pipelines/20333396#prepare', + dropdown_path: '/gitlab-org/gitlab-ee/pipelines/20333396/stage.json?stage=prepare', + }, + { + name: 'test', + title: 'test: running', + status: { + icon: 'status_running', + text: 'running', + label: 'running', + group: 'running', + has_details: true, + details_path: '/gitlab-org/gitlab-ee/pipelines/20333396#test', + favicon: + 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_running-2eb56be2871937954b2ba6d6f4ee9fdf7e5e1c146ac45f7be98119ccaca1aca9.ico', + }, + path: '/gitlab-org/gitlab-ee/pipelines/20333396#test', + dropdown_path: '/gitlab-org/gitlab-ee/pipelines/20333396/stage.json?stage=test', + }, + { + name: 'post-test', + title: 'post-test: created', + status: { + icon: 'status_created', + text: 'created', + label: 'created', + group: 'created', + has_details: true, + details_path: '/gitlab-org/gitlab-ee/pipelines/20333396#post-test', + favicon: + 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_created-e997aa0b7db73165df8a9d6803932b18d7b7cc37d604d2d96e378fea2dba9c5f.ico', + }, + path: '/gitlab-org/gitlab-ee/pipelines/20333396#post-test', + dropdown_path: '/gitlab-org/gitlab-ee/pipelines/20333396/stage.json?stage=post-test', + }, + { + name: 'pages', + title: 'pages: created', + status: { + icon: 'status_created', + text: 'created', + label: 'created', + group: 'created', + has_details: true, + details_path: '/gitlab-org/gitlab-ee/pipelines/20333396#pages', + favicon: + 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_created-e997aa0b7db73165df8a9d6803932b18d7b7cc37d604d2d96e378fea2dba9c5f.ico', + }, + path: '/gitlab-org/gitlab-ee/pipelines/20333396#pages', + dropdown_path: '/gitlab-org/gitlab-ee/pipelines/20333396/stage.json?stage=pages', + }, + { + name: 'post-cleanup', + title: 'post-cleanup: created', + status: { + icon: 'status_created', + text: 'created', + label: 'created', + group: 
'created', + has_details: true, + details_path: '/gitlab-org/gitlab-ee/pipelines/20333396#post-cleanup', + favicon: + 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_created-e997aa0b7db73165df8a9d6803932b18d7b7cc37d604d2d96e378fea2dba9c5f.ico', + }, + path: '/gitlab-org/gitlab-ee/pipelines/20333396#post-cleanup', + dropdown_path: '/gitlab-org/gitlab-ee/pipelines/20333396/stage.json?stage=post-cleanup', + }, + ], + artifacts: [ + { + name: 'gitlab:assets:compile', + expired: false, + expire_at: '2018-05-12T14:22:54.730Z', + path: '/gitlab-org/gitlab-ee/-/jobs/62411438/artifacts/download', + keep_path: '/gitlab-org/gitlab-ee/-/jobs/62411438/artifacts/keep', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411438/artifacts/browse', + }, + { + name: 'rspec-mysql 12 28', + expired: false, + expire_at: '2018-05-12T14:22:45.136Z', + path: '/gitlab-org/gitlab-ee/-/jobs/62411397/artifacts/download', + keep_path: '/gitlab-org/gitlab-ee/-/jobs/62411397/artifacts/keep', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411397/artifacts/browse', + }, + { + name: 'rspec-mysql 6 28', + expired: false, + expire_at: '2018-05-12T14:22:41.523Z', + path: '/gitlab-org/gitlab-ee/-/jobs/62411391/artifacts/download', + keep_path: '/gitlab-org/gitlab-ee/-/jobs/62411391/artifacts/keep', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411391/artifacts/browse', + }, + { + name: 'rspec-pg geo 0 1', + expired: false, + expire_at: '2018-05-12T14:22:13.287Z', + path: '/gitlab-org/gitlab-ee/-/jobs/62411353/artifacts/download', + keep_path: '/gitlab-org/gitlab-ee/-/jobs/62411353/artifacts/keep', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411353/artifacts/browse', + }, + { + name: 'rspec-mysql 0 28', + expired: false, + expire_at: '2018-05-12T14:22:06.834Z', + path: '/gitlab-org/gitlab-ee/-/jobs/62411385/artifacts/download', + keep_path: '/gitlab-org/gitlab-ee/-/jobs/62411385/artifacts/keep', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411385/artifacts/browse', + }, + { + name: 'spinach-mysql 0 2', + expired: false, + expire_at: '2018-05-12T14:21:51.409Z', + path: '/gitlab-org/gitlab-ee/-/jobs/62411423/artifacts/download', + keep_path: '/gitlab-org/gitlab-ee/-/jobs/62411423/artifacts/keep', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411423/artifacts/browse', + }, + { + name: 'karma', + expired: false, + expire_at: '2018-05-12T14:21:20.934Z', + path: '/gitlab-org/gitlab-ee/-/jobs/62411440/artifacts/download', + keep_path: '/gitlab-org/gitlab-ee/-/jobs/62411440/artifacts/keep', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411440/artifacts/browse', + }, + { + name: 'spinach-pg 0 2', + expired: false, + expire_at: '2018-05-12T14:20:01.028Z', + path: '/gitlab-org/gitlab-ee/-/jobs/62411419/artifacts/download', + keep_path: '/gitlab-org/gitlab-ee/-/jobs/62411419/artifacts/keep', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411419/artifacts/browse', + }, + { + name: 'spinach-pg 1 2', + expired: false, + expire_at: '2018-05-12T14:19:04.336Z', + path: '/gitlab-org/gitlab-ee/-/jobs/62411421/artifacts/download', + keep_path: '/gitlab-org/gitlab-ee/-/jobs/62411421/artifacts/keep', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411421/artifacts/browse', + }, + { + name: 'sast', + expired: null, + expire_at: null, + path: '/gitlab-org/gitlab-ee/-/jobs/62411442/artifacts/download', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411442/artifacts/browse', + }, + { + name: 'codequality', + expired: false, + expire_at: '2018-04-18T14:16:24.484Z', + path: '/gitlab-org/gitlab-ee/-/jobs/62411441/artifacts/download', + 
keep_path: '/gitlab-org/gitlab-ee/-/jobs/62411441/artifacts/keep', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411441/artifacts/browse', + }, + { + name: 'cache gems', + expired: null, + expire_at: null, + path: '/gitlab-org/gitlab-ee/-/jobs/62411447/artifacts/download', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411447/artifacts/browse', + }, + { + name: 'dependency_scanning', + expired: null, + expire_at: null, + path: '/gitlab-org/gitlab-ee/-/jobs/62411443/artifacts/download', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411443/artifacts/browse', + }, + { + name: 'compile-assets', + expired: false, + expire_at: '2018-04-18T14:12:07.638Z', + path: '/gitlab-org/gitlab-ee/-/jobs/62411334/artifacts/download', + keep_path: '/gitlab-org/gitlab-ee/-/jobs/62411334/artifacts/keep', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411334/artifacts/browse', + }, + { + name: 'setup-test-env', + expired: false, + expire_at: '2018-04-18T14:10:27.024Z', + path: '/gitlab-org/gitlab-ee/-/jobs/62411336/artifacts/download', + keep_path: '/gitlab-org/gitlab-ee/-/jobs/62411336/artifacts/keep', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411336/artifacts/browse', + }, + { + name: 'retrieve-tests-metadata', + expired: false, + expire_at: '2018-05-12T14:06:35.926Z', + path: '/gitlab-org/gitlab-ee/-/jobs/62411333/artifacts/download', + keep_path: '/gitlab-org/gitlab-ee/-/jobs/62411333/artifacts/keep', + browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411333/artifacts/browse', + }, + ], + manual_actions: [ + { + name: 'package-and-qa', + path: '/gitlab-org/gitlab-ee/-/jobs/62411330/play', + playable: true, + }, + { + name: 'review-docs-deploy', + path: '/gitlab-org/gitlab-ee/-/jobs/62411332/play', + playable: true, + }, + ], + }, + ref: { + name: 'master', + path: '/gitlab-org/gitlab-ee/commits/master', + tag: false, + branch: true, + }, + commit: { + id: 'e6a2885c503825792cb8a84a8731295e361bd059', + short_id: 'e6a2885c', + title: "Merge branch 'ce-to-ee-2018-04-11' into 'master'", + created_at: '2018-04-11T14:04:39.000Z', + parent_ids: [ + '5d9b5118f6055f72cff1a82b88133609912f2c1d', + '6fdc6ee76a8062fe41b1a33f7c503334a6ebdc02', + ], + message: + "Merge branch 'ce-to-ee-2018-04-11' into 'master'\n\nCE upstream - 2018-04-11 12:26 UTC\n\nSee merge request gitlab-org/gitlab-ee!5326", + author_name: 'Rémy Coutable', + author_email: 'remy@rymai.me', + authored_date: '2018-04-11T14:04:39.000Z', + committer_name: 'Rémy Coutable', + committer_email: 'remy@rymai.me', + committed_date: '2018-04-11T14:04:39.000Z', + author: { + id: 128633, + name: 'Rémy Coutable', + username: 'rymai', + state: 'active', + avatar_url: + 'https://secure.gravatar.com/avatar/263da227929cc0035cb0eba512bcf81a?s=80\u0026d=identicon', + web_url: 'https://gitlab.com/rymai', + path: '/rymai', + }, + author_gravatar_url: + 'https://secure.gravatar.com/avatar/263da227929cc0035cb0eba512bcf81a?s=80\u0026d=identicon', + commit_url: + 'https://gitlab.com/gitlab-org/gitlab-ee/commit/e6a2885c503825792cb8a84a8731295e361bd059', + commit_path: '/gitlab-org/gitlab-ee/commit/e6a2885c503825792cb8a84a8731295e361bd059', + }, + cancel_path: '/gitlab-org/gitlab-ee/pipelines/20333396/cancel', + triggered_by: null, + triggered: [], +}; + +export const stageReply = { + html: + '\u003cli\u003e\n\u003ca class="mini-pipeline-graph-dropdown-item" data-toggle="tooltip" data-title="karma - failed \u0026lt;br\u0026gt; (script failure)" data-html="true" data-container="body" href="/gitlab-org/gitlab-ce/-/jobs/62402048"\u003e\u003cspan class="ci-status-icon 
[Escaped HTML fixture excerpt: the mini pipeline graph dropdown markup for a gitlab-org/gitlab-ce pipeline. Each <li> entry is a mini-pipeline-graph-dropdown-item link to /gitlab-org/gitlab-ce/-/jobs/:id containing an SVG status icon and the job name, followed by a js-ci-action-icon "Retry" link that POSTs to /gitlab-org/gitlab-ce/-/jobs/:id/retry; all icons reference the fingerprinted sprite under https://gitlab.com/assets/. Jobs covered in this span: karma (failed), then passed entries for codequality, db:check-schema-pg, db:migrate:reset-mysql, db:migrate:reset-pg, db:rollback-mysql, db:rollback-pg, dependency_scanning, docs lint, downtime_check, ee_compat_check, gitlab:assets:compile, gitlab:setup-mysql, gitlab:setup-pg, gitlab_git_test, migration:path-mysql, migration:path-pg, qa:internal, qa:selectors, the rspec-mysql 0–27 (of 28) shards, and the rspec-pg shards, which continue beyond this excerpt.]
data-title="rspec-pg 7 28 - passed" data-html="true" data-container="body" href="/gitlab-org/gitlab-ce/-/jobs/62397994"\u003e\u003cspan class="ci-status-icon ci-status-icon-success"\u003e\u003csvg\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#status_success"\u003e\u003c/use\u003e\u003c/svg\u003e\u003c/span\u003e\n\u003cspan class="ci-build-text"\u003erspec-pg 7 28\u003c/span\u003e\n\u003c/a\u003e\u003ca class="ci-action-icon-wrapper js-ci-action-icon" data-toggle="tooltip" data-title="Retry" data-container="body" rel="nofollow" data-method="post" href="/gitlab-org/gitlab-ce/-/jobs/62397994/retry"\u003e\u003csvg class=" icon-action-retry"\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#retry"\u003e\u003c/use\u003e\u003c/svg\u003e\n\u003c/a\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ca class="mini-pipeline-graph-dropdown-item" data-toggle="tooltip" data-title="rspec-pg 8 28 - passed" data-html="true" data-container="body" href="/gitlab-org/gitlab-ce/-/jobs/62397995"\u003e\u003cspan class="ci-status-icon ci-status-icon-success"\u003e\u003csvg\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#status_success"\u003e\u003c/use\u003e\u003c/svg\u003e\u003c/span\u003e\n\u003cspan class="ci-build-text"\u003erspec-pg 8 28\u003c/span\u003e\n\u003c/a\u003e\u003ca class="ci-action-icon-wrapper js-ci-action-icon" data-toggle="tooltip" data-title="Retry" data-container="body" rel="nofollow" data-method="post" href="/gitlab-org/gitlab-ce/-/jobs/62397995/retry"\u003e\u003csvg class=" icon-action-retry"\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#retry"\u003e\u003c/use\u003e\u003c/svg\u003e\n\u003c/a\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ca class="mini-pipeline-graph-dropdown-item" data-toggle="tooltip" data-title="rspec-pg 9 28 - passed" data-html="true" data-container="body" href="/gitlab-org/gitlab-ce/-/jobs/62397996"\u003e\u003cspan class="ci-status-icon ci-status-icon-success"\u003e\u003csvg\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#status_success"\u003e\u003c/use\u003e\u003c/svg\u003e\u003c/span\u003e\n\u003cspan class="ci-build-text"\u003erspec-pg 9 28\u003c/span\u003e\n\u003c/a\u003e\u003ca class="ci-action-icon-wrapper js-ci-action-icon" data-toggle="tooltip" data-title="Retry" data-container="body" rel="nofollow" data-method="post" href="/gitlab-org/gitlab-ce/-/jobs/62397996/retry"\u003e\u003csvg class=" icon-action-retry"\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#retry"\u003e\u003c/use\u003e\u003c/svg\u003e\n\u003c/a\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ca class="mini-pipeline-graph-dropdown-item" data-toggle="tooltip" data-title="sast - passed" data-html="true" data-container="body" href="/gitlab-org/gitlab-ce/-/jobs/62398082"\u003e\u003cspan class="ci-status-icon ci-status-icon-success"\u003e\u003csvg\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#status_success"\u003e\u003c/use\u003e\u003c/svg\u003e\u003c/span\u003e\n\u003cspan class="ci-build-text"\u003esast\u003c/span\u003e\n\u003c/a\u003e\u003ca 
class="ci-action-icon-wrapper js-ci-action-icon" data-toggle="tooltip" data-title="Retry" data-container="body" rel="nofollow" data-method="post" href="/gitlab-org/gitlab-ce/-/jobs/62398082/retry"\u003e\u003csvg class=" icon-action-retry"\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#retry"\u003e\u003c/use\u003e\u003c/svg\u003e\n\u003c/a\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ca class="mini-pipeline-graph-dropdown-item" data-toggle="tooltip" data-title="spinach-mysql 0 2 - passed" data-html="true" data-container="body" href="/gitlab-org/gitlab-ce/-/jobs/62398058"\u003e\u003cspan class="ci-status-icon ci-status-icon-success"\u003e\u003csvg\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#status_success"\u003e\u003c/use\u003e\u003c/svg\u003e\u003c/span\u003e\n\u003cspan class="ci-build-text"\u003espinach-mysql 0 2\u003c/span\u003e\n\u003c/a\u003e\u003ca class="ci-action-icon-wrapper js-ci-action-icon" data-toggle="tooltip" data-title="Retry" data-container="body" rel="nofollow" data-method="post" href="/gitlab-org/gitlab-ce/-/jobs/62398058/retry"\u003e\u003csvg class=" icon-action-retry"\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#retry"\u003e\u003c/use\u003e\u003c/svg\u003e\n\u003c/a\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ca class="mini-pipeline-graph-dropdown-item" data-toggle="tooltip" data-title="spinach-mysql 1 2 - passed" data-html="true" data-container="body" href="/gitlab-org/gitlab-ce/-/jobs/62398059"\u003e\u003cspan class="ci-status-icon ci-status-icon-success"\u003e\u003csvg\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#status_success"\u003e\u003c/use\u003e\u003c/svg\u003e\u003c/span\u003e\n\u003cspan class="ci-build-text"\u003espinach-mysql 1 2\u003c/span\u003e\n\u003c/a\u003e\u003ca class="ci-action-icon-wrapper js-ci-action-icon" data-toggle="tooltip" data-title="Retry" data-container="body" rel="nofollow" data-method="post" href="/gitlab-org/gitlab-ce/-/jobs/62398059/retry"\u003e\u003csvg class=" icon-action-retry"\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#retry"\u003e\u003c/use\u003e\u003c/svg\u003e\n\u003c/a\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ca class="mini-pipeline-graph-dropdown-item" data-toggle="tooltip" data-title="spinach-pg 0 2 - passed" data-html="true" data-container="body" href="/gitlab-org/gitlab-ce/-/jobs/62398053"\u003e\u003cspan class="ci-status-icon ci-status-icon-success"\u003e\u003csvg\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#status_success"\u003e\u003c/use\u003e\u003c/svg\u003e\u003c/span\u003e\n\u003cspan class="ci-build-text"\u003espinach-pg 0 2\u003c/span\u003e\n\u003c/a\u003e\u003ca class="ci-action-icon-wrapper js-ci-action-icon" data-toggle="tooltip" data-title="Retry" data-container="body" rel="nofollow" data-method="post" href="/gitlab-org/gitlab-ce/-/jobs/62398053/retry"\u003e\u003csvg class=" icon-action-retry"\u003e\u003cuse 
xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#retry"\u003e\u003c/use\u003e\u003c/svg\u003e\n\u003c/a\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ca class="mini-pipeline-graph-dropdown-item" data-toggle="tooltip" data-title="spinach-pg 1 2 - passed" data-html="true" data-container="body" href="/gitlab-org/gitlab-ce/-/jobs/62398056"\u003e\u003cspan class="ci-status-icon ci-status-icon-success"\u003e\u003csvg\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#status_success"\u003e\u003c/use\u003e\u003c/svg\u003e\u003c/span\u003e\n\u003cspan class="ci-build-text"\u003espinach-pg 1 2\u003c/span\u003e\n\u003c/a\u003e\u003ca class="ci-action-icon-wrapper js-ci-action-icon" data-toggle="tooltip" data-title="Retry" data-container="body" rel="nofollow" data-method="post" href="/gitlab-org/gitlab-ce/-/jobs/62398056/retry"\u003e\u003csvg class=" icon-action-retry"\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#retry"\u003e\u003c/use\u003e\u003c/svg\u003e\n\u003c/a\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ca class="mini-pipeline-graph-dropdown-item" data-toggle="tooltip" data-title="static-analysis - passed" data-html="true" data-container="body" href="/gitlab-org/gitlab-ce/-/jobs/62398060"\u003e\u003cspan class="ci-status-icon ci-status-icon-success"\u003e\u003csvg\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#status_success"\u003e\u003c/use\u003e\u003c/svg\u003e\u003c/span\u003e\n\u003cspan class="ci-build-text"\u003estatic-analysis\u003c/span\u003e\n\u003c/a\u003e\u003ca class="ci-action-icon-wrapper js-ci-action-icon" data-toggle="tooltip" data-title="Retry" data-container="body" rel="nofollow" data-method="post" href="/gitlab-org/gitlab-ce/-/jobs/62398060/retry"\u003e\u003csvg class=" icon-action-retry"\u003e\u003cuse xlink:href="https://gitlab.com/assets/icons-fe86f87a3d244c952cc0ec8d7f88c5effefcbe454d751d8449d4a1a32aaaf9a0.svg#retry"\u003e\u003c/use\u003e\u003c/svg\u003e\n\u003c/a\u003e\n\u003c/li\u003e\n', +}; diff --git a/spec/javascripts/pipelines/pipeline_details_mediator_spec.js b/spec/javascripts/pipelines/pipeline_details_mediator_spec.js index e58a8018ed5..61ee2dc13ca 100644 --- a/spec/javascripts/pipelines/pipeline_details_mediator_spec.js +++ b/spec/javascripts/pipelines/pipeline_details_mediator_spec.js @@ -1,42 +1,36 @@ -import _ from 'underscore'; -import Vue from 'vue'; +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import PipelineMediator from '~/pipelines/pipeline_details_mediator'; describe('PipelineMdediator', () => { let mediator; + let mock; + beforeEach(() => { - mediator = new PipelineMediator({ endpoint: 'foo' }); + mock = new MockAdapter(axios); + mediator = new PipelineMediator({ endpoint: 'foo.json' }); + }); + + afterEach(() => { + mock.restore(); }); it('should set defaults', () => { - expect(mediator.options).toEqual({ endpoint: 'foo' }); + expect(mediator.options).toEqual({ endpoint: 'foo.json' }); expect(mediator.state.isLoading).toEqual(false); expect(mediator.store).toBeDefined(); expect(mediator.service).toBeDefined(); }); describe('request and store data', () => { - const interceptor = (request, next) => { - next(request.respondWith(JSON.stringify({ foo: 'bar' }), { - status: 200, - })); - }; - - beforeEach(() 
=> { - Vue.http.interceptors.push(interceptor); - }); - - afterEach(() => { - Vue.http.interceptors = _.without(Vue.http.interceptor, interceptor); - }); - - it('should store received data', (done) => { + it('should store received data', done => { + mock.onGet('foo.json').reply(200, { id: '121123' }); mediator.fetchPipeline(); setTimeout(() => { - expect(mediator.store.state.pipeline).toEqual({ foo: 'bar' }); + expect(mediator.store.state.pipeline).toEqual({ id: '121123' }); done(); - }); + }, 0); }); }); }); diff --git a/spec/javascripts/pipelines/pipelines_spec.js b/spec/javascripts/pipelines/pipelines_spec.js index 7e242eb45e1..ff17602da2b 100644 --- a/spec/javascripts/pipelines/pipelines_spec.js +++ b/spec/javascripts/pipelines/pipelines_spec.js @@ -1,8 +1,10 @@ -import _ from 'underscore'; import Vue from 'vue'; +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import pipelinesComp from '~/pipelines/components/pipelines.vue'; import Store from '~/pipelines/stores/pipelines_store'; import mountComponent from 'spec/helpers/vue_mount_component_helper'; +import { pipelineWithStages, stageReply } from './mock_data'; describe('Pipelines', () => { const jsonFixtureName = 'pipelines/pipelines.json'; @@ -12,6 +14,8 @@ describe('Pipelines', () => { let PipelinesComponent; let pipelines; let vm; + let mock; + const paths = { endpoint: 'twitter/flight/pipelines.json', autoDevopsPath: '/help/topics/autodevops/index.md', @@ -34,6 +38,8 @@ describe('Pipelines', () => { }; beforeEach(() => { + mock = new MockAdapter(axios); + pipelines = getJSONFixture(jsonFixtureName); PipelinesComponent = Vue.extend(pipelinesComp); @@ -41,38 +47,14 @@ describe('Pipelines', () => { afterEach(() => { vm.$destroy(); + mock.restore(); }); - const pipelinesInterceptor = (request, next) => { - next(request.respondWith(JSON.stringify(pipelines), { - status: 200, - })); - }; - - const emptyStateInterceptor = (request, next) => { - next(request.respondWith(JSON.stringify({ - pipelines: [], - count: { - all: 0, - pending: 0, - running: 0, - finished: 0, - }, - }), { - status: 200, - })); - }; - - const errorInterceptor = (request, next) => { - next(request.respondWith(JSON.stringify({}), { - status: 500, - })); - }; - describe('With permission', () => { describe('With pipelines in main tab', () => { beforeEach((done) => { - Vue.http.interceptors.push(pipelinesInterceptor); + mock.onGet('twitter/flight/pipelines.json').reply(200, pipelines); + vm = mountComponent(PipelinesComponent, { store: new Store(), hasGitlabCi: true, @@ -85,12 +67,6 @@ describe('Pipelines', () => { }); }); - afterEach(() => { - Vue.http.interceptors = _.without( - Vue.http.interceptors, pipelinesInterceptor, - ); - }); - it('renders tabs', () => { expect(vm.$el.querySelector('.js-pipelines-tab-all').textContent.trim()).toContain('All'); }); @@ -116,7 +92,15 @@ describe('Pipelines', () => { describe('Without pipelines on main tab with CI', () => { beforeEach((done) => { - Vue.http.interceptors.push(emptyStateInterceptor); + mock.onGet('twitter/flight/pipelines.json').reply(200, { + pipelines: [], + count: { + all: 0, + pending: 0, + running: 0, + finished: 0, + }, + }); vm = mountComponent(PipelinesComponent, { store: new Store(), hasGitlabCi: true, @@ -129,12 +113,6 @@ describe('Pipelines', () => { }); }); - afterEach(() => { - Vue.http.interceptors = _.without( - Vue.http.interceptors, emptyStateInterceptor, - ); - }); - it('renders tabs', () => { 
expect(vm.$el.querySelector('.js-pipelines-tab-all').textContent.trim()).toContain('All'); }); @@ -158,7 +136,15 @@ describe('Pipelines', () => { describe('Without pipelines nor CI', () => { beforeEach((done) => { - Vue.http.interceptors.push(emptyStateInterceptor); + mock.onGet('twitter/flight/pipelines.json').reply(200, { + pipelines: [], + count: { + all: 0, + pending: 0, + running: 0, + finished: 0, + }, + }); vm = mountComponent(PipelinesComponent, { store: new Store(), hasGitlabCi: false, @@ -171,12 +157,6 @@ describe('Pipelines', () => { }); }); - afterEach(() => { - Vue.http.interceptors = _.without( - Vue.http.interceptors, emptyStateInterceptor, - ); - }); - it('renders empty state', () => { expect(vm.$el.querySelector('.js-empty-state h4').textContent.trim()).toEqual('Build with confidence'); expect(vm.$el.querySelector('.js-get-started-pipelines').getAttribute('href')).toEqual(paths.helpPagePath); @@ -192,7 +172,7 @@ describe('Pipelines', () => { describe('When API returns error', () => { beforeEach((done) => { - Vue.http.interceptors.push(errorInterceptor); + mock.onGet('twitter/flight/pipelines.json').reply(500, {}); vm = mountComponent(PipelinesComponent, { store: new Store(), hasGitlabCi: false, @@ -205,12 +185,6 @@ describe('Pipelines', () => { }); }); - afterEach(() => { - Vue.http.interceptors = _.without( - Vue.http.interceptors, errorInterceptor, - ); - }); - it('renders tabs', () => { expect(vm.$el.querySelector('.js-pipelines-tab-all').textContent.trim()).toContain('All'); }); @@ -230,7 +204,8 @@ describe('Pipelines', () => { describe('Without permission', () => { describe('With pipelines in main tab', () => { beforeEach((done) => { - Vue.http.interceptors.push(pipelinesInterceptor); + mock.onGet('twitter/flight/pipelines.json').reply(200, pipelines); + vm = mountComponent(PipelinesComponent, { store: new Store(), hasGitlabCi: false, @@ -243,12 +218,6 @@ describe('Pipelines', () => { }); }); - afterEach(() => { - Vue.http.interceptors = _.without( - Vue.http.interceptors, pipelinesInterceptor, - ); - }); - it('renders tabs', () => { expect(vm.$el.querySelector('.js-pipelines-tab-all').textContent.trim()).toContain('All'); }); @@ -268,7 +237,16 @@ describe('Pipelines', () => { describe('Without pipelines on main tab with CI', () => { beforeEach((done) => { - Vue.http.interceptors.push(emptyStateInterceptor); + mock.onGet('twitter/flight/pipelines.json').reply(200, { + pipelines: [], + count: { + all: 0, + pending: 0, + running: 0, + finished: 0, + }, + }); + vm = mountComponent(PipelinesComponent, { store: new Store(), hasGitlabCi: true, @@ -281,11 +259,6 @@ describe('Pipelines', () => { }); }); - afterEach(() => { - Vue.http.interceptors = _.without( - Vue.http.interceptors, emptyStateInterceptor, - ); - }); it('renders tabs', () => { expect(vm.$el.querySelector('.js-pipelines-tab-all').textContent.trim()).toContain('All'); }); @@ -303,7 +276,16 @@ describe('Pipelines', () => { describe('Without pipelines nor CI', () => { beforeEach((done) => { - Vue.http.interceptors.push(emptyStateInterceptor); + mock.onGet('twitter/flight/pipelines.json').reply(200, { + pipelines: [], + count: { + all: 0, + pending: 0, + running: 0, + finished: 0, + }, + }); + vm = mountComponent(PipelinesComponent, { store: new Store(), hasGitlabCi: false, @@ -316,12 +298,6 @@ describe('Pipelines', () => { }); }); - afterEach(() => { - Vue.http.interceptors = _.without( - Vue.http.interceptors, emptyStateInterceptor, - ); - }); - it('renders empty state without button to set CI', () => { 
expect(vm.$el.querySelector('.js-empty-state').textContent.trim()).toEqual('This project is not currently set up to run pipelines.'); expect(vm.$el.querySelector('.js-get-started-pipelines')).toBeNull(); @@ -337,7 +313,8 @@ describe('Pipelines', () => { describe('When API returns error', () => { beforeEach((done) => { - Vue.http.interceptors.push(errorInterceptor); + mock.onGet('twitter/flight/pipelines.json').reply(500, {}); + vm = mountComponent(PipelinesComponent, { store: new Store(), hasGitlabCi: false, @@ -350,12 +327,6 @@ describe('Pipelines', () => { }); }); - afterEach(() => { - Vue.http.interceptors = _.without( - Vue.http.interceptors, errorInterceptor, - ); - }); - it('renders tabs', () => { expect(vm.$el.querySelector('.js-pipelines-tab-all').textContent.trim()).toContain('All'); }); @@ -375,7 +346,8 @@ describe('Pipelines', () => { describe('successfull request', () => { describe('with pipelines', () => { beforeEach(() => { - Vue.http.interceptors.push(pipelinesInterceptor); + mock.onGet('twitter/flight/pipelines.json').reply(200, pipelines); + vm = mountComponent(PipelinesComponent, { store: new Store(), hasGitlabCi: true, @@ -384,12 +356,6 @@ describe('Pipelines', () => { }); }); - afterEach(() => { - Vue.http.interceptors = _.without( - Vue.http.interceptors, pipelinesInterceptor, - ); - }); - it('should render table', (done) => { setTimeout(() => { expect(vm.$el.querySelector('.table-holder')).toBeDefined(); @@ -703,4 +669,79 @@ describe('Pipelines', () => { }); }); }); + + describe('updates results when a staged is clicked', () => { + beforeEach(() => { + const copyPipeline = Object.assign({}, pipelineWithStages); + copyPipeline.id += 1; + mock + .onGet('twitter/flight/pipelines.json').reply(200, { + pipelines: [pipelineWithStages], + count: { + all: 1, + finished: 1, + pending: 0, + running: 0, + }, + }, { + 'POLL-INTERVAL': 100, + }) + .onGet(pipelineWithStages.details.stages[0].dropdown_path) + .reply(200, stageReply); + + vm = mountComponent(PipelinesComponent, { + store: new Store(), + hasGitlabCi: true, + canCreatePipeline: true, + ...paths, + }); + }); + + describe('when a request is being made', () => { + it('stops polling, cancels the request, fetches pipelines & restarts polling', (done) => { + spyOn(vm.poll, 'stop'); + spyOn(vm.poll, 'restart'); + spyOn(vm, 'getPipelines').and.returnValue(Promise.resolve()); + spyOn(vm.service.cancelationSource, 'cancel').and.callThrough(); + + setTimeout(() => { + vm.isMakingRequest = true; + return vm.$nextTick() + .then(() => { + vm.$el.querySelector('.js-builds-dropdown-button').click(); + }) + .then(() => { + expect(vm.service.cancelationSource.cancel).toHaveBeenCalled(); + expect(vm.poll.stop).toHaveBeenCalled(); + + setTimeout(() => { + expect(vm.getPipelines).toHaveBeenCalled(); + expect(vm.poll.restart).toHaveBeenCalled(); + done(); + }, 0); + }); + }, 0); + }); + }); + + describe('when no request is being made', () => { + it('stops polling, fetches pipelines & restarts polling', (done) => { + spyOn(vm.poll, 'stop'); + spyOn(vm.poll, 'restart'); + spyOn(vm, 'getPipelines').and.returnValue(Promise.resolve()); + + setTimeout(() => { + vm.$el.querySelector('.js-builds-dropdown-button').click(); + + expect(vm.poll.stop).toHaveBeenCalled(); + + setTimeout(() => { + expect(vm.getPipelines).toHaveBeenCalled(); + expect(vm.poll.restart).toHaveBeenCalled(); + done(); + }, 0); + }, 0); + }); + }); + }); }); diff --git a/spec/javascripts/pipelines/stage_spec.js b/spec/javascripts/pipelines/stage_spec.js index 
61c2f783acc..be1632e7206 100644 --- a/spec/javascripts/pipelines/stage_spec.js +++ b/spec/javascripts/pipelines/stage_spec.js @@ -1,27 +1,36 @@ -import _ from 'underscore'; import Vue from 'vue'; +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import stage from '~/pipelines/components/stage.vue'; +import eventHub from '~/pipelines/event_hub'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Pipelines stage component', () => { let StageComponent; let component; + let mock; beforeEach(() => { + mock = new MockAdapter(axios); + StageComponent = Vue.extend(stage); - component = new StageComponent({ - propsData: { - stage: { - status: { - group: 'success', - icon: 'icon_status_success', - title: 'success', - }, - dropdown_path: 'foo', + component = mountComponent(StageComponent, { + stage: { + status: { + group: 'success', + icon: 'icon_status_success', + title: 'success', }, - updateDropdown: false, + dropdown_path: 'path.json', }, - }).$mount(); + updateDropdown: false, + }); + }); + + afterEach(() => { + component.$destroy(); + mock.restore(); }); it('should render a dropdown with the status icon', () => { @@ -31,49 +40,27 @@ describe('Pipelines stage component', () => { }); describe('with successfull request', () => { - const interceptor = (request, next) => { - next(request.respondWith(JSON.stringify({ html: 'foo' }), { - status: 200, - })); - }; - beforeEach(() => { - Vue.http.interceptors.push(interceptor); - }); - - afterEach(() => { - Vue.http.interceptors = _.without( - Vue.http.interceptors, interceptor, - ); + mock.onGet('path.json').reply(200, { html: 'foo' }); }); - it('should render the received data', (done) => { + it('should render the received data and emit `clickedDropdown` event', done => { + spyOn(eventHub, '$emit'); component.$el.querySelector('button').click(); setTimeout(() => { expect( component.$el.querySelector('.js-builds-dropdown-container ul').textContent.trim(), ).toEqual('foo'); + expect(eventHub.$emit).toHaveBeenCalledWith('clickedDropdown'); done(); }, 0); }); }); describe('when request fails', () => { - const interceptor = (request, next) => { - next(request.respondWith(JSON.stringify({}), { - status: 500, - })); - }; - beforeEach(() => { - Vue.http.interceptors.push(interceptor); - }); - - afterEach(() => { - Vue.http.interceptors = _.without( - Vue.http.interceptors, interceptor, - ); + mock.onGet('path.json').reply(500); }); it('should close the dropdown', () => { @@ -86,33 +73,18 @@ describe('Pipelines stage component', () => { }); describe('update endpoint correctly', () => { - const updatedInterceptor = (request, next) => { - if (request.url === 'bar') { - next(request.respondWith(JSON.stringify({ html: 'this is the updated content' }), { - status: 200, - })); - } - next(); - }; - beforeEach(() => { - Vue.http.interceptors.push(updatedInterceptor); - }); - - afterEach(() => { - Vue.http.interceptors = _.without( - Vue.http.interceptors, updatedInterceptor, - ); + mock.onGet('bar.json').reply(200, { html: 'this is the updated content' }); }); - it('should update the stage to request the new endpoint provided', (done) => { + it('should update the stage to request the new endpoint provided', done => { component.stage = { status: { group: 'running', icon: 'running', title: 'running', }, - dropdown_path: 'bar', + dropdown_path: 'bar.json', }; Vue.nextTick(() => { @@ -121,7 +93,7 @@ describe('Pipelines stage component', () => { setTimeout(() => { expect( 
component.$el.querySelector('.js-builds-dropdown-container ul').textContent.trim(), - ).toEqual('this is the updated content'); + ).toEqual('this is the updated content'); done(); }); }); diff --git a/spec/javascripts/profile/account/components/update_username_spec.js b/spec/javascripts/profile/account/components/update_username_spec.js new file mode 100644 index 00000000000..bac306edf5a --- /dev/null +++ b/spec/javascripts/profile/account/components/update_username_spec.js @@ -0,0 +1,172 @@ +import Vue from 'vue'; +import axios from '~/lib/utils/axios_utils'; +import MockAdapter from 'axios-mock-adapter'; + +import updateUsername from '~/profile/account/components/update_username.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; + +describe('UpdateUsername component', () => { + const rootUrl = gl.TEST_HOST; + const actionUrl = `${gl.TEST_HOST}/update/username`; + const username = 'hasnoname'; + const newUsername = 'new_username'; + let Component; + let vm; + let axiosMock; + + beforeEach(() => { + axiosMock = new MockAdapter(axios); + Component = Vue.extend(updateUsername); + vm = mountComponent(Component, { + actionUrl, + rootUrl, + initialUsername: username, + }); + }); + + afterEach(() => { + vm.$destroy(); + axiosMock.restore(); + }); + + const findElements = () => { + const modalSelector = `#${vm.$options.modalId}`; + + return { + input: vm.$el.querySelector(`#${vm.$options.inputId}`), + openModalBtn: vm.$el.querySelector(`[data-target="${modalSelector}"]`), + modal: vm.$el.querySelector(modalSelector), + modalBody: vm.$el.querySelector(`${modalSelector} .modal-body`), + modalHeader: vm.$el.querySelector(`${modalSelector} .modal-title`), + confirmModalBtn: vm.$el.querySelector(`${modalSelector} .btn-warning`), + }; + }; + + it('has a disabled button if the username was not changed', done => { + const { input, openModalBtn } = findElements(); + input.dispatchEvent(new Event('input')); + + Vue.nextTick() + .then(() => { + expect(vm.username).toBe(username); + expect(vm.newUsername).toBe(username); + expect(openModalBtn).toBeDisabled(); + }) + .then(done) + .catch(done.fail); + }); + + it('has an enabled button which if the username was changed', done => { + const { input, openModalBtn } = findElements(); + input.value = newUsername; + input.dispatchEvent(new Event('input')); + + Vue.nextTick() + .then(() => { + expect(vm.username).toBe(username); + expect(vm.newUsername).toBe(newUsername); + expect(openModalBtn).not.toBeDisabled(); + }) + .then(done) + .catch(done.fail); + }); + + it('confirmation modal contains proper header and body', done => { + const { modalBody, modalHeader } = findElements(); + + vm.newUsername = newUsername; + + Vue.nextTick() + .then(() => { + expect(modalHeader.textContent).toContain('Change username?'); + expect(modalBody.textContent).toContain( + `You are going to change the username ${username} to ${newUsername}`, + ); + }) + .then(done) + .catch(done.fail); + }); + + it('confirmation modal should escape usernames properly', done => { + const { modalBody } = findElements(); + + vm.username = vm.newUsername = '<i>Italic</i>'; + + Vue.nextTick() + .then(() => { + expect(modalBody.innerHTML).toContain('<i>Italic</i>'); + expect(modalBody.innerHTML).not.toContain(vm.username); + }) + .then(done) + .catch(done.fail); + }); + + it('executes API call on confirmation button click', done => { + const { confirmModalBtn } = findElements(); + + axiosMock.onPut(actionUrl).replyOnce(() => [200, { message: 'Username changed' }]); + spyOn(axios, 
'put').and.callThrough(); + + vm.newUsername = newUsername; + + Vue.nextTick() + .then(() => { + confirmModalBtn.click(); + expect(axios.put).toHaveBeenCalledWith(actionUrl, { user: { username: newUsername } }); + }) + .then(done) + .catch(done.fail); + }); + + it('sets the username after a successful update', done => { + const { input, openModalBtn } = findElements(); + + axiosMock.onPut(actionUrl).replyOnce(() => { + expect(input).toBeDisabled(); + expect(openModalBtn).toBeDisabled(); + + return [200, { message: 'Username changed' }]; + }); + + vm.newUsername = newUsername; + + vm + .onConfirm() + .then(() => { + expect(vm.username).toBe(newUsername); + expect(vm.newUsername).toBe(newUsername); + expect(input).not.toBeDisabled(); + expect(input.value).toBe(newUsername); + expect(openModalBtn).toBeDisabled(); + }) + .then(done) + .catch(done.fail); + }); + + it('does not set the username after a erroneous update', done => { + const { input, openModalBtn } = findElements(); + + axiosMock.onPut(actionUrl).replyOnce(() => { + expect(input).toBeDisabled(); + expect(openModalBtn).toBeDisabled(); + + return [400, { message: 'Invalid username' }]; + }); + + const invalidUsername = 'anything.git'; + vm.newUsername = invalidUsername; + + vm + .onConfirm() + .then(() => done.fail('Expected onConfirm to throw!')) + .catch(() => { + expect(vm.username).toBe(username); + expect(vm.newUsername).toBe(invalidUsername); + expect(input).not.toBeDisabled(); + expect(input.value).toBe(invalidUsername); + expect(openModalBtn).not.toBeDisabled(); + }) + .then(done) + .catch(done.fail); + }); +}); diff --git a/spec/javascripts/registry/stores/actions_spec.js b/spec/javascripts/registry/stores/actions_spec.js index 3c9da4f107b..bc4c444655a 100644 --- a/spec/javascripts/registry/stores/actions_spec.js +++ b/spec/javascripts/registry/stores/actions_spec.js @@ -29,57 +29,96 @@ describe('Actions Registry Store', () => { describe('fetchRepos', () => { beforeEach(() => { interceptor = (request, next) => { - next(request.respondWith(JSON.stringify(reposServerResponse), { - status: 200, - })); + next( + request.respondWith(JSON.stringify(reposServerResponse), { + status: 200, + }), + ); }; Vue.http.interceptors.push(interceptor); }); - it('should set receveived repos', (done) => { - testAction(actions.fetchRepos, null, mockedState, [ - { type: types.TOGGLE_MAIN_LOADING }, - { type: types.SET_REPOS_LIST, payload: reposServerResponse }, - ], done); + it('should set receveived repos', done => { + testAction( + actions.fetchRepos, + null, + mockedState, + [ + { type: types.TOGGLE_MAIN_LOADING }, + { type: types.TOGGLE_MAIN_LOADING }, + { type: types.SET_REPOS_LIST, payload: reposServerResponse }, + ], + [], + done, + ); }); }); describe('fetchList', () => { beforeEach(() => { interceptor = (request, next) => { - next(request.respondWith(JSON.stringify(registryServerResponse), { - status: 200, - })); + next( + request.respondWith(JSON.stringify(registryServerResponse), { + status: 200, + }), + ); }; Vue.http.interceptors.push(interceptor); }); - it('should set received list', (done) => { + it('should set received list', done => { mockedState.repos = parsedReposServerResponse; - testAction(actions.fetchList, { repo: mockedState.repos[1] }, mockedState, [ - { type: types.TOGGLE_REGISTRY_LIST_LOADING }, - { type: types.SET_REGISTRY_LIST, payload: registryServerResponse }, - ], done); + const repo = mockedState.repos[1]; + + testAction( + actions.fetchList, + { repo }, + mockedState, + [ + { type: 
types.TOGGLE_REGISTRY_LIST_LOADING, payload: repo }, + { type: types.TOGGLE_REGISTRY_LIST_LOADING, payload: repo }, + { + type: types.SET_REGISTRY_LIST, + payload: { + repo, + resp: registryServerResponse, + headers: jasmine.anything(), + }, + }, + ], + [], + done, + ); }); }); }); describe('setMainEndpoint', () => { - it('should commit set main endpoint', (done) => { - testAction(actions.setMainEndpoint, 'endpoint', mockedState, [ - { type: types.SET_MAIN_ENDPOINT, payload: 'endpoint' }, - ], done); + it('should commit set main endpoint', done => { + testAction( + actions.setMainEndpoint, + 'endpoint', + mockedState, + [{ type: types.SET_MAIN_ENDPOINT, payload: 'endpoint' }], + [], + done, + ); }); }); describe('toggleLoading', () => { - it('should commit toggle main loading', (done) => { - testAction(actions.toggleLoading, null, mockedState, [ - { type: types.TOGGLE_MAIN_LOADING }, - ], done); + it('should commit toggle main loading', done => { + testAction( + actions.toggleLoading, + null, + mockedState, + [{ type: types.TOGGLE_MAIN_LOADING }], + [], + done, + ); }); }); }); diff --git a/spec/javascripts/search_autocomplete_spec.js b/spec/javascripts/search_autocomplete_spec.js index 40115792652..1a27955983d 100644 --- a/spec/javascripts/search_autocomplete_spec.js +++ b/spec/javascripts/search_autocomplete_spec.js @@ -6,8 +6,21 @@ import SearchAutocomplete from '~/search_autocomplete'; import '~/lib/utils/common_utils'; import * as urlUtils from '~/lib/utils/url_utility'; -(function() { - var assertLinks, dashboardIssuesPath, dashboardMRsPath, groupIssuesPath, groupMRsPath, groupName, mockDashboardOptions, mockGroupOptions, mockProjectOptions, projectIssuesPath, projectMRsPath, projectName, userId, widget; +describe('Search autocomplete dropdown', () => { + var assertLinks, + dashboardIssuesPath, + dashboardMRsPath, + groupIssuesPath, + groupMRsPath, + groupName, + mockDashboardOptions, + mockGroupOptions, + mockProjectOptions, + projectIssuesPath, + projectMRsPath, + projectName, + userId, + widget; var userName = 'root'; widget = null; @@ -66,133 +79,126 @@ import * as urlUtils from '~/lib/utils/url_utility'; // Mock `gl` object in window for dashboard specific page. App code will need it. mockDashboardOptions = function() { window.gl || (window.gl = {}); - return window.gl.dashboardOptions = { + return (window.gl.dashboardOptions = { issuesPath: dashboardIssuesPath, - mrPath: dashboardMRsPath - }; + mrPath: dashboardMRsPath, + }); }; // Mock `gl` object in window for project specific page. App code will need it. 
mockProjectOptions = function() { window.gl || (window.gl = {}); - return window.gl.projectOptions = { + return (window.gl.projectOptions = { 'gitlab-ce': { issuesPath: projectIssuesPath, mrPath: projectMRsPath, - projectName: projectName - } - }; + projectName: projectName, + }, + }); }; mockGroupOptions = function() { window.gl || (window.gl = {}); - return window.gl.groupOptions = { + return (window.gl.groupOptions = { 'gitlab-org': { issuesPath: groupIssuesPath, mrPath: groupMRsPath, - projectName: groupName - } - }; + projectName: groupName, + }, + }); }; assertLinks = function(list, issuesPath, mrsPath) { - var a1, a2, a3, a4, issuesAssignedToMeLink, issuesIHaveCreatedLink, mrsAssignedToMeLink, mrsIHaveCreatedLink; if (issuesPath) { - issuesAssignedToMeLink = issuesPath + "/?assignee_username=" + userName; - issuesIHaveCreatedLink = issuesPath + "/?author_username=" + userName; - a1 = "a[href='" + issuesAssignedToMeLink + "']"; - a2 = "a[href='" + issuesIHaveCreatedLink + "']"; - expect(list.find(a1).length).toBe(1); - expect(list.find(a1).text()).toBe('Issues assigned to me'); - expect(list.find(a2).length).toBe(1); - expect(list.find(a2).text()).toBe("Issues I've created"); + const issuesAssignedToMeLink = `a[href="${issuesPath}/?assignee_id=${userId}"]`; + const issuesIHaveCreatedLink = `a[href="${issuesPath}/?author_id=${userId}"]`; + expect(list.find(issuesAssignedToMeLink).length).toBe(1); + expect(list.find(issuesAssignedToMeLink).text()).toBe('Issues assigned to me'); + expect(list.find(issuesIHaveCreatedLink).length).toBe(1); + expect(list.find(issuesIHaveCreatedLink).text()).toBe("Issues I've created"); } - mrsAssignedToMeLink = mrsPath + "/?assignee_username=" + userName; - mrsIHaveCreatedLink = mrsPath + "/?author_username=" + userName; - a3 = "a[href='" + mrsAssignedToMeLink + "']"; - a4 = "a[href='" + mrsIHaveCreatedLink + "']"; - expect(list.find(a3).length).toBe(1); - expect(list.find(a3).text()).toBe('Merge requests assigned to me'); - expect(list.find(a4).length).toBe(1); - return expect(list.find(a4).text()).toBe("Merge requests I've created"); + const mrsAssignedToMeLink = `a[href="${mrsPath}/?assignee_id=${userId}"]`; + const mrsIHaveCreatedLink = `a[href="${mrsPath}/?author_id=${userId}"]`; + expect(list.find(mrsAssignedToMeLink).length).toBe(1); + expect(list.find(mrsAssignedToMeLink).text()).toBe('Merge requests assigned to me'); + expect(list.find(mrsIHaveCreatedLink).length).toBe(1); + expect(list.find(mrsIHaveCreatedLink).text()).toBe("Merge requests I've created"); }; - describe('Search autocomplete dropdown', function() { - preloadFixtures('static/search_autocomplete.html.raw'); - beforeEach(function() { - loadFixtures('static/search_autocomplete.html.raw'); + preloadFixtures('static/search_autocomplete.html.raw'); + beforeEach(function() { + loadFixtures('static/search_autocomplete.html.raw'); - // Prevent turbolinks from triggering within gl_dropdown - spyOn(urlUtils, 'visitUrl').and.returnValue(true); + // Prevent turbolinks from triggering within gl_dropdown + spyOn(urlUtils, 'visitUrl').and.returnValue(true); - window.gon = {}; - window.gon.current_user_id = userId; - window.gon.current_username = userName; + window.gon = {}; + window.gon.current_user_id = userId; + window.gon.current_username = userName; - return widget = new SearchAutocomplete(); - }); + return (widget = new SearchAutocomplete()); + }); - afterEach(function() { - // Undo what we did to the shared <body> - removeBodyAttributes(); - window.gon = {}; - }); - it('should show Dashboard 
specific dropdown menu', function() { - var list; - addBodyAttributes(); - mockDashboardOptions(); - widget.searchInput.triggerHandler('focus'); - list = widget.wrap.find('.dropdown-menu').find('ul'); - return assertLinks(list, dashboardIssuesPath, dashboardMRsPath); - }); - it('should show Group specific dropdown menu', function() { - var list; - addBodyAttributes('group'); - mockGroupOptions(); - widget.searchInput.triggerHandler('focus'); - list = widget.wrap.find('.dropdown-menu').find('ul'); - return assertLinks(list, groupIssuesPath, groupMRsPath); - }); - it('should show Project specific dropdown menu', function() { - var list; - addBodyAttributes('project'); - mockProjectOptions(); - widget.searchInput.triggerHandler('focus'); - list = widget.wrap.find('.dropdown-menu').find('ul'); - return assertLinks(list, projectIssuesPath, projectMRsPath); - }); - it('should show only Project mergeRequest dropdown menu items when project issues are disabled', function() { - addBodyAttributes('project'); - disableProjectIssues(); - mockProjectOptions(); - widget.searchInput.triggerHandler('focus'); - const list = widget.wrap.find('.dropdown-menu').find('ul'); - assertLinks(list, null, projectMRsPath); - }); - it('should not show category related menu if there is text in the input', function() { - var link, list; - addBodyAttributes('project'); - mockProjectOptions(); - widget.searchInput.val('help'); - widget.searchInput.triggerHandler('focus'); - list = widget.wrap.find('.dropdown-menu').find('ul'); - link = "a[href='" + projectIssuesPath + "/?assignee_id=" + userId + "']"; - return expect(list.find(link).length).toBe(0); - }); - return it('should not submit the search form when selecting an autocomplete row with the keyboard', function() { - var ENTER = 13; - var DOWN = 40; - addBodyAttributes(); - mockDashboardOptions(true); - var submitSpy = spyOnEvent('form', 'submit'); - widget.searchInput.triggerHandler('focus'); - widget.wrap.trigger($.Event('keydown', { which: DOWN })); - var enterKeyEvent = $.Event('keydown', { which: ENTER }); - widget.searchInput.trigger(enterKeyEvent); - // This does not currently catch failing behavior. For security reasons, - // browsers will not trigger default behavior (form submit, in this - // example) on JavaScript-created keypresses. 
- expect(submitSpy).not.toHaveBeenTriggered(); - }); + afterEach(function() { + // Undo what we did to the shared <body> + removeBodyAttributes(); + window.gon = {}; + }); + it('should show Dashboard specific dropdown menu', function() { + var list; + addBodyAttributes(); + mockDashboardOptions(); + widget.searchInput.triggerHandler('focus'); + list = widget.wrap.find('.dropdown-menu').find('ul'); + return assertLinks(list, dashboardIssuesPath, dashboardMRsPath); + }); + it('should show Group specific dropdown menu', function() { + var list; + addBodyAttributes('group'); + mockGroupOptions(); + widget.searchInput.triggerHandler('focus'); + list = widget.wrap.find('.dropdown-menu').find('ul'); + return assertLinks(list, groupIssuesPath, groupMRsPath); + }); + it('should show Project specific dropdown menu', function() { + var list; + addBodyAttributes('project'); + mockProjectOptions(); + widget.searchInput.triggerHandler('focus'); + list = widget.wrap.find('.dropdown-menu').find('ul'); + return assertLinks(list, projectIssuesPath, projectMRsPath); + }); + it('should show only Project mergeRequest dropdown menu items when project issues are disabled', function() { + addBodyAttributes('project'); + disableProjectIssues(); + mockProjectOptions(); + widget.searchInput.triggerHandler('focus'); + const list = widget.wrap.find('.dropdown-menu').find('ul'); + assertLinks(list, null, projectMRsPath); + }); + it('should not show category related menu if there is text in the input', function() { + var link, list; + addBodyAttributes('project'); + mockProjectOptions(); + widget.searchInput.val('help'); + widget.searchInput.triggerHandler('focus'); + list = widget.wrap.find('.dropdown-menu').find('ul'); + link = "a[href='" + projectIssuesPath + '/?assignee_id=' + userId + "']"; + return expect(list.find(link).length).toBe(0); + }); + it('should not submit the search form when selecting an autocomplete row with the keyboard', function() { + var ENTER = 13; + var DOWN = 40; + addBodyAttributes(); + mockDashboardOptions(true); + var submitSpy = spyOnEvent('form', 'submit'); + widget.searchInput.triggerHandler('focus'); + widget.wrap.trigger($.Event('keydown', { which: DOWN })); + var enterKeyEvent = $.Event('keydown', { which: ENTER }); + widget.searchInput.trigger(enterKeyEvent); + // This does not currently catch failing behavior. For security reasons, + // browsers will not trigger default behavior (form submit, in this + // example) on JavaScript-created keypresses. 
+ expect(submitSpy).not.toHaveBeenTriggered(); }); -}).call(window); +}); diff --git a/spec/javascripts/shared/popover_spec.js b/spec/javascripts/shared/popover_spec.js new file mode 100644 index 00000000000..1d574c9424b --- /dev/null +++ b/spec/javascripts/shared/popover_spec.js @@ -0,0 +1,162 @@ +import $ from 'jquery'; +import { + togglePopover, + mouseleave, + mouseenter, +} from '~/shared/popover'; + +describe('popover', () => { + describe('togglePopover', () => { + describe('togglePopover(true)', () => { + it('returns true when popover is shown', () => { + const context = { + hasClass: () => false, + popover: () => {}, + toggleClass: () => {}, + }; + + expect(togglePopover.call(context, true)).toEqual(true); + }); + + it('returns false when popover is already shown', () => { + const context = { + hasClass: () => true, + }; + + expect(togglePopover.call(context, true)).toEqual(false); + }); + + it('shows popover', (done) => { + const context = { + hasClass: () => false, + popover: () => {}, + toggleClass: () => {}, + }; + + spyOn(context, 'popover').and.callFake((method) => { + expect(method).toEqual('show'); + done(); + }); + + togglePopover.call(context, true); + }); + + it('adds disable-animation and js-popover-show class', (done) => { + const context = { + hasClass: () => false, + popover: () => {}, + toggleClass: () => {}, + }; + + spyOn(context, 'toggleClass').and.callFake((classNames, show) => { + expect(classNames).toEqual('disable-animation js-popover-show'); + expect(show).toEqual(true); + done(); + }); + + togglePopover.call(context, true); + }); + }); + + describe('togglePopover(false)', () => { + it('returns true when popover is hidden', () => { + const context = { + hasClass: () => true, + popover: () => {}, + toggleClass: () => {}, + }; + + expect(togglePopover.call(context, false)).toEqual(true); + }); + + it('returns false when popover is already hidden', () => { + const context = { + hasClass: () => false, + }; + + expect(togglePopover.call(context, false)).toEqual(false); + }); + + it('hides popover', (done) => { + const context = { + hasClass: () => true, + popover: () => {}, + toggleClass: () => {}, + }; + + spyOn(context, 'popover').and.callFake((method) => { + expect(method).toEqual('hide'); + done(); + }); + + togglePopover.call(context, false); + }); + + it('removes disable-animation and js-popover-show class', (done) => { + const context = { + hasClass: () => true, + popover: () => {}, + toggleClass: () => {}, + }; + + spyOn(context, 'toggleClass').and.callFake((classNames, show) => { + expect(classNames).toEqual('disable-animation js-popover-show'); + expect(show).toEqual(false); + done(); + }); + + togglePopover.call(context, false); + }); + }); + }); + + describe('mouseleave', () => { + it('calls hide popover if .popover:hover is false', () => { + const fakeJquery = { + length: 0, + }; + + spyOn($.fn, 'init').and.callFake(selector => (selector === '.popover:hover' ? fakeJquery : $.fn)); + spyOn(togglePopover, 'call'); + mouseleave(); + expect(togglePopover.call).toHaveBeenCalledWith(jasmine.any(Object), false); + }); + + it('does not call hide popover if .popover:hover is true', () => { + const fakeJquery = { + length: 1, + }; + + spyOn($.fn, 'init').and.callFake(selector => (selector === '.popover:hover' ? 
fakeJquery : $.fn)); + spyOn(togglePopover, 'call'); + mouseleave(); + expect(togglePopover.call).not.toHaveBeenCalledWith(false); + }); + }); + + describe('mouseenter', () => { + const context = {}; + + it('shows popover', () => { + spyOn(togglePopover, 'call').and.returnValue(false); + mouseenter.call(context); + expect(togglePopover.call).toHaveBeenCalledWith(jasmine.any(Object), true); + }); + + it('registers mouseleave event if popover is showed', (done) => { + spyOn(togglePopover, 'call').and.returnValue(true); + spyOn($.fn, 'on').and.callFake((eventName) => { + expect(eventName).toEqual('mouseleave'); + done(); + }); + mouseenter.call(context); + }); + + it('does not register mouseleave event if popover is not showed', () => { + spyOn(togglePopover, 'call').and.returnValue(false); + const spy = spyOn($.fn, 'on').and.callFake(() => {}); + mouseenter.call(context); + expect(spy).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/spec/javascripts/shortcuts_dashboard_navigation_spec.js b/spec/javascripts/shortcuts_dashboard_navigation_spec.js new file mode 100644 index 00000000000..888b49004bf --- /dev/null +++ b/spec/javascripts/shortcuts_dashboard_navigation_spec.js @@ -0,0 +1,24 @@ +import findAndFollowLink from '~/shortcuts_dashboard_navigation'; +import * as urlUtility from '~/lib/utils/url_utility'; + +describe('findAndFollowLink', () => { + it('visits a link when the selector exists', () => { + const href = '/some/path'; + const locationSpy = spyOn(urlUtility, 'visitUrl'); + + setFixtures(`<a class="my-shortcut" href="${href}">link</a>`); + + findAndFollowLink('.my-shortcut'); + + expect(locationSpy).toHaveBeenCalledWith(href); + }); + + it('does not throw an exception when the selector does not exist', () => { + const locationSpy = spyOn(urlUtility, 'visitUrl'); + + // this should not throw an exception + findAndFollowLink('.this-selector-does-not-exist'); + + expect(locationSpy).not.toHaveBeenCalled(); + }); +}); diff --git a/spec/javascripts/shortcuts_issuable_spec.js b/spec/javascripts/shortcuts_issuable_spec.js index faaf710cf6f..b0d714cbefb 100644 --- a/spec/javascripts/shortcuts_issuable_spec.js +++ b/spec/javascripts/shortcuts_issuable_spec.js @@ -1,5 +1,5 @@ import $ from 'jquery'; -import initCopyAsGFM from '~/behaviors/copy_as_gfm'; +import initCopyAsGFM from '~/behaviors/markdown/copy_as_gfm'; import ShortcutsIssuable from '~/shortcuts_issuable'; initCopyAsGFM(); diff --git a/spec/javascripts/sidebar/confidential_issue_sidebar_spec.js b/spec/javascripts/sidebar/confidential_issue_sidebar_spec.js index 88a33caf2e3..0c173062835 100644 --- a/spec/javascripts/sidebar/confidential_issue_sidebar_spec.js +++ b/spec/javascripts/sidebar/confidential_issue_sidebar_spec.js @@ -62,4 +62,22 @@ describe('Confidential Issue Sidebar Block', () => { done(); }); }); + + it('displays the edit form when opened from collapsed state', (done) => { + expect(vm1.edit).toBe(false); + + vm1.$el.querySelector('.sidebar-collapsed-icon').click(); + + expect(vm1.edit).toBe(true); + + setTimeout(() => { + expect( + vm1.$el + .innerHTML + .includes('You are going to turn off the confidentiality.'), + ).toBe(true); + + done(); + }); + }); }); diff --git a/spec/javascripts/sidebar/lock/lock_issue_sidebar_spec.js b/spec/javascripts/sidebar/lock/lock_issue_sidebar_spec.js index 696fca516bc..9abc3daf221 100644 --- a/spec/javascripts/sidebar/lock/lock_issue_sidebar_spec.js +++ b/spec/javascripts/sidebar/lock/lock_issue_sidebar_spec.js @@ -68,4 +68,22 @@ describe('LockIssueSidebar', () => { done(); }); 
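// Editor's note (illustrative sketch, not part of the diff): the confidential and
// lock sidebar specs in this region add the same kind of test — click the
// collapsed icon, check the component flag synchronously, then assert on the
// rendered markup only after Vue has flushed its asynchronous DOM update, which
// is why the final expectation sits inside setTimeout. The component, flag name,
// and asserted string below are placeholders, not taken from the diff.
it('expands a collapsed sidebar block when its icon is clicked', done => {
  expect(vm.isExpanded).toBe(false);

  vm.$el.querySelector('.sidebar-collapsed-icon').click();

  // the data flag flips immediately...
  expect(vm.isExpanded).toBe(true);

  // ...but the DOM is patched on the next tick, so assert asynchronously
  setTimeout(() => {
    expect(vm.$el.innerHTML).toContain('expanded content');
    done();
  });
});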
}); + + it('displays the edit form when opened from collapsed state', (done) => { + expect(vm1.isLockDialogOpen).toBe(false); + + vm1.$el.querySelector('.sidebar-collapsed-icon').click(); + + expect(vm1.isLockDialogOpen).toBe(true); + + setTimeout(() => { + expect( + vm1.$el + .innerHTML + .includes('Unlock this issue?'), + ).toBe(true); + + done(); + }); + }); }); diff --git a/spec/javascripts/sidebar/mock_data.js b/spec/javascripts/sidebar/mock_data.js index d9e84e35f69..8b6e8b24f00 100644 --- a/spec/javascripts/sidebar/mock_data.js +++ b/spec/javascripts/sidebar/mock_data.js @@ -1,7 +1,5 @@ -/* eslint-disable quote-props*/ - const RESPONSE_MAP = { - 'GET': { + GET: { '/gitlab-org/gitlab-shell/issues/5.json': { id: 45, iid: 5, @@ -27,7 +25,8 @@ const RESPONSE_MAP = { username: 'user0', id: 22, state: 'active', - avatar_url: 'https://www.gravatar.com/avatar/52e4ce24a915fb7e51e1ad3b57f4b00a?s=80\u0026d=identicon', + avatar_url: + 'https://www.gravatar.com/avatar/52e4ce24a915fb7e51e1ad3b57f4b00a?s=80\u0026d=identicon', web_url: 'http: //localhost:3001/user0', }, { @@ -35,7 +34,8 @@ const RESPONSE_MAP = { username: 'tajuana', id: 18, state: 'active', - avatar_url: 'https://www.gravatar.com/avatar/4852a41fb41616bf8f140d3701673f53?s=80\u0026d=identicon', + avatar_url: + 'https://www.gravatar.com/avatar/4852a41fb41616bf8f140d3701673f53?s=80\u0026d=identicon', web_url: 'http: //localhost:3001/tajuana', }, { @@ -43,7 +43,8 @@ const RESPONSE_MAP = { username: 'michaele.will', id: 16, state: 'active', - avatar_url: 'https://www.gravatar.com/avatar/e301827eb03be955c9c172cb9a8e4e8a?s=80\u0026d=identicon', + avatar_url: + 'https://www.gravatar.com/avatar/e301827eb03be955c9c172cb9a8e4e8a?s=80\u0026d=identicon', web_url: 'http: //localhost:3001/michaele.will', }, ], @@ -72,7 +73,8 @@ const RESPONSE_MAP = { username: 'user0', id: 22, state: 'active', - avatar_url: 'https://www.gravatar.com/avatar/52e4ce24a915fb7e51e1ad3b57f4b00a?s=80\u0026d=identicon', + avatar_url: + 'https://www.gravatar.com/avatar/52e4ce24a915fb7e51e1ad3b57f4b00a?s=80\u0026d=identicon', web_url: 'http://localhost:3001/user0', }, { @@ -80,7 +82,8 @@ const RESPONSE_MAP = { username: 'tajuana', id: 18, state: 'active', - avatar_url: 'https://www.gravatar.com/avatar/4852a41fb41616bf8f140d3701673f53?s=80\u0026d=identicon', + avatar_url: + 'https://www.gravatar.com/avatar/4852a41fb41616bf8f140d3701673f53?s=80\u0026d=identicon', web_url: 'http://localhost:3001/tajuana', }, { @@ -88,7 +91,8 @@ const RESPONSE_MAP = { username: 'michaele.will', id: 16, state: 'active', - avatar_url: 'https://www.gravatar.com/avatar/e301827eb03be955c9c172cb9a8e4e8a?s=80\u0026d=identicon', + avatar_url: + 'https://www.gravatar.com/avatar/e301827eb03be955c9c172cb9a8e4e8a?s=80\u0026d=identicon', web_url: 'http://localhost:3001/michaele.will', }, ], @@ -100,7 +104,8 @@ const RESPONSE_MAP = { username: 'user0', id: 22, state: 'active', - avatar_url: 'https://www.gravatar.com/avatar/52e4ce24a915fb7e51e1ad3b57f4b00a?s=80\u0026d=identicon', + avatar_url: + 'https://www.gravatar.com/avatar/52e4ce24a915fb7e51e1ad3b57f4b00a?s=80\u0026d=identicon', web_url: 'http://localhost:3001/user0', }, { @@ -108,7 +113,8 @@ const RESPONSE_MAP = { username: 'tajuana', id: 18, state: 'active', - avatar_url: 'https://www.gravatar.com/avatar/4852a41fb41616bf8f140d3701673f53?s=80\u0026d=identicon', + avatar_url: + 'https://www.gravatar.com/avatar/4852a41fb41616bf8f140d3701673f53?s=80\u0026d=identicon', web_url: 'http://localhost:3001/tajuana', }, { @@ -116,7 +122,8 @@ const RESPONSE_MAP = { 
username: 'michaele.will', id: 16, state: 'active', - avatar_url: 'https://www.gravatar.com/avatar/e301827eb03be955c9c172cb9a8e4e8a?s=80\u0026d=identicon', + avatar_url: + 'https://www.gravatar.com/avatar/e301827eb03be955c9c172cb9a8e4e8a?s=80\u0026d=identicon', web_url: 'http://localhost:3001/michaele.will', }, ], @@ -126,20 +133,21 @@ const RESPONSE_MAP = { }, '/autocomplete/projects?project_id=15': [ { - 'id': 0, - 'name_with_namespace': 'No project', - }, { - 'id': 20, - 'name_with_namespace': 'foo / bar', + id: 0, + name_with_namespace: 'No project', + }, + { + id: 20, + name_with_namespace: 'foo / bar', }, ], }, - 'PUT': { + PUT: { '/gitlab-org/gitlab-shell/issues/5.json': { data: {}, }, }, - 'POST': { + POST: { '/gitlab-org/gitlab-shell/issues/5/move': { id: 123, iid: 5, @@ -182,7 +190,8 @@ const mockData = { id: 1, name: 'Administrator', username: 'root', - avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + avatar_url: + 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', }, rootPath: '/', fullPath: '/gitlab-org/gitlab-shell', @@ -201,12 +210,14 @@ const mockData = { }, }; -mockData.sidebarMockInterceptor = function (request, next) { +mockData.sidebarMockInterceptor = function(request, next) { const body = this.responseMap[request.method.toUpperCase()][request.url]; - next(request.respondWith(JSON.stringify(body), { - status: 200, - })); + next( + request.respondWith(JSON.stringify(body), { + status: 200, + }), + ); }.bind(mockData); export default mockData; diff --git a/spec/javascripts/signin_tabs_memoizer_spec.js b/spec/javascripts/signin_tabs_memoizer_spec.js index b1b03ef1e09..423432c9e5d 100644 --- a/spec/javascripts/signin_tabs_memoizer_spec.js +++ b/spec/javascripts/signin_tabs_memoizer_spec.js @@ -4,7 +4,7 @@ import SigninTabsMemoizer from '~/pages/sessions/new/signin_tabs_memoizer'; (() => { describe('SigninTabsMemoizer', () => { const fixtureTemplate = 'static/signin_tabs.html.raw'; - const tabSelector = 'ul.nav-tabs'; + const tabSelector = 'ul.new-session-tabs'; const currentTabKey = 'current_signin_tab'; let memo; @@ -27,7 +27,7 @@ import SigninTabsMemoizer from '~/pages/sessions/new/signin_tabs_memoizer'; it('does nothing if no tab was previously selected', () => { createMemoizer(); - expect(document.querySelector('li a.active').getAttribute('id')).toEqual('standard'); + expect(document.querySelector(`${tabSelector} > li.active a`).getAttribute('href')).toEqual('#ldap'); }); it('shows last selected tab on boot', () => { @@ -48,9 +48,9 @@ import SigninTabsMemoizer from '~/pages/sessions/new/signin_tabs_memoizer'; it('saves last selected tab on change', () => { createMemoizer(); - document.getElementById('standard').click(); + document.querySelector('a[href="#login-pane"]').click(); - expect(memo.readData()).toEqual('#standard'); + expect(memo.readData()).toEqual('#login-pane'); }); it('overrides last selected tab with hash tag when given', () => { diff --git a/spec/javascripts/test_bundle.js b/spec/javascripts/test_bundle.js index 1bcfdfe72b6..14bff05e537 100644 --- a/spec/javascripts/test_bundle.js +++ b/spec/javascripts/test_bundle.js @@ -5,8 +5,12 @@ import '~/commons'; import Vue from 'vue'; import VueResource from 'vue-resource'; +import Translate from '~/vue_shared/translate'; import { getDefaultAdapter } from '~/lib/utils/axios_utils'; +import { FIXTURES_PATH, TEST_HOST } from './test_constants'; + +import customMatchers from './matchers'; const isHeadlessChrome = 
/\bHeadlessChrome\//.test(navigator.userAgent); Vue.config.devtools = !isHeadlessChrome; @@ -19,29 +23,33 @@ Vue.config.warnHandler = (msg, vm, trace) => { }; let hasVueErrors = false; -Vue.config.errorHandler = function (err) { +Vue.config.errorHandler = function(err) { hasVueErrors = true; fail(err); }; Vue.use(VueResource); +Vue.use(Translate); // enable test fixtures -jasmine.getFixtures().fixturesPath = '/base/spec/javascripts/fixtures'; -jasmine.getJSONFixtures().fixturesPath = '/base/spec/javascripts/fixtures'; +jasmine.getFixtures().fixturesPath = FIXTURES_PATH; +jasmine.getJSONFixtures().fixturesPath = FIXTURES_PATH; + +beforeAll(() => jasmine.addMatchers(customMatchers)); // globalize common libraries window.$ = window.jQuery = $; // stub expected globals window.gl = window.gl || {}; -window.gl.TEST_HOST = 'http://test.host'; +window.gl.TEST_HOST = TEST_HOST; window.gon = window.gon || {}; window.gon.test_env = true; +gon.relative_url_root = ''; let hasUnhandledPromiseRejections = false; -window.addEventListener('unhandledrejection', (event) => { +window.addEventListener('unhandledrejection', event => { hasUnhandledPromiseRejections = true; console.error('Unhandled promise rejection:'); console.error(event.reason.stack || event.reason); @@ -64,15 +72,25 @@ beforeEach(() => { const axiosDefaultAdapter = getDefaultAdapter(); +let testFiles = process.env.TEST_FILES || []; +if (testFiles.length > 0) { + testFiles = testFiles.map(path => path.replace(/^spec\/javascripts\//, '').replace(/\.js$/, '')); + console.log(`Running only tests matching: ${testFiles}`); +} else { + console.log('Running all tests'); +} + // render all of our tests const testsContext = require.context('.', true, /_spec$/); -testsContext.keys().forEach(function (path) { +testsContext.keys().forEach(function(path) { try { - testsContext(path); + if (testFiles.length === 0 || testFiles.some(p => path.includes(p))) { + testsContext(path); + } } catch (err) { console.error('[ERROR] Unable to load spec: ', path); - describe('Test bundle', function () { - it(`includes '${path}'`, function () { + describe('Test bundle', function() { + it(`includes '${path}'`, function() { expect(err).toBeNull(); }); }); @@ -80,7 +98,7 @@ testsContext.keys().forEach(function (path) { }); describe('test errors', () => { - beforeAll((done) => { + beforeAll(done => { if (hasUnhandledPromiseRejections || hasVueWarnings || hasVueErrors) { setTimeout(done, 1000); } else { @@ -144,18 +162,18 @@ if (process.env.BABEL_ENV === 'coverage') { './issue_show/index.js', ]; - describe('Uncovered files', function () { + describe('Uncovered files', function() { const sourceFiles = require.context('~', true, /\.js$/); $.holdReady(true); - sourceFiles.keys().forEach(function (path) { + sourceFiles.keys().forEach(function(path) { // ignore if there is a matching spec file if (testsContext.keys().indexOf(`${path.replace(/\.js$/, '')}_spec`) > -1) { return; } - it(`includes '${path}'`, function () { + it(`includes '${path}'`, function() { try { sourceFiles(path); } catch (err) { diff --git a/spec/javascripts/test_constants.js b/spec/javascripts/test_constants.js new file mode 100644 index 00000000000..df59195e9f6 --- /dev/null +++ b/spec/javascripts/test_constants.js @@ -0,0 +1,4 @@ +export const FIXTURES_PATH = '/base/spec/javascripts/fixtures'; +export const TEST_HOST = 'http://test.host'; + +export const DUMMY_IMAGE_URL = `${FIXTURES_PATH}/one_white_pixel.png`; diff --git a/spec/javascripts/visibility_select_spec.js 
b/spec/javascripts/visibility_select_spec.js deleted file mode 100644 index 82714cb69bd..00000000000 --- a/spec/javascripts/visibility_select_spec.js +++ /dev/null @@ -1,98 +0,0 @@ -import VisibilitySelect from '~/visibility_select'; - -(() => { - describe('VisibilitySelect', function () { - const lockedElement = document.createElement('div'); - lockedElement.dataset.helpBlock = 'lockedHelpBlock'; - - const checkedElement = document.createElement('div'); - checkedElement.dataset.description = 'checkedDescription'; - - const mockElements = { - container: document.createElement('div'), - select: document.createElement('div'), - '.help-block': document.createElement('div'), - '.js-locked': lockedElement, - 'option:checked': checkedElement, - }; - - beforeEach(function () { - spyOn(Element.prototype, 'querySelector').and.callFake(selector => mockElements[selector]); - }); - - describe('constructor', function () { - beforeEach(function () { - this.visibilitySelect = new VisibilitySelect(mockElements.container); - }); - - it('sets the container member', function () { - expect(this.visibilitySelect.container).toEqual(mockElements.container); - }); - - it('queries and sets the helpBlock member', function () { - expect(Element.prototype.querySelector).toHaveBeenCalledWith('.help-block'); - expect(this.visibilitySelect.helpBlock).toEqual(mockElements['.help-block']); - }); - - it('queries and sets the select member', function () { - expect(Element.prototype.querySelector).toHaveBeenCalledWith('select'); - expect(this.visibilitySelect.select).toEqual(mockElements.select); - }); - - describe('if there is no container element provided', function () { - it('throws an error', function () { - expect(() => new VisibilitySelect()).toThrowError('VisibilitySelect requires a container element as argument 1'); - }); - }); - }); - - describe('init', function () { - describe('if there is a select', function () { - beforeEach(function () { - this.visibilitySelect = new VisibilitySelect(mockElements.container); - }); - - it('calls updateHelpText', function () { - spyOn(VisibilitySelect.prototype, 'updateHelpText'); - this.visibilitySelect.init(); - expect(this.visibilitySelect.updateHelpText).toHaveBeenCalled(); - }); - - it('adds a change event listener', function () { - spyOn(this.visibilitySelect.select, 'addEventListener'); - this.visibilitySelect.init(); - expect(this.visibilitySelect.select.addEventListener.calls.argsFor(0)).toContain('change'); - }); - }); - - describe('if there is no select', function () { - beforeEach(function () { - mockElements.select = undefined; - this.visibilitySelect = new VisibilitySelect(mockElements.container); - this.visibilitySelect.init(); - }); - - it('updates the helpBlock text to the locked `data-help-block` messaged', function () { - expect(this.visibilitySelect.helpBlock.textContent) - .toEqual(lockedElement.dataset.helpBlock); - }); - - afterEach(function () { - mockElements.select = document.createElement('div'); - }); - }); - }); - - describe('updateHelpText', function () { - beforeEach(function () { - this.visibilitySelect = new VisibilitySelect(mockElements.container); - this.visibilitySelect.init(); - }); - - it('updates the helpBlock text to the selected options `data-description`', function () { - expect(this.visibilitySelect.helpBlock.textContent) - .toEqual(checkedElement.dataset.description); - }); - }); - }); -})(); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js 
b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js index 235c33fac0d..9b9c9656979 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js @@ -17,46 +17,58 @@ describe('MRWidgetHeader', () => { describe('computed', () => { describe('shouldShowCommitsBehindText', () => { it('return true when there are divergedCommitsCount', () => { - vm = mountComponent(Component, { mr: { - divergedCommitsCount: 12, - sourceBranch: 'mr-widget-refactor', - sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', - targetBranch: 'master', - } }); + vm = mountComponent(Component, { + mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', + targetBranch: 'master', + statusPath: 'abc', + }, + }); expect(vm.shouldShowCommitsBehindText).toEqual(true); }); it('returns false where there are no divergedComits count', () => { - vm = mountComponent(Component, { mr: { - divergedCommitsCount: 0, - sourceBranch: 'mr-widget-refactor', - sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', - targetBranch: 'master', - } }); + vm = mountComponent(Component, { + mr: { + divergedCommitsCount: 0, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', + targetBranch: 'master', + statusPath: 'abc', + }, + }); expect(vm.shouldShowCommitsBehindText).toEqual(false); }); }); describe('commitsText', () => { it('returns singular when there is one commit', () => { - vm = mountComponent(Component, { mr: { - divergedCommitsCount: 1, - sourceBranch: 'mr-widget-refactor', - sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', - targetBranch: 'master', - } }); + vm = mountComponent(Component, { + mr: { + divergedCommitsCount: 1, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', + targetBranch: 'master', + statusPath: 'abc', + }, + }); expect(vm.commitsText).toEqual('1 commit behind'); }); it('returns plural when there is more than one commit', () => { - vm = mountComponent(Component, { mr: { - divergedCommitsCount: 2, - sourceBranch: 'mr-widget-refactor', - sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', - targetBranch: 'master', - } }); + vm = mountComponent(Component, { + mr: { + divergedCommitsCount: 2, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', + targetBranch: 'master', + statusPath: 'abc', + }, + }); expect(vm.commitsText).toEqual('2 commits behind'); }); @@ -66,24 +78,27 @@ describe('MRWidgetHeader', () => { describe('template', () => { describe('common elements', () => { beforeEach(() => { - vm = mountComponent(Component, { mr: { - divergedCommitsCount: 12, - sourceBranch: 'mr-widget-refactor', - sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', - sourceBranchRemoved: false, - targetBranchPath: 'foo/bar/commits-path', - targetBranchTreePath: 'foo/bar/tree/path', - targetBranch: 'master', - isOpen: true, - emailPatchesPath: '/mr/email-patches', - plainDiffPath: '/mr/plainDiffPath', - } }); + vm = mountComponent(Component, { + mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 
'foo/bar/tree/path', + targetBranch: 'master', + isOpen: true, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + statusPath: 'abc', + }, + }); }); it('renders source branch link', () => { - expect( - vm.$el.querySelector('.js-source-branch').innerHTML, - ).toEqual('<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>'); + expect(vm.$el.querySelector('.js-source-branch').innerHTML).toEqual( + '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + ); }); it('renders clipboard button', () => { @@ -101,18 +116,21 @@ describe('MRWidgetHeader', () => { }); beforeEach(() => { - vm = mountComponent(Component, { mr: { - divergedCommitsCount: 12, - sourceBranch: 'mr-widget-refactor', - sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', - sourceBranchRemoved: false, - targetBranchPath: 'foo/bar/commits-path', - targetBranchTreePath: 'foo/bar/tree/path', - targetBranch: 'master', - isOpen: true, - emailPatchesPath: '/mr/email-patches', - plainDiffPath: '/mr/plainDiffPath', - } }); + vm = mountComponent(Component, { + mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: true, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + statusPath: 'abc', + }, + }); }); it('renders checkout branch button with modal trigger', () => { @@ -123,39 +141,49 @@ describe('MRWidgetHeader', () => { expect(button.getAttribute('data-toggle')).toEqual('modal'); }); + it('renders web ide button', () => { + const button = vm.$el.querySelector('.js-web-ide'); + + expect(button.textContent.trim()).toEqual('Web IDE'); + expect(button.getAttribute('href')).toEqual('undefined/-/ide/projectabc'); + }); + it('renders download dropdown with links', () => { - expect( - vm.$el.querySelector('.js-download-email-patches').textContent.trim(), - ).toEqual('Email patches'); + expect(vm.$el.querySelector('.js-download-email-patches').textContent.trim()).toEqual( + 'Email patches', + ); - expect( - vm.$el.querySelector('.js-download-email-patches').getAttribute('href'), - ).toEqual('/mr/email-patches'); + expect(vm.$el.querySelector('.js-download-email-patches').getAttribute('href')).toEqual( + '/mr/email-patches', + ); - expect( - vm.$el.querySelector('.js-download-plain-diff').textContent.trim(), - ).toEqual('Plain diff'); + expect(vm.$el.querySelector('.js-download-plain-diff').textContent.trim()).toEqual( + 'Plain diff', + ); - expect( - vm.$el.querySelector('.js-download-plain-diff').getAttribute('href'), - ).toEqual('/mr/plainDiffPath'); + expect(vm.$el.querySelector('.js-download-plain-diff').getAttribute('href')).toEqual( + '/mr/plainDiffPath', + ); }); }); describe('with a closed merge request', () => { beforeEach(() => { - vm = mountComponent(Component, { mr: { - divergedCommitsCount: 12, - sourceBranch: 'mr-widget-refactor', - sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', - sourceBranchRemoved: false, - targetBranchPath: 'foo/bar/commits-path', - targetBranchTreePath: 'foo/bar/tree/path', - targetBranch: 'master', - isOpen: false, - emailPatchesPath: '/mr/email-patches', - plainDiffPath: '/mr/plainDiffPath', - } }); + vm = mountComponent(Component, { + mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + 
sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: false, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + statusPath: 'abc', + }, + }); }); it('does not render checkout branch button with modal trigger', () => { @@ -165,30 +193,29 @@ describe('MRWidgetHeader', () => { }); it('does not render download dropdown with links', () => { - expect( - vm.$el.querySelector('.js-download-email-patches'), - ).toEqual(null); + expect(vm.$el.querySelector('.js-download-email-patches')).toEqual(null); - expect( - vm.$el.querySelector('.js-download-plain-diff'), - ).toEqual(null); + expect(vm.$el.querySelector('.js-download-plain-diff')).toEqual(null); }); }); describe('without diverged commits', () => { beforeEach(() => { - vm = mountComponent(Component, { mr: { - divergedCommitsCount: 0, - sourceBranch: 'mr-widget-refactor', - sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', - sourceBranchRemoved: false, - targetBranchPath: 'foo/bar/commits-path', - targetBranchTreePath: 'foo/bar/tree/path', - targetBranch: 'master', - isOpen: true, - emailPatchesPath: '/mr/email-patches', - plainDiffPath: '/mr/plainDiffPath', - } }); + vm = mountComponent(Component, { + mr: { + divergedCommitsCount: 0, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: true, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + statusPath: 'abc', + }, + }); }); it('does not render diverged commits info', () => { @@ -198,22 +225,27 @@ describe('MRWidgetHeader', () => { describe('with diverged commits', () => { beforeEach(() => { - vm = mountComponent(Component, { mr: { - divergedCommitsCount: 12, - sourceBranch: 'mr-widget-refactor', - sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', - sourceBranchRemoved: false, - targetBranchPath: 'foo/bar/commits-path', - targetBranchTreePath: 'foo/bar/tree/path', - targetBranch: 'master', - isOpen: true, - emailPatchesPath: '/mr/email-patches', - plainDiffPath: '/mr/plainDiffPath', - } }); + vm = mountComponent(Component, { + mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: true, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + statusPath: 'abc', + }, + }); }); it('renders diverged commits info', () => { - expect(vm.$el.querySelector('.diverged-commits-count').textContent.trim()).toEqual('(12 commits behind)'); + expect(vm.$el.querySelector('.diverged-commits-count').textContent.trim()).toEqual( + '(12 commits behind)', + ); }); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js index 31710551399..91e81a0675a 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js +++ 
b/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js @@ -23,9 +23,7 @@ const metricsMockData = { memory_values: [ { metric: {}, - values: [ - [1493716685, '4.30859375'], - ], + values: [[1493716685, '4.30859375']], }, ], }, @@ -53,7 +51,7 @@ const createComponent = () => { const messages = { loadingMetrics: 'Loading deployment statistics', - hasMetrics: 'Memory usage unchanged from 0MB to 0MB', + hasMetrics: 'Memory usage is unchanged at 0MB', loadFailed: 'Failed to load deployment statistics', metricsUnavailable: 'Deployment statistics are not available currently', }; @@ -92,26 +90,26 @@ describe('MemoryUsage', () => { }); describe('computed', () => { - describe('memoryChangeType', () => { - it('should return "increased" if memoryFrom value is less than memoryTo value', () => { + describe('memoryChangeMessage', () => { + it('should contain "increased" if memoryFrom value is less than memoryTo value', () => { vm.memoryFrom = 4.28; vm.memoryTo = 9.13; - expect(vm.memoryChangeType).toEqual('increased'); + expect(vm.memoryChangeMessage.indexOf('increased')).not.toEqual('-1'); }); - it('should return "decreased" if memoryFrom value is less than memoryTo value', () => { + it('should contain "decreased" if memoryFrom value is less than memoryTo value', () => { vm.memoryFrom = 9.13; vm.memoryTo = 4.28; - expect(vm.memoryChangeType).toEqual('decreased'); + expect(vm.memoryChangeMessage.indexOf('decreased')).not.toEqual('-1'); }); - it('should return "unchanged" if memoryFrom value equal to memoryTo value', () => { + it('should contain "unchanged" if memoryFrom value equal to memoryTo value', () => { vm.memoryFrom = 1; vm.memoryTo = 1; - expect(vm.memoryChangeType).toEqual('unchanged'); + expect(vm.memoryChangeMessage.indexOf('unchanged')).not.toEqual('-1'); }); }); }); @@ -130,7 +128,13 @@ describe('MemoryUsage', () => { describe('computeGraphData', () => { it('should populate sparkline graph', () => { vm.computeGraphData(metrics, deployment_time); - const { hasMetrics, memoryMetrics, deploymentTime, memoryFrom, memoryTo } = vm; + const { + hasMetrics, + memoryMetrics, + deploymentTime, + memoryFrom, + memoryTo, + } = vm; expect(hasMetrics).toBeTruthy(); expect(memoryMetrics.length > 0).toBeTruthy(); @@ -141,20 +145,26 @@ describe('MemoryUsage', () => { }); describe('loadMetrics', () => { - const returnServicePromise = () => new Promise((resolve) => { - resolve({ - data: metricsMockData, + const returnServicePromise = () => + new Promise(resolve => { + resolve({ + data: metricsMockData, + }); }); - }); - it('should load metrics data using MRWidgetService', (done) => { - spyOn(MRWidgetService, 'fetchMetrics').and.returnValue(returnServicePromise(true)); + it('should load metrics data using MRWidgetService', done => { + spyOn(MRWidgetService, 'fetchMetrics').and.returnValue( + returnServicePromise(true), + ); spyOn(vm, 'computeGraphData'); vm.loadMetrics(); setTimeout(() => { expect(MRWidgetService.fetchMetrics).toHaveBeenCalledWith(url); - expect(vm.computeGraphData).toHaveBeenCalledWith(metrics, deployment_time); + expect(vm.computeGraphData).toHaveBeenCalledWith( + metrics, + deployment_time, + ); done(); }, 333); }); @@ -167,51 +177,67 @@ describe('MemoryUsage', () => { expect(el.querySelector('.js-usage-info')).toBeDefined(); }); - it('should show loading metrics message while metrics are being loaded', (done) => { + it('should show loading metrics message while metrics are being loaded', done => { vm.loadingMetrics = true; vm.hasMetrics = false; vm.loadFailed = false; 
Vue.nextTick(() => { - expect(el.querySelector('.js-usage-info.usage-info-loading')).toBeDefined(); - expect(el.querySelector('.js-usage-info .usage-info-load-spinner')).toBeDefined(); - expect(el.querySelector('.js-usage-info').innerText).toContain(messages.loadingMetrics); + expect( + el.querySelector('.js-usage-info.usage-info-loading'), + ).toBeDefined(); + expect( + el.querySelector('.js-usage-info .usage-info-load-spinner'), + ).toBeDefined(); + expect(el.querySelector('.js-usage-info').innerText).toContain( + messages.loadingMetrics, + ); done(); }); }); - it('should show deployment memory usage when metrics are loaded', (done) => { + it('should show deployment memory usage when metrics are loaded', done => { vm.loadingMetrics = false; vm.hasMetrics = true; vm.loadFailed = false; Vue.nextTick(() => { expect(el.querySelector('.memory-graph-container')).toBeDefined(); - expect(el.querySelector('.js-usage-info').innerText).toContain(messages.hasMetrics); + expect(el.querySelector('.js-usage-info').innerText).toContain( + messages.hasMetrics, + ); done(); }); }); - it('should show failure message when metrics loading failed', (done) => { + it('should show failure message when metrics loading failed', done => { vm.loadingMetrics = false; vm.hasMetrics = false; vm.loadFailed = true; Vue.nextTick(() => { - expect(el.querySelector('.js-usage-info.usage-info-failed')).toBeDefined(); - expect(el.querySelector('.js-usage-info').innerText).toContain(messages.loadFailed); + expect( + el.querySelector('.js-usage-info.usage-info-failed'), + ).toBeDefined(); + expect(el.querySelector('.js-usage-info').innerText).toContain( + messages.loadFailed, + ); done(); }); }); - it('should show metrics unavailable message when metrics loading failed', (done) => { + it('should show metrics unavailable message when metrics loading failed', done => { vm.loadingMetrics = false; vm.hasMetrics = false; vm.loadFailed = false; Vue.nextTick(() => { - expect(el.querySelector('.js-usage-info.usage-info-unavailable')).toBeDefined(); - expect(el.querySelector('.js-usage-info').innerText).toContain(messages.metricsUnavailable); + expect( + el.querySelector('.js-usage-info.usage-info-unavailable'), + ).toBeDefined(); + expect(el.querySelector('.js-usage-info').innerText).toContain( + messages.metricsUnavailable, + ); done(); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js index 431cb7f3913..ea8007d2029 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js @@ -113,6 +113,46 @@ describe('MRWidgetPipeline', () => { }); }); + describe('without commit path', () => { + beforeEach(() => { + const mockCopy = Object.assign({}, mockData); + delete mockCopy.pipeline.commit; + + vm = mountComponent(Component, { + pipeline: mockCopy.pipeline, + hasCi: true, + ciStatus: 'success', + }); + }); + + it('should render pipeline ID', () => { + expect( + vm.$el.querySelector('.pipeline-id').textContent.trim(), + ).toEqual(`#${mockData.pipeline.id}`); + }); + + it('should render pipeline status', () => { + expect( + vm.$el.querySelector('.media-body').textContent.trim(), + ).toContain(mockData.pipeline.details.status.label); + + expect( + vm.$el.querySelector('.js-commit-link'), + ).toBeNull(); + }); + + it('should render pipeline graph', () => { + expect(vm.$el.querySelector('.mr-widget-pipeline-graph')).toBeDefined(); + 
expect(vm.$el.querySelectorAll('.stage-container').length).toEqual(mockData.pipeline.details.stages.length); + }); + + it('should render coverage information', () => { + expect( + vm.$el.querySelector('.media-body').textContent, + ).toContain(`Coverage ${mockData.pipeline.coverage}`); + }); + }); + describe('without coverage', () => { it('should not render a coverage', () => { const mockCopy = Object.assign({}, mockData); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js index 5323523abc0..3d05dbfa305 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js @@ -1,6 +1,7 @@ import Vue from 'vue'; import conflictsComponent from '~/vue_merge_request_widget/components/states/mr_widget_conflicts.vue'; import mountComponent from 'spec/helpers/vue_mount_component_helper'; +import { removeBreakLine } from 'spec/helpers/vue_component_helper'; describe('MRWidgetConflicts', () => { let Component; @@ -78,8 +79,9 @@ describe('MRWidgetConflicts', () => { }); it('should tell you to rebase locally', () => { - expect(vm.$el.textContent.trim().replace(/\s\s+/g, ' ')).toContain('Fast-forward merge is not possible.'); - expect(vm.$el.textContent.trim().replace(/\s\s+/g, ' ')).toContain('To merge this request, first rebase locally'); + expect( + removeBreakLine(vm.$el.textContent).trim(), + ).toContain('Fast-forward merge is not possible. To merge this request, first rebase locally.'); }); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js index baacbc03fb1..ab096a56918 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js @@ -1,6 +1,7 @@ import Vue from 'vue'; import pipelineBlockedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked.vue'; import mountComponent from 'spec/helpers/vue_mount_component_helper'; +import { removeBreakLine } from 'spec/helpers/vue_component_helper'; describe('MRWidgetPipelineBlocked', () => { let vm; @@ -18,6 +19,8 @@ describe('MRWidgetPipelineBlocked', () => { }); it('renders information text', () => { - expect(vm.$el.textContent.trim().replace(/[\r\n]+/g, ' ')).toContain('Pipeline blocked. The pipeline for this merge request requires a manual action to proceed'); + expect( + removeBreakLine(vm.$el.textContent).trim(), + ).toContain('Pipeline blocked. 
The pipeline for this merge request requires a manual action to proceed'); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js index 78bac1c61a5..5573d7c5c93 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js @@ -1,16 +1,19 @@ import Vue from 'vue'; -import pipelineFailedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_failed'; +import PipelineFailed from '~/vue_merge_request_widget/components/states/pipeline_failed.vue'; +import { removeBreakLine } from 'spec/helpers/vue_component_helper'; -describe('MRWidgetPipelineFailed', () => { +describe('PipelineFailed', () => { describe('template', () => { - const Component = Vue.extend(pipelineFailedComponent); + const Component = Vue.extend(PipelineFailed); const vm = new Component({ el: document.createElement('div'), }); it('should have correct elements', () => { expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy(); expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy(); - expect(vm.$el.innerText).toContain('The pipeline for this merge request failed. Please retry the job or push a new commit to fix the failure'); + expect( + removeBreakLine(vm.$el.innerText).trim(), + ).toContain('The pipeline for this merge request failed. Please retry the job or push a new commit to fix the failure'); }); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js index 58f683fb3e6..300b7882d03 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js @@ -1,12 +1,12 @@ import Vue from 'vue'; -import readyToMergeComponent from '~/vue_merge_request_widget/components/states/mr_widget_ready_to_merge'; +import ReadyToMerge from '~/vue_merge_request_widget/components/states/ready_to_merge.vue'; import eventHub from '~/vue_merge_request_widget/event_hub'; import * as simplePoll from '~/lib/utils/simple_poll'; const commitMessage = 'This is the commit message'; const commitMessageWithDescription = 'This is the commit message description'; const createComponent = (customConfig = {}) => { - const Component = Vue.extend(readyToMergeComponent); + const Component = Vue.extend(ReadyToMerge); const mr = { isPipelineActive: false, pipeline: null, @@ -36,7 +36,7 @@ const createComponent = (customConfig = {}) => { }); }; -describe('MRWidgetReadyToMerge', () => { +describe('ReadyToMerge', () => { let vm; beforeEach(() => { @@ -49,7 +49,7 @@ describe('MRWidgetReadyToMerge', () => { describe('props', () => { it('should have props', () => { - const { mr, service } = readyToMergeComponent.props; + const { mr, service } = ReadyToMerge.props; expect(mr.type instanceof Object).toBeTruthy(); expect(mr.required).toBeTruthy(); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js index 4c67504b642..abf642c166a 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js @@ 
-1,16 +1,25 @@ import Vue from 'vue'; -import shaMismatchComponent from '~/vue_merge_request_widget/components/states/mr_widget_sha_mismatch'; +import ShaMismatch from '~/vue_merge_request_widget/components/states/sha_mismatch.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; +import { removeBreakLine } from 'spec/helpers/vue_component_helper'; -describe('MRWidgetSHAMismatch', () => { - describe('template', () => { - const Component = Vue.extend(shaMismatchComponent); - const vm = new Component({ - el: document.createElement('div'), - }); - it('should have correct elements', () => { - expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy(); - expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy(); - expect(vm.$el.innerText).toContain('The source branch HEAD has recently changed. Please reload the page and review the changes before merging'); - }); +describe('ShaMismatch', () => { + let vm; + + beforeEach(() => { + const Component = Vue.extend(ShaMismatch); + vm = mountComponent(Component); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('should render information message', () => { + expect(vm.$el.querySelector('button').disabled).toEqual(true); + + expect( + removeBreakLine(vm.$el.textContent).trim(), + ).toContain('The source branch HEAD has recently changed. Please reload the page and review the changes before merging'); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js index fe87f110354..d797f1266df 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js @@ -1,47 +1,37 @@ import Vue from 'vue'; -import unresolvedDiscussionsComponent from '~/vue_merge_request_widget/components/states/mr_widget_unresolved_discussions'; +import UnresolvedDiscussions from '~/vue_merge_request_widget/components/states/unresolved_discussions.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; -describe('MRWidgetUnresolvedDiscussions', () => { - describe('props', () => { - it('should have props', () => { - const { mr } = unresolvedDiscussionsComponent.props; +describe('UnresolvedDiscussions', () => { + const Component = Vue.extend(UnresolvedDiscussions); + let vm; - expect(mr.type instanceof Object).toBeTruthy(); - expect(mr.required).toBeTruthy(); - }); + afterEach(() => { + vm.$destroy(); }); - describe('template', () => { - let el; - let vm; - const path = 'foo/bar'; - + describe('with discussions path', () => { beforeEach(() => { - const Component = Vue.extend(unresolvedDiscussionsComponent); - const mr = { - createIssueToResolveDiscussionsPath: path, - }; - vm = new Component({ - el: document.createElement('div'), - propsData: { mr }, - }); - el = vm.$el; + vm = mountComponent(Component, { mr: { + createIssueToResolveDiscussionsPath: gl.TEST_HOST, + } }); }); it('should have correct elements', () => { - expect(el.classList.contains('mr-widget-body')).toBeTruthy(); - expect(el.innerText).toContain('There are unresolved discussions. Please resolve these discussions'); - expect(el.innerText).toContain('Create an issue to resolve them later'); - expect(el.querySelector('.js-create-issue').getAttribute('href')).toEqual(path); + expect(vm.$el.innerText).toContain('There are unresolved discussions. 
Please resolve these discussions'); + expect(vm.$el.innerText).toContain('Create an issue to resolve them later'); + expect(vm.$el.querySelector('.js-create-issue').getAttribute('href')).toEqual(gl.TEST_HOST); }); + }); - it('should not show create issue button if user cannot create issue', (done) => { - vm.mr.createIssueToResolveDiscussionsPath = ''; + describe('without discussions path', () => { + beforeEach(() => { + vm = mountComponent(Component, { mr: {} }); + }); - Vue.nextTick(() => { - expect(el.querySelector('.js-create-issue')).toEqual(null); - done(); - }); + it('should not show create issue link if user cannot create issue', () => { + expect(vm.$el.innerText).toContain('There are unresolved discussions. Please resolve these discussions'); + expect(vm.$el.querySelector('.js-create-issue')).toEqual(null); }); }); }); diff --git a/spec/javascripts/vue_mr_widget/mock_data.js b/spec/javascripts/vue_mr_widget/mock_data.js index 3dd75307484..3fc7663b9c2 100644 --- a/spec/javascripts/vue_mr_widget/mock_data.js +++ b/spec/javascripts/vue_mr_widget/mock_data.js @@ -1,213 +1,218 @@ -/* eslint-disable */ - export default { - "id": 132, - "iid": 22, - "assignee_id": null, - "author_id": 1, - "description": "", - "lock_version": null, - "milestone_id": null, - "position": 0, - "state": "merged", - "title": "Update README.md", - "updated_by_id": null, - "created_at": "2017-04-07T12:27:26.718Z", - "updated_at": "2017-04-07T15:39:25.852Z", - "time_estimate": 0, - "total_time_spent": 0, - "human_time_estimate": null, - "human_total_time_spent": null, - "in_progress_merge_commit_sha": null, - "merge_commit_sha": "53027d060246c8f47e4a9310fb332aa52f221775", - "merge_error": null, - "merge_params": { - "force_remove_source_branch": null + id: 132, + iid: 22, + assignee_id: null, + author_id: 1, + description: '', + lock_version: null, + milestone_id: null, + position: 0, + state: 'merged', + title: 'Update README.md', + updated_by_id: null, + created_at: '2017-04-07T12:27:26.718Z', + updated_at: '2017-04-07T15:39:25.852Z', + time_estimate: 0, + total_time_spent: 0, + human_time_estimate: null, + human_total_time_spent: null, + in_progress_merge_commit_sha: null, + merge_commit_sha: '53027d060246c8f47e4a9310fb332aa52f221775', + merge_error: null, + merge_params: { + force_remove_source_branch: null, }, - "merge_status": "can_be_merged", - "merge_user_id": null, - "merge_when_pipeline_succeeds": false, - "source_branch": "daaaa", - "source_branch_link": "daaaa", - "source_project_id": 19, - "target_branch": "master", - "target_project_id": 19, - "metrics": { - "merged_by": { - "name": "Administrator", - "username": "root", - "id": 1, - "state": "active", - "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", - "web_url": "http://localhost:3000/root" + merge_status: 'can_be_merged', + merge_user_id: null, + merge_when_pipeline_succeeds: false, + source_branch: 'daaaa', + source_branch_link: 'daaaa', + source_project_id: 19, + target_branch: 'master', + target_project_id: 19, + metrics: { + merged_by: { + name: 'Administrator', + username: 'root', + id: 1, + state: 'active', + avatar_url: + 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + web_url: 'http://localhost:3000/root', }, - "merged_at": "2017-04-07T15:39:25.696Z", - "closed_by": null, - "closed_at": null + merged_at: '2017-04-07T15:39:25.696Z', + closed_by: null, + closed_at: null, }, - "author": { - "name": "Administrator", - "username": "root", - "id": 1, - 
"state": "active", - "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", - "web_url": "http://localhost:3000/root" + author: { + name: 'Administrator', + username: 'root', + id: 1, + state: 'active', + avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + web_url: 'http://localhost:3000/root', }, - "merge_user": null, - "diff_head_sha": "104096c51715e12e7ae41f9333e9fa35b73f385d", - "diff_head_commit_short_id": "104096c5", - "merge_commit_message": "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22", - "pipeline": { - "id": 172, - "user": { - "name": "Administrator", - "username": "root", - "id": 1, - "state": "active", - "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", - "web_url": "http://localhost:3000/root" + merge_user: null, + diff_head_sha: '104096c51715e12e7ae41f9333e9fa35b73f385d', + diff_head_commit_short_id: '104096c5', + merge_commit_message: + "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22", + pipeline: { + id: 172, + user: { + name: 'Administrator', + username: 'root', + id: 1, + state: 'active', + avatar_url: + 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + web_url: 'http://localhost:3000/root', }, - "active": false, - "coverage": "92.16", - "path": "/root/acets-app/pipelines/172", - "details": { - "status": { - "icon": "icon_status_success", - "favicon": "favicon_status_success", - "text": "passed", - "label": "passed", - "group": "success", - "has_details": true, - "details_path": "/root/acets-app/pipelines/172" + active: false, + coverage: '92.16', + path: '/root/acets-app/pipelines/172', + details: { + status: { + icon: 'icon_status_success', + favicon: 'favicon_status_success', + text: 'passed', + label: 'passed', + group: 'success', + has_details: true, + details_path: '/root/acets-app/pipelines/172', }, - "duration": null, - "finished_at": "2017-04-07T14:00:14.256Z", - "stages": [ + duration: null, + finished_at: '2017-04-07T14:00:14.256Z', + stages: [ { - "name": "build", - "title": "build: failed", - "status": { - "icon": "icon_status_failed", - "favicon": "favicon_status_failed", - "text": "failed", - "label": "failed", - "group": "failed", - "has_details": true, - "details_path": "/root/acets-app/pipelines/172#build" + name: 'build', + title: 'build: failed', + status: { + icon: 'icon_status_failed', + favicon: 'favicon_status_failed', + text: 'failed', + label: 'failed', + group: 'failed', + has_details: true, + details_path: '/root/acets-app/pipelines/172#build', }, - "path": "/root/acets-app/pipelines/172#build", - "dropdown_path": "/root/acets-app/pipelines/172/stage.json?stage=build" + path: '/root/acets-app/pipelines/172#build', + dropdown_path: '/root/acets-app/pipelines/172/stage.json?stage=build', }, { - "name": "review", - "title": "review: skipped", - "status": { - "icon": "icon_status_skipped", - "favicon": "favicon_status_skipped", - "text": "skipped", - "label": "skipped", - "group": "skipped", - "has_details": true, - "details_path": "/root/acets-app/pipelines/172#review" + name: 'review', + title: 'review: skipped', + status: { + icon: 'icon_status_skipped', + favicon: 'favicon_status_skipped', + text: 'skipped', + label: 'skipped', + group: 'skipped', + has_details: true, + details_path: '/root/acets-app/pipelines/172#review', }, - "path": "/root/acets-app/pipelines/172#review", - "dropdown_path": 
"/root/acets-app/pipelines/172/stage.json?stage=review" - } - ], - "artifacts": [ - + path: '/root/acets-app/pipelines/172#review', + dropdown_path: '/root/acets-app/pipelines/172/stage.json?stage=review', + }, ], - "manual_actions": [ + artifacts: [], + manual_actions: [ { - "name": "stop_review", - "path": "/root/acets-app/builds/1427/play", - "playable": false - } - ] + name: 'stop_review', + path: '/root/acets-app/builds/1427/play', + playable: false, + }, + ], }, - "flags": { - "latest": false, - "triggered": false, - "stuck": false, - "yaml_errors": false, - "retryable": true, - "cancelable": false + flags: { + latest: false, + triggered: false, + stuck: false, + yaml_errors: false, + retryable: true, + cancelable: false, }, - "ref": { - "name": "daaaa", - "path": "/root/acets-app/tree/daaaa", - "tag": false, - "branch": true + ref: { + name: 'daaaa', + path: '/root/acets-app/tree/daaaa', + tag: false, + branch: true, }, - "commit": { - "id": "104096c51715e12e7ae41f9333e9fa35b73f385d", - "short_id": "104096c5", - "title": "Update README.md", - "created_at": "2017-04-07T15:27:18.000+03:00", - "parent_ids": [ - "2396536178668d8930c29d904e53bd4d06228b32" - ], - "message": "Update README.md", - "author_name": "Administrator", - "author_email": "admin@example.com", - "authored_date": "2017-04-07T15:27:18.000+03:00", - "committer_name": "Administrator", - "committer_email": "admin@example.com", - "committed_date": "2017-04-07T15:27:18.000+03:00", - "author": { - "name": "Administrator", - "username": "root", - "id": 1, - "state": "active", - "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", - "web_url": "http://localhost:3000/root" + commit: { + id: '104096c51715e12e7ae41f9333e9fa35b73f385d', + short_id: '104096c5', + title: 'Update README.md', + created_at: '2017-04-07T15:27:18.000+03:00', + parent_ids: ['2396536178668d8930c29d904e53bd4d06228b32'], + message: 'Update README.md', + author_name: 'Administrator', + author_email: 'admin@example.com', + authored_date: '2017-04-07T15:27:18.000+03:00', + committer_name: 'Administrator', + committer_email: 'admin@example.com', + committed_date: '2017-04-07T15:27:18.000+03:00', + author: { + name: 'Administrator', + username: 'root', + id: 1, + state: 'active', + avatar_url: + 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + web_url: 'http://localhost:3000/root', }, - "author_gravatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", - "commit_url": "http://localhost:3000/root/acets-app/commit/104096c51715e12e7ae41f9333e9fa35b73f385d", - "commit_path": "/root/acets-app/commit/104096c51715e12e7ae41f9333e9fa35b73f385d" + author_gravatar_url: + 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + commit_url: + 'http://localhost:3000/root/acets-app/commit/104096c51715e12e7ae41f9333e9fa35b73f385d', + commit_path: '/root/acets-app/commit/104096c51715e12e7ae41f9333e9fa35b73f385d', }, - "retry_path": "/root/acets-app/pipelines/172/retry", - "created_at": "2017-04-07T12:27:19.520Z", - "updated_at": "2017-04-07T15:28:44.800Z" + retry_path: '/root/acets-app/pipelines/172/retry', + created_at: '2017-04-07T12:27:19.520Z', + updated_at: '2017-04-07T15:28:44.800Z', }, - "work_in_progress": false, - "source_branch_exists": false, - "mergeable_discussions_state": true, - "conflicts_can_be_resolved_in_ui": false, - "branch_missing": true, - "commits_count": 1, - "has_conflicts": false, - "can_be_merged": 
true, - "has_ci": true, - "ci_status": "success", - "pipeline_status_path": "/root/acets-app/merge_requests/22/pipeline_status", - "issues_links": { - "closing": "", - "mentioned_but_not_closing": "" + work_in_progress: false, + source_branch_exists: false, + mergeable_discussions_state: true, + conflicts_can_be_resolved_in_ui: false, + branch_missing: true, + commits_count: 1, + has_conflicts: false, + can_be_merged: true, + has_ci: true, + ci_status: 'success', + pipeline_status_path: '/root/acets-app/merge_requests/22/pipeline_status', + issues_links: { + closing: '', + mentioned_but_not_closing: '', }, - "current_user": { - "can_resolve_conflicts": true, - "can_remove_source_branch": false, - "can_revert_on_current_merge_request": true, - "can_cherry_pick_on_current_merge_request": true + current_user: { + can_resolve_conflicts: true, + can_remove_source_branch: false, + can_revert_on_current_merge_request: true, + can_cherry_pick_on_current_merge_request: true, }, - "target_branch_path": "/root/acets-app/branches/master", - "source_branch_path": "/root/acets-app/branches/daaaa", - "conflict_resolution_ui_path": "/root/acets-app/merge_requests/22/conflicts", - "remove_wip_path": "/root/acets-app/merge_requests/22/remove_wip", - "cancel_merge_when_pipeline_succeeds_path": "/root/acets-app/merge_requests/22/cancel_merge_when_pipeline_succeeds", - "create_issue_to_resolve_discussions_path": "/root/acets-app/issues/new?merge_request_to_resolve_discussions_of=22", - "merge_path": "/root/acets-app/merge_requests/22/merge", - "cherry_pick_in_fork_path": "/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+revert+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1", - "revert_in_fork_path": "/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+cherry-pick+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1", - "email_patches_path": "/root/acets-app/merge_requests/22.patch", - "plain_diff_path": "/root/acets-app/merge_requests/22.diff", - "status_path": "/root/acets-app/merge_requests/22.json", - "merge_check_path": "/root/acets-app/merge_requests/22/merge_check", - "ci_environments_status_url": "/root/acets-app/merge_requests/22/ci_environments_status", - "project_archived": false, - "merge_commit_message_with_description": "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22", - "diverged_commits_count": 0, - "only_allow_merge_if_pipeline_succeeds": false, - "commit_change_content_path": "/root/acets-app/merge_requests/22/commit_change_content" -} + target_branch_path: '/root/acets-app/branches/master', + source_branch_path: '/root/acets-app/branches/daaaa', + conflict_resolution_ui_path: '/root/acets-app/merge_requests/22/conflicts', + remove_wip_path: 
'/root/acets-app/merge_requests/22/remove_wip', + cancel_merge_when_pipeline_succeeds_path: + '/root/acets-app/merge_requests/22/cancel_merge_when_pipeline_succeeds', + create_issue_to_resolve_discussions_path: + '/root/acets-app/issues/new?merge_request_to_resolve_discussions_of=22', + merge_path: '/root/acets-app/merge_requests/22/merge', + cherry_pick_in_fork_path: + '/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+revert+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1', + revert_in_fork_path: + '/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+cherry-pick+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1', + email_patches_path: '/root/acets-app/merge_requests/22.patch', + plain_diff_path: '/root/acets-app/merge_requests/22.diff', + status_path: '/root/acets-app/merge_requests/22.json', + merge_check_path: '/root/acets-app/merge_requests/22/merge_check', + ci_environments_status_url: '/root/acets-app/merge_requests/22/ci_environments_status', + project_archived: false, + merge_commit_message_with_description: + "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22", + diverged_commits_count: 0, + only_allow_merge_if_pipeline_succeeds: false, + commit_change_content_path: '/root/acets-app/merge_requests/22/commit_change_content', +}; diff --git a/spec/javascripts/vue_shared/components/callout_spec.js b/spec/javascripts/vue_shared/components/callout_spec.js new file mode 100644 index 00000000000..e62bd86f4ca --- /dev/null +++ b/spec/javascripts/vue_shared/components/callout_spec.js @@ -0,0 +1,45 @@ +import Vue from 'vue'; +import callout from '~/vue_shared/components/callout.vue'; +import createComponent from 'spec/helpers/vue_mount_component_helper'; + +describe('Callout Component', () => { + let CalloutComponent; + let vm; + const exampleMessage = 'This is a callout message!'; + + beforeEach(() => { + CalloutComponent = Vue.extend(callout); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('should render the appropriate variant of callout', () => { + vm = createComponent(CalloutComponent, { + category: 'info', + message: exampleMessage, + }); + + expect(vm.$el.getAttribute('class')).toEqual('bs-callout bs-callout-info'); + + expect(vm.$el.tagName).toEqual('DIV'); + }); + + it('should render accessibility attributes', () => { + vm = createComponent(CalloutComponent, { + message: exampleMessage, + }); + + expect(vm.$el.getAttribute('role')).toEqual('alert'); + expect(vm.$el.getAttribute('aria-live')).toEqual('assertive'); + }); + + it('should render the provided message', () => { + vm = createComponent(CalloutComponent, { + message: exampleMessage, + }); + + expect(vm.$el.innerHTML.trim()).toEqual(exampleMessage); + }); +}); diff --git 
a/spec/javascripts/vue_shared/components/commit_spec.js b/spec/javascripts/vue_shared/components/commit_spec.js index fdead874209..ed66361bfc3 100644 --- a/spec/javascripts/vue_shared/components/commit_spec.js +++ b/spec/javascripts/vue_shared/components/commit_spec.js @@ -1,5 +1,6 @@ import Vue from 'vue'; import commitComp from '~/vue_shared/components/commit.vue'; +import mountComponent from '../../helpers/vue_mount_component_helper'; describe('Commit component', () => { let props; @@ -10,25 +11,28 @@ describe('Commit component', () => { CommitComponent = Vue.extend(commitComp); }); + afterEach(() => { + component.$destroy(); + }); + it('should render a fork icon if it does not represent a tag', () => { - component = new CommitComponent({ - propsData: { - tag: false, - commitRef: { - name: 'master', - ref_url: 'http://localhost/namespace2/gitlabhq/tree/master', - }, - commitUrl: 'https://gitlab.com/gitlab-org/gitlab-ce/commit/b7836eddf62d663c665769e1b0960197fd215067', - shortSha: 'b7836edd', - title: 'Commit message', - author: { - avatar_url: 'https://gitlab.com/uploads/-/system/user/avatar/300478/avatar.png', - web_url: 'https://gitlab.com/jschatz1', - path: '/jschatz1', - username: 'jschatz1', - }, + component = mountComponent(CommitComponent, { + tag: false, + commitRef: { + name: 'master', + ref_url: 'http://localhost/namespace2/gitlabhq/tree/master', }, - }).$mount(); + commitUrl: + 'https://gitlab.com/gitlab-org/gitlab-ce/commit/b7836eddf62d663c665769e1b0960197fd215067', + shortSha: 'b7836edd', + title: 'Commit message', + author: { + avatar_url: 'https://gitlab.com/uploads/-/system/user/avatar/300478/avatar.png', + web_url: 'https://gitlab.com/jschatz1', + path: '/jschatz1', + username: 'jschatz1', + }, + }); expect(component.$el.querySelector('.icon-container').children).toContain('svg'); }); @@ -41,7 +45,8 @@ describe('Commit component', () => { name: 'master', ref_url: 'http://localhost/namespace2/gitlabhq/tree/master', }, - commitUrl: 'https://gitlab.com/gitlab-org/gitlab-ce/commit/b7836eddf62d663c665769e1b0960197fd215067', + commitUrl: + 'https://gitlab.com/gitlab-org/gitlab-ce/commit/b7836eddf62d663c665769e1b0960197fd215067', shortSha: 'b7836edd', title: 'Commit message', author: { @@ -53,9 +58,7 @@ describe('Commit component', () => { commitIconSvg: '<svg></svg>', }; - component = new CommitComponent({ - propsData: props, - }).$mount(); + component = mountComponent(CommitComponent, props); }); it('should render a tag icon if it represents a tag', () => { @@ -63,7 +66,9 @@ describe('Commit component', () => { }); it('should render a link to the ref url', () => { - expect(component.$el.querySelector('.ref-name').getAttribute('href')).toEqual(props.commitRef.ref_url); + expect(component.$el.querySelector('.ref-name').getAttribute('href')).toEqual( + props.commitRef.ref_url, + ); }); it('should render the ref name', () => { @@ -71,7 +76,9 @@ describe('Commit component', () => { }); it('should render the commit short sha with a link to the commit url', () => { - expect(component.$el.querySelector('.commit-sha').getAttribute('href')).toEqual(props.commitUrl); + expect(component.$el.querySelector('.commit-sha').getAttribute('href')).toEqual( + props.commitUrl, + ); expect(component.$el.querySelector('.commit-sha').textContent).toContain(props.shortSha); }); @@ -88,21 +95,25 @@ describe('Commit component', () => { it('Should render the author avatar with title and alt attributes', () => { expect( - component.$el.querySelector('.commit-title .avatar-image-container 
img').getAttribute('data-original-title'), + component.$el + .querySelector('.commit-title .avatar-image-container img') + .getAttribute('data-original-title'), ).toContain(props.author.username); expect( - component.$el.querySelector('.commit-title .avatar-image-container img').getAttribute('alt'), + component.$el + .querySelector('.commit-title .avatar-image-container img') + .getAttribute('alt'), ).toContain(`${props.author.username}'s avatar`); }); }); it('should render the commit title', () => { - expect( - component.$el.querySelector('a.commit-row-message').getAttribute('href'), - ).toEqual(props.commitUrl); - expect( - component.$el.querySelector('a.commit-row-message').textContent, - ).toContain(props.title); + expect(component.$el.querySelector('a.commit-row-message').getAttribute('href')).toEqual( + props.commitUrl, + ); + expect(component.$el.querySelector('a.commit-row-message').textContent).toContain( + props.title, + ); }); }); @@ -114,19 +125,18 @@ describe('Commit component', () => { name: 'master', ref_url: 'http://localhost/namespace2/gitlabhq/tree/master', }, - commitUrl: 'https://gitlab.com/gitlab-org/gitlab-ce/commit/b7836eddf62d663c665769e1b0960197fd215067', + commitUrl: + 'https://gitlab.com/gitlab-org/gitlab-ce/commit/b7836eddf62d663c665769e1b0960197fd215067', shortSha: 'b7836edd', title: null, author: {}, }; - component = new CommitComponent({ - propsData: props, - }).$mount(); + component = mountComponent(CommitComponent, props); - expect( - component.$el.querySelector('.commit-title span').textContent, - ).toContain('Cant find HEAD commit for this branch'); + expect(component.$el.querySelector('.commit-title span').textContent).toContain( + "Can't find HEAD commit for this branch", + ); }); }); }); diff --git a/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js b/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js new file mode 100644 index 00000000000..383f0cd29ea --- /dev/null +++ b/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js @@ -0,0 +1,70 @@ +import Vue from 'vue'; +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; +import contentViewer from '~/vue_shared/components/content_viewer/content_viewer.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; + +describe('ContentViewer', () => { + let vm; + let mock; + + function createComponent(props) { + const ContentViewer = Vue.extend(contentViewer); + vm = mountComponent(ContentViewer, props); + } + + afterEach(() => { + vm.$destroy(); + if (mock) mock.restore(); + }); + + it('markdown preview renders + loads rendered markdown from server', done => { + mock = new MockAdapter(axios); + mock.onPost(`${gon.relative_url_root}/testproject/preview_markdown`).reply(200, { + body: '<b>testing</b>', + }); + + createComponent({ + path: 'test.md', + content: '* Test', + projectPath: 'testproject', + }); + + const previewContainer = vm.$el.querySelector('.md-previewer'); + + setTimeout(() => { + expect(previewContainer.textContent).toContain('testing'); + + done(); + }); + }); + + it('renders image preview', done => { + createComponent({ + path: 'test.jpg', + fileSize: 1024, + }); + + setTimeout(() => { + expect(vm.$el.querySelector('.image_file img').getAttribute('src')).toBe('test.jpg'); + + done(); + }); + }); + + it('renders fallback download control', done => { + createComponent({ + path: 'test.abc', + fileSize: 1024, + }); + + setTimeout(() => { + 
expect(vm.$el.querySelector('.file-info').textContent.trim()).toContain( + 'test.abc (1.00 KiB)', + ); + expect(vm.$el.querySelector('.btn.btn-default').textContent.trim()).toContain('Download'); + + done(); + }); + }); +}); diff --git a/spec/javascripts/vue_shared/components/modal_spec.js b/spec/javascripts/vue_shared/components/deprecated_modal_spec.js index d01a94c25e5..59d4e549a91 100644 --- a/spec/javascripts/vue_shared/components/modal_spec.js +++ b/spec/javascripts/vue_shared/components/deprecated_modal_spec.js @@ -1,11 +1,11 @@ import $ from 'jquery'; import Vue from 'vue'; -import modal from '~/vue_shared/components/modal.vue'; +import DeprecatedModal from '~/vue_shared/components/deprecated_modal.vue'; import mountComponent from 'spec/helpers/vue_mount_component_helper'; -const modalComponent = Vue.extend(modal); +const modalComponent = Vue.extend(DeprecatedModal); -describe('Modal', () => { +describe('DeprecatedModal', () => { let vm; afterEach(() => { diff --git a/spec/javascripts/vue_shared/components/markdown/header_spec.js b/spec/javascripts/vue_shared/components/markdown/header_spec.js index edebd822295..02117638b63 100644 --- a/spec/javascripts/vue_shared/components/markdown/header_spec.js +++ b/spec/javascripts/vue_shared/components/markdown/header_spec.js @@ -1,10 +1,11 @@ import Vue from 'vue'; +import $ from 'jquery'; import headerComponent from '~/vue_shared/components/markdown/header.vue'; describe('Markdown field header component', () => { let vm; - beforeEach((done) => { + beforeEach(done => { const Component = Vue.extend(headerComponent); vm = new Component({ @@ -17,24 +18,18 @@ describe('Markdown field header component', () => { }); it('renders markdown buttons', () => { - expect( - vm.$el.querySelectorAll('.js-md').length, - ).toBe(7); + expect(vm.$el.querySelectorAll('.js-md').length).toBe(7); }); it('renders `write` link as active when previewMarkdown is false', () => { - expect( - vm.$el.querySelector('li:nth-child(1)').classList.contains('active'), - ).toBeTruthy(); + expect(vm.$el.querySelector('li:nth-child(1)').classList.contains('active')).toBeTruthy(); }); - it('renders `preview` link as active when previewMarkdown is true', (done) => { + it('renders `preview` link as active when previewMarkdown is true', done => { vm.previewMarkdown = true; Vue.nextTick(() => { - expect( - vm.$el.querySelector('li:nth-child(2)').classList.contains('active'), - ).toBeTruthy(); + expect(vm.$el.querySelector('li:nth-child(2)').classList.contains('active')).toBeTruthy(); done(); }); @@ -52,16 +47,24 @@ describe('Markdown field header component', () => { expect(vm.$emit).toHaveBeenCalledWith('write-markdown'); }); - it('blurs preview link after click', (done) => { + it('does not emit toggle markdown event when triggered from another form', () => { + spyOn(vm, '$emit'); + + $(document).triggerHandler('markdown-preview:show', [ + $('<form><textarea class="markdown-area"></textarea></textarea></form>'), + ]); + + expect(vm.$emit).not.toHaveBeenCalled(); + }); + + it('blurs preview link after click', done => { const link = vm.$el.querySelector('li:nth-child(2) a'); spyOn(HTMLElement.prototype, 'blur'); link.click(); setTimeout(() => { - expect( - link.blur, - ).toHaveBeenCalled(); + expect(link.blur).toHaveBeenCalled(); done(); }); diff --git a/spec/javascripts/vue_shared/components/markdown/toolbar_spec.js b/spec/javascripts/vue_shared/components/markdown/toolbar_spec.js index 818ef0af3c2..3e708f865c8 100644 --- a/spec/javascripts/vue_shared/components/markdown/toolbar_spec.js 
+++ b/spec/javascripts/vue_shared/components/markdown/toolbar_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import toolbar from '~/vue_shared/components/markdown/toolbar.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('toolbar', () => { let vm; diff --git a/spec/javascripts/vue_shared/components/mock_data.js b/spec/javascripts/vue_shared/components/mock_data.js index 0d781bdca74..15b56c58c33 100644 --- a/spec/javascripts/vue_shared/components/mock_data.js +++ b/spec/javascripts/vue_shared/components/mock_data.js @@ -1,5 +1,3 @@ -/* eslint-disable */ - export const mockMetrics = [ [1493716685, '4.30859375'], [1493716745, '4.30859375'], diff --git a/spec/javascripts/vue_shared/components/sidebar/labels_select/base_spec.js b/spec/javascripts/vue_shared/components/sidebar/labels_select/base_spec.js index 8daaf018396..6fe95153204 100644 --- a/spec/javascripts/vue_shared/components/sidebar/labels_select/base_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/labels_select/base_spec.js @@ -3,9 +3,9 @@ import Vue from 'vue'; import LabelsSelect from '~/labels_select'; import baseComponent from '~/vue_shared/components/sidebar/labels_select/base.vue'; -import { mockConfig, mockLabels } from './mock_data'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; -import mountComponent from '../../../../helpers/vue_mount_component_helper'; +import { mockConfig, mockLabels } from './mock_data'; const createComponent = (config = mockConfig) => { const Component = Vue.extend(baseComponent); diff --git a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_button_spec.js b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_button_spec.js index ec63ac306d0..f25c70db125 100644 --- a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_button_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_button_spec.js @@ -2,9 +2,9 @@ import Vue from 'vue'; import dropdownButtonComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_button.vue'; -import { mockConfig, mockLabels } from './mock_data'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; -import mountComponent from '../../../../helpers/vue_mount_component_helper'; +import { mockConfig, mockLabels } from './mock_data'; const componentConfig = Object.assign({}, mockConfig, { fieldName: 'label_id[]', diff --git a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_create_label_spec.js b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_create_label_spec.js index 5cb4bb6fea6..ce559fe0335 100644 --- a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_create_label_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_create_label_spec.js @@ -2,9 +2,9 @@ import Vue from 'vue'; import dropdownCreateLabelComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_create_label.vue'; -import { mockSuggestedColors } from './mock_data'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; -import mountComponent from '../../../../helpers/vue_mount_component_helper'; +import { mockSuggestedColors } from './mock_data'; const createComponent = (headerTitle) => { const Component = Vue.extend(dropdownCreateLabelComponent); diff --git 
a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_footer_spec.js b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_footer_spec.js index 0f4fa716f8a..debeab25bd6 100644 --- a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_footer_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_footer_spec.js @@ -2,9 +2,9 @@ import Vue from 'vue'; import dropdownFooterComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_footer.vue'; -import { mockConfig } from './mock_data'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; -import mountComponent from '../../../../helpers/vue_mount_component_helper'; +import { mockConfig } from './mock_data'; const createComponent = ( labelsWebUrl = mockConfig.labelsWebUrl, diff --git a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_header_spec.js b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_header_spec.js index 325fa47c957..cdf234bb0c4 100644 --- a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_header_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_header_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import dropdownHeaderComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_header.vue'; -import mountComponent from '../../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const createComponent = () => { const Component = Vue.extend(dropdownHeaderComponent); diff --git a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_hidden_input_spec.js b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_hidden_input_spec.js index 703b87498c7..88733922a59 100644 --- a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_hidden_input_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_hidden_input_spec.js @@ -2,9 +2,9 @@ import Vue from 'vue'; import dropdownHiddenInputComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_hidden_input.vue'; -import { mockLabels } from './mock_data'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; -import mountComponent from '../../../../helpers/vue_mount_component_helper'; +import { mockLabels } from './mock_data'; const createComponent = (name = 'label_id[]', label = mockLabels[0]) => { const Component = Vue.extend(dropdownHiddenInputComponent); diff --git a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_search_input_spec.js b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_search_input_spec.js index 69e11d966c2..57608d957e7 100644 --- a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_search_input_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_search_input_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import dropdownSearchInputComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_search_input.vue'; -import mountComponent from '../../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const createComponent = () => { const Component = Vue.extend(dropdownSearchInputComponent); diff --git a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_title_spec.js 
b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_title_spec.js index c3580933072..7c3d2711f65 100644 --- a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_title_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_title_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import dropdownTitleComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_title.vue'; -import mountComponent from '../../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const createComponent = (canEdit = true) => { const Component = Vue.extend(dropdownTitleComponent); diff --git a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_collapsed_spec.js b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_collapsed_spec.js index 93b42795bea..39040670a87 100644 --- a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_collapsed_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_collapsed_spec.js @@ -2,9 +2,9 @@ import Vue from 'vue'; import dropdownValueCollapsedComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_value_collapsed.vue'; -import { mockLabels } from './mock_data'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; -import mountComponent from '../../../../helpers/vue_mount_component_helper'; +import { mockLabels } from './mock_data'; const createComponent = (labels = mockLabels) => { const Component = Vue.extend(dropdownValueCollapsedComponent); diff --git a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js index 66e0957b431..4397b00acfa 100644 --- a/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js @@ -2,9 +2,9 @@ import Vue from 'vue'; import dropdownValueComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_value.vue'; -import { mockConfig, mockLabels } from './mock_data'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; -import mountComponent from '../../../../helpers/vue_mount_component_helper'; +import { mockConfig, mockLabels } from './mock_data'; const createComponent = ( labels = mockLabels, diff --git a/spec/javascripts/vue_shared/components/skeleton_loading_container_spec.js b/spec/javascripts/vue_shared/components/skeleton_loading_container_spec.js index bbd50863069..34487885cf0 100644 --- a/spec/javascripts/vue_shared/components/skeleton_loading_container_spec.js +++ b/spec/javascripts/vue_shared/components/skeleton_loading_container_spec.js @@ -14,8 +14,8 @@ describe('Skeleton loading container', () => { vm.$destroy(); }); - it('renders 6 skeleton lines by default', () => { - expect(vm.$el.querySelector('.skeleton-line-6')).not.toBeNull(); + it('renders 3 skeleton lines by default', () => { + expect(vm.$el.querySelector('.skeleton-line-3')).not.toBeNull(); }); it('renders in full mode by default', () => { diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb index 3c4deba4712..58a49124ce6 100644 --- a/spec/lib/api/helpers_spec.rb +++ b/spec/lib/api/helpers_spec.rb @@ -3,6 +3,48 @@ require 'spec_helper' describe API::Helpers do subject { Class.new.include(described_class).new } + describe '#find_project' do 
+ let(:project) { create(:project) } + + shared_examples 'project finder' do + context 'when project exists' do + it 'returns requested project' do + expect(subject.find_project(existing_id)).to eq(project) + end + + it 'returns nil' do + expect(subject.find_project(non_existing_id)).to be_nil + end + end + end + + context 'when ID is used as an argument' do + let(:existing_id) { project.id } + let(:non_existing_id) { (Project.maximum(:id) || 0) + 1 } + + it_behaves_like 'project finder' + end + + context 'when PATH is used as an argument' do + let(:existing_id) { project.full_path } + let(:non_existing_id) { 'something/else' } + + it_behaves_like 'project finder' + + context 'with an invalid PATH' do + let(:non_existing_id) { 'undefined' } # path without slash + + it_behaves_like 'project finder' + + it 'does not hit the database' do + expect(Project).not_to receive(:find_by_full_path) + + subject.find_project(non_existing_id) + end + end + end + end + describe '#find_namespace' do let(:namespace) { create(:namespace) } diff --git a/spec/lib/backup/files_spec.rb b/spec/lib/backup/files_spec.rb new file mode 100644 index 00000000000..14d055cbcc1 --- /dev/null +++ b/spec/lib/backup/files_spec.rb @@ -0,0 +1,66 @@ +require 'spec_helper' + +describe Backup::Files do + let(:progress) { StringIO.new } + let!(:project) { create(:project) } + + before do + allow(progress).to receive(:puts) + allow(progress).to receive(:print) + allow(FileUtils).to receive(:mkdir_p).and_return(true) + allow(FileUtils).to receive(:mv).and_return(true) + allow(File).to receive(:exist?).and_return(true) + allow(File).to receive(:realpath).with("/var/gitlab-registry").and_return("/var/gitlab-registry") + allow(File).to receive(:realpath).with("/var/gitlab-registry/..").and_return("/var") + + allow_any_instance_of(String).to receive(:color) do |string, _color| + string + end + + allow_any_instance_of(described_class).to receive(:progress).and_return(progress) + end + + describe '#restore' do + subject { described_class.new('registry', '/var/gitlab-registry') } + let(:timestamp) { Time.utc(2017, 3, 22) } + + around do |example| + Timecop.freeze(timestamp) { example.run } + end + + describe 'folders with permission' do + before do + allow(subject).to receive(:run_pipeline!).and_return(true) + allow(subject).to receive(:backup_existing_files).and_return(true) + allow(Dir).to receive(:glob).with("/var/gitlab-registry/*", File::FNM_DOTMATCH).and_return(["/var/gitlab-registry/.", "/var/gitlab-registry/..", "/var/gitlab-registry/sample1"]) + end + + it 'moves all necessary files' do + allow(subject).to receive(:backup_existing_files).and_call_original + expect(FileUtils).to receive(:mv).with(["/var/gitlab-registry/sample1"], File.join(Gitlab.config.backup.path, "tmp", "registry.#{Time.now.to_i}")) + subject.restore + end + + it 'raises no errors' do + expect { subject.restore }.not_to raise_error + end + + it 'calls tar command with unlink' do + expect(subject).to receive(:run_pipeline!).with([%w(gzip -cd), %w(tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -)], any_args) + subject.restore + end + end + + describe 'folders without permissions' do + before do + allow(FileUtils).to receive(:mv).and_raise(Errno::EACCES) + allow(subject).to receive(:run_pipeline!).and_return(true) + end + + it 'shows error message' do + expect(subject).to receive(:access_denied_error).with("/var/gitlab-registry") + subject.restore + end + end + end +end diff --git a/spec/lib/backup/manager_spec.rb 
b/spec/lib/backup/manager_spec.rb index 5100f5737c2..84688845fa5 100644 --- a/spec/lib/backup/manager_spec.rb +++ b/spec/lib/backup/manager_spec.rb @@ -278,6 +278,10 @@ describe Backup::Manager do connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) end + after do + Fog.unmock! + end + context 'target path' do it 'uses the tar filename by default' do expect_any_instance_of(Fog::Collection).to receive(:create) diff --git a/spec/lib/backup/repository_spec.rb b/spec/lib/backup/repository_spec.rb index a9b5ed1112a..e4c1c9bafc0 100644 --- a/spec/lib/backup/repository_spec.rb +++ b/spec/lib/backup/repository_spec.rb @@ -7,6 +7,8 @@ describe Backup::Repository do before do allow(progress).to receive(:puts) allow(progress).to receive(:print) + allow(FileUtils).to receive(:mkdir_p).and_return(true) + allow(FileUtils).to receive(:mv).and_return(true) allow_any_instance_of(String).to receive(:color) do |string, _color| string @@ -33,7 +35,7 @@ describe Backup::Repository do let(:timestamp) { Time.utc(2017, 3, 22) } let(:temp_dirs) do Gitlab.config.repositories.storages.map do |name, storage| - File.join(storage['path'], '..', 'repositories.old.' + timestamp.to_i.to_s) + File.join(storage.legacy_disk_path, '..', 'repositories.old.' + timestamp.to_i.to_s) end end @@ -68,6 +70,17 @@ describe Backup::Repository do end end end + + describe 'folders without permissions' do + before do + allow(FileUtils).to receive(:mv).and_raise(Errno::EACCES) + end + + it 'shows error message' do + expect(subject).to receive(:access_denied_error) + subject.restore + end + end end describe '#empty_repo?' do diff --git a/spec/lib/banzai/commit_renderer_spec.rb b/spec/lib/banzai/commit_renderer_spec.rb index e7ebb2a332f..1f53657c59c 100644 --- a/spec/lib/banzai/commit_renderer_spec.rb +++ b/spec/lib/banzai/commit_renderer_spec.rb @@ -6,7 +6,10 @@ describe Banzai::CommitRenderer do user = build(:user) project = create(:project, :repository) - expect(Banzai::ObjectRenderer).to receive(:new).with(project, user).and_call_original + expect(Banzai::ObjectRenderer) + .to receive(:new) + .with(user: user, default_project: project) + .and_call_original described_class::ATTRIBUTES.each do |attr| expect_any_instance_of(Banzai::ObjectRenderer).to receive(:render).with([project.commit], attr).once.and_call_original diff --git a/spec/lib/banzai/cross_project_reference_spec.rb b/spec/lib/banzai/cross_project_reference_spec.rb index 68ca960caab..aadfe7637dd 100644 --- a/spec/lib/banzai/cross_project_reference_spec.rb +++ b/spec/lib/banzai/cross_project_reference_spec.rb @@ -14,6 +14,16 @@ describe Banzai::CrossProjectReference do end end + context 'when no project was referenced in group context' do + it 'returns the group from context' do + group = double + + allow(self).to receive(:context).and_return({ group: group }) + + expect(parent_from_ref(nil)).to eq group + end + end + context 'when referenced project does not exist' do it 'returns nil' do expect(parent_from_ref('invalid/reference')).to be_nil diff --git a/spec/lib/banzai/filter/autolink_filter_spec.rb b/spec/lib/banzai/filter/autolink_filter_spec.rb index b502daea418..a50329473ad 100644 --- a/spec/lib/banzai/filter/autolink_filter_spec.rb +++ b/spec/lib/banzai/filter/autolink_filter_spec.rb @@ -122,14 +122,10 @@ describe Banzai::Filter::AutolinkFilter do end it 'does not include trailing punctuation' do - doc = filter("See #{link}.") - expect(doc.at_css('a').text).to eq link - - doc = filter("See #{link}, ok?") - expect(doc.at_css('a').text).to eq link 
- - doc = filter("See #{link}...") - expect(doc.at_css('a').text).to eq link + ['.', ', ok?', '...', '?', '!', ': is that ok?'].each do |trailing_punctuation| + doc = filter("See #{link}#{trailing_punctuation}") + expect(doc.at_css('a').text).to eq link + end end it 'includes trailing punctuation when part of a balanced pair' do @@ -171,6 +167,15 @@ describe Banzai::Filter::AutolinkFilter do expect(actual).to eq(expected_complicated_link) end + it 'does not double-encode HTML entities' do + encoded_link = "#{link}?foo=bar&baz=quux" + expected_encoded_link = %Q{<a href="#{encoded_link}">#{encoded_link}</a>} + actual = unescape(filter(encoded_link).to_html) + + expect(actual).to eq(Rinku.auto_link(encoded_link)) + expect(actual).to eq(expected_encoded_link) + end + it 'does not include trailing HTML entities' do doc = filter("See <<<#{link}>>>") diff --git a/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb b/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb index a41a28a56f1..e1af5a15371 100644 --- a/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb @@ -233,4 +233,20 @@ describe Banzai::Filter::CommitRangeReferenceFilter do expect(reference_filter(act).to_html).to eq exp end end + + context 'group context' do + let(:context) { { project: nil, group: create(:group) } } + + it 'ignores internal references' do + exp = act = "See #{range.to_reference}" + + expect(reference_filter(act, context).to_html).to eq exp + end + + it 'links to a full-path reference' do + reference = "#{project.full_path}@#{commit1.short_id}...#{commit2.short_id}" + + expect(reference_filter("See #{reference}", context).css('a').first.text).to eql(reference) + end + end end diff --git a/spec/lib/banzai/filter/commit_reference_filter_spec.rb b/spec/lib/banzai/filter/commit_reference_filter_spec.rb index 35f8792ff35..d6c9e9e4b19 100644 --- a/spec/lib/banzai/filter/commit_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/commit_reference_filter_spec.rb @@ -207,4 +207,51 @@ describe Banzai::Filter::CommitReferenceFilter do expect(reference_filter(act).to_html).to match(%r{<a.+>#{Regexp.escape(invalidate_reference(reference))}</a>}) end end + + context 'URL reference for a commit patch' do + let(:namespace) { create(:namespace) } + let(:project2) { create(:project, :public, :repository, namespace: namespace) } + let(:commit) { project2.commit } + let(:link) { urls.project_commit_url(project2, commit.id) } + let(:extension) { '.patch' } + let(:reference) { link + extension } + + it 'links to a valid reference' do + doc = reference_filter("See #{reference}") + + expect(doc.css('a').first.attr('href')) + .to eq reference + end + + it 'has valid text' do + doc = reference_filter("See #{reference}") + + expect(doc.text).to eq("See #{commit.reference_link_text(project)} (patch)") + end + + it 'does not link to patch when extension match is after the path' do + invalidate_commit_reference = reference_filter("#{link}/builds.patch") + + doc = reference_filter("See (#{invalidate_commit_reference})") + + expect(doc.css('a').first.attr('href')).to eq "#{link}/builds" + expect(doc.text).to eq("See (#{commit.reference_link_text(project)} (builds).patch)") + end + end + + context 'group context' do + let(:context) { { project: nil, group: create(:group) } } + + it 'ignores internal references' do + exp = act = "See #{commit.id}" + + expect(reference_filter(act, context).to_html).to eq exp + end + + it 'links to a valid reference' do + act 
= "See #{project.full_path}@#{commit.id}" + + expect(reference_filter(act, context).css('a').first.text).to eql("#{project.full_path}@#{commit.short_id}") + end + end end diff --git a/spec/lib/banzai/filter/commit_trailers_filter_spec.rb b/spec/lib/banzai/filter/commit_trailers_filter_spec.rb new file mode 100644 index 00000000000..1fd145116df --- /dev/null +++ b/spec/lib/banzai/filter/commit_trailers_filter_spec.rb @@ -0,0 +1,171 @@ +require 'spec_helper' +require 'ffaker' + +describe Banzai::Filter::CommitTrailersFilter do + include FilterSpecHelper + include CommitTrailersSpecHelper + + let(:secondary_email) { create(:email, :confirmed) } + let(:user) { create(:user) } + + let(:trailer) { "#{FFaker::Lorem.word}-by:"} + + let(:commit_message) { trailer_line(trailer, user.name, user.email) } + let(:commit_message_html) { commit_html(commit_message) } + + context 'detects' do + let(:email) { FFaker::Internet.email } + + it 'trailers in the form of *-by and replace users with links' do + doc = filter(commit_message_html) + + expect_to_have_user_link_with_avatar(doc, user: user, trailer: trailer) + end + + it 'trailers prefixed with whitespaces' do + message_html = commit_html("\n\r #{commit_message}") + + doc = filter(message_html) + + expect_to_have_user_link_with_avatar(doc, user: user, trailer: trailer) + end + + it 'GitLab users via a secondary email' do + _, message_html = build_commit_message( + trailer: trailer, + name: secondary_email.user.name, + email: secondary_email.email + ) + + doc = filter(message_html) + + expect_to_have_user_link_with_avatar( + doc, + user: secondary_email.user, + trailer: trailer, + email: secondary_email.email + ) + end + + it 'non GitLab users and replaces them with mailto links' do + _, message_html = build_commit_message( + trailer: trailer, + name: FFaker::Name.name, + email: email + ) + + doc = filter(message_html) + + expect_to_have_mailto_link(doc, email: email, trailer: trailer) + end + + it 'multiple trailers in the same message' do + different_trailer = "#{FFaker::Lorem.word}-by:" + message = commit_html %( + #{commit_message} + #{trailer_line(different_trailer, FFaker::Name.name, email)} + ) + + doc = filter(message) + + expect_to_have_user_link_with_avatar(doc, user: user, trailer: trailer) + expect_to_have_mailto_link(doc, email: email, trailer: different_trailer) + end + + context 'special names' do + where(:name) do + [ + 'John S. 
Doe', + 'L33t H@x0r' + ] + end + + with_them do + it do + message, message_html = build_commit_message( + trailer: trailer, + name: name, + email: email + ) + + doc = filter(message_html) + + expect_to_have_mailto_link(doc, email: email, trailer: trailer) + expect(doc.text).to match Regexp.escape(message) + end + end + end + end + + context "ignores" do + it 'commit messages without trailers' do + exp = message = commit_html(FFaker::Lorem.sentence) + doc = filter(message) + + expect(doc.to_html).to match Regexp.escape(exp) + end + + it 'trailers that are inline the commit message body' do + message = commit_html %( + #{FFaker::Lorem.sentence} #{commit_message} #{FFaker::Lorem.sentence} + ) + + doc = filter(message) + + expect(doc.css('a').size).to eq 0 + end + end + + context "structure" do + it 'preserves the commit trailer structure' do + doc = filter(commit_message_html) + + expect_to_have_user_link_with_avatar(doc, user: user, trailer: trailer) + expect(doc.text).to match Regexp.escape(commit_message) + end + + it 'preserves the original name used in the commit message' do + message, message_html = build_commit_message( + trailer: trailer, + name: FFaker::Name.name, + email: user.email + ) + + doc = filter(message_html) + + expect_to_have_user_link_with_avatar(doc, user: user, trailer: trailer) + expect(doc.text).to match Regexp.escape(message) + end + + it 'preserves the original email used in the commit message' do + message, message_html = build_commit_message( + trailer: trailer, + name: secondary_email.user.name, + email: secondary_email.email + ) + + doc = filter(message_html) + + expect_to_have_user_link_with_avatar( + doc, + user: secondary_email.user, + trailer: trailer, + email: secondary_email.email + ) + expect(doc.text).to match Regexp.escape(message) + end + + it 'only replaces trailer lines not the full commit message' do + commit_body = FFaker::Lorem.paragraph + message = commit_html %( + #{commit_body} + #{commit_message} + ) + + doc = filter(message) + + expect_to_have_user_link_with_avatar(doc, user: user, trailer: trailer) + expect(doc.text).to include(commit_body) + end + end +end diff --git a/spec/lib/banzai/filter/issuable_state_filter_spec.rb b/spec/lib/banzai/filter/issuable_state_filter_spec.rb index 17347768a49..a5373517ac8 100644 --- a/spec/lib/banzai/filter/issuable_state_filter_spec.rb +++ b/spec/lib/banzai/filter/issuable_state_filter_spec.rb @@ -8,6 +8,7 @@ describe Banzai::Filter::IssuableStateFilter do let(:context) { { current_user: user, issuable_state_filter_enabled: true } } let(:closed_issue) { create_issue(:closed) } let(:project) { create(:project, :public) } + let(:group) { create(:group) } let(:other_project) { create(:project, :public) } def create_link(text, data) @@ -77,6 +78,13 @@ describe Banzai::Filter::IssuableStateFilter do expect(doc.css('a').last.text).to eq("#{closed_issue.to_reference(other_project)} (closed)") end + it 'handles references from group scopes' do + link = create_link(closed_issue.to_reference(other_project), issue: closed_issue.id, reference_type: 'issue') + doc = filter(link, context.merge(project: nil, group: group)) + + expect(doc.css('a').last.text).to eq("#{closed_issue.to_reference(other_project)} (closed)") + end + it 'skips cross project references if the user cannot read cross project' do expect(Ability).to receive(:allowed?).with(user, :read_cross_project) { false } link = create_link(closed_issue.to_reference(other_project), issue: closed_issue.id, reference_type: 'issue') diff --git 
a/spec/lib/banzai/filter/label_reference_filter_spec.rb b/spec/lib/banzai/filter/label_reference_filter_spec.rb index 0c524a1551f..392905076dc 100644 --- a/spec/lib/banzai/filter/label_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/label_reference_filter_spec.rb @@ -596,6 +596,27 @@ describe Banzai::Filter::LabelReferenceFilter do end describe 'group context' do + it 'points to the page defined in label_url_method' do + group = create(:group) + label = create(:group_label, group: group) + reference = "~#{label.name}" + + result = reference_filter("See #{reference}", { project: nil, group: group, label_url_method: :group_url } ) + + expect(result.css('a').first.attr('href')).to eq(urls.group_url(group, label_name: label.name)) + end + + it 'finds labels also in ancestor groups' do + group = create(:group) + label = create(:group_label, group: group) + subgroup = create(:group, parent: group) + reference = "~#{label.name}" + + result = reference_filter("See #{reference}", { project: nil, group: subgroup, label_url_method: :group_url } ) + + expect(result.css('a').first.attr('href')).to eq(urls.group_url(subgroup, label_name: label.name)) + end + it 'points to referenced project issues page' do project = create(:project) label = create(:label, project: project) @@ -604,6 +625,7 @@ describe Banzai::Filter::LabelReferenceFilter do result = reference_filter("See #{reference}", { project: nil, group: create(:group) } ) expect(result.css('a').first.attr('href')).to eq(urls.project_issues_url(project, label_name: label.name)) + expect(result.css('a').first.text).to eq "#{label.name} in #{project.full_name}" end end end diff --git a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb index eeb82822f68..a1dd72c498f 100644 --- a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb @@ -196,6 +196,41 @@ describe Banzai::Filter::MergeRequestReferenceFilter do end end + context 'URL reference for a commit' do + let(:mr) { create(:merge_request, :with_diffs) } + let(:reference) do + urls.project_merge_request_url(mr.project, mr) + "/diffs?commit_id=#{mr.diff_head_sha}" + end + let(:commit) { mr.commits.find { |commit| commit.sha == mr.diff_head_sha } } + + it 'links to a valid reference' do + doc = reference_filter("See #{reference}") + + expect(doc.css('a').first.attr('href')) + .to eq reference + end + + it 'has valid text' do + doc = reference_filter("See #{reference}") + + expect(doc.text).to eq("See #{mr.to_reference(full: true)} (#{commit.short_id})") + end + + it 'has valid title attribute' do + doc = reference_filter("See #{reference}") + + expect(doc.css('a').first.attr('title')).to eq(commit.title) + end + + it 'ignores invalid commit short_ids on link text' do + invalidate_commit_reference = + urls.project_merge_request_url(mr.project, mr) + "/diffs?commit_id=12345678" + doc = reference_filter("See #{invalidate_commit_reference}") + + expect(doc.text).to eq("See #{mr.to_reference(full: true)} (diffs)") + end + end + context 'cross-project URL reference' do let(:namespace) { create(:namespace, name: 'cross-reference') } let(:project2) { create(:project, :public, namespace: namespace) } diff --git a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb index 6a9087d2e59..f8fa9b2d13d 100644 --- a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb +++ 
b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb @@ -343,14 +343,22 @@ describe Banzai::Filter::MilestoneReferenceFilter do end context 'group context' do + let(:context) { { project: nil, group: create(:group) } } + let(:milestone) { create(:milestone, project: project) } + it 'links to a valid reference' do - milestone = create(:milestone, project: project) reference = "#{project.full_path}%#{milestone.iid}" - result = reference_filter("See #{reference}", { project: nil, group: create(:group) } ) + result = reference_filter("See #{reference}", context) expect(result.css('a').first.attr('href')).to eq(urls.milestone_url(milestone)) end + + it 'ignores internal references' do + exp = act = "See %#{milestone.iid}" + + expect(reference_filter(act, context).to_html).to eq exp + end end context 'when milestone is open' do diff --git a/spec/lib/banzai/filter/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/snippet_reference_filter_spec.rb index e068e02d4fc..21cf092428d 100644 --- a/spec/lib/banzai/filter/snippet_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/snippet_reference_filter_spec.rb @@ -210,5 +210,11 @@ describe Banzai::Filter::SnippetReferenceFilter do expect(result.css('a').first.attr('href')).to eq(urls.project_snippet_url(project, snippet)) end + + it 'ignores internal references' do + exp = act = "See $#{snippet.id}" + + expect(reference_filter(act, project: nil, group: create(:group)).to_html).to eq exp + end end end diff --git a/spec/lib/banzai/issuable_extractor_spec.rb b/spec/lib/banzai/issuable_extractor_spec.rb index 69763476dac..f42951d9781 100644 --- a/spec/lib/banzai/issuable_extractor_spec.rb +++ b/spec/lib/banzai/issuable_extractor_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe Banzai::IssuableExtractor do let(:project) { create(:project) } let(:user) { create(:user) } - let(:extractor) { described_class.new(project, user) } + let(:extractor) { described_class.new(Banzai::RenderContext.new(project, user)) } let(:issue) { create(:issue, project: project) } let(:merge_request) { create(:merge_request, source_project: project) } let(:issue_link) do diff --git a/spec/lib/banzai/object_renderer_spec.rb b/spec/lib/banzai/object_renderer_spec.rb index 074d521a5c6..209a547c3b3 100644 --- a/spec/lib/banzai/object_renderer_spec.rb +++ b/spec/lib/banzai/object_renderer_spec.rb @@ -3,8 +3,15 @@ require 'spec_helper' describe Banzai::ObjectRenderer do let(:project) { create(:project, :repository) } let(:user) { project.owner } - let(:renderer) { described_class.new(project, user, custom_value: 'value') } - let(:object) { Note.new(note: 'hello', note_html: '<p dir="auto">hello</p>', cached_markdown_version: CacheMarkdownField::CACHE_VERSION) } + let(:renderer) do + described_class.new( + default_project: project, + user: user, + redaction_context: { custom_value: 'value' } + ) + end + + let(:object) { Note.new(note: 'hello', note_html: '<p dir="auto">hello</p>', cached_markdown_version: CacheMarkdownField::CACHE_COMMONMARK_VERSION) } describe '#render' do context 'with cache' do diff --git a/spec/lib/banzai/redactor_spec.rb b/spec/lib/banzai/redactor_spec.rb index 441f3725985..aaeec953e4b 100644 --- a/spec/lib/banzai/redactor_spec.rb +++ b/spec/lib/banzai/redactor_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe Banzai::Redactor do let(:user) { create(:user) } let(:project) { build(:project) } - let(:redactor) { described_class.new(project, user) } + let(:redactor) { described_class.new(Banzai::RenderContext.new(project, user)) } describe 
'#redact' do context 'when reference not visible to user' do @@ -54,7 +54,7 @@ describe Banzai::Redactor do context 'when project is in pending delete' do let!(:issue) { create(:issue, project: project) } - let(:redactor) { described_class.new(project, user) } + let(:redactor) { described_class.new(Banzai::RenderContext.new(project, user)) } before do project.update(pending_delete: true) diff --git a/spec/lib/banzai/reference_parser/base_parser_spec.rb b/spec/lib/banzai/reference_parser/base_parser_spec.rb index 6175d4c4ca9..4e6e8eca38a 100644 --- a/spec/lib/banzai/reference_parser/base_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/base_parser_spec.rb @@ -5,13 +5,14 @@ describe Banzai::ReferenceParser::BaseParser do let(:user) { create(:user) } let(:project) { create(:project, :public) } + let(:context) { Banzai::RenderContext.new(project, user) } subject do klass = Class.new(described_class) do self.reference_type = :foo end - klass.new(project, user) + klass.new(context) end describe '.reference_type=' do @@ -23,6 +24,19 @@ describe Banzai::ReferenceParser::BaseParser do end end + describe '#project_for_node' do + it 'returns the Project for a node' do + document = instance_double('document', fragment?: false) + project = instance_double('project') + object = instance_double('object', project: project) + node = instance_double('node', document: document) + + context.associate_document(document, object) + + expect(subject.project_for_node(node)).to eq(project) + end + end + describe '#nodes_visible_to_user' do let(:link) { empty_html_link } @@ -164,7 +178,7 @@ describe Banzai::ReferenceParser::BaseParser do self.reference_type = :test end - instance = dummy.new(project, user) + instance = dummy.new(Banzai::RenderContext.new(project, user)) document = Nokogiri::HTML.fragment('<a class="gfm"></a><a class="gfm" data-reference-type="test"></a>') expect(instance).to receive(:gather_references) diff --git a/spec/lib/banzai/reference_parser/commit_parser_spec.rb b/spec/lib/banzai/reference_parser/commit_parser_spec.rb index 3505659c2c3..cca53a8b9b9 100644 --- a/spec/lib/banzai/reference_parser/commit_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/commit_parser_spec.rb @@ -5,7 +5,7 @@ describe Banzai::ReferenceParser::CommitParser do let(:project) { create(:project, :public) } let(:user) { create(:user) } - subject { described_class.new(project, user) } + subject { described_class.new(Banzai::RenderContext.new(project, user)) } let(:link) { empty_html_link } describe '#nodes_visible_to_user' do diff --git a/spec/lib/banzai/reference_parser/commit_range_parser_spec.rb b/spec/lib/banzai/reference_parser/commit_range_parser_spec.rb index 21813177deb..ff3b82cc482 100644 --- a/spec/lib/banzai/reference_parser/commit_range_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/commit_range_parser_spec.rb @@ -5,7 +5,7 @@ describe Banzai::ReferenceParser::CommitRangeParser do let(:project) { create(:project, :public) } let(:user) { create(:user) } - subject { described_class.new(project, user) } + subject { described_class.new(Banzai::RenderContext.new(project, user)) } let(:link) { empty_html_link } describe '#nodes_visible_to_user' do @@ -107,12 +107,9 @@ describe Banzai::ReferenceParser::CommitRangeParser do describe '#find_object' do let(:range) { double(:range) } - before do - expect(CommitRange).to receive(:new).and_return(range) - end - context 'when the range has valid commits' do it 'returns the commit range' do + expect(CommitRange).to receive(:new).and_return(range) 
expect(range).to receive(:valid_commits?).and_return(true) expect(subject.find_object(project, '123..456')).to eq(range) @@ -121,10 +118,19 @@ describe Banzai::ReferenceParser::CommitRangeParser do context 'when the range does not have any valid commits' do it 'returns nil' do + expect(CommitRange).to receive(:new).and_return(range) expect(range).to receive(:valid_commits?).and_return(false) expect(subject.find_object(project, '123..456')).to be_nil end end + + context 'group context' do + it 'returns nil' do + group = create(:group) + + expect(subject.find_object(group, '123..456')).to be_nil + end + end end end diff --git a/spec/lib/banzai/reference_parser/external_issue_parser_spec.rb b/spec/lib/banzai/reference_parser/external_issue_parser_spec.rb index 25969b65168..1cb31e57114 100644 --- a/spec/lib/banzai/reference_parser/external_issue_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/external_issue_parser_spec.rb @@ -5,7 +5,7 @@ describe Banzai::ReferenceParser::ExternalIssueParser do let(:project) { create(:project, :public) } let(:user) { create(:user) } - subject { described_class.new(project, user) } + subject { described_class.new(Banzai::RenderContext.new(project, user)) } let(:link) { empty_html_link } describe '#nodes_visible_to_user' do diff --git a/spec/lib/banzai/reference_parser/issue_parser_spec.rb b/spec/lib/banzai/reference_parser/issue_parser_spec.rb index 0a63567ee40..77c2064caba 100644 --- a/spec/lib/banzai/reference_parser/issue_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/issue_parser_spec.rb @@ -7,7 +7,7 @@ describe Banzai::ReferenceParser::IssueParser do let(:user) { create(:user) } let(:issue) { create(:issue, project: project) } let(:link) { empty_html_link } - subject { described_class.new(project, user) } + subject { described_class.new(Banzai::RenderContext.new(project, user)) } describe '#nodes_visible_to_user' do context 'when the link has a data-issue attribute' do @@ -117,4 +117,27 @@ describe Banzai::ReferenceParser::IssueParser do expect(subject.records_for_nodes(nodes)).to eq({ link => issue }) end end + + context 'when checking multiple merge requests on another project' do + let(:other_project) { create(:project, :public) } + let(:other_issue) { create(:issue, project: other_project) } + + let(:control_links) do + [issue_link(other_issue)] + end + + let(:actual_links) do + control_links + [issue_link(create(:issue, project: other_project))] + end + + def issue_link(issue) + Nokogiri::HTML.fragment(%Q{<a data-issue="#{issue.id}"></a>}).children[0] + end + + before do + project.add_developer(user) + end + + it_behaves_like 'no N+1 queries' + end end diff --git a/spec/lib/banzai/reference_parser/label_parser_spec.rb b/spec/lib/banzai/reference_parser/label_parser_spec.rb index b700161d6c2..e4df2533821 100644 --- a/spec/lib/banzai/reference_parser/label_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/label_parser_spec.rb @@ -6,7 +6,7 @@ describe Banzai::ReferenceParser::LabelParser do let(:project) { create(:project, :public) } let(:user) { create(:user) } let(:label) { create(:label, project: project) } - subject { described_class.new(project, user) } + subject { described_class.new(Banzai::RenderContext.new(project, user)) } let(:link) { empty_html_link } describe '#nodes_visible_to_user' do diff --git a/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb b/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb index 775749ae3a7..5417b1f00be 100644 --- 
a/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb @@ -4,14 +4,13 @@ describe Banzai::ReferenceParser::MergeRequestParser do include ReferenceParserHelpers let(:user) { create(:user) } - let(:merge_request) { create(:merge_request) } - subject { described_class.new(merge_request.target_project, user) } + let(:project) { create(:project, :public) } + let(:merge_request) { create(:merge_request, source_project: project) } + subject { described_class.new(Banzai::RenderContext.new(merge_request.target_project, user)) } let(:link) { empty_html_link } describe '#nodes_visible_to_user' do context 'when the link has a data-issue attribute' do - let(:project) { merge_request.target_project } - before do project.update_attribute(:visibility_level, Gitlab::VisibilityLevel::PUBLIC) link['data-merge-request'] = merge_request.id.to_s @@ -40,4 +39,27 @@ describe Banzai::ReferenceParser::MergeRequestParser do end end end + + context 'when checking multiple merge requests on another project' do + let(:other_project) { create(:project, :public) } + let(:other_merge_request) { create(:merge_request, source_project: other_project) } + + let(:control_links) do + [merge_request_link(other_merge_request)] + end + + let(:actual_links) do + control_links + [merge_request_link(create(:merge_request, :conflict, source_project: other_project))] + end + + def merge_request_link(merge_request) + Nokogiri::HTML.fragment(%Q{<a data-merge-request="#{merge_request.id}"></a>}).children[0] + end + + before do + project.add_developer(user) + end + + it_behaves_like 'no N+1 queries' + end end diff --git a/spec/lib/banzai/reference_parser/milestone_parser_spec.rb b/spec/lib/banzai/reference_parser/milestone_parser_spec.rb index 7dacdf8d629..751d042ffde 100644 --- a/spec/lib/banzai/reference_parser/milestone_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/milestone_parser_spec.rb @@ -6,7 +6,7 @@ describe Banzai::ReferenceParser::MilestoneParser do let(:project) { create(:project, :public) } let(:user) { create(:user) } let(:milestone) { create(:milestone, project: project) } - subject { described_class.new(project, user) } + subject { described_class.new(Banzai::RenderContext.new(project, user)) } let(:link) { empty_html_link } describe '#nodes_visible_to_user' do diff --git a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb index 69ec3f66aa8..d410bd4c164 100644 --- a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb @@ -9,7 +9,7 @@ describe Banzai::ReferenceParser::SnippetParser do let(:external_user) { create(:user, :external) } let(:project_member) { create(:user) } - subject { described_class.new(project, user) } + subject { described_class.new(Banzai::RenderContext.new(project, user)) } let(:link) { empty_html_link } def visible_references(snippet_visibility, user = nil) diff --git a/spec/lib/banzai/reference_parser/user_parser_spec.rb b/spec/lib/banzai/reference_parser/user_parser_spec.rb index b079a3be029..112447f098e 100644 --- a/spec/lib/banzai/reference_parser/user_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/user_parser_spec.rb @@ -6,7 +6,7 @@ describe Banzai::ReferenceParser::UserParser do let(:group) { create(:group) } let(:user) { create(:user) } let(:project) { create(:project, :public, group: group, creator: user) } - subject { described_class.new(project, user) } + subject { 
described_class.new(Banzai::RenderContext.new(project, user)) } let(:link) { empty_html_link } describe '#referenced_by' do diff --git a/spec/lib/banzai/render_context_spec.rb b/spec/lib/banzai/render_context_spec.rb new file mode 100644 index 00000000000..ad17db11613 --- /dev/null +++ b/spec/lib/banzai/render_context_spec.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Banzai::RenderContext do + let(:document) { Nokogiri::HTML.fragment('<p>hello</p>') } + + describe '#project_for_node' do + it 'returns the default project if no associated project was found' do + project = instance_double('project') + context = described_class.new(project) + + expect(context.project_for_node(document)).to eq(project) + end + + it 'returns the associated project if one was associated explicitly' do + project = instance_double('project') + obj = instance_double('object', project: project) + context = described_class.new + + context.associate_document(document, obj) + + expect(context.project_for_node(document)).to eq(project) + end + + it 'returns the project associated with a DocumentFragment when using a node' do + project = instance_double('project') + obj = instance_double('object', project: project) + context = described_class.new + node = document.children.first + + context.associate_document(document, obj) + + expect(context.project_for_node(node)).to eq(project) + end + end +end diff --git a/spec/lib/forever_spec.rb b/spec/lib/forever_spec.rb new file mode 100644 index 00000000000..cf40c467c72 --- /dev/null +++ b/spec/lib/forever_spec.rb @@ -0,0 +1,21 @@ +require 'spec_helper' + +describe Forever do + describe '.date' do + subject { described_class.date } + + context 'when using PostgreSQL' do + it 'should return Postgresql future date' do + allow(Gitlab::Database).to receive(:postgresql?).and_return(true) + expect(subject).to eq(described_class::POSTGRESQL_DATE) + end + end + + context 'when using MySQL' do + it 'should return MySQL future date' do + allow(Gitlab::Database).to receive(:postgresql?).and_return(false) + expect(subject).to eq(described_class::MYSQL_DATE) + end + end + end +end diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb index 2a0e19ae796..e1782cff81a 100644 --- a/spec/lib/gitlab/asciidoc_spec.rb +++ b/spec/lib/gitlab/asciidoc_spec.rb @@ -48,7 +48,7 @@ module Gitlab }, 'images' => { input: 'image:https://localhost.com/image.png[Alt text" onerror="alert(7)]', - output: "<img src=\"https://localhost.com/image.png\" alt=\"Alt text\">" + output: "<div>\n<p><span><img src=\"https://localhost.com/image.png\" alt='Alt text\" onerror=\"alert(7)'></span></p>\n</div>" }, 'pre' => { input: '```mypre"><script>alert(3)</script>', diff --git a/spec/lib/gitlab/auth/ldap/access_spec.rb b/spec/lib/gitlab/auth/ldap/access_spec.rb index 9b3916bf9e3..6b251d824f7 100644 --- a/spec/lib/gitlab/auth/ldap/access_spec.rb +++ b/spec/lib/gitlab/auth/ldap/access_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' describe Gitlab::Auth::LDAP::Access do + include LdapHelpers + let(:access) { described_class.new user } let(:user) { create(:omniauth_user) } @@ -32,8 +34,10 @@ describe Gitlab::Auth::LDAP::Access do end context 'when the user is found' do + let(:ldap_user) { Gitlab::Auth::LDAP::Person.new(Net::LDAP::Entry.new, 'ldapmain') } + before do - allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(:ldap_user) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(ldap_user) end context 'and the user is disabled 
via active directory' do @@ -120,6 +124,22 @@ describe Gitlab::Auth::LDAP::Access do end end end + + context 'when the connection fails' do + before do + raise_ldap_connection_error + end + + it 'does not block the user' do + access.allowed? + + expect(user.ldap_blocked?).to be_falsey + end + + it 'denies access' do + expect(access.allowed?).to be_falsey + end + end end describe '#block_user' do diff --git a/spec/lib/gitlab/auth/ldap/adapter_spec.rb b/spec/lib/gitlab/auth/ldap/adapter_spec.rb index 10c60d792bd..3eeaf3862f6 100644 --- a/spec/lib/gitlab/auth/ldap/adapter_spec.rb +++ b/spec/lib/gitlab/auth/ldap/adapter_spec.rb @@ -124,16 +124,36 @@ describe Gitlab::Auth::LDAP::Adapter do context "when the search raises an LDAP exception" do before do + allow(adapter).to receive(:renew_connection_adapter).and_return(ldap) allow(ldap).to receive(:search) { raise Net::LDAP::Error, "some error" } allow(Rails.logger).to receive(:warn) end - it { is_expected.to eq [] } + context 'retries the operation' do + before do + stub_const("#{described_class}::MAX_SEARCH_RETRIES", 3) + end + + it 'as many times as MAX_SEARCH_RETRIES' do + expect(ldap).to receive(:search).exactly(3).times + expect { subject }.to raise_error(Gitlab::Auth::LDAP::LDAPConnectionError) + end + + context 'when no more retries' do + before do + stub_const("#{described_class}::MAX_SEARCH_RETRIES", 1) + end - it 'logs the error' do - subject - expect(Rails.logger).to have_received(:warn).with( - "LDAP search raised exception Net::LDAP::Error: some error") + it 'raises the exception' do + expect { subject }.to raise_error(Gitlab::Auth::LDAP::LDAPConnectionError) + end + + it 'logs the error' do + expect { subject }.to raise_error(Gitlab::Auth::LDAP::LDAPConnectionError) + expect(Rails.logger).to have_received(:warn).with( + "LDAP search raised exception Net::LDAP::Error: some error") + end + end end end end diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb index 0c71f1d8ca6..64f3d09a25b 100644 --- a/spec/lib/gitlab/auth/o_auth/user_spec.rb +++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' describe Gitlab::Auth::OAuth::User do + include LdapHelpers + let(:oauth_user) { described_class.new(auth_hash) } let(:gl_user) { oauth_user.gl_user } let(:uid) { 'my-uid' } @@ -38,10 +40,6 @@ describe Gitlab::Auth::OAuth::User do end describe '#save' do - def stub_ldap_config(messages) - allow(Gitlab::Auth::LDAP::Config).to receive_messages(messages) - end - let(:provider) { 'twitter' } describe 'when account exists on server' do @@ -269,20 +267,47 @@ describe Gitlab::Auth::OAuth::User do end context 'when an LDAP person is not found by uid' do - it 'tries to find an LDAP person by DN and adds the omniauth identity to the user' do + it 'tries to find an LDAP person by email and adds the omniauth identity to the user' do allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(nil) - allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(ldap_user) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_email).and_return(ldap_user) + + oauth_user.save + + identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } } + expect(identities_as_hash).to match_array(result_identities(dn, uid)) + end + + context 'when also not found by email' do + it 'tries to find an LDAP person by DN and adds the omniauth identity to the user' do + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(nil) + 
allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_email).and_return(nil) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(ldap_user) + + oauth_user.save + + identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } } + expect(identities_as_hash).to match_array(result_identities(dn, uid)) + end + end + end + def result_identities(dn, uid) + [ + { provider: 'ldapmain', extern_uid: dn }, + { provider: 'twitter', extern_uid: uid } + ] + end + + context 'when there is an LDAP connection error' do + before do + raise_ldap_connection_error + end + + it 'does not save the identity' do oauth_user.save identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } } - expect(identities_as_hash) - .to match_array( - [ - { provider: 'ldapmain', extern_uid: dn }, - { provider: 'twitter', extern_uid: uid } - ] - ) + expect(identities_as_hash).to match_array([{ provider: 'twitter', extern_uid: uid }]) end end end @@ -739,4 +764,19 @@ describe Gitlab::Auth::OAuth::User do expect(oauth_user.find_user).to eql gl_user end end + + describe '#find_ldap_person' do + context 'when LDAP connection fails' do + before do + raise_ldap_connection_error + end + + it 'returns nil' do + adapter = Gitlab::Auth::LDAP::Adapter.new('ldapmain') + hash = OmniAuth::AuthHash.new(uid: 'whatever', provider: 'ldapmain') + + expect(oauth_user.send(:find_ldap_person, hash, adapter)).to be_nil + end + end + end end diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb index f969f9e8e38..9ccd0b206cc 100644 --- a/spec/lib/gitlab/auth_spec.rb +++ b/spec/lib/gitlab/auth_spec.rb @@ -5,7 +5,7 @@ describe Gitlab::Auth do describe 'constants' do it 'API_SCOPES contains all scopes for API access' do - expect(subject::API_SCOPES).to eq %i[api read_user sudo] + expect(subject::API_SCOPES).to eq %i[api read_user sudo read_repository] end it 'OPENID_SCOPES contains all scopes for OpenID Connect' do @@ -19,7 +19,7 @@ describe Gitlab::Auth do it 'optional_scopes contains all non-default scopes' do stub_container_registry_config(enabled: true) - expect(subject.optional_scopes).to eq %i[read_user sudo read_registry openid] + expect(subject.optional_scopes).to eq %i[read_user sudo read_repository read_registry openid] end context 'registry_scopes' do @@ -231,7 +231,7 @@ describe Gitlab::Auth do .to eq(Gitlab::Auth::Result.new(user, nil, :gitlab_or_ldap, full_authentication_abilities)) end - it 'falls through oauth authentication when the username is oauth2' do + it 'fails through oauth authentication when the username is oauth2' do user = create( :user, username: 'oauth2', @@ -255,6 +255,122 @@ describe Gitlab::Auth do expect { gl_auth.find_for_git_client('foo', 'bar', project: nil, ip: 'ip') }.to raise_error(Gitlab::Auth::MissingPersonalAccessTokenError) end + + context 'while using deploy tokens' do + let(:project) { create(:project) } + let(:auth_failure) { Gitlab::Auth::Result.new(nil, nil) } + + context 'when the deploy token has read_repository as scope' do + let(:deploy_token) { create(:deploy_token, read_registry: false, projects: [project]) } + let(:login) { deploy_token.username } + + it 'succeeds when login and token are valid' do + auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:download_code]) + + expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: login) + expect(gl_auth.find_for_git_client(login, deploy_token.token, project: project, ip: 'ip')) + .to 
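# Illustrative sketch of the lookup order the OAuth specs above rely on:
# try the LDAP person by uid, then by email, then by DN, and treat a
# connection failure as "not found". The finder names are the ones stubbed
# in the specs; the argument list and the rescue are assumptions.
def find_ldap_person(auth_hash, adapter)
  Gitlab::Auth::LDAP::Person.find_by_uid(auth_hash.uid, adapter) ||
    Gitlab::Auth::LDAP::Person.find_by_email(auth_hash.uid, adapter) ||
    Gitlab::Auth::LDAP::Person.find_by_dn(auth_hash.uid, adapter)
rescue Gitlab::Auth::LDAP::LDAPConnectionError
  nil # the '#find_ldap_person' spec expects nil when the connection fails
end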
eq(auth_success) + end + + it 'fails when login is not valid' do + expect(gl_auth).to receive(:rate_limit!).with('ip', success: false, login: 'random_login') + expect(gl_auth.find_for_git_client('random_login', deploy_token.token, project: project, ip: 'ip')) + .to eq(auth_failure) + end + + it 'fails when token is not valid' do + expect(gl_auth).to receive(:rate_limit!).with('ip', success: false, login: login) + expect(gl_auth.find_for_git_client(login, '123123', project: project, ip: 'ip')) + .to eq(auth_failure) + end + + it 'fails if token is nil' do + expect(gl_auth).to receive(:rate_limit!).with('ip', success: false, login: login) + expect(gl_auth.find_for_git_client(login, nil, project: project, ip: 'ip')) + .to eq(auth_failure) + end + + it 'fails if token is not related to project' do + another_deploy_token = create(:deploy_token) + expect(gl_auth).to receive(:rate_limit!).with('ip', success: false, login: login) + expect(gl_auth.find_for_git_client(login, another_deploy_token.token, project: project, ip: 'ip')) + .to eq(auth_failure) + end + + it 'fails if token has been revoked' do + deploy_token.revoke! + + expect(deploy_token.revoked?).to be_truthy + expect(gl_auth).to receive(:rate_limit!).with('ip', success: false, login: 'deploy-token') + expect(gl_auth.find_for_git_client('deploy-token', deploy_token.token, project: project, ip: 'ip')) + .to eq(auth_failure) + end + end + + context 'when the deploy token has read_registry as a scope' do + let(:deploy_token) { create(:deploy_token, read_repository: false, projects: [project]) } + let(:login) { deploy_token.username } + + context 'when registry enabled' do + before do + stub_container_registry_config(enabled: true) + end + + it 'succeeds when login and token are valid' do + auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:read_container_image]) + + expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: login) + expect(gl_auth.find_for_git_client(login, deploy_token.token, project: nil, ip: 'ip')) + .to eq(auth_success) + end + + it 'fails when login is not valid' do + expect(gl_auth).to receive(:rate_limit!).with('ip', success: false, login: 'random_login') + expect(gl_auth.find_for_git_client('random_login', deploy_token.token, project: project, ip: 'ip')) + .to eq(auth_failure) + end + + it 'fails when token is not valid' do + expect(gl_auth).to receive(:rate_limit!).with('ip', success: false, login: login) + expect(gl_auth.find_for_git_client(login, '123123', project: project, ip: 'ip')) + .to eq(auth_failure) + end + + it 'fails if token is nil' do + expect(gl_auth).to receive(:rate_limit!).with('ip', success: false, login: login) + expect(gl_auth.find_for_git_client(login, nil, project: nil, ip: 'ip')) + .to eq(auth_failure) + end + + it 'fails if token is not related to project' do + expect(gl_auth).to receive(:rate_limit!).with('ip', success: false, login: login) + expect(gl_auth.find_for_git_client(login, 'abcdef', project: nil, ip: 'ip')) + .to eq(auth_failure) + end + + it 'fails if token has been revoked' do + deploy_token.revoke! 
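# The deploy-token cases above reduce to a scope-to-ability mapping. A
# sketch of that mapping: the ability names come from the asserted
# Gitlab::Auth::Result values, while the attribute readers on the token are
# assumed from the factory attributes used in the specs.
def deploy_token_abilities(deploy_token)
  abilities = []
  abilities << :download_code        if deploy_token.read_repository
  abilities << :read_container_image if deploy_token.read_registry
  abilities
end

# Gitlab::Auth::Result.new(deploy_token, project, :deploy_token,
#                          deploy_token_abilities(deploy_token))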
+ + expect(deploy_token.revoked?).to be_truthy + expect(gl_auth).to receive(:rate_limit!).with('ip', success: false, login: 'deploy-token') + expect(gl_auth.find_for_git_client('deploy-token', deploy_token.token, project: nil, ip: 'ip')) + .to eq(auth_failure) + end + end + + context 'when registry disabled' do + before do + stub_container_registry_config(enabled: false) + end + + it 'fails when login and token are valid' do + expect(gl_auth).to receive(:rate_limit!).with('ip', success: false, login: login) + expect(gl_auth.find_for_git_client(login, deploy_token.token, project: nil, ip: 'ip')) + .to eq(auth_failure) + end + end + end + end end describe 'find_with_user_password' do @@ -315,13 +431,19 @@ describe Gitlab::Auth do it "tries to autheticate with db before ldap" do expect(Gitlab::Auth::LDAP::Authentication).not_to receive(:login) - gl_auth.find_with_user_password(username, password) + expect(gl_auth.find_with_user_password(username, password)).to eq(user) + end + + it "does not find user by using ldap as fallback to for authentication" do + expect(Gitlab::Auth::LDAP::Authentication).to receive(:login).and_return(nil) + + expect(gl_auth.find_with_user_password('ldap_user', 'password')).to be_nil end - it "uses ldap as fallback to for authentication" do - expect(Gitlab::Auth::LDAP::Authentication).to receive(:login) + it "find new user by using ldap as fallback to for authentication" do + expect(Gitlab::Auth::LDAP::Authentication).to receive(:login).and_return(user) - gl_auth.find_with_user_password('ldap_user', 'password') + expect(gl_auth.find_with_user_password('ldap_user', 'password')).to eq(user) end end diff --git a/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb b/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb index e112e9e9e3d..5ce84c61042 100644 --- a/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb +++ b/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb @@ -51,4 +51,20 @@ describe Gitlab::BackgroundMigration::MigrateBuildStage, :migration, schema: 201 expect { described_class.new.perform(1, 6) } .to raise_error ActiveRecord::RecordNotUnique end + + context 'when invalid class can be loaded due to single table inheritance' do + let(:commit_status) do + jobs.create!(id: 7, commit_id: 1, project_id: 123, stage_idx: 4, + stage: 'post-deploy', status: :failed) + end + + before do + commit_status.update_column(:type, 'SomeClass') + end + + it 'does ignore single table inheritance type' do + expect { described_class.new.perform(1, 7) }.not_to raise_error + expect(jobs.find(7)).to have_attributes(stage_id: (a_value > 0)) + end + end end diff --git a/spec/lib/gitlab/background_migration/set_confidential_note_events_on_services_spec.rb b/spec/lib/gitlab/background_migration/set_confidential_note_events_on_services_spec.rb new file mode 100644 index 00000000000..6f3fb994f17 --- /dev/null +++ b/spec/lib/gitlab/background_migration/set_confidential_note_events_on_services_spec.rb @@ -0,0 +1,31 @@ +require 'spec_helper' + +describe Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnServices, :migration, schema: 20180122154930 do + let(:services) { table(:services) } + + describe '#perform' do + it 'migrates services where note_events is true' do + service = services.create(confidential_note_events: nil, note_events: true) + + subject.perform(service.id, service.id) + + expect(service.reload.confidential_note_events).to eq(true) + end + + it 'ignores services where note_events is false' do + service = 
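# A condensed sketch of the database-first, LDAP-fallback flow asserted
# above. Gitlab::Auth::LDAP::Authentication.login is the call stubbed in
# the specs; the rest of the structure is an assumption for illustration.
def find_with_user_password(login, password)
  user = User.by_login(login)
  return user if user&.valid_password?(password)

  # Fall back to LDAP only when the database did not authenticate the user;
  # a nil return here means authentication failed.
  Gitlab::Auth::LDAP::Authentication.login(login, password)
end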
services.create(confidential_note_events: nil, note_events: false) + + subject.perform(service.id, service.id) + + expect(service.reload.confidential_note_events).to eq(nil) + end + + it 'ignores services where confidential_note_events has already been set' do + service = services.create(confidential_note_events: false, note_events: true) + + subject.perform(service.id, service.id) + + expect(service.reload.confidential_note_events).to eq(false) + end + end +end diff --git a/spec/lib/gitlab/background_migration/set_confidential_note_events_on_webhooks_spec.rb b/spec/lib/gitlab/background_migration/set_confidential_note_events_on_webhooks_spec.rb new file mode 100644 index 00000000000..82b484b7d5b --- /dev/null +++ b/spec/lib/gitlab/background_migration/set_confidential_note_events_on_webhooks_spec.rb @@ -0,0 +1,31 @@ +require 'spec_helper' + +describe Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnWebhooks, :migration, schema: 20180104131052 do + let(:web_hooks) { table(:web_hooks) } + + describe '#perform' do + it 'migrates hooks where note_events is true' do + hook = web_hooks.create(confidential_note_events: nil, note_events: true) + + subject.perform(hook.id, hook.id) + + expect(hook.reload.confidential_note_events).to eq(true) + end + + it 'ignores hooks where note_events is false' do + hook = web_hooks.create(confidential_note_events: nil, note_events: false) + + subject.perform(hook.id, hook.id) + + expect(hook.reload.confidential_note_events).to eq(nil) + end + + it 'ignores hooks where confidential_note_events has already been set' do + hook = web_hooks.create(confidential_note_events: false, note_events: true) + + subject.perform(hook.id, hook.id) + + expect(hook.reload.confidential_note_events).to eq(false) + end + end +end diff --git a/spec/lib/gitlab/bare_repository_import/repository_spec.rb b/spec/lib/gitlab/bare_repository_import/repository_spec.rb index 5cb1f4deb5f..0dc3705825d 100644 --- a/spec/lib/gitlab/bare_repository_import/repository_spec.rb +++ b/spec/lib/gitlab/bare_repository_import/repository_spec.rb @@ -54,7 +54,7 @@ describe ::Gitlab::BareRepositoryImport::Repository do context 'hashed storage' do let(:gitlab_shell) { Gitlab::Shell.new } let(:repository_storage) { 'default' } - let(:root_path) { Gitlab.config.repositories.storages[repository_storage]['path'] } + let(:root_path) { Gitlab.config.repositories.storages[repository_storage].legacy_disk_path } let(:hash) { '6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b' } let(:hashed_path) { "@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b" } let(:repo_path) { File.join(root_path, "#{hashed_path}.git") } diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb index a6a1d9e619f..c63120b0b29 100644 --- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb +++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb @@ -137,7 +137,7 @@ describe Gitlab::BitbucketImport::Importer do it 'imports to the project disk_path' do expect(project.wiki).to receive(:repository_exists?) 
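# Both confidential-note-events migrations above follow the same backfill
# pattern: within an id range, flip confidential_note_events to true where
# note_events is true and the new column has never been set. A schematic
# perform method (the model name is a stand-in, not the migration's own class):
def perform(start_id, stop_id)
  WebHook.where(id: start_id..stop_id)
         .where(confidential_note_events: nil, note_events: true)
         .update_all(confidential_note_events: true)
end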
{ false } expect(importer.gitlab_shell).to receive(:import_repository).with( - project.repository_storage_path, + project.repository_storage, project.wiki.disk_path, project.import_url + '/wiki' ) diff --git a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb index 16704ff5e77..18658588a40 100644 --- a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb +++ b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache do let!(:project) { create(:project, :repository) } let(:pipeline_status) { described_class.new(project) } - let(:cache_key) { "projects/#{project.id}/pipeline_status" } + let(:cache_key) { described_class.cache_key_for_project(project) } describe '.load_for_project' do it "loads the status" do diff --git a/spec/lib/gitlab/checks/project_moved_spec.rb b/spec/lib/gitlab/checks/project_moved_spec.rb index e263d29656c..8e9386b1ba1 100644 --- a/spec/lib/gitlab/checks/project_moved_spec.rb +++ b/spec/lib/gitlab/checks/project_moved_spec.rb @@ -44,44 +44,17 @@ describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do end describe '#message' do - context 'when the push is rejected' do - it 'returns a redirect message telling the user to try again' do - project_moved = described_class.new(project, user, 'http', 'foo/bar') - message = "Project 'foo/bar' was moved to '#{project.full_path}'." + - "\n\nPlease update your Git remote:" + - "\n\n git remote set-url origin #{project.http_url_to_repo} and try again.\n" + it 'returns a redirect message' do + project_moved = described_class.new(project, user, 'http', 'foo/bar') + message = <<~MSG + Project 'foo/bar' was moved to '#{project.full_path}'. - expect(project_moved.message(rejected: true)).to eq(message) - end - end + Please update your Git remote: - context 'when the push is not rejected' do - it 'returns a redirect message' do - project_moved = described_class.new(project, user, 'http', 'foo/bar') - message = "Project 'foo/bar' was moved to '#{project.full_path}'." + - "\n\nPlease update your Git remote:" + - "\n\n git remote set-url origin #{project.http_url_to_repo}\n" + git remote set-url origin #{project.http_url_to_repo} + MSG - expect(project_moved.message).to eq(message) - end - end - end - - describe '#permanent_redirect?' 
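# The message assertion above matches a squiggly-heredoc template along
# these lines (an illustration of the expected text, not the ProjectMoved
# implementation itself):
def redirect_message(old_path, project)
  <<~MSG
    Project '#{old_path}' was moved to '#{project.full_path}'.

    Please update your Git remote:

      git remote set-url origin #{project.http_url_to_repo}
  MSG
end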
do - context 'with a permanent RedirectRoute' do - it 'returns true' do - project.route.create_redirect('foo/bar', permanent: true) - project_moved = described_class.new(project, user, 'http', 'foo/bar') - expect(project_moved.permanent_redirect?).to be_truthy - end - end - - context 'without a permanent RedirectRoute' do - it 'returns false' do - project.route.create_redirect('foo/bar') - project_moved = described_class.new(project, user, 'http', 'foo/bar') - expect(project_moved.permanent_redirect?).to be_falsy - end + expect(project_moved.message).to eq(message) end end end diff --git a/spec/lib/gitlab/ci/build/policy/variables_spec.rb b/spec/lib/gitlab/ci/build/policy/variables_spec.rb new file mode 100644 index 00000000000..2ce858836e3 --- /dev/null +++ b/spec/lib/gitlab/ci/build/policy/variables_spec.rb @@ -0,0 +1,72 @@ +require 'spec_helper' + +describe Gitlab::Ci::Build::Policy::Variables do + set(:project) { create(:project) } + + let(:pipeline) do + build(:ci_empty_pipeline, project: project, ref: 'master', source: :push) + end + + let(:ci_build) do + build(:ci_build, pipeline: pipeline, project: project, ref: 'master') + end + + let(:seed) { double('build seed', to_resource: ci_build) } + + before do + pipeline.variables.build(key: 'CI_PROJECT_NAME', value: '') + end + + describe '#satisfied_by?' do + it 'is satisfied by at least one matching statement' do + policy = described_class.new(['$CI_PROJECT_ID', '$UNDEFINED']) + + expect(policy).to be_satisfied_by(pipeline, seed) + end + + it 'is not satisfied by an overriden empty variable' do + policy = described_class.new(['$CI_PROJECT_NAME']) + + expect(policy).not_to be_satisfied_by(pipeline, seed) + end + + it 'is satisfied by a truthy pipeline expression' do + policy = described_class.new([%($CI_PIPELINE_SOURCE == "push")]) + + expect(policy).to be_satisfied_by(pipeline, seed) + end + + it 'is not satisfied by a falsy pipeline expression' do + policy = described_class.new([%($CI_PIPELINE_SOURCE == "invalid source")]) + + expect(policy).not_to be_satisfied_by(pipeline, seed) + end + + it 'is satisfied by a truthy expression using undefined variable' do + policy = described_class.new(['$UNDEFINED == null']) + + expect(policy).to be_satisfied_by(pipeline, seed) + end + + it 'is not satisfied by a falsy expression using undefined variable' do + policy = described_class.new(['$UNDEFINED']) + + expect(policy).not_to be_satisfied_by(pipeline, seed) + end + + it 'allows to evaluate regular secret variables' do + create(:ci_variable, project: project, key: 'SECRET', value: 'my secret') + + policy = described_class.new(["$SECRET == 'my secret'"]) + + expect(policy).to be_satisfied_by(pipeline, seed) + end + + it 'does not persist neither pipeline nor build' do + described_class.new('$VAR').satisfied_by?(pipeline, seed) + + expect(pipeline).not_to be_persisted + expect(seed.to_resource).not_to be_persisted + end + end +end diff --git a/spec/lib/gitlab/ci/build/step_spec.rb b/spec/lib/gitlab/ci/build/step_spec.rb index 5a21282712a..cce4efaa069 100644 --- a/spec/lib/gitlab/ci/build/step_spec.rb +++ b/spec/lib/gitlab/ci/build/step_spec.rb @@ -5,10 +5,14 @@ describe Gitlab::Ci::Build::Step do shared_examples 'has correct script' do subject { described_class.from_commands(job) } + before do + job.run! 
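# The variables policy above is satisfied when at least one listed
# expression is truthful for the given variables. A minimal sketch,
# assuming Statement#truthful? (specced later in this diff) is the
# evaluator the policy delegates to:
def variables_policy_satisfied?(expressions, variables)
  expressions.any? do |text|
    Gitlab::Ci::Pipeline::Expression::Statement.new(text, variables).truthful?
  end
end

# variables_policy_satisfied?(['$CI_PIPELINE_SOURCE == "push"'],
#                             'CI_PIPELINE_SOURCE' => 'push') # => true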
+ end + it 'fabricates an object' do expect(subject.name).to eq(:script) expect(subject.script).to eq(script) - expect(subject.timeout).to eq(job.timeout) + expect(subject.timeout).to eq(job.metadata_timeout) expect(subject.when).to eq('on_success') expect(subject.allow_failure).to be_falsey end @@ -47,6 +51,10 @@ describe Gitlab::Ci::Build::Step do subject { described_class.from_after_script(job) } + before do + job.run! + end + context 'when after_script is empty' do it 'doesn not fabricate an object' do is_expected.to be_nil @@ -59,7 +67,7 @@ describe Gitlab::Ci::Build::Step do it 'fabricates an object' do expect(subject.name).to eq(:after_script) expect(subject.script).to eq(['ls -la', 'date']) - expect(subject.timeout).to eq(job.timeout) + expect(subject.timeout).to eq(job.metadata_timeout) expect(subject.when).to eq('always') expect(subject.allow_failure).to be_truthy end diff --git a/spec/lib/gitlab/ci/config/entry/policy_spec.rb b/spec/lib/gitlab/ci/config/entry/policy_spec.rb index 5e83abf645b..08718c382b9 100644 --- a/spec/lib/gitlab/ci/config/entry/policy_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/policy_spec.rb @@ -83,6 +83,39 @@ describe Gitlab::Ci::Config::Entry::Policy do end end + context 'when specifying valid variables expressions policy' do + let(:config) { { variables: ['$VAR == null'] } } + + it 'is a correct configuraton' do + expect(entry).to be_valid + expect(entry.value).to eq(config) + end + end + + context 'when specifying variables expressions in invalid format' do + let(:config) { { variables: '$MY_VAR' } } + + it 'reports an error about invalid format' do + expect(entry.errors).to include /should be an array of strings/ + end + end + + context 'when specifying invalid variables expressions statement' do + let(:config) { { variables: ['$MY_VAR =='] } } + + it 'reports an error about invalid statement' do + expect(entry.errors).to include /invalid expression syntax/ + end + end + + context 'when specifying invalid variables expressions token' do + let(:config) { { variables: ['$MY_VAR == 123'] } } + + it 'reports an error about invalid statement' do + expect(entry.errors).to include /invalid expression syntax/ + end + end + context 'when specifying unknown policy' do let(:config) { { refs: ['master'], invalid: :something } } diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb index 1b03227d67b..dc12ba076bc 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb @@ -5,23 +5,23 @@ describe Gitlab::Ci::Pipeline::Chain::Create do set(:user) { create(:user) } let(:pipeline) do - build(:ci_pipeline_with_one_job, project: project, - ref: 'master') + build(:ci_empty_pipeline, project: project, ref: 'master') end let(:command) do Gitlab::Ci::Pipeline::Chain::Command.new( - project: project, - current_user: user, seeds_block: nil) + project: project, current_user: user) end let(:step) { described_class.new(pipeline, command) } - before do - step.perform! - end - context 'when pipeline is ready to be saved' do + before do + pipeline.stages.build(name: 'test', project: project) + + step.perform! 
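# Shapes the Config::Entry::Policy specs above accept or reject, written out
# as plain Ruby hashes (the form an only:/except: section is parsed into):
valid_policy   = { refs: ['master'], variables: ['$CI_PIPELINE_SOURCE == "push"'] }
invalid_format = { variables: '$MY_VAR' }      # error: should be an array of strings
invalid_syntax = { variables: ['$MY_VAR =='] } # error: invalid expression syntax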
+ end + it 'saves a pipeline' do expect(pipeline).to be_persisted end @@ -32,6 +32,7 @@ describe Gitlab::Ci::Pipeline::Chain::Create do it 'creates stages' do expect(pipeline.reload.stages).to be_one + expect(pipeline.stages.first).to be_persisted end end @@ -40,6 +41,10 @@ describe Gitlab::Ci::Pipeline::Chain::Create do build(:ci_pipeline, project: project, ref: nil) end + before do + step.perform! + end + it 'breaks the chain' do expect(step.break?).to be true end @@ -49,18 +54,4 @@ describe Gitlab::Ci::Pipeline::Chain::Create do .to include /Failed to persist the pipeline/ end end - - context 'when there is a seed block present' do - let(:seeds) { spy('pipeline seeds') } - - let(:command) do - double('command', project: project, - current_user: user, - seeds_block: seeds) - end - - it 'executes the block' do - expect(seeds).to have_received(:call).with(pipeline) - end - end end diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb new file mode 100644 index 00000000000..8312fa47cfa --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb @@ -0,0 +1,158 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Chain::Populate do + set(:project) { create(:project) } + set(:user) { create(:user) } + + let(:pipeline) do + build(:ci_pipeline_with_one_job, project: project, + ref: 'master', + user: user) + end + + let(:command) do + Gitlab::Ci::Pipeline::Chain::Command.new( + project: project, + current_user: user, + seeds_block: nil) + end + + let(:step) { described_class.new(pipeline, command) } + + context 'when pipeline doesn not have seeds block' do + before do + step.perform! + end + + it 'does not persist the pipeline' do + expect(pipeline).not_to be_persisted + end + + it 'does not break the chain' do + expect(step.break?).to be false + end + + it 'populates pipeline with stages' do + expect(pipeline.stages).to be_one + expect(pipeline.stages.first).not_to be_persisted + end + + it 'populates pipeline with builds' do + expect(pipeline.builds).to be_one + expect(pipeline.builds.first).not_to be_persisted + expect(pipeline.stages.first.builds).to be_one + expect(pipeline.stages.first.builds.first).not_to be_persisted + end + + it 'correctly assigns user' do + expect(pipeline.builds).to all(have_attributes(user: user)) + end + end + + context 'when pipeline is empty' do + let(:config) do + { rspec: { + script: 'ls', + only: ['something'] + } } + end + + let(:pipeline) do + build(:ci_pipeline, project: project, config: config) + end + + before do + step.perform! + end + + it 'breaks the chain' do + expect(step.break?).to be true + end + + it 'appends an error about missing stages' do + expect(pipeline.errors.to_a) + .to include 'No stages / jobs for this pipeline.' + end + end + + context 'when pipeline has validation errors' do + let(:pipeline) do + build(:ci_pipeline, project: project, ref: nil) + end + + before do + step.perform! + end + + it 'breaks the chain' do + expect(step.break?).to be true + end + + it 'appends validation error' do + expect(pipeline.errors.to_a) + .to include 'Failed to build the pipeline!' 
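# A condensed sketch of what the Populate step above asserts: stage and
# build records are assembled in memory (nothing persisted) and an error is
# added when only/except filtering leaves nothing behind. The stage_seeds
# call and the overall shape are assumptions used for illustration.
def populate!(pipeline)
  pipeline.stage_seeds.each do |seed|
    pipeline.stages << seed.to_resource if seed.included?
  end

  pipeline.errors.add(:base, 'No stages / jobs for this pipeline.') if pipeline.stages.empty?
end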
+ end + end + + context 'when there is a seed blocks present' do + let(:command) do + Gitlab::Ci::Pipeline::Chain::Command.new( + project: project, + current_user: user, + seeds_block: seeds_block) + end + + context 'when seeds block builds some resources' do + let(:seeds_block) do + ->(pipeline) { pipeline.variables.build(key: 'VAR', value: '123') } + end + + it 'populates pipeline with resources described in the seeds block' do + step.perform! + + expect(pipeline).not_to be_persisted + expect(pipeline.variables).not_to be_empty + expect(pipeline.variables.first).not_to be_persisted + expect(pipeline.variables.first.key).to eq 'VAR' + expect(pipeline.variables.first.value).to eq '123' + end + end + + context 'when seeds block tries to persist some resources' do + let(:seeds_block) do + ->(pipeline) { pipeline.variables.create!(key: 'VAR', value: '123') } + end + + it 'raises exception' do + expect { step.perform! }.to raise_error(ActiveRecord::RecordNotSaved) + end + end + end + + context 'when pipeline gets persisted during the process' do + let(:pipeline) { create(:ci_pipeline, project: project) } + + it 'raises error' do + expect { step.perform! }.to raise_error(described_class::PopulateError) + end + end + + context 'when using only/except build policies' do + let(:config) do + { rspec: { script: 'rspec', stage: 'test', only: ['master'] }, + prod: { script: 'cap prod', stage: 'deploy', only: ['tags'] } } + end + + let(:pipeline) do + build(:ci_pipeline, ref: 'master', config: config) + end + + it 'populates pipeline according to used policies' do + step.perform! + + expect(pipeline.stages.size).to eq 1 + expect(pipeline.builds.size).to eq 1 + expect(pipeline.builds.first.name).to eq 'rspec' + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb index 5c12c6e6392..c53294d091c 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb @@ -76,28 +76,6 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Config do end end - context 'when pipeline has no stages / jobs' do - let(:config) do - { rspec: { - script: 'ls', - only: ['something'] - } } - end - - let(:pipeline) do - build(:ci_pipeline, project: project, config: config) - end - - it 'appends an error about missing stages' do - expect(pipeline.errors.to_a) - .to include 'No stages / jobs for this pipeline.' 
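# The seeds_block above is just a lambda handed to the chain command; it may
# build in-memory records on the pipeline but must not persist anything at
# this point:
seeds_block = ->(pipeline) do
  pipeline.variables.build(key: 'VAR', value: '123') # fine: built, not saved
  # pipeline.variables.create!(...)                  # would raise ActiveRecord::RecordNotSaved
end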
- end - - it 'breaks the chain' do - expect(step.break?).to be true - end - end - context 'when pipeline contains configuration validation errors' do let(:config) { { rspec: {} } } diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/string_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/string_spec.rb index 86234dfb9e5..1ccb792d1da 100644 --- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/string_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/string_spec.rb @@ -73,6 +73,22 @@ describe Gitlab::Ci::Pipeline::Expression::Lexeme::String do expect(token).not_to be_nil expect(token.build.evaluate).to eq 'some " string' end + + it 'allows to use an empty string inside single quotes' do + scanner = StringScanner.new(%('')) + + token = described_class.scan(scanner) + + expect(token.build.evaluate).to eq '' + end + + it 'allow to use an empty string inside double quotes' do + scanner = StringScanner.new(%("")) + + token = described_class.scan(scanner) + + expect(token.build.evaluate).to eq '' + end end end diff --git a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb index 472a58599d8..6685bf5385b 100644 --- a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb @@ -1,14 +1,23 @@ require 'spec_helper' describe Gitlab::Ci::Pipeline::Expression::Statement do - let(:pipeline) { build(:ci_pipeline) } - subject do - described_class.new(text, pipeline) + described_class.new(text, variables) + end + + let(:variables) do + { 'PRESENT_VARIABLE' => 'my variable', + EMPTY_VARIABLE: '' } end - before do - pipeline.variables.build([key: 'VARIABLE', value: 'my variable']) + describe '.new' do + context 'when variables are not provided' do + it 'allows to properly initializes the statement' do + statement = described_class.new('$PRESENT_VARIABLE') + + expect(statement.evaluate).to be_nil + end + end end describe '#parse_tree' do @@ -23,18 +32,26 @@ describe Gitlab::Ci::Pipeline::Expression::Statement do context 'when expression grammar is incorrect' do table = [ - '$VAR "text"', # missing operator - '== "123"', # invalid right side - "'single quotes'", # single quotes string - '$VAR ==', # invalid right side - '12345', # unknown syntax - '' # empty statement + '$VAR "text"', # missing operator + '== "123"', # invalid left side + '"some string"', # only string provided + '$VAR ==', # invalid right side + '12345', # unknown syntax + '' # empty statement ] table.each do |syntax| - it "raises an error when syntax is `#{syntax}`" do - expect { described_class.new(syntax, pipeline).parse_tree } - .to raise_error described_class::StatementError + context "when expression grammar is #{syntax.inspect}" do + let(:text) { syntax } + + it 'aises a statement error exception' do + expect { subject.parse_tree } + .to raise_error described_class::StatementError + end + + it 'is an invalid statement' do + expect(subject).not_to be_valid + end end end end @@ -47,10 +64,14 @@ describe Gitlab::Ci::Pipeline::Expression::Statement do expect(subject.parse_tree) .to be_a Gitlab::Ci::Pipeline::Expression::Lexeme::Equals end + + it 'is a valid statement' do + expect(subject).to be_valid + end end context 'when using a single token' do - let(:text) { '$VARIABLE' } + let(:text) { '$PRESENT_VARIABLE' } it 'returns a single token instance' do expect(subject.parse_tree) @@ -62,14 +83,17 @@ describe Gitlab::Ci::Pipeline::Expression::Statement do describe '#evaluate' do statements = [ - 
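# Example of the statement interface the table-driven cases below exercise,
# reusing the variables hash defined above:
variables = { 'PRESENT_VARIABLE' => 'my variable', EMPTY_VARIABLE: '' }

statement = Gitlab::Ci::Pipeline::Expression::Statement
              .new('$PRESENT_VARIABLE == "my variable"', variables)

statement.evaluate  # => true
statement.truthful? # => true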
['$VARIABLE == "my variable"', true], - ["$VARIABLE == 'my variable'", true], - ['"my variable" == $VARIABLE', true], - ['$VARIABLE == null', false], - ['$VAR == null', true], - ['null == $VAR', true], - ['$VARIABLE', 'my variable'], - ['$VAR', nil] + ['$PRESENT_VARIABLE == "my variable"', true], + ["$PRESENT_VARIABLE == 'my variable'", true], + ['"my variable" == $PRESENT_VARIABLE', true], + ['$PRESENT_VARIABLE == null', false], + ['$EMPTY_VARIABLE == null', false], + ['"" == $EMPTY_VARIABLE', true], + ['$EMPTY_VARIABLE', ''], + ['$UNDEFINED_VARIABLE == null', true], + ['null == $UNDEFINED_VARIABLE', true], + ['$PRESENT_VARIABLE', 'my variable'], + ['$UNDEFINED_VARIABLE', nil] ] statements.each do |expression, value| @@ -82,4 +106,25 @@ describe Gitlab::Ci::Pipeline::Expression::Statement do end end end + + describe '#truthful?' do + statements = [ + ['$PRESENT_VARIABLE == "my variable"', true], + ["$PRESENT_VARIABLE == 'no match'", false], + ['$UNDEFINED_VARIABLE == null', true], + ['$PRESENT_VARIABLE', true], + ['$UNDEFINED_VARIABLE', false], + ['$EMPTY_VARIABLE', false] + ] + + statements.each do |expression, value| + context "when using expression `#{expression}`" do + let(:text) { expression } + + it "returns `#{value.inspect}`" do + expect(subject.truthful?).to eq value + end + end + end + end end diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb new file mode 100644 index 00000000000..fffa727c2ed --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb @@ -0,0 +1,232 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Seed::Build do + let(:pipeline) { create(:ci_empty_pipeline) } + + let(:attributes) do + { name: 'rspec', + ref: 'master', + commands: 'rspec' } + end + + subject do + described_class.new(pipeline, attributes) + end + + describe '#attributes' do + it 'returns hash attributes of a build' do + expect(subject.attributes).to be_a Hash + expect(subject.attributes) + .to include(:name, :project, :ref, :commands) + end + end + + describe '#to_resource' do + it 'returns a valid build resource' do + expect(subject.to_resource).to be_a(::Ci::Build) + expect(subject.to_resource).to be_valid + end + + it 'memoizes a resource object' do + build = subject.to_resource + + expect(build.object_id).to eq subject.to_resource.object_id + end + + it 'can not be persisted without explicit assignment' do + build = subject.to_resource + + pipeline.save! 
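# Typical use of a build seed, as specced above: check the only/except
# policies via #included?, then materialise an unsaved Ci::Build with
# #to_resource:
seed = Gitlab::Ci::Pipeline::Seed::Build.new(pipeline,
                                             name: 'rspec',
                                             ref: 'master',
                                             commands: 'rspec')

seed.included?   # => true/false, depending on the only/except policies
seed.to_resource # => an unsaved ::Ci::Build until explicitly assigned and saved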
+ + expect(build).not_to be_persisted + end + end + + describe 'applying only/except policies' do + context 'when no branch policy is specified' do + let(:attributes) { { name: 'rspec' } } + + it { is_expected.to be_included } + end + + context 'when branch policy does not match' do + context 'when using only' do + let(:attributes) { { name: 'rspec', only: { refs: ['deploy'] } } } + + it { is_expected.not_to be_included } + end + + context 'when using except' do + let(:attributes) { { name: 'rspec', except: { refs: ['deploy'] } } } + + it { is_expected.to be_included } + end + end + + context 'when branch regexp policy does not match' do + context 'when using only' do + let(:attributes) { { name: 'rspec', only: { refs: ['/^deploy$/'] } } } + + it { is_expected.not_to be_included } + end + + context 'when using except' do + let(:attributes) { { name: 'rspec', except: { refs: ['/^deploy$/'] } } } + + it { is_expected.to be_included } + end + end + + context 'when branch policy matches' do + context 'when using only' do + let(:attributes) { { name: 'rspec', only: { refs: %w[deploy master] } } } + + it { is_expected.to be_included } + end + + context 'when using except' do + let(:attributes) { { name: 'rspec', except: { refs: %w[deploy master] } } } + + it { is_expected.not_to be_included } + end + end + + context 'when keyword policy matches' do + context 'when using only' do + let(:attributes) { { name: 'rspec', only: { refs: ['branches'] } } } + + it { is_expected.to be_included } + end + + context 'when using except' do + let(:attributes) { { name: 'rspec', except: { refs: ['branches'] } } } + + it { is_expected.not_to be_included } + end + end + + context 'when keyword policy does not match' do + context 'when using only' do + let(:attributes) { { name: 'rspec', only: { refs: ['tags'] } } } + + it { is_expected.not_to be_included } + end + + context 'when using except' do + let(:attributes) { { name: 'rspec', except: { refs: ['tags'] } } } + + it { is_expected.to be_included } + end + end + + context 'when keywords and pipeline source policy matches' do + possibilities = [%w[pushes push], + %w[web web], + %w[triggers trigger], + %w[schedules schedule], + %w[api api], + %w[external external]] + + context 'when using only' do + possibilities.each do |keyword, source| + context "when using keyword `#{keyword}` and source `#{source}`" do + let(:pipeline) do + build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source) + end + + let(:attributes) { { name: 'rspec', only: { refs: [keyword] } } } + + it { is_expected.to be_included } + end + end + end + + context 'when using except' do + possibilities.each do |keyword, source| + context "when using keyword `#{keyword}` and source `#{source}`" do + let(:pipeline) do + build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source) + end + + let(:attributes) { { name: 'rspec', except: { refs: [keyword] } } } + + it { is_expected.not_to be_included } + end + end + end + end + + context 'when keywords and pipeline source does not match' do + possibilities = [%w[pushes web], + %w[web push], + %w[triggers schedule], + %w[schedules external], + %w[api trigger], + %w[external api]] + + context 'when using only' do + possibilities.each do |keyword, source| + context "when using keyword `#{keyword}` and source `#{source}`" do + let(:pipeline) do + build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source) + end + + let(:attributes) { { name: 'rspec', only: { refs: [keyword] } } } + + it { is_expected.not_to be_included } + end + end 
+ end + + context 'when using except' do + possibilities.each do |keyword, source| + context "when using keyword `#{keyword}` and source `#{source}`" do + let(:pipeline) do + build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source) + end + + let(:attributes) { { name: 'rspec', except: { refs: [keyword] } } } + + it { is_expected.to be_included } + end + end + end + end + + context 'when repository path matches' do + context 'when using only' do + let(:attributes) do + { name: 'rspec', only: { refs: ["branches@#{pipeline.project_full_path}"] } } + end + + it { is_expected.to be_included } + end + + context 'when using except' do + let(:attributes) do + { name: 'rspec', except: { refs: ["branches@#{pipeline.project_full_path}"] } } + end + + it { is_expected.not_to be_included } + end + end + + context 'when repository path does not matches' do + context 'when using only' do + let(:attributes) do + { name: 'rspec', only: { refs: ['branches@fork'] } } + end + + it { is_expected.not_to be_included } + end + + context 'when using except' do + let(:attributes) do + { name: 'rspec', except: { refs: ['branches@fork'] } } + end + + it { is_expected.to be_included } + end + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb new file mode 100644 index 00000000000..eb1b285c7bd --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb @@ -0,0 +1,123 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Seed::Stage do + let(:pipeline) { create(:ci_empty_pipeline) } + + let(:attributes) do + { name: 'test', + index: 0, + builds: [{ name: 'rspec' }, + { name: 'spinach' }, + { name: 'deploy', only: { refs: ['feature'] } }] } + end + + subject do + described_class.new(pipeline, attributes) + end + + describe '#size' do + it 'returns a number of jobs in the stage' do + expect(subject.size).to eq 2 + end + end + + describe '#attributes' do + it 'returns hash attributes of a stage' do + expect(subject.attributes).to be_a Hash + expect(subject.attributes).to include(:name, :project) + end + end + + describe '#included?' 
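# Ref policies covered above, side by side: branch names, regexps, pipeline
# source keywords, and repository-scoped refs (forms taken from the specs;
# the group/project path is a placeholder):
only_examples = [
  { refs: %w[master] },                 # plain branch name
  { refs: ['/^deploy$/'] },             # regexp
  { refs: %w[branches tags] },          # keywords
  { refs: ['schedules'] },              # matches pipelines with source :schedule
  { refs: ['branches@group/project'] }  # scoped to a repository path
]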
do + context 'when it contains builds seeds' do + let(:attributes) do + { name: 'test', + index: 0, + builds: [{ name: 'deploy', only: { refs: ['master'] } }] } + end + + it { is_expected.to be_included } + end + + context 'when it does not contain build seeds' do + let(:attributes) do + { name: 'test', + index: 0, + builds: [{ name: 'deploy', only: { refs: ['feature'] } }] } + end + + it { is_expected.not_to be_included } + end + end + + describe '#seeds' do + it 'returns build seeds' do + expect(subject.seeds).to all(be_a Gitlab::Ci::Pipeline::Seed::Build) + end + + it 'returns build seeds including valid attributes' do + expect(subject.seeds.size).to eq 2 + expect(subject.seeds.map(&:attributes)).to all(include(ref: 'master')) + expect(subject.seeds.map(&:attributes)).to all(include(tag: false)) + expect(subject.seeds.map(&:attributes)).to all(include(project: pipeline.project)) + expect(subject.seeds.map(&:attributes)) + .to all(include(trigger_request: pipeline.trigger_requests.first)) + end + + context 'when a ref is protected' do + before do + allow_any_instance_of(Project).to receive(:protected_for?).and_return(true) + end + + it 'returns protected builds' do + expect(subject.seeds.map(&:attributes)).to all(include(protected: true)) + end + end + + context 'when a ref is not protected' do + before do + allow_any_instance_of(Project).to receive(:protected_for?).and_return(false) + end + + it 'returns unprotected builds' do + expect(subject.seeds.map(&:attributes)).to all(include(protected: false)) + end + end + + it 'filters seeds using only/except policies' do + expect(subject.seeds.map(&:attributes)).to satisfy do |seeds| + seeds.any? { |hash| hash.fetch(:name) == 'rspec' } + end + + expect(subject.seeds.map(&:attributes)).not_to satisfy do |seeds| + seeds.any? { |hash| hash.fetch(:name) == 'deploy' } + end + end + end + + describe '#to_resource' do + it 'builds a valid stage object with all builds' do + subject.to_resource.save! + + expect(pipeline.reload.stages.count).to eq 1 + expect(pipeline.reload.builds.count).to eq 2 + expect(pipeline.builds).to all(satisfy { |job| job.stage_id.present? }) + expect(pipeline.builds).to all(satisfy { |job| job.pipeline.present? }) + expect(pipeline.builds).to all(satisfy { |job| job.project.present? }) + expect(pipeline.stages) + .to all(satisfy { |stage| stage.pipeline.present? }) + expect(pipeline.stages) + .to all(satisfy { |stage| stage.project.present? }) + end + + it 'can not be persisted without explicit pipeline assignment' do + stage = subject.to_resource + + pipeline.save! 
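# And the corresponding stage seed, as exercised above: it wraps the build
# seeds, filters them through only/except, and persists nothing until its
# resource is explicitly saved:
stage_seed = Gitlab::Ci::Pipeline::Seed::Stage.new(pipeline,
                                                   name: 'test',
                                                   index: 0,
                                                   builds: [{ name: 'rspec' }])

stage_seed.size        # number of builds that survive the policies
stage_seed.seeds       # Gitlab::Ci::Pipeline::Seed::Build instances
stage_seed.to_resource # an unsaved Ci::Stage together with its builds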
+ + expect(stage).not_to be_persisted + expect(pipeline.reload.stages.count).to eq 0 + expect(pipeline.reload.builds.count).to eq 0 + end + end +end diff --git a/spec/lib/gitlab/ci/stage/seed_spec.rb b/spec/lib/gitlab/ci/stage/seed_spec.rb deleted file mode 100644 index 3fe8d50c49a..00000000000 --- a/spec/lib/gitlab/ci/stage/seed_spec.rb +++ /dev/null @@ -1,83 +0,0 @@ -require 'spec_helper' - -describe Gitlab::Ci::Stage::Seed do - let(:pipeline) { create(:ci_empty_pipeline) } - - let(:builds) do - [{ name: 'rspec' }, { name: 'spinach' }] - end - - subject do - described_class.new(pipeline, 'test', builds) - end - - describe '#size' do - it 'returns a number of jobs in the stage' do - expect(subject.size).to eq 2 - end - end - - describe '#stage' do - it 'returns hash attributes of a stage' do - expect(subject.stage).to be_a Hash - expect(subject.stage).to include(:name, :project) - end - end - - describe '#builds' do - it 'returns hash attributes of all builds' do - expect(subject.builds.size).to eq 2 - expect(subject.builds).to all(include(ref: 'master')) - expect(subject.builds).to all(include(tag: false)) - expect(subject.builds).to all(include(project: pipeline.project)) - expect(subject.builds) - .to all(include(trigger_request: pipeline.trigger_requests.first)) - end - - context 'when a ref is protected' do - before do - allow_any_instance_of(Project).to receive(:protected_for?).and_return(true) - end - - it 'returns protected builds' do - expect(subject.builds).to all(include(protected: true)) - end - end - - context 'when a ref is unprotected' do - before do - allow_any_instance_of(Project).to receive(:protected_for?).and_return(false) - end - - it 'returns unprotected builds' do - expect(subject.builds).to all(include(protected: false)) - end - end - end - - describe '#user=' do - let(:user) { build(:user) } - - it 'assignes relevant pipeline attributes' do - subject.user = user - - expect(subject.builds).to all(include(user: user)) - end - end - - describe '#create!' do - it 'creates all stages and builds' do - subject.create! - - expect(pipeline.reload.stages.count).to eq 1 - expect(pipeline.reload.builds.count).to eq 2 - expect(pipeline.builds).to all(satisfy { |job| job.stage_id.present? }) - expect(pipeline.builds).to all(satisfy { |job| job.pipeline.present? }) - expect(pipeline.builds).to all(satisfy { |job| job.project.present? }) - expect(pipeline.stages) - .to all(satisfy { |stage| stage.pipeline.present? }) - expect(pipeline.stages) - .to all(satisfy { |stage| stage.project.present? 
}) - end - end -end diff --git a/spec/lib/gitlab/ci/status/build/action_spec.rb b/spec/lib/gitlab/ci/status/build/action_spec.rb index d612d29e3e0..bdec582b57b 100644 --- a/spec/lib/gitlab/ci/status/build/action_spec.rb +++ b/spec/lib/gitlab/ci/status/build/action_spec.rb @@ -53,4 +53,14 @@ describe Gitlab::Ci::Status::Build::Action do end end end + + describe '#badge_tooltip' do + let(:user) { create(:user) } + let(:build) { create(:ci_build, :non_playable) } + let(:status) { Gitlab::Ci::Status::Core.new(build, user) } + + it 'returns the status' do + expect(subject.badge_tooltip).to eq('created') + end + end end diff --git a/spec/lib/gitlab/ci/status/build/cancelable_spec.rb b/spec/lib/gitlab/ci/status/build/cancelable_spec.rb index 9cdebaa5cf2..78d6fa65b5a 100644 --- a/spec/lib/gitlab/ci/status/build/cancelable_spec.rb +++ b/spec/lib/gitlab/ci/status/build/cancelable_spec.rb @@ -40,6 +40,24 @@ describe Gitlab::Ci::Status::Build::Cancelable do end end + describe '#status_tooltip' do + it 'does not override status status_tooltip' do + expect(status).to receive(:status_tooltip) + + subject.status_tooltip + end + end + + describe '#badge_tooltip' do + let(:user) { create(:user) } + let(:build) { create(:ci_build) } + let(:status) { Gitlab::Ci::Status::Core.new(build, user) } + + it 'returns the status' do + expect(subject.badge_tooltip).to eq('pending') + end + end + describe 'action details' do let(:user) { create(:user) } let(:build) { create(:ci_build) } @@ -72,6 +90,10 @@ describe Gitlab::Ci::Status::Build::Cancelable do describe '#action_title' do it { expect(subject.action_title).to eq 'Cancel' } end + + describe '#action_button_title' do + it { expect(subject.action_button_title).to eq 'Cancel this job' } + end end describe '.matches?' do diff --git a/spec/lib/gitlab/ci/status/build/canceled_spec.rb b/spec/lib/gitlab/ci/status/build/canceled_spec.rb new file mode 100644 index 00000000000..c6b5cc68770 --- /dev/null +++ b/spec/lib/gitlab/ci/status/build/canceled_spec.rb @@ -0,0 +1,33 @@ +require 'spec_helper' + +describe Gitlab::Ci::Status::Build::Canceled do + let(:user) { create(:user) } + + subject do + described_class.new(double('subject')) + end + + describe '#illustration' do + it { expect(subject.illustration).to include(:image, :size, :title) } + end + + describe '.matches?' 
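# The extended statuses below are thin wrappers around a core status: they
# delegate text/icon/label and override only what they add (tooltips,
# action titles, illustrations). A standalone sketch of that pattern using
# SimpleDelegator rather than GitLab's own extended-status base class:
require 'delegate'

class CancelableStatus < SimpleDelegator
  def action_button_title
    'Cancel this job'
  end

  def self.matches?(build, _user)
    build.cancelable?
  end
end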
do + subject {described_class.matches?(build, user) } + + context 'when build is canceled' do + let(:build) { create(:ci_build, :canceled) } + + it 'is a correct match' do + expect(subject).to be true + end + end + + context 'when build is not canceled' do + let(:build) { create(:ci_build) } + + it 'does not match' do + expect(subject).to be false + end + end + end +end diff --git a/spec/lib/gitlab/ci/status/build/common_spec.rb b/spec/lib/gitlab/ci/status/build/common_spec.rb index 2cce7a23ea7..ca3c66f0152 100644 --- a/spec/lib/gitlab/ci/status/build/common_spec.rb +++ b/spec/lib/gitlab/ci/status/build/common_spec.rb @@ -38,4 +38,10 @@ describe Gitlab::Ci::Status::Build::Common do expect(subject.details_path).to include "jobs/#{build.id}" end end + + describe '#illustration' do + it 'provides a fallback empty state illustration' do + expect(subject.illustration).not_to be_empty + end + end end diff --git a/spec/lib/gitlab/ci/status/build/created_spec.rb b/spec/lib/gitlab/ci/status/build/created_spec.rb new file mode 100644 index 00000000000..8bdfe6ef7a2 --- /dev/null +++ b/spec/lib/gitlab/ci/status/build/created_spec.rb @@ -0,0 +1,33 @@ +require 'spec_helper' + +describe Gitlab::Ci::Status::Build::Created do + let(:user) { create(:user) } + + subject do + described_class.new(double('subject')) + end + + describe '#illustration' do + it { expect(subject.illustration).to include(:image, :size, :title, :content) } + end + + describe '.matches?' do + subject {described_class.matches?(build, user) } + + context 'when build is created' do + let(:build) { create(:ci_build, :created) } + + it 'is a correct match' do + expect(subject).to be true + end + end + + context 'when build is not created' do + let(:build) { create(:ci_build) } + + it 'does not match' do + expect(subject).to be false + end + end + end +end diff --git a/spec/lib/gitlab/ci/status/build/erased_spec.rb b/spec/lib/gitlab/ci/status/build/erased_spec.rb new file mode 100644 index 00000000000..0acd271e375 --- /dev/null +++ b/spec/lib/gitlab/ci/status/build/erased_spec.rb @@ -0,0 +1,33 @@ +require 'spec_helper' + +describe Gitlab::Ci::Status::Build::Erased do + let(:user) { create(:user) } + + subject do + described_class.new(double('subject')) + end + + describe '#illustration' do + it { expect(subject.illustration).to include(:image, :size, :title) } + end + + describe '.matches?' 
do + subject { described_class.matches?(build, user) } + + context 'when build is erased' do + let(:build) { create(:ci_build, :success, :erased) } + + it 'is a correct match' do + expect(subject).to be true + end + end + + context 'when build is not erased' do + let(:build) { create(:ci_build, :success, :trace_artifact) } + + it 'does not match' do + expect(subject).to be false + end + end + end +end diff --git a/spec/lib/gitlab/ci/status/build/factory_spec.rb b/spec/lib/gitlab/ci/status/build/factory_spec.rb index d196bc6a4c2..d53a7d468e3 100644 --- a/spec/lib/gitlab/ci/status/build/factory_spec.rb +++ b/spec/lib/gitlab/ci/status/build/factory_spec.rb @@ -13,7 +13,7 @@ describe Gitlab::Ci::Status::Build::Factory do end context 'when build is successful' do - let(:build) { create(:ci_build, :success) } + let(:build) { create(:ci_build, :success, :trace_artifact) } it 'matches correct core status' do expect(factory.core_status).to be_a Gitlab::Ci::Status::Success @@ -38,6 +38,33 @@ describe Gitlab::Ci::Status::Build::Factory do end end + context 'when build is erased' do + let(:build) { create(:ci_build, :success, :erased) } + + it 'matches correct core status' do + expect(factory.core_status).to be_a Gitlab::Ci::Status::Success + end + + it 'matches correct extended statuses' do + expect(factory.extended_statuses) + .to eq [Gitlab::Ci::Status::Build::Erased, + Gitlab::Ci::Status::Build::Retryable] + end + + it 'fabricates a retryable build status' do + expect(status).to be_a Gitlab::Ci::Status::Build::Retryable + end + + it 'fabricates status with correct details' do + expect(status.text).to eq 'passed' + expect(status.icon).to eq 'status_success' + expect(status.favicon).to eq 'favicon_status_success' + expect(status.label).to eq 'passed' + expect(status).to have_details + expect(status).to have_action + end + end + context 'when build is failed' do context 'when build is not allowed to fail' do let(:build) { create(:ci_build, :failed) } @@ -48,11 +75,12 @@ describe Gitlab::Ci::Status::Build::Factory do it 'matches correct extended statuses' do expect(factory.extended_statuses) - .to eq [Gitlab::Ci::Status::Build::Retryable] + .to eq [Gitlab::Ci::Status::Build::Retryable, + Gitlab::Ci::Status::Build::Failed] end - it 'fabricates a retryable build status' do - expect(status).to be_a Gitlab::Ci::Status::Build::Retryable + it 'fabricates a failed build status' do + expect(status).to be_a Gitlab::Ci::Status::Build::Failed end it 'fabricates status with correct details' do @@ -60,13 +88,14 @@ describe Gitlab::Ci::Status::Build::Factory do expect(status.icon).to eq 'status_failed' expect(status.favicon).to eq 'favicon_status_failed' expect(status.label).to eq 'failed' + expect(status.status_tooltip).to eq 'failed <br> (unknown failure)' expect(status).to have_details expect(status).to have_action end end context 'when build is allowed to fail' do - let(:build) { create(:ci_build, :failed, :allowed_to_fail) } + let(:build) { create(:ci_build, :failed, :allowed_to_fail, :trace_artifact) } it 'matches correct core status' do expect(factory.core_status).to be_a Gitlab::Ci::Status::Failed @@ -75,6 +104,7 @@ describe Gitlab::Ci::Status::Build::Factory do it 'matches correct extended statuses' do expect(factory.extended_statuses) .to eq [Gitlab::Ci::Status::Build::Retryable, + Gitlab::Ci::Status::Build::Failed, Gitlab::Ci::Status::Build::FailedAllowed] end @@ -104,7 +134,7 @@ describe Gitlab::Ci::Status::Build::Factory do it 'matches correct extended statuses' do expect(factory.extended_statuses) - 
.to eq [Gitlab::Ci::Status::Build::Retryable] + .to eq [Gitlab::Ci::Status::Build::Canceled, Gitlab::Ci::Status::Build::Retryable] end it 'fabricates a retryable build status' do @@ -115,6 +145,7 @@ describe Gitlab::Ci::Status::Build::Factory do expect(status.text).to eq 'canceled' expect(status.icon).to eq 'status_canceled' expect(status.favicon).to eq 'favicon_status_canceled' + expect(status.illustration).to include(:image, :size, :title) expect(status.label).to eq 'canceled' expect(status).to have_details expect(status).to have_action @@ -156,7 +187,7 @@ describe Gitlab::Ci::Status::Build::Factory do it 'matches correct extended statuses' do expect(factory.extended_statuses) - .to eq [Gitlab::Ci::Status::Build::Cancelable] + .to eq [Gitlab::Ci::Status::Build::Pending, Gitlab::Ci::Status::Build::Cancelable] end it 'fabricates a cancelable build status' do @@ -167,6 +198,7 @@ describe Gitlab::Ci::Status::Build::Factory do expect(status.text).to eq 'pending' expect(status.icon).to eq 'status_pending' expect(status.favicon).to eq 'favicon_status_pending' + expect(status.illustration).to include(:image, :size, :title, :content) expect(status.label).to eq 'pending' expect(status).to have_details expect(status).to have_action @@ -180,18 +212,19 @@ describe Gitlab::Ci::Status::Build::Factory do expect(factory.core_status).to be_a Gitlab::Ci::Status::Skipped end - it 'does not match extended statuses' do - expect(factory.extended_statuses).to be_empty + it 'matches correct extended statuses' do + expect(factory.extended_statuses).to eq [Gitlab::Ci::Status::Build::Skipped] end - it 'fabricates a core skipped status' do - expect(status).to be_a Gitlab::Ci::Status::Skipped + it 'fabricates a skipped build status' do + expect(status).to be_a Gitlab::Ci::Status::Build::Skipped end it 'fabricates status with correct details' do expect(status.text).to eq 'skipped' expect(status.icon).to eq 'status_skipped' expect(status.favicon).to eq 'favicon_status_skipped' + expect(status.illustration).to include(:image, :size, :title) expect(status.label).to eq 'skipped' expect(status).to have_details expect(status).not_to have_action @@ -208,7 +241,8 @@ describe Gitlab::Ci::Status::Build::Factory do it 'matches correct extended statuses' do expect(factory.extended_statuses) - .to eq [Gitlab::Ci::Status::Build::Play, + .to eq [Gitlab::Ci::Status::Build::Manual, + Gitlab::Ci::Status::Build::Play, Gitlab::Ci::Status::Build::Action] end @@ -221,6 +255,7 @@ describe Gitlab::Ci::Status::Build::Factory do expect(status.group).to eq 'manual' expect(status.icon).to eq 'status_manual' expect(status.favicon).to eq 'favicon_status_manual' + expect(status.illustration).to include(:image, :size, :title, :content) expect(status.label).to include 'manual play action' expect(status).to have_details expect(status.action_path).to include 'play' @@ -255,7 +290,8 @@ describe Gitlab::Ci::Status::Build::Factory do it 'matches correct extended statuses' do expect(factory.extended_statuses) - .to eq [Gitlab::Ci::Status::Build::Stop, + .to eq [Gitlab::Ci::Status::Build::Manual, + Gitlab::Ci::Status::Build::Stop, Gitlab::Ci::Status::Build::Action] end diff --git a/spec/lib/gitlab/ci/status/build/failed_allowed_spec.rb b/spec/lib/gitlab/ci/status/build/failed_allowed_spec.rb index 99a5a7e4aca..bfaa508785e 100644 --- a/spec/lib/gitlab/ci/status/build/failed_allowed_spec.rb +++ b/spec/lib/gitlab/ci/status/build/failed_allowed_spec.rb @@ -3,6 +3,7 @@ require 'spec_helper' describe Gitlab::Ci::Status::Build::FailedAllowed do let(:status) { 
double('core status') } let(:user) { double('user') } + let(:build) { create(:ci_build, :failed, :allowed_to_fail) } subject do described_class.new(status) @@ -68,6 +69,28 @@ describe Gitlab::Ci::Status::Build::FailedAllowed do end end + describe '#badge_tooltip' do + let(:user) { create(:user) } + let(:failed_status) { Gitlab::Ci::Status::Failed.new(build, user) } + let(:build_status) { Gitlab::Ci::Status::Build::Failed.new(failed_status) } + let(:status) { described_class.new(build_status) } + + it 'does override badge_tooltip' do + expect(status.badge_tooltip).to eq('failed <br> (unknown failure)') + end + end + + describe '#status_tooltip' do + let(:user) { create(:user) } + let(:failed_status) { Gitlab::Ci::Status::Failed.new(build, user) } + let(:build_status) { Gitlab::Ci::Status::Build::Failed.new(failed_status) } + let(:status) { described_class.new(build_status) } + + it 'does override status_tooltip' do + expect(status.status_tooltip).to eq 'failed <br> (unknown failure) (allowed to fail)' + end + end + describe '.matches?' do subject { described_class.matches?(build, user) } diff --git a/spec/lib/gitlab/ci/status/build/failed_spec.rb b/spec/lib/gitlab/ci/status/build/failed_spec.rb new file mode 100644 index 00000000000..cadb424ea2c --- /dev/null +++ b/spec/lib/gitlab/ci/status/build/failed_spec.rb @@ -0,0 +1,83 @@ +require 'spec_helper' + +describe Gitlab::Ci::Status::Build::Failed do + let(:build) { create(:ci_build, :script_failure) } + let(:status) { double('core status') } + let(:user) { double('user') } + + subject { described_class.new(status) } + + describe '#text' do + it 'does not override status text' do + expect(status).to receive(:text) + + subject.text + end + end + + describe '#icon' do + it 'does not override status icon' do + expect(status).to receive(:icon) + + subject.icon + end + end + + describe '#group' do + it 'does not override status group' do + expect(status).to receive(:group) + + subject.group + end + end + + describe '#favicon' do + it 'does not override status label' do + expect(status).to receive(:favicon) + + subject.favicon + end + end + + describe '#label' do + it 'does not override label' do + expect(status).to receive(:label) + + subject.label + end + end + + describe '#badge_tooltip' do + let(:user) { create(:user) } + let(:status) { Gitlab::Ci::Status::Failed.new(build, user) } + + it 'does override badge_tooltip' do + expect(subject.badge_tooltip).to eq 'failed <br> (script failure)' + end + end + + describe '#status_tooltip' do + let(:user) { create(:user) } + let(:status) { Gitlab::Ci::Status::Failed.new(build, user) } + + it 'does override status_tooltip' do + expect(subject.status_tooltip).to eq 'failed <br> (script failure)' + end + end + + describe '.matches?' 
do + context 'with a failed build' do + it 'returns true' do + expect(described_class.matches?(build, user)).to be_truthy + end + end + + context 'with any other type of build' do + let(:build) { create(:ci_build, :success) } + + it 'returns false' do + expect(described_class.matches?(build, user)).to be_falsy + end + end + end +end diff --git a/spec/lib/gitlab/ci/status/build/manual_spec.rb b/spec/lib/gitlab/ci/status/build/manual_spec.rb new file mode 100644 index 00000000000..6386296f992 --- /dev/null +++ b/spec/lib/gitlab/ci/status/build/manual_spec.rb @@ -0,0 +1,34 @@ +require 'spec_helper' + +describe Gitlab::Ci::Status::Build::Manual do + let(:user) { create(:user) } + + subject do + build = create(:ci_build, :manual) + described_class.new(Gitlab::Ci::Status::Core.new(build, user)) + end + + describe '#illustration' do + it { expect(subject.illustration).to include(:image, :size, :title, :content) } + end + + describe '.matches?' do + subject {described_class.matches?(build, user) } + + context 'when build is manual' do + let(:build) { create(:ci_build, :manual) } + + it 'is a correct match' do + expect(subject).to be true + end + end + + context 'when build is not manual' do + let(:build) { create(:ci_build) } + + it 'does not match' do + expect(subject).to be false + end + end + end +end diff --git a/spec/lib/gitlab/ci/status/build/pending_spec.rb b/spec/lib/gitlab/ci/status/build/pending_spec.rb new file mode 100644 index 00000000000..4cf70828e53 --- /dev/null +++ b/spec/lib/gitlab/ci/status/build/pending_spec.rb @@ -0,0 +1,33 @@ +require 'spec_helper' + +describe Gitlab::Ci::Status::Build::Pending do + let(:user) { create(:user) } + + subject do + described_class.new(double('subject')) + end + + describe '#illustration' do + it { expect(subject.illustration).to include(:image, :size, :title, :content) } + end + + describe '.matches?' do + subject {described_class.matches?(build, user) } + + context 'when build is pending' do + let(:build) { create(:ci_build, :pending) } + + it 'is a correct match' do + expect(subject).to be true + end + end + + context 'when build is not pending' do + let(:build) { create(:ci_build, :success) } + + it 'does not match' do + expect(subject).to be false + end + end + end +end diff --git a/spec/lib/gitlab/ci/status/build/play_spec.rb b/spec/lib/gitlab/ci/status/build/play_spec.rb index 81d5f553fd1..f128c1d4ca4 100644 --- a/spec/lib/gitlab/ci/status/build/play_spec.rb +++ b/spec/lib/gitlab/ci/status/build/play_spec.rb @@ -14,6 +14,22 @@ describe Gitlab::Ci::Status::Build::Play do end end + describe '#status_tooltip' do + it 'does not override status status_tooltip' do + expect(status).to receive(:status_tooltip) + + subject.status_tooltip + end + end + + describe '#badge_tooltip' do + it 'does not override status badge_tooltip' do + expect(status).to receive(:badge_tooltip) + + subject.badge_tooltip + end + end + describe '#has_action?' do context 'when user is allowed to update build' do context 'when user is allowed to trigger protected action' do @@ -53,6 +69,10 @@ describe Gitlab::Ci::Status::Build::Play do it { expect(subject.action_title).to eq 'Play' } end + describe '#action_button_title' do + it { expect(subject.action_button_title).to eq 'Trigger this manual action' } + end + describe '.matches?' 
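# The new #illustration assertions above only check which keys the returned
# hash exposes. A minimal sketch of the kind of value such a method could
# return; the asset path, size class and copy below are placeholders rather
# than the real GitLab illustration data:
def sketch_manual_illustration
  {
    image: 'illustrations/manual_action.svg',
    size: 'svg-394',
    title: 'This job requires a manual action',
    content: 'This job depends on a user to trigger it.'
  }
end

sketch_manual_illustration.keys # => [:image, :size, :title, :content]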
do subject { described_class.matches?(build, user) } diff --git a/spec/lib/gitlab/ci/status/build/retried_spec.rb b/spec/lib/gitlab/ci/status/build/retried_spec.rb new file mode 100644 index 00000000000..ee9acaf1c21 --- /dev/null +++ b/spec/lib/gitlab/ci/status/build/retried_spec.rb @@ -0,0 +1,96 @@ +require 'spec_helper' + +describe Gitlab::Ci::Status::Build::Retried do + let(:build) { create(:ci_build, :retried) } + let(:status) { double('core status') } + let(:user) { double('user') } + + subject { described_class.new(status) } + + describe '#text' do + it 'does not override status text' do + expect(status).to receive(:text) + + subject.text + end + end + + describe '#icon' do + it 'does not override status icon' do + expect(status).to receive(:icon) + + subject.icon + end + end + + describe '#group' do + it 'does not override status group' do + expect(status).to receive(:group) + + subject.group + end + end + + describe '#favicon' do + it 'does not override status label' do + expect(status).to receive(:favicon) + + subject.favicon + end + end + + describe '#label' do + it 'does not override status label' do + expect(status).to receive(:label) + + subject.label + end + end + + describe '#badge_tooltip' do + let(:user) { create(:user) } + let(:build) { create(:ci_build, :retried) } + let(:status) { Gitlab::Ci::Status::Success.new(build, user) } + + it 'returns status' do + expect(status.badge_tooltip).to eq('pending') + end + end + + describe '#status_tooltip' do + let(:user) { create(:user) } + + context 'with a failed build' do + let(:build) { create(:ci_build, :failed, :retried) } + let(:failed_status) { Gitlab::Ci::Status::Failed.new(build, user) } + let(:status) { Gitlab::Ci::Status::Build::Failed.new(failed_status) } + + it 'does override status_tooltip' do + expect(subject.status_tooltip).to eq 'failed <br> (unknown failure) (retried)' + end + end + + context 'with another build' do + let(:build) { create(:ci_build, :retried) } + let(:status) { Gitlab::Ci::Status::Success.new(build, user) } + + it 'does override status_tooltip' do + expect(subject.status_tooltip).to eq 'passed (retried)' + end + end + end + + describe '.matches?' 
do + subject { described_class.matches?(build, user) } + + context 'with a retried build' do + it { is_expected.to be_truthy } + end + + context 'with a build that has not been retried' do + let(:build) { create(:ci_build, :success) } + + it { is_expected.to be_falsy } + end + end +end diff --git a/spec/lib/gitlab/ci/status/build/retryable_spec.rb b/spec/lib/gitlab/ci/status/build/retryable_spec.rb index 14d42e0d70f..84d98588f2d 100644 --- a/spec/lib/gitlab/ci/status/build/retryable_spec.rb +++ b/spec/lib/gitlab/ci/status/build/retryable_spec.rb @@ -40,6 +40,24 @@ describe Gitlab::Ci::Status::Build::Retryable do end end + describe '#status_tooltip' do + it 'does not override status status_tooltip' do + expect(status).to receive(:status_tooltip) + + subject.status_tooltip + end + end + + describe '#badge_tooltip' do + let(:user) { create(:user) } + let(:build) { create(:ci_build) } + let(:status) { Gitlab::Ci::Status::Core.new(build, user) } + + it 'does return status' do + expect(status.badge_tooltip).to eq('pending') + end + end + describe 'action details' do let(:user) { create(:user) } let(:build) { create(:ci_build) } @@ -72,6 +90,10 @@ describe Gitlab::Ci::Status::Build::Retryable do describe '#action_title' do it { expect(subject.action_title).to eq 'Retry' } end + + describe '#action_button_title' do + it { expect(subject.action_button_title).to eq 'Retry this job' } + end end describe '.matches?' do diff --git a/spec/lib/gitlab/ci/status/build/skipped_spec.rb b/spec/lib/gitlab/ci/status/build/skipped_spec.rb new file mode 100644 index 00000000000..46f6933025a --- /dev/null +++ b/spec/lib/gitlab/ci/status/build/skipped_spec.rb @@ -0,0 +1,33 @@ +require 'spec_helper' + +describe Gitlab::Ci::Status::Build::Skipped do + let(:user) { create(:user) } + + subject do + described_class.new(double('subject')) + end + + describe '#illustration' do + it { expect(subject.illustration).to include(:image, :size, :title) } + end + + describe '.matches?' do + subject {described_class.matches?(build, user) } + + context 'when build is skipped' do + let(:build) { create(:ci_build, :skipped) } + + it 'is a correct match' do + expect(subject).to be true + end + end + + context 'when build is not skipped' do + let(:build) { create(:ci_build) } + + it 'does not match' do + expect(subject).to be false + end + end + end +end diff --git a/spec/lib/gitlab/ci/status/build/stop_spec.rb b/spec/lib/gitlab/ci/status/build/stop_spec.rb index 18e250772f0..5b7534c96c1 100644 --- a/spec/lib/gitlab/ci/status/build/stop_spec.rb +++ b/spec/lib/gitlab/ci/status/build/stop_spec.rb @@ -44,6 +44,10 @@ describe Gitlab::Ci::Status::Build::Stop do describe '#action_title' do it { expect(subject.action_title).to eq 'Stop' } end + + describe '#action_button_title' do + it { expect(subject.action_button_title).to eq 'Stop this environment' } + end end describe '.matches?' 
do @@ -77,4 +81,24 @@ describe Gitlab::Ci::Status::Build::Stop do end end end + + describe '#status_tooltip' do + it 'does not override status status_tooltip' do + expect(status).to receive(:status_tooltip) + + subject.status_tooltip + end + end + + describe '#badge_tooltip' do + let(:user) { create(:user) } + let(:build) { create(:ci_build, :playable) } + let(:status) { Gitlab::Ci::Status::Core.new(build, user) } + + it 'does not override status badge_tooltip' do + expect(status).to receive(:badge_tooltip) + + subject.badge_tooltip + end + end end diff --git a/spec/lib/gitlab/ci/status/success_warning_spec.rb b/spec/lib/gitlab/ci/status/success_warning_spec.rb index 4582354e739..6d05545d1d8 100644 --- a/spec/lib/gitlab/ci/status/success_warning_spec.rb +++ b/spec/lib/gitlab/ci/status/success_warning_spec.rb @@ -1,8 +1,10 @@ require 'spec_helper' describe Gitlab::Ci::Status::SuccessWarning do + let(:status) { double('status') } + subject do - described_class.new(double('status')) + described_class.new(status) end describe '#test' do diff --git a/spec/lib/gitlab/ci/trace/http_io_spec.rb b/spec/lib/gitlab/ci/trace/http_io_spec.rb new file mode 100644 index 00000000000..5474e2f518c --- /dev/null +++ b/spec/lib/gitlab/ci/trace/http_io_spec.rb @@ -0,0 +1,315 @@ +require 'spec_helper' + +describe Gitlab::Ci::Trace::HttpIO do + include HttpIOHelpers + + let(:http_io) { described_class.new(url, size) } + let(:url) { remote_trace_url } + let(:size) { remote_trace_size } + + describe '#close' do + subject { http_io.close } + + it { is_expected.to be_nil } + end + + describe '#binmode' do + subject { http_io.binmode } + + it { is_expected.to be_nil } + end + + describe '#binmode?' do + subject { http_io.binmode? } + + it { is_expected.to be_truthy } + end + + describe '#path' do + subject { http_io.path } + + it { is_expected.to be_nil } + end + + describe '#url' do + subject { http_io.url } + + it { is_expected.to eq(url) } + end + + describe '#seek' do + subject { http_io.seek(pos, where) } + + context 'when moves pos to end of the file' do + let(:pos) { 0 } + let(:where) { IO::SEEK_END } + + it { is_expected.to eq(size) } + end + + context 'when moves pos to middle of the file' do + let(:pos) { size / 2 } + let(:where) { IO::SEEK_SET } + + it { is_expected.to eq(size / 2) } + end + + context 'when moves pos around' do + it 'matches the result' do + expect(http_io.seek(0)).to eq(0) + expect(http_io.seek(100, IO::SEEK_CUR)).to eq(100) + expect { http_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file') + end + end + end + + describe '#eof?' do + subject { http_io.eof? 
} + + context 'when current pos is at end of the file' do + before do + http_io.seek(size, IO::SEEK_SET) + end + + it { is_expected.to be_truthy } + end + + context 'when current pos is not at end of the file' do + before do + http_io.seek(0, IO::SEEK_SET) + end + + it { is_expected.to be_falsey } + end + end + + describe '#each_line' do + subject { http_io.each_line } + + let(:string_io) { StringIO.new(remote_trace_body) } + + before do + stub_remote_trace_206 + end + + it 'yields lines' do + expect { |b| http_io.each_line(&b) }.to yield_successive_args(*string_io.each_line.to_a) + end + + context 'when buckets on GCS' do + context 'when BUFFER_SIZE is larger than file size' do + before do + stub_remote_trace_200 + set_larger_buffer_size_than(size) + end + + it 'calls get_chunk only once' do + expect_any_instance_of(Net::HTTP).to receive(:request).once.and_call_original + + http_io.each_line { |line| } + end + end + end + end + + describe '#read' do + subject { http_io.read(length) } + + context 'when there are no network issue' do + before do + stub_remote_trace_206 + end + + context 'when read whole size' do + let(:length) { nil } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to eq(remote_trace_body) + end + end + + context 'when BUFFER_SIZE is larger than file size' do + before do + set_larger_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to eq(remote_trace_body) + end + end + end + + context 'when read only first 100 bytes' do + let(:length) { 100 } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to eq(remote_trace_body[0, length]) + end + end + + context 'when BUFFER_SIZE is larger than file size' do + before do + set_larger_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to eq(remote_trace_body[0, length]) + end + end + end + + context 'when tries to read oversize' do + let(:length) { size + 1000 } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to eq(remote_trace_body) + end + end + + context 'when BUFFER_SIZE is larger than file size' do + before do + set_larger_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to eq(remote_trace_body) + end + end + end + + context 'when tries to read 0 bytes' do + let(:length) { 0 } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to be_empty + end + end + + context 'when BUFFER_SIZE is larger than file size' do + before do + set_larger_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to be_empty + end + end + end + end + + context 'when there is anetwork issue' do + let(:length) { nil } + + before do + stub_remote_trace_500 + end + + it 'reads a trace' do + expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError) + end + end + end + + describe '#readline' do + subject { http_io.readline } + + let(:string_io) { StringIO.new(remote_trace_body) } + + before do + stub_remote_trace_206 + end + + shared_examples 'all line matching' do + it 'reads a line' do + (0...remote_trace_body.lines.count).each do + expect(http_io.readline).to eq(string_io.readline) + end + end + end + + context 'when there is anetwork issue' do + 
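# HttpIO above behaves like an IO object backed by a remote URL: it fetches
# the trace in BUFFER_SIZE chunks via HTTP Range requests and reuses the
# cached chunk when possible. A stand-alone sketch of that core idea; the
# class name, chunk size and error handling here are illustrative only:
require 'net/http'
require 'uri'

class SketchHttpChunkReader
  CHUNK_SIZE = 128 * 1024

  def initialize(url, size)
    @uri = URI(url)
    @size = size
    @chunk = nil
    @chunk_start = nil
  end

  # Read `length` bytes starting at byte offset `pos`.
  def read(pos, length)
    data = ''.dup.force_encoding(Encoding::ASCII_8BIT)

    while length > 0 && pos < @size
      chunk = chunk_for(pos)
      slice = chunk.byteslice(pos - @chunk_start, length)
      data << slice
      pos += slice.bytesize
      length -= slice.bytesize
    end

    data
  end

  private

  # Fetch (or reuse) the chunk that contains byte offset `pos`.
  def chunk_for(pos)
    start = (pos / CHUNK_SIZE) * CHUNK_SIZE
    return @chunk if @chunk && @chunk_start == start

    stop = [start + CHUNK_SIZE, @size].min - 1
    response = Net::HTTP.start(@uri.host, @uri.port, use_ssl: @uri.scheme == 'https') do |http|
      http.get(@uri.request_uri, 'Range' => "bytes=#{start}-#{stop}")
    end
    raise "failed to get chunk #{start}-#{stop}" unless response.code == '206'

    @chunk_start = start
    @chunk = response.body.b
  end
end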
let(:length) { nil } + + before do + stub_remote_trace_500 + end + + it 'reads a trace' do + expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError) + end + end + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + end + + it_behaves_like 'all line matching' + end + + context 'when BUFFER_SIZE is larger than file size' do + before do + set_larger_buffer_size_than(size) + end + + it_behaves_like 'all line matching' + end + + context 'when pos is at middle of the file' do + before do + set_smaller_buffer_size_than(size) + + http_io.seek(size / 2) + string_io.seek(size / 2) + end + + it 'reads from pos' do + expect(http_io.readline).to eq(string_io.readline) + end + end + end + + describe '#write' do + subject { http_io.write(nil) } + + it { expect { subject }.to raise_error(NotImplementedError) } + end + + describe '#truncate' do + subject { http_io.truncate(nil) } + + it { expect { subject }.to raise_error(NotImplementedError) } + end + + describe '#flush' do + subject { http_io.flush } + + it { expect { subject }.to raise_error(NotImplementedError) } + end + + describe '#present?' do + subject { http_io.present? } + + it { is_expected.to be_truthy } + end +end diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb index 448c6fb57dd..6a9c6442282 100644 --- a/spec/lib/gitlab/ci/trace_spec.rb +++ b/spec/lib/gitlab/ci/trace_spec.rb @@ -458,7 +458,7 @@ describe Gitlab::Ci::Trace do context 'when job does not have trace artifact' do context 'when trace file stored in default path' do let!(:build) { create(:ci_build, :success, :trace_live) } - let!(:src_path) { trace.read { |s| return s.path } } + let!(:src_path) { trace.read { |s| s.path } } let!(:src_checksum) { Digest::SHA256.file(src_path).hexdigest } it_behaves_like 'archive trace file' @@ -510,6 +510,28 @@ describe Gitlab::Ci::Trace do it_behaves_like 'source trace in database stays intact', error: ActiveRecord::RecordInvalid end + + context 'when there is a validation error on Ci::Build' do + before do + allow_any_instance_of(Ci::Build).to receive(:save).and_return(false) + allow_any_instance_of(Ci::Build).to receive_message_chain(:errors, :full_messages) + .and_return(%w[Error Error]) + end + + context "when erase old trace with 'save'" do + before do + build.send(:write_attribute, :trace, nil) + build.save + end + + it 'old trace is not deleted' do + build.reload + expect(build.trace.raw).to eq(trace_content) + end + end + + it_behaves_like 'archive trace in database' + end end end diff --git a/spec/lib/gitlab/ci/variables/collection/item_spec.rb b/spec/lib/gitlab/ci/variables/collection/item_spec.rb index cc1257484d2..e79f0a7f257 100644 --- a/spec/lib/gitlab/ci/variables/collection/item_spec.rb +++ b/spec/lib/gitlab/ci/variables/collection/item_spec.rb @@ -5,6 +5,18 @@ describe Gitlab::Ci::Variables::Collection::Item do { key: 'VAR', value: 'something', public: true } end + describe '.new' do + it 'raises error if unknown key i specified' do + expect { described_class.new(key: 'VAR', value: 'abc', files: true) } + .to raise_error ArgumentError, 'unknown keyword: files' + end + + it 'raises error when required keywords are not specified' do + expect { described_class.new(key: 'VAR') } + .to raise_error ArgumentError, 'missing keyword: value' + end + end + describe '.fabricate' do it 'supports using a hash' do resource = described_class.fabricate(variable) @@ -46,9 +58,26 @@ describe Gitlab::Ci::Variables::Collection::Item do end 
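# The `.new` examples above rely on Ruby keyword arguments for validation:
# an unknown key or a missing required key raises ArgumentError without any
# hand-written checks. A tiny sketch of that approach with a stand-in class
# (the exact error wording differs slightly between Ruby versions):
class SketchVariableItem
  def initialize(key:, value:, public: true, file: false)
    @key = key
    @value = value
    @public = public
    @file = file
  end
end

begin
  SketchVariableItem.new(key: 'VAR')
rescue ArgumentError => e
  e.message # => "missing keyword: value"
end

begin
  SketchVariableItem.new(key: 'VAR', value: 'abc', files: true)
rescue ArgumentError => e
  e.message # => "unknown keyword: files"
end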
end - describe '#to_hash' do - it 'returns a hash representation of a collection item' do - expect(described_class.new(**variable).to_hash).to eq variable + describe '#to_runner_variable' do + context 'when variable is not a file-related' do + it 'returns a runner-compatible hash representation' do + runner_variable = described_class + .new(**variable) + .to_runner_variable + + expect(runner_variable).to eq variable + end + end + + context 'when variable is file-related' do + it 'appends file description component' do + runner_variable = described_class + .new(key: 'VAR', value: 'value', file: true) + .to_runner_variable + + expect(runner_variable) + .to eq(key: 'VAR', value: 'value', public: true, file: true) + end end end end diff --git a/spec/lib/gitlab/ci/variables/collection_spec.rb b/spec/lib/gitlab/ci/variables/collection_spec.rb index 90b6e178242..cb2f7718c9c 100644 --- a/spec/lib/gitlab/ci/variables/collection_spec.rb +++ b/spec/lib/gitlab/ci/variables/collection_spec.rb @@ -7,7 +7,7 @@ describe Gitlab::Ci::Variables::Collection do collection = described_class.new([variable]) - expect(collection.first.to_hash).to eq variable + expect(collection.first.to_runner_variable).to eq variable end it 'can be initialized without an argument' do @@ -96,4 +96,19 @@ describe Gitlab::Ci::Variables::Collection do .to eq [{ key: 'TEST', value: 1, public: true }] end end + + describe '#to_hash' do + it 'returns regular hash in valid order without duplicates' do + collection = described_class.new + .append(key: 'TEST1', value: 'test-1') + .append(key: 'TEST2', value: 'test-2') + .append(key: 'TEST1', value: 'test-3') + + expect(collection.to_hash).to eq('TEST1' => 'test-3', + 'TEST2' => 'test-2') + + expect(collection.to_hash).to include(TEST1: 'test-3') + expect(collection.to_hash).not_to include(TEST1: 'test-1') + end + end end diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb index f83f932e61e..ecb16daec96 100644 --- a/spec/lib/gitlab/ci/yaml_processor_spec.rb +++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb @@ -18,6 +18,34 @@ module Gitlab describe '#build_attributes' do subject { described_class.new(config).build_attributes(:rspec) } + describe 'attributes list' do + let(:config) do + YAML.dump( + before_script: ['pwd'], + rspec: { script: 'rspec' } + ) + end + + it 'returns valid build attributes' do + expect(subject).to eq({ + stage: "test", + stage_idx: 1, + name: "rspec", + commands: "pwd\nrspec", + coverage_regex: nil, + tag_list: [], + options: { + before_script: ["pwd"], + script: ["rspec"] + }, + allow_failure: false, + when: "on_success", + environment: nil, + yaml_variables: [] + }) + end + end + describe 'coverage entry' do describe 'code coverage regexp' do let(:config) do @@ -105,512 +133,118 @@ module Gitlab end end - describe '#stage_seeds' do - context 'when no refs policy is specified' do - let(:config) do - YAML.dump(production: { stage: 'deploy', script: 'cap prod' }, - rspec: { stage: 'test', script: 'rspec' }, - spinach: { stage: 'test', script: 'spinach' }) - end - - let(:pipeline) { create(:ci_empty_pipeline) } - - it 'correctly fabricates a stage seeds object' do - seeds = subject.stage_seeds(pipeline) - - expect(seeds.size).to eq 2 - expect(seeds.first.stage[:name]).to eq 'test' - expect(seeds.second.stage[:name]).to eq 'deploy' - expect(seeds.first.builds.dig(0, :name)).to eq 'rspec' - expect(seeds.first.builds.dig(1, :name)).to eq 'spinach' - expect(seeds.second.builds.dig(0, :name)).to eq 'production' - end - end - - 
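# The item and collection examples above pin down two behaviours: a runner
# variable hash only carries `file: true` when the variable is file-related,
# and `#to_hash` lets later appends override earlier ones. A compact sketch of
# both behaviours with simplified stand-in classes:
class SketchVariable
  attr_reader :key, :value, :file

  def initialize(key:, value:, file: false)
    @key = key
    @value = value
    @file = file
  end

  def to_runner_variable
    hash = { key: key, value: value, public: true }
    hash[:file] = true if file
    hash
  end
end

class SketchVariableCollection
  include Enumerable

  def initialize
    @variables = []
  end

  def append(attributes)
    @variables << SketchVariable.new(**attributes)
    self
  end

  def each(&block)
    @variables.each(&block)
  end

  def to_hash
    # later keys overwrite earlier ones, mirroring the TEST1 => 'test-3' example
    each_with_object({}) { |variable, hash| hash[variable.key] = variable.value }
  end
end

collection = SketchVariableCollection.new
  .append(key: 'TEST1', value: 'test-1')
  .append(key: 'TEST1', value: 'test-3', file: true)

collection.to_hash                        # => { "TEST1" => "test-3" }
collection.map(&:to_runner_variable).last # => { key: "TEST1", value: "test-3", public: true, file: true }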
context 'when refs policy is specified' do - let(:config) do - YAML.dump(production: { stage: 'deploy', script: 'cap prod', only: ['master'] }, - spinach: { stage: 'test', script: 'spinach', only: ['tags'] }) - end - - let(:pipeline) do - create(:ci_empty_pipeline, ref: 'feature', tag: true) - end - - it 'returns stage seeds only assigned to master to master' do - seeds = subject.stage_seeds(pipeline) - - expect(seeds.size).to eq 1 - expect(seeds.first.stage[:name]).to eq 'test' - expect(seeds.first.builds.dig(0, :name)).to eq 'spinach' - end - end - - context 'when source policy is specified' do - let(:config) do - YAML.dump(production: { stage: 'deploy', script: 'cap prod', only: ['triggers'] }, - spinach: { stage: 'test', script: 'spinach', only: ['schedules'] }) - end - - let(:pipeline) do - create(:ci_empty_pipeline, source: :schedule) - end - - it 'returns stage seeds only assigned to schedules' do - seeds = subject.stage_seeds(pipeline) - - expect(seeds.size).to eq 1 - expect(seeds.first.stage[:name]).to eq 'test' - expect(seeds.first.builds.dig(0, :name)).to eq 'spinach' - end + describe '#stages_attributes' do + let(:config) do + YAML.dump( + rspec: { script: 'rspec', stage: 'test', only: ['branches'] }, + prod: { script: 'cap prod', stage: 'deploy', only: ['tags'] } + ) end - context 'when kubernetes policy is specified' do - let(:config) do - YAML.dump( - spinach: { stage: 'test', script: 'spinach' }, - production: { - stage: 'deploy', - script: 'cap', - only: { kubernetes: 'active' } - } - ) - end - - context 'when kubernetes is active' do - shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do - it 'returns seeds for kubernetes dependent job' do - seeds = subject.stage_seeds(pipeline) - - expect(seeds.size).to eq 2 - expect(seeds.first.builds.dig(0, :name)).to eq 'spinach' - expect(seeds.second.builds.dig(0, :name)).to eq 'production' - end - end - - context 'when user configured kubernetes from Integration > Kubernetes' do - let(:project) { create(:kubernetes_project) } - let(:pipeline) { create(:ci_empty_pipeline, project: project) } - - it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' - end - - context 'when user configured kubernetes from CI/CD > Clusters' do - let!(:cluster) { create(:cluster, :project, :provided_by_gcp) } - let(:project) { cluster.project } - let(:pipeline) { create(:ci_empty_pipeline, project: project) } - - it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' - end - end - - context 'when kubernetes is not active' do - it 'does not return seeds for kubernetes dependent job' do - seeds = subject.stage_seeds(pipeline) - - expect(seeds.size).to eq 1 - expect(seeds.first.builds.dig(0, :name)).to eq 'spinach' - end - end + let(:attributes) do + [{ name: "build", + index: 0, + builds: [] }, + { name: "test", + index: 1, + builds: + [{ stage_idx: 1, + stage: "test", + commands: "rspec", + tag_list: [], + name: "rspec", + allow_failure: false, + when: "on_success", + environment: nil, + coverage_regex: nil, + yaml_variables: [], + options: { script: ["rspec"] }, + only: { refs: ["branches"] }, + except: {} }] }, + { name: "deploy", + index: 2, + builds: + [{ stage_idx: 2, + stage: "deploy", + commands: "cap prod", + tag_list: [], + name: "prod", + allow_failure: false, + when: "on_success", + environment: nil, + coverage_regex: nil, + yaml_variables: [], + options: { script: ["cap prod"] }, + only: { refs: ["tags"] }, + except: {} }] }] + end + + it 'returns stages 
seed attributes' do + expect(subject.stages_attributes).to eq attributes end end - describe "#pipeline_stage_builds" do - let(:type) { 'test' } - - it "returns builds if no branch specified" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec" } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).first).to eq({ - stage: "test", - stage_idx: 1, - name: "rspec", - commands: "pwd\nrspec", - coverage_regex: nil, - tag_list: [], - options: { - before_script: ["pwd"], - script: ["rspec"] - }, - allow_failure: false, - when: "on_success", - environment: nil, - yaml_variables: [] - }) - end - - describe 'only' do - it "does not return builds if only has another branch" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", only: ["deploy"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0) - end - - it "does not return builds if only has regexp with another branch" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", only: ["/^deploy$/"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0) - end - - it "returns builds if only has specified this branch" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", only: ["master"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1) - end - - it "returns builds if only has a list of branches including specified" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, only: %w(master deploy) } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1) - end - - it "returns builds if only has a branches keyword specified" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, only: ["branches"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1) - end - - it "does not return builds if only has a tags keyword" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, only: ["tags"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0) - end - - it "returns builds if only has special keywords specified and source matches" do - possibilities = [{ keyword: 'pushes', source: 'push' }, - { keyword: 'web', source: 'web' }, - { keyword: 'triggers', source: 'trigger' }, - { keyword: 'schedules', source: 'schedule' }, - { keyword: 'api', source: 'api' }, - { keyword: 'external', source: 'external' }] - - possibilities.each do |possibility| - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, only: [possibility[:keyword]] } - }) + describe 'only / except policies validations' do + context 'when `only` has an invalid value' do + let(:config) { { rspec: { script: "rspec", type: "test", only: only 
} } } + let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) } - config_processor = Gitlab::Ci::YamlProcessor.new(config) + context 'when it is integer' do + let(:only) { 1 } - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(1) + it do + expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, + 'jobs:rspec:only has to be either an array of conditions or a hash') end end - it "does not return builds if only has special keywords specified and source doesn't match" do - possibilities = [{ keyword: 'pushes', source: 'web' }, - { keyword: 'web', source: 'push' }, - { keyword: 'triggers', source: 'schedule' }, - { keyword: 'schedules', source: 'external' }, - { keyword: 'api', source: 'trigger' }, - { keyword: 'external', source: 'api' }] - - possibilities.each do |possibility| - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, only: [possibility[:keyword]] } - }) + context 'when it is an array of integers' do + let(:only) { [1, 1] } - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(0) + it do + expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, + 'jobs:rspec:only config should be an array of strings or regexps') end end - it "returns builds if only has current repository path" do - seed_pipeline = pipeline(ref: 'deploy') - - config = YAML.dump({ - before_script: ["pwd"], - rspec: { - script: "rspec", - type: type, - only: ["branches@#{seed_pipeline.project_full_path}"] - } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, seed_pipeline).size).to eq(1) - end - - it "does not return builds if only has different repository path" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, only: ["branches@fork"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0) - end - - it "returns build only for specified type" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: "test", only: %w(master deploy) }, - staging: { script: "deploy", type: "deploy", only: %w(master deploy) }, - production: { script: "deploy", type: "deploy", only: ["master@path", "deploy"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds("deploy", pipeline(ref: "deploy")).size).to eq(2) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "deploy")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("deploy", pipeline(ref: "master")).size).to eq(1) - end - - context 'for invalid value' do - let(:config) { { rspec: { script: "rspec", type: "test", only: only } } } - let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) } - - context 'when it is integer' do - let(:only) { 1 } - - it do - expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, - 'jobs:rspec:only has to be either an array of conditions or a hash') - end - end - - context 'when it is an array of integers' do - let(:only) { [1, 1] } + context 'when it is invalid regex' do + let(:only) { ["/*invalid/"] } - it do - expect { processor }.to 
raise_error(Gitlab::Ci::YamlProcessor::ValidationError, - 'jobs:rspec:only config should be an array of strings or regexps') - end - end - - context 'when it is invalid regex' do - let(:only) { ["/*invalid/"] } - - it do - expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, - 'jobs:rspec:only config should be an array of strings or regexps') - end + it do + expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, + 'jobs:rspec:only config should be an array of strings or regexps') end end end - describe 'except' do - it "returns builds if except has another branch" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", except: ["deploy"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1) - end - - it "returns builds if except has regexp with another branch" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", except: ["/^deploy$/"] } - }) + context 'when `except` has an invalid value' do + let(:config) { { rspec: { script: "rspec", except: except } } } + let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) } - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1) - end - - it "does not return builds if except has specified this branch" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", except: ["master"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0) - end - - it "does not return builds if except has a list of branches including specified" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, except: %w(master deploy) } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) + context 'when it is integer' do + let(:except) { 1 } - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0) - end - - it "does not return builds if except has a branches keyword specified" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, except: ["branches"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0) - end - - it "returns builds if except has a tags keyword" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, except: ["tags"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1) - end - - it "does not return builds if except has special keywords specified and source matches" do - possibilities = [{ keyword: 'pushes', source: 'push' }, - { keyword: 'web', source: 'web' }, - { keyword: 'triggers', source: 'trigger' }, - { keyword: 'schedules', source: 'schedule' }, - { keyword: 'api', source: 'api' }, - { keyword: 'external', source: 'external' }] - - possibilities.each do |possibility| - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, except: [possibility[:keyword]] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: 
false, source: possibility[:source])).size).to eq(0) + it do + expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, + 'jobs:rspec:except has to be either an array of conditions or a hash') end end - it "returns builds if except has special keywords specified and source doesn't match" do - possibilities = [{ keyword: 'pushes', source: 'web' }, - { keyword: 'web', source: 'push' }, - { keyword: 'triggers', source: 'schedule' }, - { keyword: 'schedules', source: 'external' }, - { keyword: 'api', source: 'trigger' }, - { keyword: 'external', source: 'api' }] - - possibilities.each do |possibility| - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, except: [possibility[:keyword]] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) + context 'when it is an array of integers' do + let(:except) { [1, 1] } - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(1) + it do + expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, + 'jobs:rspec:except config should be an array of strings or regexps') end end - it "does not return builds if except has current repository path" do - seed_pipeline = pipeline(ref: 'deploy') + context 'when it is invalid regex' do + let(:except) { ["/*invalid/"] } - config = YAML.dump({ - before_script: ["pwd"], - rspec: { - script: "rspec", - type: type, - except: ["branches@#{seed_pipeline.project_full_path}"] - } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, seed_pipeline).size).to eq(0) - end - - it "returns builds if except has different repository path" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, except: ["branches@fork"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1) - end - - it "returns build except specified type" do - master_pipeline = pipeline(ref: 'master') - test_pipeline = pipeline(ref: 'test') - deploy_pipeline = pipeline(ref: 'deploy') - - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: "test", except: ["master", "deploy", "test@#{test_pipeline.project_full_path}"] }, - staging: { script: "deploy", type: "deploy", except: ["master"] }, - production: { script: "deploy", type: "deploy", except: ["master@#{master_pipeline.project_full_path}"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds("deploy", deploy_pipeline).size).to eq(2) - expect(config_processor.pipeline_stage_builds("test", test_pipeline).size).to eq(0) - expect(config_processor.pipeline_stage_builds("deploy", master_pipeline).size).to eq(0) - end - - context 'for invalid value' do - let(:config) { { rspec: { script: "rspec", except: except } } } - let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) } - - context 'when it is integer' do - let(:except) { 1 } - - it do - expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, - 'jobs:rspec:except has to be either an array of conditions or a hash') - end - end - - context 'when it is an array of integers' do - let(:except) { [1, 1] } - - it do - expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, - 'jobs:rspec:except config should be an array of strings or regexps') - 
end - end - - context 'when it is invalid regex' do - let(:except) { ["/*invalid/"] } - - it do - expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, - 'jobs:rspec:except config should be an array of strings or regexps') - end + it do + expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, + 'jobs:rspec:except config should be an array of strings or regexps') end end end @@ -620,7 +254,7 @@ module Gitlab let(:config_data) { YAML.dump(config) } let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config_data) } - subject { config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first } + subject { config_processor.stage_builds_attributes('test').first } describe "before_script" do context "in global context" do @@ -703,8 +337,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({ + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first).to eq({ stage: "test", stage_idx: 1, name: "rspec", @@ -738,8 +372,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({ + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first).to eq({ stage: "test", stage_idx: 1, name: "rspec", @@ -771,8 +405,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({ + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first).to eq({ stage: "test", stage_idx: 1, name: "rspec", @@ -800,8 +434,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({ + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first).to eq({ stage: "test", stage_idx: 1, name: "rspec", @@ -946,8 +580,8 @@ module Gitlab }) config_processor = Gitlab::Ci::YamlProcessor.new(config) + builds = config_processor.stage_builds_attributes("test") - builds = config_processor.pipeline_stage_builds("test", pipeline(ref: "master")) expect(builds.size).to eq(1) expect(builds.first[:when]).to eq(when_state) end @@ -978,8 +612,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq( + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first[:options][:cache]).to eq( paths: ["logs/", "binaries/"], untracked: true, key: 'key', @@ -997,8 +631,8 @@ module 
Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq( + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first[:options][:cache]).to eq( paths: ["logs/", "binaries/"], untracked: true, key: 'key', @@ -1017,8 +651,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq( + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first[:options][:cache]).to eq( paths: ["test/"], untracked: false, key: 'local', @@ -1046,8 +680,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({ + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first).to eq({ stage: "test", stage_idx: 1, name: "rspec", @@ -1083,8 +717,8 @@ module Gitlab }) config_processor = Gitlab::Ci::YamlProcessor.new(config) + builds = config_processor.stage_builds_attributes("test") - builds = config_processor.pipeline_stage_builds("test", pipeline(ref: "master")) expect(builds.size).to eq(1) expect(builds.first[:options][:artifacts][:when]).to eq(when_state) end @@ -1099,7 +733,7 @@ module Gitlab end let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) } - let(:builds) { processor.pipeline_stage_builds('deploy', pipeline(ref: 'master')) } + let(:builds) { processor.stage_builds_attributes('deploy') } context 'when a production environment is specified' do let(:environment) { 'production' } @@ -1256,7 +890,7 @@ module Gitlab describe "Hidden jobs" do let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) } - subject { config_processor.pipeline_stage_builds("test", pipeline(ref: "master")) } + subject { config_processor.stage_builds_attributes("test") } shared_examples 'hidden_job_handling' do it "doesn't create jobs that start with dot" do @@ -1304,7 +938,7 @@ module Gitlab describe "YAML Alias/Anchor" do let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) } - subject { config_processor.pipeline_stage_builds("build", pipeline(ref: "master")) } + subject { config_processor.stage_builds_attributes("build") } shared_examples 'job_templates_handling' do it "is correctly supported for jobs" do @@ -1344,13 +978,13 @@ module Gitlab context 'when template is a job' do let(:config) do - <<EOT -job1: &JOBTMPL - stage: build - script: execute-script-for-job + <<~EOT + job1: &JOBTMPL + stage: build + script: execute-script-for-job -job2: *JOBTMPL -EOT + job2: *JOBTMPL + EOT end it_behaves_like 'job_templates_handling' @@ -1358,15 +992,15 @@ EOT context 'when template is a hidden job' do let(:config) do - <<EOT -.template: &JOBTMPL - stage: build - script: execute-script-for-job + <<~EOT + .template: &JOBTMPL + stage: build + script: execute-script-for-job -job1: *JOBTMPL + job1: *JOBTMPL -job2: *JOBTMPL -EOT + job2: *JOBTMPL + EOT end 
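# The template fixtures above move from <<EOT to the "squiggly" <<~EOT
# heredoc, which strips the common leading indentation so the embedded YAML
# can be indented along with the surrounding Ruby. A small illustration:
plain = <<EOT
  job1:
    script: rspec
EOT

squiggly = <<~EOT
  job1:
    script: rspec
EOT

plain.lines.first    # => "  job1:\n"  (indentation is kept verbatim)
squiggly.lines.first # => "job1:\n"    (common indentation is removed)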
it_behaves_like 'job_templates_handling' @@ -1374,18 +1008,18 @@ EOT context 'when job adds its own keys to a template definition' do let(:config) do - <<EOT -.template: &JOBTMPL - stage: build - -job1: - <<: *JOBTMPL - script: execute-script-for-job - -job2: - <<: *JOBTMPL - script: execute-script-for-job -EOT + <<~EOT + .template: &JOBTMPL + stage: build + + job1: + <<: *JOBTMPL + script: execute-script-for-job + + job2: + <<: *JOBTMPL + script: execute-script-for-job + EOT end it_behaves_like 'job_templates_handling' @@ -1677,6 +1311,14 @@ EOT Gitlab::Ci::YamlProcessor.new(config) end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec dependencies should be an array of strings") end + + it 'returns errors if pipeline variables expression is invalid' do + config = YAML.dump({ rspec: { script: 'test', only: { variables: ['== null'] } } }) + + expect { Gitlab::Ci::YamlProcessor.new(config) } + .to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, + 'jobs:rspec:only variables invalid expression syntax') + end end describe "Validate configuration templates" do @@ -1724,10 +1366,6 @@ EOT it { is_expected.to be_nil } end end - - def pipeline(**attributes) - build_stubbed(:ci_empty_pipeline, **attributes) - end end end end diff --git a/spec/lib/gitlab/data_builder/note_spec.rb b/spec/lib/gitlab/data_builder/note_spec.rb index aaa42566a4d..4f8412108ba 100644 --- a/spec/lib/gitlab/data_builder/note_spec.rb +++ b/spec/lib/gitlab/data_builder/note_spec.rb @@ -55,6 +55,14 @@ describe Gitlab::DataBuilder::Note do .to be > issue.hook_attrs['updated_at'] end + context 'with confidential issue' do + let(:issue) { create(:issue, project: project, confidential: true) } + + it 'sets event_type to confidential_note' do + expect(data[:event_type]).to eq('confidential_note') + end + end + include_examples 'project hook data' include_examples 'deprecated repository hook data' end diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb index 1de3a14b809..280f799f2ab 100644 --- a/spec/lib/gitlab/database/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers_spec.rb @@ -67,17 +67,35 @@ describe Gitlab::Database::MigrationHelpers do model.add_concurrent_index(:users, :foo, unique: true) end + + it 'does nothing if the index exists already' do + expect(model).to receive(:index_exists?) + .with(:users, :foo, { algorithm: :concurrently, unique: true }).and_return(true) + expect(model).not_to receive(:add_index) + + model.add_concurrent_index(:users, :foo, unique: true) + end end context 'using MySQL' do - it 'creates a regular index' do - expect(Gitlab::Database).to receive(:postgresql?).and_return(false) + before do + allow(Gitlab::Database).to receive(:postgresql?).and_return(false) + end + it 'creates a regular index' do expect(model).to receive(:add_index) .with(:users, :foo, {}) model.add_concurrent_index(:users, :foo) end + + it 'does nothing if the index exists already' do + expect(model).to receive(:index_exists?) 
+ .with(:users, :foo, { unique: true }).and_return(true) + expect(model).not_to receive(:add_index) + + model.add_concurrent_index(:users, :foo, unique: true) + end end end @@ -95,6 +113,7 @@ describe Gitlab::Database::MigrationHelpers do context 'outside a transaction' do before do allow(model).to receive(:transaction_open?).and_return(false) + allow(model).to receive(:index_exists?).and_return(true) end context 'using PostgreSQL' do @@ -103,18 +122,41 @@ describe Gitlab::Database::MigrationHelpers do allow(model).to receive(:disable_statement_timeout) end - it 'removes the index concurrently by column name' do - expect(model).to receive(:remove_index) - .with(:users, { algorithm: :concurrently, column: :foo }) + describe 'by column name' do + it 'removes the index concurrently' do + expect(model).to receive(:remove_index) + .with(:users, { algorithm: :concurrently, column: :foo }) - model.remove_concurrent_index(:users, :foo) + model.remove_concurrent_index(:users, :foo) + end + + it 'does nothing if the index does not exist' do + expect(model).to receive(:index_exists?) + .with(:users, :foo, { algorithm: :concurrently, unique: true }).and_return(false) + expect(model).not_to receive(:remove_index) + + model.remove_concurrent_index(:users, :foo, unique: true) + end end - it 'removes the index concurrently by index name' do - expect(model).to receive(:remove_index) - .with(:users, { algorithm: :concurrently, name: "index_x_by_y" }) + describe 'by index name' do + before do + allow(model).to receive(:index_exists_by_name?).with(:users, "index_x_by_y").and_return(true) + end + + it 'removes the index concurrently by index name' do + expect(model).to receive(:remove_index) + .with(:users, { algorithm: :concurrently, name: "index_x_by_y" }) + + model.remove_concurrent_index_by_name(:users, "index_x_by_y") + end + + it 'does nothing if the index does not exist' do + expect(model).to receive(:index_exists_by_name?).with(:users, "index_x_by_y").and_return(false) + expect(model).not_to receive(:remove_index) - model.remove_concurrent_index_by_name(:users, "index_x_by_y") + model.remove_concurrent_index_by_name(:users, "index_x_by_y") + end end end @@ -141,6 +183,10 @@ describe Gitlab::Database::MigrationHelpers do end describe '#add_concurrent_foreign_key' do + before do + allow(model).to receive(:foreign_key_exists?).and_return(false) + end + context 'inside a transaction' do it 'raises an error' do expect(model).to receive(:transaction_open?).and_return(true) @@ -157,14 +203,23 @@ describe Gitlab::Database::MigrationHelpers do end context 'using MySQL' do - it 'creates a regular foreign key' do + before do allow(Gitlab::Database).to receive(:mysql?).and_return(true) + end + it 'creates a regular foreign key' do expect(model).to receive(:add_foreign_key) .with(:projects, :users, column: :user_id, on_delete: :cascade) model.add_concurrent_foreign_key(:projects, :users, column: :user_id) end + + it 'does not create a foreign key if it exists already' do + expect(model).to receive(:foreign_key_exists?).with(:projects, :users, column: :user_id).and_return(true) + expect(model).not_to receive(:add_foreign_key) + + model.add_concurrent_foreign_key(:projects, :users, column: :user_id) + end end context 'using PostgreSQL' do @@ -189,6 +244,14 @@ describe Gitlab::Database::MigrationHelpers do column: :user_id, on_delete: :nullify) end + + it 'does not create a foreign key if it exists already' do + expect(model).to receive(:foreign_key_exists?).with(:projects, :users, column: :user_id).and_return(true) + 
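# Most of the new helper examples above and below assert guard clauses:
# index and foreign-key creation is skipped when the object already exists.
# A stripped-down sketch of that idempotency pattern on top of the standard
# Rails migration API (the real helpers also handle concurrent creation,
# statement timeouts and MySQL/PostgreSQL differences):
module SketchIdempotentMigrationHelpers
  def sketch_add_concurrent_index(table, columns, options = {})
    if index_exists?(table, columns, options)
      say "Index not created because it already exists: #{table} #{columns}"
      return
    end

    add_index(table, columns, options)
  end

  def sketch_foreign_key_exists?(source, target = nil, column: nil)
    foreign_keys(source).any? do |key|
      if column
        key.options[:column].to_s == column.to_s
      else
        key.to_table.to_s == target.to_s
      end
    end
  end
end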
expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/) + expect(model).to receive(:execute).with(/VALIDATE CONSTRAINT/) + + model.add_concurrent_foreign_key(:projects, :users, column: :user_id) + end end end end @@ -203,6 +266,29 @@ describe Gitlab::Database::MigrationHelpers do end end + describe '#foreign_key_exists?' do + before do + key = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(:projects, :users, { column: :non_standard_id }) + allow(model).to receive(:foreign_keys).with(:projects).and_return([key]) + end + + it 'finds existing foreign keys by column' do + expect(model.foreign_key_exists?(:projects, :users, column: :non_standard_id)).to be_truthy + end + + it 'finds existing foreign keys by target table only' do + expect(model.foreign_key_exists?(:projects, :users)).to be_truthy + end + + it 'compares by column name if given' do + expect(model.foreign_key_exists?(:projects, :users, column: :user_id)).to be_falsey + end + + it 'compares by target if no column given' do + expect(model.foreign_key_exists?(:projects, :other_table)).to be_falsey + end + end + describe '#disable_statement_timeout' do context 'using PostgreSQL' do it 'disables statement timeouts' do @@ -1125,4 +1211,33 @@ describe Gitlab::Database::MigrationHelpers do expect(model.perform_background_migration_inline?).to eq(false) end end + + describe '#index_exists_by_name?' do + it 'returns true if an index exists' do + expect(model.index_exists_by_name?(:projects, 'index_projects_on_path')) + .to be_truthy + end + + it 'returns false if the index does not exist' do + expect(model.index_exists_by_name?(:projects, 'this_does_not_exist')) + .to be_falsy + end + + context 'when an index with a function exists', :postgresql do + before do + ActiveRecord::Base.connection.execute( + 'CREATE INDEX test_index ON projects (LOWER(path));' + ) + end + + after do + 'DROP INDEX IF EXISTS test_index;' + end + + it 'returns true if an index exists' do + expect(model.index_exists_by_name?(:projects, 'test_index')) + .to be_truthy + end + end + end end diff --git a/spec/lib/gitlab/database/sha_attribute_spec.rb b/spec/lib/gitlab/database/sha_attribute_spec.rb index 62c1d37ea1c..778bfa2cc47 100644 --- a/spec/lib/gitlab/database/sha_attribute_spec.rb +++ b/spec/lib/gitlab/database/sha_attribute_spec.rb @@ -19,15 +19,15 @@ describe Gitlab::Database::ShaAttribute do let(:attribute) { described_class.new } - describe '#type_cast_from_database' do + describe '#deserialize' do it 'converts the binary SHA to a String' do - expect(attribute.type_cast_from_database(binary_from_db)).to eq(sha) + expect(attribute.deserialize(binary_from_db)).to eq(sha) end end - describe '#type_cast_for_database' do + describe '#serialize' do it 'converts a SHA String to binary data' do - expect(attribute.type_cast_for_database(sha).to_s).to eq(binary_sha) + expect(attribute.serialize(sha).to_s).to eq(binary_sha) end end end diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb index 9204ea37963..0c2e18c268a 100644 --- a/spec/lib/gitlab/diff/file_spec.rb +++ b/spec/lib/gitlab/diff/file_spec.rb @@ -455,5 +455,17 @@ describe Gitlab::Diff::File do expect(diff_file.size).to be_zero end end + + describe '#different_type?' do + it 'returns false' do + expect(diff_file).not_to be_different_type + end + end + + describe '#content_changed?' 
do + it 'returns false' do + expect(diff_file).not_to be_content_changed + end + end end end diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb index 73d60c021c8..7c9e8c8d04e 100644 --- a/spec/lib/gitlab/diff/highlight_spec.rb +++ b/spec/lib/gitlab/diff/highlight_spec.rb @@ -79,6 +79,8 @@ describe Gitlab::Diff::Highlight do end it 'keeps the original rich line' do + allow(Gitlab::Sentry).to receive(:track_exception) + code = %q{+ raise RuntimeError, "System commands must be given as an array of strings"} expect(subject[5].text).to eq(code) @@ -86,12 +88,9 @@ describe Gitlab::Diff::Highlight do end it 'reports to Sentry if configured' do - allow(Gitlab::Sentry).to receive(:enabled?).and_return(true) - - expect(Gitlab::Sentry).to receive(:context) - expect(Raven).to receive(:capture_exception) + expect(Gitlab::Sentry).to receive(:track_exception).and_call_original - subject + expect { subject }. to raise_exception(RangeError) end end end diff --git a/spec/lib/gitlab/email/handler_spec.rb b/spec/lib/gitlab/email/handler_spec.rb index 650b01c4df4..cedbfcc0d18 100644 --- a/spec/lib/gitlab/email/handler_spec.rb +++ b/spec/lib/gitlab/email/handler_spec.rb @@ -14,4 +14,34 @@ describe Gitlab::Email::Handler do expect(described_class.for('email', '')).to be_nil end end + + describe 'regexps are set properly' do + let(:addresses) do + %W(sent_notification_key#{Gitlab::IncomingEmail::UNSUBSCRIBE_SUFFIX} sent_notification_key path/to/project+merge-request+user_email_token path/to/project+user_email_token) + end + + it 'picks each handler at least once' do + matched_handlers = addresses.map do |address| + described_class.for('email', address).class + end + + expect(matched_handlers.uniq).to match_array(ce_handlers) + end + + it 'can pick exactly one handler for each address' do + addresses.each do |address| + matched_handlers = ce_handlers.select do |handler| + handler.new('email', address).can_handle? 
+ end + + expect(matched_handlers.count).to eq(1), "#{address} matches #{matched_handlers.count} handlers: #{matched_handlers}" + end + end + end + + def ce_handlers + @ce_handlers ||= Gitlab::Email::Handler::HANDLERS.reject do |handler| + handler.name.start_with?('Gitlab::Email::Handler::EE::') + end + end end diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb index 83d431a7458..e68c9850f6b 100644 --- a/spec/lib/gitlab/encoding_helper_spec.rb +++ b/spec/lib/gitlab/encoding_helper_spec.rb @@ -161,6 +161,11 @@ describe Gitlab::EncodingHelper do 'removes invalid bytes from ASCII-8bit encoded multibyte string.', "Lorem ipsum\xC3\n dolor sit amet, xy\xC3\xA0y\xC3\xB9abcd\xC3\xB9efg".force_encoding('ASCII-8BIT'), "Lorem ipsum\n dolor sit amet, xyàyùabcdùefg" + ], + [ + 'handles UTF-16BE encoded strings', + "\xFE\xFF\x00\x41".force_encoding('ASCII-8BIT'), # An "A" prepended with UTF-16 BOM + "\xEF\xBB\xBFA" # An "A" prepended with UTF-8 BOM ] ].each do |description, test_string, xpect| it description do diff --git a/spec/lib/gitlab/git/attributes_parser_spec.rb b/spec/lib/gitlab/git/attributes_parser_spec.rb index 323334e99a5..2d103123998 100644 --- a/spec/lib/gitlab/git/attributes_parser_spec.rb +++ b/spec/lib/gitlab/git/attributes_parser_spec.rb @@ -66,18 +66,6 @@ describe Gitlab::Git::AttributesParser, seed_helper: true do end end - context 'when attributes data is a file handle' do - subject do - File.open(attributes_path, 'r') do |file_handle| - described_class.new(file_handle) - end - end - - it 'returns the attributes as a Hash' do - expect(subject.attributes('test.txt')).to eq({ 'text' => true }) - end - end - context 'when attributes data is nil' do let(:data) { nil } diff --git a/spec/lib/gitlab/git/conflict/file_spec.rb b/spec/lib/gitlab/git/conflict/file_spec.rb new file mode 100644 index 00000000000..afed6c32af6 --- /dev/null +++ b/spec/lib/gitlab/git/conflict/file_spec.rb @@ -0,0 +1,50 @@ +# coding: utf-8 +require 'spec_helper' + +describe Gitlab::Git::Conflict::File do + let(:conflict) { { theirs: { path: 'foo', mode: 33188 }, ours: { path: 'foo', mode: 33188 } } } + let(:invalid_content) { described_class.new(nil, nil, conflict, "a\xC4\xFC".force_encoding(Encoding::ASCII_8BIT)) } + let(:valid_content) { described_class.new(nil, nil, conflict, "Espa\xC3\xB1a".force_encoding(Encoding::ASCII_8BIT)) } + + describe '#lines' do + context 'when the content contains non-UTF-8 characters' do + it 'raises UnsupportedEncoding' do + expect { invalid_content.lines } + .to raise_error(described_class::UnsupportedEncoding) + end + end + + context 'when the content can be converted to UTF-8' do + it 'sets lines to the lines' do + expect(valid_content.lines).to eq([{ + full_line: 'España', + type: nil, + line_obj_index: 0, + line_old: 1, + line_new: 1 + }]) + end + + it 'sets the type to text' do + expect(valid_content.type).to eq('text') + end + end + end + + describe '#content' do + context 'when the content contains non-UTF-8 characters' do + it 'raises UnsupportedEncoding' do + expect { invalid_content.content } + .to raise_error(described_class::UnsupportedEncoding) + end + end + + context 'when the content can be converted to UTF-8' do + it 'returns a valid UTF-8 string' do + expect(valid_content.content).to eq('España') + expect(valid_content.content).to be_valid_encoding + expect(valid_content.content.encoding).to eq(Encoding::UTF_8) + end + end + end +end diff --git a/spec/lib/gitlab/git/conflict/parser_spec.rb 
b/spec/lib/gitlab/git/conflict/parser_spec.rb index 7b035a381f1..29a1702a1c6 100644 --- a/spec/lib/gitlab/git/conflict/parser_spec.rb +++ b/spec/lib/gitlab/git/conflict/parser_spec.rb @@ -212,13 +212,6 @@ CONFLICT .not_to raise_error end end - - context 'when the file contains non-UTF-8 characters' do - it 'raises UnsupportedEncoding' do - expect { parse_text("a\xC4\xFC".force_encoding(Encoding::ASCII_8BIT)) } - .to raise_error(Gitlab::Git::Conflict::Parser::UnsupportedEncoding) - end - end end end end diff --git a/spec/lib/gitlab/git/gitlab_projects_spec.rb b/spec/lib/gitlab/git/gitlab_projects_spec.rb index dfccc15a4f3..8b715d717c1 100644 --- a/spec/lib/gitlab/git/gitlab_projects_spec.rb +++ b/spec/lib/gitlab/git/gitlab_projects_spec.rb @@ -16,7 +16,7 @@ describe Gitlab::Git::GitlabProjects do let(:tmp_repos_path) { TestEnv.repos_path } let(:repo_name) { project.disk_path + '.git' } let(:tmp_repo_path) { File.join(tmp_repos_path, repo_name) } - let(:gl_projects) { build_gitlab_projects(tmp_repos_path, repo_name) } + let(:gl_projects) { build_gitlab_projects(TestEnv::REPOS_STORAGE, repo_name) } describe '#initialize' do it { expect(gl_projects.shard_path).to eq(tmp_repos_path) } @@ -223,11 +223,12 @@ describe Gitlab::Git::GitlabProjects do end describe '#fork_repository' do + let(:dest_repos) { TestEnv::REPOS_STORAGE } let(:dest_repos_path) { tmp_repos_path } let(:dest_repo_name) { File.join('@hashed', 'aa', 'bb', 'xyz.git') } let(:dest_repo) { File.join(dest_repos_path, dest_repo_name) } - subject { gl_projects.fork_repository(dest_repos_path, dest_repo_name) } + subject { gl_projects.fork_repository(dest_repos, dest_repo_name) } before do FileUtils.mkdir_p(dest_repos_path) @@ -268,7 +269,12 @@ describe Gitlab::Git::GitlabProjects do # that is not very straight-forward so I'm leaving this test here for now till # https://gitlab.com/gitlab-org/gitlab-ce/issues/41393 is fixed. 
context 'different storages' do - let(:dest_repos_path) { File.join(File.dirname(tmp_repos_path), 'alternative') } + let(:dest_repos) { 'alternative' } + let(:dest_repos_path) { File.join(File.dirname(tmp_repos_path), dest_repos) } + + before do + stub_storage_settings(dest_repos => { 'path' => dest_repos_path }) + end it 'forks the repo' do is_expected.to be_truthy diff --git a/spec/lib/gitlab/git/gitmodules_parser_spec.rb b/spec/lib/gitlab/git/gitmodules_parser_spec.rb index 143aa2218c9..6fd2b33486b 100644 --- a/spec/lib/gitlab/git/gitmodules_parser_spec.rb +++ b/spec/lib/gitlab/git/gitmodules_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe Gitlab::Git::GitmodulesParser do it 'should parse a .gitmodules file correctly' do - parser = described_class.new(<<-'GITMODULES'.strip_heredoc) + data = <<~GITMODULES [submodule "vendor/libgit2"] path = vendor/libgit2 [submodule "vendor/libgit2"] @@ -16,6 +16,7 @@ describe Gitlab::Git::GitmodulesParser do url = https://example.com/another/project GITMODULES + parser = described_class.new(data.gsub("\n", "\r\n")) modules = parser.parse expect(modules).to eq({ diff --git a/spec/lib/gitlab/git/env_spec.rb b/spec/lib/gitlab/git/hook_env_spec.rb index 03836d49518..e6aa5ad8c90 100644 --- a/spec/lib/gitlab/git/env_spec.rb +++ b/spec/lib/gitlab/git/hook_env_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' -describe Gitlab::Git::Env do +describe Gitlab::Git::HookEnv do + let(:gl_repository) { 'project-123' } + describe ".set" do context 'with RequestStore.store disabled' do before do @@ -8,9 +10,9 @@ describe Gitlab::Git::Env do end it 'does not store anything' do - described_class.set(GIT_OBJECT_DIRECTORY: 'foo') + described_class.set(gl_repository, GIT_OBJECT_DIRECTORY_RELATIVE: 'foo') - expect(described_class.all).to be_empty + expect(described_class.all(gl_repository)).to be_empty end end @@ -21,15 +23,19 @@ describe Gitlab::Git::Env do it 'whitelist some `GIT_*` variables and stores them using RequestStore' do described_class.set( - GIT_OBJECT_DIRECTORY: 'foo', - GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar', + gl_repository, + GIT_OBJECT_DIRECTORY_RELATIVE: 'foo', + GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: 'bar', GIT_EXEC_PATH: 'baz', PATH: '~/.bin:/bin') - expect(described_class[:GIT_OBJECT_DIRECTORY]).to eq('foo') - expect(described_class[:GIT_ALTERNATE_OBJECT_DIRECTORIES]).to eq('bar') - expect(described_class[:GIT_EXEC_PATH]).to be_nil - expect(described_class[:bar]).to be_nil + git_env = described_class.all(gl_repository) + + expect(git_env[:GIT_OBJECT_DIRECTORY_RELATIVE]).to eq('foo') + expect(git_env[:GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE]).to eq('bar') + expect(git_env[:GIT_EXEC_PATH]).to be_nil + expect(git_env[:PATH]).to be_nil + expect(git_env[:bar]).to be_nil end end end @@ -39,14 +45,15 @@ describe Gitlab::Git::Env do before do allow(RequestStore).to receive(:active?).and_return(true) described_class.set( - GIT_OBJECT_DIRECTORY: 'foo', - GIT_ALTERNATE_OBJECT_DIRECTORIES: ['bar']) + gl_repository, + GIT_OBJECT_DIRECTORY_RELATIVE: 'foo', + GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: ['bar']) end it 'returns an env hash' do - expect(described_class.all).to eq({ - 'GIT_OBJECT_DIRECTORY' => 'foo', - 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => ['bar'] + expect(described_class.all(gl_repository)).to eq({ + 'GIT_OBJECT_DIRECTORY_RELATIVE' => 'foo', + 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => ['bar'] }) end end @@ -56,8 +63,8 @@ describe Gitlab::Git::Env do context 'with RequestStore.store enabled' do using RSpec::Parameterized::TableSyntax 
- let(:key) { 'GIT_OBJECT_DIRECTORY' } - subject { described_class.to_env_hash } + let(:key) { 'GIT_OBJECT_DIRECTORY_RELATIVE' } + subject { described_class.to_env_hash(gl_repository) } where(:input, :output) do nil | nil @@ -70,7 +77,7 @@ describe Gitlab::Git::Env do with_them do before do allow(RequestStore).to receive(:active?).and_return(true) - described_class.set(key.to_sym => input) + described_class.set(gl_repository, key.to_sym => input) end it 'puts the right value in the hash' do @@ -84,47 +91,25 @@ describe Gitlab::Git::Env do end end - describe ".[]" do - context 'with RequestStore.store enabled' do - before do - allow(RequestStore).to receive(:active?).and_return(true) - end - - before do - described_class.set( - GIT_OBJECT_DIRECTORY: 'foo', - GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar') - end - - it 'returns a stored value for an existing key' do - expect(described_class[:GIT_OBJECT_DIRECTORY]).to eq('foo') - end - - it 'returns nil for an non-existing key' do - expect(described_class[:foo]).to be_nil - end - end - end - describe 'thread-safety' do context 'with RequestStore.store enabled' do before do allow(RequestStore).to receive(:active?).and_return(true) - described_class.set(GIT_OBJECT_DIRECTORY: 'foo') + described_class.set(gl_repository, GIT_OBJECT_DIRECTORY_RELATIVE: 'foo') end it 'is thread-safe' do another_thread = Thread.new do - described_class.set(GIT_OBJECT_DIRECTORY: 'bar') + described_class.set(gl_repository, GIT_OBJECT_DIRECTORY_RELATIVE: 'bar') Thread.stop - described_class[:GIT_OBJECT_DIRECTORY] + described_class.all(gl_repository)[:GIT_OBJECT_DIRECTORY_RELATIVE] end # Ensure another_thread runs first sleep 0.1 until another_thread.stop? - expect(described_class[:GIT_OBJECT_DIRECTORY]).to eq('foo') + expect(described_class.all(gl_repository)[:GIT_OBJECT_DIRECTORY_RELATIVE]).to eq('foo') another_thread.run expect(another_thread.value).to eq('bar') diff --git a/spec/lib/gitlab/git/info_attributes_spec.rb b/spec/lib/gitlab/git/info_attributes_spec.rb deleted file mode 100644 index ea84909c3e0..00000000000 --- a/spec/lib/gitlab/git/info_attributes_spec.rb +++ /dev/null @@ -1,43 +0,0 @@ -require 'spec_helper' - -describe Gitlab::Git::InfoAttributes, seed_helper: true do - let(:path) do - File.join(SEED_STORAGE_PATH, 'with-git-attributes.git') - end - - subject { described_class.new(path) } - - describe '#attributes' do - context 'using a path with attributes' do - it 'returns the attributes as a Hash' do - expect(subject.attributes('test.txt')).to eq({ 'text' => true }) - end - - it 'returns an empty Hash for a defined path without attributes' do - expect(subject.attributes('bla/bla.txt')).to eq({}) - end - end - end - - describe '#parser' do - it 'parses a file with entries' do - expect(subject.patterns).to be_an_instance_of(Hash) - expect(subject.patterns["/*.txt"]).to eq({ 'text' => true }) - end - - it 'does not parse anything when the attributes file does not exist' do - expect(File).to receive(:exist?) 
- .with(File.join(path, 'info/attributes')) - .and_return(false) - - expect(subject.patterns).to eq({}) - end - - it 'does not parse attributes files with unsupported encoding' do - path = File.join(SEED_STORAGE_PATH, 'with-invalid-git-attributes.git') - subject = described_class.new(path) - - expect(subject.patterns).to eq({}) - end - end -end diff --git a/spec/lib/gitlab/git/raw_diff_change_spec.rb b/spec/lib/gitlab/git/raw_diff_change_spec.rb new file mode 100644 index 00000000000..eedde34534f --- /dev/null +++ b/spec/lib/gitlab/git/raw_diff_change_spec.rb @@ -0,0 +1,66 @@ +require 'spec_helper' + +describe Gitlab::Git::RawDiffChange do + let(:raw_change) { } + let(:change) { described_class.new(raw_change) } + + context 'bad input' do + let(:raw_change) { 'foo' } + + it 'does not set most of the attrs' do + expect(change.blob_id).to eq('foo') + expect(change.operation).to eq(:unknown) + expect(change.old_path).to be_blank + expect(change.new_path).to be_blank + expect(change.blob_size).to be_blank + end + end + + context 'adding a file' do + let(:raw_change) { '93e123ac8a3e6a0b600953d7598af629dec7b735 59 A bar/branch-test.txt' } + + it 'initialize the proper attrs' do + expect(change.operation).to eq(:added) + expect(change.old_path).to be_blank + expect(change.new_path).to eq('bar/branch-test.txt') + expect(change.blob_id).to be_present + expect(change.blob_size).to be_present + end + end + + context 'renaming a file' do + let(:raw_change) { "85bc2f9753afd5f4fc5d7c75f74f8d526f26b4f3 107 R060\tfiles/js/commit.js.coffee\tfiles/js/commit.coffee" } + + it 'initialize the proper attrs' do + expect(change.operation).to eq(:renamed) + expect(change.old_path).to eq('files/js/commit.js.coffee') + expect(change.new_path).to eq('files/js/commit.coffee') + expect(change.blob_id).to be_present + expect(change.blob_size).to be_present + end + end + + context 'modifying a file' do + let(:raw_change) { 'c60514b6d3d6bf4bec1030f70026e34dfbd69ad5 824 M README.md' } + + it 'initialize the proper attrs' do + expect(change.operation).to eq(:modified) + expect(change.old_path).to eq('README.md') + expect(change.new_path).to eq('README.md') + expect(change.blob_id).to be_present + expect(change.blob_size).to be_present + end + end + + context 'deleting a file' do + let(:raw_change) { '60d7a906c2fd9e4509aeb1187b98d0ea7ce827c9 15364 D files/.DS_Store' } + + it 'initialize the proper attrs' do + expect(change.operation).to eq(:deleted) + expect(change.old_path).to eq('files/.DS_Store') + expect(change.new_path).to be_nil + expect(change.blob_id).to be_present + expect(change.blob_size).to be_present + end + end +end diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index 54ada3e423f..5acf40ea5ce 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -120,7 +120,7 @@ describe Gitlab::Git::Repository, seed_helper: true do describe 'alternates keyword argument' do context 'with no Git env stored' do before do - allow(Gitlab::Git::Env).to receive(:all).and_return({}) + allow(Gitlab::Git::HookEnv).to receive(:all).and_return({}) end it "is passed an empty array" do @@ -132,7 +132,7 @@ describe Gitlab::Git::Repository, seed_helper: true do context 'with absolute and relative Git object dir envvars stored' do before do - allow(Gitlab::Git::Env).to receive(:all).and_return({ + allow(Gitlab::Git::HookEnv).to receive(:all).and_return({ 'GIT_OBJECT_DIRECTORY_RELATIVE' => './objects/foo', 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => 
['./objects/bar', './objects/baz'], 'GIT_OBJECT_DIRECTORY' => 'ignored', @@ -148,22 +148,6 @@ describe Gitlab::Git::Repository, seed_helper: true do repository.rugged end end - - context 'with only absolute Git object dir envvars stored' do - before do - allow(Gitlab::Git::Env).to receive(:all).and_return({ - 'GIT_OBJECT_DIRECTORY' => 'foo', - 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => %w[bar baz], - 'GIT_OTHER' => 'another_env' - }) - end - - it "is passed the absolute object dir envvars as is" do - expect(Rugged::Repository).to receive(:new).with(repository.path, alternates: %w[foo bar baz]) - - repository.rugged - end - end end end @@ -263,38 +247,44 @@ describe Gitlab::Git::Repository, seed_helper: true do end it 'returns parameterised string for a ref containing slashes' do - prefix = repository.archive_prefix('test/branch', 'SHA') + prefix = repository.archive_prefix('test/branch', 'SHA', append_sha: nil) expect(prefix).to eq("#{project_name}-test-branch-SHA") end it 'returns correct string for a ref containing dots' do - prefix = repository.archive_prefix('test.branch', 'SHA') + prefix = repository.archive_prefix('test.branch', 'SHA', append_sha: nil) expect(prefix).to eq("#{project_name}-test.branch-SHA") end + + it 'returns string with sha when append_sha is false' do + prefix = repository.archive_prefix('test.branch', 'SHA', append_sha: false) + + expect(prefix).to eq("#{project_name}-test.branch") + end end describe '#archive' do - let(:metadata) { repository.archive_metadata('master', '/tmp') } + let(:metadata) { repository.archive_metadata('master', '/tmp', append_sha: true) } it_should_behave_like 'archive check', '.tar.gz' end describe '#archive_zip' do - let(:metadata) { repository.archive_metadata('master', '/tmp', 'zip') } + let(:metadata) { repository.archive_metadata('master', '/tmp', 'zip', append_sha: true) } it_should_behave_like 'archive check', '.zip' end describe '#archive_bz2' do - let(:metadata) { repository.archive_metadata('master', '/tmp', 'tbz2') } + let(:metadata) { repository.archive_metadata('master', '/tmp', 'tbz2', append_sha: true) } it_should_behave_like 'archive check', '.tar.bz2' end describe '#archive_fallback' do - let(:metadata) { repository.archive_metadata('master', '/tmp', 'madeup') } + let(:metadata) { repository.archive_metadata('master', '/tmp', 'madeup', append_sha: true) } it_should_behave_like 'archive check', '.tar.gz' end @@ -480,9 +470,20 @@ describe Gitlab::Git::Repository, seed_helper: true do FileUtils.rm_rf(heads_dir) FileUtils.mkdir_p(heads_dir) + repository.expire_has_local_branches_cache expect(repository.has_local_branches?).to eq(false) end end + + context 'memoizes the value' do + it 'returns true' do + expect(repository).to receive(:uncached_has_local_branches?).once.and_call_original + + 2.times do + expect(repository.has_local_branches?).to eq(true) + end + end + end end context 'with gitaly' do @@ -604,17 +605,20 @@ describe Gitlab::Git::Repository, seed_helper: true do shared_examples 'returning the right branches' do let(:head_id) { repository.rugged.head.target.oid } let(:new_branch) { head_id } + let(:utf8_branch) { 'branch-é' } before do repository.create_branch(new_branch, 'master') + repository.create_branch(utf8_branch, 'master') end after do repository.delete_branch(new_branch) + repository.delete_branch(utf8_branch) end it 'displays that branch' do - expect(repository.branch_names_contains_sha(head_id)).to include('master', new_branch) + expect(repository.branch_names_contains_sha(head_id)).to include('master', 
new_branch, utf8_branch) end end @@ -1050,6 +1054,44 @@ describe Gitlab::Git::Repository, seed_helper: true do it { is_expected.to eq(17) } end + describe '#raw_changes_between' do + let(:old_rev) { } + let(:new_rev) { } + let(:changes) { repository.raw_changes_between(old_rev, new_rev) } + + context 'initial commit' do + let(:old_rev) { Gitlab::Git::BLANK_SHA } + let(:new_rev) { '1a0b36b3cdad1d2ee32457c102a8c0b7056fa863' } + + it 'returns the changes' do + expect(changes).to be_present + expect(changes.size).to eq(3) + end + end + + context 'with an invalid rev' do + let(:old_rev) { 'foo' } + let(:new_rev) { 'bar' } + + it 'returns an error' do + expect { changes }.to raise_error(Gitlab::Git::Repository::GitError) + end + end + + context 'with valid revs' do + let(:old_rev) { 'fa1b1e6c004a68b7d8763b86455da9e6b23e36d6' } + let(:new_rev) { '4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6' } + + it 'returns the changes' do + expect(changes.size).to eq(9) + expect(changes.first.operation).to eq(:modified) + expect(changes.first.new_path).to eq('.gitmodules') + expect(changes.last.operation).to eq(:added) + expect(changes.last.new_path).to eq('files/lfs/picture-invalid.png') + end + end + end + describe '#merge_base' do shared_examples '#merge_base' do where(:from, :to, :result) do @@ -2191,6 +2233,55 @@ describe Gitlab::Git::Repository, seed_helper: true do end end + describe '#checksum' do + shared_examples 'calculating checksum' do + it 'calculates the checksum for non-empty repo' do + expect(repository.checksum).to eq '54f21be4c32c02f6788d72207fa03ad3bce725e4' + end + + it 'returns 0000000000000000000000000000000000000000 for an empty repo' do + FileUtils.rm_rf(File.join(storage_path, 'empty-repo.git')) + + system(git_env, *%W(#{Gitlab.config.git.bin_path} init --bare empty-repo.git), + chdir: storage_path, + out: '/dev/null', + err: '/dev/null') + + empty_repo = described_class.new('default', 'empty-repo.git', '') + + expect(empty_repo.checksum).to eq '0000000000000000000000000000000000000000' + end + + it 'raises a no repository exception when there is no repo' do + broken_repo = described_class.new('default', 'a/path.git', '') + + expect { broken_repo.checksum }.to raise_error(Gitlab::Git::Repository::NoRepository) + end + end + + context 'when calculate_checksum Gitaly feature is enabled' do + it_behaves_like 'calculating checksum' + end + + context 'when calculate_checksum Gitaly feature is disabled', :disable_gitaly do + it_behaves_like 'calculating checksum' + + describe 'when storage is broken', :broken_storage do + it 'raises a storage exception when storage is not available' do + broken_repo = described_class.new('broken', 'a/path.git', '') + + expect { broken_repo.rugged }.to raise_error(Gitlab::Git::Storage::Inaccessible) + end + end + + it "raises a Gitlab::Git::Repository::Failure error if the `popen` call to git returns a non-zero exit code" do + allow(repository).to receive(:popen).and_return(['output', nil]) + + expect { repository.checksum }.to raise_error Gitlab::Git::Repository::ChecksumError + end + end + end + context 'gitlab_projects commands' do let(:gitlab_projects) { repository.gitlab_projects } let(:timeout) { Gitlab.config.gitlab_shell.git_timeout } @@ -2264,6 +2355,39 @@ describe Gitlab::Git::Repository, seed_helper: true do end end + describe '#clean_stale_repository_files' do + let(:worktree_path) { File.join(repository.path, 'worktrees', 'delete-me') } + + it 'cleans up the files' do + repository.with_worktree(worktree_path, 'master', env: ENV) do + 
FileUtils.touch(worktree_path, mtime: Time.now - 8.hours) + # git rev-list --all will fail in git 2.16 if HEAD is pointing to a non-existent object, + # but the HEAD must be 40 characters long or git will ignore it. + File.write(File.join(worktree_path, 'HEAD'), Gitlab::Git::BLANK_SHA) + + # git 2.16 fails with "fatal: bad object HEAD" + expect { repository.rev_list(including: :all) }.to raise_error(Gitlab::Git::Repository::GitError) + + repository.clean_stale_repository_files + + expect { repository.rev_list(including: :all) }.not_to raise_error + expect(File.exist?(worktree_path)).to be_falsey + end + end + + it 'increments a counter upon an error' do + expect(repository.gitaly_repository_client).to receive(:cleanup).and_raise(Gitlab::Git::CommandError) + + counter = double(:counter) + + expect(counter).to receive(:increment) + expect(Gitlab::Metrics).to receive(:counter).with(:failed_repository_cleanup_total, + 'Number of failed repository cleanup events').and_return(counter) + + repository.clean_stale_repository_files + end + end + describe '#delete_remote_branches' do subject do repository.delete_remote_branches('downstream-remote', ['master']) diff --git a/spec/lib/gitlab/git/rev_list_spec.rb b/spec/lib/gitlab/git/rev_list_spec.rb index 4e0ee206219..32ec1e029c8 100644 --- a/spec/lib/gitlab/git/rev_list_spec.rb +++ b/spec/lib/gitlab/git/rev_list_spec.rb @@ -3,17 +3,6 @@ require 'spec_helper' describe Gitlab::Git::RevList do let(:repository) { create(:project, :repository).repository.raw } let(:rev_list) { described_class.new(repository, newrev: 'newrev') } - let(:env_hash) do - { - 'GIT_OBJECT_DIRECTORY' => 'foo', - 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar' - } - end - let(:command_env) { { 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'foo:bar' } } - - before do - allow(Gitlab::Git::Env).to receive(:all).and_return(env_hash) - end def args_for_popen(args_list) [Gitlab.config.git.bin_path, 'rev-list', *args_list] @@ -23,7 +12,7 @@ describe Gitlab::Git::RevList do params = [ args_for_popen(additional_args), repository.path, - command_env, + {}, hash_including(lazy_block: with_lazy_block ? 
anything : nil) ] diff --git a/spec/lib/gitlab/git/wiki_spec.rb b/spec/lib/gitlab/git/wiki_spec.rb index 761f7732036..722d697c28e 100644 --- a/spec/lib/gitlab/git/wiki_spec.rb +++ b/spec/lib/gitlab/git/wiki_spec.rb @@ -30,7 +30,7 @@ describe Gitlab::Git::Wiki do end def commit_details(name) - Gitlab::Git::Wiki::CommitDetails.new(user.name, user.email, "created page #{name}") + Gitlab::Git::Wiki::CommitDetails.new(user.id, user.username, user.name, user.email, "created page #{name}") end def destroy_page(title, dir = '') diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb index 6f07e423c1b..6c625596605 100644 --- a/spec/lib/gitlab/git_access_spec.rb +++ b/spec/lib/gitlab/git_access_spec.rb @@ -10,12 +10,13 @@ describe Gitlab::GitAccess do let(:protocol) { 'ssh' } let(:authentication_abilities) { %i[read_project download_code push_code] } let(:redirected_path) { nil } + let(:auth_result_type) { nil } let(:access) do described_class.new(actor, project, protocol, authentication_abilities: authentication_abilities, namespace_path: namespace_path, project_path: project_path, - redirected_path: redirected_path) + redirected_path: redirected_path, auth_result_type: auth_result_type) end let(:changes) { '_any' } @@ -45,6 +46,7 @@ describe Gitlab::GitAccess do before do disable_protocol('http') + project.add_master(user) end it 'blocks http push and pull' do @@ -53,6 +55,26 @@ describe Gitlab::GitAccess do expect { pull_access_check }.to raise_unauthorized('Git access over HTTP is not allowed') end end + + context 'when request is made from CI' do + let(:auth_result_type) { :build } + + it "doesn't block http pull" do + aggregate_failures do + expect { pull_access_check }.not_to raise_unauthorized('Git access over HTTP is not allowed') + end + end + + context 'when legacy CI credentials are used' do + let(:auth_result_type) { :ci } + + it "doesn't block http pull" do + aggregate_failures do + expect { pull_access_check }.not_to raise_unauthorized('Git access over HTTP is not allowed') + end + end + end + end end end @@ -123,6 +145,33 @@ describe Gitlab::GitAccess do expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:auth_upload]) end end + + context 'when actor is DeployToken' do + let(:actor) { create(:deploy_token, projects: [project]) } + + context 'when DeployToken is active and belongs to project' do + it 'allows pull access' do + expect { pull_access_check }.not_to raise_error + end + + it 'blocks the push' do + expect { push_access_check }.to raise_unauthorized(described_class::ERROR_MESSAGES[:upload]) + end + end + + context 'when DeployToken does not belong to project' do + let(:another_project) { create(:project) } + let(:actor) { create(:deploy_token, projects: [another_project]) } + + it 'blocks pull access' do + expect { pull_access_check }.to raise_not_found + end + + it 'blocks the push' do + expect { push_access_check }.to raise_not_found + end + end + end end context 'when actor is nil' do @@ -240,14 +289,21 @@ describe Gitlab::GitAccess do end shared_examples 'check_project_moved' do - it 'enqueues a redirected message' do + it 'enqueues a redirected message for pushing' do push_access_check expect(Gitlab::Checks::ProjectMoved.fetch_message(user.id, project.id)).not_to be_nil end + + it 'allows push and pull access' do + aggregate_failures do + expect { push_access_check }.not_to raise_error + expect { pull_access_check }.not_to raise_error + end + end end - describe '#check_project_moved!', 
:clean_gitlab_redis_shared_state do + describe '#add_project_moved_message!', :clean_gitlab_redis_shared_state do before do project.add_master(user) end @@ -261,62 +317,18 @@ describe Gitlab::GitAccess do end end - context 'when a permanent redirect and ssh protocol' do + context 'with a redirect and ssh protocol' do let(:redirected_path) { 'some/other-path' } - before do - allow_any_instance_of(Gitlab::Checks::ProjectMoved).to receive(:permanent_redirect?).and_return(true) - end - - it 'allows push and pull access' do - aggregate_failures do - expect { push_access_check }.not_to raise_error - end - end - it_behaves_like 'check_project_moved' end - context 'with a permanent redirect and http protocol' do + context 'with a redirect and http protocol' do let(:redirected_path) { 'some/other-path' } let(:protocol) { 'http' } - before do - allow_any_instance_of(Gitlab::Checks::ProjectMoved).to receive(:permanent_redirect?).and_return(true) - end - - it 'allows_push and pull access' do - aggregate_failures do - expect { push_access_check }.not_to raise_error - end - end - it_behaves_like 'check_project_moved' end - - context 'with a temporal redirect and ssh protocol' do - let(:redirected_path) { 'some/other-path' } - - it 'blocks push and pull access' do - aggregate_failures do - expect { push_access_check }.to raise_error(described_class::ProjectMovedError, /Project '#{redirected_path}' was moved to '#{project.full_path}'/) - expect { push_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.ssh_url_to_repo}/) - - expect { pull_access_check }.to raise_error(described_class::ProjectMovedError, /Project '#{redirected_path}' was moved to '#{project.full_path}'/) - expect { pull_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.ssh_url_to_repo}/) - end - end - end - - context 'with a temporal redirect and http protocol' do - let(:redirected_path) { 'some/other-path' } - let(:protocol) { 'http' } - - it 'does not allow to push and pull access' do - expect { push_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.http_url_to_repo}/) - expect { pull_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.http_url_to_repo}/) - end - end end describe '#check_authentication_abilities!' 
do @@ -609,6 +621,41 @@ describe Gitlab::GitAccess do end end + describe 'deploy token permissions' do + let(:deploy_token) { create(:deploy_token) } + let(:actor) { deploy_token } + + context 'pull code' do + context 'when project is authorized' do + before do + deploy_token.projects << project + end + + it { expect { pull_access_check }.not_to raise_error } + end + + context 'when unauthorized' do + context 'from public project' do + let(:project) { create(:project, :public, :repository) } + + it { expect { pull_access_check }.not_to raise_error } + end + + context 'from internal project' do + let(:project) { create(:project, :internal, :repository) } + + it { expect { pull_access_check }.to raise_not_found } + end + + context 'from private project' do + let(:project) { create(:project, :private, :repository) } + + it { expect { pull_access_check }.to raise_not_found } + end + end + end + end + describe 'build authentication_abilities permissions' do let(:authentication_abilities) { build_authentication_abilities } @@ -870,6 +917,20 @@ describe Gitlab::GitAccess do admin: { push_protected_branch: false, push_all: false, merge_into_protected_branch: false })) end end + + context 'when pushing to a project' do + let(:project) { create(:project, :public, :repository) } + let(:changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2ab refs/heads/wow" } + + before do + project.add_developer(user) + end + + it 'cleans up the files' do + expect(project.repository).to receive(:clean_stale_repository_files).and_call_original + expect { push_access_check }.not_to raise_error + end + end end describe 'build authentication abilities' do diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb index 9be3fa633a7..7951cbe7b1d 100644 --- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb @@ -33,7 +33,7 @@ describe Gitlab::GitalyClient::CommitService do initial_commit = project.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863').raw request = Gitaly::CommitDiffRequest.new( repository: repository_message, - left_commit_id: '4b825dc642cb6eb9a060e54bf8d69288fbee4904', + left_commit_id: Gitlab::Git::EMPTY_TREE_ID, right_commit_id: initial_commit.id, collapse_diffs: true, enforce_limits: true, @@ -77,7 +77,7 @@ describe Gitlab::GitalyClient::CommitService do initial_commit = project.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863') request = Gitaly::CommitDeltaRequest.new( repository: repository_message, - left_commit_id: '4b825dc642cb6eb9a060e54bf8d69288fbee4904', + left_commit_id: Gitlab::Git::EMPTY_TREE_ID, right_commit_id: initial_commit.id ) @@ -90,7 +90,7 @@ describe Gitlab::GitalyClient::CommitService do describe '#between' do let(:from) { 'master' } - let(:to) { '4b825dc642cb6eb9a060e54bf8d69288fbee4904' } + let(:to) { Gitlab::Git::EMPTY_TREE_ID } it 'sends an RPC request' do request = Gitaly::CommitsBetweenRequest.new( @@ -155,7 +155,7 @@ describe Gitlab::GitalyClient::CommitService do end describe '#find_commit' do - let(:revision) { '4b825dc642cb6eb9a060e54bf8d69288fbee4904' } + let(:revision) { Gitlab::Git::EMPTY_TREE_ID } it 'sends an RPC request' do request = Gitaly::FindCommitRequest.new( repository: repository_message, revision: revision diff --git a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb index 872377c93d8..f03c7e3f04b 100644 --- a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb +++ 
b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb @@ -58,4 +58,14 @@ describe Gitlab::GitalyClient::RemoteService do client.update_remote_mirror(ref_name, only_branches_matching) end end + + describe '.exists?' do + context "when the remote doesn't exist" do + let(:url) { 'https://gitlab.com/gitlab-org/ik-besta-niet-of-ik-word-geplaagd.git' } + + it 'returns false' do + expect(described_class.exists?(url)).to be(false) + end + end + end end diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb index 1c41dbcb9ef..ecd8657c406 100644 --- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb @@ -17,6 +17,16 @@ describe Gitlab::GitalyClient::RepositoryService do end end + describe '#cleanup' do + it 'sends a cleanup message' do + expect_any_instance_of(Gitaly::RepositoryService::Stub) + .to receive(:cleanup) + .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) + + client.cleanup + end + end + describe '#garbage_collect' do it 'sends a garbage_collect message' do expect_any_instance_of(Gitaly::RepositoryService::Stub) @@ -74,6 +84,17 @@ describe Gitlab::GitalyClient::RepositoryService do end end + describe '#info_attributes' do + it 'reads the info attributes' do + expect_any_instance_of(Gitaly::RepositoryService::Stub) + .to receive(:get_info_attributes) + .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) + .and_return([]) + + client.info_attributes + end + end + describe '#has_local_branches?' do it 'sends a has_local_branches message' do expect_any_instance_of(Gitaly::RepositoryService::Stub) @@ -124,4 +145,26 @@ describe Gitlab::GitalyClient::RepositoryService do client.squash_in_progress?(squash_id) end end + + describe '#calculate_checksum' do + it 'sends a calculate_checksum message' do + expect_any_instance_of(Gitaly::RepositoryService::Stub) + .to receive(:calculate_checksum) + .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) + .and_return(double(checksum: 0)) + + client.calculate_checksum + end + end + + describe '#create_from_snapshot' do + it 'sends a create_repository_from_snapshot message' do + expect_any_instance_of(Gitaly::RepositoryService::Stub) + .to receive(:create_repository_from_snapshot) + .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) + .and_return(double) + + client.create_from_snapshot('http://example.com?wiki=1', 'Custom xyz') + end + end end diff --git a/spec/lib/gitlab/gitaly_client/util_spec.rb b/spec/lib/gitlab/gitaly_client/util_spec.rb index d1e0136f8c1..550db6db6d9 100644 --- a/spec/lib/gitlab/gitaly_client/util_spec.rb +++ b/spec/lib/gitlab/gitaly_client/util_spec.rb @@ -7,16 +7,19 @@ describe Gitlab::GitalyClient::Util do let(:gl_repository) { 'project-1' } let(:git_object_directory) { '.git/objects' } let(:git_alternate_object_directory) { ['/dir/one', '/dir/two'] } + let(:git_env) do + { + 'GIT_OBJECT_DIRECTORY_RELATIVE' => git_object_directory, + 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => git_alternate_object_directory + } + end subject do described_class.repository(repository_storage, relative_path, gl_repository) end it 'creates a Gitaly::Repository with the given data' do - allow(Gitlab::Git::Env).to receive(:[]).with('GIT_OBJECT_DIRECTORY_RELATIVE') - .and_return(git_object_directory) - allow(Gitlab::Git::Env).to receive(:[]).with('GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE') - 
.and_return(git_alternate_object_directory) + allow(Gitlab::Git::HookEnv).to receive(:all).with(gl_repository).and_return(git_env) expect(subject).to be_a(Gitaly::Repository) expect(subject.storage_name).to eq(repository_storage) diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb index 5bedfc79dd3..879b1d9fb0f 100644 --- a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb @@ -9,7 +9,7 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do :project, import_url: 'foo.git', import_source: 'foo/bar', - repository_storage_path: 'foo', + repository_storage: 'foo', disk_path: 'foo', repository: repository, create_wiki: true @@ -38,8 +38,12 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do expect(project) .to receive(:wiki_repository_exists?) .and_return(false) + expect(Gitlab::GitalyClient::RemoteService) + .to receive(:exists?) + .with("foo.wiki.git") + .and_return(true) - expect(importer.import_wiki?).to eq(true) + expect(importer.import_wiki?).to be(true) end it 'returns false if the GitHub wiki is disabled' do diff --git a/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb b/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb index 4c1ca4349ea..9dcf272d25e 100644 --- a/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb +++ b/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb @@ -26,7 +26,7 @@ describe Gitlab::HealthChecks::FsShardsCheck do let(:storages_paths) do { - default: { path: tmp_dir } + default: Gitlab::GitalyClient::StorageSettings.new('path' => tmp_dir) }.with_indifferent_access end @@ -56,7 +56,7 @@ describe Gitlab::HealthChecks::FsShardsCheck do context 'storage points to not existing folder' do let(:storages_paths) do { - default: { path: 'tmp/this/path/doesnt/exist' } + default: Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/this/path/doesnt/exist') }.with_indifferent_access end @@ -102,7 +102,7 @@ describe Gitlab::HealthChecks::FsShardsCheck do context 'storage points to not existing folder' do let(:storages_paths) do { - default: { path: 'tmp/this/path/doesnt/exist' } + default: Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/this/path/doesnt/exist') }.with_indifferent_access end diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb new file mode 100644 index 00000000000..d0dadfa78da --- /dev/null +++ b/spec/lib/gitlab/http_spec.rb @@ -0,0 +1,49 @@ +require 'spec_helper' + +describe Gitlab::HTTP do + describe 'allow_local_requests_from_hooks_and_services is' do + before do + WebMock.stub_request(:get, /.*/).to_return(status: 200, body: 'Success') + end + + context 'disabled' do + before do + allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_hooks_and_services?).and_return(false) + end + + it 'deny requests to localhost' do + expect { described_class.get('http://localhost:3003') }.to raise_error(Gitlab::HTTP::BlockedUrlError) + end + + it 'deny requests to private network' do + expect { described_class.get('http://192.168.1.2:3003') }.to raise_error(Gitlab::HTTP::BlockedUrlError) + end + + context 'if allow_local_requests set to true' do + it 'override the global value and allow requests to localhost or private network' do + expect { described_class.get('http://localhost:3003', allow_local_requests: true) }.not_to raise_error + end + end + end + + context 'enabled' do + before do + 
allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_hooks_and_services?).and_return(true) + end + + it 'allow requests to localhost' do + expect { described_class.get('http://localhost:3003') }.not_to raise_error + end + + it 'allow requests to private network' do + expect { described_class.get('http://192.168.1.2:3003') }.not_to raise_error + end + + context 'if allow_local_requests set to false' do + it 'override the global value and ban requests to localhost or private network' do + expect { described_class.get('http://localhost:3003', allow_local_requests: false) }.to raise_error(Gitlab::HTTP::BlockedUrlError) + end + end + end + end +end diff --git a/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb b/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb new file mode 100644 index 00000000000..ed54d87de4a --- /dev/null +++ b/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb @@ -0,0 +1,104 @@ +require 'spec_helper' + +describe Gitlab::ImportExport::AfterExportStrategies::BaseAfterExportStrategy do + let!(:service) { described_class.new } + let!(:project) { create(:project, :with_export) } + let(:shared) { project.import_export_shared } + let!(:user) { create(:user) } + + describe '#execute' do + before do + allow(service).to receive(:strategy_execute) + end + + it 'returns if project exported file is not found' do + allow(project).to receive(:export_project_path).and_return(nil) + + expect(service).not_to receive(:strategy_execute) + + service.execute(user, project) + end + + it 'creates a lock file in the export dir' do + allow(service).to receive(:delete_after_export_lock) + + service.execute(user, project) + + expect(lock_path_exist?).to be_truthy + end + + context 'when the method succeeds' do + it 'removes the lock file' do + service.execute(user, project) + + expect(lock_path_exist?).to be_falsey + end + end + + context 'when the method fails' do + before do + allow(service).to receive(:strategy_execute).and_call_original + end + + context 'when validation fails' do + before do + allow(service).to receive(:invalid?).and_return(true) + end + + it 'does not create the lock file' do + expect(service).not_to receive(:create_or_update_after_export_lock) + + service.execute(user, project) + end + + it 'does not execute main logic' do + expect(service).not_to receive(:strategy_execute) + + service.execute(user, project) + end + + it 'logs validation errors in shared context' do + expect(service).to receive(:log_validation_errors) + + service.execute(user, project) + end + end + + context 'when an exception is raised' do + it 'removes the lock' do + expect { service.execute(user, project) }.to raise_error(NotImplementedError) + + expect(lock_path_exist?).to be_falsey + end + end + end + end + + describe '#log_validation_errors' do + it 'add the message to the shared context' do + errors = %w(test_message test_message2) + + allow(service).to receive(:invalid?).and_return(true) + allow(service.errors).to receive(:full_messages).and_return(errors) + + expect(shared).to receive(:add_error_message).twice.and_call_original + + service.execute(user, project) + + expect(shared.errors).to eq errors + end + end + + describe '#to_json' do + it 'adds the current strategy class to the serialized attributes' do + params = { param1: 1 } + result = params.merge(klass: described_class.to_s).to_json + + expect(described_class.new(params).to_json).to eq result + end + end + + 
def lock_path_exist?
+    File.exist?(described_class.lock_file_path(project))
+  end
+end
diff --git a/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb
new file mode 100644
index 00000000000..5fe57d9987b
--- /dev/null
+++ b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb
@@ -0,0 +1,36 @@
+require 'spec_helper'
+
+describe Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy do
+  let(:example_url) { 'http://www.example.com' }
+  let(:strategy) { subject.new(url: example_url, http_method: 'post') }
+  let!(:project) { create(:project, :with_export) }
+  let!(:user) { build(:user) }
+
+  subject { described_class }
+
+  describe 'validations' do
+    it 'only POST and PUT method allowed' do
+      %w(POST post PUT put).each do |method|
+        expect(subject.new(url: example_url, http_method: method)).to be_valid
+      end
+
+      expect(subject.new(url: example_url, http_method: 'whatever')).not_to be_valid
+    end
+
+    it 'only allows urls as upload urls' do
+      expect(subject.new(url: example_url)).to be_valid
+      expect(subject.new(url: 'whatever')).not_to be_valid
+    end
+  end
+
+  describe '#execute' do
+    it 'removes the exported project file after the upload' do
+      allow(strategy).to receive(:send_file)
+      allow(strategy).to receive(:handle_response_error)
+
+      expect(project).to receive(:remove_exported_project_file)
+
+      strategy.execute(user, project)
+    end
+  end
+end
diff --git a/spec/lib/gitlab/import_export/after_export_strategy_builder_spec.rb b/spec/lib/gitlab/import_export/after_export_strategy_builder_spec.rb
new file mode 100644
index 00000000000..bf727285a9f
--- /dev/null
+++ b/spec/lib/gitlab/import_export/after_export_strategy_builder_spec.rb
@@ -0,0 +1,29 @@
+require 'spec_helper'
+
+describe Gitlab::ImportExport::AfterExportStrategyBuilder do
+  let!(:strategies_namespace) { 'Gitlab::ImportExport::AfterExportStrategies' }
+
+  describe '.build!'
do + context 'when klass param is' do + it 'null it returns the default strategy' do + expect(described_class.build!(nil).class).to eq described_class.default_strategy + end + + it 'not a valid class it raises StrategyNotFoundError exception' do + expect { described_class.build!('Whatever') }.to raise_error(described_class::StrategyNotFoundError) + end + + it 'not a descendant of AfterExportStrategy' do + expect { described_class.build!('User') }.to raise_error(described_class::StrategyNotFoundError) + end + end + + it 'initializes strategy with attributes param' do + params = { param1: 1, param2: 2, param3: 3 } + + strategy = described_class.build!("#{strategies_namespace}::DownloadNotificationStrategy", params) + + params.each { |k, v| expect(strategy.public_send(k)).to eq v } + end + end +end diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index bece82e531a..897a5984782 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -18,6 +18,7 @@ issues: - metrics - timelogs - issue_assignees +- closed_by events: - author - project @@ -144,6 +145,9 @@ pipeline_schedule: - pipelines pipeline_schedule_variables: - pipeline_schedule +deploy_tokens: +- project_deploy_tokens +- projects deploy_keys: - user - deploy_keys_projects @@ -279,6 +283,9 @@ project: - lfs_file_locks - project_badges - source_of_merge_requests +- internal_ids +- project_deploy_tokens +- deploy_tokens award_emoji: - awardable - user diff --git a/spec/lib/gitlab/import_export/importer_spec.rb b/spec/lib/gitlab/import_export/importer_spec.rb new file mode 100644 index 00000000000..991e354f499 --- /dev/null +++ b/spec/lib/gitlab/import_export/importer_spec.rb @@ -0,0 +1,104 @@ +require 'spec_helper' + +describe Gitlab::ImportExport::Importer do + let(:user) { create(:user) } + let(:test_path) { "#{Dir.tmpdir}/importer_spec" } + let(:shared) { project.import_export_shared } + let(:project) { create(:project, import_source: File.join(test_path, 'exported-project.gz')) } + + subject(:importer) { described_class.new(project) } + + before do + allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(test_path) + FileUtils.mkdir_p(shared.export_path) + FileUtils.cp(Rails.root.join('spec', 'fixtures', 'exported-project.gz'), test_path) + allow(subject).to receive(:remove_import_file) + end + + after do + FileUtils.rm_rf(test_path) + end + + describe '#execute' do + it 'succeeds' do + importer.execute + + expect(shared.errors).to be_empty + end + + it 'extracts the archive' do + expect(Gitlab::ImportExport::FileImporter).to receive(:import).and_call_original + + importer.execute + end + + it 'checks the version' do + expect(Gitlab::ImportExport::VersionChecker).to receive(:check!).and_call_original + + importer.execute + end + + context 'all restores are executed' do + [ + Gitlab::ImportExport::AvatarRestorer, + Gitlab::ImportExport::RepoRestorer, + Gitlab::ImportExport::WikiRestorer, + Gitlab::ImportExport::UploadsRestorer, + Gitlab::ImportExport::LfsRestorer, + Gitlab::ImportExport::StatisticsRestorer + ].each do |restorer| + it "calls the #{restorer}" do + fake_restorer = double(restorer.to_s) + + expect(fake_restorer).to receive(:restore).and_return(true).at_least(1) + expect(restorer).to receive(:new).and_return(fake_restorer).at_least(1) + + importer.execute + end + end + + it 'restores the ProjectTree' do + expect(Gitlab::ImportExport::ProjectTreeRestorer).to receive(:new).and_call_original + + 
importer.execute + end + end + + context 'when project successfully restored' do + let!(:existing_project) { create(:project, namespace: user.namespace) } + let(:project) { create(:project, namespace: user.namespace, name: 'whatever', path: 'whatever') } + + before do + restorers = double + + allow(subject).to receive(:import_file).and_return(true) + allow(subject).to receive(:check_version!).and_return(true) + allow(subject).to receive(:restorers).and_return(restorers) + allow(restorers).to receive(:all?).and_return(true) + allow(project).to receive(:import_data).and_return(double(data: { 'original_path' => existing_project.path })) + end + + context 'when import_data' do + context 'has original_path' do + it 'overwrites existing project' do + expect_any_instance_of(::Projects::OverwriteProjectService).to receive(:execute).with(existing_project) + + subject.execute + end + end + + context 'has not original_path' do + before do + allow(project).to receive(:import_data).and_return(double(data: {})) + end + + it 'does not call the overwrite service' do + expect_any_instance_of(::Projects::OverwriteProjectService).not_to receive(:execute).with(existing_project) + + subject.execute + end + end + end + end + end +end diff --git a/spec/lib/gitlab/import_export/lfs_restorer_spec.rb b/spec/lib/gitlab/import_export/lfs_restorer_spec.rb new file mode 100644 index 00000000000..70eeb9ee66b --- /dev/null +++ b/spec/lib/gitlab/import_export/lfs_restorer_spec.rb @@ -0,0 +1,75 @@ +require 'spec_helper' + +describe Gitlab::ImportExport::LfsRestorer do + include UploadHelpers + + let(:export_path) { "#{Dir.tmpdir}/lfs_object_restorer_spec" } + let(:project) { create(:project) } + let(:shared) { project.import_export_shared } + subject(:restorer) { described_class.new(project: project, shared: shared) } + + before do + allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) + FileUtils.mkdir_p(shared.export_path) + end + + after do + FileUtils.rm_rf(shared.export_path) + end + + describe '#restore' do + context 'when the archive contains lfs files' do + let(:dummy_lfs_file_path) { File.join(shared.export_path, 'lfs-objects', 'dummy') } + + def create_lfs_object_with_content(content) + dummy_lfs_file = Tempfile.new('existing') + File.write(dummy_lfs_file.path, content) + size = dummy_lfs_file.size + oid = LfsObject.calculate_oid(dummy_lfs_file.path) + LfsObject.create!(oid: oid, size: size, file: dummy_lfs_file) + end + + before do + FileUtils.mkdir_p(File.dirname(dummy_lfs_file_path)) + File.write(dummy_lfs_file_path, 'not very large') + allow(restorer).to receive(:lfs_file_paths).and_return([dummy_lfs_file_path]) + end + + it 'creates an lfs object for the project' do + expect { restorer.restore }.to change { project.reload.lfs_objects.size }.by(1) + end + + it 'assigns the file correctly' do + restorer.restore + + expect(project.lfs_objects.first.file.read).to eq('not very large') + end + + it 'links an existing LFS object if it existed' do + lfs_object = create_lfs_object_with_content('not very large') + + restorer.restore + + expect(project.lfs_objects).to include(lfs_object) + end + + it 'succeeds' do + expect(restorer.restore).to be_truthy + expect(shared.errors).to be_empty + end + + it 'stores the upload' do + expect_any_instance_of(LfsObjectUploader).to receive(:store!) 
+ + restorer.restore + end + end + + context 'without any LFS-objects' do + it 'succeeds' do + expect(restorer.restore).to be_truthy + expect(shared.errors).to be_empty + end + end + end +end diff --git a/spec/lib/gitlab/import_export/lfs_saver_spec.rb b/spec/lib/gitlab/import_export/lfs_saver_spec.rb new file mode 100644 index 00000000000..9b0e21deb2e --- /dev/null +++ b/spec/lib/gitlab/import_export/lfs_saver_spec.rb @@ -0,0 +1,62 @@ +require 'spec_helper' + +describe Gitlab::ImportExport::LfsSaver do + let(:shared) { project.import_export_shared } + let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" } + let(:project) { create(:project) } + + subject(:saver) { described_class.new(project: project, shared: shared) } + + before do + allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) + FileUtils.mkdir_p(shared.export_path) + end + + after do + FileUtils.rm_rf(shared.export_path) + end + + describe '#save' do + context 'when the project has LFS objects locally stored' do + let(:lfs_object) { create(:lfs_object, :with_file) } + + before do + project.lfs_objects << lfs_object + end + + it 'does not cause errors' do + saver.save + + expect(shared.errors).to be_empty + end + + it 'copies the file in the correct location when there is an lfs object' do + saver.save + + expect(File).to exist("#{shared.export_path}/lfs-objects/#{lfs_object.oid}") + end + end + + context 'when the LFS objects are stored in object storage' do + let(:lfs_object) { create(:lfs_object, :object_storage) } + + before do + allow(LfsObjectUploader).to receive(:object_store_enabled?).and_return(true) + allow(lfs_object.file).to receive(:url).and_return('http://my-object-storage.local') + project.lfs_objects << lfs_object + end + + it 'downloads the file to include in an archive' do + fake_uri = double + exported_file_path = "#{shared.export_path}/lfs-objects/#{lfs_object.oid}" + + expect(fake_uri).to receive(:open).and_return(StringIO.new('LFS file content')) + expect(URI).to receive(:parse).with('http://my-object-storage.local').and_return(fake_uri) + + saver.save + + expect(File.read(exported_file_path)).to eq('LFS file content') + end + end + end +end diff --git a/spec/lib/gitlab/import_export/project.json b/spec/lib/gitlab/import_export/project.json index 4a51777ba9b..6d63749296e 100644 --- a/spec/lib/gitlab/import_export/project.json +++ b/spec/lib/gitlab/import_export/project.json @@ -2,7 +2,6 @@ "description": "Nisi et repellendus ut enim quo accusamus vel magnam.", "visibility_level": 10, "archived": false, - "description_html": "description", "labels": [ { "id": 2, @@ -6181,12 +6180,6 @@ "user_id": null, "target_url": null, "description": null, - "artifacts_file": { - "url": null - }, - "artifacts_metadata": { - "url": null - }, "erased_by_id": null, "erased_at": null, "type": "Ci::Build", @@ -6219,12 +6212,6 @@ "user_id": null, "target_url": null, "description": null, - "artifacts_file": { - "url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/72/p5_build_artifacts.zip" - }, - "artifacts_metadata": { - "url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/72/p5_build_artifacts_metadata.gz" - }, "erased_by_id": null, "erased_at": null } @@ -6293,12 +6280,6 @@ "user_id": null, "target_url": null, "description": null, - "artifacts_file": { - "url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/74/p5_build_artifacts.zip" - }, - "artifacts_metadata": { - "url": 
"/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/74/p5_build_artifacts_metadata.gz" - }, "erased_by_id": null, "erased_at": null }, @@ -6328,12 +6309,6 @@ "user_id": null, "target_url": null, "description": null, - "artifacts_file": { - "url": null - }, - "artifacts_metadata": { - "url": null - }, "erased_by_id": null, "erased_at": null } @@ -6393,12 +6368,6 @@ "user_id": null, "target_url": null, "description": null, - "artifacts_file": { - "url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/76/p5_build_artifacts.zip" - }, - "artifacts_metadata": { - "url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/76/p5_build_artifacts_metadata.gz" - }, "erased_by_id": null, "erased_at": null }, @@ -6428,12 +6397,6 @@ "user_id": null, "target_url": null, "description": null, - "artifacts_file": { - "url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/75/p5_build_artifacts.zip" - }, - "artifacts_metadata": { - "url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/75/p5_build_artifacts_metadata.gz" - }, "erased_by_id": null, "erased_at": null } @@ -6493,12 +6456,6 @@ "user_id": null, "target_url": null, "description": null, - "artifacts_file": { - "url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/78/p5_build_artifacts.zip" - }, - "artifacts_metadata": { - "url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/78/p5_build_artifacts_metadata.gz" - }, "erased_by_id": null, "erased_at": null }, @@ -6528,12 +6485,6 @@ "user_id": null, "target_url": null, "description": null, - "artifacts_file": { - "url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/77/p5_build_artifacts.zip" - }, - "artifacts_metadata": { - "url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/77/p5_build_artifacts_metadata.gz" - }, "erased_by_id": null, "erased_at": null } @@ -6593,12 +6544,6 @@ "user_id": null, "target_url": null, "description": null, - "artifacts_file": { - "url": null - }, - "artifacts_metadata": { - "url": null - }, "erased_by_id": null, "erased_at": null }, @@ -6628,12 +6573,6 @@ "user_id": null, "target_url": null, "description": null, - "artifacts_file": { - "url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/80/p5_build_artifacts.zip" - }, - "artifacts_metadata": { - "url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/80/p5_build_artifacts_metadata.gz" - }, "erased_by_id": null, "erased_at": null } diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb index 8e25cd26c2f..13a8c9adcee 100644 --- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb @@ -46,10 +46,6 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do expect(Project.find_by_path('project').description).to eq('Nisi et repellendus ut enim quo accusamus vel magnam.') end - it 'has the project html description' do - expect(Project.find_by_path('project').description_html).to eq('description') - end - it 'has the same label associated to two issues' do expect(ProjectLabel.find_by_title('test2').issues.count).to eq(2) end @@ -317,6 +313,24 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do end end + context 'when the project has overriden params in import data' do + it 'overwrites 
the params stored in the JSON' do + project.create_import_data(data: { override_params: { description: "Overridden" } }) + + restored_project_json + + expect(project.description).to eq("Overridden") + end + + it 'does not allow setting params that are excluded from import_export settings' do + project.create_import_data(data: { override_params: { lfs_enabled: true } }) + + restored_project_json + + expect(project.lfs_enabled).to be_nil + end + end + context 'with a project that has a group' do let!(:project) do create(:project, diff --git a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb index 0d20a551e2a..2b8a11ce8f9 100644 --- a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb +++ b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb @@ -245,10 +245,6 @@ describe Gitlab::ImportExport::ProjectTreeSaver do end context 'project attributes' do - it 'contains the html description' do - expect(saved_project_json).to include("description_html" => 'description') - end - it 'does not contain the runners token' do expect(saved_project_json).not_to include("runners_token" => 'token') end @@ -274,7 +270,6 @@ describe Gitlab::ImportExport::ProjectTreeSaver do releases: [release], group: group ) - project.update_column(:description_html, 'description') project_label = create(:label, project: project) group_label = create(:group_label, group: group) create(:label_link, label: project_label, target: issue) diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml index 0b938892da5..05790bb5fe1 100644 --- a/spec/lib/gitlab/import_export/safe_model_attributes.yml +++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml @@ -15,6 +15,7 @@ Issue: - updated_by_id - confidential - closed_at +- closed_by_id - due_date - moved_to_id - lock_version @@ -265,7 +266,9 @@ CommitStatus: - target_url - description - artifacts_file +- artifacts_file_store - artifacts_metadata +- artifacts_metadata_store - erased_by_id - erased_at - artifacts_expire_at @@ -387,6 +390,7 @@ Service: - default - wiki_page_events - confidential_issues_events +- confidential_note_events ProjectHook: - id - url @@ -407,6 +411,7 @@ ProjectHook: - token - group_id - confidential_issues_events +- confidential_note_events - repository_update_events ProtectedBranch: - id @@ -458,6 +463,7 @@ Project: - merge_requests_ff_only_enabled - merge_requests_rebase_enabled - jobs_cache_index +- pages_https_only Author: - name ProjectFeature: @@ -531,12 +537,6 @@ ProjectCustomAttribute: - project_id - key - value -LfsFileLock: -- id -- path -- user_id -- project_id -- created_at Badge: - id - link_url diff --git a/spec/lib/gitlab/kubernetes/namespace_spec.rb b/spec/lib/gitlab/kubernetes/namespace_spec.rb index b3c987f9344..e098612f6fb 100644 --- a/spec/lib/gitlab/kubernetes/namespace_spec.rb +++ b/spec/lib/gitlab/kubernetes/namespace_spec.rb @@ -9,7 +9,7 @@ describe Gitlab::Kubernetes::Namespace do describe '#exists?' 
do context 'when namespace do not exits' do - let(:exception) { ::KubeException.new(404, "namespace #{name} not found", nil) } + let(:exception) { ::Kubeclient::HttpError.new(404, "namespace #{name} not found", nil) } it 'returns false' do expect(client).to receive(:get_namespace).with(name).once.and_raise(exception) diff --git a/spec/lib/gitlab/metrics/sidekiq_metrics_exporter_spec.rb b/spec/lib/gitlab/metrics/sidekiq_metrics_exporter_spec.rb index 6721e02fb85..61eb059a731 100644 --- a/spec/lib/gitlab/metrics/sidekiq_metrics_exporter_spec.rb +++ b/spec/lib/gitlab/metrics/sidekiq_metrics_exporter_spec.rb @@ -38,7 +38,9 @@ describe Gitlab::Metrics::SidekiqMetricsExporter do expect(::WEBrick::HTTPServer).to have_received(:new).with( Port: port, - BindAddress: address + BindAddress: address, + Logger: anything, + AccessLog: anything ) end end diff --git a/spec/lib/gitlab/omniauth_initializer_spec.rb b/spec/lib/gitlab/omniauth_initializer_spec.rb new file mode 100644 index 00000000000..d808b4d49e0 --- /dev/null +++ b/spec/lib/gitlab/omniauth_initializer_spec.rb @@ -0,0 +1,65 @@ +require 'spec_helper' + +describe Gitlab::OmniauthInitializer do + let(:devise_config) { class_double(Devise) } + + subject { described_class.new(devise_config) } + + describe '#execute' do + it 'configures providers from array' do + generic_config = { 'name' => 'generic' } + + expect(devise_config).to receive(:omniauth).with(:generic) + + subject.execute([generic_config]) + end + + it 'allows "args" array for app_id and app_secret' do + legacy_config = { 'name' => 'legacy', 'args' => %w(123 abc) } + + expect(devise_config).to receive(:omniauth).with(:legacy, '123', 'abc') + + subject.execute([legacy_config]) + end + + it 'passes app_id and app_secret as additional arguments' do + twitter_config = { 'name' => 'twitter', 'app_id' => '123', 'app_secret' => 'abc' } + + expect(devise_config).to receive(:omniauth).with(:twitter, '123', 'abc') + + subject.execute([twitter_config]) + end + + it 'passes "args" hash as symbolized hash argument' do + hash_config = { 'name' => 'hash', 'args' => { 'custom' => 'format' } } + + expect(devise_config).to receive(:omniauth).with(:hash, custom: 'format') + + subject.execute([hash_config]) + end + + it 'configures fail_with_empty_uid for shibboleth' do + shibboleth_config = { 'name' => 'shibboleth', 'args' => {} } + + expect(devise_config).to receive(:omniauth).with(:shibboleth, fail_with_empty_uid: true) + + subject.execute([shibboleth_config]) + end + + it 'configures remote_sign_out_handler proc for authentiq' do + authentiq_config = { 'name' => 'authentiq', 'args' => {} } + + expect(devise_config).to receive(:omniauth).with(:authentiq, remote_sign_out_handler: an_instance_of(Proc)) + + subject.execute([authentiq_config]) + end + + it 'configures on_single_sign_out proc for cas3' do + cas3_config = { 'name' => 'cas3', 'args' => {} } + + expect(devise_config).to receive(:omniauth).with(:cas3, on_single_sign_out: an_instance_of(Proc)) + + subject.execute([cas3_config]) + end + end +end diff --git a/spec/lib/gitlab/performance_bar_spec.rb b/spec/lib/gitlab/performance_bar_spec.rb index b8a2267f1a4..f480376acb4 100644 --- a/spec/lib/gitlab/performance_bar_spec.rb +++ b/spec/lib/gitlab/performance_bar_spec.rb @@ -25,6 +25,12 @@ describe Gitlab::PerformanceBar do expect(described_class.enabled?(nil)).to be_falsy end + it 'returns true when given user is an admin' do + user = build_stubbed(:user, :admin) + + expect(described_class.enabled?(user)).to be_truthy + end + it 'returns false when 
allowed_group_id is nil' do expect(described_class).to receive(:allowed_group_id).and_return(nil) diff --git a/spec/lib/gitlab/profiler_spec.rb b/spec/lib/gitlab/profiler_spec.rb index 3d5b56cd5b8..548eb28fe4d 100644 --- a/spec/lib/gitlab/profiler_spec.rb +++ b/spec/lib/gitlab/profiler_spec.rb @@ -110,8 +110,8 @@ describe Gitlab::Profiler do custom_logger.debug('User Load (1.3ms)') custom_logger.debug('Project Load (10.4ms)') - expect(custom_logger.load_times_by_model).to eq('User' => 2.5, - 'Project' => 10.4) + expect(custom_logger.load_times_by_model).to eq('User' => [1.2, 1.3], + 'Project' => [10.4]) end it 'logs the backtrace, ignoring lines as appropriate' do @@ -164,4 +164,24 @@ describe Gitlab::Profiler do end end end + + describe '.log_load_times_by_model' do + it 'logs the model, query count, and time by slowest first' do + expect(null_logger).to receive(:load_times_by_model).and_return( + 'User' => [1.2, 1.3], + 'Project' => [10.4] + ) + + expect(null_logger).to receive(:info).with('Project total (1): 10.4ms') + expect(null_logger).to receive(:info).with('User total (2): 2.5ms') + + described_class.log_load_times_by_model(null_logger) + end + + it 'does nothing when called with a logger that does not have load times' do + expect(null_logger).not_to receive(:info) + + expect(described_class.log_load_times_by_model(null_logger)).to be_nil + end + end end diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb index 57905a74e92..8351b967133 100644 --- a/spec/lib/gitlab/project_search_results_spec.rb +++ b/spec/lib/gitlab/project_search_results_spec.rb @@ -83,19 +83,19 @@ describe Gitlab::ProjectSearchResults do end context 'when the matching filename contains a colon' do - let(:search_result) { "\nmaster:testdata/project::function1.yaml\x001\x00---\n" } + let(:search_result) { "master:testdata/project::function1.yaml\x001\x00---\n" } it 'returns a valid FoundBlob' do expect(subject.filename).to eq('testdata/project::function1.yaml') expect(subject.basename).to eq('testdata/project::function1') expect(subject.ref).to eq('master') expect(subject.startline).to eq(1) - expect(subject.data).to eq('---') + expect(subject.data).to eq("---\n") end end context 'when the matching content contains a number surrounded by colons' do - let(:search_result) { "\nmaster:testdata/foo.txt\x001\x00blah:9:blah" } + let(:search_result) { "master:testdata/foo.txt\x001\x00blah:9:blah" } it 'returns a valid FoundBlob' do expect(subject.filename).to eq('testdata/foo.txt') @@ -106,6 +106,18 @@ describe Gitlab::ProjectSearchResults do end end + context 'when the search result ends with an empty line' do + let(:results) { project.repository.search_files_by_content('Role models', 'master') } + + it 'returns a valid FoundBlob that ends with an empty line' do + expect(subject.filename).to eq('files/markdown/ruby-style-guide.md') + expect(subject.basename).to eq('files/markdown/ruby-style-guide') + expect(subject.ref).to eq('master') + expect(subject.startline).to eq(1) + expect(subject.data).to eq("# Prelude\n\n> Role models are important. <br/>\n> -- Officer Alex J. 
Murphy / RoboCop\n\n") + end + end + context 'when the search returns non-ASCII data' do context 'with UTF-8' do let(:results) { project.repository.search_files_by_content('файл', 'master') } @@ -115,7 +127,7 @@ describe Gitlab::ProjectSearchResults do expect(subject.basename).to eq('encoding/russian') expect(subject.ref).to eq('master') expect(subject.startline).to eq(1) - expect(subject.data).to eq('Хороший файл') + expect(subject.data).to eq("Хороший файл\n") end end @@ -139,7 +151,7 @@ describe Gitlab::ProjectSearchResults do expect(subject.basename).to eq('encoding/iso8859') expect(subject.ref).to eq('master') expect(subject.startline).to eq(1) - expect(subject.data).to eq("Äü\n\nfoo") + expect(subject.data).to eq("Äü\n\nfoo\n") end end end diff --git a/spec/lib/gitlab/repo_path_spec.rb b/spec/lib/gitlab/repo_path_spec.rb index b67bcc77bd4..f030f371372 100644 --- a/spec/lib/gitlab/repo_path_spec.rb +++ b/spec/lib/gitlab/repo_path_spec.rb @@ -48,8 +48,8 @@ describe ::Gitlab::RepoPath do describe '.strip_storage_path' do before do allow(Gitlab.config.repositories).to receive(:storages).and_return({ - 'storage1' => { 'path' => '/foo' }, - 'storage2' => { 'path' => '/bar' } + 'storage1' => Gitlab::GitalyClient::StorageSettings.new('path' => '/foo'), + 'storage2' => Gitlab::GitalyClient::StorageSettings.new('path' => '/bar') }) end diff --git a/spec/lib/gitlab/sentry_spec.rb b/spec/lib/gitlab/sentry_spec.rb index 8c211d1c63f..499757da061 100644 --- a/spec/lib/gitlab/sentry_spec.rb +++ b/spec/lib/gitlab/sentry_spec.rb @@ -7,7 +7,49 @@ describe Gitlab::Sentry do described_class.context(nil) - expect(Raven.tags_context[:locale]).to eq(I18n.locale.to_s) + expect(Raven.tags_context[:locale].to_s).to eq(I18n.locale.to_s) + end + end + + describe '.track_exception' do + let(:exception) { RuntimeError.new('boom') } + + before do + allow(described_class).to receive(:enabled?).and_return(true) + end + + it 'raises the exception if it should' do + expect(described_class).to receive(:should_raise?).and_return(true) + expect { described_class.track_exception(exception) } + .to raise_error(RuntimeError) + end + + context 'when exceptions should not be raised' do + before do + allow(described_class).to receive(:should_raise?).and_return(false) + end + + it 'logs the exception with all attributes passed' do + expected_extras = { + some_other_info: 'info', + issue_url: 'http://gitlab.com/gitlab-org/gitlab-ce/issues/1' + } + + expect(Raven).to receive(:capture_exception) + .with(exception, extra: a_hash_including(expected_extras)) + + described_class.track_exception( + exception, + issue_url: 'http://gitlab.com/gitlab-org/gitlab-ce/issues/1', + extra: { some_other_info: 'info' } + ) + end + + it 'sets the context' do + expect(described_class).to receive(:context) + + described_class.track_exception(exception) + end end end end diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb index 14b59c5e945..7f579df1c36 100644 --- a/spec/lib/gitlab/shell_spec.rb +++ b/spec/lib/gitlab/shell_spec.rb @@ -14,7 +14,7 @@ describe Gitlab::Shell do allow(Project).to receive(:find).and_return(project) allow(gitlab_shell).to receive(:gitlab_projects) - .with(project.repository_storage_path, project.disk_path + '.git') + .with(project.repository_storage, project.disk_path + '.git') .and_return(gitlab_projects) end @@ -405,7 +405,7 @@ describe Gitlab::Shell do describe '#create_repository' do shared_examples '#create_repository' do let(:repository_storage) { 'default' } - let(:repository_storage_path) { 
Gitlab.config.repositories.storages[repository_storage]['path'] } + let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage].legacy_disk_path } let(:repo_name) { 'project/path' } let(:created_path) { File.join(repository_storage_path, repo_name + '.git') } @@ -487,21 +487,21 @@ describe Gitlab::Shell do describe '#fork_repository' do subject do gitlab_shell.fork_repository( - project.repository_storage_path, + project.repository_storage, project.disk_path, - 'new/storage', + 'nfs-file05', 'fork/path' ) end it 'returns true when the command succeeds' do - expect(gitlab_projects).to receive(:fork_repository).with('new/storage', 'fork/path.git') { true } + expect(gitlab_projects).to receive(:fork_repository).with('nfs-file05', 'fork/path.git') { true } is_expected.to be_truthy end it 'return false when the command fails' do - expect(gitlab_projects).to receive(:fork_repository).with('new/storage', 'fork/path.git') { false } + expect(gitlab_projects).to receive(:fork_repository).with('nfs-file05', 'fork/path.git') { false } is_expected.to be_falsy end @@ -661,7 +661,7 @@ describe Gitlab::Shell do it 'returns true when the command succeeds' do expect(gitlab_projects).to receive(:import_project).with(import_url, timeout) { true } - result = gitlab_shell.import_repository(project.repository_storage_path, project.disk_path, import_url) + result = gitlab_shell.import_repository(project.repository_storage, project.disk_path, import_url) expect(result).to be_truthy end @@ -671,7 +671,7 @@ describe Gitlab::Shell do expect(gitlab_projects).to receive(:import_project) { false } expect do - gitlab_shell.import_repository(project.repository_storage_path, project.disk_path, import_url) + gitlab_shell.import_repository(project.repository_storage, project.disk_path, import_url) end.to raise_error(Gitlab::Shell::Error, "error") end end @@ -679,7 +679,7 @@ describe Gitlab::Shell do describe 'namespace actions' do subject { described_class.new } - let(:storage_path) { Gitlab.config.repositories.storages.default.path } + let(:storage_path) { Gitlab.config.repositories.storages.default.legacy_disk_path } describe '#add_namespace' do it 'creates a namespace' do @@ -727,7 +727,7 @@ describe Gitlab::Shell do def find_in_authorized_keys_file(key_id) gitlab_shell.batch_read_key_ids do |ids| - return true if ids.include?(key_id) + return true if ids.include?(key_id) # rubocop:disable Cop/AvoidReturnFromBlocks end false diff --git a/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb b/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb new file mode 100644 index 00000000000..fed9aeba30c --- /dev/null +++ b/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb @@ -0,0 +1,31 @@ +require 'spec_helper' + +describe Gitlab::SidekiqLogging::JSONFormatter do + let(:hash_input) { { foo: 1, bar: 'test' } } + let(:message) { 'This is a test' } + let(:timestamp) { Time.now } + + it 'wraps a Hash' do + result = subject.call('INFO', timestamp, 'my program', hash_input) + + data = JSON.parse(result) + expected_output = hash_input.stringify_keys + expected_output['severity'] = 'INFO' + expected_output['time'] = timestamp.utc.iso8601(3) + + expect(data).to eq(expected_output) + end + + it 'wraps a String' do + result = subject.call('DEBUG', timestamp, 'my string', message) + + data = JSON.parse(result) + expected_output = { + severity: 'DEBUG', + time: timestamp.utc.iso8601(3), + message: message + } + + expect(data).to eq(expected_output.stringify_keys) + end +end diff --git 
a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb new file mode 100644 index 00000000000..2421b1e5a1a --- /dev/null +++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb @@ -0,0 +1,101 @@ +require 'spec_helper' + +describe Gitlab::SidekiqLogging::StructuredLogger do + describe '#call' do + let(:timestamp) { Time.new('2018-01-01 12:00:00').utc } + let(:job) do + { + "class" => "TestWorker", + "args" => [1234, 'hello'], + "retry" => false, + "queue" => "cronjob:test_queue", + "queue_namespace" => "cronjob", + "jid" => "da883554ee4fe414012f5f42", + "created_at" => timestamp.to_f, + "enqueued_at" => timestamp.to_f + } + end + let(:logger) { double() } + let(:start_payload) do + job.merge( + 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: start', + 'job_status' => 'start', + 'pid' => Process.pid, + 'created_at' => timestamp.iso8601(3), + 'enqueued_at' => timestamp.iso8601(3) + ) + end + let(:end_payload) do + start_payload.merge( + 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec', + 'job_status' => 'done', + 'duration' => 0.0, + "completed_at" => timestamp.iso8601(3) + ) + end + let(:exception_payload) do + end_payload.merge( + 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec', + 'job_status' => 'fail', + 'error' => ArgumentError, + 'error_message' => 'some exception' + ) + end + + before do + allow(Sidekiq).to receive(:logger).and_return(logger) + + allow(subject).to receive(:current_time).and_return(timestamp.to_f) + end + + subject { described_class.new } + + context 'with SIDEKIQ_LOG_ARGUMENTS enabled' do + before do + stub_env('SIDEKIQ_LOG_ARGUMENTS', '1') + end + + it 'logs start and end of job' do + Timecop.freeze(timestamp) do + expect(logger).to receive(:info).with(start_payload).ordered + expect(logger).to receive(:info).with(end_payload).ordered + expect(subject).to receive(:log_job_start).and_call_original + expect(subject).to receive(:log_job_done).and_call_original + + subject.call(job, 'test_queue') { } + end + end + + it 'logs an exception in job' do + Timecop.freeze(timestamp) do + expect(logger).to receive(:info).with(start_payload) + # This excludes the exception_backtrace + expect(logger).to receive(:warn).with(hash_including(exception_payload)) + expect(subject).to receive(:log_job_start).and_call_original + expect(subject).to receive(:log_job_done).and_call_original + + expect do + subject.call(job, 'test_queue') do + raise ArgumentError, 'some exception' + end + end.to raise_error(ArgumentError) + end + end + end + + context 'with SIDEKIQ_LOG_ARGUMENTS disabled' do + it 'logs start and end of job' do + Timecop.freeze(timestamp) do + start_payload.delete('args') + + expect(logger).to receive(:info).with(start_payload).ordered + expect(logger).to receive(:info).with(end_payload).ordered + expect(subject).to receive(:log_job_start).and_call_original + expect(subject).to receive(:log_job_done).and_call_original + + subject.call(job, 'test_queue') { } + end + end + end + end +end diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb index d9b3c2350b1..a3b3dc3be6d 100644 --- a/spec/lib/gitlab/url_blocker_spec.rb +++ b/spec/lib/gitlab/url_blocker_spec.rb @@ -2,6 +2,8 @@ require 'spec_helper' describe Gitlab::UrlBlocker do describe '#blocked_url?' 
do + let(:valid_ports) { Project::VALID_IMPORT_PORTS } + it 'allows imports from configured web host and port' do import_url = "http://#{Gitlab.config.gitlab.host}:#{Gitlab.config.gitlab.port}/t.git" expect(described_class.blocked_url?(import_url)).to be false @@ -17,7 +19,7 @@ describe Gitlab::UrlBlocker do end it 'returns true for bad port' do - expect(described_class.blocked_url?('https://gitlab.com:25/foo/foo.git')).to be true + expect(described_class.blocked_url?('https://gitlab.com:25/foo/foo.git', valid_ports: valid_ports)).to be true end it 'returns true for alternative version of 127.0.0.1 (0177.1)' do @@ -71,6 +73,47 @@ describe Gitlab::UrlBlocker do it 'returns false for legitimate URL' do expect(described_class.blocked_url?('https://gitlab.com/foo/foo.git')).to be false end + + context 'when allow_local_network is' do + let(:local_ips) { ['192.168.1.2', '10.0.0.2', '172.16.0.2'] } + let(:fake_domain) { 'www.fakedomain.fake' } + + context 'true (default)' do + it 'does not block urls from private networks' do + local_ips.each do |ip| + stub_domain_resolv(fake_domain, ip) + + expect(described_class).not_to be_blocked_url("http://#{fake_domain}") + + unstub_domain_resolv + + expect(described_class).not_to be_blocked_url("http://#{ip}") + end + end + end + + context 'false' do + it 'blocks urls from private networks' do + local_ips.each do |ip| + stub_domain_resolv(fake_domain, ip) + + expect(described_class).to be_blocked_url("http://#{fake_domain}", allow_local_network: false) + + unstub_domain_resolv + + expect(described_class).to be_blocked_url("http://#{ip}", allow_local_network: false) + end + end + end + + def stub_domain_resolv(domain, ip) + allow(Addrinfo).to receive(:getaddrinfo).with(domain, any_args).and_return([double(ip_address: ip, ipv4_private?: true)]) + end + + def unstub_domain_resolv + allow(Addrinfo).to receive(:getaddrinfo).and_call_original + end + end end # Resolv does not support resolving UTF-8 domain names diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb index 138d21ede97..9e6aa109a4b 100644 --- a/spec/lib/gitlab/usage_data_spec.rb +++ b/spec/lib/gitlab/usage_data_spec.rb @@ -12,6 +12,14 @@ describe Gitlab::UsageData do create(:service, project: projects[0], type: 'SlackSlashCommandsService', active: true) create(:service, project: projects[1], type: 'SlackService', active: true) create(:service, project: projects[2], type: 'SlackService', active: true) + + gcp_cluster = create(:cluster, :provided_by_gcp) + create(:cluster, :provided_by_user) + create(:cluster, :provided_by_user, :disabled) + create(:clusters_applications_helm, :installed, cluster: gcp_cluster) + create(:clusters_applications_ingress, :installed, cluster: gcp_cluster) + create(:clusters_applications_prometheus, :installed, cluster: gcp_cluster) + create(:clusters_applications_runner, :installed, cluster: gcp_cluster) end subject { described_class.data } @@ -64,6 +72,12 @@ describe Gitlab::UsageData do clusters clusters_enabled clusters_disabled + clusters_platforms_gke + clusters_platforms_user + clusters_applications_helm + clusters_applications_ingress + clusters_applications_prometheus + clusters_applications_runner in_review_folder groups issues @@ -97,6 +111,15 @@ describe Gitlab::UsageData do expect(count_data[:projects_jira_active]).to eq(2) expect(count_data[:projects_slack_notifications_active]).to eq(2) expect(count_data[:projects_slack_slash_active]).to eq(1) + + expect(count_data[:clusters_enabled]).to eq(6) + 
expect(count_data[:clusters_disabled]).to eq(1) + expect(count_data[:clusters_platforms_gke]).to eq(1) + expect(count_data[:clusters_platforms_user]).to eq(1) + expect(count_data[:clusters_applications_helm]).to eq(1) + expect(count_data[:clusters_applications_ingress]).to eq(1) + expect(count_data[:clusters_applications_prometheus]).to eq(1) + expect(count_data[:clusters_applications_runner]).to eq(1) end end diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb index 71a743495a2..4ba99009855 100644 --- a/spec/lib/gitlab/utils_spec.rb +++ b/spec/lib/gitlab/utils_spec.rb @@ -1,7 +1,8 @@ require 'spec_helper' describe Gitlab::Utils do - delegate :to_boolean, :boolean_to_yes_no, :slugify, :random_string, :which, :ensure_array_from_string, to: :described_class + delegate :to_boolean, :boolean_to_yes_no, :slugify, :random_string, :which, :ensure_array_from_string, + :bytes_to_megabytes, to: :described_class describe '.slugify' do { @@ -97,4 +98,12 @@ describe Gitlab::Utils do expect(ensure_array_from_string(str)).to eq(%w[seven eight 9 10]) end end + + describe '.bytes_to_megabytes' do + it 'converts bytes to megabytes' do + bytes = 1.megabyte + + expect(bytes_to_megabytes(bytes)).to eq(1) + end + end end diff --git a/spec/lib/gitlab/verify/lfs_objects_spec.rb b/spec/lib/gitlab/verify/lfs_objects_spec.rb index 64f3a9660e0..0f890e2c7ce 100644 --- a/spec/lib/gitlab/verify/lfs_objects_spec.rb +++ b/spec/lib/gitlab/verify/lfs_objects_spec.rb @@ -31,5 +31,21 @@ describe Gitlab::Verify::LfsObjects do expect(failures.keys).to contain_exactly(lfs_object) expect(failure.to_s).to include('Checksum mismatch') end + + context 'with remote files' do + before do + stub_lfs_object_storage + end + + it 'skips LFS objects in object storage' do + local_failure = create(:lfs_object) + create(:lfs_object, :object_storage) + + failures = {} + described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) } + + expect(failures.keys).to contain_exactly(local_failure) + end + end end end diff --git a/spec/lib/gitlab/verify/uploads_spec.rb b/spec/lib/gitlab/verify/uploads_spec.rb index 6146ce61226..85768308edc 100644 --- a/spec/lib/gitlab/verify/uploads_spec.rb +++ b/spec/lib/gitlab/verify/uploads_spec.rb @@ -40,5 +40,21 @@ describe Gitlab::Verify::Uploads do expect(failures.keys).to contain_exactly(upload) expect(failure.to_s).to include('Checksum missing') end + + context 'with remote files' do + before do + stub_uploads_object_storage(AvatarUploader) + end + + it 'skips uploads in object storage' do + local_failure = create(:upload) + create(:upload, :object_storage) + + failures = {} + described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) } + + expect(failures.keys).to contain_exactly(local_failure) + end + end end end diff --git a/spec/lib/gitlab/view/presenter/base_spec.rb b/spec/lib/gitlab/view/presenter/base_spec.rb index 32a946ca034..4eca53032a2 100644 --- a/spec/lib/gitlab/view/presenter/base_spec.rb +++ b/spec/lib/gitlab/view/presenter/base_spec.rb @@ -48,4 +48,11 @@ describe Gitlab::View::Presenter::Base do end end end + + describe '#present' do + it 'returns self' do + presenter = presenter_class.new(build_stubbed(:project)) + expect(presenter.present).to eq(presenter) + end + end end diff --git a/spec/lib/gitlab/wiki/committer_with_hooks_spec.rb b/spec/lib/gitlab/wiki/committer_with_hooks_spec.rb new file mode 100644 index 00000000000..830fb8a8598 --- /dev/null +++ b/spec/lib/gitlab/wiki/committer_with_hooks_spec.rb @@ -0,0 
+1,154 @@ +require 'spec_helper' + +describe Gitlab::Wiki::CommitterWithHooks, seed_helper: true do + shared_examples 'calling wiki hooks' do + let(:project) { create(:project) } + let(:user) { project.owner } + let(:project_wiki) { ProjectWiki.new(project, user) } + let(:wiki) { project_wiki.wiki } + let(:options) do + { + id: user.id, + username: user.username, + name: user.name, + email: user.email, + message: 'commit message' + } + end + + subject { described_class.new(wiki, options) } + + before do + project_wiki.create_page('home', 'test content') + end + + shared_examples 'failing pre-receive hook' do + before do + expect_any_instance_of(Gitlab::Git::HooksService).to receive(:run_hook).with('pre-receive').and_return([false, '']) + expect_any_instance_of(Gitlab::Git::HooksService).not_to receive(:run_hook).with('update') + expect_any_instance_of(Gitlab::Git::HooksService).not_to receive(:run_hook).with('post-receive') + end + + it 'raises exception' do + expect { subject.commit }.to raise_error(Gitlab::Git::Wiki::OperationError) + end + + it 'does not create a new commit inside the repository' do + current_rev = find_current_rev + + expect { subject.commit }.to raise_error(Gitlab::Git::Wiki::OperationError) + + expect(current_rev).to eq find_current_rev + end + end + + shared_examples 'failing update hook' do + before do + expect_any_instance_of(Gitlab::Git::HooksService).to receive(:run_hook).with('pre-receive').and_return([true, '']) + expect_any_instance_of(Gitlab::Git::HooksService).to receive(:run_hook).with('update').and_return([false, '']) + expect_any_instance_of(Gitlab::Git::HooksService).not_to receive(:run_hook).with('post-receive') + end + + it 'raises exception' do + expect { subject.commit }.to raise_error(Gitlab::Git::Wiki::OperationError) + end + + it 'does not create a new commit inside the repository' do + current_rev = find_current_rev + + expect { subject.commit }.to raise_error(Gitlab::Git::Wiki::OperationError) + + expect(current_rev).to eq find_current_rev + end + end + + shared_examples 'failing post-receive hook' do + before do + expect_any_instance_of(Gitlab::Git::HooksService).to receive(:run_hook).with('pre-receive').and_return([true, '']) + expect_any_instance_of(Gitlab::Git::HooksService).to receive(:run_hook).with('update').and_return([true, '']) + expect_any_instance_of(Gitlab::Git::HooksService).to receive(:run_hook).with('post-receive').and_return([false, '']) + end + + it 'does not raise exception' do + expect { subject.commit }.not_to raise_error + end + + it 'creates the commit' do + current_rev = find_current_rev + + subject.commit + + expect(current_rev).not_to eq find_current_rev + end + end + + shared_examples 'when hooks call succceeds' do + let(:hook) { double(:hook) } + + it 'calls the three hooks' do + expect(Gitlab::Git::Hook).to receive(:new).exactly(3).times.and_return(hook) + expect(hook).to receive(:trigger).exactly(3).times.and_return([true, nil]) + + subject.commit + end + + it 'creates the commit' do + current_rev = find_current_rev + + subject.commit + + expect(current_rev).not_to eq find_current_rev + end + end + + context 'when creating a page' do + before do + project_wiki.create_page('index', 'test content') + end + + it_behaves_like 'failing pre-receive hook' + it_behaves_like 'failing update hook' + it_behaves_like 'failing post-receive hook' + it_behaves_like 'when hooks call succceeds' + end + + context 'when updating a page' do + before do + project_wiki.update_page(find_page('home'), content: 'some other content', 
format: :markdown) + end + + it_behaves_like 'failing pre-receive hook' + it_behaves_like 'failing update hook' + it_behaves_like 'failing post-receive hook' + it_behaves_like 'when hooks call succceeds' + end + + context 'when deleting a page' do + before do + project_wiki.delete_page(find_page('home')) + end + + it_behaves_like 'failing pre-receive hook' + it_behaves_like 'failing update hook' + it_behaves_like 'failing post-receive hook' + it_behaves_like 'when hooks call succceeds' + end + + def find_current_rev + wiki.gollum_wiki.repo.commits.first&.sha + end + + def find_page(name) + wiki.page(title: name) + end + end + + # TODO: Uncomment once Gitaly updates the ruby vendor code + # context 'when Gitaly is enabled' do + # it_behaves_like 'calling wiki hooks' + # end + + context 'when Gitaly is disabled', :skip_gitaly_mock do + it_behaves_like 'calling wiki hooks' + end +end diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb index 37a0bf1ad36..e732b089d44 100644 --- a/spec/lib/gitlab/workhorse_spec.rb +++ b/spec/lib/gitlab/workhorse_spec.rb @@ -16,7 +16,7 @@ describe Gitlab::Workhorse do let(:ref) { 'master' } let(:format) { 'zip' } let(:storage_path) { Gitlab.config.gitlab.repository_downloads_path } - let(:base_params) { repository.archive_metadata(ref, storage_path, format) } + let(:base_params) { repository.archive_metadata(ref, storage_path, format, append_sha: nil) } let(:gitaly_params) do base_params.merge( 'GitalyServer' => { @@ -29,7 +29,7 @@ describe Gitlab::Workhorse do let(:cache_disabled) { false } subject do - described_class.send_git_archive(repository, ref: ref, format: format) + described_class.send_git_archive(repository, ref: ref, format: format, append_sha: nil) end before do @@ -55,7 +55,7 @@ describe Gitlab::Workhorse do end end - context 'when Gitaly workhorse_archive feature is disabled', :skip_gitaly_mock do + context 'when Gitaly workhorse_archive feature is disabled', :disable_gitaly do it 'sets the header correctly' do key, command, params = decode_workhorse_header(subject) @@ -100,7 +100,7 @@ describe Gitlab::Workhorse do end end - context 'when Gitaly workhorse_send_git_patch feature is disabled', :skip_gitaly_mock do + context 'when Gitaly workhorse_send_git_patch feature is disabled', :disable_gitaly do it 'sets the header correctly' do key, command, params = decode_workhorse_header(subject) @@ -173,7 +173,7 @@ describe Gitlab::Workhorse do end end - context 'when Gitaly workhorse_send_git_diff feature is disabled', :skip_gitaly_mock do + context 'when Gitaly workhorse_send_git_diff feature is disabled', :disable_gitaly do it 'sets the header correctly' do key, command, params = decode_workhorse_header(subject) @@ -275,7 +275,7 @@ describe Gitlab::Workhorse do describe '.git_http_ok' do let(:user) { create(:user) } - let(:repo_path) { repository.path_to_repo } + let(:repo_path) { 'ignored but not allowed to be empty in gitlab-workhorse' } let(:action) { 'info_refs' } let(:params) do { @@ -455,7 +455,7 @@ describe Gitlab::Workhorse do end end - context 'when Gitaly workhorse_raw_show feature is disabled', :skip_gitaly_mock do + context 'when Gitaly workhorse_raw_show feature is disabled', :disable_gitaly do it 'sets the header correctly' do key, command, params = decode_workhorse_header(subject) @@ -482,4 +482,26 @@ describe Gitlab::Workhorse do }.deep_stringify_keys) end end + + describe '.send_git_snapshot' do + let(:url) { 'http://example.com' } + + subject(:request) { described_class.send_git_snapshot(repository) } + 
+ it 'sets the header correctly' do + key, command, params = decode_workhorse_header(request) + + expect(key).to eq("Gitlab-Workhorse-Send-Data") + expect(command).to eq('git-snapshot') + expect(params).to eq( + 'GitalyServer' => { + 'address' => Gitlab::GitalyClient.address(project.repository_storage), + 'token' => Gitlab::GitalyClient.token(project.repository_storage) + }, + 'GetSnapshotRequest' => Gitaly::GetSnapshotRequest.new( + repository: repository.gitaly_repository + ).to_json + ) + end + end end diff --git a/spec/lib/gitlab_spec.rb b/spec/lib/gitlab_spec.rb index f97136f0191..bd443a5d9e7 100644 --- a/spec/lib/gitlab_spec.rb +++ b/spec/lib/gitlab_spec.rb @@ -14,6 +14,12 @@ describe Gitlab do expect(described_class.com?).to eq true end + it 'is true when on other gitlab subdomain' do + stub_config_setting(url: 'https://example.gitlab.com') + + expect(described_class.com?).to eq true + end + it 'is false when not on GitLab.com' do stub_config_setting(url: 'http://example.com') diff --git a/spec/lib/mattermost/command_spec.rb b/spec/lib/mattermost/command_spec.rb index 369e7b181b9..8ba15ae0f38 100644 --- a/spec/lib/mattermost/command_spec.rb +++ b/spec/lib/mattermost/command_spec.rb @@ -4,10 +4,11 @@ describe Mattermost::Command do let(:params) { { 'token' => 'token', team_id: 'abc' } } before do - Mattermost::Session.base_uri('http://mattermost.example.com') + session = Mattermost::Session.new(nil) + session.base_uri = 'http://mattermost.example.com' allow_any_instance_of(Mattermost::Client).to receive(:with_session) - .and_yield(Mattermost::Session.new(nil)) + .and_yield(session) end describe '#create' do diff --git a/spec/lib/mattermost/session_spec.rb b/spec/lib/mattermost/session_spec.rb index 3db19d06305..c855643c4d8 100644 --- a/spec/lib/mattermost/session_spec.rb +++ b/spec/lib/mattermost/session_spec.rb @@ -15,7 +15,7 @@ describe Mattermost::Session, type: :request do it { is_expected.to respond_to(:strategy) } before do - described_class.base_uri(mattermost_url) + subject.base_uri = mattermost_url end describe '#with session' do diff --git a/spec/lib/mattermost/team_spec.rb b/spec/lib/mattermost/team_spec.rb index 3c8206031cf..2cfa6802612 100644 --- a/spec/lib/mattermost/team_spec.rb +++ b/spec/lib/mattermost/team_spec.rb @@ -2,10 +2,11 @@ require 'spec_helper' describe Mattermost::Team do before do - Mattermost::Session.base_uri('http://mattermost.example.com') + session = Mattermost::Session.new(nil) + session.base_uri = 'http://mattermost.example.com' allow_any_instance_of(Mattermost::Client).to receive(:with_session) - .and_yield(Mattermost::Session.new(nil)) + .and_yield(session) end describe '#all' do diff --git a/spec/lib/rspec_flaky/config_spec.rb b/spec/lib/rspec_flaky/config_spec.rb index 83556787e85..4a71b1feebd 100644 --- a/spec/lib/rspec_flaky/config_spec.rb +++ b/spec/lib/rspec_flaky/config_spec.rb @@ -16,23 +16,25 @@ describe RspecFlaky::Config, :aggregate_failures do end end - context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is set to 'false'" do - before do - stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'false') - end - - it 'returns false' do - expect(described_class).not_to be_generate_report + context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is set" do + using RSpec::Parameterized::TableSyntax + + where(:env_value, :result) do + '1' | true + 'true' | true + 'foo' | false + '0' | false + 'false' | false end - end - context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is set to 'true'" do - before do - stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'true') - end + 
with_them do + before do + stub_env('FLAKY_RSPEC_GENERATE_REPORT', env_value) + end - it 'returns true' do - expect(described_class).to be_generate_report + it 'returns false' do + expect(described_class.generate_report?).to be(result) + end end end end diff --git a/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb b/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb index 06a8ba0d02e..6731a27ed17 100644 --- a/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb +++ b/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb @@ -24,14 +24,6 @@ describe RspecFlaky::FlakyExamplesCollection, :aggregate_failures do } end - describe '.from_json' do - it 'accepts a JSON' do - collection = described_class.from_json(JSON.pretty_generate(collection_hash)) - - expect(collection.to_report).to eq(described_class.new(collection_hash).to_report) - end - end - describe '#initialize' do it 'accepts no argument' do expect { described_class.new }.not_to raise_error @@ -46,11 +38,11 @@ describe RspecFlaky::FlakyExamplesCollection, :aggregate_failures do end end - describe '#to_report' do + describe '#to_h' do it 'calls #to_h on the values' do collection = described_class.new(collection_hash) - expect(collection.to_report).to eq(collection_report) + expect(collection.to_h).to eq(collection_report) end end @@ -61,7 +53,7 @@ describe RspecFlaky::FlakyExamplesCollection, :aggregate_failures do a: { example_id: 'spec/foo/bar_spec.rb:2' }, c: { example_id: 'spec/bar/baz_spec.rb:4' }) - expect((collection2 - collection1).to_report).to eq( + expect((collection2 - collection1).to_h).to eq( c: { example_id: 'spec/bar/baz_spec.rb:4', first_flaky_at: nil, diff --git a/spec/lib/rspec_flaky/listener_spec.rb b/spec/lib/rspec_flaky/listener_spec.rb index bfb7648b486..ef085445081 100644 --- a/spec/lib/rspec_flaky/listener_spec.rb +++ b/spec/lib/rspec_flaky/listener_spec.rb @@ -4,7 +4,7 @@ describe RspecFlaky::Listener, :aggregate_failures do let(:already_flaky_example_uid) { '6e869794f4cfd2badd93eb68719371d1' } let(:suite_flaky_example_report) do { - already_flaky_example_uid => { + "#{already_flaky_example_uid}": { example_id: 'spec/foo/bar_spec.rb:2', file: 'spec/foo/bar_spec.rb', line: 2, @@ -55,8 +55,7 @@ describe RspecFlaky::Listener, :aggregate_failures do it 'returns a valid Listener instance' do listener = described_class.new - expect(listener.to_report(listener.suite_flaky_examples)) - .to eq(expected_suite_flaky_examples) + expect(listener.suite_flaky_examples.to_h).to eq(expected_suite_flaky_examples) expect(listener.flaky_examples).to eq({}) end end @@ -65,25 +64,35 @@ describe RspecFlaky::Listener, :aggregate_failures do it_behaves_like 'a valid Listener instance' end - context 'when a report file exists and set by SUITE_FLAKY_RSPEC_REPORT_PATH' do - let(:report_file) do - Tempfile.new(%w[rspec_flaky_report .json]).tap do |f| - f.write(JSON.pretty_generate(suite_flaky_example_report)) - f.rewind - end - end + context 'when SUITE_FLAKY_RSPEC_REPORT_PATH is set' do + let(:report_file_path) { 'foo/report.json' } before do - stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', report_file.path) + stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', report_file_path) end - after do - report_file.close - report_file.unlink + context 'and report file exists' do + before do + expect(File).to receive(:exist?).with(report_file_path).and_return(true) + end + + it 'delegates the load to RspecFlaky::Report' do + report = RspecFlaky::Report.new(RspecFlaky::FlakyExamplesCollection.new(suite_flaky_example_report)) + + 
expect(RspecFlaky::Report).to receive(:load).with(report_file_path).and_return(report) + expect(described_class.new.suite_flaky_examples.to_h).to eq(report.flaky_examples.to_h) + end end - it_behaves_like 'a valid Listener instance' do - let(:expected_suite_flaky_examples) { suite_flaky_example_report } + context 'and report file does not exist' do + before do + expect(File).to receive(:exist?).with(report_file_path).and_return(false) + end + + it 'return an empty hash' do + expect(RspecFlaky::Report).not_to receive(:load) + expect(described_class.new.suite_flaky_examples.to_h).to eq({}) + end end end end @@ -186,74 +195,21 @@ describe RspecFlaky::Listener, :aggregate_failures do let(:notification_already_flaky_rspec_example) { double(example: already_flaky_rspec_example) } context 'when a report file path is set by FLAKY_RSPEC_REPORT_PATH' do - let(:report_file_path) { Rails.root.join('tmp', 'rspec_flaky_report.json') } - let(:new_report_file_path) { Rails.root.join('tmp', 'rspec_flaky_new_report.json') } + it 'delegates the writes to RspecFlaky::Report' do + listener.example_passed(notification_new_flaky_rspec_example) + listener.example_passed(notification_already_flaky_rspec_example) - before do - stub_env('FLAKY_RSPEC_REPORT_PATH', report_file_path) - stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', new_report_file_path) - FileUtils.rm(report_file_path) if File.exist?(report_file_path) - FileUtils.rm(new_report_file_path) if File.exist?(new_report_file_path) - end + report1 = double + report2 = double - after do - FileUtils.rm(report_file_path) if File.exist?(report_file_path) - FileUtils.rm(new_report_file_path) if File.exist?(new_report_file_path) - end + expect(RspecFlaky::Report).to receive(:new).with(listener.flaky_examples).and_return(report1) + expect(report1).to receive(:write).with(RspecFlaky::Config.flaky_examples_report_path) - context 'when FLAKY_RSPEC_GENERATE_REPORT == "false"' do - before do - stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'false') - end - - it 'does not write any report file' do - listener.example_passed(notification_new_flaky_rspec_example) + expect(RspecFlaky::Report).to receive(:new).with(listener.flaky_examples - listener.suite_flaky_examples).and_return(report2) + expect(report2).to receive(:write).with(RspecFlaky::Config.new_flaky_examples_report_path) - listener.dump_summary(nil) - - expect(File.exist?(report_file_path)).to be(false) - expect(File.exist?(new_report_file_path)).to be(false) - end + listener.dump_summary(nil) end - - context 'when FLAKY_RSPEC_GENERATE_REPORT == "true"' do - before do - stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'true') - end - - around do |example| - Timecop.freeze { example.run } - end - - it 'writes the report files' do - listener.example_passed(notification_new_flaky_rspec_example) - listener.example_passed(notification_already_flaky_rspec_example) - - listener.dump_summary(nil) - - expect(File.exist?(report_file_path)).to be(true) - expect(File.exist?(new_report_file_path)).to be(true) - - expect(File.read(report_file_path)) - .to eq(JSON.pretty_generate(listener.to_report(listener.flaky_examples))) - - new_example = RspecFlaky::Example.new(notification_new_flaky_rspec_example) - new_flaky_example = RspecFlaky::FlakyExample.new(new_example) - new_flaky_example.update_flakiness! 
- - expect(File.read(new_report_file_path)) - .to eq(JSON.pretty_generate(listener.to_report(new_example.uid => new_flaky_example))) - end - end - end - end - - describe '#to_report' do - let(:listener) { described_class.new(suite_flaky_example_report.to_json) } - - it 'transforms the internal hash to a JSON-ready hash' do - expect(listener.to_report(already_flaky_example_uid => already_flaky_example)) - .to match(hash_including(suite_flaky_example_report)) end end end diff --git a/spec/lib/rspec_flaky/report_spec.rb b/spec/lib/rspec_flaky/report_spec.rb new file mode 100644 index 00000000000..7d57d99f7e5 --- /dev/null +++ b/spec/lib/rspec_flaky/report_spec.rb @@ -0,0 +1,125 @@ +require 'spec_helper' + +describe RspecFlaky::Report, :aggregate_failures do + let(:a_hundred_days) { 3600 * 24 * 100 } + let(:collection_hash) do + { + a: { example_id: 'spec/foo/bar_spec.rb:2' }, + b: { example_id: 'spec/foo/baz_spec.rb:3', first_flaky_at: (Time.now - a_hundred_days).to_s, last_flaky_at: (Time.now - a_hundred_days).to_s } + } + end + let(:suite_flaky_example_report) do + { + '6e869794f4cfd2badd93eb68719371d1': { + example_id: 'spec/foo/bar_spec.rb:2', + file: 'spec/foo/bar_spec.rb', + line: 2, + description: 'hello world', + first_flaky_at: 1234, + last_flaky_at: 4321, + last_attempts_count: 3, + flaky_reports: 1, + last_flaky_job: nil + } + } + end + let(:flaky_examples) { RspecFlaky::FlakyExamplesCollection.new(collection_hash) } + let(:report) { described_class.new(flaky_examples) } + + describe '.load' do + let!(:report_file) do + Tempfile.new(%w[rspec_flaky_report .json]).tap do |f| + f.write(JSON.pretty_generate(suite_flaky_example_report)) + f.rewind + end + end + + after do + report_file.close + report_file.unlink + end + + it 'loads the report file' do + expect(described_class.load(report_file.path).flaky_examples.to_h).to eq(suite_flaky_example_report) + end + end + + describe '.load_json' do + let(:report_json) do + JSON.pretty_generate(suite_flaky_example_report) + end + + it 'loads the report file' do + expect(described_class.load_json(report_json).flaky_examples.to_h).to eq(suite_flaky_example_report) + end + end + + describe '#initialize' do + it 'accepts a RspecFlaky::FlakyExamplesCollection' do + expect { report }.not_to raise_error + end + + it 'does not accept anything else' do + expect { described_class.new([1, 2, 3]) }.to raise_error(ArgumentError, "`flaky_examples` must be a RspecFlaky::FlakyExamplesCollection, Array given!") + end + end + + it 'delegates to #flaky_examples using SimpleDelegator' do + expect(report.__getobj__).to eq(flaky_examples) + end + + describe '#write' do + let(:report_file_path) { Rails.root.join('tmp', 'rspec_flaky_report.json') } + + before do + FileUtils.rm(report_file_path) if File.exist?(report_file_path) + end + + after do + FileUtils.rm(report_file_path) if File.exist?(report_file_path) + end + + context 'when RspecFlaky::Config.generate_report? is false' do + before do + allow(RspecFlaky::Config).to receive(:generate_report?).and_return(false) + end + + it 'does not write any report file' do + report.write(report_file_path) + + expect(File.exist?(report_file_path)).to be(false) + end + end + + context 'when RspecFlaky::Config.generate_report? 
is true' do + before do + allow(RspecFlaky::Config).to receive(:generate_report?).and_return(true) + end + + it 'delegates the writes to RspecFlaky::Report' do + report.write(report_file_path) + + expect(File.exist?(report_file_path)).to be(true) + expect(File.read(report_file_path)) + .to eq(JSON.pretty_generate(report.flaky_examples.to_h)) + end + end + end + + describe '#prune_outdated' do + it 'returns a new collection without the examples older than 90 days by default' do + new_report = flaky_examples.to_h.dup.tap { |r| r.delete(:b) } + new_flaky_examples = report.prune_outdated + + expect(new_flaky_examples).to be_a(described_class) + expect(new_flaky_examples.to_h).to eq(new_report) + expect(flaky_examples).to have_key(:b) + end + + it 'accepts a given number of days' do + new_flaky_examples = report.prune_outdated(days: 200) + + expect(new_flaky_examples.to_h).to eq(report.to_h) + end + end +end diff --git a/spec/lib/uploaded_file_spec.rb b/spec/lib/uploaded_file_spec.rb new file mode 100644 index 00000000000..cc99e7e8911 --- /dev/null +++ b/spec/lib/uploaded_file_spec.rb @@ -0,0 +1,116 @@ +require 'spec_helper' + +describe UploadedFile do + describe ".from_params" do + let(:temp_dir) { Dir.tmpdir } + let(:temp_file) { Tempfile.new("test", temp_dir) } + let(:upload_path) { nil } + + subject do + described_class.from_params(params, :file, upload_path) + end + + before do + FileUtils.touch(temp_file) + end + + after do + FileUtils.rm_f(temp_file) + FileUtils.rm_r(upload_path) if upload_path + end + + context 'when valid file is specified' do + context 'only local path is specified' do + let(:params) do + { 'file.path' => temp_file.path } + end + + it "succeeds" do + is_expected.not_to be_nil + end + + it "generates filename from path" do + expect(subject.original_filename).to eq(::File.basename(temp_file.path)) + end + end + + context 'all parameters are specified' do + let(:params) do + { 'file.path' => temp_file.path, + 'file.name' => 'my_file.txt', + 'file.type' => 'my/type', + 'file.sha256' => 'sha256', + 'file.remote_id' => 'remote_id' } + end + + it "succeeds" do + is_expected.not_to be_nil + end + + it "generates filename from path" do + expect(subject.original_filename).to eq('my_file.txt') + expect(subject.content_type).to eq('my/type') + expect(subject.sha256).to eq('sha256') + expect(subject.remote_id).to eq('remote_id') + end + end + end + + context 'when no params are specified' do + let(:params) do + {} + end + + it "does not return an object" do + is_expected.to be_nil + end + end + + context 'when only remote id is specified' do + let(:params) do + { 'file.remote_id' => 'remote_id' } + end + + it "raises an error" do + expect { subject }.to raise_error(UploadedFile::InvalidPathError, /file is invalid/) + end + end + + context 'when verifying allowed paths' do + let(:params) do + { 'file.path' => temp_file.path } + end + + context 'when file is stored in system temporary folder' do + let(:temp_dir) { Dir.tmpdir } + + it "succeeds" do + is_expected.not_to be_nil + end + end + + context 'when file is stored in user provided upload path' do + let(:upload_path) { Dir.mktmpdir } + let(:temp_dir) { upload_path } + + it "succeeds" do + is_expected.not_to be_nil + end + end + + context 'when file is stored outside of user provided upload path' do + let!(:generated_dir) { Dir.mktmpdir } + let!(:temp_dir) { Dir.mktmpdir } + + before do + # We overwrite default temporary path + allow(Dir).to receive(:tmpdir).and_return(generated_dir) + end + + it "raises an error" do + expect { 
subject }.to raise_error(UploadedFile::InvalidPathError, /insecure path used/) + end + end + end + end +end diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb index 83c33797bbc..43e419cd7de 100644 --- a/spec/mailers/notify_spec.rb +++ b/spec/mailers/notify_spec.rb @@ -389,6 +389,48 @@ describe Notify do end end end + + shared_examples 'a push to an existing merge request' do + let(:push_user) { create(:user) } + + subject do + described_class.push_to_merge_request_email(recipient.id, merge_request.id, push_user.id, new_commits: merge_request.commits, existing_commits: existing_commits) + end + + it_behaves_like 'a multiple recipients email' + it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do + let(:model) { merge_request } + end + it_behaves_like 'it should show Gmail Actions View Merge request link' + it_behaves_like 'an unsubscribeable thread' + + it 'is sent as the push user' do + sender = subject.header[:from].addrs[0] + + expect(sender.display_name).to eq(push_user.name) + expect(sender.address).to eq(gitlab_sender) + end + + it 'has the correct subject and body' do + aggregate_failures do + is_expected.to have_referable_subject(merge_request, reply: true) + is_expected.to have_body_text("#{push_user.name} pushed new commits") + is_expected.to have_body_text(project_merge_request_path(project, merge_request)) + end + end + end + + describe 'that have new commits' do + let(:existing_commits) { [] } + + it_behaves_like 'a push to an existing merge request' + end + + describe 'that have new commits on top of an existing one' do + let(:existing_commits) { [merge_request.commits.first] } + + it_behaves_like 'a push to an existing merge request' + end end context 'for issue notes' do diff --git a/spec/mailers/previews/email_rejection_mailer_preview.rb b/spec/mailers/previews/email_rejection_mailer_preview.rb new file mode 100644 index 00000000000..639e8471232 --- /dev/null +++ b/spec/mailers/previews/email_rejection_mailer_preview.rb @@ -0,0 +1,5 @@ +class EmailRejectionMailerPreview < ActionMailer::Preview + def rejection + EmailRejectionMailer.rejection("some rejection reason", "From: someone@example.com\nraw email here").message + end +end diff --git a/spec/mailers/previews/notify_preview.rb b/spec/mailers/previews/notify_preview.rb index 580f0d56a92..e32fd0bd120 100644 --- a/spec/mailers/previews/notify_preview.rb +++ b/spec/mailers/previews/notify_preview.rb @@ -58,14 +58,87 @@ class NotifyPreview < ActionMailer::Preview end end + def closed_issue_email + Notify.closed_issue_email(user.id, issue.id, user.id).message + end + + def issue_status_changed_email + Notify.issue_status_changed_email(user.id, issue.id, 'closed', user.id).message + end + + def closed_merge_request_email + Notify.closed_merge_request_email(user.id, issue.id, user.id).message + end + + def merge_request_status_email + Notify.merge_request_status_email(user.id, merge_request.id, 'closed', user.id).message + end + + def merged_merge_request_email + Notify.merged_merge_request_email(user.id, merge_request.id, user.id).message + end + + def member_access_denied_email + Notify.member_access_denied_email('project', project.id, user.id).message + end + + def member_access_granted_email + Notify.member_access_granted_email('project', user.id).message + end + + def member_access_requested_email + Notify.member_access_requested_email('group', user.id, 'some@example.com').message + end + + def member_invite_accepted_email + Notify.member_invite_accepted_email('project', 
user.id).message + end + + def member_invite_declined_email + Notify.member_invite_declined_email( + 'project', + project.id, + 'invite@example.com', + user.id + ).message + end + + def member_invited_email + Notify.member_invited_email('project', user.id, '1234').message + end + + def pages_domain_enabled_email + cleanup do + pages_domain = PagesDomain.new(domain: 'my.example.com', project: project, verified_at: Time.now, enabled_until: 1.week.from_now) + + Notify.pages_domain_enabled_email(pages_domain, user).message + end + end + + def pipeline_success_email + Notify.pipeline_success_email(pipeline, pipeline.user.try(:email)) + end + + def pipeline_failed_email + Notify.pipeline_failed_email(pipeline, pipeline.user.try(:email)) + end + private def project @project ||= Project.find_by_full_path('gitlab-org/gitlab-test') end + def issue + @merge_request ||= project.issues.first + end + def merge_request - @merge_request ||= project.merge_requests.find_by(source_branch: 'master', target_branch: 'feature') + @merge_request ||= project.merge_requests.first + end + + def pipeline + @pipeline = Ci::Pipeline.last end def user @@ -94,14 +167,4 @@ class NotifyPreview < ActionMailer::Preview email end - - def pipeline_success_email - pipeline = Ci::Pipeline.last - Notify.pipeline_success_email(pipeline, pipeline.user.try(:email)) - end - - def pipeline_failed_email - pipeline = Ci::Pipeline.last - Notify.pipeline_failed_email(pipeline, pipeline.user.try(:email)) - end end diff --git a/spec/mailers/previews/repository_check_mailer_preview.rb b/spec/mailers/previews/repository_check_mailer_preview.rb new file mode 100644 index 00000000000..19d4eab1805 --- /dev/null +++ b/spec/mailers/previews/repository_check_mailer_preview.rb @@ -0,0 +1,5 @@ +class RepositoryCheckMailerPreview < ActionMailer::Preview + def notify + RepositoryCheckMailer.notify(3).message + end +end diff --git a/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb b/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb new file mode 100644 index 00000000000..4395e2f8264 --- /dev/null +++ b/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb @@ -0,0 +1,44 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20180122154930_schedule_set_confidential_note_events_on_services.rb') + +describe ScheduleSetConfidentialNoteEventsOnServices, :migration, :sidekiq do + let(:services_table) { table(:services) } + let(:migration_class) { Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnServices } + let(:migration_name) { migration_class.to_s.demodulize } + + let!(:service_1) { services_table.create!(confidential_note_events: nil, note_events: true) } + let!(:service_2) { services_table.create!(confidential_note_events: nil, note_events: true) } + let!(:service_migrated) { services_table.create!(confidential_note_events: true, note_events: true) } + let!(:service_skip) { services_table.create!(confidential_note_events: nil, note_events: false) } + let!(:service_new) { services_table.create!(confidential_note_events: false, note_events: true) } + let!(:service_4) { services_table.create!(confidential_note_events: nil, note_events: true) } + + before do + stub_const("#{described_class}::BATCH_SIZE", 1) + end + + it 'schedules background migrations at correct time' do + Sidekiq::Testing.fake! do + Timecop.freeze do + migrate! 
+ + expect(migration_name).to be_scheduled_delayed_migration(20.minutes, service_1.id, service_1.id) + expect(migration_name).to be_scheduled_delayed_migration(40.minutes, service_2.id, service_2.id) + expect(migration_name).to be_scheduled_delayed_migration(60.minutes, service_4.id, service_4.id) + expect(BackgroundMigrationWorker.jobs.size).to eq 3 + end + end + end + + it 'correctly processes services' do + Sidekiq::Testing.inline! do + expect(services_table.where(confidential_note_events: nil).count).to eq 4 + expect(services_table.where(confidential_note_events: true).count).to eq 1 + + migrate! + + expect(services_table.where(confidential_note_events: nil).count).to eq 1 + expect(services_table.where(confidential_note_events: true).count).to eq 4 + end + end +end diff --git a/spec/migrations/add_foreign_keys_to_todos_spec.rb b/spec/migrations/add_foreign_keys_to_todos_spec.rb index 4a22bd6f342..bf2fa5c0f56 100644 --- a/spec/migrations/add_foreign_keys_to_todos_spec.rb +++ b/spec/migrations/add_foreign_keys_to_todos_spec.rb @@ -4,8 +4,8 @@ require Rails.root.join('db', 'migrate', '20180201110056_add_foreign_keys_to_tod describe AddForeignKeysToTodos, :migration do let(:todos) { table(:todos) } - let(:project) { create(:project) } - let(:user) { create(:user) } + let(:project) { create(:project) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let(:user) { create(:user) } # rubocop:disable RSpec/FactoriesInMigrationSpecs context 'add foreign key on user_id' do let!(:todo_with_user) { create_todo(user_id: user.id) } @@ -34,7 +34,7 @@ describe AddForeignKeysToTodos, :migration do end context 'add foreign key on note_id' do - let(:note) { create(:note) } + let(:note) { create(:note) } # rubocop:disable RSpec/FactoriesInMigrationSpecs let!(:todo_with_note) { create_todo(note_id: note.id) } let!(:todo_with_invalid_note) { create_todo(note_id: 4711) } let!(:todo_without_note) { create_todo(note_id: nil) } diff --git a/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb b/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb index 63defcb39bf..d8dd7a2fb83 100644 --- a/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb +++ b/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb @@ -6,18 +6,18 @@ describe AddHeadPipelineForEachMergeRequest, :delete do let(:migration) { described_class.new } - let!(:project) { create(:project) } + let!(:project) { create(:project) } # rubocop:disable RSpec/FactoriesInMigrationSpecs let!(:other_project) { fork_project(project) } - let!(:pipeline_1) { create(:ci_pipeline, project: project, ref: "branch_1") } - let!(:pipeline_2) { create(:ci_pipeline, project: other_project, ref: "branch_1") } - let!(:pipeline_3) { create(:ci_pipeline, project: other_project, ref: "branch_1") } - let!(:pipeline_4) { create(:ci_pipeline, project: project, ref: "branch_2") } + let!(:pipeline_1) { create(:ci_pipeline, project: project, ref: "branch_1") } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:pipeline_2) { create(:ci_pipeline, project: other_project, ref: "branch_1") } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:pipeline_3) { create(:ci_pipeline, project: other_project, ref: "branch_1") } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:pipeline_4) { create(:ci_pipeline, project: project, ref: "branch_2") } # rubocop:disable RSpec/FactoriesInMigrationSpecs - let!(:mr_1) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_1", target_branch: 
"target_1") } - let!(:mr_2) { create(:merge_request, source_project: other_project, target_project: project, source_branch: "branch_1", target_branch: "target_2") } - let!(:mr_3) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_2", target_branch: "master") } - let!(:mr_4) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_3", target_branch: "master") } + let!(:mr_1) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_1", target_branch: "target_1") } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:mr_2) { create(:merge_request, source_project: other_project, target_project: project, source_branch: "branch_1", target_branch: "target_2") } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:mr_3) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_2", target_branch: "master") } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:mr_4) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_3", target_branch: "master") } # rubocop:disable RSpec/FactoriesInMigrationSpecs context "#up" do context "when source_project and source_branch of pipeline are the same of merge request" do diff --git a/spec/migrations/calculate_conv_dev_index_percentages_spec.rb b/spec/migrations/calculate_conv_dev_index_percentages_spec.rb index f3a46025376..19f06810e54 100644 --- a/spec/migrations/calculate_conv_dev_index_percentages_spec.rb +++ b/spec/migrations/calculate_conv_dev_index_percentages_spec.rb @@ -6,7 +6,7 @@ require Rails.root.join('db', 'post_migrate', '20170803090603_calculate_conv_dev describe CalculateConvDevIndexPercentages, :delete do let(:migration) { described_class.new } let!(:conv_dev_index) do - create(:conversational_development_index_metric, + create(:conversational_development_index_metric, # rubocop:disable RSpec/FactoriesInMigrationSpecs leader_notes: 0, instance_milestones: 0, percentage_issues: 0, diff --git a/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb b/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb index 033d0e7584d..b5980cb9ddb 100644 --- a/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb +++ b/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb @@ -10,9 +10,9 @@ describe CleanupNamespacelessPendingDeleteProjects, :migration, schema: 20180222 describe '#up' do it 'only cleans up pending delete projects' do - create(:project) - create(:project, pending_delete: true) - project = build(:project, pending_delete: true, namespace_id: nil) + create(:project) # rubocop:disable RSpec/FactoriesInMigrationSpecs + create(:project, pending_delete: true) # rubocop:disable RSpec/FactoriesInMigrationSpecs + project = build(:project, pending_delete: true, namespace_id: nil) # rubocop:disable RSpec/FactoriesInMigrationSpecs project.save(validate: false) expect(NamespacelessProjectDestroyWorker).to receive(:bulk_perform_async).with([[project.id]]) @@ -21,8 +21,8 @@ describe CleanupNamespacelessPendingDeleteProjects, :migration, schema: 20180222 end it 'does nothing when no pending delete projects without namespace found' do - create(:project) - create(:project, pending_delete: true) + create(:project) # rubocop:disable RSpec/FactoriesInMigrationSpecs + create(:project, pending_delete: true) # rubocop:disable RSpec/FactoriesInMigrationSpecs 
expect(NamespacelessProjectDestroyWorker).not_to receive(:bulk_perform_async) diff --git a/spec/migrations/cleanup_nonexisting_namespace_pending_delete_projects_spec.rb b/spec/migrations/cleanup_nonexisting_namespace_pending_delete_projects_spec.rb index 7879105a334..8f40ac3e38b 100644 --- a/spec/migrations/cleanup_nonexisting_namespace_pending_delete_projects_spec.rb +++ b/spec/migrations/cleanup_nonexisting_namespace_pending_delete_projects_spec.rb @@ -9,11 +9,11 @@ describe CleanupNonexistingNamespacePendingDeleteProjects do end describe '#up' do - set(:some_project) { create(:project) } + set(:some_project) { create(:project) } # rubocop:disable RSpec/FactoriesInMigrationSpecs it 'only cleans up when namespace does not exist' do - create(:project, pending_delete: true) - project = build(:project, pending_delete: true, namespace: nil, namespace_id: Namespace.maximum(:id).to_i.succ) + create(:project, pending_delete: true) # rubocop:disable RSpec/FactoriesInMigrationSpecs + project = build(:project, pending_delete: true, namespace: nil, namespace_id: Namespace.maximum(:id).to_i.succ) # rubocop:disable RSpec/FactoriesInMigrationSpecs project.save(validate: false) expect(NamespacelessProjectDestroyWorker).to receive(:bulk_perform_async).with([[project.id]]) @@ -22,7 +22,7 @@ describe CleanupNonexistingNamespacePendingDeleteProjects do end it 'does nothing when no pending delete projects without namespace found' do - create(:project, pending_delete: true, namespace: create(:namespace)) + create(:project, pending_delete: true, namespace: create(:namespace)) # rubocop:disable RSpec/FactoriesInMigrationSpecs expect(NamespacelessProjectDestroyWorker).not_to receive(:bulk_perform_async) diff --git a/spec/migrations/issues_moved_to_id_foreign_key_spec.rb b/spec/migrations/issues_moved_to_id_foreign_key_spec.rb index d2eef81f396..dd2b08099f2 100644 --- a/spec/migrations/issues_moved_to_id_foreign_key_spec.rb +++ b/spec/migrations/issues_moved_to_id_foreign_key_spec.rb @@ -5,9 +5,9 @@ require Rails.root.join('db', 'migrate', '20171106151218_issues_moved_to_id_fore # only_mirror_protected_branches column in the projects table to create a # project via FactoryBot. 
describe IssuesMovedToIdForeignKey, :migration, schema: 20171114150259 do - let!(:issue_first) { create(:issue, moved_to_id: issue_second.id) } - let!(:issue_second) { create(:issue, moved_to_id: issue_third.id) } - let!(:issue_third) { create(:issue) } + let!(:issue_first) { create(:issue, moved_to_id: issue_second.id) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:issue_second) { create(:issue, moved_to_id: issue_third.id) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:issue_third) { create(:issue) } # rubocop:disable RSpec/FactoriesInMigrationSpecs subject { described_class.new } diff --git a/spec/migrations/migrate_gcp_clusters_to_new_clusters_architectures_spec.rb b/spec/migrations/migrate_gcp_clusters_to_new_clusters_architectures_spec.rb index c81ec887ded..df009cec25c 100644 --- a/spec/migrations/migrate_gcp_clusters_to_new_clusters_architectures_spec.rb +++ b/spec/migrations/migrate_gcp_clusters_to_new_clusters_architectures_spec.rb @@ -4,8 +4,24 @@ require Rails.root.join('db', 'post_migrate', '20171013104327_migrate_gcp_cluste describe MigrateGcpClustersToNewClustersArchitectures, :migration do let(:projects) { table(:projects) } let(:project) { projects.create } - let(:user) { create(:user) } - let(:service) { create(:kubernetes_service, project_id: project.id) } + let(:users) { table(:users) } + let(:user) { users.create! } + let(:service) { GcpMigrationSpec::KubernetesService.create!(project_id: project.id) } + + module GcpMigrationSpec + class KubernetesService < ActiveRecord::Base + self.table_name = 'services' + + serialize :properties, JSON # rubocop:disable Cop/ActiveRecordSerialize + + default_value_for :active, true + default_value_for :type, 'KubernetesService' + default_value_for :properties, { + api_url: 'https://kubernetes.example.com', + token: 'a' * 40 + } + end + end context 'when cluster is being created' do let(:project_id) { project.id } diff --git a/spec/migrations/migrate_old_artifacts_spec.rb b/spec/migrations/migrate_old_artifacts_spec.rb index 92eb1d9ce86..4187ab149a5 100644 --- a/spec/migrations/migrate_old_artifacts_spec.rb +++ b/spec/migrations/migrate_old_artifacts_spec.rb @@ -16,18 +16,18 @@ describe MigrateOldArtifacts do end context 'with migratable data' do - set(:project1) { create(:project, ci_id: 2) } - set(:project2) { create(:project, ci_id: 3) } - set(:project3) { create(:project) } + set(:project1) { create(:project, ci_id: 2) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + set(:project2) { create(:project, ci_id: 3) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + set(:project3) { create(:project) } # rubocop:disable RSpec/FactoriesInMigrationSpecs - set(:pipeline1) { create(:ci_empty_pipeline, project: project1) } - set(:pipeline2) { create(:ci_empty_pipeline, project: project2) } - set(:pipeline3) { create(:ci_empty_pipeline, project: project3) } + set(:pipeline1) { create(:ci_empty_pipeline, project: project1) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + set(:pipeline2) { create(:ci_empty_pipeline, project: project2) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + set(:pipeline3) { create(:ci_empty_pipeline, project: project3) } # rubocop:disable RSpec/FactoriesInMigrationSpecs - let!(:build_with_legacy_artifacts) { create(:ci_build, pipeline: pipeline1) } - let!(:build_without_artifacts) { create(:ci_build, pipeline: pipeline1) } - let!(:build2) { create(:ci_build, pipeline: pipeline2) } - let!(:build3) { create(:ci_build, pipeline: pipeline3) } + let!(:build_with_legacy_artifacts) { 
create(:ci_build, pipeline: pipeline1) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:build_without_artifacts) { create(:ci_build, pipeline: pipeline1) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:build2) { create(:ci_build, pipeline: pipeline2) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:build3) { create(:ci_build, pipeline: pipeline3) } # rubocop:disable RSpec/FactoriesInMigrationSpecs before do setup_builds(build2, build3) @@ -66,7 +66,7 @@ describe MigrateOldArtifacts do end it 'all files do have artifacts' do - Ci::Build.with_artifacts do |build| + Ci::Build.with_artifacts_archive do |build| expect(build).to have_artifacts end end diff --git a/spec/migrations/migrate_process_commit_worker_jobs_spec.rb b/spec/migrations/migrate_process_commit_worker_jobs_spec.rb index 657113812bd..4ee1d255fbd 100644 --- a/spec/migrations/migrate_process_commit_worker_jobs_spec.rb +++ b/spec/migrations/migrate_process_commit_worker_jobs_spec.rb @@ -4,8 +4,8 @@ require 'spec_helper' require Rails.root.join('db', 'migrate', '20161124141322_migrate_process_commit_worker_jobs.rb') describe MigrateProcessCommitWorkerJobs do - let(:project) { create(:project, :legacy_storage, :repository) } - let(:user) { create(:user) } + let(:project) { create(:project, :legacy_storage, :repository) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let(:user) { create(:user) } # rubocop:disable RSpec/FactoriesInMigrationSpecs let(:commit) { project.commit.raw.rugged_commit } describe 'Project' do diff --git a/spec/migrations/migrate_user_activities_to_users_last_activity_on_spec.rb b/spec/migrations/migrate_user_activities_to_users_last_activity_on_spec.rb index a17c9c72bde..99173708190 100644 --- a/spec/migrations/migrate_user_activities_to_users_last_activity_on_spec.rb +++ b/spec/migrations/migrate_user_activities_to_users_last_activity_on_spec.rb @@ -5,8 +5,8 @@ require Rails.root.join('db', 'post_migrate', '20170324160416_migrate_user_activ describe MigrateUserActivitiesToUsersLastActivityOn, :clean_gitlab_redis_shared_state, :delete do let(:migration) { described_class.new } - let!(:user_active_1) { create(:user) } - let!(:user_active_2) { create(:user) } + let!(:user_active_1) { create(:user) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:user_active_2) { create(:user) } # rubocop:disable RSpec/FactoriesInMigrationSpecs def record_activity(user, time) Gitlab::Redis::SharedState.with do |redis| diff --git a/spec/migrations/migrate_user_project_view_spec.rb b/spec/migrations/migrate_user_project_view_spec.rb index 31d16e17d7b..80468b9d01e 100644 --- a/spec/migrations/migrate_user_project_view_spec.rb +++ b/spec/migrations/migrate_user_project_view_spec.rb @@ -5,7 +5,7 @@ require Rails.root.join('db', 'post_migrate', '20170406142253_migrate_user_proje describe MigrateUserProjectView, :delete do let(:migration) { described_class.new } - let!(:user) { create(:user, project_view: 'readme') } + let!(:user) { create(:user, project_view: 'readme') } # rubocop:disable RSpec/FactoriesInMigrationSpecs describe '#up' do it 'updates project view setting with new value' do diff --git a/spec/migrations/move_personal_snippets_files_spec.rb b/spec/migrations/move_personal_snippets_files_spec.rb index 1a319eccc0d..1f39ad98fb8 100644 --- a/spec/migrations/move_personal_snippets_files_spec.rb +++ b/spec/migrations/move_personal_snippets_files_spec.rb @@ -16,14 +16,14 @@ describe MovePersonalSnippetsFiles do describe "#up" do let(:snippet) do - snippet = 
create(:personal_snippet) + snippet = create(:personal_snippet) # rubocop:disable RSpec/FactoriesInMigrationSpecs create_upload('picture.jpg', snippet) snippet.update(description: markdown_linking_file('picture.jpg', snippet)) snippet end let(:snippet_with_missing_file) do - snippet = create(:snippet) + snippet = create(:snippet) # rubocop:disable RSpec/FactoriesInMigrationSpecs create_upload('picture.jpg', snippet, create_file: false) snippet.update(description: markdown_linking_file('picture.jpg', snippet)) snippet @@ -62,7 +62,7 @@ describe MovePersonalSnippetsFiles do secret = "secret#{snippet.id}" file_location = "/uploads/-/system/personal_snippet/#{snippet.id}/#{secret}/picture.jpg" markdown = markdown_linking_file('picture.jpg', snippet) - note = create(:note_on_personal_snippet, noteable: snippet, note: "with #{markdown}") + note = create(:note_on_personal_snippet, noteable: snippet, note: "with #{markdown}") # rubocop:disable RSpec/FactoriesInMigrationSpecs migration.up @@ -73,14 +73,14 @@ describe MovePersonalSnippetsFiles do describe "#down" do let(:snippet) do - snippet = create(:personal_snippet) + snippet = create(:personal_snippet) # rubocop:disable RSpec/FactoriesInMigrationSpecs create_upload('picture.jpg', snippet, in_new_path: true) snippet.update(description: markdown_linking_file('picture.jpg', snippet, in_new_path: true)) snippet end let(:snippet_with_missing_file) do - snippet = create(:personal_snippet) + snippet = create(:personal_snippet) # rubocop:disable RSpec/FactoriesInMigrationSpecs create_upload('picture.jpg', snippet, create_file: false, in_new_path: true) snippet.update(description: markdown_linking_file('picture.jpg', snippet, in_new_path: true)) snippet @@ -119,7 +119,7 @@ describe MovePersonalSnippetsFiles do markdown = markdown_linking_file('picture.jpg', snippet, in_new_path: true) secret = "secret#{snippet.id}" file_location = "/uploads/personal_snippet/#{snippet.id}/#{secret}/picture.jpg" - note = create(:note_on_personal_snippet, noteable: snippet, note: "with #{markdown}") + note = create(:note_on_personal_snippet, noteable: snippet, note: "with #{markdown}") # rubocop:disable RSpec/FactoriesInMigrationSpecs migration.down @@ -135,7 +135,7 @@ describe MovePersonalSnippetsFiles do secret = '123456789' filename = 'hello.jpg' - snippet = create(:personal_snippet) + snippet = create(:personal_snippet) # rubocop:disable RSpec/FactoriesInMigrationSpecs path_before = "/uploads/personal_snippet/#{snippet.id}/#{secret}/#{filename}" path_after = "/uploads/system/personal_snippet/#{snippet.id}/#{secret}/#{filename}" @@ -161,7 +161,7 @@ describe MovePersonalSnippetsFiles do FileUtils.touch(absolute_path) end - create(:upload, model: snippet, path: "#{secret}/#{filename}", uploader: PersonalFileUploader) + create(:upload, model: snippet, path: "#{secret}/#{filename}", uploader: PersonalFileUploader) # rubocop:disable RSpec/FactoriesInMigrationSpecs end def markdown_linking_file(filename, snippet, in_new_path: false) diff --git a/spec/migrations/remove_dot_git_from_usernames_spec.rb b/spec/migrations/remove_dot_git_from_usernames_spec.rb index 129374cb38c..f11880a83e9 100644 --- a/spec/migrations/remove_dot_git_from_usernames_spec.rb +++ b/spec/migrations/remove_dot_git_from_usernames_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' require Rails.root.join('db', 'migrate', '20161226122833_remove_dot_git_from_usernames.rb') describe RemoveDotGitFromUsernames do - let(:user) { create(:user) } + let(:user) { create(:user) } # rubocop:disable 
RSpec/FactoriesInMigrationSpecs let(:migration) { described_class.new } describe '#up' do @@ -23,13 +23,15 @@ describe RemoveDotGitFromUsernames do context 'when new path exists already' do describe '#up' do - let(:user2) { create(:user) } + let(:user2) { create(:user) } # rubocop:disable RSpec/FactoriesInMigrationSpecs before do update_namespace(user, 'test.git') update_namespace(user2, 'test_git') - storages = { 'default' => 'tmp/tests/custom_repositories' } + default_hash = Gitlab.config.repositories.storages.default.to_h + default_hash['path'] = 'tmp/tests/custom_repositories' + storages = { 'default' => Gitlab::GitalyClient::StorageSettings.new(default_hash) } allow(Gitlab.config.repositories).to receive(:storages).and_return(storages) allow(migration).to receive(:route_exists?).with('test_git').and_return(true) diff --git a/spec/migrations/remove_duplicate_mr_events_spec.rb b/spec/migrations/remove_duplicate_mr_events_spec.rb index e51872239ad..2509ac6afd6 100644 --- a/spec/migrations/remove_duplicate_mr_events_spec.rb +++ b/spec/migrations/remove_duplicate_mr_events_spec.rb @@ -5,17 +5,17 @@ describe RemoveDuplicateMrEvents, :delete do let(:migration) { described_class.new } describe '#up' do - let(:user) { create(:user) } - let(:merge_requests) { create_list(:merge_request, 2) } - let(:issue) { create(:issue) } + let(:user) { create(:user) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let(:merge_requests) { create_list(:merge_request, 2) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let(:issue) { create(:issue) } # rubocop:disable RSpec/FactoriesInMigrationSpecs let!(:events) do [ - create(:event, :created, author: user, target: merge_requests.first), - create(:event, :created, author: user, target: merge_requests.first), - create(:event, :updated, author: user, target: merge_requests.first), - create(:event, :created, author: user, target: merge_requests.second), - create(:event, :created, author: user, target: issue), - create(:event, :created, author: user, target: issue) + create(:event, :created, author: user, target: merge_requests.first), # rubocop:disable RSpec/FactoriesInMigrationSpecs + create(:event, :created, author: user, target: merge_requests.first), # rubocop:disable RSpec/FactoriesInMigrationSpecs + create(:event, :updated, author: user, target: merge_requests.first), # rubocop:disable RSpec/FactoriesInMigrationSpecs + create(:event, :created, author: user, target: merge_requests.second), # rubocop:disable RSpec/FactoriesInMigrationSpecs + create(:event, :created, author: user, target: issue), # rubocop:disable RSpec/FactoriesInMigrationSpecs + create(:event, :created, author: user, target: issue) # rubocop:disable RSpec/FactoriesInMigrationSpecs ] end diff --git a/spec/migrations/remove_empty_extern_uid_auth0_identities_spec.rb b/spec/migrations/remove_empty_extern_uid_auth0_identities_spec.rb new file mode 100644 index 00000000000..441c4295a40 --- /dev/null +++ b/spec/migrations/remove_empty_extern_uid_auth0_identities_spec.rb @@ -0,0 +1,22 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20180220150310_remove_empty_extern_uid_auth0_identities.rb') + +describe RemoveEmptyExternUidAuth0Identities, :migration do + let(:identities) { table(:identities) } + + before do + identities.create(provider: 'auth0', extern_uid: '') + identities.create(provider: 'auth0', extern_uid: 'valid') + identities.create(provider: 'github', extern_uid: '') + + migrate! 
+ end + + it 'leaves the correct auth0 identity' do + expect(identities.where(provider: 'auth0').pluck(:extern_uid)).to eq(['valid']) + end + + it 'leaves the correct github identity' do + expect(identities.where(provider: 'github').count).to eq(1) + end +end diff --git a/spec/migrations/remove_empty_fork_networks_spec.rb b/spec/migrations/remove_empty_fork_networks_spec.rb index 7f7ce91378b..f6d030ab25c 100644 --- a/spec/migrations/remove_empty_fork_networks_spec.rb +++ b/spec/migrations/remove_empty_fork_networks_spec.rb @@ -19,6 +19,10 @@ describe RemoveEmptyForkNetworks, :migration do deleted_project.destroy! end + after do + Upload.reset_column_information + end + it 'deletes only the fork network without members' do expect(fork_networks.count).to eq(2) diff --git a/spec/migrations/remove_project_labels_group_id_spec.rb b/spec/migrations/remove_project_labels_group_id_spec.rb index d80d61af20b..01b09e71d83 100644 --- a/spec/migrations/remove_project_labels_group_id_spec.rb +++ b/spec/migrations/remove_project_labels_group_id_spec.rb @@ -5,9 +5,9 @@ require Rails.root.join('db', 'post_migrate', '20180202111106_remove_project_lab describe RemoveProjectLabelsGroupId, :delete do let(:migration) { described_class.new } - let(:group) { create(:group) } - let!(:project_label) { create(:label, group_id: group.id) } - let!(:group_label) { create(:group_label) } + let(:group) { create(:group) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:project_label) { create(:label, group_id: group.id) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:group_label) { create(:group_label) } # rubocop:disable RSpec/FactoriesInMigrationSpecs describe '#up' do it 'updates the project labels group ID' do diff --git a/spec/migrations/remove_soft_removed_objects_spec.rb b/spec/migrations/remove_soft_removed_objects_spec.rb index ec089f9106d..fb70c284f5e 100644 --- a/spec/migrations/remove_soft_removed_objects_spec.rb +++ b/spec/migrations/remove_soft_removed_objects_spec.rb @@ -8,7 +8,7 @@ describe RemoveSoftRemovedObjects, :migration do create_with_deleted_at(:issue) end - regular_issue = create(:issue) + regular_issue = create(:issue) # rubocop:disable RSpec/FactoriesInMigrationSpecs run_migration @@ -28,7 +28,7 @@ describe RemoveSoftRemovedObjects, :migration do it 'removes routes of soft removed personal namespaces' do namespace = create_with_deleted_at(:namespace) - group = create(:group) + group = create(:group) # rubocop:disable RSpec/FactoriesInMigrationSpecs expect(Route.where(source: namespace).exists?).to eq(true) expect(Route.where(source: group).exists?).to eq(true) @@ -41,7 +41,7 @@ describe RemoveSoftRemovedObjects, :migration do it 'schedules the removal of soft removed groups' do group = create_with_deleted_at(:group) - admin = create(:user, admin: true) + admin = create(:user, admin: true) # rubocop:disable RSpec/FactoriesInMigrationSpecs expect_any_instance_of(GroupDestroyWorker) .to receive(:perform) @@ -67,7 +67,7 @@ describe RemoveSoftRemovedObjects, :migration do end def create_with_deleted_at(*args) - row = create(*args) + row = create(*args) # rubocop:disable RSpec/FactoriesInMigrationSpecs # We set "deleted_at" this way so we don't run into any column cache issues. 
row.class.where(id: row.id).update_all(deleted_at: 1.year.ago) diff --git a/spec/migrations/rename_more_reserved_project_names_spec.rb b/spec/migrations/rename_more_reserved_project_names_spec.rb index 75310075cc5..034e8a6a4e5 100644 --- a/spec/migrations/rename_more_reserved_project_names_spec.rb +++ b/spec/migrations/rename_more_reserved_project_names_spec.rb @@ -8,7 +8,7 @@ require Rails.root.join('db', 'post_migrate', '20170313133418_rename_more_reserv # around this we use the DELETE cleaning strategy. describe RenameMoreReservedProjectNames, :delete do let(:migration) { described_class.new } - let!(:project) { create(:project) } + let!(:project) { create(:project) } # rubocop:disable RSpec/FactoriesInMigrationSpecs before do project.path = 'artifacts' diff --git a/spec/migrations/rename_reserved_project_names_spec.rb b/spec/migrations/rename_reserved_project_names_spec.rb index 34336d705b1..592ac2b5fb9 100644 --- a/spec/migrations/rename_reserved_project_names_spec.rb +++ b/spec/migrations/rename_reserved_project_names_spec.rb @@ -12,7 +12,7 @@ require Rails.root.join('db', 'post_migrate', '20161221153951_rename_reserved_pr # Ideally, the test should not use factories and rely on the `table` helper instead. describe RenameReservedProjectNames, :migration, schema: :latest do let(:migration) { described_class.new } - let!(:project) { create(:project) } + let!(:project) { create(:project) } # rubocop:disable RSpec/FactoriesInMigrationSpecs before do project.path = 'projects' diff --git a/spec/migrations/rename_users_with_renamed_namespace_spec.rb b/spec/migrations/rename_users_with_renamed_namespace_spec.rb index cbc0ebeb44d..b8a4dc2b2c0 100644 --- a/spec/migrations/rename_users_with_renamed_namespace_spec.rb +++ b/spec/migrations/rename_users_with_renamed_namespace_spec.rb @@ -3,13 +3,13 @@ require Rails.root.join('db', 'post_migrate', '20170518200835_rename_users_with_ describe RenameUsersWithRenamedNamespace, :delete do it 'renames a user that had their namespace renamed to the namespace path' do - other_user = create(:user, username: 'kodingu') - other_user1 = create(:user, username: 'api0') + other_user = create(:user, username: 'kodingu') # rubocop:disable RSpec/FactoriesInMigrationSpecs + other_user1 = create(:user, username: 'api0') # rubocop:disable RSpec/FactoriesInMigrationSpecs - user = create(:user, username: "Users0") - user.update_attribute(:username, 'Users') - user1 = create(:user, username: "import0") - user1.update_attribute(:username, 'import') + user = create(:user, username: "Users0") # rubocop:disable RSpec/FactoriesInMigrationSpecs + user.update_column(:username, 'Users') + user1 = create(:user, username: "import0") # rubocop:disable RSpec/FactoriesInMigrationSpecs + user1.update_column(:username, 'import') described_class.new.up diff --git a/spec/migrations/schedule_build_stage_migration_spec.rb b/spec/migrations/reschedule_builds_stages_migration_spec.rb index e2ca35447fb..3bfd9dd9f6b 100644 --- a/spec/migrations/schedule_build_stage_migration_spec.rb +++ b/spec/migrations/reschedule_builds_stages_migration_spec.rb @@ -1,7 +1,8 @@ require 'spec_helper' -require Rails.root.join('db', 'post_migrate', '20180212101928_schedule_build_stage_migration') +require Rails.root.join('db', 'post_migrate', '20180405101928_reschedule_builds_stages_migration') -describe ScheduleBuildStageMigration, :sidekiq, :migration do +describe RescheduleBuildsStagesMigration, :sidekiq, :migration do + let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } 
let(:pipelines) { table(:ci_pipelines) } let(:stages) { table(:ci_stages) } @@ -10,7 +11,8 @@ describe ScheduleBuildStageMigration, :sidekiq, :migration do before do stub_const("#{described_class}::BATCH_SIZE", 1) - projects.create!(id: 123, name: 'gitlab', path: 'gitlab-ce') + namespaces.create(id: 12, name: 'gitlab-org', path: 'gitlab-org') + projects.create!(id: 123, namespace_id: 12, name: 'gitlab', path: 'gitlab') pipelines.create!(id: 1, project_id: 123, ref: 'master', sha: 'adf43c3a') stages.create!(id: 1, project_id: 123, pipeline_id: 1, name: 'test') diff --git a/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb b/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb index 65ec07da31c..ed306fb3d62 100644 --- a/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb +++ b/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb @@ -3,8 +3,8 @@ require Rails.root.join('db', 'post_migrate', '20171005130944_schedule_create_gp describe ScheduleCreateGpgKeySubkeysFromGpgKeys, :migration, :sidekiq do before do - create(:gpg_key, id: 1, key: GpgHelpers::User1.public_key) - create(:gpg_key, id: 2, key: GpgHelpers::User3.public_key) + create(:gpg_key, id: 1, key: GpgHelpers::User1.public_key) # rubocop:disable RSpec/FactoriesInMigrationSpecs + create(:gpg_key, id: 2, key: GpgHelpers::User3.public_key) # rubocop:disable RSpec/FactoriesInMigrationSpecs # Delete all subkeys so they can be recreated GpgKeySubkey.destroy_all end diff --git a/spec/migrations/schedule_populate_merge_request_metrics_with_events_data_spec.rb b/spec/migrations/schedule_populate_merge_request_metrics_with_events_data_spec.rb index 7494624066a..578440cba20 100644 --- a/spec/migrations/schedule_populate_merge_request_metrics_with_events_data_spec.rb +++ b/spec/migrations/schedule_populate_merge_request_metrics_with_events_data_spec.rb @@ -8,7 +8,7 @@ describe SchedulePopulateMergeRequestMetricsWithEventsData, :migration, :sidekiq .to receive(:commits_count=).and_return(nil) end - let!(:mrs) { create_list(:merge_request, 3) } + let!(:mrs) { create_list(:merge_request, 3) } # rubocop:disable RSpec/FactoriesInMigrationSpecs it 'correctly schedules background migrations' do stub_const("#{described_class.name}::BATCH_SIZE", 2) diff --git a/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb b/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb new file mode 100644 index 00000000000..027f4a91c90 --- /dev/null +++ b/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb @@ -0,0 +1,44 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20180104131052_schedule_set_confidential_note_events_on_webhooks.rb') + +describe ScheduleSetConfidentialNoteEventsOnWebhooks, :migration, :sidekiq do + let(:web_hooks_table) { table(:web_hooks) } + let(:migration_class) { Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnWebhooks } + let(:migration_name) { migration_class.to_s.demodulize } + + let!(:web_hook_1) { web_hooks_table.create!(confidential_note_events: nil, note_events: true) } + let!(:web_hook_2) { web_hooks_table.create!(confidential_note_events: nil, note_events: true) } + let!(:web_hook_migrated) { web_hooks_table.create!(confidential_note_events: true, note_events: true) } + let!(:web_hook_skip) { web_hooks_table.create!(confidential_note_events: nil, note_events: false) } + let!(:web_hook_new) { web_hooks_table.create!(confidential_note_events: false, note_events: true) } + 
let!(:web_hook_4) { web_hooks_table.create!(confidential_note_events: nil, note_events: true) } + + before do + stub_const("#{described_class}::BATCH_SIZE", 1) + end + + it 'schedules background migrations at correct time' do + Sidekiq::Testing.fake! do + Timecop.freeze do + migrate! + + expect(migration_name).to be_scheduled_delayed_migration(5.minutes, web_hook_1.id, web_hook_1.id) + expect(migration_name).to be_scheduled_delayed_migration(10.minutes, web_hook_2.id, web_hook_2.id) + expect(migration_name).to be_scheduled_delayed_migration(15.minutes, web_hook_4.id, web_hook_4.id) + expect(BackgroundMigrationWorker.jobs.size).to eq 3 + end + end + end + + it 'correctly processes web hooks' do + Sidekiq::Testing.inline! do + expect(web_hooks_table.where(confidential_note_events: nil).count).to eq 4 + expect(web_hooks_table.where(confidential_note_events: true).count).to eq 1 + + migrate! + + expect(web_hooks_table.where(confidential_note_events: nil).count).to eq 1 + expect(web_hooks_table.where(confidential_note_events: true).count).to eq 4 + end + end +end diff --git a/spec/migrations/turn_nested_groups_into_regular_groups_for_mysql_spec.rb b/spec/migrations/turn_nested_groups_into_regular_groups_for_mysql_spec.rb index 528dc54781d..560409f08de 100644 --- a/spec/migrations/turn_nested_groups_into_regular_groups_for_mysql_spec.rb +++ b/spec/migrations/turn_nested_groups_into_regular_groups_for_mysql_spec.rb @@ -2,10 +2,10 @@ require 'spec_helper' require Rails.root.join('db', 'migrate', '20170503140202_turn_nested_groups_into_regular_groups_for_mysql.rb') describe TurnNestedGroupsIntoRegularGroupsForMysql do - let!(:parent_group) { create(:group) } - let!(:child_group) { create(:group, parent: parent_group) } - let!(:project) { create(:project, :legacy_storage, :empty_repo, namespace: child_group) } - let!(:member) { create(:user) } + let!(:parent_group) { create(:group) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:child_group) { create(:group, parent: parent_group) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:project) { create(:project, :legacy_storage, :empty_repo, namespace: child_group) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:member) { create(:user) } # rubocop:disable RSpec/FactoriesInMigrationSpecs let(:migration) { described_class.new } before do diff --git a/spec/migrations/update_retried_for_ci_build_spec.rb b/spec/migrations/update_retried_for_ci_build_spec.rb index ccb77766b84..637dcbb8e01 100644 --- a/spec/migrations/update_retried_for_ci_build_spec.rb +++ b/spec/migrations/update_retried_for_ci_build_spec.rb @@ -2,9 +2,9 @@ require 'spec_helper' require Rails.root.join('db', 'post_migrate', '20170503004427_update_retried_for_ci_build.rb') describe UpdateRetriedForCiBuild, :delete do - let(:pipeline) { create(:ci_pipeline) } - let!(:build_old) { create(:ci_build, pipeline: pipeline, name: 'test') } - let!(:build_new) { create(:ci_build, pipeline: pipeline, name: 'test') } + let(:pipeline) { create(:ci_pipeline) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:build_old) { create(:ci_build, pipeline: pipeline, name: 'test') } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let!(:build_new) { create(:ci_build, pipeline: pipeline, name: 'test') } # rubocop:disable RSpec/FactoriesInMigrationSpecs before do described_class.new.up diff --git a/spec/models/ability_spec.rb b/spec/models/ability_spec.rb index cd175dba6da..199f49d0bf2 100644 --- a/spec/models/ability_spec.rb +++ b/spec/models/ability_spec.rb @@ -7,62 +7,6 
@@ describe Ability do end end - describe '.can_edit_note?' do - let(:project) { create(:project) } - let(:note) { create(:note_on_issue, project: project) } - - context 'using an anonymous user' do - it 'returns false' do - expect(described_class.can_edit_note?(nil, note)).to be_falsy - end - end - - context 'using a system note' do - it 'returns false' do - system_note = create(:note, system: true) - user = create(:user) - - expect(described_class.can_edit_note?(user, system_note)).to be_falsy - end - end - - context 'using users with different access levels' do - let(:user) { create(:user) } - - it 'returns true for the author' do - expect(described_class.can_edit_note?(note.author, note)).to be_truthy - end - - it 'returns false for a guest user' do - project.add_guest(user) - - expect(described_class.can_edit_note?(user, note)).to be_falsy - end - - it 'returns false for a developer' do - project.add_developer(user) - - expect(described_class.can_edit_note?(user, note)).to be_falsy - end - - it 'returns true for a master' do - project.add_master(user) - - expect(described_class.can_edit_note?(user, note)).to be_truthy - end - - it 'returns true for a group owner' do - group = create(:group) - project.project_group_links.create( - group: group, - group_access: Gitlab::Access::MASTER) - group.add_owner(user) - - expect(described_class.can_edit_note?(user, note)).to be_truthy - end - end - end - describe '.users_that_can_read_project' do context 'using a public project' do it 'returns all the users' do diff --git a/spec/models/broadcast_message_spec.rb b/spec/models/broadcast_message_spec.rb index 461e754dc1f..5326f9cb8c0 100644 --- a/spec/models/broadcast_message_spec.rb +++ b/spec/models/broadcast_message_spec.rb @@ -51,7 +51,11 @@ describe BroadcastMessage do expect(described_class).to receive(:where).and_call_original.once - 2.times { described_class.current } + described_class.current + + Timecop.travel(1.year) do + described_class.current + end end it 'includes messages that need to be displayed in the future' do diff --git a/spec/models/ci/artifact_blob_spec.rb b/spec/models/ci/artifact_blob_spec.rb index 4e72d9d748e..0014bbcf9f5 100644 --- a/spec/models/ci/artifact_blob_spec.rb +++ b/spec/models/ci/artifact_blob_spec.rb @@ -65,6 +65,19 @@ describe Ci::ArtifactBlob do expect(url).not_to be_nil expect(url).to eq("http://#{project.namespace.path}.#{Gitlab.config.pages.host}/-/#{project.path}/-/jobs/#{build.id}/artifacts/#{path}") end + + context 'when port is configured' do + let(:port) { 1234 } + + it 'returns an URL with port number' do + allow(Gitlab.config.pages).to receive(:url).and_return("#{Gitlab.config.pages.url}:#{port}") + + url = subject.external_url(build.project, build) + + expect(url).not_to be_nil + expect(url).to eq("http://#{project.namespace.path}.#{Gitlab.config.pages.host}:#{port}/-/#{project.path}/-/jobs/#{build.id}/artifacts/#{path}") + end + end end end diff --git a/spec/models/ci/build_metadata_spec.rb b/spec/models/ci/build_metadata_spec.rb new file mode 100644 index 00000000000..7e75d5a5411 --- /dev/null +++ b/spec/models/ci/build_metadata_spec.rb @@ -0,0 +1,61 @@ +require 'spec_helper' + +describe Ci::BuildMetadata do + set(:user) { create(:user) } + set(:group) { create(:group, :access_requestable) } + set(:project) { create(:project, :repository, group: group, build_timeout: 2000) } + + set(:pipeline) do + create(:ci_pipeline, project: project, + sha: project.commit.id, + ref: project.default_branch, + status: 'success') + end + + let(:build) { 
create(:ci_build, pipeline: pipeline) } + let(:build_metadata) { build.metadata } + + describe '#update_timeout_state' do + subject { build_metadata } + + context 'when runner is not assigned to the job' do + it "doesn't change timeout value" do + expect { subject.update_timeout_state }.not_to change { subject.reload.timeout } + end + + it "doesn't change timeout_source value" do + expect { subject.update_timeout_state }.not_to change { subject.reload.timeout_source } + end + end + + context 'when runner is assigned to the job' do + before do + build.update_attributes(runner: runner) + end + + context 'when runner timeout is lower than project timeout' do + let(:runner) { create(:ci_runner, maximum_timeout: 1900) } + + it 'sets runner timeout' do + expect { subject.update_timeout_state }.to change { subject.reload.timeout }.to(1900) + end + + it 'sets runner_timeout_source' do + expect { subject.update_timeout_state }.to change { subject.reload.timeout_source }.to('runner_timeout_source') + end + end + + context 'when runner timeout is higher than project timeout' do + let(:runner) { create(:ci_runner, maximum_timeout: 2100) } + + it 'sets project timeout' do + expect { subject.update_timeout_state }.to change { subject.reload.timeout }.to(2000) + end + + it 'sets project_timeout_source' do + expect { subject.update_timeout_state }.to change { subject.reload.timeout_source }.to('project_timeout_source') + end + end + end + end +end diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb index 01203ff44c8..fcdc31c8984 100644 --- a/spec/models/ci/build_spec.rb +++ b/spec/models/ci/build_spec.rb @@ -80,6 +80,42 @@ describe Ci::Build do end end + describe '.with_artifacts_archive' do + subject { described_class.with_artifacts_archive } + + context 'when job does not have an archive' do + let!(:job) { create(:ci_build) } + + it 'does not return the job' do + is_expected.not_to include(job) + end + end + + context 'when job has a legacy archive' do + let!(:job) { create(:ci_build, :legacy_artifacts) } + + it 'returns the job' do + is_expected.to include(job) + end + end + + context 'when job has a job artifact archive' do + let!(:job) { create(:ci_build, :artifacts) } + + it 'returns the job' do + is_expected.to include(job) + end + end + + context 'when job has a job artifact trace' do + let!(:job) { create(:ci_build, :trace_artifact) } + + it 'does not return the job' do + is_expected.not_to include(job) + end + end + end + describe '#actionize' do context 'when build is a created' do before do @@ -162,6 +198,16 @@ describe Ci::Build do end context 'when legacy artifacts are used' do + let(:build) { create(:ci_build, :legacy_artifacts) } + + subject { build.artifacts? } + + context 'is expired' do + let(:build) { create(:ci_build, :legacy_artifacts, :expired) } + + it { is_expected.to be_falsy } + end + context 'artifacts archive does not exist' do let(:build) { create(:ci_build) } @@ -172,13 +218,25 @@ describe Ci::Build do let(:build) { create(:ci_build, :legacy_artifacts) } it { is_expected.to be_truthy } + end + end + end - context 'is expired' do - let(:build) { create(:ci_build, :legacy_artifacts, :expired) } + describe '#browsable_artifacts?' do + subject { build.browsable_artifacts? 
} - it { is_expected.to be_falsy } - end + context 'artifacts metadata does not exist' do + before do + build.update_attributes(legacy_artifacts_metadata: nil) end + + it { is_expected.to be_falsy } + end + + context 'artifacts metadata does exists' do + let(:build) { create(:ci_build, :artifacts) } + + it { is_expected.to be_truthy } end end @@ -1213,12 +1271,6 @@ describe Ci::Build do end describe 'project settings' do - describe '#timeout' do - it 'returns project timeout configuration' do - expect(build.timeout).to eq(project.build_timeout) - end - end - describe '#allow_git_fetch' do it 'return project allow_git_fetch configuration' do expect(build.allow_git_fetch).to eq(project.build_allow_git_fetch) @@ -1332,29 +1384,51 @@ describe Ci::Build do end end - describe '#update_project_statistics' do - let!(:build) { create(:ci_build, artifacts_size: 23) } - - it 'updates project statistics when the artifact size changes' do - expect(ProjectCacheWorker).to receive(:perform_async) - .with(build.project_id, [], [:build_artifacts_size]) + context 'when updating the build' do + let(:build) { create(:ci_build, artifacts_size: 23) } + it 'updates project statistics' do build.artifacts_size = 42 - build.save! + + expect(build).to receive(:update_project_statistics_after_save).and_call_original + + expect { build.save! } + .to change { build.project.statistics.reload.build_artifacts_size } + .by(19) end - it 'does not update project statistics when the artifact size stays the same' do - expect(ProjectCacheWorker).not_to receive(:perform_async) + context 'when the artifact size stays the same' do + it 'does not update project statistics' do + build.name = 'changed' - build.name = 'changed' - build.save! + expect(build).not_to receive(:update_project_statistics_after_save) + + build.save! + end end + end + + context 'when destroying the build' do + let!(:build) { create(:ci_build, artifacts_size: 23) } - it 'updates project statistics when the build is destroyed' do - expect(ProjectCacheWorker).to receive(:perform_async) - .with(build.project_id, [], [:build_artifacts_size]) + it 'updates project statistics' do + expect(ProjectStatistics) + .to receive(:increment_statistic) + .and_call_original - build.destroy + expect { build.destroy! } + .to change { build.project.statistics.reload.build_artifacts_size } + .by(-23) + end + + context 'when the build is destroyed due to the project being destroyed' do + it 'does not update the project statistics' do + expect(ProjectStatistics) + .not_to receive(:increment_statistic) + + build.project.update_attributes(pending_delete: true) + build.project.destroy! 
+ end end end @@ -1411,24 +1485,24 @@ describe Ci::Build do let(:container_registry_enabled) { false } let(:predefined_variables) do [ + { key: 'CI_JOB_ID', value: build.id.to_s, public: true }, + { key: 'CI_JOB_TOKEN', value: build.token, public: false }, + { key: 'CI_BUILD_ID', value: build.id.to_s, public: true }, + { key: 'CI_BUILD_TOKEN', value: build.token, public: false }, + { key: 'CI_REGISTRY_USER', value: 'gitlab-ci-token', public: true }, + { key: 'CI_REGISTRY_PASSWORD', value: build.token, public: false }, + { key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false }, { key: 'CI', value: 'true', public: true }, { key: 'GITLAB_CI', value: 'true', public: true }, - { key: 'GITLAB_FEATURES', value: project.namespace.features.join(','), public: true }, + { key: 'GITLAB_FEATURES', value: project.licensed_features.join(','), public: true }, { key: 'CI_SERVER_NAME', value: 'GitLab', public: true }, { key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true }, { key: 'CI_SERVER_REVISION', value: Gitlab::REVISION, public: true }, - { key: 'CI_JOB_ID', value: build.id.to_s, public: true }, { key: 'CI_JOB_NAME', value: 'test', public: true }, { key: 'CI_JOB_STAGE', value: 'test', public: true }, - { key: 'CI_JOB_TOKEN', value: build.token, public: false }, { key: 'CI_COMMIT_SHA', value: build.sha, public: true }, { key: 'CI_COMMIT_REF_NAME', value: build.ref, public: true }, { key: 'CI_COMMIT_REF_SLUG', value: build.ref_slug, public: true }, - { key: 'CI_REGISTRY_USER', value: 'gitlab-ci-token', public: true }, - { key: 'CI_REGISTRY_PASSWORD', value: build.token, public: false }, - { key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false }, - { key: 'CI_BUILD_ID', value: build.id.to_s, public: true }, - { key: 'CI_BUILD_TOKEN', value: build.token, public: false }, { key: 'CI_BUILD_REF', value: build.sha, public: true }, { key: 'CI_BUILD_BEFORE_SHA', value: build.before_sha, public: true }, { key: 'CI_BUILD_REF_NAME', value: build.ref, public: true }, @@ -1893,6 +1967,7 @@ describe Ci::Build do before do allow(build).to receive(:predefined_variables) { [build_pre_var] } allow(build).to receive(:yaml_variables) { [build_yaml_var] } + allow(build).to receive(:persisted_variables) { [] } allow_any_instance_of(Project) .to receive(:predefined_variables) { [project_pre_var] } @@ -1941,6 +2016,106 @@ describe Ci::Build do end end end + + context 'when build has not been persisted yet' do + let(:build) do + described_class.new( + name: 'rspec', + stage: 'test', + ref: 'feature', + project: project, + pipeline: pipeline + ) + end + + it 'returns static predefined variables' do + expect(build.variables.size).to be >= 28 + expect(build.variables) + .to include(key: 'CI_COMMIT_REF_NAME', value: 'feature', public: true) + expect(build).not_to be_persisted + end + end + end + + describe '#scoped_variables' do + context 'when build has not been persisted yet' do + let(:build) do + described_class.new( + name: 'rspec', + stage: 'test', + ref: 'feature', + project: project, + pipeline: pipeline + ) + end + + it 'does not persist the build' do + expect(build).to be_valid + expect(build).not_to be_persisted + + build.scoped_variables + + expect(build).not_to be_persisted + end + + it 'returns static predefined variables' do + keys = %w[CI_JOB_NAME + CI_COMMIT_SHA + CI_COMMIT_REF_NAME + CI_COMMIT_REF_SLUG + CI_JOB_STAGE] + + variables = build.scoped_variables + + variables.map { |env| env[:key] }.tap do |names| + expect(names).to include(*keys) + end + + expect(variables) + .to 
include(key: 'CI_COMMIT_REF_NAME', value: 'feature', public: true) + end + + it 'does not return prohibited variables' do + keys = %w[CI_JOB_ID + CI_JOB_TOKEN + CI_BUILD_ID + CI_BUILD_TOKEN + CI_REGISTRY_USER + CI_REGISTRY_PASSWORD + CI_REPOSITORY_URL + CI_ENVIRONMENT_URL] + + build.scoped_variables.map { |env| env[:key] }.tap do |names| + expect(names).not_to include(*keys) + end + end + end + end + + describe '#scoped_variables_hash' do + context 'when overriding secret variables' do + before do + project.variables.create!(key: 'MY_VAR', value: 'my value 1') + pipeline.variables.create!(key: 'MY_VAR', value: 'my value 2') + end + + it 'returns a regular hash created using valid ordering' do + expect(build.scoped_variables_hash).to include('MY_VAR': 'my value 2') + expect(build.scoped_variables_hash).not_to include('MY_VAR': 'my value 1') + end + end + + context 'when overriding user-provided variables' do + before do + pipeline.variables.build(key: 'MY_VAR', value: 'pipeline value') + build.yaml_variables = [{ key: 'MY_VAR', value: 'myvar', public: true }] + end + + it 'returns a hash including variable with higher precedence' do + expect(build.scoped_variables_hash).to include('MY_VAR': 'pipeline value') + expect(build.scoped_variables_hash).not_to include('MY_VAR': 'myvar') + end + end end describe 'state transition: any => [:pending]' do @@ -1953,6 +2128,66 @@ describe Ci::Build do end end + describe 'state transition: pending: :running' do + let(:runner) { create(:ci_runner) } + let(:job) { create(:ci_build, :pending, runner: runner) } + + before do + job.project.update_attribute(:build_timeout, 1800) + end + + def run_job_without_exception + job.run! + rescue StateMachines::InvalidTransition + end + + shared_examples 'saves data on transition' do + it 'saves timeout' do + expect { job.run! }.to change { job.reload.ensure_metadata.timeout }.from(nil).to(expected_timeout) + end + + it 'saves timeout_source' do + expect { job.run! }.to change { job.reload.ensure_metadata.timeout_source }.from('unknown_timeout_source').to(expected_timeout_source) + end + + context 'when Ci::BuildMetadata#update_timeout_state fails update' do + before do + allow_any_instance_of(Ci::BuildMetadata).to receive(:update_timeout_state).and_return(false) + end + + it "doesn't save timeout" do + expect { run_job_without_exception }.not_to change { job.reload.ensure_metadata.timeout_source } + end + + it "doesn't save timeout_source" do + expect { run_job_without_exception }.not_to change { job.reload.ensure_metadata.timeout_source } + end + end + end + + context 'when runner timeout overrides project timeout' do + let(:expected_timeout) { 900 } + let(:expected_timeout_source) { 'runner_timeout_source' } + + before do + runner.update_attribute(:maximum_timeout, 900) + end + + it_behaves_like 'saves data on transition' + end + + context "when runner timeout doesn't override project timeout" do + let(:expected_timeout) { 1800 } + let(:expected_timeout_source) { 'project_timeout_source' } + + before do + runner.update_attribute(:maximum_timeout, 3600) + end + + it_behaves_like 'saves data on transition' + end + end + describe 'state transition: any => [:running]' do shared_examples 'validation is active' do context 'when depended job has not been completed yet' do @@ -2065,6 +2300,35 @@ describe Ci::Build do subject.drop! 
end + + context 'when retry service raises Gitlab::Access::AccessDeniedError exception' do + let(:retry_service) { Ci::RetryBuildService.new(subject.project, subject.user) } + + before do + allow_any_instance_of(Ci::RetryBuildService) + .to receive(:execute) + .with(subject) + .and_raise(Gitlab::Access::AccessDeniedError) + allow(Rails.logger).to receive(:error) + end + + it 'handles raised exception' do + expect { subject.drop! }.not_to raise_exception(Gitlab::Access::AccessDeniedError) + end + + it 'logs the error' do + subject.drop! + + expect(Rails.logger) + .to have_received(:error) + .with(a_string_matching("Unable to auto-retry job #{subject.id}")) + end + + it 'fails the job' do + subject.drop! + expect(subject.failed?).to be_truthy + end + end end context 'when build is not configured to be retried' do diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb index a2bd36537e6..a3e119cbc27 100644 --- a/spec/models/ci/job_artifact_spec.rb +++ b/spec/models/ci/job_artifact_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe Ci::JobArtifact do - set(:artifact) { create(:ci_job_artifact, :archive) } + let(:artifact) { create(:ci_job_artifact, :archive) } describe "Associations" do it { is_expected.to belong_to(:project) } @@ -15,10 +15,76 @@ describe Ci::JobArtifact do it { is_expected.to delegate_method(:open).to(:file) } it { is_expected.to delegate_method(:exists?).to(:file) } - describe '#set_size' do - it 'sets the size' do + describe 'callbacks' do + subject { create(:ci_job_artifact, :archive) } + + describe '#schedule_background_upload' do + context 'when object storage is disabled' do + before do + stub_artifacts_object_storage(enabled: false) + end + + it 'does not schedule the migration' do + expect(ObjectStorageUploadWorker).not_to receive(:perform_async) + + subject + end + end + + context 'when object storage is enabled' do + context 'when background upload is enabled' do + before do + stub_artifacts_object_storage(background_upload: true) + end + + it 'schedules the model for migration' do + expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric)) + + subject + end + end + + context 'when background upload is disabled' do + before do + stub_artifacts_object_storage(background_upload: false) + end + + it 'schedules the model for migration' do + expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async) + + subject + end + end + end + end + end + + context 'creating the artifact' do + let(:project) { create(:project) } + let(:artifact) { create(:ci_job_artifact, :archive, project: project) } + + it 'sets the size from the file size' do expect(artifact.size).to eq(106365) end + + it 'updates the project statistics' do + expect { artifact } + .to change { project.statistics.reload.build_artifacts_size } + .by(106365) + end + end + + context 'updating the artifact file' do + it 'updates the artifact size' do + artifact.update!(file: fixture_file_upload(File.join(Rails.root, 'spec/fixtures/dk.png'))) + expect(artifact.size).to eq(1062) + end + + it 'updates the project statistics' do + expect { artifact.update!(file: fixture_file_upload(File.join(Rails.root, 'spec/fixtures/dk.png'))) } + .to change { artifact.project.statistics.reload.build_artifacts_size } + .by(1062 - 106365) + end end describe '#file' do @@ -74,4 +140,71 @@ describe Ci::JobArtifact do is_expected.to be_nil end end + + context 'when destroying the artifact' do + 
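# Descriptive note (added comment): destroying a single artifact is expected to go through ProjectStatistics.increment_statistic and reduce build_artifacts_size by the artifact's size, while deleting the whole project (pending_delete) skips the statistics update, as the examples below assert. + 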
let(:project) { create(:project, :repository) } + let(:pipeline) { create(:ci_pipeline, project: project) } + let!(:build) { create(:ci_build, :artifacts, pipeline: pipeline) } + + it 'updates the project statistics' do + artifact = build.job_artifacts.first + + expect(ProjectStatistics) + .to receive(:increment_statistic) + .and_call_original + + expect { artifact.destroy } + .to change { project.statistics.reload.build_artifacts_size } + .by(-106365) + end + + context 'when it is destroyed from the project level' do + it 'does not update the project statistics' do + expect(ProjectStatistics) + .not_to receive(:increment_statistic) + + project.update_attributes(pending_delete: true) + project.destroy! + end + end + end + + describe 'file is being stored' do + subject { create(:ci_job_artifact, :archive) } + + context 'when object has nil store' do + before do + subject.update_column(:file_store, nil) + subject.reload + end + + it 'is stored locally' do + expect(subject.file_store).to be(nil) + expect(subject.file).to be_file_storage + expect(subject.file.object_store).to eq(ObjectStorage::Store::LOCAL) + end + end + + context 'when existing object has local store' do + it 'is stored locally' do + expect(subject.file_store).to be(ObjectStorage::Store::LOCAL) + expect(subject.file).to be_file_storage + expect(subject.file.object_store).to eq(ObjectStorage::Store::LOCAL) + end + end + + context 'when direct upload is enabled' do + before do + stub_artifacts_object_storage(direct_upload: true) + end + + context 'when file is stored' do + it 'is stored remotely' do + expect(subject.file_store).to eq(ObjectStorage::Store::REMOTE) + expect(subject.file).not_to be_file_storage + expect(subject.file.object_store).to eq(ObjectStorage::Store::REMOTE) + end + end + end + end end diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb index 4635f8cfe9d..dd94515b0a4 100644 --- a/spec/models/ci/pipeline_spec.rb +++ b/spec/models/ci/pipeline_spec.rb @@ -177,6 +177,24 @@ describe Ci::Pipeline, :mailer do end end + describe '#protected_ref?' do + it 'delegates method to project' do + expect(pipeline).not_to be_protected_ref + end + end + + describe '#legacy_trigger' do + let(:trigger_request) { create(:ci_trigger_request) } + + before do + pipeline.trigger_requests << trigger_request + end + + it 'returns first trigger request' do + expect(pipeline.legacy_trigger).to eq trigger_request + end + end + describe '#auto_canceled?' do subject { pipeline.auto_canceled? 
} @@ -215,142 +233,271 @@ describe Ci::Pipeline, :mailer do end describe 'pipeline stages' do - before do - create(:commit_status, pipeline: pipeline, - stage: 'build', - name: 'linux', - stage_idx: 0, - status: 'success') - - create(:commit_status, pipeline: pipeline, - stage: 'build', - name: 'mac', - stage_idx: 0, - status: 'failed') - - create(:commit_status, pipeline: pipeline, - stage: 'deploy', - name: 'staging', - stage_idx: 2, - status: 'running') - - create(:commit_status, pipeline: pipeline, - stage: 'test', - name: 'rspec', - stage_idx: 1, - status: 'success') - end - describe '#stage_seeds' do - let(:pipeline) do - build(:ci_pipeline, config: { rspec: { script: 'rake' } }) - end + let(:pipeline) { build(:ci_pipeline, config: config) } + let(:config) { { rspec: { script: 'rake' } } } it 'returns preseeded stage seeds object' do - expect(pipeline.stage_seeds).to all(be_a Gitlab::Ci::Stage::Seed) + expect(pipeline.stage_seeds) + .to all(be_a Gitlab::Ci::Pipeline::Seed::Base) expect(pipeline.stage_seeds.count).to eq 1 end - end - describe '#seeds_size' do - let(:pipeline) { build(:ci_pipeline_with_one_job) } + context 'when no refs policy is specified' do + let(:config) do + { production: { stage: 'deploy', script: 'cap prod' }, + rspec: { stage: 'test', script: 'rspec' }, + spinach: { stage: 'test', script: 'spinach' } } + end - it 'returns number of jobs in stage seeds' do - expect(pipeline.seeds_size).to eq 1 + it 'correctly fabricates a stage seeds object' do + seeds = pipeline.stage_seeds + + expect(seeds.size).to eq 2 + expect(seeds.first.attributes[:name]).to eq 'test' + expect(seeds.second.attributes[:name]).to eq 'deploy' + expect(seeds.dig(0, 0, :name)).to eq 'rspec' + expect(seeds.dig(0, 1, :name)).to eq 'spinach' + expect(seeds.dig(1, 0, :name)).to eq 'production' + end end - end - describe '#legacy_stages' do - subject { pipeline.legacy_stages } + context 'when refs policy is specified' do + let(:pipeline) do + build(:ci_pipeline, ref: 'feature', tag: true, config: config) + end + + let(:config) do + { production: { stage: 'deploy', script: 'cap prod', only: ['master'] }, + spinach: { stage: 'test', script: 'spinach', only: ['tags'] } } + end + + it 'returns stage seeds only assigned to master to master' do + seeds = pipeline.stage_seeds - context 'stages list' do - it 'returns ordered list of stages' do - expect(subject.map(&:name)).to eq(%w[build test deploy]) + expect(seeds.size).to eq 1 + expect(seeds.first.attributes[:name]).to eq 'test' + expect(seeds.dig(0, 0, :name)).to eq 'spinach' end end - context 'stages with statuses' do - let(:statuses) do - subject.map { |stage| [stage.name, stage.status] } + context 'when source policy is specified' do + let(:pipeline) { build(:ci_pipeline, source: :schedule, config: config) } + + let(:config) do + { production: { stage: 'deploy', script: 'cap prod', only: ['triggers'] }, + spinach: { stage: 'test', script: 'spinach', only: ['schedules'] } } end - it 'returns list of stages with correct statuses' do - expect(statuses).to eq([%w(build failed), - %w(test success), - %w(deploy running)]) + it 'returns stage seeds only assigned to schedules' do + seeds = pipeline.stage_seeds + + expect(seeds.size).to eq 1 + expect(seeds.first.attributes[:name]).to eq 'test' + expect(seeds.dig(0, 0, :name)).to eq 'spinach' end + end - context 'when commit status is retried' do - before do - create(:commit_status, pipeline: pipeline, - stage: 'build', - name: 'mac', - stage_idx: 0, - status: 'success') + context 'when kubernetes policy is 
specified' do + let(:config) do + { + spinach: { stage: 'test', script: 'spinach' }, + production: { + stage: 'deploy', + script: 'cap', + only: { kubernetes: 'active' } + } + } + end + + context 'when kubernetes is active' do + shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do + it 'returns seeds for kubernetes dependent job' do + seeds = pipeline.stage_seeds - pipeline.process! + expect(seeds.size).to eq 2 + expect(seeds.dig(0, 0, :name)).to eq 'spinach' + expect(seeds.dig(1, 0, :name)).to eq 'production' + end end - it 'ignores the previous state' do - expect(statuses).to eq([%w(build success), - %w(test success), - %w(deploy running)]) + context 'when user configured kubernetes from Integration > Kubernetes' do + let(:project) { create(:kubernetes_project) } + let(:pipeline) { build(:ci_pipeline, project: project, config: config) } + + it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' + end + + context 'when user configured kubernetes from CI/CD > Clusters' do + let!(:cluster) { create(:cluster, :project, :provided_by_gcp) } + let(:project) { cluster.project } + let(:pipeline) { build(:ci_pipeline, project: project, config: config) } + + it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' + end + end + + context 'when kubernetes is not active' do + it 'does not return seeds for kubernetes dependent job' do + seeds = pipeline.stage_seeds + + expect(seeds.size).to eq 1 + expect(seeds.dig(0, 0, :name)).to eq 'spinach' end end end - context 'when there is a stage with warnings' do - before do - create(:commit_status, pipeline: pipeline, - stage: 'deploy', - name: 'prod:2', - stage_idx: 2, - status: 'failed', - allow_failure: true) + context 'when variables policy is specified' do + let(:config) do + { unit: { script: 'minitest', only: { variables: ['$CI_PIPELINE_SOURCE'] } }, + feature: { script: 'spinach', only: { variables: ['$UNDEFINED'] } } } end - it 'populates stage with correct number of warnings' do - deploy_stage = pipeline.legacy_stages.third + it 'returns stage seeds only when variables expression is truthy' do + seeds = pipeline.stage_seeds - expect(deploy_stage).not_to receive(:statuses) - expect(deploy_stage).to have_warnings + expect(seeds.size).to eq 1 + expect(seeds.dig(0, 0, :name)).to eq 'unit' end end end - describe '#stages_count' do - it 'returns a valid number of stages' do - expect(pipeline.stages_count).to eq(3) - end - end + describe '#seeds_size' do + context 'when refs policy is specified' do + let(:config) do + { production: { stage: 'deploy', script: 'cap prod', only: ['master'] }, + spinach: { stage: 'test', script: 'spinach', only: ['tags'] } } + end + + let(:pipeline) do + build(:ci_pipeline, ref: 'feature', tag: true, config: config) + end - describe '#stages_names' do - it 'returns a valid names of stages' do - expect(pipeline.stages_names).to eq(%w(build test deploy)) + it 'returns real seeds size' do + expect(pipeline.seeds_size).to eq 1 + end end end - end - - describe '#legacy_stage' do - subject { pipeline.legacy_stage('test') } - context 'with status in stage' do + describe 'legacy stages' do before do - create(:commit_status, pipeline: pipeline, stage: 'test') + create(:commit_status, pipeline: pipeline, + stage: 'build', + name: 'linux', + stage_idx: 0, + status: 'success') + + create(:commit_status, pipeline: pipeline, + stage: 'build', + name: 'mac', + stage_idx: 0, + status: 'failed') + + create(:commit_status, pipeline: pipeline, + stage: 
'deploy', + name: 'staging', + stage_idx: 2, + status: 'running') + + create(:commit_status, pipeline: pipeline, + stage: 'test', + name: 'rspec', + stage_idx: 1, + status: 'success') + end + + describe '#legacy_stages' do + subject { pipeline.legacy_stages } + + context 'stages list' do + it 'returns ordered list of stages' do + expect(subject.map(&:name)).to eq(%w[build test deploy]) + end + end + + context 'stages with statuses' do + let(:statuses) do + subject.map { |stage| [stage.name, stage.status] } + end + + it 'returns list of stages with correct statuses' do + expect(statuses).to eq([%w(build failed), + %w(test success), + %w(deploy running)]) + end + + context 'when commit status is retried' do + before do + create(:commit_status, pipeline: pipeline, + stage: 'build', + name: 'mac', + stage_idx: 0, + status: 'success') + + pipeline.process! + end + + it 'ignores the previous state' do + expect(statuses).to eq([%w(build success), + %w(test success), + %w(deploy running)]) + end + end + end + + context 'when there is a stage with warnings' do + before do + create(:commit_status, pipeline: pipeline, + stage: 'deploy', + name: 'prod:2', + stage_idx: 2, + status: 'failed', + allow_failure: true) + end + + it 'populates stage with correct number of warnings' do + deploy_stage = pipeline.legacy_stages.third + + expect(deploy_stage).not_to receive(:statuses) + expect(deploy_stage).to have_warnings + end + end + end + + describe '#stages_count' do + it 'returns a valid number of stages' do + expect(pipeline.stages_count).to eq(3) + end end - it { expect(subject).to be_a Ci::LegacyStage } - it { expect(subject.name).to eq 'test' } - it { expect(subject.statuses).not_to be_empty } + describe '#stages_names' do + it 'returns a valid names of stages' do + expect(pipeline.stages_names).to eq(%w(build test deploy)) + end + end end - context 'without status in stage' do - before do - create(:commit_status, pipeline: pipeline, stage: 'build') + describe '#legacy_stage' do + subject { pipeline.legacy_stage('test') } + + context 'with status in stage' do + before do + create(:commit_status, pipeline: pipeline, stage: 'test') + end + + it { expect(subject).to be_a Ci::LegacyStage } + it { expect(subject.name).to eq 'test' } + it { expect(subject.statuses).not_to be_empty } end - it 'return stage object' do - is_expected.to be_nil + context 'without status in stage' do + before do + create(:commit_status, pipeline: pipeline, stage: 'build') + end + + it 'return stage object' do + is_expected.to be_nil + end end end end @@ -589,20 +736,6 @@ describe Ci::Pipeline, :mailer do end end - describe '#has_stage_seeds?' do - context 'when pipeline has stage seeds' do - subject { build(:ci_pipeline_with_one_job) } - - it { is_expected.to have_stage_seeds } - end - - context 'when pipeline does not have stage seeds' do - subject { create(:ci_pipeline_without_jobs) } - - it { is_expected.not_to have_stage_seeds } - end - end - describe '#has_warnings?' do subject { pipeline.has_warnings? 
} diff --git a/spec/models/clusters/applications/helm_spec.rb b/spec/models/clusters/applications/helm_spec.rb index ba7bad617b4..0eb1e3876e2 100644 --- a/spec/models/clusters/applications/helm_spec.rb +++ b/spec/models/clusters/applications/helm_spec.rb @@ -3,6 +3,18 @@ require 'rails_helper' describe Clusters::Applications::Helm do include_examples 'cluster application core specs', :clusters_applications_helm + describe '.installed' do + subject { described_class.installed } + + let!(:cluster) { create(:clusters_applications_helm, :installed) } + + before do + create(:clusters_applications_helm, :errored) + end + + it { is_expected.to contain_exactly(cluster) } + end + describe '#install_command' do let(:helm) { create(:clusters_applications_helm) } diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb index 03f5b88a525..a47a07d908d 100644 --- a/spec/models/clusters/applications/ingress_spec.rb +++ b/spec/models/clusters/applications/ingress_spec.rb @@ -11,6 +11,18 @@ describe Clusters::Applications::Ingress do allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async) end + describe '.installed' do + subject { described_class.installed } + + let!(:cluster) { create(:clusters_applications_ingress, :installed) } + + before do + create(:clusters_applications_ingress, :errored) + end + + it { is_expected.to contain_exactly(cluster) } + end + describe '#make_installed!' do before do application.make_installed! diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb index 2905b58066b..aeca6ee903a 100644 --- a/spec/models/clusters/applications/prometheus_spec.rb +++ b/spec/models/clusters/applications/prometheus_spec.rb @@ -4,6 +4,18 @@ describe Clusters::Applications::Prometheus do include_examples 'cluster application core specs', :clusters_applications_prometheus include_examples 'cluster application status specs', :cluster_application_prometheus + describe '.installed' do + subject { described_class.installed } + + let!(:cluster) { create(:clusters_applications_prometheus, :installed) } + + before do + create(:clusters_applications_prometheus, :errored) + end + + it { is_expected.to contain_exactly(cluster) } + end + describe 'transition to installed' do let(:project) { create(:project) } let(:cluster) { create(:cluster, projects: [project]) } diff --git a/spec/models/clusters/applications/runner_spec.rb b/spec/models/clusters/applications/runner_spec.rb index a574779e39d..64d995a73c1 100644 --- a/spec/models/clusters/applications/runner_spec.rb +++ b/spec/models/clusters/applications/runner_spec.rb @@ -8,6 +8,18 @@ describe Clusters::Applications::Runner do it { is_expected.to belong_to(:runner) } + describe '.installed' do + subject { described_class.installed } + + let!(:cluster) { create(:clusters_applications_runner, :installed) } + + before do + create(:clusters_applications_runner, :errored) + end + + it { is_expected.to contain_exactly(cluster) } + end + describe '#install_command' do let(:kubeclient) { double('kubernetes client') } let(:gitlab_runner) { create(:clusters_applications_runner, runner: ci_runner) } diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb index 8f12a0e3085..b942554d67b 100644 --- a/spec/models/clusters/cluster_spec.rb +++ b/spec/models/clusters/cluster_spec.rb @@ -39,6 +39,42 @@ describe Clusters::Cluster do it { is_expected.to contain_exactly(cluster) } end + describe '.user_provided' 
do + subject { described_class.user_provided } + + let!(:cluster) { create(:cluster, :provided_by_user) } + + before do + create(:cluster, :provided_by_gcp) + end + + it { is_expected.to contain_exactly(cluster) } + end + + describe '.gcp_provided' do + subject { described_class.gcp_provided } + + let!(:cluster) { create(:cluster, :provided_by_gcp) } + + before do + create(:cluster, :provided_by_user) + end + + it { is_expected.to contain_exactly(cluster) } + end + + describe '.gcp_installed' do + subject { described_class.gcp_installed } + + let!(:cluster) { create(:cluster, :provided_by_gcp) } + + before do + create(:cluster, :providing_by_gcp) + end + + it { is_expected.to contain_exactly(cluster) } + end + describe 'validation' do subject { cluster.valid? } diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb index 53a4e545ff6..add481b8096 100644 --- a/spec/models/clusters/platforms/kubernetes_spec.rb +++ b/spec/models/clusters/platforms/kubernetes_spec.rb @@ -252,7 +252,7 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching stub_kubeclient_pods(status: 500) end - it { expect { subject }.to raise_error(KubeException) } + it { expect { subject }.to raise_error(Kubeclient::HttpError) } end context 'when kubernetes responds with 404s' do diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb index 959383ff0b7..4e6b037a720 100644 --- a/spec/models/commit_spec.rb +++ b/spec/models/commit_spec.rb @@ -450,6 +450,11 @@ eos it "returns nil if the path doesn't exists" do expect(commit.uri_type('this/path/doesnt/exist')).to be_nil end + + it 'is nil if the path is nil or empty' do + expect(commit.uri_type(nil)).to be_nil + expect(commit.uri_type("")).to be_nil + end end context 'when Gitaly commit_tree_entry feature is enabled' do diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb index b7ed8be69fc..2ed29052dc1 100644 --- a/spec/models/commit_status_spec.rb +++ b/spec/models/commit_status_spec.rb @@ -368,9 +368,7 @@ describe CommitStatus do 'rspec:windows 0 : / 1' => 'rspec:windows', 'rspec:windows 0 : / 1 name' => 'rspec:windows name', '0 1 name ruby' => 'name ruby', - '0 :/ 1 name ruby' => 'name ruby', - 'golang test 1.8' => 'golang test', - '1.9 golang test' => 'golang test' + '0 :/ 1 name ruby' => 'name ruby' } tests.each do |name, group_name| @@ -535,4 +533,36 @@ describe CommitStatus do end end end + + describe '#enqueue' do + let!(:current_time) { Time.new(2018, 4, 5, 14, 0, 0) } + + before do + allow(Time).to receive(:now).and_return(current_time) + end + + shared_examples 'commit status enqueued' do + it 'sets queued_at value when enqueued' do + expect { commit_status.enqueue }.to change { commit_status.reload.queued_at }.from(nil).to(current_time) + end + end + + context 'when initial state is :created' do + let(:commit_status) { create(:commit_status, :created) } + + it_behaves_like 'commit status enqueued' + end + + context 'when initial state is :skipped' do + let(:commit_status) { create(:commit_status, :skipped) } + + it_behaves_like 'commit status enqueued' + end + + context 'when initial state is :manual' do + let(:commit_status) { create(:commit_status, :manual) } + + it_behaves_like 'commit status enqueued' + end + end end diff --git a/spec/models/concerns/awardable_spec.rb b/spec/models/concerns/awardable_spec.rb index 34f923d3f0c..a980cff28fb 100644 --- a/spec/models/concerns/awardable_spec.rb +++ b/spec/models/concerns/awardable_spec.rb @@ 
-46,6 +46,31 @@ describe Awardable do end end + describe '#user_can_award?' do + let(:user) { create(:user) } + + before do + issue.project.add_guest(user) + end + + it 'does not allow upvoting or downvoting your own issue' do + issue.update!(author: user) + + expect(issue.user_can_award?(user, AwardEmoji::DOWNVOTE_NAME)).to be_falsy + expect(issue.user_can_award?(user, AwardEmoji::UPVOTE_NAME)).to be_falsy + end + + it 'is truthy when the user is allowed to award emoji' do + expect(issue.user_can_award?(user, AwardEmoji::UPVOTE_NAME)).to be_truthy + end + + it 'is falsy when the project is archived' do + issue.project.update!(archived: true) + + expect(issue.user_can_award?(user, AwardEmoji::UPVOTE_NAME)).to be_falsy + end + end + describe "#toggle_award_emoji" do it "adds an emoji if it isn't awarded yet" do expect { issue.toggle_award_emoji("thumbsup", award_emoji.user) }.to change { AwardEmoji.count }.by(1) diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb index 3c7f578975b..b3797c1fb46 100644 --- a/spec/models/concerns/cache_markdown_field_spec.rb +++ b/spec/models/concerns/cache_markdown_field_spec.rb @@ -72,7 +72,7 @@ describe CacheMarkdownField do let(:updated_markdown) { '`Bar`' } let(:updated_html) { '<p dir="auto"><code>Bar</code></p>' } - let(:thing) { ThingWithMarkdownFields.new(foo: markdown, foo_html: html, cached_markdown_version: CacheMarkdownField::CACHE_VERSION) } + let(:thing) { ThingWithMarkdownFields.new(foo: markdown, foo_html: html, cached_markdown_version: CacheMarkdownField::CACHE_COMMONMARK_VERSION) } describe '.attributes' do it 'excludes cache attributes' do @@ -89,17 +89,24 @@ describe CacheMarkdownField do it { expect(thing.foo).to eq(markdown) } it { expect(thing.foo_html).to eq(html) } it { expect(thing.foo_html_changed?).not_to be_truthy } - it { expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) } + it { expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_COMMONMARK_VERSION) } end context 'a changed markdown field' do - before do - thing.foo = updated_markdown - thing.save + shared_examples 'with cache version' do |cache_version| + let(:thing) { ThingWithMarkdownFields.new(foo: markdown, foo_html: html, cached_markdown_version: cache_version) } + + before do + thing.foo = updated_markdown + thing.save + end + + it { expect(thing.foo_html).to eq(updated_html) } + it { expect(thing.cached_markdown_version).to eq(cache_version) } end - it { expect(thing.foo_html).to eq(updated_html) } - it { expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) } + it_behaves_like 'with cache version', CacheMarkdownField::CACHE_REDCARPET_VERSION + it_behaves_like 'with cache version', CacheMarkdownField::CACHE_COMMONMARK_VERSION end context 'when a markdown field is set repeatedly to an empty string' do @@ -123,15 +130,22 @@ describe CacheMarkdownField do end context 'a non-markdown field changed' do - before do - thing.bar = 'OK' - thing.save + shared_examples 'with cache version' do |cache_version| + let(:thing) { ThingWithMarkdownFields.new(foo: markdown, foo_html: html, cached_markdown_version: cache_version) } + + before do + thing.bar = 'OK' + thing.save + end + + it { expect(thing.bar).to eq('OK') } + it { expect(thing.foo).to eq(markdown) } + it { expect(thing.foo_html).to eq(html) } + it { expect(thing.cached_markdown_version).to eq(cache_version) } end - it { expect(thing.bar).to eq('OK') } - it { expect(thing.foo).to eq(markdown) 
} - it { expect(thing.foo_html).to eq(html) } - it { expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) } + it_behaves_like 'with cache version', CacheMarkdownField::CACHE_REDCARPET_VERSION + it_behaves_like 'with cache version', CacheMarkdownField::CACHE_COMMONMARK_VERSION end context 'version is out of date' do @@ -142,59 +156,85 @@ describe CacheMarkdownField do end it { expect(thing.foo_html).to eq(updated_html) } - it { expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) } + it { expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_REDCARPET_VERSION) } end describe '#cached_html_up_to_date?' do - subject { thing.cached_html_up_to_date?(:foo) } + shared_examples 'with cache version' do |cache_version| + let(:thing) { ThingWithMarkdownFields.new(foo: markdown, foo_html: html, cached_markdown_version: cache_version) } - it 'returns false when the version is absent' do - thing.cached_markdown_version = nil + subject { thing.cached_html_up_to_date?(:foo) } - is_expected.to be_falsy - end + it 'returns false when the version is absent' do + thing.cached_markdown_version = nil - it 'returns false when the version is too early' do - thing.cached_markdown_version -= 1 + is_expected.to be_falsy + end - is_expected.to be_falsy - end + it 'returns false when the version is too early' do + thing.cached_markdown_version -= 1 - it 'returns false when the version is too late' do - thing.cached_markdown_version += 1 + is_expected.to be_falsy + end - is_expected.to be_falsy - end + it 'returns false when the version is too late' do + thing.cached_markdown_version += 1 - it 'returns true when the version is just right' do - thing.cached_markdown_version = CacheMarkdownField::CACHE_VERSION + is_expected.to be_falsy + end - is_expected.to be_truthy - end + it 'returns true when the version is just right' do + thing.cached_markdown_version = cache_version - it 'returns false if markdown has been changed but html has not' do - thing.foo = updated_html + is_expected.to be_truthy + end - is_expected.to be_falsy - end + it 'returns false if markdown has been changed but html has not' do + thing.foo = updated_html - it 'returns true if markdown has not been changed but html has' do - thing.foo_html = updated_html + is_expected.to be_falsy + end + + it 'returns true if markdown has not been changed but html has' do + thing.foo_html = updated_html - is_expected.to be_truthy + is_expected.to be_truthy + end + + it 'returns true if markdown and html have both been changed' do + thing.foo = updated_markdown + thing.foo_html = updated_html + + is_expected.to be_truthy + end + + it 'returns false if the markdown field is set but the html is not' do + thing.foo_html = nil + + is_expected.to be_falsy + end end - it 'returns true if markdown and html have both been changed' do - thing.foo = updated_markdown - thing.foo_html = updated_html + it_behaves_like 'with cache version', CacheMarkdownField::CACHE_REDCARPET_VERSION + it_behaves_like 'with cache version', CacheMarkdownField::CACHE_COMMONMARK_VERSION + end + + describe '#latest_cached_markdown_version' do + subject { thing.latest_cached_markdown_version } - is_expected.to be_truthy + it 'returns redcarpet version' do + thing.cached_markdown_version = CacheMarkdownField::CACHE_COMMONMARK_VERSION_START - 1 + is_expected.to eq(CacheMarkdownField::CACHE_REDCARPET_VERSION) end - it 'returns false if the markdown field is set but the html is not' do - thing.foo_html = nil + it 'returns commonmark 
version' do + thing.cached_markdown_version = CacheMarkdownField::CACHE_COMMONMARK_VERSION_START + 1 + is_expected.to eq(CacheMarkdownField::CACHE_COMMONMARK_VERSION) + end - is_expected.to be_falsy + it 'returns default version when version is nil' do + thing.cached_markdown_version = nil + is_expected.to eq(CacheMarkdownField::CACHE_REDCARPET_VERSION) end end @@ -221,37 +261,44 @@ describe CacheMarkdownField do thing.cached_markdown_version = nil thing.refresh_markdown_cache - expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) + expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_REDCARPET_VERSION) end end describe '#refresh_markdown_cache!' do - before do - thing.foo = updated_markdown - end + shared_examples 'with cache version' do |cache_version| + let(:thing) { ThingWithMarkdownFields.new(foo: markdown, foo_html: html, cached_markdown_version: cache_version) } - it 'fills all html fields' do - thing.refresh_markdown_cache! + before do + thing.foo = updated_markdown + end - expect(thing.foo_html).to eq(updated_html) - expect(thing.foo_html_changed?).to be_truthy - expect(thing.baz_html_changed?).to be_truthy - end + it 'fills all html fields' do + thing.refresh_markdown_cache! - it 'skips saving if not persisted' do - expect(thing).to receive(:persisted?).and_return(false) - expect(thing).not_to receive(:update_columns) + expect(thing.foo_html).to eq(updated_html) + expect(thing.foo_html_changed?).to be_truthy + expect(thing.baz_html_changed?).to be_truthy + end - thing.refresh_markdown_cache! - end + it 'skips saving if not persisted' do + expect(thing).to receive(:persisted?).and_return(false) + expect(thing).not_to receive(:update_columns) - it 'saves the changes using #update_columns' do - expect(thing).to receive(:persisted?).and_return(true) - expect(thing).to receive(:update_columns) - .with("foo_html" => updated_html, "baz_html" => "", "cached_markdown_version" => CacheMarkdownField::CACHE_VERSION) + thing.refresh_markdown_cache! + end - thing.refresh_markdown_cache! + it 'saves the changes using #update_columns' do + expect(thing).to receive(:persisted?).and_return(true) + expect(thing).to receive(:update_columns) + .with("foo_html" => updated_html, "baz_html" => "", "cached_markdown_version" => cache_version) + + thing.refresh_markdown_cache! 
+ end end + + it_behaves_like 'with cache version', CacheMarkdownField::CACHE_REDCARPET_VERSION + it_behaves_like 'with cache version', CacheMarkdownField::CACHE_COMMONMARK_VERSION end describe '#banzai_render_context' do @@ -299,7 +346,7 @@ describe CacheMarkdownField do expect(thing.foo_html).to eq(updated_html) expect(thing.baz_html).to eq(updated_html) - expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) + expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_REDCARPET_VERSION) end end @@ -319,7 +366,7 @@ describe CacheMarkdownField do expect(thing.foo_html).to eq(updated_html) expect(thing.baz_html).to eq(updated_html) - expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) + expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_REDCARPET_VERSION) end end end diff --git a/spec/models/concerns/chronic_duration_attribute_spec.rb b/spec/models/concerns/chronic_duration_attribute_spec.rb new file mode 100644 index 00000000000..8847623f705 --- /dev/null +++ b/spec/models/concerns/chronic_duration_attribute_spec.rb @@ -0,0 +1,129 @@ +require 'spec_helper' + +shared_examples 'ChronicDurationAttribute reader' do + it 'contains dynamically created reader method' do + expect(subject.class).to be_public_method_defined(virtual_field) + end + + it 'outputs chronic duration formatted value' do + subject.send("#{source_field}=", 120) + + expect(subject.send(virtual_field)).to eq('2m') + end + + context 'when value is set to nil' do + it 'outputs nil' do + subject.send("#{source_field}=", nil) + + expect(subject.send(virtual_field)).to be_nil + end + end +end + +shared_examples 'ChronicDurationAttribute writer' do + it 'contains dynamically created writer method' do + expect(subject.class).to be_public_method_defined("#{virtual_field}=") + end + + before do + subject.send("#{virtual_field}=", '10m') + end + + it 'parses chronic duration input' do + expect(subject.send(source_field)).to eq(600) + end + + it 'passes validation' do + expect(subject.valid?).to be_truthy + end + + context 'when negative input is used' do + before do + subject.send("#{source_field}=", 3600) + end + + it "doesn't raise exception" do + expect { subject.send("#{virtual_field}=", '-10m') }.not_to raise_error(ChronicDuration::DurationParseError) + end + + it "doesn't change value" do + expect { subject.send("#{virtual_field}=", '-10m') }.not_to change { subject.send(source_field) } + end + + it "doesn't pass validation" do + subject.send("#{virtual_field}=", '-10m') + + expect(subject.valid?).to be_falsey + expect(subject.errors&.messages).to include(virtual_field => ['is not a correct duration']) + end + end + + context 'when empty input is used' do + before do + subject.send("#{virtual_field}=", '') + end + + it 'writes default value' do + expect(subject.send(source_field)).to eq(default_value) + end + + it 'passes validation' do + expect(subject.valid?).to be_truthy + end + end + + context 'when nil input is used' do + before do + subject.send("#{virtual_field}=", nil) + end + + it 'writes default value' do + expect(subject.send(source_field)).to eq(default_value) + end + + it 'passes validation' do + expect(subject.valid?).to be_truthy + end + + it "doesn't raise exception" do + expect { subject.send("#{virtual_field}=", nil) }.not_to raise_error(NoMethodError) + end + end +end + +describe 'ChronicDurationAttribute' do + context 'when default value is not set' do + let(:source_field) {:maximum_timeout} + let(:virtual_field) 
{:maximum_timeout_human_readable} + let(:default_value) { nil } + + subject { create(:ci_runner) } + + it_behaves_like 'ChronicDurationAttribute reader' + it_behaves_like 'ChronicDurationAttribute writer' + end + + context 'when default value is set' do + let(:source_field) {:build_timeout} + let(:virtual_field) {:build_timeout_human_readable} + let(:default_value) { 3600 } + + subject { create(:project) } + + it_behaves_like 'ChronicDurationAttribute reader' + it_behaves_like 'ChronicDurationAttribute writer' + end +end + +describe 'ChronicDurationAttribute - reader' do + let(:source_field) {:timeout} + let(:virtual_field) {:timeout_human_readable} + + subject { create(:ci_build).ensure_metadata } + + it "doesn't contain dynamically created writer method" do + expect(subject.class).not_to be_public_method_defined("#{virtual_field}=") + end + + it_behaves_like 'ChronicDurationAttribute reader' +end diff --git a/spec/models/concerns/group_descendant_spec.rb b/spec/models/concerns/group_descendant_spec.rb index c163fb01a81..28352d8c961 100644 --- a/spec/models/concerns/group_descendant_spec.rb +++ b/spec/models/concerns/group_descendant_spec.rb @@ -79,9 +79,24 @@ describe GroupDescendant, :nested_groups do expect(described_class.build_hierarchy(groups)).to eq(expected_hierarchy) end + it 'tracks the exception when a parent was not preloaded' do + expect(Gitlab::Sentry).to receive(:track_exception).and_call_original + + expect { GroupDescendant.build_hierarchy([subsub_group]) }.to raise_error(ArgumentError) + end + + it 'recovers if a parent was not reloaded by querying for the parent' do + expected_hierarchy = { parent => { subgroup => subsub_group } } + + # this does not raise in production, so stubbing it here. + allow(Gitlab::Sentry).to receive(:track_exception) + + expect(GroupDescendant.build_hierarchy([subsub_group])).to eq(expected_hierarchy) + end + it 'raises an error if not all elements were preloaded' do expect { described_class.build_hierarchy([subsub_group]) } - .to raise_error('parent was not preloaded') + .to raise_error(/was not preloaded/) end end end diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb index 4b217df2e8f..05693f067e1 100644 --- a/spec/models/concerns/issuable_spec.rb +++ b/spec/models/concerns/issuable_spec.rb @@ -34,7 +34,7 @@ describe Issuable do subject { build(:issue) } before do - allow(subject).to receive(:set_iid).and_return(false) + allow(InternalId).to receive(:generate_next).and_return(nil) end it { is_expected.to validate_presence_of(:project) } @@ -176,7 +176,7 @@ describe Issuable do end end - describe "#sort" do + describe "#sort_by_attribute" do let(:project) { create(:project) } context "by milestone due date" do @@ -193,12 +193,12 @@ describe Issuable do let!(:issue3) { create(:issue, project: project) } it "sorts desc" do - issues = project.issues.sort('milestone_due_desc') + issues = project.issues.sort_by_attribute('milestone_due_desc') expect(issues).to match_array([issue2, issue1, issue, issue3]) end it "sorts asc" do - issues = project.issues.sort('milestone_due_asc') + issues = project.issues.sort_by_attribute('milestone_due_asc') expect(issues).to match_array([issue1, issue2, issue, issue3]) end end @@ -210,7 +210,7 @@ describe Issuable do it 'has no duplicates across pages' do sorted_issue_ids = 1.upto(10).map do |i| - project.issues.sort('milestone_due_desc').page(i).per(1).first.id + project.issues.sort_by_attribute('milestone_due_desc').page(i).per(1).first.id end expect(sorted_issue_ids).to 
eq(sorted_issue_ids.uniq) diff --git a/spec/models/concerns/uniquify_spec.rb b/spec/models/concerns/uniquify_spec.rb index 914730718e7..6cd2de6dcce 100644 --- a/spec/models/concerns/uniquify_spec.rb +++ b/spec/models/concerns/uniquify_spec.rb @@ -22,6 +22,15 @@ describe Uniquify do expect(result).to eq('test_string2') end + it 'allows passing an initial value for the counter' do + start_counting_from = 2 + uniquify = described_class.new(start_counting_from) + + result = uniquify.string('test_string') { |s| s == 'test_string' } + + expect(result).to eq('test_string2') + end + it 'allows passing in a base function that defines the location of the counter' do result = uniquify.string(-> (counter) { "test_#{counter}_string" }) do |s| s == 'test__string' diff --git a/spec/models/deploy_key_spec.rb b/spec/models/deploy_key_spec.rb index 3d7283e2164..41440c6d288 100644 --- a/spec/models/deploy_key_spec.rb +++ b/spec/models/deploy_key_spec.rb @@ -17,4 +17,25 @@ describe DeployKey, :mailer do should_not_email(user) end end + + describe '#user' do + let(:deploy_key) { create(:deploy_key) } + let(:user) { create(:user) } + + context 'when user is set' do + before do + deploy_key.user = user + end + + it 'returns the user' do + expect(deploy_key.user).to be(user) + end + end + + context 'when user is not set' do + it 'returns the ghost user' do + expect(deploy_key.user).to eq(User.ghost) + end + end + end end diff --git a/spec/models/deploy_token_spec.rb b/spec/models/deploy_token_spec.rb new file mode 100644 index 00000000000..780b200e837 --- /dev/null +++ b/spec/models/deploy_token_spec.rb @@ -0,0 +1,145 @@ +require 'spec_helper' + +describe DeployToken do + subject(:deploy_token) { create(:deploy_token) } + + it { is_expected.to have_many :project_deploy_tokens } + it { is_expected.to have_many(:projects).through(:project_deploy_tokens) } + + describe '#ensure_token' do + it 'should ensure a token' do + deploy_token.token = nil + deploy_token.save + + expect(deploy_token.token).not_to be_empty + end + end + + describe '#ensure_at_least_one_scope' do + context 'with at least one scope' do + it 'should be valid' do + is_expected.to be_valid + end + end + + context 'with no scopes' do + it 'should be invalid' do + deploy_token = build(:deploy_token, read_repository: false, read_registry: false) + + expect(deploy_token).not_to be_valid + expect(deploy_token.errors[:base].first).to eq("Scopes can't be blank") + end + end + end + + describe '#scopes' do + context 'with all the scopes' do + it 'should return scopes assigned to DeployToken' do + expect(deploy_token.scopes).to eq([:read_repository, :read_registry]) + end + end + + context 'with only one scope' do + it 'should return scopes assigned to DeployToken' do + deploy_token = create(:deploy_token, read_registry: false) + expect(deploy_token.scopes).to eq([:read_repository]) + end + end + end + + describe '#revoke!' do + it 'should update revoke attribute' do + deploy_token.revoke! + expect(deploy_token.revoked?).to be_truthy + end + end + + describe "#active?" do + context "when it has been revoked" do + it 'should return false' do + deploy_token.revoke! + expect(deploy_token.active?).to be_falsy + end + end + + context "when it hasn't been revoked" do + it 'should return true' do + expect(deploy_token.active?).to be_truthy + end + end + end + + describe '#username' do + it 'returns a hardcoded username' do + expect(deploy_token.username).to eq("gitlab+deploy-token-#{deploy_token.id}") + end + end + + describe '#has_access_to?' 
do + let(:project) { create(:project) } + + subject { deploy_token.has_access_to?(project) } + + context 'when deploy token is active and related to project' do + let(:deploy_token) { create(:deploy_token, projects: [project]) } + + it { is_expected.to be_truthy } + end + + context 'when deploy token is active but not related to project' do + let(:deploy_token) { create(:deploy_token) } + + it { is_expected.to be_falsy } + end + + context 'when deploy token is revoked and related to project' do + let(:deploy_token) { create(:deploy_token, :revoked, projects: [project]) } + + it { is_expected.to be_falsy } + end + + context 'when deploy token is revoked and not related to the project' do + let(:deploy_token) { create(:deploy_token, :revoked) } + + it { is_expected.to be_falsy } + end + end + + describe '#expires_at' do + context 'when using Forever.date' do + let(:deploy_token) { create(:deploy_token, expires_at: nil) } + + it 'should return nil' do + expect(deploy_token.expires_at).to be_nil + end + end + + context 'when using a personalized date' do + let(:expires_at) { Date.today + 5.months } + let(:deploy_token) { create(:deploy_token, expires_at: expires_at) } + + it 'should return the personalized date' do + expect(deploy_token.expires_at).to eq(expires_at) + end + end + end + + describe '#expires_at=' do + context 'when passing nil' do + let(:deploy_token) { create(:deploy_token, expires_at: nil) } + + it 'should assign Forever.date' do + expect(deploy_token.read_attribute(:expires_at)).to eq(Forever.date) + end + end + + context 'when passign a value' do + let(:expires_at) { Date.today + 5.months } + let(:deploy_token) { create(:deploy_token, expires_at: expires_at) } + + it 'should respect the value' do + expect(deploy_token.read_attribute(:expires_at)).to eq(expires_at) + end + end + end +end diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb index 412eca4a56b..56161bfcc28 100644 --- a/spec/models/environment_spec.rb +++ b/spec/models/environment_spec.rb @@ -368,6 +368,32 @@ describe Environment do end end + describe '#deployment_platform' do + context 'when there is a deployment platform for environment' do + let!(:cluster) do + create(:cluster, :provided_by_gcp, + environment_scope: '*', projects: [project]) + end + + it 'finds a deployment platform' do + expect(environment.deployment_platform).to eq cluster.platform + end + end + + context 'when there is no deployment platform for environment' do + it 'returns nil' do + expect(environment.deployment_platform).to be_nil + end + end + + it 'checks deployment platforms associated with a project' do + expect(project).to receive(:deployment_platform) + .with(environment: environment.name) + + environment.deployment_platform + end + end + describe '#terminals' do subject { environment.terminals } diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb index abfc0896a41..d620943693c 100644 --- a/spec/models/group_spec.rb +++ b/spec/models/group_spec.rb @@ -240,7 +240,7 @@ describe Group do it "is false if avatar is html page" do group.update_attribute(:avatar, 'uploads/avatar.html') - expect(group.avatar_type).to eq(["only images allowed"]) + expect(group.avatar_type).to eq(["file format is not supported. 
Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff"]) end end diff --git a/spec/models/internal_id_spec.rb b/spec/models/internal_id_spec.rb new file mode 100644 index 00000000000..8ef91e8fab5 --- /dev/null +++ b/spec/models/internal_id_spec.rb @@ -0,0 +1,139 @@ +require 'spec_helper' + +describe InternalId do + let(:project) { create(:project) } + let(:usage) { :issues } + let(:issue) { build(:issue, project: project) } + let(:scope) { { project: project } } + let(:init) { ->(s) { s.project.issues.maximum(:iid) } } + + context 'validations' do + it { is_expected.to validate_presence_of(:usage) } + end + + describe '.generate_next' do + subject { described_class.generate_next(issue, scope, usage, init) } + + context 'in the absence of a record' do + it 'creates a record if not yet present' do + expect { subject }.to change { described_class.count }.from(0).to(1) + end + + it 'stores record attributes' do + subject + + described_class.first.tap do |record| + expect(record.project).to eq(project) + expect(record.usage).to eq(usage.to_s) + end + end + + context 'with existing issues' do + before do + rand(1..10).times { create(:issue, project: project) } + described_class.delete_all + end + + it 'calculates last_value values automatically' do + expect(subject).to eq(project.issues.size + 1) + end + end + + context 'with an InternalId record present and existing issues with a higher internal id' do + # This can happen if the old NonatomicInternalId is still in use + before do + issues = Array.new(rand(1..10)).map { create(:issue, project: project) } + + issue = issues.last + issue.iid = issues.map { |i| i.iid }.max + 1 + issue.save + end + + let(:maximum_iid) { project.issues.map { |i| i.iid }.max } + + it 'updates last_value to the maximum internal id present' do + subject + + expect(described_class.find_by(project: project, usage: described_class.usages[usage.to_s]).last_value).to eq(maximum_iid + 1) + end + + it 'returns next internal id correctly' do + expect(subject).to eq(maximum_iid + 1) + end + end + + context 'with concurrent inserts on table' do + it 'looks up the record if it was created concurrently' do + args = { **scope, usage: described_class.usages[usage.to_s] } + record = double + expect(described_class).to receive(:find_by).with(args).and_return(nil) # first call, record not present + expect(described_class).to receive(:find_by).with(args).and_return(record) # second call, record was created by another process + expect(described_class).to receive(:create!).and_raise(ActiveRecord::RecordNotUnique, 'record not unique') + expect(record).to receive(:increment_and_save!) + + subject + end + end + end + + it 'generates a strictly monotone, gapless sequence' do + seq = (0..rand(100)).map do + described_class.generate_next(issue, scope, usage, init) + end + normalized = seq.map { |i| i - seq.min } + + expect(normalized).to eq((0..seq.size - 1).to_a) + end + + context 'with an insufficient schema version' do + before do + described_class.reset_column_information + expect(ActiveRecord::Migrator).to receive(:current_version).and_return(InternalId::REQUIRED_SCHEMA_VERSION - 1) + end + + let(:init) { double('block') } + + it 'calculates next internal ids on the fly' do + val = rand(1..100) + + expect(init).to receive(:call).with(issue).and_return(val) + expect(subject).to eq(val + 1) + end + end + end + + describe '#increment_and_save!' 
do + let(:id) { create(:internal_id) } + let(:maximum_iid) { nil } + subject { id.increment_and_save!(maximum_iid) } + + it 'returns incremented iid' do + value = id.last_value + + expect(subject).to eq(value + 1) + end + + it 'saves the record' do + subject + + expect(id.changed?).to be_falsey + end + + context 'with last_value=nil' do + let(:id) { build(:internal_id, last_value: nil) } + + it 'returns 1' do + expect(subject).to eq(1) + end + end + + context 'with maximum_iid given' do + let(:id) { create(:internal_id, last_value: 1) } + let(:maximum_iid) { id.last_value + 10 } + + it 'returns maximum_iid instead' do + expect(subject).to eq(12) + end + end + end +end diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb index feed7968f09..128acf83686 100644 --- a/spec/models/issue_spec.rb +++ b/spec/models/issue_spec.rb @@ -9,11 +9,17 @@ describe Issue do describe 'modules' do subject { described_class } - it { is_expected.to include_module(InternalId) } it { is_expected.to include_module(Issuable) } it { is_expected.to include_module(Referable) } it { is_expected.to include_module(Sortable) } it { is_expected.to include_module(Taskable) } + + it_behaves_like 'AtomicInternalId' do + let(:internal_id_attribute) { :iid } + let(:instance) { build(:issue) } + let(:scope_attrs) { { project: instance.project } } + let(:usage) { :issues } + end end subject { create(:issue) } @@ -370,6 +376,48 @@ describe Issue do end end + describe '#suggested_branch_name' do + let(:repository) { double } + + subject { build(:issue) } + + before do + allow(subject.project).to receive(:repository).and_return(repository) + end + + context '#to_branch_name does not exists' do + before do + allow(repository).to receive(:branch_exists?).and_return(false) + end + + it 'returns #to_branch_name' do + expect(subject.suggested_branch_name).to eq(subject.to_branch_name) + end + end + + context '#to_branch_name exists not ending with -index' do + before do + allow(repository).to receive(:branch_exists?).and_return(true) + allow(repository).to receive(:branch_exists?).with(/#{subject.to_branch_name}-\d/).and_return(false) + end + + it 'returns #to_branch_name ending with -2' do + expect(subject.suggested_branch_name).to eq("#{subject.to_branch_name}-2") + end + end + + context '#to_branch_name exists ending with -index' do + before do + allow(repository).to receive(:branch_exists?).and_return(true) + allow(repository).to receive(:branch_exists?).with("#{subject.to_branch_name}-3").and_return(false) + end + + it 'returns #to_branch_name ending with max index + 1' do + expect(subject.suggested_branch_name).to eq("#{subject.to_branch_name}-3") + end + end + end + describe '#has_related_branch?' do let(:issue) { create(:issue, title: "Blue Bell Knoll") } subject { issue.has_related_branch? } @@ -419,6 +467,27 @@ describe Issue do end end + describe '#can_be_worked_on?' 
do + let(:project) { build(:project) } + subject { build(:issue, :opened, project: project) } + + context 'is closed' do + subject { build(:issue, :closed) } + + it { is_expected.not_to be_can_be_worked_on } + end + + context 'project is forked' do + before do + allow(project).to receive(:forked?).and_return(true) + end + + it { is_expected.not_to be_can_be_worked_on } + end + + it { is_expected.to be_can_be_worked_on } + end + describe '#participants' do context 'using a public project' do let(:project) { create(:project, :public) } diff --git a/spec/models/lfs_object_spec.rb b/spec/models/lfs_object_spec.rb new file mode 100644 index 00000000000..ba06ff42d87 --- /dev/null +++ b/spec/models/lfs_object_spec.rb @@ -0,0 +1,124 @@ +require 'spec_helper' + +describe LfsObject do + describe '#local_store?' do + it 'returns true when file_store is nil' do + subject.file_store = nil + + expect(subject.local_store?).to eq true + end + + it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do + subject.file_store = LfsObjectUploader::Store::LOCAL + + expect(subject.local_store?).to eq true + end + + it 'returns false when file_store is equal to LfsObjectUploader::Store::REMOTE' do + subject.file_store = LfsObjectUploader::Store::REMOTE + + expect(subject.local_store?).to eq false + end + end + + describe '#schedule_background_upload' do + before do + stub_lfs_setting(enabled: true) + end + + subject { create(:lfs_object, :with_file) } + + context 'when object storage is disabled' do + before do + stub_lfs_object_storage(enabled: false) + end + + it 'does not schedule the migration' do + expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async) + + subject + end + end + + context 'when object storage is enabled' do + context 'when background upload is enabled' do + context 'when it is licensed' do + before do + stub_lfs_object_storage(background_upload: true) + end + + it 'schedules the model for migration' do + expect(ObjectStorage::BackgroundMoveWorker) + .to receive(:perform_async) + .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric)) + .once + + subject + end + + it 'schedules the model for migration once' do + expect(ObjectStorage::BackgroundMoveWorker) + .to receive(:perform_async) + .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric)) + .once + + lfs_object = create(:lfs_object) + lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png") + lfs_object.save! 
+ end + end + end + + context 'when background upload is disabled' do + before do + stub_lfs_object_storage(background_upload: false) + end + + it 'schedules the model for migration' do + expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async) + + subject + end + end + end + + describe 'file is being stored' do + let(:lfs_object) { create(:lfs_object, :with_file) } + + context 'when object has nil store' do + before do + lfs_object.update_column(:file_store, nil) + lfs_object.reload + end + + it 'is stored locally' do + expect(lfs_object.file_store).to be(nil) + expect(lfs_object.file).to be_file_storage + expect(lfs_object.file.object_store).to eq(ObjectStorage::Store::LOCAL) + end + end + + context 'when existing object has local store' do + it 'is stored locally' do + expect(lfs_object.file_store).to be(ObjectStorage::Store::LOCAL) + expect(lfs_object.file).to be_file_storage + expect(lfs_object.file.object_store).to eq(ObjectStorage::Store::LOCAL) + end + end + + context 'when direct upload is enabled' do + before do + stub_lfs_object_storage(direct_upload: true) + end + + context 'when file is stored' do + it 'is stored remotely' do + expect(lfs_object.file_store).to eq(ObjectStorage::Store::REMOTE) + expect(lfs_object.file).not_to be_file_storage + expect(lfs_object.file.object_store).to eq(ObjectStorage::Store::REMOTE) + end + end + end + end + end +end diff --git a/spec/models/merge_request_diff_commit_spec.rb b/spec/models/merge_request_diff_commit_spec.rb index 7709cf43200..8c01a7ac18f 100644 --- a/spec/models/merge_request_diff_commit_spec.rb +++ b/spec/models/merge_request_diff_commit_spec.rb @@ -36,7 +36,7 @@ describe MergeRequestDiffCommit do "committer_email": "dmitriy.zaporozhets@gmail.com", "merge_request_diff_id": merge_request_diff_id, "relative_order": 0, - "sha": sha_attribute.type_cast_for_database('5937ac0a7beb003549fc5fd26fc247adbce4a52e') + "sha": sha_attribute.serialize("5937ac0a7beb003549fc5fd26fc247adbce4a52e") }, { "message": "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n", @@ -48,7 +48,7 @@ describe MergeRequestDiffCommit do "committer_email": "dmitriy.zaporozhets@gmail.com", "merge_request_diff_id": merge_request_diff_id, "relative_order": 1, - "sha": sha_attribute.type_cast_for_database('570e7b2abdd848b95f2f578043fc23bd6f6fd24d') + "sha": sha_attribute.serialize("570e7b2abdd848b95f2f578043fc23bd6f6fd24d") } ] end @@ -79,7 +79,7 @@ describe MergeRequestDiffCommit do "committer_email": "alejorro70@gmail.com", "merge_request_diff_id": merge_request_diff_id, "relative_order": 0, - "sha": sha_attribute.type_cast_for_database('ba3343bc4fa403a8dfbfcab7fc1a8c29ee34bd69') + "sha": sha_attribute.serialize("ba3343bc4fa403a8dfbfcab7fc1a8c29ee34bd69") }] end diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb index 4e783acbd8b..f73f44ca0ad 100644 --- a/spec/models/merge_request_spec.rb +++ b/spec/models/merge_request_spec.rb @@ -17,7 +17,7 @@ describe MergeRequest do describe 'modules' do subject { described_class } - it { is_expected.to include_module(InternalId) } + it { is_expected.to include_module(NonatomicInternalId) } it { is_expected.to include_module(Issuable) } it { is_expected.to include_module(Referable) } it { is_expected.to include_module(Sortable) } @@ -1961,6 +1961,17 @@ describe MergeRequest do expect(subject.merge_request_diff_for(merge_request_diff3.head_commit_sha)).to eq(merge_request_diff3) end end + + it 'runs a single query on the initial call, 
and none afterwards' do + expect { subject.merge_request_diff_for(merge_request_diff1.diff_refs) } + .not_to exceed_query_limit(1) + + expect { subject.merge_request_diff_for(merge_request_diff2.diff_refs) } + .not_to exceed_query_limit(0) + + expect { subject.merge_request_diff_for(merge_request_diff3.head_commit_sha) } + .not_to exceed_query_limit(0) + end end describe '#version_params_for' do diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb index 47f4a792e5c..c7460981a32 100644 --- a/spec/models/milestone_spec.rb +++ b/spec/models/milestone_spec.rb @@ -96,7 +96,9 @@ describe Milestone do allow(milestone).to receive(:due_date).and_return(Date.today.prev_year) end - it { expect(milestone.expired?).to be_truthy } + it 'returns true when due_date is in the past' do + expect(milestone.expired?).to be_truthy + end end context "not expired" do @@ -104,17 +106,19 @@ describe Milestone do allow(milestone).to receive(:due_date).and_return(Date.today.next_year) end - it { expect(milestone.expired?).to be_falsey } + it 'returns false when due_date is in the future' do + expect(milestone.expired?).to be_falsey + end end end describe '#upcoming?' do - it 'returns true' do + it 'returns true when start_date is in the future' do milestone = build(:milestone, start_date: Time.now + 1.month) expect(milestone.upcoming?).to be_truthy end - it 'returns false' do + it 'returns false when start_date is in the past' do milestone = build(:milestone, start_date: Date.today.prev_year) expect(milestone.upcoming?).to be_falsey end diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb index ee142718f7e..62e95a622eb 100644 --- a/spec/models/namespace_spec.rb +++ b/spec/models/namespace_spec.rb @@ -305,7 +305,7 @@ describe Namespace do end describe '#rm_dir', 'callback' do - let(:repository_storage_path) { Gitlab.config.repositories.storages.default['path'] } + let(:repository_storage_path) { Gitlab.config.repositories.storages.default.legacy_disk_path } let(:path_in_dir) { File.join(repository_storage_path, namespace.full_path) } let(:deleted_path) { namespace.full_path.gsub(namespace.path, "#{namespace.full_path}+#{namespace.id}+deleted") } let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) } diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb index c853f707e6d..6a6c71e6c82 100644 --- a/spec/models/note_spec.rb +++ b/spec/models/note_spec.rb @@ -91,6 +91,23 @@ describe Note do it "keeps the commit around" do expect(note.project.repository.kept_around?(commit.id)).to be_truthy end + + it 'does not generate N+1 queries for participants', :request_store do + def retrieve_participants + commit.notes_with_associations.map(&:participants).to_a + end + + # Project authorization checks are cached, establish a baseline + retrieve_participants + + control_count = ActiveRecord::QueryRecorder.new do + retrieve_participants + end + + create(:note_on_commit, project: note.project, note: 'another note', noteable_id: commit.id) + + expect { retrieve_participants }.not_to exceed_query_limit(control_count) + end end describe 'authorization' do @@ -191,6 +208,21 @@ describe Note do end end + describe "confidential?" 
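The participants check above follows GitLab's standard N+1 guard: record a baseline with ActiveRecord::QueryRecorder, add one more record, and assert the query count does not grow. A minimal sketch of that pattern, assuming the QueryRecorder spec support class and the custom exceed_query_limit matcher are loaded; retrieve_records and the :widget factory are hypothetical stand-ins, not names from this diff:

it 'does not run additional queries per extra record', :request_store do
  retrieve_records # warm caches so the baseline is stable

  control_count = ActiveRecord::QueryRecorder.new { retrieve_records }.count

  create(:widget) # one extra record should not add queries

  expect { retrieve_records }.not_to exceed_query_limit(control_count)
end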
do + it "delegates to noteable" do + issue_note = build(:note, :on_issue) + confidential_note = build(:note, noteable: create(:issue, confidential: true)) + + expect(issue_note.confidential?).to be_falsy + expect(confidential_note.confidential?).to be_truthy + end + + it "is falsey when noteable can't be confidential" do + commit_note = build(:note_on_commit) + expect(commit_note.confidential?).to be_falsy + end + end + describe "cross_reference_not_visible_for?" do let(:private_user) { create(:user) } let(:private_project) { create(:project, namespace: private_user.namespace) { |p| p.add_master(private_user) } } diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb index 95713d8b85b..4b85c5e8720 100644 --- a/spec/models/pages_domain_spec.rb +++ b/spec/models/pages_domain_spec.rb @@ -18,24 +18,63 @@ describe PagesDomain do it { is_expected.to validate_uniqueness_of(:domain).case_insensitive } end - { - 'my.domain.com' => true, - '123.456.789' => true, - '0x12345.com' => true, - '0123123' => true, - '_foo.com' => false, - 'reserved.com' => false, - 'a.reserved.com' => false, - nil => false - }.each do |value, validity| - context "domain #{value.inspect} validity" do - before do - allow(Settings.pages).to receive(:host).and_return('reserved.com') + describe "hostname" do + { + 'my.domain.com' => true, + '123.456.789' => true, + '0x12345.com' => true, + '0123123' => true, + '_foo.com' => false, + 'reserved.com' => false, + 'a.reserved.com' => false, + nil => false + }.each do |value, validity| + context "domain #{value.inspect} validity" do + before do + allow(Settings.pages).to receive(:host).and_return('reserved.com') + end + + let(:domain) { value } + + it { expect(pages_domain.valid?).to eq(validity) } + end + end + end + + describe "HTTPS-only" do + using RSpec::Parameterized::TableSyntax + + let(:domain) { 'my.domain.com' } + + let(:project) do + instance_double(Project, pages_https_only?: pages_https_only) + end + + let(:pages_domain) do + build(:pages_domain, certificate: certificate, key: key).tap do |pd| + allow(pd).to receive(:project).and_return(project) + pd.valid? 
end + end - let(:domain) { value } + where(:pages_https_only, :certificate, :key, :errors_on) do + attributes = attributes_for(:pages_domain) + cert, key = attributes.fetch_values(:certificate, :key) + + true | nil | nil | %i(certificate key) + true | cert | nil | %i(key) + true | nil | key | %i(certificate key) + true | cert | key | [] + false | nil | nil | [] + false | cert | nil | %i(key) + false | nil | key | %i(key) + false | cert | key | [] + end - it { expect(pages_domain.valid?).to eq(validity) } + with_them do + it "is adds the expected errors" do + expect(pages_domain.errors.keys).to eq errors_on + end end end end @@ -43,26 +82,26 @@ describe PagesDomain do describe 'validate certificate' do subject { domain } - context 'when only certificate is specified' do - let(:domain) { build(:pages_domain, :with_certificate) } + context 'with matching key' do + let(:domain) { build(:pages_domain) } - it { is_expected.not_to be_valid } + it { is_expected.to be_valid } end - context 'when only key is specified' do - let(:domain) { build(:pages_domain, :with_key) } + context 'when no certificate is specified' do + let(:domain) { build(:pages_domain, :without_certificate) } it { is_expected.not_to be_valid } end - context 'with matching key' do - let(:domain) { build(:pages_domain, :with_certificate, :with_key) } + context 'when no key is specified' do + let(:domain) { build(:pages_domain, :without_key) } - it { is_expected.to be_valid } + it { is_expected.not_to be_valid } end context 'for not matching key' do - let(:domain) { build(:pages_domain, :with_missing_chain, :with_key) } + let(:domain) { build(:pages_domain, :with_missing_chain) } it { is_expected.not_to be_valid } end @@ -103,30 +142,26 @@ describe PagesDomain do describe '#url' do subject { domain.url } - context 'without the certificate' do - let(:domain) { build(:pages_domain, certificate: '') } + let(:domain) { build(:pages_domain) } - it { is_expected.to eq("http://#{domain.domain}") } - end + it { is_expected.to eq("https://#{domain.domain}") } - context 'with a certificate' do - let(:domain) { build(:pages_domain, :with_certificate) } + context 'without the certificate' do + let(:domain) { build(:pages_domain, :without_certificate) } - it { is_expected.to eq("https://#{domain.domain}") } + it { is_expected.to eq("http://#{domain.domain}") } end end describe '#has_matching_key?' do subject { domain.has_matching_key? } - context 'for matching key' do - let(:domain) { build(:pages_domain, :with_certificate, :with_key) } + let(:domain) { build(:pages_domain) } - it { is_expected.to be_truthy } - end + it { is_expected.to be_truthy } context 'for invalid key' do - let(:domain) { build(:pages_domain, :with_missing_chain, :with_key) } + let(:domain) { build(:pages_domain, :with_missing_chain) } it { is_expected.to be_falsey } end @@ -136,7 +171,7 @@ describe PagesDomain do subject { domain.has_intermediates? } context 'for self signed' do - let(:domain) { build(:pages_domain, :with_certificate) } + let(:domain) { build(:pages_domain) } it { is_expected.to be_truthy } end @@ -162,7 +197,7 @@ describe PagesDomain do subject { domain.expired? 
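The HTTPS-only validation matrix above uses rspec-parameterized's table syntax: a where block declares the columns and rows, and with_them runs the examples once per row. A minimal sketch assuming the rspec-parameterized gem; the numeric example values are illustrative only:

describe 'adding two numbers' do
  using RSpec::Parameterized::TableSyntax

  where(:a, :b, :sum) do
    1 | 2 | 3
    5 | 8 | 13
  end

  with_them do
    it { expect(a + b).to eq(sum) }
  end
end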
} context 'for valid' do - let(:domain) { build(:pages_domain, :with_certificate) } + let(:domain) { build(:pages_domain) } it { is_expected.to be_falsey } end @@ -175,7 +210,7 @@ describe PagesDomain do end describe '#subject' do - let(:domain) { build(:pages_domain, :with_certificate) } + let(:domain) { build(:pages_domain) } subject { domain.subject } @@ -183,7 +218,7 @@ describe PagesDomain do end describe '#certificate_text' do - let(:domain) { build(:pages_domain, :with_certificate) } + let(:domain) { build(:pages_domain) } subject { domain.certificate_text } @@ -191,6 +226,18 @@ describe PagesDomain do it { is_expected.not_to be_empty } end + describe "#https?" do + context "when a certificate is present" do + subject { build(:pages_domain) } + it { is_expected.to be_https } + end + + context "when no certificate is present" do + subject { build(:pages_domain, :without_certificate) } + it { is_expected.not_to be_https } + end + end + describe '#update_daemon' do it 'runs when the domain is created' do domain = build(:pages_domain) @@ -267,29 +314,30 @@ describe PagesDomain do end context 'TLS configuration' do - set(:domain_with_tls) { create(:pages_domain, :with_key, :with_certificate) } + set(:domain_without_tls) { create(:pages_domain, :without_certificate, :without_key) } + set(:domain) { create(:pages_domain) } - let(:cert1) { domain_with_tls.certificate } + let(:cert1) { domain.certificate } let(:cert2) { cert1 + ' ' } - let(:key1) { domain_with_tls.key } + let(:key1) { domain.key } let(:key2) { key1 + ' ' } it 'updates when added' do - expect(domain).to receive(:update_daemon) + expect(domain_without_tls).to receive(:update_daemon) - domain.update!(key: key1, certificate: cert1) + domain_without_tls.update!(key: key1, certificate: cert1) end it 'updates when changed' do - expect(domain_with_tls).to receive(:update_daemon) + expect(domain).to receive(:update_daemon) - domain_with_tls.update!(key: key2, certificate: cert2) + domain.update!(key: key2, certificate: cert2) end it 'updates when removed' do - expect(domain_with_tls).to receive(:update_daemon) + expect(domain).to receive(:update_daemon) - domain_with_tls.update!(key: nil, certificate: nil) + domain.update!(key: nil, certificate: nil) end end end diff --git a/spec/models/project_deploy_token_spec.rb b/spec/models/project_deploy_token_spec.rb new file mode 100644 index 00000000000..9e2e40c2e8f --- /dev/null +++ b/spec/models/project_deploy_token_spec.rb @@ -0,0 +1,14 @@ +require 'rails_helper' + +RSpec.describe ProjectDeployToken, type: :model do + let(:project) { create(:project) } + let(:deploy_token) { create(:deploy_token) } + subject(:project_deploy_token) { create(:project_deploy_token, project: project, deploy_token: deploy_token) } + + it { is_expected.to belong_to :project } + it { is_expected.to belong_to :deploy_token } + + it { is_expected.to validate_presence_of :deploy_token } + it { is_expected.to validate_presence_of :project } + it { is_expected.to validate_uniqueness_of(:deploy_token_id).scoped_to(:project_id) } +end diff --git a/spec/models/project_services/hipchat_service_spec.rb b/spec/models/project_services/hipchat_service_spec.rb index 3e2a166cdd6..0cd712e2f40 100644 --- a/spec/models/project_services/hipchat_service_spec.rb +++ b/spec/models/project_services/hipchat_service_spec.rb @@ -253,6 +253,21 @@ describe HipchatService do "<b>#{title}</b>" \ "<pre>issue <strong>note</strong></pre>") end + + context 'with confidential issue' do + before do + issue.update!(confidential: true) + end + + it 
'calls Hipchat API with issue comment' do + data = Gitlab::DataBuilder::Note.build(issue_note, user) + hipchat.execute(data) + + message = hipchat.send(:create_message, data) + + expect(message).to include("<pre>issue <strong>note</strong></pre>") + end + end end context 'when snippet comment event triggered' do diff --git a/spec/models/project_services/kubernetes_service_spec.rb b/spec/models/project_services/kubernetes_service_spec.rb index 622d8844a72..3be023a48c1 100644 --- a/spec/models/project_services/kubernetes_service_spec.rb +++ b/spec/models/project_services/kubernetes_service_spec.rb @@ -370,7 +370,7 @@ describe KubernetesService, :use_clean_rails_memory_store_caching do stub_kubeclient_pods(status: 500) end - it { expect { subject }.to raise_error(KubeException) } + it { expect { subject }.to raise_error(Kubeclient::HttpError) } end context 'when kubernetes responds with 404s' do diff --git a/spec/models/project_services/mattermost_slash_commands_service_spec.rb b/spec/models/project_services/mattermost_slash_commands_service_spec.rb index a5bdf9a9337..05d33cd3874 100644 --- a/spec/models/project_services/mattermost_slash_commands_service_spec.rb +++ b/spec/models/project_services/mattermost_slash_commands_service_spec.rb @@ -9,10 +9,11 @@ describe MattermostSlashCommandsService do let(:user) { create(:user) } before do - Mattermost::Session.base_uri("http://mattermost.example.com") + session = Mattermost::Session.new(nil) + session.base_uri = 'http://mattermost.example.com' allow_any_instance_of(Mattermost::Client).to receive(:with_session) - .and_yield(Mattermost::Session.new(nil)) + .and_yield(session) end describe '#configure' do diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index 4cf8d861595..2675c2f52c1 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -84,6 +84,8 @@ describe Project do it { is_expected.to have_many(:custom_attributes).class_name('ProjectCustomAttribute') } it { is_expected.to have_many(:project_badges).class_name('ProjectBadge') } it { is_expected.to have_many(:lfs_file_locks) } + it { is_expected.to have_many(:project_deploy_tokens) } + it { is_expected.to have_many(:deploy_tokens).through(:project_deploy_tokens) } context 'after initialized' do it "has a project_feature" do @@ -224,14 +226,14 @@ describe Project do project2 = build(:project, import_url: 'http://localhost:9000/t.git') expect(project2).to be_invalid - expect(project2.errors[:import_url]).to include('imports are not allowed from that URL') + expect(project2.errors[:import_url].first).to include('Requests to localhost are not allowed') end it "does not allow blocked import_url port" do project2 = build(:project, import_url: 'http://github.com:25/t.git') expect(project2).to be_invalid - expect(project2.errors[:import_url]).to include('imports are not allowed from that URL') + expect(project2.errors[:import_url].first).to include('Only allowed ports are 22, 80, 443') end describe 'project pending deletion' do @@ -922,7 +924,7 @@ describe Project do it 'is false if avatar is html page' do project.update_attribute(:avatar, 'uploads/avatar.html') - expect(project.avatar_type).to eq(['only images allowed']) + expect(project.avatar_type).to eq(['file format is not supported. 
Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff']) end end @@ -1101,8 +1103,8 @@ describe Project do before do storages = { - 'default' => { 'path' => 'tmp/tests/repositories' }, - 'picked' => { 'path' => 'tmp/tests/repositories' } + 'default' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/repositories'), + 'picked' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/repositories') } allow(Gitlab.config.repositories).to receive(:storages).and_return(storages) end @@ -1265,6 +1267,34 @@ describe Project do end end + describe '#pages_group_url' do + let(:group) { create :group, name: group_name } + let(:project) { create :project, namespace: group, name: project_name } + let(:domain) { 'Example.com' } + let(:port) { 1234 } + + subject { project.pages_group_url } + + before do + allow(Settings.pages).to receive(:host).and_return(domain) + allow(Gitlab.config.pages).to receive(:url).and_return("http://example.com:#{port}") + end + + context 'group page' do + let(:group_name) { 'Group' } + let(:project_name) { 'group.example.com' } + + it { is_expected.to eq("http://group.example.com:#{port}") } + end + + context 'project page' do + let(:group_name) { 'Group' } + let(:project_name) { 'Project' } + + it { is_expected.to eq("http://group.example.com:#{port}") } + end + end + describe '.search' do let(:project) { create(:project, description: 'kitten mittens') } @@ -1617,7 +1647,7 @@ describe Project do before do allow_any_instance_of(Gitlab::Shell).to receive(:import_repository) - .with(project.repository_storage_path, project.disk_path, project.import_url) + .with(project.repository_storage, project.disk_path, project.import_url) .and_return(true) expect_any_instance_of(Repository).to receive(:after_import) @@ -1770,10 +1800,7 @@ describe Project do let(:project) { forked_project_link.forked_to_project } it 'schedules a RepositoryForkWorker job' do - expect(RepositoryForkWorker).to receive(:perform_async).with( - project.id, - forked_from_project.repository_storage_path, - forked_from_project.disk_path).and_return(import_jid) + expect(RepositoryForkWorker).to receive(:perform_async).with(project.id).and_return(import_jid) expect(project.add_import_job).to eq(import_jid) end @@ -1997,6 +2024,22 @@ describe Project do expect(forked_project.lfs_storage_project).to eq forked_project end end + + describe '#all_lfs_objects' do + let(:lfs_object) { create(:lfs_object) } + + before do + project.lfs_objects << lfs_object + end + + it 'returns the lfs object for a project' do + expect(project.all_lfs_objects).to contain_exactly(lfs_object) + end + + it 'returns the lfs object for a fork' do + expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object) + end + end end describe '#pushes_since_gc' do @@ -2532,7 +2575,7 @@ describe Project do end end - describe '#remove_exports' do + describe '#remove_export' do let(:legacy_project) { create(:project, :legacy_storage, :with_export) } let(:project) { create(:project, :with_export) } @@ -2580,6 +2623,23 @@ describe Project do end end + describe '#remove_exported_project_file' do + let(:project) { create(:project, :with_export) } + + it 'removes the exported project file' do + exported_file = project.export_project_path + + expect(File.exist?(exported_file)).to be_truthy + + allow(FileUtils).to receive(:rm_f).and_call_original + expect(FileUtils).to receive(:rm_f).with(exported_file).and_call_original + + project.remove_exported_project_file + + expect(File.exist?(exported_file)).to 
be_falsy + end + end + describe '#forks_count' do it 'returns the number of forks' do project = build(:project) @@ -3185,6 +3245,7 @@ describe Project do expect(project).to receive(:update_project_counter_caches) expect(project).to receive(:remove_import_jid) expect(project).to receive(:after_create_default_branch) + expect(project).to receive(:refresh_markdown_cache!) project.after_import end @@ -3479,4 +3540,49 @@ describe Project do end end end + + describe "#pages_https_only?" do + subject { build(:project) } + + context "when HTTPS pages are disabled" do + it { is_expected.not_to be_pages_https_only } + end + + context "when HTTPS pages are enabled", :https_pages_enabled do + it { is_expected.to be_pages_https_only } + end + end + + describe "#pages_https_only? validation", :https_pages_enabled do + subject(:project) do + # set-up dirty object: + create(:project, pages_https_only: false).tap do |p| + p.pages_https_only = true + end + end + + context "when no domains are associated" do + it { is_expected.to be_valid } + end + + context "when domains including keys and certificates are associated" do + before do + allow(project) + .to receive(:pages_domains) + .and_return([instance_double(PagesDomain, https?: true)]) + end + + it { is_expected.to be_valid } + end + + context "when domains including no keys or certificates are associated" do + before do + allow(project) + .to receive(:pages_domains) + .and_return([instance_double(PagesDomain, https?: false)]) + end + + it { is_expected.not_to be_valid } + end + end end diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb index 5cff2af4aca..38a3590ad12 100644 --- a/spec/models/project_statistics_spec.rb +++ b/spec/models/project_statistics_spec.rb @@ -4,26 +4,6 @@ describe ProjectStatistics do let(:project) { create :project } let(:statistics) { project.statistics } - describe 'constants' do - describe 'STORAGE_COLUMNS' do - it 'is an array of symbols' do - expect(described_class::STORAGE_COLUMNS).to be_kind_of Array - expect(described_class::STORAGE_COLUMNS.map(&:class).uniq).to eq [Symbol] - end - end - - describe 'STATISTICS_COLUMNS' do - it 'is an array of symbols' do - expect(described_class::STATISTICS_COLUMNS).to be_kind_of Array - expect(described_class::STATISTICS_COLUMNS.map(&:class).uniq).to eq [Symbol] - end - - it 'includes all storage columns' do - expect(described_class::STATISTICS_COLUMNS & described_class::STORAGE_COLUMNS).to eq described_class::STORAGE_COLUMNS - end - end - end - describe 'associations' do it { is_expected.to belong_to(:project) } it { is_expected.to belong_to(:namespace) } @@ -63,7 +43,6 @@ describe ProjectStatistics do allow(statistics).to receive(:update_commit_count) allow(statistics).to receive(:update_repository_size) allow(statistics).to receive(:update_lfs_objects_size) - allow(statistics).to receive(:update_build_artifacts_size) allow(statistics).to receive(:update_storage_size) end @@ -76,7 +55,6 @@ describe ProjectStatistics do expect(statistics).to have_received(:update_commit_count) expect(statistics).to have_received(:update_repository_size) expect(statistics).to have_received(:update_lfs_objects_size) - expect(statistics).to have_received(:update_build_artifacts_size) end end @@ -89,7 +67,6 @@ describe ProjectStatistics do expect(statistics).to have_received(:update_lfs_objects_size) expect(statistics).not_to have_received(:update_commit_count) expect(statistics).not_to have_received(:update_repository_size) - expect(statistics).not_to 
have_received(:update_build_artifacts_size) end end end @@ -131,40 +108,6 @@ describe ProjectStatistics do end end - describe '#update_build_artifacts_size' do - let!(:pipeline) { create(:ci_pipeline, project: project) } - - context 'when new job artifacts are calculated' do - let(:ci_build) { create(:ci_build, pipeline: pipeline) } - - before do - create(:ci_job_artifact, :archive, project: pipeline.project, job: ci_build) - end - - it "stores the size of related build artifacts" do - statistics.update_build_artifacts_size - - expect(statistics.build_artifacts_size).to be(106365) - end - - it 'calculates related build artifacts by project' do - expect(Ci::JobArtifact).to receive(:artifacts_size_for).with(project) { 0 } - - statistics.update_build_artifacts_size - end - end - - context 'when legacy artifacts are used' do - let!(:ci_build) { create(:ci_build, pipeline: pipeline, artifacts_size: 10.megabytes) } - - it "stores the size of related build artifacts" do - statistics.update_build_artifacts_size - - expect(statistics.build_artifacts_size).to eq(10.megabytes) - end - end - end - describe '#update_storage_size' do it "sums all storage counters" do statistics.update!( @@ -177,4 +120,27 @@ describe ProjectStatistics do expect(statistics.storage_size).to eq 5 end end + + describe '.increment_statistic' do + it 'increases the statistic by that amount' do + expect { described_class.increment_statistic(project.id, :build_artifacts_size, 13) } + .to change { statistics.reload.build_artifacts_size } + .by(13) + end + + context 'when the amount is 0' do + it 'does not execute a query' do + project + expect { described_class.increment_statistic(project.id, :build_artifacts_size, 0) } + .not_to exceed_query_limit(0) + end + end + + context 'when using an invalid column' do + it 'raises an error' do + expect { described_class.increment_statistic(project.id, :id, 13) } + .to raise_error(ArgumentError, "Cannot increment attribute: id") + end + end + end end diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb index d87c1ca14f0..4e83f4353cf 100644 --- a/spec/models/project_wiki_spec.rb +++ b/spec/models/project_wiki_spec.rb @@ -172,11 +172,12 @@ describe ProjectWiki do describe '#find_file' do shared_examples 'finding a wiki file' do + let(:image) { File.open(Rails.root.join('spec', 'fixtures', 'big-image.png')) } + before do - file = File.open(Rails.root.join('spec', 'fixtures', 'dk.png')) subject.wiki # Make sure the wiki repo exists - BareRepoOperations.new(subject.repository.path_to_repo).commit_file(file, 'image.png') + BareRepoOperations.new(subject.repository.path_to_repo).commit_file(image, 'image.png') end it 'returns the latest version of the file if it exists' do @@ -192,6 +193,13 @@ describe ProjectWiki do file = subject.find_file('image.png') expect(file).to be_a Gitlab::Git::WikiFile end + + it 'returns the whole file' do + file = subject.find_file('image.png') + image.rewind + + expect(file.raw_data.b).to eq(image.read.b) + end end context 'when Gitaly wiki_find_file is enabled' do @@ -369,7 +377,7 @@ describe ProjectWiki do end def commit_details - Gitlab::Git::Wiki::CommitDetails.new(user.name, user.email, "test commit") + Gitlab::Git::Wiki::CommitDetails.new(user.id, user.username, user.name, user.email, "test commit") end def create_page(name, content) diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb index 5bc972bca14..e45fe7db1e7 100644 --- a/spec/models/repository_spec.rb +++ b/spec/models/repository_spec.rb @@ -501,28 
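For reference, the ProjectStatistics '.increment_statistic' examples earlier in this diff exercise a class-level counter helper. A hedged usage sketch based only on the interface the spec shows; the artifact variable is hypothetical:

ProjectStatistics.increment_statistic(project.id, :build_artifacts_size, artifact.size)
# An amount of 0 issues no query, and an unsupported column raises ArgumentError.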
+501,6 @@ describe Repository do end end - describe '#create_hooks' do - let(:hook_path) { File.join(repository.path_to_repo, 'hooks') } - - it 'symlinks the global hooks directory' do - repository.create_hooks - - expect(File.symlink?(hook_path)).to be true - expect(File.readlink(hook_path)).to eq(Gitlab.config.gitlab_shell.hooks_path) - end - - it 'replaces existing symlink with the right directory' do - FileUtils.mkdir_p(hook_path) - - expect(File.symlink?(hook_path)).to be false - - repository.create_hooks - - expect(File.symlink?(hook_path)).to be true - expect(File.readlink(hook_path)).to eq(Gitlab.config.gitlab_shell.hooks_path) - end - end - describe "#create_dir" do it "commits a change that creates a new directory" do expect do @@ -895,7 +873,7 @@ describe Repository do end it 'returns nil when the content is not recognizable' do - repository.create_file(user, 'LICENSE', 'Copyright!', + repository.create_file(user, 'LICENSE', 'Gitlab B.V.', message: 'Add LICENSE', branch_name: 'master') expect(repository.license_key).to be_nil @@ -939,7 +917,7 @@ describe Repository do end it 'returns nil when the content is not recognizable' do - repository.create_file(user, 'LICENSE', 'Copyright!', + repository.create_file(user, 'LICENSE', 'Gitlab B.V.', message: 'Add LICENSE', branch_name: 'master') expect(repository.license).to be_nil @@ -1459,6 +1437,12 @@ describe Repository do repository.expire_emptiness_caches end + + it 'expires the memoized repository cache' do + allow(repository.raw_repository).to receive(:expire_has_local_branches_cache).and_call_original + + repository.expire_emptiness_caches + end end describe 'skip_merges option' do diff --git a/spec/models/route_spec.rb b/spec/models/route_spec.rb index dfac82b327a..01238a89a81 100644 --- a/spec/models/route_spec.rb +++ b/spec/models/route_spec.rb @@ -16,66 +16,6 @@ describe Route do it { is_expected.to validate_presence_of(:source) } it { is_expected.to validate_presence_of(:path) } it { is_expected.to validate_uniqueness_of(:path).case_insensitive } - - describe '#ensure_permanent_paths' do - context 'when the route is not yet persisted' do - let(:new_route) { described_class.new(path: 'foo', source: build(:group)) } - - context 'when permanent conflicting redirects exist' do - it 'is invalid' do - redirect = build(:redirect_route, :permanent, path: 'foo/bar/baz') - redirect.save!(validate: false) - - expect(new_route.valid?).to be_falsey - expect(new_route.errors.first[1]).to eq('has been taken before') - end - end - - context 'when no permanent conflicting redirects exist' do - it 'is valid' do - expect(new_route.valid?).to be_truthy - end - end - end - - context 'when path has changed' do - before do - route.path = 'foo' - end - - context 'when permanent conflicting redirects exist' do - it 'is invalid' do - redirect = build(:redirect_route, :permanent, path: 'foo/bar/baz') - redirect.save!(validate: false) - - expect(route.valid?).to be_falsey - expect(route.errors.first[1]).to eq('has been taken before') - end - end - - context 'when no permanent conflicting redirects exist' do - it 'is valid' do - expect(route.valid?).to be_truthy - end - end - end - - context 'when path has not changed' do - context 'when permanent conflicting redirects exist' do - it 'is valid' do - redirect = build(:redirect_route, :permanent, path: 'git_lab/foo/bar') - redirect.save!(validate: false) - - expect(route.valid?).to be_truthy - end - end - context 'when no permanent conflicting redirects exist' do - it 'is valid' do - 
expect(route.valid?).to be_truthy - end - end - end - end end describe 'callbacks' do @@ -211,43 +151,31 @@ describe Route do end context 'when the source is a Project' do - it 'creates a temporal RedirectRoute' do + it 'creates a RedirectRoute' do project = create(:project) route = project.route redirect_route = route.create_redirect('foo') - expect(redirect_route.permanent?).to be_falsy + expect(redirect_route).not_to be_nil end end context 'when the source is not a project' do - it 'creates a permanent RedirectRoute' do - redirect_route = route.create_redirect('foo', permanent: true) - expect(redirect_route.permanent?).to be_truthy + it 'creates a RedirectRoute' do + redirect_route = route.create_redirect('foo') + expect(redirect_route).not_to be_nil end end end describe '#delete_conflicting_redirects' do - context 'with permanent redirect' do - it 'does not delete the redirect' do - route.create_redirect("#{route.path}/foo", permanent: true) - - expect do - route.delete_conflicting_redirects - end.not_to change { RedirectRoute.count } - end - end - - context 'with temporal redirect' do - let(:route) { create(:project).route } + let(:route) { create(:project).route } - it 'deletes the redirect' do - route.create_redirect("#{route.path}/foo") + it 'deletes the redirect' do + route.create_redirect("#{route.path}/foo") - expect do - route.delete_conflicting_redirects - end.to change { RedirectRoute.count }.by(-1) - end + expect do + route.delete_conflicting_redirects + end.to change { RedirectRoute.count }.by(-1) end context 'when a redirect route with the same path exists' do @@ -289,31 +217,18 @@ describe Route do end describe '#conflicting_redirects' do + let(:route) { create(:project).route } + it 'returns an ActiveRecord::Relation' do expect(route.conflicting_redirects).to be_an(ActiveRecord::Relation) end - context 'with permanent redirects' do - it 'does not return anything' do - route.create_redirect("#{route.path}/foo", permanent: true) - route.create_redirect("#{route.path}/foo/bar", permanent: true) - route.create_redirect("#{route.path}/baz/quz", permanent: true) + it 'returns the redirect routes' do + redirect1 = route.create_redirect("#{route.path}/foo") + redirect2 = route.create_redirect("#{route.path}/foo/bar") + redirect3 = route.create_redirect("#{route.path}/baz/quz") - expect(route.conflicting_redirects).to be_empty - end - end - - context 'with temporal redirects' do - let(:route) { create(:project).route } - - it 'returns the redirect routes' do - route = create(:project).route - redirect1 = route.create_redirect("#{route.path}/foo") - redirect2 = route.create_redirect("#{route.path}/foo/bar") - redirect3 = route.create_redirect("#{route.path}/baz/quz") - - expect(route.conflicting_redirects).to match_array([redirect1, redirect2, redirect3]) - end + expect(route.conflicting_redirects).to match_array([redirect1, redirect2, redirect3]) end context 'when a redirect route with the same path exists' do @@ -348,44 +263,6 @@ describe Route do end end - describe "#conflicting_redirect_exists?" do - context 'when a conflicting redirect exists' do - let(:group1) { create(:group, path: 'foo') } - let(:group2) { create(:group, path: 'baz') } - - it 'should not be saved' do - group1.path = 'bar' - group1.save - - group2.path = 'foo' - - expect(group2.save).to be_falsy - end - - it 'should return an error on path' do - group1.path = 'bar' - group1.save - - group2.path = 'foo' - group2.valid? 
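The Route specs above drive three redirect helpers. A minimal usage sketch, assuming a persisted routable record (project stands in for any object exposing a route):

route = project.route
route.create_redirect("#{route.path}/old")  # keep the old path reachable
route.conflicting_redirects                 # => ActiveRecord::Relation of shadowed redirects
route.delete_conflicting_redirects          # removes them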
- expect(group2.errors[:path]).to eq(['has been taken before']) - end - end - - context 'when a conflicting redirect does not exist' do - let(:project1) { create(:project, path: 'foo') } - let(:project2) { create(:project, path: 'baz') } - - it 'should be saved' do - project1.path = 'bar' - project1.save - - project2.path = 'foo' - expect(project2.save).to be_truthy - end - end - end - describe '#delete_conflicting_orphaned_routes' do context 'when there is a conflicting route' do let!(:conflicting_group) { create(:group, path: 'foo') } diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb index 79f25dc4360..28c908ea425 100644 --- a/spec/models/service_spec.rb +++ b/spec/models/service_spec.rb @@ -10,6 +10,22 @@ describe Service do it { is_expected.to validate_presence_of(:type) } end + describe 'Scopes' do + describe '.confidential_note_hooks' do + it 'includes services where confidential_note_events is true' do + create(:service, active: true, confidential_note_events: true) + + expect(described_class.confidential_note_hooks.count).to eq 1 + end + + it 'excludes services where confidential_note_events is false' do + create(:service, active: true, confidential_note_events: false) + + expect(described_class.confidential_note_hooks.count).to eq 0 + end + end + end + describe "Test Button" do describe '#can_test?' do let(:service) { create(:service, project: project) } @@ -58,6 +74,21 @@ describe Service do end describe "Template" do + describe '.build_from_template' do + context 'when template is invalid' do + it 'sets service template to inactive when template is invalid' do + project = create(:project) + template = JiraService.new(template: true, active: true) + template.save(validate: false) + + service = described_class.build_from_template(project.id, template) + + expect(service).to be_valid + expect(service.active).to be false + end + end + end + describe "for pushover service" do let!(:service_template) do PushoverService.create( diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index 5680eb24985..35db7616efb 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -25,7 +25,7 @@ describe User do it { is_expected.to have_many(:group_members) } it { is_expected.to have_many(:groups) } it { is_expected.to have_many(:keys).dependent(:destroy) } - it { is_expected.to have_many(:deploy_keys).dependent(:destroy) } + it { is_expected.to have_many(:deploy_keys).dependent(:nullify) } it { is_expected.to have_many(:events).dependent(:destroy) } it { is_expected.to have_many(:issues).dependent(:destroy) } it { is_expected.to have_many(:notes).dependent(:destroy) } @@ -126,23 +126,6 @@ describe User do end end - context 'when the username was used by another user before' do - let(:username) { 'foo' } - let!(:other_user) { create(:user, username: username) } - - before do - other_user.username = 'bar' - other_user.save! - end - - it 'is invalid' do - user = build(:user, username: username) - - expect(user).not_to be_valid - expect(user.errors.full_messages).to eq(['Username has been taken before']) - end - end - context 'when the username is in use by another user' do let(:username) { 'foo' } let!(:other_user) { create(:user, username: username) } @@ -1222,7 +1205,7 @@ describe User do it 'is false if avatar is html page' do user.update_attribute(:avatar, 'uploads/avatar.html') - expect(user.avatar_type).to eq(['only images allowed']) + expect(user.avatar_type).to eq(['file format is not supported. 
Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff']) end end @@ -1468,7 +1451,7 @@ describe User do end end - describe '#sort' do + describe '#sort_by_attribute' do before do described_class.delete_all @user = create :user, created_at: Date.today, current_sign_in_at: Date.today, name: 'Alpha' @@ -1477,7 +1460,7 @@ describe User do end context 'when sort by recent_sign_in' do - let(:users) { described_class.sort('recent_sign_in') } + let(:users) { described_class.sort_by_attribute('recent_sign_in') } it 'sorts users by recent sign-in time' do expect(users.first).to eq(@user) @@ -1490,7 +1473,7 @@ describe User do end context 'when sort by oldest_sign_in' do - let(:users) { described_class.sort('oldest_sign_in') } + let(:users) { described_class.sort_by_attribute('oldest_sign_in') } it 'sorts users by the oldest sign-in time' do expect(users.first).to eq(@user1) @@ -1503,15 +1486,15 @@ describe User do end it 'sorts users in descending order by their creation time' do - expect(described_class.sort('created_desc').first).to eq(@user) + expect(described_class.sort_by_attribute('created_desc').first).to eq(@user) end it 'sorts users in ascending order by their creation time' do - expect(described_class.sort('created_asc').first).to eq(@user2) + expect(described_class.sort_by_attribute('created_asc').first).to eq(@user2) end it 'sorts users by id in descending order when nil is passed' do - expect(described_class.sort(nil).first).to eq(@user2) + expect(described_class.sort_by_attribute(nil).first).to eq(@user2) end end @@ -1867,6 +1850,21 @@ describe User do it_behaves_like :member end + + context 'with subgroup with different owner for project runner', :nested_groups do + let(:group) { create(:group) } + let(:another_user) { create(:user) } + let(:subgroup) { create(:group, parent: group) } + let(:project) { create(:project, group: subgroup) } + + def add_user(access) + group.add_user(user, access) + group.add_user(another_user, :owner) + subgroup.add_user(another_user, :owner) + end + + it_behaves_like :member + end end describe '#projects_with_reporter_access_limited_to' do @@ -2088,6 +2086,8 @@ describe User do expect(ghost).to be_ghost expect(ghost).to be_persisted + expect(ghost.namespace).not_to be_nil + expect(ghost.namespace).to be_persisted end it "does not create a second ghost user if one is already present" do @@ -2249,6 +2249,20 @@ describe User do end end + context '#invalidate_personal_projects_count' do + let(:user) { build_stubbed(:user) } + + it 'invalidates cache for personal projects counter' do + cache_mock = double + + expect(cache_mock).to receive(:delete).with(['users', user.id, 'personal_projects_count']) + + allow(Rails).to receive(:cache).and_return(cache_mock) + + user.invalidate_personal_projects_count + end + end + describe '#allow_password_authentication_for_web?' do context 'regular user' do let(:user) { build(:user) } @@ -2298,11 +2312,9 @@ describe User do user = build(:user) projects = double(:projects, count: 1) - expect(user).to receive(:personal_projects).once.and_return(projects) + expect(user).to receive(:personal_projects).and_return(projects) - 2.times do - expect(user.personal_projects_count).to eq(1) - end + expect(user.personal_projects_count).to eq(1) end end @@ -2699,27 +2711,19 @@ describe User do end end - describe "#username_previously_taken?" 
do - let(:user1) { create(:user, username: 'foo') } + context 'changing a username' do + let(:user) { create(:user, username: 'foo') } - context 'when the username has been taken before' do - before do - user1.username = 'bar' - user1.save! - end - - it 'should raise an ActiveRecord::RecordInvalid exception' do - user2 = build(:user, username: 'foo') - expect { user2.save! }.to raise_error(ActiveRecord::RecordInvalid, /Username has been taken before/) - end + it 'creates a redirect route' do + expect { user.update!(username: 'bar') } + .to change { RedirectRoute.where(path: 'foo').count }.by(1) end - context 'when the username has not been taken before' do - it 'should be valid' do - expect(RedirectRoute.count).to eq(0) - user2 = build(:user, username: 'baz') - expect(user2).to be_valid - end + it 'deletes the redirect when a user with the old username was created' do + user.update!(username: 'bar') + + expect { create(:user, username: 'foo') } + .to change { RedirectRoute.where(path: 'foo').count }.by(-1) end end end diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb index b2b7721674c..90b7e7715a8 100644 --- a/spec/models/wiki_page_spec.rb +++ b/spec/models/wiki_page_spec.rb @@ -561,7 +561,7 @@ describe WikiPage do end def commit_details - Gitlab::Git::Wiki::CommitDetails.new(user.name, user.email, "test commit") + Gitlab::Git::Wiki::CommitDetails.new(user.id, user.username, user.name, user.email, "test commit") end def create_page(name, content) diff --git a/spec/policies/deploy_token_policy_spec.rb b/spec/policies/deploy_token_policy_spec.rb new file mode 100644 index 00000000000..eea287d895e --- /dev/null +++ b/spec/policies/deploy_token_policy_spec.rb @@ -0,0 +1,45 @@ +require 'spec_helper' + +describe DeployTokenPolicy do + let(:current_user) { create(:user) } + let(:project) { create(:project) } + let(:deploy_token) { create(:deploy_token, projects: [project]) } + + subject { described_class.new(current_user, deploy_token) } + + describe 'creating a deploy key' do + context 'when user is master' do + before do + project.add_master(current_user) + end + + it { is_expected.to be_allowed(:create_deploy_token) } + end + + context 'when user is not master' do + before do + project.add_developer(current_user) + end + + it { is_expected.to be_disallowed(:create_deploy_token) } + end + end + + describe 'updating a deploy key' do + context 'when user is master' do + before do + project.add_master(current_user) + end + + it { is_expected.to be_allowed(:update_deploy_token) } + end + + context 'when user is not master' do + before do + project.add_developer(current_user) + end + + it { is_expected.to be_disallowed(:update_deploy_token) } + end + end +end diff --git a/spec/policies/note_policy_spec.rb b/spec/policies/note_policy_spec.rb index 58d36a2c84e..e8096358f7d 100644 --- a/spec/policies/note_policy_spec.rb +++ b/spec/policies/note_policy_spec.rb @@ -18,7 +18,6 @@ describe NotePolicy, mdoels: true do context 'when the project is public' do context 'when the note author is not a project member' do it 'can edit a note' do - expect(policies).to be_allowed(:update_note) expect(policies).to be_allowed(:admin_note) expect(policies).to be_allowed(:resolve_note) expect(policies).to be_allowed(:read_note) @@ -29,7 +28,6 @@ describe NotePolicy, mdoels: true do it 'can edit note' do policies = policies(create(:project_snippet, project: project)) - expect(policies).to be_allowed(:update_note) expect(policies).to be_allowed(:admin_note) expect(policies).to 
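The DeployTokenPolicy spec above asserts that only project masters may create or update deploy tokens. A rough DeclarativePolicy-style sketch of the shape such a policy takes, not GitLab's actual implementation; the condition body is an assumption for illustration:

class DeployTokenPolicy < BasePolicy
  # Assumed condition: the current user is a master of at least one project
  # the deploy token belongs to.
  condition(:master_of_token_project) do
    @subject.projects.any? { |project| project.team.master?(@user) }
  end

  rule { master_of_token_project }.policy do
    enable :create_deploy_token
    enable :update_deploy_token
  end
end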
be_allowed(:resolve_note) expect(policies).to be_allowed(:read_note) @@ -47,7 +45,6 @@ describe NotePolicy, mdoels: true do end it 'can edit a note' do - expect(policies).to be_allowed(:update_note) expect(policies).to be_allowed(:admin_note) expect(policies).to be_allowed(:resolve_note) expect(policies).to be_allowed(:read_note) @@ -56,7 +53,6 @@ describe NotePolicy, mdoels: true do context 'when the note author is not a project member' do it 'can not edit a note' do - expect(policies).to be_disallowed(:update_note) expect(policies).to be_disallowed(:admin_note) expect(policies).to be_disallowed(:resolve_note) end diff --git a/spec/policies/personal_snippet_policy_spec.rb b/spec/policies/personal_snippet_policy_spec.rb index 50bb0899eba..3809692b373 100644 --- a/spec/policies/personal_snippet_policy_spec.rb +++ b/spec/policies/personal_snippet_policy_spec.rb @@ -27,6 +27,7 @@ describe PersonalSnippetPolicy do it do is_expected.to be_allowed(:read_personal_snippet) is_expected.to be_disallowed(:comment_personal_snippet) + is_expected.to be_disallowed(:award_emoji) is_expected.to be_disallowed(*author_permissions) end end @@ -37,6 +38,7 @@ describe PersonalSnippetPolicy do it do is_expected.to be_allowed(:read_personal_snippet) is_expected.to be_allowed(:comment_personal_snippet) + is_expected.to be_allowed(:award_emoji) is_expected.to be_disallowed(*author_permissions) end end @@ -47,6 +49,7 @@ describe PersonalSnippetPolicy do it do is_expected.to be_allowed(:read_personal_snippet) is_expected.to be_allowed(:comment_personal_snippet) + is_expected.to be_allowed(:award_emoji) is_expected.to be_allowed(*author_permissions) end end @@ -61,6 +64,7 @@ describe PersonalSnippetPolicy do it do is_expected.to be_disallowed(:read_personal_snippet) is_expected.to be_disallowed(:comment_personal_snippet) + is_expected.to be_disallowed(:award_emoji) is_expected.to be_disallowed(*author_permissions) end end @@ -71,6 +75,7 @@ describe PersonalSnippetPolicy do it do is_expected.to be_allowed(:read_personal_snippet) is_expected.to be_allowed(:comment_personal_snippet) + is_expected.to be_allowed(:award_emoji) is_expected.to be_disallowed(*author_permissions) end end @@ -81,6 +86,7 @@ describe PersonalSnippetPolicy do it do is_expected.to be_disallowed(:read_personal_snippet) is_expected.to be_disallowed(:comment_personal_snippet) + is_expected.to be_disallowed(:award_emoji) is_expected.to be_disallowed(*author_permissions) end end @@ -91,6 +97,7 @@ describe PersonalSnippetPolicy do it do is_expected.to be_allowed(:read_personal_snippet) is_expected.to be_allowed(:comment_personal_snippet) + is_expected.to be_allowed(:award_emoji) is_expected.to be_allowed(*author_permissions) end end @@ -105,6 +112,7 @@ describe PersonalSnippetPolicy do it do is_expected.to be_disallowed(:read_personal_snippet) is_expected.to be_disallowed(:comment_personal_snippet) + is_expected.to be_disallowed(:award_emoji) is_expected.to be_disallowed(*author_permissions) end end @@ -115,6 +123,7 @@ describe PersonalSnippetPolicy do it do is_expected.to be_disallowed(:read_personal_snippet) is_expected.to be_disallowed(:comment_personal_snippet) + is_expected.to be_disallowed(:award_emoji) is_expected.to be_disallowed(*author_permissions) end end @@ -125,6 +134,7 @@ describe PersonalSnippetPolicy do it do is_expected.to be_disallowed(:read_personal_snippet) is_expected.to be_disallowed(:comment_personal_snippet) + is_expected.to be_disallowed(:award_emoji) is_expected.to be_disallowed(*author_permissions) end end @@ -135,6 +145,7 @@ 
describe PersonalSnippetPolicy do it do is_expected.to be_allowed(:read_personal_snippet) is_expected.to be_allowed(:comment_personal_snippet) + is_expected.to be_allowed(:award_emoji) is_expected.to be_allowed(*author_permissions) end end diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb index ea76e604153..8b9c4ac0b4b 100644 --- a/spec/policies/project_policy_spec.rb +++ b/spec/policies/project_policy_spec.rb @@ -11,10 +11,11 @@ describe ProjectPolicy do let(:base_guest_permissions) do %i[ - read_project read_board read_list read_wiki read_issue read_label - read_milestone read_project_snippet read_project_member - read_note create_project create_issue create_note - upload_file + read_project read_board read_list read_wiki read_issue + read_project_for_iids read_issue_iid read_merge_request_iid read_label + read_milestone read_project_snippet read_project_member read_note + create_project create_issue create_note upload_file create_merge_request_in + award_emoji ] end @@ -35,7 +36,7 @@ describe ProjectPolicy do %i[ admin_milestone admin_merge_request update_merge_request create_commit_status update_commit_status create_build update_build create_pipeline - update_pipeline create_merge_request create_wiki push_code + update_pipeline create_merge_request_from create_wiki push_code resolve_note create_container_image update_container_image create_environment create_deployment ] @@ -43,7 +44,7 @@ describe ProjectPolicy do let(:base_master_permissions) do %i[ - delete_protected_branch update_project_snippet update_environment + push_to_delete_protected_branch update_project_snippet update_environment update_deployment admin_project_snippet admin_project_member admin_note admin_wiki admin_project admin_commit_status admin_build admin_container_image @@ -120,7 +121,7 @@ describe ProjectPolicy do project.issues_enabled = false project.save! - expect_disallowed :read_issue, :create_issue, :update_issue, :admin_issue + expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue end end @@ -131,7 +132,60 @@ describe ProjectPolicy do project.issues_enabled = false project.save! 
- expect_disallowed :read_issue, :create_issue, :update_issue, :admin_issue + expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue + end + end + end + + context 'merge requests feature' do + subject { described_class.new(owner, project) } + + it 'disallows all permissions when the feature is disabled' do + project.project_feature.update(merge_requests_access_level: ProjectFeature::DISABLED) + + mr_permissions = [:create_merge_request_from, :read_merge_request, + :update_merge_request, :admin_merge_request, + :create_merge_request_in] + + expect_disallowed(*mr_permissions) + end + end + + shared_examples 'archived project policies' do + let(:feature_write_abilities) do + described_class::READONLY_FEATURES_WHEN_ARCHIVED.flat_map do |feature| + described_class.create_update_admin_destroy(feature) + end + end + + let(:other_write_abilities) do + %i[ + create_merge_request_in + create_merge_request_from + push_to_delete_protected_branch + push_code + request_access + upload_file + resolve_note + award_emoji + ] + end + + context 'when the project is archived' do + before do + project.archived = true + end + + it 'disables write actions on all relevant project features' do + expect_disallowed(*feature_write_abilities) + end + + it 'disables some other important write actions' do + expect_disallowed(*other_write_abilities) + end + + it 'does not disable other other abilities' do + expect_allowed(*(regular_abilities - feature_write_abilities - other_write_abilities)) end end end @@ -141,8 +195,8 @@ describe ProjectPolicy do context 'when a project has pending invites' do let(:group) { create(:group, :public) } let(:project) { create(:project, :public, namespace: group) } - let(:user_permissions) { [:create_project, :create_issue, :create_note, :upload_file] } - let(:anonymous_permissions) { guest_permissions - user_permissions } + let(:user_permissions) { [:create_merge_request_in, :create_project, :create_issue, :create_note, :upload_file, :award_emoji] } + let(:anonymous_permissions) { guest_permissions - user_permissions } subject { described_class.new(nil, project) } @@ -154,6 +208,10 @@ describe ProjectPolicy do expect_allowed(*anonymous_permissions) expect_disallowed(*user_permissions) end + + it_behaves_like 'archived project policies' do + let(:regular_abilities) { anonymous_permissions } + end end end @@ -184,6 +242,10 @@ describe ProjectPolicy do expect_disallowed(*owner_permissions) end + it_behaves_like 'archived project policies' do + let(:regular_abilities) { guest_permissions } + end + context 'public builds enabled' do it do expect_allowed(*guest_permissions) @@ -224,12 +286,15 @@ describe ProjectPolicy do it do expect_allowed(*guest_permissions) expect_allowed(*reporter_permissions) - expect_allowed(*reporter_permissions) expect_allowed(*team_member_reporter_permissions) expect_disallowed(*developer_permissions) expect_disallowed(*master_permissions) expect_disallowed(*owner_permissions) end + + it_behaves_like 'archived project policies' do + let(:regular_abilities) { reporter_permissions } + end end end @@ -247,6 +312,10 @@ describe ProjectPolicy do expect_disallowed(*master_permissions) expect_disallowed(*owner_permissions) end + + it_behaves_like 'archived project policies' do + let(:regular_abilities) { developer_permissions } + end end end @@ -264,6 +333,10 @@ describe ProjectPolicy do expect_allowed(*master_permissions) expect_disallowed(*owner_permissions) end + + it_behaves_like 'archived project policies' do + let(:regular_abilities) 
{ master_permissions } + end end end @@ -281,6 +354,10 @@ describe ProjectPolicy do expect_allowed(*master_permissions) expect_allowed(*owner_permissions) end + + it_behaves_like 'archived project policies' do + let(:regular_abilities) { owner_permissions } + end end end @@ -298,6 +375,10 @@ describe ProjectPolicy do expect_allowed(*master_permissions) expect_allowed(*owner_permissions) end + + it_behaves_like 'archived project policies' do + let(:regular_abilities) { owner_permissions } + end end end diff --git a/spec/policies/protected_branch_policy_spec.rb b/spec/policies/protected_branch_policy_spec.rb new file mode 100644 index 00000000000..b39de42d721 --- /dev/null +++ b/spec/policies/protected_branch_policy_spec.rb @@ -0,0 +1,22 @@ +require 'spec_helper' + +describe ProtectedBranchPolicy do + let(:user) { create(:user) } + let(:name) { 'feature' } + let(:protected_branch) { create(:protected_branch, name: name) } + let(:project) { protected_branch.project } + + subject { described_class.new(user, protected_branch) } + + it 'branches can be updated via project masters' do + project.add_master(user) + + is_expected.to be_allowed(:update_protected_branch) + end + + it "branches can't be updated by guests" do + project.add_guest(user) + + is_expected.to be_disallowed(:update_protected_branch) + end +end diff --git a/spec/presenters/ci/build_presenter_spec.rb b/spec/presenters/ci/build_presenter_spec.rb index 1a8001be6ab..4bc005df2fc 100644 --- a/spec/presenters/ci/build_presenter_spec.rb +++ b/spec/presenters/ci/build_presenter_spec.rb @@ -72,13 +72,44 @@ describe Ci::BuildPresenter do end end - context 'when build is not auto-canceled' do - before do - expect(build).to receive(:auto_canceled?).and_return(false) + context 'when build failed' do + let(:build) { create(:ci_build, :failed, pipeline: pipeline) } + + it 'returns the reason of failure' do + status_title = presenter.status_title + + expect(status_title).to eq('Failed <br> (unknown failure)') + end + end + + context 'when build has failed && retried' do + let(:build) { create(:ci_build, :failed, :retried, pipeline: pipeline) } + + it 'does not include retried title' do + status_title = presenter.status_title + + expect(status_title).not_to include('(retried)') + expect(status_title).to eq('Failed <br> (unknown failure)') end + end + + context 'when build has failed and is allowed to' do + let(:build) { create(:ci_build, :failed, :allowed_to_fail, pipeline: pipeline) } - it 'does not have a status title' do - expect(presenter.status_title).to be_nil + it 'returns the reason of failure' do + status_title = presenter.status_title + + expect(status_title).to eq('Failed <br> (unknown failure)') + end + end + + context 'For any other build' do + let(:build) { create(:ci_build, :success, pipeline: pipeline) } + + it 'returns the status' do + tooltip_description = presenter.status_title + + expect(tooltip_description).to eq('Success') end end end @@ -134,4 +165,91 @@ describe Ci::BuildPresenter do end end end + + describe '#tooltip_message' do + context 'When build has failed' do + let(:build) { create(:ci_build, :script_failure, pipeline: pipeline) } + + it 'returns the reason of failure' do + tooltip = subject.tooltip_message + + expect(tooltip).to eq("#{build.name} - failed <br> (script failure)") + end + end + + context 'When build has failed and retried' do + let(:build) { create(:ci_build, :script_failure, :retried, pipeline: pipeline) } + + it 'should include the reason of failure and the retried title' do + tooltip = 
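The archived-project checks above rely on RSpec shared examples parameterized through it_behaves_like blocks. A minimal sketch of that pattern with illustrative names; expect_disallowed is the policy spec helper already used throughout these files:

shared_examples 'read-only when archived' do
  it 'disables write actions' do
    expect_disallowed(*write_abilities)
  end
end

it_behaves_like 'read-only when archived' do
  let(:write_abilities) { %i[push_code upload_file] }
end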
subject.tooltip_message + + expect(tooltip).to eq("#{build.name} - failed <br> (script failure) (retried)") + end + end + + context 'When build has failed and is allowed to' do + let(:build) { create(:ci_build, :script_failure, :allowed_to_fail, pipeline: pipeline) } + + it 'should include the reason of failure' do + tooltip = subject.tooltip_message + + expect(tooltip).to eq("#{build.name} - failed <br> (script failure) (allowed to fail)") + end + end + + context 'For any other build (no retried)' do + let(:build) { create(:ci_build, :success, pipeline: pipeline) } + + it 'should include build name and status' do + tooltip = subject.tooltip_message + + expect(tooltip).to eq("#{build.name} - passed") + end + end + + context 'For any other build (retried)' do + let(:build) { create(:ci_build, :success, :retried, pipeline: pipeline) } + + it 'should include build name and status' do + tooltip = subject.tooltip_message + + expect(tooltip).to eq("#{build.name} - passed (retried)") + end + end + end + + describe '#callout_failure_message' do + let(:build) { create(:ci_build, :failed, :script_failure) } + + it 'returns a verbose failure reason' do + description = subject.callout_failure_message + expect(description).to eq('There has been a script failure. Check the job log for more information') + end + end + + describe '#recoverable?' do + let(:build) { create(:ci_build, :failed, :script_failure) } + + context 'when is a script or missing dependency failure' do + let(:failure_reasons) { %w(script_failure missing_dependency_failure) } + + it 'should return false' do + failure_reasons.each do |failure_reason| + build.update_attribute(:failure_reason, failure_reason) + expect(presenter.recoverable?).to be_falsy + end + end + end + + context 'when is any other failure type' do + let(:failure_reasons) { %w(unknown_failure api_failure stuck_or_timeout_failure runner_system_failure) } + + it 'should return true' do + failure_reasons.each do |failure_reason| + build.update_attribute(:failure_reason, failure_reason) + expect(presenter.recoverable?).to be_truthy + end + end + end + end end diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb index f8c93d91ec5..55962f345d4 100644 --- a/spec/presenters/project_presenter_spec.rb +++ b/spec/presenters/project_presenter_spec.rb @@ -339,7 +339,7 @@ describe ProjectPresenter do it 'returns link to clusters page if more than one exists' do project.add_master(user) - create(:cluster, projects: [project]) + create(:cluster, :production_environment, projects: [project]) create(:cluster, projects: [project]) expect(presenter.kubernetes_cluster_anchor_data).to eq(OpenStruct.new(enabled: true, diff --git a/spec/requests/api/boards_spec.rb b/spec/requests/api/boards_spec.rb index c6c10025f7f..92b614b087e 100644 --- a/spec/requests/api/boards_spec.rb +++ b/spec/requests/api/boards_spec.rb @@ -48,5 +48,36 @@ describe API::Boards do expect(json_response['label']['name']).to eq(group_label.title) expect(json_response['position']).to eq(3) end + + it 'creates a new board list for ancestor group labels' do + group = create(:group) + sub_group = create(:group, parent: group) + group_label = create(:group_label, group: group) + board_parent.update(group: sub_group) + group.add_developer(user) + sub_group.add_developer(user) + + post api(url, user), label_id: group_label.id + + expect(response).to have_gitlab_http_status(201) + expect(json_response['label']['name']).to eq(group_label.title) + end + end + + describe "POST 
/groups/:id/boards/lists", :nested_groups do + set(:group) { create(:group) } + set(:board_parent) { create(:group, parent: group ) } + let(:url) { "/groups/#{board_parent.id}/boards/#{board.id}/lists" } + set(:board) { create(:board, group: board_parent) } + + it 'creates a new board list for ancestor group labels' do + group.add_developer(user) + group_label = create(:group_label, group: group) + + post api(url, user), label_id: group_label.id + + expect(response).to have_gitlab_http_status(201) + expect(json_response['label']['name']).to eq(group_label.title) + end end end diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb index 852f67db958..8ad19e3f0f5 100644 --- a/spec/requests/api/commits_spec.rb +++ b/spec/requests/api/commits_spec.rb @@ -1141,4 +1141,33 @@ describe API::Commits do end end end + + describe 'GET /projects/:id/repository/commits/:sha/merge_requests' do + let!(:project) { create(:project, :repository, :private) } + let!(:merged_mr) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'feature') } + let(:commit) { merged_mr.merge_request_diff.commits.last } + + it 'returns the correct merge request' do + get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", user) + + expect(response).to have_gitlab_http_status(200) + expect(response).to include_pagination_headers + expect(json_response.length).to eq(1) + expect(json_response[0]['id']).to eq(merged_mr.id) + end + + it 'returns 403 for an unauthorized user' do + project.add_guest(user) + + get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", user) + + expect(response).to have_gitlab_http_status(403) + end + + it 'responds 404 when the commit does not exist' do + get api("/projects/#{project.id}/repository/commits/a7d26f00c35b/merge_requests", user) + + expect(response).to have_gitlab_http_status(404) + end + end end diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb index 0772b3f2e64..ae9c0e9c304 100644 --- a/spec/requests/api/deploy_keys_spec.rb +++ b/spec/requests/api/deploy_keys_spec.rb @@ -91,6 +91,10 @@ describe API::DeployKeys do expect do post api("/projects/#{project.id}/deploy_keys", admin), key_attrs end.to change { project.deploy_keys.count }.by(1) + + new_key = project.deploy_keys.last + expect(new_key.key).to eq(key_attrs[:key]) + expect(new_key.user).to eq(admin) end it 'returns an existing ssh key when attempting to add a duplicate' do diff --git a/spec/requests/api/features_spec.rb b/spec/requests/api/features_spec.rb index 267058d98ee..c5354c2d639 100644 --- a/spec/requests/api/features_spec.rb +++ b/spec/requests/api/features_spec.rb @@ -1,8 +1,8 @@ require 'spec_helper' describe API::Features do - let(:user) { create(:user) } - let(:admin) { create(:admin) } + set(:user) { create(:user) } + set(:admin) { create(:admin) } before do Flipper.unregister_groups @@ -249,4 +249,43 @@ describe API::Features do end end end + + describe 'DELETE /feature/:name' do + let(:feature_name) { 'my_feature' } + + context 'when the user has no access' do + it 'returns a 401 for anonymous users' do + delete api("/features/#{feature_name}") + + expect(response).to have_gitlab_http_status(401) + end + + it 'returns a 403 for users' do + delete api("/features/#{feature_name}", user) + + expect(response).to have_gitlab_http_status(403) + end + end + + context 'when the user has access' do + it 'returns 204 when the value is not set' do + delete 
api("/features/#{feature_name}", admin) + + expect(response).to have_gitlab_http_status(204) + end + + context 'when the gate value was set' do + before do + Feature.get(feature_name).enable + end + + it 'deletes an enabled feature' do + delete api("/features/#{feature_name}", admin) + + expect(response).to have_gitlab_http_status(204) + expect(Feature.get(feature_name)).not_to be_enabled + end + end + end + end end diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb index ca0aac87ba9..db8c5f963d6 100644 --- a/spec/requests/api/internal_spec.rb +++ b/spec/requests/api/internal_spec.rb @@ -251,44 +251,23 @@ describe API::Internal do end context 'with env passed as a JSON' do - context 'when relative path envs are not set' do - it 'sets env in RequestStore' do - expect(Gitlab::Git::Env).to receive(:set).with({ - 'GIT_OBJECT_DIRECTORY' => 'foo', - 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar' - }) - - push(key, project.wiki, env: { - GIT_OBJECT_DIRECTORY: 'foo', - GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar' - }.to_json) + let(:gl_repository) { project.gl_repository(is_wiki: true) } - expect(response).to have_gitlab_http_status(200) - end - end + it 'sets env in RequestStore' do + obj_dir_relative = './objects' + alt_obj_dirs_relative = ['./alt-objects-1', './alt-objects-2'] - context 'when relative path envs are set' do - it 'sets env in RequestStore' do - obj_dir_relative = './objects' - alt_obj_dirs_relative = ['./alt-objects-1', './alt-objects-2'] - repo_path = project.wiki.repository.path_to_repo - - expect(Gitlab::Git::Env).to receive(:set).with({ - 'GIT_OBJECT_DIRECTORY' => File.join(repo_path, obj_dir_relative), - 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => alt_obj_dirs_relative.map { |d| File.join(repo_path, d) }, - 'GIT_OBJECT_DIRECTORY_RELATIVE' => obj_dir_relative, - 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => alt_obj_dirs_relative - }) - - push(key, project.wiki, env: { - GIT_OBJECT_DIRECTORY: 'foo', - GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar', - GIT_OBJECT_DIRECTORY_RELATIVE: obj_dir_relative, - GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: alt_obj_dirs_relative - }.to_json) + expect(Gitlab::Git::HookEnv).to receive(:set).with(gl_repository, { + 'GIT_OBJECT_DIRECTORY_RELATIVE' => obj_dir_relative, + 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => alt_obj_dirs_relative + }) - expect(response).to have_gitlab_http_status(200) - end + push(key, project.wiki, env: { + GIT_OBJECT_DIRECTORY_RELATIVE: obj_dir_relative, + GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: alt_obj_dirs_relative + }.to_json) + + expect(response).to have_gitlab_http_status(200) end end @@ -447,6 +426,12 @@ describe API::Internal do expect(response).to have_gitlab_http_status(200) expect(json_response["status"]).to be_truthy + expect(json_response["gitaly"]).not_to be_nil + expect(json_response["gitaly"]["repository"]).not_to be_nil + expect(json_response["gitaly"]["repository"]["storage_name"]).to eq(project.repository.gitaly_repository.storage_name) + expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path) + expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage)) + expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage)) end end diff --git a/spec/requests/api/issues_spec.rb b/spec/requests/api/issues_spec.rb index 6614e8cea43..90f9c4ad214 100644 --- a/spec/requests/api/issues_spec.rb +++ b/spec/requests/api/issues_spec.rb @@ -384,6 
+384,30 @@ describe API::Issues do end let(:base_url) { "/groups/#{group.id}/issues" } + context 'when group has subgroups', :nested_groups do + let(:subgroup_1) { create(:group, parent: group) } + let(:subgroup_2) { create(:group, parent: subgroup_1) } + + let(:subgroup_1_project) { create(:project, namespace: subgroup_1) } + let(:subgroup_2_project) { create(:project, namespace: subgroup_2) } + + let!(:issue_1) { create(:issue, project: subgroup_1_project) } + let!(:issue_2) { create(:issue, project: subgroup_2_project) } + + before do + group.add_developer(user) + end + + it 'also returns subgroups projects issues' do + get api(base_url, user) + + issue_ids = json_response.map { |issue| issue['id'] } + + expect_paginated_array_response(size: 5) + expect(issue_ids).to include(issue_1.id, issue_2.id) + end + end + it 'returns all group issues (including opened and closed)' do get api(base_url, admin) diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb index 6192bbd4abb..3ffdfdc0e9a 100644 --- a/spec/requests/api/jobs_spec.rb +++ b/spec/requests/api/jobs_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' describe API::Jobs do + include HttpIOHelpers + set(:project) do create(:project, :repository, public_builds: false) end @@ -112,6 +114,7 @@ describe API::Jobs do let(:query) { Hash.new } before do + job get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query end @@ -335,10 +338,55 @@ describe API::Jobs do end end + context 'when artifacts are stored remotely' do + let(:proxy_download) { false } + + before do + stub_artifacts_object_storage(proxy_download: proxy_download) + end + + let(:job) { create(:ci_build, pipeline: pipeline) } + let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) } + + before do + job.reload + + get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) + end + + context 'when proxy download is enabled' do + let(:proxy_download) { true } + + it 'responds with the workhorse send-url' do + expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:") + end + end + + context 'when proxy download is disabled' do + it 'returns location redirect' do + expect(response).to have_gitlab_http_status(302) + end + end + + context 'authorized user' do + it 'returns the file remote URL' do + expect(response).to redirect_to(artifact.file.url) + end + end + + context 'unauthorized user' do + let(:api_user) { nil } + + it 'does not return specific job artifacts' do + expect(response).to have_gitlab_http_status(404) + end + end + end + it 'does not return job artifacts if not uploaded' do get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) - expect(response).to have_gitlab_http_status(404) + expect(response).to have_gitlab_http_status(:not_found) end end end @@ -349,6 +397,7 @@ describe API::Jobs do let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) } before do + stub_artifacts_object_storage job.success end @@ -412,9 +461,24 @@ describe API::Jobs do "attachment; filename=#{job.artifacts_file.filename}" } end - it { expect(response).to have_gitlab_http_status(200) } + it { expect(response).to have_http_status(:ok) } it { expect(response.headers).to include(download_headers) } end + + context 'when artifacts are stored remotely' do + let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) } + let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) } + + before do + job.reload + + get 
api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) + end + + it 'returns location redirect' do + expect(response).to have_http_status(:found) + end + end end context 'with regular branch' do @@ -451,6 +515,22 @@ describe API::Jobs do end context 'authorized user' do + context 'when trace is in ObjectStorage' do + let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) } + + before do + stub_remote_trace_206 + allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false } + allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url } + allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size } + end + + it 'returns specific job trace' do + expect(response).to have_gitlab_http_status(200) + expect(response.body).to eq(job.trace.raw) + end + end + context 'when trace is artifact' do let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) } diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb index 3764aec0c71..f64623d7018 100644 --- a/spec/requests/api/merge_requests_spec.rb +++ b/spec/requests/api/merge_requests_spec.rb @@ -861,7 +861,7 @@ describe API::MergeRequests do expect(json_response['title']).to eq('Test merge_request') end - it 'returns 422 when target project has disabled merge requests' do + it 'returns 403 when target project has disabled merge requests' do project.project_feature.update(merge_requests_access_level: 0) post api("/projects/#{forked_project.id}/merge_requests", user2), @@ -871,7 +871,7 @@ describe API::MergeRequests do author: user2, target_project_id: project.id - expect(response).to have_gitlab_http_status(422) + expect(response).to have_gitlab_http_status(403) end it "returns 400 when source_branch is missing" do diff --git a/spec/requests/api/pages_domains_spec.rb b/spec/requests/api/pages_domains_spec.rb index dc3a116c060..a9ccbb32666 100644 --- a/spec/requests/api/pages_domains_spec.rb +++ b/spec/requests/api/pages_domains_spec.rb @@ -1,17 +1,17 @@ require 'rails_helper' describe API::PagesDomains do - set(:project) { create(:project, path: 'my.project') } + set(:project) { create(:project, path: 'my.project', pages_https_only: false) } set(:user) { create(:user) } set(:admin) { create(:admin) } - set(:pages_domain) { create(:pages_domain, domain: 'www.domain.test', project: project) } - set(:pages_domain_secure) { create(:pages_domain, :with_certificate, :with_key, domain: 'ssl.domain.test', project: project) } - set(:pages_domain_expired) { create(:pages_domain, :with_expired_certificate, :with_key, domain: 'expired.domain.test', project: project) } + set(:pages_domain) { create(:pages_domain, :without_key, :without_certificate, domain: 'www.domain.test', project: project) } + set(:pages_domain_secure) { create(:pages_domain, domain: 'ssl.domain.test', project: project) } + set(:pages_domain_expired) { create(:pages_domain, :with_expired_certificate, domain: 'expired.domain.test', project: project) } - let(:pages_domain_params) { build(:pages_domain, domain: 'www.other-domain.test').slice(:domain) } - let(:pages_domain_secure_params) { build(:pages_domain, :with_certificate, :with_key, domain: 'ssl.other-domain.test', project: project).slice(:domain, :certificate, :key) } - let(:pages_domain_secure_key_missmatch_params) {build(:pages_domain, :with_trusted_chain, :with_key, project: project).slice(:domain, :certificate, :key) } + let(:pages_domain_params) { build(:pages_domain, :without_key, :without_certificate, 
domain: 'www.other-domain.test').slice(:domain) } + let(:pages_domain_secure_params) { build(:pages_domain, domain: 'ssl.other-domain.test', project: project).slice(:domain, :certificate, :key) } + let(:pages_domain_secure_key_missmatch_params) {build(:pages_domain, :with_trusted_chain, project: project).slice(:domain, :certificate, :key) } let(:pages_domain_secure_missing_chain_params) {build(:pages_domain, :with_missing_chain, project: project).slice(:certificate) } let(:route) { "/projects/#{project.id}/pages/domains" } diff --git a/spec/requests/api/pipeline_schedules_spec.rb b/spec/requests/api/pipeline_schedules_spec.rb index 7ea25059756..91d4d5d3de9 100644 --- a/spec/requests/api/pipeline_schedules_spec.rb +++ b/spec/requests/api/pipeline_schedules_spec.rb @@ -17,6 +17,17 @@ describe API::PipelineSchedules do pipeline_schedule.pipelines << build(:ci_pipeline, project: project) end + def create_pipeline_schedules(count) + create_list(:ci_pipeline_schedule, count, project: project) + .each do |pipeline_schedule| + create(:user).tap do |user| + project.add_developer(user) + pipeline_schedule.update_attributes(owner: user) + end + pipeline_schedule.pipelines << build(:ci_pipeline, project: project) + end + end + it 'returns list of pipeline_schedules' do get api("/projects/#{project.id}/pipeline_schedules", developer) @@ -26,18 +37,14 @@ describe API::PipelineSchedules do end it 'avoids N + 1 queries' do + # We need at least two users to trigger a preload for that relation. + create_pipeline_schedules(1) + control_count = ActiveRecord::QueryRecorder.new do get api("/projects/#{project.id}/pipeline_schedules", developer) end.count - create_list(:ci_pipeline_schedule, 10, project: project) - .each do |pipeline_schedule| - create(:user).tap do |user| - project.add_developer(user) - pipeline_schedule.update_attributes(owner: user) - end - pipeline_schedule.pipelines << build(:ci_pipeline, project: project) - end + create_pipeline_schedules(10) expect do get api("/projects/#{project.id}/pipeline_schedules", developer) diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb index 12583109b59..3834d27d0a9 100644 --- a/spec/requests/api/project_export_spec.rb +++ b/spec/requests/api/project_export_spec.rb @@ -5,6 +5,7 @@ describe API::ProjectExport do set(:project_none) { create(:project) } set(:project_started) { create(:project) } set(:project_finished) { create(:project) } + set(:project_after_export) { create(:project) } set(:user) { create(:user) } set(:admin) { create(:admin) } @@ -12,11 +13,13 @@ describe API::ProjectExport do let(:path_none) { "/projects/#{project_none.id}/export" } let(:path_started) { "/projects/#{project_started.id}/export" } let(:path_finished) { "/projects/#{project_finished.id}/export" } + let(:path_after_export) { "/projects/#{project_after_export.id}/export" } let(:download_path) { "/projects/#{project.id}/export/download" } let(:download_path_none) { "/projects/#{project_none.id}/export/download" } let(:download_path_started) { "/projects/#{project_started.id}/export/download" } let(:download_path_finished) { "/projects/#{project_finished.id}/export/download" } + let(:download_path_export_action) { "/projects/#{project_after_export.id}/export/download" } let(:export_path) { "#{Dir.tmpdir}/project_export_spec" } @@ -29,6 +32,11 @@ describe API::ProjectExport do # simulate exported FileUtils.mkdir_p project_finished.export_path FileUtils.touch File.join(project_finished.export_path, '_export.tar.gz') + + # simulate in 
after export action + FileUtils.mkdir_p project_after_export.export_path + FileUtils.touch File.join(project_after_export.export_path, '_export.tar.gz') + FileUtils.touch Gitlab::ImportExport::AfterExportStrategies::BaseAfterExportStrategy.lock_file_path(project_after_export) end after do @@ -73,6 +81,14 @@ describe API::ProjectExport do expect(json_response['export_status']).to eq('started') end + it 'is after_export' do + get api(path_after_export, user) + + expect(response).to have_gitlab_http_status(200) + expect(response).to match_response_schema('public_api/v4/project/export_status') + expect(json_response['export_status']).to eq('after_export_action') + end + it 'is finished' do get api(path_finished, user) @@ -99,6 +115,7 @@ describe API::ProjectExport do project_none.add_master(user) project_started.add_master(user) project_finished.add_master(user) + project_after_export.add_master(user) end it_behaves_like 'get project export status ok' @@ -163,6 +180,36 @@ describe API::ProjectExport do end end + shared_examples_for 'get project export upload after action' do + context 'and is uploading' do + it 'downloads' do + get api(download_path_export_action, user) + + expect(response).to have_gitlab_http_status(200) + end + end + + context 'when upload complete' do + before do + FileUtils.rm_rf(project_after_export.export_path) + end + + it_behaves_like '404 response' do + let(:request) { get api(download_path_export_action, user) } + end + end + end + + shared_examples_for 'get project download by strategy' do + context 'when upload strategy set' do + it_behaves_like 'get project export upload after action' + end + + context 'when download strategy set' do + it_behaves_like 'get project export download' + end + end + it_behaves_like 'when project export is disabled' do let(:request) { get api(download_path, admin) } end @@ -171,7 +218,7 @@ describe API::ProjectExport do context 'when user is an admin' do let(:user) { admin } - it_behaves_like 'get project export download' + it_behaves_like 'get project download by strategy' end context 'when user is a master' do @@ -180,9 +227,10 @@ describe API::ProjectExport do project_none.add_master(user) project_started.add_master(user) project_finished.add_master(user) + project_after_export.add_master(user) end - it_behaves_like 'get project export download' + it_behaves_like 'get project download by strategy' end context 'when user is a developer' do @@ -229,10 +277,30 @@ describe API::ProjectExport do end shared_examples_for 'post project export start' do - it 'starts' do - post api(path, user) + context 'with upload strategy' do + context 'when params invalid' do + it_behaves_like '400 response' do + let(:request) { post(api(path, user), 'upload[url]' => 'whatever') } + end + end + + it 'starts' do + allow_any_instance_of(Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy).to receive(:send_file) + + post(api(path, user), 'upload[url]' => 'http://gitlab.com') - expect(response).to have_gitlab_http_status(202) + expect(response).to have_gitlab_http_status(202) + end + end + + context 'with download strategy' do + it 'starts' do + expect_any_instance_of(Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy).not_to receive(:send_file) + + post api(path, user) + + expect(response).to have_gitlab_http_status(202) + end end end @@ -253,6 +321,7 @@ describe API::ProjectExport do project_none.add_master(user) project_started.add_master(user) project_finished.add_master(user) + project_after_export.add_master(user) end 
it_behaves_like 'post project export start' diff --git a/spec/requests/api/project_hooks_spec.rb b/spec/requests/api/project_hooks_spec.rb index 392cad667be..12a183fed1e 100644 --- a/spec/requests/api/project_hooks_spec.rb +++ b/spec/requests/api/project_hooks_spec.rb @@ -33,6 +33,7 @@ describe API::ProjectHooks, 'ProjectHooks' do expect(json_response.first['merge_requests_events']).to eq(true) expect(json_response.first['tag_push_events']).to eq(true) expect(json_response.first['note_events']).to eq(true) + expect(json_response.first['confidential_note_events']).to eq(true) expect(json_response.first['job_events']).to eq(true) expect(json_response.first['pipeline_events']).to eq(true) expect(json_response.first['wiki_page_events']).to eq(true) @@ -62,6 +63,7 @@ describe API::ProjectHooks, 'ProjectHooks' do expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events) expect(json_response['tag_push_events']).to eq(hook.tag_push_events) expect(json_response['note_events']).to eq(hook.note_events) + expect(json_response['confidential_note_events']).to eq(hook.confidential_note_events) expect(json_response['job_events']).to eq(hook.job_events) expect(json_response['pipeline_events']).to eq(hook.pipeline_events) expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events) @@ -104,6 +106,7 @@ describe API::ProjectHooks, 'ProjectHooks' do expect(json_response['merge_requests_events']).to eq(false) expect(json_response['tag_push_events']).to eq(false) expect(json_response['note_events']).to eq(false) + expect(json_response['confidential_note_events']).to eq(nil) expect(json_response['job_events']).to eq(true) expect(json_response['pipeline_events']).to eq(false) expect(json_response['wiki_page_events']).to eq(true) @@ -152,6 +155,7 @@ describe API::ProjectHooks, 'ProjectHooks' do expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events) expect(json_response['tag_push_events']).to eq(hook.tag_push_events) expect(json_response['note_events']).to eq(hook.note_events) + expect(json_response['confidential_note_events']).to eq(hook.confidential_note_events) expect(json_response['job_events']).to eq(hook.job_events) expect(json_response['pipeline_events']).to eq(hook.pipeline_events) expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events) diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb index 987f6e26971..f68057a92a1 100644 --- a/spec/requests/api/project_import_spec.rb +++ b/spec/requests/api/project_import_spec.rb @@ -40,7 +40,7 @@ describe API::ProjectImport do expect(response).to have_gitlab_http_status(201) end - it 'schedules an import at the user namespace level' do + it 'does not schedule an import for a namespace that does not exist' do expect_any_instance_of(Project).not_to receive(:import_schedule) expect(::Projects::CreateService).not_to receive(:new) @@ -71,6 +71,72 @@ describe API::ProjectImport do expect(json_response['error']).to eq('file is invalid') end + it 'stores params that can be overridden' do + stub_import(namespace) + override_params = { 'description' => 'Hello world' } + + post api('/projects/import', user), + path: 'test-import', + file: fixture_file_upload(file), + namespace: namespace.id, + override_params: override_params + import_project = Project.find(json_response['id']) + + expect(import_project.import_data.data['override_params']).to eq(override_params) + end + + it 'does not store params that are not allowed' do + stub_import(namespace) + override_params
= { 'not_allowed' => 'Hello world' } + + post api('/projects/import', user), + path: 'test-import', + file: fixture_file_upload(file), + namespace: namespace.id, + override_params: override_params + import_project = Project.find(json_response['id']) + + expect(import_project.import_data.data['override_params']).to be_empty + end + + it 'correctly overrides params during the import' do + override_params = { 'description' => 'Hello world' } + + Sidekiq::Testing.inline! do + post api('/projects/import', user), + path: 'test-import', + file: fixture_file_upload(file), + namespace: namespace.id, + override_params: override_params + end + import_project = Project.find(json_response['id']) + + expect(import_project.description).to eq('Hello world') + end + + context 'when target path already exists in namespace' do + let(:existing_project) { create(:project, namespace: user.namespace) } + + it 'does not schedule an import' do + expect_any_instance_of(Project).not_to receive(:import_schedule) + + post api('/projects/import', user), path: existing_project.path, file: fixture_file_upload(file) + + expect(response).to have_gitlab_http_status(400) + expect(json_response['message']).to eq('Name has already been taken') + end + + context 'when param overwrite is true' do + it 'schedules an import' do + stub_import(user.namespace) + + post api('/projects/import', user), path: existing_project.path, file: fixture_file_upload(file), overwrite: true + + expect(response).to have_gitlab_http_status(201) + end + end + end + def stub_import(namespace) expect_any_instance_of(Project).to receive(:import_schedule) expect(::Projects::CreateService).to receive(:new).with(user, hash_including(namespace_id: namespace.id)).and_call_original diff --git a/spec/requests/api/project_snapshots_spec.rb b/spec/requests/api/project_snapshots_spec.rb new file mode 100644 index 00000000000..07a920f8d28 --- /dev/null +++ b/spec/requests/api/project_snapshots_spec.rb @@ -0,0 +1,51 @@ +require 'spec_helper' + +describe API::ProjectSnapshots do + include WorkhorseHelpers + + let(:project) { create(:project) } + let(:admin) { create(:admin) } + + describe 'GET /projects/:id/snapshot' do + def expect_snapshot_response_for(repository) + type, params = workhorse_send_data + + expect(type).to eq('git-snapshot') + expect(params).to eq( + 'GitalyServer' => { + 'address' => Gitlab::GitalyClient.address(repository.project.repository_storage), + 'token' => Gitlab::GitalyClient.token(repository.project.repository_storage) + }, + 'GetSnapshotRequest' => Gitaly::GetSnapshotRequest.new( + repository: repository.gitaly_repository + ).to_json + ) + end + + it 'returns authentication error as project owner' do + get api("/projects/#{project.id}/snapshot", project.owner) + + expect(response).to have_gitlab_http_status(403) + end + + it 'returns authentication error as unauthenticated user' do + get api("/projects/#{project.id}/snapshot", nil) + + expect(response).to have_gitlab_http_status(401) + end + + it 'requests project repository raw archive as administrator' do + get api("/projects/#{project.id}/snapshot", admin), wiki: '0' + + expect(response).to have_gitlab_http_status(200) + expect_snapshot_response_for(project.repository) + end + + it 'requests wiki repository raw archive as administrator' do + get api("/projects/#{project.id}/snapshot", admin), wiki: '1' + + expect(response).to have_gitlab_http_status(200) + expect_snapshot_response_for(project.wiki.repository) + end + end +end diff --git a/spec/requests/api/projects_spec.rb 
b/spec/requests/api/projects_spec.rb index cee93f6ed14..17272cb00e5 100644 --- a/spec/requests/api/projects_spec.rb +++ b/spec/requests/api/projects_spec.rb @@ -1,6 +1,18 @@ # -*- coding: utf-8 -*- require 'spec_helper' +shared_examples 'languages and percentages JSON response' do + let(:expected_languages) { project.repository.languages.map { |language| language.values_at(:label, :value)}.to_h } + + it 'returns expected language values' do + get api("/projects/#{project.id}/languages", user) + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to eq(expected_languages) + expect(json_response.count).to be > 1 + end +end + describe API::Projects do let(:user) { create(:user) } let(:user2) { create(:user) } @@ -452,7 +464,8 @@ describe API::Projects do only_allow_merge_if_pipeline_succeeds: false, request_access_enabled: true, only_allow_merge_if_all_discussions_are_resolved: false, - ci_config_path: 'a/custom/path' + ci_config_path: 'a/custom/path', + merge_method: 'ff' }) post api('/projects', user), project @@ -569,6 +582,22 @@ describe API::Projects do expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_truthy end + it 'sets the merge method of a project to rebase merge' do + project = attributes_for(:project, merge_method: 'rebase_merge') + + post api('/projects', user), project + + expect(json_response['merge_method']).to eq('rebase_merge') + end + + it 'rejects invalid values for merge_method' do + project = attributes_for(:project, merge_method: 'totally_not_valid_method') + + post api('/projects', user), project + + expect(response).to have_gitlab_http_status(400) + end + it 'ignores import_url when it is nil' do project = attributes_for(:project, import_url: nil) @@ -823,6 +852,7 @@ describe API::Projects do expect(json_response['shared_with_groups'][0]['group_access_level']).to eq(link.group_access) expect(json_response['only_allow_merge_if_pipeline_succeeds']).to eq(project.only_allow_merge_if_pipeline_succeeds) expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved) + expect(json_response['merge_method']).to eq(project.merge_method.to_s) end it 'returns a project by path name' do @@ -1474,6 +1504,26 @@ describe API::Projects do expect(json_response[k.to_s]).to eq(v) end end + + it 'updates merge_method' do + project_param = { merge_method: 'ff' } + + put api("/projects/#{project3.id}", user), project_param + + expect(response).to have_gitlab_http_status(200) + + project_param.each_pair do |k, v| + expect(json_response[k.to_s]).to eq(v) + end + end + + it 'rejects to update merge_method when merge_method is invalid' do + project_param = { merge_method: 'invalid' } + + put api("/projects/#{project3.id}", user), project_param + + expect(response).to have_gitlab_http_status(400) + end end context 'when authenticated as project master' do @@ -1491,6 +1541,7 @@ describe API::Projects do wiki_enabled: true, snippets_enabled: true, merge_requests_enabled: true, + merge_method: 'ff', description: 'new description' } put api("/projects/#{project3.id}", user4), project_param @@ -1655,6 +1706,42 @@ describe API::Projects do end end + describe 'GET /projects/:id/languages' do + context 'with an authorized user' do + it_behaves_like 'languages and percentages JSON response' do + let(:project) { project3 } + end + + it 'returns not_found(404) for not existing project' do + get api("/projects/9999999999/languages", user) + + expect(response).to 
have_gitlab_http_status(:not_found) + end + end + + context 'with not authorized user' do + it 'returns not_found for existing but unauthorized project' do + get api("/projects/#{project3.id}/languages", user3) + + expect(response).to have_gitlab_http_status(:not_found) + end + end + + context 'without user' do + let(:project_public) { create(:project, :public, :repository) } + + it_behaves_like 'languages and percentages JSON response' do + let(:project) { project_public } + end + + it 'returns not_found for existing but unauthorized project' do + get api("/projects/#{project3.id}/languages", nil) + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + describe 'DELETE /projects/:id' do context 'when authenticated as user' do it 'removes project' do @@ -1718,6 +1805,12 @@ describe API::Projects do group end + let(:group3) do + group = create(:group, name: 'group3_name', parent: group2) + group.add_owner(user2) + group + end + before do project.add_reporter(user2) end @@ -1813,6 +1906,15 @@ describe API::Projects do expect(json_response['namespace']['name']).to eq(group2.name) end + it 'forks to owned subgroup' do + full_path = "#{group2.path}/#{group3.path}" + post api("/projects/#{project.id}/fork", user2), namespace: full_path + + expect(response).to have_gitlab_http_status(201) + expect(json_response['namespace']['name']).to eq(group3.name) + expect(json_response['namespace']['full_path']).to eq(full_path) + end + it 'fails to fork to not owned group' do post api("/projects/#{project.id}/fork", user2), namespace: group.name diff --git a/spec/requests/api/protected_branches_spec.rb b/spec/requests/api/protected_branches_spec.rb index 1d23e023bb6..576fde46615 100644 --- a/spec/requests/api/protected_branches_spec.rb +++ b/spec/requests/api/protected_branches_spec.rb @@ -193,6 +193,19 @@ describe API::ProtectedBranches do expect(json_response['merge_access_levels'][0]['access_level']).to eq(Gitlab::Access::MASTER) end end + + context 'when a policy restricts rule deletion' do + before do + policy = instance_double(ProtectedBranchPolicy, can?: false) + expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) + end + + it "prevents deletion of the protected branch rule" do + post post_endpoint, name: branch_name + + expect(response).to have_gitlab_http_status(403) + end + end end context 'when authenticated as a guest' do @@ -209,18 +222,20 @@ describe API::ProtectedBranches do end describe "DELETE /projects/:id/protected_branches/unprotect/:branch" do + let(:delete_endpoint) { api("/projects/#{project.id}/protected_branches/#{branch_name}", user) } + before do project.add_master(user) end it "unprotects a single branch" do - delete api("/projects/#{project.id}/protected_branches/#{branch_name}", user) + delete delete_endpoint expect(response).to have_gitlab_http_status(204) end it_behaves_like '412 response' do - let(:request) { api("/projects/#{project.id}/protected_branches/#{branch_name}", user) } + let(:request) { delete_endpoint } end it "returns 404 if branch does not exist" do @@ -229,11 +244,24 @@ describe API::ProtectedBranches do expect(response).to have_gitlab_http_status(404) end + context 'when a policy restricts rule deletion' do + before do + policy = instance_double(ProtectedBranchPolicy, can?: false) + expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) + end + + it "prevents deletion of the protected branch rule" do + delete delete_endpoint + + expect(response).to have_gitlab_http_status(403) + end + end + context 'when 
branch has a wildcard in its name' do let(:protected_name) { 'feature*' } it "unprotects a wildcard branch" do - delete api("/projects/#{project.id}/protected_branches/#{branch_name}", user) + delete delete_endpoint expect(response).to have_gitlab_http_status(204) end diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb index 741800ff61d..9e6d69e3874 100644 --- a/spec/requests/api/repositories_spec.rb +++ b/spec/requests/api/repositories_spec.rb @@ -427,5 +427,20 @@ describe API::Repositories do let(:request) { get api(route, guest) } end end + + # Regression: https://gitlab.com/gitlab-org/gitlab-ce/issues/45363 + describe 'Links header contains working URLs when no `order_by` nor `sort` is given' do + let(:project) { create(:project, :public, :repository) } + let(:current_user) { nil } + + it 'returns `Link` header that includes URLs with default value for `order_by` & `sort`' do + get api(route, current_user) + + first_link_url = response.headers['Link'].split(';').first + + expect(first_link_url).to include('order_by=commits') + expect(first_link_url).to include('sort=asc') + end + end end end diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb index 95c23726a79..17c7a511857 100644 --- a/spec/requests/api/runner_spec.rb +++ b/spec/requests/api/runner_spec.rb @@ -109,6 +109,26 @@ describe API::Runner do end end + context 'when maximum job timeout is specified' do + it 'creates runner' do + post api('/runners'), token: registration_token, + maximum_timeout: 9000 + + expect(response).to have_gitlab_http_status 201 + expect(Ci::Runner.first.maximum_timeout).to eq(9000) + end + + context 'when maximum job timeout is empty' do + it 'creates runner' do + post api('/runners'), token: registration_token, + maximum_timeout: '' + + expect(response).to have_gitlab_http_status 201 + expect(Ci::Runner.first.maximum_timeout).to be_nil + end + end + end + %w(name version revision platform architecture).each do |param| context "when info parameter '#{param}' info is present" do let(:value) { "#{param}_value" } @@ -200,7 +220,7 @@ describe API::Runner do let(:project) { create(:project, shared_runners_enabled: false) } let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') } let(:runner) { create(:ci_runner) } - let!(:job) do + let(:job) do create(:ci_build, :artifacts, :extended_options, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, commands: "ls\ndate") end @@ -215,6 +235,7 @@ describe API::Runner do let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' } before do + job stub_container_registry_config(enabled: false) end @@ -339,12 +360,12 @@ describe API::Runner do let(:expected_steps) do [{ 'name' => 'script', 'script' => %w(ls date), - 'timeout' => job.timeout, + 'timeout' => job.metadata_timeout, 'when' => 'on_success', 'allow_failure' => false }, { 'name' => 'after_script', 'script' => %w(ls date), - 'timeout' => job.timeout, + 'timeout' => job.metadata_timeout, 'when' => 'always', 'allow_failure' => true }] end @@ -385,7 +406,7 @@ describe API::Runner do expect(json_response['image']).to eq({ 'name' => 'ruby:2.1', 'entrypoint' => '/bin/sh' }) expect(json_response['services']).to eq([{ 'name' => 'postgres', 'entrypoint' => nil, 'alias' => nil, 'command' => nil }, - { 'name' => 'docker:dind', 'entrypoint' => '/bin/sh', + { 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh', 'alias' => 'docker', 'command' => 'sleep 30' }]) 
expect(json_response['steps']).to eq(expected_steps) expect(json_response['artifacts']).to eq(expected_artifacts) @@ -647,6 +668,41 @@ describe API::Runner do end end end + + describe 'timeout support' do + context 'when project specifies job timeout' do + let(:project) { create(:project, shared_runners_enabled: false, build_timeout: 1234) } + + it 'contains info about timeout taken from project' do + request_job + + expect(response).to have_gitlab_http_status(201) + expect(json_response['runner_info']).to include({ 'timeout' => 1234 }) + end + + context 'when runner specifies lower timeout' do + let(:runner) { create(:ci_runner, maximum_timeout: 1000) } + + it 'contains info about timeout overridden by runner' do + request_job + + expect(response).to have_gitlab_http_status(201) + expect(json_response['runner_info']).to include({ 'timeout' => 1000 }) + end + end + + context 'when runner specifies bigger timeout' do + let(:runner) { create(:ci_runner, maximum_timeout: 2000) } + + it 'contains info about timeout not overridden by runner' do + request_job + + expect(response).to have_gitlab_http_status(201) + expect(json_response['runner_info']).to include({ 'timeout' => 1234 }) + end + end + end + end end def request_job(token = runner.token, **params) @@ -888,17 +944,59 @@ describe API::Runner do let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') } before do + stub_artifacts_object_storage job.run! end describe 'POST /api/v4/jobs/:id/artifacts/authorize' do context 'when using token as parameter' do - it 'authorizes posting artifacts to running job' do - authorize_artifacts_with_token_in_params + context 'posting artifacts to running job' do + subject do + authorize_artifacts_with_token_in_params + end - expect(response).to have_gitlab_http_status(200) - expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) - expect(json_response['TempPath']).not_to be_nil + shared_examples 'authorizes local file' do + it 'succeeds' do + subject + + expect(response).to have_gitlab_http_status(200) + expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) + expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path) + expect(json_response['RemoteObject']).to be_nil + end + end + + context 'when using local storage' do + it_behaves_like 'authorizes local file' + end + + context 'when using remote storage' do + context 'when direct upload is enabled' do + before do + stub_artifacts_object_storage(enabled: true, direct_upload: true) + end + + it 'succeeds' do + subject + + expect(response).to have_gitlab_http_status(200) + expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) + expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path) + expect(json_response['RemoteObject']).to have_key('ID') + expect(json_response['RemoteObject']).to have_key('GetURL') + expect(json_response['RemoteObject']).to have_key('StoreURL') + expect(json_response['RemoteObject']).to have_key('DeleteURL') + end + end + + context 'when direct upload is disabled' do + before do + stub_artifacts_object_storage(enabled: true, direct_upload: false) + end + + it_behaves_like 'authorizes local file' + end + end end it 'fails to post too large artifact' do @@ -994,20 +1092,45 @@ describe API::Runner do end end - context 'when uses regular file post' do - before do - upload_artifacts(file_upload, headers_with_token, false) + context 'when uses 
accelerated file post' do + context 'for file stored locally' do + before do + upload_artifacts(file_upload, headers_with_token) + end + + it_behaves_like 'successful artifacts upload' end - it_behaves_like 'successful artifacts upload' - end + context 'for file stored remotelly' do + let!(:fog_connection) do + stub_artifacts_object_storage(direct_upload: true) + end - context 'when uses accelerated file post' do - before do - upload_artifacts(file_upload, headers_with_token, true) - end + before do + fog_connection.directories.get('artifacts').files.create( + key: 'tmp/upload/12312300', + body: 'content' + ) + + upload_artifacts(file_upload, headers_with_token, + { 'file.remote_id' => remote_id }) + end - it_behaves_like 'successful artifacts upload' + context 'when valid remote_id is used' do + let(:remote_id) { '12312300' } + + it_behaves_like 'successful artifacts upload' + end + + context 'when invalid remote_id is used' do + let(:remote_id) { 'invalid id' } + + it 'responds with bad request' do + expect(response).to have_gitlab_http_status(500) + expect(json_response['message']).to eq("Missing file") + end + end + end end context 'when using runners token' do @@ -1102,11 +1225,13 @@ describe API::Runner do let!(:artifacts) { file_upload } let!(:artifacts_sha256) { Digest::SHA256.file(artifacts.path).hexdigest } let!(:metadata) { file_upload2 } + let!(:metadata_sha256) { Digest::SHA256.file(metadata.path).hexdigest } let(:stored_artifacts_file) { job.reload.artifacts_file.file } let(:stored_metadata_file) { job.reload.artifacts_metadata.file } let(:stored_artifacts_size) { job.reload.artifacts_size } let(:stored_artifacts_sha256) { job.reload.job_artifacts_archive.file_sha256 } + let(:stored_metadata_sha256) { job.reload.job_artifacts_metadata.file_sha256 } before do post(api("/jobs/#{job.id}/artifacts"), post_data, headers_with_token) @@ -1118,7 +1243,8 @@ describe API::Runner do 'file.name' => artifacts.original_filename, 'file.sha256' => artifacts_sha256, 'metadata.path' => metadata.path, - 'metadata.name' => metadata.original_filename } + 'metadata.name' => metadata.original_filename, + 'metadata.sha256' => metadata_sha256 } end it 'stores artifacts and artifacts metadata' do @@ -1127,6 +1253,7 @@ describe API::Runner do expect(stored_metadata_file.original_filename).to eq(metadata.original_filename) expect(stored_artifacts_size).to eq(72821) expect(stored_artifacts_sha256).to eq(artifacts_sha256) + expect(stored_metadata_sha256).to eq(metadata_sha256) end end @@ -1147,15 +1274,19 @@ describe API::Runner do end context 'when artifacts are being stored outside of tmp path' do + let(:new_tmpdir) { Dir.mktmpdir } + before do + # init before overwriting tmp dir + file_upload + # by configuring this path we allow to pass file from @tmpdir only # but all temporary files are stored in system tmp directory - @tmpdir = Dir.mktmpdir - allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(@tmpdir) + allow(Dir).to receive(:tmpdir).and_return(new_tmpdir) end after do - FileUtils.remove_entry @tmpdir + FileUtils.remove_entry(new_tmpdir) end it' "fails to post artifacts for outside of tmp path"' do @@ -1165,12 +1296,11 @@ describe API::Runner do end end - def upload_artifacts(file, headers = {}, accelerated = true) - params = if accelerated - { 'file.path' => file.path, 'file.name' => file.original_filename } - else - { 'file' => file } - end + def upload_artifacts(file, headers = {}, params = {}) + params = params.merge({ + 'file.path' => file.path, + 'file.name' => 
file.original_filename + }) post api("/jobs/#{job.id}/artifacts"), params, headers end @@ -1179,27 +1309,67 @@ describe API::Runner do describe 'GET /api/v4/jobs/:id/artifacts' do let(:token) { job.token } - before do - download_artifact - end - context 'when job has artifacts' do - let(:job) { create(:ci_build, :artifacts) } - let(:download_headers) do - { 'Content-Transfer-Encoding' => 'binary', - 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' } + let(:job) { create(:ci_build) } + let(:store) { JobArtifactUploader::Store::LOCAL } + + before do + create(:ci_job_artifact, :archive, file_store: store, job: job) end context 'when using job token' do - it 'download artifacts' do - expect(response).to have_gitlab_http_status(200) - expect(response.headers).to include download_headers + context 'when artifacts are stored locally' do + let(:download_headers) do + { 'Content-Transfer-Encoding' => 'binary', + 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' } + end + + before do + download_artifact + end + + it 'download artifacts' do + expect(response).to have_http_status(200) + expect(response.headers).to include download_headers + end + end + + context 'when artifacts are stored remotely' do + let(:store) { JobArtifactUploader::Store::REMOTE } + let!(:job) { create(:ci_build) } + + context 'when proxy download is being used' do + before do + download_artifact(direct_download: false) + end + + it 'uses workhorse send-url' do + expect(response).to have_gitlab_http_status(200) + expect(response.headers).to include( + 'Gitlab-Workhorse-Send-Data' => /send-url:/) + end + end + + context 'when direct download is being used' do + before do + download_artifact(direct_download: true) + end + + it 'receive redirect for downloading artifacts' do + expect(response).to have_gitlab_http_status(302) + expect(response.headers).to include('Location') + end + end end end context 'when using runnners token' do let(:token) { job.project.runners_token } + before do + download_artifact + end + it 'responds with forbidden' do expect(response).to have_gitlab_http_status(403) end @@ -1208,12 +1378,16 @@ describe API::Runner do context 'when job does not has artifacts' do it 'responds with not found' do + download_artifact + expect(response).to have_gitlab_http_status(404) end end def download_artifact(params = {}, request_headers = headers) params = params.merge(token: token) + job.reload + get api("/jobs/#{job.id}/artifacts"), params, request_headers end end diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb index ec5cad4f4fd..d30f0cf36e2 100644 --- a/spec/requests/api/runners_spec.rb +++ b/spec/requests/api/runners_spec.rb @@ -123,6 +123,7 @@ describe API::Runners do expect(response).to have_gitlab_http_status(200) expect(json_response['description']).to eq(shared_runner.description) + expect(json_response['maximum_timeout']).to be_nil end end @@ -192,7 +193,8 @@ describe API::Runners do tag_list: ['ruby2.1', 'pgsql', 'mysql'], run_untagged: 'false', locked: 'true', - access_level: 'ref_protected') + access_level: 'ref_protected', + maximum_timeout: 1234) shared_runner.reload expect(response).to have_gitlab_http_status(200) @@ -204,6 +206,7 @@ describe API::Runners do expect(shared_runner.ref_protected?).to be_truthy expect(shared_runner.ensure_runner_queue_value) .not_to eq(runner_queue_value) + expect(shared_runner.maximum_timeout).to eq(1234) end end diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb index 
9052a18c60b..f8d5258a8d9 100644 --- a/spec/requests/api/search_spec.rb +++ b/spec/requests/api/search_spec.rb @@ -99,10 +99,10 @@ describe API::Search do end end - describe "GET /groups/:id/-/search" do + describe "GET /groups/:id/search" do context 'when user is not authenticated' do it 'returns 401 error' do - get api("/groups/#{group.id}/-/search"), scope: 'projects', search: 'awesome' + get api("/groups/#{group.id}/search"), scope: 'projects', search: 'awesome' expect(response).to have_gitlab_http_status(401) end @@ -110,7 +110,7 @@ describe API::Search do context 'when scope is not supported' do it 'returns 400 error' do - get api("/groups/#{group.id}/-/search", user), scope: 'unsupported', search: 'awesome' + get api("/groups/#{group.id}/search", user), scope: 'unsupported', search: 'awesome' expect(response).to have_gitlab_http_status(400) end @@ -118,7 +118,7 @@ describe API::Search do context 'when scope is missing' do it 'returns 400 error' do - get api("/groups/#{group.id}/-/search", user), search: 'awesome' + get api("/groups/#{group.id}/search", user), search: 'awesome' expect(response).to have_gitlab_http_status(400) end @@ -126,7 +126,7 @@ describe API::Search do context 'when group does not exist' do it 'returns 404 error' do - get api('/groups/9999/-/search', user), scope: 'issues', search: 'awesome' + get api('/groups/9999/search', user), scope: 'issues', search: 'awesome' expect(response).to have_gitlab_http_status(404) end @@ -136,7 +136,7 @@ describe API::Search do it 'returns 404 error' do private_group = create(:group, :private) - get api("/groups/#{private_group.id}/-/search", user), scope: 'issues', search: 'awesome' + get api("/groups/#{private_group.id}/search", user), scope: 'issues', search: 'awesome' expect(response).to have_gitlab_http_status(404) end @@ -145,7 +145,7 @@ describe API::Search do context 'with correct params' do context 'for projects scope' do before do - get api("/groups/#{group.id}/-/search", user), scope: 'projects', search: 'awesome' + get api("/groups/#{group.id}/search", user), scope: 'projects', search: 'awesome' end it_behaves_like 'response is correct', schema: 'public_api/v4/projects' @@ -155,7 +155,7 @@ describe API::Search do before do create(:issue, project: project, title: 'awesome issue') - get api("/groups/#{group.id}/-/search", user), scope: 'issues', search: 'awesome' + get api("/groups/#{group.id}/search", user), scope: 'issues', search: 'awesome' end it_behaves_like 'response is correct', schema: 'public_api/v4/issues' @@ -165,7 +165,7 @@ describe API::Search do before do create(:merge_request, source_project: repo_project, title: 'awesome mr') - get api("/groups/#{group.id}/-/search", user), scope: 'merge_requests', search: 'awesome' + get api("/groups/#{group.id}/search", user), scope: 'merge_requests', search: 'awesome' end it_behaves_like 'response is correct', schema: 'public_api/v4/merge_requests' @@ -175,7 +175,7 @@ describe API::Search do before do create(:milestone, project: project, title: 'awesome milestone') - get api("/groups/#{group.id}/-/search", user), scope: 'milestones', search: 'awesome' + get api("/groups/#{group.id}/search", user), scope: 'milestones', search: 'awesome' end it_behaves_like 'response is correct', schema: 'public_api/v4/milestones' @@ -187,7 +187,7 @@ describe API::Search do create(:milestone, project: project, title: 'awesome milestone') create(:milestone, project: another_project, title: 'awesome milestone other project') - get api("/groups/#{CGI.escape(group.full_path)}/-/search", 
user), scope: 'milestones', search: 'awesome' + get api("/groups/#{CGI.escape(group.full_path)}/search", user), scope: 'milestones', search: 'awesome' end it_behaves_like 'response is correct', schema: 'public_api/v4/milestones' @@ -198,7 +198,7 @@ describe API::Search do describe "GET /projects/:id/search" do context 'when user is not authenticated' do it 'returns 401 error' do - get api("/projects/#{project.id}/-/search"), scope: 'issues', search: 'awesome' + get api("/projects/#{project.id}/search"), scope: 'issues', search: 'awesome' expect(response).to have_gitlab_http_status(401) end @@ -206,7 +206,7 @@ describe API::Search do context 'when scope is not supported' do it 'returns 400 error' do - get api("/projects/#{project.id}/-/search", user), scope: 'unsupported', search: 'awesome' + get api("/projects/#{project.id}/search", user), scope: 'unsupported', search: 'awesome' expect(response).to have_gitlab_http_status(400) end @@ -214,7 +214,7 @@ describe API::Search do context 'when scope is missing' do it 'returns 400 error' do - get api("/projects/#{project.id}/-/search", user), search: 'awesome' + get api("/projects/#{project.id}/search", user), search: 'awesome' expect(response).to have_gitlab_http_status(400) end @@ -222,7 +222,7 @@ describe API::Search do context 'when project does not exist' do it 'returns 404 error' do - get api('/projects/9999/-/search', user), scope: 'issues', search: 'awesome' + get api('/projects/9999/search', user), scope: 'issues', search: 'awesome' expect(response).to have_gitlab_http_status(404) end @@ -232,7 +232,7 @@ describe API::Search do it 'returns 404 error' do project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) - get api("/projects/#{project.id}/-/search", user), scope: 'issues', search: 'awesome' + get api("/projects/#{project.id}/search", user), scope: 'issues', search: 'awesome' expect(response).to have_gitlab_http_status(404) end @@ -243,7 +243,7 @@ describe API::Search do before do create(:issue, project: project, title: 'awesome issue') - get api("/projects/#{project.id}/-/search", user), scope: 'issues', search: 'awesome' + get api("/projects/#{project.id}/search", user), scope: 'issues', search: 'awesome' end it_behaves_like 'response is correct', schema: 'public_api/v4/issues' @@ -253,7 +253,7 @@ describe API::Search do before do create(:merge_request, source_project: repo_project, title: 'awesome mr') - get api("/projects/#{repo_project.id}/-/search", user), scope: 'merge_requests', search: 'awesome' + get api("/projects/#{repo_project.id}/search", user), scope: 'merge_requests', search: 'awesome' end it_behaves_like 'response is correct', schema: 'public_api/v4/merge_requests' @@ -263,7 +263,7 @@ describe API::Search do before do create(:milestone, project: project, title: 'awesome milestone') - get api("/projects/#{project.id}/-/search", user), scope: 'milestones', search: 'awesome' + get api("/projects/#{project.id}/search", user), scope: 'milestones', search: 'awesome' end it_behaves_like 'response is correct', schema: 'public_api/v4/milestones' @@ -273,7 +273,7 @@ describe API::Search do before do create(:note_on_merge_request, project: project, note: 'awesome note') - get api("/projects/#{project.id}/-/search", user), scope: 'notes', search: 'awesome' + get api("/projects/#{project.id}/search", user), scope: 'notes', search: 'awesome' end it_behaves_like 'response is correct', schema: 'public_api/v4/notes' @@ -284,7 +284,7 @@ describe API::Search do wiki = create(:project_wiki, project: project) 
create(:wiki_page, wiki: wiki, attrs: { title: 'home', content: "Awesome page" }) - get api("/projects/#{project.id}/-/search", user), scope: 'wiki_blobs', search: 'awesome' + get api("/projects/#{project.id}/search", user), scope: 'wiki_blobs', search: 'awesome' end it_behaves_like 'response is correct', schema: 'public_api/v4/blobs' @@ -292,7 +292,7 @@ describe API::Search do context 'for commits scope' do before do - get api("/projects/#{repo_project.id}/-/search", user), scope: 'commits', search: '498214de67004b1da3d820901307bed2a68a8ef6' + get api("/projects/#{repo_project.id}/search", user), scope: 'commits', search: '498214de67004b1da3d820901307bed2a68a8ef6' end it_behaves_like 'response is correct', schema: 'public_api/v4/commits_details' @@ -300,7 +300,7 @@ describe API::Search do context 'for commits scope with project path as id' do before do - get api("/projects/#{CGI.escape(repo_project.full_path)}/-/search", user), scope: 'commits', search: '498214de67004b1da3d820901307bed2a68a8ef6' + get api("/projects/#{CGI.escape(repo_project.full_path)}/search", user), scope: 'commits', search: '498214de67004b1da3d820901307bed2a68a8ef6' end it_behaves_like 'response is correct', schema: 'public_api/v4/commits_details' @@ -308,7 +308,7 @@ describe API::Search do context 'for blobs scope' do before do - get api("/projects/#{repo_project.id}/-/search", user), scope: 'blobs', search: 'monitors' + get api("/projects/#{repo_project.id}/search", user), scope: 'blobs', search: 'monitors' end it_behaves_like 'response is correct', schema: 'public_api/v4/blobs', size: 2 diff --git a/spec/requests/api/templates_spec.rb b/spec/requests/api/templates_spec.rb index de1619f33c1..6bb53fdc98d 100644 --- a/spec/requests/api/templates_spec.rb +++ b/spec/requests/api/templates_spec.rb @@ -65,7 +65,7 @@ describe API::Templates do expect(json_response['description']).to include('A short and simple permissive license with conditions') expect(json_response['conditions']).to eq(%w[include-copyright]) expect(json_response['permissions']).to eq(%w[commercial-use modifications distribution private-use]) - expect(json_response['limitations']).to eq(%w[no-liability]) + expect(json_response['limitations']).to eq(%w[liability warranty]) expect(json_response['content']).to include('MIT License') end end diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb index 79041c6a792..00f067889a0 100644 --- a/spec/requests/api/v3/builds_spec.rb +++ b/spec/requests/api/v3/builds_spec.rb @@ -216,6 +216,7 @@ describe API::V3::Builds do describe 'GET /projects/:id/builds/:build_id/artifacts' do before do + stub_artifacts_object_storage get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user) end @@ -230,13 +231,24 @@ describe API::V3::Builds do end it 'returns specific job artifacts' do - expect(response).to have_gitlab_http_status(200) + expect(response).to have_http_status(200) expect(response.headers).to include(download_headers) expect(response.body).to match_file(build.artifacts_file.file.file) end end end + context 'when artifacts are stored remotely' do + let(:build) { create(:ci_build, pipeline: pipeline) } + let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) } + + it 'returns location redirect' do + get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user) + + expect(response).to have_gitlab_http_status(302) + end + end + context 'unauthorized user' do let(:api_user) { nil } @@ -256,6 +268,7 @@ describe API::V3::Builds do 
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) } before do + stub_artifacts_object_storage build.success end @@ -318,9 +331,24 @@ describe API::V3::Builds do "attachment; filename=#{build.artifacts_file.filename}" } end - it { expect(response).to have_gitlab_http_status(200) } + it { expect(response).to have_http_status(200) } it { expect(response.headers).to include(download_headers) } end + + context 'when artifacts are stored remotely' do + let(:build) { create(:ci_build, pipeline: pipeline) } + let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) } + + before do + build.reload + + get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user) + end + + it 'returns location redirect' do + expect(response).to have_http_status(302) + end + end end context 'with regular branch' do diff --git a/spec/requests/api/v3/merge_requests_spec.rb b/spec/requests/api/v3/merge_requests_spec.rb index 6b748369f0d..be70cb24dce 100644 --- a/spec/requests/api/v3/merge_requests_spec.rb +++ b/spec/requests/api/v3/merge_requests_spec.rb @@ -340,7 +340,7 @@ describe API::MergeRequests do expect(json_response['title']).to eq('Test merge_request') end - it "returns 422 when target project has disabled merge requests" do + it "returns 403 when target project has disabled merge requests" do project.project_feature.update(merge_requests_access_level: 0) post v3_api("/projects/#{forked_project.id}/merge_requests", user2), @@ -350,7 +350,7 @@ describe API::MergeRequests do author: user2, target_project_id: project.id - expect(response).to have_gitlab_http_status(422) + expect(response).to have_gitlab_http_status(403) end it "returns 400 when source_branch is missing" do diff --git a/spec/requests/api/v3/templates_spec.rb b/spec/requests/api/v3/templates_spec.rb index 38a8994eb79..1a637f3cf96 100644 --- a/spec/requests/api/v3/templates_spec.rb +++ b/spec/requests/api/v3/templates_spec.rb @@ -57,7 +57,7 @@ describe API::V3::Templates do expect(json_response['description']).to include('A short and simple permissive license with conditions') expect(json_response['conditions']).to eq(%w[include-copyright]) expect(json_response['permissions']).to eq(%w[commercial-use modifications distribution private-use]) - expect(json_response['limitations']).to eq(%w[no-liability]) + expect(json_response['limitations']).to eq(%w[liability warranty]) expect(json_response['content']).to include('MIT License') end end diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb index 6dbbb1ad7bb..494db30e8e0 100644 --- a/spec/requests/git_http_spec.rb +++ b/spec/requests/git_http_spec.rb @@ -163,7 +163,7 @@ describe 'Git HTTP requests' do download(path) do |response| json_body = ActiveSupport::JSON.decode(response.body) - expect(json_body['RepoPath']).to include(wiki.repository.disk_path) + expect(json_body['Repository']['relative_path']).to eq(wiki.repository.relative_path) end end end @@ -344,20 +344,11 @@ describe 'Git HTTP requests' do context 'and the user requests a redirected path' do let!(:redirect) { project.route.create_redirect('foo/bar') } let(:path) { "#{redirect.path}.git" } - let(:project_moved_message) do - <<-MSG.strip_heredoc - Project '#{redirect.path}' was moved to '#{project.full_path}'. - Please update your Git remote: - - git remote set-url origin #{project.http_url_to_repo} and try again. 
- MSG - end - - it 'downloads get status 404 with "project was moved" message' do + it 'downloads get status 200 for redirects' do clone_get(path, {}) - expect(response).to have_gitlab_http_status(:not_found) - expect(response.body).to match(project_moved_message) + + expect(response).to have_gitlab_http_status(:ok) end end end @@ -559,20 +550,19 @@ describe 'Git HTTP requests' do Please update your Git remote: - git remote set-url origin #{project.http_url_to_repo} and try again. + git remote set-url origin #{project.http_url_to_repo}. MSG end - it 'downloads get status 404 with "project was moved" message' do + it 'downloads get status 200' do clone_get(path, env) - expect(response).to have_gitlab_http_status(:not_found) - expect(response.body).to match(project_moved_message) + + expect(response).to have_gitlab_http_status(:ok) end it 'uploads get status 404 with "project was moved" message' do upload(path, env) do |response| - expect(response).to have_gitlab_http_status(:not_found) - expect(response.body).to match(project_moved_message) + expect(response).to have_gitlab_http_status(:ok) end end end diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb index 971b45c411d..f80abb06fca 100644 --- a/spec/requests/lfs_http_spec.rb +++ b/spec/requests/lfs_http_spec.rb @@ -191,10 +191,12 @@ describe 'Git LFS API and storage' do describe 'when fetching lfs object' do let(:project) { create(:project) } let(:update_permissions) { } + let(:before_get) { } before do enable_lfs update_permissions + before_get get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers end @@ -239,6 +241,38 @@ describe 'Git LFS API and storage' do end it_behaves_like 'responds with a file' + + context 'when LFS uses object storage' do + context 'when proxy download is enabled' do + let(:before_get) do + stub_lfs_object_storage(proxy_download: true) + lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE) + end + + it 'responds with redirect' do + expect(response).to have_gitlab_http_status(200) + end + + it 'responds with the workhorse send-url' do + expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:") + end + end + + context 'when proxy download is disabled' do + let(:before_get) do + stub_lfs_object_storage(proxy_download: false) + lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE) + end + + it 'responds with redirect' do + expect(response).to have_gitlab_http_status(302) + end + + it 'responds with the file location' do + expect(response.location).to include(lfs_object.reload.file.path) + end + end + end end end @@ -945,22 +979,61 @@ describe 'Git LFS API and storage' do end context 'and request is sent by gitlab-workhorse to authorize the request' do - before do - put_authorize + shared_examples 'a valid response' do + before do + put_authorize + end + + it 'responds with status 200' do + expect(response).to have_gitlab_http_status(200) + end + + it 'uses the gitlab-workhorse content type' do + expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) + end end - it 'responds with status 200' do - expect(response).to have_gitlab_http_status(200) + shared_examples 'a local file' do + it_behaves_like 'a valid response' do + it 'responds with status 200, location of lfs store and object details' do + expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path) + expect(json_response['RemoteObject']).to be_nil + expect(json_response['LfsOid']).to eq(sample_oid) + 
expect(json_response['LfsSize']).to eq(sample_size) + end + end end - it 'uses the gitlab-workhorse content type' do - expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) + context 'when using local storage' do + it_behaves_like 'a local file' end - it 'responds with status 200, location of lfs store and object details' do - expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path) - expect(json_response['LfsOid']).to eq(sample_oid) - expect(json_response['LfsSize']).to eq(sample_size) + context 'when using remote storage' do + context 'when direct upload is enabled' do + before do + stub_lfs_object_storage(enabled: true, direct_upload: true) + end + + it_behaves_like 'a valid response' do + it 'responds with status 200, location of lfs remote store and object details' do + expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path) + expect(json_response['RemoteObject']).to have_key('ID') + expect(json_response['RemoteObject']).to have_key('GetURL') + expect(json_response['RemoteObject']).to have_key('StoreURL') + expect(json_response['RemoteObject']).to have_key('DeleteURL') + expect(json_response['LfsOid']).to eq(sample_oid) + expect(json_response['LfsSize']).to eq(sample_size) + end + end + end + + context 'when direct upload is disabled' do + before do + stub_lfs_object_storage(enabled: true, direct_upload: false) + end + + it_behaves_like 'a local file' + end end end @@ -978,14 +1051,98 @@ describe 'Git LFS API and storage' do end end + context 'and workhorse requests upload finalize for a new lfs object' do + before do + lfs_object.destroy + end + + context 'with object storage disabled' do + it "doesn't attempt to migrate file to object storage" do + expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async) + + put_finalize(with_tempfile: true) + end + end + + context 'with object storage enabled' do + context 'and direct upload enabled' do + let!(:fog_connection) do + stub_lfs_object_storage(direct_upload: true) + end + + ['123123', '../../123123'].each do |remote_id| + context "with invalid remote_id: #{remote_id}" do + subject do + put_finalize(with_tempfile: true, args: { + 'file.remote_id' => remote_id + }) + end + + it 'responds with status 403' do + subject + + expect(response).to have_gitlab_http_status(403) + end + end + end + + context 'with valid remote_id' do + before do + fog_connection.directories.get('lfs-objects').files.create( + key: 'tmp/upload/12312300', + body: 'content' + ) + end + + subject do + put_finalize(with_tempfile: true, args: { + 'file.remote_id' => '12312300', + 'file.name' => 'name' + }) + end + + it 'responds with status 200' do + subject + + expect(response).to have_gitlab_http_status(200) + end + + it 'schedules migration of file to object storage' do + subject + + expect(LfsObject.last.projects).to include(project) + end + + it 'have valid file' do + subject + + expect(LfsObject.last.file_store).to eq(ObjectStorage::Store::REMOTE) + expect(LfsObject.last.file).to be_exists + end + end + end + + context 'and background upload enabled' do + before do + stub_lfs_object_storage(background_upload: true) + end + + it 'schedules migration of file to object storage' do + expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric)) + + put_finalize(with_tempfile: true) + end + end + end + end + context 'invalid tempfiles' do - it 'rejects slashes in the tempfile name (path 
traversal' do - put_finalize('foo/bar') - expect(response).to have_gitlab_http_status(403) + before do + lfs_object.destroy end - it 'rejects tempfile names that do not start with the oid' do - put_finalize("foo#{sample_oid}") + it 'rejects slashes in the tempfile name (path traversal)' do + put_finalize('../bar', with_tempfile: true) expect(response).to have_gitlab_http_status(403) end end @@ -1075,7 +1232,7 @@ describe 'Git LFS API and storage' do end it 'with location of lfs store and object details' do - expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path) + expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path) expect(json_response['LfsOid']).to eq(sample_oid) expect(json_response['LfsSize']).to eq(sample_size) end @@ -1177,9 +1334,25 @@ describe 'Git LFS API and storage' do put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers end - def put_finalize(lfs_tmp = lfs_tmp_file) - put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", nil, - headers.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp).compact + def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false, args: {}) + upload_path = LfsObjectUploader.workhorse_local_upload_path + file_path = upload_path + '/' + lfs_tmp if lfs_tmp + + if with_tempfile + FileUtils.mkdir_p(upload_path) + FileUtils.touch(file_path) + end + + extra_args = { + 'file.path' => file_path, + 'file.name' => File.basename(file_path) + } + + put_finalize_with_args(args.merge(extra_args).compact) + end + + def put_finalize_with_args(args) + put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", args, headers end def lfs_tmp_file diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb index eef860821e5..bcc3e3a2678 100644 --- a/spec/requests/projects/cycle_analytics_events_spec.rb +++ b/spec/requests/projects/cycle_analytics_events_spec.rb @@ -23,7 +23,7 @@ describe 'cycle analytics events' do it 'lists the issue events' do get project_cycle_analytics_issue_path(project, format: :json) - first_issue_iid = project.issues.sort(:created_desc).pluck(:iid).first.to_s + first_issue_iid = project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s expect(json_response['events']).not_to be_empty expect(json_response['events'].first['iid']).to eq(first_issue_iid) @@ -32,7 +32,7 @@ describe 'cycle analytics events' do it 'lists the plan events' do get project_cycle_analytics_plan_path(project, format: :json) - first_mr_short_sha = project.merge_requests.sort(:created_asc).first.commits.first.short_id + first_mr_short_sha = project.merge_requests.sort_by_attribute(:created_asc).first.commits.first.short_id expect(json_response['events']).not_to be_empty expect(json_response['events'].first['short_sha']).to eq(first_mr_short_sha) @@ -43,7 +43,7 @@ describe 'cycle analytics events' do expect(json_response['events']).not_to be_empty - first_mr_iid = project.merge_requests.sort(:created_desc).pluck(:iid).first.to_s + first_mr_iid = project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s expect(json_response['events'].first['iid']).to eq(first_mr_iid) end @@ -58,7 +58,7 @@ describe 'cycle analytics events' do it 'lists the review events' do get project_cycle_analytics_review_path(project, format: :json) - first_mr_iid = project.merge_requests.sort(:created_desc).pluck(:iid).first.to_s + first_mr_iid = 
project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s expect(json_response['events']).not_to be_empty expect(json_response['events'].first['iid']).to eq(first_mr_iid) @@ -74,7 +74,7 @@ describe 'cycle analytics events' do it 'lists the production events' do get project_cycle_analytics_production_path(project, format: :json) - first_issue_iid = project.issues.sort(:created_desc).pluck(:iid).first.to_s + first_issue_iid = project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s expect(json_response['events']).not_to be_empty expect(json_response['events'].first['iid']).to eq(first_issue_iid) diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb index fb1281a6b42..e1b4e618092 100644 --- a/spec/routing/project_routing_spec.rb +++ b/spec/routing/project_routing_spec.rb @@ -164,20 +164,36 @@ describe 'project routing' do # archive_project_repository GET /:project_id/repository/archive(.:format) projects/repositories#archive # edit_project_repository GET /:project_id/repository/edit(.:format) projects/repositories#edit describe Projects::RepositoriesController, 'routing' do - it 'to #archive' do - expect(get('/gitlab/gitlabhq/repository/master/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', ref: 'master') - end - it 'to #archive format:zip' do - expect(get('/gitlab/gitlabhq/repository/master/archive.zip')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'zip', ref: 'master') + expect(get('/gitlab/gitlabhq/-/archive/master/archive.zip')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'zip', id: 'master/archive') end it 'to #archive format:tar.bz2' do - expect(get('/gitlab/gitlabhq/repository/master/archive.tar.bz2')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'tar.bz2', ref: 'master') + expect(get('/gitlab/gitlabhq/-/archive/master/archive.tar.bz2')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'tar.bz2', id: 'master/archive') end it 'to #archive with "/" in route' do - expect(get('/gitlab/gitlabhq/repository/improve/awesome/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', ref: 'improve/awesome') + expect(get('/gitlab/gitlabhq/-/archive/improve/awesome/gitlabhq-improve-awesome.tar.gz')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'tar.gz', id: 'improve/awesome/gitlabhq-improve-awesome') + end + + it 'to #archive_alternative' do + expect(get('/gitlab/gitlabhq/repository/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', append_sha: true) + end + + it 'to #archive_deprecated' do + expect(get('/gitlab/gitlabhq/repository/master/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master', append_sha: true) + end + + it 'to #archive_deprecated format:zip' do + expect(get('/gitlab/gitlabhq/repository/master/archive.zip')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'zip', id: 'master', append_sha: true) + end + + it 'to #archive_deprecated format:tar.bz2' do + expect(get('/gitlab/gitlabhq/repository/master/archive.tar.bz2')).to route_to('projects/repositories#archive', namespace_id: 
'gitlab', project_id: 'gitlabhq', format: 'tar.bz2', id: 'master', append_sha: true) + end + + it 'to #archive_deprecated with "/" in route' do + expect(get('/gitlab/gitlabhq/repository/improve/awesome/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'improve/awesome', append_sha: true) end end diff --git a/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb b/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb new file mode 100644 index 00000000000..ac7b1575ec0 --- /dev/null +++ b/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb @@ -0,0 +1,74 @@ +require 'spec_helper' +require 'rubocop' +require 'rubocop/rspec/support' +require_relative '../../../rubocop/cop/avoid_break_from_strong_memoize' + +describe RuboCop::Cop::AvoidBreakFromStrongMemoize do + include CopHelper + + subject(:cop) { described_class.new } + + it 'flags violation for break inside strong_memoize' do + expect_offense(<<~RUBY) + strong_memoize(:result) do + break if something + ^^^^^ Do not use break inside strong_memoize, use next instead. + + do_an_heavy_calculation + end + RUBY + end + + it 'flags violation for break inside strong_memoize nested blocks' do + expect_offense(<<~RUBY) + strong_memoize do + items.each do |item| + break item + ^^^^^^^^^^ Do not use break inside strong_memoize, use next instead. + end + end + RUBY + end + + it "doesn't flag violation for next inside strong_memoize" do + expect_no_offenses(<<~RUBY) + strong_memoize(:result) do + next if something + + do_an_heavy_calculation + end + RUBY + end + + it "doesn't flag violation for break inside blocks" do + expect_no_offenses(<<~RUBY) + call do + break if something + + do_an_heavy_calculation + end + RUBY + end + + it "doesn't call add_offense twice for nested blocks" do + source = <<~RUBY + call do + strong_memoize(:result) do + break if something + + do_an_heavy_calculation + end + end + RUBY + expect_any_instance_of(described_class).to receive(:add_offense).once + + inspect_source(source) + end + + it "doesn't check when block is empty" do + expect_no_offenses(<<~RUBY) + strong_memoize(:result) do + end + RUBY + end +end diff --git a/spec/rubocop/cop/avoid_return_from_blocks_spec.rb b/spec/rubocop/cop/avoid_return_from_blocks_spec.rb new file mode 100644 index 00000000000..a5c280a7adc --- /dev/null +++ b/spec/rubocop/cop/avoid_return_from_blocks_spec.rb @@ -0,0 +1,127 @@ +require 'spec_helper' +require 'rubocop' +require 'rubocop/rspec/support' +require_relative '../../../rubocop/cop/avoid_return_from_blocks' + +describe RuboCop::Cop::AvoidReturnFromBlocks do + include CopHelper + + subject(:cop) { described_class.new } + + it 'flags violation for return inside a block' do + expect_offense(<<~RUBY) + call do + do_something + return if something_else + ^^^^^^ Do not return from a block, use next or break instead. + end + RUBY + end + + it "doesn't call add_offense twice for nested blocks" do + source = <<~RUBY + call do + call do + something + return if something_else + end + end + RUBY + expect_any_instance_of(described_class).to receive(:add_offense).once + + inspect_source(source) + end + + it 'flags violation for return inside included > def > block' do + expect_offense(<<~RUBY) + included do + def a_method + return if something + + call do + return if something_else + ^^^^^^ Do not return from a block, use next or break instead. 
+ end + end + end + RUBY + end + + shared_examples 'examples with whitelisted method' do |whitelisted_method| + it "doesn't flag violation for return inside #{whitelisted_method}" do + expect_no_offenses(<<~RUBY) + items.#{whitelisted_method} do |item| + do_something + return if something_else + end + RUBY + end + end + + %i[each each_filename times loop].each do |whitelisted_method| + it_behaves_like 'examples with whitelisted method', whitelisted_method + end + + shared_examples 'examples with def methods' do |def_method| + it "doesn't flag violation for return inside #{def_method}" do + expect_no_offenses(<<~RUBY) + helpers do + #{def_method} do + return if something + + do_something_more + end + end + RUBY + end + end + + %i[define_method lambda].each do |def_method| + it_behaves_like 'examples with def methods', def_method + end + + it "doesn't flag violation for return inside a lambda" do + expect_no_offenses(<<~RUBY) + lambda do + do_something + return if something_else + end + RUBY + end + + it "doesn't flag violation for return used inside a method definition" do + expect_no_offenses(<<~RUBY) + describe Klass do + def a_method + do_something + return if something_else + end + end + RUBY + end + + it "doesn't flag violation for next inside a block" do + expect_no_offenses(<<~RUBY) + call do + do_something + next if something_else + end + RUBY + end + + it "doesn't flag violation for break inside a block" do + expect_no_offenses(<<~RUBY) + call do + do_something + break if something_else + end + RUBY + end + + it "doesn't check when block is empty" do + expect_no_offenses(<<~RUBY) + call do + end + RUBY + end +end diff --git a/spec/rubocop/cop/gitlab/httparty_spec.rb b/spec/rubocop/cop/gitlab/httparty_spec.rb new file mode 100644 index 00000000000..510839a21d7 --- /dev/null +++ b/spec/rubocop/cop/gitlab/httparty_spec.rb @@ -0,0 +1,74 @@ +require 'spec_helper' +require 'rubocop' +require 'rubocop/rspec/support' +require_relative '../../../../rubocop/cop/gitlab/httparty' + +describe RuboCop::Cop::Gitlab::HTTParty do # rubocop:disable RSpec/FilePath + include CopHelper + + subject(:cop) { described_class.new } + + shared_examples('registering include offense') do |options| + let(:offending_lines) { options[:offending_lines] } + + it 'registers an offense when the class includes HTTParty' do + inspect_source(source) + + aggregate_failures do + expect(cop.offenses.size).to eq(offending_lines.size) + expect(cop.offenses.map(&:line)).to eq(offending_lines) + end + end + end + + shared_examples('registering call offense') do |options| + let(:offending_lines) { options[:offending_lines] } + + it 'registers an offense when the class calls HTTParty' do + inspect_source(source) + + aggregate_failures do + expect(cop.offenses.size).to eq(offending_lines.size) + expect(cop.offenses.map(&:line)).to eq(offending_lines) + end + end + end + + context 'when source is a regular module' do + it_behaves_like 'registering include offense', offending_lines: [2] do + let(:source) do + <<~RUBY + module M + include HTTParty + end + RUBY + end + end + end + + context 'when source is a regular class' do + it_behaves_like 'registering include offense', offending_lines: [2] do + let(:source) do + <<~RUBY + class Foo + include HTTParty + end + RUBY + end + end + end + + context 'when HTTParty is called' do + it_behaves_like 'registering call offense', offending_lines: [3] do + let(:source) do + <<~RUBY + class Foo + def bar + HTTParty.get('http://example.com') + end + end + RUBY + end + end + end +end diff --git 
a/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb b/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb new file mode 100644 index 00000000000..2763f2bda21 --- /dev/null +++ b/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb @@ -0,0 +1,48 @@ +require 'spec_helper' + +require 'rubocop' +require 'rubocop/rspec/support' + +require_relative '../../../../rubocop/cop/rspec/factories_in_migration_specs' + +describe RuboCop::Cop::RSpec::FactoriesInMigrationSpecs do + include CopHelper + + let(:source_file) { 'spec/migrations/foo_spec.rb' } + + subject(:cop) { described_class.new } + + shared_examples 'an offensive factory call' do |namespace| + %i[build build_list create create_list].each do |forbidden_method| + namespaced_forbidden_method = "#{namespace}#{forbidden_method}(:user)" + + it "registers an offense for #{namespaced_forbidden_method}" do + expect_offense(<<-RUBY) + describe 'foo' do + let(:user) { #{namespaced_forbidden_method} } + #{'^' * namespaced_forbidden_method.size} Don't use FactoryBot.#{forbidden_method} in migration specs, use `table` instead. + end + RUBY + end + end + end + + context 'in a migration spec file' do + before do + allow(cop).to receive(:in_migration_spec?).and_return(true) + end + + it_behaves_like 'an offensive factory call', '' + it_behaves_like 'an offensive factory call', 'FactoryBot.' + end + + context 'outside of a migration spec file' do + it "does not register an offense" do + expect_no_offenses(<<-RUBY) + describe 'foo' do + let(:user) { create(:user) } + end + RUBY + end + end +end diff --git a/spec/serializers/build_serializer_spec.rb b/spec/serializers/build_serializer_spec.rb index 9673b11c2a2..98cd15e248b 100644 --- a/spec/serializers/build_serializer_spec.rb +++ b/spec/serializers/build_serializer_spec.rb @@ -28,15 +28,31 @@ describe BuildSerializer do end describe '#represent_status' do - context 'when represents only status' do - let(:resource) { create(:ci_build) } + context 'for a failed build' do + let(:resource) { create(:ci_build, :failed) } + let(:status) { resource.detailed_status(double('user')) } + + subject { serializer.represent_status(resource) } + + it 'serializes only status' do + expect(subject[:text]).to eq(status.text) + expect(subject[:label]).to eq('failed') + expect(subject[:tooltip]).to eq('failed <br> (unknown failure)') + expect(subject[:icon]).to eq(status.icon) + expect(subject[:favicon]).to match_asset_path("/assets/ci_favicons/#{status.favicon}.ico") + end + end + + context 'for any other type of build' do + let(:resource) { create(:ci_build, :success) } let(:status) { resource.detailed_status(double('user')) } subject { serializer.represent_status(resource) } it 'serializes only status' do expect(subject[:text]).to eq(status.text) - expect(subject[:label]).to eq(status.label) + expect(subject[:label]).to eq('passed') + expect(subject[:tooltip]).to eq('passed') expect(subject[:icon]).to eq(status.icon) expect(subject[:favicon]).to match_asset_path("/assets/ci_favicons/#{status.favicon}.ico") end diff --git a/spec/serializers/discussion_entity_spec.rb b/spec/serializers/discussion_entity_spec.rb index 7ee8e38af1c..7e19e74ca00 100644 --- a/spec/serializers/discussion_entity_spec.rb +++ b/spec/serializers/discussion_entity_spec.rb @@ -6,7 +6,7 @@ describe DiscussionEntity do let(:user) { create(:user) } let(:note) { create(:discussion_note_on_merge_request) } let(:discussion) { note.discussion } - let(:request) { double('request') } + let(:request) { double('request', note_entity: 
ProjectNoteEntity) } let(:controller) { double('controller') } let(:entity) { described_class.new(discussion, request: request, context: controller) } diff --git a/spec/serializers/entity_date_helper_spec.rb b/spec/serializers/entity_date_helper_spec.rb index b9cc2f64831..36da8d33a44 100644 --- a/spec/serializers/entity_date_helper_spec.rb +++ b/spec/serializers/entity_date_helper_spec.rb @@ -32,6 +32,7 @@ describe EntityDateHelper do end it 'converts 86560 seconds' do + Rails.logger.debug date_helper_class.inspect expect(date_helper_class.distance_of_time_as_hash(86560)).to eq(days: 1, mins: 2, seconds: 40) end @@ -42,4 +43,58 @@ describe EntityDateHelper do it 'converts 986760 seconds' do expect(date_helper_class.distance_of_time_as_hash(986760)).to eq(days: 11, hours: 10, mins: 6) end + + describe '#remaining_days_in_words' do + around do |example| + Timecop.freeze(Time.utc(2017, 3, 17)) { example.run } + end + + context 'when less than 31 days remaining' do + let(:milestone_remaining) { date_helper_class.remaining_days_in_words(build_stubbed(:milestone, due_date: 12.days.from_now.utc)) } + + it 'returns days remaining' do + expect(milestone_remaining).to eq("<strong>12</strong> days remaining") + end + end + + context 'when less than 1 year and more than 30 days remaining' do + let(:milestone_remaining) { date_helper_class.remaining_days_in_words(build_stubbed(:milestone, due_date: 2.months.from_now.utc)) } + + it 'returns months remaining' do + expect(milestone_remaining).to eq("<strong>2</strong> months remaining") + end + end + + context 'when more than 1 year remaining' do + let(:milestone_remaining) { date_helper_class.remaining_days_in_words(build_stubbed(:milestone, due_date: (1.year.from_now + 2.days).utc)) } + + it 'returns years remaining' do + expect(milestone_remaining).to eq("<strong>1</strong> year remaining") + end + end + + context 'when milestone is expired' do + let(:milestone_remaining) { date_helper_class.remaining_days_in_words(build_stubbed(:milestone, due_date: 2.days.ago.utc)) } + + it 'returns "Past due"' do + expect(milestone_remaining).to eq("<strong>Past due</strong>") + end + end + + context 'when milestone has start_date in the future' do + let(:milestone_remaining) { date_helper_class.remaining_days_in_words(build_stubbed(:milestone, start_date: 2.days.from_now.utc)) } + + it 'returns "Upcoming"' do + expect(milestone_remaining).to eq("<strong>Upcoming</strong>") + end + end + + context 'when milestone has start_date in the past' do + let(:milestone_remaining) { date_helper_class.remaining_days_in_words(build_stubbed(:milestone, start_date: 2.days.ago.utc)) } + + it 'returns days elapsed' do + expect(milestone_remaining).to eq("<strong>2</strong> days elapsed") + end + end + end end diff --git a/spec/serializers/job_entity_spec.rb b/spec/serializers/job_entity_spec.rb index 026360e91a3..c90396ebb28 100644 --- a/spec/serializers/job_entity_spec.rb +++ b/spec/serializers/job_entity_spec.rb @@ -38,7 +38,7 @@ describe JobEntity do it 'contains details' do expect(subject).to include :status - expect(subject[:status]).to include :icon, :favicon, :text, :label + expect(subject[:status]).to include :icon, :favicon, :text, :label, :tooltip end context 'when job is retryable' do @@ -126,7 +126,72 @@ describe JobEntity do it 'contains details' do expect(subject).to include :status - expect(subject[:status]).to include :icon, :favicon, :text, :label + expect(subject[:status]).to include :icon, :favicon, :text, :label, :tooltip + end + end + + context 'when job 
failed' do + let(:job) { create(:ci_build, :script_failure) } + + it 'contains details' do + expect(subject[:status]).to include :icon, :favicon, :text, :label, :tooltip + end + + it 'states that it failed' do + expect(subject[:status][:label]).to eq('failed') + end + + it 'should indicate the failure reason on tooltip' do + expect(subject[:status][:tooltip]).to eq('failed <br> (script failure)') + end + + it 'should include a callout message with a verbose output' do + expect(subject[:callout_message]).to eq('There has been a script failure. Check the job log for more information') + end + + it 'should state that it is not recoverable' do + expect(subject[:recoverable]).to be_falsy + end + end + + context 'when job is allowed to fail' do + let(:job) { create(:ci_build, :allowed_to_fail, :script_failure) } + + it 'contains details' do + expect(subject[:status]).to include :icon, :favicon, :text, :label, :tooltip + end + + it 'states that it failed' do + expect(subject[:status][:label]).to eq('failed (allowed to fail)') + end + + it 'should indicate the failure reason on tooltip' do + expect(subject[:status][:tooltip]).to eq('failed <br> (script failure) (allowed to fail)') + end + + it 'should include a callout message with a verbose output' do + expect(subject[:callout_message]).to eq('There has been a script failure. Check the job log for more information') + end + + it 'should state that it is not recoverable' do + expect(subject[:recoverable]).to be_falsy + end + end + + context 'when job failed and is recoverable' do + let(:job) { create(:ci_build, :api_failure) } + + it 'should state it is recoverable' do + expect(subject[:recoverable]).to be_truthy + end + end + + context 'when job passed' do + let(:job) { create(:ci_build, :success) } + + it 'should not include callout message or recoverable keys' do + expect(subject).not_to include('callout_message') + expect(subject).not_to include('recoverable') end end end diff --git a/spec/serializers/note_entity_spec.rb b/spec/serializers/note_entity_spec.rb index 51a8587ace9..13cda781cda 100644 --- a/spec/serializers/note_entity_spec.rb +++ b/spec/serializers/note_entity_spec.rb @@ -10,53 +10,5 @@ describe NoteEntity do let(:user) { create(:user) } subject { entity.as_json } - context 'basic note' do - it 'exposes correct elements' do - expect(subject).to include(:type, :author, :human_access, :note, :note_html, :current_user, - :discussion_id, :emoji_awardable, :award_emoji, :toggle_award_path, :report_abuse_path, :path, :attachment) - end - - it 'does not expose elements for specific notes cases' do - expect(subject).not_to include(:last_edited_by, :last_edited_at, :system_note_icon_name) - end - - it 'exposes author correctly' do - expect(subject[:author]).to include(:id, :name, :username, :state, :avatar_url, :path) - end - - it 'does not expose web_url for author' do - expect(subject[:author]).not_to include(:web_url) - end - end - - context 'when note was edited' do - before do - note.update(updated_at: 1.minute.from_now, updated_by: user) - end - - it 'exposes last_edited_at and last_edited_by elements' do - expect(subject).to include(:last_edited_at, :last_edited_by) - end - end - - context 'when note is a system note' do - before do - note.update(system: true) - end - - it 'exposes system_note_icon_name element' do - expect(subject).to include(:system_note_icon_name) - end - end - - context 'when note is part of resolvable discussion' do - before do - allow(note).to receive(:part_of_discussion?).and_return(true) - allow(note).to 
receive(:resolvable?).and_return(true) - end - - it 'exposes paths to resolve note' do - expect(subject).to include(:resolve_path, :resolve_with_issue_path) - end - end + it_behaves_like 'note entity' end diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb index 248552d1858..2473c561f4b 100644 --- a/spec/serializers/pipeline_entity_spec.rb +++ b/spec/serializers/pipeline_entity_spec.rb @@ -30,7 +30,7 @@ describe PipelineEntity do expect(subject).to include :details expect(subject[:details]) .to include :duration, :finished_at - expect(subject[:details][:status]).to include :icon, :favicon, :text, :label + expect(subject[:details][:status]).to include :icon, :favicon, :text, :label, :tooltip end it 'contains flags' do diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb index c38795ad1a1..f51c11b141f 100644 --- a/spec/serializers/pipeline_serializer_spec.rb +++ b/spec/serializers/pipeline_serializer_spec.rb @@ -117,6 +117,7 @@ describe PipelineSerializer do shared_examples 'no N+1 queries' do it 'verifies number of queries', :request_store do recorded = ActiveRecord::QueryRecorder.new { subject } + expect(recorded.count).to be_within(1).of(36) expect(recorded.cached_count).to eq(0) end diff --git a/spec/serializers/project_note_entity_spec.rb b/spec/serializers/project_note_entity_spec.rb new file mode 100644 index 00000000000..dafd1cf603e --- /dev/null +++ b/spec/serializers/project_note_entity_spec.rb @@ -0,0 +1,29 @@ +require 'spec_helper' + +describe ProjectNoteEntity do + include Gitlab::Routing + + let(:request) { double('request', current_user: user, noteable: note.noteable) } + + let(:entity) { described_class.new(note, request: request) } + let(:note) { create(:note) } + let(:user) { create(:user) } + subject { entity.as_json } + + it_behaves_like 'note entity' + + it 'exposes project-specific elements' do + expect(subject).to include(:human_access, :toggle_award_path, :path) + end + + context 'when note is part of resolvable discussion' do + before do + allow(note).to receive(:part_of_discussion?).and_return(true) + allow(note).to receive(:resolvable?).and_return(true) + end + + it 'exposes paths to resolve note' do + expect(subject).to include(:resolve_path, :resolve_with_issue_path) + end + end +end diff --git a/spec/serializers/stage_entity_spec.rb b/spec/serializers/stage_entity_spec.rb index 40e303f7b89..2034c7891ef 100644 --- a/spec/serializers/stage_entity_spec.rb +++ b/spec/serializers/stage_entity_spec.rb @@ -26,7 +26,7 @@ describe StageEntity do end it 'contains detailed status' do - expect(subject[:status]).to include :text, :label, :group, :icon + expect(subject[:status]).to include :text, :label, :group, :icon, :tooltip expect(subject[:status][:label]).to eq 'passed' end diff --git a/spec/serializers/status_entity_spec.rb b/spec/serializers/status_entity_spec.rb index 16431ed4188..559475e571c 100644 --- a/spec/serializers/status_entity_spec.rb +++ b/spec/serializers/status_entity_spec.rb @@ -16,7 +16,7 @@ describe StatusEntity do subject { entity.as_json } it 'contains status details' do - expect(subject).to include :text, :icon, :favicon, :label, :group + expect(subject).to include :text, :icon, :favicon, :label, :group, :tooltip expect(subject).to include :has_details, :details_path expect(subject[:favicon]).to match_asset_path('/assets/ci_favicons/favicon_status_success.ico') end @@ -25,5 +25,10 @@ describe StatusEntity do allow(Rails.env).to receive(:development?) 
{ true } expect(entity.as_json[:favicon]).to match_asset_path('/assets/ci_favicons/dev/favicon_status_success.ico') end + + it 'contains a canary namespaced favicon if canary env' do + stub_env('CANARY', 'true') + expect(entity.as_json[:favicon]).to match_asset_path('/assets/ci_favicons/canary/favicon_status_success.ico') + end end end diff --git a/spec/services/auth/container_registry_authentication_service_spec.rb b/spec/services/auth/container_registry_authentication_service_spec.rb index 290eeae828e..da8e660c16b 100644 --- a/spec/services/auth/container_registry_authentication_service_spec.rb +++ b/spec/services/auth/container_registry_authentication_service_spec.rb @@ -585,4 +585,140 @@ describe Auth::ContainerRegistryAuthenticationService do it_behaves_like 'not a container repository factory' end end + + context 'for deploy tokens' do + let(:current_params) do + { scope: "repository:#{project.full_path}:pull" } + end + + context 'when deploy token has read_registry as a scope' do + let(:current_user) { create(:deploy_token, projects: [project]) } + + context 'for public project' do + let(:project) { create(:project, :public) } + + context 'when pulling' do + it_behaves_like 'a pullable' + end + + context 'when pushing' do + let(:current_params) do + { scope: "repository:#{project.full_path}:push" } + end + + it_behaves_like 'an inaccessible' + end + end + + context 'for internal project' do + let(:project) { create(:project, :internal) } + + context 'when pulling' do + it_behaves_like 'a pullable' + end + + context 'when pushing' do + let(:current_params) do + { scope: "repository:#{project.full_path}:push" } + end + + it_behaves_like 'an inaccessible' + end + end + + context 'for private project' do + let(:project) { create(:project, :private) } + + context 'when pulling' do + it_behaves_like 'a pullable' + end + + context 'when pushing' do + let(:current_params) do + { scope: "repository:#{project.full_path}:push" } + end + + it_behaves_like 'an inaccessible' + end + end + end + + context 'when deploy token does not have read_registry scope' do + let(:current_user) { create(:deploy_token, projects: [project], read_registry: false) } + + context 'for public project' do + let(:project) { create(:project, :public) } + + context 'when pulling' do + it_behaves_like 'a pullable' + end + end + + context 'for internal project' do + let(:project) { create(:project, :internal) } + + context 'when pulling' do + it_behaves_like 'an inaccessible' + end + end + + context 'for private project' do + let(:project) { create(:project, :internal) } + + context 'when pulling' do + it_behaves_like 'an inaccessible' + end + end + end + + context 'when deploy token is not related to the project' do + let(:current_user) { create(:deploy_token, read_registry: false) } + + context 'for public project' do + let(:project) { create(:project, :public) } + + context 'when pulling' do + it_behaves_like 'a pullable' + end + end + + context 'for internal project' do + let(:project) { create(:project, :internal) } + + context 'when pulling' do + it_behaves_like 'an inaccessible' + end + end + + context 'for private project' do + let(:project) { create(:project, :internal) } + + context 'when pulling' do + it_behaves_like 'an inaccessible' + end + end + end + + context 'when deploy token has been revoked' do + let(:current_user) { create(:deploy_token, :revoked, projects: [project]) } + + context 'for public project' do + let(:project) { create(:project, :public) } + + it_behaves_like 'a pullable' + end + + context 
'for internal project' do + let(:project) { create(:project, :internal) } + + it_behaves_like 'an inaccessible' + end + + context 'for private project' do + let(:project) { create(:project, :internal) } + + it_behaves_like 'an inaccessible' + end + end + end end diff --git a/spec/services/boards/issues/list_service_spec.rb b/spec/services/boards/issues/list_service_spec.rb index b4efa3e44b6..27a7bf0e605 100644 --- a/spec/services/boards/issues/list_service_spec.rb +++ b/spec/services/boards/issues/list_service_spec.rb @@ -48,10 +48,8 @@ describe Boards::Issues::ListService do context 'when parent is a group' do let(:user) { create(:user) } - let(:group) { create(:group) } let(:project) { create(:project, :empty_repo, namespace: group) } let(:project1) { create(:project, :empty_repo, namespace: group) } - let(:board) { create(:board, group: group) } let(:m1) { create(:milestone, group: group) } let(:m2) { create(:milestone, group: group) } @@ -92,13 +90,30 @@ describe Boards::Issues::ListService do let!(:closed_issue4) { create(:labeled_issue, :closed, project: project1, labels: [p1, p1_project1]) } let!(:closed_issue5) { create(:labeled_issue, :closed, project: project1, labels: [development]) } - let(:parent) { group } - before do group.add_developer(user) end - it_behaves_like 'issues list service' + context 'and group has no parent' do + let(:parent) { group } + let(:group) { create(:group) } + let(:board) { create(:board, group: group) } + + it_behaves_like 'issues list service' + end + + context 'and group is an ancestor', :nested_groups do + let(:parent) { create(:group) } + let(:group) { create(:group, parent: parent) } + let!(:backlog) { create(:backlog_list, board: board) } + let(:board) { create(:board, group: parent) } + + before do + parent.add_developer(user) + end + + it_behaves_like 'issues list service' + end end end end diff --git a/spec/services/boards/issues/move_service_spec.rb b/spec/services/boards/issues/move_service_spec.rb index 0a6b6d880d3..dd0ad5f11bd 100644 --- a/spec/services/boards/issues/move_service_spec.rb +++ b/spec/services/boards/issues/move_service_spec.rb @@ -48,7 +48,7 @@ describe Boards::Issues::MoveService do parent.add_developer(user) end - it_behaves_like 'issues move service' + it_behaves_like 'issues move service', true end end end diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb index 0ce41e7c7ee..feb5120bc68 100644 --- a/spec/services/ci/process_pipeline_service_spec.rb +++ b/spec/services/ci/process_pipeline_service_spec.rb @@ -9,6 +9,8 @@ describe Ci::ProcessPipelineService, '#execute' do end before do + stub_ci_pipeline_to_return_yaml_file + stub_not_protect_default_branch project.add_developer(user) diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb index 97a563c1ce1..8a537e83d5f 100644 --- a/spec/services/ci/register_job_service_spec.rb +++ b/spec/services/ci/register_job_service_spec.rb @@ -370,10 +370,111 @@ module Ci it_behaves_like 'validation is not active' end end + end + + describe '#register_success' do + let!(:current_time) { Time.new(2018, 4, 5, 14, 0, 0) } + let!(:attempt_counter) { double('Gitlab::Metrics::NullMetric') } + let!(:job_queue_duration_seconds) { double('Gitlab::Metrics::NullMetric') } + + before do + allow(Time).to receive(:now).and_return(current_time) + + # Stub defaults for any metrics other than the ones we're testing + allow(Gitlab::Metrics).to receive(:counter) + .with(any_args) + 
.and_return(Gitlab::Metrics::NullMetric.instance) + allow(Gitlab::Metrics).to receive(:histogram) + .with(any_args) + .and_return(Gitlab::Metrics::NullMetric.instance) + + # Stub tested metrics + allow(Gitlab::Metrics).to receive(:counter) + .with(:job_register_attempts_total, anything) + .and_return(attempt_counter) + allow(Gitlab::Metrics).to receive(:histogram) + .with(:job_queue_duration_seconds, anything, anything, anything) + .and_return(job_queue_duration_seconds) + + project.update(shared_runners_enabled: true) + pending_job.update(created_at: current_time - 3600, queued_at: current_time - 1800) + end + + shared_examples 'attempt counter collector' do + it 'increments attempt counter' do + allow(job_queue_duration_seconds).to receive(:observe) + expect(attempt_counter).to receive(:increment) + + execute(runner) + end + end + + shared_examples 'jobs queueing time histogram collector' do + it 'counts job queuing time histogram with expected labels' do + allow(attempt_counter).to receive(:increment) + expect(job_queue_duration_seconds).to receive(:observe) + .with({ shared_runner: expected_shared_runner, + jobs_running_for_project: expected_jobs_running_for_project_first_job }, 1800) + + execute(runner) + end + + context 'when project already has running jobs' do + let!(:build2) { create( :ci_build, :running, pipeline: pipeline, runner: shared_runner) } + let!(:build3) { create( :ci_build, :running, pipeline: pipeline, runner: shared_runner) } + + it 'counts job queuing time histogram with expected labels' do + allow(attempt_counter).to receive(:increment) + expect(job_queue_duration_seconds).to receive(:observe) + .with({ shared_runner: expected_shared_runner, + jobs_running_for_project: expected_jobs_running_for_project_third_job }, 1800) + + execute(runner) + end + end + end - def execute(runner) - described_class.new(runner).execute.build + shared_examples 'metrics collector' do + it_behaves_like 'attempt counter collector' + it_behaves_like 'jobs queueing time histogram collector' end + + context 'when shared runner is used' do + let(:runner) { shared_runner } + let(:expected_shared_runner) { true } + let(:expected_jobs_running_for_project_first_job) { 0 } + let(:expected_jobs_running_for_project_third_job) { 2 } + + it_behaves_like 'metrics collector' + + context 'when pending job with queued_at=nil is used' do + before do + pending_job.update(queued_at: nil) + end + + it_behaves_like 'attempt counter collector' + + it "doesn't count job queuing time histogram" do + allow(attempt_counter).to receive(:increment) + expect(job_queue_duration_seconds).not_to receive(:observe) + + execute(runner) + end + end + end + + context 'when specific runner is used' do + let(:runner) { specific_runner } + let(:expected_shared_runner) { false } + let(:expected_jobs_running_for_project_first_job) { '+Inf' } + let(:expected_jobs_running_for_project_third_job) { '+Inf' } + + it_behaves_like 'metrics collector' + end + end + + def execute(runner) + described_class.new(runner).execute.build end end end diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb index db9c216d3f4..8de0bdf92e2 100644 --- a/spec/services/ci/retry_build_service_spec.rb +++ b/spec/services/ci/retry_build_service_spec.rb @@ -28,7 +28,9 @@ describe Ci::RetryBuildService do %i[type lock_version target_url base_tags trace_sections commit_id deployments erased_by_id last_deployment project_id runner_id tag_taggings taggings tags trigger_request_id - user_id auto_canceled_by_id 
retried failure_reason].freeze + user_id auto_canceled_by_id retried failure_reason + artifacts_file_store artifacts_metadata_store + metadata].freeze shared_examples 'build duplication' do let(:another_pipeline) { create(:ci_empty_pipeline, project: project) } diff --git a/spec/services/clusters/applications/install_service_spec.rb b/spec/services/clusters/applications/install_service_spec.rb index ad175226e92..93199964a0e 100644 --- a/spec/services/clusters/applications/install_service_spec.rb +++ b/spec/services/clusters/applications/install_service_spec.rb @@ -34,7 +34,7 @@ describe Clusters::Applications::InstallService do context 'when k8s cluster communication fails' do before do - error = KubeException.new(500, 'system failure', nil) + error = Kubeclient::HttpError.new(500, 'system failure', nil) expect(helm_client).to receive(:install).with(install_command).and_raise(error) end diff --git a/spec/services/clusters/create_service_spec.rb b/spec/services/clusters/create_service_spec.rb index e2e64659dfa..1c2f9c5cf43 100644 --- a/spec/services/clusters/create_service_spec.rb +++ b/spec/services/clusters/create_service_spec.rb @@ -82,7 +82,7 @@ describe Clusters::CreateService do context 'when project has a cluster' do include_context 'valid params' - let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) } + let!(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, projects: [project]) } it 'does not create a cluster' do expect(ClusterProvisionWorker).not_to receive(:perform_async) diff --git a/spec/services/deploy_tokens/create_service_spec.rb b/spec/services/deploy_tokens/create_service_spec.rb new file mode 100644 index 00000000000..3a2bbf1ecd1 --- /dev/null +++ b/spec/services/deploy_tokens/create_service_spec.rb @@ -0,0 +1,45 @@ +require 'spec_helper' + +describe DeployTokens::CreateService do + let(:project) { create(:project) } + let(:user) { create(:user) } + let(:deploy_token_params) { attributes_for(:deploy_token) } + + describe '#execute' do + subject { described_class.new(project, user, deploy_token_params).execute } + + context 'when the deploy token is valid' do + it 'should create a new DeployToken' do + expect { subject }.to change { DeployToken.count }.by(1) + end + + it 'should create a new ProjectDeployToken' do + expect { subject }.to change { ProjectDeployToken.count }.by(1) + end + + it 'returns a DeployToken' do + expect(subject).to be_an_instance_of DeployToken + end + end + + context 'when expires at date is not passed' do + let(:deploy_token_params) { attributes_for(:deploy_token, expires_at: '') } + + it 'should set Forever.date' do + expect(subject.read_attribute(:expires_at)).to eq(Forever.date) + end + end + + context 'when the deploy token is invalid' do + let(:deploy_token_params) { attributes_for(:deploy_token, read_repository: false, read_registry: false) } + + it 'should not create a new DeployToken' do + expect { subject }.not_to change { DeployToken.count } + end + + it 'should not create a new ProjectDeployToken' do + expect { subject }.not_to change { ProjectDeployToken.count } + end + end + end +end diff --git a/spec/services/events/render_service_spec.rb b/spec/services/events/render_service_spec.rb index b4a4a44d07b..075cb45e46c 100644 --- a/spec/services/events/render_service_spec.rb +++ b/spec/services/events/render_service_spec.rb @@ -9,9 +9,7 @@ describe Events::RenderService do context 'when the request format is atom' do it 'renders the note inside events' do expect(Banzai::ObjectRenderer).to 
receive(:new) - .with(event.project, user, - only_path: false, - xhtml: true) + .with(user: user, redaction_context: { only_path: false, xhtml: true }) .and_call_original expect_any_instance_of(Banzai::ObjectRenderer) @@ -24,7 +22,7 @@ describe Events::RenderService do context 'when the request format is not atom' do it 'renders the note inside events' do expect(Banzai::ObjectRenderer).to receive(:new) - .with(event.project, user, {}) + .with(user: user, redaction_context: {}) .and_call_original expect_any_instance_of(Banzai::ObjectRenderer) diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb index e1c873f8c1e..999677cfaaa 100644 --- a/spec/services/groups/transfer_service_spec.rb +++ b/spec/services/groups/transfer_service_spec.rb @@ -222,8 +222,8 @@ describe Groups::TransferService, :postgresql do expect(new_parent_group.children.first).to eq(group) end - it 'should create a permanent redirect for the group' do - expect(group.redirect_routes.permanent.count).to eq(1) + it 'should create a redirect for the group' do + expect(group.redirect_routes.count).to eq(1) end end @@ -243,10 +243,10 @@ describe Groups::TransferService, :postgresql do end end - it 'should create permanent redirects for the subgroups' do - expect(group.redirect_routes.permanent.count).to eq(1) - expect(subgroup1.redirect_routes.permanent.count).to eq(1) - expect(subgroup2.redirect_routes.permanent.count).to eq(1) + it 'should create redirects for the subgroups' do + expect(group.redirect_routes.count).to eq(1) + expect(subgroup1.redirect_routes.count).to eq(1) + expect(subgroup2.redirect_routes.count).to eq(1) end context 'when the new parent has a higher visibility than the children' do @@ -287,9 +287,9 @@ describe Groups::TransferService, :postgresql do end it 'should create permanent redirects for the projects' do - expect(group.redirect_routes.permanent.count).to eq(1) - expect(project1.redirect_routes.permanent.count).to eq(1) - expect(project2.redirect_routes.permanent.count).to eq(1) + expect(group.redirect_routes.count).to eq(1) + expect(project1.redirect_routes.count).to eq(1) + expect(project2.redirect_routes.count).to eq(1) end context 'when the new parent has a higher visibility than the projects' do @@ -338,12 +338,12 @@ describe Groups::TransferService, :postgresql do end end - it 'should create permanent redirect for the subgroups and projects' do - expect(group.redirect_routes.permanent.count).to eq(1) - expect(subgroup1.redirect_routes.permanent.count).to eq(1) - expect(subgroup2.redirect_routes.permanent.count).to eq(1) - expect(project1.redirect_routes.permanent.count).to eq(1) - expect(project2.redirect_routes.permanent.count).to eq(1) + it 'should create redirect for the subgroups and projects' do + expect(group.redirect_routes.count).to eq(1) + expect(subgroup1.redirect_routes.count).to eq(1) + expect(subgroup2.redirect_routes.count).to eq(1) + expect(project1.redirect_routes.count).to eq(1) + expect(project2.redirect_routes.count).to eq(1) end end @@ -380,12 +380,12 @@ describe Groups::TransferService, :postgresql do end end - it 'should create permanent redirect for the subgroups and projects' do - expect(group.redirect_routes.permanent.count).to eq(1) - expect(project1.redirect_routes.permanent.count).to eq(1) - expect(subgroup1.redirect_routes.permanent.count).to eq(1) - expect(nested_subgroup.redirect_routes.permanent.count).to eq(1) - expect(nested_project.redirect_routes.permanent.count).to eq(1) + it 'should create redirect for the 
subgroups and projects' do + expect(group.redirect_routes.count).to eq(1) + expect(project1.redirect_routes.count).to eq(1) + expect(subgroup1.redirect_routes.count).to eq(1) + expect(nested_subgroup.redirect_routes.count).to eq(1) + expect(nested_project.redirect_routes.count).to eq(1) end end diff --git a/spec/services/issuable/destroy_service_spec.rb b/spec/services/issuable/destroy_service_spec.rb index 0a3647a814f..8ccbba7fa58 100644 --- a/spec/services/issuable/destroy_service_spec.rb +++ b/spec/services/issuable/destroy_service_spec.rb @@ -8,7 +8,7 @@ describe Issuable::DestroyService do describe '#execute' do context 'when issuable is an issue' do - let!(:issue) { create(:issue, project: project, author: user) } + let!(:issue) { create(:issue, project: project, author: user, assignees: [user]) } it 'destroys the issue' do expect { service.execute(issue) }.to change { project.issues.count }.by(-1) @@ -26,10 +26,15 @@ describe Issuable::DestroyService do expect { service.execute(issue) } .to change { user.todos_pending_count }.from(1).to(0) end + + it 'invalidates the issues count cache for the assignees' do + expect_any_instance_of(User).to receive(:invalidate_cache_counts).once + service.execute(issue) + end end context 'when issuable is a merge request' do - let!(:merge_request) { create(:merge_request, target_project: project, source_project: project, author: user) } + let!(:merge_request) { create(:merge_request, target_project: project, source_project: project, author: user, assignee: user) } it 'destroys the merge request' do expect { service.execute(merge_request) }.to change { project.merge_requests.count }.by(-1) @@ -41,6 +46,11 @@ describe Issuable::DestroyService do service.execute(merge_request) end + it 'invalidates the merge request caches for the MR assignee' do + expect_any_instance_of(User).to receive(:invalidate_cache_counts).once + service.execute(merge_request) + end + it 'updates the todo caches for users with todos on the merge request' do create(:todo, target: merge_request, user: user, author: user, project: project) diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb index 47c1ebbeb81..7ae49c06896 100644 --- a/spec/services/issues/close_service_spec.rb +++ b/spec/services/issues/close_service_spec.rb @@ -67,6 +67,10 @@ describe Issues::CloseService do expect(issue).to be_closed end + it 'records closed user' do + expect(issue.closed_by_id).to be(user.id) + end + it 'sends email to user2 about assign of new issue' do email = ActionMailer::Base.deliveries.last expect(email.to.first).to eq(user2.email) diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb index c148a98569b..a9aee9e100f 100644 --- a/spec/services/issues/move_service_spec.rb +++ b/spec/services/issues/move_service_spec.rb @@ -6,7 +6,7 @@ describe Issues::MoveService do let(:title) { 'Some issue' } let(:description) { 'Some issue description' } let(:old_project) { create(:project) } - let(:new_project) { create(:project, group: create(:group)) } + let(:new_project) { create(:project) } let(:milestone1) { create(:milestone, project_id: old_project.id, title: 'v9.0') } let(:old_issue) do diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb index 41237dd7160..23b1134b5a3 100644 --- a/spec/services/issues/update_service_spec.rb +++ b/spec/services/issues/update_service_spec.rb @@ -97,6 +97,39 @@ describe Issues::UpdateService, :mailer do 
expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position) end + context 'when moving issue between issues from different projects', :nested_groups do + let(:group) { create(:group) } + let(:subgroup) { create(:group, parent: group) } + + let(:project_1) { create(:project, namespace: group) } + let(:project_2) { create(:project, namespace: group) } + let(:project_3) { create(:project, namespace: subgroup) } + + let(:issue_1) { create(:issue, project: project_1) } + let(:issue_2) { create(:issue, project: project_2) } + let(:issue_3) { create(:issue, project: project_3) } + + before do + group.add_developer(user) + end + + it 'sorts issues as specified by parameters' do + # Moving all issues to end here like the last example won't work since + # all projects only have the same issue count + # so their relative_position will be the same. + issue_1.move_to_end + issue_2.move_after(issue_1) + issue_3.move_after(issue_2) + [issue_1, issue_2, issue_3].map(&:save) + + opts[:move_between_ids] = [issue_1.id, issue_2.id] + opts[:board_group_id] = group.id + + described_class.new(issue_3.project, user, opts).execute(issue_3) + expect(issue_3.relative_position).to be_between(issue_1.relative_position, issue_2.relative_position) + end + end + context 'when current user cannot admin issues in the project' do let(:guest) { create(:user) } before do diff --git a/spec/services/labels/transfer_service_spec.rb b/spec/services/labels/transfer_service_spec.rb index ae819c011de..80bac590a11 100644 --- a/spec/services/labels/transfer_service_spec.rb +++ b/spec/services/labels/transfer_service_spec.rb @@ -8,6 +8,7 @@ describe Labels::TransferService do let(:group_3) { create(:group) } let(:project_1) { create(:project, namespace: group_2) } let(:project_2) { create(:project, namespace: group_3) } + let(:project_3) { create(:project, namespace: group_1) } let(:group_label_1) { create(:group_label, group: group_1, name: 'Group Label 1') } let(:group_label_2) { create(:group_label, group: group_1, name: 'Group Label 2') } @@ -23,6 +24,7 @@ describe Labels::TransferService do create(:labeled_issue, project: project_1, labels: [group_label_4]) create(:labeled_issue, project: project_1, labels: [project_label_1]) create(:labeled_issue, project: project_2, labels: [group_label_5]) + create(:labeled_issue, project: project_3, labels: [group_label_1]) create(:labeled_merge_request, source_project: project_1, labels: [group_label_1, group_label_2]) create(:labeled_merge_request, source_project: project_2, labels: [group_label_5]) end @@ -52,5 +54,13 @@ describe Labels::TransferService do expect(project_1.labels.where(title: group_label_4.title)).to be_empty end + + it 'updates only label links in the given project' do + service.execute + + targets = LabelLink.where(label_id: group_label_1.id).map(&:target) + + expect(targets).to eq(project_3.issues) + end end end diff --git a/spec/services/merge_requests/conflicts/list_service_spec.rb b/spec/services/merge_requests/conflicts/list_service_spec.rb index 6cadcd438c3..837b8a56d12 100644 --- a/spec/services/merge_requests/conflicts/list_service_spec.rb +++ b/spec/services/merge_requests/conflicts/list_service_spec.rb @@ -77,6 +77,14 @@ describe MergeRequests::Conflicts::ListService do expect(service.can_be_resolved_in_ui?).to be_falsey end + it 'returns a falsey value when the MR has a missing revision after a force push' do + merge_request = create_merge_request('conflict-resolvable') + service = conflicts_service(merge_request) + 
allow(merge_request).to receive_message_chain(:target_branch_head, :raw, :id).and_return(Gitlab::Git::BLANK_SHA) + + expect(service.can_be_resolved_in_ui?).to be_falsey + end + context 'with gitaly disabled', :skip_gitaly_mock do it 'returns a falsey value when the MR has a missing ref after a force push' do merge_request = create_merge_request('conflict-resolvable') @@ -85,6 +93,14 @@ describe MergeRequests::Conflicts::ListService do expect(service.can_be_resolved_in_ui?).to be_falsey end + + it 'returns a falsey value when the MR has a missing revision after a force push' do + merge_request = create_merge_request('conflict-resolvable') + service = conflicts_service(merge_request) + allow(merge_request).to receive_message_chain(:target_branch_head, :raw, :id).and_return(Gitlab::Git::BLANK_SHA) + + expect(service.can_be_resolved_in_ui?).to be_falsey + end end end end diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb index 44a83c436cb..736a50b2c15 100644 --- a/spec/services/merge_requests/create_service_spec.rb +++ b/spec/services/merge_requests/create_service_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' describe MergeRequests::CreateService do + include ProjectForksHelper + let(:project) { create(:project, :repository) } let(:user) { create(:user) } let(:assignee) { create(:user) } @@ -300,7 +302,7 @@ describe MergeRequests::CreateService do end context 'when source and target projects are different' do - let(:target_project) { create(:project) } + let(:target_project) { fork_project(project, nil, repository: true) } let(:opts) do { @@ -334,6 +336,26 @@ describe MergeRequests::CreateService do .to raise_error Gitlab::Access::AccessDeniedError end end + + context 'when the user has access to both projects' do + before do + target_project.add_developer(user) + project.add_developer(user) + end + + it 'creates the merge request' do + merge_request = described_class.new(project, user, opts).execute + + expect(merge_request).to be_persisted + end + + it 'does not create the merge request when the target project is archived' do + target_project.update!(archived: true) + + expect { described_class.new(project, user, opts).execute } + .to raise_error Gitlab::Access::AccessDeniedError + end + end end context 'when user sets source project id' do diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb index 903aa0a5078..2536c6e2514 100644 --- a/spec/services/merge_requests/refresh_service_spec.rb +++ b/spec/services/merge_requests/refresh_service_spec.rb @@ -24,6 +24,14 @@ describe MergeRequests::RefreshService do merge_when_pipeline_succeeds: true, merge_user: @user) + @another_merge_request = create(:merge_request, + source_project: @project, + source_branch: 'master', + target_branch: 'test', + target_project: @project, + merge_when_pipeline_succeeds: true, + merge_user: @user) + @fork_merge_request = create(:merge_request, source_project: @fork_project, source_branch: 'master', @@ -52,9 +60,11 @@ describe MergeRequests::RefreshService do context 'push to origin repo source branch' do let(:refresh_service) { service.new(@project, @user) } + let(:notification_service) { spy('notification_service') } before do allow(refresh_service).to receive(:execute_hooks) + allow(NotificationService).to receive(:new) { notification_service } end it 'executes hooks with update action' do @@ -64,6 +74,11 @@ describe MergeRequests::RefreshService do expect(refresh_service).to 
have_received(:execute_hooks) .with(@merge_request, 'update', old_rev: @oldrev) + expect(notification_service).to have_received(:push_to_merge_request) + .with(@merge_request, @user, new_commits: anything, existing_commits: anything) + expect(notification_service).to have_received(:push_to_merge_request) + .with(@another_merge_request, @user, new_commits: anything, existing_commits: anything) + expect(@merge_request.notes).not_to be_empty expect(@merge_request).to be_open expect(@merge_request.merge_when_pipeline_succeeds).to be_falsey @@ -119,11 +134,13 @@ describe MergeRequests::RefreshService do context 'push to origin repo source branch when an MR was reopened' do let(:refresh_service) { service.new(@project, @user) } + let(:notification_service) { spy('notification_service') } before do @merge_request.update(state: :reopened) allow(refresh_service).to receive(:execute_hooks) + allow(NotificationService).to receive(:new) { notification_service } refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') reload_mrs end @@ -131,6 +148,10 @@ describe MergeRequests::RefreshService do it 'executes hooks with update action' do expect(refresh_service).to have_received(:execute_hooks) .with(@merge_request, 'update', old_rev: @oldrev) + expect(notification_service).to have_received(:push_to_merge_request) + .with(@merge_request, @user, new_commits: anything, existing_commits: anything) + expect(notification_service).to have_received(:push_to_merge_request) + .with(@another_merge_request, @user, new_commits: anything, existing_commits: anything) expect(@merge_request.notes).not_to be_empty expect(@merge_request).to be_open diff --git a/spec/services/notes/post_process_service_spec.rb b/spec/services/notes/post_process_service_spec.rb index 6ef5e93cb20..4e2ab919f0f 100644 --- a/spec/services/notes/post_process_service_spec.rb +++ b/spec/services/notes/post_process_service_spec.rb @@ -23,5 +23,23 @@ describe Notes::PostProcessService do described_class.new(@note).execute end + + context 'with a confidential issue' do + let(:issue) { create(:issue, :confidential, project: project) } + + it "doesn't call note hooks/services" do + expect(project).not_to receive(:execute_hooks).with(anything, :note_hooks) + expect(project).not_to receive(:execute_services).with(anything, :note_hooks) + + described_class.new(@note).execute + end + + it "calls confidential-note hooks/services" do + expect(project).to receive(:execute_hooks).with(anything, :confidential_note_hooks) + expect(project).to receive(:execute_services).with(anything, :confidential_note_hooks) + + described_class.new(@note).execute + end + end end end diff --git a/spec/services/notes/render_service_spec.rb b/spec/services/notes/render_service_spec.rb index faac498037f..f771620bc0d 100644 --- a/spec/services/notes/render_service_spec.rb +++ b/spec/services/notes/render_service_spec.rb @@ -4,23 +4,28 @@ describe Notes::RenderService do describe '#execute' do it 'renders a Note' do note = double(:note) - project = double(:project) wiki = double(:wiki) user = double(:user) - expect(Banzai::ObjectRenderer).to receive(:new) - .with(project, user, - requested_path: 'foo', - project_wiki: wiki, - ref: 'bar', - only_path: nil, - xhtml: false) + expect(Banzai::ObjectRenderer) + .to receive(:new) + .with( + user: user, + redaction_context: { + requested_path: 'foo', + project_wiki: wiki, + ref: 'bar', + only_path: nil, + xhtml: false + } + ) .and_call_original expect_any_instance_of(Banzai::ObjectRenderer) - .to receive(:render).with([note], :note) + 
.to receive(:render) + .with([note], :note) - described_class.new(user).execute([note], project, + described_class.new(user).execute([note], requested_path: 'foo', project_wiki: wiki, ref: 'bar', diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb index 62fdf870090..55bbe954491 100644 --- a/spec/services/notification_service_spec.rb +++ b/spec/services/notification_service_spec.rb @@ -34,6 +34,12 @@ describe NotificationService, :mailer do should_not_email_anyone end + it 'emails new mentions despite being unsubscribed' do + send_notifications(@unsubscribed_mentioned) + + should_only_email(@unsubscribed_mentioned) + end + it 'sends the proper notification reason header' do send_notifications(@u_watcher) should_only_email(@u_watcher) @@ -122,7 +128,7 @@ describe NotificationService, :mailer do let(:project) { create(:project, :private) } let(:issue) { create(:issue, project: project, assignees: [assignee]) } let(:mentioned_issue) { create(:issue, assignees: issue.assignees) } - let(:note) { create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: '@mention referenced, @outsider also') } + let(:note) { create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: '@mention referenced, @unsubscribed_mentioned and @outsider also') } before do build_team(note.project) @@ -150,7 +156,7 @@ describe NotificationService, :mailer do add_users_with_subscription(note.project, issue) reset_delivered_emails! - expect(SentNotification).to receive(:record).with(issue, any_args).exactly(9).times + expect(SentNotification).to receive(:record).with(issue, any_args).exactly(10).times notification.new_note(note) @@ -163,6 +169,7 @@ describe NotificationService, :mailer do should_email(@watcher_and_subscriber) should_email(@subscribed_participant) should_email(@u_custom_off) + should_email(@unsubscribed_mentioned) should_not_email(@u_guest_custom) should_not_email(@u_guest_watcher) should_not_email(note.author) @@ -279,6 +286,7 @@ describe NotificationService, :mailer do before do build_team(note.project) note.project.add_master(note.author) + add_users_with_subscription(note.project, issue) reset_delivered_emails! 
end @@ -286,6 +294,9 @@ describe NotificationService, :mailer do it 'notifies the team members' do notification.new_note(note) + # Make sure @unsubscribed_mentioned is part of the team + expect(note.project.team.members).to include(@unsubscribed_mentioned) + # Notify all team members note.project.team.members.each do |member| # User with disabled notification should not be notified @@ -486,7 +497,7 @@ describe NotificationService, :mailer do let(:group) { create(:group) } let(:project) { create(:project, :public, namespace: group) } let(:another_project) { create(:project, :public, namespace: group) } - let(:issue) { create :issue, project: project, assignees: [assignee], description: 'cc @participant' } + let(:issue) { create :issue, project: project, assignees: [assignee], description: 'cc @participant @unsubscribed_mentioned' } before do build_team(issue.project) @@ -510,6 +521,7 @@ describe NotificationService, :mailer do should_email(@u_participant_mentioned) should_email(@g_global_watcher) should_email(@g_watcher) + should_email(@unsubscribed_mentioned) should_not_email(@u_mentioned) should_not_email(@u_participating) should_not_email(@u_disabled) @@ -921,6 +933,46 @@ describe NotificationService, :mailer do let(:notification_trigger) { notification.issue_moved(issue, new_issue, @u_disabled) } end end + + describe '#issue_due' do + before do + issue.update!(due_date: Date.today) + + update_custom_notification(:issue_due, @u_guest_custom, resource: project) + update_custom_notification(:issue_due, @u_custom_global) + end + + it 'sends email to issue notification recipients, excluding watchers' do + notification.issue_due(issue) + + should_email(issue.assignees.first) + should_email(issue.author) + should_email(@u_guest_custom) + should_email(@u_custom_global) + should_email(@u_participant_mentioned) + should_email(@subscriber) + should_email(@watcher_and_subscriber) + should_not_email(@u_watcher) + should_not_email(@u_guest_watcher) + should_not_email(@unsubscriber) + should_not_email(@u_participating) + should_not_email(@u_disabled) + should_not_email(@u_lazy_participant) + end + + it 'sends the email from the author' do + notification.issue_due(issue) + email = find_email_for(@subscriber) + + expect(email.header[:from].display_names).to eq([issue.author.name]) + end + + it_behaves_like 'participating notifications' do + let(:participant) { create(:user, username: 'user-participant') } + let(:issuable) { issue } + let(:notification_trigger) { notification.issue_due(issue) } + end + end end describe 'Merge Requests' do @@ -1078,6 +1130,36 @@ describe NotificationService, :mailer do end end + describe '#push_to_merge_request' do + before do + update_custom_notification(:push_to_merge_request, @u_guest_custom, resource: project) + update_custom_notification(:push_to_merge_request, @u_custom_global) + end + + it do + notification.push_to_merge_request(merge_request, @u_disabled) + + should_email(merge_request.assignee) + should_email(@u_guest_custom) + should_email(@u_custom_global) + should_email(@u_participant_mentioned) + should_email(@subscriber) + should_email(@watcher_and_subscriber) + should_not_email(@u_watcher) + should_not_email(@u_guest_watcher) + should_not_email(@unsubscriber) + should_not_email(@u_participating) + should_not_email(@u_disabled) + should_not_email(@u_lazy_participant) + end + + it_behaves_like 'participating notifications' do + let(:participant) { create(:user, username: 'user-participant') } + let(:issuable) { merge_request } + let(:notification_trigger) { 
notification.push_to_merge_request(merge_request, @u_disabled) } + end + end + describe '#relabel_merge_request' do let(:group_label_1) { create(:group_label, group: group, title: 'Group Label 1', merge_requests: [merge_request]) } let(:group_label_2) { create(:group_label, group: group, title: 'Group Label 2') } @@ -1823,6 +1905,7 @@ describe NotificationService, :mailer do def add_users_with_subscription(project, issuable) @subscriber = create :user @unsubscriber = create :user + @unsubscribed_mentioned = create :user, username: 'unsubscribed_mentioned' @subscribed_participant = create_global_setting_for(create(:user, username: 'subscribed_participant'), :participating) @watcher_and_subscriber = create_global_setting_for(create(:user), :watch) @@ -1830,7 +1913,9 @@ describe NotificationService, :mailer do project.add_master(@subscriber) project.add_master(@unsubscriber) project.add_master(@watcher_and_subscriber) + project.add_master(@unsubscribed_mentioned) + issuable.subscriptions.create(user: @unsubscribed_mentioned, project: project, subscribed: false) issuable.subscriptions.create(user: @subscriber, project: project, subscribed: true) issuable.subscriptions.create(user: @subscribed_participant, project: project, subscribed: true) issuable.subscriptions.create(user: @unsubscriber, project: project, subscribed: false) diff --git a/spec/services/projects/create_from_template_service_spec.rb b/spec/services/projects/create_from_template_service_spec.rb index 609d678caea..d40e6f1449d 100644 --- a/spec/services/projects/create_from_template_service_spec.rb +++ b/spec/services/projects/create_from_template_service_spec.rb @@ -7,7 +7,7 @@ describe Projects::CreateFromTemplateService do path: user.to_param, template_name: 'rails', description: 'project description', - visibility_level: Gitlab::VisibilityLevel::PRIVATE + visibility_level: Gitlab::VisibilityLevel::PUBLIC } end @@ -24,7 +24,23 @@ describe Projects::CreateFromTemplateService do expect(project).to be_saved expect(project.scheduled?).to be(true) - expect(project.description).to match('project description') - expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) + end + + context 'the result project' do + before do + Sidekiq::Testing.inline! 
do + @project = subject.execute + end + + @project.reload + end + + it 'overrides template description' do + expect(@project.description).to match('project description') + end + + it 'overrides template visibility_level' do + expect(@project.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC) + end end end diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb index 8471467d2fa..e35f0f6337a 100644 --- a/spec/services/projects/create_service_spec.rb +++ b/spec/services/projects/create_service_spec.rb @@ -28,6 +28,14 @@ describe Projects::CreateService, '#execute' do end end + describe 'after create actions' do + it 'invalidate personal_projects_count caches' do + expect(user).to receive(:invalidate_personal_projects_count) + + create_project(user, opts) + end + end + context "admin creates project with other user's namespace_id" do it 'sets the correct permissions' do admin = create(:admin) @@ -70,6 +78,16 @@ describe Projects::CreateService, '#execute' do opts[:default_branch] = 'master' expect(create_project(user, opts)).to eq(nil) end + + it 'sets invalid service as inactive' do + create(:service, type: 'JiraService', project: nil, template: true, active: true) + + project = create_project(user, opts) + service = project.services.first + + expect(project).to be_persisted + expect(service.active).to be false + end end context 'wiki_enabled creates repository directory' do @@ -153,7 +171,7 @@ describe Projects::CreateService, '#execute' do context 'when another repository already exists on disk' do let(:repository_storage) { 'default' } - let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] } + let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage].legacy_disk_path } let(:opts) do { @@ -232,14 +250,15 @@ describe Projects::CreateService, '#execute' do end context 'when a bad service template is created' do - it 'reports an error in the imported project' do + it 'sets service to be inactive' do opts[:import_url] = 'http://www.gitlab.com/gitlab-org/gitlab-ce' create(:service, type: 'DroneCiService', project: nil, template: true, active: true) project = create_project(user, opts) + service = project.services.first - expect(project.errors.full_messages_for(:base).first).to match(/Unable to save project. Error: Unable to save DroneCiService/) - expect(project.services.count).to eq 0 + expect(project).to be_persisted + expect(service.active).to be false end end diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb index 0bec2054f50..a66e3c5e995 100644 --- a/spec/services/projects/destroy_service_spec.rb +++ b/spec/services/projects/destroy_service_spec.rb @@ -66,6 +66,12 @@ describe Projects::DestroyService do end it_behaves_like 'deleting the project' + + it 'invalidates personal_project_count cache' do + expect(user).to receive(:invalidate_personal_projects_count) + + destroy_project(project, user) + end end context 'Sidekiq fake' do @@ -242,6 +248,28 @@ describe Projects::DestroyService do end end + context '#attempt_restore_repositories' do + let(:path) { project.disk_path + '.git' } + + before do + expect(project.gitlab_shell.exists?(project.repository_storage_path, path)).to be_truthy + expect(project.gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey + + # Dont run sidekiq to check if renamed repository exists + Sidekiq::Testing.fake! 
{ destroy_project(project, user, {}) } + + expect(project.gitlab_shell.exists?(project.repository_storage_path, path)).to be_falsey + expect(project.gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_truthy + end + + it 'restores the repositories' do + Sidekiq::Testing.fake! { described_class.new(project, user).attempt_repositories_rollback } + + expect(project.gitlab_shell.exists?(project.repository_storage_path, path)).to be_truthy + expect(project.gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey + end + end + def destroy_project(project, user, params = {}) if async Projects::DestroyService.new(project, user, params).async_execute diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb index d1011b07db6..0f7c46367d0 100644 --- a/spec/services/projects/fork_service_spec.rb +++ b/spec/services/projects/fork_service_spec.rb @@ -105,7 +105,7 @@ describe Projects::ForkService do context 'repository already exists' do let(:repository_storage) { 'default' } - let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] } + let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage].legacy_disk_path } before do gitlab_shell.create_repository(repository_storage, "#{@to_user.namespace.full_path}/#{@from_project.path}") diff --git a/spec/services/projects/gitlab_projects_import_service_spec.rb b/spec/services/projects/gitlab_projects_import_service_spec.rb index 6b8f9619bc4..ee1a886f5d6 100644 --- a/spec/services/projects/gitlab_projects_import_service_spec.rb +++ b/spec/services/projects/gitlab_projects_import_service_spec.rb @@ -2,8 +2,11 @@ require 'spec_helper' describe Projects::GitlabProjectsImportService do set(:namespace) { create(:namespace) } + let(:path) { 'test-path' } let(:file) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') } - subject { described_class.new(namespace.owner, { namespace_id: namespace.id, path: path, file: file }) } + let(:overwrite) { false } + let(:import_params) { { namespace_id: namespace.id, path: path, file: file, overwrite: overwrite } } + subject { described_class.new(namespace.owner, import_params) } describe '#execute' do context 'with an invalid path' do @@ -18,8 +21,6 @@ describe Projects::GitlabProjectsImportService do end context 'with a valid path' do - let(:path) { 'test-path' } - it 'creates a project' do project = subject.execute @@ -27,5 +28,38 @@ describe Projects::GitlabProjectsImportService do expect(project).to be_valid end end + + context 'override params' do + it 'stores them as import data when passed' do + project = described_class + .new(namespace.owner, import_params, description: 'Hello') + .execute + + expect(project.import_data.data['override_params']['description']).to eq('Hello') + end + end + + context 'when there is a project with the same path' do + let(:existing_project) { create(:project, namespace: namespace) } + let(:path) { existing_project.path} + + it 'does not create the project' do + project = subject.execute + + expect(project).to be_invalid + expect(project).not_to be_persisted + end + + context 'when overwrite param is set' do + let(:overwrite) { true } + + it 'creates a project in a temporary full_path' do + project = subject.execute + + expect(project).to be_valid + expect(project).to be_persisted + end + end + end end end diff --git a/spec/services/projects/import_export/export_service_spec.rb 
b/spec/services/projects/import_export/export_service_spec.rb new file mode 100644 index 00000000000..f9e5530bc9d --- /dev/null +++ b/spec/services/projects/import_export/export_service_spec.rb @@ -0,0 +1,128 @@ +require 'spec_helper' + +describe Projects::ImportExport::ExportService do + describe '#execute' do + let!(:user) { create(:user) } + let(:project) { create(:project) } + let(:shared) { project.import_export_shared } + let(:service) { described_class.new(project, user) } + let!(:after_export_strategy) { Gitlab::ImportExport::AfterExportStrategies::DownloadNotificationStrategy.new } + + it 'saves the version' do + expect(Gitlab::ImportExport::VersionSaver).to receive(:new).and_call_original + + service.execute + end + + it 'saves the avatar' do + expect(Gitlab::ImportExport::AvatarSaver).to receive(:new).and_call_original + + service.execute + end + + it 'saves the models' do + expect(Gitlab::ImportExport::ProjectTreeSaver).to receive(:new).and_call_original + + service.execute + end + + it 'saves the uploads' do + expect(Gitlab::ImportExport::UploadsSaver).to receive(:new).and_call_original + + service.execute + end + + it 'saves the repo' do + # once for the normal repo, once for the wiki + expect(Gitlab::ImportExport::RepoSaver).to receive(:new).twice.and_call_original + + service.execute + end + + it 'saves the lfs objects' do + expect(Gitlab::ImportExport::LfsSaver).to receive(:new).and_call_original + + service.execute + end + + it 'saves the wiki repo' do + expect(Gitlab::ImportExport::WikiRepoSaver).to receive(:new).and_call_original + + service.execute + end + + context 'when all saver services succeed' do + before do + allow(service).to receive(:save_services).and_return(true) + end + + it 'saves the project in the file system' do + expect(Gitlab::ImportExport::Saver).to receive(:save).with(project: project, shared: shared) + + service.execute + end + + it 'calls the after export strategy' do + expect(after_export_strategy).to receive(:execute) + + service.execute(after_export_strategy) + end + + context 'when after export strategy fails' do + before do + allow(after_export_strategy).to receive(:execute).and_return(false) + end + + after do + service.execute(after_export_strategy) + end + + it 'removes the remaining exported data' do + allow(shared).to receive(:export_path).and_return('whatever') + allow(FileUtils).to receive(:rm_rf) + + expect(FileUtils).to receive(:rm_rf).with(shared.export_path) + end + + it 'notifies the user' do + expect_any_instance_of(NotificationService).to receive(:project_not_exported) + end + + it 'notifies logger' do + allow(Rails.logger).to receive(:error) + + expect(Rails.logger).to receive(:error) + end + end + end + + context 'when saver services fail' do + before do + allow(service).to receive(:save_services).and_return(false) + end + + after do + expect { service.execute }.to raise_error(Gitlab::ImportExport::Error) + end + + it 'removes the remaining exported data' do + allow(shared).to receive(:export_path).and_return('whatever') + allow(FileUtils).to receive(:rm_rf) + + expect(FileUtils).to receive(:rm_rf).with(shared.export_path) + end + + it 'notifies the user' do + expect_any_instance_of(NotificationService).to receive(:project_not_exported) + end + + it 'notifies logger' do + expect(Rails.logger).to receive(:error) + end + + it 'the after export strategy is not called' do + expect(service).not_to receive(:execute_after_export_action) + end + end + end +end diff --git a/spec/services/projects/import_service_spec.rb 
b/spec/services/projects/import_service_spec.rb index bf7facaec99..30c89ebd821 100644 --- a/spec/services/projects/import_service_spec.rb +++ b/spec/services/projects/import_service_spec.rb @@ -156,7 +156,7 @@ describe Projects::ImportService do result = described_class.new(project, user).execute expect(result[:status]).to eq :error - expect(result[:message]).to end_with 'Blocked import URL.' + expect(result[:message]).to include('Requests to localhost are not allowed') end it 'fails with port 25' do @@ -165,7 +165,7 @@ describe Projects::ImportService do result = described_class.new(project, user).execute expect(result[:status]).to eq :error - expect(result[:message]).to end_with 'Blocked import URL.' + expect(result[:message]).to include('Only allowed ports are 22, 80, 443') end end diff --git a/spec/services/projects/move_access_service_spec.rb b/spec/services/projects/move_access_service_spec.rb new file mode 100644 index 00000000000..a820ebd91f4 --- /dev/null +++ b/spec/services/projects/move_access_service_spec.rb @@ -0,0 +1,114 @@ +require 'spec_helper' + +describe Projects::MoveAccessService do + let(:user) { create(:user) } + let(:group) { create(:group) } + let(:project_with_access) { create(:project, namespace: user.namespace) } + let(:master_user) { create(:user) } + let(:reporter_user) { create(:user) } + let(:developer_user) { create(:user) } + let(:master_group) { create(:group) } + let(:reporter_group) { create(:group) } + let(:developer_group) { create(:group) } + + before do + project_with_access.add_master(master_user) + project_with_access.add_developer(developer_user) + project_with_access.add_reporter(reporter_user) + project_with_access.project_group_links.create(group: master_group, group_access: Gitlab::Access::MASTER) + project_with_access.project_group_links.create(group: developer_group, group_access: Gitlab::Access::DEVELOPER) + project_with_access.project_group_links.create(group: reporter_group, group_access: Gitlab::Access::REPORTER) + end + + subject { described_class.new(target_project, user) } + + describe '#execute' do + shared_examples 'move the accesses' do + it do + expect(project_with_access.project_members.count).to eq 4 + expect(project_with_access.project_group_links.count).to eq 3 + expect(project_with_access.authorized_users.count).to eq 4 + + subject.execute(project_with_access) + + expect(project_with_access.project_members.count).to eq 0 + expect(project_with_access.project_group_links.count).to eq 0 + expect(project_with_access.authorized_users.count).to eq 1 + expect(target_project.project_members.count).to eq 4 + expect(target_project.project_group_links.count).to eq 3 + expect(target_project.authorized_users.count).to eq 4 + end + + it 'rollbacks if an exception is raised' do + allow(subject).to receive(:success).and_raise(StandardError) + + expect { subject.execute(project_with_access) }.to raise_error(StandardError) + + expect(project_with_access.project_members.count).to eq 4 + expect(project_with_access.project_group_links.count).to eq 3 + expect(project_with_access.authorized_users.count).to eq 4 + end + end + + context 'when both projects are in the same namespace' do + let(:target_project) { create(:project, namespace: user.namespace) } + + it 'does not refresh project owner authorized projects' do + allow(project_with_access).to receive(:namespace).and_return(user.namespace) + expect(project_with_access.namespace).not_to receive(:refresh_project_authorizations) + expect(target_project.namespace).not_to 
receive(:refresh_project_authorizations) + + subject.execute(project_with_access) + end + + it_behaves_like 'move the accesses' + end + + context 'when projects are in different namespaces' do + let(:target_project) { create(:project, namespace: group) } + + before do + group.add_owner(user) + end + + it 'refreshes both project owner authorized projects' do + allow(project_with_access).to receive(:namespace).and_return(user.namespace) + expect(user.namespace).to receive(:refresh_project_authorizations).once + expect(group).to receive(:refresh_project_authorizations).once + + subject.execute(project_with_access) + end + + it_behaves_like 'move the accesses' + end + + context 'when remove_remaining_elements is false' do + let(:target_project) { create(:project, namespace: user.namespace) } + let(:options) { { remove_remaining_elements: false } } + + it 'does not remove remaining memberships' do + target_project.add_master(master_user) + + subject.execute(project_with_access, options) + + expect(project_with_access.project_members.count).not_to eq 0 + end + + it 'does not remove remaining group links' do + target_project.project_group_links.create(group: master_group, group_access: Gitlab::Access::MASTER) + + subject.execute(project_with_access, options) + + expect(project_with_access.project_group_links.count).not_to eq 0 + end + + it 'does not remove remaining authorizations' do + target_project.add_developer(developer_user) + + subject.execute(project_with_access, options) + + expect(project_with_access.project_authorizations.count).not_to eq 0 + end + end + end +end diff --git a/spec/services/projects/move_deploy_keys_projects_service_spec.rb b/spec/services/projects/move_deploy_keys_projects_service_spec.rb new file mode 100644 index 00000000000..c548edf39a8 --- /dev/null +++ b/spec/services/projects/move_deploy_keys_projects_service_spec.rb @@ -0,0 +1,58 @@ +require 'spec_helper' + +describe Projects::MoveDeployKeysProjectsService do + let!(:user) { create(:user) } + let!(:project_with_deploy_keys) { create(:project, namespace: user.namespace) } + let!(:target_project) { create(:project, namespace: user.namespace) } + + subject { described_class.new(target_project, user) } + + describe '#execute' do + before do + create_list(:deploy_keys_project, 2, project: project_with_deploy_keys) + end + + it 'moves the user\'s deploy keys from one project to another' do + expect(project_with_deploy_keys.deploy_keys_projects.count).to eq 2 + expect(target_project.deploy_keys_projects.count).to eq 0 + + subject.execute(project_with_deploy_keys) + + expect(project_with_deploy_keys.deploy_keys_projects.count).to eq 0 + expect(target_project.deploy_keys_projects.count).to eq 2 + end + + it 'does not link existent deploy_keys in the current project' do + target_project.deploy_keys << project_with_deploy_keys.deploy_keys.first + + expect(project_with_deploy_keys.deploy_keys_projects.count).to eq 2 + expect(target_project.deploy_keys_projects.count).to eq 1 + + subject.execute(project_with_deploy_keys) + + expect(project_with_deploy_keys.deploy_keys_projects.count).to eq 0 + expect(target_project.deploy_keys_projects.count).to eq 2 + end + + it 'rollbacks changes if transaction fails' do + allow(subject).to receive(:success).and_raise(StandardError) + + expect { subject.execute(project_with_deploy_keys) }.to raise_error(StandardError) + + expect(project_with_deploy_keys.deploy_keys_projects.count).to eq 2 + expect(target_project.deploy_keys_projects.count).to eq 0 + end + + context 'when 
remove_remaining_elements is false' do + let(:options) { { remove_remaining_elements: false } } + + it 'does not remove remaining deploy keys projects' do + target_project.deploy_keys << project_with_deploy_keys.deploy_keys.first + + subject.execute(project_with_deploy_keys, options) + + expect(project_with_deploy_keys.deploy_keys_projects.count).not_to eq 0 + end + end + end +end diff --git a/spec/services/projects/move_forks_service_spec.rb b/spec/services/projects/move_forks_service_spec.rb new file mode 100644 index 00000000000..f4a5a7f9fc2 --- /dev/null +++ b/spec/services/projects/move_forks_service_spec.rb @@ -0,0 +1,96 @@ +require 'spec_helper' + +describe Projects::MoveForksService do + include ProjectForksHelper + + let!(:user) { create(:user) } + let!(:project_with_forks) { create(:project, namespace: user.namespace) } + let!(:target_project) { create(:project, namespace: user.namespace) } + let!(:lvl1_forked_project_1) { fork_project(project_with_forks, user) } + let!(:lvl1_forked_project_2) { fork_project(project_with_forks, user) } + let!(:lvl2_forked_project_1_1) { fork_project(lvl1_forked_project_1, user) } + let!(:lvl2_forked_project_1_2) { fork_project(lvl1_forked_project_1, user) } + + subject { described_class.new(target_project, user) } + + describe '#execute' do + context 'when moving a root forked project' do + it 'moves the descendant forks' do + expect(project_with_forks.forks.count).to eq 2 + expect(target_project.forks.count).to eq 0 + + subject.execute(project_with_forks) + + expect(project_with_forks.forks.count).to eq 0 + expect(target_project.forks.count).to eq 2 + expect(lvl1_forked_project_1.forked_from_project).to eq target_project + expect(lvl1_forked_project_1.fork_network_member.forked_from_project).to eq target_project + expect(lvl1_forked_project_2.forked_from_project).to eq target_project + expect(lvl1_forked_project_2.fork_network_member.forked_from_project).to eq target_project + end + + it 'updates the fork network' do + expect(project_with_forks.fork_network.root_project).to eq project_with_forks + expect(project_with_forks.fork_network.fork_network_members.map(&:project)).to include project_with_forks + + subject.execute(project_with_forks) + + expect(target_project.reload.fork_network.root_project).to eq target_project + expect(target_project.fork_network.fork_network_members.map(&:project)).not_to include project_with_forks + end + end + + context 'when moving a intermediate forked project' do + it 'moves the descendant forks' do + expect(lvl1_forked_project_1.forks.count).to eq 2 + expect(target_project.forks.count).to eq 0 + + subject.execute(lvl1_forked_project_1) + + expect(lvl1_forked_project_1.forks.count).to eq 0 + expect(target_project.forks.count).to eq 2 + expect(lvl2_forked_project_1_1.forked_from_project).to eq target_project + expect(lvl2_forked_project_1_1.fork_network_member.forked_from_project).to eq target_project + expect(lvl2_forked_project_1_2.forked_from_project).to eq target_project + expect(lvl2_forked_project_1_2.fork_network_member.forked_from_project).to eq target_project + end + + it 'moves the ascendant fork' do + subject.execute(lvl1_forked_project_1) + + expect(target_project.forked_from_project).to eq project_with_forks + expect(target_project.fork_network_member.forked_from_project).to eq project_with_forks + end + + it 'does not update fork network' do + subject.execute(lvl1_forked_project_1) + + expect(target_project.reload.fork_network.root_project).to eq project_with_forks + end + end + + context 'when 
moving a leaf forked project' do + it 'moves the ascendant fork' do + subject.execute(lvl2_forked_project_1_1) + + expect(target_project.forked_from_project).to eq lvl1_forked_project_1 + expect(target_project.fork_network_member.forked_from_project).to eq lvl1_forked_project_1 + end + + it 'does not update fork network' do + subject.execute(lvl2_forked_project_1_1) + + expect(target_project.reload.fork_network.root_project).to eq project_with_forks + end + end + + it 'rollbacks changes if transaction fails' do + allow(subject).to receive(:success).and_raise(StandardError) + + expect { subject.execute(project_with_forks) }.to raise_error(StandardError) + + expect(project_with_forks.forks.count).to eq 2 + expect(target_project.forks.count).to eq 0 + end + end +end diff --git a/spec/services/projects/move_lfs_objects_projects_service_spec.rb b/spec/services/projects/move_lfs_objects_projects_service_spec.rb new file mode 100644 index 00000000000..517a24a982a --- /dev/null +++ b/spec/services/projects/move_lfs_objects_projects_service_spec.rb @@ -0,0 +1,55 @@ +require 'spec_helper' + +describe Projects::MoveLfsObjectsProjectsService do + let!(:user) { create(:user) } + let!(:project_with_lfs_objects) { create(:project, namespace: user.namespace) } + let!(:target_project) { create(:project, namespace: user.namespace) } + + subject { described_class.new(target_project, user) } + + before do + create_list(:lfs_objects_project, 3, project: project_with_lfs_objects) + end + + describe '#execute' do + it 'links the lfs objects from existent in source project' do + expect(target_project.lfs_objects.count).to eq 0 + + subject.execute(project_with_lfs_objects) + + expect(project_with_lfs_objects.reload.lfs_objects.count).to eq 0 + expect(target_project.reload.lfs_objects.count).to eq 3 + end + + it 'does not link existent lfs_object in the current project' do + target_project.lfs_objects << project_with_lfs_objects.lfs_objects.first(2) + + expect(target_project.lfs_objects.count).to eq 2 + + subject.execute(project_with_lfs_objects) + + expect(target_project.lfs_objects.count).to eq 3 + end + + it 'rollbacks changes if transaction fails' do + allow(subject).to receive(:success).and_raise(StandardError) + + expect { subject.execute(project_with_lfs_objects) }.to raise_error(StandardError) + + expect(project_with_lfs_objects.lfs_objects.count).to eq 3 + expect(target_project.lfs_objects.count).to eq 0 + end + + context 'when remove_remaining_elements is false' do + let(:options) { { remove_remaining_elements: false } } + + it 'does not remove remaining lfs objects' do + target_project.lfs_objects << project_with_lfs_objects.lfs_objects.first(2) + + subject.execute(project_with_lfs_objects, options) + + expect(project_with_lfs_objects.lfs_objects.count).not_to eq 0 + end + end + end +end diff --git a/spec/services/projects/move_notification_settings_service_spec.rb b/spec/services/projects/move_notification_settings_service_spec.rb new file mode 100644 index 00000000000..24d69eef86a --- /dev/null +++ b/spec/services/projects/move_notification_settings_service_spec.rb @@ -0,0 +1,56 @@ +require 'spec_helper' + +describe Projects::MoveNotificationSettingsService do + let(:user) { create(:user) } + let(:project_with_notifications) { create(:project, namespace: user.namespace) } + let(:target_project) { create(:project, namespace: user.namespace) } + + subject { described_class.new(target_project, user) } + + describe '#execute' do + context 'with notification settings' do + before do + 
create_list(:notification_setting, 2, source: project_with_notifications) + end + + it 'moves the user\'s notification settings from one project to another' do + expect(project_with_notifications.notification_settings.count).to eq 3 + expect(target_project.notification_settings.count).to eq 1 + + subject.execute(project_with_notifications) + + expect(project_with_notifications.notification_settings.count).to eq 0 + expect(target_project.notification_settings.count).to eq 3 + end + + it 'rollbacks changes if transaction fails' do + allow(subject).to receive(:success).and_raise(StandardError) + + expect { subject.execute(project_with_notifications) }.to raise_error(StandardError) + + expect(project_with_notifications.notification_settings.count).to eq 3 + expect(target_project.notification_settings.count).to eq 1 + end + end + + it 'does not move existent notification settings in the current project' do + expect(project_with_notifications.notification_settings.count).to eq 1 + expect(target_project.notification_settings.count).to eq 1 + expect(user.notification_settings.count).to eq 2 + + subject.execute(project_with_notifications) + + expect(user.notification_settings.count).to eq 1 + end + + context 'when remove_remaining_elements is false' do + let(:options) { { remove_remaining_elements: false } } + + it 'does not remove remaining notification settings' do + subject.execute(project_with_notifications, options) + + expect(project_with_notifications.notification_settings.count).not_to eq 0 + end + end + end +end diff --git a/spec/services/projects/move_project_authorizations_service_spec.rb b/spec/services/projects/move_project_authorizations_service_spec.rb new file mode 100644 index 00000000000..f7262b9b887 --- /dev/null +++ b/spec/services/projects/move_project_authorizations_service_spec.rb @@ -0,0 +1,56 @@ +require 'spec_helper' + +describe Projects::MoveProjectAuthorizationsService do + let!(:user) { create(:user) } + let(:project_with_users) { create(:project, namespace: user.namespace) } + let(:target_project) { create(:project, namespace: user.namespace) } + let(:master_user) { create(:user) } + let(:reporter_user) { create(:user) } + let(:developer_user) { create(:user) } + + subject { described_class.new(target_project, user) } + + describe '#execute' do + before do + project_with_users.add_master(master_user) + project_with_users.add_developer(developer_user) + project_with_users.add_reporter(reporter_user) + end + + it 'moves the authorizations from one project to another' do + expect(project_with_users.authorized_users.count).to eq 4 + expect(target_project.authorized_users.count).to eq 1 + + subject.execute(project_with_users) + + expect(project_with_users.authorized_users.count).to eq 0 + expect(target_project.authorized_users.count).to eq 4 + end + + it 'does not move existent authorizations to the current project' do + target_project.add_master(developer_user) + target_project.add_developer(reporter_user) + + expect(project_with_users.authorized_users.count).to eq 4 + expect(target_project.authorized_users.count).to eq 3 + + subject.execute(project_with_users) + + expect(project_with_users.authorized_users.count).to eq 0 + expect(target_project.authorized_users.count).to eq 4 + end + + context 'when remove_remaining_elements is false' do + let(:options) { { remove_remaining_elements: false } } + + it 'does not remove remaining project authorizations' do + target_project.add_master(developer_user) + target_project.add_developer(reporter_user) + + 
subject.execute(project_with_users, options) + + expect(project_with_users.project_authorizations.count).not_to eq 0 + end + end + end +end diff --git a/spec/services/projects/move_project_group_links_service_spec.rb b/spec/services/projects/move_project_group_links_service_spec.rb new file mode 100644 index 00000000000..e3d06e6d3d7 --- /dev/null +++ b/spec/services/projects/move_project_group_links_service_spec.rb @@ -0,0 +1,65 @@ +require 'spec_helper' + +describe Projects::MoveProjectGroupLinksService do + let!(:user) { create(:user) } + let(:project_with_groups) { create(:project, namespace: user.namespace) } + let(:target_project) { create(:project, namespace: user.namespace) } + let(:master_group) { create(:group) } + let(:reporter_group) { create(:group) } + let(:developer_group) { create(:group) } + + subject { described_class.new(target_project, user) } + + describe '#execute' do + before do + project_with_groups.project_group_links.create(group: master_group, group_access: Gitlab::Access::MASTER) + project_with_groups.project_group_links.create(group: developer_group, group_access: Gitlab::Access::DEVELOPER) + project_with_groups.project_group_links.create(group: reporter_group, group_access: Gitlab::Access::REPORTER) + end + + it 'moves the group links from one project to another' do + expect(project_with_groups.project_group_links.count).to eq 3 + expect(target_project.project_group_links.count).to eq 0 + + subject.execute(project_with_groups) + + expect(project_with_groups.project_group_links.count).to eq 0 + expect(target_project.project_group_links.count).to eq 3 + end + + it 'does not move existent group links in the current project' do + target_project.project_group_links.create(group: master_group, group_access: Gitlab::Access::MASTER) + target_project.project_group_links.create(group: developer_group, group_access: Gitlab::Access::DEVELOPER) + + expect(project_with_groups.project_group_links.count).to eq 3 + expect(target_project.project_group_links.count).to eq 2 + + subject.execute(project_with_groups) + + expect(project_with_groups.project_group_links.count).to eq 0 + expect(target_project.project_group_links.count).to eq 3 + end + + it 'rollbacks changes if transaction fails' do + allow(subject).to receive(:success).and_raise(StandardError) + + expect { subject.execute(project_with_groups) }.to raise_error(StandardError) + + expect(project_with_groups.project_group_links.count).to eq 3 + expect(target_project.project_group_links.count).to eq 0 + end + + context 'when remove_remaining_elements is false' do + let(:options) { { remove_remaining_elements: false } } + + it 'does not remove remaining project group links' do + target_project.project_group_links.create(group: master_group, group_access: Gitlab::Access::MASTER) + target_project.project_group_links.create(group: developer_group, group_access: Gitlab::Access::DEVELOPER) + + subject.execute(project_with_groups, options) + + expect(project_with_groups.project_group_links.count).not_to eq 0 + end + end + end +end diff --git a/spec/services/projects/move_project_members_service_spec.rb b/spec/services/projects/move_project_members_service_spec.rb new file mode 100644 index 00000000000..9c9a2d2fde1 --- /dev/null +++ b/spec/services/projects/move_project_members_service_spec.rb @@ -0,0 +1,65 @@ +require 'spec_helper' + +describe Projects::MoveProjectMembersService do + let!(:user) { create(:user) } + let(:project_with_users) { create(:project, namespace: user.namespace) } + let(:target_project) { create(:project, 
namespace: user.namespace) } + let(:master_user) { create(:user) } + let(:reporter_user) { create(:user) } + let(:developer_user) { create(:user) } + + subject { described_class.new(target_project, user) } + + describe '#execute' do + before do + project_with_users.add_master(master_user) + project_with_users.add_developer(developer_user) + project_with_users.add_reporter(reporter_user) + end + + it 'moves the members from one project to another' do + expect(project_with_users.project_members.count).to eq 4 + expect(target_project.project_members.count).to eq 1 + + subject.execute(project_with_users) + + expect(project_with_users.project_members.count).to eq 0 + expect(target_project.project_members.count).to eq 4 + end + + it 'does not move existent members to the current project' do + target_project.add_master(developer_user) + target_project.add_developer(reporter_user) + + expect(project_with_users.project_members.count).to eq 4 + expect(target_project.project_members.count).to eq 3 + + subject.execute(project_with_users) + + expect(project_with_users.project_members.count).to eq 0 + expect(target_project.project_members.count).to eq 4 + end + + it 'rollbacks changes if transaction fails' do + allow(subject).to receive(:success).and_raise(StandardError) + + expect { subject.execute(project_with_users) }.to raise_error(StandardError) + + expect(project_with_users.project_members.count).to eq 4 + expect(target_project.project_members.count).to eq 1 + end + + context 'when remove_remaining_elements is false' do + let(:options) { { remove_remaining_elements: false } } + + it 'does not remove remaining project members' do + target_project.add_master(developer_user) + target_project.add_developer(reporter_user) + + subject.execute(project_with_users, options) + + expect(project_with_users.project_members.count).not_to eq 0 + end + end + end +end diff --git a/spec/services/projects/move_users_star_projects_service_spec.rb b/spec/services/projects/move_users_star_projects_service_spec.rb new file mode 100644 index 00000000000..e0545c5a21b --- /dev/null +++ b/spec/services/projects/move_users_star_projects_service_spec.rb @@ -0,0 +1,42 @@ +require 'spec_helper' + +describe Projects::MoveUsersStarProjectsService do + let!(:user) { create(:user) } + let!(:project_with_stars) { create(:project, namespace: user.namespace) } + let!(:target_project) { create(:project, namespace: user.namespace) } + + subject { described_class.new(target_project, user) } + + describe '#execute' do + before do + create_list(:users_star_project, 2, project: project_with_stars) + end + + it 'moves the user\'s stars from one project to another' do + expect(project_with_stars.users_star_projects.count).to eq 2 + expect(project_with_stars.star_count).to eq 2 + expect(target_project.users_star_projects.count).to eq 0 + expect(target_project.star_count).to eq 0 + + subject.execute(project_with_stars) + project_with_stars.reload + target_project.reload + + expect(project_with_stars.users_star_projects.count).to eq 0 + expect(project_with_stars.star_count).to eq 0 + expect(target_project.users_star_projects.count).to eq 2 + expect(target_project.star_count).to eq 2 + end + + it 'rollbacks changes if transaction fails' do + allow(subject).to receive(:success).and_raise(StandardError) + + expect { subject.execute(project_with_stars) }.to raise_error(StandardError) + + expect(project_with_stars.users_star_projects.count).to eq 2 + expect(project_with_stars.star_count).to eq 2 + expect(target_project.users_star_projects.count).to 
eq 0 + expect(target_project.star_count).to eq 0 + end + end +end diff --git a/spec/services/projects/overwrite_project_service_spec.rb b/spec/services/projects/overwrite_project_service_spec.rb new file mode 100644 index 00000000000..252c61f4224 --- /dev/null +++ b/spec/services/projects/overwrite_project_service_spec.rb @@ -0,0 +1,198 @@ +require 'spec_helper' + +describe Projects::OverwriteProjectService do + include ProjectForksHelper + + let(:user) { create(:user) } + let(:project_from) { create(:project, namespace: user.namespace) } + let(:project_to) { create(:project, namespace: user.namespace) } + let!(:lvl1_forked_project_1) { fork_project(project_from, user) } + let!(:lvl1_forked_project_2) { fork_project(project_from, user) } + let!(:lvl2_forked_project_1_1) { fork_project(lvl1_forked_project_1, user) } + let!(:lvl2_forked_project_1_2) { fork_project(lvl1_forked_project_1, user) } + + subject { described_class.new(project_to, user) } + + before do + allow(project_to).to receive(:import_data).and_return(double(data: { 'original_path' => project_from.path })) + end + + describe '#execute' do + shared_examples 'overwrite actions' do + it 'moves deploy keys' do + deploy_keys_count = project_from.deploy_keys_projects.count + + subject.execute(project_from) + + expect(project_to.deploy_keys_projects.count).to eq deploy_keys_count + end + + it 'moves notification settings' do + notification_count = project_from.notification_settings.count + + subject.execute(project_from) + + expect(project_to.notification_settings.count).to eq notification_count + end + + it 'moves users stars' do + stars_count = project_from.users_star_projects.count + + subject.execute(project_from) + project_to.reload + + expect(project_to.users_star_projects.count).to eq stars_count + expect(project_to.star_count).to eq stars_count + end + + it 'moves project group links' do + group_links_count = project_from.project_group_links.count + + subject.execute(project_from) + + expect(project_to.project_group_links.count).to eq group_links_count + end + + it 'moves memberships and authorizations' do + members_count = project_from.project_members.count + project_authorizations = project_from.project_authorizations.count + + subject.execute(project_from) + + expect(project_to.project_members.count).to eq members_count + expect(project_to.project_authorizations.count).to eq project_authorizations + end + + context 'moves lfs objects relationships' do + before do + create_list(:lfs_objects_project, 3, project: project_from) + end + + it do + lfs_objects_count = project_from.lfs_objects.count + + subject.execute(project_from) + + expect(project_to.lfs_objects.count).to eq lfs_objects_count + end + end + + it 'removes the original project' do + subject.execute(project_from) + + expect { Project.find(project_from.id) }.to raise_error(ActiveRecord::RecordNotFound) + end + + it 'renames the project' do + subject.execute(project_from) + + expect(project_to.full_path).to eq project_from.full_path + end + end + + context 'when project does not have any relation' do + it_behaves_like 'overwrite actions' + end + + context 'when project with elements' do + it_behaves_like 'overwrite actions' do + let(:master_user) { create(:user) } + let(:reporter_user) { create(:user) } + let(:developer_user) { create(:user) } + let(:master_group) { create(:group) } + let(:reporter_group) { create(:group) } + let(:developer_group) { create(:group) } + + before do + create_list(:deploy_keys_project, 2, project: project_from) + 
create_list(:notification_setting, 2, source: project_from) + create_list(:users_star_project, 2, project: project_from) + project_from.project_group_links.create(group: master_group, group_access: Gitlab::Access::MASTER) + project_from.project_group_links.create(group: developer_group, group_access: Gitlab::Access::DEVELOPER) + project_from.project_group_links.create(group: reporter_group, group_access: Gitlab::Access::REPORTER) + project_from.add_master(master_user) + project_from.add_developer(developer_user) + project_from.add_reporter(reporter_user) + end + end + end + + context 'forks' do + context 'when moving a root forked project' do + it 'moves the descendant forks' do + expect(project_from.forks.count).to eq 2 + expect(project_to.forks.count).to eq 0 + + subject.execute(project_from) + + expect(project_from.forks.count).to eq 0 + expect(project_to.forks.count).to eq 2 + expect(lvl1_forked_project_1.forked_from_project).to eq project_to + expect(lvl1_forked_project_1.fork_network_member.forked_from_project).to eq project_to + expect(lvl1_forked_project_2.forked_from_project).to eq project_to + expect(lvl1_forked_project_2.fork_network_member.forked_from_project).to eq project_to + end + + it 'updates the fork network' do + expect(project_from.fork_network.root_project).to eq project_from + expect(project_from.fork_network.fork_network_members.map(&:project)).to include project_from + + subject.execute(project_from) + + expect(project_to.reload.fork_network.root_project).to eq project_to + expect(project_to.fork_network.fork_network_members.map(&:project)).not_to include project_from + end + end + context 'when moving a intermediate forked project' do + let(:project_to) { create(:project, namespace: lvl1_forked_project_1.namespace) } + + it 'moves the descendant forks' do + expect(lvl1_forked_project_1.forks.count).to eq 2 + expect(project_to.forks.count).to eq 0 + + subject.execute(lvl1_forked_project_1) + + expect(lvl1_forked_project_1.forks.count).to eq 0 + expect(project_to.forks.count).to eq 2 + expect(lvl2_forked_project_1_1.forked_from_project).to eq project_to + expect(lvl2_forked_project_1_1.fork_network_member.forked_from_project).to eq project_to + expect(lvl2_forked_project_1_2.forked_from_project).to eq project_to + expect(lvl2_forked_project_1_2.fork_network_member.forked_from_project).to eq project_to + end + + it 'moves the ascendant fork' do + subject.execute(lvl1_forked_project_1) + + expect(project_to.reload.forked_from_project).to eq project_from + expect(project_to.fork_network_member.forked_from_project).to eq project_from + end + + it 'does not update fork network' do + subject.execute(lvl1_forked_project_1) + + expect(project_to.reload.fork_network.root_project).to eq project_from + end + end + end + + context 'if an exception is raised' do + it 'rollbacks changes' do + updated_at = project_from.updated_at + + allow(subject).to receive(:rename_project).and_raise(StandardError) + + expect { subject.execute(project_from) }.to raise_error(StandardError) + expect(Project.find(project_from.id)).not_to be_nil + expect(project_from.reload.updated_at.change(usec: 0)).to eq updated_at.change(usec: 0) + end + + it 'tries to restore the original project repositories' do + allow(subject).to receive(:rename_project).and_raise(StandardError) + + expect(subject).to receive(:attempt_restore_repositories).with(project_from) + + expect { subject.execute(project_from) }.to raise_error(StandardError) + end + end + end +end diff --git 
a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb index ce567fe3879..ff9b2372a35 100644 --- a/spec/services/projects/transfer_service_spec.rb +++ b/spec/services/projects/transfer_service_spec.rb @@ -37,6 +37,12 @@ describe Projects::TransferService do transfer_project(project, user, group) end + it 'invalidates the user\'s personal_project_count cache' do + expect(user).to receive(:invalidate_personal_projects_count) + + transfer_project(project, user, group) + end + it 'executes system hooks' do transfer_project(project, user, group) do |service| expect(service).to receive(:execute_system_hooks) @@ -146,7 +152,7 @@ describe Projects::TransferService do context 'namespace which contains orphan repository with same projects path name' do let(:repository_storage) { 'default' } - let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] } + let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage].legacy_disk_path } before do group.add_owner(user) diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb index 934106627a9..1b6caeab15d 100644 --- a/spec/services/projects/update_pages_service_spec.rb +++ b/spec/services/projects/update_pages_service_spec.rb @@ -21,75 +21,72 @@ describe Projects::UpdatePagesService do end context 'legacy artifacts' do - %w(tar.gz zip).each do |format| - let(:extension) { format } + let(:extension) { 'zip' } - context "for valid #{format}" do + before do + build.update_attributes(legacy_artifacts_file: file) + build.update_attributes(legacy_artifacts_metadata: metadata) + end + + describe 'pages artifacts' do + context 'with expiry date' do before do - build.update_attributes(legacy_artifacts_file: file) - build.update_attributes(legacy_artifacts_metadata: metadata) + build.artifacts_expire_in = "2 days" + build.save! end - describe 'pages artifacts' do - context 'with expiry date' do - before do - build.artifacts_expire_in = "2 days" - build.save! 
- end - - it "doesn't delete artifacts" do - expect(execute).to eq(:success) - - expect(build.reload.artifacts?).to eq(true) - end - end - - context 'without expiry date' do - it "does delete artifacts" do - expect(execute).to eq(:success) + it "doesn't delete artifacts" do + expect(execute).to eq(:success) - expect(build.reload.artifacts?).to eq(false) - end - end + expect(build.reload.artifacts?).to eq(true) end + end - it 'succeeds' do - expect(project.pages_deployed?).to be_falsey + context 'without expiry date' do + it "does delete artifacts" do expect(execute).to eq(:success) - expect(project.pages_deployed?).to be_truthy - # Check that all expected files are extracted - %w[index.html zero .hidden/file].each do |filename| - expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy - end + expect(build.reload.artifacts?).to eq(false) end + end + end - it 'limits pages size' do - stub_application_setting(max_pages_size: 1) - expect(execute).not_to eq(:success) - end + it 'succeeds' do + expect(project.pages_deployed?).to be_falsey + expect(execute).to eq(:success) + expect(project.pages_deployed?).to be_truthy - it 'removes pages after destroy' do - expect(PagesWorker).to receive(:perform_in) - expect(project.pages_deployed?).to be_falsey - expect(execute).to eq(:success) - expect(project.pages_deployed?).to be_truthy - project.destroy - expect(project.pages_deployed?).to be_falsey - end + # Check that all expected files are extracted + %w[index.html zero .hidden/file].each do |filename| + expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy + end + end - it 'fails if sha on branch is not latest' do - build.update_attributes(ref: 'feature') + it 'limits pages size' do + stub_application_setting(max_pages_size: 1) + expect(execute).not_to eq(:success) + end - expect(execute).not_to eq(:success) - end + it 'removes pages after destroy' do + expect(PagesWorker).to receive(:perform_in) + expect(project.pages_deployed?).to be_falsey + expect(execute).to eq(:success) + expect(project.pages_deployed?).to be_truthy + project.destroy + expect(project.pages_deployed?).to be_falsey + end - it 'fails for empty file fails' do - build.update_attributes(legacy_artifacts_file: empty_file) + it 'fails if sha on branch is not latest' do + build.update_attributes(ref: 'feature') - expect(execute).not_to eq(:success) - end - end + expect(execute).not_to eq(:success) + end + + it 'fails for empty file fails' do + build.update_attributes(legacy_artifacts_file: empty_file) + + expect { execute } + .to raise_error(Projects::UpdatePagesService::FailedToExtractError) end end @@ -159,7 +156,8 @@ describe Projects::UpdatePagesService do it 'fails for empty file fails' do build.job_artifacts_archive.update_attributes(file: empty_file) - expect(execute).not_to eq(:success) + expect { execute } + .to raise_error(Projects::UpdatePagesService::FailedToExtractError) end context 'when timeout happens by DNS error' do @@ -172,7 +170,39 @@ describe Projects::UpdatePagesService do expect { execute }.to raise_error(SocketError) build.reload - expect(build.artifacts?).to eq(true) + expect(deploy_status).to be_failed + expect(build.artifacts?).to be_truthy + end + end + + context 'when failed to extract zip artifacts' do + before do + allow_any_instance_of(described_class) + .to receive(:extract_zip_archive!) 
+ .and_raise(Projects::UpdatePagesService::FailedToExtractError) + end + + it 'raises an error' do + expect { execute } + .to raise_error(Projects::UpdatePagesService::FailedToExtractError) + + build.reload + expect(deploy_status).to be_failed + expect(build.artifacts?).to be_truthy + end + end + + context 'when missing artifacts metadata' do + before do + allow(build).to receive(:artifacts_metadata?).and_return(false) + end + + it 'does not raise an error and remove artifacts as failed job' do + execute + + build.reload + expect(deploy_status).to be_failed + expect(build.artifacts?).to be_falsey end end end diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb index f3f97b6b921..f48d466d263 100644 --- a/spec/services/projects/update_service_spec.rb +++ b/spec/services/projects/update_service_spec.rb @@ -190,7 +190,7 @@ describe Projects::UpdateService do context 'when renaming a project' do let(:repository_storage) { 'default' } - let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] } + let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage].legacy_disk_path } context 'with legacy storage' do let(:project) { create(:project, :legacy_storage, :repository, creator: user, namespace: user.namespace) } @@ -241,6 +241,27 @@ describe Projects::UpdateService do }) end end + + context 'when updating #pages_https_only', :https_pages_enabled do + subject(:call_service) do + update_project(project, admin, pages_https_only: false) + end + + it 'updates the attribute' do + expect { call_service } + .to change { project.pages_https_only? } + .to(false) + end + + it 'calls Projects::UpdatePagesConfigurationService' do + expect(Projects::UpdatePagesConfigurationService) + .to receive(:new) + .with(project) + .and_call_original + + call_service + end + end end describe '#run_auto_devops_pipeline?' 
do diff --git a/spec/services/protected_branches/create_service_spec.rb b/spec/services/protected_branches/create_service_spec.rb index 53b3e5e365d..786493c3577 100644 --- a/spec/services/protected_branches/create_service_spec.rb +++ b/spec/services/protected_branches/create_service_spec.rb @@ -35,5 +35,18 @@ describe ProtectedBranches::CreateService do expect { service.execute }.to raise_error(Gitlab::Access::AccessDeniedError) end end + + context 'when a policy restricts rule creation' do + before do + policy = instance_double(ProtectedBranchPolicy, can?: false) + expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) + end + + it "prevents creation of the protected branch rule" do + expect do + service.execute + end.to raise_error(Gitlab::Access::AccessDeniedError) + end + end end end diff --git a/spec/services/protected_branches/destroy_service_spec.rb b/spec/services/protected_branches/destroy_service_spec.rb new file mode 100644 index 00000000000..4a391b6c25c --- /dev/null +++ b/spec/services/protected_branches/destroy_service_spec.rb @@ -0,0 +1,30 @@ +require 'spec_helper' + +describe ProtectedBranches::DestroyService do + let(:protected_branch) { create(:protected_branch) } + let(:project) { protected_branch.project } + let(:user) { project.owner } + + describe '#execute' do + subject(:service) { described_class.new(project, user) } + + it 'destroys a protected branch' do + service.execute(protected_branch) + + expect(protected_branch).to be_destroyed + end + + context 'when a policy restricts rule deletion' do + before do + policy = instance_double(ProtectedBranchPolicy, can?: false) + expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) + end + + it "prevents deletion of the protected branch rule" do + expect do + service.execute(protected_branch) + end.to raise_error(Gitlab::Access::AccessDeniedError) + end + end + end +end diff --git a/spec/services/protected_branches/update_service_spec.rb b/spec/services/protected_branches/update_service_spec.rb index 9fa5983db66..3f6f8e09565 100644 --- a/spec/services/protected_branches/update_service_spec.rb +++ b/spec/services/protected_branches/update_service_spec.rb @@ -22,5 +22,16 @@ describe ProtectedBranches::UpdateService do expect { service.execute(protected_branch) }.to raise_error(Gitlab::Access::AccessDeniedError) end end + + context 'when a policy restricts rule creation' do + before do + policy = instance_double(ProtectedBranchPolicy, can?: false) + expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) + end + + it "prevents creation of the protected branch rule" do + expect { service.execute(protected_branch) }.to raise_error(Gitlab::Access::AccessDeniedError) + end + end end end diff --git a/spec/services/protected_tags/destroy_service_spec.rb b/spec/services/protected_tags/destroy_service_spec.rb new file mode 100644 index 00000000000..e12f53a2221 --- /dev/null +++ b/spec/services/protected_tags/destroy_service_spec.rb @@ -0,0 +1,17 @@ +require 'spec_helper' + +describe ProtectedTags::DestroyService do + let(:protected_tag) { create(:protected_tag) } + let(:project) { protected_tag.project } + let(:user) { project.owner } + + describe '#execute' do + subject(:service) { described_class.new(project, user) } + + it 'destroy a protected tag' do + service.execute(protected_tag) + + expect(protected_tag).to be_destroyed + end + end +end diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb index a3893188c6e..893804f1470 100644 --- 
a/spec/services/system_note_service_spec.rb +++ b/spec/services/system_note_service_spec.rb @@ -743,7 +743,7 @@ describe SystemNoteService do expect(cross_reference(type)).to eq("Events for #{type.pluralize.humanize.downcase} are disabled.") end - it "blocks cross reference when #{type.underscore}_events is true" do + it "creates cross reference when #{type.underscore}_events is true" do jira_tracker.update("#{type}_events" => true) expect(cross_reference(type)).to eq(success_message) @@ -909,7 +909,13 @@ describe SystemNoteService do it 'sets the note text' do noteable.update_attribute(:time_estimate, 277200) - expect(subject.note).to eq "changed time estimate to 1w 4d 5h" + expect(subject.note).to eq "changed time estimate to 1w 4d 5h," + end + + it 'appends a comma to separate the note from the update_at time' do + noteable.update_attribute(:time_estimate, 277200) + + expect(subject.note).to end_with(',') end end diff --git a/spec/services/verify_pages_domain_service_spec.rb b/spec/services/verify_pages_domain_service_spec.rb index 576db1dde2d..d974cc0226f 100644 --- a/spec/services/verify_pages_domain_service_spec.rb +++ b/spec/services/verify_pages_domain_service_spec.rb @@ -93,6 +93,25 @@ describe VerifyPagesDomainService do expect(domain).not_to be_enabled end end + + context 'invalid domain' do + let(:domain) { build(:pages_domain, :expired, :with_missing_chain) } + + before do + domain.save(validate: false) + end + + it 'can be disabled' do + error_status[:message] += '. It is now disabled.' + + stub_resolver + + expect(service.execute).to eq(error_status) + + expect(domain).not_to be_verified + expect(domain).not_to be_enabled + end + end end context 'timeout behaviour' do diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb index 21910e69d2e..2ef2e61babc 100644 --- a/spec/services/web_hook_service_spec.rb +++ b/spec/services/web_hook_service_spec.rb @@ -14,6 +14,20 @@ describe WebHookService do end let(:service_instance) { described_class.new(project_hook, data, :push_hooks) } + describe '#initialize' do + it 'allow_local_requests is true if hook is a SystemHook' do + instance = described_class.new(build(:system_hook), data, :system_hook) + expect(instance.request_options[:allow_local_requests]).to be_truthy + end + + it 'allow_local_requests is false if hook is not a SystemHook' do + %i(project_hook service_hook web_hook_log).each do |hook| + instance = described_class.new(build(hook), data, hook) + expect(instance.request_options[:allow_local_requests]).to be_falsey + end + end + end + describe '#execute' do before do project.hooks << [project_hook] diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 9f6f0204a16..83664bae046 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -66,6 +66,7 @@ RSpec.configure do |config| config.include MigrationsHelpers, :migration config.include StubFeatureFlags config.include StubENV + config.include ExpectOffense config.infer_spec_type_from_file_location! @@ -97,6 +98,10 @@ RSpec.configure do |config| TestEnv.init end + config.after(:all) do + TestEnv.clean_test_path + end + config.before(:example) do # Skip pre-receive hook check so we can use the web editor and merge. 
allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil]) @@ -104,7 +109,8 @@ RSpec.configure do |config| allow_any_instance_of(Gitlab::Git::GitlabProjects).to receive(:fork_repository).and_wrap_original do |m, *args| m.call(*args) - shard_path, repository_relative_path = args + shard_name, repository_relative_path = args + shard_path = Gitlab.config.repositories.storages.fetch(shard_name).legacy_disk_path # We can't leave the hooks in place after a fork, as those would fail in tests # The "internal" API is not available FileUtils.rm_rf(File.join(shard_path, repository_relative_path, 'hooks')) @@ -197,6 +203,22 @@ RSpec.configure do |config| Ability.allowed?(*args) end end + + config.before(:each, :http_pages_enabled) do |_| + allow(Gitlab.config.pages).to receive(:external_http).and_return(['1.1.1.1:80']) + end + + config.before(:each, :https_pages_enabled) do |_| + allow(Gitlab.config.pages).to receive(:external_https).and_return(['1.1.1.1:443']) + end + + config.before(:each, :http_pages_disabled) do |_| + allow(Gitlab.config.pages).to receive(:external_http).and_return(false) + end + + config.before(:each, :https_pages_disabled) do |_| + allow(Gitlab.config.pages).to receive(:external_https).and_return(false) + end end # add simpler way to match asset paths containing digest strings diff --git a/spec/support/bare_repo_operations.rb b/spec/support/bare_repo_operations.rb index 8eeaa37d3c5..3f4a4243cb6 100644 --- a/spec/support/bare_repo_operations.rb +++ b/spec/support/bare_repo_operations.rb @@ -1,19 +1,15 @@ require 'zlib' class BareRepoOperations - # The ID of empty tree. - # See http://stackoverflow.com/a/40884093/1856239 and https://github.com/git/git/blob/3ad8b5bf26362ac67c9020bf8c30eee54a84f56d/cache.h#L1011-L1012 - EMPTY_TREE_ID = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'.freeze - include Gitlab::Popen def initialize(path_to_repo) @path_to_repo = path_to_repo end - def commit_tree(tree_id, msg, parent: EMPTY_TREE_ID) + def commit_tree(tree_id, msg, parent: Gitlab::Git::EMPTY_TREE_ID) commit_tree_args = ['commit-tree', tree_id, '-m', msg] - commit_tree_args += ['-p', parent] unless parent == EMPTY_TREE_ID + commit_tree_args += ['-p', parent] unless parent == Gitlab::Git::EMPTY_TREE_ID commit_id = execute(commit_tree_args) commit_id[0] @@ -21,7 +17,7 @@ class BareRepoOperations # Based on https://stackoverflow.com/a/25556917/1856239 def commit_file(file, dst_path, branch = 'master') - head_id = execute(['show', '--format=format:%H', '--no-patch', branch], allow_failure: true)[0] || EMPTY_TREE_ID + head_id = execute(['show', '--format=format:%H', '--no-patch', branch], allow_failure: true)[0] || Gitlab::Git::EMPTY_TREE_ID execute(['read-tree', '--empty']) execute(['read-tree', head_id]) diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb index 8603b7f3e2c..9ddcc5f2fbf 100644 --- a/spec/support/capybara.rb +++ b/spec/support/capybara.rb @@ -7,6 +7,16 @@ require 'selenium-webdriver' # Give CI some extra time timeout = (ENV['CI'] || ENV['CI_SERVER']) ? 
60 : 30 +# Define an error class for JS console messages +JSConsoleError = Class.new(StandardError) + +# Filter out innocuous JS console messages +JS_CONSOLE_FILTER = Regexp.union([ + '"[HMR] Waiting for update signal from WDS..."', + '"[WDS] Hot Module Replacement enabled."', + "Download the Vue Devtools extension" +]) + Capybara.register_driver :chrome do |app| capabilities = Selenium::WebDriver::Remote::Capabilities.chrome( # This enables access to logs with `page.driver.manage.get_log(:browser)` @@ -25,13 +35,7 @@ Capybara.register_driver :chrome do |app| options.add_argument("no-sandbox") # Run headless by default unless CHROME_HEADLESS specified - unless ENV['CHROME_HEADLESS'] =~ /^(false|no|0)$/i - options.add_argument("headless") - - # Chrome documentation says this flag is needed for now - # https://developers.google.com/web/updates/2017/04/headless-chrome#cli - options.add_argument("disable-gpu") - end + options.add_argument("headless") unless ENV['CHROME_HEADLESS'] =~ /^(false|no|0)$/i # Disable /dev/shm use in CI. See https://gitlab.com/gitlab-org/gitlab-ee/issues/4252 options.add_argument("disable-dev-shm-usage") if ENV['CI'] || ENV['CI_SERVER'] @@ -78,6 +82,15 @@ RSpec.configure do |config| end config.after(:example, :js) do |example| + # when a test fails, display any messages in the browser's console + if example.exception + console = page.driver.browser.manage.logs.get(:browser)&.reject { |log| log.message =~ JS_CONSOLE_FILTER } + if console.present? + message = "Unexpected browser console output:\n" + console.map(&:message).join("\n") + raise JSConsoleError, message + end + end + # prevent localStorage from introducing side effects based on test order unless ['', 'about:blank', 'data:,'].include? Capybara.current_session.driver.browser.current_url execute_script("localStorage.clear();") diff --git a/spec/support/commit_trailers_spec_helper.rb b/spec/support/commit_trailers_spec_helper.rb new file mode 100644 index 00000000000..add359946db --- /dev/null +++ b/spec/support/commit_trailers_spec_helper.rb @@ -0,0 +1,41 @@ +module CommitTrailersSpecHelper + extend ActiveSupport::Concern + + def expect_to_have_user_link_with_avatar(doc, user:, trailer:, email: nil) + wrapper = find_user_wrapper(doc, trailer) + + expect_to_have_links_with_url_and_avatar(wrapper, urls.user_url(user), email || user.email) + expect(wrapper.attribute('data-user').value).to eq user.id.to_s + end + + def expect_to_have_mailto_link(doc, email:, trailer:) + wrapper = find_user_wrapper(doc, trailer) + + expect_to_have_links_with_url_and_avatar(wrapper, "mailto:#{CGI.escape_html(email)}", email) + end + + def expect_to_have_links_with_url_and_avatar(doc, url, email) + expect(doc).not_to be_nil + expect(doc.xpath("a[position()<3 and @href='#{url}']").size).to eq 2 + expect(doc.xpath("a[position()=3 and @href='mailto:#{CGI.escape_html(email)}']").size).to eq 1 + expect(doc.css('img').size).to eq 1 + end + + def find_user_wrapper(doc, trailer) + doc.xpath("descendant-or-self::node()[@data-trailer='#{trailer}']").first + end + + def build_commit_message(trailer:, name:, email:) + message = trailer_line(trailer, name, email) + + [message, commit_html(message)] + end + + def trailer_line(trailer, name, email) + "#{trailer} #{name} <#{email}>" + end + + def commit_html(message) + "<pre>#{CGI.escape_html(message)}</pre>" + end +end diff --git a/spec/support/cookie_helper.rb b/spec/support/cookie_helper.rb index d72925e1838..5ff7b0b68c9 100644 --- a/spec/support/cookie_helper.rb +++ b/spec/support/cookie_helper.rb 
@@ -2,12 +2,25 @@ # module CookieHelper def set_cookie(name, value, options = {}) + case page.driver + when Capybara::RackTest::Driver + rack_set_cookie(name, value) + else + selenium_set_cookie(name, value, options) + end + end + + def selenium_set_cookie(name, value, options = {}) # Selenium driver will not set cookies for a given domain when the browser is at `about:blank`. # It also doesn't appear to allow overriding the cookie path. loading `/` is the most inclusive. visit options.fetch(:path, '/') unless on_a_page? page.driver.browser.manage.add_cookie(name: name, value: value, **options) end + def rack_set_cookie(name, value) + page.driver.browser.set_cookie("#{name}=#{value}") + end + def get_cookie(name) page.driver.browser.manage.cookie_named(name) end diff --git a/spec/support/features/discussion_comments_shared_example.rb b/spec/support/features/discussion_comments_shared_example.rb index c8662d41769..80604395adf 100644 --- a/spec/support/features/discussion_comments_shared_example.rb +++ b/spec/support/features/discussion_comments_shared_example.rb @@ -81,7 +81,10 @@ shared_examples 'discussion comments' do |resource_name| # on issues page, the menu closes when clicking anywhere, on other pages it will # remain open if clicking divider or menu padding, but should not change button action - if resource_name == 'issue' + # + # if dropdown menu is not toggled (and also not present), + # it's "issue-type" dropdown + if first(menu_selector).nil? expect(find(dropdown_selector)).to have_content 'Comment' find(toggle_selector).click @@ -107,8 +110,10 @@ shared_examples 'discussion comments' do |resource_name| end it 'updates the submit button text and closes the dropdown' do + button = find(submit_selector) + # on issues page, the submit input is a <button>, on other pages it is <input> - if resource_name == 'issue' + if button.tag_name == 'button' expect(find(submit_selector)).to have_content 'Start discussion' else expect(find(submit_selector).value).to eq 'Start discussion' @@ -132,6 +137,8 @@ shared_examples 'discussion comments' do |resource_name| describe 'creating a discussion' do before do find(submit_selector).click + wait_for_requests + find(comments_selector, match: :first) end @@ -197,11 +204,13 @@ shared_examples 'discussion comments' do |resource_name| end it 'updates the submit button text and closes the dropdown' do + button = find(submit_selector) + # on issues page, the submit input is a <button>, on other pages it is <input> - if resource_name == 'issue' - expect(find(submit_selector)).to have_content 'Comment' + if button.tag_name == 'button' + expect(button).to have_content 'Comment' else - expect(find(submit_selector).value).to eq 'Comment' + expect(button.value).to eq 'Comment' end expect(page).not_to have_selector menu_selector diff --git a/spec/support/features/issuable_slash_commands_shared_examples.rb b/spec/support/features/issuable_slash_commands_shared_examples.rb index f61469f673d..1bd6c25100e 100644 --- a/spec/support/features/issuable_slash_commands_shared_examples.rb +++ b/spec/support/features/issuable_slash_commands_shared_examples.rb @@ -2,7 +2,7 @@ # It takes a `issuable_type`, and expect an `issuable`. 
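# A minimal caller sketch (hypothetical, not part of this diff): the including spec
# provides the `issuable` via `let` and passes the issuable type as the parameter, e.g.
#
#   let(:issuable) { create(:issue, project: project) }
#
#   it_behaves_like 'issuable record that supports quick actions in its description and notes', :issue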
shared_examples 'issuable record that supports quick actions in its description and notes' do |issuable_type| - include QuickActionsHelpers + include Spec::Support::Helpers::Features::NotesHelpers let(:master) { create(:user) } let(:project) do @@ -61,7 +61,7 @@ shared_examples 'issuable record that supports quick actions in its description context 'with a note containing commands' do it 'creates a note without the commands and interpret the commands accordingly' do assignee = create(:user, username: 'bob') - write_note("Awesome!\n\n/assign @bob\n\n/label ~bug\n\n/milestone %\"ASAP\"") + add_note("Awesome!\n\n/assign @bob\n\n/label ~bug\n\n/milestone %\"ASAP\"") expect(page).to have_content 'Awesome!' expect(page).not_to have_content '/assign @bob' @@ -82,7 +82,7 @@ shared_examples 'issuable record that supports quick actions in its description context 'with a note containing only commands' do it 'does not create a note but interpret the commands accordingly' do assignee = create(:user, username: 'bob') - write_note("/assign @bob\n\n/label ~bug\n\n/milestone %\"ASAP\"") + add_note("/assign @bob\n\n/label ~bug\n\n/milestone %\"ASAP\"") expect(page).not_to have_content '/assign @bob' expect(page).not_to have_content '/label ~bug' @@ -105,7 +105,7 @@ shared_examples 'issuable record that supports quick actions in its description context "when current user can close #{issuable_type}" do it "closes the #{issuable_type}" do - write_note("/close") + add_note("/close") expect(page).not_to have_content '/close' expect(page).to have_content 'Commands applied' @@ -125,7 +125,7 @@ shared_examples 'issuable record that supports quick actions in its description end it "does not close the #{issuable_type}" do - write_note("/close") + add_note("/close") expect(page).not_to have_content 'Commands applied' @@ -142,7 +142,7 @@ shared_examples 'issuable record that supports quick actions in its description context "when current user can reopen #{issuable_type}" do it "reopens the #{issuable_type}" do - write_note("/reopen") + add_note("/reopen") expect(page).not_to have_content '/reopen' expect(page).to have_content 'Commands applied' @@ -162,7 +162,7 @@ shared_examples 'issuable record that supports quick actions in its description end it "does not reopen the #{issuable_type}" do - write_note("/reopen") + add_note("/reopen") expect(page).not_to have_content 'Commands applied' @@ -174,7 +174,7 @@ shared_examples 'issuable record that supports quick actions in its description context "with a note changing the #{issuable_type}'s title" do context "when current user can change title of #{issuable_type}" do it "reopens the #{issuable_type}" do - write_note("/title Awesome new title") + add_note("/title Awesome new title") expect(page).not_to have_content '/title' expect(page).to have_content 'Commands applied' @@ -194,7 +194,7 @@ shared_examples 'issuable record that supports quick actions in its description end it "does not change the #{issuable_type} title" do - write_note("/title Awesome new title") + add_note("/title Awesome new title") expect(page).not_to have_content 'Commands applied' @@ -205,7 +205,7 @@ shared_examples 'issuable record that supports quick actions in its description context "with a note marking the #{issuable_type} as todo" do it "creates a new todo for the #{issuable_type}" do - write_note("/todo") + add_note("/todo") expect(page).not_to have_content '/todo' expect(page).to have_content 'Commands applied' @@ -236,7 +236,7 @@ shared_examples 'issuable record that supports quick actions in 
its description expect(todo.author).to eq master expect(todo.user).to eq master - write_note("/done") + add_note("/done") expect(page).not_to have_content '/done' expect(page).to have_content 'Commands applied' @@ -249,7 +249,7 @@ shared_examples 'issuable record that supports quick actions in its description it "creates a new todo for the #{issuable_type}" do expect(issuable.subscribed?(master, project)).to be_falsy - write_note("/subscribe") + add_note("/subscribe") expect(page).not_to have_content '/subscribe' expect(page).to have_content 'Commands applied' @@ -266,7 +266,7 @@ shared_examples 'issuable record that supports quick actions in its description it "creates a new todo for the #{issuable_type}" do expect(issuable.subscribed?(master, project)).to be_truthy - write_note("/unsubscribe") + add_note("/unsubscribe") expect(page).not_to have_content '/unsubscribe' expect(page).to have_content 'Commands applied' @@ -277,7 +277,7 @@ shared_examples 'issuable record that supports quick actions in its description context "with a note assigning the #{issuable_type} to the current user" do it "assigns the #{issuable_type} to the current user" do - write_note("/assign me") + add_note("/assign me") expect(page).not_to have_content '/assign me' expect(page).to have_content 'Commands applied' diff --git a/spec/support/filter_spec_helper.rb b/spec/support/filter_spec_helper.rb index b871b7ffc90..721d359c2ee 100644 --- a/spec/support/filter_spec_helper.rb +++ b/spec/support/filter_spec_helper.rb @@ -18,6 +18,11 @@ module FilterSpecHelper context.reverse_merge!(project: project) end + render_context = Banzai::RenderContext + .new(context[:project], context[:current_user]) + + context = context.merge(render_context: render_context) + described_class.call(html, context) end diff --git a/spec/support/filtered_search_helpers.rb b/spec/support/filtered_search_helpers.rb index f3f96bd1f0a..5f42ff77fb2 100644 --- a/spec/support/filtered_search_helpers.rb +++ b/spec/support/filtered_search_helpers.rb @@ -21,6 +21,29 @@ module FilteredSearchHelpers end end + # Select a label clicking in the search dropdown instead + # of entering label names on the input. 
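# A hypothetical usage sketch (the label name and count below are illustrative only),
# assuming the including spec has an issues list with filtered search rendered:
#
#   select_label_on_dropdown('bug')
#   expect_issues_list_count(2)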
+ def select_label_on_dropdown(label_title) + input_filtered_search("label:", submit: false) + + within('#js-dropdown-label') do + wait_for_requests + + find('li', text: label_title).click + end + + filtered_search.send_keys(:enter) + end + + def expect_issues_list_count(open_count, closed_count = 0) + all_count = open_count + closed_count + + expect(page).to have_issuable_counts(open: open_count, closed: closed_count, all: all_count) + page.within '.issues-list' do + expect(page).to have_selector('.issue', count: open_count) + end + end + # Enables input to be added character by character def input_filtered_search_keys(search_term) # Add an extra space to engage visual tokens diff --git a/spec/support/gitaly.rb b/spec/support/gitaly.rb index c7e8a39a617..9cf541372b5 100644 --- a/spec/support/gitaly.rb +++ b/spec/support/gitaly.rb @@ -1,11 +1,13 @@ RSpec.configure do |config| config.before(:each) do |example| if example.metadata[:disable_gitaly] - allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(false) + # Use 'and_wrap_original' to make sure the arguments are valid + allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_wrap_original { |m, *args| m.call(*args) && false } else next if example.metadata[:skip_gitaly_mock] - allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(true) + # Use 'and_wrap_original' to make sure the arguments are valid + allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_wrap_original { |m, *args| m.call(*args) || true } end end end diff --git a/spec/support/helpers/expect_offense.rb b/spec/support/helpers/expect_offense.rb new file mode 100644 index 00000000000..35718ba90c5 --- /dev/null +++ b/spec/support/helpers/expect_offense.rb @@ -0,0 +1,20 @@ +require 'rubocop/rspec/support' + +# https://github.com/backus/rubocop-rspec/blob/master/spec/support/expect_offense.rb +# rubocop-rspec gem extension of RuboCop's ExpectOffense module. +# +# This mixin is the same as rubocop's ExpectOffense except the default +# filename ends with `_spec.rb` +module ExpectOffense + include RuboCop::RSpec::ExpectOffense + + DEFAULT_FILENAME = 'example_spec.rb'.freeze + + def expect_offense(source, filename = DEFAULT_FILENAME) + super + end + + def expect_no_offenses(source, filename = DEFAULT_FILENAME) + super + end +end diff --git a/spec/support/helpers/features/branches_helpers.rb b/spec/support/helpers/features/branches_helpers.rb new file mode 100644 index 00000000000..3525d9a70a7 --- /dev/null +++ b/spec/support/helpers/features/branches_helpers.rb @@ -0,0 +1,33 @@ +# These helpers allow you to manipulate with sorting features. +# +# Usage: +# describe "..." do +# include Spec::Support::Helpers::Features::BranchesHelpers +# ... 
+# +# create_branch("feature") +# select_branch("master") +# +module Spec + module Support + module Helpers + module Features + module BranchesHelpers + def create_branch(branch_name, source_branch_name = "master") + fill_in("branch_name", with: branch_name) + select_branch(source_branch_name) + click_button("Create branch") + end + + def select_branch(branch_name) + find(".git-revision-dropdown-toggle").click + + page.within("#new-branch-form .dropdown-menu") do + click_link(branch_name) + end + end + end + end + end + end +end diff --git a/spec/support/helpers/features/notes_helpers.rb b/spec/support/helpers/features/notes_helpers.rb new file mode 100644 index 00000000000..1a1d5853a7a --- /dev/null +++ b/spec/support/helpers/features/notes_helpers.rb @@ -0,0 +1,27 @@ +# These helpers allow you to manipulate with notes. +# +# Usage: +# describe "..." do +# include Spec::Support::Helpers::Features::NotesHelpers +# ... +# +# add_note("Hello world!") +# +module Spec + module Support + module Helpers + module Features + module NotesHelpers + def add_note(text) + Sidekiq::Testing.fake! do + page.within(".js-main-target-form") do + fill_in("note[note]", with: text) + find(".js-comment-submit-button").click + end + end + end + end + end + end + end +end diff --git a/spec/support/helpers/features/sorting_helpers.rb b/spec/support/helpers/features/sorting_helpers.rb new file mode 100644 index 00000000000..50457b64745 --- /dev/null +++ b/spec/support/helpers/features/sorting_helpers.rb @@ -0,0 +1,26 @@ +# These helpers allow you to manipulate with sorting features. +# +# Usage: +# describe "..." do +# include Spec::Support::Helpers::Features::SortingHelpers +# ... +# +# sort_by("Last updated") +# +module Spec + module Support + module Helpers + module Features + module SortingHelpers + def sort_by(value) + find('button.dropdown-toggle').click + + page.within('.content ul.dropdown-menu.dropdown-menu-align-right li') do + click_link(value) + end + end + end + end + end + end +end diff --git a/spec/support/http_io/http_io_helpers.rb b/spec/support/http_io/http_io_helpers.rb new file mode 100644 index 00000000000..2c68c2cd9a6 --- /dev/null +++ b/spec/support/http_io/http_io_helpers.rb @@ -0,0 +1,65 @@ +module HttpIOHelpers + def stub_remote_trace_206 + WebMock.stub_request(:get, remote_trace_url) + .to_return { |request| remote_trace_response(request, 206) } + end + + def stub_remote_trace_200 + WebMock.stub_request(:get, remote_trace_url) + .to_return { |request| remote_trace_response(request, 200) } + end + + def stub_remote_trace_500 + WebMock.stub_request(:get, remote_trace_url) + .to_return(status: [500, "Internal Server Error"]) + end + + def remote_trace_url + "http://trace.com/trace" + end + + def remote_trace_response(request, responce_status) + range = request.headers['Range'].match(/bytes=(\d+)-(\d+)/) + + { + status: responce_status, + headers: remote_trace_response_headers(responce_status, range[1].to_i, range[2].to_i), + body: range_trace_body(range[1].to_i, range[2].to_i) + } + end + + def remote_trace_response_headers(responce_status, from, to) + headers = { 'Content-Type' => 'text/plain' } + + if responce_status == 206 + headers.merge('Content-Range' => "bytes #{from}-#{to}/#{remote_trace_size}") + end + + headers + end + + def range_trace_body(from, to) + remote_trace_body[from..to] + end + + def remote_trace_body + @remote_trace_body ||= File.read(expand_fixture_path('trace/sample_trace')) + .force_encoding(Encoding::BINARY) + end + + def remote_trace_size + 
remote_trace_body.bytesize + end + + def set_smaller_buffer_size_than(file_size) + blocks = (file_size / 128) + new_size = (blocks / 2) * 128 + stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size) + end + + def set_larger_buffer_size_than(file_size) + blocks = (file_size / 128) + new_size = (blocks * 2) * 128 + stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size) + end +end diff --git a/spec/support/issuables_list_metadata_shared_examples.rb b/spec/support/issuables_list_metadata_shared_examples.rb index 75982432ab4..e61983c60b4 100644 --- a/spec/support/issuables_list_metadata_shared_examples.rb +++ b/spec/support/issuables_list_metadata_shared_examples.rb @@ -5,9 +5,9 @@ shared_examples 'issuables list meta-data' do |issuable_type, action = nil| %w[fix improve/awesome].each do |source_branch| issuable = if issuable_type == :issue - create(issuable_type, project: project) + create(issuable_type, project: project, author: project.creator) else - create(issuable_type, source_project: project, source_branch: source_branch) + create(issuable_type, source_project: project, source_branch: source_branch, author: project.creator) end @issuable_ids << issuable.id @@ -16,7 +16,7 @@ shared_examples 'issuables list meta-data' do |issuable_type, action = nil| it "creates indexed meta-data object for issuable notes and votes count" do if action - get action + get action, author_id: project.creator.id else get :index, namespace_id: project.namespace, project_id: project end @@ -35,7 +35,7 @@ shared_examples 'issuables list meta-data' do |issuable_type, action = nil| it "doesn't execute any queries with false conditions" do get_action = if action - proc { get action } + proc { get action, author_id: project.creator.id } else proc { get :index, namespace_id: project2.namespace, project_id: project2 } end diff --git a/spec/support/issuables_requiring_filter_shared_examples.rb b/spec/support/issuables_requiring_filter_shared_examples.rb new file mode 100644 index 00000000000..439ef5ed92e --- /dev/null +++ b/spec/support/issuables_requiring_filter_shared_examples.rb @@ -0,0 +1,15 @@ +shared_examples 'issuables requiring filter' do |action| + it "doesn't load any issuables if no filter is set" do + expect_any_instance_of(described_class).not_to receive(:issuables_collection) + + get action + + expect(response).to render_template(action) + end + + it "loads issuables if at least one filter is set" do + expect_any_instance_of(described_class).to receive(:issuables_collection).and_call_original + + get action, author_id: user.id + end +end diff --git a/spec/support/ldap_helpers.rb b/spec/support/ldap_helpers.rb index 081ce0ad7b7..0e87b3d359d 100644 --- a/spec/support/ldap_helpers.rb +++ b/spec/support/ldap_helpers.rb @@ -41,4 +41,9 @@ module LdapHelpers entry end + + def raise_ldap_connection_error + allow_any_instance_of(Gitlab::Auth::LDAP::Adapter) + .to receive(:ldap_search).and_raise(Gitlab::Auth::LDAP::LDAPConnectionError) + end end diff --git a/spec/support/login_helpers.rb b/spec/support/login_helpers.rb index d08183846a0..db34090e971 100644 --- a/spec/support/login_helpers.rb +++ b/spec/support/login_helpers.rb @@ -140,6 +140,10 @@ module LoginHelpers end allow(Gitlab::Auth::OAuth::Provider).to receive_messages(providers: [:saml], config_for: mock_saml_config) stub_omniauth_setting(messages) + stub_saml_authorize_path_helpers + end + + def stub_saml_authorize_path_helpers allow_any_instance_of(Object).to receive(:user_saml_omniauth_authorize_path).and_return('/users/auth/saml') 
allow_any_instance_of(Object).to receive(:omniauth_authorize_path).with(:user, "saml").and_return('/users/auth/saml') end diff --git a/spec/support/matchers/have_emoji.rb b/spec/support/matchers/have_emoji.rb new file mode 100644 index 00000000000..23fb8e9c1c4 --- /dev/null +++ b/spec/support/matchers/have_emoji.rb @@ -0,0 +1,5 @@ +RSpec::Matchers.define :have_emoji do |emoji_name| + match do |actual| + expect(actual).to have_selector("gl-emoji[data-name='#{emoji_name}']") + end +end diff --git a/spec/support/matchers/issuable_matchers.rb b/spec/support/matchers/issuable_matchers.rb new file mode 100644 index 00000000000..f5d9a97051a --- /dev/null +++ b/spec/support/matchers/issuable_matchers.rb @@ -0,0 +1,11 @@ +RSpec::Matchers.define :have_header_with_correct_id_and_link do |level, text, id, parent = ".wiki"| + match do |actual| + node = find("#{parent} h#{level} a#user-content-#{id}") + + expect(node[:href]).to end_with("##{id}") + + # Work around a weird Capybara behavior where calling `parent` on a node + # returns the whole document, not the node's actual parent element + expect(find(:xpath, "#{node.path}/..").text).to eq(text) + end +end diff --git a/spec/support/migrations_helpers.rb b/spec/support/migrations_helpers.rb index 6bf976a2cf9..5d6f662e8fe 100644 --- a/spec/support/migrations_helpers.rb +++ b/spec/support/migrations_helpers.rb @@ -1,6 +1,9 @@ module MigrationsHelpers def table(name) - Class.new(ActiveRecord::Base) { self.table_name = name } + Class.new(ActiveRecord::Base) do + self.table_name = name + self.inheritance_column = :_type_disabled + end end def migrations_paths diff --git a/spec/support/quick_actions_helpers.rb b/spec/support/quick_actions_helpers.rb deleted file mode 100644 index 361190aa352..00000000000 --- a/spec/support/quick_actions_helpers.rb +++ /dev/null @@ -1,10 +0,0 @@ -module QuickActionsHelpers - def write_note(text) - Sidekiq::Testing.fake! 
do - page.within('.js-main-target-form') do - fill_in 'note[note]', with: text - find('.js-comment-submit-button').click - end - end - end -end diff --git a/spec/support/reference_parser_helpers.rb b/spec/support/reference_parser_helpers.rb index 01689194eac..c01897ed1a1 100644 --- a/spec/support/reference_parser_helpers.rb +++ b/spec/support/reference_parser_helpers.rb @@ -2,4 +2,38 @@ module ReferenceParserHelpers def empty_html_link Nokogiri::HTML.fragment('<a></a>').children[0] end + + shared_examples 'no N+1 queries' do + it 'avoids N+1 queries in #nodes_visible_to_user', :request_store do + context = Banzai::RenderContext.new(project, user) + + record_queries = lambda do |links| + ActiveRecord::QueryRecorder.new do + described_class.new(context).nodes_visible_to_user(user, links) + end + end + + control = record_queries.call(control_links) + actual = record_queries.call(actual_links) + + expect(actual.count).to be <= control.count + expect(actual.cached_count).to be <= control.cached_count + end + + it 'avoids N+1 queries in #records_for_nodes', :request_store do + context = Banzai::RenderContext.new(project, user) + + record_queries = lambda do |links| + ActiveRecord::QueryRecorder.new do + described_class.new(context).records_for_nodes(links) + end + end + + control = record_queries.call(control_links) + actual = record_queries.call(actual_links) + + expect(actual.count).to be <= control.count + expect(actual.cached_count).to be <= control.cached_count + end + end end diff --git a/spec/support/shared_examples/controllers/variables_shared_examples.rb b/spec/support/shared_examples/controllers/variables_shared_examples.rb index d7acf8c0032..b615a8f54cf 100644 --- a/spec/support/shared_examples/controllers/variables_shared_examples.rb +++ b/spec/support/shared_examples/controllers/variables_shared_examples.rb @@ -16,19 +16,19 @@ shared_examples 'PATCH #update updates variables' do let(:variable_attributes) do { id: variable.id, key: variable.key, - value: variable.value, + secret_value: variable.value, protected: variable.protected?.to_s } end let(:new_variable_attributes) do { key: 'new_key', - value: 'dummy_value', + secret_value: 'dummy_value', protected: 'false' } end context 'with invalid new variable parameters' do let(:variables_attributes) do [ - variable_attributes.merge(value: 'other_value'), + variable_attributes.merge(secret_value: 'other_value'), new_variable_attributes.merge(key: '...?') ] end @@ -52,7 +52,7 @@ shared_examples 'PATCH #update updates variables' do let(:variables_attributes) do [ new_variable_attributes, - new_variable_attributes.merge(value: 'other_value') + new_variable_attributes.merge(secret_value: 'other_value') ] end @@ -74,7 +74,7 @@ shared_examples 'PATCH #update updates variables' do context 'with valid new variable parameters' do let(:variables_attributes) do [ - variable_attributes.merge(value: 'other_value'), + variable_attributes.merge(secret_value: 'other_value'), new_variable_attributes ] end diff --git a/spec/support/shared_examples/models/atomic_internal_id_spec.rb b/spec/support/shared_examples/models/atomic_internal_id_spec.rb new file mode 100644 index 00000000000..144af4fc475 --- /dev/null +++ b/spec/support/shared_examples/models/atomic_internal_id_spec.rb @@ -0,0 +1,40 @@ +require 'spec_helper' + +shared_examples_for 'AtomicInternalId' do + describe '.has_internal_id' do + describe 'Module inclusion' do + subject { described_class } + + it { is_expected.to include_module(AtomicInternalId) } + end + + describe 'Validation' do + 
subject { instance } + + before do + allow(InternalId).to receive(:generate_next).and_return(nil) + end + + it { is_expected.to validate_presence_of(internal_id_attribute) } + it { is_expected.to validate_numericality_of(internal_id_attribute) } + end + + describe 'internal id generation' do + subject { instance.save! } + + it 'calls InternalId.generate_next and sets internal id attribute' do + iid = rand(1..1000) + + expect(InternalId).to receive(:generate_next).with(instance, scope_attrs, usage, any_args).and_return(iid) + subject + expect(instance.public_send(internal_id_attribute)).to eq(iid) + end + + it 'does not overwrite an existing internal id' do + instance.public_send("#{internal_id_attribute}=", 4711) + + expect { subject }.not_to change { instance.public_send(internal_id_attribute) } + end + end + end +end diff --git a/spec/support/shared_examples/serializers/note_entity_examples.rb b/spec/support/shared_examples/serializers/note_entity_examples.rb new file mode 100644 index 00000000000..9097c8e5513 --- /dev/null +++ b/spec/support/shared_examples/serializers/note_entity_examples.rb @@ -0,0 +1,42 @@ +shared_examples 'note entity' do + subject { entity.as_json } + + context 'basic note' do + it 'exposes correct elements' do + expect(subject).to include(:type, :author, :note, :note_html, :current_user, + :discussion_id, :emoji_awardable, :award_emoji, :report_abuse_path, :attachment) + end + + it 'does not expose elements for specific notes cases' do + expect(subject).not_to include(:last_edited_by, :last_edited_at, :system_note_icon_name) + end + + it 'exposes author correctly' do + expect(subject[:author]).to include(:id, :name, :username, :state, :avatar_url, :path) + end + + it 'does not expose web_url for author' do + expect(subject[:author]).not_to include(:web_url) + end + end + + context 'when note was edited' do + before do + note.update(updated_at: 1.minute.from_now, updated_by: user) + end + + it 'exposes last_edited_at and last_edited_by elements' do + expect(subject).to include(:last_edited_at, :last_edited_by) + end + end + + context 'when note is a system note' do + before do + note.update(system: true) + end + + it 'exposes system_note_icon_name element' do + expect(subject).to include(:system_note_icon_name) + end + end +end diff --git a/spec/support/shared_examples/services/boards/issues_move_service.rb b/spec/support/shared_examples/services/boards/issues_move_service.rb index 4a4fbaa3a0e..737863ea411 100644 --- a/spec/support/shared_examples/services/boards/issues_move_service.rb +++ b/spec/support/shared_examples/services/boards/issues_move_service.rb @@ -1,4 +1,4 @@ -shared_examples 'issues move service' do +shared_examples 'issues move service' do |group| context 'when moving an issue between lists' do let(:issue) { create(:labeled_issue, project: project, labels: [bug, development]) } let(:params) { { board_id: board1.id, from_list_id: list1.id, to_list_id: list2.id } } @@ -83,5 +83,18 @@ shared_examples 'issues move service' do expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position) end + + if group + context 'when on a group board' do + it 'sends the board_group_id parameter' do + params.merge!(move_after_id: issue1.id, move_before_id: issue2.id) + + match_params = { move_between_ids: [issue1.id, issue2.id], board_group_id: parent.id } + expect(Issues::UpdateService).to receive(:new).with(issue.project, user, match_params).and_return(double(execute: build(:issue))) + + described_class.new(parent, user, 
params).execute(issue) + end + end + end end end diff --git a/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb b/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb index 934d53e7bba..93c21a99e59 100644 --- a/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb +++ b/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb @@ -4,7 +4,7 @@ shared_examples "matches the method pattern" do |method| let(:pattern) { patterns[method] } it do - return skip "No pattern provided, skipping." unless pattern + skip "No pattern provided, skipping." unless pattern expect(target.method(method).call(*args)).to match(pattern) end diff --git a/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb new file mode 100644 index 00000000000..6352f1527cd --- /dev/null +++ b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb @@ -0,0 +1,138 @@ +shared_context 'with storage' do |store, **stub_params| + before do + subject.object_store = store + end +end + +shared_examples "migrates" do |to_store:, from_store: nil| + let(:to) { to_store } + let(:from) { from_store || subject.object_store } + + def migrate(to) + subject.migrate!(to) + end + + def checksum + Digest::SHA256.hexdigest(subject.read) + end + + before do + migrate(from) + end + + it 'returns corresponding file type' do + expect(subject).to be_an(CarrierWave::Uploader::Base) + expect(subject).to be_a(ObjectStorage::Concern) + + if from == described_class::Store::REMOTE + expect(subject.file).to be_a(CarrierWave::Storage::Fog::File) + elsif from == described_class::Store::LOCAL + expect(subject.file).to be_a(CarrierWave::SanitizedFile) + else + raise 'Unexpected file type' + end + end + + it 'does nothing when migrating to the current store' do + expect { migrate(from) }.not_to change { subject.object_store }.from(from) + end + + it 'migrate to the specified store' do + from_checksum = checksum + + expect { migrate(to) }.to change { subject.object_store }.from(from).to(to) + expect(checksum).to eq(from_checksum) + end + + it 'removes the original file after the migration' do + original_file = subject.file.path + migrate(to) + + expect(File.exist?(original_file)).to be_falsey + end + + it 'can access to the original file during migration' do + file = subject.file + + allow(subject).to receive(:delete_migrated_file) { } # Remove as a callback of :migrate + allow(subject).to receive(:record_upload) { } # Remove as a callback of :store (:record_upload) + + expect(file.exists?).to be_truthy + expect { migrate(to) }.not_to change { file.exists? } + end + + context 'when migrate! is not occupied by another process' do + it 'executes migrate!' do + expect(subject).to receive(:object_store=).at_least(1) + + migrate(to) + end + + it 'executes use_file' do + expect(subject).to receive(:unsafe_use_file).once + + subject.use_file + end + end + + context 'when migrate! is occupied by another process' do + let(:exclusive_lease_key) { "object_storage_migrate:#{subject.model.class}:#{subject.model.id}" } + + before do + @uuid = Gitlab::ExclusiveLease.new(exclusive_lease_key, timeout: 1.hour.to_i).try_obtain + end + + it 'does not execute migrate!' do + expect(subject).not_to receive(:unsafe_migrate!) 
+ + expect { migrate(to) }.to raise_error('exclusive lease already taken') + end + + it 'does not execute use_file' do + expect(subject).not_to receive(:unsafe_use_file) + + expect { subject.use_file }.to raise_error('exclusive lease already taken') + end + + after do + Gitlab::ExclusiveLease.cancel(exclusive_lease_key, @uuid) + end + end + + context 'migration is unsuccessful' do + shared_examples "handles gracefully" do |error:| + it 'does not update the object_store' do + expect { migrate(to) }.to raise_error(error) + expect(subject.object_store).to eq(from) + end + + it 'does not delete the original file' do + expect { migrate(to) }.to raise_error(error) + expect(subject.exists?).to be_truthy + end + end + + context 'when the store is not supported' do + let(:to) { -1 } # not a valid store + + include_examples "handles gracefully", error: ObjectStorage::UnknownStoreError + end + + context 'upon a fog failure' do + before do + storage_class = subject.send(:storage_for, to).class + expect_any_instance_of(storage_class).to receive(:store!).and_raise("Store failure.") + end + + include_examples "handles gracefully", error: "Store failure." + end + + context 'upon a database failure' do + before do + expect(uploader).to receive(:persist_object_store!).and_raise("ActiveRecord failure.") + end + + include_examples "handles gracefully", error: "ActiveRecord failure." + end + end +end diff --git a/spec/support/slack_mattermost_notifications_shared_examples.rb b/spec/support/slack_mattermost_notifications_shared_examples.rb index 5e1ce19eafb..07bc3a51fd8 100644 --- a/spec/support/slack_mattermost_notifications_shared_examples.rb +++ b/spec/support/slack_mattermost_notifications_shared_examples.rb @@ -4,6 +4,11 @@ RSpec.shared_examples 'slack or mattermost notifications' do let(:chat_service) { described_class.new } let(:webhook_url) { 'https://example.gitlab.com/' } + def execute_with_options(options) + receive(:new).with(webhook_url, options) + .and_return(double(:slack_service).as_null_object) + end + describe "Associations" do it { is_expected.to belong_to :project } it { is_expected.to have_one :service_hook } @@ -33,6 +38,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do let(:project) { create(:project, :repository) } let(:username) { 'slack_username' } let(:channel) { 'slack_channel' } + let(:issue_service_options) { { title: 'Awesome issue', description: 'please fix' } } let(:push_sample_data) do Gitlab::DataBuilder::Push.build_sample(project, user) @@ -48,12 +54,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do WebMock.stub_request(:post, webhook_url) - opts = { - title: 'Awesome issue', - description: 'please fix' - } - - issue_service = Issues::CreateService.new(project, user, opts) + issue_service = Issues::CreateService.new(project, user, issue_service_options) @issue = issue_service.execute @issues_sample_data = issue_service.hook_data(@issue, 'open') @@ -164,6 +165,26 @@ RSpec.shared_examples 'slack or mattermost notifications' do chat_service.execute(@issues_sample_data) end + context 'for confidential issues' do + let(:issue_service_options) { { title: 'Secret', confidential: true } } + + it "uses confidential issue channel" do + chat_service.update_attributes(confidential_issue_channel: 'confidential') + + expect(Slack::Notifier).to execute_with_options(channel: 'confidential') + + chat_service.execute(@issues_sample_data) + end + + it 'falls back to issue channel' do + chat_service.update_attributes(issue_channel: 'fallback_channel') + + 
expect(Slack::Notifier).to execute_with_options(channel: 'fallback_channel') + + chat_service.execute(@issues_sample_data) + end + end + it "uses the right channel for wiki event" do chat_service.update_attributes(wiki_page_channel: "random") @@ -194,6 +215,32 @@ RSpec.shared_examples 'slack or mattermost notifications' do chat_service.execute(note_data) end + + context 'for confidential notes' do + before do + issue_note.noteable.update!(confidential: true) + end + + it "uses confidential channel" do + chat_service.update_attributes(confidential_note_channel: "confidential") + + note_data = Gitlab::DataBuilder::Note.build(issue_note, user) + + expect(Slack::Notifier).to execute_with_options(channel: 'confidential') + + chat_service.execute(note_data) + end + + it 'falls back to note channel' do + chat_service.update_attributes(note_channel: "fallback_channel") + + note_data = Gitlab::DataBuilder::Note.build(issue_note, user) + + expect(Slack::Notifier).to execute_with_options(channel: 'fallback_channel') + + chat_service.execute(note_data) + end + end end end end @@ -248,8 +295,9 @@ RSpec.shared_examples 'slack or mattermost notifications' do create(:note_on_issue, project: project, note: "issue note") end + let(:data) { Gitlab::DataBuilder::Note.build(issue_note, user) } + it "calls Slack API for issue comment events" do - data = Gitlab::DataBuilder::Note.build(issue_note, user) chat_service.execute(data) expect(WebMock).to have_requested(:post, webhook_url).once diff --git a/spec/support/sorting_helper.rb b/spec/support/sorting_helper.rb deleted file mode 100644 index 577518d726c..00000000000 --- a/spec/support/sorting_helper.rb +++ /dev/null @@ -1,18 +0,0 @@ -# Helper allows you to sort items -# -# Params -# value - value for sorting -# -# Usage: -# include SortingHelper -# -# sorting_by('Oldest updated') -# -module SortingHelper - def sorting_by(value) - find('button.dropdown-toggle').click - page.within('.content ul.dropdown-menu.dropdown-menu-align-right li') do - click_link value - end - end -end diff --git a/spec/support/stored_repositories.rb b/spec/support/stored_repositories.rb index 52e47ae2d34..21995c89a6e 100644 --- a/spec/support/stored_repositories.rb +++ b/spec/support/stored_repositories.rb @@ -4,7 +4,7 @@ RSpec.configure do |config| end config.before(:all, :broken_storage) do - FileUtils.rm_rf Gitlab.config.repositories.storages.broken['path'] + FileUtils.rm_rf Gitlab.config.repositories.storages.broken.legacy_disk_path end config.before(:each, :broken_storage) do diff --git a/spec/support/stub_configuration.rb b/spec/support/stub_configuration.rb index 9f08c139322..a75a3eaefcb 100644 --- a/spec/support/stub_configuration.rb +++ b/spec/support/stub_configuration.rb @@ -45,13 +45,21 @@ module StubConfiguration allow(Gitlab.config.lfs).to receive_messages(to_settings(messages)) end + def stub_artifacts_setting(messages) + allow(Gitlab.config.artifacts).to receive_messages(to_settings(messages)) + end + def stub_storage_settings(messages) messages.deep_stringify_keys! 
# Default storage is always required messages['default'] ||= Gitlab.config.repositories.storages.default - messages.each do |storage_name, storage_settings| - storage_settings['path'] = TestEnv.repos_path unless storage_settings.key?('path') + messages.each do |storage_name, storage_hash| + if !storage_hash.key?('path') || storage_hash['path'] == Gitlab::GitalyClient::StorageSettings::Deprecated + storage_hash['path'] = TestEnv.repos_path + end + + messages[storage_name] = Gitlab::GitalyClient::StorageSettings.new(storage_hash.to_h) end allow(Gitlab.config.repositories).to receive(:storages).and_return(Settingslogic.new(messages)) diff --git a/spec/support/stub_object_storage.rb b/spec/support/stub_object_storage.rb new file mode 100644 index 00000000000..6e88641da42 --- /dev/null +++ b/spec/support/stub_object_storage.rb @@ -0,0 +1,48 @@ +module StubConfiguration + def stub_object_storage_uploader( + config:, + uploader:, + remote_directory:, + enabled: true, + proxy_download: false, + background_upload: false, + direct_upload: false + ) + allow(config).to receive(:enabled) { enabled } + allow(config).to receive(:proxy_download) { proxy_download } + allow(config).to receive(:background_upload) { background_upload } + allow(config).to receive(:direct_upload) { direct_upload } + + return unless enabled + + Fog.mock! + + ::Fog::Storage.new(uploader.object_store_credentials).tap do |connection| + begin + connection.directories.create(key: remote_directory) + rescue Excon::Error::Conflict + end + end + end + + def stub_artifacts_object_storage(**params) + stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store, + uploader: JobArtifactUploader, + remote_directory: 'artifacts', + **params) + end + + def stub_lfs_object_storage(**params) + stub_object_storage_uploader(config: Gitlab.config.lfs.object_store, + uploader: LfsObjectUploader, + remote_directory: 'lfs-objects', + **params) + end + + def stub_uploads_object_storage(uploader = described_class, **params) + stub_object_storage_uploader(config: Gitlab.config.uploads.object_store, + uploader: uploader, + remote_directory: 'uploads', + **params) + end +end diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb index 01321989f01..d87f265cdf0 100644 --- a/spec/support/test_env.rb +++ b/spec/support/test_env.rb @@ -62,6 +62,7 @@ module TestEnv }.freeze TMP_TEST_PATH = Rails.root.join('tmp', 'tests', '**') + REPOS_STORAGE = 'default'.freeze # Test environment # @@ -225,7 +226,7 @@ module TestEnv end def repos_path - Gitlab.config.repositories.storages.default['path'] + Gitlab.config.repositories.storages[REPOS_STORAGE].legacy_disk_path end def backup_path diff --git a/spec/tasks/cache/clear/redis_spec.rb b/spec/tasks/cache/clear/redis_spec.rb new file mode 100644 index 00000000000..cca2b864e9b --- /dev/null +++ b/spec/tasks/cache/clear/redis_spec.rb @@ -0,0 +1,19 @@ +require 'rake_helper' + +describe 'clearing redis cache' do + before do + Rake.application.rake_require 'tasks/cache' + end + + describe 'clearing pipeline status cache' do + let(:pipeline_status) { create(:ci_pipeline).project.pipeline_status } + + before do + allow(pipeline_status).to receive(:loaded).and_return(nil) + end + + it 'clears pipeline status cache' do + expect { run_rake_task('cache:clear:redis') }.to change { pipeline_status.has_cache? 
} + end + end +end diff --git a/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb b/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb new file mode 100644 index 00000000000..8544fb62b5a --- /dev/null +++ b/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb @@ -0,0 +1,118 @@ +require 'rake_helper' + +describe 'gitlab:artifacts namespace rake task' do + before(:context) do + Rake.application.rake_require 'tasks/gitlab/artifacts/migrate' + end + + let(:object_storage_enabled) { false } + + before do + stub_artifacts_object_storage(enabled: object_storage_enabled) + end + + subject { run_rake_task('gitlab:artifacts:migrate') } + + context 'legacy artifacts' do + describe 'migrate' do + let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) } + + context 'when local storage is used' do + let(:store) { ObjectStorage::Store::LOCAL } + + context 'and job does not have file store defined' do + let(:object_storage_enabled) { true } + let(:store) { nil } + + it "migrates file to remote storage" do + subject + + expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE) + expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE) + end + end + + context 'and remote storage is defined' do + let(:object_storage_enabled) { true } + + it "migrates file to remote storage" do + subject + + expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE) + expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE) + end + end + + context 'and remote storage is not defined' do + it "fails to migrate to remote storage" do + subject + + expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::LOCAL) + expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::LOCAL) + end + end + end + + context 'when remote storage is used' do + let(:object_storage_enabled) { true } + + let(:store) { ObjectStorage::Store::REMOTE } + + it "file stays on remote storage" do + subject + + expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE) + expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE) + end + end + end + end + + context 'job artifacts' do + let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) } + + context 'when local storage is used' do + let(:store) { ObjectStorage::Store::LOCAL } + + context 'and job does not have file store defined' do + let(:object_storage_enabled) { true } + let(:store) { nil } + + it "migrates file to remote storage" do + subject + + expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE) + end + end + + context 'and remote storage is defined' do + let(:object_storage_enabled) { true } + + it "migrates file to remote storage" do + subject + + expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE) + end + end + + context 'and remote storage is not defined' do + it "fails to migrate to remote storage" do + subject + + expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL) + end + end + end + + context 'when remote storage is used' do + let(:object_storage_enabled) { true } + let(:store) { ObjectStorage::Store::REMOTE } + + it "file stays on remote storage" do + subject + + expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE) + end + end + end +end diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb index 168facd51a6..0d24782f317 100644 --- 
a/spec/tasks/gitlab/backup_rake_spec.rb +++ b/spec/tasks/gitlab/backup_rake_spec.rb @@ -195,14 +195,23 @@ describe 'gitlab:app namespace rake task' do end context 'multiple repository storages' do - let(:gitaly_address) { Gitlab.config.repositories.storages.default.gitaly_address } + let(:storage_default) do + Gitlab::GitalyClient::StorageSettings.new(@default_storage_hash.merge('path' => 'tmp/tests/default_storage')) + end + let(:test_second_storage) do + Gitlab::GitalyClient::StorageSettings.new(@default_storage_hash.merge('path' => 'tmp/tests/custom_storage')) + end let(:storages) do { - 'default' => { 'path' => Settings.absolute('tmp/tests/default_storage'), 'gitaly_address' => gitaly_address }, - 'test_second_storage' => { 'path' => Settings.absolute('tmp/tests/custom_storage'), 'gitaly_address' => gitaly_address } + 'default' => storage_default, + 'test_second_storage' => test_second_storage } end + before(:all) do + @default_storage_hash = Gitlab.config.repositories.storages.default.to_h + end + before do # We only need a backup of the repositories for this test stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,registry') diff --git a/spec/tasks/gitlab/cleanup_rake_spec.rb b/spec/tasks/gitlab/cleanup_rake_spec.rb index 9e746ceddd6..2bf873c923f 100644 --- a/spec/tasks/gitlab/cleanup_rake_spec.rb +++ b/spec/tasks/gitlab/cleanup_rake_spec.rb @@ -6,13 +6,16 @@ describe 'gitlab:cleanup rake tasks' do end describe 'cleanup' do - let(:gitaly_address) { Gitlab.config.repositories.storages.default.gitaly_address } let(:storages) do { - 'default' => { 'path' => Settings.absolute('tmp/tests/default_storage'), 'gitaly_address' => gitaly_address } + 'default' => Gitlab::GitalyClient::StorageSettings.new(@default_storage_hash.merge('path' => 'tmp/tests/default_storage')) } end + before(:all) do + @default_storage_hash = Gitlab.config.repositories.storages.default.to_h + end + before do FileUtils.mkdir(Settings.absolute('tmp/tests/default_storage')) allow(Gitlab.config.repositories).to receive(:storages).and_return(storages) diff --git a/spec/tasks/gitlab/git_rake_spec.rb b/spec/tasks/gitlab/git_rake_spec.rb index 9aebf7b0b4a..1efaecc63a5 100644 --- a/spec/tasks/gitlab/git_rake_spec.rb +++ b/spec/tasks/gitlab/git_rake_spec.rb @@ -1,10 +1,13 @@ require 'rake_helper' describe 'gitlab:git rake tasks' do + before(:all) do + @default_storage_hash = Gitlab.config.repositories.storages.default.to_h + end + before do Rake.application.rake_require 'tasks/gitlab/git' - - storages = { 'default' => { 'path' => Settings.absolute('tmp/tests/default_storage') } } + storages = { 'default' => Gitlab::GitalyClient::StorageSettings.new(@default_storage_hash.merge('path' => 'tmp/tests/default_storage')) } FileUtils.mkdir_p(Settings.absolute('tmp/tests/default_storage/@hashed/1/2/test.git')) allow(Gitlab.config.repositories).to receive(:storages).and_return(storages) diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb index 1f4053ff9ad..1e507c0236e 100644 --- a/spec/tasks/gitlab/gitaly_rake_spec.rb +++ b/spec/tasks/gitlab/gitaly_rake_spec.rb @@ -99,14 +99,14 @@ describe 'gitlab:gitaly namespace rake task' do describe 'storage_config' do it 'prints storage configuration in a TOML format' do config = { - 'default' => { + 'default' => Gitlab::GitalyClient::StorageSettings.new( 'path' => '/path/to/default', 'gitaly_address' => 'unix:/path/to/my.socket' - }, - 'nfs_01' => { + ), + 'nfs_01' => Gitlab::GitalyClient::StorageSettings.new( 'path' => '/path/to/nfs_01', 'gitaly_address' => 
'unix:/path/to/my.socket' - } + ) } allow(Gitlab.config.repositories).to receive(:storages).and_return(config) allow(Rails.env).to receive(:test?).and_return(false) @@ -134,7 +134,7 @@ describe 'gitlab:gitaly namespace rake task' do parsed_output = TomlRB.parse(expected_output) config.each do |name, params| - expect(parsed_output['storage']).to include({ 'name' => name, 'path' => params['path'] }) + expect(parsed_output['storage']).to include({ 'name' => name, 'path' => params.legacy_disk_path }) end end end diff --git a/spec/tasks/gitlab/lfs/migrate_rake_spec.rb b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb new file mode 100644 index 00000000000..66d1a192a96 --- /dev/null +++ b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb @@ -0,0 +1,37 @@ +require 'rake_helper' + +describe 'gitlab:lfs namespace rake task' do + before :all do + Rake.application.rake_require 'tasks/gitlab/lfs/migrate' + end + + describe 'migrate' do + let(:local) { ObjectStorage::Store::LOCAL } + let(:remote) { ObjectStorage::Store::REMOTE } + let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) } + + def lfs_migrate + run_rake_task('gitlab:lfs:migrate') + end + + context 'object storage disabled' do + before do + stub_lfs_object_storage(enabled: false) + end + + it "doesn't migrate files" do + expect { lfs_migrate }.not_to change { lfs_object.reload.file_store } + end + end + + context 'object storage enabled' do + before do + stub_lfs_object_storage + end + + it 'migrates local file to object storage' do + expect { lfs_migrate }.to change { lfs_object.reload.file_store }.from(local).to(remote) + end + end + end +end diff --git a/spec/tasks/gitlab/shell_rake_spec.rb b/spec/tasks/gitlab/shell_rake_spec.rb index 65155cb044d..4a756c5742d 100644 --- a/spec/tasks/gitlab/shell_rake_spec.rb +++ b/spec/tasks/gitlab/shell_rake_spec.rb @@ -11,7 +11,7 @@ describe 'gitlab:shell rake tasks' do it 'invokes create_hooks task' do expect(Rake::Task['gitlab:shell:create_hooks']).to receive(:invoke) - storages = Gitlab.config.repositories.storages.values.map { |rs| rs['path'] } + storages = Gitlab.config.repositories.storages.values.map(&:legacy_disk_path) expect(Kernel).to receive(:system).with('bin/install', *storages).and_call_original expect(Kernel).to receive(:system).with('bin/compile').and_call_original diff --git a/spec/tasks/gitlab/uploads/migrate_rake_spec.rb b/spec/tasks/gitlab/uploads/migrate_rake_spec.rb new file mode 100644 index 00000000000..6fcfae358ec --- /dev/null +++ b/spec/tasks/gitlab/uploads/migrate_rake_spec.rb @@ -0,0 +1,143 @@ +require 'rake_helper' + +describe 'gitlab:uploads:migrate rake tasks' do + let(:model_class) { nil } + let(:uploader_class) { nil } + let(:mounted_as) { nil } + let(:batch_size) { 3 } + + before do + stub_env('BATCH', batch_size.to_s) + stub_uploads_object_storage(uploader_class) + Rake.application.rake_require 'tasks/gitlab/uploads/migrate' + + allow(ObjectStorage::MigrateUploadsWorker).to receive(:perform_async) + end + + def run + args = [uploader_class.to_s, model_class.to_s, mounted_as].compact + run_rake_task("gitlab:uploads:migrate", *args) + end + + shared_examples 'enqueue jobs in batch' do |batch:| + it do + expect(ObjectStorage::MigrateUploadsWorker) + .to receive(:perform_async).exactly(batch).times + .and_return("A fake job.") + + run + end + end + + context "for AvatarUploader" do + let(:uploader_class) { AvatarUploader } + let(:mounted_as) { :avatar } + + context "for Project" do + let(:model_class) { Project } + let!(:projects) { create_list(:project, 10, 
:with_avatar) } + + it_behaves_like 'enqueue jobs in batch', batch: 4 + + context 'Upload has store = nil' do + before do + Upload.where(model: projects).update_all(store: nil) + end + + it_behaves_like 'enqueue jobs in batch', batch: 4 + end + end + + context "for Group" do + let(:model_class) { Group } + + before do + create_list(:group, 10, :with_avatar) + end + + it_behaves_like 'enqueue jobs in batch', batch: 4 + end + + context "for User" do + let(:model_class) { User } + + before do + create_list(:user, 10, :with_avatar) + end + + it_behaves_like 'enqueue jobs in batch', batch: 4 + end + end + + context "for AttachmentUploader" do + let(:uploader_class) { AttachmentUploader } + + context "for Note" do + let(:model_class) { Note } + let(:mounted_as) { :attachment } + + before do + create_list(:note, 10, :with_attachment) + end + + it_behaves_like 'enqueue jobs in batch', batch: 4 + end + + context "for Appearance" do + let(:model_class) { Appearance } + let(:mounted_as) { :logo } + + before do + create(:appearance, :with_logos) + end + + %i(logo header_logo).each do |mount| + it_behaves_like 'enqueue jobs in batch', batch: 1 do + let(:mounted_as) { mount } + end + end + end + end + + context "for FileUploader" do + let(:uploader_class) { FileUploader } + let(:model_class) { Project } + + before do + create_list(:project, 10) do |model| + uploader_class.new(model) + .store!(fixture_file_upload('spec/fixtures/doc_sample.txt')) + end + end + + it_behaves_like 'enqueue jobs in batch', batch: 4 + end + + context "for PersonalFileUploader" do + let(:uploader_class) { PersonalFileUploader } + let(:model_class) { PersonalSnippet } + + before do + create_list(:personal_snippet, 10) do |model| + uploader_class.new(model) + .store!(fixture_file_upload('spec/fixtures/doc_sample.txt')) + end + end + + it_behaves_like 'enqueue jobs in batch', batch: 4 + end + + context "for NamespaceFileUploader" do + let(:uploader_class) { NamespaceFileUploader } + let(:model_class) { Snippet } + + before do + create_list(:snippet, 10) do |model| + uploader_class.new(model) + .store!(fixture_file_upload('spec/fixtures/doc_sample.txt')) + end + end + + it_behaves_like 'enqueue jobs in batch', batch: 4 + end +end diff --git a/spec/uploaders/attachment_uploader_spec.rb b/spec/uploaders/attachment_uploader_spec.rb index 091ba824fc6..d302c14efb9 100644 --- a/spec/uploaders/attachment_uploader_spec.rb +++ b/spec/uploaders/attachment_uploader_spec.rb @@ -11,4 +11,26 @@ describe AttachmentUploader do store_dir: %r[uploads/-/system/note/attachment/], upload_path: %r[uploads/-/system/note/attachment/], absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/] + + context "object_store is REMOTE" do + before do + stub_uploads_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like 'builds correct paths', + store_dir: %r[note/attachment/], + upload_path: %r[note/attachment/] + end + + describe "#migrate!" 
do + before do + uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt'))) + stub_uploads_object_storage + end + + it_behaves_like "migrates", to_store: described_class::Store::REMOTE + it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL + end end diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb index bf9028c9260..b0468bc35ff 100644 --- a/spec/uploaders/avatar_uploader_spec.rb +++ b/spec/uploaders/avatar_uploader_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe AvatarUploader do - let(:model) { create(:user, :with_avatar) } + let(:model) { build_stubbed(:user) } let(:uploader) { described_class.new(model, :avatar) } let(:upload) { create(:upload, model: model) } @@ -12,15 +12,28 @@ describe AvatarUploader do upload_path: %r[uploads/-/system/user/avatar/], absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/] - describe '#move_to_cache' do - it 'is false' do - expect(uploader.move_to_cache).to eq(false) + context "object_store is REMOTE" do + before do + stub_uploads_object_storage end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like 'builds correct paths', + store_dir: %r[user/avatar/], + upload_path: %r[user/avatar/] end - describe '#move_to_store' do - it 'is false' do - expect(uploader.move_to_store).to eq(false) + context "with a file" do + let(:project) { create(:project, :with_avatar) } + let(:uploader) { project.avatar } + let(:upload) { uploader.upload } + + before do + stub_uploads_object_storage end + + it_behaves_like "migrates", to_store: described_class::Store::REMOTE + it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL end end diff --git a/spec/uploaders/file_mover_spec.rb b/spec/uploaders/file_mover_spec.rb index bc024cd307c..68b7e24776d 100644 --- a/spec/uploaders/file_mover_spec.rb +++ b/spec/uploaders/file_mover_spec.rb @@ -36,6 +36,12 @@ describe FileMover do it 'creates a new update record' do expect { subject }.to change { Upload.count }.by(1) end + + it 'schedules a background migration' do + expect_any_instance_of(PersonalFileUploader).to receive(:schedule_background_upload).once + + subject + end end context 'when update_markdown fails' do diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb index b42ce982b27..db2810bbe1d 100644 --- a/spec/uploaders/file_uploader_spec.rb +++ b/spec/uploaders/file_uploader_spec.rb @@ -11,32 +11,41 @@ describe FileUploader do shared_examples 'builds correct legacy storage paths' do include_examples 'builds correct paths', store_dir: %r{awesome/project/\h+}, + upload_path: %r{\h+/<filename>}, absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg} end - shared_examples 'uses hashed storage' do - context 'when rolled out attachments' do - let(:project) { build_stubbed(:project, namespace: group, name: 'project') } + context 'legacy storage' do + it_behaves_like 'builds correct legacy storage paths' - before do - allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed') - end + context 'uses hashed storage' do + context 'when rolled out attachments' do + let(:project) { build_stubbed(:project, namespace: group, name: 'project') } - it_behaves_like 'builds correct paths', - store_dir: %r{ca/fe/fe/ed/\h+}, - absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg} - end + include_examples 'builds correct paths', + 
store_dir: %r{@hashed/\h{2}/\h{2}/\h+}, + upload_path: %r{\h+/<filename>} + end - context 'when only repositories are rolled out' do - let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) } + context 'when only repositories are rolled out' do + let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) } - it_behaves_like 'builds correct legacy storage paths' + it_behaves_like 'builds correct legacy storage paths' + end end end - context 'legacy storage' do - it_behaves_like 'builds correct legacy storage paths' - include_examples 'uses hashed storage' + context 'object store is remote' do + before do + stub_uploads_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + # always use hashed storage path for remote uploads + it_behaves_like 'builds correct paths', + store_dir: %r{@hashed/\h{2}/\h{2}/\h+}, + upload_path: %r{@hashed/\h{2}/\h{2}/\h+/\h+/<filename>} end describe 'initialize' do @@ -78,6 +87,16 @@ describe FileUploader do end end + describe "#migrate!" do + before do + uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/dk.png'))) + stub_uploads_object_storage + end + + it_behaves_like "migrates", to_store: described_class::Store::REMOTE + it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL + end + describe '#upload=' do let(:secret) { SecureRandom.hex } let(:upload) { create(:upload, :issuable_upload, secret: secret, filename: 'file.txt') } @@ -93,15 +112,5 @@ describe FileUploader do uploader.upload = upload end - - context 'uploader_context is empty' do - it 'fallbacks to regex based extraction' do - expect(upload).to receive(:uploader_context).and_return({}) - - uploader.upload = upload - expect(uploader.secret).to eq(secret) - expect(uploader.instance_variable_get(:@identifier)).to eq('file.txt') - end - end end end diff --git a/spec/uploaders/gitlab_uploader_spec.rb b/spec/uploaders/gitlab_uploader_spec.rb index 60e35dcf235..4fba122cce1 100644 --- a/spec/uploaders/gitlab_uploader_spec.rb +++ b/spec/uploaders/gitlab_uploader_spec.rb @@ -27,7 +27,7 @@ describe GitlabUploader do describe '#file_cache_storage?' 
do context 'when file storage is used' do before do - uploader_class.cache_storage(:file) + expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::File } end it { is_expected.to be_file_cache_storage } @@ -35,7 +35,7 @@ describe GitlabUploader do context 'when is remote storage' do before do - uploader_class.cache_storage(:fog) + expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::Fog } end it { is_expected.not_to be_file_cache_storage } diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb index 5612ec7e661..42036d67f3d 100644 --- a/spec/uploaders/job_artifact_uploader_spec.rb +++ b/spec/uploaders/job_artifact_uploader_spec.rb @@ -1,7 +1,8 @@ require 'spec_helper' describe JobArtifactUploader do - let(:job_artifact) { create(:ci_job_artifact) } + let(:store) { described_class::Store::LOCAL } + let(:job_artifact) { create(:ci_job_artifact, file_store: store) } let(:uploader) { described_class.new(job_artifact, :file) } subject { uploader } @@ -11,6 +12,17 @@ describe JobArtifactUploader do cache_dir: %r[artifacts/tmp/cache], work_dir: %r[artifacts/tmp/work] + context "object store is REMOTE" do + before do + stub_artifacts_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like "builds correct paths", + store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z] + end + describe '#open' do subject { uploader.open } @@ -36,6 +48,17 @@ describe JobArtifactUploader do end end end + + context 'when trace is stored in Object storage' do + before do + allow(uploader).to receive(:file_storage?) { false } + allow(uploader).to receive(:url) { 'http://object_storage.com/trace' } + end + + it 'returns http io stream' do + is_expected.to be_a(Gitlab::Ci::Trace::HttpIO) + end + end end context 'file is stored in valid local_path' do @@ -55,4 +78,14 @@ describe JobArtifactUploader do it { is_expected.to include("/#{job_artifact.job_id}/#{job_artifact.id}/") } it { is_expected.to end_with("ci_build_artifacts.zip") } end + + describe "#migrate!" 
do + before do + uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/trace/sample_trace'))) + stub_artifacts_object_storage + end + + it_behaves_like "migrates", to_store: described_class::Store::REMOTE + it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL + end end diff --git a/spec/uploaders/legacy_artifact_uploader_spec.rb b/spec/uploaders/legacy_artifact_uploader_spec.rb index 54c6a8b869b..eeb6fd90c9d 100644 --- a/spec/uploaders/legacy_artifact_uploader_spec.rb +++ b/spec/uploaders/legacy_artifact_uploader_spec.rb @@ -1,7 +1,8 @@ require 'rails_helper' describe LegacyArtifactUploader do - let(:job) { create(:ci_build) } + let(:store) { described_class::Store::LOCAL } + let(:job) { create(:ci_build, artifacts_file_store: store) } let(:uploader) { described_class.new(job, :legacy_artifacts_file) } let(:local_path) { described_class.root } @@ -20,6 +21,17 @@ describe LegacyArtifactUploader do cache_dir: %r[artifacts/tmp/cache], work_dir: %r[artifacts/tmp/work] + context 'object store is remote' do + before do + stub_artifacts_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like "builds correct paths", + store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z] + end + describe '#filename' do # we need to use uploader, as this makes to use mounter # which initialises uploader.file object diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb index 6ebc885daa8..a2fb3886610 100644 --- a/spec/uploaders/lfs_object_uploader_spec.rb +++ b/spec/uploaders/lfs_object_uploader_spec.rb @@ -11,4 +11,62 @@ describe LfsObjectUploader do store_dir: %r[\h{2}/\h{2}], cache_dir: %r[/lfs-objects/tmp/cache], work_dir: %r[/lfs-objects/tmp/work] + + context "object store is REMOTE" do + before do + stub_lfs_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like "builds correct paths", + store_dir: %r[\h{2}/\h{2}] + end + + describe 'migration to object storage' do + context 'with object storage disabled' do + it "is skipped" do + expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async) + + lfs_object + end + end + + context 'with object storage enabled' do + before do + stub_lfs_object_storage(background_upload: true) + end + + it 'is scheduled to run after creation' do + expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric)) + + lfs_object + end + end + end + + describe 'remote file' do + let(:remote) { described_class::Store::REMOTE } + let(:lfs_object) { create(:lfs_object, file_store: remote) } + + context 'with object storage enabled' do + before do + stub_lfs_object_storage + end + + it 'can store file remotely' do + allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async) + + store_file(lfs_object) + + expect(lfs_object.file_store).to eq remote + expect(lfs_object.file.path).not_to be_blank + end + end + end + + def store_file(lfs_object) + lfs_object.file = fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "`/png") + lfs_object.save! 
+ end end diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb index 24a2fc0f72e..a8ba01d70b8 100644 --- a/spec/uploaders/namespace_file_uploader_spec.rb +++ b/spec/uploaders/namespace_file_uploader_spec.rb @@ -13,4 +13,26 @@ describe NamespaceFileUploader do store_dir: %r[uploads/-/system/namespace/\d+], upload_path: IDENTIFIER, absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}] + + context "object_store is REMOTE" do + before do + stub_uploads_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like 'builds correct paths', + store_dir: %r[namespace/\d+/\h+], + upload_path: IDENTIFIER + end + + describe "#migrate!" do + before do + uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt'))) + stub_uploads_object_storage + end + + it_behaves_like "migrates", to_store: described_class::Store::REMOTE + it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL + end end diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb new file mode 100644 index 00000000000..e7277b337f6 --- /dev/null +++ b/spec/uploaders/object_storage_spec.rb @@ -0,0 +1,660 @@ +require 'rails_helper' +require 'carrierwave/storage/fog' + +class Implementation < GitlabUploader + include ObjectStorage::Concern + include ::RecordsUploads::Concern + prepend ::ObjectStorage::Extension::RecordsUploads + + storage_options Gitlab.config.uploads + + private + + # user/:id + def dynamic_segment + File.join(model.class.to_s.underscore, model.id.to_s) + end +end + +describe ObjectStorage do + let(:uploader_class) { Implementation } + let(:object) { build_stubbed(:user) } + let(:uploader) { uploader_class.new(object, :file) } + + describe '#object_store=' do + before do + allow(uploader_class).to receive(:object_store_enabled?).and_return(true) + end + + it "reload the local storage" do + uploader.object_store = described_class::Store::LOCAL + expect(uploader.file_storage?).to be_truthy + end + + it "reload the REMOTE storage" do + uploader.object_store = described_class::Store::REMOTE + expect(uploader.file_storage?).to be_falsey + end + + context 'object_store is Store::LOCAL' do + before do + uploader.object_store = described_class::Store::LOCAL + end + + describe '#store_dir' do + it 'is the composition of (base_dir, dynamic_segment)' do + expect(uploader.store_dir).to start_with("uploads/-/system/user/") + end + end + end + + context 'object_store is Store::REMOTE' do + before do + uploader.object_store = described_class::Store::REMOTE + end + + describe '#store_dir' do + it 'is the composition of (dynamic_segment)' do + expect(uploader.store_dir).to start_with("user/") + end + end + end + end + + describe '#object_store' do + subject { uploader.object_store } + + it "delegates to <mount>_store on model" do + expect(object).to receive(:file_store) + + subject + end + + context 'when store is null' do + before do + expect(object).to receive(:file_store).and_return(nil) + end + + it "uses Store::LOCAL" do + is_expected.to eq(described_class::Store::LOCAL) + end + end + + context 'when value is set' do + before do + expect(object).to receive(:file_store).and_return(described_class::Store::REMOTE) + end + + it "returns the given value" do + is_expected.to eq(described_class::Store::REMOTE) + end + end + end + + describe '#file_cache_storage?' 
do + context 'when file storage is used' do + before do + expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::File } + end + + it { expect(uploader).to be_file_cache_storage } + end + + context 'when is remote storage' do + before do + expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::Fog } + end + + it { expect(uploader).not_to be_file_cache_storage } + end + end + + # this means the model shall include + # include RecordsUpload::Concern + # prepend ObjectStorage::Extension::RecordsUploads + # the object_store persistence is delegated to the `Upload` model. + # + context 'when persist_object_store? is false' do + let(:object) { create(:project, :with_avatar) } + let(:uploader) { object.avatar } + + it { expect(object).to be_a(Avatarable) } + it { expect(uploader.persist_object_store?).to be_falsey } + + describe 'delegates the object_store logic to the `Upload` model' do + it 'sets @upload to the found `upload`' do + expect(uploader.upload).to eq(uploader.upload) + end + + it 'sets @object_store to the `Upload` value' do + expect(uploader.object_store).to eq(uploader.upload.store) + end + end + + describe '#migrate!' do + let(:new_store) { ObjectStorage::Store::REMOTE } + + before do + stub_uploads_object_storage(uploader: AvatarUploader) + end + + subject { uploader.migrate!(new_store) } + + it 'persist @object_store to the recorded upload' do + subject + + expect(uploader.upload.store).to eq(new_store) + end + + describe 'fails' do + it 'is handled gracefully' do + store = uploader.object_store + expect_any_instance_of(Upload).to receive(:save!).and_raise("An error") + + expect { subject }.to raise_error("An error") + expect(uploader.exists?).to be_truthy + expect(uploader.upload.store).to eq(store) + end + end + end + end + + # this means the model holds an <mounted_as>_store attribute directly + # and do not delegate the object_store persistence to the `Upload` model. + # + context 'persist_object_store? is true' do + context 'when using JobArtifactsUploader' do + let(:store) { described_class::Store::LOCAL } + let(:object) { create(:ci_job_artifact, :archive, file_store: store) } + let(:uploader) { object.file } + + context 'checking described_class' do + it "uploader include described_class::Concern" do + expect(uploader).to be_a(described_class::Concern) + end + end + + describe '#use_file' do + context 'when file is stored locally' do + it "calls a regular path" do + expect { |b| uploader.use_file(&b) }.not_to yield_with_args(%r[tmp/cache]) + end + end + + context 'when file is stored remotely' do + let(:store) { described_class::Store::REMOTE } + + before do + stub_artifacts_object_storage + end + + it "calls a cache path" do + expect { |b| uploader.use_file(&b) }.to yield_with_args(%r[tmp/cache]) + end + end + end + + describe '#migrate!' do + subject { uploader.migrate!(new_store) } + + shared_examples "updates the underlying <mounted>_store" do + it do + subject + + expect(object.file_store).to eq(new_store) + end + end + + context 'when using the same storage' do + let(:new_store) { store } + + it "to not migrate the storage" do + subject + + expect(uploader).not_to receive(:store!) 
+ expect(uploader.object_store).to eq(store) + end + end + + context 'when migrating to local storage' do + let(:store) { described_class::Store::REMOTE } + let(:new_store) { described_class::Store::LOCAL } + + before do + stub_artifacts_object_storage + end + + include_examples "updates the underlying <mounted>_store" + + it "local file does not exist" do + expect(File.exist?(uploader.path)).to eq(false) + end + + it "remote file exist" do + expect(uploader.file.exists?).to be_truthy + end + + it "does migrate the file" do + subject + + expect(uploader.object_store).to eq(new_store) + expect(File.exist?(uploader.path)).to eq(true) + end + end + + context 'when migrating to remote storage' do + let(:new_store) { described_class::Store::REMOTE } + let!(:current_path) { uploader.path } + + it "file does exist" do + expect(File.exist?(current_path)).to eq(true) + end + + context 'when storage is disabled' do + before do + stub_artifacts_object_storage(enabled: false) + end + + it "to raise an error" do + expect { subject }.to raise_error(/Object Storage is not enabled/) + end + end + + context 'when credentials are set' do + before do + stub_artifacts_object_storage + end + + include_examples "updates the underlying <mounted>_store" + + it "does migrate the file" do + subject + + expect(uploader.object_store).to eq(new_store) + end + + it "does delete original file" do + subject + + expect(File.exist?(current_path)).to eq(false) + end + + context 'when subject save fails' do + before do + expect(uploader).to receive(:persist_object_store!).and_raise(RuntimeError, "exception") + end + + it "original file is not removed" do + expect { subject }.to raise_error(/exception/) + + expect(File.exist?(current_path)).to eq(true) + end + end + end + end + end + end + end + + describe '#fog_directory' do + let(:remote_directory) { 'directory' } + + before do + allow(uploader_class).to receive(:options) do + double(object_store: double(remote_directory: remote_directory)) + end + end + + subject { uploader.fog_directory } + + it { is_expected.to eq(remote_directory) } + end + + context 'when file is in use' do + def when_file_is_in_use + uploader.use_file do + yield + end + end + + it 'cannot migrate' do + when_file_is_in_use do + expect(uploader).not_to receive(:unsafe_migrate!) + + expect { uploader.migrate!(described_class::Store::REMOTE) }.to raise_error('exclusive lease already taken') + end + end + + it 'cannot use_file' do + when_file_is_in_use do + expect(uploader).not_to receive(:unsafe_use_file) + + expect { uploader.use_file }.to raise_error('exclusive lease already taken') + end + end + end + + describe '#fog_credentials' do + let(:connection) { Settingslogic.new("provider" => "AWS") } + + before do + allow(uploader_class).to receive(:options) do + double(object_store: double(connection: connection)) + end + end + + subject { uploader.fog_credentials } + + it { is_expected.to eq(provider: 'AWS') } + end + + describe '#fog_public' do + subject { uploader.fog_public } + + it { is_expected.to eq(false) } + end + + describe '.workhorse_authorize' do + subject { uploader_class.workhorse_authorize } + + before do + # ensure that we use regular Fog libraries + # other tests might call `Fog.mock!` and + # it will make tests to fail + Fog.unmock! 
+ end + + shared_examples 'uses local storage' do + it "returns temporary path" do + is_expected.to have_key(:TempPath) + + expect(subject[:TempPath]).to start_with(uploader_class.root) + expect(subject[:TempPath]).to include(described_class::TMP_UPLOAD_PATH) + end + + it "does not return remote store" do + is_expected.not_to have_key('RemoteObject') + end + end + + shared_examples 'uses remote storage' do + it "returns remote store" do + is_expected.to have_key(:RemoteObject) + + expect(subject[:RemoteObject]).to have_key(:ID) + expect(subject[:RemoteObject]).to have_key(:GetURL) + expect(subject[:RemoteObject]).to have_key(:DeleteURL) + expect(subject[:RemoteObject]).to have_key(:StoreURL) + expect(subject[:RemoteObject][:GetURL]).to include(described_class::TMP_UPLOAD_PATH) + expect(subject[:RemoteObject][:DeleteURL]).to include(described_class::TMP_UPLOAD_PATH) + expect(subject[:RemoteObject][:StoreURL]).to include(described_class::TMP_UPLOAD_PATH) + end + + it "does not return local store" do + is_expected.not_to have_key('TempPath') + end + end + + context 'when object storage is disabled' do + before do + allow(Gitlab.config.uploads.object_store).to receive(:enabled) { false } + end + + it_behaves_like 'uses local storage' + end + + context 'when object storage is enabled' do + before do + allow(Gitlab.config.uploads.object_store).to receive(:enabled) { true } + end + + context 'when direct upload is enabled' do + before do + allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { true } + end + + context 'uses AWS' do + before do + expect(uploader_class).to receive(:object_store_credentials) do + { provider: "AWS", + aws_access_key_id: "AWS_ACCESS_KEY_ID", + aws_secret_access_key: "AWS_SECRET_ACCESS_KEY", + region: "eu-central-1" } + end + end + + it_behaves_like 'uses remote storage' do + let(:storage_url) { "https://uploads.s3-eu-central-1.amazonaws.com/" } + + it 'returns links for S3' do + expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url) + expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url) + expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url) + end + end + end + + context 'uses Google' do + before do + expect(uploader_class).to receive(:object_store_credentials) do + { provider: "Google", + google_storage_access_key_id: 'ACCESS_KEY_ID', + google_storage_secret_access_key: 'SECRET_ACCESS_KEY' } + end + end + + it_behaves_like 'uses remote storage' do + let(:storage_url) { "https://storage.googleapis.com/uploads/" } + + it 'returns links for Google Cloud' do + expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url) + expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url) + expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url) + end + end + end + + context 'uses GDK/minio' do + before do + expect(uploader_class).to receive(:object_store_credentials) do + { provider: "AWS", + aws_access_key_id: "AWS_ACCESS_KEY_ID", + aws_secret_access_key: "AWS_SECRET_ACCESS_KEY", + endpoint: 'http://127.0.0.1:9000', + path_style: true, + region: "gdk" } + end + end + + it_behaves_like 'uses remote storage' do + let(:storage_url) { "http://127.0.0.1:9000/uploads/" } + + it 'returns links for S3' do + expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url) + expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url) + expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url) + end + end + end + end + + context 'when direct upload is disabled' do + before do + 
allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { false } + end + + it_behaves_like 'uses local storage' + end + end + end + + describe '#cache!' do + subject do + uploader.cache!(uploaded_file) + end + + context 'when local file is used' do + context 'when valid file is used' do + let(:uploaded_file) do + fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') + end + + it "properly caches the file" do + subject + + expect(uploader).to be_exists + expect(uploader.path).to start_with(uploader_class.root) + expect(uploader.filename).to eq('rails_sample.jpg') + end + end + end + + context 'when local file is used' do + let(:temp_file) { Tempfile.new("test") } + + before do + FileUtils.touch(temp_file) + end + + after do + FileUtils.rm_f(temp_file) + end + + context 'when valid file is used' do + context 'when valid file is specified' do + let(:uploaded_file) { temp_file } + + context 'when object storage and direct upload is specified' do + before do + stub_uploads_object_storage(uploader_class, enabled: true, direct_upload: true) + end + + context 'when file is stored' do + subject do + uploader.store!(uploaded_file) + end + + it 'file to be remotely stored in permanent location' do + subject + + expect(uploader).to be_exists + expect(uploader).not_to be_cached + expect(uploader).not_to be_file_storage + expect(uploader.path).not_to be_nil + expect(uploader.path).not_to include('tmp/upload') + expect(uploader.path).not_to include('tmp/cache') + expect(uploader.object_store).to eq(described_class::Store::REMOTE) + end + end + end + + context 'when object storage and direct upload is not used' do + before do + stub_uploads_object_storage(uploader_class, enabled: true, direct_upload: false) + end + + context 'when file is stored' do + subject do + uploader.store!(uploaded_file) + end + + it 'file to be remotely stored in permanent location' do + subject + + expect(uploader).to be_exists + expect(uploader).not_to be_cached + expect(uploader).to be_file_storage + expect(uploader.path).not_to be_nil + expect(uploader.path).not_to include('tmp/upload') + expect(uploader.path).not_to include('tmp/cache') + expect(uploader.object_store).to eq(described_class::Store::LOCAL) + end + end + end + end + end + end + + context 'when remote file is used' do + let(:temp_file) { Tempfile.new("test") } + + let!(:fog_connection) do + stub_uploads_object_storage(uploader_class) + end + + before do + FileUtils.touch(temp_file) + end + + after do + FileUtils.rm_f(temp_file) + end + + context 'when valid file is used' do + context 'when invalid file is specified' do + let(:uploaded_file) do + UploadedFile.new(temp_file.path, remote_id: "../test/123123") + end + + it 'raises an error' do + expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file path/) + end + end + + context 'when non existing file is specified' do + let(:uploaded_file) do + UploadedFile.new(temp_file.path, remote_id: "test/123123") + end + + it 'raises an error' do + expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing file/) + end + end + + context 'when valid file is specified' do + let(:uploaded_file) do + UploadedFile.new(temp_file.path, filename: "my_file.txt", remote_id: "test/123123") + end + + let!(:fog_file) do + fog_connection.directories.get('uploads').files.create( + key: 'tmp/upload/test/123123', + body: 'content' + ) + end + + it 'file to be cached and remote stored' do + expect { subject }.not_to raise_error + + expect(uploader).to be_exists + 
expect(uploader).to be_cached + expect(uploader).not_to be_file_storage + expect(uploader.path).not_to be_nil + expect(uploader.path).not_to include('tmp/cache') + expect(uploader.path).not_to include('tmp/cache') + expect(uploader.object_store).to eq(described_class::Store::REMOTE) + end + + context 'when file is stored' do + subject do + uploader.store!(uploaded_file) + end + + it 'file to be remotely stored in permanent location' do + subject + + expect(uploader).to be_exists + expect(uploader).not_to be_cached + expect(uploader).not_to be_file_storage + expect(uploader.path).not_to be_nil + expect(uploader.path).not_to include('tmp/upload') + expect(uploader.path).not_to include('tmp/cache') + expect(uploader.url).to include('/my_file.txt') + expect(uploader.object_store).to eq(described_class::Store::REMOTE) + end + end + end + end + end + end end diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb index ed1fba6edda..c70521d90dc 100644 --- a/spec/uploaders/personal_file_uploader_spec.rb +++ b/spec/uploaders/personal_file_uploader_spec.rb @@ -14,6 +14,18 @@ describe PersonalFileUploader do upload_path: IDENTIFIER, absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}] + + context "object_store is REMOTE" do + before do + stub_uploads_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like 'builds correct paths', + store_dir: %r[\d+/\h+], + upload_path: IDENTIFIER + end + + describe '#to_h' do + before do + subject.instance_variable_set(:@secret, 'secret') @@ -30,4 +42,14 @@ describe PersonalFileUploader do ) end end + + describe "#migrate!" do + before do + uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt'))) + stub_uploads_object_storage + end + + it_behaves_like "migrates", to_store: described_class::Store::REMOTE + it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL + end end diff --git a/spec/uploaders/workers/object_storage/background_move_worker_spec.rb b/spec/uploaders/workers/object_storage/background_move_worker_spec.rb new file mode 100644 index 00000000000..b34f427fd8a --- /dev/null +++ b/spec/uploaders/workers/object_storage/background_move_worker_spec.rb @@ -0,0 +1,146 @@ +require 'spec_helper' + +describe ObjectStorage::BackgroundMoveWorker do + let(:local) { ObjectStorage::Store::LOCAL } + let(:remote) { ObjectStorage::Store::REMOTE } + + def perform + described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id) + end + + context 'for LFS' do + let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) } + let(:uploader_class) { LfsObjectUploader } + let(:subject_class) { LfsObject } + let(:file_field) { :file } + let(:subject_id) { lfs_object.id } + + context 'when object storage is enabled' do + before do + stub_lfs_object_storage(background_upload: true) + end + + it 'uploads object to storage' do + expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote) + end + + context 'when background upload is disabled' do + before do + allow(Gitlab.config.lfs.object_store).to receive(:background_upload) { false } + end + + it 'is skipped' do + expect { perform }.not_to change { lfs_object.reload.file_store } + end + end + end + + context 'when object storage is disabled' do + before do + stub_lfs_object_storage(enabled: false) + end + + it "doesn't migrate files" do + perform + 
expect(lfs_object.reload.file_store).to eq(local) + end + end + end + + context 'for legacy artifacts' do + let(:build) { create(:ci_build, :legacy_artifacts) } + let(:uploader_class) { LegacyArtifactUploader } + let(:subject_class) { Ci::Build } + let(:file_field) { :artifacts_file } + let(:subject_id) { build.id } + + context 'when local storage is used' do + let(:store) { local } + + context 'and remote storage is defined' do + before do + stub_artifacts_object_storage(background_upload: true) + end + + it "migrates file to remote storage" do + perform + + expect(build.reload.artifacts_file_store).to eq(remote) + end + + context 'for artifacts_metadata' do + let(:file_field) { :artifacts_metadata } + + it 'migrates metadata to remote storage' do + perform + + expect(build.reload.artifacts_metadata_store).to eq(remote) + end + end + end + end + end + + context 'for job artifacts' do + let(:artifact) { create(:ci_job_artifact, :archive) } + let(:uploader_class) { JobArtifactUploader } + let(:subject_class) { Ci::JobArtifact } + let(:file_field) { :file } + let(:subject_id) { artifact.id } + + context 'when local storage is used' do + let(:store) { local } + + context 'and remote storage is defined' do + before do + stub_artifacts_object_storage(background_upload: true) + end + + it "migrates file to remote storage" do + perform + + expect(artifact.reload.file_store).to eq(remote) + end + end + end + end + + context 'for uploads' do + let!(:project) { create(:project, :with_avatar) } + let(:uploader_class) { AvatarUploader } + let(:file_field) { :avatar } + + context 'when local storage is used' do + let(:store) { local } + + context 'and remote storage is defined' do + before do + stub_uploads_object_storage(uploader_class, background_upload: true) + end + + describe 'supports using the model' do + let(:subject_class) { project.class } + let(:subject_id) { project.id } + + it "migrates file to remote storage" do + perform + + expect(project.reload.avatar.file_storage?).to be_falsey + end + end + + describe 'supports using the Upload' do + let(:subject_class) { Upload } + let(:subject_id) { project.avatar.upload.id } + + it "migrates file to remote storage" do + perform + + expect(project.reload.avatar.file_storage?).to be_falsey + end + end + end + end + end +end diff --git a/spec/uploaders/workers/object_storage/migrate_uploads_worker_spec.rb b/spec/uploaders/workers/object_storage/migrate_uploads_worker_spec.rb new file mode 100644 index 00000000000..7a7dcb71680 --- /dev/null +++ b/spec/uploaders/workers/object_storage/migrate_uploads_worker_spec.rb @@ -0,0 +1,119 @@ +require 'spec_helper' + +describe ObjectStorage::MigrateUploadsWorker, :sidekiq do + shared_context 'sanity_check! fails' do + before do + expect(described_class).to receive(:sanity_check!).and_raise(described_class::SanityCheckError) + end + end + + let!(:projects) { create_list(:project, 10, :with_avatar) } + let(:uploads) { Upload.all } + let(:model_class) { Project } + let(:mounted_as) { :avatar } + let(:to_store) { ObjectStorage::Store::REMOTE } + + before do + stub_uploads_object_storage(AvatarUploader) + end + + describe '.enqueue!' do + def enqueue! + described_class.enqueue!(uploads, Project, mounted_as, to_store) + end + + it 'is guarded by .sanity_check!' do + expect(described_class).to receive(:perform_async) + expect(described_class).to receive(:sanity_check!) + + enqueue! + end + + context 'sanity_check! fails' do + include_context 'sanity_check! 
fails' + + it 'does not enqueue a job' do + expect(described_class).not_to receive(:perform_async) + + expect { enqueue! }.to raise_error(described_class::SanityCheckError) + end + end + end + + describe '.sanity_check!' do + shared_examples 'raises a SanityCheckError' do + let(:mount_point) { nil } + + it do + expect { described_class.sanity_check!(uploads, model_class, mount_point) } + .to raise_error(described_class::SanityCheckError) + end + end + + context 'uploader types mismatch' do + let!(:outlier) { create(:upload, uploader: 'FileUploader') } + + include_examples 'raises a SanityCheckError' + end + + context 'model types mismatch' do + let!(:outlier) { create(:upload, model_type: 'Potato') } + + include_examples 'raises a SanityCheckError' + end + + context 'mount point not found' do + include_examples 'raises a SanityCheckError' do + let(:mount_point) { :potato } + end + end + end + + describe '#perform' do + def perform + described_class.new.perform(uploads.ids, model_class.to_s, mounted_as, to_store) + rescue ObjectStorage::MigrateUploadsWorker::Report::MigrationFailures + # swallow + end + + shared_examples 'outputs correctly' do |success: 0, failures: 0| + total = success + failures + + if success > 0 + it 'outputs the reports' do + expect(Rails.logger).to receive(:info).with(%r{Migrated #{success}/#{total} files}) + + perform + end + end + + if failures > 0 + it 'outputs upload failures' do + expect(Rails.logger).to receive(:warn).with(/Error .* I am a teapot/) + + perform + end + end + end + + it_behaves_like 'outputs correctly', success: 10 + + it 'migrates files' do + perform + + aggregate_failures do + projects.each do |project| + expect(project.reload.avatar.upload.local?).to be_falsey + end + end + end + + context 'migration is unsuccessful' do + before do + allow_any_instance_of(ObjectStorage::Concern).to receive(:migrate!).and_raise(CarrierWave::UploadError, "I am a teapot.") + end + + it_behaves_like 'outputs correctly', failures: 10 + end + end +end diff --git a/spec/views/admin/dashboard/index.html.haml_spec.rb b/spec/views/admin/dashboard/index.html.haml_spec.rb index b4359d819a0..099baacf019 100644 --- a/spec/views/admin/dashboard/index.html.haml_spec.rb +++ b/spec/views/admin/dashboard/index.html.haml_spec.rb @@ -18,4 +18,10 @@ describe 'admin/dashboard/index.html.haml' do expect(rendered).to have_content 'GitLab Workhorse' expect(rendered).to have_content Gitlab::Workhorse.version end + + it "includes revision of GitLab" do + render + + expect(rendered).to have_content "#{Gitlab::VERSION} (#{Gitlab::REVISION})" + end end diff --git a/spec/views/projects/buttons/_dropdown.html.haml_spec.rb b/spec/views/projects/buttons/_dropdown.html.haml_spec.rb index d0e692635b9..8b9aab30286 100644 --- a/spec/views/projects/buttons/_dropdown.html.haml_spec.rb +++ b/spec/views/projects/buttons/_dropdown.html.haml_spec.rb @@ -8,7 +8,8 @@ describe 'projects/buttons/_dropdown' do assign(:project, project) allow(view).to receive(:current_user).and_return(user) - allow(view).to receive(:can?).and_return(true) + allow(view).to receive(:can?).with(user, :push_code, project).and_return(true) + allow(view).to receive(:can_collaborate_with_project?).and_return(true) end context 'empty repository' do diff --git a/spec/views/ci/lints/show.html.haml_spec.rb b/spec/views/projects/ci/lints/show.html.haml_spec.rb index 7724d54c569..2f0cd38c14a 100644 --- a/spec/views/ci/lints/show.html.haml_spec.rb +++ b/spec/views/projects/ci/lints/show.html.haml_spec.rb @@ -1,11 +1,13 @@ require 
'spec_helper' -describe 'ci/lints/show' do +describe 'projects/ci/lints/show' do include Devise::Test::ControllerHelpers + let(:project) { create(:project, :repository) } + let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) } describe 'XSS protection' do - let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) } before do + assign(:project, project) assign(:status, true) assign(:builds, config_processor.builds) assign(:stages, config_processor.stages) @@ -47,22 +49,21 @@ describe 'ci/lints/show' do end end - let(:content) do - { - build_template: { - script: './build.sh', - tags: ['dotnet'], - only: ['test@dude/repo'], - except: ['deploy'], - environment: 'testing' + context 'when the content is valid' do + let(:content) do + { + build_template: { + script: './build.sh', + tags: ['dotnet'], + only: ['test@dude/repo'], + except: ['deploy'], + environment: 'testing' + } } - } - end - - let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) } + end - context 'when the content is valid' do before do + assign(:project, project) assign(:status, true) assign(:builds, config_processor.builds) assign(:stages, config_processor.stages) @@ -82,6 +83,7 @@ describe 'ci/lints/show' do context 'when the content is invalid' do before do + assign(:project, project) assign(:status, false) assign(:error, 'Undefined error') end diff --git a/spec/views/projects/commit/_commit_box.html.haml_spec.rb b/spec/views/projects/commit/_commit_box.html.haml_spec.rb index 448b925cf34..2fdd28a3be4 100644 --- a/spec/views/projects/commit/_commit_box.html.haml_spec.rb +++ b/spec/views/projects/commit/_commit_box.html.haml_spec.rb @@ -7,6 +7,7 @@ describe 'projects/commit/_commit_box.html.haml' do before do assign(:project, project) assign(:commit, project.commit) + allow(view).to receive(:current_user).and_return(user) allow(view).to receive(:can_collaborate_with_project?).and_return(false) end @@ -47,7 +48,8 @@ describe 'projects/commit/_commit_box.html.haml' do context 'viewing a commit' do context 'as a developer' do before do - expect(view).to receive(:can_collaborate_with_project?).and_return(true) + project.add_developer(user) + allow(view).to receive(:can_collaborate_with_project?).and_return(true) end it 'has a link to create a new tag' do @@ -58,10 +60,6 @@ describe 'projects/commit/_commit_box.html.haml' do end context 'as a non-developer' do - before do - expect(view).to receive(:can_collaborate_with_project?).and_return(false) - end - it 'does not have a link to create a new tag' do render diff --git a/spec/views/projects/diffs/_stats.html.haml_spec.rb b/spec/views/projects/diffs/_stats.html.haml_spec.rb new file mode 100644 index 00000000000..c7d2f85747c --- /dev/null +++ b/spec/views/projects/diffs/_stats.html.haml_spec.rb @@ -0,0 +1,56 @@ +require 'spec_helper' + +describe 'projects/diffs/_stats.html.haml' do + let(:project) { create(:project, :repository) } + let(:commit) { project.commit('570e7b2abdd848b95f2f578043fc23bd6f6fd24d') } + + def render_view + render partial: "projects/diffs/stats", locals: { diff_files: commit.diffs.diff_files } + end + + context 'when the commit contains several changes' do + it 'uses plural for additions' do + render_view + + expect(rendered).to have_text('additions') + end + + it 'uses plural for deletions' do + render_view + end + end + + context 'when the commit contains no addition and no deletions' do + let(:commit) { project.commit('4cd80ccab63c82b4bad16faa5193fbd2aa06df40') } + + it 'uses plural for 
additions' do + render_view + + expect(rendered).to have_text('additions') + end + + it 'uses plural for deletions' do + render_view + + expect(rendered).to have_text('deletions') + end + end + + context 'when the commit contains exactly one addition and one deletion' do + let(:commit) { project.commit('08f22f255f082689c0d7d39d19205085311542bc') } + + it 'uses singular for additions' do + render_view + + expect(rendered).to have_text('addition') + expect(rendered).not_to have_text('additions') + end + + it 'uses singular for deletions' do + render_view + + expect(rendered).to have_text('deletion') + expect(rendered).not_to have_text('deletions') + end + end +end diff --git a/spec/views/projects/jobs/show.html.haml_spec.rb b/spec/views/projects/jobs/show.html.haml_spec.rb index 6a67da79ec5..c93152b88e3 100644 --- a/spec/views/projects/jobs/show.html.haml_spec.rb +++ b/spec/views/projects/jobs/show.html.haml_spec.rb @@ -1,8 +1,10 @@ require 'spec_helper' describe 'projects/jobs/show' do + let(:user) { create(:user) } let(:project) { create(:project, :repository) } let(:build) { create(:ci_build, pipeline: pipeline) } + let(:builds) { project.builds.present(current_user: user) } let(:pipeline) do create(:ci_pipeline, project: project, sha: project.commit.id) @@ -11,6 +13,7 @@ describe 'projects/jobs/show' do before do assign(:build, build.present) assign(:project, project) + assign(:builds, builds) allow(view).to receive(:can?).and_return(true) end @@ -18,7 +21,7 @@ describe 'projects/jobs/show' do describe 'environment info in job view' do context 'job with latest deployment' do let(:build) do - create(:ci_build, :success, environment: 'staging') + create(:ci_build, :success, :trace_artifact, environment: 'staging') end before do @@ -37,11 +40,11 @@ describe 'projects/jobs/show' do context 'job with outdated deployment' do let(:build) do - create(:ci_build, :success, environment: 'staging', pipeline: pipeline) + create(:ci_build, :success, :trace_artifact, environment: 'staging', pipeline: pipeline) end let(:second_build) do - create(:ci_build, :success, environment: 'staging', pipeline: pipeline) + create(:ci_build, :success, :trace_artifact, environment: 'staging', pipeline: pipeline) end let(:environment) do @@ -67,7 +70,7 @@ describe 'projects/jobs/show' do context 'job failed to deploy' do let(:build) do - create(:ci_build, :failed, environment: 'staging', pipeline: pipeline) + create(:ci_build, :failed, :trace_artifact, environment: 'staging', pipeline: pipeline) end let!(:environment) do @@ -85,7 +88,7 @@ describe 'projects/jobs/show' do context 'job will deploy' do let(:build) do - create(:ci_build, :running, environment: 'staging', pipeline: pipeline) + create(:ci_build, :running, :trace_live, environment: 'staging', pipeline: pipeline) end context 'when environment exists' do @@ -133,7 +136,7 @@ describe 'projects/jobs/show' do context 'job that failed to deploy and environment has not been created' do let(:build) do - create(:ci_build, :failed, environment: 'staging', pipeline: pipeline) + create(:ci_build, :failed, :trace_artifact, environment: 'staging', pipeline: pipeline) end let!(:environment) do @@ -151,7 +154,7 @@ describe 'projects/jobs/show' do context 'job that will deploy and environment has not been created' do let(:build) do - create(:ci_build, :running, environment: 'staging', pipeline: pipeline) + create(:ci_build, :running, :trace_live, environment: 'staging', pipeline: pipeline) end let!(:environment) do @@ -171,8 +174,9 @@ describe 'projects/jobs/show' do end 
context 'when job is running' do + let(:build) { create(:ci_build, :trace_live, :running, pipeline: pipeline) } + before do - build.run! render end diff --git a/spec/views/projects/merge_requests/_commits.html.haml_spec.rb b/spec/views/projects/merge_requests/_commits.html.haml_spec.rb index 3ca67114558..b1c6565c08a 100644 --- a/spec/views/projects/merge_requests/_commits.html.haml_spec.rb +++ b/spec/views/projects/merge_requests/_commits.html.haml_spec.rb @@ -28,6 +28,6 @@ describe 'projects/merge_requests/_commits.html.haml' do commit = merge_request.commits.first # HEAD href = diffs_project_merge_request_path(target_project, merge_request, commit_id: commit) - expect(rendered).to have_link(Commit.truncate_sha(commit.sha), href: href) + expect(rendered).to have_link(href: href) end end diff --git a/spec/views/projects/services/_form.haml_spec.rb b/spec/views/projects/services/_form.haml_spec.rb new file mode 100644 index 00000000000..85167bca115 --- /dev/null +++ b/spec/views/projects/services/_form.haml_spec.rb @@ -0,0 +1,46 @@ +require 'spec_helper' + +describe 'projects/services/_form' do + let(:project) { create(:redmine_project) } + let(:user) { create(:admin) } + + before do + assign(:project, project) + + allow(controller).to receive(:current_user).and_return(user) + + allow(view).to receive_messages(current_user: user, + can?: true, + current_application_settings: Gitlab::CurrentSettings.current_application_settings) + end + + context 'commit_events and merge_request_events' do + before do + assign(:service, project.redmine_service) + end + + it 'display merge_request_events and commit_events descriptions' do + allow(RedmineService).to receive(:supported_events).and_return(%w(commit merge_request)) + + render + + expect(rendered).to have_content('Event will be triggered when a commit is created/updated') + expect(rendered).to have_content('Event will be triggered when a merge request is created/updated/merged') + end + + context 'when service is JIRA' do + let(:project) { create(:jira_project) } + + before do + assign(:service, project.jira_service) + end + + it 'display merge_request_events and commit_events descriptions' do + render + + expect(rendered).to have_content('JIRA comments will be created when an issue gets referenced in a commit.') + expect(rendered).to have_content('JIRA comments will be created when an issue gets referenced in a merge request.') + end + end + end +end diff --git a/spec/views/projects/pipelines_settings/_show.html.haml_spec.rb b/spec/views/projects/settings/ci_cd/_form.html.haml_spec.rb index 7b300150874..be9a4d9c57c 100644 --- a/spec/views/projects/pipelines_settings/_show.html.haml_spec.rb +++ b/spec/views/projects/settings/ci_cd/_form.html.haml_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'projects/pipelines_settings/_show' do +describe 'projects/settings/ci_cd/_form' do let(:project) { create(:project, :repository) } before do diff --git a/spec/views/shared/milestones/_top.html.haml.rb b/spec/views/shared/milestones/_top.html.haml.rb new file mode 100644 index 00000000000..516d81c87ac --- /dev/null +++ b/spec/views/shared/milestones/_top.html.haml.rb @@ -0,0 +1,35 @@ +require 'spec_helper' + +describe 'shared/milestones/_top.html.haml' do + set(:group) { create(:group) } + let(:project) { create(:project, group: group) } + let(:milestone) { create(:milestone, project: project) } + + before do + allow(milestone).to receive(:milestones) { [] } + end + + it 'renders a deprecation message for a legacy milestone' do + allow(milestone).to 
receive(:legacy_group_milestone?) { true } + + render 'shared/milestones/top', milestone: milestone + + expect(rendered).to have_css('.milestone-deprecation-message') + end + + it 'renders a deprecation message for a dashboard milestone' do + allow(milestone).to receive(:dashboard_milestone?) { true } + + render 'shared/milestones/top', milestone: milestone + + expect(rendered).to have_css('.milestone-deprecation-message') + end + + it 'does not render a deprecation message for a non-legacy and non-dashboard milestone' do + assign :group, group + + render 'shared/milestones/top', milestone: milestone + + expect(rendered).not_to have_css('.milestone-deprecation-message') + end +end diff --git a/spec/workers/concerns/waitable_worker_spec.rb b/spec/workers/concerns/waitable_worker_spec.rb index 4af0de86ac9..54ab07981a4 100644 --- a/spec/workers/concerns/waitable_worker_spec.rb +++ b/spec/workers/concerns/waitable_worker_spec.rb @@ -14,6 +14,12 @@ describe WaitableWorker do include ApplicationWorker prepend WaitableWorker + # This is a workaround for a Ruby 2.3.7 bug. rspec-mocks cannot restore + # the visibility of prepended modules. See + # https://github.com/rspec/rspec-mocks/issues/1231 for more details. + def self.bulk_perform_inline(args_list) + end + def perform(i = 0) self.class.counter += i end diff --git a/spec/workers/issue_due_scheduler_worker_spec.rb b/spec/workers/issue_due_scheduler_worker_spec.rb new file mode 100644 index 00000000000..7b60835fd26 --- /dev/null +++ b/spec/workers/issue_due_scheduler_worker_spec.rb @@ -0,0 +1,22 @@ +require 'spec_helper' + +describe IssueDueSchedulerWorker do + describe '#perform' do + it 'schedules one MailScheduler::IssueDueWorker per project with open issues due tomorrow' do + project1 = create(:project) + project2 = create(:project) + project_closed_issue = create(:project) + project_issue_due_another_day = create(:project) + + create(:issue, :opened, project: project1, due_date: Date.tomorrow) + create(:issue, :opened, project: project1, due_date: Date.tomorrow) + create(:issue, :opened, project: project2, due_date: Date.tomorrow) + create(:issue, :closed, project: project_closed_issue, due_date: Date.tomorrow) + create(:issue, :opened, project: project_issue_due_another_day, due_date: Date.today) + + expect(MailScheduler::IssueDueWorker).to receive(:bulk_perform_async).with([[project1.id], [project2.id]]) + + described_class.new.perform + end + end +end diff --git a/spec/workers/mail_scheduler/issue_due_worker_spec.rb b/spec/workers/mail_scheduler/issue_due_worker_spec.rb new file mode 100644 index 00000000000..48ac1b8a1a4 --- /dev/null +++ b/spec/workers/mail_scheduler/issue_due_worker_spec.rb @@ -0,0 +1,21 @@ +require 'spec_helper' + +describe MailScheduler::IssueDueWorker do + describe '#perform' do + let(:worker) { described_class.new } + let(:project) { create(:project) } + + it 'sends emails for open issues due tomorrow in the project specified' do + issue1 = create(:issue, :opened, project: project, due_date: Date.tomorrow) + issue2 = create(:issue, :opened, project: project, due_date: Date.tomorrow) + create(:issue, :closed, project: project, due_date: Date.tomorrow) # closed + create(:issue, :opened, project: project, due_date: 2.days.from_now) # due on another day + create(:issue, :opened, due_date: Date.tomorrow) # different project + + expect_any_instance_of(NotificationService).to receive(:issue_due).with(issue1) + expect_any_instance_of(NotificationService).to receive(:issue_due).with(issue2) + + worker.perform(project.id) + 
end + end +end diff --git a/spec/workers/object_storage_upload_worker_spec.rb b/spec/workers/object_storage_upload_worker_spec.rb new file mode 100644 index 00000000000..32ddcbe9757 --- /dev/null +++ b/spec/workers/object_storage_upload_worker_spec.rb @@ -0,0 +1,108 @@ +require 'spec_helper' + +describe ObjectStorageUploadWorker do + let(:local) { ObjectStorage::Store::LOCAL } + let(:remote) { ObjectStorage::Store::REMOTE } + + def perform + described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id) + end + + context 'for LFS' do + let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) } + let(:uploader_class) { LfsObjectUploader } + let(:subject_class) { LfsObject } + let(:file_field) { :file } + let(:subject_id) { lfs_object.id } + + context 'when object storage is enabled' do + before do + stub_lfs_object_storage(background_upload: true) + end + + it 'uploads object to storage' do + expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote) + end + + context 'when background upload is disabled' do + before do + allow(Gitlab.config.lfs.object_store).to receive(:background_upload) { false } + end + + it 'is skipped' do + expect { perform }.not_to change { lfs_object.reload.file_store } + end + end + end + + context 'when object storage is disabled' do + before do + stub_lfs_object_storage(enabled: false) + end + + it "doesn't migrate files" do + perform + + expect(lfs_object.reload.file_store).to eq(local) + end + end + end + + context 'for legacy artifacts' do + let(:build) { create(:ci_build, :legacy_artifacts) } + let(:uploader_class) { LegacyArtifactUploader } + let(:subject_class) { Ci::Build } + let(:file_field) { :artifacts_file } + let(:subject_id) { build.id } + + context 'when local storage is used' do + let(:store) { local } + + context 'and remote storage is defined' do + before do + stub_artifacts_object_storage(background_upload: true) + end + + it "migrates file to remote storage" do + perform + + expect(build.reload.artifacts_file_store).to eq(remote) + end + + context 'for artifacts_metadata' do + let(:file_field) { :artifacts_metadata } + + it 'migrates metadata to remote storage' do + perform + + expect(build.reload.artifacts_metadata_store).to eq(remote) + end + end + end + end + end + + context 'for job artifacts' do + let(:artifact) { create(:ci_job_artifact, :archive) } + let(:uploader_class) { JobArtifactUploader } + let(:subject_class) { Ci::JobArtifact } + let(:file_field) { :file } + let(:subject_id) { artifact.id } + + context 'when local storage is used' do + let(:store) { local } + + context 'and remote storage is defined' do + before do + stub_artifacts_object_storage(background_upload: true) + end + + it "migrates file to remote storage" do + perform + + expect(artifact.reload.file_store).to eq(remote) + end + end + end + end +end diff --git a/spec/workers/project_export_worker_spec.rb b/spec/workers/project_export_worker_spec.rb new file mode 100644 index 00000000000..8899969c178 --- /dev/null +++ b/spec/workers/project_export_worker_spec.rb @@ -0,0 +1,28 @@ +require 'spec_helper' + +describe ProjectExportWorker do + let!(:user) { create(:user) } + let!(:project) { create(:project) } + + subject { described_class.new } + + describe '#perform' do + context 'when it succeeds' do + it 'calls the ExportService' do + expect_any_instance_of(::Projects::ImportExport::ExportService).to receive(:execute) + + subject.perform(user.id, project.id, { 'klass' => 
'Gitlab::ImportExport::AfterExportStrategies::DownloadNotificationStrategy' }) + end + end + + context 'when it fails' do + it 'raises an exception when params are invalid' do + expect_any_instance_of(::Projects::ImportExport::ExportService).not_to receive(:execute) + + expect { subject.perform(1234, project.id, {}) }.to raise_exception(ActiveRecord::RecordNotFound) + expect { subject.perform(user.id, 1234, {}) }.to raise_exception(ActiveRecord::RecordNotFound) + expect { subject.perform(user.id, project.id, { 'klass' => 'Whatever' }) }.to raise_exception(Gitlab::ImportExport::AfterExportStrategyBuilder::StrategyNotFoundError) + end + end + end +end diff --git a/spec/workers/repository_fork_worker_spec.rb b/spec/workers/repository_fork_worker_spec.rb index 6c66658d8c3..4b3c1736ea0 100644 --- a/spec/workers/repository_fork_worker_spec.rb +++ b/spec/workers/repository_fork_worker_spec.rb @@ -9,70 +9,91 @@ describe RepositoryForkWorker do describe "#perform" do let(:project) { create(:project, :repository) } - let(:fork_project) { create(:project, :repository, :import_scheduled, forked_from_project: project) } let(:shell) { Gitlab::Shell.new } + let(:fork_project) { create(:project, :repository, :import_scheduled, forked_from_project: project) } - before do - allow(subject).to receive(:gitlab_shell).and_return(shell) - end + shared_examples 'RepositoryForkWorker performing' do + before do + allow(subject).to receive(:gitlab_shell).and_return(shell) + end - def perform! - subject.perform(fork_project.id, '/test/path', project.disk_path) - end + def expect_fork_repository + expect(shell).to receive(:fork_repository).with( + 'default', + project.disk_path, + fork_project.repository_storage, + fork_project.disk_path + ) + end - def expect_fork_repository - expect(shell).to receive(:fork_repository).with( - '/test/path', - project.disk_path, - fork_project.repository_storage_path, - fork_project.disk_path - ) - end + describe 'when a worker was reset without cleanup' do + let(:jid) { '12345678' } - describe 'when a worker was reset without cleanup' do - let(:jid) { '12345678' } + it 'creates a new repository from a fork' do + allow(subject).to receive(:jid).and_return(jid) - it 'creates a new repository from a fork' do - allow(subject).to receive(:jid).and_return(jid) + expect_fork_repository.and_return(true) + perform! + end + end + + it "creates a new repository from a fork" do expect_fork_repository.and_return(true) perform! end - end - it "creates a new repository from a fork" do - expect_fork_repository.and_return(true) + it 'protects the default branch' do + expect_fork_repository.and_return(true) - perform! - end + perform! + + expect(fork_project.protected_branches.first.name).to eq(fork_project.default_branch) + end + + it 'flushes various caches' do + expect_fork_repository.and_return(true) - it 'protects the default branch' do - expect_fork_repository.and_return(true) + expect_any_instance_of(Repository).to receive(:expire_emptiness_caches) + .and_call_original - perform! + expect_any_instance_of(Repository).to receive(:expire_exists_cache) + .and_call_original - expect(fork_project.protected_branches.first.name).to eq(fork_project.default_branch) - end + perform! 
+ end + + it "handles bad fork" do + error_message = "Unable to fork project #{fork_project.id} for repository #{project.disk_path} -> #{fork_project.disk_path}" - it 'flushes various caches' do - expect_fork_repository.and_return(true) + expect_fork_repository.and_return(false) - expect_any_instance_of(Repository).to receive(:expire_emptiness_caches) - .and_call_original + expect { perform! }.to raise_error(StandardError, error_message) + end + end - expect_any_instance_of(Repository).to receive(:expire_exists_cache) - .and_call_original + context 'only project ID passed' do + def perform! + subject.perform(fork_project.id) + end - perform! + it_behaves_like 'RepositoryForkWorker performing' end - it "handles bad fork" do - error_message = "Unable to fork project #{fork_project.id} for repository #{project.disk_path} -> #{fork_project.disk_path}" + context 'project ID, storage and repo paths passed' do + def perform! + subject.perform(fork_project.id, TestEnv.repos_path, project.disk_path) + end - expect_fork_repository.and_return(false) + it_behaves_like 'RepositoryForkWorker performing' - expect { perform! }.to raise_error(StandardError, error_message) + it 'logs a message about forking with old-style arguments' do + allow(Rails.logger).to receive(:info).with(anything) # To compensate for other logs + expect(Rails.logger).to receive(:info).with("Project #{fork_project.id} is being forked using old-style arguments.") + + perform! + end end end end |