| | | |
|---|---|---|
| author | Mike Greiling <mike@pixelcog.com> | 2018-02-06 13:28:13 -0600 |
| committer | Mike Greiling <mike@pixelcog.com> | 2018-02-06 13:28:13 -0600 |
| commit | 47f2754a14549ccd18e4be8b3b6604b378450d6d (patch) | |
| tree | f2235166a76bcf9e7761e1b613e63110961869da /spec | |
| parent | e71a27f082c49a8e132be632cb8fe97f810af987 (diff) | |
| parent | e776096e84d01ab3d1d07a028b65e7430e195114 (diff) | |
| download | gitlab-ce-47f2754a14549ccd18e4be8b3b6604b378450d6d.tar.gz | |
Merge branch 'master' into pawel/connect_to_prometheus_through_proxy-30480
* master: (242 commits)
Validate user namespace before saving so that errors persist on model
Reset Project's column information in spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb
Explicitly set cwd in Sidekiq memory killer instead of depending on getcwd
Downgrade google-protobuf
Close low level rugged repository in project cache worker
File upload UI obeys LFS filters
Resolve "Add a link to documentation on how to get external ip in the Kubernetes cluster details page"
Upgrade GitLab Workhorse to v3.6.0
Add sorting options for /users API (admin only)
improvements from feedback
[ci-skip] add changelog
remove file after `Upload#destroy`
Fix a hardcoded pipeline ID in a spinach step
Override group sidebar links
Replace "cluster" with "Kubernetes cluster"
Reorder async/sync tasks in BuildFinishedWorker to read traces efficiently
Fix tests for Drop filename enforcement
Revert using expand_fixture_path in factory
Revert "Add FixtureHelpers for FactoryGirl"
Refactor :trace to :trace_live in spec
...
Diffstat (limited to 'spec')
208 files changed, 6435 insertions, 1797 deletions
diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb
index 67a11e56e94..6a1869d1a48 100644
--- a/spec/controllers/groups/uploads_controller_spec.rb
+++ b/spec/controllers/groups/uploads_controller_spec.rb
@@ -6,5 +6,7 @@ describe Groups::UploadsController do
     { group_id: model }
   end

-  it_behaves_like 'handle uploads'
+  it_behaves_like 'handle uploads' do
+    let(:uploader_class) { NamespaceFileUploader }
+  end
 end
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 492fed42d31..8688fb33f0d 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -496,4 +496,87 @@ describe GroupsController do
       "Group '#{redirect_route.path}' was moved to '#{group.full_path}'. Please update any links and bookmarks that may still have the old path."
     end
   end
+
+  describe 'PUT transfer', :postgresql do
+    before do
+      sign_in(user)
+    end
+
+    context 'when transfering to a subgroup goes right' do
+      let(:new_parent_group) { create(:group, :public) }
+      let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
+      let!(:new_parent_group_member) { create(:group_member, :owner, group: new_parent_group, user: user) }
+
+      before do
+        put :transfer,
+            id: group.to_param,
+            new_parent_group_id: new_parent_group.id
+      end
+
+      it 'should return a notice' do
+        expect(flash[:notice]).to eq("Group '#{group.name}' was successfully transferred.")
+      end
+
+      it 'should redirect to the new path' do
+        expect(response).to redirect_to("/#{new_parent_group.path}/#{group.path}")
+      end
+    end
+
+    context 'when converting to a root group goes right' do
+      let(:group) { create(:group, :public, :nested) }
+      let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
+
+      before do
+        put :transfer,
+            id: group.to_param,
+            new_parent_group_id: ''
+      end
+
+      it 'should return a notice' do
+        expect(flash[:notice]).to eq("Group '#{group.name}' was successfully transferred.")
+      end
+
+      it 'should redirect to the new path' do
+        expect(response).to redirect_to("/#{group.path}")
+      end
+    end
+
+    context 'When the transfer goes wrong' do
+      let(:new_parent_group) { create(:group, :public) }
+      let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
+      let!(:new_parent_group_member) { create(:group_member, :owner, group: new_parent_group, user: user) }
+
+      before do
+        allow_any_instance_of(::Groups::TransferService).to receive(:proceed_to_transfer).and_raise(Gitlab::UpdatePathError, 'namespace directory cannot be moved')
+
+        put :transfer,
+            id: group.to_param,
+            new_parent_group_id: new_parent_group.id
+      end
+
+      it 'should return an alert' do
+        expect(flash[:alert]).to eq "Transfer failed: namespace directory cannot be moved"
+      end
+
+      it 'should redirect to the current path' do
+        expect(response).to render_template(:edit)
+      end
+    end
+
+    context 'when the user is not allowed to transfer the group' do
+      let(:new_parent_group) { create(:group, :public) }
+      let!(:group_member) { create(:group_member, :guest, group: group, user: user) }
+      let!(:new_parent_group_member) { create(:group_member, :guest, group: new_parent_group, user: user) }
+
+      before do
+        put :transfer,
+            id: group.to_param,
+            new_parent_group_id: new_parent_group.id
+      end
+
+      it 'should be denied' do
+        expect(response).to have_gitlab_http_status(404)
+      end
+    end
+  end
 end
diff --git a/spec/controllers/health_check_controller_spec.rb b/spec/controllers/health_check_controller_spec.rb
index 2cead1770c9..387ca46ef6f 100644
--- a/spec/controllers/health_check_controller_spec.rb
+++ b/spec/controllers/health_check_controller_spec.rb
@@ -5,7 +5,7 @@ describe HealthCheckController do
   let(:json_response) { JSON.parse(response.body) }
   let(:xml_response) { Hash.from_xml(response.body)['hash'] }

-  let(:token) { current_application_settings.health_check_access_token }
+  let(:token) { Gitlab::CurrentSettings.health_check_access_token }
   let(:whitelisted_ip) { '127.0.0.1' }
   let(:not_whitelisted_ip) { '127.0.0.2' }

diff --git a/spec/controllers/health_controller_spec.rb b/spec/controllers/health_controller_spec.rb
index 95946def5f9..542eddc2d16 100644
--- a/spec/controllers/health_controller_spec.rb
+++ b/spec/controllers/health_controller_spec.rb
@@ -4,7 +4,7 @@ describe HealthController do
   include StubENV

   let(:json_response) { JSON.parse(response.body) }
-  let(:token) { current_application_settings.health_check_access_token }
+  let(:token) { Gitlab::CurrentSettings.health_check_access_token }
   let(:whitelisted_ip) { '127.0.0.1' }
   let(:not_whitelisted_ip) { '127.0.0.2' }

diff --git a/spec/controllers/oauth/applications_controller_spec.rb b/spec/controllers/oauth/applications_controller_spec.rb
index b38652e7ab9..1195f44f37d 100644
--- a/spec/controllers/oauth/applications_controller_spec.rb
+++ b/spec/controllers/oauth/applications_controller_spec.rb
@@ -16,8 +16,7 @@ describe Oauth::ApplicationsController do
     end

     it 'redirects back to profile page if OAuth applications are disabled' do
-      settings = double(user_oauth_applications?: false)
-      allow_any_instance_of(Gitlab::CurrentSettings).to receive(:current_application_settings).and_return(settings)
+      allow(Gitlab::CurrentSettings.current_application_settings).to receive(:user_oauth_applications?).and_return(false)

       get :index

diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 12cb7b2647f..25a2e13fe1a 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -145,8 +145,7 @@ describe Projects::ArtifactsController do
     context 'when using local file storage' do
       it_behaves_like 'a valid file' do
         let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
-        let(:store) { ObjectStoreUploader::LOCAL_STORE }
-        let(:archive_path) { JobArtifactUploader.local_store_path }
+        let(:archive_path) { JobArtifactUploader.root }
       end
     end
   end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index a3b13647c92..954fc79f57d 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -177,7 +177,7 @@ describe Projects::ClustersController do
         cluster.reload
         expect(response).to redirect_to(project_cluster_path(project, cluster))
-        expect(flash[:notice]).to eq('Cluster was successfully updated.')
+        expect(flash[:notice]).to eq('Kubernetes cluster was successfully updated.')
         expect(cluster.enabled).to be_falsey
       end

@@ -276,7 +276,7 @@ describe Projects::ClustersController do
         cluster.reload
         expect(response).to redirect_to(project_cluster_path(project, cluster))
-        expect(flash[:notice]).to eq('Cluster was successfully updated.')
+        expect(flash[:notice]).to eq('Kubernetes cluster was successfully updated.')
         expect(cluster.enabled).to be_falsey
         expect(cluster.name).to eq('my-new-cluster-name')
         expect(cluster.platform_kubernetes.namespace).to eq('my-namespace')

@@ -336,7 +336,7 @@ describe Projects::ClustersController do
           .and change { Clusters::Providers::Gcp.count }.by(-1)

         expect(response).to redirect_to(project_clusters_path(project))
-        expect(flash[:notice]).to eq('Cluster integration was successfully removed.')
+        expect(flash[:notice]).to eq('Kubernetes cluster integration was successfully removed.')
       end
     end

@@ -349,7 +349,7 @@ describe Projects::ClustersController do
           .and change { Clusters::Providers::Gcp.count }.by(-1)

         expect(response).to redirect_to(project_clusters_path(project))
-        expect(flash[:notice]).to eq('Kubernetes cluster integration was successfully removed.')
+        expect(flash[:notice]).to eq('Kubernetes cluster integration was successfully removed.')
       end
     end
   end

@@ -364,7 +364,7 @@ describe Projects::ClustersController do
           .and change { Clusters::Providers::Gcp.count }.by(0)

         expect(response).to redirect_to(project_clusters_path(project))
-        expect(flash[:notice]).to eq('Cluster integration was successfully removed.')
+        expect(flash[:notice]).to eq('Kubernetes cluster integration was successfully removed.')
       end
     end
   end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index e6a4e7c8257..f3e303bb0fe 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -137,8 +137,8 @@ describe Projects::JobsController do
     it 'exposes needed information' do
       expect(response).to have_gitlab_http_status(:ok)
-      expect(json_response['raw_path']).to match(/jobs\/\d+\/raw\z/)
-      expect(json_response.dig('merge_request', 'path')).to match(/merge_requests\/\d+\z/)
+      expect(json_response['raw_path']).to match(%r{jobs/\d+/raw\z})
+      expect(json_response.dig('merge_request', 'path')).to match(%r{merge_requests/\d+\z})
       expect(json_response['new_issue_path'])
         .to include('/issues/new')
     end
@@ -159,8 +159,19 @@ describe Projects::JobsController do
       get_trace
     end

+    context 'when job has a trace artifact' do
+      let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
+
+      it 'returns a trace' do
+        expect(response).to have_gitlab_http_status(:ok)
+        expect(json_response['id']).to eq job.id
+        expect(json_response['status']).to eq job.status
+        expect(json_response['html']).to eq(job.trace.html)
+      end
+    end
+
     context 'when job has a trace' do
-      let(:job) { create(:ci_build, :trace, pipeline: pipeline) }
+      let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }

       it 'returns a trace' do
         expect(response).to have_gitlab_http_status(:ok)
@@ -182,7 +193,7 @@ describe Projects::JobsController do
     end

     context 'when job has a trace with ANSI sequence and Unicode' do
-      let(:job) { create(:ci_build, :unicode_trace, pipeline: pipeline) }
+      let(:job) { create(:ci_build, :unicode_trace_live, pipeline: pipeline) }

       it 'returns a trace with Unicode' do
         expect(response).to have_gitlab_http_status(:ok)
@@ -381,7 +392,7 @@ describe Projects::JobsController do
     end

     context 'when job is erasable' do
-      let(:job) { create(:ci_build, :erasable, :trace, pipeline: pipeline) }
+      let(:job) { create(:ci_build, :erasable, :trace_artifact, pipeline: pipeline) }

       it 'redirects to the erased job page' do
         expect(response).to have_gitlab_http_status(:found)
@@ -408,7 +419,7 @@ describe Projects::JobsController do
     context 'when user is developer' do
       let(:role) { :developer }
-      let(:job) { create(:ci_build, :erasable, :trace, pipeline: pipeline, user: triggered_by) }
+      let(:job) { create(:ci_build, :erasable, :trace_artifact, pipeline: pipeline, user: triggered_by) }

       context 'when triggered by same user' do
         let(:triggered_by) { user }
@@ -439,8 +450,18 @@ describe Projects::JobsController do
       get_raw
     end

+    context 'when job has a trace artifact' do
+      let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
+
+      it 'returns a trace' do
+        expect(response).to have_gitlab_http_status(:ok)
+        expect(response.content_type).to eq 'text/plain; charset=utf-8'
+        expect(response.body).to eq job.job_artifacts_trace.open.read
+      end
+    end
+
     context 'when job has a trace file' do
-      let(:job) { create(:ci_build, :trace, pipeline: pipeline) }
+      let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }

       it 'send a trace file' do
         expect(response).to have_gitlab_http_status(:ok)
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index 3a0c3faa7b4..b7df42168e0 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -53,7 +53,7 @@ describe Projects::RawController do
       end

       it 'serves the file' do
-        expect(controller).to receive(:send_file).with("#{Gitlab.config.shared.path}/lfs-objects/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
+        expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')

         get(:show,
             namespace_id: public_project.namespace.to_param,
             project_id: public_project,
diff --git a/spec/controllers/projects/todos_controller_spec.rb b/spec/controllers/projects/todos_controller_spec.rb
index e2524be7724..1ce7e84bef9 100644
--- a/spec/controllers/projects/todos_controller_spec.rb
+++ b/spec/controllers/projects/todos_controller_spec.rb
@@ -36,7 +36,7 @@ describe Projects::TodosController do
           expect(response).to have_gitlab_http_status(200)
           expect(json_response['count']).to eq 1
-          expect(json_response['delete_path']).to match(/\/dashboard\/todos\/\d{1}/)
+          expect(json_response['delete_path']).to match(%r{/dashboard/todos/\d{1}})
         end
       end

@@ -104,7 +104,7 @@ describe Projects::TodosController do
           expect(response).to have_gitlab_http_status(200)
           expect(json_response['count']).to eq 1
-          expect(json_response['delete_path']).to match(/\/dashboard\/todos\/\d{1}/)
+          expect(json_response['delete_path']).to match(%r{/dashboard/todos/\d{1}})
         end
       end

diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index b1f601a19e5..376b229ffc9 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -180,6 +180,7 @@ describe UploadsController do
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png'
+            response
           end
         end

@@ -196,6 +197,7 @@ describe UploadsController do
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png'
+            response
           end
         end

@@ -220,6 +222,7 @@ describe UploadsController do
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
+            response
           end
         end

@@ -239,6 +242,7 @@ describe UploadsController do
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
+            response
           end
         end

@@ -291,6 +295,7 @@ describe UploadsController do
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
+            response
           end
         end

@@ -322,6 +327,7 @@ describe UploadsController do
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
+            response
           end
         end

@@ -341,6 +347,7 @@ describe UploadsController do
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
+            response
           end
         end

@@ -384,6 +391,7 @@ describe UploadsController do
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
+            response
           end
         end

@@ -420,6 +428,7 @@ describe UploadsController do
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
+            response
           end
         end

@@ -439,6 +448,7 @@ describe UploadsController do
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
+            response
           end
         end

@@ -491,6 +501,7 @@ describe UploadsController do
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
+            response
           end
         end

@@ -522,6 +533,7 @@ describe UploadsController do
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png'
+            response
           end
         end

@@ -541,6 +553,7 @@ describe UploadsController do
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png'
+            response
           end
         end

diff --git a/spec/controllers/user_callouts_controller_spec.rb b/spec/controllers/user_callouts_controller_spec.rb
new file mode 100644
index 00000000000..48e2ff75cac
--- /dev/null
+++ b/spec/controllers/user_callouts_controller_spec.rb
@@ -0,0 +1,49 @@
+require 'spec_helper'
+
+describe UserCalloutsController do
+  let(:user) { create(:user) }
+
+  before do
+    sign_in(user)
+  end
+
+  describe "POST #create" do
+    subject { post :create, feature_name: feature_name, format: :json }
+
+    context 'with valid feature name' do
+      let(:feature_name) { UserCallout.feature_names.keys.first }
+
+      context 'when callout entry does not exist' do
+        it 'should create a callout entry with dismissed state' do
+          expect { subject }.to change { UserCallout.count }.by(1)
+        end
+
+        it 'should return success' do
+          subject
+
+          expect(response).to have_gitlab_http_status(:ok)
+        end
+      end
+
+      context 'when callout entry already exists' do
+        let!(:callout) { create(:user_callout, feature_name: UserCallout.feature_names.keys.first, user: user) }
+
+        it 'should return success' do
+          subject
+
+          expect(response).to have_gitlab_http_status(:ok)
+        end
+      end
+    end
+
+    context 'with invalid feature name' do
+      let(:feature_name) { 'bogus_feature_name' }
+
+      it 'should return bad request' do
+        subject
+
+        expect(response).to have_gitlab_http_status(:bad_request)
+      end
+    end
+  end
+end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 6f66468570f..6ba599cdf83 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -135,13 +135,19 @@ FactoryBot.define do
       coverage_regex '/(d+)/'
     end

-    trait :trace do
+    trait :trace_live do
       after(:create) do |build, evaluator|
         build.trace.set('BUILD TRACE')
       end
     end

-    trait :unicode_trace do
+    trait :trace_artifact do
+      after(:create) do |build, evaluator|
+        create(:ci_job_artifact, :trace, job: build)
+      end
+    end
+
+    trait :unicode_trace_live do
       after(:create) do |build, evaluator|
         trace = File.binread(
           File.expand_path(
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 46afba2953c..7ee379ca2ec 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -26,5 +26,14 @@ FactoryBot.define do
           Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
       end
     end
+
+    trait :trace do
+      file_type :trace
+
+      after(:build) do |artifact, evaluator|
+        artifact.file = fixture_file_upload(
+          Rails.root.join('spec/fixtures/trace/sample_trace'), 'text/plain')
+      end
+    end
   end
 end
diff --git a/spec/factories/commits.rb b/spec/factories/commits.rb
index 84a8bc56640..d5d819d862a 100644
--- a/spec/factories/commits.rb
+++ b/spec/factories/commits.rb
@@ -23,7 +23,7 @@ FactoryBot.define do
     end

     after(:build) do |commit, evaluator|
-      allow(commit).to receive(:author).and_return(evaluator.author || build(:author))
+      allow(commit).to receive(:author).and_return(evaluator.author || build_stubbed(:author))
     end

     trait :without_author do
diff --git a/spec/factories/deployments.rb b/spec/factories/deployments.rb
index 9d7d5e56611..cac56695319 100644
--- a/spec/factories/deployments.rb
+++ b/spec/factories/deployments.rb
@@ -3,13 +3,14 @@ FactoryBot.define do
     sha '97de212e80737a608d939f648d959671fb0a0142'
     ref 'master'
     tag false
-    user
+    user nil
     project nil
     deployable factory: :ci_build
     environment factory: :environment

     after(:build) do |deployment, evaluator|
       deployment.project ||= deployment.environment.project
+      deployment.user ||= deployment.project.creator

       unless deployment.project.repository_exists?
         allow(deployment.project.repository).to receive(:create_ref)
diff --git a/spec/factories/events.rb b/spec/factories/events.rb
index ed275243ac9..5798b81ecad 100644
--- a/spec/factories/events.rb
+++ b/spec/factories/events.rb
@@ -1,7 +1,7 @@
 FactoryBot.define do
   factory :event do
     project
-    author factory: :user
+    author(factory: :user) { project.creator }
     action Event::JOINED

     trait(:created) { action Event::CREATED }
diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb
index 1512f5a0e58..8c531cf5909 100644
--- a/spec/factories/groups.rb
+++ b/spec/factories/groups.rb
@@ -18,7 +18,7 @@ FactoryBot.define do
     end

     trait :with_avatar do
-      avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
+      avatar { fixture_file_upload('spec/fixtures/dk.png') }
     end

     trait :access_requestable do
diff --git a/spec/factories/issues.rb b/spec/factories/issues.rb
index 71dc169c6a2..998080a3dd5 100644
--- a/spec/factories/issues.rb
+++ b/spec/factories/issues.rb
@@ -1,8 +1,8 @@
 FactoryBot.define do
   factory :issue do
     title { generate(:title) }
-    author
     project
+    author { project.creator }

     trait :confidential do
       confidential true
diff --git a/spec/factories/keys.rb b/spec/factories/keys.rb
index f0c43f3d6f5..23a98a899f1 100644
--- a/spec/factories/keys.rb
+++ b/spec/factories/keys.rb
@@ -5,6 +5,10 @@ FactoryBot.define do
     title
     key { Spec::Support::Helpers::KeyGeneratorHelper.new(1024).generate + ' dummy@gitlab.com' }

+    factory :key_without_comment do
+      key { Spec::Support::Helpers::KeyGeneratorHelper.new(1024).generate }
+    end
+
     factory :deploy_key, class: 'DeployKey'

     factory :personal_key do
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index 40558c88d15..d26cb0c3417 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -1,9 +1,9 @@
 FactoryBot.define do
   factory :merge_request do
     title { generate(:title) }
-    author
     association :source_project, :repository, factory: :project
     target_project { source_project }
+    author { source_project.creator }

     # $ git log --pretty=oneline feature..master
     # 5937ac0a7beb003549fc5fd26fc247adbce4a52e Add submodule from gitlab.com
diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb
index 707ecbd6be5..3f4e408b3a6 100644
--- a/spec/factories/notes.rb
+++ b/spec/factories/notes.rb
@@ -6,7 +6,7 @@ FactoryBot.define do
   factory :note do
     project
     note { generate(:title) }
-    author
+    author { project&.creator || create(:user) }
     on_issue

     factory :note_on_commit, traits: [:on_commit]
@@ -122,11 +122,11 @@ FactoryBot.define do
     end

     trait :with_attachment do
-      attachment { fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png") }
+      attachment { fixture_file_upload(Rails.root.join( "spec/fixtures/dk.png"), "image/png") }
     end

     trait :with_svg_attachment do
-      attachment { fixture_file_upload(Rails.root + "spec/fixtures/unsanitized.svg", "image/svg+xml") }
+      attachment { fixture_file_upload(Rails.root.join("spec/fixtures/unsanitized.svg"), "image/svg+xml") }
     end

     transient do
diff --git a/spec/factories/project_wikis.rb b/spec/factories/project_wikis.rb
index 89d8248f9f4..db2eb4fc863 100644
--- a/spec/factories/project_wikis.rb
+++ b/spec/factories/project_wikis.rb
@@ -3,7 +3,7 @@ FactoryBot.define do
     skip_create

     project
-    user factory: :user
+    user { project.creator }
     initialize_with { new(project, user) }
   end
 end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 1dec4eb6c04..9eef923c30d 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -90,7 +90,13 @@ FactoryBot.define do
     end

     trait :with_avatar do
-      avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
+      avatar { fixture_file_upload('spec/fixtures/dk.png') }
+    end
+
+    trait :with_export do
+      after(:create) do |project, evaluator|
+        ProjectExportWorker.new.perform(project.creator.id, project.id)
+      end
     end

     trait :broken_storage do
diff --git a/spec/factories/sent_notifications.rb b/spec/factories/sent_notifications.rb
index 80872067233..b0174dd06b7 100644
--- a/spec/factories/sent_notifications.rb
+++ b/spec/factories/sent_notifications.rb
@@ -1,7 +1,7 @@
 FactoryBot.define do
   factory :sent_notification do
     project
-    recipient factory: :user
+    recipient { project.creator }
     noteable { create(:issue, project: project) }
     reply_key { SentNotification.reply_key }
   end
diff --git a/spec/factories/snippets.rb b/spec/factories/snippets.rb
index 2ab9a56d255..dc12b562108 100644
--- a/spec/factories/snippets.rb
+++ b/spec/factories/snippets.rb
@@ -21,6 +21,7 @@ FactoryBot.define do

   factory :project_snippet, parent: :snippet, class: :ProjectSnippet do
     project
+    author { project.creator }
   end

   factory :personal_snippet, parent: :snippet, class: :PersonalSnippet do
diff --git a/spec/factories/subscriptions.rb b/spec/factories/subscriptions.rb
index a4bc4e87b0a..8f7ab74ec70 100644
--- a/spec/factories/subscriptions.rb
+++ b/spec/factories/subscriptions.rb
@@ -1,7 +1,7 @@
 FactoryBot.define do
   factory :subscription do
-    user
     project
+    user { project.creator }
     subscribable factory: :issue
   end
 end
diff --git a/spec/factories/timelogs.rb b/spec/factories/timelogs.rb
index af34b0681e2..b45f06b9a0a 100644
--- a/spec/factories/timelogs.rb
+++ b/spec/factories/timelogs.rb
@@ -3,7 +3,7 @@ FactoryBot.define do
   factory :timelog do
     time_spent 3600

-    user
     issue
+    user { issue.project.creator }
   end
 end
diff --git a/spec/factories/todos.rb b/spec/factories/todos.rb
index 6a6de665dd1..94f8caedfa6 100644
--- a/spec/factories/todos.rb
+++ b/spec/factories/todos.rb
@@ -1,8 +1,8 @@
 FactoryBot.define do
   factory :todo do
     project
-    author
-    user
+    author { project.creator }
+    user { project.creator }
     target factory: :issue
     action { Todo::ASSIGNED }
diff --git a/spec/factories/uploads.rb b/spec/factories/uploads.rb
index c39500faea1..ff3a2a76acc 100644
--- a/spec/factories/uploads.rb
+++ b/spec/factories/uploads.rb
@@ -1,24 +1,46 @@
 FactoryBot.define do
   factory :upload do
     model { build(:project) }
-    path { "uploads/-/system/project/avatar/avatar.jpg" }
     size 100.kilobytes
     uploader "AvatarUploader"
+    mount_point :avatar
+    secret nil

-    trait :personal_snippet do
-      model { build(:personal_snippet) }
+    # we should build a mount agnostic upload by default
+    transient do
+      filename 'myfile.jpg'
+    end
+
+    # this needs to comply with RecordsUpload::Concern#upload_path
+    path { File.join("uploads/-/system", model.class.to_s.underscore, mount_point.to_s, 'avatar.jpg') }
+
+    trait :personal_snippet_upload do
       uploader "PersonalFileUploader"
+      path { File.join(secret, filename) }
+      model { build(:personal_snippet) }
+      secret SecureRandom.hex
     end

     trait :issuable_upload do
-      path { "#{SecureRandom.hex}/myfile.jpg" }
       uploader "FileUploader"
+      path { File.join(secret, filename) }
+      secret SecureRandom.hex
     end

     trait :namespace_upload do
-      path { "#{SecureRandom.hex}/myfile.jpg" }
       model { build(:group) }
+      path { File.join(secret, filename) }
       uploader "NamespaceFileUploader"
+      secret SecureRandom.hex
+    end
+
+    trait :attachment_upload do
+      transient do
+        mount_point :attachment
+      end
+
+      model { build(:note) }
+      uploader "AttachmentUploader"
     end
   end
 end
diff --git a/spec/factories/user_callouts.rb b/spec/factories/user_callouts.rb
new file mode 100644
index 00000000000..528e442c14b
--- /dev/null
+++ b/spec/factories/user_callouts.rb
@@ -0,0 +1,7 @@
+FactoryBot.define do
+  factory :user_callout do
+    feature_name :gke_cluster_integration
+
+    user
+  end
+end
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index e62e0b263ca..769fd656e7a 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -38,7 +38,7 @@ FactoryBot.define do
     end

     trait :with_avatar do
-      avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
+      avatar { fixture_file_upload('spec/fixtures/dk.png') }
     end

     trait :two_factor_via_otp do
diff --git a/spec/features/admin/admin_health_check_spec.rb b/spec/features/admin/admin_health_check_spec.rb
index ac3392b49f9..3693e5882f9 100644
--- a/spec/features/admin/admin_health_check_spec.rb
+++ b/spec/features/admin/admin_health_check_spec.rb
@@ -17,7 +17,7 @@ feature "Admin Health Check", :feature do
       page.has_text? 'Health Check'
       page.has_text? 'Health information can be retrieved'

-      token = current_application_settings.health_check_access_token
+      token = Gitlab::CurrentSettings.health_check_access_token

       expect(page).to have_content("Access token is #{token}")
       expect(page).to have_selector('#health-check-token', text: token)
@@ -25,7 +25,7 @@ feature "Admin Health Check", :feature do

   describe 'reload access token' do
     it 'changes the access token' do
-      orig_token = current_application_settings.health_check_access_token
+      orig_token = Gitlab::CurrentSettings.health_check_access_token
       click_button 'Reset health check access token'

       expect(page).to have_content('New health check access token has been generated!')
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index c1c54177167..a01c129defd 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -156,7 +156,7 @@ describe "Admin Runners" do
   end

   describe 'runners registration token' do
-    let!(:token) { current_application_settings.runners_registration_token }
+    let!(:token) { Gitlab::CurrentSettings.runners_registration_token }

     before do
       visit admin_runners_path
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 1218ea52227..cc0849d1cc6 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -38,12 +38,12 @@ feature 'Admin updates settings' do
     uncheck 'Project export enabled'
     click_button 'Save'

-    expect(current_application_settings.gravatar_enabled).to be_falsey
-    expect(current_application_settings.home_page_url).to eq "https://about.gitlab.com/"
-    expect(current_application_settings.help_page_text).to eq "Example text"
-    expect(current_application_settings.help_page_hide_commercial_content).to be_truthy
-    expect(current_application_settings.help_page_support_url).to eq "http://example.com/help"
-    expect(current_application_settings.project_export_enabled).to be_falsey
+    expect(Gitlab::CurrentSettings.gravatar_enabled).to be_falsey
+    expect(Gitlab::CurrentSettings.home_page_url).to eq "https://about.gitlab.com/"
+    expect(Gitlab::CurrentSettings.help_page_text).to eq "Example text"
+    expect(Gitlab::CurrentSettings.help_page_hide_commercial_content).to be_truthy
+    expect(Gitlab::CurrentSettings.help_page_support_url).to eq "http://example.com/help"
+    expect(Gitlab::CurrentSettings.project_export_enabled).to be_falsey
expect(page).to have_content "Application settings saved successfully" end diff --git a/spec/features/atom/users_spec.rb b/spec/features/atom/users_spec.rb index 782f42aab04..2d074c115dd 100644 --- a/spec/features/atom/users_spec.rb +++ b/spec/features/atom/users_spec.rb @@ -64,7 +64,7 @@ describe "User Feed" do end it 'has XHTML summaries in issue descriptions' do - expect(body).to match /<hr ?\/>/ + expect(body).to match %r{<hr ?/>} end it 'has XHTML summaries in notes' do @@ -72,7 +72,7 @@ describe "User Feed" do end it 'has XHTML summaries in merge request descriptions' do - expect(body).to match /Here is the fix: <a[^>]*><img[^>]*\/><\/a>/ + expect(body).to match %r{Here is the fix: <a[^>]*><img[^>]*/></a>} end it 'has push event commit ID' do diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb index 991d360ccaf..744041ac425 100644 --- a/spec/features/dashboard/merge_requests_spec.rb +++ b/spec/features/dashboard/merge_requests_spec.rb @@ -44,36 +44,38 @@ feature 'Dashboard Merge Requests' do context 'merge requests exist' do let!(:assigned_merge_request) do - create(:merge_request, assignee: current_user, target_project: project, source_project: project) + create(:merge_request, + assignee: current_user, + source_project: project, + author: create(:user)) end let!(:assigned_merge_request_from_fork) do create(:merge_request, source_branch: 'markdown', assignee: current_user, - target_project: public_project, source_project: forked_project - ) + target_project: public_project, source_project: forked_project, + author: create(:user)) end let!(:authored_merge_request) do create(:merge_request, - source_branch: 'markdown', author: current_user, - target_project: project, source_project: project - ) + source_branch: 'markdown', + source_project: project, + author: current_user) end let!(:authored_merge_request_from_fork) do create(:merge_request, source_branch: 'feature_conflict', author: current_user, - target_project: public_project, source_project: forked_project - ) + target_project: public_project, source_project: forked_project) end let!(:other_merge_request) do create(:merge_request, source_branch: 'fix', - target_project: project, source_project: project - ) + source_project: project, + author: create(:user)) end before do diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb index cdf7aceb13c..450bc0ff8cf 100644 --- a/spec/features/groups/issues_spec.rb +++ b/spec/features/groups/issues_spec.rb @@ -3,40 +3,61 @@ require 'spec_helper' feature 'Group issues page' do include FilteredSearchHelpers - let(:path) { issues_group_path(group) } - let(:issuable) { create(:issue, project: project, title: "this is my created issuable")} + context 'with shared examples' do + let(:path) { issues_group_path(group) } + let(:issuable) { create(:issue, project: project, title: "this is my created issuable")} - include_examples 'project features apply to issuables', Issue + include_examples 'project features apply to issuables', Issue - context 'rss feed' do - let(:access_level) { ProjectFeature::ENABLED } + context 'rss feed' do + let(:access_level) { ProjectFeature::ENABLED } - context 'when signed in' do - let(:user) { user_in_group } + context 'when signed in' do + let(:user) { user_in_group } - it_behaves_like "it has an RSS button with current_user's RSS token" - it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token" - end + it_behaves_like "it has an RSS button with current_user's 
RSS token" + it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token" + end - context 'when signed out' do - let(:user) { nil } + context 'when signed out' do + let(:user) { nil } - it_behaves_like "it has an RSS button without an RSS token" - it_behaves_like "an autodiscoverable RSS feed without an RSS token" + it_behaves_like "it has an RSS button without an RSS token" + it_behaves_like "an autodiscoverable RSS feed without an RSS token" + end end - end - context 'assignee', :js do - let(:access_level) { ProjectFeature::ENABLED } - let(:user) { user_in_group } - let(:user2) { user_outside_group } - let(:path) { issues_group_path(group) } + context 'assignee', :js do + let(:access_level) { ProjectFeature::ENABLED } + let(:user) { user_in_group } + let(:user2) { user_outside_group } + let(:path) { issues_group_path(group) } + + it 'filters by only group users' do + filtered_search.set('assignee:') - it 'filters by only group users' do - filtered_search.set('assignee:') + expect(find('#js-dropdown-assignee .filter-dropdown')).to have_content(user.name) + expect(find('#js-dropdown-assignee .filter-dropdown')).not_to have_content(user2.name) + end + end + end - expect(find('#js-dropdown-assignee .filter-dropdown')).to have_content(user.name) - expect(find('#js-dropdown-assignee .filter-dropdown')).not_to have_content(user2.name) + context 'issues list', :nested_groups do + let(:group) { create(:group)} + let(:subgroup) { create(:group, parent: group) } + let(:project) { create(:project, :public, group: group)} + let(:subgroup_project) { create(:project, :public, group: subgroup)} + let!(:issue) { create(:issue, project: project, title: 'root group issue') } + let!(:subgroup_issue) { create(:issue, project: subgroup_project, title: 'subgroup issue') } + + it 'returns all group and subgroup issues' do + visit issues_group_path(group) + + page.within('.issuable-list') do + expect(page).to have_selector('li.issue', count: 2) + expect(page).to have_content('root group issue') + expect(page).to have_content('subgroup issue') + end end end end diff --git a/spec/features/issues/spam_issues_spec.rb b/spec/features/issues/spam_issues_spec.rb index c7cfd01f588..a75ca1d42b3 100644 --- a/spec/features/issues/spam_issues_spec.rb +++ b/spec/features/issues/spam_issues_spec.rb @@ -9,7 +9,7 @@ describe 'New issue', :js do before do stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') - current_application_settings.update!( + Gitlab::CurrentSettings.update!( akismet_enabled: true, akismet_api_key: 'testkey', recaptcha_enabled: true, diff --git a/spec/features/markdown_spec.rb b/spec/features/markdown_spec.rb index a2b78a5e021..f13d78d24e3 100644 --- a/spec/features/markdown_spec.rb +++ b/spec/features/markdown_spec.rb @@ -259,6 +259,10 @@ describe 'GitLab Markdown' do it 'includes VideoLinkFilter' do expect(doc).to parse_video_links end + + it 'includes ColorFilter' do + expect(doc).to parse_colors + end end context 'wiki pipeline' do @@ -320,6 +324,10 @@ describe 'GitLab Markdown' do it 'includes VideoLinkFilter' do expect(doc).to parse_video_links end + + it 'includes ColorFilter' do + expect(doc).to parse_colors + end end # Fake a `current_user` helper diff --git a/spec/features/merge_request/user_awards_emoji_spec.rb b/spec/features/merge_request/user_awards_emoji_spec.rb index 15a0878fb16..2f24cfbd9e3 100644 --- a/spec/features/merge_request/user_awards_emoji_spec.rb +++ b/spec/features/merge_request/user_awards_emoji_spec.rb @@ -3,7 +3,7 @@ require 'rails_helper' describe 'Merge 
request > User awards emoji', :js do let(:project) { create(:project, :public, :repository) } let(:user) { project.creator } - let(:merge_request) { create(:merge_request, source_project: project) } + let(:merge_request) { create(:merge_request, source_project: project, author: create(:user)) } describe 'logged in' do before do diff --git a/spec/features/merge_request/user_resolves_conflicts_spec.rb b/spec/features/merge_request/user_resolves_conflicts_spec.rb index 61861d33952..19995559fae 100644 --- a/spec/features/merge_request/user_resolves_conflicts_spec.rb +++ b/spec/features/merge_request/user_resolves_conflicts_spec.rb @@ -100,12 +100,12 @@ describe 'Merge request > User resolves conflicts', :js do end it 'shows a link to the conflict resolution page' do - expect(page).to have_link('conflicts', href: /\/conflicts\Z/) + expect(page).to have_link('conflicts', href: %r{/conflicts\Z}) end context 'in Inline view mode' do before do - click_link('conflicts', href: /\/conflicts\Z/) + click_link('conflicts', href: %r{/conflicts\Z}) end include_examples "conflicts are resolved in Interactive mode" @@ -114,7 +114,7 @@ describe 'Merge request > User resolves conflicts', :js do context 'in Parallel view mode' do before do - click_link('conflicts', href: /\/conflicts\Z/) + click_link('conflicts', href: %r{/conflicts\Z}) click_button 'Side-by-side' end @@ -128,7 +128,7 @@ describe 'Merge request > User resolves conflicts', :js do before do visit project_merge_request_path(project, merge_request) - click_link('conflicts', href: /\/conflicts\Z/) + click_link('conflicts', href: %r{/conflicts\Z}) end it 'conflicts can not be resolved in Interactive mode' do @@ -181,7 +181,7 @@ describe 'Merge request > User resolves conflicts', :js do end it 'does not show a link to the conflict resolution page' do - expect(page).not_to have_link('conflicts', href: /\/conflicts\Z/) + expect(page).not_to have_link('conflicts', href: %r{/conflicts\Z}) end it 'shows an error if the conflicts page is visited directly' do diff --git a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb index fb73ab05f87..dbca279569a 100644 --- a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb +++ b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb @@ -61,7 +61,7 @@ describe 'Merge request > User selects branches for new MR', :js do fill_in "merge_request_title", with: "Orphaned MR test" click_button "Submit merge request" - click_link "Check out branch" + click_button "Check out branch" expect(page).to have_content 'git checkout -b orphaned-branch origin/orphaned-branch' end diff --git a/spec/features/projects/badges/coverage_spec.rb b/spec/features/projects/badges/coverage_spec.rb index 821ce88a402..f51001edcd7 100644 --- a/spec/features/projects/badges/coverage_spec.rb +++ b/spec/features/projects/badges/coverage_spec.rb @@ -18,7 +18,7 @@ feature 'test coverage badge' do show_test_coverage_badge - expect_coverage_badge('95%') + expect_coverage_badge('95.00%') end scenario 'user requests coverage badge for specific job' do @@ -30,7 +30,7 @@ feature 'test coverage badge' do show_test_coverage_badge(job: 'coverage') - expect_coverage_badge('85%') + expect_coverage_badge('85.00%') end scenario 'user requests coverage badge for pipeline without coverage' do diff --git a/spec/features/projects/clusters/applications_spec.rb b/spec/features/projects/clusters/applications_spec.rb index 9c4abec115f..8d1e10b7191 100644 
--- a/spec/features/projects/clusters/applications_spec.rb +++ b/spec/features/projects/clusters/applications_spec.rb @@ -64,7 +64,7 @@ feature 'Clusters Applications', :js do expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installed') end - expect(page).to have_content('Helm Tiller was successfully installed on your cluster') + expect(page).to have_content('Helm Tiller was successfully installed on your Kubernetes cluster') end end @@ -98,7 +98,7 @@ feature 'Clusters Applications', :js do expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installed') end - expect(page).to have_content('Ingress was successfully installed on your cluster') + expect(page).to have_content('Ingress was successfully installed on your Kubernetes cluster') end end end diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb index 94bde723e2f..02dbd3380b3 100644 --- a/spec/features/projects/clusters/gcp_spec.rb +++ b/spec/features/projects/clusters/gcp_spec.rb @@ -32,7 +32,7 @@ feature 'Gcp Cluster', :js do before do visit project_clusters_path(project) - click_link 'Add cluster' + click_link 'Add Kubernetes cluster' click_link 'Create on GKE' end @@ -50,19 +50,19 @@ feature 'Gcp Cluster', :js do fill_in 'cluster_provider_gcp_attributes_gcp_project_id', with: 'gcp-project-123' fill_in 'cluster_name', with: 'dev-cluster' - click_button 'Create cluster' + click_button 'Create Kubernetes cluster' end it 'user sees a cluster details page and creation status' do - expect(page).to have_content('Cluster is being created on Google Kubernetes Engine...') + expect(page).to have_content('Kubernetes cluster is being created on Google Kubernetes Engine...') Clusters::Cluster.last.provider.make_created! 
- expect(page).to have_content('Cluster was successfully created on Google Kubernetes Engine') + expect(page).to have_content('Kubernetes cluster was successfully created on Google Kubernetes Engine') end it 'user sees a error if something worng during creation' do - expect(page).to have_content('Cluster is being created on Google Kubernetes Engine...') + expect(page).to have_content('Kubernetes cluster is being created on Google Kubernetes Engine...') Clusters::Cluster.last.provider.make_errored!('Something wrong!') @@ -72,7 +72,7 @@ feature 'Gcp Cluster', :js do context 'when user filled form with invalid parameters' do before do - click_button 'Create cluster' + click_button 'Create Kubernetes cluster' end it 'user sees a validation error' do @@ -100,7 +100,7 @@ feature 'Gcp Cluster', :js do end it 'user sees the successful message' do - expect(page).to have_content('Cluster was successfully updated.') + expect(page).to have_content('Kubernetes cluster was successfully updated.') end end @@ -111,7 +111,7 @@ feature 'Gcp Cluster', :js do end it 'user sees the successful message' do - expect(page).to have_content('Cluster was successfully updated.') + expect(page).to have_content('Kubernetes cluster was successfully updated.') expect(cluster.reload.platform_kubernetes.namespace).to eq('my-namespace') end end @@ -124,8 +124,8 @@ feature 'Gcp Cluster', :js do end it 'user sees creation form with the successful message' do - expect(page).to have_content('Cluster integration was successfully removed.') - expect(page).to have_link('Add cluster') + expect(page).to have_content('Kubernetes cluster integration was successfully removed.') + expect(page).to have_link('Add Kubernetes cluster') end end end @@ -138,16 +138,16 @@ feature 'Gcp Cluster', :js do visit project_clusters_path(project) - click_link 'Add cluster' + click_link 'Add Kubernetes cluster' click_link 'Create on GKE' fill_in 'cluster_provider_gcp_attributes_gcp_project_id', with: 'gcp-project-123' fill_in 'cluster_name', with: 'dev-cluster' - click_button 'Create cluster' + click_button 'Create Kubernetes cluster' end it 'user sees form with error' do - expect(page).to have_content('Please enable billing for one of your projects to be able to create a cluster, then try again.') + expect(page).to have_content('Please enable billing for one of your projects to be able to create a Kubernetes cluster, then try again.') end end @@ -158,12 +158,12 @@ feature 'Gcp Cluster', :js do visit project_clusters_path(project) - click_link 'Add cluster' + click_link 'Add Kubernetes cluster' click_link 'Create on GKE' fill_in 'cluster_provider_gcp_attributes_gcp_project_id', with: 'gcp-project-123' fill_in 'cluster_name', with: 'dev-cluster' - click_button 'Create cluster' + click_button 'Create Kubernetes cluster' end it 'user sees form with error' do @@ -176,7 +176,7 @@ feature 'Gcp Cluster', :js do before do visit project_clusters_path(project) - click_link 'Add cluster' + click_link 'Add Kubernetes cluster' click_link 'Create on GKE' end diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb index b9ab434c259..698b64a659c 100644 --- a/spec/features/projects/clusters/user_spec.rb +++ b/spec/features/projects/clusters/user_spec.rb @@ -16,8 +16,8 @@ feature 'User Cluster', :js do before do visit project_clusters_path(project) - click_link 'Add cluster' - click_link 'Add an existing cluster' + click_link 'Add Kubernetes cluster' + click_link 'Add an existing Kubernetes cluster' end context 'when user 
filled form with valid parameters' do @@ -25,11 +25,11 @@ feature 'User Cluster', :js do fill_in 'cluster_name', with: 'dev-cluster' fill_in 'cluster_platform_kubernetes_attributes_api_url', with: 'http://example.com' fill_in 'cluster_platform_kubernetes_attributes_token', with: 'my-token' - click_button 'Add cluster' + click_button 'Add Kubernetes cluster' end it 'user sees a cluster details page' do - expect(page).to have_content('Cluster integration') + expect(page).to have_content('Kubernetes cluster integration') expect(page.find_field('cluster[name]').value).to eq('dev-cluster') expect(page.find_field('cluster[platform_kubernetes_attributes][api_url]').value) .to have_content('http://example.com') @@ -40,7 +40,7 @@ feature 'User Cluster', :js do context 'when user filled form with invalid parameters' do before do - click_button 'Add cluster' + click_button 'Add Kubernetes cluster' end it 'user sees a validation error' do @@ -68,7 +68,7 @@ feature 'User Cluster', :js do end it 'user sees the successful message' do - expect(page).to have_content('Cluster was successfully updated.') + expect(page).to have_content('Kubernetes cluster was successfully updated.') end end @@ -80,7 +80,7 @@ feature 'User Cluster', :js do end it 'user sees the successful message' do - expect(page).to have_content('Cluster was successfully updated.') + expect(page).to have_content('Kubernetes cluster was successfully updated.') expect(cluster.reload.name).to eq('my-dev-cluster') expect(cluster.reload.platform_kubernetes.namespace).to eq('my-namespace') end @@ -94,8 +94,8 @@ feature 'User Cluster', :js do end it 'user sees creation form with the successful message' do - expect(page).to have_content('Cluster integration was successfully removed.') - expect(page).to have_link('Add cluster') + expect(page).to have_content('Kubernetes cluster integration was successfully removed.') + expect(page).to have_link('Add Kubernetes cluster') end end end diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb index 497a50bebe4..bd9f7745cf8 100644 --- a/spec/features/projects/clusters_spec.rb +++ b/spec/features/projects/clusters_spec.rb @@ -17,7 +17,7 @@ feature 'Clusters', :js do end it 'sees empty state' do - expect(page).to have_link('Add cluster') + expect(page).to have_link('Add Kubernetes cluster') expect(page).to have_selector('.empty-state') end end @@ -82,7 +82,7 @@ feature 'Clusters', :js do before do visit project_clusters_path(project) - click_link 'Add cluster' + click_link 'Add Kubernetes cluster' click_link 'Create on GKE' end diff --git a/spec/features/projects/import_export/namespace_export_file_spec.rb b/spec/features/projects/import_export/namespace_export_file_spec.rb index e76bc6f1220..c1fccf4a40b 100644 --- a/spec/features/projects/import_export/namespace_export_file_spec.rb +++ b/spec/features/projects/import_export/namespace_export_file_spec.rb @@ -1,44 +1,37 @@ require 'spec_helper' feature 'Import/Export - Namespace export file cleanup', :js do - let(:export_path) { "#{Dir.tmpdir}/import_file_spec" } - let(:config_hash) { YAML.load_file(Gitlab::ImportExport.config_file).deep_stringify_keys } + let(:export_path) { Dir.mktmpdir('namespace_export_file_spec') } - let(:project) { create(:project) } - - background do - allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) + before do + allow(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) end after do FileUtils.rm_rf(export_path, secure: true) end - 
context 'admin user' do + shared_examples_for 'handling project exports on namespace change' do + let!(:old_export_path) { project.export_path } + before do sign_in(create(:admin)) + + setup_export_project end context 'moving the namespace' do - scenario 'removes the export file' do - setup_export_project - - old_export_path = project.export_path.dup - + it 'removes the export file' do expect(File).to exist(old_export_path) - project.namespace.update(path: 'new_path') + project.namespace.update!(path: build(:namespace).path) expect(File).not_to exist(old_export_path) end end context 'deleting the namespace' do - scenario 'removes the export file' do - setup_export_project - - old_export_path = project.export_path.dup - + it 'removes the export file' do expect(File).to exist(old_export_path) project.namespace.destroy @@ -46,17 +39,29 @@ feature 'Import/Export - Namespace export file cleanup', :js do expect(File).not_to exist(old_export_path) end end + end - def setup_export_project - visit edit_project_path(project) + describe 'legacy storage' do + let(:project) { create(:project) } - expect(page).to have_content('Export project') + it_behaves_like 'handling project exports on namespace change' + end + + describe 'hashed storage' do + let(:project) { create(:project, :hashed) } - find(:link, 'Export project').send_keys(:return) + it_behaves_like 'handling project exports on namespace change' + end - visit edit_project_path(project) + def setup_export_project + visit edit_project_path(project) - expect(page).to have_content('Download export') - end + expect(page).to have_content('Export project') + + find(:link, 'Export project').send_keys(:return) + + visit edit_project_path(project) + + expect(page).to have_content('Download export') end end diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb index e661db1809a..5d311f2dde3 100644 --- a/spec/features/projects/jobs_spec.rb +++ b/spec/features/projects/jobs_spec.rb @@ -7,7 +7,7 @@ feature 'Jobs' do let(:project) { create(:project, :repository) } let(:pipeline) { create(:ci_pipeline, project: project) } - let(:job) { create(:ci_build, :trace, pipeline: pipeline) } + let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) } let(:job2) { create(:ci_build) } let(:artifacts_file) do @@ -490,18 +490,34 @@ feature 'Jobs' do describe 'GET /:project/jobs/:id/raw', :js do context 'access source' do context 'job from project' do - before do - job.run! - end + context 'when job is running' do + before do + job.run! 
+ end - it 'sends the right headers' do - requests = inspect_requests(inject_headers: { 'X-Sendfile-Type' => 'X-Sendfile' }) do - visit raw_project_job_path(project, job) + it 'sends the right headers' do + requests = inspect_requests(inject_headers: { 'X-Sendfile-Type' => 'X-Sendfile' }) do + visit raw_project_job_path(project, job) + end + + expect(requests.first.status_code).to eq(200) + expect(requests.first.response_headers['Content-Type']).to eq('text/plain; charset=utf-8') + expect(requests.first.response_headers['X-Sendfile']).to eq(job.trace.send(:current_path)) end + end - expect(requests.first.status_code).to eq(200) - expect(requests.first.response_headers['Content-Type']).to eq('text/plain; charset=utf-8') - expect(requests.first.response_headers['X-Sendfile']).to eq(job.trace.send(:current_path)) + context 'when job is complete' do + let(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) } + + it 'sends the right headers' do + requests = inspect_requests(inject_headers: { 'X-Sendfile-Type' => 'X-Sendfile' }) do + visit raw_project_job_path(project, job) + end + + expect(requests.first.status_code).to eq(200) + expect(requests.first.response_headers['Content-Type']).to eq('text/plain; charset=utf-8') + expect(requests.first.response_headers['X-Sendfile']).to eq(job.job_artifacts_trace.file.path) + end end end diff --git a/spec/features/projects/labels/update_prioritization_spec.rb b/spec/features/projects/labels/update_prioritization_spec.rb index 85bd776932b..ae8b1364ec7 100644 --- a/spec/features/projects/labels/update_prioritization_spec.rb +++ b/spec/features/projects/labels/update_prioritization_spec.rb @@ -99,7 +99,7 @@ feature 'Prioritize labels' do expect(page).to have_content 'wontfix' # Sort labels - drag_to(selector: '.js-prioritized-labels', from_index: 1, to_index: 2) + drag_to(selector: '.label-list-item', from_index: 1, to_index: 2) page.within('.prioritized-labels') do expect(first('li')).to have_content('feature') diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb index fa2f7a1fd78..65e24862d43 100644 --- a/spec/features/projects/pipeline_schedules_spec.rb +++ b/spec/features/projects/pipeline_schedules_spec.rb @@ -168,11 +168,11 @@ feature 'Pipeline Schedules', :js do scenario 'user sees the new variable in edit window' do find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click - page.within('.pipeline-variable-list') do - expect(find(".pipeline-variable-row:nth-child(1) .pipeline-variable-key-input").value).to eq('AAA') - expect(find(".pipeline-variable-row:nth-child(1) .pipeline-variable-value-input").value).to eq('AAA123') - expect(find(".pipeline-variable-row:nth-child(2) .pipeline-variable-key-input").value).to eq('BBB') - expect(find(".pipeline-variable-row:nth-child(2) .pipeline-variable-value-input").value).to eq('BBB123') + page.within('.ci-variable-list') do + expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-key").value).to eq('AAA') + expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-value", visible: false).value).to eq('AAA123') + expect(find(".ci-variable-row:nth-child(2) .js-ci-variable-input-key").value).to eq('BBB') + expect(find(".ci-variable-row:nth-child(2) .js-ci-variable-input-value", visible: false).value).to eq('BBB123') end end end @@ -185,16 +185,18 @@ feature 'Pipeline Schedules', :js do visit_pipelines_schedules find(".content-list .pipeline-schedule-table-row:nth-child(1) 
.btn-group a[title='Edit']").click - all('[name="schedule[variables_attributes][][key]"]')[0].set('foo') - all('[name="schedule[variables_attributes][][value]"]')[0].set('bar') + + find('.js-ci-variable-list-section .js-secret-value-reveal-button').click + first('.js-ci-variable-input-key').set('foo') + first('.js-ci-variable-input-value').set('bar') click_button 'Save pipeline schedule' end scenario 'user sees the updated variable in edit window' do find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click - page.within('.pipeline-variable-list') do - expect(find(".pipeline-variable-row:nth-child(1) .pipeline-variable-key-input").value).to eq('foo') - expect(find(".pipeline-variable-row:nth-child(1) .pipeline-variable-value-input").value).to eq('bar') + page.within('.ci-variable-list') do + expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-key").value).to eq('foo') + expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-value", visible: false).value).to eq('bar') end end end @@ -207,15 +209,15 @@ feature 'Pipeline Schedules', :js do visit_pipelines_schedules find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click - find('.pipeline-variable-list .pipeline-variable-row-remove-button').click + find('.ci-variable-list .ci-variable-row-remove-button').click click_button 'Save pipeline schedule' end scenario 'user does not see the removed variable in edit window' do find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click - page.within('.pipeline-variable-list') do - expect(find(".pipeline-variable-row:nth-child(1) .pipeline-variable-key-input").value).to eq('') - expect(find(".pipeline-variable-row:nth-child(1) .pipeline-variable-value-input").value).to eq('') + page.within('.ci-variable-list') do + expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-key").value).to eq('') + expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-value", visible: false).value).to eq('') end end end diff --git a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb index 949d90a50ff..4d2a08afecc 100644 --- a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb +++ b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb @@ -1,6 +1,7 @@ require 'spec_helper' -describe 'User updates wiki page' do +# Remove skip_gitaly_mock flag when gitaly_update_page implements moving pages +describe 'User updates wiki page', :skip_gitaly_mock do let(:user) { create(:user) } before do @@ -143,6 +144,7 @@ describe 'User updates wiki page' do expect(page).to have_field('wiki[message]', with: 'Update home') fill_in(:wiki_content, with: 'My awesome wiki!') + click_button('Save changes') expect(page).to have_content('Home') @@ -151,4 +153,74 @@ describe 'User updates wiki page' do end end end + + context 'when the page is in a subdir' do + let!(:project) { create(:project, namespace: user.namespace) } + let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) } + let(:page_name) { 'page_name' } + let(:page_dir) { "foo/bar/#{page_name}" } + let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, attrs: { title: page_dir, content: 'Home page' }) } + + before do + visit(project_wiki_edit_path(project, wiki_page)) + end + + it 'moves the page to the root folder' do + fill_in(:wiki_title, with: "/#{page_name}") + + click_button('Save changes') + + expect(current_path).to 
eq(project_wiki_path(project, page_name)) + end + + it 'moves the page to other dir' do + new_page_dir = "foo1/bar1/#{page_name}" + + fill_in(:wiki_title, with: new_page_dir) + + click_button('Save changes') + + expect(current_path).to eq(project_wiki_path(project, new_page_dir)) + end + + it 'remains in the same place if title has not changed' do + original_path = project_wiki_path(project, wiki_page) + + fill_in(:wiki_title, with: page_name) + + click_button('Save changes') + + expect(current_path).to eq(original_path) + end + + it 'can be moved to a different dir with a different name' do + new_page_dir = "foo1/bar1/new_page_name" + + fill_in(:wiki_title, with: new_page_dir) + + click_button('Save changes') + + expect(current_path).to eq(project_wiki_path(project, new_page_dir)) + end + + it 'can be renamed and moved to the root folder' do + new_name = 'new_page_name' + + fill_in(:wiki_title, with: "/#{new_name}") + + click_button('Save changes') + + expect(current_path).to eq(project_wiki_path(project, new_name)) + end + + it 'squishes the title before creating the page' do + new_page_dir = " foo1 / bar1 / #{page_name} " + + fill_in(:wiki_title, with: new_page_dir) + + click_button('Save changes') + + expect(current_path).to eq(project_wiki_path(project, "foo1/bar1/#{page_name}")) + end + end end diff --git a/spec/features/projects/wiki/user_views_wiki_page_spec.rb b/spec/features/projects/wiki/user_views_wiki_page_spec.rb index ff325aeadd3..e37436838fd 100644 --- a/spec/features/projects/wiki/user_views_wiki_page_spec.rb +++ b/spec/features/projects/wiki/user_views_wiki_page_spec.rb @@ -1,6 +1,7 @@ require 'spec_helper' -describe 'User views a wiki page' do +# Remove skip_gitaly_mock flag when gitaly_update_page implements moving pages +describe 'User views a wiki page', :skip_gitaly_mock do let(:user) { create(:user) } let(:project) { create(:project, namespace: user.namespace) } let(:wiki_page) do diff --git a/spec/finders/group_projects_finder_spec.rb b/spec/finders/group_projects_finder_spec.rb index 27a09d7c6f5..be80ee7d767 100644 --- a/spec/finders/group_projects_finder_spec.rb +++ b/spec/finders/group_projects_finder_spec.rb @@ -2,6 +2,7 @@ require 'spec_helper' describe GroupProjectsFinder do let(:group) { create(:group) } + let(:subgroup) { create(:group, parent: group) } let(:current_user) { create(:user) } let(:options) { {} } @@ -12,6 +13,8 @@ describe GroupProjectsFinder do let!(:shared_project_1) { create(:project, :public, path: '3') } let!(:shared_project_2) { create(:project, :private, path: '4') } let!(:shared_project_3) { create(:project, :internal, path: '5') } + let!(:subgroup_project) { create(:project, :public, path: '6', group: subgroup) } + let!(:subgroup_private_project) { create(:project, :private, path: '7', group: subgroup) } before do shared_project_1.project_group_links.create(group_access: Gitlab::Access::MASTER, group: group) @@ -35,11 +38,31 @@ describe GroupProjectsFinder do context "only owned" do let(:options) { { only_owned: true } } - it { is_expected.to match_array([private_project, public_project]) } + context 'with subgroups projects', :nested_groups do + before do + options[:include_subgroups] = true + end + + it { is_expected.to match_array([private_project, public_project, subgroup_project, subgroup_private_project]) } + end + + context 'without subgroups projects' do + it { is_expected.to match_array([private_project, public_project]) } + end end context "all" do - it { is_expected.to match_array([shared_project_3, shared_project_2, 
shared_project_1, private_project, public_project]) } + context 'with subgroups projects', :nested_groups do + before do + options[:include_subgroups] = true + end + + it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, private_project, public_project, subgroup_project, subgroup_private_project]) } + end + + context 'without subgroups projects' do + it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, private_project, public_project]) } + end end end @@ -71,9 +94,20 @@ describe GroupProjectsFinder do context "without external user" do before do private_project.add_master(current_user) + subgroup_private_project.add_master(current_user) end - it { is_expected.to match_array([private_project, public_project]) } + context 'with subgroups projects', :nested_groups do + before do + options[:include_subgroups] = true + end + + it { is_expected.to match_array([private_project, public_project, subgroup_project, subgroup_private_project]) } + end + + context 'without subgroups projects' do + it { is_expected.to match_array([private_project, public_project]) } + end end context "with external user" do @@ -81,12 +115,32 @@ describe GroupProjectsFinder do current_user.update_attributes(external: true) end - it { is_expected.to eq([public_project]) } + context 'with subgroups projects', :nested_groups do + before do + options[:include_subgroups] = true + end + + it { is_expected.to match_array([public_project, subgroup_project]) } + end + + context 'without subgroups projects' do + it { is_expected.to eq([public_project]) } + end end end context "all" do - it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, public_project]) } + context 'with subgroups projects', :nested_groups do + before do + options[:include_subgroups] = true + end + + it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, public_project, subgroup_project]) } + end + + context 'without subgroups projects' do + it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, public_project]) } + end end end @@ -100,7 +154,17 @@ describe GroupProjectsFinder do context "only owned" do let(:options) { { only_owned: true } } - it { is_expected.to eq([public_project]) } + context 'with subgroups projects', :nested_groups do + before do + options[:include_subgroups] = true + end + + it { is_expected.to match_array([public_project, subgroup_project]) } + end + + context 'without subgroups projects' do + it { is_expected.to eq([public_project]) } + end end end end diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb index 47fd98234f9..abb7631d7d7 100644 --- a/spec/finders/issues_finder_spec.rb +++ b/spec/finders/issues_finder_spec.rb @@ -3,13 +3,17 @@ require 'spec_helper' describe IssuesFinder do set(:user) { create(:user) } set(:user2) { create(:user) } - set(:project1) { create(:project) } + set(:group) { create(:group) } + set(:subgroup) { create(:group, parent: group) } + set(:project1) { create(:project, group: group) } set(:project2) { create(:project) } + set(:project3) { create(:project, group: subgroup) } set(:milestone) { create(:milestone, project: project1) } set(:label) { create(:label, project: project2) } set(:issue1) { create(:issue, author: user, assignees: [user], project: project1, milestone: milestone, title: 'gitlab', created_at: 1.week.ago) } set(:issue2) { create(:issue, author: user, assignees: [user], project: project2, 
description: 'gitlab') } set(:issue3) { create(:issue, author: user2, assignees: [user2], project: project2, title: 'tanuki', description: 'tanuki', created_at: 1.week.from_now) } + set(:issue4) { create(:issue, project: project3) } set(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: issue1) } set(:award_emoji2) { create(:award_emoji, name: 'thumbsup', user: user2, awardable: issue2) } set(:award_emoji3) { create(:award_emoji, name: 'thumbsdown', user: user, awardable: issue3) } @@ -25,10 +29,12 @@ describe IssuesFinder do project1.add_master(user) project2.add_developer(user) project2.add_developer(user2) + project3.add_developer(user) issue1 issue2 issue3 + issue4 award_emoji1 award_emoji2 @@ -39,7 +45,7 @@ describe IssuesFinder do let(:scope) { 'all' } it 'returns all issues' do - expect(issues).to contain_exactly(issue1, issue2, issue3) + expect(issues).to contain_exactly(issue1, issue2, issue3, issue4) end context 'filtering by assignee ID' do @@ -50,6 +56,26 @@ describe IssuesFinder do end end + context 'filtering by group_id' do + let(:params) { { group_id: group.id } } + + context 'when include_subgroup param not set' do + it 'returns all group issues' do + expect(issues).to contain_exactly(issue1) + end + end + + context 'when include_subgroup param is true', :nested_groups do + before do + params[:include_subgroups] = true + end + + it 'returns all group and subgroup issues' do + expect(issues).to contain_exactly(issue1, issue4) + end + end + end + context 'filtering by author ID' do let(:params) { { author_id: user2.id } } @@ -87,7 +113,7 @@ describe IssuesFinder do let(:params) { { milestone_title: Milestone::None.title } } it 'returns issues with no milestone' do - expect(issues).to contain_exactly(issue2, issue3) + expect(issues).to contain_exactly(issue2, issue3, issue4) end end @@ -185,7 +211,7 @@ describe IssuesFinder do let(:params) { { label_name: Label::None.title } } it 'returns issues with no labels' do - expect(issues).to contain_exactly(issue1, issue3) + expect(issues).to contain_exactly(issue1, issue3, issue4) end end @@ -210,7 +236,7 @@ describe IssuesFinder do let(:params) { { state: 'opened' } } it 'returns only opened issues' do - expect(issues).to contain_exactly(issue1, issue2, issue3) + expect(issues).to contain_exactly(issue1, issue2, issue3, issue4) end end @@ -226,7 +252,7 @@ describe IssuesFinder do let(:params) { { state: 'all' } } it 'returns all issues' do - expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue) + expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue, issue4) end end @@ -234,7 +260,7 @@ describe IssuesFinder do let(:params) { { state: 'invalid_state' } } it 'returns all issues' do - expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue) + expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue, issue4) end end end @@ -338,7 +364,7 @@ describe IssuesFinder do end it "doesn't return issues if feature disabled" do - [project1, project2].each do |project| + [project1, project2, project3].each do |project| project.project_feature.update!(issues_access_level: ProjectFeature::DISABLED) end @@ -351,7 +377,7 @@ describe IssuesFinder do it 'returns the number of rows for the default state' do finder = described_class.new(user) - expect(finder.row_count).to eq(3) + expect(finder.row_count).to eq(4) end it 'returns the number of rows for a given state' do diff --git a/spec/finders/merge_requests_finder_spec.rb 
b/spec/finders/merge_requests_finder_spec.rb index 687ffaec7cc..9385c892c9e 100644 --- a/spec/finders/merge_requests_finder_spec.rb +++ b/spec/finders/merge_requests_finder_spec.rb @@ -6,31 +6,36 @@ describe MergeRequestsFinder do let(:user) { create :user } let(:user2) { create :user } - let(:project1) { create(:project, :public) } + let(:group) { create(:group) } + let(:subgroup) { create(:group, parent: group) } + let(:project1) { create(:project, :public, group: group) } let(:project2) { fork_project(project1, user) } let(:project3) do p = fork_project(project1, user) p.update!(archived: true) p end + let(:project4) { create(:project, :public, group: subgroup) } let!(:merge_request1) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project1) } let!(:merge_request2) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project1, state: 'closed') } let!(:merge_request3) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project2) } let!(:merge_request4) { create(:merge_request, :simple, author: user, source_project: project3, target_project: project3) } + let!(:merge_request5) { create(:merge_request, :simple, author: user, source_project: project4, target_project: project4) } before do project1.add_master(user) project2.add_developer(user) project3.add_developer(user) project2.add_developer(user2) + project4.add_developer(user) end describe "#execute" do it 'filters by scope' do params = { scope: 'authored', state: 'opened' } merge_requests = described_class.new(user, params).execute - expect(merge_requests.size).to eq(3) + expect(merge_requests.size).to eq(4) end it 'filters by project' do @@ -39,10 +44,26 @@ describe MergeRequestsFinder do expect(merge_requests.size).to eq(1) end + it 'filters by group' do + params = { group_id: group.id } + + merge_requests = described_class.new(user, params).execute + + expect(merge_requests.size).to eq(2) + end + + it 'filters by group including subgroups', :nested_groups do + params = { group_id: group.id, include_subgroups: true } + + merge_requests = described_class.new(user, params).execute + + expect(merge_requests.size).to eq(3) + end + it 'filters by non_archived' do params = { non_archived: true } merge_requests = described_class.new(user, params).execute - expect(merge_requests.size).to eq(3) + expect(merge_requests.size).to eq(4) end it 'filters by iid' do @@ -73,14 +94,14 @@ describe MergeRequestsFinder do end context 'with created_after and created_before params' do - let(:project4) { create(:project, forked_from_project: project1) } + let(:new_project) { create(:project, forked_from_project: project1) } let!(:new_merge_request) do create(:merge_request, :simple, author: user, created_at: 1.week.from_now, - source_project: project4, + source_project: new_project, target_project: project1) end @@ -89,12 +110,12 @@ describe MergeRequestsFinder do :simple, author: user, created_at: 1.week.ago, - source_project: project4, - target_project: project4) + source_project: new_project, + target_project: new_project) end before do - project4.add_master(user) + new_project.add_master(user) end it 'filters by created_after' do @@ -106,7 +127,7 @@ describe MergeRequestsFinder do end it 'filters by created_before' do - params = { project_id: project4.id, created_before: old_merge_request.created_at + 1.second } + params = { project_id: new_project.id, created_before: old_merge_request.created_at + 1.second } merge_requests = 
described_class.new(user, params).execute @@ -119,7 +140,7 @@ it 'returns the number of rows for the default state' do finder = described_class.new(user) - expect(finder.row_count).to eq(3) + expect(finder.row_count).to eq(4) end it 'returns the number of rows for a given state' do diff --git a/spec/fixtures/api/schemas/public_api/v4/user/basic.json b/spec/fixtures/api/schemas/public_api/v4/user/basic.json index bf330d8278c..2d815be32a6 100644 --- a/spec/fixtures/api/schemas/public_api/v4/user/basic.json +++ b/spec/fixtures/api/schemas/public_api/v4/user/basic.json @@ -2,12 +2,16 @@ "type": ["object", "null"], "required": [ "id", + "name", + "username", "state", "avatar_url", "web_url" ], "properties": { "id": { "type": "integer" }, + "name": { "type": "string" }, + "username": { "type": "string" }, "state": { "type": "string" }, "avatar_url": { "type": "string" }, "web_url": { "type": "string" } diff --git a/spec/fixtures/markdown.md.erb b/spec/fixtures/markdown.md.erb index 71abb6da607..da32a46675f 100644 --- a/spec/fixtures/markdown.md.erb +++ b/spec/fixtures/markdown.md.erb @@ -280,6 +280,18 @@ However the wrapping tags cannot be mixed as such: +### Colors + +`#F00` +`#F00A` +`#FF0000` +`#FF0000AA` +`RGB(0,255,0)` +`RGB(0%,100%,0%)` +`RGBA(0,255,0,0.7)` +`HSL(540,70%,50%)` +`HSLA(540,70%,50%,0.7)` + ### Mermaid > If this is not rendered correctly, see diff --git a/spec/fixtures/trace/sample_trace b/spec/fixtures/trace/sample_trace new file mode 100644 index 00000000000..55fcb9d2756 --- /dev/null +++ b/spec/fixtures/trace/sample_trace @@ -0,0 +1,1185 @@ +[0KRunning with gitlab-runner 10.4.0 (857480b6) + on docker-auto-scale-com (9a6801bd) +[0;m[0KUsing Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ... +[0;m[0KStarting service postgres:9.2 ... +[0;m[0KPulling docker image postgres:9.2 ... +[0;m[0KUsing docker image postgres:9.2 ID=sha256:18cdbca56093c841d28e629eb8acd4224afe0aa4c57c839351fc181888b8a470 for postgres service... +[0;m[0KStarting service redis:alpine ... +[0;m[0KPulling docker image redis:alpine ... +[0;m[0KUsing docker image redis:alpine ID=sha256:cb1ec54b370d4a91dff57d00f91fd880dc710160a58440adaa133e0f84ae999d for redis service... +[0;m[0KWaiting for services to be up and running... +[0;m[0KUsing docker image sha256:3006a02a5a6f0a116358a13bbc46ee46fb2471175efd5b7f9b1c22345ec2a8e9 for predefined container... +[0;m[0KPulling docker image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ... +[0;m[0KUsing docker image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ID=sha256:1f59be408f12738509ffe4177d65e9de6391f32461de83d9d45f58517b30af99 for build container... +[0;msection_start:1517486886:prepare_script +[0KRunning on runner-9a6801bd-project-13083-concurrent-0 via runner-9a6801bd-gsrm-1517484168-a8449153...
+section_end:1517486887:prepare_script +[0Ksection_start:1517486887:get_sources +[0K[32;1mFetching changes for 42624-gitaly-bundle-isolation-not-working-in-ci with git depth set to 20...[0;m +Removing .gitlab_shell_secret +Removing .gitlab_workhorse_secret +Removing .yarn-cache/ +Removing config/database.yml +Removing config/gitlab.yml +Removing config/redis.cache.yml +Removing config/redis.queues.yml +Removing config/redis.shared_state.yml +Removing config/resque.yml +Removing config/secrets.yml +Removing coverage/ +Removing knapsack/ +Removing log/api_json.log +Removing log/application.log +Removing log/gitaly-test.log +Removing log/githost.log +Removing log/grpc.log +Removing log/test_json.log +Removing node_modules/ +Removing public/assets/ +Removing rspec_flaky/ +Removing shared/tmp/ +Removing tmp/tests/ +Removing vendor/ruby/ +HEAD is now at 4cea24f Converted todos.js to axios +From https://gitlab.com/gitlab-org/gitlab-ce + * [new branch] 42624-gitaly-bundle-isolation-not-working-in-ci -> origin/42624-gitaly-bundle-isolation-not-working-in-ci +[32;1mChecking out f42a5e24 as 42624-gitaly-bundle-isolation-not-working-in-ci...[0;m +[32;1mSkipping Git submodules setup[0;m +section_end:1517486896:get_sources +[0Ksection_start:1517486896:restore_cache +[0K[32;1mChecking cache for ruby-2.3.6-with-yarn...[0;m +Downloading cache.zip from http://runners-cache-5-internal.gitlab.com:444/runner/project/13083/ruby-2.3.6-with-yarn[0;m +[32;1mSuccessfully extracted cache[0;m +section_end:1517486919:restore_cache +[0Ksection_start:1517486919:download_artifacts +[0K[32;1mDownloading artifacts for retrieve-tests-metadata (50551658)...[0;m +Downloading artifacts from coordinator... ok [0;m id[0;m=50551658 responseStatus[0;m=200 OK token[0;m=HhF7y_1X +[32;1mDownloading artifacts for compile-assets (50551659)...[0;m +Downloading artifacts from coordinator... ok [0;m id[0;m=50551659 responseStatus[0;m=200 OK token[0;m=wTz6JrCP +[32;1mDownloading artifacts for setup-test-env (50551660)...[0;m +Downloading artifacts from coordinator... 
ok [0;m id[0;m=50551660 responseStatus[0;m=200 OK token[0;m=DTGgeVF5 +[0;33mWARNING: tmp/tests/gitlab-shell/.gitlab_shell_secret: chmod tmp/tests/gitlab-shell/.gitlab_shell_secret: no such file or directory (suppressing repeats)[0;m +section_end:1517486934:download_artifacts +[0Ksection_start:1517486934:build_script +[0K[32;1m$ bundle --version[0;m +Bundler version 1.16.1 +[32;1m$ source scripts/utils.sh[0;m +[32;1m$ source scripts/prepare_build.sh[0;m +The Gemfile's dependencies are satisfied +Successfully installed knapsack-1.15.0 +1 gem installed +NOTICE: database "gitlabhq_test" does not exist, skipping +DROP DATABASE +CREATE DATABASE +CREATE ROLE +GRANT +-- enable_extension("plpgsql") + -> 0.0156s +-- enable_extension("pg_trgm") + -> 0.0156s +-- create_table("abuse_reports", {:force=>:cascade}) + -> 0.0119s +-- create_table("appearances", {:force=>:cascade}) + -> 0.0065s +-- create_table("application_settings", {:force=>:cascade}) + -> 0.0382s +-- create_table("audit_events", {:force=>:cascade}) + -> 0.0056s +-- add_index("audit_events", ["entity_id", "entity_type"], {:name=>"index_audit_events_on_entity_id_and_entity_type", :using=>:btree}) + -> 0.0040s +-- create_table("award_emoji", {:force=>:cascade}) + -> 0.0058s +-- add_index("award_emoji", ["awardable_type", "awardable_id"], {:name=>"index_award_emoji_on_awardable_type_and_awardable_id", :using=>:btree}) + -> 0.0068s +-- add_index("award_emoji", ["user_id", "name"], {:name=>"index_award_emoji_on_user_id_and_name", :using=>:btree}) + -> 0.0043s +-- create_table("boards", {:force=>:cascade}) + -> 0.0049s +-- add_index("boards", ["project_id"], {:name=>"index_boards_on_project_id", :using=>:btree}) + -> 0.0056s +-- create_table("broadcast_messages", {:force=>:cascade}) + -> 0.0056s +-- add_index("broadcast_messages", ["starts_at", "ends_at", "id"], {:name=>"index_broadcast_messages_on_starts_at_and_ends_at_and_id", :using=>:btree}) + -> 0.0041s +-- create_table("chat_names", {:force=>:cascade}) + -> 0.0056s +-- add_index("chat_names", ["service_id", "team_id", "chat_id"], {:name=>"index_chat_names_on_service_id_and_team_id_and_chat_id", :unique=>true, :using=>:btree}) + -> 0.0039s +-- add_index("chat_names", ["user_id", "service_id"], {:name=>"index_chat_names_on_user_id_and_service_id", :unique=>true, :using=>:btree}) + -> 0.0036s +-- create_table("chat_teams", {:force=>:cascade}) + -> 0.0068s +-- add_index("chat_teams", ["namespace_id"], {:name=>"index_chat_teams_on_namespace_id", :unique=>true, :using=>:btree}) + -> 0.0098s +-- create_table("ci_build_trace_section_names", {:force=>:cascade}) + -> 0.0048s +-- add_index("ci_build_trace_section_names", ["project_id", "name"], {:name=>"index_ci_build_trace_section_names_on_project_id_and_name", :unique=>true, :using=>:btree}) + -> 0.0035s +-- create_table("ci_build_trace_sections", {:force=>:cascade}) + -> 0.0040s +-- add_index("ci_build_trace_sections", ["build_id", "section_name_id"], {:name=>"index_ci_build_trace_sections_on_build_id_and_section_name_id", :unique=>true, :using=>:btree}) + -> 0.0035s +-- add_index("ci_build_trace_sections", ["project_id"], {:name=>"index_ci_build_trace_sections_on_project_id", :using=>:btree}) + -> 0.0033s +-- create_table("ci_builds", {:force=>:cascade}) + -> 0.0062s +-- add_index("ci_builds", ["auto_canceled_by_id"], {:name=>"index_ci_builds_on_auto_canceled_by_id", :using=>:btree}) + -> 0.0035s +-- add_index("ci_builds", ["commit_id", "stage_idx", "created_at"], {:name=>"index_ci_builds_on_commit_id_and_stage_idx_and_created_at", 
:using=>:btree}) + -> 0.0032s +-- add_index("ci_builds", ["commit_id", "status", "type"], {:name=>"index_ci_builds_on_commit_id_and_status_and_type", :using=>:btree}) + -> 0.0032s +-- add_index("ci_builds", ["commit_id", "type", "name", "ref"], {:name=>"index_ci_builds_on_commit_id_and_type_and_name_and_ref", :using=>:btree}) + -> 0.0035s +-- add_index("ci_builds", ["commit_id", "type", "ref"], {:name=>"index_ci_builds_on_commit_id_and_type_and_ref", :using=>:btree}) + -> 0.0042s +-- add_index("ci_builds", ["project_id", "id"], {:name=>"index_ci_builds_on_project_id_and_id", :using=>:btree}) + -> 0.0031s +-- add_index("ci_builds", ["protected"], {:name=>"index_ci_builds_on_protected", :using=>:btree}) + -> 0.0031s +-- add_index("ci_builds", ["runner_id"], {:name=>"index_ci_builds_on_runner_id", :using=>:btree}) + -> 0.0033s +-- add_index("ci_builds", ["stage_id"], {:name=>"index_ci_builds_on_stage_id", :using=>:btree}) + -> 0.0035s +-- add_index("ci_builds", ["status", "type", "runner_id"], {:name=>"index_ci_builds_on_status_and_type_and_runner_id", :using=>:btree}) + -> 0.0031s +-- add_index("ci_builds", ["status"], {:name=>"index_ci_builds_on_status", :using=>:btree}) + -> 0.0032s +-- add_index("ci_builds", ["token"], {:name=>"index_ci_builds_on_token", :unique=>true, :using=>:btree}) + -> 0.0028s +-- add_index("ci_builds", ["updated_at"], {:name=>"index_ci_builds_on_updated_at", :using=>:btree}) + -> 0.0047s +-- add_index("ci_builds", ["user_id"], {:name=>"index_ci_builds_on_user_id", :using=>:btree}) + -> 0.0029s +-- create_table("ci_group_variables", {:force=>:cascade}) + -> 0.0055s +-- add_index("ci_group_variables", ["group_id", "key"], {:name=>"index_ci_group_variables_on_group_id_and_key", :unique=>true, :using=>:btree}) + -> 0.0028s +-- create_table("ci_job_artifacts", {:force=>:cascade}) + -> 0.0048s +-- add_index("ci_job_artifacts", ["job_id", "file_type"], {:name=>"index_ci_job_artifacts_on_job_id_and_file_type", :unique=>true, :using=>:btree}) + -> 0.0027s +-- add_index("ci_job_artifacts", ["project_id"], {:name=>"index_ci_job_artifacts_on_project_id", :using=>:btree}) + -> 0.0028s +-- create_table("ci_pipeline_schedule_variables", {:force=>:cascade}) + -> 0.0044s +-- add_index("ci_pipeline_schedule_variables", ["pipeline_schedule_id", "key"], {:name=>"index_ci_pipeline_schedule_variables_on_schedule_id_and_key", :unique=>true, :using=>:btree}) + -> 0.0032s +-- create_table("ci_pipeline_schedules", {:force=>:cascade}) + -> 0.0047s +-- add_index("ci_pipeline_schedules", ["next_run_at", "active"], {:name=>"index_ci_pipeline_schedules_on_next_run_at_and_active", :using=>:btree}) + -> 0.0029s +-- add_index("ci_pipeline_schedules", ["project_id"], {:name=>"index_ci_pipeline_schedules_on_project_id", :using=>:btree}) + -> 0.0028s +-- create_table("ci_pipeline_variables", {:force=>:cascade}) + -> 0.0045s +-- add_index("ci_pipeline_variables", ["pipeline_id", "key"], {:name=>"index_ci_pipeline_variables_on_pipeline_id_and_key", :unique=>true, :using=>:btree}) + -> 0.0030s +-- create_table("ci_pipelines", {:force=>:cascade}) + -> 0.0057s +-- add_index("ci_pipelines", ["auto_canceled_by_id"], {:name=>"index_ci_pipelines_on_auto_canceled_by_id", :using=>:btree}) + -> 0.0030s +-- add_index("ci_pipelines", ["pipeline_schedule_id"], {:name=>"index_ci_pipelines_on_pipeline_schedule_id", :using=>:btree}) + -> 0.0031s +-- add_index("ci_pipelines", ["project_id", "ref", "status", "id"], {:name=>"index_ci_pipelines_on_project_id_and_ref_and_status_and_id", :using=>:btree}) + -> 0.0032s +-- 
add_index("ci_pipelines", ["project_id", "sha"], {:name=>"index_ci_pipelines_on_project_id_and_sha", :using=>:btree}) + -> 0.0032s +-- add_index("ci_pipelines", ["project_id"], {:name=>"index_ci_pipelines_on_project_id", :using=>:btree}) + -> 0.0035s +-- add_index("ci_pipelines", ["status"], {:name=>"index_ci_pipelines_on_status", :using=>:btree}) + -> 0.0032s +-- add_index("ci_pipelines", ["user_id"], {:name=>"index_ci_pipelines_on_user_id", :using=>:btree}) + -> 0.0029s +-- create_table("ci_runner_projects", {:force=>:cascade}) + -> 0.0035s +-- add_index("ci_runner_projects", ["project_id"], {:name=>"index_ci_runner_projects_on_project_id", :using=>:btree}) + -> 0.0029s +-- add_index("ci_runner_projects", ["runner_id"], {:name=>"index_ci_runner_projects_on_runner_id", :using=>:btree}) + -> 0.0028s +-- create_table("ci_runners", {:force=>:cascade}) + -> 0.0059s +-- add_index("ci_runners", ["contacted_at"], {:name=>"index_ci_runners_on_contacted_at", :using=>:btree}) + -> 0.0030s +-- add_index("ci_runners", ["is_shared"], {:name=>"index_ci_runners_on_is_shared", :using=>:btree}) + -> 0.0030s +-- add_index("ci_runners", ["locked"], {:name=>"index_ci_runners_on_locked", :using=>:btree}) + -> 0.0030s +-- add_index("ci_runners", ["token"], {:name=>"index_ci_runners_on_token", :using=>:btree}) + -> 0.0029s +-- create_table("ci_stages", {:force=>:cascade}) + -> 0.0046s +-- add_index("ci_stages", ["pipeline_id", "name"], {:name=>"index_ci_stages_on_pipeline_id_and_name", :using=>:btree}) + -> 0.0031s +-- add_index("ci_stages", ["pipeline_id"], {:name=>"index_ci_stages_on_pipeline_id", :using=>:btree}) + -> 0.0030s +-- add_index("ci_stages", ["project_id"], {:name=>"index_ci_stages_on_project_id", :using=>:btree}) + -> 0.0028s +-- create_table("ci_trigger_requests", {:force=>:cascade}) + -> 0.0058s +-- add_index("ci_trigger_requests", ["commit_id"], {:name=>"index_ci_trigger_requests_on_commit_id", :using=>:btree}) + -> 0.0031s +-- create_table("ci_triggers", {:force=>:cascade}) + -> 0.0043s +-- add_index("ci_triggers", ["project_id"], {:name=>"index_ci_triggers_on_project_id", :using=>:btree}) + -> 0.0033s +-- create_table("ci_variables", {:force=>:cascade}) + -> 0.0059s +-- add_index("ci_variables", ["project_id", "key", "environment_scope"], {:name=>"index_ci_variables_on_project_id_and_key_and_environment_scope", :unique=>true, :using=>:btree}) + -> 0.0031s +-- create_table("cluster_platforms_kubernetes", {:force=>:cascade}) + -> 0.0053s +-- add_index("cluster_platforms_kubernetes", ["cluster_id"], {:name=>"index_cluster_platforms_kubernetes_on_cluster_id", :unique=>true, :using=>:btree}) + -> 0.0028s +-- create_table("cluster_projects", {:force=>:cascade}) + -> 0.0032s +-- add_index("cluster_projects", ["cluster_id"], {:name=>"index_cluster_projects_on_cluster_id", :using=>:btree}) + -> 0.0035s +-- add_index("cluster_projects", ["project_id"], {:name=>"index_cluster_projects_on_project_id", :using=>:btree}) + -> 0.0030s +-- create_table("cluster_providers_gcp", {:force=>:cascade}) + -> 0.0051s +-- add_index("cluster_providers_gcp", ["cluster_id"], {:name=>"index_cluster_providers_gcp_on_cluster_id", :unique=>true, :using=>:btree}) + -> 0.0034s +-- create_table("clusters", {:force=>:cascade}) + -> 0.0052s +-- add_index("clusters", ["enabled"], {:name=>"index_clusters_on_enabled", :using=>:btree}) + -> 0.0031s +-- add_index("clusters", ["user_id"], {:name=>"index_clusters_on_user_id", :using=>:btree}) + -> 0.0028s +-- create_table("clusters_applications_helm", {:force=>:cascade}) + -> 0.0045s 
+-- create_table("clusters_applications_ingress", {:force=>:cascade}) + -> 0.0044s +-- create_table("clusters_applications_prometheus", {:force=>:cascade}) + -> 0.0047s +-- create_table("container_repositories", {:force=>:cascade}) + -> 0.0050s +-- add_index("container_repositories", ["project_id", "name"], {:name=>"index_container_repositories_on_project_id_and_name", :unique=>true, :using=>:btree}) + -> 0.0032s +-- add_index("container_repositories", ["project_id"], {:name=>"index_container_repositories_on_project_id", :using=>:btree}) + -> 0.0032s +-- create_table("conversational_development_index_metrics", {:force=>:cascade}) + -> 0.0076s +-- create_table("deploy_keys_projects", {:force=>:cascade}) + -> 0.0037s +-- add_index("deploy_keys_projects", ["project_id"], {:name=>"index_deploy_keys_projects_on_project_id", :using=>:btree}) + -> 0.0032s +-- create_table("deployments", {:force=>:cascade}) + -> 0.0049s +-- add_index("deployments", ["created_at"], {:name=>"index_deployments_on_created_at", :using=>:btree}) + -> 0.0034s +-- add_index("deployments", ["environment_id", "id"], {:name=>"index_deployments_on_environment_id_and_id", :using=>:btree}) + -> 0.0028s +-- add_index("deployments", ["environment_id", "iid", "project_id"], {:name=>"index_deployments_on_environment_id_and_iid_and_project_id", :using=>:btree}) + -> 0.0029s +-- add_index("deployments", ["project_id", "iid"], {:name=>"index_deployments_on_project_id_and_iid", :unique=>true, :using=>:btree}) + -> 0.0032s +-- create_table("emails", {:force=>:cascade}) + -> 0.0046s +-- add_index("emails", ["confirmation_token"], {:name=>"index_emails_on_confirmation_token", :unique=>true, :using=>:btree}) + -> 0.0030s +-- add_index("emails", ["email"], {:name=>"index_emails_on_email", :unique=>true, :using=>:btree}) + -> 0.0035s +-- add_index("emails", ["user_id"], {:name=>"index_emails_on_user_id", :using=>:btree}) + -> 0.0028s +-- create_table("environments", {:force=>:cascade}) + -> 0.0052s +-- add_index("environments", ["project_id", "name"], {:name=>"index_environments_on_project_id_and_name", :unique=>true, :using=>:btree}) + -> 0.0031s +-- add_index("environments", ["project_id", "slug"], {:name=>"index_environments_on_project_id_and_slug", :unique=>true, :using=>:btree}) + -> 0.0028s +-- create_table("events", {:force=>:cascade}) + -> 0.0046s +-- add_index("events", ["action"], {:name=>"index_events_on_action", :using=>:btree}) + -> 0.0032s +-- add_index("events", ["author_id"], {:name=>"index_events_on_author_id", :using=>:btree}) + -> 0.0027s +-- add_index("events", ["project_id", "id"], {:name=>"index_events_on_project_id_and_id", :using=>:btree}) + -> 0.0027s +-- add_index("events", ["target_type", "target_id"], {:name=>"index_events_on_target_type_and_target_id", :using=>:btree}) + -> 0.0027s +-- create_table("feature_gates", {:force=>:cascade}) + -> 0.0046s +-- add_index("feature_gates", ["feature_key", "key", "value"], {:name=>"index_feature_gates_on_feature_key_and_key_and_value", :unique=>true, :using=>:btree}) + -> 0.0031s +-- create_table("features", {:force=>:cascade}) + -> 0.0041s +-- add_index("features", ["key"], {:name=>"index_features_on_key", :unique=>true, :using=>:btree}) + -> 0.0030s +-- create_table("fork_network_members", {:force=>:cascade}) + -> 0.0033s +-- add_index("fork_network_members", ["fork_network_id"], {:name=>"index_fork_network_members_on_fork_network_id", :using=>:btree}) + -> 0.0033s +-- add_index("fork_network_members", ["project_id"], {:name=>"index_fork_network_members_on_project_id", 
:unique=>true, :using=>:btree}) + -> 0.0029s +-- create_table("fork_networks", {:force=>:cascade}) + -> 0.0049s +-- add_index("fork_networks", ["root_project_id"], {:name=>"index_fork_networks_on_root_project_id", :unique=>true, :using=>:btree}) + -> 0.0029s +-- create_table("forked_project_links", {:force=>:cascade}) + -> 0.0032s +-- add_index("forked_project_links", ["forked_to_project_id"], {:name=>"index_forked_project_links_on_forked_to_project_id", :unique=>true, :using=>:btree}) + -> 0.0030s +-- create_table("gcp_clusters", {:force=>:cascade}) + -> 0.0074s +-- add_index("gcp_clusters", ["project_id"], {:name=>"index_gcp_clusters_on_project_id", :unique=>true, :using=>:btree}) + -> 0.0030s +-- create_table("gpg_key_subkeys", {:force=>:cascade}) + -> 0.0042s +-- add_index("gpg_key_subkeys", ["fingerprint"], {:name=>"index_gpg_key_subkeys_on_fingerprint", :unique=>true, :using=>:btree}) + -> 0.0029s +-- add_index("gpg_key_subkeys", ["gpg_key_id"], {:name=>"index_gpg_key_subkeys_on_gpg_key_id", :using=>:btree}) + -> 0.0032s +-- add_index("gpg_key_subkeys", ["keyid"], {:name=>"index_gpg_key_subkeys_on_keyid", :unique=>true, :using=>:btree}) + -> 0.0027s +-- create_table("gpg_keys", {:force=>:cascade}) + -> 0.0042s +-- add_index("gpg_keys", ["fingerprint"], {:name=>"index_gpg_keys_on_fingerprint", :unique=>true, :using=>:btree}) + -> 0.0032s +-- add_index("gpg_keys", ["primary_keyid"], {:name=>"index_gpg_keys_on_primary_keyid", :unique=>true, :using=>:btree}) + -> 0.0026s +-- add_index("gpg_keys", ["user_id"], {:name=>"index_gpg_keys_on_user_id", :using=>:btree}) + -> 0.0028s +-- create_table("gpg_signatures", {:force=>:cascade}) + -> 0.0054s +-- add_index("gpg_signatures", ["commit_sha"], {:name=>"index_gpg_signatures_on_commit_sha", :unique=>true, :using=>:btree}) + -> 0.0029s +-- add_index("gpg_signatures", ["gpg_key_id"], {:name=>"index_gpg_signatures_on_gpg_key_id", :using=>:btree}) + -> 0.0026s +-- add_index("gpg_signatures", ["gpg_key_primary_keyid"], {:name=>"index_gpg_signatures_on_gpg_key_primary_keyid", :using=>:btree}) + -> 0.0029s +-- add_index("gpg_signatures", ["gpg_key_subkey_id"], {:name=>"index_gpg_signatures_on_gpg_key_subkey_id", :using=>:btree}) + -> 0.0032s +-- add_index("gpg_signatures", ["project_id"], {:name=>"index_gpg_signatures_on_project_id", :using=>:btree}) + -> 0.0028s +-- create_table("group_custom_attributes", {:force=>:cascade}) + -> 0.0044s +-- add_index("group_custom_attributes", ["group_id", "key"], {:name=>"index_group_custom_attributes_on_group_id_and_key", :unique=>true, :using=>:btree}) + -> 0.0032s +-- add_index("group_custom_attributes", ["key", "value"], {:name=>"index_group_custom_attributes_on_key_and_value", :using=>:btree}) + -> 0.0028s +-- create_table("identities", {:force=>:cascade}) + -> 0.0043s +-- add_index("identities", ["user_id"], {:name=>"index_identities_on_user_id", :using=>:btree}) + -> 0.0034s +-- create_table("issue_assignees", {:id=>false, :force=>:cascade}) + -> 0.0013s +-- add_index("issue_assignees", ["issue_id", "user_id"], {:name=>"index_issue_assignees_on_issue_id_and_user_id", :unique=>true, :using=>:btree}) + -> 0.0028s +-- add_index("issue_assignees", ["user_id"], {:name=>"index_issue_assignees_on_user_id", :using=>:btree}) + -> 0.0029s +-- create_table("issue_metrics", {:force=>:cascade}) + -> 0.0032s +-- add_index("issue_metrics", ["issue_id"], {:name=>"index_issue_metrics", :using=>:btree}) + -> 0.0029s +-- create_table("issues", {:force=>:cascade}) + -> 0.0051s +-- add_index("issues", ["author_id"], 
{:name=>"index_issues_on_author_id", :using=>:btree}) + -> 0.0028s +-- add_index("issues", ["confidential"], {:name=>"index_issues_on_confidential", :using=>:btree}) + -> 0.0029s +-- add_index("issues", ["description"], {:name=>"index_issues_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) + -> 0.0022s +-- add_index("issues", ["milestone_id"], {:name=>"index_issues_on_milestone_id", :using=>:btree}) + -> 0.0027s +-- add_index("issues", ["moved_to_id"], {:name=>"index_issues_on_moved_to_id", :where=>"(moved_to_id IS NOT NULL)", :using=>:btree}) + -> 0.0030s +-- add_index("issues", ["project_id", "created_at", "id", "state"], {:name=>"index_issues_on_project_id_and_created_at_and_id_and_state", :using=>:btree}) + -> 0.0039s +-- add_index("issues", ["project_id", "due_date", "id", "state"], {:name=>"idx_issues_on_project_id_and_due_date_and_id_and_state_partial", :where=>"(due_date IS NOT NULL)", :using=>:btree}) + -> 0.0031s +-- add_index("issues", ["project_id", "iid"], {:name=>"index_issues_on_project_id_and_iid", :unique=>true, :using=>:btree}) + -> 0.0032s +-- add_index("issues", ["project_id", "updated_at", "id", "state"], {:name=>"index_issues_on_project_id_and_updated_at_and_id_and_state", :using=>:btree}) + -> 0.0035s +-- add_index("issues", ["relative_position"], {:name=>"index_issues_on_relative_position", :using=>:btree}) + -> 0.0030s +-- add_index("issues", ["state"], {:name=>"index_issues_on_state", :using=>:btree}) + -> 0.0027s +-- add_index("issues", ["title"], {:name=>"index_issues_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) + -> 0.0021s +-- add_index("issues", ["updated_at"], {:name=>"index_issues_on_updated_at", :using=>:btree}) + -> 0.0030s +-- add_index("issues", ["updated_by_id"], {:name=>"index_issues_on_updated_by_id", :where=>"(updated_by_id IS NOT NULL)", :using=>:btree}) + -> 0.0028s +-- create_table("keys", {:force=>:cascade}) + -> 0.0048s +-- add_index("keys", ["fingerprint"], {:name=>"index_keys_on_fingerprint", :unique=>true, :using=>:btree}) + -> 0.0028s +-- add_index("keys", ["user_id"], {:name=>"index_keys_on_user_id", :using=>:btree}) + -> 0.0029s +-- create_table("label_links", {:force=>:cascade}) + -> 0.0041s +-- add_index("label_links", ["label_id"], {:name=>"index_label_links_on_label_id", :using=>:btree}) + -> 0.0027s +-- add_index("label_links", ["target_id", "target_type"], {:name=>"index_label_links_on_target_id_and_target_type", :using=>:btree}) + -> 0.0028s +-- create_table("label_priorities", {:force=>:cascade}) + -> 0.0031s +-- add_index("label_priorities", ["priority"], {:name=>"index_label_priorities_on_priority", :using=>:btree}) + -> 0.0028s +-- add_index("label_priorities", ["project_id", "label_id"], {:name=>"index_label_priorities_on_project_id_and_label_id", :unique=>true, :using=>:btree}) + -> 0.0027s +-- create_table("labels", {:force=>:cascade}) + -> 0.0046s +-- add_index("labels", ["group_id", "project_id", "title"], {:name=>"index_labels_on_group_id_and_project_id_and_title", :unique=>true, :using=>:btree}) + -> 0.0028s +-- add_index("labels", ["project_id"], {:name=>"index_labels_on_project_id", :using=>:btree}) + -> 0.0032s +-- add_index("labels", ["template"], {:name=>"index_labels_on_template", :where=>"template", :using=>:btree}) + -> 0.0027s +-- add_index("labels", ["title"], {:name=>"index_labels_on_title", :using=>:btree}) + -> 0.0030s +-- add_index("labels", ["type", "project_id"], {:name=>"index_labels_on_type_and_project_id", :using=>:btree}) + -> 0.0028s +-- 
create_table("lfs_objects", {:force=>:cascade}) + -> 0.0040s +-- add_index("lfs_objects", ["oid"], {:name=>"index_lfs_objects_on_oid", :unique=>true, :using=>:btree}) + -> 0.0032s +-- create_table("lfs_objects_projects", {:force=>:cascade}) + -> 0.0035s +-- add_index("lfs_objects_projects", ["project_id"], {:name=>"index_lfs_objects_projects_on_project_id", :using=>:btree}) + -> 0.0025s +-- create_table("lists", {:force=>:cascade}) + -> 0.0033s +-- add_index("lists", ["board_id", "label_id"], {:name=>"index_lists_on_board_id_and_label_id", :unique=>true, :using=>:btree}) + -> 0.0026s +-- add_index("lists", ["label_id"], {:name=>"index_lists_on_label_id", :using=>:btree}) + -> 0.0026s +-- create_table("members", {:force=>:cascade}) + -> 0.0046s +-- add_index("members", ["access_level"], {:name=>"index_members_on_access_level", :using=>:btree}) + -> 0.0028s +-- add_index("members", ["invite_token"], {:name=>"index_members_on_invite_token", :unique=>true, :using=>:btree}) + -> 0.0027s +-- add_index("members", ["requested_at"], {:name=>"index_members_on_requested_at", :using=>:btree}) + -> 0.0025s +-- add_index("members", ["source_id", "source_type"], {:name=>"index_members_on_source_id_and_source_type", :using=>:btree}) + -> 0.0027s +-- add_index("members", ["user_id"], {:name=>"index_members_on_user_id", :using=>:btree}) + -> 0.0026s +-- create_table("merge_request_diff_commits", {:id=>false, :force=>:cascade}) + -> 0.0027s +-- add_index("merge_request_diff_commits", ["merge_request_diff_id", "relative_order"], {:name=>"index_merge_request_diff_commits_on_mr_diff_id_and_order", :unique=>true, :using=>:btree}) + -> 0.0032s +-- add_index("merge_request_diff_commits", ["sha"], {:name=>"index_merge_request_diff_commits_on_sha", :using=>:btree}) + -> 0.0029s +-- create_table("merge_request_diff_files", {:id=>false, :force=>:cascade}) + -> 0.0027s +-- add_index("merge_request_diff_files", ["merge_request_diff_id", "relative_order"], {:name=>"index_merge_request_diff_files_on_mr_diff_id_and_order", :unique=>true, :using=>:btree}) + -> 0.0027s +-- create_table("merge_request_diffs", {:force=>:cascade}) + -> 0.0042s +-- add_index("merge_request_diffs", ["merge_request_id", "id"], {:name=>"index_merge_request_diffs_on_merge_request_id_and_id", :using=>:btree}) + -> 0.0030s +-- create_table("merge_request_metrics", {:force=>:cascade}) + -> 0.0034s +-- add_index("merge_request_metrics", ["first_deployed_to_production_at"], {:name=>"index_merge_request_metrics_on_first_deployed_to_production_at", :using=>:btree}) + -> 0.0028s +-- add_index("merge_request_metrics", ["merge_request_id"], {:name=>"index_merge_request_metrics", :using=>:btree}) + -> 0.0025s +-- add_index("merge_request_metrics", ["pipeline_id"], {:name=>"index_merge_request_metrics_on_pipeline_id", :using=>:btree}) + -> 0.0026s +-- create_table("merge_requests", {:force=>:cascade}) + -> 0.0066s +-- add_index("merge_requests", ["assignee_id"], {:name=>"index_merge_requests_on_assignee_id", :using=>:btree}) + -> 0.0029s +-- add_index("merge_requests", ["author_id"], {:name=>"index_merge_requests_on_author_id", :using=>:btree}) + -> 0.0026s +-- add_index("merge_requests", ["created_at"], {:name=>"index_merge_requests_on_created_at", :using=>:btree}) + -> 0.0026s +-- add_index("merge_requests", ["description"], {:name=>"index_merge_requests_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) + -> 0.0020s +-- add_index("merge_requests", ["head_pipeline_id"], {:name=>"index_merge_requests_on_head_pipeline_id", 
:using=>:btree}) + -> 0.0027s +-- add_index("merge_requests", ["latest_merge_request_diff_id"], {:name=>"index_merge_requests_on_latest_merge_request_diff_id", :using=>:btree}) + -> 0.0025s +-- add_index("merge_requests", ["merge_user_id"], {:name=>"index_merge_requests_on_merge_user_id", :where=>"(merge_user_id IS NOT NULL)", :using=>:btree}) + -> 0.0029s +-- add_index("merge_requests", ["milestone_id"], {:name=>"index_merge_requests_on_milestone_id", :using=>:btree}) + -> 0.0030s +-- add_index("merge_requests", ["source_branch"], {:name=>"index_merge_requests_on_source_branch", :using=>:btree}) + -> 0.0026s +-- add_index("merge_requests", ["source_project_id", "source_branch"], {:name=>"index_merge_requests_on_source_project_and_branch_state_opened", :where=>"((state)::text = 'opened'::text)", :using=>:btree}) + -> 0.0029s +-- add_index("merge_requests", ["source_project_id", "source_branch"], {:name=>"index_merge_requests_on_source_project_id_and_source_branch", :using=>:btree}) + -> 0.0031s +-- add_index("merge_requests", ["target_branch"], {:name=>"index_merge_requests_on_target_branch", :using=>:btree}) + -> 0.0028s +-- add_index("merge_requests", ["target_project_id", "iid"], {:name=>"index_merge_requests_on_target_project_id_and_iid", :unique=>true, :using=>:btree}) + -> 0.0027s +-- add_index("merge_requests", ["target_project_id", "merge_commit_sha", "id"], {:name=>"index_merge_requests_on_tp_id_and_merge_commit_sha_and_id", :using=>:btree}) + -> 0.0029s +-- add_index("merge_requests", ["title"], {:name=>"index_merge_requests_on_title", :using=>:btree}) + -> 0.0026s +-- add_index("merge_requests", ["title"], {:name=>"index_merge_requests_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) + -> 0.0020s +-- add_index("merge_requests", ["updated_by_id"], {:name=>"index_merge_requests_on_updated_by_id", :where=>"(updated_by_id IS NOT NULL)", :using=>:btree}) + -> 0.0029s +-- create_table("merge_requests_closing_issues", {:force=>:cascade}) + -> 0.0031s +-- add_index("merge_requests_closing_issues", ["issue_id"], {:name=>"index_merge_requests_closing_issues_on_issue_id", :using=>:btree}) + -> 0.0026s +-- add_index("merge_requests_closing_issues", ["merge_request_id"], {:name=>"index_merge_requests_closing_issues_on_merge_request_id", :using=>:btree}) + -> 0.0028s +-- create_table("milestones", {:force=>:cascade}) + -> 0.0044s +-- add_index("milestones", ["description"], {:name=>"index_milestones_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) + -> 0.0022s +-- add_index("milestones", ["due_date"], {:name=>"index_milestones_on_due_date", :using=>:btree}) + -> 0.0033s +-- add_index("milestones", ["group_id"], {:name=>"index_milestones_on_group_id", :using=>:btree}) + -> 0.0028s +-- add_index("milestones", ["project_id", "iid"], {:name=>"index_milestones_on_project_id_and_iid", :unique=>true, :using=>:btree}) + -> 0.0028s +-- add_index("milestones", ["title"], {:name=>"index_milestones_on_title", :using=>:btree}) + -> 0.0026s +-- add_index("milestones", ["title"], {:name=>"index_milestones_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) + -> 0.0021s +-- create_table("namespaces", {:force=>:cascade}) + -> 0.0068s +-- add_index("namespaces", ["created_at"], {:name=>"index_namespaces_on_created_at", :using=>:btree}) + -> 0.0030s +-- add_index("namespaces", ["name", "parent_id"], {:name=>"index_namespaces_on_name_and_parent_id", :unique=>true, :using=>:btree}) + -> 0.0030s +-- add_index("namespaces", ["name"], 
{:name=>"index_namespaces_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}}) + -> 0.0020s +-- add_index("namespaces", ["owner_id"], {:name=>"index_namespaces_on_owner_id", :using=>:btree}) + -> 0.0028s +-- add_index("namespaces", ["parent_id", "id"], {:name=>"index_namespaces_on_parent_id_and_id", :unique=>true, :using=>:btree}) + -> 0.0032s +-- add_index("namespaces", ["path"], {:name=>"index_namespaces_on_path", :using=>:btree}) + -> 0.0031s +-- add_index("namespaces", ["path"], {:name=>"index_namespaces_on_path_trigram", :using=>:gin, :opclasses=>{"path"=>"gin_trgm_ops"}}) + -> 0.0019s +-- add_index("namespaces", ["require_two_factor_authentication"], {:name=>"index_namespaces_on_require_two_factor_authentication", :using=>:btree}) + -> 0.0029s +-- add_index("namespaces", ["type"], {:name=>"index_namespaces_on_type", :using=>:btree}) + -> 0.0032s +-- create_table("notes", {:force=>:cascade}) + -> 0.0055s +-- add_index("notes", ["author_id"], {:name=>"index_notes_on_author_id", :using=>:btree}) + -> 0.0029s +-- add_index("notes", ["commit_id"], {:name=>"index_notes_on_commit_id", :using=>:btree}) + -> 0.0028s +-- add_index("notes", ["created_at"], {:name=>"index_notes_on_created_at", :using=>:btree}) + -> 0.0029s +-- add_index("notes", ["discussion_id"], {:name=>"index_notes_on_discussion_id", :using=>:btree}) + -> 0.0029s +-- add_index("notes", ["line_code"], {:name=>"index_notes_on_line_code", :using=>:btree}) + -> 0.0029s +-- add_index("notes", ["note"], {:name=>"index_notes_on_note_trigram", :using=>:gin, :opclasses=>{"note"=>"gin_trgm_ops"}}) + -> 0.0024s +-- add_index("notes", ["noteable_id", "noteable_type"], {:name=>"index_notes_on_noteable_id_and_noteable_type", :using=>:btree}) + -> 0.0029s +-- add_index("notes", ["noteable_type"], {:name=>"index_notes_on_noteable_type", :using=>:btree}) + -> 0.0030s +-- add_index("notes", ["project_id", "noteable_type"], {:name=>"index_notes_on_project_id_and_noteable_type", :using=>:btree}) + -> 0.0027s +-- add_index("notes", ["updated_at"], {:name=>"index_notes_on_updated_at", :using=>:btree}) + -> 0.0026s +-- create_table("notification_settings", {:force=>:cascade}) + -> 0.0053s +-- add_index("notification_settings", ["source_id", "source_type"], {:name=>"index_notification_settings_on_source_id_and_source_type", :using=>:btree}) + -> 0.0028s +-- add_index("notification_settings", ["user_id", "source_id", "source_type"], {:name=>"index_notifications_on_user_id_and_source_id_and_source_type", :unique=>true, :using=>:btree}) + -> 0.0030s +-- add_index("notification_settings", ["user_id"], {:name=>"index_notification_settings_on_user_id", :using=>:btree}) + -> 0.0031s +-- create_table("oauth_access_grants", {:force=>:cascade}) + -> 0.0042s +-- add_index("oauth_access_grants", ["token"], {:name=>"index_oauth_access_grants_on_token", :unique=>true, :using=>:btree}) + -> 0.0031s +-- create_table("oauth_access_tokens", {:force=>:cascade}) + -> 0.0051s +-- add_index("oauth_access_tokens", ["refresh_token"], {:name=>"index_oauth_access_tokens_on_refresh_token", :unique=>true, :using=>:btree}) + -> 0.0030s +-- add_index("oauth_access_tokens", ["resource_owner_id"], {:name=>"index_oauth_access_tokens_on_resource_owner_id", :using=>:btree}) + -> 0.0025s +-- add_index("oauth_access_tokens", ["token"], {:name=>"index_oauth_access_tokens_on_token", :unique=>true, :using=>:btree}) + -> 0.0026s +-- create_table("oauth_applications", {:force=>:cascade}) + -> 0.0049s +-- add_index("oauth_applications", ["owner_id", "owner_type"], 
{:name=>"index_oauth_applications_on_owner_id_and_owner_type", :using=>:btree}) + -> 0.0030s +-- add_index("oauth_applications", ["uid"], {:name=>"index_oauth_applications_on_uid", :unique=>true, :using=>:btree}) + -> 0.0032s +-- create_table("oauth_openid_requests", {:force=>:cascade}) + -> 0.0048s +-- create_table("pages_domains", {:force=>:cascade}) + -> 0.0052s +-- add_index("pages_domains", ["domain"], {:name=>"index_pages_domains_on_domain", :unique=>true, :using=>:btree}) + -> 0.0027s +-- add_index("pages_domains", ["project_id"], {:name=>"index_pages_domains_on_project_id", :using=>:btree}) + -> 0.0030s +-- create_table("personal_access_tokens", {:force=>:cascade}) + -> 0.0056s +-- add_index("personal_access_tokens", ["token"], {:name=>"index_personal_access_tokens_on_token", :unique=>true, :using=>:btree}) + -> 0.0032s +-- add_index("personal_access_tokens", ["user_id"], {:name=>"index_personal_access_tokens_on_user_id", :using=>:btree}) + -> 0.0028s +-- create_table("project_authorizations", {:id=>false, :force=>:cascade}) + -> 0.0018s +-- add_index("project_authorizations", ["project_id"], {:name=>"index_project_authorizations_on_project_id", :using=>:btree}) + -> 0.0033s +-- add_index("project_authorizations", ["user_id", "project_id", "access_level"], {:name=>"index_project_authorizations_on_user_id_project_id_access_level", :unique=>true, :using=>:btree}) + -> 0.0029s +-- create_table("project_auto_devops", {:force=>:cascade}) + -> 0.0043s +-- add_index("project_auto_devops", ["project_id"], {:name=>"index_project_auto_devops_on_project_id", :unique=>true, :using=>:btree}) + -> 0.0029s +-- create_table("project_custom_attributes", {:force=>:cascade}) + -> 0.0047s +-- add_index("project_custom_attributes", ["key", "value"], {:name=>"index_project_custom_attributes_on_key_and_value", :using=>:btree}) + -> 0.0030s +-- add_index("project_custom_attributes", ["project_id", "key"], {:name=>"index_project_custom_attributes_on_project_id_and_key", :unique=>true, :using=>:btree}) + -> 0.0028s +-- create_table("project_features", {:force=>:cascade}) + -> 0.0038s +-- add_index("project_features", ["project_id"], {:name=>"index_project_features_on_project_id", :using=>:btree}) + -> 0.0029s +-- create_table("project_group_links", {:force=>:cascade}) + -> 0.0036s +-- add_index("project_group_links", ["group_id"], {:name=>"index_project_group_links_on_group_id", :using=>:btree}) + -> 0.0028s +-- add_index("project_group_links", ["project_id"], {:name=>"index_project_group_links_on_project_id", :using=>:btree}) + -> 0.0030s +-- create_table("project_import_data", {:force=>:cascade}) + -> 0.0049s +-- add_index("project_import_data", ["project_id"], {:name=>"index_project_import_data_on_project_id", :using=>:btree}) + -> 0.0027s +-- create_table("project_statistics", {:force=>:cascade}) + -> 0.0046s +-- add_index("project_statistics", ["namespace_id"], {:name=>"index_project_statistics_on_namespace_id", :using=>:btree}) + -> 0.0027s +-- add_index("project_statistics", ["project_id"], {:name=>"index_project_statistics_on_project_id", :unique=>true, :using=>:btree}) + -> 0.0029s +-- create_table("projects", {:force=>:cascade}) + -> 0.0090s +-- add_index("projects", ["ci_id"], {:name=>"index_projects_on_ci_id", :using=>:btree}) + -> 0.0033s +-- add_index("projects", ["created_at"], {:name=>"index_projects_on_created_at", :using=>:btree}) + -> 0.0030s +-- add_index("projects", ["creator_id"], {:name=>"index_projects_on_creator_id", :using=>:btree}) + -> 0.0028s +-- add_index("projects", 
["description"], {:name=>"index_projects_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) + -> 0.0022s +-- add_index("projects", ["last_activity_at"], {:name=>"index_projects_on_last_activity_at", :using=>:btree}) + -> 0.0032s +-- add_index("projects", ["last_repository_check_failed"], {:name=>"index_projects_on_last_repository_check_failed", :using=>:btree}) + -> 0.0030s +-- add_index("projects", ["last_repository_updated_at"], {:name=>"index_projects_on_last_repository_updated_at", :using=>:btree}) + -> 0.0031s +-- add_index("projects", ["name"], {:name=>"index_projects_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}}) + -> 0.0022s +-- add_index("projects", ["namespace_id"], {:name=>"index_projects_on_namespace_id", :using=>:btree}) + -> 0.0028s +-- add_index("projects", ["path"], {:name=>"index_projects_on_path", :using=>:btree}) + -> 0.0028s +-- add_index("projects", ["path"], {:name=>"index_projects_on_path_trigram", :using=>:gin, :opclasses=>{"path"=>"gin_trgm_ops"}}) + -> 0.0023s +-- add_index("projects", ["pending_delete"], {:name=>"index_projects_on_pending_delete", :using=>:btree}) + -> 0.0029s +-- add_index("projects", ["repository_storage"], {:name=>"index_projects_on_repository_storage", :using=>:btree}) + -> 0.0026s +-- add_index("projects", ["runners_token"], {:name=>"index_projects_on_runners_token", :using=>:btree}) + -> 0.0034s +-- add_index("projects", ["star_count"], {:name=>"index_projects_on_star_count", :using=>:btree}) + -> 0.0028s +-- add_index("projects", ["visibility_level"], {:name=>"index_projects_on_visibility_level", :using=>:btree}) + -> 0.0027s +-- create_table("protected_branch_merge_access_levels", {:force=>:cascade}) + -> 0.0042s +-- add_index("protected_branch_merge_access_levels", ["protected_branch_id"], {:name=>"index_protected_branch_merge_access", :using=>:btree}) + -> 0.0029s +-- create_table("protected_branch_push_access_levels", {:force=>:cascade}) + -> 0.0037s +-- add_index("protected_branch_push_access_levels", ["protected_branch_id"], {:name=>"index_protected_branch_push_access", :using=>:btree}) + -> 0.0030s +-- create_table("protected_branches", {:force=>:cascade}) + -> 0.0048s +-- add_index("protected_branches", ["project_id"], {:name=>"index_protected_branches_on_project_id", :using=>:btree}) + -> 0.0030s +-- create_table("protected_tag_create_access_levels", {:force=>:cascade}) + -> 0.0037s +-- add_index("protected_tag_create_access_levels", ["protected_tag_id"], {:name=>"index_protected_tag_create_access", :using=>:btree}) + -> 0.0029s +-- add_index("protected_tag_create_access_levels", ["user_id"], {:name=>"index_protected_tag_create_access_levels_on_user_id", :using=>:btree}) + -> 0.0029s +-- create_table("protected_tags", {:force=>:cascade}) + -> 0.0051s +-- add_index("protected_tags", ["project_id"], {:name=>"index_protected_tags_on_project_id", :using=>:btree}) + -> 0.0034s +-- create_table("push_event_payloads", {:id=>false, :force=>:cascade}) + -> 0.0030s +-- add_index("push_event_payloads", ["event_id"], {:name=>"index_push_event_payloads_on_event_id", :unique=>true, :using=>:btree}) + -> 0.0029s +-- create_table("redirect_routes", {:force=>:cascade}) + -> 0.0049s +-- add_index("redirect_routes", ["path"], {:name=>"index_redirect_routes_on_path", :unique=>true, :using=>:btree}) + -> 0.0031s +-- add_index("redirect_routes", ["source_type", "source_id"], {:name=>"index_redirect_routes_on_source_type_and_source_id", :using=>:btree}) + -> 0.0034s +-- create_table("releases", 
{:force=>:cascade}) + -> 0.0043s +-- add_index("releases", ["project_id", "tag"], {:name=>"index_releases_on_project_id_and_tag", :using=>:btree}) + -> 0.0032s +-- add_index("releases", ["project_id"], {:name=>"index_releases_on_project_id", :using=>:btree}) + -> 0.0030s +-- create_table("routes", {:force=>:cascade}) + -> 0.0055s +-- add_index("routes", ["path"], {:name=>"index_routes_on_path", :unique=>true, :using=>:btree}) + -> 0.0028s +-- add_index("routes", ["path"], {:name=>"index_routes_on_path_text_pattern_ops", :using=>:btree, :opclasses=>{"path"=>"varchar_pattern_ops"}}) + -> 0.0026s +-- add_index("routes", ["source_type", "source_id"], {:name=>"index_routes_on_source_type_and_source_id", :unique=>true, :using=>:btree}) + -> 0.0029s +-- create_table("sent_notifications", {:force=>:cascade}) + -> 0.0048s +-- add_index("sent_notifications", ["reply_key"], {:name=>"index_sent_notifications_on_reply_key", :unique=>true, :using=>:btree}) + -> 0.0029s +-- create_table("services", {:force=>:cascade}) + -> 0.0091s +-- add_index("services", ["project_id"], {:name=>"index_services_on_project_id", :using=>:btree}) + -> 0.0028s +-- add_index("services", ["template"], {:name=>"index_services_on_template", :using=>:btree}) + -> 0.0031s +-- create_table("snippets", {:force=>:cascade}) + -> 0.0050s +-- add_index("snippets", ["author_id"], {:name=>"index_snippets_on_author_id", :using=>:btree}) + -> 0.0030s +-- add_index("snippets", ["file_name"], {:name=>"index_snippets_on_file_name_trigram", :using=>:gin, :opclasses=>{"file_name"=>"gin_trgm_ops"}}) + -> 0.0020s +-- add_index("snippets", ["project_id"], {:name=>"index_snippets_on_project_id", :using=>:btree}) + -> 0.0028s +-- add_index("snippets", ["title"], {:name=>"index_snippets_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) + -> 0.0020s +-- add_index("snippets", ["updated_at"], {:name=>"index_snippets_on_updated_at", :using=>:btree}) + -> 0.0026s +-- add_index("snippets", ["visibility_level"], {:name=>"index_snippets_on_visibility_level", :using=>:btree}) + -> 0.0026s +-- create_table("spam_logs", {:force=>:cascade}) + -> 0.0048s +-- create_table("subscriptions", {:force=>:cascade}) + -> 0.0041s +-- add_index("subscriptions", ["subscribable_id", "subscribable_type", "user_id", "project_id"], {:name=>"index_subscriptions_on_subscribable_and_user_id_and_project_id", :unique=>true, :using=>:btree}) + -> 0.0030s +-- create_table("system_note_metadata", {:force=>:cascade}) + -> 0.0040s +-- add_index("system_note_metadata", ["note_id"], {:name=>"index_system_note_metadata_on_note_id", :unique=>true, :using=>:btree}) + -> 0.0029s +-- create_table("taggings", {:force=>:cascade}) + -> 0.0047s +-- add_index("taggings", ["tag_id", "taggable_id", "taggable_type", "context", "tagger_id", "tagger_type"], {:name=>"taggings_idx", :unique=>true, :using=>:btree}) + -> 0.0030s +-- add_index("taggings", ["taggable_id", "taggable_type", "context"], {:name=>"index_taggings_on_taggable_id_and_taggable_type_and_context", :using=>:btree}) + -> 0.0025s +-- create_table("tags", {:force=>:cascade}) + -> 0.0044s +-- add_index("tags", ["name"], {:name=>"index_tags_on_name", :unique=>true, :using=>:btree}) + -> 0.0026s +-- create_table("timelogs", {:force=>:cascade}) + -> 0.0033s +-- add_index("timelogs", ["issue_id"], {:name=>"index_timelogs_on_issue_id", :using=>:btree}) + -> 0.0027s +-- add_index("timelogs", ["merge_request_id"], {:name=>"index_timelogs_on_merge_request_id", :using=>:btree}) + -> 0.0033s +-- add_index("timelogs", ["user_id"], 
{:name=>"index_timelogs_on_user_id", :using=>:btree}) + -> 0.0028s +-- create_table("todos", {:force=>:cascade}) + -> 0.0043s +-- add_index("todos", ["author_id"], {:name=>"index_todos_on_author_id", :using=>:btree}) + -> 0.0027s +-- add_index("todos", ["commit_id"], {:name=>"index_todos_on_commit_id", :using=>:btree}) + -> 0.0028s +-- add_index("todos", ["note_id"], {:name=>"index_todos_on_note_id", :using=>:btree}) + -> 0.0028s +-- add_index("todos", ["project_id"], {:name=>"index_todos_on_project_id", :using=>:btree}) + -> 0.0027s +-- add_index("todos", ["target_type", "target_id"], {:name=>"index_todos_on_target_type_and_target_id", :using=>:btree}) + -> 0.0028s +-- add_index("todos", ["user_id"], {:name=>"index_todos_on_user_id", :using=>:btree}) + -> 0.0026s +-- create_table("trending_projects", {:force=>:cascade}) + -> 0.0030s +-- add_index("trending_projects", ["project_id"], {:name=>"index_trending_projects_on_project_id", :using=>:btree}) + -> 0.0027s +-- create_table("u2f_registrations", {:force=>:cascade}) + -> 0.0048s +-- add_index("u2f_registrations", ["key_handle"], {:name=>"index_u2f_registrations_on_key_handle", :using=>:btree}) + -> 0.0029s +-- add_index("u2f_registrations", ["user_id"], {:name=>"index_u2f_registrations_on_user_id", :using=>:btree}) + -> 0.0028s +-- create_table("uploads", {:force=>:cascade}) + -> 0.0044s +-- add_index("uploads", ["checksum"], {:name=>"index_uploads_on_checksum", :using=>:btree}) + -> 0.0028s +-- add_index("uploads", ["model_id", "model_type"], {:name=>"index_uploads_on_model_id_and_model_type", :using=>:btree}) + -> 0.0027s +-- add_index("uploads", ["path"], {:name=>"index_uploads_on_path", :using=>:btree}) + -> 0.0028s +-- create_table("user_agent_details", {:force=>:cascade}) + -> 0.0051s +-- add_index("user_agent_details", ["subject_id", "subject_type"], {:name=>"index_user_agent_details_on_subject_id_and_subject_type", :using=>:btree}) + -> 0.0028s +-- create_table("user_custom_attributes", {:force=>:cascade}) + -> 0.0044s +-- add_index("user_custom_attributes", ["key", "value"], {:name=>"index_user_custom_attributes_on_key_and_value", :using=>:btree}) + -> 0.0027s +-- add_index("user_custom_attributes", ["user_id", "key"], {:name=>"index_user_custom_attributes_on_user_id_and_key", :unique=>true, :using=>:btree}) + -> 0.0026s +-- create_table("user_synced_attributes_metadata", {:force=>:cascade}) + -> 0.0056s +-- add_index("user_synced_attributes_metadata", ["user_id"], {:name=>"index_user_synced_attributes_metadata_on_user_id", :unique=>true, :using=>:btree}) + -> 0.0027s +-- create_table("users", {:force=>:cascade}) + -> 0.0134s +-- add_index("users", ["admin"], {:name=>"index_users_on_admin", :using=>:btree}) + -> 0.0030s +-- add_index("users", ["confirmation_token"], {:name=>"index_users_on_confirmation_token", :unique=>true, :using=>:btree}) + -> 0.0029s +-- add_index("users", ["created_at"], {:name=>"index_users_on_created_at", :using=>:btree}) + -> 0.0034s +-- add_index("users", ["email"], {:name=>"index_users_on_email", :unique=>true, :using=>:btree}) + -> 0.0030s +-- add_index("users", ["email"], {:name=>"index_users_on_email_trigram", :using=>:gin, :opclasses=>{"email"=>"gin_trgm_ops"}}) + -> 0.0431s +-- add_index("users", ["ghost"], {:name=>"index_users_on_ghost", :using=>:btree}) + -> 0.0051s +-- add_index("users", ["incoming_email_token"], {:name=>"index_users_on_incoming_email_token", :using=>:btree}) + -> 0.0044s +-- add_index("users", ["name"], {:name=>"index_users_on_name", :using=>:btree}) + -> 0.0044s +-- 
add_index("users", ["name"], {:name=>"index_users_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}}) + -> 0.0034s +-- add_index("users", ["reset_password_token"], {:name=>"index_users_on_reset_password_token", :unique=>true, :using=>:btree}) + -> 0.0044s +-- add_index("users", ["rss_token"], {:name=>"index_users_on_rss_token", :using=>:btree}) + -> 0.0046s +-- add_index("users", ["state"], {:name=>"index_users_on_state", :using=>:btree}) + -> 0.0040s +-- add_index("users", ["username"], {:name=>"index_users_on_username", :using=>:btree}) + -> 0.0046s +-- add_index("users", ["username"], {:name=>"index_users_on_username_trigram", :using=>:gin, :opclasses=>{"username"=>"gin_trgm_ops"}}) + -> 0.0044s +-- create_table("users_star_projects", {:force=>:cascade}) + -> 0.0055s +-- add_index("users_star_projects", ["project_id"], {:name=>"index_users_star_projects_on_project_id", :using=>:btree}) + -> 0.0037s +-- add_index("users_star_projects", ["user_id", "project_id"], {:name=>"index_users_star_projects_on_user_id_and_project_id", :unique=>true, :using=>:btree}) + -> 0.0044s +-- create_table("web_hook_logs", {:force=>:cascade}) + -> 0.0060s +-- add_index("web_hook_logs", ["web_hook_id"], {:name=>"index_web_hook_logs_on_web_hook_id", :using=>:btree}) + -> 0.0034s +-- create_table("web_hooks", {:force=>:cascade}) + -> 0.0120s +-- add_index("web_hooks", ["project_id"], {:name=>"index_web_hooks_on_project_id", :using=>:btree}) + -> 0.0038s +-- add_index("web_hooks", ["type"], {:name=>"index_web_hooks_on_type", :using=>:btree}) + -> 0.0036s +-- add_foreign_key("boards", "projects", {:name=>"fk_f15266b5f9", :on_delete=>:cascade}) + -> 0.0030s +-- add_foreign_key("chat_teams", "namespaces", {:on_delete=>:cascade}) + -> 0.0021s +-- add_foreign_key("ci_build_trace_section_names", "projects", {:on_delete=>:cascade}) + -> 0.0022s +-- add_foreign_key("ci_build_trace_sections", "ci_build_trace_section_names", {:column=>"section_name_id", :name=>"fk_264e112c66", :on_delete=>:cascade}) + -> 0.0018s +-- add_foreign_key("ci_build_trace_sections", "ci_builds", {:column=>"build_id", :name=>"fk_4ebe41f502", :on_delete=>:cascade}) + -> 0.0024s +-- add_foreign_key("ci_build_trace_sections", "projects", {:on_delete=>:cascade}) + -> 0.0019s +-- add_foreign_key("ci_builds", "ci_pipelines", {:column=>"auto_canceled_by_id", :name=>"fk_a2141b1522", :on_delete=>:nullify}) + -> 0.0023s +-- add_foreign_key("ci_builds", "ci_stages", {:column=>"stage_id", :name=>"fk_3a9eaa254d", :on_delete=>:cascade}) + -> 0.0020s +-- add_foreign_key("ci_builds", "projects", {:name=>"fk_befce0568a", :on_delete=>:cascade}) + -> 0.0024s +-- add_foreign_key("ci_group_variables", "namespaces", {:column=>"group_id", :name=>"fk_33ae4d58d8", :on_delete=>:cascade}) + -> 0.0024s +-- add_foreign_key("ci_job_artifacts", "ci_builds", {:column=>"job_id", :on_delete=>:cascade}) + -> 0.0019s +-- add_foreign_key("ci_job_artifacts", "projects", {:on_delete=>:cascade}) + -> 0.0020s +-- add_foreign_key("ci_pipeline_schedule_variables", "ci_pipeline_schedules", {:column=>"pipeline_schedule_id", :name=>"fk_41c35fda51", :on_delete=>:cascade}) + -> 0.0027s +-- add_foreign_key("ci_pipeline_schedules", "projects", {:name=>"fk_8ead60fcc4", :on_delete=>:cascade}) + -> 0.0022s +-- add_foreign_key("ci_pipeline_schedules", "users", {:column=>"owner_id", :name=>"fk_9ea99f58d2", :on_delete=>:nullify}) + -> 0.0025s +-- add_foreign_key("ci_pipeline_variables", "ci_pipelines", {:column=>"pipeline_id", :name=>"fk_f29c5f4380", :on_delete=>:cascade}) + -> 
0.0018s +-- add_foreign_key("ci_pipelines", "ci_pipeline_schedules", {:column=>"pipeline_schedule_id", :name=>"fk_3d34ab2e06", :on_delete=>:nullify}) + -> 0.0019s +-- add_foreign_key("ci_pipelines", "ci_pipelines", {:column=>"auto_canceled_by_id", :name=>"fk_262d4c2d19", :on_delete=>:nullify}) + -> 0.0029s +-- add_foreign_key("ci_pipelines", "projects", {:name=>"fk_86635dbd80", :on_delete=>:cascade}) + -> 0.0023s +-- add_foreign_key("ci_runner_projects", "projects", {:name=>"fk_4478a6f1e4", :on_delete=>:cascade}) + -> 0.0036s +-- add_foreign_key("ci_stages", "ci_pipelines", {:column=>"pipeline_id", :name=>"fk_fb57e6cc56", :on_delete=>:cascade}) + -> 0.0017s +-- add_foreign_key("ci_stages", "projects", {:name=>"fk_2360681d1d", :on_delete=>:cascade}) + -> 0.0020s +-- add_foreign_key("ci_trigger_requests", "ci_triggers", {:column=>"trigger_id", :name=>"fk_b8ec8b7245", :on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("ci_triggers", "projects", {:name=>"fk_e3e63f966e", :on_delete=>:cascade}) + -> 0.0021s +-- add_foreign_key("ci_triggers", "users", {:column=>"owner_id", :name=>"fk_e8e10d1964", :on_delete=>:cascade}) + -> 0.0019s +-- add_foreign_key("ci_variables", "projects", {:name=>"fk_ada5eb64b3", :on_delete=>:cascade}) + -> 0.0021s +-- add_foreign_key("cluster_platforms_kubernetes", "clusters", {:on_delete=>:cascade}) + -> 0.0019s +-- add_foreign_key("cluster_projects", "clusters", {:on_delete=>:cascade}) + -> 0.0018s +-- add_foreign_key("cluster_projects", "projects", {:on_delete=>:cascade}) + -> 0.0020s +-- add_foreign_key("cluster_providers_gcp", "clusters", {:on_delete=>:cascade}) + -> 0.0017s +-- add_foreign_key("clusters", "users", {:on_delete=>:nullify}) + -> 0.0018s +-- add_foreign_key("clusters_applications_helm", "clusters", {:on_delete=>:cascade}) + -> 0.0019s +-- add_foreign_key("container_repositories", "projects") + -> 0.0020s +-- add_foreign_key("deploy_keys_projects", "projects", {:name=>"fk_58a901ca7e", :on_delete=>:cascade}) + -> 0.0019s +-- add_foreign_key("deployments", "projects", {:name=>"fk_b9a3851b82", :on_delete=>:cascade}) + -> 0.0021s +-- add_foreign_key("environments", "projects", {:name=>"fk_d1c8c1da6a", :on_delete=>:cascade}) + -> 0.0019s +-- add_foreign_key("events", "projects", {:on_delete=>:cascade}) + -> 0.0020s +-- add_foreign_key("events", "users", {:column=>"author_id", :name=>"fk_edfd187b6f", :on_delete=>:cascade}) + -> 0.0020s +-- add_foreign_key("fork_network_members", "fork_networks", {:on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("fork_network_members", "projects", {:column=>"forked_from_project_id", :name=>"fk_b01280dae4", :on_delete=>:nullify}) + -> 0.0019s +-- add_foreign_key("fork_network_members", "projects", {:on_delete=>:cascade}) + -> 0.0018s +-- add_foreign_key("fork_networks", "projects", {:column=>"root_project_id", :name=>"fk_e7b436b2b5", :on_delete=>:nullify}) + -> 0.0018s +-- add_foreign_key("forked_project_links", "projects", {:column=>"forked_to_project_id", :name=>"fk_434510edb0", :on_delete=>:cascade}) + -> 0.0018s +-- add_foreign_key("gcp_clusters", "projects", {:on_delete=>:cascade}) + -> 0.0029s +-- add_foreign_key("gcp_clusters", "services", {:on_delete=>:nullify}) + -> 0.0022s +-- add_foreign_key("gcp_clusters", "users", {:on_delete=>:nullify}) + -> 0.0019s +-- add_foreign_key("gpg_key_subkeys", "gpg_keys", {:on_delete=>:cascade}) + -> 0.0017s +-- add_foreign_key("gpg_keys", "users", {:on_delete=>:cascade}) + -> 0.0019s +-- add_foreign_key("gpg_signatures", "gpg_key_subkeys", {:on_delete=>:nullify}) + -> 
0.0016s +-- add_foreign_key("gpg_signatures", "gpg_keys", {:on_delete=>:nullify}) + -> 0.0016s +-- add_foreign_key("gpg_signatures", "projects", {:on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("group_custom_attributes", "namespaces", {:column=>"group_id", :on_delete=>:cascade}) + -> 0.0014s +-- add_foreign_key("issue_assignees", "issues", {:name=>"fk_b7d881734a", :on_delete=>:cascade}) + -> 0.0019s +-- add_foreign_key("issue_assignees", "users", {:name=>"fk_5e0c8d9154", :on_delete=>:cascade}) + -> 0.0015s +-- add_foreign_key("issue_metrics", "issues", {:on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("issues", "issues", {:column=>"moved_to_id", :name=>"fk_a194299be1", :on_delete=>:nullify}) + -> 0.0014s +-- add_foreign_key("issues", "milestones", {:name=>"fk_96b1dd429c", :on_delete=>:nullify}) + -> 0.0016s +-- add_foreign_key("issues", "projects", {:name=>"fk_899c8f3231", :on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("issues", "users", {:column=>"author_id", :name=>"fk_05f1e72feb", :on_delete=>:nullify}) + -> 0.0015s +-- add_foreign_key("issues", "users", {:column=>"updated_by_id", :name=>"fk_ffed080f01", :on_delete=>:nullify}) + -> 0.0017s +-- add_foreign_key("label_priorities", "labels", {:on_delete=>:cascade}) + -> 0.0015s +-- add_foreign_key("label_priorities", "projects", {:on_delete=>:cascade}) + -> 0.0015s +-- add_foreign_key("labels", "namespaces", {:column=>"group_id", :on_delete=>:cascade}) + -> 0.0015s +-- add_foreign_key("labels", "projects", {:name=>"fk_7de4989a69", :on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("lists", "boards", {:name=>"fk_0d3f677137", :on_delete=>:cascade}) + -> 0.0015s +-- add_foreign_key("lists", "labels", {:name=>"fk_7a5553d60f", :on_delete=>:cascade}) + -> 0.0014s +-- add_foreign_key("members", "users", {:name=>"fk_2e88fb7ce9", :on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("merge_request_diff_commits", "merge_request_diffs", {:on_delete=>:cascade}) + -> 0.0014s +-- add_foreign_key("merge_request_diff_files", "merge_request_diffs", {:on_delete=>:cascade}) + -> 0.0014s +-- add_foreign_key("merge_request_diffs", "merge_requests", {:name=>"fk_8483f3258f", :on_delete=>:cascade}) + -> 0.0019s +-- add_foreign_key("merge_request_metrics", "ci_pipelines", {:column=>"pipeline_id", :on_delete=>:cascade}) + -> 0.0017s +-- add_foreign_key("merge_request_metrics", "merge_requests", {:on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("merge_request_metrics", "users", {:column=>"latest_closed_by_id", :name=>"fk_ae440388cc", :on_delete=>:nullify}) + -> 0.0015s +-- add_foreign_key("merge_request_metrics", "users", {:column=>"merged_by_id", :name=>"fk_7f28d925f3", :on_delete=>:nullify}) + -> 0.0015s +-- add_foreign_key("merge_requests", "ci_pipelines", {:column=>"head_pipeline_id", :name=>"fk_fd82eae0b9", :on_delete=>:nullify}) + -> 0.0014s +-- add_foreign_key("merge_requests", "merge_request_diffs", {:column=>"latest_merge_request_diff_id", :name=>"fk_06067f5644", :on_delete=>:nullify}) + -> 0.0014s +-- add_foreign_key("merge_requests", "milestones", {:name=>"fk_6a5165a692", :on_delete=>:nullify}) + -> 0.0015s +-- add_foreign_key("merge_requests", "projects", {:column=>"source_project_id", :name=>"fk_3308fe130c", :on_delete=>:nullify}) + -> 0.0017s +-- add_foreign_key("merge_requests", "projects", {:column=>"target_project_id", :name=>"fk_a6963e8447", :on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("merge_requests", "users", {:column=>"assignee_id", :name=>"fk_6149611a04", :on_delete=>:nullify}) + -> 0.0016s +-- 
add_foreign_key("merge_requests", "users", {:column=>"author_id", :name=>"fk_e719a85f8a", :on_delete=>:nullify}) + -> 0.0017s +-- add_foreign_key("merge_requests", "users", {:column=>"merge_user_id", :name=>"fk_ad525e1f87", :on_delete=>:nullify}) + -> 0.0018s +-- add_foreign_key("merge_requests", "users", {:column=>"updated_by_id", :name=>"fk_641731faff", :on_delete=>:nullify}) + -> 0.0017s +-- add_foreign_key("merge_requests_closing_issues", "issues", {:on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("merge_requests_closing_issues", "merge_requests", {:on_delete=>:cascade}) + -> 0.0014s +-- add_foreign_key("milestones", "namespaces", {:column=>"group_id", :name=>"fk_95650a40d4", :on_delete=>:cascade}) + -> 0.0014s +-- add_foreign_key("milestones", "projects", {:name=>"fk_9bd0a0c791", :on_delete=>:cascade}) + -> 0.0017s +-- add_foreign_key("notes", "projects", {:name=>"fk_99e097b079", :on_delete=>:cascade}) + -> 0.0019s +-- add_foreign_key("oauth_openid_requests", "oauth_access_grants", {:column=>"access_grant_id", :name=>"fk_oauth_openid_requests_oauth_access_grants_access_grant_id"}) + -> 0.0014s +-- add_foreign_key("pages_domains", "projects", {:name=>"fk_ea2f6dfc6f", :on_delete=>:cascade}) + -> 0.0021s +-- add_foreign_key("personal_access_tokens", "users") + -> 0.0016s +-- add_foreign_key("project_authorizations", "projects", {:on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("project_authorizations", "users", {:on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("project_auto_devops", "projects", {:on_delete=>:cascade}) + -> 0.0026s +-- add_foreign_key("project_custom_attributes", "projects", {:on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("project_features", "projects", {:name=>"fk_18513d9b92", :on_delete=>:cascade}) + -> 0.0020s +-- add_foreign_key("project_group_links", "projects", {:name=>"fk_daa8cee94c", :on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("project_import_data", "projects", {:name=>"fk_ffb9ee3a10", :on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("project_statistics", "projects", {:on_delete=>:cascade}) + -> 0.0021s +-- add_foreign_key("protected_branch_merge_access_levels", "protected_branches", {:name=>"fk_8a3072ccb3", :on_delete=>:cascade}) + -> 0.0014s +-- add_foreign_key("protected_branch_push_access_levels", "protected_branches", {:name=>"fk_9ffc86a3d9", :on_delete=>:cascade}) + -> 0.0014s +-- add_foreign_key("protected_branches", "projects", {:name=>"fk_7a9c6d93e7", :on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("protected_tag_create_access_levels", "namespaces", {:column=>"group_id"}) + -> 0.0016s +-- add_foreign_key("protected_tag_create_access_levels", "protected_tags", {:name=>"fk_f7dfda8c51", :on_delete=>:cascade}) + -> 0.0013s +-- add_foreign_key("protected_tag_create_access_levels", "users") + -> 0.0018s +-- add_foreign_key("protected_tags", "projects", {:name=>"fk_8e4af87648", :on_delete=>:cascade}) + -> 0.0015s +-- add_foreign_key("push_event_payloads", "events", {:name=>"fk_36c74129da", :on_delete=>:cascade}) + -> 0.0013s +-- add_foreign_key("releases", "projects", {:name=>"fk_47fe2a0596", :on_delete=>:cascade}) + -> 0.0015s +-- add_foreign_key("services", "projects", {:name=>"fk_71cce407f9", :on_delete=>:cascade}) + -> 0.0015s +-- add_foreign_key("snippets", "projects", {:name=>"fk_be41fd4bb7", :on_delete=>:cascade}) + -> 0.0017s +-- add_foreign_key("subscriptions", "projects", {:on_delete=>:cascade}) + -> 0.0018s +-- add_foreign_key("system_note_metadata", "notes", {:name=>"fk_d83a918cb1", 
:on_delete=>:cascade}) + -> 0.0015s +-- add_foreign_key("timelogs", "issues", {:name=>"fk_timelogs_issues_issue_id", :on_delete=>:cascade}) + -> 0.0015s +-- add_foreign_key("timelogs", "merge_requests", {:name=>"fk_timelogs_merge_requests_merge_request_id", :on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("todos", "projects", {:name=>"fk_45054f9c45", :on_delete=>:cascade}) + -> 0.0018s +-- add_foreign_key("trending_projects", "projects", {:on_delete=>:cascade}) + -> 0.0015s +-- add_foreign_key("u2f_registrations", "users") + -> 0.0017s +-- add_foreign_key("user_custom_attributes", "users", {:on_delete=>:cascade}) + -> 0.0019s +-- add_foreign_key("user_synced_attributes_metadata", "users", {:on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("users_star_projects", "projects", {:name=>"fk_22cd27ddfc", :on_delete=>:cascade}) + -> 0.0016s +-- add_foreign_key("web_hook_logs", "web_hooks", {:on_delete=>:cascade}) + -> 0.0014s +-- add_foreign_key("web_hooks", "projects", {:name=>"fk_0c8ca6d9d1", :on_delete=>:cascade}) + -> 0.0017s +-- initialize_schema_migrations_table() + -> 0.0112s +$ JOB_NAME=( $CI_JOB_NAME ) +$ export CI_NODE_INDEX=${JOB_NAME[-2]} +$ export CI_NODE_TOTAL=${JOB_NAME[-1]} +$ export KNAPSACK_REPORT_PATH=knapsack/${CI_PROJECT_NAME}/${JOB_NAME[0]}_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json +$ export KNAPSACK_GENERATE_REPORT=true +$ export CACHE_CLASSES=true +$ cp ${KNAPSACK_SPINACH_SUITE_REPORT_PATH} ${KNAPSACK_REPORT_PATH} +$ scripts/gitaly-test-spawn +Gem.path: ["/root/.gem/ruby/2.3.0", "/usr/local/lib/ruby/gems/2.3.0", "/usr/local/bundle"] +ENV['BUNDLE_GEMFILE']: nil +ENV['RUBYOPT']: nil +bundle config in /builds/gitlab-org/gitlab-ce +scripts/gitaly-test-spawn:10:in `<main>': undefined local variable or method `gitaly_dir' for main:Object (NameError) +Did you mean? gitaly_dir +Settings are listed in order of priority. The top value will be used. +retry +Set for your local app (/usr/local/bundle/config): 3 + +path +Set for your local app (/usr/local/bundle/config): "vendor" +Set via BUNDLE_PATH: "/usr/local/bundle" + +jobs +Set for your local app (/usr/local/bundle/config): "2" + +clean +Set for your local app (/usr/local/bundle/config): "true" + +without +Set for your local app (/usr/local/bundle/config): [:production] + +silence_root_warning +Set via BUNDLE_SILENCE_ROOT_WARNING: true + +app_config +Set via BUNDLE_APP_CONFIG: "/usr/local/bundle" + +install_flags +Set via BUNDLE_INSTALL_FLAGS: "--without=production --jobs=2 --path=vendor --retry=3 --quiet" + +bin +Set via BUNDLE_BIN: "/usr/local/bundle/bin" + +gemfile +Set via BUNDLE_GEMFILE: "/builds/gitlab-org/gitlab-ce/Gemfile" + +section_end:1517486961:build_script +section_start:1517486961:after_script +section_end:1517486962:after_script +section_start:1517486962:upload_artifacts +Uploading artifacts... +WARNING: coverage/: no matching files +knapsack/: found 5 matching files +WARNING: tmp/capybara/: no matching files +Uploading artifacts to coordinator... ok id=50551722 responseStatus=201 Created token=XkN753rp +section_end:1517486963:upload_artifacts +ERROR: Job failed: exit code 1
\ No newline at end of file diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb index 32432ee1e81..5f608fe18d9 100644 --- a/spec/helpers/groups_helper_spec.rb +++ b/spec/helpers/groups_helper_spec.rb @@ -105,7 +105,7 @@ describe GroupsHelper do it 'outputs the groups in the correct order' do expect(helper.group_title(very_deep_nested_group)) - .to match(/<li style="text-indent: 16px;"><a.*>#{deep_nested_group.name}.*<\/li>.*<a.*>#{very_deep_nested_group.name}<\/a>/m) + .to match(%r{<li style="text-indent: 16px;"><a.*>#{deep_nested_group.name}.*</li>.*<a.*>#{very_deep_nested_group.name}</a>}m) end end @@ -120,7 +120,7 @@ describe GroupsHelper do let(:possible_help_texts) do { default_help: "This setting will be applied to all subgroups unless overridden by a group owner", - ancestor_locked_but_you_can_override: /This setting is applied on <a .+>.+<\/a>\. You can override the setting or .+/, + ancestor_locked_but_you_can_override: %r{This setting is applied on <a .+>.+</a>\. You can override the setting or .+}, ancestor_locked_so_ask_the_owner: /This setting is applied on .+\. To share projects in this group with another group, ask the owner to override the setting or remove the share with group lock from .+/, ancestor_locked_and_has_been_overridden: /This setting is applied on .+ and has been overridden on this subgroup/ } diff --git a/spec/helpers/labels_helper_spec.rb b/spec/helpers/labels_helper_spec.rb index 0286d36952c..619baa78bfa 100644 --- a/spec/helpers/labels_helper_spec.rb +++ b/spec/helpers/labels_helper_spec.rb @@ -104,7 +104,7 @@ describe LabelsHelper do context 'with a tooltip argument' do context 'set to false' do it 'does not include the has-tooltip class' do - expect(link_to_label(label, tooltip: false)).not_to match %r{has-tooltip} + expect(link_to_label(label, tooltip: false)).not_to match /has-tooltip/ end end end diff --git a/spec/helpers/user_callouts_helper_spec.rb b/spec/helpers/user_callouts_helper_spec.rb new file mode 100644 index 00000000000..27455705d23 --- /dev/null +++ b/spec/helpers/user_callouts_helper_spec.rb @@ -0,0 +1,47 @@ +require "spec_helper" + +describe UserCalloutsHelper do + let(:user) { create(:user) } + + before do + allow(helper).to receive(:current_user).and_return(user) + end + + describe '.show_gke_cluster_integration_callout?' 
do + let(:project) { create(:project) } + + subject { helper.show_gke_cluster_integration_callout?(project) } + + context 'when user can create a cluster' do + before do + allow(helper).to receive(:can?).with(anything, :create_cluster, anything) + .and_return(true) + end + + context 'when user has not dismissed' do + before do + allow(helper).to receive(:user_dismissed?).and_return(false) + end + + it { is_expected.to be true } + end + + context 'when user dismissed' do + before do + allow(helper).to receive(:user_dismissed?).and_return(true) + end + + it { is_expected.to be false } + end + end + + context 'when user can not create a cluster' do + before do + allow(helper).to receive(:can?).with(anything, :create_cluster, anything) + .and_return(false) + end + + it { is_expected.to be false } + end + end +end diff --git a/spec/helpers/version_check_helper_spec.rb b/spec/helpers/version_check_helper_spec.rb index fa8cfda3b86..9d4e34abef5 100644 --- a/spec/helpers/version_check_helper_spec.rb +++ b/spec/helpers/version_check_helper_spec.rb @@ -4,7 +4,7 @@ describe VersionCheckHelper do describe '#version_status_badge' do it 'should return nil if not dev environment and not enabled' do allow(Rails.env).to receive(:production?) { false } - allow(helper.current_application_settings).to receive(:version_check_enabled) { false } + allow(Gitlab::CurrentSettings.current_application_settings).to receive(:version_check_enabled) { false } expect(helper.version_status_badge).to be(nil) end @@ -12,7 +12,7 @@ describe VersionCheckHelper do context 'when production and enabled' do before do allow(Rails.env).to receive(:production?) { true } - allow(helper.current_application_settings).to receive(:version_check_enabled) { true } + allow(Gitlab::CurrentSettings.current_application_settings).to receive(:version_check_enabled) { true } allow_any_instance_of(VersionCheck).to receive(:url) { 'https://version.host.com/check.svg?gitlab_info=xxx' } @image_tag = helper.version_status_badge @@ -27,7 +27,7 @@ describe VersionCheckHelper do end it 'should have a VersionCheck url as the src' do - expect(@image_tag).to match(/src="https:\/\/version\.host\.com\/check\.svg\?gitlab_info=xxx"/) + expect(@image_tag).to match(%r{src="https://version\.host\.com/check\.svg\?gitlab_info=xxx"}) end end end diff --git a/spec/javascripts/ci_variable_list/ci_variable_list_spec.js b/spec/javascripts/ci_variable_list/ci_variable_list_spec.js new file mode 100644 index 00000000000..0170ab458d4 --- /dev/null +++ b/spec/javascripts/ci_variable_list/ci_variable_list_spec.js @@ -0,0 +1,163 @@ +import VariableList from '~/ci_variable_list/ci_variable_list'; +import getSetTimeoutPromise from '../helpers/set_timeout_promise_helper'; + +describe('VariableList', () => { + preloadFixtures('pipeline_schedules/edit.html.raw'); + preloadFixtures('pipeline_schedules/edit_with_variables.html.raw'); + + let $wrapper; + let variableList; + + describe('with only key/value inputs', () => { + describe('with no variables', () => { + beforeEach(() => { + loadFixtures('pipeline_schedules/edit.html.raw'); + $wrapper = $('.js-ci-variable-list-section'); + + variableList = new VariableList({ + container: $wrapper, + formField: 'schedule', + }); + variableList.init(); + }); + + it('should remove the row when clicking the remove button', () => { + $wrapper.find('.js-row-remove-button').trigger('click'); + + expect($wrapper.find('.js-row').length).toBe(0); + }); + + it('should add another row when editing the last rows key input', () => { + const $row = 
$wrapper.find('.js-row'); + $row.find('.js-ci-variable-input-key') + .val('foo') + .trigger('input'); + + expect($wrapper.find('.js-row').length).toBe(2); + + // Check for the correct default in the new row + const $keyInput = $wrapper.find('.js-row:last-child').find('.js-ci-variable-input-key'); + expect($keyInput.val()).toBe(''); + }); + + it('should add another row when editing the last rows value textarea', () => { + const $row = $wrapper.find('.js-row'); + $row.find('.js-ci-variable-input-value') + .val('foo') + .trigger('input'); + + expect($wrapper.find('.js-row').length).toBe(2); + + // Check for the correct default in the new row + const $valueInput = $wrapper.find('.js-row:last-child').find('.js-ci-variable-input-key'); + expect($valueInput.val()).toBe(''); + }); + + it('should remove empty row after blurring', () => { + const $row = $wrapper.find('.js-row'); + $row.find('.js-ci-variable-input-key') + .val('foo') + .trigger('input'); + + expect($wrapper.find('.js-row').length).toBe(2); + + $row.find('.js-ci-variable-input-key') + .val('') + .trigger('input') + .trigger('blur'); + + expect($wrapper.find('.js-row').length).toBe(1); + }); + }); + + describe('with persisted variables', () => { + beforeEach(() => { + loadFixtures('pipeline_schedules/edit_with_variables.html.raw'); + $wrapper = $('.js-ci-variable-list-section'); + + variableList = new VariableList({ + container: $wrapper, + formField: 'schedule', + }); + variableList.init(); + }); + + it('should have "Reveal values" button initially when there are already variables', () => { + expect($wrapper.find('.js-secret-value-reveal-button').text()).toBe('Reveal values'); + }); + + it('should reveal hidden values', () => { + const $row = $wrapper.find('.js-row:first-child'); + const $inputValue = $row.find('.js-ci-variable-input-value'); + const $placeholder = $row.find('.js-secret-value-placeholder'); + + expect($placeholder.hasClass('hide')).toBe(false); + expect($inputValue.hasClass('hide')).toBe(true); + + // Reveal values + $wrapper.find('.js-secret-value-reveal-button').click(); + + expect($placeholder.hasClass('hide')).toBe(true); + expect($inputValue.hasClass('hide')).toBe(false); + }); + }); + }); + + describe('with all inputs(key, value, protected)', () => { + beforeEach(() => { + // This markup will be replaced with a fixture when we can render the + // CI/CD settings page with the new dynamic variable list in https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/4110 + $wrapper = $(`<form class="js-variable-list"> + <ul> + <li class="js-row"> + <div class="ci-variable-body-item"> + <input class="js-ci-variable-input-key" name="variables[variables_attributes][][key]"> + </div> + + <div class="ci-variable-body-item"> + <textarea class="js-ci-variable-input-value" name="variables[variables_attributes][][value]"></textarea> + </div> + + <div class="ci-variable-body-item ci-variable-protected-item"> + <button type="button" class="js-project-feature-toggle project-feature-toggle"> + <input + type="hidden" + class="js-ci-variable-input-protected js-project-feature-toggle-input" + name="variables[variables_attributes][][protected]" + value="true" + /> + </button> + </div> + + <button type="button" class="js-row-remove-button"></button> + </li> + </ul> + <button type="button" class="js-secret-value-reveal-button"> + Reveal values + </button> + </form>`); + + variableList = new VariableList({ + container: $wrapper, + formField: 'variables', + }); + variableList.init(); + }); + + it('should add another row when editing the last 
rows protected checkbox', (done) => { + const $row = $wrapper.find('.js-row:last-child'); + $row.find('.ci-variable-protected-item .js-project-feature-toggle').click(); + + getSetTimeoutPromise() + .then(() => { + expect($wrapper.find('.js-row').length).toBe(2); + + // Check for the correct default in the new row + const $protectedInput = $wrapper.find('.js-row:last-child').find('.js-ci-variable-input-protected'); + expect($protectedInput.val()).toBe('true'); + }) + .then(done) + .catch(done.fail); + }); + }); +}); diff --git a/spec/javascripts/ci_variable_list/native_form_variable_list_spec.js b/spec/javascripts/ci_variable_list/native_form_variable_list_spec.js new file mode 100644 index 00000000000..eb508a7f059 --- /dev/null +++ b/spec/javascripts/ci_variable_list/native_form_variable_list_spec.js @@ -0,0 +1,30 @@ +import setupNativeFormVariableList from '~/ci_variable_list/native_form_variable_list'; + +describe('NativeFormVariableList', () => { + preloadFixtures('pipeline_schedules/edit.html.raw'); + + let $wrapper; + + beforeEach(() => { + loadFixtures('pipeline_schedules/edit.html.raw'); + $wrapper = $('.js-ci-variable-list-section'); + + setupNativeFormVariableList({ + container: $wrapper, + formField: 'schedule', + }); + }); + + describe('onFormSubmit', () => { + it('should clear out the `name` attribute on the inputs for the last empty row on form submission (avoid BE validation)', () => { + const $row = $wrapper.find('.js-row'); + expect($row.find('.js-ci-variable-input-key').attr('name')).toBe('schedule[variables_attributes][][key]'); + expect($row.find('.js-ci-variable-input-value').attr('name')).toBe('schedule[variables_attributes][][value]'); + + $wrapper.closest('form').trigger('trigger-submit'); + + expect($row.find('.js-ci-variable-input-key').attr('name')).toBe(''); + expect($row.find('.js-ci-variable-input-value').attr('name')).toBe(''); + }); + }); +}); diff --git a/spec/javascripts/clusters/clusters_bundle_spec.js b/spec/javascripts/clusters/clusters_bundle_spec.js index 7b38f6b7855..a9e244e523d 100644 --- a/spec/javascripts/clusters/clusters_bundle_spec.js +++ b/spec/javascripts/clusters/clusters_bundle_spec.js @@ -71,7 +71,8 @@ describe('Clusters', () => { helm: { status: APPLICATION_INSTALLABLE, title: 'Helm Tiller' }, }); - expect(document.querySelector('.js-cluster-application-notice .flash-text')).toBeNull(); + const flashMessage = document.querySelector('.js-cluster-application-notice .flash-text'); + expect(flashMessage).toBeNull(); }); it('shows an alert when something gets newly installed', () => { @@ -83,8 +84,9 @@ describe('Clusters', () => { helm: { status: APPLICATION_INSTALLED, title: 'Helm Tiller' }, }); - expect(document.querySelector('.js-cluster-application-notice .flash-text')).toBeDefined(); - expect(document.querySelector('.js-cluster-application-notice .flash-text').textContent.trim()).toEqual('Helm Tiller was successfully installed on your cluster'); + const flashMessage = document.querySelector('.js-cluster-application-notice .flash-text'); + expect(flashMessage).not.toBeNull(); + expect(flashMessage.textContent.trim()).toEqual('Helm Tiller was successfully installed on your Kubernetes cluster'); }); it('shows an alert when multiple things gets newly installed', () => { @@ -98,8 +100,9 @@ describe('Clusters', () => { ingress: { status: APPLICATION_INSTALLED, title: 'Ingress' }, }); - expect(document.querySelector('.js-cluster-application-notice .flash-text')).toBeDefined(); - expect(document.querySelector('.js-cluster-application-notice 
.flash-text').textContent.trim()).toEqual('Helm Tiller, Ingress was successfully installed on your cluster'); + const flashMessage = document.querySelector('.js-cluster-application-notice .flash-text'); + expect(flashMessage).not.toBeNull(); + expect(flashMessage.textContent.trim()).toEqual('Helm Tiller, Ingress was successfully installed on your Kubernetes cluster'); }); }); diff --git a/spec/javascripts/clusters/stores/clusters_store_spec.js b/spec/javascripts/clusters/stores/clusters_store_spec.js index ec2889355e6..726a4ed30de 100644 --- a/spec/javascripts/clusters/stores/clusters_store_spec.js +++ b/spec/javascripts/clusters/stores/clusters_store_spec.js @@ -58,6 +58,7 @@ describe('Clusters Store', () => { expect(store.state).toEqual({ helpPath: null, + ingressHelpPath: null, status: mockResponseData.status, statusReason: mockResponseData.status_reason, applications: { diff --git a/spec/javascripts/collapsed_sidebar_todo_spec.js b/spec/javascripts/collapsed_sidebar_todo_spec.js index 5026eaafaca..2abf52a1676 100644 --- a/spec/javascripts/collapsed_sidebar_todo_spec.js +++ b/spec/javascripts/collapsed_sidebar_todo_spec.js @@ -1,10 +1,14 @@ /* eslint-disable no-new */ import _ from 'underscore'; +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import Sidebar from '~/right_sidebar'; +import timeoutPromise from './helpers/set_timeout_promise_helper'; describe('Issuable right sidebar collapsed todo toggle', () => { const fixtureName = 'issues/open-issue.html.raw'; const jsonFixtureName = 'todos/todos.json'; + let mock; preloadFixtures(fixtureName); preloadFixtures(jsonFixtureName); @@ -19,19 +23,26 @@ describe('Issuable right sidebar collapsed todo toggle', () => { document.querySelector('.js-right-sidebar') .classList.toggle('right-sidebar-collapsed'); - spyOn(jQuery, 'ajax').and.callFake((res) => { - const d = $.Deferred(); + mock = new MockAdapter(axios); + + mock.onPost(`${gl.TEST_HOST}/frontend-fixtures/issues-project/todos`).reply(() => { const response = _.clone(todoData); - if (res.type === 'DELETE') { - delete response.delete_path; - } + return [200, response]; + }); - d.resolve(response); - return d.promise(); + mock.onDelete(/(.*)\/dashboard\/todos\/\d+$/).reply(() => { + const response = _.clone(todoData); + delete response.delete_path; + + return [200, response]; }); }); + afterEach(() => { + mock.restore(); + }); + it('shows add todo button', () => { expect( document.querySelector('.js-issuable-todo.sidebar-collapsed-icon'), @@ -52,71 +63,101 @@ describe('Issuable right sidebar collapsed todo toggle', () => { ).toBe('Add todo'); }); - it('toggle todo state', () => { + it('toggle todo state', (done) => { document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click(); - expect( - document.querySelector('.js-issuable-todo.sidebar-collapsed-icon .todo-undone'), - ).not.toBeNull(); + setTimeout(() => { + expect( + document.querySelector('.js-issuable-todo.sidebar-collapsed-icon .todo-undone'), + ).not.toBeNull(); - expect( - document.querySelector('.js-issuable-todo.sidebar-collapsed-icon .fa-check-square'), - ).not.toBeNull(); - }); + expect( + document.querySelector('.js-issuable-todo.sidebar-collapsed-icon .fa-check-square'), + ).not.toBeNull(); - it('toggle todo state of expanded todo toggle', () => { - document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click(); - - expect( - document.querySelector('.issuable-sidebar-header .js-issuable-todo').textContent.trim(), - ).toBe('Mark done'); + done(); + }); }); 
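The surrounding hunks replace the old `spyOn(jQuery, 'ajax')` deferred stub with axios-mock-adapter, which answers requests at the HTTP-adapter layer and therefore forces each assertion to wait for the promise chain (hence the new `done`/`setTimeout` plumbing). A minimal sketch of that pattern, using the same libraries the spec imports (the payload shape here is illustrative, not taken from the todos fixture):

import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';

describe('todo toggle requests', () => {
  let mock;

  beforeEach(() => {
    // Stub the axios adapter instead of spying on jQuery.ajax.
    mock = new MockAdapter(axios);
    mock.onPost('/frontend-fixtures/issues-project/todos').reply(200, { delete_path: '/dashboard/todos/1' });
    mock.onDelete(/\/dashboard\/todos\/\d+$/).reply(200, {});
  });

  afterEach(() => {
    // Restore the real adapter so later specs install their own mocks.
    mock.restore();
  });

  it('resolves the stubbed response', (done) => {
    axios.post('/frontend-fixtures/issues-project/todos')
      .then(({ data }) => {
        expect(data.delete_path).toBe('/dashboard/todos/1');
      })
      .then(done)
      .catch(done.fail);
  });
});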
- it('toggles todo button tooltip', () => { + it('toggle todo state of expanded todo toggle', (done) => { document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click(); - expect( - document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').getAttribute('data-original-title'), - ).toBe('Mark done'); - }); - - it('marks todo as done', () => { - document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click(); + setTimeout(() => { + expect( + document.querySelector('.issuable-sidebar-header .js-issuable-todo').textContent.trim(), + ).toBe('Mark done'); - expect( - document.querySelector('.js-issuable-todo.sidebar-collapsed-icon .todo-undone'), - ).not.toBeNull(); + done(); + }); + }); + it('toggles todo button tooltip', (done) => { document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click(); - expect( - document.querySelector('.js-issuable-todo.sidebar-collapsed-icon .todo-undone'), - ).toBeNull(); + setTimeout(() => { + expect( + document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').getAttribute('data-original-title'), + ).toBe('Mark done'); - expect( - document.querySelector('.issuable-sidebar-header .js-issuable-todo').textContent.trim(), - ).toBe('Add todo'); + done(); + }); }); - it('updates aria-label to mark done', () => { + it('marks todo as done', (done) => { document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click(); - expect( - document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').getAttribute('aria-label'), - ).toBe('Mark done'); + timeoutPromise() + .then(() => { + expect( + document.querySelector('.js-issuable-todo.sidebar-collapsed-icon .todo-undone'), + ).not.toBeNull(); + + document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click(); + }) + .then(timeoutPromise) + .then(() => { + expect( + document.querySelector('.js-issuable-todo.sidebar-collapsed-icon .todo-undone'), + ).toBeNull(); + + expect( + document.querySelector('.issuable-sidebar-header .js-issuable-todo').textContent.trim(), + ).toBe('Add todo'); + }) + .then(done) + .catch(done.fail); }); - it('updates aria-label to add todo', () => { + it('updates aria-label to mark done', (done) => { document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click(); - expect( - document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').getAttribute('aria-label'), - ).toBe('Mark done'); + setTimeout(() => { + expect( + document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').getAttribute('aria-label'), + ).toBe('Mark done'); + done(); + }); + }); + + it('updates aria-label to add todo', (done) => { document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click(); - expect( - document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').getAttribute('aria-label'), - ).toBe('Add todo'); + timeoutPromise() + .then(() => { + expect( + document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').getAttribute('aria-label'), + ).toBe('Mark done'); + + document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click(); + }) + .then(timeoutPromise) + .then(() => { + expect( + document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').getAttribute('aria-label'), + ).toBe('Add todo'); + }) + .then(done) + .catch(done.fail); }); }); diff --git a/spec/javascripts/commit/commit_pipeline_status_component_spec.js b/spec/javascripts/commit/commit_pipeline_status_component_spec.js new file mode 100644 index 00000000000..90f290e845e --- /dev/null +++ 
b/spec/javascripts/commit/commit_pipeline_status_component_spec.js @@ -0,0 +1,104 @@ +import Vue from 'vue'; +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; +import commitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue'; +import mountComponent from '../helpers/vue_mount_component_helper'; + +describe('Commit pipeline status component', () => { + let vm; + let Component; + let mock; + const mockCiStatus = { + details_path: '/root/hello-world/pipelines/1', + favicon: 'canceled.ico', + group: 'canceled', + has_details: true, + icon: 'status_canceled', + label: 'canceled', + text: 'canceled', + }; + + beforeEach(() => { + Component = Vue.extend(commitPipelineStatus); + }); + + describe('While polling pipeline data successfully', () => { + beforeEach(() => { + mock = new MockAdapter(axios); + mock.onGet('/dummy/endpoint').reply(() => { + const res = Promise.resolve([200, { + pipelines: [ + { + details: { + status: mockCiStatus, + }, + }, + ], + }]); + return res; + }); + vm = mountComponent(Component, { + endpoint: '/dummy/endpoint', + }); + }); + + afterEach(() => { + vm.poll.stop(); + vm.$destroy(); + mock.restore(); + }); + + it('shows the loading icon when polling is starting', (done) => { + expect(vm.$el.querySelector('.loading-container')).not.toBe(null); + setTimeout(() => { + expect(vm.$el.querySelector('.loading-container')).toBe(null); + done(); + }); + }); + + it('contains a ciStatus when the polling is successful', (done) => { + setTimeout(() => { + expect(vm.ciStatus).toEqual(mockCiStatus); + done(); + }); + }); + + it('contains a ci-status icon when polling is successful', (done) => { + setTimeout(() => { + expect(vm.$el.querySelector('.ci-status-icon')).not.toBe(null); + expect(vm.$el.querySelector('.ci-status-icon').classList).toContain(`ci-status-icon-${mockCiStatus.group}`); + done(); + }); + }); + }); + + describe('When polling data was not successful', () => { + beforeEach(() => { + mock = new MockAdapter(axios); + mock.onGet('/dummy/endpoint').reply(() => { + const res = Promise.reject([502, { }]); + return res; + }); + vm = new Component({ + props: { + endpoint: '/dummy/endpoint', + }, + }); + }); + + afterEach(() => { + vm.poll.stop(); + vm.$destroy(); + mock.restore(); + }); + + it('calls an errorCallback', (done) => { + spyOn(vm, 'errorCallback').and.callThrough(); + vm.$mount(); + setTimeout(() => { + expect(vm.errorCallback.calls.count()).toEqual(1); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js b/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js new file mode 100644 index 00000000000..34ffc7b1016 --- /dev/null +++ b/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js @@ -0,0 +1,231 @@ +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; +import { + getSelector, + togglePopover, + dismiss, + mouseleave, + mouseenter, + inserted, +} from '~/feature_highlight/feature_highlight_helper'; +import getSetTimeoutPromise from '../helpers/set_timeout_promise_helper'; + +describe('feature highlight helper', () => { + describe('getSelector', () => { + it('returns js-feature-highlight selector', () => { + const highlightId = 'highlightId'; + expect(getSelector(highlightId)).toEqual(`.js-feature-highlight[data-highlight=${highlightId}]`); + }); + }); + + describe('togglePopover', () => { + describe('togglePopover(true)', () => { + it('returns true when popover is
shown', () => { + const context = { + hasClass: () => false, + popover: () => {}, + toggleClass: () => {}, + }; + + expect(togglePopover.call(context, true)).toEqual(true); + }); + + it('returns false when popover is already shown', () => { + const context = { + hasClass: () => true, + }; + + expect(togglePopover.call(context, true)).toEqual(false); + }); + + it('shows popover', (done) => { + const context = { + hasClass: () => false, + popover: () => {}, + toggleClass: () => {}, + }; + + spyOn(context, 'popover').and.callFake((method) => { + expect(method).toEqual('show'); + done(); + }); + + togglePopover.call(context, true); + }); + + it('adds disable-animation and js-popover-show class', (done) => { + const context = { + hasClass: () => false, + popover: () => {}, + toggleClass: () => {}, + }; + + spyOn(context, 'toggleClass').and.callFake((classNames, show) => { + expect(classNames).toEqual('disable-animation js-popover-show'); + expect(show).toEqual(true); + done(); + }); + + togglePopover.call(context, true); + }); + }); + + describe('togglePopover(false)', () => { + it('returns true when popover is hidden', () => { + const context = { + hasClass: () => true, + popover: () => {}, + toggleClass: () => {}, + }; + + expect(togglePopover.call(context, false)).toEqual(true); + }); + + it('returns false when popover is already hidden', () => { + const context = { + hasClass: () => false, + }; + + expect(togglePopover.call(context, false)).toEqual(false); + }); + + it('hides popover', (done) => { + const context = { + hasClass: () => true, + popover: () => {}, + toggleClass: () => {}, + }; + + spyOn(context, 'popover').and.callFake((method) => { + expect(method).toEqual('hide'); + done(); + }); + + togglePopover.call(context, false); + }); + + it('removes disable-animation and js-popover-show class', (done) => { + const context = { + hasClass: () => true, + popover: () => {}, + toggleClass: () => {}, + }; + + spyOn(context, 'toggleClass').and.callFake((classNames, show) => { + expect(classNames).toEqual('disable-animation js-popover-show'); + expect(show).toEqual(false); + done(); + }); + + togglePopover.call(context, false); + }); + }); + }); + + describe('dismiss', () => { + let mock; + const context = { + hide: () => {}, + attr: () => '/-/callouts/dismiss', + }; + + beforeEach(() => { + mock = new MockAdapter(axios); + + spyOn(togglePopover, 'call').and.callFake(() => {}); + spyOn(context, 'hide').and.callFake(() => {}); + dismiss.call(context); + }); + + afterEach(() => { + mock.restore(); + }); + + it('calls persistent dismissal endpoint', (done) => { + const spy = jasmine.createSpy('dismiss-endpoint-hit'); + mock.onPost('/-/callouts/dismiss').reply(spy); + + getSetTimeoutPromise() + .then(() => { + expect(spy).toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + + it('calls hide popover', () => { + expect(togglePopover.call).toHaveBeenCalledWith(context, false); + }); + + it('calls hide', () => { + expect(context.hide).toHaveBeenCalled(); + }); + }); + + describe('mouseleave', () => { + it('calls hide popover if .popover:hover is false', () => { + const fakeJquery = { + length: 0, + }; + + spyOn($.fn, 'init').and.callFake(selector => (selector === '.popover:hover' ? 
fakeJquery : $.fn)); + spyOn(togglePopover, 'call'); + mouseleave(); + expect(togglePopover.call).toHaveBeenCalledWith(jasmine.any(Object), false); + }); + + it('does not call hide popover if .popover:hover is true', () => { + const fakeJquery = { + length: 1, + }; + + spyOn($.fn, 'init').and.callFake(selector => (selector === '.popover:hover' ? fakeJquery : $.fn)); + spyOn(togglePopover, 'call'); + mouseleave(); + expect(togglePopover.call).not.toHaveBeenCalledWith(false); + }); + }); + + describe('mouseenter', () => { + const context = {}; + + it('shows popover', () => { + spyOn(togglePopover, 'call').and.returnValue(false); + mouseenter.call(context); + expect(togglePopover.call).toHaveBeenCalledWith(jasmine.any(Object), true); + }); + + it('registers mouseleave event if popover is shown', (done) => { + spyOn(togglePopover, 'call').and.returnValue(true); + spyOn($.fn, 'on').and.callFake((eventName) => { + expect(eventName).toEqual('mouseleave'); + done(); + }); + mouseenter.call(context); + }); + + it('does not register mouseleave event if popover is not shown', () => { + spyOn(togglePopover, 'call').and.returnValue(false); + const spy = spyOn($.fn, 'on').and.callFake(() => {}); + mouseenter.call(context); + expect(spy).not.toHaveBeenCalled(); + }); + }); + + describe('inserted', () => { + it('registers click event callback', (done) => { + const context = { + getAttribute: () => 'popoverId', + dataset: { + highlight: 'some-feature', + }, + }; + + spyOn($.fn, 'on').and.callFake((event) => { + expect(event).toEqual('click'); + done(); + }); + inserted.call(context); + }); + }); +}); diff --git a/spec/javascripts/feature_highlight/feature_highlight_options_spec.js b/spec/javascripts/feature_highlight/feature_highlight_options_spec.js new file mode 100644 index 00000000000..7f9425d8abe --- /dev/null +++ b/spec/javascripts/feature_highlight/feature_highlight_options_spec.js @@ -0,0 +1,30 @@ +import domContentLoaded from '~/feature_highlight/feature_highlight_options'; +import bp from '~/breakpoints'; + +describe('feature highlight options', () => { + describe('domContentLoaded', () => { + it('should not call highlightFeatures when breakpoint is xs', () => { + spyOn(bp, 'getBreakpointSize').and.returnValue('xs'); + + expect(domContentLoaded()).toBe(false); + }); + + it('should not call highlightFeatures when breakpoint is sm', () => { + spyOn(bp, 'getBreakpointSize').and.returnValue('sm'); + + expect(domContentLoaded()).toBe(false); + }); + + it('should not call highlightFeatures when breakpoint is md', () => { + spyOn(bp, 'getBreakpointSize').and.returnValue('md'); + + expect(domContentLoaded()).toBe(false); + }); + + it('should call highlightFeatures when breakpoint is lg', () => { + spyOn(bp, 'getBreakpointSize').and.returnValue('lg'); + + expect(domContentLoaded()).toBe(true); + }); + }); +}); diff --git a/spec/javascripts/feature_highlight/feature_highlight_spec.js b/spec/javascripts/feature_highlight/feature_highlight_spec.js new file mode 100644 index 00000000000..6e1b0429ab7 --- /dev/null +++ b/spec/javascripts/feature_highlight/feature_highlight_spec.js @@ -0,0 +1,131 @@ +import * as featureHighlightHelper from '~/feature_highlight/feature_highlight_helper'; +import * as featureHighlight from '~/feature_highlight/feature_highlight'; + +describe('feature highlight', () => { + beforeEach(() => { + setFixtures(` + <div> + <div class="js-feature-highlight" data-highlight="test" data-highlight-priority="10" disabled> + Trigger + </div> + </div> + <div
class="feature-highlight-popover-content"> + Content + <div class="dismiss-feature-highlight"> + Dismiss + </div> + </div> + `); + }); + + describe('setupFeatureHighlightPopover', () => { + const selector = '.js-feature-highlight[data-highlight=test]'; + beforeEach(() => { + spyOn(window, 'addEventListener'); + spyOn(window, 'removeEventListener'); + featureHighlight.setupFeatureHighlightPopover('test', 0); + }); + + it('setup popover content', () => { + const $popoverContent = $('.feature-highlight-popover-content'); + const outerHTML = $popoverContent.prop('outerHTML'); + + expect($(selector).data('content')).toEqual(outerHTML); + }); + + it('setup mouseenter', () => { + const toggleSpy = spyOn(featureHighlightHelper.togglePopover, 'call'); + $(selector).trigger('mouseenter'); + + expect(toggleSpy).toHaveBeenCalledWith(jasmine.any(Object), true); + }); + + it('setup debounced mouseleave', (done) => { + const toggleSpy = spyOn(featureHighlightHelper.togglePopover, 'call'); + $(selector).trigger('mouseleave'); + + // Even though we've set the debounce to 0ms, setTimeout is needed for the debounce + setTimeout(() => { + expect(toggleSpy).toHaveBeenCalledWith(jasmine.any(Object), false); + done(); + }, 0); + }); + + it('setup inserted.bs.popover', () => { + $(selector).trigger('mouseenter'); + const popoverId = $(selector).attr('aria-describedby'); + const spyEvent = spyOnEvent(`#${popoverId} .dismiss-feature-highlight`, 'click'); + + $(`#${popoverId} .dismiss-feature-highlight`).click(); + expect(spyEvent).toHaveBeenTriggered(); + }); + + it('setup show.bs.popover', () => { + $(selector).trigger('show.bs.popover'); + expect(window.addEventListener).toHaveBeenCalledWith('scroll', jasmine.any(Function)); + }); + + it('setup hide.bs.popover', () => { + $(selector).trigger('hide.bs.popover'); + expect(window.removeEventListener).toHaveBeenCalledWith('scroll', jasmine.any(Function)); + }); + + it('removes disabled attribute', () => { + expect($('.js-feature-highlight').is(':disabled')).toEqual(false); + }); + + it('displays popover', () => { + expect($(selector).attr('aria-describedby')).toBeFalsy(); + $(selector).trigger('mouseenter'); + expect($(selector).attr('aria-describedby')).toBeTruthy(); + }); + }); + + describe('findHighestPriorityFeature', () => { + beforeEach(() => { + setFixtures(` + <div class="js-feature-highlight" data-highlight="test" data-highlight-priority="10" disabled></div> + <div class="js-feature-highlight" data-highlight="test-high-priority" data-highlight-priority="20" disabled></div> + <div class="js-feature-highlight" data-highlight="test-low-priority" data-highlight-priority="0" disabled></div> + `); + }); + + it('should pick the highest priority feature highlight', () => { + setFixtures(` + <div class="js-feature-highlight" data-highlight="test" data-highlight-priority="10" disabled></div> + <div class="js-feature-highlight" data-highlight="test-high-priority" data-highlight-priority="20" disabled></div> + <div class="js-feature-highlight" data-highlight="test-low-priority" data-highlight-priority="0" disabled></div> + `); + + expect($('.js-feature-highlight').length).toBeGreaterThan(1); + expect(featureHighlight.findHighestPriorityFeature()).toEqual('test-high-priority'); + }); + + it('should work when no priority is set', () => { + setFixtures(` + <div class="js-feature-highlight" data-highlight="test" disabled></div> + `); + + expect(featureHighlight.findHighestPriorityFeature()).toEqual('test'); + }); + + it('should pick the highest priority feature highlight 
when some have no priority set', () => { + setFixtures(` + <div class="js-feature-highlight" data-highlight="test-no-priority1" disabled></div> + <div class="js-feature-highlight" data-highlight="test" data-highlight-priority="10" disabled></div> + <div class="js-feature-highlight" data-highlight="test-no-priority2" disabled></div> + <div class="js-feature-highlight" data-highlight="test-high-priority" data-highlight-priority="20" disabled></div> + <div class="js-feature-highlight" data-highlight="test-low-priority" data-highlight-priority="0" disabled></div> + `); + + expect($('.js-feature-highlight').length).toBeGreaterThan(1); + expect(featureHighlight.findHighestPriorityFeature()).toEqual('test-high-priority'); + }); + }); + + describe('highlightFeatures', () => { + it('calls setupFeatureHighlightPopover', () => { + expect(featureHighlight.highlightFeatures()).toEqual('test'); + }); + }); +}); diff --git a/spec/javascripts/fixtures/jobs.rb b/spec/javascripts/fixtures/jobs.rb index 87d131dfe28..6d5c6d5334f 100644 --- a/spec/javascripts/fixtures/jobs.rb +++ b/spec/javascripts/fixtures/jobs.rb @@ -7,7 +7,7 @@ describe Projects::JobsController, '(JavaScript fixtures)', type: :controller do let(:namespace) { create(:namespace, name: 'frontend-fixtures' )} let(:project) { create(:project_empty_repo, namespace: namespace, path: 'builds-project') } let(:pipeline) { create(:ci_empty_pipeline, project: project) } - let!(:build_with_artifacts) { create(:ci_build, :success, :artifacts, :trace, pipeline: pipeline, stage: 'test', artifacts_expire_at: Time.now + 18.months) } + let!(:build_with_artifacts) { create(:ci_build, :success, :artifacts, :trace_artifact, pipeline: pipeline, stage: 'test', artifacts_expire_at: Time.now + 18.months) } let!(:failed_build) { create(:ci_build, :failed, pipeline: pipeline, stage: 'build') } let!(:pending_build) { create(:ci_build, :pending, pipeline: pipeline, stage: 'deploy') } diff --git a/spec/javascripts/fixtures/pipeline_schedules.rb b/spec/javascripts/fixtures/pipeline_schedules.rb new file mode 100644 index 00000000000..56f27ea7df1 --- /dev/null +++ b/spec/javascripts/fixtures/pipeline_schedules.rb @@ -0,0 +1,43 @@ +require 'spec_helper' + +describe Projects::PipelineSchedulesController, '(JavaScript fixtures)', type: :controller do + include JavaScriptFixturesHelpers + + let(:admin) { create(:admin) } + let(:namespace) { create(:namespace, name: 'frontend-fixtures' )} + let(:project) { create(:project, :public, :repository) } + let!(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: admin) } + let!(:pipeline_schedule_populated) { create(:ci_pipeline_schedule, project: project, owner: admin) } + let!(:pipeline_schedule_variable1) { create(:ci_pipeline_schedule_variable, key: 'foo', value: 'foovalue', pipeline_schedule: pipeline_schedule_populated) } + let!(:pipeline_schedule_variable2) { create(:ci_pipeline_schedule_variable, key: 'bar', value: 'barvalue', pipeline_schedule: pipeline_schedule_populated) } + + render_views + + before(:all) do + clean_frontend_fixtures('pipeline_schedules/') + end + + before do + sign_in(admin) + end + + it 'pipeline_schedules/edit.html.raw' do |example| + get :edit, + namespace_id: project.namespace.to_param, + project_id: project, + id: pipeline_schedule.id + + expect(response).to be_success + store_frontend_fixture(response, example.description) + end + + it 'pipeline_schedules/edit_with_variables.html.raw' do |example| + get :edit, + namespace_id: project.namespace.to_param, + project_id: project, 
+ id: pipeline_schedule_populated.id + + expect(response).to be_success + store_frontend_fixture(response, example.description) + end +end diff --git a/spec/javascripts/gfm_auto_complete_spec.js b/spec/javascripts/gfm_auto_complete_spec.js index 6f357306ec7..50a587ef351 100644 --- a/spec/javascripts/gfm_auto_complete_spec.js +++ b/spec/javascripts/gfm_auto_complete_spec.js @@ -130,16 +130,25 @@ describe('GfmAutoComplete', function () { }); describe('should not match special sequences', () => { - const ShouldNotBeFollowedBy = flags.concat(['\x00', '\x10', '\x3f', '\n', ' ']); + const shouldNotBeFollowedBy = flags.concat(['\x00', '\x10', '\x3f', '\n', ' ']); + const shouldNotBePrependedBy = ['`']; flagsUseDefaultMatcher.forEach((atSign) => { - ShouldNotBeFollowedBy.forEach((followedSymbol) => { + shouldNotBeFollowedBy.forEach((followedSymbol) => { const seq = atSign + followedSymbol; it(`should not match "${seq}"`, () => { expect(defaultMatcher(atwhoInstance, atSign, seq)).toBe(null); }); }); + + shouldNotBePrependedBy.forEach((prependedSymbol) => { + const seq = prependedSymbol + atSign; + + it(`should not match "${seq}"`, () => { + expect(defaultMatcher(atwhoInstance, atSign, seq)).toBe(null); + }); + }); }); }); }); diff --git a/spec/javascripts/issue_spec.js b/spec/javascripts/issue_spec.js index 2cd2e63b15d..177962ecf82 100644 --- a/spec/javascripts/issue_spec.js +++ b/spec/javascripts/issue_spec.js @@ -1,4 +1,6 @@ /* eslint-disable space-before-function-paren, one-var, one-var-declaration-per-line, no-use-before-define, comma-dangle, max-len */ +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import Issue from '~/issue'; import '~/lib/utils/text_utility'; @@ -68,40 +70,27 @@ describe('Issue', function() { expect($btn).toHaveText(isIssueInitiallyOpen ? 'Close issue' : 'Reopen issue'); } - describe('task lists', function() { - beforeEach(function() { - loadFixtures('issues/issue-with-task-list.html.raw'); - this.issue = new Issue(); - }); - - it('submits an ajax request on tasklist:changed', function() { - spyOn(jQuery, 'ajax').and.callFake(function(req) { - expect(req.type).toBe('PATCH'); - expect(req.url).toBe(gl.TEST_HOST + '/frontend-fixtures/issues-project/issues/1.json'); // eslint-disable-line prefer-template - expect(req.data.issue.description).not.toBe(null); - }); - - $('.js-task-list-field').trigger('tasklist:changed'); - }); - }); - [true, false].forEach((isIssueInitiallyOpen) => { describe(`with ${isIssueInitiallyOpen ? 'open' : 'closed'} issue`, function() { const action = isIssueInitiallyOpen ? 
'close' : 'reopen'; + let mock; - function ajaxSpy(req) { - if (req.url === this.$triggeredButton.attr('href')) { - expect(req.type).toBe('PUT'); - expectNewBranchButtonState(true, false); - return this.issueStateDeferred; - } else if (req.url === Issue.createMrDropdownWrap.dataset.canCreatePath) { - expect(req.type).toBe('GET'); + function mockCloseButtonResponseSuccess(url, response) { + mock.onPut(url).reply(() => { expectNewBranchButtonState(true, false); - return this.canCreateBranchDeferred; - } - expect(req.url).toBe('unexpected'); - return null; + return [200, response]; + }); + } + + function mockCloseButtonResponseError(url) { + mock.onPut(url).networkError(); + } + + function mockCanCreateBranch(canCreateBranch) { + mock.onGet(/(.*)\/can_create_branch$/).reply(200, { + can_create_branch: canCreateBranch, + }); } beforeEach(function() { @@ -111,6 +100,11 @@ describe('Issue', function() { loadFixtures('issues/closed-issue.html.raw'); } + mock = new MockAdapter(axios); + + mock.onGet(/(.*)\/related_branches$/).reply(200, {}); + mock.onGet(/(.*)\/referenced_merge_requests$/).reply(200, {}); + findElements(isIssueInitiallyOpen); this.issue = new Issue(); expectIssueState(isIssueInitiallyOpen); @@ -120,71 +114,89 @@ describe('Issue', function() { this.$projectIssuesCounter = $('.issue_counter').first(); this.$projectIssuesCounter.text('1,001'); - this.issueStateDeferred = new jQuery.Deferred(); - this.canCreateBranchDeferred = new jQuery.Deferred(); + spyOn(axios, 'get').and.callThrough(); + }); - spyOn(jQuery, 'ajax').and.callFake(ajaxSpy.bind(this)); + afterEach(() => { + mock.restore(); + $('div.flash-alert').remove(); }); - it(`${action}s the issue`, function() { - this.$triggeredButton.trigger('click'); - this.issueStateDeferred.resolve({ + it(`${action}s the issue`, function(done) { + mockCloseButtonResponseSuccess(this.$triggeredButton.attr('href'), { id: 34 }); - this.canCreateBranchDeferred.resolve({ - can_create_branch: !isIssueInitiallyOpen - }); + mockCanCreateBranch(!isIssueInitiallyOpen); - expectIssueState(!isIssueInitiallyOpen); - expect(this.$triggeredButton.get(0).getAttribute('disabled')).toBeNull(); - expect(this.$projectIssuesCounter.text()).toBe(isIssueInitiallyOpen ? '1,000' : '1,002'); - expectNewBranchButtonState(false, !isIssueInitiallyOpen); + this.$triggeredButton.trigger('click'); + + setTimeout(() => { + expectIssueState(!isIssueInitiallyOpen); + expect(this.$triggeredButton.get(0).getAttribute('disabled')).toBeNull(); + expect(this.$projectIssuesCounter.text()).toBe(isIssueInitiallyOpen ? 
'1,000' : '1,002'); + expectNewBranchButtonState(false, !isIssueInitiallyOpen); + + done(); + }); }); - it(`fails to ${action} the issue if saved:false`, function() { - this.$triggeredButton.trigger('click'); - this.issueStateDeferred.resolve({ + it(`fails to ${action} the issue if saved:false`, function(done) { + mockCloseButtonResponseSuccess(this.$triggeredButton.attr('href'), { saved: false }); - this.canCreateBranchDeferred.resolve({ - can_create_branch: isIssueInitiallyOpen - }); + mockCanCreateBranch(isIssueInitiallyOpen); - expectIssueState(isIssueInitiallyOpen); - expect(this.$triggeredButton.get(0).getAttribute('disabled')).toBeNull(); - expectErrorMessage(); - expect(this.$projectIssuesCounter.text()).toBe('1,001'); - expectNewBranchButtonState(false, isIssueInitiallyOpen); + this.$triggeredButton.trigger('click'); + + setTimeout(() => { + expectIssueState(isIssueInitiallyOpen); + expect(this.$triggeredButton.get(0).getAttribute('disabled')).toBeNull(); + expectErrorMessage(); + expect(this.$projectIssuesCounter.text()).toBe('1,001'); + expectNewBranchButtonState(false, isIssueInitiallyOpen); + + done(); + }); }); - it(`fails to ${action} the issue if HTTP error occurs`, function() { + it(`fails to ${action} the issue if HTTP error occurs`, function(done) { + mockCloseButtonResponseError(this.$triggeredButton.attr('href')); + mockCanCreateBranch(isIssueInitiallyOpen); + this.$triggeredButton.trigger('click'); - this.issueStateDeferred.reject(); - this.canCreateBranchDeferred.resolve({ - can_create_branch: isIssueInitiallyOpen - }); - expectIssueState(isIssueInitiallyOpen); - expect(this.$triggeredButton.get(0).getAttribute('disabled')).toBeNull(); - expectErrorMessage(); - expect(this.$projectIssuesCounter.text()).toBe('1,001'); - expectNewBranchButtonState(false, isIssueInitiallyOpen); + setTimeout(() => { + expectIssueState(isIssueInitiallyOpen); + expect(this.$triggeredButton.get(0).getAttribute('disabled')).toBeNull(); + expectErrorMessage(); + expect(this.$projectIssuesCounter.text()).toBe('1,001'); + expectNewBranchButtonState(false, isIssueInitiallyOpen); + + done(); + }); }); it('disables the new branch button if Ajax call fails', function() { + mockCloseButtonResponseError(this.$triggeredButton.attr('href')); + mock.onGet(/(.*)\/can_create_branch$/).networkError(); + this.$triggeredButton.trigger('click'); - this.issueStateDeferred.reject(); - this.canCreateBranchDeferred.reject(); expectNewBranchButtonState(false, false); }); - it('does not trigger Ajax call if new branch button is missing', function() { + it('does not trigger Ajax call if new branch button is missing', function(done) { + mockCloseButtonResponseError(this.$triggeredButton.attr('href')); Issue.$btnNewBranch = $(); this.canCreateBranchDeferred = null; this.$triggeredButton.trigger('click'); - this.issueStateDeferred.reject(); + + setTimeout(() => { + expect(axios.get).not.toHaveBeenCalled(); + + done(); + }); }); }); }); diff --git a/spec/javascripts/job_spec.js b/spec/javascripts/job_spec.js index 0452934ea9e..03b58e9c1d0 100644 --- a/spec/javascripts/job_spec.js +++ b/spec/javascripts/job_spec.js @@ -1,3 +1,5 @@ +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import { numberToHumanSize } from '~/lib/utils/number_utils'; import * as urlUtils from '~/lib/utils/url_utility'; import '~/lib/utils/datetime_utility'; @@ -6,11 +8,29 @@ import '~/breakpoints'; describe('Job', () => { const JOB_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/-/jobs/1`; + let 
mock; + let response; + + function waitForPromise() { + return new Promise(resolve => requestAnimationFrame(resolve)); + } preloadFixtures('builds/build-with-artifacts.html.raw'); beforeEach(() => { loadFixtures('builds/build-with-artifacts.html.raw'); + + spyOn(urlUtils, 'visitUrl'); + + mock = new MockAdapter(axios); + + mock.onGet(new RegExp(`${JOB_URL}/trace.json?(.*)`)).reply(() => [200, response]); + }); + + afterEach(() => { + mock.restore(); + + response = {}; }); describe('class constructor', () => { @@ -55,161 +75,159 @@ describe('Job', () => { }); describe('running build', () => { - it('updates the build trace on an interval', function () { - const deferred1 = $.Deferred(); - const deferred2 = $.Deferred(); - spyOn($, 'ajax').and.returnValues(deferred1.promise(), deferred2.promise()); - spyOn(urlUtils, 'visitUrl'); - - deferred1.resolve({ + it('updates the build trace on an interval', function (done) { + response = { html: '<span>Update<span>', status: 'running', state: 'newstate', append: true, complete: false, - }); - - deferred2.resolve({ - html: '<span>More</span>', - status: 'running', - state: 'finalstate', - append: true, - complete: true, - }); + }; this.job = new Job(); - expect($('#build-trace .js-build-output').text()).toMatch(/Update/); - expect(this.job.state).toBe('newstate'); - - jasmine.clock().tick(4001); - - expect($('#build-trace .js-build-output').text()).toMatch(/UpdateMore/); - expect(this.job.state).toBe('finalstate'); + waitForPromise() + .then(() => { + expect($('#build-trace .js-build-output').text()).toMatch(/Update/); + expect(this.job.state).toBe('newstate'); + + response = { + html: '<span>More</span>', + status: 'running', + state: 'finalstate', + append: true, + complete: true, + }; + }) + .then(() => jasmine.clock().tick(4001)) + .then(waitForPromise) + .then(() => { + expect($('#build-trace .js-build-output').text()).toMatch(/UpdateMore/); + expect(this.job.state).toBe('finalstate'); + }) + .then(done) + .catch(done.fail); }); - it('replaces the entire build trace', () => { - const deferred1 = $.Deferred(); - const deferred2 = $.Deferred(); - - spyOn($, 'ajax').and.returnValues(deferred1.promise(), deferred2.promise()); - - spyOn(urlUtils, 'visitUrl'); - - deferred1.resolve({ + it('replaces the entire build trace', (done) => { + response = { html: '<span>Update<span>', status: 'running', append: false, complete: false, - }); - - deferred2.resolve({ - html: '<span>Different</span>', - status: 'running', - append: false, - }); + }; this.job = new Job(); - expect($('#build-trace .js-build-output').text()).toMatch(/Update/); - - jasmine.clock().tick(4001); - - expect($('#build-trace .js-build-output').text()).not.toMatch(/Update/); - expect($('#build-trace .js-build-output').text()).toMatch(/Different/); + waitForPromise() + .then(() => { + expect($('#build-trace .js-build-output').text()).toMatch(/Update/); + + response = { + html: '<span>Different</span>', + status: 'running', + append: false, + }; + }) + .then(() => jasmine.clock().tick(4001)) + .then(waitForPromise) + .then(() => { + expect($('#build-trace .js-build-output').text()).not.toMatch(/Update/); + expect($('#build-trace .js-build-output').text()).toMatch(/Different/); + }) + .then(done) + .catch(done.fail); }); }); describe('truncated information', () => { describe('when size is less than total', () => { - it('shows information about truncated log', () => { - spyOn(urlUtils, 'visitUrl'); - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); - - 
deferred.resolve({ + it('shows information about truncated log', (done) => { + response = { html: '<span>Update</span>', status: 'success', append: false, size: 50, total: 100, - }); + }; this.job = new Job(); - expect(document.querySelector('.js-truncated-info').classList).not.toContain('hidden'); + waitForPromise() + .then(() => { + expect(document.querySelector('.js-truncated-info').classList).not.toContain('hidden'); + }) + .then(done) + .catch(done.fail); }); - it('shows the size in KiB', () => { + it('shows the size in KiB', (done) => { const size = 50; - spyOn(urlUtils, 'visitUrl'); - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); - deferred.resolve({ + response = { html: '<span>Update</span>', status: 'success', append: false, size, total: 100, - }); + }; this.job = new Job(); - expect( - document.querySelector('.js-truncated-info-size').textContent.trim(), - ).toEqual(`${numberToHumanSize(size)}`); + waitForPromise() + .then(() => { + expect( + document.querySelector('.js-truncated-info-size').textContent.trim(), + ).toEqual(`${numberToHumanSize(size)}`); + }) + .then(done) + .catch(done.fail); }); - it('shows incremented size', () => { - const deferred1 = $.Deferred(); - const deferred2 = $.Deferred(); - - spyOn($, 'ajax').and.returnValues(deferred1.promise(), deferred2.promise()); - - spyOn(urlUtils, 'visitUrl'); - - deferred1.resolve({ + it('shows incremented size', (done) => { + response = { html: '<span>Update</span>', status: 'success', append: false, size: 50, total: 100, - }); + }; this.job = new Job(); - expect( - document.querySelector('.js-truncated-info-size').textContent.trim(), - ).toEqual(`${numberToHumanSize(50)}`); - - jasmine.clock().tick(4001); - - deferred2.resolve({ - html: '<span>Update</span>', - status: 'success', - append: true, - size: 10, - total: 100, - }); - - expect( - document.querySelector('.js-truncated-info-size').textContent.trim(), - ).toEqual(`${numberToHumanSize(60)}`); + waitForPromise() + .then(() => { + expect( + document.querySelector('.js-truncated-info-size').textContent.trim(), + ).toEqual(`${numberToHumanSize(50)}`); + + response = { + html: '<span>Update</span>', + status: 'success', + append: true, + size: 10, + total: 100, + }; + }) + .then(() => jasmine.clock().tick(4001)) + .then(waitForPromise) + .then(() => { + expect( + document.querySelector('.js-truncated-info-size').textContent.trim(), + ).toEqual(`${numberToHumanSize(60)}`); + }) + .then(done) + .catch(done.fail); }); it('renders the raw link', () => { - const deferred = $.Deferred(); - spyOn(urlUtils, 'visitUrl'); - - spyOn($, 'ajax').and.returnValue(deferred.promise()); - deferred.resolve({ + response = { html: '<span>Update</span>', status: 'success', append: false, size: 50, total: 100, - }); + }; this.job = new Job(); @@ -220,50 +238,50 @@ describe('Job', () => { }); describe('when size is equal than total', () => { - it('does not show the trunctated information', () => { - const deferred = $.Deferred(); - spyOn(urlUtils, 'visitUrl'); - - spyOn($, 'ajax').and.returnValue(deferred.promise()); - deferred.resolve({ + it('does not show the truncated information', (done) => { + response = { html: '<span>Update</span>', status: 'success', append: false, size: 100, total: 100, - }); + }; this.job = new Job(); - expect(document.querySelector('.js-truncated-info').classList).toContain('hidden'); + waitForPromise() + .then(() => { + expect(document.querySelector('.js-truncated-info').classList).toContain('hidden'); + }) + .then(done) + 
.catch(done.fail); }); }); }); describe('output trace', () => { - beforeEach(() => { - const deferred = $.Deferred(); - spyOn(urlUtils, 'visitUrl'); - - spyOn($, 'ajax').and.returnValue(deferred.promise()); - deferred.resolve({ + beforeEach((done) => { + response = { html: '<span>Update</span>', status: 'success', append: false, size: 50, total: 100, - }); + }; this.job = new Job(); + + waitForPromise() + .then(done) + .catch(done.fail); }); it('should render trace controls', () => { const controllers = document.querySelector('.controllers'); - expect(controllers.querySelector('.js-raw-link-controller')).toBeDefined(); - expect(controllers.querySelector('.js-erase-link')).toBeDefined(); - expect(controllers.querySelector('.js-scroll-up')).toBeDefined(); - expect(controllers.querySelector('.js-scroll-down')).toBeDefined(); + expect(controllers.querySelector('.js-raw-link-controller')).not.toBeNull(); + expect(controllers.querySelector('.js-scroll-up')).not.toBeNull(); + expect(controllers.querySelector('.js-scroll-down')).not.toBeNull(); }); it('should render received output', () => { @@ -276,13 +294,13 @@ describe('Job', () => { describe('getBuildTrace', () => { it('should request build trace with state parameter', (done) => { - spyOn(jQuery, 'ajax').and.callThrough(); + spyOn(axios, 'get').and.callThrough(); // eslint-disable-next-line no-new new Job(); setTimeout(() => { - expect(jQuery.ajax).toHaveBeenCalledWith( - { url: `${JOB_URL}/trace.json`, data: { state: '' } }, + expect(axios.get).toHaveBeenCalledWith( + `${JOB_URL}/trace.json`, { params: { state: '' } }, ); done(); }, 0); diff --git a/spec/javascripts/labels_issue_sidebar_spec.js b/spec/javascripts/labels_issue_sidebar_spec.js index a197b35f6fb..7d992f62f64 100644 --- a/spec/javascripts/labels_issue_sidebar_spec.js +++ b/spec/javascripts/labels_issue_sidebar_spec.js @@ -1,4 +1,6 @@ /* eslint-disable no-new */ +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import IssuableContext from '~/issuable_context'; import LabelsSelect from '~/labels_select'; @@ -10,35 +12,44 @@ import '~/users_select'; (() => { let saveLabelCount = 0; + let mock; + describe('Issue dropdown sidebar', () => { preloadFixtures('static/issue_sidebar_label.html.raw'); beforeEach(() => { loadFixtures('static/issue_sidebar_label.html.raw'); + + mock = new MockAdapter(axios); + new IssuableContext('{"id":1,"name":"Administrator","username":"root"}'); new LabelsSelect(); - spyOn(jQuery, 'ajax').and.callFake((req) => { - const d = $.Deferred(); - let LABELS_DATA = []; + mock.onGet('/root/test/labels.json').reply(() => { + const labels = Array(10).fill().map((_, i) => ({ + id: i, + title: `test ${i}`, + color: '#5CB85C', + })); - if (req.url === '/root/test/labels.json') { - for (let i = 0; i < 10; i += 1) { - LABELS_DATA.push({ id: i, title: `test ${i}`, color: '#5CB85C' }); - } - } else if (req.url === '/root/test/issues/2.json') { - const tmp = []; - for (let i = 0; i < saveLabelCount; i += 1) { - tmp.push({ id: i, title: `test ${i}`, color: '#5CB85C' }); - } - LABELS_DATA = { labels: tmp }; - } + return [200, labels]; + }); + + mock.onPut('/root/test/issues/2.json').reply(() => { + const labels = Array(saveLabelCount).fill().map((_, i) => ({ + id: i, + title: `test ${i}`, + color: '#5CB85C', + })); - d.resolve(LABELS_DATA); - return d.promise(); + return [200, { labels }]; }); }); + afterEach(() => { + mock.restore(); + }); + it('changes collapsed tooltip when changing labels when less than 5', (done) => { 
saveLabelCount = 5; $('.edit-link').get(0).click(); diff --git a/spec/javascripts/lib/utils/ajax_cache_spec.js b/spec/javascripts/lib/utils/ajax_cache_spec.js index 49971bd91e2..7603400b55e 100644 --- a/spec/javascripts/lib/utils/ajax_cache_spec.js +++ b/spec/javascripts/lib/utils/ajax_cache_spec.js @@ -1,3 +1,5 @@ +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import AjaxCache from '~/lib/utils/ajax_cache'; describe('AjaxCache', () => { @@ -87,66 +89,53 @@ describe('AjaxCache', () => { }); describe('retrieve', () => { - let ajaxSpy; + let mock; beforeEach(() => { - spyOn(jQuery, 'ajax').and.callFake(url => ajaxSpy(url)); + mock = new MockAdapter(axios); + + spyOn(axios, 'get').and.callThrough(); + }); + + afterEach(() => { + mock.restore(); }); it('stores and returns data from Ajax call if cache is empty', (done) => { - ajaxSpy = (url) => { - expect(url).toBe(dummyEndpoint); - const deferred = $.Deferred(); - deferred.resolve(dummyResponse); - return deferred.promise(); - }; + mock.onGet(dummyEndpoint).reply(200, dummyResponse); AjaxCache.retrieve(dummyEndpoint) .then((data) => { - expect(data).toBe(dummyResponse); - expect(AjaxCache.internalStorage[dummyEndpoint]).toBe(dummyResponse); + expect(data).toEqual(dummyResponse); + expect(AjaxCache.internalStorage[dummyEndpoint]).toEqual(dummyResponse); }) .then(done) .catch(fail); }); - it('makes no Ajax call if request is pending', () => { - const responseDeferred = $.Deferred(); - - ajaxSpy = (url) => { - expect(url).toBe(dummyEndpoint); - // neither reject nor resolve to keep request pending - return responseDeferred.promise(); - }; - - const unexpectedResponse = data => fail(`Did not expect response: ${data}`); + it('makes no Ajax call if request is pending', (done) => { + mock.onGet(dummyEndpoint).reply(200, dummyResponse); AjaxCache.retrieve(dummyEndpoint) - .then(unexpectedResponse) + .then(done) .catch(fail); AjaxCache.retrieve(dummyEndpoint) - .then(unexpectedResponse) + .then(done) .catch(fail); - expect($.ajax.calls.count()).toBe(1); + expect(axios.get.calls.count()).toBe(1); }); it('returns undefined if Ajax call fails and cache is empty', (done) => { - const dummyStatusText = 'exploded'; - const dummyErrorMessage = 'server exploded'; - ajaxSpy = (url) => { - expect(url).toBe(dummyEndpoint); - const deferred = $.Deferred(); - deferred.reject(null, dummyStatusText, dummyErrorMessage); - return deferred.promise(); - }; + const errorMessage = 'Network Error'; + mock.onGet(dummyEndpoint).networkError(); AjaxCache.retrieve(dummyEndpoint) .then(data => fail(`Received unexpected data: ${JSON.stringify(data)}`)) .catch((error) => { - expect(error.message).toBe(`${dummyEndpoint}: ${dummyErrorMessage}`); - expect(error.textStatus).toBe(dummyStatusText); + expect(error.message).toBe(`${dummyEndpoint}: ${errorMessage}`); + expect(error.textStatus).toBe(errorMessage); done(); }) .catch(fail); @@ -154,7 +143,9 @@ describe('AjaxCache', () => { it('makes no Ajax call if matching data exists', (done) => { AjaxCache.internalStorage[dummyEndpoint] = dummyResponse; - ajaxSpy = () => fail(new Error('expected no Ajax call!')); + mock.onGet(dummyEndpoint).reply(() => { + fail(new Error('expected no Ajax call!')); + }); AjaxCache.retrieve(dummyEndpoint) .then((data) => { @@ -171,12 +162,7 @@ describe('AjaxCache', () => { AjaxCache.internalStorage[dummyEndpoint] = oldDummyResponse; - ajaxSpy = (url) => { - expect(url).toBe(dummyEndpoint); - const deferred = $.Deferred(); - deferred.resolve(dummyResponse); - 
return deferred.promise(); - }; + mock.onGet(dummyEndpoint).reply(200, dummyResponse); // Call without forceRetrieve param AjaxCache.retrieve(dummyEndpoint) @@ -189,7 +175,7 @@ describe('AjaxCache', () => { // Call with forceRetrieve param AjaxCache.retrieve(dummyEndpoint, true) .then((data) => { - expect(data).toBe(dummyResponse); + expect(data).toEqual(dummyResponse); }) .then(done) .catch(fail); diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js index 1052b4e7c20..80430011aed 100644 --- a/spec/javascripts/lib/utils/common_utils_spec.js +++ b/spec/javascripts/lib/utils/common_utils_spec.js @@ -1,7 +1,6 @@ /* eslint-disable promise/catch-or-return */ - -import * as commonUtils from '~/lib/utils/common_utils'; import axios from '~/lib/utils/axios_utils'; +import * as commonUtils from '~/lib/utils/common_utils'; import MockAdapter from 'axios-mock-adapter'; describe('common_utils', () => { @@ -460,17 +459,6 @@ describe('common_utils', () => { }); }); - describe('ajaxPost', () => { - it('should perform `$.ajax` call and do `POST` request', () => { - const requestURL = '/some/random/api'; - const data = { keyname: 'value' }; - const ajaxSpy = spyOn($, 'ajax').and.callFake(() => {}); - - commonUtils.ajaxPost(requestURL, data); - expect(ajaxSpy.calls.allArgs()[0][0].type).toEqual('POST'); - }); - }); - describe('spriteIcon', () => { let beforeGon; diff --git a/spec/javascripts/merge_request_spec.js b/spec/javascripts/merge_request_spec.js index bae3219b043..bdfd16ac995 100644 --- a/spec/javascripts/merge_request_spec.js +++ b/spec/javascripts/merge_request_spec.js @@ -1,5 +1,6 @@ /* eslint-disable space-before-function-paren, no-return-assign */ - +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import MergeRequest from '~/merge_request'; import CloseReopenReportToggle from '~/close_reopen_report_toggle'; import IssuablesHelper from '~/helpers/issuables_helper'; @@ -7,11 +8,24 @@ import IssuablesHelper from '~/helpers/issuables_helper'; (function() { describe('MergeRequest', function() { describe('task lists', function() { + let mock; + preloadFixtures('merge_requests/merge_request_with_task_list.html.raw'); beforeEach(function() { loadFixtures('merge_requests/merge_request_with_task_list.html.raw'); + + spyOn(axios, 'patch').and.callThrough(); + mock = new MockAdapter(axios); + + mock.onPatch(`${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/merge_requests/1.json`).reply(200, {}); + return this.merge = new MergeRequest(); }); + + afterEach(() => { + mock.restore(); + }); + it('modifies the Markdown field', function() { spyOn(jQuery, 'ajax').and.stub(); const changeEvent = document.createEvent('HTMLEvents'); @@ -21,14 +35,14 @@ import IssuablesHelper from '~/helpers/issuables_helper'; }); it('submits an ajax request on tasklist:changed', (done) => { - spyOn(jQuery, 'ajax').and.callFake((req) => { - expect(req.type).toBe('PATCH'); - expect(req.url).toBe(`${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/merge_requests/1.json`); - expect(req.data.merge_request.description).not.toBe(null); + $('.js-task-list-field').trigger('tasklist:changed'); + + setTimeout(() => { + expect(axios.patch).toHaveBeenCalledWith(`${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/merge_requests/1.json`, { + merge_request: { description: '- [ ] Task List Item' }, + }); done(); }); - - $('.js-task-list-field').trigger('tasklist:changed'); }); }); diff --git 
a/spec/javascripts/merge_request_tabs_spec.js b/spec/javascripts/merge_request_tabs_spec.js index a6be474805b..fda24db98b4 100644 --- a/spec/javascripts/merge_request_tabs_spec.js +++ b/spec/javascripts/merge_request_tabs_spec.js @@ -1,5 +1,6 @@ /* eslint-disable no-var, comma-dangle, object-shorthand */ - +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import * as urlUtils from '~/lib/utils/url_utility'; import MergeRequestTabs from '~/merge_request_tabs'; import '~/commit/pipelines/pipelines_bundle'; @@ -46,7 +47,7 @@ import 'vendor/jquery.scrollTo'; describe('activateTab', function () { beforeEach(function () { - spyOn($, 'ajax').and.callFake(function () {}); + spyOn(axios, 'get').and.returnValue(Promise.resolve({ data: {} })); loadFixtures('merge_requests/merge_request_with_task_list.html.raw'); this.subject = this.class.activateTab; }); @@ -148,7 +149,7 @@ import 'vendor/jquery.scrollTo'; describe('setCurrentAction', function () { beforeEach(function () { - spyOn($, 'ajax').and.callFake(function () {}); + spyOn(axios, 'get').and.returnValue(Promise.resolve({ data: {} })); this.subject = this.class.setCurrentAction; }); @@ -214,13 +215,21 @@ import 'vendor/jquery.scrollTo'; }); describe('tabShown', () => { + let mock; + beforeEach(function () { - spyOn($, 'ajax').and.callFake(function (options) { - options.success({ html: '' }); + mock = new MockAdapter(axios); + mock.onGet(/(.*)\/diffs\.json/).reply(200, { + data: { html: '' }, }); + loadFixtures('merge_requests/merge_request_with_task_list.html.raw'); }); + afterEach(() => { + mock.restore(); + }); + describe('with "Side-by-side"/parallel diff view', () => { beforeEach(function () { this.class.diffViewType = () => 'parallel'; @@ -292,16 +301,20 @@ import 'vendor/jquery.scrollTo'; it('triggers Ajax request to JSON endpoint', function (done) { const url = '/foo/bar/merge_requests/1/diffs'; - spyOn(this.class, 'ajaxGet').and.callFake((options) => { - expect(options.url).toEqual(`${url}.json`); + + spyOn(axios, 'get').and.callFake((reqUrl) => { + expect(reqUrl).toBe(`${url}.json`); + done(); + + return Promise.resolve({ data: {} }); }); this.class.loadDiff(url); }); it('triggers scroll event when diff already loaded', function (done) { - spyOn(this.class, 'ajaxGet').and.callFake(() => done.fail()); + spyOn(axios, 'get').and.callFake(done.fail); spyOn(document, 'dispatchEvent'); this.class.diffsLoaded = true; @@ -316,6 +329,7 @@ import 'vendor/jquery.scrollTo'; describe('with inline diff', () => { let noteId; let noteLineNumId; + let mock; beforeEach(() => { const diffsResponse = getJSONFixture(inlineChangesTabJsonFixture); @@ -330,29 +344,40 @@ import 'vendor/jquery.scrollTo'; .attr('href') .replace('#', ''); - spyOn($, 'ajax').and.callFake(function (options) { - options.success(diffsResponse); - }); + mock = new MockAdapter(axios); + mock.onGet(/(.*)\/diffs\.json/).reply(200, diffsResponse); + }); + + afterEach(() => { + mock.restore(); }); describe('with note fragment hash', () => { - it('should expand and scroll to linked fragment hash #note_xxx', function () { + it('should expand and scroll to linked fragment hash #note_xxx', function (done) { spyOn(urlUtils, 'getLocationHash').and.returnValue(noteId); this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); - expect(noteId.length).toBeGreaterThan(0); - expect(Notes.instance.toggleDiffNote).toHaveBeenCalledWith({ - target: jasmine.any(Object), - lineType: 'old', - forceShow: true, + setTimeout(() => { + 
expect(noteId.length).toBeGreaterThan(0); + expect(Notes.instance.toggleDiffNote).toHaveBeenCalledWith({ + target: jasmine.any(Object), + lineType: 'old', + forceShow: true, + }); + + done(); }); }); - it('should gracefully ignore non-existant fragment hash', function () { + it('should gracefully ignore non-existent fragment hash', function (done) { spyOn(urlUtils, 'getLocationHash').and.returnValue('note_something-that-does-not-exist'); this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); - expect(Notes.instance.toggleDiffNote).not.toHaveBeenCalled(); + setTimeout(() => { + expect(Notes.instance.toggleDiffNote).not.toHaveBeenCalled(); + + done(); + }); }); }); @@ -370,6 +395,7 @@ import 'vendor/jquery.scrollTo'; describe('with parallel diff', () => { let noteId; let noteLineNumId; + let mock; beforeEach(() => { const diffsResponse = getJSONFixture(parallelChangesTabJsonFixture); @@ -384,30 +410,40 @@ import 'vendor/jquery.scrollTo'; .attr('href') .replace('#', ''); - spyOn($, 'ajax').and.callFake(function (options) { - options.success(diffsResponse); - }); + mock = new MockAdapter(axios); + mock.onGet(/(.*)\/diffs\.json/).reply(200, diffsResponse); + }); + + afterEach(() => { + mock.restore(); + }); describe('with note fragment hash', () => { - it('should expand and scroll to linked fragment hash #note_xxx', function () { + it('should expand and scroll to linked fragment hash #note_xxx', function (done) { spyOn(urlUtils, 'getLocationHash').and.returnValue(noteId); this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); - expect(noteId.length).toBeGreaterThan(0); - expect(Notes.instance.toggleDiffNote).toHaveBeenCalledWith({ - target: jasmine.any(Object), - lineType: 'new', - forceShow: true, + setTimeout(() => { + expect(noteId.length).toBeGreaterThan(0); + expect(Notes.instance.toggleDiffNote).toHaveBeenCalledWith({ + target: jasmine.any(Object), + lineType: 'new', + forceShow: true, + }); + + done(); }); }); - it('should gracefully ignore non-existant fragment hash', function () { + it('should gracefully ignore non-existent fragment hash', function (done) { spyOn(urlUtils, 'getLocationHash').and.returnValue('note_something-that-does-not-exist'); this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); - expect(Notes.instance.toggleDiffNote).not.toHaveBeenCalled(); + setTimeout(() => { + expect(Notes.instance.toggleDiffNote).not.toHaveBeenCalled(); + done(); + }); }); }); diff --git a/spec/javascripts/mini_pipeline_graph_dropdown_spec.js b/spec/javascripts/mini_pipeline_graph_dropdown_spec.js index 481b46c3ac6..6fa6f44f953 100644 --- a/spec/javascripts/mini_pipeline_graph_dropdown_spec.js +++ b/spec/javascripts/mini_pipeline_graph_dropdown_spec.js @@ -1,7 +1,9 @@ /* eslint-disable no-new */ +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import MiniPipelineGraph from '~/mini_pipeline_graph_dropdown'; -import '~/flash'; +import timeoutPromise from './helpers/set_timeout_promise_helper'; describe('Mini Pipeline Graph Dropdown', () => { preloadFixtures('static/mini_dropdown_graph.html.raw'); @@ -27,6 +29,16 @@ describe('Mini Pipeline Graph Dropdown', () => { }); describe('When dropdown is clicked', () => { + let mock; + + beforeEach(() => { + mock = new MockAdapter(axios); + }); + + afterEach(() => { + mock.restore(); + }); + it('should call getBuildsList', () => { const getBuildsListSpy = spyOn( MiniPipelineGraph.prototype, 'getBuildsList', ).and.callFake(function () {}); new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); document.querySelector('.js-builds-dropdown-button').click(); expect(getBuildsListSpy).toHaveBeenCalled(); }); it('should make a request to the endpoint provided 
in the html', () => { - const ajaxSpy = spyOn($, 'ajax').and.callFake(function () {}); + const ajaxSpy = spyOn(axios, 'get').and.callThrough(); + + mock.onGet('foobar').reply(200, { + html: '', + }); new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); document.querySelector('.js-builds-dropdown-button').click(); - expect(ajaxSpy.calls.allArgs()[0][0].url).toEqual('foobar'); + expect(ajaxSpy.calls.allArgs()[0][0]).toEqual('foobar'); }); - it('should not close when user uses cmd/ctrl + click', () => { - spyOn($, 'ajax').and.callFake(function (params) { - params.success({ - html: `<li> - <a class="mini-pipeline-graph-dropdown-item" href="#"> - <span class="ci-status-icon ci-status-icon-failed"></span> - <span class="ci-build-text">build</span> - </a> - <a class="ci-action-icon-wrapper js-ci-action-icon" href="#"></a> - </li>`, - }); + it('should not close when user uses cmd/ctrl + click', (done) => { + mock.onGet('foobar').reply(200, { + html: `<li> + <a class="mini-pipeline-graph-dropdown-item" href="#"> + <span class="ci-status-icon ci-status-icon-failed"></span> + <span class="ci-build-text">build</span> + </a> + <a class="ci-action-icon-wrapper js-ci-action-icon" href="#"></a> + </li>`, }); new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); document.querySelector('.js-builds-dropdown-button').click(); - document.querySelector('a.mini-pipeline-graph-dropdown-item').click(); - - expect($('.js-builds-dropdown-list').is(':visible')).toEqual(true); + timeoutPromise() + .then(() => { + document.querySelector('a.mini-pipeline-graph-dropdown-item').click(); + }) + .then(timeoutPromise) + .then(() => { + expect($('.js-builds-dropdown-list').is(':visible')).toEqual(true); + }) + .then(done) + .catch(done.fail); }); - }); - it('should close the dropdown when request returns an error', (done) => { - spyOn($, 'ajax').and.callFake(options => options.error()); + it('should close the dropdown when request returns an error', (done) => { + mock.onGet('foobar').networkError(); - new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); + new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); - document.querySelector('.js-builds-dropdown-button').click(); + document.querySelector('.js-builds-dropdown-button').click(); - setTimeout(() => { - expect($('.js-builds-dropdown-tests .dropdown').hasClass('open')).toEqual(false); - done(); - }, 0); + setTimeout(() => { + expect($('.js-builds-dropdown-tests .dropdown').hasClass('open')).toEqual(false); + done(); + }); + }); }); }); diff --git a/spec/javascripts/notes_spec.js b/spec/javascripts/notes_spec.js index a40821a5693..274d7591c71 100644 --- a/spec/javascripts/notes_spec.js +++ b/spec/javascripts/notes_spec.js @@ -1,11 +1,14 @@ /* eslint-disable space-before-function-paren, no-unused-expressions, no-var, object-shorthand, comma-dangle, max-len */ import _ from 'underscore'; +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import * as urlUtils from '~/lib/utils/url_utility'; import 'autosize'; import '~/gl_form'; import '~/lib/utils/text_utility'; import '~/render_gfm'; import Notes from '~/notes'; +import timeoutPromise from './helpers/set_timeout_promise_helper'; (function() { window.gon || (window.gon = {}); @@ -47,13 +50,24 @@ import Notes from '~/notes'; }); describe('task lists', function() { + let mock; + beforeEach(function() { + spyOn(axios, 'patch').and.callThrough(); + mock = new MockAdapter(axios); + + 
mock.onPatch(`${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/merge_requests/1.json`).reply(200, {}); + $('.js-comment-button').on('click', function(e) { e.preventDefault(); }); this.notes = new Notes('', []); }); + afterEach(() => { + mock.restore(); + }); + it('modifies the Markdown field', function() { const changeEvent = document.createEvent('HTMLEvents'); changeEvent.initEvent('change', true, true); @@ -62,14 +76,15 @@ import Notes from '~/notes'; expect($('.js-task-list-field.original-task-list').val()).toBe('- [x] Task List Item'); }); - it('submits an ajax request on tasklist:changed', function() { - spyOn(jQuery, 'ajax').and.callFake(function(req) { - expect(req.type).toBe('PATCH'); - expect(req.url).toBe('http://test.host/frontend-fixtures/merge-requests-project/merge_requests/1.json'); - return expect(req.data.note).not.toBe(null); - }); + it('submits an ajax request on tasklist:changed', function(done) { + $('.js-task-list-container').trigger('tasklist:changed'); - $('.js-task-list-field.js-note-text').trigger('tasklist:changed'); + setTimeout(() => { + expect(axios.patch).toHaveBeenCalledWith(`${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/merge_requests/1.json`, { + note: { note: '' }, + }); + done(); + }); }); }); @@ -119,6 +134,7 @@ import Notes from '~/notes'; let noteEntity; let $form; let $notesContainer; + let mock; beforeEach(() => { this.notes = new Notes('', []); @@ -136,24 +152,32 @@ import Notes from '~/notes'; $form = $('form.js-main-target-form'); $notesContainer = $('ul.main-notes-list'); $form.find('textarea.js-note-text').val(sampleComment); + + mock = new MockAdapter(axios); + mock.onPost(/(.*)\/notes$/).reply(200, noteEntity); }); - it('updates note and resets edit form', () => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); + afterEach(() => { + mock.restore(); + }); + + it('updates note and resets edit form', (done) => { spyOn(this.notes, 'revertNoteEditForm'); spyOn(this.notes, 'setupNewNote'); $('.js-comment-button').click(); - deferred.resolve(noteEntity); - const $targetNote = $notesContainer.find(`#note_${noteEntity.id}`); - const updatedNote = Object.assign({}, noteEntity); - updatedNote.note = 'bar'; - this.notes.updateNote(updatedNote, $targetNote); + setTimeout(() => { + const $targetNote = $notesContainer.find(`#note_${noteEntity.id}`); + const updatedNote = Object.assign({}, noteEntity); + updatedNote.note = 'bar'; + this.notes.updateNote(updatedNote, $targetNote); + + expect(this.notes.revertNoteEditForm).toHaveBeenCalledWith($targetNote); + expect(this.notes.setupNewNote).toHaveBeenCalled(); - expect(this.notes.revertNoteEditForm).toHaveBeenCalledWith($targetNote); - expect(this.notes.setupNewNote).toHaveBeenCalled(); + done(); + }); }); }); @@ -479,8 +503,19 @@ import Notes from '~/notes'; }; let $form; let $notesContainer; + let mock; + + function mockNotesPost() { + mock.onPost(/(.*)\/notes$/).reply(200, note); + } + + function mockNotesPostError() { + mock.onPost(/(.*)\/notes$/).networkError(); + } beforeEach(() => { + mock = new MockAdapter(axios); + this.notes = new Notes('', []); window.gon.current_username = 'root'; window.gon.current_user_fullname = 'Administrator'; @@ -489,63 +524,92 @@ import Notes from '~/notes'; $form.find('textarea.js-note-text').val(sampleComment); }); + afterEach(() => { + mock.restore(); + }); + it('should show placeholder note while new comment is being posted', () => { + mockNotesPost(); + $('.js-comment-button').click(); 
expect($notesContainer.find('.note.being-posted').length > 0).toEqual(true); }); - it('should remove placeholder note when new comment is done posting', () => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); + it('should remove placeholder note when new comment is done posting', (done) => { + mockNotesPost(); + $('.js-comment-button').click(); - deferred.resolve(note); - expect($notesContainer.find('.note.being-posted').length).toEqual(0); + setTimeout(() => { + expect($notesContainer.find('.note.being-posted').length).toEqual(0); + + done(); + }); }); - it('should show actual note element when new comment is done posting', () => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); + it('should show actual note element when new comment is done posting', (done) => { + mockNotesPost(); + $('.js-comment-button').click(); - deferred.resolve(note); - expect($notesContainer.find(`#note_${note.id}`).length > 0).toEqual(true); + setTimeout(() => { + expect($notesContainer.find(`#note_${note.id}`).length > 0).toEqual(true); + + done(); + }); }); - it('should reset Form when new comment is done posting', () => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); + it('should reset Form when new comment is done posting', (done) => { + mockNotesPost(); + $('.js-comment-button').click(); - deferred.resolve(note); - expect($form.find('textarea.js-note-text').val()).toEqual(''); + setTimeout(() => { + expect($form.find('textarea.js-note-text').val()).toEqual(''); + + done(); + }); }); - it('should show flash error message when new comment failed to be posted', () => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); + it('should show flash error message when new comment failed to be posted', (done) => { + mockNotesPostError(); + $('.js-comment-button').click(); - deferred.reject(); - expect($notesContainer.parent().find('.flash-container .flash-text').is(':visible')).toEqual(true); + setTimeout(() => { + expect($notesContainer.parent().find('.flash-container .flash-text').is(':visible')).toEqual(true); + + done(); + }); }); - it('should show flash error message when comment failed to be updated', () => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); + it('should show flash error message when comment failed to be updated', (done) => { + mockNotesPost(); + $('.js-comment-button').click(); - deferred.resolve(note); - const $noteEl = $notesContainer.find(`#note_${note.id}`); - $noteEl.find('.js-note-edit').click(); - $noteEl.find('textarea.js-note-text').val(updatedComment); - $noteEl.find('.js-comment-save-button').click(); + timeoutPromise() + .then(() => { + const $noteEl = $notesContainer.find(`#note_${note.id}`); + $noteEl.find('.js-note-edit').click(); + $noteEl.find('textarea.js-note-text').val(updatedComment); - deferred.reject(); - const $updatedNoteEl = $notesContainer.find(`#note_${note.id}`); - expect($updatedNoteEl.hasClass('.being-posted')).toEqual(false); // Remove being-posted visuals - expect($updatedNoteEl.find('.note-text').text().trim()).toEqual(sampleComment); // See if comment reverted back to original - expect($('.flash-container').is(':visible')).toEqual(true); // Flash error message shown + mock.restore(); + + mockNotesPostError(); + + $noteEl.find('.js-comment-save-button').click(); + }) + .then(timeoutPromise) + .then(() => { + const $updatedNoteEl = 
$notesContainer.find(`#note_${note.id}`); + expect($updatedNoteEl.hasClass('.being-posted')).toEqual(false); // Remove being-posted visuals + expect($updatedNoteEl.find('.note-text').text().trim()).toEqual(sampleComment); // See if comment reverted back to original + expect($('.flash-container').is(':visible')).toEqual(true); // Flash error message shown + + done(); + }) + .catch(done.fail); }); }); @@ -563,8 +627,12 @@ import Notes from '~/notes'; }; let $form; let $notesContainer; + let mock; beforeEach(() => { + mock = new MockAdapter(axios); + mock.onPost(/(.*)\/notes$/).reply(200, note); + this.notes = new Notes('', []); window.gon.current_username = 'root'; window.gon.current_user_fullname = 'Administrator'; @@ -582,15 +650,20 @@ import Notes from '~/notes'; $form.find('textarea.js-note-text').val(sampleComment); }); - it('should remove slash command placeholder when comment with slash commands is done posting', () => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); + afterEach(() => { + mock.restore(); + }); + + it('should remove slash command placeholder when comment with slash commands is done posting', (done) => { spyOn(gl.awardsHandler, 'addAwardToEmojiBar').and.callThrough(); $('.js-comment-button').click(); expect($notesContainer.find('.system-note.being-posted').length).toEqual(1); // Placeholder shown - deferred.resolve(note); - expect($notesContainer.find('.system-note.being-posted').length).toEqual(0); // Placeholder removed + + setTimeout(() => { + expect($notesContainer.find('.system-note.being-posted').length).toEqual(0); // Placeholder removed + done(); + }); }); }); @@ -607,8 +680,12 @@ import Notes from '~/notes'; }; let $form; let $notesContainer; + let mock; beforeEach(() => { + mock = new MockAdapter(axios); + mock.onPost(/(.*)\/notes$/).reply(200, note); + this.notes = new Notes('', []); window.gon.current_username = 'root'; window.gon.current_user_fullname = 'Administrator'; @@ -617,19 +694,24 @@ import Notes from '~/notes'; $form.find('textarea.js-note-text').html(sampleComment); }); - it('should not render a script tag', () => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); + afterEach(() => { + mock.restore(); + }); + + it('should not render a script tag', (done) => { $('.js-comment-button').click(); - deferred.resolve(note); - const $noteEl = $notesContainer.find(`#note_${note.id}`); - $noteEl.find('.js-note-edit').click(); - $noteEl.find('textarea.js-note-text').html(updatedComment); - $noteEl.find('.js-comment-save-button').click(); + setTimeout(() => { + const $noteEl = $notesContainer.find(`#note_${note.id}`); + $noteEl.find('.js-note-edit').click(); + $noteEl.find('textarea.js-note-text').html(updatedComment); + $noteEl.find('.js-comment-save-button').click(); + + const $updatedNoteEl = $notesContainer.find(`#note_${note.id}`).find('.js-task-list-container'); + expect($updatedNoteEl.find('.note-text').text().trim()).toEqual(''); - const $updatedNoteEl = $notesContainer.find(`#note_${note.id}`).find('.js-task-list-container'); - expect($updatedNoteEl.find('.note-text').text().trim()).toEqual(''); + done(); + }); }); }); diff --git a/spec/javascripts/pager_spec.js b/spec/javascripts/pager_spec.js index 2fd87754238..b09494f0b77 100644 --- a/spec/javascripts/pager_spec.js +++ b/spec/javascripts/pager_spec.js @@ -1,5 +1,6 @@ /* global fixture */ - +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import * as utils from 
'~/lib/utils/url_utility'; import Pager from '~/pager'; @@ -9,7 +10,6 @@ describe('pager', () => { beforeEach(() => { setFixtures('<div class="content_list"></div><div class="loading"></div>'); - spyOn($, 'ajax'); }); afterEach(() => { @@ -47,39 +47,90 @@ describe('pager', () => { }); describe('getOld', () => { + const urlRegex = /(.*)some_list(.*)$/; + let mock; + + function mockSuccess() { + mock.onGet(urlRegex).reply(200, { + count: 0, + html: '', + }); + } + + function mockError() { + mock.onGet(urlRegex).networkError(); + } + beforeEach(() => { setFixtures('<div class="content_list" data-href="/some_list"></div><div class="loading"></div>'); + spyOn(axios, 'get').and.callThrough(); + + mock = new MockAdapter(axios); + Pager.init(); }); - it('shows loader while loading next page', () => { + afterEach(() => { + mock.restore(); + }); + + it('shows loader while loading next page', (done) => { + mockSuccess(); + spyOn(Pager.loading, 'show'); Pager.getOld(); - expect(Pager.loading.show).toHaveBeenCalled(); + + setTimeout(() => { + expect(Pager.loading.show).toHaveBeenCalled(); + + done(); + }); }); - it('hides loader on success', () => { - spyOn($, 'ajax').and.callFake(options => options.success({})); + it('hides loader on success', (done) => { + mockSuccess(); + spyOn(Pager.loading, 'hide'); Pager.getOld(); - expect(Pager.loading.hide).toHaveBeenCalled(); + + setTimeout(() => { + expect(Pager.loading.hide).toHaveBeenCalled(); + + done(); + }); }); - it('hides loader on error', () => { - spyOn($, 'ajax').and.callFake(options => options.error()); + it('hides loader on error', (done) => { + mockError(); + spyOn(Pager.loading, 'hide'); Pager.getOld(); - expect(Pager.loading.hide).toHaveBeenCalled(); + + setTimeout(() => { + expect(Pager.loading.hide).toHaveBeenCalled(); + + done(); + }); }); - it('sends request to url with offset and limit params', () => { - spyOn($, 'ajax'); + it('sends request to url with offset and limit params', (done) => { Pager.offset = 100; Pager.limit = 20; Pager.getOld(); - const [{ data, url }] = $.ajax.calls.argsFor(0); - expect(data).toBe('limit=20&offset=100'); - expect(url).toBe('/some_list'); + + setTimeout(() => { + const [url, params] = axios.get.calls.argsFor(0); + + expect(params).toEqual({ + params: { + limit: 20, + offset: 100, + }, + }); + expect(url).toBe('/some_list'); + + done(); + }); }); }); }); diff --git a/spec/javascripts/pipeline_schedules/setup_pipeline_variable_list_spec.js b/spec/javascripts/pipeline_schedules/setup_pipeline_variable_list_spec.js deleted file mode 100644 index 5b316b319a5..00000000000 --- a/spec/javascripts/pipeline_schedules/setup_pipeline_variable_list_spec.js +++ /dev/null @@ -1,145 +0,0 @@ -import { - setupPipelineVariableList, - insertRow, - removeRow, -} from '~/pipeline_schedules/setup_pipeline_variable_list'; - -describe('Pipeline Variable List', () => { - let $markup; - - describe('insertRow', () => { - it('should insert another row', () => { - $markup = $(`<div> - <li class="js-row"> - <input> - <textarea></textarea> - </li> - </div>`); - - insertRow($markup.find('.js-row')); - - expect($markup.find('.js-row').length).toBe(2); - }); - - it('should clear `data-is-persisted` on cloned row', () => { - $markup = $(`<div> - <li class="js-row" data-is-persisted="true"></li> - </div>`); - - insertRow($markup.find('.js-row')); - - const $lastRow = $markup.find('.js-row').last(); - expect($lastRow.attr('data-is-persisted')).toBe(undefined); - }); - - it('should clear inputs on cloned row', () => { - $markup = $(`<div> - <li 
class="js-row"> - <input value="foo"> - <textarea>bar</textarea> - </li> - </div>`); - - insertRow($markup.find('.js-row')); - - const $lastRow = $markup.find('.js-row').last(); - expect($lastRow.find('input').val()).toBe(''); - expect($lastRow.find('textarea').val()).toBe(''); - }); - }); - - describe('removeRow', () => { - it('should remove dynamic row', () => { - $markup = $(`<div> - <li class="js-row"> - <input> - <textarea></textarea> - </li> - </div>`); - - removeRow($markup.find('.js-row')); - - expect($markup.find('.js-row').length).toBe(0); - }); - - it('should hide and mark to destroy with already persisted rows', () => { - $markup = $(`<div> - <li class="js-row" data-is-persisted="true"> - <input class="js-destroy-input"> - </li> - </div>`); - - const $row = $markup.find('.js-row'); - removeRow($row); - - expect($row.find('.js-destroy-input').val()).toBe('1'); - expect($markup.find('.js-row').length).toBe(1); - }); - }); - - describe('setupPipelineVariableList', () => { - beforeEach(() => { - $markup = $(`<form> - <li class="js-row"> - <input class="js-user-input" name="schedule[variables_attributes][][key]"> - <textarea class="js-user-input" name="schedule[variables_attributes][][value]"></textarea> - <button class="js-row-remove-button"></button> - <button class="js-row-add-button"></button> - </li> - </form>`); - - setupPipelineVariableList($markup); - }); - - it('should remove the row when clicking the remove button', () => { - $markup.find('.js-row-remove-button').trigger('click'); - - expect($markup.find('.js-row').length).toBe(0); - }); - - it('should add another row when editing the last rows key input', () => { - const $row = $markup.find('.js-row'); - $row.find('input.js-user-input') - .val('foo') - .trigger('input'); - - expect($markup.find('.js-row').length).toBe(2); - }); - - it('should add another row when editing the last rows value textarea', () => { - const $row = $markup.find('.js-row'); - $row.find('textarea.js-user-input') - .val('foo') - .trigger('input'); - - expect($markup.find('.js-row').length).toBe(2); - }); - - it('should remove empty row after blurring', () => { - const $row = $markup.find('.js-row'); - $row.find('input.js-user-input') - .val('foo') - .trigger('input'); - - expect($markup.find('.js-row').length).toBe(2); - - $row.find('input.js-user-input') - .val('') - .trigger('input') - .trigger('blur'); - - expect($markup.find('.js-row').length).toBe(1); - }); - - it('should clear out the `name` attribute on the inputs for the last empty row on form submission (avoid BE validation)', () => { - const $row = $markup.find('.js-row'); - expect($row.find('input').attr('name')).toBe('schedule[variables_attributes][][key]'); - expect($row.find('textarea').attr('name')).toBe('schedule[variables_attributes][][value]'); - - $markup.filter('form').submit(); - - expect($row.find('input').attr('name')).toBe(''); - expect($row.find('textarea').attr('name')).toBe(''); - }); - }); -}); diff --git a/spec/javascripts/prometheus_metrics/prometheus_metrics_spec.js b/spec/javascripts/prometheus_metrics/prometheus_metrics_spec.js index b24567ffc0c..f6c0f51cf62 100644 --- a/spec/javascripts/prometheus_metrics/prometheus_metrics_spec.js +++ b/spec/javascripts/prometheus_metrics/prometheus_metrics_spec.js @@ -1,3 +1,5 @@ +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import PrometheusMetrics from '~/prometheus_metrics/prometheus_metrics'; import PANEL_STATE from '~/prometheus_metrics/constants'; import { metrics, 
missingVarMetrics } from './mock_data'; @@ -102,25 +104,38 @@ describe('PrometheusMetrics', () => { describe('loadActiveMetrics', () => { let prometheusMetrics; + let mock; + + function mockSuccess() { + mock.onGet(prometheusMetrics.activeMetricsEndpoint).reply(200, { + data: metrics, + success: true, + }); + } + + function mockError() { + mock.onGet(prometheusMetrics.activeMetricsEndpoint).networkError(); + } beforeEach(() => { + spyOn(axios, 'get').and.callThrough(); + prometheusMetrics = new PrometheusMetrics('.js-prometheus-metrics-monitoring'); + + mock = new MockAdapter(axios); + }); + + afterEach(() => { + mock.restore(); }); it('should show loader animation while response is being loaded and hide it when request is complete', (done) => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); + mockSuccess(); prometheusMetrics.loadActiveMetrics(); expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBeFalsy(); - expect($.ajax).toHaveBeenCalledWith({ - url: prometheusMetrics.activeMetricsEndpoint, - dataType: 'json', - global: false, - }); - - deferred.resolve({ data: metrics, success: true }); + expect(axios.get).toHaveBeenCalledWith(prometheusMetrics.activeMetricsEndpoint); setTimeout(() => { expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBeTruthy(); @@ -129,14 +144,10 @@ describe('PrometheusMetrics', () => { }); it('should show empty state if response failed to load', (done) => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); - spyOn(prometheusMetrics, 'populateActiveMetrics'); + mockError(); prometheusMetrics.loadActiveMetrics(); - deferred.reject(); - setTimeout(() => { expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBeTruthy(); expect(prometheusMetrics.$monitoredMetricsEmpty.hasClass('hidden')).toBeFalsy(); @@ -145,14 +156,11 @@ describe('PrometheusMetrics', () => { }); it('should populate metrics list once response is loaded', (done) => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); spyOn(prometheusMetrics, 'populateActiveMetrics'); + mockSuccess(); prometheusMetrics.loadActiveMetrics(); - deferred.resolve({ data: metrics, success: true }); - setTimeout(() => { expect(prometheusMetrics.populateActiveMetrics).toHaveBeenCalledWith(metrics); done(); diff --git a/spec/javascripts/right_sidebar_spec.js b/spec/javascripts/right_sidebar_spec.js index 3267e29585b..35bb630bf5d 100644 --- a/spec/javascripts/right_sidebar_spec.js +++ b/spec/javascripts/right_sidebar_spec.js @@ -1,6 +1,8 @@ /* eslint-disable space-before-function-paren, no-var, one-var, one-var-declaration-per-line, new-parens, no-return-assign, new-cap, vars-on-top, max-len */ +import MockAdapter from 'axios-mock-adapter'; import '~/commons/bootstrap'; +import axios from '~/lib/utils/axios_utils'; import Sidebar from '~/right_sidebar'; (function() { @@ -35,16 +37,23 @@ import Sidebar from '~/right_sidebar'; var fixtureName = 'issues/open-issue.html.raw'; preloadFixtures(fixtureName); loadJSONFixtures('todos/todos.json'); + let mock; beforeEach(function() { loadFixtures(fixtureName); - this.sidebar = new Sidebar; + mock = new MockAdapter(axios); + this.sidebar = new Sidebar(); $aside = $('.right-sidebar'); $page = $('.layout-page'); $icon = $aside.find('i'); $toggle = $aside.find('.js-sidebar-toggle'); return $labelsIcon = $aside.find('.sidebar-collapsed-icon'); }); + + afterEach(() => { + mock.restore(); + }); + it('should 
expand/collapse the sidebar when arrow is clicked', function() { assertSidebarState('expanded'); $toggle.click(); @@ -63,20 +72,19 @@ import Sidebar from '~/right_sidebar'; return assertSidebarState('collapsed'); }); - it('should broadcast todo:toggle event when add todo clicked', function() { + it('should broadcast todo:toggle event when add todo clicked', function(done) { var todos = getJSONFixture('todos/todos.json'); - spyOn(jQuery, 'ajax').and.callFake(function() { - var d = $.Deferred(); - var response = todos; - d.resolve(response); - return d.promise(); - }); + mock.onPost(/(.*)\/todos$/).reply(200, todos); var todoToggleSpy = spyOnEvent(document, 'todo:toggle'); $('.issuable-sidebar-header .js-issuable-todo').click(); - expect(todoToggleSpy.calls.count()).toEqual(1); + setTimeout(() => { + expect(todoToggleSpy.calls.count()).toEqual(1); + + done(); + }); }); it('should not hide collapsed icons', () => { diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js index a750bc78f36..f14d5f6f76c 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js @@ -1,39 +1,39 @@ import Vue from 'vue'; -import authorComponent from '~/vue_merge_request_widget/components/mr_widget_author'; - -const author = { - webUrl: 'http://foo.bar', - avatarUrl: 'http://gravatar.com/foo', - name: 'fatihacet', -}; -const createComponent = () => { - const Component = Vue.extend(authorComponent); - - return new Component({ - el: document.createElement('div'), - propsData: { author }, - }); -}; +import authorComponent from '~/vue_merge_request_widget/components/mr_widget_author.vue'; +import mountComponent from '../../helpers/vue_mount_component_helper'; describe('MRWidgetAuthor', () => { - describe('props', () => { - it('should have props', () => { - const authorProp = authorComponent.props.author; + let vm; + + beforeEach(() => { + const Component = Vue.extend(authorComponent); + + vm = mountComponent(Component, { + author: { + name: 'Administrator', + username: 'root', + webUrl: 'http://localhost:3000/root', + avatarUrl: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + }, - expect(authorProp).toBeDefined(); - expect(authorProp.type instanceof Object).toBeTruthy(); - expect(authorProp.required).toBeTruthy(); }); }); - describe('template', () => { - it('should have correct elements', () => { - const el = createComponent().$el; + afterEach(() => { + vm.$destroy(); + }); - expect(el.tagName).toEqual('A'); - expect(el.getAttribute('href')).toEqual(author.webUrl); - expect(el.querySelector('img').getAttribute('src')).toEqual(author.avatarUrl); - expect(el.querySelector('.author').innerText.trim()).toEqual(author.name); - }); + it('renders link with the author web url', () => { + expect(vm.$el.getAttribute('href')).toEqual('http://localhost:3000/root'); + }); + + it('renders image with avatar url', () => { + expect( + vm.$el.querySelector('img').getAttribute('src'), + ).toEqual('http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon'); + }); + + it('renders author name', () => { + expect(vm.$el.textContent.trim()).toEqual('Administrator'); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js index 515ddcbb875..8c55622b15e 100644 --- 
a/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js @@ -1,61 +1,40 @@ import Vue from 'vue'; -import authorTimeComponent from '~/vue_merge_request_widget/components/mr_widget_author_time'; - -const props = { - actionText: 'Merged by', - author: { - webUrl: 'http://foo.bar', - avatarUrl: 'http://gravatar.com/foo', - name: 'fatihacet', - }, - dateTitle: '2017-03-23T23:02:00.807Z', - dateReadable: '12 hours ago', -}; -const createComponent = () => { - const Component = Vue.extend(authorTimeComponent); - - return new Component({ - el: document.createElement('div'), - propsData: props, - }); -}; +import authorTimeComponent from '~/vue_merge_request_widget/components/mr_widget_author_time.vue'; +import mountComponent from '../../helpers/vue_mount_component_helper'; describe('MRWidgetAuthorTime', () => { - describe('props', () => { - it('should have props', () => { - const { actionText, author, dateTitle, dateReadable } = authorTimeComponent.props; - const ActionTextClass = actionText.type; - const DateTitleClass = dateTitle.type; - const DateReadableClass = dateReadable.type; - - expect(new ActionTextClass() instanceof String).toBeTruthy(); - expect(actionText.required).toBeTruthy(); - - expect(author.type instanceof Object).toBeTruthy(); - expect(author.required).toBeTruthy(); - - expect(new DateTitleClass() instanceof String).toBeTruthy(); - expect(dateTitle.required).toBeTruthy(); - - expect(new DateReadableClass() instanceof String).toBeTruthy(); - expect(dateReadable.required).toBeTruthy(); + let vm; + + beforeEach(() => { + const Component = Vue.extend(authorTimeComponent); + + vm = mountComponent(Component, { + actionText: 'Merged by', + author: { + name: 'Administrator', + username: 'root', + webUrl: 'http://localhost:3000/root', + avatarUrl: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + }, + dateTitle: '2017-03-23T23:02:00.807Z', + dateReadable: '12 hours ago', }); }); - describe('components', () => { - it('should have components', () => { - expect(authorTimeComponent.components['mr-widget-author']).toBeDefined(); - }); + afterEach(() => { + vm.$destroy(); + }); + + it('renders provided action text', () => { + expect(vm.$el.textContent).toContain('Merged by'); }); - describe('template', () => { - it('should have correct elements', () => { - const el = createComponent().$el; + it('renders author', () => { + expect(vm.$el.textContent).toContain('Administrator'); + }); - expect(el.tagName).toEqual('H4'); - expect(el.querySelector('a').getAttribute('href')).toEqual(props.author.webUrl); - expect(el.querySelector('time').innerText).toContain(props.dateReadable); - expect(el.querySelector('time').getAttribute('title')).toEqual(props.dateTitle); - }); + it('renders provided time', () => { + expect(vm.$el.querySelector('time').getAttribute('title')).toEqual('2017-03-23T23:02:00.807Z'); + expect(vm.$el.querySelector('time').textContent.trim()).toEqual('12 hours ago'); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js index 93bb83ca8bd..13e5595bbfc 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js @@ -1,121 +1,220 @@ import Vue from 'vue'; -import headerComponent from '~/vue_merge_request_widget/components/mr_widget_header'; - -const createComponent = 
(mr) => { - const Component = Vue.extend(headerComponent); - return new Component({ - el: document.createElement('div'), - propsData: { mr }, - }); -}; +import headerComponent from '~/vue_merge_request_widget/components/mr_widget_header.vue'; +import mountComponent from '../../helpers/vue_mount_component_helper'; describe('MRWidgetHeader', () => { - describe('props', () => { - it('should have props', () => { - const { mr } = headerComponent.props; + let vm; + let Component; - expect(mr.type instanceof Object).toBeTruthy(); - expect(mr.required).toBeTruthy(); - }); + beforeEach(() => { + Component = Vue.extend(headerComponent); + }); + + afterEach(() => { + vm.$destroy(); }); describe('computed', () => { - let vm; - beforeEach(() => { - vm = createComponent({ - divergedCommitsCount: 12, - sourceBranch: 'mr-widget-refactor', - sourceBranchLink: '/foo/bar/mr-widget-refactor', - targetBranch: 'master', + describe('shouldShowCommitsBehindText', () => { + it('returns true when there are diverged commits', () => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', + targetBranch: 'master', + } }); + + expect(vm.shouldShowCommitsBehindText).toEqual(true); + }); + + it('returns false when there are no diverged commits', () => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 0, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', + targetBranch: 'master', + } }); + expect(vm.shouldShowCommitsBehindText).toEqual(false); }); }); - it('shouldShowCommitsBehindText', () => { - expect(vm.shouldShowCommitsBehindText).toBeTruthy(); + describe('commitsText', () => { + it('returns singular when there is one commit', () => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 1, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', + targetBranch: 'master', + } }); - expect(vm.commitsText).toEqual('commits'); + expect(vm.commitsText).toEqual('1 commit behind'); + }); - vm.mr.divergedCommitsCount = 1; - expect(vm.commitsText).toEqual('commit'); + it('returns plural when there is more than one commit', () => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 2, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', + targetBranch: 'master', + } }); - vm.mr.divergedCommitsCount = 0; - expect(vm.shouldShowCommitsBehindText).toBeFalsy(); - }); + expect(vm.commitsText).toEqual('2 commits behind'); + }); }); }); describe('template', () => { - let vm; - let el; - let mr; - const sourceBranchPath = '/foo/bar/mr-widget-refactor'; - - beforeEach(() => { - mr = { - divergedCommitsCount: 12, - sourceBranch: 'mr-widget-refactor', - sourceBranchLink: `<a href="${sourceBranchPath}">mr-widget-refactor</a>`, - sourceBranchRemoved: false, - targetBranchPath: 'foo/bar/commits-path', - targetBranchTreePath: 'foo/bar/tree/path', - targetBranch: 'master', - isOpen: true, - emailPatchesPath: '/mr/email-patches', - plainDiffPath: '/mr/plainDiffPath', - }; - - vm = createComponent(mr); - el = vm.$el; + describe('common elements', () => { + beforeEach(() => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved:
false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: true, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + } }); + }); + + it('renders source branch link', () => { + expect( + vm.$el.querySelector('.js-source-branch').innerHTML, + ).toEqual('<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>'); + }); + + it('renders clipboard button', () => { + expect(vm.$el.querySelector('.btn-clipboard')).not.toEqual(null); + }); + + it('renders target branch', () => { + expect(vm.$el.querySelector('.js-target-branch').textContent.trim()).toEqual('master'); + }); }); - it('should render template elements correctly', () => { - expect(el.classList.contains('mr-source-target')).toBeTruthy(); - const sourceBranchLink = el.querySelectorAll('.label-branch')[0]; - const targetBranchLink = el.querySelectorAll('.label-branch')[1]; - const commitsCount = el.querySelector('.diverged-commits-count'); + describe('with an open merge request', () => { + afterEach(() => { + vm.$destroy(); + }); - expect(sourceBranchLink.textContent).toContain(mr.sourceBranch); - expect(targetBranchLink.textContent).toContain(mr.targetBranch); - expect(sourceBranchLink.querySelector('a').getAttribute('href')).toEqual(sourceBranchPath); - expect(targetBranchLink.querySelector('a').getAttribute('href')).toEqual(mr.targetBranchTreePath); - expect(commitsCount.textContent).toContain('12 commits behind'); - expect(commitsCount.querySelector('a').getAttribute('href')).toEqual(mr.targetBranchPath); + beforeEach(() => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: true, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + } }); + }); + + it('renders checkout branch button with modal trigger', () => { + const button = vm.$el.querySelector('.js-check-out-branch'); + + expect(button.textContent.trim()).toEqual('Check out branch'); + expect(button.getAttribute('data-target')).toEqual('#modal_merge_info'); + expect(button.getAttribute('data-toggle')).toEqual('modal'); + }); + + it('renders download dropdown with links', () => { + expect( + vm.$el.querySelector('.js-download-email-patches').textContent.trim(), + ).toEqual('Email patches'); - expect(el.textContent).toContain('Check out branch'); - expect(el.querySelectorAll('.dropdown li a')[0].getAttribute('href')).toEqual(mr.emailPatchesPath); - expect(el.querySelectorAll('.dropdown li a')[1].getAttribute('href')).toEqual(mr.plainDiffPath); + expect( + vm.$el.querySelector('.js-download-email-patches').getAttribute('href'), + ).toEqual('/mr/email-patches'); - expect(el.querySelector('a[href="#modal_merge_info"]').getAttribute('disabled')).toBeNull(); + expect( + vm.$el.querySelector('.js-download-plain-diff').textContent.trim(), + ).toEqual('Plain diff'); + + expect( + vm.$el.querySelector('.js-download-plain-diff').getAttribute('href'), + ).toEqual('/mr/plainDiffPath'); + }); }); - it('should not have right action links if the MR state is not open', (done) => { - vm.mr.isOpen = false; - Vue.nextTick(() => { - expect(el.textContent).not.toContain('Check out branch'); - expect(el.querySelectorAll('.dropdown li a').length).toEqual(0); - done(); + 
describe('with a closed merge request', () => { + beforeEach(() => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: false, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + } }); + }); + + it('does not render checkout branch button with modal trigger', () => { + const button = vm.$el.querySelector('.js-check-out-branch'); + + expect(button).toEqual(null); + }); + + it('does not render download dropdown with links', () => { + expect( + vm.$el.querySelector('.js-download-email-patches'), + ).toEqual(null); + + expect( + vm.$el.querySelector('.js-download-plain-diff'), + ).toEqual(null); }); }); - it('should not render diverged commits count if the MR has no diverged commits', (done) => { - vm.mr.divergedCommitsCount = null; - Vue.nextTick(() => { - expect(el.textContent).not.toContain('commits behind'); - expect(el.querySelectorAll('.diverged-commits-count').length).toEqual(0); - done(); + describe('without diverged commits', () => { + beforeEach(() => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 0, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: true, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + } }); + }); + + it('does not render diverged commits info', () => { + expect(vm.$el.querySelector('.diverged-commits-count')).toEqual(null); }); }); - it('should disable check out branch button if source branch has been removed', (done) => { - vm.mr.sourceBranchRemoved = true; + describe('with diverged commits', () => { + beforeEach(() => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: true, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + } }); + }); - Vue.nextTick() - .then(() => { - expect(el.querySelector('a[href="#modal_merge_info"]').getAttribute('disabled')).toBe('disabled'); - done(); - }) - .catch(done.fail); + it('renders diverged commits info', () => { + expect(vm.$el.querySelector('.diverged-commits-count').textContent.trim()).toEqual('(12 commits behind)'); + }); }); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js index 4da4fc82c26..cc43639f576 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js @@ -1,51 +1,56 @@ import Vue from 'vue'; -import mergeHelpComponent from '~/vue_merge_request_widget/components/mr_widget_merge_help'; - -const props = { - missingBranch: 'this-is-not-the-branch-you-are-looking-for', -}; -const text = `If the ${props.missingBranch} branch exists in your local repository`; - -const createComponent = () => { - 
const Component = Vue.extend(mergeHelpComponent); - return new Component({ - el: document.createElement('div'), - propsData: props, - }); -}; +import mergeHelpComponent from '~/vue_merge_request_widget/components/mr_widget_merge_help.vue'; +import mountComponent from '../../helpers/vue_mount_component_helper'; describe('MRWidgetMergeHelp', () => { - describe('props', () => { - it('should have props', () => { - const { missingBranch } = mergeHelpComponent.props; - const MissingBranchTypeClass = missingBranch.type; - - expect(new MissingBranchTypeClass() instanceof String).toBeTruthy(); - expect(missingBranch.required).toBeFalsy(); - expect(missingBranch.default).toEqual(''); - }); + let vm; + let Component; + + beforeEach(() => { + Component = Vue.extend(mergeHelpComponent); }); - describe('template', () => { - let vm; - let el; + afterEach(() => { + vm.$destroy(); + }); + describe('with missing branch', () => { beforeEach(() => { - vm = createComponent(); - el = vm.$el; + vm = mountComponent(Component, { + missingBranch: 'this-is-not-the-branch-you-are-looking-for', + }); }); - it('should have the correct elements', () => { - expect(el.classList.contains('mr-widget-help')).toBeTruthy(); - expect(el.textContent).toContain(text); + it('renders missing branch information', () => { + expect( + vm.$el.textContent.trim().replace(/[\r\n]+/g, ' ').replace(/\s\s+/g, ' '), + ).toEqual( + 'If the this-is-not-the-branch-you-are-looking-for branch exists in your local repository, you can merge this merge request manually using the command line', + ); }); - it('should not show missing branch name if missingBranch props is not provided', (done) => { - vm.missingBranch = null; - Vue.nextTick(() => { - expect(el.textContent).not.toContain(text); - done(); - }); + it('renders button to open help modal', () => { + expect(vm.$el.querySelector('.js-open-modal-help').getAttribute('data-target')).toEqual('#modal_merge_info'); + expect(vm.$el.querySelector('.js-open-modal-help').getAttribute('data-toggle')).toEqual('modal'); + }); + }); + + describe('without missing branch', () => { + beforeEach(() => { + vm = mountComponent(Component); + }); + + it('renders information about how to merge manually', () => { + expect( + vm.$el.textContent.trim().replace(/[\r\n]+/g, ' ').replace(/\s\s+/g, ' '), + ).toEqual( + 'You can merge this merge request manually using the command line', + ); + }); + + it('renders element to open a modal', () => { + expect(vm.$el.querySelector('.js-open-modal-help').getAttribute('data-target')).toEqual('#modal_merge_info'); + expect(vm.$el.querySelector('.js-open-modal-help').getAttribute('data-toggle')).toEqual('modal'); }); }); }); diff --git a/spec/javascripts/vue_shared/components/confirmation_input_spec.js b/spec/javascripts/vue_shared/components/confirmation_input_spec.js new file mode 100644 index 00000000000..a6a12614e77 --- /dev/null +++ b/spec/javascripts/vue_shared/components/confirmation_input_spec.js @@ -0,0 +1,63 @@ +import Vue from 'vue'; +import confirmationInput from '~/vue_shared/components/confirmation_input.vue'; +import mountComponent from '../../helpers/vue_mount_component_helper'; + +describe('Confirmation input component', () => { + const Component = Vue.extend(confirmationInput); + const props = { + inputId: 'dummy-id', + confirmationKey: 'confirmation-key', + confirmationValue: 'confirmation-value', + }; + let vm; + + afterEach(() => { + vm.$destroy(); + }); + + describe('props', () => { + beforeEach(() => { + vm = mountComponent(Component, props); + }); + + it('sets 
id of the input field to inputId', () => { + expect(vm.$refs.enteredValue.id).toBe(props.inputId); + }); + + it('sets name of the input field to confirmationKey', () => { + expect(vm.$refs.enteredValue.name).toBe(props.confirmationKey); + }); + }); + + describe('computed', () => { + describe('inputLabel', () => { + it('escapes confirmationValue by default', () => { + vm = mountComponent(Component, { ...props, confirmationValue: 'n<e></e>ds escap"ng' }); + expect(vm.inputLabel).toBe('Type <code>n&lt;e&gt;&lt;/e&gt;ds escap&quot;ng</code> to confirm:'); + }); + + it('does not escape confirmationValue if shouldEscapeConfirmationValue is false', () => { + vm = mountComponent(Component, { ...props, confirmationValue: 'n<e></e>ds escap"ng', shouldEscapeConfirmationValue: false }); + expect(vm.inputLabel).toBe('Type <code>n<e></e>ds escap"ng</code> to confirm:'); + }); + }); + }); + + describe('methods', () => { + describe('hasCorrectValue', () => { + beforeEach(() => { + vm = mountComponent(Component, props); + }); + + it('returns false if entered value is incorrect', () => { + vm.$refs.enteredValue.value = 'incorrect'; + expect(vm.hasCorrectValue()).toBe(false); + }); + + it('returns true if entered value is correct', () => { + vm.$refs.enteredValue.value = props.confirmationValue; + expect(vm.hasCorrectValue()).toBe(true); + }); + }); + }); +}); diff --git a/spec/lib/banzai/color_parser_spec.rb b/spec/lib/banzai/color_parser_spec.rb new file mode 100644 index 00000000000..a1cb0c07b06 --- /dev/null +++ b/spec/lib/banzai/color_parser_spec.rb @@ -0,0 +1,90 @@ +require 'spec_helper' + +describe Banzai::ColorParser do + describe '.parse' do + context 'HEX format' do + [ + '#abc', '#ABC', + '#d2d2d2', '#D2D2D2', + '#123a', '#123A', + '#123456aa', '#123456AA' + ].each do |color| + it "parses the valid hex color #{color}" do + expect(subject.parse(color)).to eq(color) + end + end + + [ + '#', '#1', '#12', '#12g', '#12G', + '#12345', '#r2r2r2', '#R2R2R2', '#1234567', + '# 123', '# 1234', '# 123456', '# 12345678', + '#1 2 3', '#123 4', '#12 34 56', '#123456 78' + ].each do |color| + it "does not parse the invalid hex color #{color}" do + expect(subject.parse(color)).to be_nil + end + end + end + + context 'RGB format' do + [ + 'rgb(0,0,0)', 'rgb(255,255,255)', + 'rgb(0, 0, 0)', 'RGB(0,0,0)', + 'rgb(0,0,0,0)', 'rgb(0,0,0,0.0)', 'rgb(0,0,0,.0)', + 'rgb(0,0,0, 0)', 'rgb(0,0,0, 0.0)', 'rgb(0,0,0, .0)', + 'rgb(0,0,0,1)', 'rgb(0,0,0,1.0)', + 'rgba(0,0,0)', 'rgba(0,0,0,0)', 'RGBA(0,0,0)', + 'rgb(0%,0%,0%)', 'rgba(0%,0%,0%,0%)' + ].each do |color| + it "parses the valid rgb color #{color}" do + expect(subject.parse(color)).to eq(color) + end + end + + [ + 'FOOrgb(0,0,0)', 'rgb(0,0,0)BAR', + 'rgb(0,0,-1)', 'rgb(0,0,-0)', 'rgb(0,0,256)', + 'rgb(0,0,0,-0.1)', 'rgb(0,0,0,-0.0)', 'rgb(0,0,0,-.1)', + 'rgb(0,0,0,1.1)', 'rgb(0,0,0,2)', + 'rgba(0,0,0,)', 'rgba(0,0,0,0.)', 'rgba(0,0,0,1.)', + 'rgb(0,0,0%)', 'rgb(101%,0%,0%)' + ].each do |color| + it "does not parse the invalid rgb color #{color}" do + expect(subject.parse(color)).to be_nil + end + end + end + + context 'HSL format' do + [ + 'hsl(0,0%,0%)', 'hsl(0,100%,100%)', + 'hsl(540,0%,0%)', 'hsl(-720,0%,0%)', + 'hsl(0deg,0%,0%)', 'hsl(0DEG,0%,0%)', + 'hsl(0, 0%, 0%)', 'HSL(0,0%,0%)', + 'hsl(0,0%,0%,0)', 'hsl(0,0%,0%,0.0)', 'hsl(0,0%,0%,.0)', + 'hsl(0,0%,0%, 0)', 'hsl(0,0%,0%, 0.0)', 'hsl(0,0%,0%, .0)', + 'hsl(0,0%,0%,1)', 'hsl(0,0%,0%,1.0)', + 'hsla(0,0%,0%)', 'hsla(0,0%,0%,0)', 'HSLA(0,0%,0%)', + 'hsl(1rad,0%,0%)', 'hsl(1.1rad,0%,0%)', 'hsl(.1rad,0%,0%)', + 'hsl(-1rad,0%,0%)', 'hsl(1RAD,0%,0%)'
].each do |color| + it "parses the valid hsl color #{color}" do + expect(subject.parse(color)).to eq(color) + end + end + + [ + 'hsl(+0,0%,0%)', 'hsl(0,0,0%)', 'hsl(0,0%,0)', 'hsl(0 deg,0%,0%)', + 'hsl(0,-0%,0%)', 'hsl(0,101%,0%)', 'hsl(0,-1%,0%)', + 'hsl(0,0%,0%,-0.1)', 'hsl(0,0%,0%,-.1)', + 'hsl(0,0%,0%,1.1)', 'hsl(0,0%,0%,2)', + 'hsl(0,0%,0%,)', 'hsl(0,0%,0%,0.)', 'hsl(0,0%,0%,1.)', + 'hsl(deg,0%,0%)', 'hsl(rad,0%,0%)' + ].each do |color| + it "does not parse the invalid hsl color #{color}" do + expect(subject.parse(color)).to be_nil + end + end + end + end +end diff --git a/spec/lib/banzai/filter/color_filter_spec.rb b/spec/lib/banzai/filter/color_filter_spec.rb new file mode 100644 index 00000000000..a098b037510 --- /dev/null +++ b/spec/lib/banzai/filter/color_filter_spec.rb @@ -0,0 +1,61 @@ +require 'spec_helper' + +describe Banzai::Filter::ColorFilter, lib: true do + include FilterSpecHelper + + let(:color) { '#F00' } + let(:color_chip_selector) { 'code > span.gfm-color_chip > span' } + + ['#123', '#1234', '#123456', '#12345678', + 'rgb(0,0,0)', 'RGB(0, 0, 0)', 'rgba(0,0,0,1)', 'RGBA(0,0,0,0.7)', + 'hsl(270,30%,50%)', 'HSLA(270, 30%, 50%, .7)'].each do |color| + it "inserts color chip for supported color format #{color}" do + content = code_tag(color) + doc = filter(content) + color_chip = doc.at_css(color_chip_selector) + + expect(color_chip.content).to be_empty + expect(color_chip.parent[:class]).to eq 'gfm-color_chip' + expect(color_chip[:style]).to eq "background-color: #{color};" + end + end + + it 'ignores valid color code without backticks(code tags)' do + doc = filter(color) + + expect(doc.css('span.gfm-color_chip').size).to be_zero + end + + it 'ignores valid color code with prepended space' do + content = code_tag(' ' + color) + doc = filter(content) + + expect(doc.css(color_chip_selector).size).to be_zero + end + + it 'ignores valid color code with appended space' do + content = code_tag(color + ' ') + doc = filter(content) + + expect(doc.css(color_chip_selector).size).to be_zero + end + + it 'ignores valid color code surrounded by spaces' do + content = code_tag(' ' + color + ' ') + doc = filter(content) + + expect(doc.css(color_chip_selector).size).to be_zero + end + + it 'ignores invalid color code' do + invalid_color = '#BAR' + content = code_tag(invalid_color) + doc = filter(content) + + expect(doc.css(color_chip_selector).size).to be_zero + end + + def code_tag(string) + "<code>#{string}</code>" + end +end diff --git a/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb b/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb index 935146c17fc..a41a28a56f1 100644 --- a/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb @@ -53,7 +53,7 @@ describe Banzai::Filter::CommitRangeReferenceFilter do doc = reference_filter("See (#{reference}.)") exp = Regexp.escape(range.reference_link_text) - expect(doc.to_html).to match(/\(<a.+>#{exp}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{exp}</a>\.\)}) end it 'ignores invalid commit IDs' do @@ -222,7 +222,7 @@ describe Banzai::Filter::CommitRangeReferenceFilter do doc = reference_filter("Fixed (#{reference}.)") exp = Regexp.escape(range.reference_link_text(project)) - expect(doc.to_html).to match(/\(<a.+>#{exp}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{exp}</a>\.\)}) end it 'ignores invalid commit IDs on the referenced project' do diff --git a/spec/lib/banzai/filter/commit_reference_filter_spec.rb 
b/spec/lib/banzai/filter/commit_reference_filter_spec.rb index 080a5f57da9..35f8792ff35 100644 --- a/spec/lib/banzai/filter/commit_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/commit_reference_filter_spec.rb @@ -42,7 +42,7 @@ describe Banzai::Filter::CommitReferenceFilter do it 'links with adjacent text' do doc = reference_filter("See (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{commit.short_id}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{commit.short_id}</a>\.\)}) end it 'ignores invalid commit IDs' do @@ -199,12 +199,12 @@ describe Banzai::Filter::CommitReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Fixed (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{commit.reference_link_text(project)}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{commit.reference_link_text(project)}</a>\.\)}) end it 'ignores invalid commit IDs on the referenced project' do act = "Committed #{invalidate_reference(reference)}" - expect(reference_filter(act).to_html).to match(/<a.+>#{Regexp.escape(invalidate_reference(reference))}<\/a>/) + expect(reference_filter(act).to_html).to match(%r{<a.+>#{Regexp.escape(invalidate_reference(reference))}</a>}) end end end diff --git a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb index a0d391d981c..d9018a7e4fe 100644 --- a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb @@ -49,7 +49,7 @@ describe Banzai::Filter::ExternalIssueReferenceFilter do it 'links with adjacent text' do doc = filter("Issue (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{reference}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{reference}</a>\.\)}) end it 'includes a title attribute' do diff --git a/spec/lib/banzai/filter/image_link_filter_spec.rb b/spec/lib/banzai/filter/image_link_filter_spec.rb index 51920869545..c84b98eb225 100644 --- a/spec/lib/banzai/filter/image_link_filter_spec.rb +++ b/spec/lib/banzai/filter/image_link_filter_spec.rb @@ -14,7 +14,7 @@ describe Banzai::Filter::ImageLinkFilter do it 'does not wrap a duplicate link' do doc = filter(%Q(<a href="/whatever">#{image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg')}</a>)) - expect(doc.to_html).to match /^<a href="\/whatever"><img[^>]*><\/a>$/ + expect(doc.to_html).to match %r{^<a href="/whatever"><img[^>]*></a>$} end it 'works with external images' do @@ -24,6 +24,6 @@ describe Banzai::Filter::ImageLinkFilter do it 'works with inline images' do doc = filter(%Q(<p>test #{image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg')} inline</p>)) - expect(doc.to_html).to match /^<p>test <a[^>]*><img[^>]*><\/a> inline<\/p>$/ + expect(doc.to_html).to match %r{^<p>test <a[^>]*><img[^>]*></a> inline</p>$} end end diff --git a/spec/lib/banzai/filter/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/issue_reference_filter_spec.rb index 3a5f52ea23f..905fbb9434b 100644 --- a/spec/lib/banzai/filter/issue_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/issue_reference_filter_spec.rb @@ -288,7 +288,7 @@ describe Banzai::Filter::IssueReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Fixed (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{Regexp.escape(issue.to_reference(project))} \(comment 123\)<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{Regexp.escape(issue.to_reference(project))} \(comment 123\)</a>\.\)}) end it 'includes default 
classes' do @@ -317,7 +317,7 @@ describe Banzai::Filter::IssueReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Fixed (#{reference_link}.)") - expect(doc.to_html).to match(/\(<a.+>Reference<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>Reference</a>\.\)}) end it 'includes default classes' do @@ -346,7 +346,7 @@ describe Banzai::Filter::IssueReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Fixed (#{reference_link}.)") - expect(doc.to_html).to match(/\(<a.+>Reference<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>Reference</a>\.\)}) end it 'includes default classes' do diff --git a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb index 158844e25ae..eeb82822f68 100644 --- a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb @@ -42,7 +42,7 @@ describe Banzai::Filter::MergeRequestReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Merge (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{Regexp.escape(reference)}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{Regexp.escape(reference)}</a>\.\)}) end it 'ignores invalid merge IDs' do @@ -211,7 +211,7 @@ describe Banzai::Filter::MergeRequestReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Merge (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{Regexp.escape(merge.to_reference(project))} \(diffs, comment 123\)<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{Regexp.escape(merge.to_reference(project))} \(diffs, comment 123\)</a>\.\)}) end end diff --git a/spec/lib/banzai/filter/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/snippet_reference_filter_spec.rb index 3a07a6dc179..e068e02d4fc 100644 --- a/spec/lib/banzai/filter/snippet_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/snippet_reference_filter_spec.rb @@ -28,7 +28,7 @@ describe Banzai::Filter::SnippetReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Snippet (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{Regexp.escape(reference)}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{Regexp.escape(reference)}</a>\.\)}) end it 'ignores invalid snippet IDs' do @@ -192,13 +192,13 @@ describe Banzai::Filter::SnippetReferenceFilter do it 'links with adjacent text' do doc = reference_filter("See (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{Regexp.escape(snippet.to_reference(project))}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{Regexp.escape(snippet.to_reference(project))}</a>\.\)}) end it 'ignores invalid snippet IDs on the referenced project' do act = "See #{invalidate_reference(reference)}" - expect(reference_filter(act).to_html).to match(/<a.+>#{Regexp.escape(invalidate_reference(reference))}<\/a>/) + expect(reference_filter(act).to_html).to match(%r{<a.+>#{Regexp.escape(invalidate_reference(reference))}</a>}) end end diff --git a/spec/lib/banzai/filter/user_reference_filter_spec.rb b/spec/lib/banzai/filter/user_reference_filter_spec.rb index c76adc262fc..2f86a046d28 100644 --- a/spec/lib/banzai/filter/user_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/user_reference_filter_spec.rb @@ -146,7 +146,7 @@ describe Banzai::Filter::UserReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Mention me (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{reference}<\/a>\.\)/) + 
expect(doc.to_html).to match(%r{\(<a.+>#{reference}</a>\.\)}) end it 'includes default classes' do @@ -172,7 +172,7 @@ describe Banzai::Filter::UserReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Mention me (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>User<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>User</a>\.\)}) end it 'includes a data-user attribute' do diff --git a/spec/lib/file_size_validator_spec.rb b/spec/lib/file_size_validator_spec.rb index c44bc1840df..ebd907ecb7f 100644 --- a/spec/lib/file_size_validator_spec.rb +++ b/spec/lib/file_size_validator_spec.rb @@ -2,8 +2,8 @@ require 'spec_helper' describe FileSizeValidator do let(:validator) { described_class.new(options) } - let(:attachment) { AttachmentUploader.new } let(:note) { create(:note) } + let(:attachment) { AttachmentUploader.new(note) } describe 'options uses an integer' do let(:options) { { maximum: 10, attributes: { attachment: attachment } } } diff --git a/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb index 4cdb679c97f..2b69e718e08 100644 --- a/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb @@ -8,7 +8,7 @@ describe Gitlab::BackgroundMigration::PopulateMergeRequestMetricsWithEventsData, end after do - [MergeRequest, MergeRequestDiff].each(&:reset_column_information) + [Project, MergeRequest, MergeRequestDiff].each(&:reset_column_information) end describe '#perform' do diff --git a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb index 8bb9ebe0419..370c2490b97 100644 --- a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb +++ b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb @@ -23,6 +23,27 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do end end + # E.g. The installation is in use at the time of migration, and someone has + # just uploaded a file + shared_examples 'does not add files in /uploads/tmp' do + let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') } + + before do + FileUtils.mkdir(File.dirname(tmp_file)) + FileUtils.touch(tmp_file) + end + + after do + FileUtils.rm(tmp_file) + end + + it 'does not add files from /uploads/tmp' do + described_class.new.perform + + expect(untracked_files_for_uploads.count).to eq(5) + end + end + it 'ensures the untracked_files_for_uploads table exists' do expect do described_class.new.perform @@ -109,24 +130,8 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do end end - # E.g. The installation is in use at the time of migration, and someone has - # just uploaded a file context 'when there are files in /uploads/tmp' do - let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') } - - before do - FileUtils.touch(tmp_file) - end - - after do - FileUtils.rm(tmp_file) - end - - it 'does not add files from /uploads/tmp' do - described_class.new.perform - - expect(untracked_files_for_uploads.count).to eq(5) - end + it_behaves_like 'does not add files in /uploads/tmp' end end end @@ -197,24 +202,8 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do end end - # E.g. 
The installation is in use at the time of migration, and someone has - # just uploaded a file context 'when there are files in /uploads/tmp' do - let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') } - - before do - FileUtils.touch(tmp_file) - end - - after do - FileUtils.rm(tmp_file) - end - - it 'does not add files from /uploads/tmp' do - described_class.new.perform - - expect(untracked_files_for_uploads.count).to eq(5) - end + it_behaves_like 'does not add files in /uploads/tmp' end end end diff --git a/spec/lib/gitlab/badge/coverage/template_spec.rb b/spec/lib/gitlab/badge/coverage/template_spec.rb index 383bae6e087..d9c21a22590 100644 --- a/spec/lib/gitlab/badge/coverage/template_spec.rb +++ b/spec/lib/gitlab/badge/coverage/template_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe Gitlab::Badge::Coverage::Template do - let(:badge) { double(entity: 'coverage', status: 90) } + let(:badge) { double(entity: 'coverage', status: 90.00) } let(:template) { described_class.new(badge) } describe '#key_text' do @@ -13,7 +13,17 @@ describe Gitlab::Badge::Coverage::Template do describe '#value_text' do context 'when coverage is known' do it 'returns coverage percentage' do - expect(template.value_text).to eq '90%' + expect(template.value_text).to eq '90.00%' + end + end + + context 'when coverage is known to many digits' do + before do + allow(badge).to receive(:status).and_return(92.349) + end + + it 'returns rounded coverage percentage' do + expect(template.value_text).to eq '92.35%' end end @@ -37,7 +47,7 @@ describe Gitlab::Badge::Coverage::Template do describe '#value_width' do context 'when coverage is known' do it 'is narrower when coverage is known' do - expect(template.value_width).to eq 36 + expect(template.value_width).to eq 54 end end @@ -113,7 +123,7 @@ describe Gitlab::Badge::Coverage::Template do describe '#width' do context 'when coverage is known' do it 'returns the key width plus value width' do - expect(template.width).to eq 98 + expect(template.width).to eq 116 end end diff --git a/spec/lib/gitlab/checks/force_push_spec.rb b/spec/lib/gitlab/checks/force_push_spec.rb index 633e319f46d..a65012d2314 100644 --- a/spec/lib/gitlab/checks/force_push_spec.rb +++ b/spec/lib/gitlab/checks/force_push_spec.rb @@ -2,18 +2,20 @@ require 'spec_helper' describe Gitlab::Checks::ForcePush do let(:project) { create(:project, :repository) } + let(:repository) { project.repository.raw } context "exit code checking", :skip_gitaly_mock do it "does not raise a runtime error if the `popen` call to git returns a zero exit code" do - allow_any_instance_of(Gitlab::Git::RevList).to receive(:popen).and_return(['normal output', 0]) + allow(repository).to receive(:popen).and_return(['normal output', 0]) expect { described_class.force_push?(project, 'oldrev', 'newrev') }.not_to raise_error end - it "raises a runtime error if the `popen` call to git returns a non-zero exit code" do - allow_any_instance_of(Gitlab::Git::RevList).to receive(:popen).and_return(['error', 1]) + it "raises a GitError error if the `popen` call to git returns a non-zero exit code" do + allow(repository).to receive(:popen).and_return(['error', 1]) - expect { described_class.force_push?(project, 'oldrev', 'newrev') }.to raise_error(RuntimeError) + expect { described_class.force_push?(project, 'oldrev', 'newrev') } + .to raise_error(Gitlab::Git::Repository::GitError) end end end diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb index 3546532b9b4..91c9625ba06 
100644 --- a/spec/lib/gitlab/ci/trace_spec.rb +++ b/spec/lib/gitlab/ci/trace_spec.rb @@ -238,11 +238,98 @@ describe Gitlab::Ci::Trace do end end + describe '#read' do + shared_examples 'read successfully with IO' do + it 'yields with source' do + trace.read do |stream| + expect(stream).to be_a(Gitlab::Ci::Trace::Stream) + expect(stream.stream).to be_a(IO) + end + end + end + + shared_examples 'read successfully with StringIO' do + it 'yields with source' do + trace.read do |stream| + expect(stream).to be_a(Gitlab::Ci::Trace::Stream) + expect(stream.stream).to be_a(StringIO) + end + end + end + + shared_examples 'failed to read' do + it 'yields without source' do + trace.read do |stream| + expect(stream).to be_a(Gitlab::Ci::Trace::Stream) + expect(stream.stream).to be_nil + end + end + end + + context 'when trace artifact exists' do + before do + create(:ci_job_artifact, :trace, job: build) + end + + it_behaves_like 'read successfully with IO' + end + + context 'when current_path (with project_id) exists' do + before do + expect(trace).to receive(:default_path) { expand_fixture_path('trace/sample_trace') } + end + + it_behaves_like 'read successfully with IO' + end + + context 'when current_path (with project_ci_id) exists' do + before do + expect(trace).to receive(:deprecated_path) { expand_fixture_path('trace/sample_trace') } + end + + it_behaves_like 'read successfully with IO' + end + + context 'when db trace exists' do + before do + build.send(:write_attribute, :trace, "data") + end + + it_behaves_like 'read successfully with StringIO' + end + + context 'when no sources exist' do + it_behaves_like 'failed to read' + end + end + describe 'trace handling' do + subject { trace.exist? } + context 'trace does not exist' do it { expect(trace.exist?).to be(false) } end + context 'when trace artifact exists' do + before do + create(:ci_job_artifact, :trace, job: build) + end + + it { is_expected.to be_truthy } + + context 'when the trace artifact has been erased' do + before do + trace.erase! 
+ end + + it { is_expected.to be_falsy } + + it 'removes associations' do + expect(Ci::JobArtifact.exists?(job_id: build.id, file_type: :trace)).to be_falsy + end + end + end + context 'new trace path is used' do before do trace.send(:ensure_directory) diff --git a/spec/lib/gitlab/closing_issue_extractor_spec.rb b/spec/lib/gitlab/closing_issue_extractor_spec.rb index 8c79ef54c6c..28c679af12a 100644 --- a/spec/lib/gitlab/closing_issue_extractor_spec.rb +++ b/spec/lib/gitlab/closing_issue_extractor_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe Gitlab::ClosingIssueExtractor do let(:project) { create(:project) } let(:project2) { create(:project) } - let(:forked_project) { Projects::ForkService.new(project, project.creator).execute } + let(:forked_project) { Projects::ForkService.new(project, project2.creator).execute } let(:issue) { create(:issue, project: project) } let(:issue2) { create(:issue, project: project2) } let(:reference) { issue.to_reference } @@ -14,6 +14,7 @@ describe Gitlab::ClosingIssueExtractor do before do project.add_developer(project.creator) + project.add_developer(project2.creator) project2.add_master(project.creator) end diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb index 492659a82b0..4ddcbd7eb66 100644 --- a/spec/lib/gitlab/current_settings_spec.rb +++ b/spec/lib/gitlab/current_settings_spec.rb @@ -8,22 +8,37 @@ describe Gitlab::CurrentSettings do end describe '#current_application_settings' do + it 'allows keys to be called directly' do + db_settings = create(:application_setting, + home_page_url: 'http://mydomain.com', + signup_enabled: false) + + expect(described_class.home_page_url).to eq(db_settings.home_page_url) + expect(described_class.signup_enabled?).to be_falsey + expect(described_class.signup_enabled).to be_falsey + expect(described_class.metrics_sample_interval).to be(15) + end + context 'with DB available' do before do - allow_any_instance_of(described_class).to receive(:connect_to_db?).and_return(true) + # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(true)` causes issues + # during the initialization phase of the test suite, so instead let's mock the internals of it + allow(ActiveRecord::Base.connection).to receive(:active?).and_return(true) + allow(ActiveRecord::Base.connection).to receive(:table_exists?).and_call_original + allow(ActiveRecord::Base.connection).to receive(:table_exists?).with('application_settings').and_return(true) end it 'attempts to use cached values first' do expect(ApplicationSetting).to receive(:cached) - expect(current_application_settings).to be_a(ApplicationSetting) + expect(described_class.current_application_settings).to be_a(ApplicationSetting) end it 'falls back to DB if Redis returns an empty value' do expect(ApplicationSetting).to receive(:cached).and_return(nil) expect(ApplicationSetting).to receive(:last).and_call_original.twice - expect(current_application_settings).to be_a(ApplicationSetting) + expect(described_class.current_application_settings).to be_a(ApplicationSetting) end it 'falls back to DB if Redis fails' do @@ -32,14 +47,14 @@ describe Gitlab::CurrentSettings do expect(ApplicationSetting).to receive(:cached).and_raise(::Redis::BaseError) expect(Rails.cache).to receive(:fetch).with(ApplicationSetting::CACHE_KEY).and_raise(Redis::BaseError) - expect(current_application_settings).to eq(db_settings) + expect(described_class.current_application_settings).to eq(db_settings) end it 'creates default ApplicationSettings if 
none are present' do expect(ApplicationSetting).to receive(:cached).and_raise(::Redis::BaseError) expect(Rails.cache).to receive(:fetch).with(ApplicationSetting::CACHE_KEY).and_raise(Redis::BaseError) - settings = current_application_settings + settings = described_class.current_application_settings expect(settings).to be_a(ApplicationSetting) expect(settings).to be_persisted @@ -52,7 +67,7 @@ describe Gitlab::CurrentSettings do end it 'returns an in-memory ApplicationSetting object' do - settings = current_application_settings + settings = described_class.current_application_settings expect(settings).to be_a(OpenStruct) expect(settings.sign_in_enabled?).to eq(settings.sign_in_enabled) @@ -63,7 +78,7 @@ describe Gitlab::CurrentSettings do db_settings = create(:application_setting, home_page_url: 'http://mydomain.com', signup_enabled: false) - settings = current_application_settings + settings = described_class.current_application_settings app_defaults = ApplicationSetting.last expect(settings).to be_a(OpenStruct) @@ -80,15 +95,16 @@ describe Gitlab::CurrentSettings do context 'with DB unavailable' do before do - allow_any_instance_of(described_class).to receive(:connect_to_db?).and_return(false) - allow_any_instance_of(described_class).to receive(:retrieve_settings_from_database_cache?).and_return(nil) + # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(false)` causes issues + # during the initialization phase of the test suite, so instead let's mock the internals of it + allow(ActiveRecord::Base.connection).to receive(:active?).and_return(false) end it 'returns an in-memory ApplicationSetting object' do expect(ApplicationSetting).not_to receive(:current) expect(ApplicationSetting).not_to receive(:last) - expect(current_application_settings).to be_a(OpenStruct) + expect(described_class.current_application_settings).to be_a(OpenStruct) end end @@ -101,8 +117,8 @@ describe Gitlab::CurrentSettings do expect(ApplicationSetting).not_to receive(:current) expect(ApplicationSetting).not_to receive(:last) - expect(current_application_settings).to be_a(ApplicationSetting) - expect(current_application_settings).not_to be_persisted + expect(described_class.current_application_settings).to be_a(ApplicationSetting) + expect(described_class.current_application_settings).not_to be_persisted end end end diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb index 39e3b875c49..326ed2f2ecf 100644 --- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb +++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb @@ -17,7 +17,7 @@ describe Gitlab::Gfm::UploadsRewriter do end let(:text) do - "Text and #{image_uploader.to_markdown} and #{zip_uploader.to_markdown}" + "Text and #{image_uploader.markdown_link} and #{zip_uploader.markdown_link}" end describe '#rewrite' do diff --git a/spec/lib/gitlab/git/lfs_pointer_file_spec.rb b/spec/lib/gitlab/git/lfs_pointer_file_spec.rb new file mode 100644 index 00000000000..d7f76737f3f --- /dev/null +++ b/spec/lib/gitlab/git/lfs_pointer_file_spec.rb @@ -0,0 +1,37 @@ +require 'spec_helper' + +describe Gitlab::Git::LfsPointerFile do + let(:data) { "1234\n" } + + subject { described_class.new(data) } + + describe '#size' do + it 'counts the bytes' do + expect(subject.size).to eq 5 + end + + it 'handles non ascii data' do + expect(described_class.new("ääää").size).to eq 8 + end + end + + describe '#sha256' do + it 'hashes the content correctly' do + expect(subject.sha256).to eq 
'a883dafc480d466ee04e0d6da986bd78eb1fdd2178d04693723da3a8f95d42f4' + end + end + + describe '#pointer' do + it 'starts with the LFS version' do + expect(subject.pointer).to start_with('version https://git-lfs.github.com/spec/v1') + end + + it 'includes sha256' do + expect(subject.pointer).to match(/^oid sha256:[0-9a-fA-F]{64}/) + end + + it 'ends with the size' do + expect(subject.pointer).to end_with("\nsize 5\n") + end + end +end diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index bf01e6ef8e8..ec1c7a96f92 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -20,6 +20,7 @@ describe Gitlab::Git::Repository, seed_helper: true do let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') } let(:storage_path) { TestEnv.repos_path } + let(:user) { build(:user) } describe '.create_hooks' do let(:repo_path) { File.join(storage_path, 'hook-test.git') } @@ -249,7 +250,7 @@ describe Gitlab::Git::Repository, seed_helper: true do end shared_examples 'archive check' do |extenstion| - it { expect(metadata['ArchivePath']).to match(/tmp\/gitlab-git-test.git\/gitlab-git-test-master-#{SeedRepo::LastCommit::ID}/) } + it { expect(metadata['ArchivePath']).to match(%r{tmp/gitlab-git-test.git/gitlab-git-test-master-#{SeedRepo::LastCommit::ID}}) } it { expect(metadata['ArchivePath']).to end_with extenstion } end @@ -693,7 +694,6 @@ describe Gitlab::Git::Repository, seed_helper: true do describe '#remote_tags' do let(:remote_name) { 'upstream' } let(:target_commit_id) { SeedRepo::Commit::ID } - let(:user) { create(:user) } let(:tag_name) { 'v0.0.1' } let(:tag_message) { 'My tag' } let(:remote_repository) do @@ -905,44 +905,6 @@ describe Gitlab::Git::Repository, seed_helper: true do end end - context "compare results between log_by_walk and log_by_shell" do - let(:options) { { ref: "master" } } - let(:commits_by_walk) { repository.log(options).map(&:id) } - let(:commits_by_shell) { repository.log(options.merge({ disable_walk: true })).map(&:id) } - - it { expect(commits_by_walk).to eq(commits_by_shell) } - - context "with limit" do - let(:options) { { ref: "master", limit: 1 } } - - it { expect(commits_by_walk).to eq(commits_by_shell) } - end - - context "with offset" do - let(:options) { { ref: "master", offset: 1 } } - - it { expect(commits_by_walk).to eq(commits_by_shell) } - end - - context "with skip_merges" do - let(:options) { { ref: "master", skip_merges: true } } - - it { expect(commits_by_walk).to eq(commits_by_shell) } - end - - context "with path" do - let(:options) { { ref: "master", path: "encoding" } } - - it { expect(commits_by_walk).to eq(commits_by_shell) } - - context "with follow" do - let(:options) { { ref: "master", path: "encoding", follow: true } } - - it { expect(commits_by_walk).to eq(commits_by_shell) } - end - end - end - context "where provides 'after' timestamp" do options = { after: Time.iso8601('2014-03-03T20:15:01+00:00') } @@ -1200,14 +1162,27 @@ describe Gitlab::Git::Repository, seed_helper: true do context 'when Gitaly find_branch feature is disabled', :skip_gitaly_mock do it_behaves_like 'finding a branch' - it 'should reload Rugged::Repository and return master' do - expect(Rugged::Repository).to receive(:new).twice.and_call_original + context 'force_reload is true' do + it 'should reload Rugged::Repository' do + expect(Rugged::Repository).to receive(:new).twice.and_call_original - repository.find_branch('master') - branch = repository.find_branch('master', 
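Taken together, the three #pointer assertions above describe the full pointer produced for "1234\n":

    version https://git-lfs.github.com/spec/v1
    oid sha256:a883dafc480d466ee04e0d6da986bd78eb1fdd2178d04693723da3a8f95d42f4
    size 5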
force_reload: true) + repository.find_branch('master') + branch = repository.find_branch('master', force_reload: true) - expect(branch).to be_a_kind_of(Gitlab::Git::Branch) - expect(branch.name).to eq('master') + expect(branch).to be_a_kind_of(Gitlab::Git::Branch) + expect(branch.name).to eq('master') + end + end + + context 'force_reload is false' do + it 'should not reload Rugged::Repository' do + expect(Rugged::Repository).to receive(:new).once.and_call_original + + branch = repository.find_branch('master', force_reload: false) + + expect(branch).to be_a_kind_of(Gitlab::Git::Branch) + expect(branch.name).to eq('master') + end end end end @@ -1749,7 +1724,6 @@ describe Gitlab::Git::Repository, seed_helper: true do shared_examples "user deleting a branch" do let(:project) { create(:project, :repository) } let(:repository) { project.repository.raw } - let(:user) { create(:user) } let(:branch_name) { "to-be-deleted-soon" } before do @@ -1790,12 +1764,49 @@ describe Gitlab::Git::Repository, seed_helper: true do end end + describe '#write_config' do + before do + repository.rugged.config["gitlab.fullpath"] = repository.path + end + + shared_examples 'writing repo config' do + context 'is given a path' do + it 'writes it to disk' do + repository.write_config(full_path: "not-the/real-path.git") + + config = File.read(File.join(repository.path, "config")) + + expect(config).to include("[gitlab]") + expect(config).to include("fullpath = not-the/real-path.git") + end + end + + context 'it is given an empty path' do + it 'does not write it to disk' do + repository.write_config(full_path: "") + + config = File.read(File.join(repository.path, "config")) + + expect(config).to include("[gitlab]") + expect(config).to include("fullpath = #{repository.path}") + end + end + end + + context "when gitaly_write_config is enabled" do + it_behaves_like "writing repo config" + end + + context "when gitaly_write_config is disabled", :disable_gitaly do + it_behaves_like "writing repo config" + end + end + describe '#merge' do let(:repository) do Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') end let(:source_sha) { '913c66a37b4a45b9769037c55c2d238bd0942d2e' } - let(:user) { build(:user) } let(:target_branch) { 'test-merge-target-branch' } before do @@ -1848,7 +1859,6 @@ describe Gitlab::Git::Repository, seed_helper: true do end let(:branch_head) { '6d394385cf567f80a8fd85055db1ab4c5295806f' } let(:source_sha) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' } - let(:user) { build(:user) } let(:target_branch) { 'test-ff-target-branch' } before do @@ -2167,6 +2177,47 @@ describe Gitlab::Git::Repository, seed_helper: true do expect { subject }.to raise_error(Gitlab::Git::CommandError, 'error') end end + + describe '#squash' do + let(:squash_id) { '1' } + let(:branch_name) { 'fix' } + let(:start_sha) { '4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6' } + let(:end_sha) { '12d65c8dd2b2676fa3ac47d955accc085a37a9c1' } + + subject do + opts = { + branch: branch_name, + start_sha: start_sha, + end_sha: end_sha, + author: user, + message: 'Squash commit message' + } + + repository.squash(user, squash_id, opts) + end + + context 'sparse checkout', :skip_gitaly_mock do + let(:expected_files) { %w(files files/js files/js/application.js) } + + before do + allow(repository).to receive(:with_worktree).and_wrap_original do |m, *args| + m.call(*args) do + worktree_path = args[0] + files_pattern = File.join(worktree_path, '**', '*') + expected = expected_files.map do |path| + File.expand_path(path, worktree_path) + end + + 
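The #write_config examples above assert on the raw config file, so after repository.write_config(full_path: "not-the/real-path.git") the repository's config file contains a section like:

    [gitlab]
    	fullpath = not-the/real-path.git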
expect(Dir[files_pattern]).to eq(expected) + end + end + end + + it 'checkouts only the files in the diff' do + subject + end + end + end end def create_remote_branch(repository, remote_name, branch_name, source_branch_name) diff --git a/spec/lib/gitlab/git/rev_list_spec.rb b/spec/lib/gitlab/git/rev_list_spec.rb index 90fbef9d248..4e0ee206219 100644 --- a/spec/lib/gitlab/git/rev_list_spec.rb +++ b/spec/lib/gitlab/git/rev_list_spec.rb @@ -1,51 +1,42 @@ require 'spec_helper' describe Gitlab::Git::RevList do - let(:project) { create(:project, :repository) } - let(:rev_list) { described_class.new(newrev: 'newrev', path_to_repo: project.repository.path_to_repo) } + let(:repository) { create(:project, :repository).repository.raw } + let(:rev_list) { described_class.new(repository, newrev: 'newrev') } let(:env_hash) do { 'GIT_OBJECT_DIRECTORY' => 'foo', 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar' } end + let(:command_env) { { 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'foo:bar' } } before do - allow(Gitlab::Git::Env).to receive(:all).and_return(env_hash.symbolize_keys) + allow(Gitlab::Git::Env).to receive(:all).and_return(env_hash) end def args_for_popen(args_list) - [ - Gitlab.config.git.bin_path, - "--git-dir=#{project.repository.path_to_repo}", - 'rev-list', - *args_list - ] - end - - def stub_popen_rev_list(*additional_args, output:) - args = args_for_popen(additional_args) - - expect(rev_list).to receive(:popen).with(args, nil, env_hash) - .and_return([output, 0]) + [Gitlab.config.git.bin_path, 'rev-list', *args_list] end - def stub_lazy_popen_rev_list(*additional_args, output:) + def stub_popen_rev_list(*additional_args, with_lazy_block: true, output:) params = [ args_for_popen(additional_args), - nil, - env_hash, - hash_including(lazy_block: anything) + repository.path, + command_env, + hash_including(lazy_block: with_lazy_block ? 
anything : nil) ] - expect(rev_list).to receive(:popen).with(*params) do |*_, lazy_block:| - lazy_block.call(output.lines.lazy.map(&:chomp)) + expect(repository).to receive(:popen).with(*params) do |*_, lazy_block:| + output = lazy_block.call(output.lines.lazy.map(&:chomp)) if with_lazy_block + + [output, 0] end end context "#new_refs" do it 'calls out to `popen`' do - stub_popen_rev_list('newrev', '--not', '--all', output: "sha1\nsha2") + stub_popen_rev_list('newrev', '--not', '--all', with_lazy_block: false, output: "sha1\nsha2") expect(rev_list.new_refs).to eq(%w[sha1 sha2]) end @@ -55,18 +46,18 @@ describe Gitlab::Git::RevList do it 'fetches list of newly pushed objects using rev-list' do stub_popen_rev_list('newrev', '--not', '--all', '--objects', output: "sha1\nsha2") - expect(rev_list.new_objects).to eq(%w[sha1 sha2]) + expect { |b| rev_list.new_objects(&b) }.to yield_with_args(%w[sha1 sha2]) end it 'can skip pathless objects' do stub_popen_rev_list('newrev', '--not', '--all', '--objects', output: "sha1\nsha2 path/to/file") - expect(rev_list.new_objects(require_path: true)).to eq(%w[sha2]) + expect { |b| rev_list.new_objects(require_path: true, &b) }.to yield_with_args(%w[sha2]) end it 'can handle non utf-8 paths' do non_utf_char = [0x89].pack("c*").force_encoding("UTF-8") - stub_lazy_popen_rev_list('newrev', '--not', '--all', '--objects', output: "sha2 πå†h/†ø/ƒîlé#{non_utf_char}\nsha1") + stub_popen_rev_list('newrev', '--not', '--all', '--objects', output: "sha2 πå†h/†ø/ƒîlé#{non_utf_char}\nsha1") rev_list.new_objects(require_path: true) do |object_ids| expect(object_ids.force).to eq(%w[sha2]) @@ -74,7 +65,7 @@ describe Gitlab::Git::RevList do end it 'can yield a lazy enumerator' do - stub_lazy_popen_rev_list('newrev', '--not', '--all', '--objects', output: "sha1\nsha2") + stub_popen_rev_list('newrev', '--not', '--all', '--objects', output: "sha1\nsha2") rev_list.new_objects do |object_ids| expect(object_ids).to be_a Enumerator::Lazy @@ -82,7 +73,7 @@ describe Gitlab::Git::RevList do end it 'returns the result of the block when given' do - stub_lazy_popen_rev_list('newrev', '--not', '--all', '--objects', output: "sha1\nsha2") + stub_popen_rev_list('newrev', '--not', '--all', '--objects', output: "sha1\nsha2") objects = rev_list.new_objects do |object_ids| object_ids.first @@ -94,13 +85,13 @@ describe Gitlab::Git::RevList do it 'can accept list of references to exclude' do stub_popen_rev_list('newrev', '--not', 'master', '--objects', output: "sha1\nsha2") - expect(rev_list.new_objects(not_in: ['master'])).to eq(%w[sha1 sha2]) + expect { |b| rev_list.new_objects(not_in: ['master'], &b) }.to yield_with_args(%w[sha1 sha2]) end it 'handles empty list of references to exclude as listing all known objects' do stub_popen_rev_list('newrev', '--objects', output: "sha1\nsha2") - expect(rev_list.new_objects(not_in: [])).to eq(%w[sha1 sha2]) + expect { |b| rev_list.new_objects(not_in: [], &b) }.to yield_with_args(%w[sha1 sha2]) end end @@ -108,15 +99,15 @@ describe Gitlab::Git::RevList do it 'fetches list of all pushed objects using rev-list' do stub_popen_rev_list('--all', '--objects', output: "sha1\nsha2") - expect(rev_list.all_objects).to eq(%w[sha1 sha2]) + expect { |b| rev_list.all_objects(&b) }.to yield_with_args(%w[sha1 sha2]) end end context "#missed_ref" do - let(:rev_list) { described_class.new(oldrev: 'oldrev', newrev: 'newrev', path_to_repo: project.repository.path_to_repo) } + let(:rev_list) { described_class.new(repository, oldrev: 'oldrev', newrev: 'newrev') } it 'calls out to 
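As rewritten above, RevList builds its command from the repository, folds the object directories into GIT_ALTERNATE_OBJECT_DIRECTORIES, and yields results from new_objects instead of returning an array. Roughly, the stubbed call corresponds to (env values taken from the spec's env_hash):

    # GIT_ALTERNATE_OBJECT_DIRECTORIES=foo:bar git rev-list newrev --not --all --objects
    rev_list.new_objects(require_path: true) do |object_ids|
      object_ids.force # lazy enumerator, forced here => %w[sha2]
    end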
`popen`' do - stub_popen_rev_list('--max-count=1', 'oldrev', '^newrev', output: "sha1\nsha2") + stub_popen_rev_list('--max-count=1', 'oldrev', '^newrev', with_lazy_block: false, output: "sha1\nsha2") expect(rev_list.missed_ref).to eq(%w[sha1 sha2]) end diff --git a/spec/lib/gitlab/git/wiki_spec.rb b/spec/lib/gitlab/git/wiki_spec.rb new file mode 100644 index 00000000000..bd8dbf07fa7 --- /dev/null +++ b/spec/lib/gitlab/git/wiki_spec.rb @@ -0,0 +1,36 @@ +require 'spec_helper' + +describe Gitlab::Git::Wiki do + let(:project) { create(:project) } + let(:user) { project.owner } + let(:wiki) { ProjectWiki.new(project, user) } + let(:gollum_wiki) { wiki.wiki } + + # Remove skip_gitaly_mock flag when gitaly_find_page when + # https://gitlab.com/gitlab-org/gitaly/merge_requests/539 gets merged + describe '#page', :skip_gitaly_mock do + it 'returns the right page' do + create_page('page1', 'content') + create_page('foo/page1', 'content') + + expect(gollum_wiki.page(title: 'page1', dir: '').url_path).to eq 'page1' + expect(gollum_wiki.page(title: 'page1', dir: 'foo').url_path).to eq 'foo/page1' + + destroy_page('page1') + destroy_page('page1', 'foo') + end + end + + def create_page(name, content) + gollum_wiki.write_page(name, :markdown, content, commit_details) + end + + def commit_details + Gitlab::Git::Wiki::CommitDetails.new(user.name, user.email, "test commit") + end + + def destroy_page(title, dir = '') + page = gollum_wiki.page(title: title, dir: dir) + wiki.delete_page(page, "test commit") + end +end diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb index d9ec28ab02e..9fbdd73ee0e 100644 --- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb @@ -123,4 +123,53 @@ describe Gitlab::GitalyClient::OperationService do expect(subject.branch_created).to be(false) end end + + describe '#user_squash' do + let(:branch_name) { 'my-branch' } + let(:squash_id) { '1' } + let(:start_sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' } + let(:end_sha) { '54cec5282aa9f21856362fe321c800c236a61615' } + let(:commit_message) { 'Squash message' } + let(:request) do + Gitaly::UserSquashRequest.new( + repository: repository.gitaly_repository, + user: gitaly_user, + squash_id: squash_id.to_s, + branch: branch_name, + start_sha: start_sha, + end_sha: end_sha, + author: gitaly_user, + commit_message: commit_message + ) + end + let(:squash_sha) { 'f00' } + let(:response) { Gitaly::UserSquashResponse.new(squash_sha: squash_sha) } + + subject do + client.user_squash(user, squash_id, branch_name, start_sha, end_sha, user, commit_message) + end + + it 'sends a user_squash message and returns the squash sha' do + expect_any_instance_of(Gitaly::OperationService::Stub) + .to receive(:user_squash).with(request, kind_of(Hash)) + .and_return(response) + + expect(subject).to eq(squash_sha) + end + + context "when git_error is present" do + let(:response) do + Gitaly::UserSquashResponse.new(git_error: "something failed") + end + + it "throws a PreReceive exception" do + expect_any_instance_of(Gitaly::OperationService::Stub) + .to receive(:user_squash).with(request, kind_of(Hash)) + .and_return(response) + + expect { subject }.to raise_error( + Gitlab::Git::Repository::GitError, "something failed") + end + end + end end diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb index 9dfd879a1bc..d076007e4bc 100644 
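The OperationService examples above pin down the client-side calling convention for squashes:

    squash_sha = client.user_squash(user, squash_id, branch_name,
                                    start_sha, end_sha, user, commit_message)
    # Raises Gitlab::Git::Repository::GitError when the response carries git_error.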
--- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb @@ -236,12 +236,14 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do labels = project.issues.first.labels expect(labels.where(type: "ProjectLabel").count).to eq(results.fetch(:first_issue_labels, 0)) + expect(labels.where(type: "ProjectLabel").where.not(group_id: nil).count).to eq(0) end end shared_examples 'restores group correctly' do |**results| it 'has group label' do expect(project.group.labels.size).to eq(results.fetch(:labels, 0)) + expect(project.group.labels.where(type: "GroupLabel").where.not(project_id: nil).count).to eq(0) end it 'has group milestone' do diff --git a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb index 63992ea8ab8..a685521cbf0 100644 --- a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb @@ -4,7 +4,6 @@ describe Gitlab::ImportExport::UploadsRestorer do describe 'bundle a project Git repo' do let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" } let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) } - let(:uploads_path) { FileUploader.dynamic_path_segment(project) } before do allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) @@ -26,9 +25,9 @@ describe Gitlab::ImportExport::UploadsRestorer do end it 'copies the uploads to the project path' do - restorer.restore + subject.restore - uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) } + uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) } expect(uploads).to include('dummy.txt') end @@ -44,9 +43,9 @@ describe Gitlab::ImportExport::UploadsRestorer do end it 'copies the uploads to the project path' do - restorer.restore + subject.restore - uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) } + uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) } expect(uploads).to include('dummy.txt') end diff --git a/spec/lib/gitlab/import_export/uploads_saver_spec.rb b/spec/lib/gitlab/import_export/uploads_saver_spec.rb index e8948de1f3a..959779523f4 100644 --- a/spec/lib/gitlab/import_export/uploads_saver_spec.rb +++ b/spec/lib/gitlab/import_export/uploads_saver_spec.rb @@ -30,7 +30,7 @@ describe Gitlab::ImportExport::UploadsSaver do it 'copies the uploads to the export path' do saver.save - uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) } + uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) } expect(uploads).to include('banana_sample.gif') end @@ -52,7 +52,7 @@ describe Gitlab::ImportExport::UploadsSaver do it 'copies the uploads to the export path' do saver.save - uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) } + uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) } expect(uploads).to include('banana_sample.gif') end diff --git a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb index 0b8e97b8948..ebb6033f71e 100644 --- a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb +++ b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb @@ -63,14 +63,14 @@ describe Gitlab::Kubernetes::Helm::Pod do it 'should mount configMap 
specification in the volume' do spec = subject.generate.spec - expect(spec.volumes.first.configMap['name']).to eq('values-content-configuration') + expect(spec.volumes.first.configMap['name']).to eq("values-content-configuration-#{app.name}") expect(spec.volumes.first.configMap['items'].first['key']).to eq('values') expect(spec.volumes.first.configMap['items'].first['path']).to eq('values.yaml') end end context 'without a configuration file' do - let(:app) { create(:clusters_applications_ingress, cluster: cluster) } + let(:app) { create(:clusters_applications_helm, cluster: cluster) } it_behaves_like 'helm pod' diff --git a/spec/lib/gitlab/ldap/auth_hash_spec.rb b/spec/lib/gitlab/ldap/auth_hash_spec.rb index 1785094af10..9c30ddd7fe2 100644 --- a/spec/lib/gitlab/ldap/auth_hash_spec.rb +++ b/spec/lib/gitlab/ldap/auth_hash_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' describe Gitlab::LDAP::AuthHash do + include LdapHelpers + let(:auth_hash) do described_class.new( OmniAuth::AuthHash.new( @@ -83,4 +85,26 @@ describe Gitlab::LDAP::AuthHash do end end end + + describe '#username' do + context 'if lowercase_usernames setting is' do + let(:given_uid) { 'uid=John Smith,ou=People,dc=example,dc=com' } + + before do + raw_info[:uid] = ['JOHN'] + end + + it 'enabled the username attribute is lower cased' do + stub_ldap_config(lowercase_usernames: true) + + expect(auth_hash.username).to eq 'john' + end + + it 'disabled the username attribute is not lower cased' do + stub_ldap_config(lowercase_usernames: false) + + expect(auth_hash.username).to eq 'JOHN' + end + end + end end diff --git a/spec/lib/gitlab/ldap/person_spec.rb b/spec/lib/gitlab/ldap/person_spec.rb index ff29d9aa5be..b54d4000b53 100644 --- a/spec/lib/gitlab/ldap/person_spec.rb +++ b/spec/lib/gitlab/ldap/person_spec.rb @@ -139,6 +139,27 @@ describe Gitlab::LDAP::Person do expect(person.username).to eq(attr_value) end end + + context 'if lowercase_usernames setting is' do + let(:username_attribute) { 'uid' } + + before do + entry[username_attribute] = 'JOHN' + @person = described_class.new(entry, 'ldapmain') + end + + it 'enabled the username attribute is lower cased' do + stub_ldap_config(lowercase_usernames: true) + + expect(@person.username).to eq 'john' + end + + it 'disabled the username attribute is not lower cased' do + stub_ldap_config(lowercase_usernames: false) + + expect(@person.username).to eq 'JOHN' + end + end end def assert_generic_test(test_description, got, expected) diff --git a/spec/lib/gitlab/metrics_spec.rb b/spec/lib/gitlab/metrics_spec.rb index 9e405e9f736..03c185ddc07 100644 --- a/spec/lib/gitlab/metrics_spec.rb +++ b/spec/lib/gitlab/metrics_spec.rb @@ -20,7 +20,7 @@ describe Gitlab::Metrics do context 'prometheus metrics enabled in config' do before do - allow(Gitlab::CurrentSettings).to receive(:current_application_settings).and_return(prometheus_metrics_enabled: true) + allow(Gitlab::CurrentSettings).to receive(:prometheus_metrics_enabled).and_return(true) end context 'when metrics folder is present' do diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb index 0ae90069b7f..85991c38363 100644 --- a/spec/lib/gitlab/path_regex_spec.rb +++ b/spec/lib/gitlab/path_regex_spec.rb @@ -121,7 +121,7 @@ describe Gitlab::PathRegex do STARTING_WITH_NAMESPACE = %r{^/\*namespace_id/:(project_)?id} NON_PARAM_PARTS = %r{[^:*][a-z\-_/]*} ANY_OTHER_PATH_PART = %r{[a-z\-_/:]*} - WILDCARD_SEGMENT = %r{\*} + WILDCARD_SEGMENT = /\*/ let(:namespaced_wildcard_routes) do routes_without_format.select do |p| p 
=~ %r{#{STARTING_WITH_NAMESPACE}/#{NON_PARAM_PARTS}/#{ANY_OTHER_PATH_PART}#{WILDCARD_SEGMENT}} diff --git a/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb b/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb new file mode 100644 index 00000000000..b49bc5c328c --- /dev/null +++ b/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb @@ -0,0 +1,19 @@ +require 'spec_helper' + +describe Gitlab::QueryLimiting::ActiveSupportSubscriber do + describe '#sql' do + it 'increments the number of executed SQL queries' do + transaction = double(:transaction) + + allow(Gitlab::QueryLimiting::Transaction) + .to receive(:current) + .and_return(transaction) + + expect(transaction) + .to receive(:increment) + .at_least(:once) + + User.count + end + end +end diff --git a/spec/lib/gitlab/query_limiting/middleware_spec.rb b/spec/lib/gitlab/query_limiting/middleware_spec.rb new file mode 100644 index 00000000000..a04bcdecb4b --- /dev/null +++ b/spec/lib/gitlab/query_limiting/middleware_spec.rb @@ -0,0 +1,72 @@ +require 'spec_helper' + +describe Gitlab::QueryLimiting::Middleware do + describe '#call' do + it 'runs the application with query limiting in place' do + middleware = described_class.new(-> (env) { env }) + + expect_any_instance_of(Gitlab::QueryLimiting::Transaction) + .to receive(:act_upon_results) + + expect(middleware.call({ number: 10 })) + .to eq({ number: 10 }) + end + end + + describe '#action_name' do + let(:middleware) { described_class.new(-> (env) { env }) } + + context 'using a Rails request' do + it 'returns the name of the controller and action' do + env = { + described_class::CONTROLLER_KEY => double( + :controller, + action_name: 'show', + class: double(:class, name: 'UsersController'), + content_type: 'text/html' + ) + } + + expect(middleware.action_name(env)).to eq('UsersController#show') + end + + it 'includes the content type if this is not text/html' do + env = { + described_class::CONTROLLER_KEY => double( + :controller, + action_name: 'show', + class: double(:class, name: 'UsersController'), + content_type: 'application/json' + ) + } + + expect(middleware.action_name(env)) + .to eq('UsersController#show (application/json)') + end + end + + context 'using a Grape API request' do + it 'returns the name of the request method and endpoint path' do + env = { + described_class::ENDPOINT_KEY => double( + :endpoint, + route: double(:route, request_method: 'GET', path: '/foo') + ) + } + + expect(middleware.action_name(env)).to eq('GET /foo') + end + + it 'returns nil if the route can not be retrieved' do + endpoint = double(:endpoint) + env = { described_class::ENDPOINT_KEY => endpoint } + + allow(endpoint) + .to receive(:route) + .and_raise(RuntimeError) + + expect(middleware.action_name(env)).to be_nil + end + end + end +end diff --git a/spec/lib/gitlab/query_limiting/transaction_spec.rb b/spec/lib/gitlab/query_limiting/transaction_spec.rb new file mode 100644 index 00000000000..b4231fcd0fa --- /dev/null +++ b/spec/lib/gitlab/query_limiting/transaction_spec.rb @@ -0,0 +1,144 @@ +require 'spec_helper' + +describe Gitlab::QueryLimiting::Transaction do + after do + Thread.current[described_class::THREAD_KEY] = nil + end + + describe '.current' do + it 'returns nil when there is no transaction' do + expect(described_class.current).to be_nil + end + + it 'returns the transaction when present' do + Thread.current[described_class::THREAD_KEY] = described_class.new + + expect(described_class.current).to be_an_instance_of(described_class) + end + end + + 
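Gitlab::QueryLimiting::Transaction tracks the active transaction per thread, so .current is just a read of the thread-local slot:

    Thread.current[Gitlab::QueryLimiting::Transaction::THREAD_KEY] = transaction
    Gitlab::QueryLimiting::Transaction.current # => transaction (nil when unset)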
describe '.run' do + it 'runs a transaction and returns it and its return value' do + trans, ret = described_class.run do + 10 + end + + expect(trans).to be_an_instance_of(described_class) + expect(ret).to eq(10) + end + + it 'removes the transaction from the current thread upon completion' do + described_class.run do + 10 + end + + expect(Thread.current[described_class::THREAD_KEY]).to be_nil + end + end + + describe '#act_upon_results' do + context 'when the query threshold is not exceeded' do + it 'does nothing' do + trans = described_class.new + + expect(trans).not_to receive(:raise) + + trans.act_upon_results + end + end + + context 'when the query threshold is exceeded' do + let(:transaction) do + trans = described_class.new + trans.count = described_class::THRESHOLD + 1 + + trans + end + + it 'raises an error when this is enabled' do + expect { transaction.act_upon_results } + .to raise_error(described_class::ThresholdExceededError) + end + + it 'reports the error in Sentry if raising an error is disabled' do + expect(transaction) + .to receive(:raise_error?) + .and_return(false) + + expect(Raven) + .to receive(:capture_exception) + .with(an_instance_of(described_class::ThresholdExceededError)) + + transaction.act_upon_results + end + end + end + + describe '#increment' do + it 'increments the number of executed queries' do + transaction = described_class.new + + expect(transaction.count).to be_zero + + transaction.increment + + expect(transaction.count).to eq(1) + end + end + + describe '#raise_error?' do + it 'returns true in a test environment' do + transaction = described_class.new + + expect(transaction.raise_error?).to eq(true) + end + + it 'returns false in a production environment' do + transaction = described_class.new + + expect(Rails.env) + .to receive(:test?) + .and_return(false) + + expect(transaction.raise_error?).to eq(false) + end + end + + describe '#threshold_exceeded?' do + it 'returns false when the threshold is not exceeded' do + transaction = described_class.new + + expect(transaction.threshold_exceeded?).to eq(false) + end + + it 'returns true when the threshold is exceeded' do + transaction = described_class.new + transaction.count = described_class::THRESHOLD + 1 + + expect(transaction.threshold_exceeded?).to eq(true) + end + end + + describe '#error_message' do + it 'returns the error message to display when the threshold is exceeded' do + transaction = described_class.new + transaction.count = max = described_class::THRESHOLD + + expect(transaction.error_message).to eq( + "Too many SQL queries were executed: a maximum of #{max} " \ + "is allowed but #{max} SQL queries were executed" + ) + end + + it 'includes the action name in the error message when present' do + transaction = described_class.new + transaction.count = max = described_class::THRESHOLD + transaction.action = 'UsersController#show' + + expect(transaction.error_message).to eq( + "Too many SQL queries were executed in UsersController#show: " \ + "a maximum of #{max} is allowed but #{max} SQL queries were executed" + ) + end + end +end diff --git a/spec/lib/gitlab/query_limiting_spec.rb b/spec/lib/gitlab/query_limiting_spec.rb new file mode 100644 index 00000000000..2eddab0b8c3 --- /dev/null +++ b/spec/lib/gitlab/query_limiting_spec.rb @@ -0,0 +1,65 @@ +require 'spec_helper' + +describe Gitlab::QueryLimiting do + describe '.enable?' 
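Putting the examples above together, a typical use of the transaction looks like:

    transaction, result = Gitlab::QueryLimiting::Transaction.run do
      User.count # each SQL query increments transaction.count via the subscriber
    end
    transaction.act_upon_results # raises ThresholdExceededError in test when over THRESHOLD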
do + it 'returns true in a test environment' do + expect(described_class.enable?).to eq(true) + end + + it 'returns true in a development environment' do + allow(Rails.env).to receive(:development?).and_return(true) + + expect(described_class.enable?).to eq(true) + end + + it 'returns true on GitLab.com' do + allow(Gitlab).to receive(:com?).and_return(true) + + expect(described_class.enable?).to eq(true) + end + + it 'returns true in a non GitLab.com' do + expect(Gitlab).to receive(:com?).and_return(false) + expect(Rails.env).to receive(:development?).and_return(false) + expect(Rails.env).to receive(:test?).and_return(false) + + expect(described_class.enable?).to eq(false) + end + end + + describe '.whitelist' do + it 'raises ArgumentError when an invalid issue URL is given' do + expect { described_class.whitelist('foo') } + .to raise_error(ArgumentError) + end + + context 'without a transaction' do + it 'does nothing' do + expect { described_class.whitelist('https://example.com') } + .not_to raise_error + end + end + + context 'with a transaction' do + let(:transaction) { Gitlab::QueryLimiting::Transaction.new } + + before do + allow(Gitlab::QueryLimiting::Transaction) + .to receive(:current) + .and_return(transaction) + end + + it 'does not increment the number of SQL queries executed in the block' do + before = transaction.count + + described_class.whitelist('https://example.com') + + 2.times do + User.count + end + + expect(transaction.count).to eq(before) + end + end + end +end diff --git a/spec/lib/gitlab/slash_commands/issue_search_spec.rb b/spec/lib/gitlab/slash_commands/issue_search_spec.rb index e41e5254dde..35d01efc1bd 100644 --- a/spec/lib/gitlab/slash_commands/issue_search_spec.rb +++ b/spec/lib/gitlab/slash_commands/issue_search_spec.rb @@ -5,7 +5,7 @@ describe Gitlab::SlashCommands::IssueSearch do let!(:issue) { create(:issue, project: project, title: 'find me') } let!(:confidential) { create(:issue, :confidential, project: project, title: 'mepmep find') } let(:project) { create(:project) } - let(:user) { issue.author } + let(:user) { create(:user) } let(:regex_match) { described_class.match("issue search find") } subject do diff --git a/spec/lib/gitlab/ssh_public_key_spec.rb b/spec/lib/gitlab/ssh_public_key_spec.rb index 93d538141ce..c15e29774b6 100644 --- a/spec/lib/gitlab/ssh_public_key_spec.rb +++ b/spec/lib/gitlab/ssh_public_key_spec.rb @@ -37,6 +37,41 @@ describe Gitlab::SSHPublicKey, lib: true do end end + describe '.sanitize(key_content)' do + let(:content) { build(:key).key } + + context 'when key has blank space characters' do + it 'removes the extra blank space characters' do + unsanitized = content.insert(100, "\n") + .insert(40, "\r\n") + .insert(30, ' ') + + sanitized = described_class.sanitize(unsanitized) + _, body = sanitized.split + + expect(sanitized).not_to eq(unsanitized) + expect(body).not_to match(/\s/) + end + end + + context "when key doesn't have blank space characters" do + it "doesn't modify the content" do + sanitized = described_class.sanitize(content) + + expect(sanitized).to eq(content) + end + end + + context "when key is invalid" do + it 'returns the original content' do + unsanitized = "ssh-foo any content==" + sanitized = described_class.sanitize(unsanitized) + + expect(sanitized).to eq(unsanitized) + end + end + end + describe '#valid?' 
do subject { public_key } diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb index b5f2a15ada3..0e9ecff25a6 100644 --- a/spec/lib/gitlab/usage_data_spec.rb +++ b/spec/lib/gitlab/usage_data_spec.rb @@ -103,9 +103,9 @@ describe Gitlab::UsageData do subject { described_class.features_usage_data_ce } it 'gathers feature usage data' do - expect(subject[:signup]).to eq(current_application_settings.allow_signup?) + expect(subject[:signup]).to eq(Gitlab::CurrentSettings.allow_signup?) expect(subject[:ldap]).to eq(Gitlab.config.ldap.enabled) - expect(subject[:gravatar]).to eq(current_application_settings.gravatar_enabled?) + expect(subject[:gravatar]).to eq(Gitlab::CurrentSettings.gravatar_enabled?) expect(subject[:omniauth]).to eq(Gitlab.config.omniauth.enabled) expect(subject[:reply_by_email]).to eq(Gitlab::IncomingEmail.enabled?) expect(subject[:container_registry]).to eq(Gitlab.config.registry.enabled) @@ -129,7 +129,7 @@ describe Gitlab::UsageData do subject { described_class.license_usage_data } it "gathers license data" do - expect(subject[:uuid]).to eq(current_application_settings.uuid) + expect(subject[:uuid]).to eq(Gitlab::CurrentSettings.uuid) expect(subject[:version]).to eq(Gitlab::VERSION) expect(subject[:active_user_count]).to eq(User.active.count) expect(subject[:recorded_at]).to be_a(Time) diff --git a/spec/lib/gitlab/visibility_level_spec.rb b/spec/lib/gitlab/visibility_level_spec.rb index d85dac630b4..2c1146ceff5 100644 --- a/spec/lib/gitlab/visibility_level_spec.rb +++ b/spec/lib/gitlab/visibility_level_spec.rb @@ -57,6 +57,15 @@ describe Gitlab::VisibilityLevel do expect(described_class.allowed_levels) .to contain_exactly(described_class::PRIVATE, described_class::PUBLIC) end + + it 'returns all levels when no visibility level was set' do + allow(described_class) + .to receive_message_chain('current_application_settings.restricted_visibility_levels') + .and_return(nil) + + expect(described_class.allowed_levels) + .to contain_exactly(described_class::PRIVATE, described_class::INTERNAL, described_class::PUBLIC) + end end describe '.closest_allowed_level' do diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb index 2e7a0265a0b..dc2bb5b9747 100644 --- a/spec/lib/gitlab/workhorse_spec.rb +++ b/spec/lib/gitlab/workhorse_spec.rb @@ -465,4 +465,21 @@ describe Gitlab::Workhorse do end end end + + describe '.send_url' do + let(:url) { 'http://example.com' } + + subject { described_class.send_url(url) } + + it 'sets the header correctly' do + key, command, params = decode_workhorse_header(subject) + + expect(key).to eq("Gitlab-Workhorse-Send-Data") + expect(command).to eq("send-url") + expect(params).to eq({ + 'URL' => url, + 'AllowRedirects' => false + }.deep_stringify_keys) + end + end end diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb index 7a8e798e3c9..59eda025108 100644 --- a/spec/mailers/notify_spec.rb +++ b/spec/mailers/notify_spec.rb @@ -1357,7 +1357,7 @@ describe Notify do matcher :have_part_with do |expected| match do |actual| - actual.body.parts.any? { |part| part.content_type.try(:match, %r(#{expected})) } + actual.body.parts.any? 
{ |part| part.content_type.try(:match, /#{expected}/) } end end end diff --git a/spec/migrations/add_foreign_keys_to_todos_spec.rb b/spec/migrations/add_foreign_keys_to_todos_spec.rb new file mode 100644 index 00000000000..4a22bd6f342 --- /dev/null +++ b/spec/migrations/add_foreign_keys_to_todos_spec.rb @@ -0,0 +1,65 @@ +require 'spec_helper' +require Rails.root.join('db', 'migrate', '20180201110056_add_foreign_keys_to_todos.rb') + +describe AddForeignKeysToTodos, :migration do + let(:todos) { table(:todos) } + + let(:project) { create(:project) } + let(:user) { create(:user) } + + context 'add foreign key on user_id' do + let!(:todo_with_user) { create_todo(user_id: user.id) } + let!(:todo_without_user) { create_todo(user_id: 4711) } + + it 'removes orphaned todos without corresponding user' do + expect { migrate! }.to change { Todo.count }.from(2).to(1) + end + + it 'does not remove entries with valid user_id' do + expect { migrate! }.not_to change { todo_with_user.reload } + end + end + + context 'add foreign key on author_id' do + let!(:todo_with_author) { create_todo(author_id: user.id) } + let!(:todo_with_invalid_author) { create_todo(author_id: 4711) } + + it 'removes orphaned todos by author_id' do + expect { migrate! }.to change { Todo.count }.from(2).to(1) + end + + it 'does not touch author_id for valid entries' do + expect { migrate! }.not_to change { todo_with_author.reload } + end + end + + context 'add foreign key on note_id' do + let(:note) { create(:note) } + let!(:todo_with_note) { create_todo(note_id: note.id) } + let!(:todo_with_invalid_note) { create_todo(note_id: 4711) } + let!(:todo_without_note) { create_todo(note_id: nil) } + + it 'deletes todo if note_id is set but does not exist in notes table' do + expect { migrate! }.to change { Todo.count }.from(3).to(2) + end + + it 'does not touch entry if note_id is nil' do + expect { migrate! }.not_to change { todo_without_note.reload } + end + + it 'does not touch note_id for valid entries' do + expect { migrate! 
}.not_to change { todo_with_note.reload } + end + end + + def create_todo(**opts) + todos.create!( + project_id: project.id, + user_id: user.id, + author_id: user.id, + target_type: '', + action: 0, + state: '', **opts + ) + end +end diff --git a/spec/migrations/remove_project_labels_group_id_spec.rb b/spec/migrations/remove_project_labels_group_id_spec.rb new file mode 100644 index 00000000000..d80d61af20b --- /dev/null +++ b/spec/migrations/remove_project_labels_group_id_spec.rb @@ -0,0 +1,21 @@ +# encoding: utf-8 + +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20180202111106_remove_project_labels_group_id.rb') + +describe RemoveProjectLabelsGroupId, :delete do + let(:migration) { described_class.new } + let(:group) { create(:group) } + let!(:project_label) { create(:label, group_id: group.id) } + let!(:group_label) { create(:group_label) } + + describe '#up' do + it 'updates the project labels group ID' do + expect { migration.up }.to change { project_label.reload.group_id }.to(nil) + end + + it 'keeps the group labels group ID' do + expect { migration.up }.not_to change { group_label.reload.group_id } + end + end +end diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb index f5b3b4a9fc5..0b3d5c6a0bd 100644 --- a/spec/models/ci/build_spec.rb +++ b/spec/models/ci/build_spec.rb @@ -675,7 +675,7 @@ describe Ci::Build do context 'build is erasable' do context 'new artifacts' do - let!(:build) { create(:ci_build, :trace, :success, :artifacts) } + let!(:build) { create(:ci_build, :trace_artifact, :success, :artifacts) } describe '#erase' do before do @@ -709,7 +709,7 @@ describe Ci::Build do end describe '#erased?' do - let!(:build) { create(:ci_build, :trace, :success, :artifacts) } + let!(:build) { create(:ci_build, :trace_artifact, :success, :artifacts) } subject { build.erased? } context 'job has not been erased' do @@ -744,7 +744,7 @@ describe Ci::Build do context 'old artifacts' do context 'build is erasable' do context 'new artifacts' do - let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) } + let!(:build) { create(:ci_build, :trace_artifact, :success, :legacy_artifacts) } describe '#erase' do before do @@ -778,7 +778,7 @@ describe Ci::Build do end describe '#erased?' do - let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) } + let!(:build) { create(:ci_build, :trace_artifact, :success, :legacy_artifacts) } subject { build.erased? } context 'job has not been erased' do diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb index 0e18a326c68..a2bd36537e6 100644 --- a/spec/models/ci/job_artifact_spec.rb +++ b/spec/models/ci/job_artifact_spec.rb @@ -12,6 +12,9 @@ describe Ci::JobArtifact do it { is_expected.to respond_to(:created_at) } it { is_expected.to respond_to(:updated_at) } + it { is_expected.to delegate_method(:open).to(:file) } + it { is_expected.to delegate_method(:exists?).to(:file) } + describe '#set_size' do it 'sets the size' do expect(artifact.size).to eq(106365) diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb index 5e82a2988ce..5ea4acb6687 100644 --- a/spec/models/group_spec.rb +++ b/spec/models/group_spec.rb @@ -582,4 +582,20 @@ describe Group do end end end + + describe '#has_parent?' 
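The two delegation matchers added to the JobArtifact spec above correspond to a standard Rails delegation on the model, roughly (a sketch, not necessarily the exact model code):

    class Ci::JobArtifact < ActiveRecord::Base
      delegate :open, :exists?, to: :file
    end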
do + context 'when the group has a parent' do + it 'should be truthy' do + group = create(:group, :nested) + expect(group.has_parent?).to be_truthy + end + end + + context 'when the group has no parent' do + it 'should be falsy' do + group = create(:group, parent: nil) + expect(group.has_parent?).to be_falsy + end + end + end end diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb index 4cd9e3f4f1d..bf5703ac986 100644 --- a/spec/models/key_spec.rb +++ b/spec/models/key_spec.rb @@ -1,13 +1,6 @@ require 'spec_helper' describe Key, :mailer do - include Gitlab::CurrentSettings - - describe 'modules' do - subject { described_class } - it { is_expected.to include_module(Gitlab::CurrentSettings) } - end - describe "Associations" do it { is_expected.to belong_to(:user) } end @@ -79,16 +72,53 @@ describe Key, :mailer do expect(build(:key)).to be_valid end - it 'accepts a key with newline charecters after stripping them' do - key = build(:key) - key.key = key.key.insert(100, "\n") - key.key = key.key.insert(40, "\r\n") - expect(key).to be_valid - end - it 'rejects the unfingerprintable key (not a key)' do expect(build(:key, key: 'ssh-rsa an-invalid-key==')).not_to be_valid end + + where(:factory, :chars, :expected_sections) do + [ + [:key, ["\n", "\r\n"], 3], + [:key, [' ', ' '], 3], + [:key_without_comment, [' ', ' '], 2] + ] + end + + with_them do + let!(:key) { create(factory) } + let!(:original_fingerprint) { key.fingerprint } + + it 'accepts a key with blank space characters after stripping them' do + modified_key = key.key.insert(100, chars.first).insert(40, chars.last) + _, content = modified_key.split + + key.update!(key: modified_key) + + expect(key).to be_valid + expect(key.key.split.size).to eq(expected_sections) + + expect(content).not_to match(/\s/) + expect(original_fingerprint).to eq(key.fingerprint) + end + end + end + + context 'validate size' do + where(:key_content, :result) do + [ + [Spec::Support::Helpers::KeyGeneratorHelper.new(512).generate, false], + [Spec::Support::Helpers::KeyGeneratorHelper.new(8192).generate, false], + [Spec::Support::Helpers::KeyGeneratorHelper.new(1024).generate, true] + ] + end + + with_them do + it 'validates the size of the key' do + key = build(:key, key: key_content) + + expect(key.valid?).to eq(result) + end + end end context 'validate it meets key restrictions' do diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb index c3673a0e2a3..5e126bc4bea 100644 --- a/spec/models/namespace_spec.rb +++ b/spec/models/namespace_spec.rb @@ -204,7 +204,7 @@ describe Namespace do let(:parent) { create(:group, name: 'parent', path: 'parent') } let(:child) { create(:group, name: 'child', path: 'child', parent: parent) } let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child, skip_disk_validation: true) } - let(:uploads_dir) { File.join(CarrierWave.root, FileUploader.base_dir) } + let(:uploads_dir) { FileUploader.root } let(:pages_dir) { File.join(TestEnv.pages_path) } before do @@ -567,32 +567,62 @@ describe Namespace do end end - describe "#allowed_path_by_redirects" do - let(:namespace1) { create(:namespace, path: 'foo') } + describe '#remove_exports' do + let(:legacy_project) { create(:project, :with_export, namespace: namespace) } + let(:hashed_project) { create(:project, :with_export, :hashed, namespace: namespace) } + let(:export_path) { Dir.mktmpdir('namespace_remove_exports_spec') } + let(:legacy_export) { legacy_project.export_project_path } + let(:hashed_export) { 
hashed_project.export_project_path } - context "when the path has been taken before" do - before do - namespace1.path = 'bar' - namespace1.save! + it 'removes exports for legacy and hashed projects' do + allow(Gitlab::ImportExport).to receive(:storage_path) { export_path } + + expect(File.exist?(legacy_export)).to be_truthy + expect(File.exist?(hashed_export)).to be_truthy + + namespace.remove_exports! + + expect(File.exist?(legacy_export)).to be_falsy + expect(File.exist?(hashed_export)).to be_falsy + end + end + + describe '#full_path_was' do + context 'when the group has no parent' do + it 'should return the path was' do + group = create(:group, parent: nil) + expect(group.full_path_was).to eq(group.path_was) end + end + + context 'when a parent is assigned to a group with no previous parent' do + it 'should return the path was' do + group = create(:group, parent: nil) - it 'should be invalid' do - namespace2 = build(:group, path: 'foo') - expect(namespace2).to be_invalid + parent = create(:group) + group.parent = parent + + expect(group.full_path_was).to eq("#{group.path_was}") end + end + + context 'when a parent is removed from the group' do + it 'should return the parent full path' do + parent = create(:group) + group = create(:group, parent: parent) + group.parent = nil - it 'should return an error on path' do - namespace2 = build(:group, path: 'foo') - namespace2.valid? - expect(namespace2.errors.messages[:path].first).to eq('foo has been taken before. Please use another one') + expect(group.full_path_was).to eq("#{parent.full_path}/#{group.path}") end end - context "when the path has not been taken before" do - it 'should be valid' do - expect(RedirectRoute.count).to eq(0) - namespace = build(:namespace) - expect(namespace).to be_valid + context 'when changing parents' do + it 'should return the previous parent full path' do + parent = create(:group) + group = create(:group, parent: parent) + new_parent = create(:group) + group.parent = new_parent + expect(group.full_path_was).to eq("#{parent.full_path}/#{group.path}") end end end diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb index 3d030927036..c853f707e6d 100644 --- a/spec/models/note_spec.rb +++ b/spec/models/note_spec.rb @@ -8,7 +8,7 @@ describe Note do it { is_expected.to belong_to(:noteable).touch(false) } it { is_expected.to belong_to(:author).class_name('User') } - it { is_expected.to have_many(:todos).dependent(:destroy) } + it { is_expected.to have_many(:todos) } end describe 'modules' do @@ -17,8 +17,6 @@ describe Note do it { is_expected.to include_module(Participable) } it { is_expected.to include_module(Mentionable) } it { is_expected.to include_module(Awardable) } - - it { is_expected.to include_module(Gitlab::CurrentSettings) } end describe 'validation' do diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb index 1eaaadf56c5..748c366efca 100644 --- a/spec/models/project_services/jira_service_spec.rb +++ b/spec/models/project_services/jira_service_spec.rb @@ -205,7 +205,7 @@ describe JiraService do @jira_service.close_issue(merge_request, ExternalIssue.new("JIRA-123", project)) expect(WebMock).to have_requested(:post, @comment_url).with( - body: /#{custom_base_url}\/#{project.full_path}\/commit\/#{merge_request.diff_head_sha}/ + body: %r{#{custom_base_url}/#{project.full_path}/commit/#{merge_request.diff_head_sha}} ).once end @@ -220,7 +220,7 @@ describe JiraService do @jira_service.close_issue(merge_request, 
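The #full_path_was contexts above amount to: the method reports the full path as it was before any in-memory parent change. For example:

    parent = create(:group)
    group  = create(:group, parent: parent)
    group.parent = nil
    group.full_path_was # => "#{parent.full_path}/#{group.path}"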
ExternalIssue.new("JIRA-123", project)) expect(WebMock).to have_requested(:post, @comment_url).with( - body: /#{Gitlab.config.gitlab.url}\/#{project.full_path}\/commit\/#{merge_request.diff_head_sha}/ + body: %r{#{Gitlab.config.gitlab.url}/#{project.full_path}/commit/#{merge_request.diff_head_sha}} ).once end diff --git a/spec/models/project_services/kubernetes_service_spec.rb b/spec/models/project_services/kubernetes_service_spec.rb index 6980ba335b8..622d8844a72 100644 --- a/spec/models/project_services/kubernetes_service_spec.rb +++ b/spec/models/project_services/kubernetes_service_spec.rb @@ -408,7 +408,7 @@ describe KubernetesService, :use_clean_rails_memory_store_caching do context 'if the services is active' do it 'should return a message' do - expect(kubernetes_service.deprecation_message).to match(/Your cluster information on this page is still editable/) + expect(kubernetes_service.deprecation_message).to match(/Your Kubernetes cluster information on this page is still editable/) end end diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index 31dcb543cbd..da940571bc1 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -117,7 +117,6 @@ describe Project do it { is_expected.to include_module(Gitlab::ConfigHelper) } it { is_expected.to include_module(Gitlab::ShellAdapter) } it { is_expected.to include_module(Gitlab::VisibilityLevel) } - it { is_expected.to include_module(Gitlab::CurrentSettings) } it { is_expected.to include_module(Referable) } it { is_expected.to include_module(Sortable) } end @@ -2504,6 +2503,37 @@ describe Project do end end + describe '#remove_exports' do + let(:project) { create(:project, :with_export) } + + it 'removes the exports directory for the project' do + expect(File.exist?(project.export_path)).to be_truthy + + allow(FileUtils).to receive(:rm_rf).and_call_original + expect(FileUtils).to receive(:rm_rf).with(project.export_path).and_call_original + project.remove_exports + + expect(File.exist?(project.export_path)).to be_falsy + end + + it 'is a no-op when there is no namespace' do + export_path = project.export_path + project.update_column(:namespace_id, nil) + + expect(FileUtils).not_to receive(:rm_rf).with(export_path) + + project.remove_exports + + expect(File.exist?(export_path)).to be_truthy + end + + it 'is run when the project is destroyed' do + expect(project).to receive(:remove_exports).and_call_original + + project.destroy + end + end + describe '#forks_count' do it 'returns the number of forks' do project = build(:project) diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb index 929086305ba..1e7671476f1 100644 --- a/spec/models/project_wiki_spec.rb +++ b/spec/models/project_wiki_spec.rb @@ -127,7 +127,7 @@ describe ProjectWiki do end after do - destroy_page(subject.pages.first.page) + subject.pages.each { |page| destroy_page(page.page) } end it "returns the latest version of the page if it exists" do @@ -148,6 +148,17 @@ describe ProjectWiki do page = subject.find_page("index page") expect(page).to be_a WikiPage end + + context 'pages with multibyte-character title' do + before do + create_page("autre pagé", "C'est un génial Gollum Wiki") + end + + it "can find a page by slug" do + page = subject.find_page("autre pagé") + expect(page.title).to eq("autre pagé") + end + end end context 'when Gitaly wiki_find_page is enabled' do diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb index 1102b1c9006..02a5ee54262 100644 --- 
a/spec/models/repository_spec.rb +++ b/spec/models/repository_spec.rb @@ -36,26 +36,49 @@ describe Repository do end describe '#branch_names_contains' do - subject { repository.branch_names_contains(sample_commit.id) } + shared_examples '#branch_names_contains' do + set(:project) { create(:project, :repository) } + let(:repository) { project.repository } - it { is_expected.to include('master') } - it { is_expected.not_to include('feature') } - it { is_expected.not_to include('fix') } + subject { repository.branch_names_contains(sample_commit.id) } - describe 'when storage is broken', :broken_storage do - it 'should raise a storage error' do - expect_to_raise_storage_error do - broken_repository.branch_names_contains(sample_commit.id) + it { is_expected.to include('master') } + it { is_expected.not_to include('feature') } + it { is_expected.not_to include('fix') } + + describe 'when storage is broken', :broken_storage do + it 'should raise a storage error' do + expect_to_raise_storage_error do + broken_repository.branch_names_contains(sample_commit.id) + end end end end + + context 'when gitaly is enabled' do + it_behaves_like '#branch_names_contains' + end + + context 'when gitaly is disabled', :skip_gitaly_mock do + it_behaves_like '#branch_names_contains' + end end describe '#tag_names_contains' do - subject { repository.tag_names_contains(sample_commit.id) } + shared_examples '#tag_names_contains' do + subject { repository.tag_names_contains(sample_commit.id) } - it { is_expected.to include('v1.1.0') } - it { is_expected.not_to include('v1.0.0') } + it { is_expected.to include('v1.1.0') } + it { is_expected.not_to include('v1.0.0') } + end + + context 'when gitaly is enabled' do + it_behaves_like '#tag_names_contains' + end + + context 'when gitaly is enabled', :skip_gitaly_mock do + it_behaves_like '#tag_names_contains' + end end describe 'tags_sorted_by' do @@ -959,19 +982,19 @@ describe Repository do end describe '#find_branch' do - it 'loads a branch with a fresh repo' do - expect(Gitlab::Git::Repository).to receive(:new).twice.and_call_original + context 'fresh_repo is true' do + it 'delegates the call to raw_repository' do + expect(repository.raw_repository).to receive(:find_branch).with('master', true) - 2.times do - expect(repository.find_branch('feature')).not_to be_nil + repository.find_branch('master', fresh_repo: true) end end - it 'loads a branch with a cached repo' do - expect(Gitlab::Git::Repository).to receive(:new).once.and_call_original + context 'fresh_repo is false' do + it 'delegates the call to raw_repository' do + expect(repository.raw_repository).to receive(:find_branch).with('master', false) - 2.times do - expect(repository.find_branch('feature', fresh_repo: false)).not_to be_nil + repository.find_branch('master', fresh_repo: false) end end end diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb index 3e8f3848eca..bd498269798 100644 --- a/spec/models/todo_spec.rb +++ b/spec/models/todo_spec.rb @@ -20,6 +20,7 @@ describe Todo do it { is_expected.to validate_presence_of(:action) } it { is_expected.to validate_presence_of(:target_type) } it { is_expected.to validate_presence_of(:user) } + it { is_expected.to validate_presence_of(:author) } context 'for commits' do subject { described_class.new(target_type: 'Commit') } diff --git a/spec/models/upload_spec.rb b/spec/models/upload_spec.rb index 345382ea8c7..36b8e5d304f 100644 --- a/spec/models/upload_spec.rb +++ b/spec/models/upload_spec.rb @@ -43,49 +43,16 @@ describe Upload do 
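The rewritten #find_branch specs above reduce the method to a delegation; its assumed shape, for illustration:

    def find_branch(name, fresh_repo: true)
      raw_repository.find_branch(name, fresh_repo)
    end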
.to(a_string_matching(/\A\h{64}\z/)) end end - end - - describe '.remove_path' do - it 'removes all records at the given path' do - described_class.create!( - size: File.size(__FILE__), - path: __FILE__, - model: build_stubbed(:user), - uploader: 'AvatarUploader' - ) - - expect { described_class.remove_path(__FILE__) } - .to change { described_class.count }.from(1).to(0) - end - end - describe '.record' do - let(:fake_uploader) do - double( - file: double(size: 12_345), - relative_path: 'foo/bar.jpg', - model: build_stubbed(:user), - class: 'AvatarUploader' - ) - end - - it 'removes existing paths before creation' do - expect(described_class).to receive(:remove_path) - .with(fake_uploader.relative_path) - - described_class.record(fake_uploader) - end + describe 'after_destroy' do + context 'uploader is FileUploader-based' do + subject { create(:upload, :issuable_upload) } - it 'creates a new record and assigns size, path, model, and uploader' do - upload = described_class.record(fake_uploader) + it 'calls delete_file!' do + is_expected.to receive(:delete_file!) - aggregate_failures do - expect(upload).to be_persisted - expect(upload.size).to eq fake_uploader.file.size - expect(upload.path).to eq fake_uploader.relative_path - expect(upload.model_id).to eq fake_uploader.model.id - expect(upload.model_type).to eq fake_uploader.model.class.to_s - expect(upload.uploader).to eq fake_uploader.class + subject.destroy + end end end end @@ -111,27 +78,27 @@ describe Upload do end end - describe '#calculate_checksum' do - it 'calculates the SHA256 sum' do - upload = described_class.new( - path: __FILE__, - size: described_class::CHECKSUM_THRESHOLD - 1.megabyte - ) + describe '#calculate_checksum!' do + let(:upload) do + described_class.new(path: __FILE__, + size: described_class::CHECKSUM_THRESHOLD - 1.megabyte) + end + + it 'sets `checksum` to SHA256 sum of the file' do expected = Digest::SHA256.file(__FILE__).hexdigest - expect { upload.calculate_checksum } + expect { upload.calculate_checksum! } .to change { upload.checksum }.from(nil).to(expected) end - it 'returns nil for a non-existant file' do - upload = described_class.new( - path: __FILE__, - size: described_class::CHECKSUM_THRESHOLD - 1.megabyte - ) - + it 'sets `checksum` to nil for a non-existant file' do expect(upload).to receive(:exist?).and_return(false) - expect(upload.calculate_checksum).to be_nil + checksum = Digest::SHA256.file(__FILE__).hexdigest + upload.checksum = checksum + + expect { upload.calculate_checksum! 
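Per the renamed #calculate_checksum! examples above, the bang method assigns rather than returns:

    upload.calculate_checksum!
    upload.checksum # => Digest::SHA256.file(upload.path).hexdigest,
                    #    or nil when the file does not exist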
} + .to change { upload.checksum }.from(checksum).to(nil) end end @@ -148,4 +115,10 @@ describe Upload do expect(upload).not_to exist end end + + describe "#uploader_context" do + subject { create(:upload, :issuable_upload, secret: 'secret', filename: 'file.txt') } + + it { expect(subject.uploader_context).to match(a_hash_including(secret: 'secret', identifier: 'file.txt')) } + end end diff --git a/spec/models/user_callout_spec.rb b/spec/models/user_callout_spec.rb new file mode 100644 index 00000000000..64ba17c81fe --- /dev/null +++ b/spec/models/user_callout_spec.rb @@ -0,0 +1,16 @@ +require 'rails_helper' + +describe UserCallout do + let!(:callout) { create(:user_callout) } + + describe 'relationships' do + it { is_expected.to belong_to(:user) } + end + + describe 'validations' do + it { is_expected.to validate_presence_of(:user) } + + it { is_expected.to validate_presence_of(:feature_name) } + it { is_expected.to validate_uniqueness_of(:feature_name).scoped_to(:user_id).ignoring_case_sensitivity } + end +end diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index 594f23718da..24d4d8f1741 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -1,14 +1,12 @@ require 'spec_helper' describe User do - include Gitlab::CurrentSettings include ProjectForksHelper describe 'modules' do subject { described_class } it { is_expected.to include_module(Gitlab::ConfigHelper) } - it { is_expected.to include_module(Gitlab::CurrentSettings) } it { is_expected.to include_module(Referable) } it { is_expected.to include_module(Sortable) } it { is_expected.to include_module(TokenAuthenticatable) } @@ -35,7 +33,7 @@ describe User do it { is_expected.to have_many(:merge_requests).dependent(:destroy) } it { is_expected.to have_many(:identities).dependent(:destroy) } it { is_expected.to have_many(:spam_logs).dependent(:destroy) } - it { is_expected.to have_many(:todos).dependent(:destroy) } + it { is_expected.to have_many(:todos) } it { is_expected.to have_many(:award_emoji).dependent(:destroy) } it { is_expected.to have_many(:triggers).dependent(:destroy) } it { is_expected.to have_many(:builds).dependent(:nullify) } @@ -103,7 +101,7 @@ describe User do user = build(:user, username: 'dashboard') expect(user).not_to be_valid - expect(user.errors.values).to eq [['dashboard is a reserved name']] + expect(user.errors.messages[:username]).to eq ['dashboard is a reserved name'] end it 'allows child names' do @@ -134,6 +132,23 @@ describe User do expect(user.errors.messages[:username].first).to match('cannot be changed if a personal project has container registry tags') end end + + context 'when the username was used by another user before' do + let(:username) { 'foo' } + let!(:other_user) { create(:user, username: username) } + + before do + other_user.username = 'bar' + other_user.save! + end + + it 'is invalid' do + user = build(:user, username: username) + + expect(user).not_to be_valid + expect(user.errors.messages[:"namespace.route.path"].first).to eq('foo has been taken before. 
Please use another one') + end + end end it 'has a DB-level NOT NULL constraint on projects_limit' do @@ -560,7 +575,7 @@ describe User do stub_config_setting(default_can_create_group: true) expect { user.update_attributes(external: false) }.to change { user.can_create_group }.to(true) - .and change { user.projects_limit }.to(current_application_settings.default_projects_limit) + .and change { user.projects_limit }.to(Gitlab::CurrentSettings.default_projects_limit) end end @@ -826,7 +841,7 @@ describe User do end end - context 'when current_application_settings.user_default_external is true' do + context 'when Gitlab::CurrentSettings.user_default_external is true' do before do stub_application_setting(user_default_external: true) end @@ -1435,28 +1450,34 @@ describe User do describe '#sort' do before do described_class.delete_all - @user = create :user, created_at: Date.today, last_sign_in_at: Date.today, name: 'Alpha' - @user1 = create :user, created_at: Date.today - 1, last_sign_in_at: Date.today - 1, name: 'Omega' - @user2 = create :user, created_at: Date.today - 2, last_sign_in_at: nil, name: 'Beta' + @user = create :user, created_at: Date.today, current_sign_in_at: Date.today, name: 'Alpha' + @user1 = create :user, created_at: Date.today - 1, current_sign_in_at: Date.today - 1, name: 'Omega' + @user2 = create :user, created_at: Date.today - 2, name: 'Beta' end context 'when sort by recent_sign_in' do - it 'sorts users by the recent sign-in time' do - expect(described_class.sort('recent_sign_in').first).to eq(@user) + let(:users) { described_class.sort('recent_sign_in') } + + it 'sorts users by recent sign-in time' do + expect(users.first).to eq(@user) + expect(users.second).to eq(@user1) end it 'pushes users who never signed in to the end' do - expect(described_class.sort('recent_sign_in').third).to eq(@user2) + expect(users.third).to eq(@user2) end end context 'when sort by oldest_sign_in' do + let(:users) { described_class.sort('oldest_sign_in') } + it 'sorts users by the oldest sign-in time' do - expect(described_class.sort('oldest_sign_in').first).to eq(@user1) + expect(users.first).to eq(@user1) + expect(users.second).to eq(@user) end it 'pushes users who never signed in to the end' do - expect(described_class.sort('oldest_sign_in').third).to eq(@user2) + expect(users.third).to eq(@user2) end end @@ -2619,7 +2640,7 @@ describe User do it 'should raise an ActiveRecord::RecordInvalid exception' do user2 = build(:user, username: 'foo') - expect { user2.save! }.to raise_error(ActiveRecord::RecordInvalid, /Path foo has been taken before/) + expect { user2.save! 
}.to raise_error(ActiveRecord::RecordInvalid, /Namespace route path foo has been taken before/) end end diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb index 9840afe6c4e..d53ba497ed1 100644 --- a/spec/models/wiki_page_spec.rb +++ b/spec/models/wiki_page_spec.rb @@ -188,14 +188,37 @@ describe WikiPage do end end - describe "#update" do + describe '#create', :skip_gitaly_mock do + context 'with valid attributes' do + it 'raises an error if a page with the same path already exists' do + create_page('New Page', 'content') + create_page('foo/bar', 'content') + expect { create_page('New Page', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError + expect { create_page('foo/bar', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError + + destroy_page('New Page') + destroy_page('bar', 'foo') + end + + it 'removes a leading / from the title' do + create_page('/New Page', 'content') + + expect(wiki.find_page('New Page')).not_to be_nil + + destroy_page('New Page') + end + end + end + + # Remove skip_gitaly_mock flag when gitaly_update_page implements moving pages + describe "#update", :skip_gitaly_mock do before do create_page("Update", "content") @page = wiki.find_page("Update") end after do - destroy_page(@page.title) + destroy_page(@page.title, @page.directory) end context "with valid attributes" do @@ -233,6 +256,95 @@ describe WikiPage do expect { @page.update(content: 'more content', last_commit_sha: 'xxx') }.to raise_error(WikiPage::PageChangedError) end end + + context 'when renaming a page' do + it 'raises an error if the page already exists' do + create_page('Existing Page', 'content') + + expect { @page.update(title: 'Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError) + expect(@page.title).to eq 'Update' + expect(@page.content).to eq 'new_content' + + destroy_page('Existing Page') + end + + it 'updates the content and renames the file' do + new_title = 'Renamed Page' + new_content = 'updated content' + + expect(@page.update(title: new_title, content: new_content)).to be_truthy + + @page = wiki.find_page(new_title) + + expect(@page).not_to be_nil + expect(@page.content).to eq new_content + end + end + + context 'when moving a page' do + it 'raises an error if the page already exists' do + create_page('foo/Existing Page', 'content') + + expect { @page.update(title: 'foo/Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError) + expect(@page.title).to eq 'Update' + expect(@page.content).to eq 'new_content' + + destroy_page('Existing Page', 'foo') + end + + it 'updates the content and moves the file' do + new_title = 'foo/Other Page' + new_content = 'new_content' + + expect(@page.update(title: new_title, content: new_content)).to be_truthy + + page = wiki.find_page(new_title) + + expect(page).not_to be_nil + expect(page.content).to eq new_content + end + + context 'in subdir' do + before do + create_page('foo/Existing Page', 'content') + @page = wiki.find_page('foo/Existing Page') + end + + it 'moves the page to the root folder if the title is preceded by /' do + expect(@page.slug).to eq 'foo/Existing-Page' + expect(@page.update(title: '/Existing Page', content: 'new_content')).to be_truthy + expect(@page.slug).to eq 'Existing-Page' + end + + it 'does nothing if it has the same title' do + original_path = @page.slug + + expect(@page.update(title: 'Existing Page', content: 'new_content')).to be_truthy + expect(@page.slug).to eq original_path + end + end + + 
context 'in root dir' do + it 'does nothing if the title is preceded by /' do + original_path = @page.slug + + expect(@page.update(title: '/Update', content: 'new_content')).to be_truthy + expect(@page.slug).to eq original_path + end + end + end + + context "with invalid attributes" do + it 'aborts update if title blank' do + expect(@page.update(title: '', content: 'new_content')).to be_falsey + expect(@page.content).to eq 'new_content' + + page = wiki.find_page('Update') + expect(page.content).to eq 'content' + + @page.title = 'Update' + end + end end describe "#destroy" do @@ -252,18 +364,34 @@ describe WikiPage do end describe "#versions" do - before do - create_page("Update", "content") - @page = wiki.find_page("Update") + shared_examples 'wiki page versions' do + let(:page) { wiki.find_page("Update") } + + before do + create_page("Update", "content") + end + + after do + destroy_page("Update") + end + + it "returns an array of all commits for the page" do + 3.times { |i| page.update(content: "content #{i}") } + + expect(page.versions.count).to eq(4) + end + + it 'returns instances of WikiPageVersion' do + expect(page.versions).to all( be_a(Gitlab::Git::WikiPageVersion) ) + end end - after do - destroy_page("Update") + context 'when Gitaly is enabled' do + it_behaves_like 'wiki page versions' end - it "returns an array of all commits for the page" do - 3.times { |i| @page.update(content: "content #{i}") } - expect(@page.versions.count).to eq(4) + context 'when Gitaly is disabled', :disable_gitaly do + it_behaves_like 'wiki page versions' end end @@ -421,8 +549,8 @@ describe WikiPage do wiki.wiki.write_page(name, :markdown, content, commit_details) end - def destroy_page(title) - page = wiki.wiki.page(title: title) + def destroy_page(title, dir = '') + page = wiki.wiki.page(title: title, dir: dir) wiki.delete_page(page, "test commit") end diff --git a/spec/policies/ci/pipeline_schedule_policy_spec.rb b/spec/policies/ci/pipeline_schedule_policy_spec.rb index 1b0e9fac355..c0c3eda4911 100644 --- a/spec/policies/ci/pipeline_schedule_policy_spec.rb +++ b/spec/policies/ci/pipeline_schedule_policy_spec.rb @@ -88,5 +88,19 @@ describe Ci::PipelineSchedulePolicy, :models do expect(policy).to be_allowed :admin_pipeline_schedule end end + + describe 'rules for non-owner of schedule' do + let(:owner) { create(:user) } + + before do + project.add_master(owner) + project.add_master(user) + pipeline_schedule.update(owner: owner) + end + + it 'includes abilities to take ownership' do + expect(policy).to be_allowed :take_ownership_pipeline_schedule + end + end end end diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb index f2593a1a75c..129344f105f 100644 --- a/spec/policies/project_policy_spec.rb +++ b/spec/policies/project_policy_spec.rb @@ -92,7 +92,7 @@ describe ProjectPolicy do it 'does not include the read_issue permission when the issue author is not a member of the private project' do project = create(:project, :private) - issue = create(:issue, project: project) + issue = create(:issue, project: project, author: create(:user)) user = issue.author expect(project.team.member?(issue.author)).to be false diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb index 3c0b4728dc2..bb0034e3237 100644 --- a/spec/requests/api/groups_spec.rb +++ b/spec/requests/api/groups_spec.rb @@ -30,6 +30,21 @@ describe API::Groups do expect(json_response) .to satisfy_one { |group| group['name'] == group1.name } end + + it 'avoids N+1 queries' do + # 
Establish baseline + get api("/groups", admin) + + control = ActiveRecord::QueryRecorder.new do + get api("/groups", admin) + end + + create(:group) + + expect do + get api("/groups", admin) + end.not_to exceed_query_limit(control) + end end context "when authenticated as user" do diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb index f8d0b63afec..6192bbd4abb 100644 --- a/spec/requests/api/jobs_spec.rb +++ b/spec/requests/api/jobs_spec.rb @@ -446,16 +446,27 @@ describe API::Jobs do end describe 'GET /projects/:id/jobs/:job_id/trace' do - let(:job) { create(:ci_build, :trace, pipeline: pipeline) } - before do get api("/projects/#{project.id}/jobs/#{job.id}/trace", api_user) end context 'authorized user' do - it 'returns specific job trace' do - expect(response).to have_gitlab_http_status(200) - expect(response.body).to eq(job.trace.raw) + context 'when trace is artifact' do + let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) } + + it 'returns specific job trace' do + expect(response).to have_gitlab_http_status(200) + expect(response.body).to eq(job.trace.raw) + end + end + + context 'when trace is file' do + let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) } + + it 'returns specific job trace' do + expect(response).to have_gitlab_http_status(200) + expect(response.body).to eq(job.trace.raw) + end end end @@ -543,11 +554,11 @@ describe API::Jobs do end context 'job is erasable' do - let(:job) { create(:ci_build, :trace, :artifacts, :success, project: project, pipeline: pipeline) } + let(:job) { create(:ci_build, :trace_artifact, :artifacts, :success, project: project, pipeline: pipeline) } it 'erases job content' do expect(response).to have_gitlab_http_status(201) - expect(job).not_to have_trace + expect(job.trace.exist?).to be_falsy expect(job.artifacts_file.exists?).to be_falsy expect(job.artifacts_metadata.exists?).to be_falsy end @@ -561,7 +572,7 @@ describe API::Jobs do end context 'job is not erasable' do - let(:job) { create(:ci_build, :trace, project: project, pipeline: pipeline) } + let(:job) { create(:ci_build, :trace_live, project: project, pipeline: pipeline) } it 'responds with forbidden' do expect(response).to have_gitlab_http_status(403) @@ -570,7 +581,7 @@ describe API::Jobs do context 'when a developer erases a build' do let(:role) { :developer } - let(:job) { create(:ci_build, :trace, :artifacts, :success, project: project, pipeline: pipeline, user: owner) } + let(:job) { create(:ci_build, :trace_artifact, :artifacts, :success, project: project, pipeline: pipeline, user: owner) } context 'when the build was created by the developer' do let(:owner) { user } @@ -593,7 +604,7 @@ describe API::Jobs do context 'artifacts did not expire' do let(:job) do - create(:ci_build, :trace, :artifacts, :success, + create(:ci_build, :trace_artifact, :artifacts, :success, project: project, pipeline: pipeline, artifacts_expire_at: Time.now + 7.days) end diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb index 97e7ffcd38e..f11cd638d96 100644 --- a/spec/requests/api/projects_spec.rb +++ b/spec/requests/api/projects_spec.rb @@ -2,8 +2,6 @@ require 'spec_helper' describe API::Projects do - include Gitlab::CurrentSettings - let(:user) { create(:user) } let(:user2) { create(:user) } let(:user3) { create(:user) } diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb index cb66d23b77c..0bd88748479 100644 --- a/spec/requests/api/runner_spec.rb +++ b/spec/requests/api/runner_spec.rb 
@@ -638,7 +638,7 @@ describe API::Runner do end describe 'PUT /api/v4/jobs/:id' do - let(:job) { create(:ci_build, :pending, :trace, pipeline: pipeline, runner_id: runner.id) } + let(:job) { create(:ci_build, :pending, :trace_live, pipeline: pipeline, runner_id: runner.id) } before do job.run! @@ -680,11 +680,17 @@ describe API::Runner do end context 'when trace is given' do - it 'updates a running build' do - update_job(trace: 'BUILD TRACE UPDATED') + it 'creates a trace artifact' do + allow_any_instance_of(BuildFinishedWorker).to receive(:perform).with(job.id) do + CreateTraceArtifactWorker.new.perform(job.id) + end + + update_job(state: 'success', trace: 'BUILD TRACE UPDATED') + job.reload expect(response).to have_gitlab_http_status(200) - expect(job.reload.trace.raw).to eq 'BUILD TRACE UPDATED' + expect(job.trace.raw).to eq 'BUILD TRACE UPDATED' + expect(job.job_artifacts_trace.open.read).to eq 'BUILD TRACE UPDATED' end end @@ -713,7 +719,7 @@ describe API::Runner do end describe 'PATCH /api/v4/jobs/:id/trace' do - let(:job) { create(:ci_build, :running, :trace, runner_id: runner.id, pipeline: pipeline) } + let(:job) { create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) } let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } } let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) } let(:update_interval) { 10.seconds.to_i } @@ -774,7 +780,7 @@ describe API::Runner do context 'when project for the build has been deleted' do let(:job) do - create(:ci_build, :running, :trace, runner_id: runner.id, pipeline: pipeline) do |job| + create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) do |job| job.project.update(pending_delete: true) end end @@ -945,7 +951,7 @@ describe API::Runner do context 'when artifacts are being stored inside of tmp path' do before do # by configuring this path we allow to pass temp file from any path - allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/') + allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/') end context 'when job has been erased' do @@ -1122,7 +1128,7 @@ describe API::Runner do # by configuring this path we allow to pass file from @tmpdir only # but all temporary files are stored in system tmp directory @tmpdir = Dir.mktmpdir - allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir) + allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(@tmpdir) end after do diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb index 2428e63e149..f406d2ffb22 100644 --- a/spec/requests/api/users_spec.rb +++ b/spec/requests/api/users_spec.rb @@ -199,6 +199,24 @@ describe API::Users do expect(json_response.size).to eq(1) expect(json_response.first['username']).to eq(user.username) end + + it 'returns the correct order when sorted by id' do + admin + user + + get api('/users', admin), { order_by: 'id', sort: 'asc' } + + expect(response).to match_response_schema('public_api/v4/user/admins') + expect(json_response.size).to eq(2) + expect(json_response.first['id']).to eq(admin.id) + expect(json_response.last['id']).to eq(user.id) + end + + it 'returns 400 when provided incorrect sort params' do + get api('/users', admin), { order_by: 'magic', sort: 'asc' } + + expect(response).to have_gitlab_http_status(400) + end end end diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb index 
3f92288fef0..79041c6a792 100644 --- a/spec/requests/api/v3/builds_spec.rb +++ b/spec/requests/api/v3/builds_spec.rb @@ -352,7 +352,7 @@ describe API::V3::Builds do end describe 'GET /projects/:id/builds/:build_id/trace' do - let(:build) { create(:ci_build, :trace, pipeline: pipeline) } + let(:build) { create(:ci_build, :trace_live, pipeline: pipeline) } before do get v3_api("/projects/#{project.id}/builds/#{build.id}/trace", api_user) @@ -447,7 +447,7 @@ describe API::V3::Builds do end context 'job is erasable' do - let(:build) { create(:ci_build, :trace, :artifacts, :success, project: project, pipeline: pipeline) } + let(:build) { create(:ci_build, :trace_artifact, :artifacts, :success, project: project, pipeline: pipeline) } it 'erases job content' do expect(response.status).to eq 201 @@ -463,7 +463,7 @@ describe API::V3::Builds do end context 'job is not erasable' do - let(:build) { create(:ci_build, :trace, project: project, pipeline: pipeline) } + let(:build) { create(:ci_build, :trace_live, project: project, pipeline: pipeline) } it 'responds with forbidden' do expect(response.status).to eq 403 @@ -478,7 +478,7 @@ describe API::V3::Builds do context 'artifacts did not expire' do let(:build) do - create(:ci_build, :trace, :artifacts, :success, + create(:ci_build, :trace_artifact, :artifacts, :success, project: project, pipeline: pipeline, artifacts_expire_at: Time.now + 7.days) end diff --git a/spec/requests/api/v3/projects_spec.rb b/spec/requests/api/v3/projects_spec.rb index 13e465e0b2d..5d99d9495f3 100644 --- a/spec/requests/api/v3/projects_spec.rb +++ b/spec/requests/api/v3/projects_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe API::V3::Projects do - include Gitlab::CurrentSettings - let(:user) { create(:user) } let(:user2) { create(:user) } let(:user3) { create(:user) } diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb index bee918a20aa..930ef49b7f3 100644 --- a/spec/requests/lfs_http_spec.rb +++ b/spec/requests/lfs_http_spec.rb @@ -958,7 +958,7 @@ describe 'Git LFS API and storage' do end it 'responds with status 200, location of lfs store and object details' do - expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload") + expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path) expect(json_response['LfsOid']).to eq(sample_oid) expect(json_response['LfsSize']).to eq(sample_size) end @@ -1075,7 +1075,7 @@ describe 'Git LFS API and storage' do end it 'with location of lfs store and object details' do - expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload") + expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path) expect(json_response['LfsOid']).to eq(sample_oid) expect(json_response['LfsSize']).to eq(sample_size) end diff --git a/spec/services/ci/create_trace_artifact_service_spec.rb b/spec/services/ci/create_trace_artifact_service_spec.rb new file mode 100644 index 00000000000..847a88920fe --- /dev/null +++ b/spec/services/ci/create_trace_artifact_service_spec.rb @@ -0,0 +1,43 @@ +require 'spec_helper' + +describe Ci::CreateTraceArtifactService do + describe '#execute' do + subject { described_class.new(nil, nil).execute(job) } + + let(:job) { create(:ci_build) } + + context 'when the job does not have trace artifact' do + context 'when the job has a trace file' do + before do + allow_any_instance_of(Gitlab::Ci::Trace) + .to receive(:default_path) { expand_fixture_path('trace/sample_trace') } + + 
allow_any_instance_of(JobArtifactUploader).to receive(:move_to_cache) { false } + allow_any_instance_of(JobArtifactUploader).to receive(:move_to_store) { false } + end + + it 'creates trace artifact' do + expect { subject }.to change { Ci::JobArtifact.count }.by(1) + + expect(job.job_artifacts_trace.read_attribute(:file)).to eq('sample_trace') + end + + context 'when the job has already had trace artifact' do + before do + create(:ci_job_artifact, :trace, job: job) + end + + it 'does not create trace artifact' do + expect { subject }.not_to change { Ci::JobArtifact.count } + end + end + end + + context 'when the job does not have a trace file' do + it 'does not create trace artifact' do + expect { subject }.not_to change { Ci::JobArtifact.count } + end + end + end + end +end diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb index a06397a0782..2c2f48e323d 100644 --- a/spec/services/ci/retry_build_service_spec.rb +++ b/spec/services/ci/retry_build_service_spec.rb @@ -17,7 +17,8 @@ describe Ci::RetryBuildService do %i[id status user token coverage trace runner artifacts_expire_at artifacts_file artifacts_metadata artifacts_size created_at updated_at started_at finished_at queued_at erased_by - erased_at auto_canceled_by job_artifacts job_artifacts_archive job_artifacts_metadata].freeze + erased_at auto_canceled_by job_artifacts job_artifacts_archive + job_artifacts_metadata job_artifacts_trace].freeze IGNORE_ACCESSORS = %i[type lock_version target_url base_tags trace_sections @@ -36,7 +37,7 @@ describe Ci::RetryBuildService do let(:build) do create(:ci_build, :failed, :artifacts, :expired, :erased, :queued, :coverage, :tags, :allowed_to_fail, :on_tag, - :triggered, :trace, :teardown_environment, + :triggered, :trace_artifact, :teardown_environment, description: 'my-job', stage: 'test', pipeline: pipeline, auto_canceled_by: create(:ci_empty_pipeline, project: project)) do |build| ## diff --git a/spec/services/files/create_service_spec.rb b/spec/services/files/create_service_spec.rb new file mode 100644 index 00000000000..030263b1502 --- /dev/null +++ b/spec/services/files/create_service_spec.rb @@ -0,0 +1,78 @@ +require "spec_helper" + +describe Files::CreateService do + let(:project) { create(:project, :repository) } + let(:repository) { project.repository } + let(:user) { create(:user) } + let(:file_content) { 'Test file content' } + let(:branch_name) { project.default_branch } + let(:start_branch) { branch_name } + + let(:commit_params) do + { + file_path: file_path, + commit_message: "Update File", + file_content: file_content, + file_content_encoding: "text", + start_project: project, + start_branch: start_branch, + branch_name: branch_name + } + end + + subject { described_class.new(project, user, commit_params) } + + before do + project.add_master(user) + end + + describe "#execute" do + context 'when file matches LFS filter' do + let(:file_path) { 'test_file.lfs' } + let(:branch_name) { 'lfs' } + + context 'with LFS disabled' do + it 'skips gitattributes check' do + expect(repository).not_to receive(:attributes_at) + + subject.execute + end + + it "doesn't create LFS pointers" do + subject.execute + + blob = repository.blob_at('lfs', file_path) + + expect(blob.data).not_to start_with('version https://git-lfs.github.com/spec/v1') + expect(blob.data).to eq(file_content) + end + end + + context 'with LFS enabled' do + before do + allow(project).to receive(:lfs_enabled?).and_return(true) + end + + it 'creates an LFS pointer' do + 
subject.execute + + blob = repository.blob_at('lfs', file_path) + + expect(blob.data).to start_with('version https://git-lfs.github.com/spec/v1') + end + + it "creates an LfsObject with the file's content" do + subject.execute + + expect(LfsObject.last.file.read).to eq file_content + end + + it 'links the LfsObject to the project' do + expect do + subject.execute + end.to change { project.lfs_objects.count }.by(1) + end + end + end + end +end diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb new file mode 100644 index 00000000000..bcc01b087f3 --- /dev/null +++ b/spec/services/groups/transfer_service_spec.rb @@ -0,0 +1,414 @@ +require 'rails_helper' + +describe Groups::TransferService, :postgresql do + let(:user) { create(:user) } + let(:new_parent_group) { create(:group, :public) } + let!(:group_member) { create(:group_member, :owner, group: group, user: user) } + let(:transfer_service) { described_class.new(group, user) } + + shared_examples 'ensuring allowed transfer for a group' do + context 'with a database other than PostgreSQL' do + before do + allow(Group).to receive(:supports_nested_groups?).and_return(false) + end + + it 'should return false' do + expect(transfer_service.execute(new_parent_group)).to be_falsy + end + + it 'should add an error on group' do + transfer_service.execute(new_parent_group) + expect(transfer_service.error).to eq('Transfer failed: Database is not supported.') + end + end + + context "when there's an exception on GitLab Shell directories" do + let(:new_parent_group) { create(:group, :public) } + + before do + allow_any_instance_of(described_class).to receive(:update_group_attributes).and_raise(Gitlab::UpdatePathError, 'namespace directory cannot be moved') + create(:group_member, :owner, group: new_parent_group, user: user) + end + + it 'should return false' do + expect(transfer_service.execute(new_parent_group)).to be_falsy + end + + it 'should add an error on group' do + transfer_service.execute(new_parent_group) + expect(transfer_service.error).to eq('Transfer failed: namespace directory cannot be moved') + end + end + end + + describe '#execute' do + context 'when transforming a group into a root group' do + let!(:group) { create(:group, :public, :nested) } + + it_behaves_like 'ensuring allowed transfer for a group' + + context 'when the group is already a root group' do + let(:group) { create(:group, :public) } + + it 'should add an error on group' do + transfer_service.execute(nil) + expect(transfer_service.error).to eq('Transfer failed: Group is already a root group.') + end + end + + context 'when the user does not have the right policies' do + let!(:group_member) { create(:group_member, :guest, group: group, user: user) } + + it "should return false" do + expect(transfer_service.execute(nil)).to be_falsy + end + + it "should add an error on group" do + transfer_service.execute(new_parent_group) + expect(transfer_service.error).to eq("Transfer failed: You don't have enough permissions.") + end + end + + context 'when there is a group with the same path' do + let!(:group) { create(:group, :public, :nested, path: 'not-unique') } + + before do + create(:group, path: 'not-unique') + end + + it 'should return false' do + expect(transfer_service.execute(nil)).to be_falsy + end + + it 'should add an error on group' do + transfer_service.execute(nil) + expect(transfer_service.error).to eq('Transfer failed: The parent group already has a subgroup with the same path.') + end + end + + context 'when the 
group is a subgroup and the transfer is valid' do + let!(:subgroup1) { create(:group, :private, parent: group) } + let!(:subgroup2) { create(:group, :internal, parent: group) } + let!(:project1) { create(:project, :repository, :private, namespace: group) } + + before do + transfer_service.execute(nil) + group.reload + end + + it 'should update group attributes' do + expect(group.parent).to be_nil + end + + it 'should update group children path' do + group.children.each do |subgroup| + expect(subgroup.full_path).to eq("#{group.path}/#{subgroup.path}") + end + end + + it 'should update group projects path' do + group.projects.each do |project| + expect(project.full_path).to eq("#{group.path}/#{project.path}") + end + end + end + end + + context 'when transferring a subgroup into another group' do + let(:group) { create(:group, :public, :nested) } + + it_behaves_like 'ensuring allowed transfer for a group' + + context 'when the new parent group is the same as the previous parent group' do + let(:group) { create(:group, :public, :nested, parent: new_parent_group) } + + it 'should return false' do + expect(transfer_service.execute(new_parent_group)).to be_falsy + end + + it 'should add an error on group' do + transfer_service.execute(new_parent_group) + expect(transfer_service.error).to eq('Transfer failed: Group is already associated to the parent group.') + end + end + + context 'when the user does not have the right policies' do + let!(:group_member) { create(:group_member, :guest, group: group, user: user) } + + it "should return false" do + expect(transfer_service.execute(new_parent_group)).to be_falsy + end + + it "should add an error on group" do + transfer_service.execute(new_parent_group) + expect(transfer_service.error).to eq("Transfer failed: You don't have enough permissions.") + end + end + + context 'when the parent has a group with the same path' do + before do + create(:group_member, :owner, group: new_parent_group, user: user) + group.update_attribute(:path, "not-unique") + create(:group, path: "not-unique", parent: new_parent_group) + end + + it 'should return false' do + expect(transfer_service.execute(new_parent_group)).to be_falsy + end + + it 'should add an error on group' do + transfer_service.execute(new_parent_group) + expect(transfer_service.error).to eq('Transfer failed: The parent group already has a subgroup with the same path.') + end + end + + context 'when the parent group has a project with the same path' do + let!(:group) { create(:group, :public, :nested, path: 'foo') } + + before do + create(:group_member, :owner, group: new_parent_group, user: user) + create(:project, path: 'foo', namespace: new_parent_group) + group.update_attribute(:path, 'foo') + end + + it 'should return false' do + expect(transfer_service.execute(new_parent_group)).to be_falsy + end + + it 'should add an error on group' do + transfer_service.execute(new_parent_group) + expect(transfer_service.error).to eq('Transfer failed: Validation failed: Route path has already been taken, Route is invalid') + end + end + + context 'when the group is allowed to be transferred' do + before do + create(:group_member, :owner, group: new_parent_group, user: user) + transfer_service.execute(new_parent_group) + end + + context 'when the group has a lower visibility than the parent group' do + let(:new_parent_group) { create(:group, :public) } + let(:group) { create(:group, :private, :nested) } + + it 'should not update the visibility for the group' do + group.reload + expect(group.private?).to be_truthy + 
expect(group.visibility_level).not_to eq(new_parent_group.visibility_level) + end + end + + context 'when the group has a higher visibility than the parent group' do + let(:new_parent_group) { create(:group, :private) } + let(:group) { create(:group, :public, :nested) } + + it 'should update visibility level based on the parent group' do + group.reload + expect(group.private?).to be_truthy + expect(group.visibility_level).to eq(new_parent_group.visibility_level) + end + end + + it 'should update visibility for the group based on the parent group' do + expect(group.visibility_level).to eq(new_parent_group.visibility_level) + end + + it 'should update parent group to the new parent ' do + expect(group.parent).to eq(new_parent_group) + end + + it 'should return the group as children of the new parent' do + expect(new_parent_group.children.count).to eq(1) + expect(new_parent_group.children.first).to eq(group) + end + + it 'should create a permanent redirect for the group' do + expect(group.redirect_routes.permanent.count).to eq(1) + end + end + + context 'when transferring a group with group descendants' do + let!(:subgroup1) { create(:group, :private, parent: group) } + let!(:subgroup2) { create(:group, :internal, parent: group) } + + before do + create(:group_member, :owner, group: new_parent_group, user: user) + transfer_service.execute(new_parent_group) + end + + it 'should update subgroups path' do + new_parent_path = new_parent_group.path + group.children.each do |subgroup| + expect(subgroup.full_path).to eq("#{new_parent_path}/#{group.path}/#{subgroup.path}") + end + end + + it 'should create permanent redirects for the subgroups' do + expect(group.redirect_routes.permanent.count).to eq(1) + expect(subgroup1.redirect_routes.permanent.count).to eq(1) + expect(subgroup2.redirect_routes.permanent.count).to eq(1) + end + + context 'when the new parent has a higher visibility than the children' do + it 'should not update the children visibility' do + expect(subgroup1.private?).to be_truthy + expect(subgroup2.internal?).to be_truthy + end + end + + context 'when the new parent has a lower visibility than the children' do + let!(:subgroup1) { create(:group, :public, parent: group) } + let!(:subgroup2) { create(:group, :public, parent: group) } + let(:new_parent_group) { create(:group, :private) } + + it 'should update children visibility to match the new parent' do + group.children.each do |subgroup| + expect(subgroup.private?).to be_truthy + end + end + end + end + + context 'when transferring a group with project descendants' do + let!(:project1) { create(:project, :repository, :private, namespace: group) } + let!(:project2) { create(:project, :repository, :internal, namespace: group) } + + before do + TestEnv.clean_test_path + create(:group_member, :owner, group: new_parent_group, user: user) + transfer_service.execute(new_parent_group) + end + + it 'should update projects path' do + new_parent_path = new_parent_group.path + group.projects.each do |project| + expect(project.full_path).to eq("#{new_parent_path}/#{group.path}/#{project.name}") + end + end + + it 'should create permanent redirects for the projects' do + expect(group.redirect_routes.permanent.count).to eq(1) + expect(project1.redirect_routes.permanent.count).to eq(1) + expect(project2.redirect_routes.permanent.count).to eq(1) + end + + context 'when the new parent has a higher visibility than the projects' do + it 'should not update projects visibility' do + expect(project1.private?).to be_truthy + expect(project2.internal?).to 
be_truthy + end + + context 'when the new parent has a lower visibility than the projects' do + let!(:project1) { create(:project, :repository, :public, namespace: group) } + let!(:project2) { create(:project, :repository, :public, namespace: group) } + let(:new_parent_group) { create(:group, :private) } + + it 'should update projects visibility to match the new parent' do + group.projects.each do |project| + expect(project.private?).to be_truthy + end + end + end + end + + context 'when transferring a group with subgroups & projects descendants' do + let!(:project1) { create(:project, :repository, :private, namespace: group) } + let!(:project2) { create(:project, :repository, :internal, namespace: group) } + let!(:subgroup1) { create(:group, :private, parent: group) } + let!(:subgroup2) { create(:group, :internal, parent: group) } + + before do + TestEnv.clean_test_path + create(:group_member, :owner, group: new_parent_group, user: user) + transfer_service.execute(new_parent_group) + end + + it 'should update subgroups path' do + new_parent_path = new_parent_group.path + group.children.each do |subgroup| + expect(subgroup.full_path).to eq("#{new_parent_path}/#{group.path}/#{subgroup.path}") + end + end + + it 'should update projects path' do + new_parent_path = new_parent_group.path + group.projects.each do |project| + expect(project.full_path).to eq("#{new_parent_path}/#{group.path}/#{project.name}") + end + end + + it 'should create permanent redirect for the subgroups and projects' do + expect(group.redirect_routes.permanent.count).to eq(1) + expect(subgroup1.redirect_routes.permanent.count).to eq(1) + expect(subgroup2.redirect_routes.permanent.count).to eq(1) + expect(project1.redirect_routes.permanent.count).to eq(1) + expect(project2.redirect_routes.permanent.count).to eq(1) + end + end + + context 'when transferring a group with nested groups and projects' do + let!(:group) { create(:group, :public) } + let!(:project1) { create(:project, :repository, :private, namespace: group) } + let!(:subgroup1) { create(:group, :private, parent: group) } + let!(:nested_subgroup) { create(:group, :private, parent: subgroup1) } + let!(:nested_project) { create(:project, :repository, :private, namespace: subgroup1) } + + before do + TestEnv.clean_test_path + create(:group_member, :owner, group: new_parent_group, user: user) + transfer_service.execute(new_parent_group) + end + + it 'should update subgroups path' do + new_base_path = "#{new_parent_group.path}/#{group.path}" + group.children.each do |child| + expect(child.full_path).to eq("#{new_base_path}/#{child.path}") + end + + new_base_path = "#{new_parent_group.path}/#{group.path}/#{subgroup1.path}" + subgroup1.children.each do |child| + expect(child.full_path).to eq("#{new_base_path}/#{child.path}") + end + end + + it 'should update projects path' do + new_parent_path = "#{new_parent_group.path}/#{group.path}" + subgroup1.projects.each do |project| + project_full_path = "#{new_parent_path}/#{project.namespace.path}/#{project.name}" + expect(project.full_path).to eq(project_full_path) + end + end + + it 'should create permanent redirect for the subgroups and projects' do + expect(group.redirect_routes.permanent.count).to eq(1) + expect(project1.redirect_routes.permanent.count).to eq(1) + expect(subgroup1.redirect_routes.permanent.count).to eq(1) + expect(nested_subgroup.redirect_routes.permanent.count).to eq(1) + expect(nested_project.redirect_routes.permanent.count).to eq(1) + end + end + + context 'when updating the 
group goes wrong' do + let!(:subgroup1) { create(:group, :public, parent: group) } + let!(:subgroup2) { create(:group, :public, parent: group) } + let(:new_parent_group) { create(:group, :private) } + let!(:project1) { create(:project, :repository, :public, namespace: group) } + + before do + allow(group).to receive(:save!).and_raise(ActiveRecord::RecordInvalid.new(group)) + TestEnv.clean_test_path + create(:group_member, :owner, group: new_parent_group, user: user) + transfer_service.execute(new_parent_group) + end + + it 'should restore group and projects visibility' do + subgroup1.reload + project1.reload + expect(subgroup1.public?).to be_truthy + expect(project1.public?).to be_truthy + end + end + end + end +end diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb index 8897a64a138..47c1ebbeb81 100644 --- a/spec/services/issues/close_service_spec.rb +++ b/spec/services/issues/close_service_spec.rb @@ -4,7 +4,7 @@ describe Issues::CloseService do let(:user) { create(:user) } let(:user2) { create(:user) } let(:guest) { create(:user) } - let(:issue) { create(:issue, assignees: [user2]) } + let(:issue) { create(:issue, assignees: [user2], author: create(:user)) } let(:project) { issue.project } let!(:todo) { create(:todo, :assigned, user: user, project: project, target: issue, author: user2) } diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb index 388c9d63c7b..322c91065e7 100644 --- a/spec/services/issues/move_service_spec.rb +++ b/spec/services/issues/move_service_spec.rb @@ -6,7 +6,7 @@ describe Issues::MoveService do let(:title) { 'Some issue' } let(:description) { 'Some issue description' } let(:old_project) { create(:project) } - let(:new_project) { create(:project) } + let(:new_project) { create(:project, group: create(:group)) } let(:milestone1) { create(:milestone, project_id: old_project.id, title: 'v9.0') } let(:old_issue) do @@ -250,7 +250,7 @@ describe Issues::MoveService do context 'issue description with uploads' do let(:uploader) { build(:file_uploader, project: old_project) } - let(:description) { "Text and #{uploader.to_markdown}" } + let(:description) { "Text and #{uploader.markdown_link}" } include_context 'issue move executed' diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb index 1cb6f2e097f..41237dd7160 100644 --- a/spec/services/issues/update_service_spec.rb +++ b/spec/services/issues/update_service_spec.rb @@ -13,7 +13,8 @@ describe Issues::UpdateService, :mailer do create(:issue, title: 'Old title', description: "for #{user2.to_reference}", assignee_ids: [user3.id], - project: project) + project: project, + author: create(:user)) end before do diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb index cb4c3e72aa0..e56d335a7d6 100644 --- a/spec/services/merge_requests/build_service_spec.rb +++ b/spec/services/merge_requests/build_service_spec.rb @@ -172,11 +172,32 @@ describe MergeRequests::BuildService do end end - context 'branch starts with external issue IID followed by a hyphen' do + context 'branch starts with numeric characters followed by a hyphen with no issue tracker' do let(:source_branch) { '12345-fix-issue' } before do + allow(project).to receive(:external_issue_tracker).and_return(false) + allow(project).to receive(:issues_enabled?).and_return(false) + end + + it 'uses the title of the commit as the title of the merge request' do + 
expect(merge_request.title).to eq(commit_1.safe_message.split("\n").first) + end + + it 'uses the description of the commit as the description of the merge request' do + commit_description = commit_1.safe_message.split(/\n+/, 2).last + + expect(merge_request.description).to eq("#{commit_description}") + end + end + + context 'branch starts with JIRA-formatted external issue IID followed by a hyphen' do + let(:source_branch) { 'EXMPL-12345-fix-issue' } + + before do allow(project).to receive(:external_issue_tracker).and_return(true) + allow(project).to receive(:issues_enabled?).and_return(false) + allow(project).to receive(:external_issue_reference_pattern).and_return(IssueTrackerService.reference_pattern) end it 'uses the title of the commit as the title of the merge request' do @@ -186,7 +207,7 @@ describe MergeRequests::BuildService do it 'uses the description of the commit as the description of the merge request and appends the closes text' do commit_description = commit_1.safe_message.split(/\n+/, 2).last - expect(merge_request.description).to eq("#{commit_description}\n\nCloses #12345") + expect(merge_request.description).to eq("#{commit_description}\n\nCloses EXMPL-12345") end end end @@ -252,19 +273,46 @@ describe MergeRequests::BuildService do end end - context 'branch starts with external issue IID followed by a hyphen' do + context 'branch starts with numeric characters followed by a hyphen with no issue tracker' do let(:source_branch) { '12345-fix-issue' } before do - allow(project).to receive(:external_issue_tracker).and_return(true) + allow(project).to receive(:external_issue_tracker).and_return(false) + allow(project).to receive(:issues_enabled?).and_return(false) end it 'sets the title to the humanized branch title' do expect(merge_request.title).to eq('12345 fix issue') end + end + + context 'branch starts with JIRA-formatted external issue IID' do + let(:source_branch) { 'EXMPL-12345' } + + before do + allow(project).to receive(:external_issue_tracker).and_return(true) + allow(project).to receive(:issues_enabled?).and_return(false) + allow(project).to receive(:external_issue_reference_pattern).and_return(IssueTrackerService.reference_pattern) + end + + it 'sets the title to the humanized branch title' do + expect(merge_request.title).to eq('Resolve EXMPL-12345') + end it 'appends the closes text' do - expect(merge_request.description).to eq('Closes #12345') + expect(merge_request.description).to eq('Closes EXMPL-12345') + end + + context 'followed by hyphenated text' do + let(:source_branch) { 'EXMPL-12345-fix-issue' } + + it 'sets the title to the humanized branch title' do + expect(merge_request.title).to eq('Resolve EXMPL-12345 "Fix issue"') + end + + it 'appends the closes text' do + expect(merge_request.description).to eq('Closes EXMPL-12345') + end end end end diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb index 4d12de3ecce..216e0cd4266 100644 --- a/spec/services/merge_requests/close_service_spec.rb +++ b/spec/services/merge_requests/close_service_spec.rb @@ -4,7 +4,7 @@ describe MergeRequests::CloseService do let(:user) { create(:user) } let(:user2) { create(:user) } let(:guest) { create(:user) } - let(:merge_request) { create(:merge_request, assignee: user2) } + let(:merge_request) { create(:merge_request, assignee: user2, author: create(:user)) } let(:project) { merge_request.project } let!(:todo) { create(:todo, :assigned, user: user, project: project, target: merge_request, author: user2) } 
diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb index aa90feeef89..5ef6365fcc9 100644 --- a/spec/services/merge_requests/ff_merge_service_spec.rb +++ b/spec/services/merge_requests/ff_merge_service_spec.rb @@ -7,7 +7,8 @@ describe MergeRequests::FfMergeService do create(:merge_request, source_branch: 'flatten-dir', target_branch: 'improve/awesome', - assignee: user2) + assignee: user2, + author: create(:user)) end let(:project) { merge_request.project } diff --git a/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb b/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb index f17db70faf6..240aa638f79 100644 --- a/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb +++ b/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb @@ -43,7 +43,7 @@ describe MergeRequests::MergeWhenPipelineSucceedsService do it 'creates a system note' do note = merge_request.notes.last - expect(note.note).to match /enabled an automatic merge when the pipeline for (\w+\/\w+@)?\h{8}/ + expect(note.note).to match %r{enabled an automatic merge when the pipeline for (\w+/\w+@)?\h{8}} end end diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb index fc1c3d67203..757c31ab692 100644 --- a/spec/services/merge_requests/rebase_service_spec.rb +++ b/spec/services/merge_requests/rebase_service_spec.rb @@ -108,7 +108,7 @@ describe MergeRequests::RebaseService do context 'git commands', :disable_gitaly do it 'sets GL_REPOSITORY env variable when calling git commands' do expect(repository).to receive(:popen).exactly(3) - .with(anything, anything, hash_including('GL_REPOSITORY')) + .with(anything, anything, hash_including('GL_REPOSITORY'), anything) .and_return(['', 0]) service.execute(merge_request) diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb index 7c3374c6113..903aa0a5078 100644 --- a/spec/services/merge_requests/refresh_service_spec.rb +++ b/spec/services/merge_requests/refresh_service_spec.rb @@ -74,6 +74,14 @@ describe MergeRequests::RefreshService do expect(@fork_build_failed_todo).to be_done end + it 'reloads source branch MRs memoization' do + refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') + + expect { refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') }.to change { + refresh_service.instance_variable_get("@source_merge_requests").first.merge_request_diff + } + end + context 'when source branch ref does not exists' do before do DeleteBranchService.new(@project, @user).execute(@merge_request.source_branch) @@ -392,37 +400,21 @@ describe MergeRequests::RefreshService do end it 'references the commit that caused the Work in Progress status' do - refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') - allow(refresh_service).to receive(:find_new_commits) - refresh_service.instance_variable_set("@commits", [ - double( - id: 'aaaaaaa', - sha: '38008cb17ce1466d8fec2dfa6f6ab8dcfe5cf49e', - short_id: 'aaaaaaa', - title: 'Fix issue', - work_in_progress?: false - ), - double( - id: 'bbbbbbb', - sha: '498214de67004b1da3d820901307bed2a68a8ef6', - short_id: 'bbbbbbb', - title: 'fixup! Fix issue', - work_in_progress?: true, - to_reference: 'bbbbbbb' - ), - double( - id: 'ccccccc', - sha: '1b12f15a11fc6e62177bef08f47bc7b5ce50b141', - short_id: 'ccccccc', - title: 'fixup! 
Fix issue', - work_in_progress?: true, - to_reference: 'ccccccc' - ) - ]) - refresh_service.execute(@oldrev, @newrev, 'refs/heads/wip') - reload_mrs - expect(@merge_request.notes.last.note).to eq( - "marked as a **Work In Progress** from bbbbbbb" + wip_merge_request = create(:merge_request, + source_project: @project, + source_branch: 'wip', + target_branch: 'master', + target_project: @project) + + commits = wip_merge_request.commits + oldrev = commits.last.id + newrev = commits.first.id + wip_commit = wip_merge_request.commits.find(&:work_in_progress?) + + refresh_service.execute(oldrev, newrev, 'refs/heads/wip') + + expect(wip_merge_request.reload.notes.last.note).to eq( + "marked as a **Work In Progress** from #{wip_commit.id}" ) end diff --git a/spec/services/merge_requests/reopen_service_spec.rb b/spec/services/merge_requests/reopen_service_spec.rb index a44d63e5f9f..9ee37c51d95 100644 --- a/spec/services/merge_requests/reopen_service_spec.rb +++ b/spec/services/merge_requests/reopen_service_spec.rb @@ -4,7 +4,7 @@ describe MergeRequests::ReopenService do let(:user) { create(:user) } let(:user2) { create(:user) } let(:guest) { create(:user) } - let(:merge_request) { create(:merge_request, :closed, assignee: user2) } + let(:merge_request) { create(:merge_request, :closed, assignee: user2, author: create(:user)) } let(:project) { merge_request.project } before do diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb index 2238da2d14d..c31259239ee 100644 --- a/spec/services/merge_requests/update_service_spec.rb +++ b/spec/services/merge_requests/update_service_spec.rb @@ -12,7 +12,8 @@ describe MergeRequests::UpdateService, :mailer do create(:merge_request, :simple, title: 'Old title', description: "FYI #{user2.to_reference}", assignee_id: user3.id, - source_project: project) + source_project: project, + author: create(:user)) end before do diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb index 5c59455e3e1..35eb84e5e88 100644 --- a/spec/services/notification_service_spec.rb +++ b/spec/services/notification_service_spec.rb @@ -458,7 +458,7 @@ describe NotificationService, :mailer do context "merge request diff note" do let(:project) { create(:project, :repository) } let(:user) { create(:user) } - let(:merge_request) { create(:merge_request, source_project: project, assignee: user) } + let(:merge_request) { create(:merge_request, source_project: project, assignee: user, author: create(:user)) } let(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request) } before do @@ -469,11 +469,13 @@ describe NotificationService, :mailer do describe '#new_note' do it "records sent notifications" do - # Ensure create SentNotification by noteable = merge_request 6 times, not noteable = note + # 3 SentNotification are sent: the MR assignee and author, and the @u_watcher expect(SentNotification).to receive(:record_note).with(note, any_args).exactly(3).times.and_call_original notification.new_note(note) + expect(SentNotification.last(3).map(&:recipient).map(&:id)) + .to contain_exactly(merge_request.assignee.id, merge_request.author.id, @u_watcher.id) expect(SentNotification.last.in_reply_to_discussion_id).to eq(note.discussion_id) end end diff --git a/spec/services/projects/gitlab_projects_import_service_spec.rb b/spec/services/projects/gitlab_projects_import_service_spec.rb index bb0e274c93e..6b8f9619bc4 100644 --- 
a/spec/services/projects/gitlab_projects_import_service_spec.rb +++ b/spec/services/projects/gitlab_projects_import_service_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe Projects::GitlabProjectsImportService do - set(:namespace) { build(:namespace) } + set(:namespace) { create(:namespace) } let(:file) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') } subject { described_class.new(namespace.owner, { namespace_id: namespace.id, path: path, file: file }) } diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb index 50e59954f73..15699574b3a 100644 --- a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb +++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb @@ -6,7 +6,7 @@ describe Projects::HashedStorage::MigrateAttachmentsService do let(:legacy_storage) { Storage::LegacyProject.new(project) } let(:hashed_storage) { Storage::HashedProject.new(project) } - let!(:upload) { Upload.find_by(path: file_uploader.relative_path) } + let!(:upload) { Upload.find_by(path: file_uploader.upload_path) } let(:file_uploader) { build(:file_uploader, project: project) } let(:old_path) { File.join(base_path(legacy_storage), upload.path) } let(:new_path) { File.join(base_path(hashed_storage), upload.path) } @@ -58,6 +58,6 @@ describe Projects::HashedStorage::MigrateAttachmentsService do end def base_path(storage) - FileUploader.dynamic_path_builder(storage.disk_path) + File.join(FileUploader.root, storage.disk_path) end end diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb index ab3a257f36f..5b5edc1aa0d 100644 --- a/spec/services/system_note_service_spec.rb +++ b/spec/services/system_note_service_spec.rb @@ -54,10 +54,11 @@ describe SystemNoteService do expect(note_lines[0]).to eq "added #{new_commits.size} commits" end - it 'adds a message line for each commit' do - new_commits.each_with_index do |commit, i| - # Skip the header - expect(HTMLEntities.new.decode(note_lines[i + 1])).to eq "* #{commit.short_id} - #{commit.title}" + it 'adds a message for each commit' do + decoded_note_content = HTMLEntities.new.decode(subject.note) + + new_commits.each do |commit| + expect(decoded_note_content).to include("<li>#{commit.short_id} - #{commit.title}</li>") end end end @@ -69,7 +70,7 @@ describe SystemNoteService do let(:old_commits) { [noteable.commits.last] } it 'includes the existing commit' do - expect(summary_line).to eq "* #{old_commits.first.short_id} - 1 commit from branch `feature`" + expect(summary_line).to start_with("<ul><li>#{old_commits.first.short_id} - 1 commit from branch <code>feature</code>") end end @@ -79,22 +80,16 @@ describe SystemNoteService do context 'with oldrev' do let(:oldrev) { noteable.commits[2].id } - it 'includes a commit range' do - expect(summary_line).to start_with "* #{Commit.truncate_sha(oldrev)}...#{old_commits.last.short_id}" - end - - it 'includes a commit count' do - expect(summary_line).to end_with " - 26 commits from branch `feature`" + it 'includes a commit range and count' do + expect(summary_line) + .to start_with("<ul><li>#{Commit.truncate_sha(oldrev)}...#{old_commits.last.short_id} - 26 commits from branch <code>feature</code>") end end context 'without oldrev' do - it 'includes a commit range' do - expect(summary_line).to start_with "* #{old_commits[0].short_id}..#{old_commits[-1].short_id}" - end - - it 'includes a commit 
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index ab3a257f36f..5b5edc1aa0d 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -54,10 +54,11 @@ describe SystemNoteService do
       expect(note_lines[0]).to eq "added #{new_commits.size} commits"
     end
 
-    it 'adds a message line for each commit' do
-      new_commits.each_with_index do |commit, i|
-        # Skip the header
-        expect(HTMLEntities.new.decode(note_lines[i + 1])).to eq "* #{commit.short_id} - #{commit.title}"
+    it 'adds a message for each commit' do
+      decoded_note_content = HTMLEntities.new.decode(subject.note)
+
+      new_commits.each do |commit|
+        expect(decoded_note_content).to include("<li>#{commit.short_id} - #{commit.title}</li>")
       end
     end
   end
@@ -69,7 +70,7 @@ describe SystemNoteService do
       let(:old_commits) { [noteable.commits.last] }
 
       it 'includes the existing commit' do
-        expect(summary_line).to eq "* #{old_commits.first.short_id} - 1 commit from branch `feature`"
+        expect(summary_line).to start_with("<ul><li>#{old_commits.first.short_id} - 1 commit from branch <code>feature</code>")
       end
     end
 
@@ -79,22 +80,16 @@ describe SystemNoteService do
      context 'with oldrev' do
        let(:oldrev) { noteable.commits[2].id }

-        it 'includes a commit range' do
-          expect(summary_line).to start_with "* #{Commit.truncate_sha(oldrev)}...#{old_commits.last.short_id}"
-        end
-
-        it 'includes a commit count' do
-          expect(summary_line).to end_with " - 26 commits from branch `feature`"
+        it 'includes a commit range and count' do
+          expect(summary_line)
+            .to start_with("<ul><li>#{Commit.truncate_sha(oldrev)}...#{old_commits.last.short_id} - 26 commits from branch <code>feature</code>")
        end
      end

      context 'without oldrev' do
-        it 'includes a commit range' do
-          expect(summary_line).to start_with "* #{old_commits[0].short_id}..#{old_commits[-1].short_id}"
-        end
-
-        it 'includes a commit count' do
-          expect(summary_line).to end_with " - 26 commits from branch `feature`"
+        it 'includes a commit range and count' do
+          expect(summary_line)
+            .to start_with("<ul><li>#{old_commits[0].short_id}..#{old_commits[-1].short_id} - 26 commits from branch <code>feature</code>")
        end
      end
@@ -104,7 +99,7 @@ describe SystemNoteService do
      end

      it 'includes the project namespace' do
-        expect(summary_line).to end_with "`#{noteable.target_project_namespace}:feature`"
+        expect(summary_line).to include("<code>#{noteable.target_project_namespace}:feature</code>")
      end
    end
  end
@@ -308,7 +303,7 @@ describe SystemNoteService do
    end

    it "posts the 'merge when pipeline succeeds' system note" do
-      expect(subject.note).to match(/enabled an automatic merge when the pipeline for (\w+\/\w+@)?\h{40} succeeds/)
+      expect(subject.note).to match(%r{enabled an automatic merge when the pipeline for (\w+/\w+@)?\h{40} succeeds})
    end
  end
@@ -693,9 +688,9 @@ describe SystemNoteService do
  describe '.new_commit_summary' do
    it 'escapes HTML titles' do
      commit = double(title: '<pre>This is a test</pre>', short_id: '12345678')
      escaped = '&lt;pre&gt;This is a test&lt;/pre&gt;'

-      expect(described_class.new_commit_summary([commit])).to all(match(%r[- #{escaped}]))
+      expect(described_class.new_commit_summary([commit])).to all(match(/- #{escaped}/))
    end
  end
diff --git a/spec/support/matchers/markdown_matchers.rb b/spec/support/matchers/markdown_matchers.rb
index d12b2757427..ec4ec6f4038 100644
--- a/spec/support/matchers/markdown_matchers.rb
+++ b/spec/support/matchers/markdown_matchers.rb
@@ -190,6 +190,27 @@ module MarkdownMatchers
       expect(video['src']).to end_with('/assets/videos/gitlab-demo.mp4')
     end
   end
+
+  # ColorFilter
+  matcher :parse_colors do
+    set_default_markdown_messages
+
+    match do |actual|
+      color_chips = actual.css('code > span.gfm-color_chip > span')
+
+      expect(color_chips.count).to eq(9)
+
+      [
+        '#F00', '#F00A', '#FF0000', '#FF0000AA', 'RGB(0,255,0)',
+        'RGB(0%,100%,0%)', 'RGBA(0,255,0,0.7)', 'HSL(540,70%,50%)',
+        'HSLA(540,70%,50%,0.7)'
+      ].each_with_index do |color, i|
+        parsed_color = Banzai::ColorParser.parse(color)
+        expect(color_chips[i]['style']).to match("background-color: #{parsed_color};")
+        expect(color_chips[i].parent.parent.content).to match(color)
+      end
+    end
+  end
 end
 
 # Monkeypatch the matcher DSL so that we can reduce some noisy duplication for
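For context, a matcher defined through this DSL is consumed like any other RSpec matcher. A minimal sketch of how `parse_colors` might be exercised (the feature spec and the `doc` helper are assumptions for illustration, not part of this commit):

    # Hypothetical excerpt; `doc` is assumed to hold the parsed Nokogiri
    # fragment of the rendered GitLab Flavored Markdown fixture.
    describe 'GitLab Flavored Markdown' do
      it 'renders color chips for the supported color notations' do
        expect(doc).to parse_colors
      end
    end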
diff --git a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
index 935c08221e0..7ce80c82439 100644
--- a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
@@ -2,6 +2,8 @@ shared_examples 'handle uploads' do
   let(:user) { create(:user) }
   let(:jpg) { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') }
   let(:txt) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') }
+  let(:secret) { FileUploader.generate_secret }
+  let(:uploader_class) { FileUploader }
 
   describe "POST #create" do
     context 'when a user is not authorized to upload a file' do
@@ -65,7 +67,12 @@ shared_examples 'handle uploads' do
 
   describe "GET #show" do
     let(:show_upload) do
-      get :show, params.merge(secret: "123456", filename: "image.jpg")
+      get :show, params.merge(secret: secret, filename: "rails_sample.jpg")
+    end
+
+    before do
+      expect(FileUploader).to receive(:generate_secret).and_return(secret)
+      UploadService.new(model, jpg, uploader_class).execute
     end
 
     context "when the model is public" do
@@ -75,11 +82,6 @@ shared_examples 'handle uploads' do
     context "when not signed in" do
       context "when the file exists" do
-        before do
-          allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
-          allow(jpg).to receive(:exists?).and_return(true)
-        end
-
         it "responds with status 200" do
           show_upload
 
@@ -88,6 +90,10 @@ shared_examples 'handle uploads' do
       end
 
       context "when the file doesn't exist" do
+        before do
+          allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+        end
+
         it "responds with status 404" do
           show_upload
 
@@ -102,11 +108,6 @@ shared_examples 'handle uploads' do
       end
 
       context "when the file exists" do
-        before do
-          allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
-          allow(jpg).to receive(:exists?).and_return(true)
-        end
-
         it "responds with status 200" do
           show_upload
 
@@ -115,6 +116,10 @@ shared_examples 'handle uploads' do
       end
 
       context "when the file doesn't exist" do
+        before do
+          allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+        end
+
         it "responds with status 404" do
           show_upload
 
@@ -131,11 +136,6 @@ shared_examples 'handle uploads' do
 
     context "when not signed in" do
       context "when the file exists" do
-        before do
-          allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
-          allow(jpg).to receive(:exists?).and_return(true)
-        end
-
         context "when the file is an image" do
           before do
             allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
@@ -149,6 +149,10 @@ shared_examples 'handle uploads' do
         end
 
         context "when the file is not an image" do
+          before do
+            allow_any_instance_of(FileUploader).to receive(:image?).and_return(false)
+          end
+
           it "redirects to the sign in page" do
             show_upload
 
@@ -158,6 +162,10 @@ shared_examples 'handle uploads' do
       end
 
       context "when the file doesn't exist" do
+        before do
+          allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+        end
+
         it "redirects to the sign in page" do
           show_upload
 
@@ -177,11 +185,6 @@ shared_examples 'handle uploads' do
       end
 
       context "when the file exists" do
-        before do
-          allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
-          allow(jpg).to receive(:exists?).and_return(true)
-        end
-
         it "responds with status 200" do
           show_upload
 
@@ -190,6 +193,10 @@ shared_examples 'handle uploads' do
       end
 
       context "when the file doesn't exist" do
+        before do
+          allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+        end
+
         it "responds with status 404" do
           show_upload
 
@@ -200,11 +207,6 @@ shared_examples 'handle uploads' do
 
       context "when the user doesn't have access to the model" do
         context "when the file exists" do
-          before do
-            allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
-            allow(jpg).to receive(:exists?).and_return(true)
-          end
-
           context "when the file is an image" do
             before do
               allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
@@ -218,6 +220,10 @@ shared_examples 'handle uploads' do
           end
 
           context "when the file is not an image" do
+            before do
+              allow_any_instance_of(FileUploader).to receive(:image?).and_return(false)
+            end
+
             it "responds with status 404" do
               show_upload
 
@@ -227,6 +233,10 @@ shared_examples 'handle uploads' do
         end
 
         context "when the file doesn't exist" do
+          before do
+            allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+          end
+
           it "responds with status 404" do
             show_upload
diff --git a/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb b/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb
new file mode 100644
index 00000000000..934d53e7bba
--- /dev/null
+++ b/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb
@@ -0,0 +1,48 @@
+shared_examples "matches the method pattern" do |method|
+  let(:target) { subject }
+  let(:args) { nil }
+  let(:pattern) { patterns[method] }
+
+  it do
+    return skip "No pattern provided, skipping." unless pattern
+
+    expect(target.method(method).call(*args)).to match(pattern)
+  end
+end
+
+shared_examples "builds correct paths" do |**patterns|
+  let(:patterns) { patterns }
+
+  before do
+    allow(subject).to receive(:filename).and_return('<filename>')
+  end
+
+  describe "#store_dir" do
+    it_behaves_like "matches the method pattern", :store_dir
+  end
+
+  describe "#cache_dir" do
+    it_behaves_like "matches the method pattern", :cache_dir
+  end
+
+  describe "#work_dir" do
+    it_behaves_like "matches the method pattern", :work_dir
+  end
+
+  describe "#upload_path" do
+    it_behaves_like "matches the method pattern", :upload_path
+  end
+
+  describe ".absolute_path" do
+    it_behaves_like "matches the method pattern", :absolute_path do
+      let(:target) { subject.class }
+      let(:args) { [upload] }
+    end
+  end
+
+  describe ".base_dir" do
+    it_behaves_like "matches the method pattern", :base_dir do
+      let(:target) { subject.class }
+    end
+  end
+end
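A minimal consumer of these new shared examples might look like the sketch below (`SomeUploader` and its patterns are made up for illustration; real usages follow later in this diff). The keyword arguments passed to `it_behaves_like` become the `patterns` hash that "matches the method pattern" looks up one method at a time:

    # Hypothetical spec, assuming SomeUploader is a GitlabUploader subclass.
    describe SomeUploader do
      subject { described_class.new(build_stubbed(:project)) }

      it_behaves_like 'builds correct paths',
                      store_dir: %r[uploads/-/system/some_model/],
                      cache_dir: %r[/tmp/cache]
    end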
diff --git a/spec/support/stored_repositories.rb b/spec/support/stored_repositories.rb
index f9121cce985..52e47ae2d34 100644
--- a/spec/support/stored_repositories.rb
+++ b/spec/support/stored_repositories.rb
@@ -15,9 +15,7 @@ RSpec.configure do |config|
     # Track the maximum number of failures
     first_failure = Time.parse("2017-11-14 17:52:30")
     last_failure = Time.parse("2017-11-14 18:54:37")
-    failure_count = Gitlab::CurrentSettings
-                      .current_application_settings
-                      .circuitbreaker_failure_count_threshold + 1
+    failure_count = Gitlab::CurrentSettings.circuitbreaker_failure_count_threshold + 1
     cache_key = "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}broken:#{Gitlab::Environment.hostname}"
 
     Gitlab::Git::Storage.redis.with do |redis|
diff --git a/spec/support/stub_env.rb b/spec/support/stub_env.rb
index 695152e2d4e..36b90fc68d6 100644
--- a/spec/support/stub_env.rb
+++ b/spec/support/stub_env.rb
@@ -1,7 +1,5 @@
 # Inspired by https://github.com/ljkbennett/stub_env/blob/master/lib/stub_env/helpers.rb
 module StubENV
-  include Gitlab::CurrentSettings
-
   def stub_env(key_or_hash, value = nil)
     init_stub unless env_stubbed?
diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb
index 9e5f08fbc51..c275522159c 100644
--- a/spec/support/test_env.rb
+++ b/spec/support/test_env.rb
@@ -237,7 +237,7 @@ module TestEnv
   end
 
   def artifacts_path
-    Gitlab.config.artifacts.path
+    Gitlab.config.artifacts.storage_path
   end
 
   # When no cached assets exist, manually hit the root path to create them
diff --git a/spec/support/track_untracked_uploads_helpers.rb b/spec/support/track_untracked_uploads_helpers.rb
index d05eda08201..5752078d2a0 100644
--- a/spec/support/track_untracked_uploads_helpers.rb
+++ b/spec/support/track_untracked_uploads_helpers.rb
@@ -1,6 +1,6 @@
 module TrackUntrackedUploadsHelpers
   def uploaded_file
-    fixture_path = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
+    fixture_path = Rails.root.join('spec/fixtures/rails_sample.jpg')
     fixture_file_upload(fixture_path)
   end
diff --git a/spec/support/unique_ip_check_shared_examples.rb b/spec/support/unique_ip_check_shared_examples.rb
index 3d9705c9c05..e5c8ac6a004 100644
--- a/spec/support/unique_ip_check_shared_examples.rb
+++ b/spec/support/unique_ip_check_shared_examples.rb
@@ -9,7 +9,7 @@ shared_context 'unique ips sign in limit' do
   before do
     stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
 
-    current_application_settings.update!(
+    Gitlab::CurrentSettings.update!(
       unique_ips_limit_enabled: true,
       unique_ips_limit_time_window: 10000
     )
@@ -34,7 +34,7 @@ end
 shared_examples 'user login operation with unique ip limit' do
   include_context 'unique ips sign in limit' do
     before do
-      current_application_settings.update!(unique_ips_limit_per_user: 1)
+      Gitlab::CurrentSettings.update!(unique_ips_limit_per_user: 1)
     end
 
     it 'allows user authenticating from the same ip' do
@@ -52,7 +52,7 @@ end
 shared_examples 'user login request with unique ip limit' do |success_status = 200|
   include_context 'unique ips sign in limit' do
     before do
-      current_application_settings.update!(unique_ips_limit_per_user: 1)
+      Gitlab::CurrentSettings.update!(unique_ips_limit_per_user: 1)
     end
 
     it 'allows user authenticating from the same ip' do
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index b41c3b3958a..168facd51a6 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -165,7 +165,7 @@ describe 'gitlab:app namespace rake task' do
       expect(tar_contents).to match('pages.tar.gz')
       expect(tar_contents).to match('lfs.tar.gz')
       expect(tar_contents).to match('registry.tar.gz')
-      expect(tar_contents).not_to match(/^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|pages.tar.gz|artifacts.tar.gz|registry.tar.gz)\/$/)
+      expect(tar_contents).not_to match(%r{^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|pages.tar.gz|artifacts.tar.gz|registry.tar.gz)/$})
     end
 
     it 'deletes temp directories' do
diff --git a/spec/tasks/gitlab/git_rake_spec.rb b/spec/tasks/gitlab/git_rake_spec.rb
index dacc5dc5ae7..9aebf7b0b4a 100644
--- a/spec/tasks/gitlab/git_rake_spec.rb
+++ b/spec/tasks/gitlab/git_rake_spec.rb
@@ -19,7 +19,7 @@ describe 'gitlab:git rake tasks' do
   describe 'fsck' do
     it 'outputs the integrity check for a repo' do
-      expect { run_rake_task('gitlab:git:fsck') }.to output(/Performed Checking integrity at .*@hashed\/1\/2\/test.git/).to_stdout
+      expect { run_rake_task('gitlab:git:fsck') }.to output(%r{Performed Checking integrity at .*@hashed/1/2/test.git}).to_stdout
     end
 
     it 'errors out about config.lock issues' do
diff --git a/spec/uploaders/attachment_uploader_spec.rb b/spec/uploaders/attachment_uploader_spec.rb
index 04ee6e9bfad..091ba824fc6 100644
--- a/spec/uploaders/attachment_uploader_spec.rb
+++ b/spec/uploaders/attachment_uploader_spec.rb
@@ -1,28 +1,14 @@
 require 'spec_helper'
 
 describe AttachmentUploader do
-  let(:uploader) { described_class.new(build_stubbed(:user)) }
+  let(:note) { create(:note, :with_attachment) }
+  let(:uploader) { note.attachment }
+  let(:upload) { create(:upload, :attachment_upload, model: uploader.model) }
 
-  describe "#store_dir" do
-    it "stores in the system dir" do
-      expect(uploader.store_dir).to start_with("uploads/-/system/user")
-    end
+  subject { uploader }
 
-    it "uses the old path when using object storage" do
-      expect(described_class).to receive(:file_storage?).and_return(false)
-      expect(uploader.store_dir).to start_with("uploads/user")
-    end
-  end
-
-  describe '#move_to_cache' do
-    it 'is true' do
-      expect(uploader.move_to_cache).to eq(true)
-    end
-  end
-
-  describe '#move_to_store' do
-    it 'is true' do
-      expect(uploader.move_to_store).to eq(true)
-    end
-  end
+  it_behaves_like 'builds correct paths',
+                  store_dir: %r[uploads/-/system/note/attachment/],
+                  upload_path: %r[uploads/-/system/note/attachment/],
+                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/]
 end
diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb
index 1dc574699d8..bf9028c9260 100644
--- a/spec/uploaders/avatar_uploader_spec.rb
+++ b/spec/uploaders/avatar_uploader_spec.rb
@@ -1,18 +1,16 @@
 require 'spec_helper'
 
 describe AvatarUploader do
-  let(:uploader) { described_class.new(build_stubbed(:user)) }
+  let(:model) { create(:user, :with_avatar) }
+  let(:uploader) { described_class.new(model, :avatar) }
+  let(:upload) { create(:upload, model: model) }
 
-  describe "#store_dir" do
-    it "stores in the system dir" do
-      expect(uploader.store_dir).to start_with("uploads/-/system/user")
-    end
+  subject { uploader }
 
-    it "uses the old path when using object storage" do
-      expect(described_class).to receive(:file_storage?).and_return(false)
-      expect(uploader.store_dir).to start_with("uploads/user")
-    end
-  end
+  it_behaves_like 'builds correct paths',
+                  store_dir: %r[uploads/-/system/user/avatar/],
+                  upload_path: %r[uploads/-/system/user/avatar/],
+                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/]
 
   describe '#move_to_cache' do
     it 'is false' do
"same  " ) end @@ -50,8 +50,8 @@ describe FileMover do expect(snippet.reload.description) .to eq( - "test "\ - " same " + "test  "\ + "same  " ) end diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb index fd195d6f9b8..6a92e7fae51 100644 --- a/spec/uploaders/file_uploader_spec.rb +++ b/spec/uploaders/file_uploader_spec.rb @@ -1,118 +1,107 @@ require 'spec_helper' describe FileUploader do - let(:uploader) { described_class.new(build_stubbed(:project)) } + let(:group) { create(:group, name: 'awesome') } + let(:project) { create(:project, namespace: group, name: 'project') } + let(:uploader) { described_class.new(project) } + let(:upload) { double(model: project, path: 'secret/foo.jpg') } - context 'legacy storage' do - let(:project) { build_stubbed(:project) } - - describe '.absolute_path' do - it 'returns the correct absolute path by building it dynamically' do - upload = double(model: project, path: 'secret/foo.jpg') + subject { uploader } - dynamic_segment = project.full_path + shared_examples 'builds correct legacy storage paths' do + include_examples 'builds correct paths', + store_dir: %r{awesome/project/\h+}, + absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg} + end - expect(described_class.absolute_path(upload)) - .to end_with("#{dynamic_segment}/secret/foo.jpg") + shared_examples 'uses hashed storage' do + context 'when rolled out attachments' do + before do + allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed') end + + let(:project) { build_stubbed(:project, :hashed, namespace: group, name: 'project') } + + it_behaves_like 'builds correct paths', + store_dir: %r{ca/fe/fe/ed/\h+}, + absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg} end - describe "#store_dir" do - it "stores in the namespace path" do - uploader = described_class.new(project) + context 'when only repositories are rolled out' do + let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) } - expect(uploader.store_dir).to include(project.full_path) - expect(uploader.store_dir).not_to include("system") - end + it_behaves_like 'builds correct legacy storage paths' end end - context 'hashed storage' do - context 'when rolled out attachments' do - let(:project) { build_stubbed(:project, :hashed) } + context 'legacy storage' do + it_behaves_like 'builds correct legacy storage paths' + include_examples 'uses hashed storage' + end - describe '.absolute_path' do - it 'returns the correct absolute path by building it dynamically' do - upload = double(model: project, path: 'secret/foo.jpg') + describe 'initialize' do + let(:uploader) { described_class.new(double, secret: 'secret') } - dynamic_segment = project.disk_path + it 'accepts a secret parameter' do + expect(described_class).not_to receive(:generate_secret) + expect(uploader.secret).to eq('secret') + end + end - expect(described_class.absolute_path(upload)) - .to end_with("#{dynamic_segment}/secret/foo.jpg") - end + describe 'callbacks' do + describe '#prune_store_dir after :remove' do + before do + uploader.store!(fixture_file_upload('spec/fixtures/doc_sample.txt')) end - describe "#store_dir" do - it "stores in the namespace path" do - uploader = described_class.new(project) - - expect(uploader.store_dir).to include(project.disk_path) - expect(uploader.store_dir).not_to include("system") - end + def store_dir + File.expand_path(uploader.store_dir, uploader.root) end - end - - context 'when only 
diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb
index fd195d6f9b8..6a92e7fae51 100644
--- a/spec/uploaders/file_uploader_spec.rb
+++ b/spec/uploaders/file_uploader_spec.rb
@@ -1,118 +1,107 @@
 require 'spec_helper'
 
 describe FileUploader do
-  let(:uploader) { described_class.new(build_stubbed(:project)) }
+  let(:group) { create(:group, name: 'awesome') }
+  let(:project) { create(:project, namespace: group, name: 'project') }
+  let(:uploader) { described_class.new(project) }
+  let(:upload) { double(model: project, path: 'secret/foo.jpg') }
 
-  context 'legacy storage' do
-    let(:project) { build_stubbed(:project) }
-
-    describe '.absolute_path' do
-      it 'returns the correct absolute path by building it dynamically' do
-        upload = double(model: project, path: 'secret/foo.jpg')
+  subject { uploader }
 
-        dynamic_segment = project.full_path
+  shared_examples 'builds correct legacy storage paths' do
+    include_examples 'builds correct paths',
+                     store_dir: %r{awesome/project/\h+},
+                     absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg}
+  end
 
-        expect(described_class.absolute_path(upload))
-          .to end_with("#{dynamic_segment}/secret/foo.jpg")
+  shared_examples 'uses hashed storage' do
+    context 'when rolled out attachments' do
+      before do
+        allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed')
       end
+
+      let(:project) { build_stubbed(:project, :hashed, namespace: group, name: 'project') }
+
+      it_behaves_like 'builds correct paths',
+                      store_dir: %r{ca/fe/fe/ed/\h+},
+                      absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg}
     end
 
-    describe "#store_dir" do
-      it "stores in the namespace path" do
-        uploader = described_class.new(project)
+    context 'when only repositories are rolled out' do
+      let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
 
-        expect(uploader.store_dir).to include(project.full_path)
-        expect(uploader.store_dir).not_to include("system")
-      end
+      it_behaves_like 'builds correct legacy storage paths'
     end
   end
 
-  context 'hashed storage' do
-    context 'when rolled out attachments' do
-      let(:project) { build_stubbed(:project, :hashed) }
+  context 'legacy storage' do
+    it_behaves_like 'builds correct legacy storage paths'
+    include_examples 'uses hashed storage'
+  end
 
-      describe '.absolute_path' do
-        it 'returns the correct absolute path by building it dynamically' do
-          upload = double(model: project, path: 'secret/foo.jpg')
+  describe 'initialize' do
+    let(:uploader) { described_class.new(double, secret: 'secret') }
 
-          dynamic_segment = project.disk_path
+    it 'accepts a secret parameter' do
+      expect(described_class).not_to receive(:generate_secret)
+      expect(uploader.secret).to eq('secret')
+    end
+  end
 
-          expect(described_class.absolute_path(upload))
-            .to end_with("#{dynamic_segment}/secret/foo.jpg")
-        end
+  describe 'callbacks' do
+    describe '#prune_store_dir after :remove' do
+      before do
+        uploader.store!(fixture_file_upload('spec/fixtures/doc_sample.txt'))
       end
 
-      describe "#store_dir" do
-        it "stores in the namespace path" do
-          uploader = described_class.new(project)
-
-          expect(uploader.store_dir).to include(project.disk_path)
-          expect(uploader.store_dir).not_to include("system")
-        end
+      def store_dir
+        File.expand_path(uploader.store_dir, uploader.root)
       end
-    end
-
-    context 'when only repositories are rolled out' do
-      let(:project) { build_stubbed(:project, storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
-
-      describe '.absolute_path' do
-        it 'returns the correct absolute path by building it dynamically' do
-          upload = double(model: project, path: 'secret/foo.jpg')
 
-          dynamic_segment = project.full_path
+      it 'is called' do
+        expect(uploader).to receive(:prune_store_dir).once
 
-          expect(described_class.absolute_path(upload))
-            .to end_with("#{dynamic_segment}/secret/foo.jpg")
-        end
+        uploader.remove!
       end
 
-      describe "#store_dir" do
-        it "stores in the namespace path" do
-          uploader = described_class.new(project)
-
-          expect(uploader.store_dir).to include(project.full_path)
-          expect(uploader.store_dir).not_to include("system")
-        end
+      it 'prune the store directory' do
+        expect { uploader.remove! }
+          .to change { File.exist?(store_dir) }.from(true).to(false)
       end
     end
   end
 
-  describe 'initialize' do
+  describe '#secret' do
     it 'generates a secret if none is provided' do
-      expect(SecureRandom).to receive(:hex).and_return('secret')
-
-      uploader = described_class.new(double)
-
-      expect(uploader.secret).to eq 'secret'
+      expect(described_class).to receive(:generate_secret).and_return('secret')
+      expect(uploader.secret).to eq('secret')
     end
+  end
 
-    it 'accepts a secret parameter' do
-      expect(SecureRandom).not_to receive(:hex)
+  describe '#upload=' do
+    let(:secret) { SecureRandom.hex }
+    let(:upload) { create(:upload, :issuable_upload, secret: secret, filename: 'file.txt') }
 
-      uploader = described_class.new(double, 'secret')
+    it 'handles nil' do
+      expect(uploader).not_to receive(:apply_context!)
 
-      expect(uploader.secret).to eq 'secret'
+      uploader.upload = nil
     end
-  end
 
-  describe '#move_to_cache' do
-    it 'is true' do
-      expect(uploader.move_to_cache).to eq(true)
-    end
-  end
+    it 'extract the uploader context from it' do
+      expect(uploader).to receive(:apply_context!).with(a_hash_including(secret: secret, identifier: 'file.txt'))
 
-  describe '#move_to_store' do
-    it 'is true' do
-      expect(uploader.move_to_store).to eq(true)
+      uploader.upload = upload
     end
-  end
 
-  describe '#relative_path' do
-    it 'removes the leading dynamic path segment' do
-      fixture = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
-      uploader.store!(fixture_file_upload(fixture))
+    context 'uploader_context is empty' do
+      it 'fallbacks to regex based extraction' do
+        expect(upload).to receive(:uploader_context).and_return({})
 
-      expect(uploader.relative_path).to match(/\A\h{32}\/rails_sample.jpg\z/)
+        uploader.upload = upload
+        expect(uploader.secret).to eq(secret)
+        expect(uploader.instance_variable_get(:@identifier)).to eq('file.txt')
+      end
     end
   end
 end
diff --git a/spec/uploaders/gitlab_uploader_spec.rb b/spec/uploaders/gitlab_uploader_spec.rb
index a144b39f74f..60e35dcf235 100644
--- a/spec/uploaders/gitlab_uploader_spec.rb
+++ b/spec/uploaders/gitlab_uploader_spec.rb
@@ -4,7 +4,7 @@ require 'carrierwave/storage/fog'
 describe GitlabUploader do
   let(:uploader_class) { Class.new(described_class) }
 
-  subject { uploader_class.new }
+  subject { uploader_class.new(double) }
 
   describe '#file_storage?' do
     context 'when file storage is used' do
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index 98a4373e9d0..5612ec7e661 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -3,34 +3,41 @@ require 'spec_helper'
 describe JobArtifactUploader do
   let(:job_artifact) { create(:ci_job_artifact) }
   let(:uploader) { described_class.new(job_artifact, :file) }
-  let(:local_path) { Gitlab.config.artifacts.path }
 
-  describe '#store_dir' do
-    subject { uploader.store_dir }
-
-    let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.job_id}/#{job_artifact.id}" }
-
-    context 'when using local storage' do
-      it { is_expected.to start_with(local_path) }
-      it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
-      it { is_expected.to end_with(path) }
+  subject { uploader }
+
+  it_behaves_like "builds correct paths",
+                  store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z],
+                  cache_dir: %r[artifacts/tmp/cache],
+                  work_dir: %r[artifacts/tmp/work]
+
+  describe '#open' do
+    subject { uploader.open }
+
+    context 'when trace is stored in File storage' do
+      context 'when file exists' do
+        let(:file) do
+          fixture_file_upload(
+            Rails.root.join('spec/fixtures/trace/sample_trace'), 'text/plain')
+        end
+
+        before do
+          uploader.store!(file)
+        end
+
+        it 'returns io stream' do
+          is_expected.to be_a(IO)
+        end
+      end
+
+      context 'when file does not exist' do
+        it 'returns nil' do
+          is_expected.to be_nil
+        end
+      end
     end
   end
 
-  describe '#cache_dir' do
-    subject { uploader.cache_dir }
-
-    it { is_expected.to start_with(local_path) }
-    it { is_expected.to end_with('/tmp/cache') }
-  end
-
-  describe '#work_dir' do
-    subject { uploader.work_dir }
-
-    it { is_expected.to start_with(local_path) }
-    it { is_expected.to end_with('/tmp/work') }
-  end
-
   context 'file is stored in valid local_path' do
     let(:file) do
       fixture_file_upload(
@@ -43,7 +50,7 @@ describe JobArtifactUploader do
 
     subject { uploader.file.path }
 
-    it { is_expected.to start_with(local_path) }
+    it { is_expected.to start_with("#{uploader.root}/#{uploader.class.base_dir}") }
     it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
     it { is_expected.to include("/#{job_artifact.job_id}/#{job_artifact.id}/") }
     it { is_expected.to end_with("ci_build_artifacts.zip") }
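Per the spec above, the new `JobArtifactUploader#open` returns an IO stream when a file is stored locally and nil otherwise, so callers can stream an artifact without loading it whole. A rough usage sketch (the setup mirrors the spec; `process` is a placeholder, not a real helper):

    artifact = create(:ci_job_artifact)            # factory used in the spec above
    uploader = JobArtifactUploader.new(artifact, :file)

    if (stream = uploader.open)
      stream.each_line { |line| process(line) }    # `process` stands in for real work
    end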
diff --git a/spec/uploaders/legacy_artifact_uploader_spec.rb b/spec/uploaders/legacy_artifact_uploader_spec.rb
index efeffb78772..54c6a8b869b 100644
--- a/spec/uploaders/legacy_artifact_uploader_spec.rb
+++ b/spec/uploaders/legacy_artifact_uploader_spec.rb
@@ -3,49 +3,22 @@ require 'rails_helper'
 describe LegacyArtifactUploader do
   let(:job) { create(:ci_build) }
   let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
-  let(:local_path) { Gitlab.config.artifacts.path }
+  let(:local_path) { described_class.root }
 
-  describe '.local_store_path' do
-    subject { described_class.local_store_path }
+  subject { uploader }
 
-    it "delegate to artifacts path" do
-      expect(Gitlab.config.artifacts).to receive(:path)
-
-      subject
-    end
-  end
-
-  describe '.artifacts_upload_path' do
-    subject { described_class.artifacts_upload_path }
+  # TODO: move to Workhorse::UploadPath
+  describe '.workhorse_upload_path' do
+    subject { described_class.workhorse_upload_path }
 
     it { is_expected.to start_with(local_path) }
-    it { is_expected.to end_with('tmp/uploads/') }
-  end
-
-  describe '#store_dir' do
-    subject { uploader.store_dir }
-
-    let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }
-
-    context 'when using local storage' do
-      it { is_expected.to start_with(local_path) }
-      it { is_expected.to end_with(path) }
-    end
+    it { is_expected.to end_with('tmp/uploads') }
   end
 
-  describe '#cache_dir' do
-    subject { uploader.cache_dir }
-
-    it { is_expected.to start_with(local_path) }
-    it { is_expected.to end_with('/tmp/cache') }
-  end
-
-  describe '#work_dir' do
-    subject { uploader.work_dir }
-
-    it { is_expected.to start_with(local_path) }
-    it { is_expected.to end_with('/tmp/work') }
-  end
+  it_behaves_like "builds correct paths",
+                  store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z],
+                  cache_dir: %r[artifacts/tmp/cache],
+                  work_dir: %r[artifacts/tmp/work]
 
   describe '#filename' do
     # we need to use uploader, as this makes to use mounter
@@ -69,7 +42,7 @@ describe LegacyArtifactUploader do
 
     subject { uploader.file.path }
 
-    it { is_expected.to start_with(local_path) }
+    it { is_expected.to start_with("#{uploader.root}") }
     it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") }
     it { is_expected.to include("/#{job.project_id}/") }
     it { is_expected.to end_with("ci_build_artifacts.zip") }
diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb
index 7088bc23334..6ebc885daa8 100644
--- a/spec/uploaders/lfs_object_uploader_spec.rb
+++ b/spec/uploaders/lfs_object_uploader_spec.rb
@@ -2,39 +2,13 @@ require 'spec_helper'
 
 describe LfsObjectUploader do
   let(:lfs_object) { create(:lfs_object, :with_file) }
-  let(:uploader) { described_class.new(lfs_object) }
+  let(:uploader) { described_class.new(lfs_object, :file) }
   let(:path) { Gitlab.config.lfs.storage_path }
 
-  describe '#move_to_cache' do
-    it 'is true' do
-      expect(uploader.move_to_cache).to eq(true)
-    end
-  end
+  subject { uploader }
 
-  describe '#move_to_store' do
-    it 'is true' do
-      expect(uploader.move_to_store).to eq(true)
-    end
-  end
-
-  describe '#store_dir' do
-    subject { uploader.store_dir }
-
-    it { is_expected.to start_with(path) }
-    it { is_expected.to end_with("#{lfs_object.oid[0, 2]}/#{lfs_object.oid[2, 2]}") }
-  end
-
-  describe '#cache_dir' do
-    subject { uploader.cache_dir }
-
-    it { is_expected.to start_with(path) }
-    it { is_expected.to end_with('/tmp/cache') }
-  end
-
-  describe '#work_dir' do
-    subject { uploader.work_dir }
-
-    it { is_expected.to start_with(path) }
-    it { is_expected.to end_with('/tmp/work') }
-  end
+  it_behaves_like "builds correct paths",
+                  store_dir: %r[\h{2}/\h{2}],
+                  cache_dir: %r[/lfs-objects/tmp/cache],
+                  work_dir: %r[/lfs-objects/tmp/work]
 end
diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb
index c6c4500c179..24a2fc0f72e 100644
--- a/spec/uploaders/namespace_file_uploader_spec.rb
+++ b/spec/uploaders/namespace_file_uploader_spec.rb
@@ -1,21 +1,16 @@
 require 'spec_helper'
 
+IDENTIFIER = %r{\h+/\S+}
+
 describe NamespaceFileUploader do
   let(:group) { build_stubbed(:group) }
   let(:uploader) { described_class.new(group) }
+  let(:upload) { create(:upload, :namespace_upload, model: group) }
 
-  describe "#store_dir" do
-    it "stores in the namespace id directory" do
-      expect(uploader.store_dir).to include(group.id.to_s)
-    end
-  end
-
-  describe ".absolute_path" do
-    it "stores in thecorrect directory" do
-      upload_record = create(:upload, :namespace_upload, model: group)
+  subject { uploader }
 
-      expect(described_class.absolute_path(upload_record))
-        .to include("-/system/namespace/#{group.id}")
-    end
-  end
+  it_behaves_like 'builds correct paths',
+                  store_dir: %r[uploads/-/system/namespace/\d+],
+                  upload_path: IDENTIFIER,
+                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}]
 end
diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb
index cbafa9f478d..ed1fba6edda 100644
--- a/spec/uploaders/personal_file_uploader_spec.rb
+++ b/spec/uploaders/personal_file_uploader_spec.rb
@@ -1,25 +1,27 @@
 require 'spec_helper'
 
+IDENTIFIER = %r{\h+/\S+}
+
 describe PersonalFileUploader do
-  let(:uploader) { described_class.new(build_stubbed(:project)) }
-  let(:snippet) { create(:personal_snippet) }
+  let(:model) { create(:personal_snippet) }
+  let(:uploader) { described_class.new(model) }
+  let(:upload) { create(:upload, :personal_snippet_upload) }
 
-  describe '.absolute_path' do
-    it 'returns the correct absolute path by building it dynamically' do
-      upload = double(model: snippet, path: 'secret/foo.jpg')
+  subject { uploader }
 
-      dynamic_segment = "personal_snippet/#{snippet.id}"
-
-      expect(described_class.absolute_path(upload)).to end_with("/-/system/#{dynamic_segment}/secret/foo.jpg")
-    end
-  end
+  it_behaves_like 'builds correct paths',
+                  store_dir: %r[uploads/-/system/personal_snippet/\d+],
+                  upload_path: IDENTIFIER,
+                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}]
 
   describe '#to_h' do
-    it 'returns the hass' do
-      uploader = described_class.new(snippet, 'secret')
+    before do
+      subject.instance_variable_set(:@secret, 'secret')
+    end
 
+    it 'is correct' do
       allow(uploader).to receive(:file).and_return(double(extension: 'txt', filename: 'file_name'))
-      expected_url = "/uploads/-/system/personal_snippet/#{snippet.id}/secret/file_name"
+      expected_url = "/uploads/-/system/personal_snippet/#{model.id}/secret/file_name"
 
       expect(uploader.to_h).to eq(
         alt: 'file_name',
diff --git a/spec/uploaders/records_uploads_spec.rb b/spec/uploaders/records_uploads_spec.rb
index 7ef7fb7d758..9a3e5d83e01 100644
--- a/spec/uploaders/records_uploads_spec.rb
+++ b/spec/uploaders/records_uploads_spec.rb
@@ -3,16 +3,16 @@ require 'rails_helper'
 describe RecordsUploads do
   let!(:uploader) do
     class RecordsUploadsExampleUploader < GitlabUploader
-      include RecordsUploads
+      include RecordsUploads::Concern
 
       storage :file
 
-      def model
-        FactoryBot.build_stubbed(:user)
+      def dynamic_segment
+        'co/fe/ee'
       end
     end
 
-    RecordsUploadsExampleUploader.new
+    RecordsUploadsExampleUploader.new(build_stubbed(:user))
   end
 
   def upload_fixture(filename)
@@ -20,48 +20,55 @@ describe RecordsUploads do
   end
 
   describe 'callbacks' do
-    it 'calls `record_upload` after `store`' do
+    let(:upload) { create(:upload) }
+
+    before do
+      uploader.upload = upload
+    end
+
+    it '#record_upload after `store`' do
       expect(uploader).to receive(:record_upload).once
 
       uploader.store!(upload_fixture('doc_sample.txt'))
     end
 
-    it 'calls `destroy_upload` after `remove`' do
-      expect(uploader).to receive(:destroy_upload).once
-
+    it '#destroy_upload after `remove`' do
       uploader.store!(upload_fixture('doc_sample.txt'))
 
+      expect(uploader).to receive(:destroy_upload).once
       uploader.remove!
     end
   end
 
   describe '#record_upload callback' do
-    it 'returns early when not using file storage' do
-      allow(uploader).to receive(:file_storage?).and_return(false)
-      expect(Upload).not_to receive(:record)
-
-      uploader.store!(upload_fixture('rails_sample.jpg'))
+    it 'creates an Upload record after store' do
+      expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.to change { Upload.count }.by(1)
     end
 
-    it "returns early when the file doesn't exist" do
-      allow(uploader).to receive(:file).and_return(double(exists?: false))
-      expect(Upload).not_to receive(:record)
-
+    it 'creates a new record and assigns size, path, model, and uploader' do
       uploader.store!(upload_fixture('rails_sample.jpg'))
+
+      upload = uploader.upload
+      aggregate_failures do
+        expect(upload).to be_persisted
+        expect(upload.size).to eq uploader.file.size
+        expect(upload.path).to eq uploader.upload_path
+        expect(upload.model_id).to eq uploader.model.id
+        expect(upload.model_type).to eq uploader.model.class.to_s
+        expect(upload.uploader).to eq uploader.class.to_s
+      end
     end
 
-    it 'creates an Upload record after store' do
-      expect(Upload).to receive(:record)
-        .with(uploader)
+    it "does not create an Upload record when the file doesn't exist" do
+      allow(uploader).to receive(:file).and_return(double(exists?: false))
 
-      uploader.store!(upload_fixture('rails_sample.jpg'))
+      expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
     end
 
     it 'does not create an Upload record if model is missing' do
-      expect_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
-      expect(Upload).not_to receive(:record).with(uploader)
+      allow_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
 
-      uploader.store!(upload_fixture('rails_sample.jpg'))
+      expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
     end
 
     it 'it destroys Upload records at the same path before recording' do
@@ -72,29 +79,15 @@ describe RecordsUploads do
         uploader: uploader.class.to_s
       )
 
+      uploader.upload = existing
       uploader.store!(upload_fixture('rails_sample.jpg'))
 
       expect { existing.reload }.to raise_error(ActiveRecord::RecordNotFound)
-      expect(Upload.count).to eq 1
+      expect(Upload.count).to eq(1)
     end
   end
 
   describe '#destroy_upload callback' do
-    it 'returns early when not using file storage' do
-      uploader.store!(upload_fixture('rails_sample.jpg'))
-
-      allow(uploader).to receive(:file_storage?).and_return(false)
-      expect(Upload).not_to receive(:remove_path)
-
-      uploader.remove!
-    end
-
-    it 'returns early when file is nil' do
-      expect(Upload).not_to receive(:remove_path)
-
-      uploader.remove!
-    end
-
     it 'it destroys Upload records at the same path after removal' do
       uploader.store!(upload_fixture('rails_sample.jpg'))
diff --git a/spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb b/spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb
new file mode 100644
index 00000000000..6e7d8db99c4
--- /dev/null
+++ b/spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb
@@ -0,0 +1,47 @@
+require 'spec_helper'
+
+describe 'projects/pipeline_schedules/_pipeline_schedule' do
+  let(:owner) { create(:user) }
+  let(:master) { create(:user) }
+  let(:project) { create(:project) }
+  let(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project) }
+
+  before do
+    assign(:project, project)
+
+    allow(view).to receive(:current_user).and_return(user)
+    allow(view).to receive(:pipeline_schedule).and_return(pipeline_schedule)
+
+    allow(view).to receive(:can?).and_return(true)
+  end
+
+  context 'taking ownership of schedule' do
+    context 'when non-owner is signed in' do
+      let(:user) { master }
+
+      before do
+        allow(view).to receive(:can?).with(master, :take_ownership_pipeline_schedule, pipeline_schedule).and_return(true)
+      end
+
+      it 'non-owner can take ownership of pipeline' do
+        render
+
+        expect(rendered).to have_link('Take ownership')
+      end
+    end
+
+    context 'when owner is signed in' do
+      let(:user) { owner }
+
+      before do
+        allow(view).to receive(:can?).with(owner, :take_ownership_pipeline_schedule, pipeline_schedule).and_return(false)
+      end
+
+      it 'owner cannot take ownership of pipeline' do
+        render
+
+        expect(rendered).not_to have_link('Take ownership')
+      end
+    end
+  end
+end
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
index 1a7ffd5cdbf..c7ff8cf3b92 100644
--- a/spec/workers/build_finished_worker_spec.rb
+++ b/spec/workers/build_finished_worker_spec.rb
@@ -6,17 +6,15 @@ describe BuildFinishedWorker do
       let!(:build) { create(:ci_build) }
 
       it 'calculates coverage and calls hooks' do
-        expect(BuildCoverageWorker)
+        expect(BuildTraceSectionsWorker)
           .to receive(:new).ordered.and_call_original
-        expect(BuildHooksWorker)
+        expect(BuildCoverageWorker)
           .to receive(:new).ordered.and_call_original
-        expect(BuildTraceSectionsWorker)
-          .to receive(:perform_async)
 
-        expect_any_instance_of(BuildCoverageWorker)
-          .to receive(:perform)
-        expect_any_instance_of(BuildHooksWorker)
-          .to receive(:perform)
+        expect_any_instance_of(BuildTraceSectionsWorker).to receive(:perform)
+        expect_any_instance_of(BuildCoverageWorker).to receive(:perform)
+        expect(BuildHooksWorker).to receive(:perform_async)
+        expect(CreateTraceArtifactWorker).to receive(:perform_async)
 
         described_class.new.perform(build.id)
       end
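The reordered expectations above imply that BuildFinishedWorker now runs the trace-section and coverage steps inline (so traces are read efficiently) and only then enqueues the async workers. A rough sketch of the implied #perform, reconstructed from the spec's ordering and not taken from the actual source:

    # Hypothetical reconstruction; method body inferred from the expectations above.
    def perform(build_id)
      Ci::Build.find_by(id: build_id).try do |build|
        BuildTraceSectionsWorker.new.perform(build.id)    # synchronous, ordered first
        BuildCoverageWorker.new.perform(build.id)         # synchronous, ordered second
        BuildHooksWorker.perform_async(build.id)          # enqueued
        CreateTraceArtifactWorker.perform_async(build.id) # enqueued
      end
    end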
diff --git a/spec/workers/create_trace_artifact_worker_spec.rb b/spec/workers/create_trace_artifact_worker_spec.rb
new file mode 100644
index 00000000000..854abd9cca7
--- /dev/null
+++ b/spec/workers/create_trace_artifact_worker_spec.rb
@@ -0,0 +1,29 @@
+require 'spec_helper'
+
+describe CreateTraceArtifactWorker do
+  describe '#perform' do
+    subject { described_class.new.perform(job&.id) }
+
+    context 'when job is found' do
+      let(:job) { create(:ci_build) }
+
+      it 'executes service' do
+        expect_any_instance_of(Ci::CreateTraceArtifactService)
+          .to receive(:execute).with(job)
+
+        subject
+      end
+    end
+
+    context 'when job is not found' do
+      let(:job) { nil }
+
+      it 'does not execute service' do
+        expect_any_instance_of(Ci::CreateTraceArtifactService)
+          .not_to receive(:execute)
+
+        subject
+      end
+    end
+  end
+end
diff --git a/spec/workers/upload_checksum_worker_spec.rb b/spec/workers/upload_checksum_worker_spec.rb
index 911360da66c..9e50ce15871 100644
--- a/spec/workers/upload_checksum_worker_spec.rb
+++ b/spec/workers/upload_checksum_worker_spec.rb
@@ -2,18 +2,31 @@ require 'rails_helper'
 
 describe UploadChecksumWorker do
   describe '#perform' do
-    it 'rescues ActiveRecord::RecordNotFound' do
-      expect { described_class.new.perform(999_999) }.not_to raise_error
+    subject { described_class.new }
+
+    context 'without a valid record' do
+      it 'rescues ActiveRecord::RecordNotFound' do
+        expect { subject.perform(999_999) }.not_to raise_error
+      end
     end
 
-    it 'calls calculate_checksum_without_delay and save!' do
-      upload = spy
-      expect(Upload).to receive(:find).with(999_999).and_return(upload)
+    context 'with a valid record' do
+      let(:upload) { create(:user, :with_avatar).avatar.upload }
+
+      before do
+        expect(Upload).to receive(:find).and_return(upload)
+        allow(upload).to receive(:foreground_checksumable?).and_return(false)
+      end
 
-      described_class.new.perform(999_999)
+      it 'calls calculate_checksum!' do
+        expect(upload).to receive(:calculate_checksum!)
+        subject.perform(upload.id)
+      end
 
-      expect(upload).to have_received(:calculate_checksum)
-      expect(upload).to have_received(:save!)
+      it 'calls save!' do
+        expect(upload).to receive(:save!)
+        subject.perform(upload.id)
+      end
     end
   end
 end
