Diffstat (limited to 'spec')
1119 files changed, 47733 insertions, 11170 deletions
diff --git a/spec/bin/changelog_spec.rb b/spec/bin/changelog_spec.rb index 7f4298db59f..91aff0db7cc 100644 --- a/spec/bin/changelog_spec.rb +++ b/spec/bin/changelog_spec.rb @@ -46,9 +46,7 @@ describe 'bin/changelog' do it 'parses -h' do expect do - $stdout = StringIO.new - - described_class.parse(%w[foo -h bar]) + expect { described_class.parse(%w[foo -h bar]) }.to output.to_stdout end.to raise_error(SystemExit) end diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb index 5dd8f66343f..2565622f8df 100644 --- a/spec/controllers/admin/application_settings_controller_spec.rb +++ b/spec/controllers/admin/application_settings_controller_spec.rb @@ -3,12 +3,49 @@ require 'spec_helper' describe Admin::ApplicationSettingsController do include StubENV + let(:group) { create(:group) } + let(:project) { create(:project, namespace: group) } let(:admin) { create(:admin) } + let(:user) { create(:user)} before do stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') end + describe 'GET #usage_data with no access' do + before do + sign_in(user) + end + + it 'returns 404' do + get :usage_data, format: :html + + expect(response.status).to eq(404) + end + end + + describe 'GET #usage_data' do + before do + sign_in(admin) + end + + it 'returns HTML data' do + get :usage_data, format: :html + + expect(response.body).to start_with('<span') + expect(response.status).to eq(200) + end + + it 'returns JSON data' do + get :usage_data, format: :json + + body = JSON.parse(response.body) + expect(body["version"]).to eq(Gitlab::VERSION) + expect(body).to include('counts') + expect(response.status).to eq(200) + end + end + describe 'PUT #update' do before do sign_in(admin) diff --git a/spec/controllers/admin/groups_controller_spec.rb b/spec/controllers/admin/groups_controller_spec.rb index 84db26a958a..c29b2fe8946 100644 --- a/spec/controllers/admin/groups_controller_spec.rb +++ b/spec/controllers/admin/groups_controller_spec.rb @@ -22,4 +22,28 @@ describe Admin::GroupsController do expect(response).to redirect_to(admin_groups_path) end end + + describe 'PUT #members_update' do + let(:group_user) { create(:user) } + + it 'adds user to members' do + put :members_update, id: group, + user_ids: group_user.id, + access_level: Gitlab::Access::GUEST + + expect(response).to set_flash.to 'Users were successfully added.' + expect(response).to redirect_to(admin_group_path(group)) + expect(group.users).to include group_user + end + + it 'adds no user to members' do + put :members_update, id: group, + user_ids: '', + access_level: Gitlab::Access::GUEST + + expect(response).to set_flash.to 'No users specified.' 
+ expect(response).to redirect_to(admin_group_path(group)) + expect(group.users).not_to include group_user + end + end end diff --git a/spec/controllers/admin/hooks_controller_spec.rb b/spec/controllers/admin/hooks_controller_spec.rb new file mode 100644 index 00000000000..1d1070e90f4 --- /dev/null +++ b/spec/controllers/admin/hooks_controller_spec.rb @@ -0,0 +1,28 @@ +require 'spec_helper' + +describe Admin::HooksController do + let(:admin) { create(:admin) } + + before do + sign_in(admin) + end + + describe 'POST #create' do + it 'sets all parameters' do + hook_params = { + enable_ssl_verification: true, + push_events: true, + tag_push_events: true, + repository_update_events: true, + token: "TEST TOKEN", + url: "http://example.com" + } + + post :create, hook: hook_params + + expect(response).to have_http_status(302) + expect(SystemHook.all.size).to eq(1) + expect(SystemHook.first).to have_attributes(hook_params) + end + end +end diff --git a/spec/controllers/admin/services_controller_spec.rb b/spec/controllers/admin/services_controller_spec.rb index e5cdd52307e..c94616d8508 100644 --- a/spec/controllers/admin/services_controller_spec.rb +++ b/spec/controllers/admin/services_controller_spec.rb @@ -23,4 +23,36 @@ describe Admin::ServicesController do end end end + + describe "#update" do + let(:project) { create(:empty_project) } + let!(:service) do + RedmineService.create( + project: project, + active: false, + template: true, + properties: { + project_url: 'http://abc', + issues_url: 'http://abc', + new_issue_url: 'http://abc' + } + ) + end + + it 'calls the propagation worker when service is active' do + expect(PropagateServiceTemplateWorker).to receive(:perform_async).with(service.id) + + put :update, id: service.id, service: { active: true } + + expect(response).to have_http_status(302) + end + + it 'does not call the propagation worker when service is not active' do + expect(PropagateServiceTemplateWorker).not_to receive(:perform_async) + + put :update, id: service.id, service: { properties: {} } + + expect(response).to have_http_status(302) + end + end end diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb index 81cbccd5436..d40aae04fc3 100644 --- a/spec/controllers/application_controller_spec.rb +++ b/spec/controllers/application_controller_spec.rb @@ -4,7 +4,7 @@ describe ApplicationController do let(:user) { create(:user) } describe '#check_password_expiration' do - let(:controller) { ApplicationController.new } + let(:controller) { described_class.new } it 'redirects if the user is over their password expiry' do user.password_expires_at = Time.new(2002) @@ -34,7 +34,7 @@ describe ApplicationController do describe "#authenticate_user_from_token!" 
do describe "authenticating a user from a private token" do - controller(ApplicationController) do + controller(described_class) do def index render text: "authenticated" end @@ -66,7 +66,7 @@ describe ApplicationController do end describe "authenticating a user from a personal access token" do - controller(ApplicationController) do + controller(described_class) do def index render text: 'authenticated' end @@ -100,19 +100,215 @@ describe ApplicationController do end describe '#route_not_found' do - let(:controller) { ApplicationController.new } - it 'renders 404 if authenticated' do allow(controller).to receive(:current_user).and_return(user) expect(controller).to receive(:not_found) controller.send(:route_not_found) end - it 'does redirect to login page if not authenticated' do + it 'does redirect to login page via authenticate_user! if not authenticated' do allow(controller).to receive(:current_user).and_return(nil) - expect(controller).to receive(:redirect_to) - expect(controller).to receive(:new_user_session_path) + expect(controller).to receive(:authenticate_user!) controller.send(:route_not_found) end end + + context 'two-factor authentication' do + let(:controller) { described_class.new } + + describe '#check_two_factor_requirement' do + subject { controller.send :check_two_factor_requirement } + + it 'does not redirect if 2FA is not required' do + allow(controller).to receive(:two_factor_authentication_required?).and_return(false) + expect(controller).not_to receive(:redirect_to) + + subject + end + + it 'does not redirect if user is not logged in' do + allow(controller).to receive(:two_factor_authentication_required?).and_return(true) + allow(controller).to receive(:current_user).and_return(nil) + expect(controller).not_to receive(:redirect_to) + + subject + end + + it 'does not redirect if user has 2FA enabled' do + allow(controller).to receive(:two_factor_authentication_required?).and_return(true) + allow(controller).to receive(:current_user).twice.and_return(user) + allow(user).to receive(:two_factor_enabled?).and_return(true) + expect(controller).not_to receive(:redirect_to) + + subject + end + + it 'does not redirect if 2FA setup can be skipped' do + allow(controller).to receive(:two_factor_authentication_required?).and_return(true) + allow(controller).to receive(:current_user).twice.and_return(user) + allow(user).to receive(:two_factor_enabled?).and_return(false) + allow(controller).to receive(:skip_two_factor?).and_return(true) + expect(controller).not_to receive(:redirect_to) + + subject + end + + it 'redirects to 2FA setup otherwise' do + allow(controller).to receive(:two_factor_authentication_required?).and_return(true) + allow(controller).to receive(:current_user).twice.and_return(user) + allow(user).to receive(:two_factor_enabled?).and_return(false) + allow(controller).to receive(:skip_two_factor?).and_return(false) + allow(controller).to receive(:profile_two_factor_auth_path) + expect(controller).to receive(:redirect_to) + + subject + end + end + + describe '#two_factor_authentication_required?' do + subject { controller.send :two_factor_authentication_required? 
} + + it 'returns false if no 2FA requirement is present' do + allow(controller).to receive(:current_user).and_return(nil) + + expect(subject).to be_falsey + end + + it 'returns true if a 2FA requirement is set in the application settings' do + stub_application_setting require_two_factor_authentication: true + allow(controller).to receive(:current_user).and_return(nil) + + expect(subject).to be_truthy + end + + it 'returns true if a 2FA requirement is set on the user' do + user.require_two_factor_authentication_from_group = true + allow(controller).to receive(:current_user).and_return(user) + + expect(subject).to be_truthy + end + end + + describe '#two_factor_grace_period' do + subject { controller.send :two_factor_grace_period } + + it 'returns the grace period from the application settings' do + stub_application_setting two_factor_grace_period: 23 + allow(controller).to receive(:current_user).and_return(nil) + + expect(subject).to eq 23 + end + + context 'with a 2FA requirement set on the user' do + let(:user) { create :user, require_two_factor_authentication_from_group: true, two_factor_grace_period: 23 } + + it 'returns the user grace period if lower than the application grace period' do + stub_application_setting two_factor_grace_period: 24 + allow(controller).to receive(:current_user).and_return(user) + + expect(subject).to eq 23 + end + + it 'returns the application grace period if lower than the user grace period' do + stub_application_setting two_factor_grace_period: 22 + allow(controller).to receive(:current_user).and_return(user) + + expect(subject).to eq 22 + end + end + end + + describe '#two_factor_grace_period_expired?' do + subject { controller.send :two_factor_grace_period_expired? } + + before do + allow(controller).to receive(:current_user).and_return(user) + end + + it 'returns false if the user has not started their grace period yet' do + expect(subject).to be_falsey + end + + context 'with grace period started' do + let(:user) { create :user, otp_grace_period_started_at: 2.hours.ago } + + it 'returns true if the grace period has expired' do + allow(controller).to receive(:two_factor_grace_period).and_return(1) + + expect(subject).to be_truthy + end + + it 'returns false if the grace period is still active' do + allow(controller).to receive(:two_factor_grace_period).and_return(3) + + expect(subject).to be_falsey + end + end + end + + describe '#two_factor_skippable' do + subject { controller.send :two_factor_skippable? 
} + + before do + allow(controller).to receive(:current_user).and_return(user) + end + + it 'returns false if 2FA is not required' do + allow(controller).to receive(:two_factor_authentication_required?).and_return(false) + + expect(subject).to be_falsey + end + + it 'returns false if the user has already enabled 2FA' do + allow(controller).to receive(:two_factor_authentication_required?).and_return(true) + allow(user).to receive(:two_factor_enabled?).and_return(true) + + expect(subject).to be_falsey + end + + it 'returns false if the 2FA grace period has expired' do + allow(controller).to receive(:two_factor_authentication_required?).and_return(true) + allow(user).to receive(:two_factor_enabled?).and_return(false) + allow(controller).to receive(:two_factor_grace_period_expired?).and_return(true) + + expect(subject).to be_falsey + end + + it 'returns true otherwise' do + allow(controller).to receive(:two_factor_authentication_required?).and_return(true) + allow(user).to receive(:two_factor_enabled?).and_return(false) + allow(controller).to receive(:two_factor_grace_period_expired?).and_return(false) + + expect(subject).to be_truthy + end + end + + describe '#skip_two_factor?' do + subject { controller.send :skip_two_factor? } + + it 'returns false if 2FA setup was not skipped' do + allow(controller).to receive(:session).and_return({}) + + expect(subject).to be_falsey + end + + context 'with 2FA setup skipped' do + before do + allow(controller).to receive(:session).and_return({ skip_two_factor: 2.hours.from_now }) + end + + it 'returns false if the grace period has expired' do + Timecop.freeze(3.hours.from_now) do + expect(subject).to be_falsey + end + end + + it 'returns true if the grace period is still active' do + Timecop.freeze(1.hour.from_now) do + expect(subject).to be_truthy + end + end + end + end + end end diff --git a/spec/controllers/blob_controller_spec.rb b/spec/controllers/blob_controller_spec.rb deleted file mode 100644 index 44e011fd3a8..00000000000 --- a/spec/controllers/blob_controller_spec.rb +++ /dev/null @@ -1,67 +0,0 @@ -require 'spec_helper' - -describe Projects::BlobController do - let(:project) { create(:project, :repository) } - let(:user) { create(:user) } - - before do - sign_in(user) - - project.team << [user, :master] - - allow(project).to receive(:branches).and_return(['master', 'foo/bar/baz']) - allow(project).to receive(:tags).and_return(['v1.0.0', 'v2.0.0']) - controller.instance_variable_set(:@project, project) - end - - describe "GET show" do - render_views - - before do - get(:show, - namespace_id: project.namespace, - project_id: project, - id: id) - end - - context "valid branch, valid file" do - let(:id) { 'master/README.md' } - it { is_expected.to respond_with(:success) } - end - - context "valid branch, invalid file" do - let(:id) { 'master/invalid-path.rb' } - it { is_expected.to respond_with(:not_found) } - end - - context "invalid branch, valid file" do - let(:id) { 'invalid-branch/README.md' } - it { is_expected.to respond_with(:not_found) } - end - - context "binary file" do - let(:id) { 'binary-encoding/encoding/binary-1.bin' } - it { is_expected.to respond_with(:success) } - end - end - - describe 'GET show with tree path' do - render_views - - before do - get(:show, - namespace_id: project.namespace, - project_id: project, - id: id) - controller.instance_variable_set(:@blob, nil) - end - - context 'redirect to tree' do - let(:id) { 'markdown/doc' } - it 'redirects' do - expect(subject). 
- to redirect_to("/#{project.path_with_namespace}/tree/markdown/doc") - end - end - end -end diff --git a/spec/controllers/dashboard/todos_controller_spec.rb b/spec/controllers/dashboard/todos_controller_spec.rb index 71a4a2c43c7..085f3fd8543 100644 --- a/spec/controllers/dashboard/todos_controller_spec.rb +++ b/spec/controllers/dashboard/todos_controller_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe Dashboard::TodosController do - include ApiHelpers - let(:user) { create(:user) } let(:author) { create(:user) } let(:project) { create(:empty_project) } @@ -16,7 +14,7 @@ describe Dashboard::TodosController do describe 'GET #index' do context 'when using pagination' do let(:last_page) { user.todos.page.total_pages } - let!(:issues) { create_list(:issue, 2, project: project, assignee: user) } + let!(:issues) { create_list(:issue, 2, project: project, assignees: [user]) } before do issues.each { |issue| todo_service.new_issue(issue, user) } @@ -35,6 +33,13 @@ describe Dashboard::TodosController do expect(assigns(:todos).current_page).to eq(last_page) expect(response).to have_http_status(200) end + + it 'does not redirect to external sites when provided a host field' do + external_host = "www.example.com" + get :index, page: (last_page + 1).to_param, host: external_host + + expect(response).to redirect_to(dashboard_todos_path(page: last_page)) + end end end diff --git a/spec/controllers/groups/milestones_controller_spec.rb b/spec/controllers/groups/milestones_controller_spec.rb index 6e4b5f78e33..f3263bc177d 100644 --- a/spec/controllers/groups/milestones_controller_spec.rb +++ b/spec/controllers/groups/milestones_controller_spec.rb @@ -6,18 +6,29 @@ describe Groups::MilestonesController do let(:project2) { create(:empty_project, group: group) } let(:user) { create(:user) } let(:title) { '肯定不是中文的问题' } + let(:milestone) do + project_milestone = create(:milestone, project: project) + + GroupMilestone.build( + group, + [project], + project_milestone.title + ) + end + let(:milestone_path) { group_milestone_path(group, milestone.safe_title, title: milestone.title) } before do sign_in(user) group.add_owner(user) project.team << [user, :master] - controller.instance_variable_set(:@group, group) end + it_behaves_like 'milestone tabs' + describe "#create" do it "creates group milestone with Chinese title" do post :create, - group_id: group.id, + group_id: group.to_param, milestone: { project_ids: [project.id, project2.id], title: title } expect(response).to redirect_to(group_milestone_path(group, title.to_slug.to_s, title: title)) @@ -25,9 +36,139 @@ describe Groups::MilestonesController do end it "redirects to new when there are no project ids" do - post :create, group_id: group.id, milestone: { title: title, project_ids: [""] } + post :create, group_id: group.to_param, milestone: { title: title, project_ids: [""] } expect(response).to render_template :new expect(assigns(:milestone).errors).not_to be_nil end end + + describe '#ensure_canonical_path' do + before do + sign_in(user) + end + + context 'for a GET request' do + context 'when requesting the canonical path' do + context 'non-show path' do + context 'with exactly matching casing' do + it 'does not redirect' do + get :index, group_id: group.to_param + + expect(response).not_to have_http_status(301) + end + end + + context 'with different casing' do + it 'redirects to the correct casing' do + get :index, group_id: group.to_param.upcase + + expect(response).to redirect_to(group_milestones_path(group.to_param)) + expect(controller).not_to 
set_flash[:notice] + end + end + end + + context 'show path' do + context 'with exactly matching casing' do + it 'does not redirect' do + get :show, group_id: group.to_param, id: title + + expect(response).not_to have_http_status(301) + end + end + + context 'with different casing' do + it 'redirects to the correct casing' do + get :show, group_id: group.to_param.upcase, id: title + + expect(response).to redirect_to(group_milestone_path(group.to_param, title)) + expect(controller).not_to set_flash[:notice] + end + end + end + end + + context 'when requesting a redirected path' do + let(:redirect_route) { group.redirect_routes.create(path: 'old-path') } + + it 'redirects to the canonical path' do + get :merge_requests, group_id: redirect_route.path, id: title + + expect(response).to redirect_to(merge_requests_group_milestone_path(group.to_param, title)) + expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group)) + end + + context 'when the old group path is a substring of the scheme or host' do + let(:redirect_route) { group.redirect_routes.create(path: 'http') } + + it 'does not modify the requested host' do + get :merge_requests, group_id: redirect_route.path, id: title + + expect(response).to redirect_to(merge_requests_group_milestone_path(group.to_param, title)) + expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group)) + end + end + + context 'when the old group path is substring of groups' do + # I.e. /groups/oups should not become /grfoo/oups + let(:redirect_route) { group.redirect_routes.create(path: 'oups') } + + it 'does not modify the /groups part of the path' do + get :merge_requests, group_id: redirect_route.path, id: title + + expect(response).to redirect_to(merge_requests_group_milestone_path(group.to_param, title)) + expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group)) + end + end + + context 'when the old group path is substring of groups plus the new path' do + # I.e. /groups/oups/oup should not become /grfoos + let(:redirect_route) { group.redirect_routes.create(path: 'oups/oup') } + + it 'does not modify the /groups part of the path' do + get :merge_requests, group_id: redirect_route.path, id: title + + expect(response).to redirect_to(merge_requests_group_milestone_path(group.to_param, title)) + expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group)) + end + end + end + end + end + + context 'for a non-GET request' do + context 'when requesting the canonical path with different casing' do + it 'does not 404' do + post :create, + group_id: group.to_param, + milestone: { project_ids: [project.id, project2.id], title: title } + + expect(response).not_to have_http_status(404) + end + + it 'does not redirect to the correct casing' do + post :create, + group_id: group.to_param, + milestone: { project_ids: [project.id, project2.id], title: title } + + expect(response).not_to have_http_status(301) + end + end + + context 'when requesting a redirected path' do + let(:redirect_route) { group.redirect_routes.create(path: 'old-path') } + + it 'returns not found' do + post :create, + group_id: redirect_route.path, + milestone: { project_ids: [project.id, project2.id], title: title } + + expect(response).to have_http_status(404) + end + end + end + + def group_moved_message(redirect_route, group) + "Group '#{redirect_route.path}' was moved to '#{group.full_path}'. Please update any links and bookmarks that may still have the old path." 
+ end end diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb index cad82a34fb0..4626f1ebc29 100644 --- a/spec/controllers/groups_controller_spec.rb +++ b/spec/controllers/groups_controller_spec.rb @@ -26,6 +26,41 @@ describe GroupsController do end end + describe 'GET #subgroups' do + let!(:public_subgroup) { create(:group, :public, parent: group) } + let!(:private_subgroup) { create(:group, :private, parent: group) } + + context 'as a user' do + before do + sign_in(user) + end + + it 'shows the public subgroups' do + get :subgroups, id: group.to_param + + expect(assigns(:nested_groups)).to contain_exactly(public_subgroup) + end + + context 'being member' do + it 'shows public and private subgroups the user is member of' do + private_subgroup.add_guest(user) + + get :subgroups, id: group.to_param + + expect(assigns(:nested_groups)).to contain_exactly(public_subgroup, private_subgroup) + end + end + end + + context 'as a guest' do + it 'shows the public subgroups' do + get :subgroups, id: group.to_param + + expect(assigns(:nested_groups)).to contain_exactly(public_subgroup) + end + end + end + describe 'GET #issues' do let(:issue_1) { create(:issue, project: project) } let(:issue_2) { create(:issue, project: project) } @@ -33,7 +68,7 @@ describe GroupsController do before do create_list(:award_emoji, 3, awardable: issue_2) create_list(:award_emoji, 2, awardable: issue_1) - create_list(:award_emoji, 2, :downvote, awardable: issue_2,) + create_list(:award_emoji, 2, :downvote, awardable: issue_2) sign_in(user) end @@ -81,7 +116,7 @@ describe GroupsController do it 'returns 404' do sign_in(create(:user)) - delete :destroy, id: group.path + delete :destroy, id: group.to_param expect(response.status).to eq(404) end @@ -94,12 +129,12 @@ describe GroupsController do it 'schedules a group destroy' do Sidekiq::Testing.fake! 
do - expect { delete :destroy, id: group.path }.to change(GroupDestroyWorker.jobs, :size).by(1) + expect { delete :destroy, id: group.to_param }.to change(GroupDestroyWorker.jobs, :size).by(1) end end it 'redirects to the root path' do - delete :destroy, id: group.path + delete :destroy, id: group.to_param expect(response).to redirect_to(root_path) end @@ -111,7 +146,7 @@ describe GroupsController do sign_in(user) end - it 'updates the path succesfully' do + it 'updates the path successfully' do post :update, id: group.to_param, group: { path: 'new_path' } expect(response).to have_http_status(302) @@ -126,4 +161,201 @@ describe GroupsController do expect(assigns(:group).path).not_to eq('new_path') end end + + describe '#ensure_canonical_path' do + before do + sign_in(user) + end + + context 'for a GET request' do + context 'when requesting groups at the root path' do + before do + allow(request).to receive(:original_fullpath).and_return("/#{group_full_path}") + get :show, id: group_full_path + end + + context 'when requesting the canonical path with different casing' do + let(:group_full_path) { group.to_param.upcase } + + it 'redirects to the correct casing' do + expect(response).to redirect_to(group) + expect(controller).not_to set_flash[:notice] + end + end + + context 'when requesting a redirected path' do + let(:redirect_route) { group.redirect_routes.create(path: 'old-path') } + let(:group_full_path) { redirect_route.path } + + it 'redirects to the canonical path' do + expect(response).to redirect_to(group) + expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group)) + end + + context 'when the old group path is a substring of the scheme or host' do + let(:redirect_route) { group.redirect_routes.create(path: 'http') } + + it 'does not modify the requested host' do + expect(response).to redirect_to(group) + expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group)) + end + end + + context 'when the old group path is substring of groups' do + # I.e. 
/groups/oups should not become /grfoo/oups + let(:redirect_route) { group.redirect_routes.create(path: 'oups') } + + it 'does not modify the /groups part of the path' do + expect(response).to redirect_to(group) + expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group)) + end + end + end + end + + context 'when requesting groups under the /groups path' do + context 'when requesting the canonical path' do + context 'non-show path' do + context 'with exactly matching casing' do + it 'does not redirect' do + get :issues, id: group.to_param + + expect(response).not_to have_http_status(301) + end + end + + context 'with different casing' do + it 'redirects to the correct casing' do + get :issues, id: group.to_param.upcase + + expect(response).to redirect_to(issues_group_path(group.to_param)) + expect(controller).not_to set_flash[:notice] + end + end + end + + context 'show path' do + context 'with exactly matching casing' do + it 'does not redirect' do + get :show, id: group.to_param + + expect(response).not_to have_http_status(301) + end + end + + context 'with different casing' do + it 'redirects to the correct casing at the root path' do + get :show, id: group.to_param.upcase + + expect(response).to redirect_to(group) + expect(controller).not_to set_flash[:notice] + end + end + end + end + + context 'when requesting a redirected path' do + let(:redirect_route) { group.redirect_routes.create(path: 'old-path') } + + it 'redirects to the canonical path' do + get :issues, id: redirect_route.path + + expect(response).to redirect_to(issues_group_path(group.to_param)) + expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group)) + end + + context 'when the old group path is a substring of the scheme or host' do + let(:redirect_route) { group.redirect_routes.create(path: 'http') } + + it 'does not modify the requested host' do + get :issues, id: redirect_route.path + + expect(response).to redirect_to(issues_group_path(group.to_param)) + expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group)) + end + end + + context 'when the old group path is substring of groups' do + # I.e. /groups/oups should not become /grfoo/oups + let(:redirect_route) { group.redirect_routes.create(path: 'oups') } + + it 'does not modify the /groups part of the path' do + get :issues, id: redirect_route.path + + expect(response).to redirect_to(issues_group_path(group.to_param)) + expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group)) + end + end + + context 'when the old group path is substring of groups plus the new path' do + # I.e. 
/groups/oups/oup should not become /grfoos + let(:redirect_route) { group.redirect_routes.create(path: 'oups/oup') } + + it 'does not modify the /groups part of the path' do + get :issues, id: redirect_route.path + + expect(response).to redirect_to(issues_group_path(group.to_param)) + expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group)) + end + end + end + end + end + + context 'for a POST request' do + context 'when requesting the canonical path with different casing' do + it 'does not 404' do + post :update, id: group.to_param.upcase, group: { path: 'new_path' } + + expect(response).not_to have_http_status(404) + end + + it 'does not redirect to the correct casing' do + post :update, id: group.to_param.upcase, group: { path: 'new_path' } + + expect(response).not_to have_http_status(301) + end + end + + context 'when requesting a redirected path' do + let(:redirect_route) { group.redirect_routes.create(path: 'old-path') } + + it 'returns not found' do + post :update, id: redirect_route.path, group: { path: 'new_path' } + + expect(response).to have_http_status(404) + end + end + end + + context 'for a DELETE request' do + context 'when requesting the canonical path with different casing' do + it 'does not 404' do + delete :destroy, id: group.to_param.upcase + + expect(response).not_to have_http_status(404) + end + + it 'does not redirect to the correct casing' do + delete :destroy, id: group.to_param.upcase + + expect(response).not_to have_http_status(301) + end + end + + context 'when requesting a redirected path' do + let(:redirect_route) { group.redirect_routes.create(path: 'old-path') } + + it 'returns not found' do + delete :destroy, id: redirect_route.path + + expect(response).to have_http_status(404) + end + end + end + end + + def group_moved_message(redirect_route, group) + "Group '#{redirect_route.path}' was moved to '#{group.full_path}'. Please update any links and bookmarks that may still have the old path." 
+ end end diff --git a/spec/controllers/health_controller_spec.rb b/spec/controllers/health_controller_spec.rb new file mode 100644 index 00000000000..b8b6e0c3a88 --- /dev/null +++ b/spec/controllers/health_controller_spec.rb @@ -0,0 +1,96 @@ +require 'spec_helper' + +describe HealthController do + include StubENV + + let(:token) { current_application_settings.health_check_access_token } + let(:json_response) { JSON.parse(response.body) } + + before do + stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') + end + + describe '#readiness' do + context 'authorization token provided' do + before do + request.headers['TOKEN'] = token + end + + it 'returns proper response' do + get :readiness + expect(json_response['db_check']['status']).to eq('ok') + expect(json_response['redis_check']['status']).to eq('ok') + expect(json_response['fs_shards_check']['status']).to eq('ok') + expect(json_response['fs_shards_check']['labels']['shard']).to eq('default') + end + end + + context 'without authorization token' do + it 'returns proper response' do + get :readiness + expect(response.status).to eq(404) + end + end + end + + describe '#liveness' do + context 'authorization token provided' do + before do + request.headers['TOKEN'] = token + end + + it 'returns proper response' do + get :liveness + expect(json_response['db_check']['status']).to eq('ok') + expect(json_response['redis_check']['status']).to eq('ok') + expect(json_response['fs_shards_check']['status']).to eq('ok') + end + end + + context 'without authorization token' do + it 'returns proper response' do + get :liveness + expect(response.status).to eq(404) + end + end + end + + describe '#metrics' do + context 'authorization token provided' do + before do + request.headers['TOKEN'] = token + end + + it 'returns DB ping metrics' do + get :metrics + expect(response.body).to match(/^db_ping_timeout 0$/) + expect(response.body).to match(/^db_ping_success 1$/) + expect(response.body).to match(/^db_ping_latency [0-9\.]+$/) + end + + it 'returns Redis ping metrics' do + get :metrics + expect(response.body).to match(/^redis_ping_timeout 0$/) + expect(response.body).to match(/^redis_ping_success 1$/) + expect(response.body).to match(/^redis_ping_latency [0-9\.]+$/) + end + + it 'returns file system check metrics' do + get :metrics + expect(response.body).to match(/^filesystem_access_latency{shard="default"} [0-9\.]+$/) + expect(response.body).to match(/^filesystem_accessible{shard="default"} 1$/) + expect(response.body).to match(/^filesystem_write_latency{shard="default"} [0-9\.]+$/) + expect(response.body).to match(/^filesystem_writable{shard="default"} 1$/) + expect(response.body).to match(/^filesystem_read_latency{shard="default"} [0-9\.]+$/) + expect(response.body).to match(/^filesystem_readable{shard="default"} 1$/) + end + end + + context 'without authorization token' do + it 'returns proper response' do + get :metrics + expect(response.status).to eq(404) + end + end + end +end diff --git a/spec/controllers/import/bitbucket_controller_spec.rb b/spec/controllers/import/bitbucket_controller_spec.rb index 51f23e4eeb9..010e3180ea4 100644 --- a/spec/controllers/import/bitbucket_controller_spec.rb +++ b/spec/controllers/import/bitbucket_controller_spec.rb @@ -200,5 +200,72 @@ describe Import::BitbucketController do end end end + + context 'user has chosen an existing nested namespace and name for the project' do + let(:parent_namespace) { create(:namespace, name: 'foo', owner: user) } + let(:nested_namespace) { create(:namespace, name: 'bar', parent: 
parent_namespace, owner: user) } + let(:test_name) { 'test_name' } + + it 'takes the selected namespace and name' do + expect(Gitlab::BitbucketImport::ProjectCreator). + to receive(:new).with(bitbucket_repo, test_name, nested_namespace, user, access_params). + and_return(double(execute: true)) + + post :create, { target_namespace: nested_namespace.full_path, new_name: test_name, format: :js } + end + end + + context 'user has chosen a non-existent nested namespaces and name for the project' do + let(:test_name) { 'test_name' } + + it 'takes the selected namespace and name' do + expect(Gitlab::BitbucketImport::ProjectCreator). + to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params). + and_return(double(execute: true)) + + post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } + end + + it 'creates the namespaces' do + allow(Gitlab::BitbucketImport::ProjectCreator). + to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params). + and_return(double(execute: true)) + + expect { post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } } + .to change { Namespace.count }.by(2) + end + + it 'new namespace has the right parent' do + allow(Gitlab::BitbucketImport::ProjectCreator). + to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params). + and_return(double(execute: true)) + + post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } + + expect(Namespace.find_by_path_or_name('bar').parent.path).to eq('foo') + end + end + + context 'user has chosen existent and non-existent nested namespaces and name for the project' do + let(:test_name) { 'test_name' } + let!(:parent_namespace) { create(:namespace, name: 'foo', owner: user) } + + it 'takes the selected namespace and name' do + expect(Gitlab::BitbucketImport::ProjectCreator). + to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params). + and_return(double(execute: true)) + + post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } + end + + it 'creates the namespaces' do + allow(Gitlab::BitbucketImport::ProjectCreator). + to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params). + and_return(double(execute: true)) + + expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } } + .to change { Namespace.count }.by(2) + end + end end end diff --git a/spec/controllers/import/gitlab_controller_spec.rb b/spec/controllers/import/gitlab_controller_spec.rb index 3f73ea000ae..2dbb89219d0 100644 --- a/spec/controllers/import/gitlab_controller_spec.rb +++ b/spec/controllers/import/gitlab_controller_spec.rb @@ -174,6 +174,72 @@ describe Import::GitlabController do end end end + + context 'user has chosen an existing nested namespace for the project' do + let(:parent_namespace) { create(:namespace, name: 'foo', owner: user) } + let(:nested_namespace) { create(:namespace, name: 'bar', parent: parent_namespace, owner: user) } + + it 'takes the selected namespace and name' do + expect(Gitlab::GitlabImport::ProjectCreator). + to receive(:new).with(gitlab_repo, nested_namespace, user, access_params). 
+ and_return(double(execute: true)) + + post :create, { target_namespace: nested_namespace.full_path, format: :js } + end + end + + context 'user has chosen a non-existent nested namespaces for the project' do + let(:test_name) { 'test_name' } + + it 'takes the selected namespace and name' do + expect(Gitlab::GitlabImport::ProjectCreator). + to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params). + and_return(double(execute: true)) + + post :create, { target_namespace: 'foo/bar', format: :js } + end + + it 'creates the namespaces' do + allow(Gitlab::GitlabImport::ProjectCreator). + to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params). + and_return(double(execute: true)) + + expect { post :create, { target_namespace: 'foo/bar', format: :js } } + .to change { Namespace.count }.by(2) + end + + it 'new namespace has the right parent' do + allow(Gitlab::GitlabImport::ProjectCreator). + to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params). + and_return(double(execute: true)) + + post :create, { target_namespace: 'foo/bar', format: :js } + + expect(Namespace.find_by_path_or_name('bar').parent.path).to eq('foo') + end + end + + context 'user has chosen existent and non-existent nested namespaces and name for the project' do + let(:test_name) { 'test_name' } + let!(:parent_namespace) { create(:namespace, name: 'foo', owner: user) } + + it 'takes the selected namespace and name' do + expect(Gitlab::GitlabImport::ProjectCreator). + to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params). + and_return(double(execute: true)) + + post :create, { target_namespace: 'foo/foobar/bar', format: :js } + end + + it 'creates the namespaces' do + allow(Gitlab::GitlabImport::ProjectCreator). + to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params). 
+ and_return(double(execute: true)) + + expect { post :create, { target_namespace: 'foo/foobar/bar', format: :js } } + .to change { Namespace.count }.by(2) + end + end end end end diff --git a/spec/controllers/oauth/authorizations_controller_spec.rb b/spec/controllers/oauth/authorizations_controller_spec.rb new file mode 100644 index 00000000000..d321bfcea9d --- /dev/null +++ b/spec/controllers/oauth/authorizations_controller_spec.rb @@ -0,0 +1,55 @@ +require 'spec_helper' + +describe Oauth::AuthorizationsController do + let(:user) { create(:user) } + + let(:doorkeeper) do + Doorkeeper::Application.create( + name: "MyApp", + redirect_uri: 'http://example.com', + scopes: "") + end + + let(:params) do + { + response_type: "code", + client_id: doorkeeper.uid, + redirect_uri: doorkeeper.redirect_uri, + state: 'state' + } + end + + before do + sign_in(user) + end + + describe 'GET #new' do + context 'without valid params' do + it 'returns 200 code and renders error view' do + get :new + + expect(response).to have_http_status(200) + expect(response).to render_template('doorkeeper/authorizations/error') + end + end + + context 'with valid params' do + it 'returns 200 code and renders view' do + get :new, params + + expect(response).to have_http_status(200) + expect(response).to render_template('doorkeeper/authorizations/new') + end + + it 'deletes session.user_return_to and redirects when skip authorization' do + request.session['user_return_to'] = 'http://example.com' + allow(controller).to receive(:skip_authorization?).and_return(true) + + get :new, params + + expect(request.session['user_return_to']).to be_nil + expect(response).to have_http_status(302) + end + end + end +end diff --git a/spec/controllers/profiles/accounts_controller_spec.rb b/spec/controllers/profiles/accounts_controller_spec.rb index 18148acde3e..2f9d18e3a0e 100644 --- a/spec/controllers/profiles/accounts_controller_spec.rb +++ b/spec/controllers/profiles/accounts_controller_spec.rb @@ -1,25 +1,47 @@ require 'spec_helper' describe Profiles::AccountsController do - let(:user) { create(:omniauth_user, provider: 'saml') } + describe 'DELETE unlink' do + let(:user) { create(:omniauth_user) } - before do - sign_in(user) - end + before do + sign_in(user) + end - it 'does not allow to unlink SAML connected account' do - identity = user.identities.last - delete :unlink, provider: 'saml' - updated_user = User.find(user.id) + it 'renders 404 if someone tries to unlink a non existent provider' do + delete :unlink, provider: 'github' - expect(response).to have_http_status(302) - expect(updated_user.identities.size).to eq(1) - expect(updated_user.identities).to include(identity) - end + expect(response).to have_http_status(404) + end + + [:saml, :cas3].each do |provider| + describe "#{provider} provider" do + let(:user) { create(:omniauth_user, provider: provider.to_s) } + + it "does not allow to unlink connected account" do + identity = user.identities.last + + delete :unlink, provider: provider.to_s + + expect(response).to have_http_status(302) + expect(user.reload.identities).to include(identity) + end + end + end + + [:twitter, :facebook, :google_oauth2, :gitlab, :github, :bitbucket, :crowd, :auth0].each do |provider| + describe "#{provider} provider" do + let(:user) { create(:omniauth_user, provider: provider.to_s) } + + it 'allows to unlink connected account' do + identity = user.identities.last - it 'does allow to delete other linked accounts' do - user.identities.create(provider: 'twitter', extern_uid: 'twitter_123') + delete 
:unlink, provider: provider.to_s - expect { delete :unlink, provider: 'twitter' }.to change(Identity.all, :size).by(-1) + expect(response).to have_http_status(302) + expect(user.reload.identities).not_to include(identity) + end + end + end end end diff --git a/spec/controllers/profiles/personal_access_tokens_spec.rb b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb index dfed1de2046..98a43e278b2 100644 --- a/spec/controllers/profiles/personal_access_tokens_spec.rb +++ b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb @@ -12,7 +12,7 @@ describe Profiles::PersonalAccessTokensController do end it "allows creation of a token with scopes" do - name = FFaker::Product.brand + name = 'My PAT' scopes = %w[api read_user] post :create, personal_access_token: token_attributes.merge(scopes: scopes, name: name) diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb new file mode 100644 index 00000000000..eff9fab8da2 --- /dev/null +++ b/spec/controllers/projects/artifacts_controller_spec.rb @@ -0,0 +1,188 @@ +require 'spec_helper' + +describe Projects::ArtifactsController do + let(:user) { create(:user) } + let(:project) { create(:project, :repository) } + + let(:pipeline) do + create(:ci_pipeline, + project: project, + sha: project.commit.sha, + ref: project.default_branch, + status: 'success') + end + + let(:build) { create(:ci_build, :success, :artifacts, pipeline: pipeline) } + + before do + project.team << [user, :developer] + + sign_in(user) + end + + describe 'GET download' do + it 'sends the artifacts file' do + expect(controller).to receive(:send_file).with(build.artifacts_file.path, disposition: 'attachment').and_call_original + + get :download, namespace_id: project.namespace, project_id: project, build_id: build + end + end + + describe 'GET browse' do + context 'when the directory exists' do + it 'renders the browse view' do + get :browse, namespace_id: project.namespace, project_id: project, build_id: build, path: 'other_artifacts_0.1.2' + + expect(response).to render_template('projects/artifacts/browse') + end + end + + context 'when the directory does not exist' do + it 'responds Not Found' do + get :browse, namespace_id: project.namespace, project_id: project, build_id: build, path: 'unknown' + + expect(response).to be_not_found + end + end + end + + describe 'GET file' do + context 'when the file exists' do + it 'renders the file view' do + get :file, namespace_id: project.namespace, project_id: project, build_id: build, path: 'ci_artifacts.txt' + + expect(response).to render_template('projects/artifacts/file') + end + end + + context 'when the file does not exist' do + it 'responds Not Found' do + get :file, namespace_id: project.namespace, project_id: project, build_id: build, path: 'unknown' + + expect(response).to be_not_found + end + end + end + + describe 'GET raw' do + context 'when the file exists' do + it 'serves the file using workhorse' do + get :raw, namespace_id: project.namespace, project_id: project, build_id: build, path: 'ci_artifacts.txt' + + send_data = response.headers[Gitlab::Workhorse::SEND_DATA_HEADER] + + expect(send_data).to start_with('artifacts-entry:') + + base64_params = send_data.sub(/\Aartifacts\-entry:/, '') + params = JSON.parse(Base64.urlsafe_decode64(base64_params)) + + expect(params.keys).to eq(%w(Archive Entry)) + expect(params['Archive']).to end_with('build_artifacts.zip') + expect(params['Entry']).to eq(Base64.encode64('ci_artifacts.txt')) 
+ end + end + + context 'when the file does not exist' do + it 'responds Not Found' do + get :raw, namespace_id: project.namespace, project_id: project, build_id: build, path: 'unknown' + + expect(response).to be_not_found + end + end + end + + describe 'GET latest_succeeded' do + def params_from_ref(ref = pipeline.ref, job = build.name, path = 'browse') + { + namespace_id: project.namespace, + project_id: project, + ref_name_and_path: File.join(ref, path), + job: job + } + end + + context 'cannot find the build' do + shared_examples 'not found' do + it { expect(response).to have_http_status(:not_found) } + end + + context 'has no such ref' do + before do + get :latest_succeeded, params_from_ref('TAIL', build.name) + end + + it_behaves_like 'not found' + end + + context 'has no such build' do + before do + get :latest_succeeded, params_from_ref(pipeline.ref, 'NOBUILD') + end + + it_behaves_like 'not found' + end + + context 'has no path' do + before do + get :latest_succeeded, params_from_ref(pipeline.sha, build.name, '') + end + + it_behaves_like 'not found' + end + end + + context 'found the build and redirect' do + shared_examples 'redirect to the build' do + it 'redirects' do + path = browse_namespace_project_build_artifacts_path( + project.namespace, + project, + build) + + expect(response).to redirect_to(path) + end + end + + context 'with regular branch' do + before do + pipeline.update(ref: 'master', + sha: project.commit('master').sha) + + get :latest_succeeded, params_from_ref('master') + end + + it_behaves_like 'redirect to the build' + end + + context 'with branch name containing slash' do + before do + pipeline.update(ref: 'improve/awesome', + sha: project.commit('improve/awesome').sha) + + get :latest_succeeded, params_from_ref('improve/awesome') + end + + it_behaves_like 'redirect to the build' + end + + context 'with branch name and path containing slashes' do + before do + pipeline.update(ref: 'improve/awesome', + sha: project.commit('improve/awesome').sha) + + get :latest_succeeded, params_from_ref('improve/awesome', build.name, 'file/README.md') + end + + it 'redirects' do + path = file_namespace_project_build_artifacts_path( + project.namespace, + project, + build, + 'README.md') + + expect(response).to redirect_to(path) + end + end + end + end +end diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb index ec36a64b415..3b3caa9d3e6 100644 --- a/spec/controllers/projects/blob_controller_spec.rb +++ b/spec/controllers/projects/blob_controller_spec.rb @@ -2,15 +2,61 @@ require 'rails_helper' describe Projects::BlobController do let(:project) { create(:project, :public, :repository) } - let(:user) { create(:user) } - before do - project.team << [user, :master] + describe "GET show" do + render_views + + context 'with file path' do + before do + get(:show, + namespace_id: project.namespace, + project_id: project, + id: id) + end + + context "valid branch, valid file" do + let(:id) { 'master/README.md' } + it { is_expected.to respond_with(:success) } + end + + context "valid branch, invalid file" do + let(:id) { 'master/invalid-path.rb' } + it { is_expected.to respond_with(:not_found) } + end + + context "invalid branch, valid file" do + let(:id) { 'invalid-branch/README.md' } + it { is_expected.to respond_with(:not_found) } + end + + context "binary file" do + let(:id) { 'binary-encoding/encoding/binary-1.bin' } + it { is_expected.to respond_with(:success) } + end + end + + context 'with tree path' do + before do 
+ get(:show, + namespace_id: project.namespace, + project_id: project, + id: id) + controller.instance_variable_set(:@blob, nil) + end - sign_in(user) + context 'redirect to tree' do + let(:id) { 'markdown/doc' } + it 'redirects' do + expect(subject). + to redirect_to("/#{project.path_with_namespace}/tree/markdown/doc") + end + end + end end describe 'GET diff' do + let(:user) { create(:user) } + render_views def do_get(opts = {}) @@ -20,6 +66,12 @@ describe Projects::BlobController do get :diff, params.merge(opts) end + before do + project.team << [user, :master] + + sign_in(user) + end + context 'when essential params are missing' do it 'renders nothing' do do_get @@ -37,13 +89,75 @@ describe Projects::BlobController do end end + describe 'GET edit' do + let(:default_params) do + { + namespace_id: project.namespace, + project_id: project, + id: 'master/CHANGELOG' + } + end + + context 'anonymous' do + before do + get :edit, default_params + end + + it 'redirects to sign in and returns' do + expect(response).to redirect_to(new_user_session_path) + end + end + + context 'as guest' do + let(:guest) { create(:user) } + + before do + sign_in(guest) + get :edit, default_params + end + + it 'redirects to blob show' do + expect(response).to redirect_to(namespace_project_blob_path(project.namespace, project, 'master/CHANGELOG')) + end + end + + context 'as developer' do + let(:developer) { create(:user) } + + before do + project.team << [developer, :developer] + sign_in(developer) + get :edit, default_params + end + + it 'redirects to blob show' do + expect(response).to have_http_status(200) + end + end + + context 'as master' do + let(:master) { create(:user) } + + before do + project.team << [master, :master] + sign_in(master) + get :edit, default_params + end + + it 'redirects to blob show' do + expect(response).to have_http_status(200) + end + end + end + describe 'PUT update' do + let(:user) { create(:user) } let(:default_params) do { namespace_id: project.namespace, project_id: project, id: 'master/CHANGELOG', - target_branch: 'master', + branch_name: 'master', content: 'Added changes', commit_message: 'Update CHANGELOG' } @@ -53,6 +167,12 @@ describe Projects::BlobController do namespace_project_blob_path(project.namespace, project, 'master/CHANGELOG') end + before do + project.team << [user, :master] + + sign_in(user) + end + it 'redirects to blob' do put :update, default_params @@ -109,7 +229,7 @@ describe Projects::BlobController do context 'when editing on the original repository' do it "redirects to forked project new merge request" do - default_params[:target_branch] = "fork-test-1" + default_params[:branch_name] = "fork-test-1" default_params[:create_merge_request] = 1 put :update, default_params diff --git a/spec/controllers/projects/boards/issues_controller_spec.rb b/spec/controllers/projects/boards/issues_controller_spec.rb index 15667e8d4b1..dc3b72c6de4 100644 --- a/spec/controllers/projects/boards/issues_controller_spec.rb +++ b/spec/controllers/projects/boards/issues_controller_spec.rb @@ -34,7 +34,7 @@ describe Projects::Boards::IssuesController do issue = create(:labeled_issue, project: project, labels: [planning]) create(:labeled_issue, project: project, labels: [planning]) create(:labeled_issue, project: project, labels: [development], due_date: Date.tomorrow) - create(:labeled_issue, project: project, labels: [development], assignee: johndoe) + create(:labeled_issue, project: project, labels: [development], assignees: [johndoe]) issue.subscribe(johndoe, project) 
list_issues user: user, board: board, list: list2 diff --git a/spec/controllers/projects/branches_controller_spec.rb b/spec/controllers/projects/branches_controller_spec.rb index d20e7368086..f285e5333d6 100644 --- a/spec/controllers/projects/branches_controller_spec.rb +++ b/spec/controllers/projects/branches_controller_spec.rb @@ -14,7 +14,7 @@ describe Projects::BranchesController do controller.instance_variable_set(:@project, project) end - describe "POST create" do + describe "POST create with HTML format" do render_views context "on creation of a new branch" do @@ -152,6 +152,42 @@ describe Projects::BranchesController do end end + describe 'POST create with JSON format' do + before do + sign_in(user) + end + + context 'with valid params' do + it 'returns a successful 200 response' do + create_branch name: 'my-branch', ref: 'master' + + expect(response).to have_http_status(200) + end + + it 'returns the created branch' do + create_branch name: 'my-branch', ref: 'master' + + expect(response).to match_response_schema('branch') + end + end + + context 'with invalid params' do + it 'returns an unprocessable entity 422 response' do + create_branch name: "<script>alert('merge');</script>", ref: "<script>alert('ref');</script>" + + expect(response).to have_http_status(422) + end + end + + def create_branch(name:, ref:) + post :create, namespace_id: project.namespace.to_param, + project_id: project.to_param, + branch_name: name, + ref: ref, + format: :json + end + end + describe "POST destroy with HTML format" do render_views @@ -177,33 +213,98 @@ describe Projects::BranchesController do sign_in(user) post :destroy, - format: :js, - id: branch, - namespace_id: project.namespace, - project_id: project + format: format, + id: branch, + namespace_id: project.namespace, + project_id: project end - context "valid branch name, valid source" do + context 'as JS' do let(:branch) { "feature" } + let(:format) { :js } - it { expect(response).to have_http_status(200) } - end + context "valid branch name, valid source" do + let(:branch) { "feature" } - context "valid branch name with unencoded slashes" do - let(:branch) { "improve/awesome" } + it { expect(response).to have_http_status(200) } + it { expect(response.body).to be_blank } + end + + context "valid branch name with unencoded slashes" do + let(:branch) { "improve/awesome" } + + it { expect(response).to have_http_status(200) } + it { expect(response.body).to be_blank } + end - it { expect(response).to have_http_status(200) } + context "valid branch name with encoded slashes" do + let(:branch) { "improve%2Fawesome" } + + it { expect(response).to have_http_status(200) } + it { expect(response.body).to be_blank } + end + + context "invalid branch name, valid ref" do + let(:branch) { "no-branch" } + + it { expect(response).to have_http_status(404) } + it { expect(response.body).to be_blank } + end end - context "valid branch name with encoded slashes" do - let(:branch) { "improve%2Fawesome" } + context 'as JSON' do + let(:branch) { "feature" } + let(:format) { :json } + + context 'valid branch name, valid source' do + let(:branch) { "feature" } + + it 'returns JSON response with message' do + expect(json_response).to eql("message" => 'Branch was removed') + end + + it { expect(response).to have_http_status(200) } + end + + context 'valid branch name with unencoded slashes' do + let(:branch) { "improve/awesome" } + + it 'returns JSON response with message' do + expect(json_response).to eql('message' => 'Branch was removed') + end + + it { 
expect(response).to have_http_status(200) } + end + + context "valid branch name with encoded slashes" do + let(:branch) { 'improve%2Fawesome' } + + it 'returns JSON response with message' do + expect(json_response).to eql('message' => 'Branch was removed') + end + + it { expect(response).to have_http_status(200) } + end - it { expect(response).to have_http_status(200) } + context 'invalid branch name, valid ref' do + let(:branch) { 'no-branch' } + + it 'returns JSON response with message' do + expect(json_response).to eql('message' => 'No such branch') + end + + it { expect(response).to have_http_status(404) } + end end - context "invalid branch name, valid ref" do - let(:branch) { "no-branch" } - it { expect(response).to have_http_status(404) } + context 'as HTML' do + let(:branch) { "feature" } + let(:format) { :html } + + it 'redirects to branches path' do + expect(response) + .to redirect_to(namespace_project_branches_path(project.namespace, project)) + end end end diff --git a/spec/controllers/projects/builds_controller_spec.rb b/spec/controllers/projects/builds_controller_spec.rb index 683667129e5..3ce23c17cdc 100644 --- a/spec/controllers/projects/builds_controller_spec.rb +++ b/spec/controllers/projects/builds_controller_spec.rb @@ -3,15 +3,169 @@ require 'spec_helper' describe Projects::BuildsController do include ApiHelpers - let(:user) { create(:user) } let(:project) { create(:empty_project, :public) } + let(:pipeline) { create(:ci_pipeline, project: project) } + let(:user) { create(:user) } + + describe 'GET index' do + context 'when scope is pending' do + before do + create(:ci_build, :pending, pipeline: pipeline) + + get_index(scope: 'pending') + end + + it 'has only pending builds' do + expect(response).to have_http_status(:ok) + expect(assigns(:builds).first.status).to eq('pending') + end + end + + context 'when scope is running' do + before do + create(:ci_build, :running, pipeline: pipeline) + + get_index(scope: 'running') + end + + it 'has only running builds' do + expect(response).to have_http_status(:ok) + expect(assigns(:builds).first.status).to eq('running') + end + end + + context 'when scope is finished' do + before do + create(:ci_build, :success, pipeline: pipeline) + + get_index(scope: 'finished') + end + + it 'has only finished builds' do + expect(response).to have_http_status(:ok) + expect(assigns(:builds).first.status).to eq('success') + end + end + + context 'when page is specified' do + let(:last_page) { project.builds.page.total_pages } + + context 'when page number is eligible' do + before do + create_list(:ci_build, 2, pipeline: pipeline) + + get_index(page: last_page.to_param) + end + + it 'redirects to the page' do + expect(response).to have_http_status(:ok) + expect(assigns(:builds).current_page).to eq(last_page) + end + end + end - before do - sign_in(user) + context 'number of queries' do + before do + Ci::Build::AVAILABLE_STATUSES.each do |status| + create_build(status, status) + end + + RequestStore.begin! + end + + after do + RequestStore.end! + RequestStore.clear! 
+ end + + it "verifies number of queries" do + recorded = ActiveRecord::QueryRecorder.new { get_index } + expect(recorded.count).to be_within(5).of(8) + end + + def create_build(name, status) + pipeline = create(:ci_pipeline, project: project) + create(:ci_build, :tags, :triggered, :artifacts, + pipeline: pipeline, name: name, status: status) + end + end + + def get_index(**extra_params) + params = { + namespace_id: project.namespace.to_param, + project_id: project + } + + get :index, params.merge(extra_params) + end + end + + describe 'GET show' do + context 'when build exists' do + let!(:build) { create(:ci_build, pipeline: pipeline) } + + before do + get_show(id: build.id) + end + + it 'has a build' do + expect(response).to have_http_status(:ok) + expect(assigns(:build).id).to eq(build.id) + end + end + + context 'when build does not exist' do + before do + get_show(id: 1234) + end + + it 'renders not_found' do + expect(response).to have_http_status(:not_found) + end + end + + def get_show(**extra_params) + params = { + namespace_id: project.namespace.to_param, + project_id: project + } + + get :show, params.merge(extra_params) + end + end + + describe 'GET trace.json' do + before do + get_trace + end + + context 'when build has a trace' do + let(:build) { create(:ci_build, :trace, pipeline: pipeline) } + + it 'returns a trace' do + expect(response).to have_http_status(:ok) + expect(json_response['html']).to eq('BUILD TRACE') + end + end + + context 'when build has no traces' do + let(:build) { create(:ci_build, pipeline: pipeline) } + + it 'returns no traces' do + expect(response).to have_http_status(:ok) + expect(json_response['html']).to be_nil + end + end + + def get_trace + get :trace, namespace_id: project.namespace, + project_id: project, + id: build.id, + format: :json + end end describe 'GET status.json' do - let(:pipeline) { create(:ci_pipeline, project: project) } let(:build) { create(:ci_build, pipeline: pipeline) } let(:status) { build.detailed_status(double('user')) } @@ -27,7 +181,266 @@ describe Projects::BuildsController do expect(json_response['text']).to eq status.text expect(json_response['label']).to eq status.label expect(json_response['icon']).to eq status.icon - expect(json_response['favicon']).to eq status.favicon + expect(json_response['favicon']).to eq "/assets/ci_favicons/#{status.favicon}.ico" + end + end + + describe 'GET trace.json' do + let(:pipeline) { create(:ci_pipeline, project: project) } + let(:build) { create(:ci_build, pipeline: pipeline) } + let(:user) { create(:user) } + + context 'when user is logged in as developer' do + before do + project.add_developer(user) + sign_in(user) + + get_trace + end + + it 'traces build log' do + expect(response).to have_http_status(:ok) + expect(json_response['id']).to eq build.id + expect(json_response['status']).to eq build.status + end + end + + context 'when user is logged in as non member' do + before do + sign_in(user) + + get_trace + end + + it 'traces build log' do + expect(response).to have_http_status(:ok) + expect(json_response['id']).to eq build.id + expect(json_response['status']).to eq build.status + end + end + + def get_trace + get :trace, namespace_id: project.namespace, + project_id: project, + id: build.id, + format: :json + end + end + + describe 'POST retry' do + before do + project.add_developer(user) + sign_in(user) + + post_retry + end + + context 'when build is retryable' do + let(:build) { create(:ci_build, :retryable, pipeline: pipeline) } + + it 'redirects to the retried build page' do 
+ expect(response).to have_http_status(:found)
+ expect(response).to redirect_to(namespace_project_build_path(id: Ci::Build.last.id))
+ end
+ end
+
+ context 'when build is not retryable' do
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
+ it 'renders unprocessable_entity' do
+ expect(response).to have_http_status(:unprocessable_entity)
+ end
+ end
+
+ def post_retry
+ post :retry, namespace_id: project.namespace,
+ project_id: project,
+ id: build.id
+ end
+ end
+
+ describe 'POST play' do
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ post_play
+ end
+
+ context 'when build is playable' do
+ let(:build) { create(:ci_build, :playable, pipeline: pipeline) }
+
+ it 'redirects to the played build page' do
+ expect(response).to have_http_status(:found)
+ expect(response).to redirect_to(namespace_project_build_path(id: build.id))
+ end
+
+ it 'transits to pending' do
+ expect(build.reload).to be_pending
+ end
+ end
+
+ context 'when build is not playable' do
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
+ it 'renders unprocessable_entity' do
+ expect(response).to have_http_status(:unprocessable_entity)
+ end
+ end
+
+ def post_play
+ post :play, namespace_id: project.namespace,
+ project_id: project,
+ id: build.id
+ end
+ end
+
+ describe 'POST cancel' do
+ before do
+ project.add_developer(user)
+ sign_in(user)
+
+ post_cancel
+ end
+
+ context 'when build is cancelable' do
+ let(:build) { create(:ci_build, :cancelable, pipeline: pipeline) }
+
+ it 'redirects to the canceled build page' do
+ expect(response).to have_http_status(:found)
+ expect(response).to redirect_to(namespace_project_build_path(id: build.id))
+ end
+
+ it 'transits to canceled' do
+ expect(build.reload).to be_canceled
+ end
+ end
+
+ context 'when build is not cancelable' do
+ let(:build) { create(:ci_build, :canceled, pipeline: pipeline) }
+
+ it 'returns unprocessable_entity' do
+ expect(response).to have_http_status(:unprocessable_entity)
+ end
+ end
+
+ def post_cancel
+ post :cancel, namespace_id: project.namespace,
+ project_id: project,
+ id: build.id
+ end
+ end
+
+ describe 'POST cancel_all' do
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+ context 'when builds are cancelable' do
+ before do
+ create_list(:ci_build, 2, :cancelable, pipeline: pipeline)
+
+ post_cancel_all
+ end
+
+ it 'redirects to an index page' do
+ expect(response).to have_http_status(:found)
+ expect(response).to redirect_to(namespace_project_builds_path)
+ end
+
+ it 'transits to canceled' do
+ expect(Ci::Build.all).to all(be_canceled)
+ end
+ end
+
+ context 'when builds are not cancelable' do
+ before do
+ create_list(:ci_build, 2, :canceled, pipeline: pipeline)
+
+ post_cancel_all
+ end
+
+ it 'redirects to an index page' do
+ expect(response).to have_http_status(:found)
+ expect(response).to redirect_to(namespace_project_builds_path)
+ end
+ end
+
+ def post_cancel_all
+ post :cancel_all, namespace_id: project.namespace,
+ project_id: project
+ end
+ end
+
+ describe 'POST erase' do
+ before do
+ project.add_developer(user)
+ sign_in(user)
+
+ post_erase
+ end
+
+ context 'when build is erasable' do
+ let(:build) { create(:ci_build, :erasable, :trace, pipeline: pipeline) }
+
+ it 'redirects to the erased build page' do
+ expect(response).to have_http_status(:found)
+ expect(response).to redirect_to(namespace_project_build_path(id: build.id))
+ end
+
+ it 'erases artifacts' do
+ expect(build.artifacts_file.exists?).to be_falsey
+ expect(build.artifacts_metadata.exists?).to be_falsey
+
end + + it 'erases trace' do + expect(build.trace.exist?).to be_falsey + end + end + + context 'when build is not erasable' do + let(:build) { create(:ci_build, :erased, pipeline: pipeline) } + + it 'returns unprocessable_entity' do + expect(response).to have_http_status(:unprocessable_entity) + end + end + + def post_erase + post :erase, namespace_id: project.namespace, + project_id: project, + id: build.id + end + end + + describe 'GET raw' do + before do + get_raw + end + + context 'when build has a trace file' do + let(:build) { create(:ci_build, :trace, pipeline: pipeline) } + + it 'send a trace file' do + expect(response).to have_http_status(:ok) + expect(response.content_type).to eq 'text/plain; charset=utf-8' + expect(response.body).to eq 'BUILD TRACE' + end + end + + context 'when build does not have a trace file' do + let(:build) { create(:ci_build, pipeline: pipeline) } + + it 'returns not_found' do + expect(response).to have_http_status(:not_found) + end + end + + def get_raw + post :raw, namespace_id: project.namespace, + project_id: project, + id: build.id end end end diff --git a/spec/controllers/projects/builds_controller_specs.rb b/spec/controllers/projects/builds_controller_specs.rb deleted file mode 100644 index d501f7b3155..00000000000 --- a/spec/controllers/projects/builds_controller_specs.rb +++ /dev/null @@ -1,47 +0,0 @@ -require 'spec_helper' - -describe Projects::BuildsController do - include ApiHelpers - - let(:project) { create(:empty_project, :public) } - - describe 'GET trace.json' do - let(:pipeline) { create(:ci_pipeline, project: project) } - let(:build) { create(:ci_build, pipeline: pipeline) } - let(:user) { create(:user) } - - context 'when user is logged in as developer' do - before do - project.add_developer(user) - sign_in(user) - get_trace - end - - it 'traces build log' do - expect(response).to have_http_status(:ok) - expect(json_response['id']).to eq build.id - expect(json_response['status']).to eq build.status - end - end - - context 'when user is logged in as non member' do - before do - sign_in(user) - get_trace - end - - it 'traces build log' do - expect(response).to have_http_status(:ok) - expect(json_response['id']).to eq build.id - expect(json_response['status']).to eq build.status - end - end - - def get_trace - get :trace, namespace_id: project.namespace, - project_id: project, - id: build.id, - format: :json - end - end -end diff --git a/spec/controllers/projects/commit_controller_spec.rb b/spec/controllers/projects/commit_controller_spec.rb index b223a22ae60..69e4706dc71 100644 --- a/spec/controllers/projects/commit_controller_spec.rb +++ b/spec/controllers/projects/commit_controller_spec.rb @@ -266,8 +266,8 @@ describe Projects::CommitController do diff_for_path(id: commit2.id, old_path: existing_path, new_path: existing_path) expect(assigns(:diff_notes_disabled)).to be_falsey - expect(assigns(:comments_target)).to eq(noteable_type: 'Commit', - commit_id: commit2.id) + expect(assigns(:new_diff_note_attrs)).to eq(noteable_type: 'Commit', + commit_id: commit2.id) end it 'only renders the diffs for the path given' do diff --git a/spec/controllers/projects/deploy_keys_controller_spec.rb b/spec/controllers/projects/deploy_keys_controller_spec.rb new file mode 100644 index 00000000000..efe1a78415b --- /dev/null +++ b/spec/controllers/projects/deploy_keys_controller_spec.rb @@ -0,0 +1,66 @@ +require 'spec_helper' + +describe Projects::DeployKeysController do + let(:project) { create(:project, :repository) } + let(:user) { create(:user) } + + 
before do + project.team << [user, :master] + + sign_in(user) + end + + describe 'GET index' do + let(:params) do + { namespace_id: project.namespace, project_id: project } + end + + context 'when html requested' do + it 'redirects to blob' do + get :index, params + + expect(response).to redirect_to(namespace_project_settings_repository_path(params)) + end + end + + context 'when json requested' do + let(:project2) { create(:empty_project, :internal)} + let(:project_private) { create(:empty_project, :private)} + + let(:deploy_key_internal) do + create(:deploy_key, key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQCdMHEHyhRjbhEZVddFn6lTWdgEy5Q6Bz4nwGB76xWZI5YT/1WJOMEW+sL5zYd31kk7sd3FJ5L9ft8zWMWrr/iWXQikC2cqZK24H1xy+ZUmrRuJD4qGAaIVoyyzBL+avL+lF8J5lg6YSw8gwJY/lX64/vnJHUlWw2n5BF8IFOWhiw== dummy@gitlab.com') + end + let(:deploy_key_actual) do + create(:deploy_key, key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQDNd/UJWhPrpb+b/G5oL109y57yKuCxE+WUGJGYaj7WQKsYRJmLYh1mgjrl+KVyfsWpq4ylOxIfFSnN9xBBFN8mlb0Fma5DC7YsSsibJr3MZ19ZNBprwNcdogET7aW9I0In7Wu5f2KqI6e5W/spJHCy4JVxzVMUvk6Myab0LnJ2iQ== dummy@gitlab.com') + end + let!(:deploy_key_public) { create(:deploy_key, public: true) } + + let!(:deploy_keys_project_internal) do + create(:deploy_keys_project, project: project2, deploy_key: deploy_key_internal) + end + + let!(:deploy_keys_actual_project) do + create(:deploy_keys_project, project: project, deploy_key: deploy_key_actual) + end + + let!(:deploy_keys_project_private) do + create(:deploy_keys_project, project: project_private, deploy_key: create(:another_deploy_key)) + end + + before do + project2.team << [user, :developer] + end + + it 'returns json in a correct format' do + get :index, params.merge(format: :json) + + json = JSON.parse(response.body) + + expect(json.keys).to match_array(%w(enabled_keys available_project_keys public_keys)) + expect(json['enabled_keys'].count).to eq(1) + expect(json['available_project_keys'].count).to eq(1) + expect(json['public_keys'].count).to eq(1) + end + end + end +end diff --git a/spec/controllers/projects/deployments_controller_spec.rb b/spec/controllers/projects/deployments_controller_spec.rb new file mode 100644 index 00000000000..4c69443314d --- /dev/null +++ b/spec/controllers/projects/deployments_controller_spec.rb @@ -0,0 +1,116 @@ +require 'spec_helper' + +describe Projects::DeploymentsController do + include ApiHelpers + + let(:user) { create(:user) } + let(:project) { create(:empty_project) } + let(:environment) { create(:environment, name: 'production', project: project) } + + before do + project.team << [user, :master] + + sign_in(user) + end + + describe 'GET #index' do + it 'returns list of deployments from last 8 hours' do + create(:deployment, environment: environment, created_at: 9.hours.ago) + create(:deployment, environment: environment, created_at: 7.hours.ago) + create(:deployment, environment: environment) + + get :index, deployment_params(after: 8.hours.ago) + + expect(response).to be_ok + + expect(json_response['deployments'].count).to eq(2) + end + + it 'returns a list with deployments information' do + create(:deployment, environment: environment) + + get :index, deployment_params + + expect(response).to be_ok + expect(response).to match_response_schema('deployments') + end + end + + describe 'GET #metrics' do + let(:deployment) { create(:deployment, project: project, environment: environment) } + + before do + allow(controller).to receive(:deployment).and_return(deployment) + end + context 'when metrics are disabled' do + before do + 
allow(deployment).to receive(:has_metrics?).and_return false
+ end
+
+ it 'responds with not found' do
+ get :metrics, deployment_params(id: deployment.id)
+
+ expect(response).to be_not_found
+ end
+ end
+
+ context 'when metrics are enabled' do
+ before do
+ allow(deployment).to receive(:has_metrics?).and_return true
+ end
+
+ context 'when environment has no metrics' do
+ before do
+ expect(deployment).to receive(:metrics).and_return(nil)
+ end
+
+ it 'returns an empty 204 response' do
+ get :metrics, deployment_params(id: deployment.id)
+ expect(response).to have_http_status(204)
+ expect(response.body).to eq('')
+ end
+ end
+
+ context 'when environment has some metrics' do
+ let(:empty_metrics) do
+ {
+ success: true,
+ metrics: {},
+ last_update: 42
+ }
+ end
+
+ before do
+ expect(deployment).to receive(:metrics).and_return(empty_metrics)
+ end
+
+ it 'returns a metrics JSON document' do
+ get :metrics, deployment_params(id: deployment.id)
+
+ expect(response).to be_ok
+ expect(json_response['success']).to be(true)
+ expect(json_response['metrics']).to eq({})
+ expect(json_response['last_update']).to eq(42)
+ end
+ end
+
+ context 'when metrics service does not implement deployment metrics' do
+ before do
+ allow(deployment).to receive(:metrics).and_raise(NotImplementedError)
+ end
+
+ it 'responds with not found' do
+ get :metrics, deployment_params(id: deployment.id)
+
+ expect(response).to be_not_found
+ end
+ end
+ end
+ end
+
+ def deployment_params(opts = {})
+ opts.reverse_merge(namespace_id: project.namespace,
+ project_id: project,
+ environment_id: environment.id)
+ end
+end
diff --git a/spec/controllers/projects/discussions_controller_spec.rb b/spec/controllers/projects/discussions_controller_spec.rb
index 79ab364a6f3..fe62898fa9b 100644
--- a/spec/controllers/projects/discussions_controller_spec.rb
+++ b/spec/controllers/projects/discussions_controller_spec.rb
@@ -4,7 +4,7 @@ describe Projects::DiscussionsController do
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.source_project }
- let(:note) { create(:diff_note_on_merge_request, noteable: merge_request, project: project) }
+ let(:note) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
let(:discussion) { note.discussion }

let(:request_params) do
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index 5525fbd8130..c0f8c36a018 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -1,8 +1,6 @@
require 'spec_helper'

describe Projects::EnvironmentsController do
- include ApiHelpers
-
let(:user) { create(:user) }
let(:project) { create(:empty_project) }

@@ -151,6 +149,48 @@ describe Projects::EnvironmentsController do
end
end

+ describe 'PATCH #stop' do
+ context 'when env not available' do
+ it 'returns 404' do
+ allow_any_instance_of(Environment).to receive(:available?)
{ false } + + patch :stop, environment_params(format: :json) + + expect(response).to have_http_status(404) + end + end + + context 'when stop action' do + it 'returns action url' do + action = create(:ci_build, :manual) + + allow_any_instance_of(Environment) + .to receive_messages(available?: true, stop_with_action!: action) + + patch :stop, environment_params(format: :json) + + expect(response).to have_http_status(200) + expect(json_response).to eq( + { 'redirect_url' => + "http://test.host/#{project.path_with_namespace}/builds/#{action.id}" }) + end + end + + context 'when no stop action' do + it 'returns env url' do + allow_any_instance_of(Environment) + .to receive_messages(available?: true, stop_with_action!: nil) + + patch :stop, environment_params(format: :json) + + expect(response).to have_http_status(200) + expect(json_response).to eq( + { 'redirect_url' => + "http://test.host/#{project.path_with_namespace}/environments/#{environment.id}" }) + end + end + end + describe 'GET #terminal' do context 'with valid id' do it 'responds with a status code 200' do diff --git a/spec/controllers/projects/imports_controller_spec.rb b/spec/controllers/projects/imports_controller_spec.rb index 7c75815f3c4..6724b474179 100644 --- a/spec/controllers/projects/imports_controller_spec.rb +++ b/spec/controllers/projects/imports_controller_spec.rb @@ -96,12 +96,19 @@ describe Projects::ImportsController do } end - it 'redirects to params[:to]' do + it 'redirects to internal params[:to]' do get :show, namespace_id: project.namespace.to_param, project_id: project, continue: params expect(flash[:notice]).to eq params[:notice] expect(response).to redirect_to params[:to] end + + it 'does not redirect to external params[:to]' do + params[:to] = "//google.com" + + get :show, namespace_id: project.namespace.to_param, project_id: project, continue: params + expect(response).not_to redirect_to params[:to] + end end end diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb index 734966d50b2..04afd07c59e 100644 --- a/spec/controllers/projects/issues_controller_spec.rb +++ b/spec/controllers/projects/issues_controller_spec.rb @@ -83,6 +83,17 @@ describe Projects::IssuesController do expect(assigns(:issues).current_page).to eq(last_page) expect(response).to have_http_status(200) end + + it 'does not redirect to external sites when provided a host field' do + external_host = "www.example.com" + get :index, + namespace_id: project.namespace.to_param, + project_id: project, + page: (last_page + 1).to_param, + host: external_host + + expect(response).to redirect_to(namespace_project_issues_path(page: last_page, state: controller.params[:state], scope: controller.params[:scope])) + end end end @@ -145,6 +156,32 @@ describe Projects::IssuesController do end end + describe 'Redirect after sign in' do + context 'with an AJAX request' do + it 'does not store the visited URL' do + xhr :get, + :show, + format: :json, + namespace_id: project.namespace, + project_id: project, + id: issue.iid + + expect(session['user_return_to']).to be_blank + end + end + + context 'without an AJAX request' do + it 'stores the visited URL' do + get :show, + namespace_id: project.namespace.to_param, + project_id: project, + id: issue.iid + + expect(session['user_return_to']).to eq("/#{project.namespace.to_param}/#{project.to_param}/issues/#{issue.iid}") + end + end + end + describe 'PUT #update' do before do sign_in(user) @@ -162,12 +199,12 @@ describe Projects::IssuesController do 
namespace_id: project.namespace.to_param, project_id: project, id: issue.iid, - issue: { assignee_id: assignee.id }, + issue: { assignee_ids: [assignee.id] }, format: :json body = JSON.parse(response.body) - expect(body['assignee'].keys) - .to match_array(%w(name username avatar_url)) + expect(body['assignees'].first.keys) + .to match_array(%w(id name username avatar_url)) end end @@ -337,7 +374,7 @@ describe Projects::IssuesController do let(:admin) { create(:admin) } let!(:issue) { create(:issue, project: project) } let!(:unescaped_parameter_value) { create(:issue, :confidential, project: project, author: author) } - let!(:request_forgery_timing_attack) { create(:issue, :confidential, project: project, assignee: assignee) } + let!(:request_forgery_timing_attack) { create(:issue, :confidential, project: project, assignees: [assignee]) } describe 'GET #index' do it 'does not list confidential issues for guests' do @@ -508,7 +545,7 @@ describe Projects::IssuesController do end context 'resolving discussions in MergeRequest' do - let(:discussion) { Discussion.for_diff_notes([create(:diff_note_on_merge_request)]).first } + let(:discussion) { create(:diff_note_on_merge_request).to_discussion } let(:merge_request) { discussion.noteable } let(:project) { merge_request.source_project } @@ -745,4 +782,28 @@ describe Projects::IssuesController do expect(response).to have_http_status(200) end end + + describe 'POST create_merge_request' do + before do + project.add_developer(user) + sign_in(user) + end + + it 'creates a new merge request' do + expect { create_merge_request }.to change(project.merge_requests, :count).by(1) + end + + it 'render merge request as json' do + create_merge_request + + expect(response).to match_response_schema('merge_request') + end + + def create_merge_request + post :create_merge_request, namespace_id: project.namespace.to_param, + project_id: project.to_param, + id: issue.to_param, + format: :json + end + end end diff --git a/spec/controllers/projects/labels_controller_spec.rb b/spec/controllers/projects/labels_controller_spec.rb index 6a6e9bf378a..130b0b744b5 100644 --- a/spec/controllers/projects/labels_controller_spec.rb +++ b/spec/controllers/projects/labels_controller_spec.rb @@ -127,7 +127,7 @@ describe Projects::LabelsController do context 'group owner' do before do - GroupMember.add_users_to_group(group, [user], :owner) + GroupMember.add_users(group, [user], :owner) end it 'gives access' do @@ -157,4 +157,74 @@ describe Projects::LabelsController do end end end + + describe '#ensure_canonical_path' do + before do + sign_in(user) + end + + context 'for a GET request' do + context 'when requesting the canonical path' do + context 'non-show path' do + context 'with exactly matching casing' do + it 'does not redirect' do + get :index, namespace_id: project.namespace, project_id: project.to_param + + expect(response).not_to have_http_status(301) + end + end + + context 'with different casing' do + it 'redirects to the correct casing' do + get :index, namespace_id: project.namespace, project_id: project.to_param.upcase + + expect(response).to redirect_to(namespace_project_labels_path(project.namespace, project)) + expect(controller).not_to set_flash[:notice] + end + end + end + end + + context 'when requesting a redirected path' do + let!(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') } + + it 'redirects to the canonical path' do + get :index, namespace_id: project.namespace, project_id: project.to_param + 'old' + + 
expect(response).to redirect_to(namespace_project_labels_path(project.namespace, project)) + expect(controller).to set_flash[:notice].to(project_moved_message(redirect_route, project)) + end + end + end + end + + context 'for a non-GET request' do + context 'when requesting the canonical path with different casing' do + it 'does not 404' do + post :generate, namespace_id: project.namespace, project_id: project + + expect(response).not_to have_http_status(404) + end + + it 'does not redirect to the correct casing' do + post :generate, namespace_id: project.namespace, project_id: project + + expect(response).not_to have_http_status(301) + end + end + + context 'when requesting a redirected path' do + let!(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') } + + it 'returns not found' do + post :generate, namespace_id: project.namespace, project_id: project.to_param + 'old' + + expect(response).to have_http_status(404) + end + end + end + + def project_moved_message(redirect_route, project) + "Project '#{redirect_route.path}' was moved to '#{project.full_path}'. Please update any links and bookmarks that may still have the old path." + end end diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb index 72f41f7209a..f0dc6df15ee 100644 --- a/spec/controllers/projects/merge_requests_controller_spec.rb +++ b/spec/controllers/projects/merge_requests_controller_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe Projects::MergeRequestsController do - include ApiHelpers - let(:project) { create(:project) } let(:user) { create(:user) } let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) } @@ -61,6 +59,18 @@ describe Projects::MergeRequestsController do end end + describe 'GET commit_change_content' do + it 'renders commit_change_content template' do + get :commit_change_content, + namespace_id: project.namespace.to_param, + project_id: project, + id: merge_request.iid, + format: 'html' + + expect(response).to render_template('_commit_change_content') + end + end + shared_examples "loads labels" do |action| it "loads labels into the @labels variable" do get action, @@ -73,63 +83,59 @@ describe Projects::MergeRequestsController do end describe "GET show" do - shared_examples "export merge as" do |format| - it "does generally work" do - get(:show, - namespace_id: project.namespace.to_param, - project_id: project, - id: merge_request.iid, - format: format) + def go(extra_params = {}) + params = { + namespace_id: project.namespace.to_param, + project_id: project, + id: merge_request.iid + } + + get :show, params.merge(extra_params) + end + + it_behaves_like "loads labels", :show + + describe 'as html' do + it "renders merge request page" do + go(format: :html) expect(response).to be_success end + end - it_behaves_like "loads labels", :show - - it "generates it" do - expect_any_instance_of(MergeRequest).to receive(:"to_#{format}") + describe 'as json' do + context 'with basic param' do + it 'renders basic MR entity as json' do + go(basic: true, format: :json) - get(:show, - namespace_id: project.namespace.to_param, - project_id: project, - id: merge_request.iid, - format: format) + expect(response).to match_response_schema('entities/merge_request_basic') + end end - it "renders it" do - get(:show, - namespace_id: project.namespace.to_param, - project_id: project, - id: merge_request.iid, - format: format) + context 'without basic param' do 
+ it 'renders the merge request in the json format' do + go(format: :json) - expect(response.body).to eq(merge_request.send(:"to_#{format}").to_s) + expect(response).to match_response_schema('entities/merge_request') + end end - it "does not escape Html" do - allow_any_instance_of(MergeRequest).to receive(:"to_#{format}"). - and_return('HTML entities &<>" ') + context 'number of queries' do + it 'verifies number of queries' do + # pre-create objects + merge_request - get(:show, - namespace_id: project.namespace.to_param, - project_id: project, - id: merge_request.iid, - format: format) + recorded = ActiveRecord::QueryRecorder.new { go(format: :json) } - expect(response.body).not_to include('&') - expect(response.body).not_to include('>') - expect(response.body).not_to include('<') - expect(response.body).not_to include('"') + expect(recorded.count).to be_within(1).of(51) + expect(recorded.cached_count).to eq(0) + end end end describe "as diff" do it "triggers workhorse to serve the request" do - get(:show, - namespace_id: project.namespace.to_param, - project_id: project, - id: merge_request.iid, - format: :diff) + go(format: :diff) expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-diff:") end @@ -137,11 +143,7 @@ describe Projects::MergeRequestsController do describe "as patch" do it 'triggers workhorse to serve the request' do - get(:show, - namespace_id: project.namespace.to_param, - project_id: project, - id: merge_request.iid, - format: :patch) + go(format: :patch) expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-format-patch:") end @@ -176,6 +178,18 @@ describe Projects::MergeRequestsController do expect(assigns(:merge_requests).current_page).to eq(last_page) expect(response).to have_http_status(200) end + + it 'does not redirect to external sites when provided a host field' do + external_host = "www.example.com" + get :index, + namespace_id: project.namespace.to_param, + project_id: project, + state: 'opened', + page: (last_page + 1).to_param, + host: external_host + + expect(response).to redirect_to(namespace_project_merge_requests_path(page: last_page, state: controller.params[:state], scope: controller.params[:scope])) + end end context 'when filtering by opened state' do @@ -285,19 +299,18 @@ describe Projects::MergeRequestsController do namespace_id: project.namespace, project_id: project, id: merge_request.iid, - format: 'raw' + format: 'json' } end - context 'when the user does not have access' do + context 'when user cannot access' do before do - project.team.truncate - project.team << [user, :reporter] - post :merge, base_params + project.add_reporter(user) + xhr :post, :merge, base_params end - it 'returns not found' do - expect(response).to be_not_found + it 'returns 404' do + expect(response).to have_http_status(404) end end @@ -309,7 +322,7 @@ describe Projects::MergeRequestsController do end it 'returns :failed' do - expect(assigns(:status)).to eq(:failed) + expect(json_response).to eq('status' => 'failed') end end @@ -317,7 +330,7 @@ describe Projects::MergeRequestsController do before { post :merge, base_params.merge(sha: 'foo') } it 'returns :sha_mismatch' do - expect(assigns(:status)).to eq(:sha_mismatch) + expect(json_response).to eq('status' => 'sha_mismatch') end end @@ -329,7 +342,7 @@ describe Projects::MergeRequestsController do it 'returns :success' do merge_with_sha - expect(assigns(:status)).to eq(:success) + expect(json_response).to eq('status' => 'success') end it 'starts the merge 
immediately' do @@ -344,13 +357,14 @@ describe Projects::MergeRequestsController do end before do - create(:ci_empty_pipeline, project: project, sha: merge_request.diff_head_sha, ref: merge_request.source_branch) + pipeline = create(:ci_empty_pipeline, project: project, sha: merge_request.diff_head_sha, ref: merge_request.source_branch) + merge_request.update(head_pipeline: pipeline) end it 'returns :merge_when_pipeline_succeeds' do merge_when_pipeline_succeeds - expect(assigns(:status)).to eq(:merge_when_pipeline_succeeds) + expect(json_response).to eq('status' => 'merge_when_pipeline_succeeds') end it 'sets the MR to merge when the pipeline succeeds' do @@ -372,7 +386,7 @@ describe Projects::MergeRequestsController do it 'returns :merge_when_pipeline_succeeds' do merge_when_pipeline_succeeds - expect(assigns(:status)).to eq(:merge_when_pipeline_succeeds) + expect(json_response).to eq('status' => 'merge_when_pipeline_succeeds') end end end @@ -393,7 +407,7 @@ describe Projects::MergeRequestsController do it 'returns :failed' do merge_with_sha - expect(assigns(:status)).to eq(:failed) + expect(json_response).to eq('status' => 'failed') end end @@ -406,7 +420,7 @@ describe Projects::MergeRequestsController do it 'returns :success' do merge_with_sha - expect(assigns(:status)).to eq(:success) + expect(json_response).to eq('status' => 'success') end end end @@ -424,7 +438,7 @@ describe Projects::MergeRequestsController do it 'returns :success' do merge_with_sha - expect(assigns(:status)).to eq(:success) + expect(json_response).to eq('status' => 'success') end end @@ -437,7 +451,7 @@ describe Projects::MergeRequestsController do it 'returns :success' do merge_with_sha - expect(assigns(:status)).to eq(:success) + expect(json_response).to eq('status' => 'success') end end end @@ -574,8 +588,8 @@ describe Projects::MergeRequestsController do diff_for_path(id: merge_request.iid, old_path: existing_path, new_path: existing_path) expect(assigns(:diff_notes_disabled)).to be_falsey - expect(assigns(:comments_target)).to eq(noteable_type: 'MergeRequest', - noteable_id: merge_request.id) + expect(assigns(:new_diff_note_attrs)).to eq(noteable_type: 'MergeRequest', + noteable_id: merge_request.id) end it 'only renders the diffs for the path given' do @@ -821,18 +835,55 @@ describe Projects::MergeRequestsController do end end - context 'POST remove_wip' do - it 'removes the wip status' do + describe 'POST remove_wip' do + before do merge_request.title = merge_request.wip_title merge_request.save - post :remove_wip, - namespace_id: merge_request.project.namespace.to_param, - project_id: merge_request.project, - id: merge_request.iid + xhr :post, :remove_wip, + namespace_id: merge_request.project.namespace.to_param, + project_id: merge_request.project, + id: merge_request.iid, + format: :json + end + it 'removes the wip status' do expect(merge_request.reload.title).to eq(merge_request.wipless_title) end + + it 'renders MergeRequest as JSON' do + expect(json_response.keys).to include('id', 'iid', 'description') + end + end + + describe 'POST cancel_merge_when_pipeline_succeeds' do + subject do + xhr :post, :cancel_merge_when_pipeline_succeeds, + namespace_id: merge_request.project.namespace.to_param, + project_id: merge_request.project, + id: merge_request.iid, + format: :json + end + + it 'calls MergeRequests::MergeWhenPipelineSucceedsService' do + mwps_service = double + + allow(MergeRequests::MergeWhenPipelineSucceedsService) + .to receive(:new) + .and_return(mwps_service) + + expect(mwps_service).to 
receive(:cancel).with(merge_request) + + subject + end + + it { is_expected.to have_http_status(:success) } + + it 'renders MergeRequest as JSON' do + subject + + expect(json_response.keys).to include('id', 'iid', 'description') + end end describe 'GET conflict_for_path' do @@ -877,7 +928,9 @@ describe Projects::MergeRequestsController do end it 'returns the file in JSON format' do - content = merge_request_with_conflicts.conflicts.file_for_path(path, path).content + content = MergeRequests::Conflicts::ListService.new(merge_request_with_conflicts). + file_for_path(path, path). + content expect(json_response).to include('old_path' => path, 'new_path' => path, @@ -1001,11 +1054,15 @@ describe Projects::MergeRequestsController do context 'when a file has identical content to the conflict' do before do + content = MergeRequests::Conflicts::ListService.new(merge_request_with_conflicts). + file_for_path('files/ruby/popen.rb', 'files/ruby/popen.rb'). + content + resolved_files = [ { 'new_path' => 'files/ruby/popen.rb', 'old_path' => 'files/ruby/popen.rb', - 'content' => merge_request_with_conflicts.conflicts.file_for_path('files/ruby/popen.rb', 'files/ruby/popen.rb').content + 'content' => content }, { 'new_path' => 'files/ruby/regex.rb', 'old_path' => 'files/ruby/regex.rb', @@ -1057,7 +1114,7 @@ describe Projects::MergeRequestsController do end it 'correctly pluralizes flash message on success' do - issue2.update!(assignee: user) + issue2.assignees = [user] post_assign_issues @@ -1111,74 +1168,6 @@ describe Projects::MergeRequestsController do end end - describe 'GET merge_widget_refresh' do - let(:params) do - { - namespace_id: project.namespace, - project_id: project, - id: merge_request.iid, - format: :raw - } - end - - before do - project.team << [user, :developer] - xhr :get, :merge_widget_refresh, params - end - - context 'when merge in progress' do - let(:merge_request) { create(:merge_request, source_project: project, in_progress_merge_commit_sha: 'sha') } - - it 'returns an OK response' do - expect(response).to have_http_status(:ok) - end - - it 'sets status to :success' do - expect(assigns(:status)).to eq(:success) - expect(response).to render_template('merge') - end - end - - context 'when merge request was merged already' do - let(:merge_request) { create(:merge_request, source_project: project, state: :merged) } - - it 'returns an OK response' do - expect(response).to have_http_status(:ok) - end - - it 'sets status to :success' do - expect(assigns(:status)).to eq(:success) - expect(response).to render_template('merge') - end - end - - context 'when waiting for build' do - let(:merge_request) { create(:merge_request, source_project: project, merge_when_pipeline_succeeds: true, merge_user: user) } - - it 'returns an OK response' do - expect(response).to have_http_status(:ok) - end - - it 'sets status to :merge_when_pipeline_succeeds' do - expect(assigns(:status)).to eq(:merge_when_pipeline_succeeds) - expect(response).to render_template('merge') - end - end - - context 'when MR does not have special state' do - let(:merge_request) { create(:merge_request, source_project: project) } - - it 'returns an OK response' do - expect(response).to have_http_status(:ok) - end - - it 'sets status to success' do - expect(assigns(:status)).to eq(:success) - expect(response).to render_template('merge') - end - end - end - describe 'GET pipeline_status.json' do context 'when head_pipeline exists' do let!(:pipeline) do @@ -1189,14 +1178,17 @@ describe Projects::MergeRequestsController do let(:status) 
{ pipeline.detailed_status(double('user')) } - before { get_pipeline_status } + before do + merge_request.update(head_pipeline: pipeline) + get_pipeline_status + end it 'return a detailed head_pipeline status in json' do expect(response).to have_http_status(:ok) expect(json_response['text']).to eq status.text expect(json_response['label']).to eq status.label expect(json_response['icon']).to eq status.icon - expect(json_response['favicon']).to eq status.favicon + expect(json_response['favicon']).to eq "/assets/ci_favicons/#{status.favicon}.ico" end end diff --git a/spec/controllers/projects/milestones_controller_spec.rb b/spec/controllers/projects/milestones_controller_spec.rb index 14207bf6b7a..84a61b2784e 100644 --- a/spec/controllers/projects/milestones_controller_spec.rb +++ b/spec/controllers/projects/milestones_controller_spec.rb @@ -5,7 +5,9 @@ describe Projects::MilestonesController do let(:user) { create(:user) } let(:milestone) { create(:milestone, project: project) } let(:issue) { create(:issue, project: project, milestone: milestone) } + let!(:label) { create(:label, project: project, title: 'Issue Label', issues: [issue]) } let!(:merge_request) { create(:merge_request, source_project: project, target_project: project, milestone: milestone) } + let(:milestone_path) { namespace_project_milestone_path } before do sign_in(user) @@ -13,6 +15,22 @@ describe Projects::MilestonesController do controller.instance_variable_set(:@project, project) end + it_behaves_like 'milestone tabs' + + describe "#show" do + render_views + + def view_milestone + get :show, namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid + end + + it 'shows milestone page' do + view_milestone + + expect(response).to have_http_status(200) + end + end + describe "#destroy" do it "removes milestone" do expect(issue.milestone_id).to eq(milestone.id) diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb index d80780b1d90..45f4cf9180d 100644 --- a/spec/controllers/projects/notes_controller_spec.rb +++ b/spec/controllers/projects/notes_controller_spec.rb @@ -14,6 +14,109 @@ describe Projects::NotesController do } end + describe 'GET index' do + let(:request_params) do + { + namespace_id: project.namespace, + project_id: project, + target_type: 'issue', + target_id: issue.id, + format: 'json' + } + end + + let(:parsed_response) { JSON.parse(response.body).with_indifferent_access } + let(:note_json) { parsed_response[:notes].first } + + before do + sign_in(user) + project.team << [user, :developer] + end + + it 'passes last_fetched_at from headers to NotesFinder' do + last_fetched_at = 3.hours.ago.to_i + + request.headers['X-Last-Fetched-At'] = last_fetched_at + + expect(NotesFinder).to receive(:new) + .with(anything, anything, hash_including(last_fetched_at: last_fetched_at)) + .and_call_original + + get :index, request_params + end + + context 'for a discussion note' do + let!(:note) { create(:discussion_note_on_issue, noteable: issue, project: project) } + + it 'responds with the expected attributes' do + get :index, request_params + + expect(note_json[:id]).to eq(note.id) + expect(note_json[:discussion_html]).not_to be_nil + expect(note_json[:diff_discussion_html]).to be_nil + end + end + + context 'for a diff discussion note' do + let(:project) { create(:project, :repository) } + let!(:note) { create(:diff_note_on_merge_request, project: project) } + + let(:params) { request_params.merge(target_type: 'merge_request', target_id: 
note.noteable_id) } + + it 'responds with the expected attributes' do + get :index, params + + expect(note_json[:id]).to eq(note.id) + expect(note_json[:discussion_html]).not_to be_nil + expect(note_json[:diff_discussion_html]).not_to be_nil + end + end + + context 'for a commit note' do + let(:project) { create(:project, :repository) } + let!(:note) { create(:note_on_commit, project: project) } + + context 'when displayed on a merge request' do + let(:merge_request) { create(:merge_request, source_project: project) } + + let(:params) { request_params.merge(target_type: 'merge_request', target_id: merge_request.id) } + + it 'responds with the expected attributes' do + get :index, params + + expect(note_json[:id]).to eq(note.id) + expect(note_json[:discussion_html]).not_to be_nil + expect(note_json[:diff_discussion_html]).to be_nil + end + end + + context 'when displayed on the commit' do + let(:params) { request_params.merge(target_type: 'commit', target_id: note.commit_id) } + + it 'responds with the expected attributes' do + get :index, params + + expect(note_json[:id]).to eq(note.id) + expect(note_json[:discussion_html]).to be_nil + expect(note_json[:diff_discussion_html]).to be_nil + end + end + end + + context 'for a regular note' do + let!(:note) { create(:note, noteable: issue, project: project) } + + it 'responds with the expected attributes' do + get :index, request_params + + expect(note_json[:id]).to eq(note.id) + expect(note_json[:html]).not_to be_nil + expect(note_json[:discussion_html]).to be_nil + expect(note_json[:diff_discussion_html]).to be_nil + end + end + end + describe 'POST create' do let(:merge_request) { create(:merge_request) } let(:project) { merge_request.source_project } @@ -49,7 +152,8 @@ describe Projects::NotesController do note: 'some note', noteable_id: merge_request.id.to_s, noteable_type: 'MergeRequest', - merge_request_diff_head_sha: 'sha' + merge_request_diff_head_sha: 'sha', + in_reply_to_discussion_id: nil } expect(Notes::CreateService).to receive(:new).with(project, user, service_params).and_return(double(execute: true)) @@ -63,6 +167,47 @@ describe Projects::NotesController do end end + describe 'DELETE destroy' do + let(:request_params) do + { + namespace_id: project.namespace, + project_id: project, + id: note, + format: :js + } + end + + context 'user is the author of a note' do + before do + sign_in(note.author) + project.team << [note.author, :developer] + end + + it "returns status 200 for html" do + delete :destroy, request_params + + expect(response).to have_http_status(200) + end + + it "deletes the note" do + expect { delete :destroy, request_params }.to change { Note.count }.from(1).to(0) + end + end + + context 'user is not the author of a note' do + before do + sign_in(user) + project.team << [user, :developer] + end + + it "returns status 404" do + delete :destroy, request_params + + expect(response).to have_http_status(404) + end + end + end + describe 'POST toggle_award_emoji' do before do sign_in(user) @@ -200,31 +345,4 @@ describe Projects::NotesController do end end end - - describe 'GET index' do - let(:last_fetched_at) { '1487756246' } - let(:request_params) do - { - namespace_id: project.namespace, - project_id: project, - target_type: 'issue', - target_id: issue.id - } - end - - before do - sign_in(user) - project.team << [user, :developer] - end - - it 'passes last_fetched_at from headers to NotesFinder' do - request.headers['X-Last-Fetched-At'] = last_fetched_at - - expect(NotesFinder).to receive(:new) - .with(anything, 
anything, hash_including(last_fetched_at: last_fetched_at)) - .and_call_original - - get :index, request_params - end - end end diff --git a/spec/controllers/projects/pages_controller_spec.rb b/spec/controllers/projects/pages_controller_spec.rb new file mode 100644 index 00000000000..df35d8e86b9 --- /dev/null +++ b/spec/controllers/projects/pages_controller_spec.rb @@ -0,0 +1,57 @@ +require 'spec_helper' + +describe Projects::PagesController do + let(:user) { create(:user) } + let(:project) { create(:empty_project, :public, :access_requestable) } + + let(:request_params) do + { + namespace_id: project.namespace, + project_id: project + } + end + + before do + allow(Gitlab.config.pages).to receive(:enabled).and_return(true) + sign_in(user) + project.add_master(user) + end + + describe 'GET show' do + it 'returns 200 status' do + get :show, request_params + + expect(response).to have_http_status(200) + end + end + + describe 'DELETE destroy' do + it 'returns 302 status' do + delete :destroy, request_params + + expect(response).to have_http_status(302) + end + end + + context 'pages disabled' do + before do + allow(Gitlab.config.pages).to receive(:enabled).and_return(false) + end + + describe 'GET show' do + it 'returns 404 status' do + get :show, request_params + + expect(response).to have_http_status(404) + end + end + + describe 'DELETE destroy' do + it 'returns 404 status' do + delete :destroy, request_params + + expect(response).to have_http_status(404) + end + end + end +end diff --git a/spec/controllers/projects/pages_domains_controller_spec.rb b/spec/controllers/projects/pages_domains_controller_spec.rb index 2362df895a8..33853c4b9d0 100644 --- a/spec/controllers/projects/pages_domains_controller_spec.rb +++ b/spec/controllers/projects/pages_domains_controller_spec.rb @@ -1,8 +1,9 @@ require 'spec_helper' describe Projects::PagesDomainsController do - let(:user) { create(:user) } - let(:project) { create(:project) } + let(:user) { create(:user) } + let(:project) { create(:empty_project) } + let!(:pages_domain) { create(:pages_domain, project: project) } let(:request_params) do { @@ -11,14 +12,17 @@ describe Projects::PagesDomainsController do } end + let(:pages_domain_params) do + build(:pages_domain, :with_certificate, :with_key, domain: 'my.otherdomain.com').slice(:key, :certificate, :domain) + end + before do + allow(Gitlab.config.pages).to receive(:enabled).and_return(true) sign_in(user) - project.team << [user, :master] + project.add_master(user) end describe 'GET show' do - let!(:pages_domain) { create(:pages_domain, project: project) } - it "displays the 'show' page" do get(:show, request_params.merge(id: pages_domain.domain)) @@ -37,10 +41,6 @@ describe Projects::PagesDomainsController do end describe 'POST create' do - let(:pages_domain_params) do - build(:pages_domain, :with_certificate, :with_key).slice(:key, :certificate, :domain) - end - it "creates a new pages domain" do expect do post(:create, request_params.merge(pages_domain: pages_domain_params)) @@ -51,8 +51,6 @@ describe Projects::PagesDomainsController do end describe 'DELETE destroy' do - let!(:pages_domain) { create(:pages_domain, project: project) } - it "deletes the pages domain" do expect do delete(:destroy, request_params.merge(id: pages_domain.domain)) @@ -61,4 +59,42 @@ describe Projects::PagesDomainsController do expect(response).to redirect_to(namespace_project_pages_path(project.namespace, project)) end end + + context 'pages disabled' do + before do + allow(Gitlab.config.pages).to 
receive(:enabled).and_return(false) + end + + describe 'GET show' do + it 'returns 404 status' do + get(:show, request_params.merge(id: pages_domain.domain)) + + expect(response).to have_http_status(404) + end + end + + describe 'GET new' do + it 'returns 404 status' do + get :new, request_params + + expect(response).to have_http_status(404) + end + end + + describe 'POST create' do + it "returns 404 status" do + post(:create, request_params.merge(pages_domain: pages_domain_params)) + + expect(response).to have_http_status(404) + end + end + + describe 'DELETE destroy' do + it "deletes the pages domain" do + delete(:destroy, request_params.merge(id: pages_domain.domain)) + + expect(response).to have_http_status(404) + end + end + end end diff --git a/spec/controllers/projects/pipeline_schedules_controller_spec.rb b/spec/controllers/projects/pipeline_schedules_controller_spec.rb new file mode 100644 index 00000000000..f8f95dd9bc8 --- /dev/null +++ b/spec/controllers/projects/pipeline_schedules_controller_spec.rb @@ -0,0 +1,87 @@ +require 'spec_helper' + +describe Projects::PipelineSchedulesController do + set(:project) { create(:empty_project, :public) } + let!(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) } + + describe 'GET #index' do + let(:scope) { nil } + let!(:inactive_pipeline_schedule) do + create(:ci_pipeline_schedule, :inactive, project: project) + end + + it 'renders the index view' do + visit_pipelines_schedules + + expect(response).to have_http_status(:ok) + expect(response).to render_template(:index) + end + + context 'when the scope is set to active' do + let(:scope) { 'active' } + + before do + visit_pipelines_schedules + end + + it 'only shows active pipeline schedules' do + expect(response).to have_http_status(:ok) + expect(assigns(:schedules)).to include(pipeline_schedule) + expect(assigns(:schedules)).not_to include(inactive_pipeline_schedule) + end + end + + def visit_pipelines_schedules + get :index, namespace_id: project.namespace.to_param, project_id: project, scope: scope + end + end + + describe 'GET edit' do + let(:user) { create(:user) } + + before do + project.add_master(user) + + sign_in(user) + end + + it 'loads the pipeline schedule' do + get :edit, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id + + expect(response).to have_http_status(:ok) + expect(assigns(:schedule)).to eq(pipeline_schedule) + end + end + + describe 'DELETE #destroy' do + set(:user) { create(:user) } + + context 'when a developer makes the request' do + before do + project.add_developer(user) + sign_in(user) + + delete :destroy, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id + end + + it 'does not delete the pipeline schedule' do + expect(response).not_to have_http_status(:ok) + end + end + + context 'when a master makes the request' do + before do + project.add_master(user) + sign_in(user) + end + + it 'destroys the pipeline schedule' do + expect do + delete :destroy, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id + end.to change { project.pipeline_schedules.count }.by(-1) + + expect(response).to have_http_status(302) + end + end + end +end diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb index d8f9bfd0d37..c880da1e36a 100644 --- a/spec/controllers/projects/pipelines_controller_spec.rb +++ b/spec/controllers/projects/pipelines_controller_spec.rb @@ -7,6 +7,8 @@ describe 
Projects::PipelinesController do let(:project) { create(:empty_project, :public) } before do + project.add_developer(user) + sign_in(user) end @@ -24,6 +26,7 @@ describe Projects::PipelinesController do it 'returns JSON with serialized pipelines' do expect(response).to have_http_status(:ok) + expect(response).to match_response_schema('pipeline') expect(json_response).to include('pipelines') expect(json_response['pipelines'].count).to eq 4 @@ -34,6 +37,62 @@ describe Projects::PipelinesController do end end + describe 'GET show JSON' do + let(:pipeline) { create(:ci_pipeline_with_one_job, project: project) } + + it 'returns the pipeline' do + get_pipeline_json + + expect(response).to have_http_status(:ok) + expect(json_response).not_to be_an(Array) + expect(json_response['id']).to be(pipeline.id) + expect(json_response['details']).to have_key 'stages' + end + + context 'when the pipeline has multiple stages and groups' do + before do + RequestStore.begin! + + create_build('build', 0, 'build') + create_build('test', 1, 'rspec 0') + create_build('deploy', 2, 'production') + create_build('post deploy', 3, 'pages 0') + end + + after do + RequestStore.end! + RequestStore.clear! + end + + let(:project) { create(:project) } + let(:pipeline) do + create(:ci_empty_pipeline, project: project, user: user, sha: project.commit.id) + end + + it 'does not perform N + 1 queries' do + control_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count + + create_build('test', 1, 'rspec 1') + create_build('test', 1, 'spinach 0') + create_build('test', 1, 'spinach 1') + create_build('test', 1, 'audit') + create_build('post deploy', 3, 'pages 1') + create_build('post deploy', 3, 'pages 2') + + new_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count + expect(new_count).to be_within(12).of(control_count) + end + end + + def get_pipeline_json + get :show, namespace_id: project.namespace, project_id: project, id: pipeline, format: :json + end + + def create_build(stage, stage_idx, name) + create(:ci_build, pipeline: pipeline, stage: stage, stage_idx: stage_idx, name: name) + end + end + describe 'GET stages.json' do let(:pipeline) { create(:ci_pipeline, project: project) } @@ -86,7 +145,41 @@ describe Projects::PipelinesController do expect(json_response['text']).to eq status.text expect(json_response['label']).to eq status.label expect(json_response['icon']).to eq status.icon - expect(json_response['favicon']).to eq status.favicon + expect(json_response['favicon']).to eq "/assets/ci_favicons/#{status.favicon}.ico" + end + end + + describe 'POST retry.json' do + let!(:pipeline) { create(:ci_pipeline, :failed, project: project) } + let!(:build) { create(:ci_build, :failed, pipeline: pipeline) } + + before do + post :retry, namespace_id: project.namespace, + project_id: project, + id: pipeline.id, + format: :json + end + + it 'retries a pipeline without returning any content' do + expect(response).to have_http_status(:no_content) + expect(build.reload).to be_retried + end + end + + describe 'POST cancel.json' do + let!(:pipeline) { create(:ci_pipeline, project: project) } + let!(:build) { create(:ci_build, :running, pipeline: pipeline) } + + before do + post :cancel, namespace_id: project.namespace, + project_id: project, + id: pipeline.id, + format: :json + end + + it 'cancels a pipeline without returning any content' do + expect(response).to have_http_status(:no_content) + expect(pipeline.reload).to be_canceled end end end diff --git 
a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb index 416eaa0037e..a4b4392d7cc 100644 --- a/spec/controllers/projects/project_members_controller_spec.rb +++ b/spec/controllers/projects/project_members_controller_spec.rb @@ -55,7 +55,7 @@ describe Projects::ProjectMembersController do user_ids: '', access_level: Gitlab::Access::GUEST - expect(response).to set_flash.to 'No users or groups specified.' + expect(response).to set_flash.to 'No users specified.' expect(response).to redirect_to(namespace_project_settings_members_path(project.namespace, project)) end end @@ -225,7 +225,7 @@ describe Projects::ProjectMembersController do id: member expect(response).to redirect_to( - namespace_project_project_members_path(project.namespace, project) + namespace_project_settings_members_path(project.namespace, project) ) expect(project.members).to include member end diff --git a/spec/controllers/projects/protected_branches_controller_spec.rb b/spec/controllers/projects/protected_branches_controller_spec.rb index e378b5714fe..80be135b5d8 100644 --- a/spec/controllers/projects/protected_branches_controller_spec.rb +++ b/spec/controllers/projects/protected_branches_controller_spec.rb @@ -3,6 +3,7 @@ require('spec_helper') describe Projects::ProtectedBranchesController do describe "GET #index" do let(:project) { create(:project_empty_repo, :public) } + it "redirects empty repo to projects page" do get(:index, namespace_id: project.namespace.to_param, project_id: project) end diff --git a/spec/controllers/projects/protected_tags_controller_spec.rb b/spec/controllers/projects/protected_tags_controller_spec.rb new file mode 100644 index 00000000000..64658988b3f --- /dev/null +++ b/spec/controllers/projects/protected_tags_controller_spec.rb @@ -0,0 +1,11 @@ +require('spec_helper') + +describe Projects::ProtectedTagsController do + describe "GET #index" do + let(:project) { create(:project_empty_repo, :public) } + + it "redirects empty repo to projects page" do + get(:index, namespace_id: project.namespace.to_param, project_id: project) + end + end +end diff --git a/spec/controllers/projects/registry/repositories_controller_spec.rb b/spec/controllers/projects/registry/repositories_controller_spec.rb new file mode 100644 index 00000000000..464302824a8 --- /dev/null +++ b/spec/controllers/projects/registry/repositories_controller_spec.rb @@ -0,0 +1,84 @@ +require 'spec_helper' + +describe Projects::Registry::RepositoriesController do + let(:user) { create(:user) } + let(:project) { create(:empty_project, :private) } + + before do + sign_in(user) + stub_container_registry_config(enabled: true) + end + + context 'when user has access to registry' do + before do + project.add_developer(user) + end + + describe 'GET index' do + context 'when root container repository exists' do + before do + create(:container_repository, :root, project: project) + end + + it 'does not create root container repository' do + expect { go_to_index }.not_to change { ContainerRepository.all.count } + end + end + + context 'when root container repository is not created' do + context 'when there are tags for this repository' do + before do + stub_container_registry_tags(repository: project.full_path, + tags: %w[rc1 latest]) + end + + it 'successfully renders container repositories' do + go_to_index + + expect(response).to have_http_status(:ok) + end + + it 'creates a root container repository' do + expect { go_to_index }.to change { ContainerRepository.all.count 
}.by(1) + expect(ContainerRepository.first).to be_root_repository + end + end + + context 'when there are no tags for this repository' do + before do + stub_container_registry_tags(repository: :any, tags: []) + end + + it 'successfully renders container repositories' do + go_to_index + + expect(response).to have_http_status(:ok) + end + + it 'does not ensure root container repository' do + expect { go_to_index }.not_to change { ContainerRepository.all.count } + end + end + end + end + end + + context 'when user does not have access to registry' do + describe 'GET index' do + it 'responds with 404' do + go_to_index + + expect(response).to have_http_status(:not_found) + end + + it 'does not ensure root container repository' do + expect { go_to_index }.not_to change { ContainerRepository.all.count } + end + end + end + + def go_to_index + get :index, namespace_id: project.namespace, + project_id: project + end +end diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb index 16365642a34..2d892f4a2b7 100644 --- a/spec/controllers/projects/services_controller_spec.rb +++ b/spec/controllers/projects/services_controller_spec.rb @@ -8,6 +8,7 @@ describe Projects::ServicesController do before do sign_in(user) project.team << [user, :master] + controller.instance_variable_set(:@project, project) controller.instance_variable_set(:@service, service) end @@ -18,20 +19,60 @@ describe Projects::ServicesController do end describe "#test" do + context 'when can_test? returns false' do + it 'renders 404' do + allow_any_instance_of(Service).to receive(:can_test?).and_return(false) + + get :test, namespace_id: project.namespace.id, project_id: project.id, id: service.id, format: :html + + expect(response).to have_http_status(404) + end + end + context 'success' do + context 'with empty project' do + let(:project) { create(:empty_project) } + + context 'with chat notification service' do + let(:service) { project.create_microsoft_teams_service(webhook: 'http://webhook.com') } + + it 'redirects and show success message' do + allow_any_instance_of(MicrosoftTeams::Notifier).to receive(:ping).and_return(true) + + get :test, namespace_id: project.namespace.id, project_id: project.id, id: service.id, format: :html + + expect(response).to redirect_to(root_path) + expect(flash[:notice]).to eq('We sent a request to the provided URL') + end + end + + it 'redirects and show success message' do + expect(service).to receive(:test).and_return(success: true, result: 'done') + + get :test, namespace_id: project.namespace.id, project_id: project.id, id: service.id, format: :html + + expect(response).to redirect_to(root_path) + expect(flash[:notice]).to eq('We sent a request to the provided URL') + end + end + it "redirects and show success message" do - expect(service).to receive(:test).and_return({ success: true, result: 'done' }) + expect(service).to receive(:test).and_return(success: true, result: 'done') + get :test, namespace_id: project.namespace.id, project_id: project.id, id: service.id, format: :html - expect(response.status).to redirect_to('/') + + expect(response).to redirect_to(root_path) expect(flash[:notice]).to eq('We sent a request to the provided URL') end end context 'failure' do it "redirects and show failure message" do - expect(service).to receive(:test).and_return({ success: false, result: 'Bad test' }) + expect(service).to receive(:test).and_return(success: false, result: 'Bad test') + get :test, namespace_id: project.namespace.id, 
project_id: project.id, id: service.id, format: :html - expect(response.status).to redirect_to('/') + + expect(response).to redirect_to(root_path) expect(flash[:alert]).to eq('We tried to send a request to the provided URL but an error occurred: Bad test') end end diff --git a/spec/controllers/projects/todo_controller_spec.rb b/spec/controllers/projects/todos_controller_spec.rb index 9a7beeff6fe..c5a4153d991 100644 --- a/spec/controllers/projects/todo_controller_spec.rb +++ b/spec/controllers/projects/todos_controller_spec.rb @@ -1,8 +1,6 @@ require('spec_helper') describe Projects::TodosController do - include ApiHelpers - let(:user) { create(:user) } let(:project) { create(:empty_project) } let(:issue) { create(:issue, project: project) } diff --git a/spec/controllers/projects/tree_controller_spec.rb b/spec/controllers/projects/tree_controller_spec.rb index ab94e292e48..a43dad5756d 100644 --- a/spec/controllers/projects/tree_controller_spec.rb +++ b/spec/controllers/projects/tree_controller_spec.rb @@ -97,29 +97,29 @@ describe Projects::TreeController do project_id: project, id: 'master', dir_name: path, - target_branch: target_branch, + branch_name: branch_name, commit_message: 'Test commit message') end context 'successful creation' do let(:path) { 'files/new_dir'} - let(:target_branch) { 'master-test'} + let(:branch_name) { 'master-test'} it 'redirects to the new directory' do expect(subject). - to redirect_to("/#{project.path_with_namespace}/tree/#{target_branch}/#{path}") + to redirect_to("/#{project.path_with_namespace}/tree/#{branch_name}/#{path}") expect(flash[:notice]).to eq('The directory has been successfully created.') end end context 'unsuccessful creation' do let(:path) { 'README.md' } - let(:target_branch) { 'master'} + let(:branch_name) { 'master'} it 'does not allow overwriting of existing files' do expect(subject). 
to redirect_to("/#{project.path_with_namespace}/tree/master") - expect(flash[:alert]).to eq('Directory already exists as a file') + expect(flash[:alert]).to eq('A file with this name already exists') end end end diff --git a/spec/controllers/projects/wikis_controller_spec.rb b/spec/controllers/projects/wikis_controller_spec.rb new file mode 100644 index 00000000000..92addf30307 --- /dev/null +++ b/spec/controllers/projects/wikis_controller_spec.rb @@ -0,0 +1,16 @@ +require 'spec_helper' + +describe Projects::WikisController do + let(:project) { create(:project_empty_repo, :public) } + let(:user) { create(:user) } + + describe 'POST #preview_markdown' do + it 'renders json in a correct format' do + sign_in(user) + + post :preview_markdown, namespace_id: project.namespace, project_id: project, id: 'page/path', text: '*Markdown* text' + + expect(JSON.parse(response.body).keys).to match_array(%w(body references)) + end + end +end diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb index a88ffc1ea6a..a8be6768a47 100644 --- a/spec/controllers/projects_controller_spec.rb +++ b/spec/controllers/projects_controller_spec.rb @@ -169,26 +169,6 @@ describe ProjectsController do end end - context "when requested with case sensitive namespace and project path" do - context "when there is a match with the same casing" do - it "loads the project" do - get :show, namespace_id: public_project.namespace, id: public_project - - expect(assigns(:project)).to eq(public_project) - expect(response).to have_http_status(200) - end - end - - context "when there is a match with different casing" do - it "redirects to the normalized path" do - get :show, namespace_id: public_project.namespace, id: public_project.path.upcase - - expect(assigns(:project)).to eq(public_project) - expect(response).to redirect_to("/#{public_project.full_path}") - end - end - end - context "when the url contains .atom" do let(:public_project_with_dot_atom) { build(:empty_project, :public, name: 'my.atom', path: 'my.atom') } @@ -224,13 +204,16 @@ describe ProjectsController do render_views let(:admin) { create(:admin) } + let(:project) { create(:project, :repository) } + let(:new_path) { 'renamed_path' } + let(:project_params) { { path: new_path } } + + before do + sign_in(admin) + end it "sets the repository to the right path after a rename" do - project = create(:project, :repository) - new_path = 'renamed_path' - project_params = { path: new_path } controller.instance_variable_set(:@project, project) - sign_in(admin) put :update, namespace_id: project.namespace, @@ -398,4 +381,121 @@ describe ProjectsController do expect(parsed_body["Commits"]).to include("123456") end end + + describe 'POST #preview_markdown' do + it 'renders json in a correct format' do + sign_in(user) + + post :preview_markdown, namespace_id: public_project.namespace, id: public_project, text: '*Markdown* text' + + expect(JSON.parse(response.body).keys).to match_array(%w(body references)) + end + end + + describe '#ensure_canonical_path' do + before do + sign_in(user) + end + + context 'for a GET request' do + context 'when requesting the canonical path' do + context "with exactly matching casing" do + it "loads the project" do + get :show, namespace_id: public_project.namespace, id: public_project + + expect(assigns(:project)).to eq(public_project) + expect(response).to have_http_status(200) + end + end + + context "with different casing" do + it "redirects to the normalized path" do + get :show, namespace_id: 
public_project.namespace, id: public_project.path.upcase + + expect(assigns(:project)).to eq(public_project) + expect(response).to redirect_to("/#{public_project.full_path}") + expect(controller).not_to set_flash[:notice] + end + end + end + + context 'when requesting a redirected path' do + let!(:redirect_route) { public_project.redirect_routes.create!(path: "foo/bar") } + + it 'redirects to the canonical path' do + get :show, namespace_id: 'foo', id: 'bar' + + expect(response).to redirect_to(public_project) + expect(controller).to set_flash[:notice].to(project_moved_message(redirect_route, public_project)) + end + + it 'redirects to the canonical path (testing non-show action)' do + get :refs, namespace_id: 'foo', id: 'bar' + + expect(response).to redirect_to(refs_namespace_project_path(namespace_id: public_project.namespace, id: public_project)) + expect(controller).to set_flash[:notice].to(project_moved_message(redirect_route, public_project)) + end + end + end + + context 'for a POST request' do + context 'when requesting the canonical path with different casing' do + it 'does not 404' do + post :toggle_star, namespace_id: public_project.namespace, id: public_project.path.upcase + + expect(response).not_to have_http_status(404) + end + + it 'does not redirect to the correct casing' do + post :toggle_star, namespace_id: public_project.namespace, id: public_project.path.upcase + + expect(response).not_to have_http_status(301) + end + end + + context 'when requesting a redirected path' do + let!(:redirect_route) { public_project.redirect_routes.create!(path: "foo/bar") } + + it 'returns not found' do + post :toggle_star, namespace_id: 'foo', id: 'bar' + + expect(response).to have_http_status(404) + end + end + end + + context 'for a DELETE request' do + before do + sign_in(create(:admin)) + end + + context 'when requesting the canonical path with different casing' do + it 'does not 404' do + delete :destroy, namespace_id: project.namespace, id: project.path.upcase + + expect(response).not_to have_http_status(404) + end + + it 'does not redirect to the correct casing' do + delete :destroy, namespace_id: project.namespace, id: project.path.upcase + + expect(response).not_to have_http_status(301) + end + end + + context 'when requesting a redirected path' do + let!(:redirect_route) { project.redirect_routes.create!(path: "foo/bar") } + + it 'returns not found' do + delete :destroy, namespace_id: 'foo', id: 'bar' + + expect(response).to have_http_status(404) + end + end + end + end + + def project_moved_message(redirect_route, project) + "Project '#{redirect_route.path}' was moved to '#{project.full_path}'. Please update any links and bookmarks that may still have the old path." 
+ end end diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb index 902911071c4..71dd9ef3eb4 100644 --- a/spec/controllers/registrations_controller_spec.rb +++ b/spec/controllers/registrations_controller_spec.rb @@ -68,4 +68,20 @@ describe RegistrationsController do end end end + + describe '#destroy' do + let(:user) { create(:user) } + + before do + sign_in(user) + end + + it 'schedules the user for destruction' do + expect(DeleteUserWorker).to receive(:perform_async).with(user.id, user.id) + + post(:destroy) + + expect(response.status).to eq(302) + end + end end diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb index a06c29dd91a..038132cffe0 100644 --- a/spec/controllers/sessions_controller_spec.rb +++ b/spec/controllers/sessions_controller_spec.rb @@ -16,7 +16,9 @@ describe SessionsController do end end - context 'when using valid password' do + context 'when using valid password', :redis do + include UserActivitiesHelpers + let(:user) { create(:user) } it 'authenticates user correctly' do @@ -37,6 +39,12 @@ describe SessionsController do subject.sign_out user end end + + it 'updates the user activity' do + expect do + post(:create, user: { login: user.username, password: user.password }) + end.to change { user_activity(user) } + end end end @@ -211,4 +219,20 @@ describe SessionsController do end end end + + describe '#new' do + before do + @request.env['devise.mapping'] = Devise.mappings[:user] + end + + it 'redirects correctly for referer on same host with params' do + search_path = '/search?search=seed_project' + allow(controller.request).to receive(:referer). + and_return('http://%{host}%{path}' % { host: Gitlab.config.gitlab.host, path: search_path }) + + get(:new, redirect_to_referer: :yes) + + expect(controller.stored_location_for(:redirect)).to eq(search_path) + end + end end diff --git a/spec/controllers/snippets/notes_controller_spec.rb b/spec/controllers/snippets/notes_controller_spec.rb new file mode 100644 index 00000000000..1c494b8c7ab --- /dev/null +++ b/spec/controllers/snippets/notes_controller_spec.rb @@ -0,0 +1,196 @@ +require 'spec_helper' + +describe Snippets::NotesController do + let(:user) { create(:user) } + + let(:private_snippet) { create(:personal_snippet, :private) } + let(:internal_snippet) { create(:personal_snippet, :internal) } + let(:public_snippet) { create(:personal_snippet, :public) } + + let(:note_on_private) { create(:note_on_personal_snippet, noteable: private_snippet) } + let(:note_on_internal) { create(:note_on_personal_snippet, noteable: internal_snippet) } + let(:note_on_public) { create(:note_on_personal_snippet, noteable: public_snippet) } + + describe 'GET index' do + context 'when a snippet is public' do + before do + note_on_public + + get :index, { snippet_id: public_snippet } + end + + it "returns status 200" do + expect(response).to have_http_status(200) + end + + it "returns not empty array of notes" do + expect(JSON.parse(response.body)["notes"].empty?).to be_falsey + end + end + + context 'when a snippet is internal' do + before do + note_on_internal + end + + context 'when user not logged in' do + it "returns status 404" do + get :index, { snippet_id: internal_snippet } + + expect(response).to have_http_status(404) + end + end + + context 'when user logged in' do + before do + sign_in(user) + end + + it "returns status 200" do + get :index, { snippet_id: internal_snippet } + + expect(response).to 
have_http_status(200) + end + end + end + + context 'when a snippet is private' do + before do + note_on_private + end + + context 'when user not logged in' do + it "returns status 404" do + get :index, { snippet_id: private_snippet } + + expect(response).to have_http_status(404) + end + end + + context 'when user other than author logged in' do + before do + sign_in(user) + end + + it "returns status 404" do + get :index, { snippet_id: private_snippet } + + expect(response).to have_http_status(404) + end + end + + context 'when author logged in' do + before do + note_on_private + + sign_in(private_snippet.author) + end + + it "returns status 200" do + get :index, { snippet_id: private_snippet } + + expect(response).to have_http_status(200) + end + + it "returns 1 note" do + get :index, { snippet_id: private_snippet } + + expect(JSON.parse(response.body)['notes'].count).to eq(1) + end + end + end + + context 'does not show non-visible notes' do + before do + note_on_public + + sign_in(user) + + expect_any_instance_of(Note).to receive(:cross_reference_not_visible_for?).and_return(true) + end + + it "does not return any note" do + get :index, { snippet_id: public_snippet } + + expect(JSON.parse(response.body)['notes'].count).to eq(0) + end + end + end + + describe 'DELETE destroy' do + let(:request_params) do + { + snippet_id: public_snippet, + id: note_on_public, + format: :js + } + end + + context 'when user is the author of a note' do + before do + sign_in(note_on_public.author) + end + + it "returns status 200" do + delete :destroy, request_params + + expect(response).to have_http_status(200) + end + + it "deletes the note" do + expect{ delete :destroy, request_params }.to change{ Note.count }.from(1).to(0) + end + + context 'system note' do + before do + expect_any_instance_of(Note).to receive(:system?).and_return(true) + end + + it "does not delete the note" do + expect{ delete :destroy, request_params }.not_to change{ Note.count } + end + end + end + + context 'when user is not the author of a note' do + before do + sign_in(user) + + note_on_public + end + + it "returns status 404" do + delete :destroy, request_params + + expect(response).to have_http_status(404) + end + + it "does not update the note" do + expect{ delete :destroy, request_params }.not_to change{ Note.count } + end + end + end + + describe 'POST toggle_award_emoji' do + let(:note) { create(:note_on_personal_snippet, noteable: public_snippet) } + before do + sign_in(user) + end + + subject { post(:toggle_award_emoji, snippet_id: public_snippet, id: note.id, name: "thumbsup") } + + it "toggles the award emoji" do + expect { subject }.to change { note.award_emoji.count }.by(1) + + expect(response).to have_http_status(200) + end + + it "removes the already awarded emoji when it exists" do + note.toggle_award_emoji('thumbsup', user) # create award emoji before + + expect { subject }.to change { AwardEmoji.count }.by(-1) + + expect(response).to have_http_status(200) + end + end +end diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb index 5de3b9890ef..930415a4778 100644 --- a/spec/controllers/snippets_controller_spec.rb +++ b/spec/controllers/snippets_controller_spec.rb @@ -3,6 +3,34 @@ require 'spec_helper' describe SnippetsController do let(:user) { create(:user) } + describe 'GET #index' do + let(:user) { create(:user) } + + context 'when username parameter is present' do + it 'renders snippets of a user when username is present' do + get :index, username: user.username +
+ expect(response).to render_template(:index) + end + end + + context 'when username parameter is not present' do + it 'redirects to explore snippets page when user is not logged in' do + get :index + + expect(response).to redirect_to(explore_snippets_path) + end + + it 'redirects to snippets dashboard page when user is logged in' do + sign_in(user) + + get :index + + expect(response).to redirect_to(dashboard_snippets_path) + end + end + end + describe 'GET #new' do context 'when signed in' do before do @@ -132,7 +160,7 @@ describe SnippetsController do it 'responds with status 404' do get :show, id: 'doesntexist' - expect(response).to have_http_status(404) + expect(response).to redirect_to(new_user_session_path) end end end @@ -350,144 +378,138 @@ describe SnippetsController do end end - %w(raw download).each do |action| - describe "GET #{action}" do - context 'when the personal snippet is private' do - let(:personal_snippet) { create(:personal_snippet, :private, author: user) } + describe "GET #raw" do + context 'when the personal snippet is private' do + let(:personal_snippet) { create(:personal_snippet, :private, author: user) } - context 'when signed in' do - before do - sign_in(user) - end + context 'when signed in' do + before do + sign_in(user) + end - context 'when signed in user is not the author' do - let(:other_author) { create(:author) } - let(:other_personal_snippet) { create(:personal_snippet, :private, author: other_author) } + context 'when signed in user is not the author' do + let(:other_author) { create(:author) } + let(:other_personal_snippet) { create(:personal_snippet, :private, author: other_author) } - it 'responds with status 404' do - get action, id: other_personal_snippet.to_param + it 'responds with status 404' do + get :raw, id: other_personal_snippet.to_param - expect(response).to have_http_status(404) - end + expect(response).to have_http_status(404) end + end - context 'when signed in user is the author' do - before { get action, id: personal_snippet.to_param } + context 'when signed in user is the author' do + before { get :raw, id: personal_snippet.to_param } - it 'responds with status 200' do - expect(assigns(:snippet)).to eq(personal_snippet) - expect(response).to have_http_status(200) - end + it 'responds with status 200' do + expect(assigns(:snippet)).to eq(personal_snippet) + expect(response).to have_http_status(200) + end - it 'has expected headers' do - expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8') + it 'has expected headers' do + expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8') - if action == :download - expect(response.header['Content-Disposition']).to match(/attachment/) - elsif action == :raw - expect(response.header['Content-Disposition']).to match(/inline/) - end - end + expect(response.header['Content-Disposition']).to match(/inline/) end end + end - context 'when not signed in' do - it 'redirects to the sign in page' do - get action, id: personal_snippet.to_param + context 'when not signed in' do + it 'redirects to the sign in page' do + get :raw, id: personal_snippet.to_param - expect(response).to redirect_to(new_user_session_path) - end + expect(response).to redirect_to(new_user_session_path) end end + end - context 'when the personal snippet is internal' do - let(:personal_snippet) { create(:personal_snippet, :internal, author: user) } + context 'when the personal snippet is internal' do + let(:personal_snippet) { create(:personal_snippet, :internal, author: user) } - context 'when 
signed in' do - before do - sign_in(user) - end + context 'when signed in' do + before do + sign_in(user) + end - it 'responds with status 200' do - get action, id: personal_snippet.to_param + it 'responds with status 200' do + get :raw, id: personal_snippet.to_param - expect(assigns(:snippet)).to eq(personal_snippet) - expect(response).to have_http_status(200) - end + expect(assigns(:snippet)).to eq(personal_snippet) + expect(response).to have_http_status(200) end + end - context 'when not signed in' do - it 'redirects to the sign in page' do - get action, id: personal_snippet.to_param + context 'when not signed in' do + it 'redirects to the sign in page' do + get :raw, id: personal_snippet.to_param - expect(response).to redirect_to(new_user_session_path) - end + expect(response).to redirect_to(new_user_session_path) end end + end - context 'when the personal snippet is public' do - let(:personal_snippet) { create(:personal_snippet, :public, author: user) } + context 'when the personal snippet is public' do + let(:personal_snippet) { create(:personal_snippet, :public, author: user) } - context 'when signed in' do - before do - sign_in(user) - end + context 'when signed in' do + before do + sign_in(user) + end - it 'responds with status 200' do - get action, id: personal_snippet.to_param + it 'responds with status 200' do + get :raw, id: personal_snippet.to_param - expect(assigns(:snippet)).to eq(personal_snippet) - expect(response).to have_http_status(200) - end + expect(assigns(:snippet)).to eq(personal_snippet) + expect(response).to have_http_status(200) + end - context 'CRLF line ending' do - let(:personal_snippet) do - create(:personal_snippet, :public, author: user, content: "first line\r\nsecond line\r\nthird line") - end + context 'CRLF line ending' do + let(:personal_snippet) do + create(:personal_snippet, :public, author: user, content: "first line\r\nsecond line\r\nthird line") + end - it 'returns LF line endings by default' do - get action, id: personal_snippet.to_param + it 'returns LF line endings by default' do + get :raw, id: personal_snippet.to_param - expect(response.body).to eq("first line\nsecond line\nthird line") - end + expect(response.body).to eq("first line\nsecond line\nthird line") + end - it 'does not convert line endings when parameter present' do - get action, id: personal_snippet.to_param, line_ending: :raw + it 'does not convert line endings when parameter present' do + get :raw, id: personal_snippet.to_param, line_ending: :raw - expect(response.body).to eq("first line\r\nsecond line\r\nthird line") - end + expect(response.body).to eq("first line\r\nsecond line\r\nthird line") end end + end - context 'when not signed in' do - it 'responds with status 200' do - get action, id: personal_snippet.to_param + context 'when not signed in' do + it 'responds with status 200' do + get :raw, id: personal_snippet.to_param - expect(assigns(:snippet)).to eq(personal_snippet) - expect(response).to have_http_status(200) - end + expect(assigns(:snippet)).to eq(personal_snippet) + expect(response).to have_http_status(200) end end + end - context 'when the personal snippet does not exist' do - context 'when signed in' do - before do - sign_in(user) - end + context 'when the personal snippet does not exist' do + context 'when signed in' do + before do + sign_in(user) + end - it 'responds with status 404' do - get action, id: 'doesntexist' + it 'responds with status 404' do + get :raw, id: 'doesntexist' - expect(response).to have_http_status(404) - end + expect(response).to 
have_http_status(404) end + end - context 'when not signed in' do - it 'responds with status 404' do - get action, id: 'doesntexist' + context 'when not signed in' do + it 'redirects to the sign in path' do + get :raw, id: 'doesntexist' - expect(response).to have_http_status(404) - end + expect(response).to redirect_to(new_user_session_path) end end end @@ -521,4 +543,16 @@ describe SnippetsController do end end end + + describe 'POST #preview_markdown' do + let(:snippet) { create(:personal_snippet, :public) } + + it 'renders json in a correct format' do + sign_in(user) + + post :preview_markdown, id: snippet, text: '*Markdown* text' + + expect(JSON.parse(response.body).keys).to match_array(%w(body references)) + end + end end diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb index f67d26da0ac..8000c9dec61 100644 --- a/spec/controllers/uploads_controller_spec.rb +++ b/spec/controllers/uploads_controller_spec.rb @@ -8,6 +8,93 @@ end describe UploadsController do let!(:user) { create(:user, avatar: fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")) } + describe 'POST create' do + let(:model) { 'personal_snippet' } + let(:snippet) { create(:personal_snippet, :public) } + let(:jpg) { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') } + let(:txt) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') } + + context 'when a user does not have permissions to upload a file' do + it "returns 401 when the user is not logged in" do + post :create, model: model, id: snippet.id, format: :json + + expect(response).to have_http_status(401) + end + + it "returns 404 when user can't comment on a snippet" do + private_snippet = create(:personal_snippet, :private) + + sign_in(user) + post :create, model: model, id: private_snippet.id, format: :json + + expect(response).to have_http_status(404) + end + end + + context 'when a user is logged in' do + before do + sign_in(user) + end + + it "returns an error without file" do + post :create, model: model, id: snippet.id, format: :json + + expect(response).to have_http_status(422) + end + + it "returns an error with invalid model" do + expect { post :create, model: 'invalid', id: snippet.id, format: :json } + .to raise_error(ActionController::UrlGenerationError) + end + + it "returns 404 status when object not found" do + post :create, model: model, id: 9999, format: :json + + expect(response).to have_http_status(404) + end + + context 'with valid image' do + before do + post :create, model: 'personal_snippet', id: snippet.id, file: jpg, format: :json + end + + it 'returns a content with original filename, new link, and correct type.' do + expect(response.body).to match '\"alt\":\"rails_sample\"' + expect(response.body).to match "\"url\":\"/uploads" + end + + it 'creates a corresponding Upload record' do + upload = Upload.last + + aggregate_failures do + expect(upload).to exist + expect(upload.model).to eq snippet + end + end + end + + context 'with valid non-image file' do + before do + post :create, model: 'personal_snippet', id: snippet.id, file: txt, format: :json + end + + it 'returns a content with original filename, new link, and correct type.' 
do + expect(response.body).to match '\"alt\":\"doc_sample.txt\"' + expect(response.body).to match "\"url\":\"/uploads" + end + + it 'creates a corresponding Upload record' do + upload = Upload.last + + aggregate_failures do + expect(upload).to exist + expect(upload.model).to eq snippet + end + end + end + end + end + describe "GET show" do context 'Content-Disposition security measures' do let(:project) { create(:empty_project, :public) } @@ -386,5 +473,45 @@ describe UploadsController do end end end + + context 'Appearance' do + context 'when viewing a custom header logo' do + let!(:appearance) { create :appearance, header_logo: fixture_file_upload(Rails.root.join('spec/fixtures/dk.png'), 'image/png') } + + context 'when not signed in' do + it 'responds with status 200' do + get :show, model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png' + + expect(response).to have_http_status(200) + end + + it_behaves_like 'content not cached without revalidation' do + subject do + get :show, model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png' + response + end + end + end + end + + context 'when viewing a custom logo' do + let!(:appearance) { create :appearance, logo: fixture_file_upload(Rails.root.join('spec/fixtures/dk.png'), 'image/png') } + + context 'when not signed in' do + it 'responds with status 200' do + get :show, model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png' + + expect(response).to have_http_status(200) + end + + it_behaves_like 'content not cached without revalidation' do + subject do + get :show, model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png' + response + end + end + end + end + end end end diff --git a/spec/controllers/users_controller_spec.rb b/spec/controllers/users_controller_spec.rb index bbe9aaf737f..d33e2ba1e53 100644 --- a/spec/controllers/users_controller_spec.rb +++ b/spec/controllers/users_controller_spec.rb @@ -4,15 +4,6 @@ describe UsersController do let(:user) { create(:user) } describe 'GET #show' do - it 'is case-insensitive' do - user = create(:user, username: 'CamelCaseUser') - sign_in(user) - - get :show, username: user.username.downcase - - expect(response).to be_success - end - context 'with rendered views' do render_views @@ -45,9 +36,9 @@ describe UsersController do end context 'when logged out' do - it 'renders 404' do + it 'redirects to login page' do get :show, username: user.username - expect(response).to have_http_status(404) + expect(response).to redirect_to new_user_session_path end end @@ -61,6 +52,24 @@ describe UsersController do end end end + + context 'when a user by that username does not exist' do + context 'when logged out' do + it 'redirects to login page' do + get :show, username: 'nonexistent' + expect(response).to redirect_to new_user_session_path + end + end + + context 'when logged in' do + before { sign_in(user) } + + it 'renders 404' do + get :show, username: 'nonexistent' + expect(response).to have_http_status(404) + end + end + end end describe 'GET #calendar' do @@ -92,7 +101,7 @@ describe UsersController do describe 'GET #calendar_activities' do let!(:project) { create(:empty_project) } - let!(:user) { create(:user) } + let(:user) { create(:user) } before do allow_any_instance_of(User).to receive(:contributed_projects_ids).and_return([project.id]) @@ -133,4 +142,175 @@ describe UsersController do end end end + + describe 'GET #exists' do + before do + sign_in(user) + end + + context 'when user exists' do + it 
'returns JSON indicating the user exists' do + get :exists, username: user.username + + expected_json = { exists: true }.to_json + expect(response.body).to eq(expected_json) + end + + context 'when the casing is different' do + let(:user) { create(:user, username: 'CamelCaseUser') } + + it 'returns JSON indicating the user exists' do + get :exists, username: user.username.downcase + + expected_json = { exists: true }.to_json + expect(response.body).to eq(expected_json) + end + end + end + + context 'when the user does not exist' do + it 'returns JSON indicating the user does not exist' do + get :exists, username: 'foo' + + expected_json = { exists: false }.to_json + expect(response.body).to eq(expected_json) + end + + context 'when a user changed their username' do + let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-username') } + + it 'returns JSON indicating a user by that username does not exist' do + get :exists, username: 'old-username' + + expected_json = { exists: false }.to_json + expect(response.body).to eq(expected_json) + end + end + end + end + + describe '#ensure_canonical_path' do + before do + sign_in(user) + end + + context 'for a GET request' do + context 'when requesting users at the root path' do + context 'when requesting the canonical path' do + let(:user) { create(:user, username: 'CamelCaseUser') } + + context 'with exactly matching casing' do + it 'responds with success' do + get :show, username: user.username + + expect(response).to be_success + end + end + + context 'with different casing' do + it 'redirects to the correct casing' do + get :show, username: user.username.downcase + + expect(response).to redirect_to(user) + expect(controller).not_to set_flash[:notice] + end + end + end + + context 'when requesting a redirected path' do + let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-path') } + + it 'redirects to the canonical path' do + get :show, username: redirect_route.path + + expect(response).to redirect_to(user) + expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user)) + end + + context 'when the old path is a substring of the scheme or host' do + let(:redirect_route) { user.namespace.redirect_routes.create(path: 'http') } + + it 'does not modify the requested host' do + get :show, username: redirect_route.path + + expect(response).to redirect_to(user) + expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user)) + end + end + + context 'when the old path is substring of users' do + let(:redirect_route) { user.namespace.redirect_routes.create(path: 'ser') } + + it 'redirects to the canonical path' do + get :show, username: redirect_route.path + + expect(response).to redirect_to(user) + expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user)) + end + end + end + end + + context 'when requesting users under the /users path' do + context 'when requesting the canonical path' do + let(:user) { create(:user, username: 'CamelCaseUser') } + + context 'with exactly matching casing' do + it 'responds with success' do + get :projects, username: user.username + + expect(response).to be_success + end + end + + context 'with different casing' do + it 'redirects to the correct casing' do + get :projects, username: user.username.downcase + + expect(response).to redirect_to(user_projects_path(user)) + expect(controller).not_to set_flash[:notice] + end + end + end + + context 'when requesting a redirected path' do + let(:redirect_route) { 
user.namespace.redirect_routes.create(path: 'old-path') } + + it 'redirects to the canonical path' do + get :projects, username: redirect_route.path + + expect(response).to redirect_to(user_projects_path(user)) + expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user)) + end + + context 'when the old path is a substring of the scheme or host' do + let(:redirect_route) { user.namespace.redirect_routes.create(path: 'http') } + + it 'does not modify the requested host' do + get :projects, username: redirect_route.path + + expect(response).to redirect_to(user_projects_path(user)) + expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user)) + end + end + + context 'when the old path is substring of users' do + let(:redirect_route) { user.namespace.redirect_routes.create(path: 'ser') } + + # I.e. /users/ser should not become /ufoos/ser + it 'does not modify the /users part of the path' do + get :projects, username: redirect_route.path + + expect(response).to redirect_to(user_projects_path(user)) + expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user)) + end + end + end + end + end + end + + def user_moved_message(redirect_route, user) + "User '#{redirect_route.path}' was moved to '#{user.full_path}'. Please update any links and bookmarks that may still have the old path." + end end diff --git a/spec/factories/chat_names.rb b/spec/factories/chat_names.rb index 24225468d55..9a0be1a4598 100644 --- a/spec/factories/chat_names.rb +++ b/spec/factories/chat_names.rb @@ -6,11 +6,7 @@ FactoryGirl.define do team_id 'T0001' team_domain 'Awesome Team' - sequence :chat_id do |n| - "U#{n}" - end - sequence :chat_name do |n| - "user#{n}" - end + sequence(:chat_id) { |n| "U#{n}" } + chat_name { generate(:username) } end end diff --git a/spec/factories/chat_teams.rb b/spec/factories/chat_teams.rb index 82f44fa3d15..ffedf69a69b 100644 --- a/spec/factories/chat_teams.rb +++ b/spec/factories/chat_teams.rb @@ -1,9 +1,6 @@ FactoryGirl.define do factory :chat_team, class: ChatTeam do - sequence :team_id do |n| - "abcdefghijklm#{n}" - end - + sequence(:team_id) { |n| "abcdefghijklm#{n}" } namespace factory: :group end end diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb index f78086211f7..78ddd8d5584 100644 --- a/spec/factories/ci/builds.rb +++ b/spec/factories/ci/builds.rb @@ -79,6 +79,19 @@ FactoryGirl.define do manual end + trait :retryable do + success + end + + trait :cancelable do + pending + end + + trait :erasable do + success + artifacts + end + trait :tags do tag_list [:docker, :ruby] end @@ -111,7 +124,7 @@ FactoryGirl.define do trait :trace do after(:create) do |build, evaluator| - build.trace = 'BUILD TRACE' + build.trace.set('BUILD TRACE') end end @@ -192,5 +205,10 @@ FactoryGirl.define do trait :no_options do options { {} } end + + trait :non_playable do + status 'created' + self.when 'manual' + end end end diff --git a/spec/factories/ci/pipeline_schedule.rb b/spec/factories/ci/pipeline_schedule.rb new file mode 100644 index 00000000000..a716da46ac6 --- /dev/null +++ b/spec/factories/ci/pipeline_schedule.rb @@ -0,0 +1,29 @@ +FactoryGirl.define do + factory :ci_pipeline_schedule, class: Ci::PipelineSchedule do + cron '0 1 * * *' + cron_timezone Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE + ref 'master' + active true + description "pipeline schedule" + project factory: :empty_project + + trait :nightly do + cron '0 1 * * *' + cron_timezone 
Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE + end + + trait :weekly do + cron '0 1 * * 6' + cron_timezone Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE + end + + trait :monthly do + cron '0 1 22 * *' + cron_timezone Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE + end + + trait :inactive do + active false + end + end +end diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb index b67c96bc00d..561fbc8e247 100644 --- a/spec/factories/ci/pipelines.rb +++ b/spec/factories/ci/pipelines.rb @@ -48,6 +48,10 @@ FactoryGirl.define do trait :success do status :success end + + trait :failed do + status :failed + end end end end diff --git a/spec/factories/ci/runners.rb b/spec/factories/ci/runners.rb index c3b4aff55ba..05abf60d5ce 100644 --- a/spec/factories/ci/runners.rb +++ b/spec/factories/ci/runners.rb @@ -1,8 +1,6 @@ FactoryGirl.define do factory :ci_runner, class: Ci::Runner do - sequence :description do |n| - "My runner#{n}" - end + sequence(:description) { |n| "My runner#{n}" } platform "darwin" is_shared false diff --git a/spec/factories/ci/triggers.rb b/spec/factories/ci/triggers.rb index a27b04424e5..c3a29d8bf04 100644 --- a/spec/factories/ci/triggers.rb +++ b/spec/factories/ci/triggers.rb @@ -1,7 +1,14 @@ FactoryGirl.define do factory :ci_trigger_without_token, class: Ci::Trigger do factory :ci_trigger do - token 'token' + sequence(:token) { |n| "token#{n}" } + + factory :ci_trigger_for_trigger_schedule do + token { SecureRandom.hex(15) } + owner factory: :user + project factory: :project + ref 'master' + end end end end diff --git a/spec/factories/ci/variables.rb b/spec/factories/ci/variables.rb index 6653f0bb5c3..c5fba597c1c 100644 --- a/spec/factories/ci/variables.rb +++ b/spec/factories/ci/variables.rb @@ -2,5 +2,7 @@ FactoryGirl.define do factory :ci_variable, class: Ci::Variable do sequence(:key) { |n| "VARIABLE_#{n}" } value 'VARIABLE_VALUE' + + project factory: :empty_project end end diff --git a/spec/factories/container_repositories.rb b/spec/factories/container_repositories.rb new file mode 100644 index 00000000000..3fcad9fd4b3 --- /dev/null +++ b/spec/factories/container_repositories.rb @@ -0,0 +1,33 @@ +FactoryGirl.define do + factory :container_repository do + name 'test_container_image' + project + + transient do + tags [] + end + + trait :root do + name '' + end + + after(:build) do |repository, evaluator| + next if evaluator.tags.to_a.none? + + allow(repository.client) + .to receive(:repository_tags) + .and_return({ + 'name' => repository.path, + 'tags' => evaluator.tags + }) + + evaluator.tags.each do |tag| + allow(repository.client) + .to receive(:repository_tag_digest) + .with(repository.path, tag) + .and_return('sha256:4c8e63ca4cb663ce6c688cb06f1c3' \ + '72b088dac5b6d7ad7d49cd620d85cf72a15') + end + end + end +end diff --git a/spec/factories/emails.rb b/spec/factories/emails.rb index 9794772ac7d..8303861bcfe 100644 --- a/spec/factories/emails.rb +++ b/spec/factories/emails.rb @@ -1,6 +1,6 @@ FactoryGirl.define do factory :email do user - email { FFaker::Internet.email('alias') } + email { generate(:email_alias) } end end diff --git a/spec/factories/environments.rb b/spec/factories/environments.rb index 0852dda6b29..d8d699fb3aa 100644 --- a/spec/factories/environments.rb +++ b/spec/factories/environments.rb @@ -18,19 +18,30 @@ FactoryGirl.define do # interconnected objects to simulate a review app. 
# after(:create) do |environment, evaluator| + pipeline = create(:ci_pipeline, project: environment.project) + + deployable = create(:ci_build, name: "#{environment.name}:deploy", + pipeline: pipeline) + deployment = create(:deployment, environment: environment, project: environment.project, + deployable: deployable, ref: evaluator.ref, sha: environment.project.commit(evaluator.ref).id) teardown_build = create(:ci_build, :manual, - name: "#{deployment.environment.name}:teardown", - pipeline: deployment.deployable.pipeline) + name: "#{environment.name}:teardown", + pipeline: pipeline) deployment.update_column(:on_stop, teardown_build.name) environment.update_attribute(:deployments, [deployment]) end end end end diff --git a/spec/factories/group_members.rb b/spec/factories/group_members.rb index 080b2e75ea1..32cbfe28a60 100644 --- a/spec/factories/group_members.rb +++ b/spec/factories/group_members.rb @@ -10,5 +10,11 @@ FactoryGirl.define do trait(:master) { access_level GroupMember::MASTER } trait(:owner) { access_level GroupMember::OWNER } trait(:access_request) { requested_at Time.now } + + trait(:invited) do + user_id nil + invite_token 'xxx' + invite_email 'email@email.com' + end end end diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb index 86f51ffca99..52f76b094a3 100644 --- a/spec/factories/groups.rb +++ b/spec/factories/groups.rb @@ -17,6 +17,10 @@ FactoryGirl.define do visibility_level Gitlab::VisibilityLevel::PRIVATE end + trait :with_avatar do + avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) } + end + trait :access_requestable do request_access_enabled true end diff --git a/spec/factories/issues.rb b/spec/factories/issues.rb index 7e09f1ba8ea..f1fd1fd7f73 100644 --- a/spec/factories/issues.rb +++ b/spec/factories/issues.rb @@ -1,10 +1,6 @@ FactoryGirl.define do - sequence :issue_created_at do |n| - 4.hours.ago + ( 2 * n ).seconds - end - factory :issue do - title + title { generate(:title) } author project factory: :empty_project @@ -12,6 +8,10 @@ FactoryGirl.define do confidential true end + trait :opened do + state :opened + end + trait :closed do state :closed end diff --git a/spec/factories/keys.rb b/spec/factories/keys.rb index dd93b439b2b..4e140102492 100644 --- a/spec/factories/keys.rb +++ b/spec/factories/keys.rb @@ -23,5 +23,9 @@ FactoryGirl.define do factory :another_deploy_key, class: 'DeployKey' do end end + + factory :write_access_key, class: 'DeployKey' do + can_push true + end end end diff --git a/spec/factories/labels.rb b/spec/factories/labels.rb index 5ba8443c62c..22c2a1f15e2 100644 --- a/spec/factories/labels.rb +++ b/spec/factories/labels.rb @@ -1,7 +1,10 @@ FactoryGirl.define do - factory :label, class: ProjectLabel do - sequence(:title) { |n| "label#{n}" } + trait :base_label do + title { generate(:label_title) } color "#990000" + end + + factory :label, traits: [:base_label], class: ProjectLabel do project factory: :empty_project transient do @@ -15,9 +18,7 @@ FactoryGirl.define do end end - factory :group_label, class: GroupLabel do - sequence(:title) { |n| "label#{n}" } - color "#990000" + factory :group_label, traits: [:base_label] do group end end diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb index ae0bbbd6aeb..253a025af48 100644 --- a/spec/factories/merge_requests.rb +++ b/spec/factories/merge_requests.rb @@ -1,6 +1,6 @@ FactoryGirl.define do factory :merge_request do - title + title { generate(:title) } author
association :source_project, :repository, factory: :project target_project { source_project } @@ -40,10 +40,18 @@ FactoryGirl.define do state :closed end + trait :opened do + state :opened + end + trait :reopened do state :reopened end + trait :locked do + state :locked + end + trait :simple do source_branch "feature" target_branch "master" diff --git a/spec/factories/merge_requests_closing_issues.rb b/spec/factories/merge_requests_closing_issues.rb new file mode 100644 index 00000000000..fdbdc00cad7 --- /dev/null +++ b/spec/factories/merge_requests_closing_issues.rb @@ -0,0 +1,6 @@ +FactoryGirl.define do + factory :merge_requests_closing_issues do + issue + merge_request + end +end diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb index fe19a404e16..046974dcd6e 100644 --- a/spec/factories/notes.rb +++ b/spec/factories/notes.rb @@ -5,7 +5,7 @@ include ActionDispatch::TestProcess FactoryGirl.define do factory :note do project factory: :empty_project - note "Note" + note { generate(:title) } author on_issue @@ -16,10 +16,23 @@ FactoryGirl.define do factory :note_on_personal_snippet, traits: [:on_personal_snippet] factory :system_note, traits: [:system] - factory :legacy_diff_note_on_commit, traits: [:on_commit, :legacy_diff_note], class: LegacyDiffNote do + factory :discussion_note_on_merge_request, traits: [:on_merge_request], class: DiscussionNote do association :project, :repository + + trait :resolved do + resolved_at { Time.now } + resolved_by { create(:user) } + end end + factory :discussion_note_on_issue, traits: [:on_issue], class: DiscussionNote + + factory :discussion_note_on_commit, traits: [:on_commit], class: DiscussionNote + + factory :discussion_note_on_personal_snippet, traits: [:on_personal_snippet], class: DiscussionNote + + factory :legacy_diff_note_on_commit, traits: [:on_commit, :legacy_diff_note], class: LegacyDiffNote + factory :legacy_diff_note_on_merge_request, traits: [:on_merge_request, :legacy_diff_note], class: LegacyDiffNote do association :project, :repository end @@ -29,6 +42,7 @@ FactoryGirl.define do transient do line_number 14 + diff_refs { noteable.try(:diff_refs) } end position do @@ -37,7 +51,7 @@ FactoryGirl.define do new_path: "files/ruby/popen.rb", old_line: nil, new_line: line_number, - diff_refs: noteable.diff_refs + diff_refs: diff_refs ) end @@ -108,5 +122,18 @@ FactoryGirl.define do trait :with_svg_attachment do attachment { fixture_file_upload(Rails.root + "spec/fixtures/unsanitized.svg", "image/svg+xml") } end + + transient do + in_reply_to nil + end + + before(:create) do |note, evaluator| + discussion = evaluator.in_reply_to + next unless discussion + discussion = discussion.to_discussion if discussion.is_a?(Note) + next unless discussion + + note.assign_attributes(discussion.reply_attributes.merge(project: discussion.project)) + end end end diff --git a/spec/factories/oauth_applications.rb b/spec/factories/oauth_applications.rb index 86cdc208268..c7ede40f240 100644 --- a/spec/factories/oauth_applications.rb +++ b/spec/factories/oauth_applications.rb @@ -1,8 +1,8 @@ FactoryGirl.define do factory :oauth_application, class: 'Doorkeeper::Application', aliases: [:application] do - name { FFaker::Name.name } + sequence(:name) { |n| "OAuth App #{n}" } uid { Doorkeeper::OAuth::Helpers::UniqueToken.generate } - redirect_uri { FFaker::Internet.uri('http') } + redirect_uri { generate(:url) } owner owner_type 'User' end diff --git a/spec/factories/personal_access_tokens.rb b/spec/factories/personal_access_tokens.rb index 
7b15ba47de1..06acaff6cd0 100644 --- a/spec/factories/personal_access_tokens.rb +++ b/spec/factories/personal_access_tokens.rb @@ -2,7 +2,7 @@ FactoryGirl.define do factory :personal_access_token do user token { SecureRandom.hex(50) } - name { FFaker::Product.brand } + sequence(:name) { |n| "PAT #{n}" } revoked false expires_at { 5.days.from_now } scopes ['api'] diff --git a/spec/factories/project_hooks.rb b/spec/factories/project_hooks.rb index 424ecc65759..cd754ea235f 100644 --- a/spec/factories/project_hooks.rb +++ b/spec/factories/project_hooks.rb @@ -1,6 +1,7 @@ FactoryGirl.define do factory :project_hook do - url { FFaker::Internet.uri('http') } + url { generate(:url) } + enable_ssl_verification false trait :token do token { SecureRandom.hex(10) } @@ -11,8 +12,9 @@ FactoryGirl.define do merge_requests_events true tag_push_events true issues_events true + confidential_issues_events true note_events true - build_events true + job_events true pipeline_events true wiki_page_events true end diff --git a/spec/factories/project_members.rb b/spec/factories/project_members.rb index d62799a5a47..fe4518caadf 100644 --- a/spec/factories/project_members.rb +++ b/spec/factories/project_members.rb @@ -9,5 +9,11 @@ FactoryGirl.define do trait(:developer) { access_level ProjectMember::DEVELOPER } trait(:master) { access_level ProjectMember::MASTER } trait(:access_request) { requested_at Time.now } + + trait(:invited) do + user_id nil + invite_token 'xxx' + invite_email 'email@email.com' + end end end diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb index 0db2fe04edd..7a76f5f8afc 100644 --- a/spec/factories/projects.rb +++ b/spec/factories/projects.rb @@ -32,6 +32,10 @@ FactoryGirl.define do request_access_enabled true end + trait :with_avatar do + avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) } + end + trait :repository do # no-op... for now! 
end @@ -56,7 +60,9 @@ FactoryGirl.define do trait :test_repo do after :create do |project| - TestEnv.copy_repo(project) + TestEnv.copy_repo(project, + bare_repo: TestEnv.factory_repo_path_bare, + refs: TestEnv::BRANCH_SHA) end end @@ -135,7 +141,9 @@ FactoryGirl.define do end after :create do |project, evaluator| - TestEnv.copy_repo(project) + TestEnv.copy_repo(project, + bare_repo: TestEnv.factory_repo_path_bare, + refs: TestEnv::BRANCH_SHA) if evaluator.create_template args = evaluator.create_template @@ -168,7 +176,9 @@ FactoryGirl.define do path { 'forked-gitlabhq' } after :create do |project| - TestEnv.copy_forked_repo_with_submodules(project) + TestEnv.copy_repo(project, + bare_repo: TestEnv.forked_repo_path_bare, + refs: TestEnv::FORKED_BRANCH_SHA) end end diff --git a/spec/factories/protected_tags.rb b/spec/factories/protected_tags.rb new file mode 100644 index 00000000000..d8e90ae1ee1 --- /dev/null +++ b/spec/factories/protected_tags.rb @@ -0,0 +1,22 @@ +FactoryGirl.define do + factory :protected_tag do + name + project + + after(:build) do |protected_tag| + protected_tag.create_access_levels.new(access_level: Gitlab::Access::MASTER) + end + + trait :developers_can_create do + after(:create) do |protected_tag| + protected_tag.create_access_levels.first.update!(access_level: Gitlab::Access::DEVELOPER) + end + end + + trait :no_one_can_create do + after(:create) do |protected_tag| + protected_tag.create_access_levels.first.update!(access_level: Gitlab::Access::NO_ACCESS) + end + end + end +end diff --git a/spec/factories/sent_notifications.rb b/spec/factories/sent_notifications.rb index 6287c40afe9..99253be5a22 100644 --- a/spec/factories/sent_notifications.rb +++ b/spec/factories/sent_notifications.rb @@ -2,7 +2,7 @@ FactoryGirl.define do factory :sent_notification do project factory: :empty_project recipient factory: :user - noteable factory: :issue - reply_key "0123456789abcdef" * 2 + noteable { create(:issue, project: project) } + reply_key { SentNotification.reply_key } end end diff --git a/spec/factories/sequences.rb b/spec/factories/sequences.rb new file mode 100644 index 00000000000..c0232ba5bf6 --- /dev/null +++ b/spec/factories/sequences.rb @@ -0,0 +1,12 @@ +FactoryGirl.define do + sequence(:username) { |n| "user#{n}" } + sequence(:name) { |n| "John Doe#{n}" } + sequence(:email) { |n| "user#{n}@example.org" } + sequence(:email_alias) { |n| "user.alias#{n}@example.org" } + sequence(:title) { |n| "My title #{n}" } + sequence(:filename) { |n| "filename-#{n}.rb" } + sequence(:url) { |n| "http://example#{n}.org" } + sequence(:label_title) { |n| "label#{n}" } + sequence(:branch) { |n| "my-branch-#{n}" } + sequence(:past_time) { |n| 4.hours.ago + (2 * n).seconds } +end diff --git a/spec/factories/service_hooks.rb b/spec/factories/service_hooks.rb index 6dd6af63f3e..e3f88ab8fcc 100644 --- a/spec/factories/service_hooks.rb +++ b/spec/factories/service_hooks.rb @@ -1,6 +1,6 @@ FactoryGirl.define do factory :service_hook do - url { FFaker::Internet.uri('http') } + url { generate(:url) } service end end diff --git a/spec/factories/services.rb b/spec/factories/services.rb index 88f6c265505..28ddd0da753 100644 --- a/spec/factories/services.rb +++ b/spec/factories/services.rb @@ -1,6 +1,19 @@ FactoryGirl.define do factory :service do project factory: :empty_project + type 'Service' + end + + factory :custom_issue_tracker_service, class: CustomIssueTrackerService do + project factory: :empty_project + type 'CustomIssueTrackerService' + category 'issue_tracker' + active true + properties( 
+ project_url: 'https://project.url.com', + issues_url: 'https://issues.url.com', + new_issue_url: 'https://newissue.url.com' + ) end factory :kubernetes_service do @@ -9,7 +22,7 @@ FactoryGirl.define do properties({ namespace: 'somepath', api_url: 'https://kubernetes.example.com', - token: 'a' * 40, + token: 'a' * 40 }) end diff --git a/spec/factories/snippets.rb b/spec/factories/snippets.rb index 365f12a0c95..18cb0f5de26 100644 --- a/spec/factories/snippets.rb +++ b/spec/factories/snippets.rb @@ -1,17 +1,9 @@ FactoryGirl.define do - sequence :title, aliases: [:content] do - FFaker::Lorem.sentence - end - - sequence :file_name do - FFaker::Internet.user_name - end - factory :snippet do author - title - content - file_name + title { generate(:title) } + content { generate(:title) } + file_name { generate(:filename) } trait :public do visibility_level Snippet::PUBLIC diff --git a/spec/factories/spam_logs.rb b/spec/factories/spam_logs.rb index a4f6d291269..e369f9f13e9 100644 --- a/spec/factories/spam_logs.rb +++ b/spec/factories/spam_logs.rb @@ -1,9 +1,9 @@ FactoryGirl.define do factory :spam_log do user - source_ip { FFaker::Internet.ip_v4_address } + sequence(:source_ip) { |n| "42.42.42.#{n % 255}" } noteable_type 'Issue' - title { FFaker::Lorem.sentence } - description { FFaker::Lorem.paragraph(5) } + sequence(:title) { |n| "Spam title #{n}" } + description { "Spam description\nwith\nmultiple\nlines" } end end diff --git a/spec/factories/system_hooks.rb b/spec/factories/system_hooks.rb index c786e9cb79b..841e1e293e8 100644 --- a/spec/factories/system_hooks.rb +++ b/spec/factories/system_hooks.rb @@ -1,5 +1,5 @@ FactoryGirl.define do factory :system_hook do - url { FFaker::Internet.uri('http') } + url { generate(:url) } end end diff --git a/spec/factories/users.rb b/spec/factories/users.rb index 249dabbaae8..33fa80772ff 100644 --- a/spec/factories/users.rb +++ b/spec/factories/users.rb @@ -1,10 +1,8 @@ FactoryGirl.define do - sequence(:name) { FFaker::Name.name } - factory :user, aliases: [:author, :assignee, :recipient, :owner, :creator, :resource_owner] do - email { FFaker::Internet.email } - name - sequence(:username) { |n| "#{FFaker::Internet.user_name}#{n}" } + email { generate(:email) } + name { generate(:name) } + username { generate(:username) } password "12345678" confirmed_at { Time.now } confirmation_token { nil } @@ -31,6 +29,10 @@ FactoryGirl.define do after(:build) { |user, _| user.block! 
} end + trait :with_avatar do + avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) } + end + trait :two_factor_via_otp do before(:create) do |user| user.otp_required_for_login = true diff --git a/spec/features/admin/admin_browse_spam_logs_spec.rb b/spec/features/admin/admin_browse_spam_logs_spec.rb index 562ace92598..bee57472270 100644 --- a/spec/features/admin/admin_browse_spam_logs_spec.rb +++ b/spec/features/admin/admin_browse_spam_logs_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe 'Admin browse spam logs' do - let!(:spam_log) { create(:spam_log) } + let!(:spam_log) { create(:spam_log, description: 'abcde ' * 20) } before do login_as :admin diff --git a/spec/features/admin/admin_cohorts_spec.rb b/spec/features/admin/admin_cohorts_spec.rb new file mode 100644 index 00000000000..dd14ffdb2ce --- /dev/null +++ b/spec/features/admin/admin_cohorts_spec.rb @@ -0,0 +1,15 @@ +require 'rails_helper' + +feature 'Admin cohorts page', feature: true do + before do + login_as :admin + end + + scenario 'See users count per month' do + 2.times { create(:user) } + + visit admin_cohorts_path + + expect(page).to have_content("#{Time.now.strftime('%b %Y')} 3 0") + end +end diff --git a/spec/features/admin/admin_deploy_keys_spec.rb b/spec/features/admin/admin_deploy_keys_spec.rb index 7ce6cce0a5c..c0b6995a84a 100644 --- a/spec/features/admin/admin_deploy_keys_spec.rb +++ b/spec/features/admin/admin_deploy_keys_spec.rb @@ -18,7 +18,7 @@ RSpec.describe 'admin deploy keys', type: :feature do describe 'create new deploy key' do before do visit admin_deploy_keys_path - click_link 'New Deploy Key' + click_link 'New deploy key' end it 'creates new deploy key' do diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb index a871e370ba2..d5f595894d6 100644 --- a/spec/features/admin/admin_groups_spec.rb +++ b/spec/features/admin/admin_groups_spec.rb @@ -24,14 +24,23 @@ feature 'Admin Groups', feature: true do it 'creates new group' do visit admin_groups_path - click_link "New Group" - fill_in 'group_path', with: 'gitlab' - fill_in 'group_description', with: 'Group description' + click_link "New group" + path_component = 'gitlab' + group_name = 'GitLab group name' + group_description = 'Description of group for GitLab' + fill_in 'group_path', with: path_component + fill_in 'group_name', with: group_name + fill_in 'group_description', with: group_description click_button "Create group" - expect(current_path).to eq admin_group_path(Group.find_by(path: 'gitlab')) - expect(page).to have_content('Group: gitlab') - expect(page).to have_content('Group description') + expect(current_path).to eq admin_group_path(Group.find_by(path: path_component)) + content = page.find('div#content-body') + h3_texts = content.all('h3').collect(&:text).join("\n") + expect(h3_texts).to match group_name + li_texts = content.all('li').collect(&:text).join("\n") + expect(li_texts).to match group_name + expect(li_texts).to match path_component + expect(li_texts).to match group_description end scenario 'shows the visibility level radio populated with the default value' do @@ -39,6 +48,15 @@ feature 'Admin Groups', feature: true do expect_selected_visibility(internal) end + + scenario 'when entered in group path, it auto filled the group name', js: true do + visit admin_groups_path + click_link "New group" + group_path = 'gitlab' + fill_in 'group_path', with: group_path + name_field = find('input#group_name') + expect(name_field.value).to eq group_path + end end describe 'show a group' do @@ 
-59,6 +77,17 @@ feature 'Admin Groups', feature: true do expect_selected_visibility(group.visibility_level) end + + scenario 'edit group path does not change group name', js: true do + group = create(:group, :private) + + visit admin_group_edit_path(group) + name_field = find('input#group_name') + original_name = name_field.value + fill_in 'group_path', with: 'this-new-path' + + expect(name_field.value).to eq original_name + end end describe 'add user into a group', js: true do diff --git a/spec/features/admin/admin_health_check_spec.rb b/spec/features/admin/admin_health_check_spec.rb index f7e49a56deb..523afa2318f 100644 --- a/spec/features/admin/admin_health_check_spec.rb +++ b/spec/features/admin/admin_health_check_spec.rb @@ -2,7 +2,6 @@ require 'spec_helper' feature "Admin Health Check", feature: true do include StubENV - include WaitForAjax before do stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') @@ -24,11 +23,12 @@ feature "Admin Health Check", feature: true do expect(page).to have_selector('#health-check-token', text: token) end - describe 'reload access token', js: true do + describe 'reload access token' do it 'changes the access token' do orig_token = current_application_settings.health_check_access_token click_button 'Reset health check access token' - wait_for_ajax + + expect(page).to have_content('New health check access token has been generated!') expect(find('#health-check-token').text).not_to eq orig_token end end diff --git a/spec/features/admin/admin_hooks_spec.rb b/spec/features/admin/admin_hooks_spec.rb index f246997d5a2..c5f24d412d7 100644 --- a/spec/features/admin/admin_hooks_spec.rb +++ b/spec/features/admin/admin_hooks_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe "Admin::Hooks", feature: true do +describe 'Admin::Hooks', feature: true do before do @project = create(:project) login_as :admin @@ -8,43 +8,68 @@ describe "Admin::Hooks", feature: true do @system_hook = create(:system_hook) end - describe "GET /admin/hooks" do - it "is ok" do + describe 'GET /admin/hooks' do + it 'is ok' do visit admin_root_path - page.within ".layout-nav" do - click_on "Hooks" + page.within '.layout-nav' do + click_on 'Hooks' end expect(current_path).to eq(admin_hooks_path) end - it "has hooks list" do + it 'has hooks list' do visit admin_hooks_path expect(page).to have_content(@system_hook.url) end end - describe "New Hook" do - let(:url) { FFaker::Internet.uri('http') } + describe 'New Hook' do + let(:url) { generate(:url) } it 'adds new hook' do visit admin_hooks_path fill_in 'hook_url', with: url check 'Enable SSL verification' - expect { click_button 'Add System Hook' }.to change(SystemHook, :count).by(1) + expect { click_button 'Add system hook' }.to change(SystemHook, :count).by(1) expect(page).to have_content 'SSL Verification: enabled' expect(current_path).to eq(admin_hooks_path) expect(page).to have_content(url) end end - describe "Test" do + describe 'Update existing hook' do + let(:new_url) { generate(:url) } + + it 'updates existing hook' do + visit admin_hooks_path + + click_link 'Edit' + fill_in 'hook_url', with: new_url + check 'Enable SSL verification' + click_button 'Save changes' + + expect(page).to have_content 'SSL Verification: enabled' + expect(current_path).to eq(admin_hooks_path) + expect(page).to have_content(new_url) + end + end + + describe 'Remove existing hook' do + it 'remove existing hook' do + visit admin_hooks_path + + expect { click_link 'Remove' }.to change(SystemHook, :count).by(-1) + end + end + + describe 'Test' do before do 
WebMock.stub_request(:post, @system_hook.url) visit admin_hooks_path - click_link "Test Hook" + click_link 'Test hook' end it { expect(current_path).to eq(admin_hooks_path) } diff --git a/spec/features/admin/admin_labels_spec.rb b/spec/features/admin/admin_labels_spec.rb index 6d6c9165c83..fa3d9ee25c0 100644 --- a/spec/features/admin/admin_labels_spec.rb +++ b/spec/features/admin/admin_labels_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' RSpec.describe 'admin issues labels' do - include WaitForAjax - let!(:bug_label) { Label.create(title: 'bug', template: true) } let!(:feature_label) { Label.create(title: 'feature', template: true) } diff --git a/spec/features/admin/admin_manage_applications_spec.rb b/spec/features/admin/admin_manage_applications_spec.rb index c2c618b5659..0079125889b 100644 --- a/spec/features/admin/admin_manage_applications_spec.rb +++ b/spec/features/admin/admin_manage_applications_spec.rb @@ -8,7 +8,7 @@ RSpec.describe 'admin manage applications', feature: true do it do visit admin_applications_path - click_on 'New Application' + click_on 'New application' expect(page).to have_content('New application') fill_in :doorkeeper_application_name, with: 'test' diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb index 87a8f62687a..9d205104ebe 100644 --- a/spec/features/admin/admin_projects_spec.rb +++ b/spec/features/admin/admin_projects_spec.rb @@ -109,7 +109,7 @@ describe "Admin::Projects", feature: true do expect(page).to have_content('Developer') end - find(:css, 'li', text: current_user.name).find(:css, 'a.btn-remove').click + find(:css, '.content-list li', text: current_user.name).find(:css, 'a.btn-remove').click expect(page).not_to have_selector(:css, '.content-list') end diff --git a/spec/features/admin/admin_requests_profiles_spec.rb b/spec/features/admin/admin_requests_profiles_spec.rb new file mode 100644 index 00000000000..e8ecb70306b --- /dev/null +++ b/spec/features/admin/admin_requests_profiles_spec.rb @@ -0,0 +1,69 @@ +require 'spec_helper' + +describe 'Admin::RequestsProfilesController', feature: true do + before do + FileUtils.mkdir_p(Gitlab::RequestProfiler::PROFILES_DIR) + login_as(:admin) + end + + after do + Gitlab::RequestProfiler.remove_all_profiles + end + + describe 'GET /admin/requests_profiles' do + it 'shows the current profile token' do + allow(Rails).to receive(:cache).and_return(ActiveSupport::Cache::MemoryStore.new) + + visit admin_requests_profiles_path + + expect(page).to have_content("X-Profile-Token: #{Gitlab::RequestProfiler.profile_token}") + end + + it 'lists all available profiles' do + time1 = 1.hour.ago + time2 = 2.hours.ago + time3 = 3.hours.ago + profile1 = "|gitlab-org|gitlab-ce_#{time1.to_i}.html" + profile2 = "|gitlab-org|gitlab-ce_#{time2.to_i}.html" + profile3 = "|gitlab-com|infrastructure_#{time3.to_i}.html" + + FileUtils.touch("#{Gitlab::RequestProfiler::PROFILES_DIR}/#{profile1}") + FileUtils.touch("#{Gitlab::RequestProfiler::PROFILES_DIR}/#{profile2}") + FileUtils.touch("#{Gitlab::RequestProfiler::PROFILES_DIR}/#{profile3}") + + visit admin_requests_profiles_path + + within('.panel', text: '/gitlab-org/gitlab-ce') do + expect(page).to have_selector("a[href='#{admin_requests_profile_path(profile1)}']", text: time1.to_s(:long)) + expect(page).to have_selector("a[href='#{admin_requests_profile_path(profile2)}']", text: time2.to_s(:long)) + end + + within('.panel', text: '/gitlab-com/infrastructure') do + expect(page).to 
have_selector("a[href='#{admin_requests_profile_path(profile3)}']", text: time3.to_s(:long)) + end + end + end + + describe 'GET /admin/requests_profiles/:profile' do + context 'when a profile exists' do + it 'displays the content of the profile' do + content = 'This is a request profile' + profile = "|gitlab-org|gitlab-ce_#{Time.now.to_i}.html" + + File.write("#{Gitlab::RequestProfiler::PROFILES_DIR}/#{profile}", content) + + visit admin_requests_profile_path(profile) + + expect(page).to have_content(content) + end + end + + context 'when a profile does not exist' do + it 'shows an error message' do + visit admin_requests_profile_path('|non|existent_12345.html') + + expect(page).to have_content('Profile not found') + end + end + end +end diff --git a/spec/features/admin/admin_users_impersonation_tokens_spec.rb b/spec/features/admin/admin_users_impersonation_tokens_spec.rb index 9ff5c2f9d40..0fb4baeb71c 100644 --- a/spec/features/admin/admin_users_impersonation_tokens_spec.rb +++ b/spec/features/admin/admin_users_impersonation_tokens_spec.rb @@ -16,7 +16,7 @@ describe 'Admin > Users > Impersonation Tokens', feature: true, js: true do describe "token creation" do it "allows creation of a token" do - name = FFaker::Product.brand + name = 'Hello World' visit admin_user_impersonation_tokens_path(user_id: user.username) fill_in "Name", with: name @@ -30,7 +30,7 @@ describe 'Admin > Users > Impersonation Tokens', feature: true, js: true do check "api" check "read_user" - expect { click_on "Create Impersonation Token" }.to change { PersonalAccessTokensFinder.new(impersonation: true).execute.count } + expect { click_on "Create impersonation token" }.to change { PersonalAccessTokensFinder.new(impersonation: true).execute.count } expect(active_impersonation_tokens).to have_text(name) expect(active_impersonation_tokens).to have_text('In') expect(active_impersonation_tokens).to have_text('api') diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb index c0807b8c507..c5b1ef1295c 100644 --- a/spec/features/admin/admin_users_spec.rb +++ b/spec/features/admin/admin_users_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe "Admin::Users", feature: true do - include WaitForAjax - let!(:user) do create(:omniauth_user, provider: 'twitter', extern_uid: '123456') end @@ -223,7 +221,7 @@ describe "Admin::Users", feature: true do it "changes user entry" do user.reload expect(user.name).to eq('Big Bang') - expect(user.is_admin?).to be_truthy + expect(user.admin?).to be_truthy expect(user.password_expires_at).to be <= Time.now end end diff --git a/spec/features/admin/admin_uses_repository_checks_spec.rb b/spec/features/admin/admin_uses_repository_checks_spec.rb index 855247de2ea..ab5c42365fe 100644 --- a/spec/features/admin/admin_uses_repository_checks_spec.rb +++ b/spec/features/admin/admin_uses_repository_checks_spec.rb @@ -23,7 +23,7 @@ feature 'Admin uses repository checks', feature: true do project = create(:empty_project) project.update_columns( last_repository_check_failed: true, - last_repository_check_at: Time.now, + last_repository_check_at: Time.now ) visit_admin_project_page(project) diff --git a/spec/features/atom/dashboard_issues_spec.rb b/spec/features/atom/dashboard_issues_spec.rb index 58b14e09740..9ea325ab41b 100644 --- a/spec/features/atom/dashboard_issues_spec.rb +++ b/spec/features/atom/dashboard_issues_spec.rb @@ -32,7 +32,7 @@ describe "Dashboard Issues Feed", feature: true do end context "issue with basic fields" do - let!(:issue2) { create(:issue, 
author: user, assignee: assignee, project: project2, description: 'test desc') } + let!(:issue2) { create(:issue, author: user, assignees: [assignee], project: project2, description: 'test desc') } it "renders issue fields" do visit issues_dashboard_path(:atom, private_token: user.private_token) @@ -41,7 +41,7 @@ describe "Dashboard Issues Feed", feature: true do expect(entry).to be_present expect(entry).to have_selector('author email', text: issue2.author_public_email) - expect(entry).to have_selector('assignee email', text: issue2.assignee_public_email) + expect(entry).to have_selector('assignees email', text: assignee.public_email) expect(entry).not_to have_selector('labels') expect(entry).not_to have_selector('milestone') expect(entry).to have_selector('description', text: issue2.description) @@ -51,7 +51,7 @@ describe "Dashboard Issues Feed", feature: true do context "issue with label and milestone" do let!(:milestone1) { create(:milestone, project: project1, title: 'v1') } let!(:label1) { create(:label, project: project1, title: 'label1') } - let!(:issue1) { create(:issue, author: user, assignee: assignee, project: project1, milestone: milestone1) } + let!(:issue1) { create(:issue, author: user, assignees: [assignee], project: project1, milestone: milestone1) } before do issue1.labels << label1 @@ -64,7 +64,7 @@ describe "Dashboard Issues Feed", feature: true do expect(entry).to be_present expect(entry).to have_selector('author email', text: issue1.author_public_email) - expect(entry).to have_selector('assignee email', text: issue1.assignee_public_email) + expect(entry).to have_selector('assignees email', text: assignee.public_email) expect(entry).to have_selector('labels label', text: label1.title) expect(entry).to have_selector('milestone', text: milestone1.title) expect(entry).not_to have_selector('description') diff --git a/spec/features/atom/issues_spec.rb b/spec/features/atom/issues_spec.rb index b3903ec2faf..4f6754ad541 100644 --- a/spec/features/atom/issues_spec.rb +++ b/spec/features/atom/issues_spec.rb @@ -6,7 +6,7 @@ describe 'Issues Feed', feature: true do let!(:assignee) { create(:user, email: 'private2@example.com', public_email: 'public2@example.com') } let!(:group) { create(:group) } let!(:project) { create(:project) } - let!(:issue) { create(:issue, author: user, assignee: assignee, project: project) } + let!(:issue) { create(:issue, author: user, assignees: [assignee], project: project) } before do project.team << [user, :developer] @@ -22,7 +22,8 @@ describe 'Issues Feed', feature: true do to have_content('application/atom+xml') expect(body).to have_selector('title', text: "#{project.name} issues") expect(body).to have_selector('author email', text: issue.author_public_email) - expect(body).to have_selector('assignee email', text: issue.author_public_email) + expect(body).to have_selector('assignees assignee email', text: issue.assignees.first.public_email) + expect(body).to have_selector('assignee email', text: issue.assignees.first.public_email) expect(body).to have_selector('entry summary', text: issue.title) end end @@ -36,7 +37,8 @@ describe 'Issues Feed', feature: true do to have_content('application/atom+xml') expect(body).to have_selector('title', text: "#{project.name} issues") expect(body).to have_selector('author email', text: issue.author_public_email) - expect(body).to have_selector('assignee email', text: issue.author_public_email) + expect(body).to have_selector('assignees assignee email', text: issue.assignees.first.public_email) + expect(body).to 
have_selector('assignee email', text: issue.assignees.first.public_email) expect(body).to have_selector('entry summary', text: issue.title) end end diff --git a/spec/features/atom/users_spec.rb b/spec/features/atom/users_spec.rb index 55e10a1a89b..7a2987e815d 100644 --- a/spec/features/atom/users_spec.rb +++ b/spec/features/atom/users_spec.rb @@ -53,7 +53,7 @@ describe "User Feed", feature: true do end it 'has XHTML summaries in issue descriptions' do - expect(body).to match /we have a bug!<\/p>\n\n<hr ?\/>\n\n<p dir="auto">I guess/ + expect(body).to match /<hr ?\/>/ end it 'has XHTML summaries in notes' do diff --git a/spec/features/auto_deploy_spec.rb b/spec/features/auto_deploy_spec.rb index ea7a97d1d4f..6c7423e4922 100644 --- a/spec/features/auto_deploy_spec.rb +++ b/spec/features/auto_deploy_spec.rb @@ -1,20 +1,11 @@ require 'spec_helper' describe 'Auto deploy' do - include WaitForAjax - let(:user) { create(:user) } - let(:project) { create(:project) } + let(:project) { create(:project, :repository) } before do - project.create_kubernetes_service( - active: true, - properties: { - namespace: project.path, - api_url: 'https://kubernetes.example.com', - token: 'a' * 40, - } - ) + create :kubernetes_service, project: project project.team << [user, :master] login_as user end @@ -42,7 +33,7 @@ describe 'Auto deploy' do it 'includes OpenShift as an available template', js: true do click_link 'Set up auto deploy' - click_button 'Choose a GitLab CI Yaml template' + click_button 'Apply a GitLab CI Yaml template' within '.gitlab-ci-yml-selector' do expect(page).to have_content('OpenShift') @@ -51,12 +42,12 @@ describe 'Auto deploy' do it 'creates a merge request using "auto-deploy" branch', js: true do click_link 'Set up auto deploy' - click_button 'Choose a GitLab CI Yaml template' + click_button 'Apply a GitLab CI Yaml template' within '.gitlab-ci-yml-selector' do click_on 'OpenShift' end wait_for_ajax - click_button 'Commit Changes' + click_button 'Commit changes' expect(page).to have_content('New Merge Request From auto-deploy into master') end diff --git a/spec/features/boards/add_issues_modal_spec.rb b/spec/features/boards/add_issues_modal_spec.rb index 1c0f97d8a1c..505e0b5c355 100644 --- a/spec/features/boards/add_issues_modal_spec.rb +++ b/spec/features/boards/add_issues_modal_spec.rb @@ -1,7 +1,6 @@ require 'rails_helper' describe 'Issue Boards add issue modal', :feature, :js do - include WaitForAjax include WaitForVueResource let(:project) { create(:empty_project, :public) } @@ -145,7 +144,7 @@ describe 'Issue Boards add issue modal', :feature, :js do context 'selecing issues' do it 'selects single issue' do page.within('.add-issues-modal') do - first('.card').click + first('.card .card-number').click page.within('.nav-links') do expect(page).to have_content('Selected issues 1') @@ -155,7 +154,7 @@ describe 'Issue Boards add issue modal', :feature, :js do it 'changes button text' do page.within('.add-issues-modal') do - first('.card').click + first('.card .card-number').click expect(first('.add-issues-footer .btn')).to have_content('Add 1 issue') end @@ -163,7 +162,7 @@ describe 'Issue Boards add issue modal', :feature, :js do it 'changes button text with plural' do page.within('.add-issues-modal') do - all('.card').each do |el| + all('.card .card-number').each do |el| el.click end @@ -173,7 +172,7 @@ describe 'Issue Boards add issue modal', :feature, :js do it 'shows only selected issues on selected tab' do page.within('.add-issues-modal') do - first('.card').click + first('.card 
.card-number').click click_link 'Selected issues' @@ -203,7 +202,7 @@ describe 'Issue Boards add issue modal', :feature, :js do it 'selects all that arent already selected' do page.within('.add-issues-modal') do - first('.card').click + first('.card .card-number').click expect(page).to have_selector('.is-active', count: 1) @@ -215,11 +214,11 @@ describe 'Issue Boards add issue modal', :feature, :js do it 'unselects from selected tab' do page.within('.add-issues-modal') do - first('.card').click + first('.card .card-number').click click_link 'Selected issues' - first('.card').click + first('.card .card-number').click expect(page).not_to have_selector('.is-active') end @@ -229,7 +228,7 @@ describe 'Issue Boards add issue modal', :feature, :js do context 'adding issues' do it 'adds to board' do page.within('.add-issues-modal') do - first('.card').click + first('.card .card-number').click click_button 'Add 1 issue' end @@ -241,7 +240,7 @@ describe 'Issue Boards add issue modal', :feature, :js do it 'adds to second list' do page.within('.add-issues-modal') do - first('.card').click + first('.card .card-number').click click_button planning.title diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb index e168585534d..18585488e26 100644 --- a/spec/features/boards/boards_spec.rb +++ b/spec/features/boards/boards_spec.rb @@ -1,7 +1,6 @@ require 'rails_helper' describe 'Issue Boards', feature: true, js: true do - include WaitForAjax include WaitForVueResource include DragTo @@ -72,7 +71,7 @@ describe 'Issue Boards', feature: true, js: true do let!(:list2) { create(:list, board: board, label: development, position: 1) } let!(:confidential_issue) { create(:labeled_issue, :confidential, project: project, author: user, labels: [planning], relative_position: 9) } - let!(:issue1) { create(:labeled_issue, project: project, assignee: user, labels: [planning], relative_position: 8) } + let!(:issue1) { create(:labeled_issue, project: project, assignees: [user], labels: [planning], relative_position: 8) } let!(:issue2) { create(:labeled_issue, project: project, author: user2, labels: [planning], relative_position: 7) } let!(:issue3) { create(:labeled_issue, project: project, labels: [planning], relative_position: 6) } let!(:issue4) { create(:labeled_issue, project: project, labels: [planning], relative_position: 5) } @@ -590,7 +589,7 @@ describe 'Issue Boards', feature: true, js: true do end def click_filter_link(link_text) - page.within('.filtered-search-input-container') do + page.within('.filtered-search-box') do expect(page).to have_button(link_text) click_button(link_text) diff --git a/spec/features/boards/issue_ordering_spec.rb b/spec/features/boards/issue_ordering_spec.rb index c50155a6d14..bfa2a72a256 100644 --- a/spec/features/boards/issue_ordering_spec.rb +++ b/spec/features/boards/issue_ordering_spec.rb @@ -38,6 +38,8 @@ describe 'Issue Boards', :feature, :js do it 'moves un-ordered issue to top of list' do drag(from_index: 3, to_index: 0) + wait_for_vue_resource + page.within(first('.board')) do expect(first('.card')).to have_content(issue4.title) end diff --git a/spec/features/boards/keyboard_shortcut_spec.rb b/spec/features/boards/keyboard_shortcut_spec.rb index a5fc766401f..a9cc6c49f8e 100644 --- a/spec/features/boards/keyboard_shortcut_spec.rb +++ b/spec/features/boards/keyboard_shortcut_spec.rb @@ -14,7 +14,7 @@ describe 'Issue Boards shortcut', feature: true, js: true do end it 'takes user to issue board index' do - find('body').native.send_keys('gl') + 
find('body').native.send_keys('gb') expect(page).to have_selector('.boards-list') wait_for_vue_resource diff --git a/spec/features/boards/modal_filter_spec.rb b/spec/features/boards/modal_filter_spec.rb index e2281a7da55..e1367c675e5 100644 --- a/spec/features/boards/modal_filter_spec.rb +++ b/spec/features/boards/modal_filter_spec.rb @@ -98,7 +98,7 @@ describe 'Issue Boards add issue modal filtering', :feature, :js do end context 'assignee' do - let!(:issue) { create(:issue, project: project, assignee: user2) } + let!(:issue) { create(:issue, project: project, assignees: [user2]) } before do project.team << [user2, :developer] @@ -219,7 +219,7 @@ describe 'Issue Boards add issue modal filtering', :feature, :js do end def click_filter_link(link_text) - page.within('.add-issues-modal .filtered-search-input-container') do + page.within('.add-issues-modal .filtered-search-box') do expect(page).to have_button(link_text) click_button(link_text) diff --git a/spec/features/boards/new_issue_spec.rb b/spec/features/boards/new_issue_spec.rb index e6d7cf106d4..f04a1a89e96 100644 --- a/spec/features/boards/new_issue_spec.rb +++ b/spec/features/boards/new_issue_spec.rb @@ -1,7 +1,6 @@ require 'rails_helper' describe 'Issue Boards new issue', feature: true, js: true do - include WaitForAjax include WaitForVueResource let(:project) { create(:empty_project, :public) } diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb index 3332e07ec31..4667be49fe6 100644 --- a/spec/features/boards/sidebar_spec.rb +++ b/spec/features/boards/sidebar_spec.rb @@ -1,17 +1,17 @@ require 'rails_helper' describe 'Issue Boards', feature: true, js: true do - include WaitForAjax include WaitForVueResource let(:user) { create(:user) } + let(:user2) { create(:user) } let(:project) { create(:empty_project, :public) } let!(:milestone) { create(:milestone, project: project) } let!(:development) { create(:label, project: project, name: 'Development') } let!(:bug) { create(:label, project: project, name: 'Bug') } let!(:regression) { create(:label, project: project, name: 'Regression') } let!(:stretch) { create(:label, project: project, name: 'Stretch') } - let!(:issue1) { create(:labeled_issue, project: project, assignee: user, milestone: milestone, labels: [development], relative_position: 2) } + let!(:issue1) { create(:labeled_issue, project: project, assignees: [user], milestone: milestone, labels: [development], relative_position: 2) } let!(:issue2) { create(:labeled_issue, project: project, labels: [development, stretch], relative_position: 1) } let(:board) { create(:board, project: project) } let!(:list) { create(:list, board: board, label: development, position: 0) } @@ -113,10 +113,10 @@ describe 'Issue Boards', feature: true, js: true do page.within('.dropdown-menu-user') do click_link 'Unassigned' - - wait_for_vue_resource end + wait_for_vue_resource + expect(page).to have_content('No assignee') end @@ -129,7 +129,7 @@ describe 'Issue Boards', feature: true, js: true do page.within(find('.assignee')) do expect(page).to have_content('No assignee') - click_link 'assign yourself' + click_button 'assign yourself' wait_for_vue_resource @@ -139,7 +139,7 @@ describe 'Issue Boards', feature: true, js: true do expect(card).to have_selector('.avatar') end - it 'resets assignee dropdown' do + it 'updates assignee dropdown' do click_card(card) page.within('.assignee') do @@ -157,13 +157,13 @@ describe 'Issue Boards', feature: true, js: true do end page.within(first('.board')) do - 
find('.card:nth-child(2)').click + find('.card:nth-child(2)').trigger('click') end page.within('.assignee') do click_link 'Edit' - - expect(page).not_to have_selector('.is-active') + + expect(find('.dropdown-menu')).to have_selector('.is-active') end end end diff --git a/spec/features/boards/sub_group_project_spec.rb b/spec/features/boards/sub_group_project_spec.rb new file mode 100644 index 00000000000..6cd7fddd288 --- /dev/null +++ b/spec/features/boards/sub_group_project_spec.rb @@ -0,0 +1,45 @@ +require 'rails_helper' + +describe 'Sub-group project issue boards', :feature, :js do + include WaitForVueResource + + let(:group) { create(:group) } + let(:nested_group_1) { create(:group, parent: group) } + let(:project) { create(:empty_project, group: nested_group_1) } + let(:board) { create(:board, project: project) } + let(:label) { create(:label, project: project) } + let(:user) { create(:user) } + let!(:list1) { create(:list, board: board, label: label, position: 0) } + let!(:issue) { create(:labeled_issue, project: project, labels: [label]) } + + before do + project.add_master(user) + + login_as(user) + + visit namespace_project_board_path(project.namespace, project, board) + wait_for_vue_resource + end + + it 'creates new label from sidebar' do + find('.card').click + + page.within '.labels' do + click_link 'Edit' + click_link 'Create new label' + end + + page.within '.dropdown-new-label' do + fill_in 'new_label_name', with: 'test label' + first('.suggest-colors-dropdown a').click + + click_button 'Create' + + wait_for_ajax + end + + page.within '.labels' do + expect(page).to have_link 'test label' + end + end +end diff --git a/spec/features/calendar_spec.rb b/spec/features/calendar_spec.rb index 35d090c4b7f..496faf87a16 100644 --- a/spec/features/calendar_spec.rb +++ b/spec/features/calendar_spec.rb @@ -1,10 +1,8 @@ require 'spec_helper' feature 'Contributions Calendar', :feature, :js do - include WaitForAjax - let(:user) { create(:user) } - let(:contributed_project) { create(:project, :public) } + let(:contributed_project) { create(:empty_project, :public) } let(:issue_note) { create(:note, project: contributed_project) } # Ex/ Sunday Jan 1, 2016 diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb index 881f1fca4d1..e6c4ab24de5 100644 --- a/spec/features/commits_spec.rb +++ b/spec/features/commits_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe 'Commits' do include CiStatusHelper - let(:project) { create(:project) } + let(:project) { create(:project, :repository) } describe 'CI' do before do diff --git a/spec/features/container_registry_spec.rb b/spec/features/container_registry_spec.rb index 203e55a36f2..b86609e07c5 100644 --- a/spec/features/container_registry_spec.rb +++ b/spec/features/container_registry_spec.rb @@ -1,45 +1,61 @@ require 'spec_helper' describe "Container Registry" do + let(:user) { create(:user) } let(:project) { create(:empty_project) } - let(:repository) { project.container_registry_repository } - let(:tag_name) { 'latest' } - let(:tags) { [tag_name] } + + let(:container_repository) do + create(:container_repository, name: 'my/image') + end before do - login_as(:user) - project.team << [@user, :developer] - stub_container_registry_tags(*tags) + login_as(user) + project.add_developer(user) stub_container_registry_config(enabled: true) - allow(Auth::ContainerRegistryAuthenticationService).to receive(:full_access_token).and_return('token') + stub_container_registry_tags(repository: :any, tags: []) end - describe 'GET 
/:project/container_registry' do + context 'when there are no image repositories' do + scenario 'user visits container registry main page' do + visit_container_registry + + expect(page).to have_content 'No container image repositories' + end + end + + context 'when there are image repositories' do before do - visit namespace_project_container_registry_index_path(project.namespace, project) + stub_container_registry_tags(repository: %r{my/image}, tags: %w[latest]) + project.container_repositories << container_repository end - context 'when no tags' do - let(:tags) { [] } + scenario 'user wants to see multi-level container repository' do + visit_container_registry - it { expect(page).to have_content('No images in Container Registry for this project') } + expect(page).to have_content('my/image') end - context 'when there are tags' do - it { expect(page).to have_content(tag_name) } - it { expect(page).to have_content('d7a513a66') } - end - end + scenario 'user removes entire container repository' do + visit_container_registry - describe 'DELETE /:project/container_registry/tag' do - before do - visit namespace_project_container_registry_index_path(project.namespace, project) + expect_any_instance_of(ContainerRepository) + .to receive(:delete_tags!).and_return(true) + + click_on 'Remove repository' end - it do - expect_any_instance_of(::ContainerRegistry::Tag).to receive(:delete).and_return(true) + scenario 'user removes a specific tag from container repository' do + visit_container_registry - click_on 'Remove' + expect_any_instance_of(ContainerRegistry::Tag) + .to receive(:delete).and_return(true) + + click_on 'Remove tag' end end + + def visit_container_registry + visit namespace_project_container_registry_index_path( + project.namespace, project) + end end diff --git a/spec/features/copy_as_gfm_spec.rb b/spec/features/copy_as_gfm_spec.rb index 55df7e45f79..be615519a09 100644 --- a/spec/features/copy_as_gfm_spec.rb +++ b/spec/features/copy_as_gfm_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe 'Copy as GFM', feature: true, js: true do - include GitlabMarkdownHelper + include MarkupHelper include RepoHelpers include ActionView::Helpers::JavaScriptHelper @@ -96,7 +96,7 @@ describe 'Copy as GFM', feature: true, js: true do # issue link "[Issue](#{namespace_project_issue_url(@project.namespace, @project, @feat.issue)})", # issue link with note anchor - "[Issue](#{namespace_project_issue_url(@project.namespace, @project, @feat.issue, anchor: 'note_123')})", + "[Issue](#{namespace_project_issue_url(@project.namespace, @project, @feat.issue, anchor: 'note_123')})" ) verify( @@ -433,7 +433,7 @@ describe 'Copy as GFM', feature: true, js: true do end describe 'Copying code' do - let(:project) { create(:project) } + let(:project) { create(:project, :repository) } context 'from a diff' do before do @@ -479,6 +479,7 @@ describe 'Copy as GFM', feature: true, js: true do context 'from a blob' do before do visit namespace_project_blob_path(project.namespace, project, File.join('master', 'files/ruby/popen.rb')) + wait_for_ajax end context 'selecting one word of text' do @@ -520,6 +521,7 @@ describe 'Copy as GFM', feature: true, js: true do context 'from a GFM code block' do before do visit namespace_project_blob_path(project.namespace, project, File.join('markdown', 'doc/api/users.md')) + wait_for_ajax end context 'selecting one word of text' do diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb index 0648c89a5c7..cbeb73d9cae 100644 --- 
a/spec/features/cycle_analytics_spec.rb +++ b/spec/features/cycle_analytics_spec.rb @@ -1,21 +1,21 @@ require 'spec_helper' feature 'Cycle Analytics', feature: true, js: true do - include WaitForAjax - let(:user) { create(:user) } let(:guest) { create(:user) } - let(:project) { create(:project) } + let(:project) { create(:project, :repository) } let(:issue) { create(:issue, project: project, created_at: 2.days.ago) } let(:milestone) { create(:milestone, project: project) } - let(:mr) { create_merge_request_closing_issue(issue) } + let(:mr) { create_merge_request_closing_issue(issue, commit_message: "References #{issue.to_reference}") } let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha) } context 'as an allowed user' do context 'when project is new' do before do - project.team << [user, :master] + project.add_master(user) + login_as(user) + visit namespace_project_cycle_analytics_path(project.namespace, project) wait_for_ajax end @@ -32,9 +32,10 @@ feature 'Cycle Analytics', feature: true, js: true do context "when there's cycle analytics data" do before do - project.team << [user, :master] - allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue]) + mr.update(head_pipeline: pipeline) + project.add_master(user) + create_cycle deploy_master @@ -64,11 +65,30 @@ feature 'Cycle Analytics', feature: true, js: true do expect_issue_to_be_present end end + + context "when my preferred language is Spanish" do + before do + user.update_attribute(:preferred_language, 'es') + + project.team << [user, :master] + login_as(user) + visit namespace_project_cycle_analytics_path(project.namespace, project) + wait_for_ajax + end + + it 'shows the content in Spanish' do + expect(page).to have_content('Estado del Pipeline') + end + + it 'resets the language to English' do + expect(I18n.locale).to eq(:en) + end + end end context "as a guest" do before do - project.team << [guest, :guest] + project.add_guest(guest) allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue]) create_cycle diff --git a/spec/features/dashboard/datetime_on_tooltips_spec.rb b/spec/features/dashboard/datetime_on_tooltips_spec.rb index dc9d09fa396..0e9e3f78be2 100644 --- a/spec/features/dashboard/datetime_on_tooltips_spec.rb +++ b/spec/features/dashboard/datetime_on_tooltips_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'Tooltips on .timeago dates', feature: true, js: true do - include WaitForAjax - let(:user) { create(:user) } let(:project) { create(:project, name: 'test', namespace: user.namespace) } let(:created_date) { Date.yesterday.to_time } diff --git a/spec/features/dashboard/group_spec.rb b/spec/features/dashboard/group_spec.rb index d5f8470fab0..8e20fdec8ad 100644 --- a/spec/features/dashboard/group_spec.rb +++ b/spec/features/dashboard/group_spec.rb @@ -5,16 +5,18 @@ RSpec.describe 'Dashboard Group', feature: true do login_as(:user) end - it 'creates new grpup' do + it 'creates new group', js: true do visit dashboard_groups_path - click_link 'New Group' + find('.btn-new').trigger('click') + new_path = 'Samurai' + new_description = 'Tokugawa Shogunate' - fill_in 'group_path', with: 'Samurai' - fill_in 'group_description', with: 'Tokugawa Shogunate' + fill_in 'group_path', with: new_path + fill_in 'group_description', with: new_description click_button 'Create group' - expect(current_path).to eq group_path(Group.find_by(name: 'Samurai')) - expect(page).to have_content('Samurai') - 
expect(page).to have_content('Tokugawa Shogunate') + expect(current_path).to eq group_path(Group.find_by(name: new_path)) + expect(page).to have_content(new_path) + expect(page).to have_content(new_description) end end diff --git a/spec/features/dashboard/groups_list_spec.rb b/spec/features/dashboard/groups_list_spec.rb index ca04107d33a..52b4d82e856 100644 --- a/spec/features/dashboard/groups_list_spec.rb +++ b/spec/features/dashboard/groups_list_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe 'Dashboard Groups page', js: true, feature: true do - include WaitForAjax - let!(:user) { create :user } let!(:group) { create(:group) } let!(:nested_group) { create(:group, :nested) } diff --git a/spec/features/dashboard/issuables_counter_spec.rb b/spec/features/dashboard/issuables_counter_spec.rb index a1718912fc6..354267dbee7 100644 --- a/spec/features/dashboard/issuables_counter_spec.rb +++ b/spec/features/dashboard/issuables_counter_spec.rb @@ -1,45 +1,64 @@ require 'spec_helper' -describe 'Navigation bar counter', feature: true, js: true, caching: true do +describe 'Navigation bar counter', feature: true, caching: true do let(:user) { create(:user) } let(:project) { create(:empty_project, namespace: user.namespace) } let(:issue) { create(:issue, project: project) } let(:merge_request) { create(:merge_request, source_project: project) } before do - issue.update(assignee: user) + issue.assignees = [user] merge_request.update(assignee: user) login_as(user) end it 'reflects dashboard issues count' do - visit issues_dashboard_path + visit issues_path expect_counters('issues', '1') - issue.update(assignee: nil) - visit issues_dashboard_path + issue.assignees = [] - expect_counters('issues', '1') + user.invalidate_cache_counts + + Timecop.travel(3.minutes.from_now) do + visit issues_path + + expect_counters('issues', '0') + end end it 'reflects dashboard merge requests count' do - visit merge_requests_dashboard_path + visit merge_requests_path expect_counters('merge_requests', '1') merge_request.update(assignee: nil) - visit merge_requests_dashboard_path - expect_counters('merge_requests', '1') + user.invalidate_cache_counts + + Timecop.travel(3.minutes.from_now) do + visit merge_requests_path + + expect_counters('merge_requests', '0') + end + end + + def issues_path + issues_dashboard_path(assignee_id: user.id) + end + + def merge_requests_path + merge_requests_dashboard_path(assignee_id: user.id) end def expect_counters(issuable_type, count) - dashboard_count = find('li.active') - find('.global-dropdown-toggle').click + dashboard_count = find('.nav-links li.active') nav_count = find(".dashboard-shortcuts-#{issuable_type}") + header_count = find(".header-content .#{issuable_type.tr('_', '-')}-count") - expect(nav_count).to have_content(count) expect(dashboard_count).to have_content(count) + expect(nav_count).to have_content(count) + expect(header_count).to have_content(count) end end diff --git a/spec/features/dashboard/issues_spec.rb b/spec/features/dashboard/issues_spec.rb index f4420814c3a..7a132dba1e9 100644 --- a/spec/features/dashboard/issues_spec.rb +++ b/spec/features/dashboard/issues_spec.rb @@ -11,7 +11,7 @@ RSpec.describe 'Dashboard Issues', feature: true do let!(:authored_issue) { create :issue, author: current_user, project: project } let!(:authored_issue_on_public_project) { create :issue, author: current_user, project: public_project } - let!(:assigned_issue) { create :issue, assignee: current_user, project: project } + let!(:assigned_issue) { create :issue, assignees: 
[current_user], project: project } let!(:other_issue) { create :issue, project: project } before do @@ -26,10 +26,26 @@ RSpec.describe 'Dashboard Issues', feature: true do expect(page).not_to have_content(other_issue.title) end + it 'shows checkmark when unassigned is selected for assignee', js: true do + find('.js-assignee-search').click + find('li', text: 'Unassigned').click + find('.js-assignee-search').click + + expect(find('li[data-user-id="0"] a.is-active')).to be_visible + end + it 'shows issues when current user is author', js: true do find('#assignee_id', visible: false).set('') find('.js-author-search', match: :first).click + + expect(find('li[data-user-id="null"] a.is-active')).to be_visible + find('.dropdown-menu-author li a', match: :first, text: current_user.to_reference).click + find('.js-author-search', match: :first).click + + page.within '.dropdown-menu-user' do + expect(find('.dropdown-menu-author li a.is-active', match: :first, text: current_user.to_reference)).to be_visible + end expect(page).to have_content(authored_issue.title) expect(page).to have_content(authored_issue_on_public_project.title) diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb new file mode 100644 index 00000000000..508ca38d7e5 --- /dev/null +++ b/spec/features/dashboard/merge_requests_spec.rb @@ -0,0 +1,32 @@ +require 'spec_helper' + +describe 'Dashboard Merge Requests' do + let(:current_user) { create :user } + let(:project) do + create(:empty_project) do |project| + project.add_master(current_user) + end + end + + before do + login_as(current_user) + end + + it 'should show an empty state' do + visit merge_requests_dashboard_path(assignee_id: current_user.id) + + expect(page).to have_selector('.empty-state') + end + + context 'if there are merge requests' do + before do + create(:merge_request, assignee: current_user, source_project: project) + + visit merge_requests_dashboard_path(assignee_id: current_user.id) + end + + it 'should not show an empty state' do + expect(page).not_to have_selector('.empty-state') + end + end +end diff --git a/spec/features/dashboard/milestone_filter_spec.rb b/spec/features/dashboard/milestone_filter_spec.rb new file mode 100644 index 00000000000..d60a002a8d7 --- /dev/null +++ b/spec/features/dashboard/milestone_filter_spec.rb @@ -0,0 +1,60 @@ +require 'spec_helper' + +describe 'Dashboard > milestone filter', :feature, :js do + include WaitForAjax + + let(:user) { create(:user) } + let(:project) { create(:project, name: 'test', namespace: user.namespace) } + let(:milestone) { create(:milestone, title: "v1.0", project: project) } + let(:milestone2) { create(:milestone, title: "v2.0", project: project) } + let!(:issue) { create :issue, author: user, project: project, milestone: milestone } + let!(:issue2) { create :issue, author: user, project: project, milestone: milestone2 } + + before do + login_as(user) + visit issues_dashboard_path(author_id: user.id) + end + + context 'default state' do + it 'shows issues with Any Milestone' do + page.all('.issue-info').each do |issue_info| + expect(issue_info.text).to match(/v\d.0/) + end + end + end + + context 'filtering by milestone' do + milestone_select = '.js-milestone-select' + + before do + find(milestone_select).click + wait_for_ajax + + page.within('.dropdown-content') do + click_link 'v1.0' + end + + find(milestone_select).click + wait_for_ajax + end + + it 'shows issues with Milestone v1.0' do + expect(find('.issues-list')).to have_selector('.issue', count: 
1) + expect(find('.dropdown-content')).to have_selector('a.is-active', count: 1) + end + + it 'should not change active Milestone unless clicked' do + expect(find('.dropdown-content')).to have_selector('a.is-active', count: 1) + + # open & close dropdown + find('.dropdown-menu-close').click + + expect(find('.milestone-filter')).not_to have_selector('.dropdown.open') + + find(milestone_select).click + + expect(find('.dropdown-content')).to have_selector('a.is-active', count: 1) + expect(find('.dropdown-content a.is-active')).to have_content('v1.0') + end + end +end diff --git a/spec/features/dashboard/project_member_activity_index_spec.rb b/spec/features/dashboard/project_member_activity_index_spec.rb index 49d93db58a9..16c214ae060 100644 --- a/spec/features/dashboard/project_member_activity_index_spec.rb +++ b/spec/features/dashboard/project_member_activity_index_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'Project member activity', feature: true, js: true do - include WaitForAjax - let(:user) { create(:user) } let(:project) { create(:empty_project, :public, name: 'x', namespace: user.namespace) } diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb index c4e58d14f75..f1789fc9d43 100644 --- a/spec/features/dashboard/projects_spec.rb +++ b/spec/features/dashboard/projects_spec.rb @@ -7,7 +7,6 @@ RSpec.describe 'Dashboard Projects', feature: true do before do project.team << [user, :developer] login_as user - visit dashboard_projects_path end it 'shows the project the user in a member of in the list' do @@ -15,15 +14,19 @@ RSpec.describe 'Dashboard Projects', feature: true do expect(page).to have_content('awesome stuff') end - describe "with a pipeline" do - let(:pipeline) { create(:ci_pipeline, :success, project: project, sha: project.commit.sha) } + describe "with a pipeline", redis: true do + let!(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit.sha) } before do - pipeline + # Since the cache isn't updated when a new pipeline is created + # we need the pipeline to advance in the pipeline since the cache was created + # by visiting the login page. 
+ pipeline.succeed end it 'shows that the last pipeline passed' do visit dashboard_projects_path + expect(page).to have_xpath("//a[@href='#{pipelines_namespace_project_commit_path(project.namespace, project, project.commit)}']") end end diff --git a/spec/features/dashboard/shortcuts_spec.rb b/spec/features/dashboard/shortcuts_spec.rb index 3642c0bfb5b..349b948eaee 100644 --- a/spec/features/dashboard/shortcuts_spec.rb +++ b/spec/features/dashboard/shortcuts_spec.rb @@ -1,31 +1,52 @@ require 'spec_helper' -feature 'Dashboard shortcuts', feature: true, js: true do - before do - login_as :user - visit dashboard_projects_path - end +feature 'Dashboard shortcuts', :feature, :js do + context 'logged in' do + before do + login_as :user + visit root_dashboard_path + end + + scenario 'Navigate to tabs' do + find('body').send_keys([:shift, 'I']) + + check_page_title('Issues') + + find('body').send_keys([:shift, 'M']) + + check_page_title('Merge Requests') - scenario 'Navigate to tabs' do - find('body').native.send_key('g') - find('body').native.send_key('p') + find('body').send_keys([:shift, 'T']) + + check_page_title('Todos') + + find('body').send_keys([:shift, 'P']) + + check_page_title('Projects') + end + end - check_page_title('Projects') + context 'logged out' do + before do + visit explore_root_path + end - find('body').native.send_key('g') - find('body').native.send_key('i') + scenario 'Navigate to tabs' do + find('body').send_keys([:shift, 'G']) - check_page_title('Issues') + find('.nothing-here-block') + expect(page).to have_content('No public groups') - find('body').native.send_key('g') - find('body').native.send_key('m') + find('body').send_keys([:shift, 'S']) - check_page_title('Merge Requests') + find('.nothing-here-block') + expect(page).to have_selector('.snippets-list-holder') - find('body').native.send_key('g') - find('body').native.send_key('t') + find('body').send_keys([:shift, 'P']) - check_page_title('Todos') + find('.nothing-here-block') + expect(page).to have_content('No projects found') + end end def check_page_title(title) diff --git a/spec/features/dashboard/snippets_spec.rb b/spec/features/dashboard/snippets_spec.rb index 62937688c22..c6ba118220a 100644 --- a/spec/features/dashboard/snippets_spec.rb +++ b/spec/features/dashboard/snippets_spec.rb @@ -12,4 +12,51 @@ describe 'Dashboard snippets', feature: true do it_behaves_like 'paginated snippets' end + + context 'filtering by visibility' do + let(:user) { create(:user) } + let!(:snippets) do + [ + create(:personal_snippet, :public, author: user), + create(:personal_snippet, :internal, author: user), + create(:personal_snippet, :private, author: user), + create(:personal_snippet, :public) + ] + end + + before do + login_as(user) + + visit dashboard_snippets_path + end + + it 'contains all snippets of logged user' do + expect(page).to have_selector('.snippet-row', count: 3) + + expect(page).to have_content(snippets[0].title) + expect(page).to have_content(snippets[1].title) + expect(page).to have_content(snippets[2].title) + end + + it 'contains all private snippets of logged user when clicking on private' do + click_link('Private') + + expect(page).to have_selector('.snippet-row', count: 1) + expect(page).to have_content(snippets[2].title) + end + + it 'contains all internal snippets of logged user when clicking on internal' do + click_link('Internal') + + expect(page).to have_selector('.snippet-row', count: 1) + expect(page).to have_content(snippets[1].title) + end + + it 'contains all public snippets of logged user when 
clicking on public' do + click_link('Public') + + expect(page).to have_selector('.snippet-row', count: 1) + expect(page).to have_content(snippets[0].title) + end + end end diff --git a/spec/features/dashboard_issues_spec.rb b/spec/features/dashboard_issues_spec.rb index 8c61cdebc4b..ad60fb2c74f 100644 --- a/spec/features/dashboard_issues_spec.rb +++ b/spec/features/dashboard_issues_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe "Dashboard Issues filtering", feature: true, js: true do let(:user) { create(:user) } - let(:project) { create(:project) } + let(:project) { create(:empty_project) } let(:milestone) { create(:milestone, project: project) } context 'filtering by milestone' do @@ -10,8 +10,8 @@ describe "Dashboard Issues filtering", feature: true, js: true do project.team << [user, :master] login_as(user) - create(:issue, project: project, author: user, assignee: user) - create(:issue, project: project, author: user, assignee: user, milestone: milestone) + create(:issue, project: project, author: user, assignees: [user]) + create(:issue, project: project, author: user, assignees: [user], milestone: milestone) visit_issues end diff --git a/spec/features/discussion_comments/commit_spec.rb b/spec/features/discussion_comments/commit_spec.rb new file mode 100644 index 00000000000..96e0b78f6b9 --- /dev/null +++ b/spec/features/discussion_comments/commit_spec.rb @@ -0,0 +1,18 @@ +require 'spec_helper' + +describe 'Discussion Comments Merge Request', :feature, :js do + include RepoHelpers + + let(:user) { create(:user) } + let(:project) { create(:project) } + let(:merge_request) { create(:merge_request, source_project: project) } + + before do + project.add_master(user) + login_as(user) + + visit namespace_project_commit_path(project.namespace, project, sample_commit.id) + end + + it_behaves_like 'discussion comments', 'commit' +end diff --git a/spec/features/discussion_comments/issue_spec.rb b/spec/features/discussion_comments/issue_spec.rb new file mode 100644 index 00000000000..ccc9efccd18 --- /dev/null +++ b/spec/features/discussion_comments/issue_spec.rb @@ -0,0 +1,16 @@ +require 'spec_helper' + +describe 'Discussion Comments Issue', :feature, :js do + let(:user) { create(:user) } + let(:project) { create(:empty_project) } + let(:issue) { create(:issue, project: project) } + + before do + project.add_master(user) + login_as(user) + + visit namespace_project_issue_path(project.namespace, project, issue) + end + + it_behaves_like 'discussion comments', 'issue' +end diff --git a/spec/features/discussion_comments/merge_request_spec.rb b/spec/features/discussion_comments/merge_request_spec.rb new file mode 100644 index 00000000000..f99ebeb9cd9 --- /dev/null +++ b/spec/features/discussion_comments/merge_request_spec.rb @@ -0,0 +1,16 @@ +require 'spec_helper' + +describe 'Discussion Comments Merge Request', :feature, :js do + let(:user) { create(:user) } + let(:project) { create(:project) } + let(:merge_request) { create(:merge_request, source_project: project) } + + before do + project.add_master(user) + login_as(user) + + visit namespace_project_merge_request_path(project.namespace, project, merge_request) + end + + it_behaves_like 'discussion comments', 'merge request' +end diff --git a/spec/features/discussion_comments/snippets_spec.rb b/spec/features/discussion_comments/snippets_spec.rb new file mode 100644 index 00000000000..19a306511b2 --- /dev/null +++ b/spec/features/discussion_comments/snippets_spec.rb @@ -0,0 +1,16 @@ +require 'spec_helper' + +describe 'Discussion Comments 
Issue', :feature, :js do + let(:user) { create(:user) } + let(:project) { create(:empty_project) } + let(:snippet) { create(:project_snippet, :private, project: project, author: user) } + + before do + project.add_master(user) + login_as(user) + + visit namespace_project_snippet_path(project.namespace, project, snippet) + end + + it_behaves_like 'discussion comments', 'snippet' +end diff --git a/spec/features/expand_collapse_diffs_spec.rb b/spec/features/expand_collapse_diffs_spec.rb index 8c64b050e19..76c77e0bc5f 100644 --- a/spec/features/expand_collapse_diffs_spec.rb +++ b/spec/features/expand_collapse_diffs_spec.rb @@ -1,10 +1,8 @@ require 'spec_helper' feature 'Expand and collapse diffs', js: true, feature: true do - include WaitForAjax - let(:branch) { 'expand-collapse-diffs' } - let(:project) { create(:project) } + let(:project) { create(:project, :repository) } before do login_as :admin diff --git a/spec/features/explore/groups_list_spec.rb b/spec/features/explore/groups_list_spec.rb index 9daaaa8e555..9828cb179a7 100644 --- a/spec/features/explore/groups_list_spec.rb +++ b/spec/features/explore/groups_list_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe 'Explore Groups page', js: true, feature: true do - include WaitForAjax - +describe 'Explore Groups page', :js, :feature do let!(:user) { create :user } let!(:group) { create(:group) } let!(:public_group) { create(:group, :public) } @@ -48,19 +46,39 @@ describe 'Explore Groups page', js: true, feature: true do it 'shows non-archived projects count' do # Initially project is not archived expect(find('.js-groups-list-holder .content-list li:first-child .stats span:first-child')).to have_text("1") - + # Archive project empty_project.archive! visit explore_groups_path # Check project count expect(find('.js-groups-list-holder .content-list li:first-child .stats span:first-child')).to have_text("0") - + # Unarchive project empty_project.unarchive! 
visit explore_groups_path # Check project count - expect(find('.js-groups-list-holder .content-list li:first-child .stats span:first-child')).to have_text("1") + expect(find('.js-groups-list-holder .content-list li:first-child .stats span:first-child')).to have_text("1") + end + + describe 'landing component' do + it 'should show a landing component' do + expect(page).to have_content('Below you will find all the groups that are public.') + end + + it 'should be dismissable' do + find('.dismiss-button').click + + expect(page).not_to have_content('Below you will find all the groups that are public.') + end + + it 'should persistently not show once dismissed' do + find('.dismiss-button').click + + visit explore_groups_path + + expect(page).not_to have_content('Below you will find all the groups that are public.') + end end end diff --git a/spec/features/gitlab_flavored_markdown_spec.rb b/spec/features/gitlab_flavored_markdown_spec.rb index 84d73d693bc..005a029a393 100644 --- a/spec/features/gitlab_flavored_markdown_spec.rb +++ b/spec/features/gitlab_flavored_markdown_spec.rb @@ -1,28 +1,28 @@ require 'spec_helper' describe "GitLab Flavored Markdown", feature: true do - let(:project) { create(:project) } + let(:project) { create(:empty_project) } let(:issue) { create(:issue, project: project) } - let(:merge_request) { create(:merge_request, source_project: project, target_project: project) } let(:fred) do - u = create(:user, name: "fred") - project.team << [u, :master] - u + create(:user, name: 'fred') do |user| + project.add_master(user) + end end before do - allow_any_instance_of(Commit).to receive(:title). - and_return("fix #{issue.to_reference}\n\nask #{fred.to_reference} for details") + login_as(:user) + project.add_developer(@user) end - let(:commit) { project.commit } + describe "for commits" do + let(:project) { create(:project, :repository) } + let(:commit) { project.commit } - before do - login_as :user - project.team << [@user, :developer] - end + before do + allow_any_instance_of(Commit).to receive(:title). 
+ and_return("fix #{issue.to_reference}\n\nask #{fred.to_reference} for details") + end - describe "for commits" do it "renders title in commits#index" do visit namespace_project_commits_path(project.namespace, project, 'master', limit: 1) @@ -48,18 +48,22 @@ describe "GitLab Flavored Markdown", feature: true do end end - describe "for issues" do + describe "for issues", feature: true, js: true do + include WaitForVueResource + before do @other_issue = create(:issue, author: @user, - assignee: @user, + assignees: [@user], project: project) @issue = create(:issue, author: @user, - assignee: @user, + assignees: [@user], project: project, title: "fix #{@other_issue.to_reference}", description: "ask #{fred.to_reference} for details") + + @note = create(:note_on_issue, noteable: @issue, project: @issue.project, note: "Hello world") end it "renders subject in issues#index" do @@ -82,6 +86,8 @@ describe "GitLab Flavored Markdown", feature: true do end describe "for merge requests" do + let(:project) { create(:project, :repository) } + before do @merge_request = create(:merge_request, source_project: project, target_project: project, title: "fix #{issue.to_reference}") end diff --git a/spec/features/global_search_spec.rb b/spec/features/global_search_spec.rb index f6409e00f22..4b22b07494d 100644 --- a/spec/features/global_search_spec.rb +++ b/spec/features/global_search_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' feature 'Global search', feature: true do let(:user) { create(:user) } - let(:project) { create(:project, namespace: user.namespace) } + let(:project) { create(:empty_project, namespace: user.namespace) } before do project.team << [user, :master] diff --git a/spec/features/groups/empty_states_spec.rb b/spec/features/groups/empty_states_spec.rb new file mode 100644 index 00000000000..fef8e41bffe --- /dev/null +++ b/spec/features/groups/empty_states_spec.rb @@ -0,0 +1,70 @@ +require 'spec_helper' + +feature 'Groups Merge Requests Empty States' do + let(:group) { create(:group) } + let(:user) { create(:group_member, :developer, user: create(:user), group: group ).user } + + before do + login_as(user) + end + + context 'group has a project' do + let(:project) { create(:empty_project, namespace: group) } + + before do + project.add_master(user) + end + + context 'the project has a merge request' do + before do + create(:merge_request, source_project: project) + + visit merge_requests_group_path(group) + end + + it 'should not display an empty state' do + expect(page).not_to have_selector('.empty-state') + end + end + + context 'the project has no merge requests', :js do + before do + visit merge_requests_group_path(group) + end + + it 'should display an empty state' do + expect(page).to have_selector('.empty-state') + end + + it 'should show a new merge request button' do + within '.empty-state' do + expect(page).to have_content('New merge request') + end + end + + it 'the new merge request button opens a project dropdown' do + within '.empty-state' do + find('.new-project-item-select-button').click + end + + expect(page).to have_selector('.ajax-project-dropdown') + end + end + end + + context 'group without a project' do + before do + visit merge_requests_group_path(group) + end + + it 'should display an empty state' do + expect(page).to have_selector('.empty-state') + end + + it 'should not show a new merge request button' do + within '.empty-state' do + expect(page).not_to have_link('New merge request') + end + end + end +end diff --git a/spec/features/groups/group_settings_spec.rb 
b/spec/features/groups/group_settings_spec.rb new file mode 100644 index 00000000000..cc25db4ad60 --- /dev/null +++ b/spec/features/groups/group_settings_spec.rb @@ -0,0 +1,80 @@ +require 'spec_helper' + +feature 'Edit group settings', feature: true do + given(:user) { create(:user) } + given(:group) { create(:group, path: 'foo') } + + background do + group.add_owner(user) + login_as(user) + end + + describe 'when the group path is changed' do + let(:new_group_path) { 'bar' } + let(:old_group_full_path) { "/#{group.path}" } + let(:new_group_full_path) { "/#{new_group_path}" } + + scenario 'the group is accessible via the new path' do + update_path(new_group_path) + visit new_group_full_path + expect(current_path).to eq(new_group_full_path) + expect(find('h1.group-title')).to have_content(new_group_path) + end + + scenario 'the old group path redirects to the new path' do + update_path(new_group_path) + visit old_group_full_path + expect(current_path).to eq(new_group_full_path) + expect(find('h1.group-title')).to have_content(new_group_path) + end + + context 'with a subgroup' do + given!(:subgroup) { create(:group, parent: group, path: 'subgroup') } + given(:old_subgroup_full_path) { "/#{group.path}/#{subgroup.path}" } + given(:new_subgroup_full_path) { "/#{new_group_path}/#{subgroup.path}" } + + scenario 'the subgroup is accessible via the new path' do + update_path(new_group_path) + visit new_subgroup_full_path + expect(current_path).to eq(new_subgroup_full_path) + expect(find('h1.group-title')).to have_content(subgroup.path) + end + + scenario 'the old subgroup path redirects to the new path' do + update_path(new_group_path) + visit old_subgroup_full_path + expect(current_path).to eq(new_subgroup_full_path) + expect(find('h1.group-title')).to have_content(subgroup.path) + end + end + + context 'with a project' do + given!(:project) { create(:project, group: group, path: 'project') } + given(:old_project_full_path) { "/#{group.path}/#{project.path}" } + given(:new_project_full_path) { "/#{new_group_path}/#{project.path}" } + + before(:context) { TestEnv.clean_test_path } + after(:example) { TestEnv.clean_test_path } + + scenario 'the project is accessible via the new path' do + update_path(new_group_path) + visit new_project_full_path + expect(current_path).to eq(new_project_full_path) + expect(find('h1.project-title')).to have_content(project.name) + end + + scenario 'the old project path redirects to the new path' do + update_path(new_group_path) + visit old_project_full_path + expect(current_path).to eq(new_project_full_path) + expect(find('h1.project-title')).to have_content(project.name) + end + end + end +end + +def update_path(new_group_path) + visit edit_group_path(group) + fill_in 'group_path', with: new_group_path + click_button 'Save group' +end diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb index 1b3747c390b..45f57845c74 100644 --- a/spec/features/groups/issues_spec.rb +++ b/spec/features/groups/issues_spec.rb @@ -23,4 +23,20 @@ feature 'Group issues page', feature: true do it_behaves_like "an autodiscoverable RSS feed without a private token" end end + + context 'assignee', :js do + let(:access_level) { ProjectFeature::ENABLED } + let(:user) { user_in_group } + let(:user2) { user_outside_group } + let(:path) { issues_group_path(group) } + + it 'filters by only group users' do + click_button('Assignee') + + wait_for_ajax + + expect(find('.dropdown-menu-assignee')).to have_link(user.name) + expect(find('.dropdown-menu-assignee')).not_to 
have_link(user2.name) + end + end end diff --git a/spec/features/groups/members/list_spec.rb b/spec/features/groups/members/list_spec.rb index 14c193f7450..543879bd21d 100644 --- a/spec/features/groups/members/list_spec.rb +++ b/spec/features/groups/members/list_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' feature 'Groups members list', feature: true do + include Select2Helper + let(:user1) { create(:user, name: 'John Doe') } let(:user2) { create(:user, name: 'Mary Jane') } let(:group) { create(:group) } @@ -30,7 +32,7 @@ feature 'Groups members list', feature: true do expect(second_row).to be_blank end - it 'updates user to owner level', :js do + scenario 'update user to owner level', :js do group.add_owner(user1) group.add_developer(user2) @@ -38,13 +40,52 @@ feature 'Groups members list', feature: true do page.within(second_row) do click_button('Developer') - click_link('Owner') expect(page).to have_button('Owner') end end + scenario 'add user to group', :js do + group.add_owner(user1) + + visit group_group_members_path(group) + + add_user(user2.id, 'Reporter') + + page.within(second_row) do + expect(page).to have_content(user2.name) + expect(page).to have_button('Reporter') + end + end + + scenario 'add yourself to group when already an owner', :js do + group.add_owner(user1) + + visit group_group_members_path(group) + + add_user(user1.id, 'Reporter') + + page.within(first_row) do + expect(page).to have_content(user1.name) + expect(page).to have_content('Owner') + end + end + + scenario 'invite user to group', :js do + group.add_owner(user1) + + visit group_group_members_path(group) + + add_user('test@example.com', 'Reporter') + + page.within(second_row) do + expect(page).to have_content('test@example.com') + expect(page).to have_content('Invited') + expect(page).to have_button('Reporter') + end + end + def first_row page.all('ul.content-list > li')[0] end @@ -52,4 +93,13 @@ feature 'Groups members list', feature: true do def second_row page.all('ul.content-list > li')[1] end + + def add_user(id, role) + page.within ".users-group-form" do + select2(id, from: "#user_ids", multiple: true) + select(role, from: "access_level") + end + + click_button "Add to group" + end end diff --git a/spec/features/groups/members/sorting_spec.rb b/spec/features/groups/members/sorting_spec.rb index 608aedd3471..902d3f789ff 100644 --- a/spec/features/groups/members/sorting_spec.rb +++ b/spec/features/groups/members/sorting_spec.rb @@ -68,7 +68,7 @@ feature 'Groups > Members > Sorting', feature: true do expect(page).to have_css('.member-sort-dropdown .dropdown-toggle-text', text: 'Name, descending') end - scenario 'sorts by recent sign in' do + scenario 'sorts by recent sign in', :redis do visit_members_list(sort: :recent_sign_in) expect(first_member).to include(owner.name) @@ -76,7 +76,7 @@ feature 'Groups > Members > Sorting', feature: true do expect(page).to have_css('.member-sort-dropdown .dropdown-toggle-text', text: 'Recent sign in') end - scenario 'sorts by oldest sign in' do + scenario 'sorts by oldest sign in', :redis do visit_members_list(sort: :oldest_sign_in) expect(first_member).to include(developer.name) diff --git a/spec/features/groups/milestone_spec.rb b/spec/features/groups/milestone_spec.rb new file mode 100644 index 00000000000..daa2c6afd63 --- /dev/null +++ b/spec/features/groups/milestone_spec.rb @@ -0,0 +1,36 @@ +require 'rails_helper' + +feature 'Group milestones', :feature, :js do + let(:group) { create(:group) } + let!(:project) { create(:project_empty_repo, group: group) } + 
let(:user) { create(:group_member, :master, user: create(:user), group: group ).user } + + before do + Timecop.freeze + + login_as(user) + end + + after do + Timecop.return + end + + context 'create a milestone' do + before do + visit new_group_milestone_path(group) + end + + it 'creates milestone with start date' do + fill_in 'Title', with: 'testing' + find('#milestone_start_date').click + + page.within(find('.pika-single')) do + click_button '1' + end + + click_button 'Create milestone' + + expect(find('.start_date')).to have_content(Date.today.at_beginning_of_month.strftime('%b %-d, %Y')) + end + end +end diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb index c90cc06a8f5..3d32c47bf09 100644 --- a/spec/features/groups_spec.rb +++ b/spec/features/groups_spec.rb @@ -83,20 +83,43 @@ feature 'Group', feature: true do end end - describe 'create a nested group' do + describe 'create a nested group', js: true do let(:group) { create(:group, path: 'foo') } - before do - visit subgroups_group_path(group) - click_link 'New Subgroup' + context 'as admin' do + before do + visit subgroups_group_path(group) + click_link 'New Subgroup' + end + + it 'creates a nested group' do + fill_in 'Group path', with: 'bar' + click_button 'Create group' + + expect(current_path).to eq(group_path('foo/bar')) + expect(page).to have_content("Group 'bar' was successfully created.") + end end - it 'creates a nested group' do - fill_in 'Group path', with: 'bar' - click_button 'Create group' + context 'as group owner' do + let(:user) { create(:user) } - expect(current_path).to eq(group_path('foo/bar')) - expect(page).to have_content("Group 'bar' was successfully created.") + before do + group.add_owner(user) + logout + login_as(user) + + visit subgroups_group_path(group) + click_link 'New Subgroup' + end + + it 'creates a nested group' do + fill_in 'Group path', with: 'bar' + click_button 'Create group' + + expect(current_path).to eq(group_path('foo/bar')) + expect(page).to have_content("Group 'bar' was successfully created.") + end end end @@ -130,7 +153,7 @@ feature 'Group', feature: true do end it 'removes group' do - click_link 'Remove Group' + click_link 'Remove group' expect(page).to have_content "scheduled for deletion" end diff --git a/spec/features/issuables/issuable_list_spec.rb b/spec/features/issuables/issuable_list_spec.rb index b90bf6268fd..414838fa22e 100644 --- a/spec/features/issuables/issuable_list_spec.rb +++ b/spec/features/issuables/issuable_list_spec.rb @@ -46,16 +46,19 @@ describe 'issuable list', feature: true do end def create_issuables(issuable_type) - 3.times do + 3.times do |n| issuable = if issuable_type == :issue create(:issue, project: project, author: user) else - create(:merge_request, title: FFaker::Lorem.sentence, source_project: project, source_branch: FFaker::Name.name) + create(:merge_request, source_project: project, source_branch: generate(:branch)) + source_branch = FFaker::Name.name + pipeline = create(:ci_empty_pipeline, project: project, ref: source_branch, status: %w(running failed success).sample, sha: 'any') + create(:merge_request, title: FFaker::Lorem.sentence, source_project: project, source_branch: source_branch, head_pipeline: pipeline) end 2.times do - create(:note_on_issue, noteable: issuable, project: project, note: 'Test note') + create(:note_on_issue, noteable: issuable, project: project) end create(:award_emoji, :downvote, awardable: issuable) @@ -65,11 +68,10 @@ describe 'issuable list', feature: true do if issuable_type == :issue issue = 
Issue.reorder(:iid).first merge_request = create(:merge_request, - title: FFaker::Lorem.sentence, source_project: project, - source_branch: FFaker::Name.name) + source_branch: generate(:branch)) - MergeRequestsClosingIssues.create!(issue: issue, merge_request: merge_request) + create(:merge_requests_closing_issues, issue: issue, merge_request: merge_request) end end end diff --git a/spec/features/issues/award_emoji_spec.rb b/spec/features/issues/award_emoji_spec.rb index 16e453bc328..853632614c4 100644 --- a/spec/features/issues/award_emoji_spec.rb +++ b/spec/features/issues/award_emoji_spec.rb @@ -1,13 +1,13 @@ require 'rails_helper' describe 'Awards Emoji', feature: true do - include WaitForAjax + include WaitForVueResource let!(:project) { create(:project, :public) } let!(:user) { create(:user) } let(:issue) do create(:issue, - assignee: @user, + assignees: [user], project: project) end @@ -22,10 +22,11 @@ describe 'Awards Emoji', feature: true do # The `heart_tip` emoji is not valid anymore so we need to skip validation issue.award_emoji.build(user: user, name: 'heart_tip').save!(validate: false) visit namespace_project_issue_path(project.namespace, project, issue) + wait_for_vue_resource end # Regression test: https://gitlab.com/gitlab-org/gitlab-ce/issues/29529 - it 'does not shows a 500 page' do + it 'does not shows a 500 page', js: true do expect(page).to have_text(issue.title) end end @@ -35,6 +36,7 @@ describe 'Awards Emoji', feature: true do before do visit namespace_project_issue_path(project.namespace, project, issue) + wait_for_vue_resource end it 'increments the thumbsdown emoji', js: true do diff --git a/spec/features/issues/award_spec.rb b/spec/features/issues/award_spec.rb index 401e1ea2b89..08e3f99e29f 100644 --- a/spec/features/issues/award_spec.rb +++ b/spec/features/issues/award_spec.rb @@ -6,9 +6,12 @@ feature 'Issue awards', js: true, feature: true do let(:issue) { create(:issue, project: project) } describe 'logged in' do + include WaitForVueResource + before do login_as(user) visit namespace_project_issue_path(project.namespace, project, issue) + wait_for_vue_resource end it 'adds award to issue' do @@ -38,8 +41,11 @@ feature 'Issue awards', js: true, feature: true do end describe 'logged out' do + include WaitForVueResource + before do visit namespace_project_issue_path(project.namespace, project, issue) + wait_for_vue_resource end it 'does not see award menu button' do diff --git a/spec/features/issues/bulk_assignment_labels_spec.rb b/spec/features/issues/bulk_assignment_labels_spec.rb index 2f59630b4fb..1de50d6d77e 100644 --- a/spec/features/issues/bulk_assignment_labels_spec.rb +++ b/spec/features/issues/bulk_assignment_labels_spec.rb @@ -1,8 +1,6 @@ require 'rails_helper' feature 'Issues > Labels bulk assignment', feature: true do - include WaitForAjax - let(:user) { create(:user) } let!(:project) { create(:project) } let!(:issue1) { create(:issue, project: project, title: "Issue 1") } diff --git a/spec/features/issues/create_branch_merge_request_spec.rb b/spec/features/issues/create_branch_merge_request_spec.rb new file mode 100644 index 00000000000..44c19275ae5 --- /dev/null +++ b/spec/features/issues/create_branch_merge_request_spec.rb @@ -0,0 +1,91 @@ +require 'rails_helper' + +feature 'Create Branch/Merge Request Dropdown on issue page', feature: true, js: true do + let(:user) { create(:user) } + let!(:project) { create(:project) } + let(:issue) { create(:issue, project: project, title: 'Cherry-Coloured Funk') } + + context 'for team members' do + before 
do + project.team << [user, :developer] + login_as(user) + end + + it 'allows creating a merge request from the issue page' do + visit namespace_project_issue_path(project.namespace, project, issue) + + select_dropdown_option('create-mr') + + wait_for_ajax + + expect(page).to have_content("created branch 1-cherry-coloured-funk") + expect(page).to have_content("mentioned in merge request !1") + + visit namespace_project_merge_request_path(project.namespace, project, MergeRequest.first) + + expect(page).to have_content('WIP: Resolve "Cherry-Coloured Funk"') + expect(current_path).to eq(namespace_project_merge_request_path(project.namespace, project, MergeRequest.first)) + end + + it 'allows creating a branch from the issue page' do + visit namespace_project_issue_path(project.namespace, project, issue) + + select_dropdown_option('create-branch') + + wait_for_ajax + + expect(page).to have_selector('.dropdown-toggle-text ', text: '1-cherry-coloured-funk') + expect(current_path).to eq namespace_project_tree_path(project.namespace, project, '1-cherry-coloured-funk') + end + + context "when there is a referenced merge request" do + let!(:note) do + create(:note, :on_issue, :system, project: project, noteable: issue, + note: "mentioned in #{referenced_mr.to_reference}") + end + + let(:referenced_mr) do + create(:merge_request, :simple, source_project: project, target_project: project, + description: "Fixes #{issue.to_reference}", author: user) + end + + before do + referenced_mr.cache_merge_request_closes_issues!(user) + + visit namespace_project_issue_path(project.namespace, project, issue) + end + + it 'disables the create branch button' do + expect(page).to have_css('.create-mr-dropdown-wrap .unavailable:not(.hide)') + expect(page).to have_css('.create-mr-dropdown-wrap .available.hide', visible: false) + expect(page).to have_content /1 Related Merge Request/ + end + end + + context 'when issue is confidential' do + it 'disables the create branch button' do + issue = create(:issue, :confidential, project: project) + + visit namespace_project_issue_path(project.namespace, project, issue) + + expect(page).not_to have_css('.create-mr-dropdown-wrap') + end + end + end + + context 'for visitors' do + before do + visit namespace_project_issue_path(project.namespace, project, issue) + end + + it 'shows no buttons' do + expect(page).not_to have_selector('.create-mr-dropdown-wrap') + end + end + + def select_dropdown_option(option) + find('.create-mr-dropdown-wrap .dropdown-toggle').click + find("li[data-value='#{option}']").click + find('.js-create-merge-request').click + end +end diff --git a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb index 572bca3de21..24e2419b5ce 100644 --- a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb +++ b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb @@ -4,7 +4,7 @@ feature 'Resolving all open discussions in a merge request from an issue', featu let(:user) { create(:user) } let(:project) { create(:project) } let(:merge_request) { create(:merge_request, source_project: project) } - let!(:discussion) { Discussion.for_diff_notes([create(:diff_note_on_merge_request, noteable: merge_request, project: project)]).first } + let!(:discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion } describe 'as a user with access to the project' do before do @@ -14,7 +14,7 @@ feature 
'Resolving all open discussions in a merge request from an issue', featu end it 'shows a button to resolve all discussions by creating a new issue' do - within('li#resolve-count-app') do + within('#resolve-count-app') do expect(page).to have_link "Resolve all discussions in new issue", href: new_namespace_project_issue_path(project.namespace, project, merge_request_to_resolve_discussions_of: merge_request.iid) end end @@ -49,7 +49,7 @@ feature 'Resolving all open discussions in a merge request from an issue', featu end it 'does not show a link to create a new issue' do - expect(page).not_to have_link 'open an issue to resolve them later' + expect(page).not_to have_link 'Create an issue to resolve them later' end end @@ -59,18 +59,18 @@ feature 'Resolving all open discussions in a merge request from an issue', featu end it 'shows a warning that the merge request contains unresolved discussions' do - expect(page).to have_content 'This merge request has unresolved discussions' + expect(page).to have_content 'There are unresolved discussions.' end it 'has a link to resolve all discussions by creating an issue' do page.within '.mr-widget-body' do - expect(page).to have_link 'open an issue to resolve them later', href: new_namespace_project_issue_path(project.namespace, project, merge_request_to_resolve_discussions_of: merge_request.iid) + expect(page).to have_link 'Create an issue to resolve them later', href: new_namespace_project_issue_path(project.namespace, project, merge_request_to_resolve_discussions_of: merge_request.iid) end end context 'creating an issue for discussions' do before do - page.click_link 'open an issue to resolve them later', href: new_namespace_project_issue_path(project.namespace, project, merge_request_to_resolve_discussions_of: merge_request.iid) + page.click_link 'Create an issue to resolve them later', href: new_namespace_project_issue_path(project.namespace, project, merge_request_to_resolve_discussions_of: merge_request.iid) end it_behaves_like 'creating an issue for a discussion' diff --git a/spec/features/issues/create_issue_for_single_discussion_in_merge_request.rb b/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb index 88e2cc60d79..3a5a79e03f4 100644 --- a/spec/features/issues/create_issue_for_single_discussion_in_merge_request.rb +++ b/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb @@ -4,7 +4,7 @@ feature 'Resolve an open discussion in a merge request by creating an issue', fe let(:user) { create(:user) } let(:project) { create(:project, only_allow_merge_if_all_discussions_are_resolved: true) } let(:merge_request) { create(:merge_request, source_project: project) } - let!(:discussion) { Discussion.for_diff_notes([create(:diff_note_on_merge_request, noteable: merge_request, project: project)]).first } + let!(:discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion } describe 'As a user with access to the project' do before do @@ -74,8 +74,8 @@ feature 'Resolve an open discussion in a merge request by creating an issue', fe it 'Shows a notice to ask someone else to resolve the discussions' do expect(page).to have_content("The discussion at #{merge_request.to_reference}"\ - "(discussion #{discussion.first_note.id}) will stay unresolved."\ - "Ask someone with permission to resolve it.") + " (discussion #{discussion.first_note.id}) will stay unresolved."\ + " Ask someone with permission to resolve it.") end end end diff --git 
a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb index 4dcc56a97d1..0b573d7cef4 100644 --- a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb +++ b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb @@ -2,7 +2,6 @@ require 'rails_helper' describe 'Dropdown assignee', :feature, :js do include FilteredSearchHelpers - include WaitForAjax let!(:project) { create(:empty_project) } let!(:user) { create(:user, name: 'administrator', username: 'root') } @@ -194,7 +193,7 @@ describe 'Dropdown assignee', :feature, :js do new_user = create(:user) project.team << [new_user, :master] - find('.filtered-search-input-container .clear-search').click + find('.filtered-search-box .clear-search').click filtered_search.set('assignee') filtered_search.send_keys(':') diff --git a/spec/features/issues/filtered_search/dropdown_author_spec.rb b/spec/features/issues/filtered_search/dropdown_author_spec.rb index 1772a120045..0579d6c80ab 100644 --- a/spec/features/issues/filtered_search/dropdown_author_spec.rb +++ b/spec/features/issues/filtered_search/dropdown_author_spec.rb @@ -2,7 +2,6 @@ require 'rails_helper' describe 'Dropdown author', js: true, feature: true do include FilteredSearchHelpers - include WaitForAjax let!(:project) { create(:empty_project) } let!(:user) { create(:user, name: 'administrator', username: 'root') } @@ -172,7 +171,7 @@ describe 'Dropdown author', js: true, feature: true do new_user = create(:user) project.team << [new_user, :master] - find('.filtered-search-input-container .clear-search').click + find('.filtered-search-box .clear-search').click filtered_search.set('author') send_keys_to_filtered_search(':') diff --git a/spec/features/issues/filtered_search/dropdown_hint_spec.rb b/spec/features/issues/filtered_search/dropdown_hint_spec.rb index bc8cbe30e66..b9a37cfcc22 100644 --- a/spec/features/issues/filtered_search/dropdown_hint_spec.rb +++ b/spec/features/issues/filtered_search/dropdown_hint_spec.rb @@ -1,18 +1,13 @@ require 'rails_helper' -describe 'Dropdown hint', js: true, feature: true do +describe 'Dropdown hint', :js, :feature do include FilteredSearchHelpers - include WaitForAjax let!(:project) { create(:empty_project) } let!(:user) { create(:user) } let(:filtered_search) { find('.filtered-search') } let(:js_dropdown_hint) { '#js-dropdown-hint' } - def dropdown_hint_size - page.all('#js-dropdown-hint .filter-dropdown .filter-dropdown-item').size - end - def click_hint(text) find('#js-dropdown-hint .filter-dropdown .filter-dropdown-item', text: text).click end @@ -46,14 +41,16 @@ describe 'Dropdown hint', js: true, feature: true do it 'does not filter `Press Enter or click to search`' do filtered_search.set('randomtext') - expect(page).to have_css(js_dropdown_hint, text: 'Press Enter or click to search', visible: false) - expect(dropdown_hint_size).to eq(0) + hint_dropdown = find(js_dropdown_hint) + + expect(hint_dropdown).to have_content('Press Enter or click to search') + expect(hint_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 0) end it 'filters with text' do filtered_search.set('a') - expect(dropdown_hint_size).to eq(3) + expect(find(js_dropdown_hint)).to have_selector('.filter-dropdown .filter-dropdown-item', count: 3) end end diff --git a/spec/features/issues/filtered_search/dropdown_label_spec.rb b/spec/features/issues/filtered_search/dropdown_label_spec.rb index b192064b693..abe5d61e38c 100644 --- 
a/spec/features/issues/filtered_search/dropdown_label_spec.rb +++ b/spec/features/issues/filtered_search/dropdown_label_spec.rb @@ -28,12 +28,8 @@ describe 'Dropdown label', js: true, feature: true do filter_dropdown.find('.filter-dropdown-item', text: text).click end - def dropdown_label_size - filter_dropdown.all('.filter-dropdown-item').size - end - def clear_search_field - find('.filtered-search-input-container .clear-search').click + find('.filtered-search-box .clear-search').click end before do @@ -81,7 +77,7 @@ describe 'Dropdown label', js: true, feature: true do filtered_search.set('label:') expect(filter_dropdown).to have_content(bug_label.title) - expect(dropdown_label_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1) end end @@ -97,7 +93,8 @@ describe 'Dropdown label', js: true, feature: true do expect(filter_dropdown.find('.filter-dropdown-item', text: bug_label.title)).to be_visible expect(filter_dropdown.find('.filter-dropdown-item', text: uppercase_label.title)).to be_visible - expect(dropdown_label_size).to eq(2) + + expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 2) clear_search_field init_label_search @@ -106,14 +103,14 @@ describe 'Dropdown label', js: true, feature: true do expect(filter_dropdown.find('.filter-dropdown-item', text: bug_label.title)).to be_visible expect(filter_dropdown.find('.filter-dropdown-item', text: uppercase_label.title)).to be_visible - expect(dropdown_label_size).to eq(2) + expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 2) end it 'filters by multiple words with or without symbol' do filtered_search.send_keys('Hig') expect(filter_dropdown.find('.filter-dropdown-item', text: two_words_label.title)).to be_visible - expect(dropdown_label_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1) clear_search_field init_label_search @@ -121,14 +118,14 @@ describe 'Dropdown label', js: true, feature: true do filtered_search.send_keys('~Hig') expect(filter_dropdown.find('.filter-dropdown-item', text: two_words_label.title)).to be_visible - expect(dropdown_label_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1) end it 'filters by multiple words containing single quotes with or without symbol' do filtered_search.send_keys('won\'t') expect(filter_dropdown.find('.filter-dropdown-item', text: wont_fix_single_label.title)).to be_visible - expect(dropdown_label_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1) clear_search_field init_label_search @@ -136,14 +133,14 @@ describe 'Dropdown label', js: true, feature: true do filtered_search.send_keys('~won\'t') expect(filter_dropdown.find('.filter-dropdown-item', text: wont_fix_single_label.title)).to be_visible - expect(dropdown_label_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1) end it 'filters by multiple words containing double quotes with or without symbol' do filtered_search.send_keys('won"t') expect(filter_dropdown.find('.filter-dropdown-item', text: wont_fix_label.title)).to be_visible - expect(dropdown_label_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1) clear_search_field init_label_search @@ -151,14 +148,14 @@ describe 'Dropdown label', js: true, feature: true do filtered_search.send_keys('~won"t') expect(filter_dropdown.find('.filter-dropdown-item', text: wont_fix_label.title)).to be_visible - 
expect(dropdown_label_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1) end it 'filters by special characters with or without symbol' do filtered_search.send_keys('^+') expect(filter_dropdown.find('.filter-dropdown-item', text: special_label.title)).to be_visible - expect(dropdown_label_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1) clear_search_field init_label_search @@ -166,7 +163,7 @@ describe 'Dropdown label', js: true, feature: true do filtered_search.send_keys('~^+') expect(filter_dropdown.find('.filter-dropdown-item', text: special_label.title)).to be_visible - expect(dropdown_label_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1) end end @@ -280,13 +277,13 @@ describe 'Dropdown label', js: true, feature: true do create(:label, project: project, title: 'bug-label') init_label_search - expect(dropdown_label_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1) create(:label, project: project) clear_search_field init_label_search - expect(dropdown_label_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1) end end end diff --git a/spec/features/issues/filtered_search/dropdown_milestone_spec.rb b/spec/features/issues/filtered_search/dropdown_milestone_spec.rb index ce96a420699..448259057b0 100644 --- a/spec/features/issues/filtered_search/dropdown_milestone_spec.rb +++ b/spec/features/issues/filtered_search/dropdown_milestone_spec.rb @@ -65,7 +65,7 @@ describe 'Dropdown milestone', :feature, :js do it 'should load all the milestones when opened' do filtered_search.set('milestone:') - expect(dropdown_milestone_size).to be > 0 + expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 6) end end @@ -84,37 +84,37 @@ describe 'Dropdown milestone', :feature, :js do it 'filters by name' do filtered_search.send_keys('v1') - expect(dropdown_milestone_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1) end it 'filters by case insensitive name' do filtered_search.send_keys('V1') - expect(dropdown_milestone_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1) end it 'filters by name with symbol' do filtered_search.send_keys('%v1') - expect(dropdown_milestone_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1) end it 'filters by case insensitive name with symbol' do filtered_search.send_keys('%V1') - expect(dropdown_milestone_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1) end it 'filters by special characters' do filtered_search.send_keys('(+') - expect(dropdown_milestone_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1) end it 'filters by special characters with symbol' do filtered_search.send_keys('%(+') - expect(dropdown_milestone_size).to eq(1) + expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1) end end @@ -252,7 +252,7 @@ describe 'Dropdown milestone', :feature, :js do expect(initial_size).to be > 0 create(:milestone, project: project) - find('.filtered-search-input-container .clear-search').click + find('.filtered-search-box .clear-search').click filtered_search.set('milestone:') expect(dropdown_milestone_size).to 
eq(initial_size) diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb index f463312bf57..a8f4e2d7e10 100644 --- a/spec/features/issues/filtered_search/filter_issues_spec.rb +++ b/spec/features/issues/filtered_search/filter_issues_spec.rb @@ -1,18 +1,18 @@ require 'spec_helper' describe 'Filter issues', js: true, feature: true do + include Devise::Test::IntegrationHelpers include FilteredSearchHelpers - include WaitForAjax let!(:group) { create(:group) } let!(:project) { create(:project, group: group) } - let!(:user) { create(:user) } - let!(:user2) { create(:user) } + let!(:user) { create(:user, username: 'joe') } + let!(:user2) { create(:user, username: 'jane') } let!(:label) { create(:label, project: project) } let!(:wontfix) { create(:label, project: project, title: "Won't fix") } let!(:bug_label) { create(:label, project: project, title: 'bug') } - let!(:caps_sensitive_label) { create(:label, project: project, title: 'CAPS_sensitive') } + let!(:caps_sensitive_label) { create(:label, project: project, title: 'CaPs') } let!(:milestone) { create(:milestone, title: "8", project: project, start_date: 2.days.ago) } let!(:multiple_words_label) { create(:label, project: project, title: "Two words") } @@ -42,23 +42,24 @@ describe 'Filter issues', js: true, feature: true do project.team << [user2, :master] group.add_developer(user) group.add_developer(user2) - login_as(user) - create(:issue, project: project) - create(:issue, title: "Bug report 1", project: project) - create(:issue, title: "Bug report 2", project: project) - create(:issue, title: "issue with 'single quotes'", project: project) - create(:issue, title: "issue with \"double quotes\"", project: project) - create(:issue, title: "issue with !@\#{$%^&*()-+", project: project) - create(:issue, title: "issue by assignee", project: project, milestone: milestone, author: user, assignee: user) - create(:issue, title: "issue by assignee with searchTerm", project: project, milestone: milestone, author: user, assignee: user) + sign_in(user) + + create(:issue, project: project) + create(:issue, project: project, title: "Bug report 1") + create(:issue, project: project, title: "Bug report 2") + create(:issue, project: project, title: "issue with 'single quotes'") + create(:issue, project: project, title: "issue with \"double quotes\"") + create(:issue, project: project, title: "issue with !@\#{$%^&*()-+") + create(:issue, project: project, title: "issue by assignee", milestone: milestone, author: user, assignees: [user]) + create(:issue, project: project, title: "issue by assignee with searchTerm", milestone: milestone, author: user, assignees: [user]) issue = create(:issue, title: "Bug 2", project: project, milestone: milestone, author: user, - assignee: user) + assignees: [user]) issue.labels << bug_label issue_with_caps_label = create(:issue, @@ -66,15 +67,15 @@ describe 'Filter issues', js: true, feature: true do project: project, milestone: milestone, author: user, - assignee: user) + assignees: [user]) issue_with_caps_label.labels << caps_sensitive_label issue_with_everything = create(:issue, - title: "Bug report with everything you thought was possible", + title: "Bug report foo was possible", project: project, milestone: milestone, author: user, - assignee: user) + assignees: [user]) issue_with_everything.labels << bug_label issue_with_everything.labels << caps_sensitive_label @@ -687,10 +688,10 @@ describe 'Filter issues', js: true, feature: true do end it 
'filters issues by searched text, author, more text, assignee and even more text' do - input_filtered_search("bug author:@#{user.username} report assignee:@#{user.username} with") + input_filtered_search("bug author:@#{user.username} report assignee:@#{user.username} foo") expect_issues_list_count(1) - expect_filtered_search_input('bug report with') + expect_filtered_search_input('bug report foo') end it 'filters issues by searched text, author, assignee and label' do @@ -701,10 +702,10 @@ describe 'Filter issues', js: true, feature: true do end it 'filters issues by searched text, author, text, assignee, text, label and text' do - input_filtered_search("bug author:@#{user.username} report assignee:@#{user.username} with label:~#{bug_label.title} everything") + input_filtered_search("bug author:@#{user.username} assignee:@#{user.username} report label:~#{bug_label.title} foo") expect_issues_list_count(1) - expect_filtered_search_input('bug report with everything') + expect_filtered_search_input('bug report foo') end it 'filters issues by searched text, author, assignee, label and milestone' do @@ -715,10 +716,10 @@ describe 'Filter issues', js: true, feature: true do end it 'filters issues by searched text, author, text, assignee, text, label, text, milestone and text' do - input_filtered_search("bug author:@#{user.username} report assignee:@#{user.username} with label:~#{bug_label.title} everything milestone:%#{milestone.title} you") + input_filtered_search("bug author:@#{user.username} assignee:@#{user.username} report label:~#{bug_label.title} milestone:%#{milestone.title} foo") expect_issues_list_count(1) - expect_filtered_search_input('bug report with everything you') + expect_filtered_search_input('bug report foo') end it 'filters issues by searched text, author, assignee, multiple labels and milestone' do @@ -729,10 +730,10 @@ describe 'Filter issues', js: true, feature: true do end it 'filters issues by searched text, author, text, assignee, text, label1, text, label2, text, milestone and text' do - input_filtered_search("bug author:@#{user.username} report assignee:@#{user.username} with label:~#{bug_label.title} everything label:~#{caps_sensitive_label.title} you milestone:%#{milestone.title} thought") + input_filtered_search("bug author:@#{user.username} assignee:@#{user.username} report label:~#{bug_label.title} label:~#{caps_sensitive_label.title} milestone:%#{milestone.title} foo") expect_issues_list_count(1) - expect_filtered_search_input('bug report with everything you thought') + expect_filtered_search_input('bug report foo') end end @@ -756,10 +757,10 @@ describe 'Filter issues', js: true, feature: true do expect_issues_list_count(2) - sort_toggle = find('.filtered-search-container .dropdown-toggle') + sort_toggle = find('.filtered-search-wrapper .dropdown-toggle') sort_toggle.click - find('.filtered-search-container .dropdown-menu li a', text: 'Oldest updated').click + find('.filtered-search-wrapper .dropdown-menu li a', text: 'Oldest updated').click wait_for_ajax expect(find('.issues-list .issue:first-of-type .issue-title-text a')).to have_content(old_issue.title) diff --git a/spec/features/issues/filtered_search/recent_searches_spec.rb b/spec/features/issues/filtered_search/recent_searches_spec.rb new file mode 100644 index 00000000000..09f228bcf49 --- /dev/null +++ b/spec/features/issues/filtered_search/recent_searches_spec.rb @@ -0,0 +1,109 @@ +require 'spec_helper' + +describe 'Recent searches', js: true, feature: true do + include FilteredSearchHelpers + + 
let(:project_1) { create(:empty_project, :public) } + let(:project_2) { create(:empty_project, :public) } + let(:project_1_local_storage_key) { "#{project_1.full_path}-issue-recent-searches" } + + before do + Capybara.ignore_hidden_elements = false + create(:issue, project: project_1) + create(:issue, project: project_2) + + # Visit any fast-loading page so we can clear local storage without a DOM exception + visit '/404' + remove_recent_searches + end + + after do + Capybara.ignore_hidden_elements = true + end + + it 'searching adds to recent searches' do + visit namespace_project_issues_path(project_1.namespace, project_1) + + input_filtered_search('foo', submit: true) + input_filtered_search('bar', submit: true) + + items = all('.filtered-search-history-dropdown-item', visible: false) + + expect(items.count).to eq(2) + expect(items[0].text).to eq('bar') + expect(items[1].text).to eq('foo') + end + + it 'visiting URL with search params adds to recent searches' do + visit namespace_project_issues_path(project_1.namespace, project_1, label_name: 'foo', search: 'bar') + visit namespace_project_issues_path(project_1.namespace, project_1, label_name: 'qux', search: 'garply') + + items = all('.filtered-search-history-dropdown-item', visible: false) + + expect(items.count).to eq(2) + expect(items[0].text).to eq('label:~qux garply') + expect(items[1].text).to eq('label:~foo bar') + end + + it 'saved recent searches are restored last on the list' do + set_recent_searches(project_1_local_storage_key, '["saved1", "saved2"]') + + visit namespace_project_issues_path(project_1.namespace, project_1, search: 'foo') + + items = all('.filtered-search-history-dropdown-item', visible: false) + + expect(items.count).to eq(3) + expect(items[0].text).to eq('foo') + expect(items[1].text).to eq('saved1') + expect(items[2].text).to eq('saved2') + end + + it 'searches are scoped to projects' do + visit namespace_project_issues_path(project_1.namespace, project_1) + + input_filtered_search('foo', submit: true) + input_filtered_search('bar', submit: true) + + visit namespace_project_issues_path(project_2.namespace, project_2) + + input_filtered_search('more', submit: true) + input_filtered_search('things', submit: true) + + items = all('.filtered-search-history-dropdown-item', visible: false) + + expect(items.count).to eq(2) + expect(items[0].text).to eq('things') + expect(items[1].text).to eq('more') + end + + it 'clicking item fills search input' do + set_recent_searches(project_1_local_storage_key, '["foo", "bar"]') + visit namespace_project_issues_path(project_1.namespace, project_1) + + all('.filtered-search-history-dropdown-item', visible: false)[0].trigger('click') + wait_for_filtered_search('foo') + + expect(find('.filtered-search').value.strip).to eq('foo') + end + + it 'clear recent searches button, clears recent searches' do + set_recent_searches(project_1_local_storage_key, '["foo"]') + visit namespace_project_issues_path(project_1.namespace, project_1) + + items_before = all('.filtered-search-history-dropdown-item', visible: false) + + expect(items_before.count).to eq(1) + + find('.filtered-search-history-clear-button', visible: false).trigger('click') + items_after = all('.filtered-search-history-dropdown-item', visible: false) + + expect(items_after.count).to eq(0) + end + + it 'shows flash error when failed to parse saved history' do + set_recent_searches(project_1_local_storage_key, 'fail') + visit namespace_project_issues_path(project_1.namespace, project_1) + + expect(find('.flash-alert')).to 
have_text('An error occured while parsing recent searches') + end +end diff --git a/spec/features/issues/filtered_search/search_bar_spec.rb b/spec/features/issues/filtered_search/search_bar_spec.rb index 59244d65eec..3ea95aed0a6 100644 --- a/spec/features/issues/filtered_search/search_bar_spec.rb +++ b/spec/features/issues/filtered_search/search_bar_spec.rb @@ -2,7 +2,6 @@ require 'rails_helper' describe 'Search bar', js: true, feature: true do include FilteredSearchHelpers - include WaitForAjax let!(:project) { create(:empty_project) } let!(:user) { create(:user) } @@ -26,7 +25,7 @@ describe 'Search bar', js: true, feature: true do filtered_search.native.send_keys(:down) page.within '#js-dropdown-hint' do - expect(page).to have_selector('.dropdown-active') + expect(page).to have_selector('.droplab-item-active') end end @@ -44,7 +43,7 @@ describe 'Search bar', js: true, feature: true do filtered_search.set(search_text) expect(filtered_search.value).to eq(search_text) - find('.filtered-search-input-container .clear-search').click + find('.filtered-search-box .clear-search').click expect(filtered_search.value).to eq('') end @@ -55,7 +54,7 @@ describe 'Search bar', js: true, feature: true do it 'hides after clicked' do filtered_search.set('a') - find('.filtered-search-input-container .clear-search').click + find('.filtered-search-box .clear-search').click expect(page).to have_css('.clear-search', visible: false) end @@ -79,28 +78,30 @@ describe 'Search bar', js: true, feature: true do filtered_search.set('author') - expect(page.all('#js-dropdown-hint .filter-dropdown .filter-dropdown-item').size).to eq(1) + expect(find('#js-dropdown-hint')).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1) - find('.filtered-search-input-container .clear-search').click + find('.filtered-search-box .clear-search').click filtered_search.click - expect(page.all('#js-dropdown-hint .filter-dropdown .filter-dropdown-item').size).to eq(original_size) + expect(find('#js-dropdown-hint')).to have_selector('.filter-dropdown .filter-dropdown-item', count: original_size) end it 'resets the dropdown filters' do + filtered_search.click + + hint_offset = get_left_style(find('#js-dropdown-hint')['style']) + filtered_search.set('a') - hint_style = page.find('#js-dropdown-hint')['style'] - hint_offset = get_left_style(hint_style) filtered_search.set('author:') - expect(page.all('#js-dropdown-hint .filter-dropdown .filter-dropdown-item').size).to eq(0) + find('#js-dropdown-hint', visible: false) - find('.filtered-search-input-container .clear-search').click + find('.filtered-search-box .clear-search').click filtered_search.click - expect(page.all('#js-dropdown-hint .filter-dropdown .filter-dropdown-item').size).to be > 0 - expect(get_left_style(page.find('#js-dropdown-hint')['style'])).to eq(hint_offset) + expect(find('#js-dropdown-hint')).to have_selector('.filter-dropdown .filter-dropdown-item', count: 4) + expect(get_left_style(find('#js-dropdown-hint')['style'])).to eq(hint_offset) end end end diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb index 755992069ff..5c0907e26df 100644 --- a/spec/features/issues/form_spec.rb +++ b/spec/features/issues/form_spec.rb @@ -1,7 +1,9 @@ require 'rails_helper' -describe 'New/edit issue', feature: true, js: true do +describe 'New/edit issue', :feature, :js do include GitlabRoutingHelper + include ActionView::Helpers::JavaScriptHelper + include WaitForAjax let!(:project) { create(:project) } let!(:user) { create(:user)} @@ -9,7 +11,7 @@ 
describe 'New/edit issue', feature: true, js: true do let!(:milestone) { create(:milestone, project: project) } let!(:label) { create(:label, project: project) } let!(:label2) { create(:label, project: project) } - let!(:issue) { create(:issue, project: project, assignee: user, milestone: milestone) } + let!(:issue) { create(:issue, project: project, assignees: [user], milestone: milestone) } before do project.team << [user, :master] @@ -22,23 +24,67 @@ describe 'New/edit issue', feature: true, js: true do visit new_namespace_project_issue_path(project.namespace, project) end + describe 'single assignee' do + before do + click_button 'Unassigned' + + wait_for_ajax + end + + it 'unselects other assignees when unassigned is selected' do + page.within '.dropdown-menu-user' do + click_link user2.name + end + + click_button user2.name + + page.within '.dropdown-menu-user' do + click_link 'Unassigned' + end + + expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to match('0') + end + + it 'toggles assign to me when current user is selected and unselected' do + page.within '.dropdown-menu-user' do + click_link user.name + end + + expect(find('a', text: 'Assign to me', visible: false)).not_to be_visible + + click_button user.name + + page.within('.dropdown-menu-user') do + click_link user.name + end + + expect(page.find('.dropdown-menu-user', visible: false)).not_to be_visible + end + end + it 'allows user to create new issue' do fill_in 'issue_title', with: 'title' fill_in 'issue_description', with: 'title' expect(find('a', text: 'Assign to me')).to be_visible - click_button 'Assignee' + click_button 'Unassigned' + + wait_for_ajax + page.within '.dropdown-menu-user' do click_link user2.name end - expect(find('input[name="issue[assignee_id]"]', visible: false).value).to match(user2.id.to_s) + expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to match(user2.id.to_s) page.within '.js-assignee-search' do expect(page).to have_content user2.name end expect(find('a', text: 'Assign to me')).to be_visible click_link 'Assign to me' - expect(find('input[name="issue[assignee_id]"]', visible: false).value).to match(user.id.to_s) + assignee_ids = page.all('input[name="issue[assignee_ids][]"]', visible: false) + + expect(assignee_ids[0].value).to match(user.id.to_s) + page.within '.js-assignee-search' do expect(page).to have_content user.name end @@ -68,7 +114,7 @@ describe 'New/edit issue', feature: true, js: true do page.within '.issuable-sidebar' do page.within '.assignee' do - expect(page).to have_content user.name + expect(page).to have_content "Assignee" end page.within '.milestone' do @@ -105,6 +151,25 @@ describe 'New/edit issue', feature: true, js: true do expect(find('.js-label-select')).to have_content('Labels') end + + it 'correctly updates the selected user when changing assignee' do + click_button 'Unassigned' + + wait_for_ajax + + page.within '.dropdown-menu-user' do + click_link user.name + end + + expect(find('.js-assignee-search')).to have_content(user.name) + click_button user.name + + page.within '.dropdown-menu-user' do + click_link user2.name + end + + expect(find('.js-assignee-search')).to have_content(user2.name) + end end context 'edit issue' do @@ -113,7 +178,7 @@ describe 'New/edit issue', feature: true, js: true do end it 'allows user to update issue' do - expect(find('input[name="issue[assignee_id]"]', visible: false).value).to match(user.id.to_s) + expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to 
match(user.id.to_s) expect(find('input[name="issue[milestone_id]"]', visible: false).value).to match(milestone.id.to_s) expect(find('a', text: 'Assign to me', visible: false)).not_to be_visible @@ -154,4 +219,14 @@ describe 'New/edit issue', feature: true, js: true do end end end + + def before_for_selector(selector) + js = <<-JS.strip_heredoc + (function(selector) { + var el = document.querySelector(selector); + return window.getComputedStyle(el, '::before').getPropertyValue('content'); + })("#{escape_javascript(selector)}") + JS + page.evaluate_script(js) + end end diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb index 7135565294b..ad29911248f 100644 --- a/spec/features/issues/gfm_autocomplete_spec.rb +++ b/spec/features/issues/gfm_autocomplete_spec.rb @@ -1,7 +1,6 @@ require 'rails_helper' feature 'GFM autocomplete', feature: true, js: true do - include WaitForAjax let(:user) { create(:user, name: '💃speciąl someone💃', username: 'someone.special') } let(:project) { create(:project) } let(:label) { create(:label, project: project, title: 'special+') } @@ -46,6 +45,33 @@ feature 'GFM autocomplete', feature: true, js: true do expect(find('#at-view-58')).not_to have_selector('.cur:first-of-type') end + it 'does not open autocomplete menu when ":" is prefixed by a number and letters' do + note = find('#note_note') + + # Number. + page.within '.timeline-content-form' do + note.native.send_keys('7:') + end + + expect(page).not_to have_selector('.atwho-view') + + # ASCII letter. + page.within '.timeline-content-form' do + note.set('') + note.native.send_keys('w:') + end + + expect(page).not_to have_selector('.atwho-view') + + # Non-ASCII letter. + page.within '.timeline-content-form' do + note.set('') + note.native.send_keys('Ё:') + end + + expect(page).not_to have_selector('.atwho-view') + end + it 'selects the first item for assignee dropdowns' do page.within '.timeline-content-form' do find('#note_note').native.send_keys('') diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb index 7b9d4534ada..0de0f93089a 100644 --- a/spec/features/issues/issue_sidebar_spec.rb +++ b/spec/features/issues/issue_sidebar_spec.rb @@ -1,10 +1,10 @@ require 'rails_helper' feature 'Issue Sidebar', feature: true do - include WaitForAjax include MobileHelpers - let(:project) { create(:project, :public) } + let(:group) { create(:group, :nested) } + let(:project) { create(:project, :public, namespace: group) } let(:issue) { create(:issue, project: project) } let!(:user) { create(:user)} let!(:label) { create(:label, project: project, title: 'bug') } @@ -42,6 +42,21 @@ feature 'Issue Sidebar', feature: true do expect(page).to have_content(user2.name) end end + + it 'assigns yourself' do + find('.block.assignee .dropdown-menu-toggle').click + + click_button 'assign yourself' + + wait_for_ajax + + find('.block.assignee .edit-link').click + + page.within '.dropdown-menu-user' do + expect(page.find('.dropdown-header')).to be_visible + expect(page.find('.dropdown-menu-user-link.is-active')).to have_content(user.name) + end + end end context 'as a allowed user' do @@ -56,10 +71,12 @@ feature 'Issue Sidebar', feature: true do # Resize the window resize_screen_sm # Make sure the sidebar is collapsed + find(sidebar_selector) expect(page).to have_css(sidebar_selector) # Once is collapsed let's open the sidebard and reload open_issue_sidebar refresh + find(sidebar_selector) expect(page).to have_css(sidebar_selector) # Restore 
the window size as it was including the sidebar restore_window_size @@ -120,6 +137,20 @@ feature 'Issue Sidebar', feature: true do end end + context 'as a allowed mobile user', js: true do + before do + project.team << [user, :developer] + resize_screen_xs + visit_issue(project, issue) + end + + context 'mobile sidebar' do + it 'collapses the sidebar for small screens' do + expect(page).not_to have_css('aside.right-sidebar.right-sidebar-collapsed') + end + end + end + context 'as a guest' do before do project.team << [user, :guest] @@ -136,9 +167,7 @@ feature 'Issue Sidebar', feature: true do end def open_issue_sidebar - page.within('aside.right-sidebar.right-sidebar-collapsed') do - find('.js-sidebar-toggle').click - sleep 1 - end + find('aside.right-sidebar.right-sidebar-collapsed .js-sidebar-toggle').trigger('click') + find('aside.right-sidebar.right-sidebar-expanded') end end diff --git a/spec/features/issues/move_spec.rb b/spec/features/issues/move_spec.rb index f89b4db9e62..6c09903a2f6 100644 --- a/spec/features/issues/move_spec.rb +++ b/spec/features/issues/move_spec.rb @@ -37,8 +37,8 @@ feature 'issue move to another project' do edit_issue(issue) end - scenario 'moving issue to another project' do - first('#move_to_project_id', visible: false).set(new_project.id) + scenario 'moving issue to another project', js: true do + find('#move_to_project_id', visible: false).set(new_project.id) click_button('Save changes') expect(current_url).to include project_path(new_project) diff --git a/spec/features/issues/new_branch_button_spec.rb b/spec/features/issues/new_branch_button_spec.rb deleted file mode 100644 index c0ab42c6822..00000000000 --- a/spec/features/issues/new_branch_button_spec.rb +++ /dev/null @@ -1,62 +0,0 @@ -require 'rails_helper' - -feature 'Start new branch from an issue', feature: true, js: true do - let!(:project) { create(:project) } - let!(:issue) { create(:issue, project: project) } - let!(:user) { create(:user)} - - context "for team members" do - before do - project.team << [user, :master] - login_as(user) - end - - it 'shows the new branch button' do - visit namespace_project_issue_path(project.namespace, project, issue) - - expect(page).to have_css('#new-branch .available') - end - - context "when there is a referenced merge request" do - let!(:note) do - create(:note, :on_issue, :system, project: project, noteable: issue, - note: "mentioned in #{referenced_mr.to_reference}") - end - - let(:referenced_mr) do - create(:merge_request, :simple, source_project: project, target_project: project, - description: "Fixes #{issue.to_reference}", author: user) - end - - before do - referenced_mr.cache_merge_request_closes_issues!(user) - - visit namespace_project_issue_path(project.namespace, project, issue) - end - - it "hides the new branch button" do - expect(page).to have_css('#new-branch .unavailable') - expect(page).not_to have_css('#new-branch .available') - expect(page).to have_content /1 Related Merge Request/ - end - end - - context 'when issue is confidential' do - it 'hides the new branch button' do - issue = create(:issue, :confidential, project: project) - - visit namespace_project_issue_path(project.namespace, project, issue) - - expect(page).not_to have_css('#new-branch') - end - end - end - - context 'for visitors' do - it 'shows no buttons' do - visit namespace_project_issue_path(project.namespace, project, issue) - - expect(page).not_to have_css('#new-branch') - end - end -end diff --git a/spec/features/issues/note_polling_spec.rb 
b/spec/features/issues/note_polling_spec.rb index f5cfe2d666e..80f57906506 100644 --- a/spec/features/issues/note_polling_spec.rb +++ b/spec/features/issues/note_polling_spec.rb @@ -1,19 +1,131 @@ require 'spec_helper' -feature 'Issue notes polling' do - let!(:project) { create(:project, :public) } - let!(:issue) { create(:issue, project: project) } +feature 'Issue notes polling', :feature, :js do + let(:project) { create(:empty_project, :public) } + let(:issue) { create(:issue, project: project) } - background do - visit namespace_project_issue_path(project.namespace, project, issue) + describe 'creates' do + before do + visit namespace_project_issue_path(project.namespace, project, issue) + end + + it 'displays the new comment' do + note = create(:note, noteable: issue, project: project, note: 'Looks good!') + page.execute_script('notes.refresh();') + + expect(page).to have_selector("#note_#{note.id}", text: 'Looks good!') + end end - scenario 'Another user adds a comment to an issue', js: true do - note = create(:note, noteable: issue, project: project, - note: 'Looks good!') + describe 'updates' do + context 'when from own user' do + let(:user) { create(:user) } + let(:note_text) { "Hello World" } + let(:updated_text) { "Bye World" } + let!(:existing_note) { create(:note, noteable: issue, project: project, author: user, note: note_text) } - page.execute_script('notes.refresh();') + before do + login_as(user) + visit namespace_project_issue_path(project.namespace, project, issue) + end + + it 'has .original-note-content to compare against' do + expect(page).to have_selector("#note_#{existing_note.id}", text: note_text) + expect(page).to have_selector("#note_#{existing_note.id} .original-note-content", count: 1, visible: false) + + update_note(existing_note, updated_text) + + expect(page).to have_selector("#note_#{existing_note.id}", text: updated_text) + expect(page).to have_selector("#note_#{existing_note.id} .original-note-content", count: 1, visible: false) + end + + it 'displays the updated content' do + expect(page).to have_selector("#note_#{existing_note.id}", text: note_text) + + update_note(existing_note, updated_text) + + expect(page).to have_selector("#note_#{existing_note.id}", text: updated_text) + end + + it 'when editing but have not changed anything, and an update comes in, show the updated content in the textarea' do + find("#note_#{existing_note.id} .js-note-edit").click + + expect(page).to have_field("note[note]", with: note_text) + + update_note(existing_note, updated_text) + + expect(page).to have_field("note[note]", with: updated_text) + end + + it 'when editing but you changed some things, and an update comes in, show a warning' do + find("#note_#{existing_note.id} .js-note-edit").click + + expect(page).to have_field("note[note]", with: note_text) + + find("#note_#{existing_note.id} .js-note-text").set('something random') + + update_note(existing_note, updated_text) - expect(page).to have_selector("#note_#{note.id}", text: 'Looks good!') + expect(page).to have_selector(".alert") + end + + it 'when editing but you changed some things, an update comes in, and you press cancel, show the updated content' do + find("#note_#{existing_note.id} .js-note-edit").click + + expect(page).to have_field("note[note]", with: note_text) + + find("#note_#{existing_note.id} .js-note-text").set('something random') + + update_note(existing_note, updated_text) + + find("#note_#{existing_note.id} .note-edit-cancel").click + + expect(page).to have_selector("#note_#{existing_note.id}", 
text: updated_text) + end + end + + context 'when from another user' do + let(:user1) { create(:user) } + let(:user2) { create(:user) } + let(:note_text) { "Hello World" } + let(:updated_text) { "Bye World" } + let!(:existing_note) { create(:note, noteable: issue, project: project, author: user1, note: note_text) } + + before do + login_as(user2) + visit namespace_project_issue_path(project.namespace, project, issue) + end + + it 'has .original-note-content to compare against' do + expect(page).to have_selector("#note_#{existing_note.id}", text: note_text) + expect(page).to have_selector("#note_#{existing_note.id} .original-note-content", count: 1, visible: false) + + update_note(existing_note, updated_text) + + expect(page).to have_selector("#note_#{existing_note.id}", text: updated_text) + expect(page).to have_selector("#note_#{existing_note.id} .original-note-content", count: 1, visible: false) + end + end + + context 'system notes' do + let(:user) { create(:user) } + let(:note_text) { "Some system note" } + let!(:system_note) { create(:system_note, noteable: issue, project: project, author: user, note: note_text) } + + before do + login_as(user) + visit namespace_project_issue_path(project.namespace, project, issue) + end + + it 'has .original-note-content to compare against' do + expect(page).to have_selector("#note_#{system_note.id}", text: note_text) + expect(page).to have_selector("#note_#{system_note.id} .original-note-content", count: 1, visible: false) + end + end + end + + def update_note(note, new_text) + note.update(note: new_text) + page.execute_script('notes.refresh();') end end diff --git a/spec/features/issues/notes_on_issues_spec.rb b/spec/features/issues/notes_on_issues_spec.rb new file mode 100644 index 00000000000..a4035324d2b --- /dev/null +++ b/spec/features/issues/notes_on_issues_spec.rb @@ -0,0 +1,77 @@ +require 'spec_helper' + +describe 'Create notes on issues', :js, :feature do + let(:user) { create(:user) } + + shared_examples 'notes with reference' do + let(:issue) { create(:issue, project: project) } + let(:note_text) { "Check #{mention.to_reference}" } + + before do + project.team << [user, :developer] + login_as(user) + visit namespace_project_issue_path(project.namespace, project, issue) + + fill_in 'note[note]', with: note_text + click_button 'Comment' + + wait_for_ajax + end + + it 'creates a note with reference and cross references the issue' do + page.within('div#notes li.note div.note-text') do + expect(page).to have_content(note_text) + expect(page.find('a')).to have_content(mention.to_reference) + end + + find('div#notes li.note div.note-text a').click + + page.within('div#notes li.note .system-note-message') do + expect(page).to have_content('mentioned in issue') + expect(page.find('a')).to have_content(issue.to_reference) + end + end + end + + context 'mentioning issue on a private project' do + it_behaves_like 'notes with reference' do + let(:project) { create(:project, :private) } + let(:mention) { create(:issue, project: project) } + end + end + + context 'mentioning issue on an internal project' do + it_behaves_like 'notes with reference' do + let(:project) { create(:project, :internal) } + let(:mention) { create(:issue, project: project) } + end + end + + context 'mentioning issue on a public project' do + it_behaves_like 'notes with reference' do + let(:project) { create(:project, :public) } + let(:mention) { create(:issue, project: project) } + end + end + + context 'mentioning merge request on a private project' do + it_behaves_like 'notes 
with reference' do + let(:project) { create(:project, :private) } + let(:mention) { create(:merge_request, source_project: project) } + end + end + + context 'mentioning merge request on an internal project' do + it_behaves_like 'notes with reference' do + let(:project) { create(:project, :internal) } + let(:mention) { create(:merge_request, source_project: project) } + end + end + + context 'mentioning merge request on a public project' do + it_behaves_like 'notes with reference' do + let(:project) { create(:project, :public) } + let(:mention) { create(:merge_request, source_project: project) } + end + end +end diff --git a/spec/features/issues/spam_issues_spec.rb b/spec/features/issues/spam_issues_spec.rb index 4bc9b49f889..6001476d0ca 100644 --- a/spec/features/issues/spam_issues_spec.rb +++ b/spec/features/issues/spam_issues_spec.rb @@ -1,6 +1,6 @@ require 'rails_helper' -describe 'New issue', feature: true do +describe 'New issue', feature: true, js: true do include StubENV let(:project) { create(:project, :public) } diff --git a/spec/features/issues/update_issues_spec.rb b/spec/features/issues/update_issues_spec.rb index ae5da3877a8..b250fa2ed3c 100644 --- a/spec/features/issues/update_issues_spec.rb +++ b/spec/features/issues/update_issues_spec.rb @@ -1,8 +1,6 @@ require 'rails_helper' feature 'Multiple issue updating from issues#index', feature: true do - include WaitForAjax - let!(:project) { create(:project) } let!(:issue) { create(:issue, project: project) } let!(:user) { create(:user)} @@ -101,7 +99,7 @@ feature 'Multiple issue updating from issues#index', feature: true do end def create_assigned - create(:issue, project: project, assignee: user) + create(:issue, project: project, assignees: [user]) end def create_with_milestone diff --git a/spec/features/issues/user_uses_slash_commands_spec.rb b/spec/features/issues/user_uses_slash_commands_spec.rb index 0a9cd11ad6e..4cd6c1171ac 100644 --- a/spec/features/issues/user_uses_slash_commands_spec.rb +++ b/spec/features/issues/user_uses_slash_commands_spec.rb @@ -2,7 +2,6 @@ require 'rails_helper' feature 'Issues > User uses slash commands', feature: true, js: true do include SlashCommandsHelpers - include WaitForAjax it_behaves_like 'issuable record that supports slash commands in its description and notes', :issue do let(:issuable) { create(:issue, project: project) } diff --git a/spec/features/issues_spec.rb b/spec/features/issues_spec.rb index 7afceb88cf9..06ed2dbac64 100644 --- a/spec/features/issues_spec.rb +++ b/spec/features/issues_spec.rb @@ -4,29 +4,21 @@ describe 'Issues', feature: true do include DropzoneHelper include IssueHelpers include SortingHelper - include WaitForAjax - let(:project) { create(:project, :public) } + let(:project) { create(:empty_project, :public) } before do login_as :user user2 = create(:user) project.team << [[@user, user2], :developer] - - project.repository.create_file( - @user, - '.gitlab/issue_templates/bug.md', - 'this is a test "bug" template', - message: 'added issue template', - branch_name: 'master') end describe 'Edit issue' do let!(:issue) do create(:issue, author: @user, - assignee: @user, + assignees: [@user], project: project) end @@ -38,20 +30,13 @@ describe 'Issues', feature: true do it 'opens new issue popup' do expect(page).to have_content("Issue ##{issue.iid}") end - - describe 'fill in' do - before do - fill_in 'issue_title', with: 'bug 345' - fill_in 'issue_description', with: 'bug description' - end - end end describe 'Editing issue assignee' do let!(:issue) do 
create(:issue, author: @user, - assignee: @user, + assignees: [@user], project: project) end @@ -69,7 +54,7 @@ describe 'Issues', feature: true do expect(page).to have_content 'No assignee - assign yourself' end - expect(issue.reload.assignee).to be_nil + expect(issue.reload.assignees).to be_empty end end @@ -146,7 +131,7 @@ describe 'Issues', feature: true do describe 'Issue info' do it 'excludes award_emoji from comment count' do - issue = create(:issue, author: @user, assignee: @user, project: project, title: 'foobar') + issue = create(:issue, author: @user, assignees: [@user], project: project, title: 'foobar') create(:award_emoji, awardable: issue) visit namespace_project_issues_path(project.namespace, project, assignee_id: @user.id) @@ -161,14 +146,14 @@ describe 'Issues', feature: true do %w(foobar barbaz gitlab).each do |title| create(:issue, author: @user, - assignee: @user, + assignees: [@user], project: project, title: title) end @issue = Issue.find_by(title: 'foobar') @issue.milestone = create(:milestone, project: project) - @issue.assignee = nil + @issue.assignees = [] @issue.save end @@ -359,9 +344,9 @@ describe 'Issues', feature: true do let(:user2) { create(:user) } before do - foo.assignee = user2 + foo.assignees << user2 foo.save - bar.assignee = user2 + bar.assignees << user2 bar.save end @@ -378,7 +363,7 @@ describe 'Issues', feature: true do end describe 'when I want to reset my incoming email token' do - let(:project1) { create(:project, namespace: @user.namespace) } + let(:project1) { create(:empty_project, namespace: @user.namespace) } let!(:issue) { create(:issue, project: project1) } before do @@ -404,7 +389,7 @@ describe 'Issues', feature: true do end describe 'update labels from issue#show', js: true do - let(:issue) { create(:issue, project: project, author: @user, assignee: @user) } + let(:issue) { create(:issue, project: project, author: @user, assignees: [@user]) } let!(:label) { create(:label, project: project) } before do @@ -414,7 +399,8 @@ describe 'Issues', feature: true do it 'will not send ajax request when no data is changed' do page.within '.labels' do click_link 'Edit' - first('.dropdown-menu-close').click + + find('.dropdown-menu-close', match: :first).click expect(page).not_to have_selector('.block-loading') end @@ -422,7 +408,7 @@ describe 'Issues', feature: true do end describe 'update assignee from issue#show' do - let(:issue) { create(:issue, project: project, author: @user, assignee: @user) } + let(:issue) { create(:issue, project: project, author: @user, assignees: [@user]) } context 'by authorized user' do it 'allows user to select unassigned', js: true do @@ -433,10 +419,14 @@ describe 'Issues', feature: true do click_link 'Edit' click_link 'Unassigned' + first('.title').click expect(page).to have_content 'No assignee' end - expect(issue.reload.assignee).to be_nil + # wait_for_ajax does not work with vue-resource at the moment + sleep 1 + + expect(issue.reload.assignees).to be_empty end it 'allows user to select an assignee', js: true do @@ -468,14 +458,14 @@ describe 'Issues', feature: true do click_link 'Edit' click_link @user.name - page.within '.value' do + page.within '.value .author' do expect(page).to have_content @user.name end click_link 'Edit' click_link @user.name - page.within '.value' do + page.within '.value .assign-yourself' do expect(page).to have_content "No assignee" end end @@ -494,7 +484,7 @@ describe 'Issues', feature: true do login_with guest visit namespace_project_issue_path(project.namespace, project, issue) - 
expect(page).to have_content issue.assignee.name + expect(page).to have_content issue.assignees.first.name end end end @@ -560,18 +550,11 @@ describe 'Issues', feature: true do expect(page).to have_content milestone.title end end - - describe 'removing assignee' do - let(:user2) { create(:user) } - - before do - issue.assignee = user2 - issue.save - end - end end describe 'new issue' do + let!(:issue) { create(:issue, project: project) } + context 'by unauthenticated user' do before do logout @@ -601,15 +584,24 @@ describe 'Issues', feature: true do expect(page.find_field("issue_description").value).to have_content 'banana_sample' end - it 'adds double newline to end of attachment markdown' do + it "doesn't add double newline to end of a single attachment markdown" do dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif') - expect(page.find_field("issue_description").value).to match /\n\n$/ + expect(page.find_field("issue_description").value).not_to match /\n\n$/ end end context 'form filled by URL parameters' do + let(:project) { create(:project, :public, :repository) } + before do + project.repository.create_file( + @user, + '.gitlab/issue_templates/bug.md', + 'this is a test "bug" template', + message: 'added issue template', + branch_name: 'master') + visit new_namespace_project_issue_path(project.namespace, project, issuable_template: 'bug') end @@ -653,7 +645,7 @@ describe 'Issues', feature: true do describe 'due date' do context 'update due on issue#show', js: true do - let(:issue) { create(:issue, project: project, author: @user, assignee: @user) } + let(:issue) { create(:issue, project: project, author: @user, assignees: [@user]) } before do visit namespace_project_issue_path(project.namespace, project, issue) @@ -695,4 +687,21 @@ describe 'Issues', feature: true do end end end + + describe 'title issue#show', js: true do + include WaitForVueResource + + it 'updates the title', js: true do + issue = create(:issue, author: @user, assignees: [@user], project: project, title: 'new title') + + visit namespace_project_issue_path(project.namespace, project, issue) + + expect(page).to have_text("new title") + + issue.update(title: "updated title") + + wait_for_vue_resource + expect(page).to have_text("updated title") + end + end end diff --git a/spec/features/login_spec.rb b/spec/features/login_spec.rb index f32d1f78b40..c82e8c03343 100644 --- a/spec/features/login_spec.rb +++ b/spec/features/login_spec.rb @@ -41,7 +41,7 @@ feature 'Login', feature: true do expect(page).to have_content('Your account has been blocked.') end - it 'does not update Devise trackable attributes' do + it 'does not update Devise trackable attributes', :redis do user = create(:user, :blocked) expect { login_with(user) }.not_to change { user.reload.sign_in_count } @@ -55,7 +55,7 @@ feature 'Login', feature: true do expect(page).to have_content('Invalid Login or password.') end - it 'does not update Devise trackable attributes' do + it 'does not update Devise trackable attributes', :redis do expect { login_with(User.ghost) }.not_to change { User.ghost.reload.sign_in_count } end end @@ -199,52 +199,125 @@ feature 'Login', feature: true do describe 'with required two-factor authentication enabled' do let(:user) { create(:user) } - before(:each) { stub_application_setting(require_two_factor_authentication: true) } + # TODO: otp_grace_period_started_at - context 'with grace period defined' do - before(:each) do - stub_application_setting(two_factor_grace_period: 48) - login_with(user) - end + context 
'global setting' do + before(:each) { stub_application_setting(require_two_factor_authentication: true) } - context 'within the grace period' do - it 'redirects to two-factor configuration page' do - expect(current_path).to eq profile_two_factor_auth_path - expect(page).to have_content('You must enable Two-Factor Authentication for your account before') + context 'with grace period defined' do + before(:each) do + stub_application_setting(two_factor_grace_period: 48) + login_with(user) end - it 'allows skipping two-factor configuration', js: true do - expect(current_path).to eq profile_two_factor_auth_path + context 'within the grace period' do + it 'redirects to two-factor configuration page' do + expect(current_path).to eq profile_two_factor_auth_path + expect(page).to have_content('The global settings require you to enable Two-Factor Authentication for your account. You need to do this before ') + end - click_link 'Configure it later' - expect(current_path).to eq root_path + it 'allows skipping two-factor configuration', js: true do + expect(current_path).to eq profile_two_factor_auth_path + + click_link 'Configure it later' + expect(current_path).to eq root_path + end end - end - context 'after the grace period' do - let(:user) { create(:user, otp_grace_period_started_at: 9999.hours.ago) } + context 'after the grace period' do + let(:user) { create(:user, otp_grace_period_started_at: 9999.hours.ago) } - it 'redirects to two-factor configuration page' do - expect(current_path).to eq profile_two_factor_auth_path - expect(page).to have_content('You must enable Two-Factor Authentication for your account.') + it 'redirects to two-factor configuration page' do + expect(current_path).to eq profile_two_factor_auth_path + expect(page).to have_content( + 'The global settings require you to enable Two-Factor Authentication for your account.' + ) + end + + it 'disallows skipping two-factor configuration', js: true do + expect(current_path).to eq profile_two_factor_auth_path + expect(page).not_to have_link('Configure it later') + end + end + end + + context 'without grace period defined' do + before(:each) do + stub_application_setting(two_factor_grace_period: 0) + login_with(user) end - it 'disallows skipping two-factor configuration', js: true do + it 'redirects to two-factor configuration page' do expect(current_path).to eq profile_two_factor_auth_path - expect(page).not_to have_link('Configure it later') + expect(page).to have_content( + 'The global settings require you to enable Two-Factor Authentication for your account.' 
+ ) end end end - context 'without grace period defined' do - before(:each) do - stub_application_setting(two_factor_grace_period: 0) - login_with(user) + context 'group setting' do + before do + group1 = create :group, name: 'Group 1', require_two_factor_authentication: true + group1.add_user(user, GroupMember::DEVELOPER) + group2 = create :group, name: 'Group 2', require_two_factor_authentication: true + group2.add_user(user, GroupMember::DEVELOPER) end - it 'redirects to two-factor configuration page' do - expect(current_path).to eq profile_two_factor_auth_path - expect(page).to have_content('You must enable Two-Factor Authentication for your account.') + context 'with grace period defined' do + before(:each) do + stub_application_setting(two_factor_grace_period: 48) + login_with(user) + end + + context 'within the grace period' do + it 'redirects to two-factor configuration page' do + expect(current_path).to eq profile_two_factor_auth_path + expect(page).to have_content( + 'The group settings for Group 1 and Group 2 require you to enable ' \ + 'Two-Factor Authentication for your account. You need to do this ' \ + 'before ') + end + + it 'allows skipping two-factor configuration', js: true do + expect(current_path).to eq profile_two_factor_auth_path + + click_link 'Configure it later' + expect(current_path).to eq root_path + end + end + + context 'after the grace period' do + let(:user) { create(:user, otp_grace_period_started_at: 9999.hours.ago) } + + it 'redirects to two-factor configuration page' do + expect(current_path).to eq profile_two_factor_auth_path + expect(page).to have_content( + 'The group settings for Group 1 and Group 2 require you to enable ' \ + 'Two-Factor Authentication for your account.' + ) + end + + it 'disallows skipping two-factor configuration', js: true do + expect(current_path).to eq profile_two_factor_auth_path + expect(page).not_to have_link('Configure it later') + end + end + end + + context 'without grace period defined' do + before(:each) do + stub_application_setting(two_factor_grace_period: 0) + login_with(user) + end + + it 'redirects to two-factor configuration page' do + expect(current_path).to eq profile_two_factor_auth_path + expect(page).to have_content( + 'The group settings for Group 1 and Group 2 require you to enable ' \ + 'Two-Factor Authentication for your account.' 
+ ) + end end end end diff --git a/spec/features/markdown_spec.rb b/spec/features/markdown_spec.rb index 894df13a2dc..ba930de937d 100644 --- a/spec/features/markdown_spec.rb +++ b/spec/features/markdown_spec.rb @@ -26,7 +26,7 @@ require 'erb' describe 'GitLab Markdown', feature: true do include Capybara::Node::Matchers - include GitlabMarkdownHelper + include MarkupHelper include MarkdownMatchers # Sometimes it can be useful to see the parsed output of the Markdown document diff --git a/spec/features/merge_requests/assign_issues_spec.rb b/spec/features/merge_requests/assign_issues_spec.rb index 43cc6f2a2a7..b306e2f5f75 100644 --- a/spec/features/merge_requests/assign_issues_spec.rb +++ b/spec/features/merge_requests/assign_issues_spec.rb @@ -18,7 +18,7 @@ feature 'Merge request issue assignment', js: true, feature: true do end context 'logged in as author' do - scenario 'updates related issues' do + it 'updates related issues' do visit_merge_request click_link "Assign yourself to these issues" @@ -33,7 +33,7 @@ feature 'Merge request issue assignment', js: true, feature: true do end it "doesn't display if related issues are already assigned" do - [issue1, issue2].each { |issue| issue.update!(assignee: user) } + [issue1, issue2].each { |issue| issue.update!(assignees: [user]) } visit_merge_request diff --git a/spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb b/spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb index 7f11db3c417..fa306c02a43 100644 --- a/spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb +++ b/spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb @@ -19,8 +19,8 @@ feature 'Check if mergeable with unresolved discussions', js: true, feature: tru it 'does not allow to merge' do visit_merge_request(merge_request) - expect(page).not_to have_button 'Accept Merge Request' - expect(page).to have_content('This merge request has unresolved discussions') + expect(page).not_to have_button 'Merge' + expect(page).to have_content('There are unresolved discussions.') end end @@ -32,7 +32,7 @@ feature 'Check if mergeable with unresolved discussions', js: true, feature: tru it 'allows MR to be merged' do visit_merge_request(merge_request) - expect(page).to have_button 'Accept Merge Request' + expect(page).to have_button 'Merge' end end end @@ -46,7 +46,7 @@ feature 'Check if mergeable with unresolved discussions', js: true, feature: tru it 'does not allow to merge' do visit_merge_request(merge_request) - expect(page).to have_button 'Accept Merge Request' + expect(page).to have_button 'Merge' end end @@ -58,7 +58,7 @@ feature 'Check if mergeable with unresolved discussions', js: true, feature: tru it 'allows MR to be merged' do visit_merge_request(merge_request) - expect(page).to have_button 'Accept Merge Request' + expect(page).to have_button 'Merge' end end end diff --git a/spec/features/merge_requests/cherry_pick_spec.rb b/spec/features/merge_requests/cherry_pick_spec.rb index dfe7c910a10..6ba681e36f7 100644 --- a/spec/features/merge_requests/cherry_pick_spec.rb +++ b/spec/features/merge_requests/cherry_pick_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'Cherry-pick Merge Requests' do +describe 'Cherry-pick Merge Requests', js: true do let(:user) { create(:user) } let(:group) { create(:group) } let(:project) { create(:project, namespace: group) } diff --git a/spec/features/merge_requests/closes_issues_spec.rb 
b/spec/features/merge_requests/closes_issues_spec.rb index eafcab6a0d7..ee0880a1e2f 100644 --- a/spec/features/merge_requests/closes_issues_spec.rb +++ b/spec/features/merge_requests/closes_issues_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' -feature 'Merge Request closing issues message', feature: true do +feature 'Merge Request closing issues message', feature: true, js: true do + include WaitForAjax + let(:user) { create(:user) } let(:project) { create(:project, :public) } let(:issue_1) { create(:issue, project: project)} @@ -23,6 +25,7 @@ feature 'Merge Request closing issues message', feature: true do login_as user visit namespace_project_merge_request_path(project.namespace, project, merge_request) + wait_for_ajax end context 'not closing or mentioning any issue' do @@ -35,7 +38,7 @@ feature 'Merge Request closing issues message', feature: true do let(:merge_request_description) { "Description\n\nclosing #{issue_1.to_reference}, #{issue_2.to_reference}" } it 'does not display closing issue message' do - expect(page).to have_content("Accepting this merge request will close issues #{issue_1.to_reference} and #{issue_2.to_reference}") + expect(page).to have_content("Closes issues #{issue_1.to_reference} and #{issue_2.to_reference}") end end @@ -51,7 +54,8 @@ feature 'Merge Request closing issues message', feature: true do let(:merge_request_title) { "closes #{issue_1.to_reference}\n\n refers to #{issue_2.to_reference}" } it 'does not display closing issue message' do - expect(page).to have_content("Accepting this merge request will close issue #{issue_1.to_reference}. Issue #{issue_2.to_reference} is mentioned but will not be closed.") + expect(page).to have_content("Closes issue #{issue_1.to_reference}.") + expect(page).to have_content("Issue #{issue_2.to_reference} is mentioned but will not be closed.") end end @@ -59,7 +63,7 @@ feature 'Merge Request closing issues message', feature: true do let(:merge_request_title) { "closing #{issue_1.to_reference}, #{issue_2.to_reference}" } it 'does not display closing issue message' do - expect(page).to have_content("Accepting this merge request will close issues #{issue_1.to_reference} and #{issue_2.to_reference}") + expect(page).to have_content("Closes issues #{issue_1.to_reference} and #{issue_2.to_reference}") end end @@ -75,7 +79,8 @@ feature 'Merge Request closing issues message', feature: true do let(:merge_request_title) { "closes #{issue_1.to_reference}\n\n refers to #{issue_2.to_reference}" } it 'does not display closing issue message' do - expect(page).to have_content("Accepting this merge request will close issue #{issue_1.to_reference}. Issue #{issue_2.to_reference} is mentioned but will not be closed.") + expect(page).to have_content("Closes issue #{issue_1.to_reference}. 
Issue #{issue_2.to_reference} is mentioned but will not be closed.") + expect(page).to have_content("Issue #{issue_2.to_reference} is mentioned but will not be closed.") end end end diff --git a/spec/features/merge_requests/conflicts_spec.rb b/spec/features/merge_requests/conflicts_spec.rb index 18508a44184..04b7593ce68 100644 --- a/spec/features/merge_requests/conflicts_spec.rb +++ b/spec/features/merge_requests/conflicts_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'Merge request conflict resolution', js: true, feature: true do - include WaitForAjax - let(:user) { create(:user) } let(:project) { create(:project) } @@ -153,7 +151,7 @@ feature 'Merge request conflict resolution', js: true, feature: true do 'conflict-too-large' => 'when the conflicts contain a large file', 'conflict-binary-file' => 'when the conflicts contain a binary file', 'conflict-missing-side' => 'when the conflicts contain a file edited in one branch and deleted in another', - 'conflict-non-utf8' => 'when the conflicts contain a non-UTF-8 file', + 'conflict-non-utf8' => 'when the conflicts contain a non-UTF-8 file' }.freeze UNRESOLVABLE_CONFLICTS.each do |source_branch, description| diff --git a/spec/features/merge_requests/create_new_mr_spec.rb b/spec/features/merge_requests/create_new_mr_spec.rb index f1ad4a55246..f1b3e7f158c 100644 --- a/spec/features/merge_requests/create_new_mr_spec.rb +++ b/spec/features/merge_requests/create_new_mr_spec.rb @@ -15,26 +15,26 @@ feature 'Create New Merge Request', feature: true, js: true do it 'selects the source branch sha when a tag with the same name exists' do visit namespace_project_merge_requests_path(project.namespace, project) - click_link 'New Merge Request' + click_link 'New merge request' expect(page).to have_content('Source branch') expect(page).to have_content('Target branch') first('.js-source-branch').click - first('.dropdown-source-branch .dropdown-content a', text: 'v1.1.0').click + find('.dropdown-source-branch .dropdown-content a', match: :first).click expect(page).to have_content "b83d6e3" end it 'selects the target branch sha when a tag with the same name exists' do visit namespace_project_merge_requests_path(project.namespace, project) - - click_link 'New Merge Request' + + click_link 'New merge request' expect(page).to have_content('Source branch') expect(page).to have_content('Target branch') first('.js-target-branch').click - first('.dropdown-target-branch .dropdown-content a', text: 'v1.1.0').click + find('.dropdown-target-branch .dropdown-content a', text: 'v1.1.0', match: :first).click expect(page).to have_content "b83d6e3" end @@ -42,12 +42,12 @@ feature 'Create New Merge Request', feature: true, js: true do it 'generates a diff for an orphaned branch' do visit namespace_project_merge_requests_path(project.namespace, project) - click_link 'New Merge Request' + page.has_link?('New Merge Request') ? 
click_link("New Merge Request") : click_link('New merge request') expect(page).to have_content('Source branch') expect(page).to have_content('Target branch') - first('.js-source-branch').click - first('.dropdown-source-branch .dropdown-content a', text: 'orphaned-branch').click + find('.js-source-branch', match: :first).click + find('.dropdown-source-branch .dropdown-content a', text: 'orphaned-branch', match: :first).click click_button "Compare branches" click_link "Changes" @@ -70,6 +70,18 @@ feature 'Create New Merge Request', feature: true, js: true do visit new_namespace_project_merge_request_path(project.namespace, project, merge_request: { target_project_id: private_project.id }) expect(page).not_to have_content private_project.path_with_namespace + expect(page).to have_content project.path_with_namespace + end + end + + context 'when source project cannot be viewed by the current user' do + it 'does not leak the private project name & namespace' do + private_project = create(:project, :private) + + visit new_namespace_project_merge_request_path(project.namespace, project, merge_request: { source_project_id: private_project.id }) + + expect(page).not_to have_content private_project.path_with_namespace + expect(page).to have_content project.path_with_namespace end end diff --git a/spec/features/merge_requests/created_from_fork_spec.rb b/spec/features/merge_requests/created_from_fork_spec.rb index 18833ba7266..bf34c99b92a 100644 --- a/spec/features/merge_requests/created_from_fork_spec.rb +++ b/spec/features/merge_requests/created_from_fork_spec.rb @@ -31,7 +31,7 @@ feature 'Merge request created from fork' do fork_project.destroy! end - scenario 'user can access merge request' do + scenario 'user can access merge request', js: true do visit_merge_request(merge_request) expect(page).to have_content 'Test merge request' diff --git a/spec/features/merge_requests/deleted_source_branch_spec.rb b/spec/features/merge_requests/deleted_source_branch_spec.rb index 0952b17b63e..01e5e4f3a05 100644 --- a/spec/features/merge_requests/deleted_source_branch_spec.rb +++ b/spec/features/merge_requests/deleted_source_branch_spec.rb @@ -4,8 +4,6 @@ require 'spec_helper' # message to be shown by JavaScript when the source branch was deleted. # Please do not remove "js: true". describe 'Deleted source branch', feature: true, js: true do - include WaitForAjax - let(:user) { create(:user) } let(:merge_request) { create(:merge_request) } @@ -22,7 +20,7 @@ describe 'Deleted source branch', feature: true, js: true do it 'shows a message about missing source branch' do expect(page).to have_content( - 'Source branch this-branch-does-not-exist does not exist' + 'Source branch does not exist.' 
) end @@ -37,6 +35,6 @@ describe 'Deleted source branch', feature: true, js: true do wait_for_ajax expect(page).to have_selector('.diffs.tab-pane .nothing-here-block') - expect(page).to have_content('Nothing to merge from this-branch-does-not-exist into feature') + expect(page).to have_content('Source branch does not exist.') end end diff --git a/spec/features/merge_requests/diff_notes_avatars_spec.rb b/spec/features/merge_requests/diff_notes_avatars_spec.rb index a6c72b0b3ac..ccf047d3efa 100644 --- a/spec/features/merge_requests/diff_notes_avatars_spec.rb +++ b/spec/features/merge_requests/diff_notes_avatars_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'Diff note avatars', feature: true, js: true do - include WaitForAjax - let(:user) { create(:user) } let(:project) { create(:project, :public) } let(:merge_request) { create(:merge_request_with_diffs, source_project: project, author: user, title: "Bug NS-04") } @@ -93,7 +91,7 @@ feature 'Diff note avatars', feature: true, js: true do page.within find("[id='#{position.line_code(project.repository)}']") do find('.diff-notes-collapse').click - expect(first('img.js-diff-comment-avatar')["title"]).to eq("#{note.author.name}: #{note.note.truncate(17)}") + expect(first('img.js-diff-comment-avatar')["data-original-title"]).to eq("#{note.author.name}: #{note.note.truncate(17)}") end end @@ -164,9 +162,7 @@ feature 'Diff note avatars', feature: true, js: true do context 'multiple comments' do before do - create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: position) - create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: position) - create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: position) + create_list(:diff_note_on_merge_request, 3, project: project, noteable: merge_request, in_reply_to: note) visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, view: view) diff --git a/spec/features/merge_requests/diff_notes_resolve_spec.rb b/spec/features/merge_requests/diff_notes_resolve_spec.rb index 69164aabdb2..4d549f3bdbb 100644 --- a/spec/features/merge_requests/diff_notes_resolve_spec.rb +++ b/spec/features/merge_requests/diff_notes_resolve_spec.rb @@ -191,13 +191,15 @@ feature 'Diff notes resolve', feature: true, js: true do context 'multiple notes' do before do - create(:diff_note_on_merge_request, project: project, noteable: merge_request) + create(:diff_note_on_merge_request, project: project, noteable: merge_request, in_reply_to: note) visit_merge_request end it 'does not mark discussion as resolved when resolving single note' do page.first '.diff-content .note' do first('.line-resolve-btn').click + + expect(page).to have_selector('.note-action-button .loading') expect(first('.line-resolve-btn')['data-original-title']).to eq("Resolved by #{user.name}") end @@ -273,7 +275,7 @@ feature 'Diff notes resolve', feature: true, js: true do end page.within '.line-resolve-all-container' do - page.find('.discussion-next-btn').click + page.find('.discussion-next-btn').trigger('click') end expect(page.evaluate_script("$('body').scrollTop()")).to be > 0 diff --git a/spec/features/merge_requests/diff_notes_spec.rb b/spec/features/merge_requests/diff_notes_spec.rb deleted file mode 100644 index 06fad1007e8..00000000000 --- a/spec/features/merge_requests/diff_notes_spec.rb +++ /dev/null @@ -1,238 +0,0 @@ -require 'spec_helper' - -feature 'Diff notes', js: true, feature: true do - include WaitForAjax - - before 
do - login_as :admin - @merge_request = create(:merge_request) - @project = @merge_request.source_project - end - - context 'merge request diffs' do - let(:comment_button_class) { '.add-diff-note' } - let(:notes_holder_input_class) { 'js-temp-notes-holder' } - let(:notes_holder_input_xpath) { './following-sibling::*[contains(concat(" ", @class, " "), " notes_holder ")]' } - let(:test_note_comment) { 'this is a test note!' } - - context 'when hovering over a parallel view diff file' do - before(:each) do - visit diffs_namespace_project_merge_request_path(@project.namespace, @project, @merge_request, view: 'parallel') - end - - context 'with an old line on the left and no line on the right' do - it 'should allow commenting on the left side' do - should_allow_commenting(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_23_22"]').find(:xpath, '..'), 'left') - end - - it 'should not allow commenting on the right side' do - should_not_allow_commenting(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_23_22"]').find(:xpath, '..'), 'right') - end - end - - context 'with no line on the left and a new line on the right' do - it 'should not allow commenting on the left side' do - should_not_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_15_15"]').find(:xpath, '..'), 'left') - end - - it 'should allow commenting on the right side' do - should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_15_15"]').find(:xpath, '..'), 'right') - end - end - - context 'with an old line on the left and a new line on the right' do - it 'should allow commenting on the left side' do - should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_9_9"]').find(:xpath, '..'), 'left') - end - - it 'should allow commenting on the right side' do - should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_9_9"]').find(:xpath, '..'), 'right') - end - end - - context 'with an unchanged line on the left and an unchanged line on the right' do - it 'should allow commenting on the left side' do - should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_7_7"]', match: :first).find(:xpath, '..'), 'left') - end - - it 'should allow commenting on the right side' do - should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_7_7"]', match: :first).find(:xpath, '..'), 'right') - end - end - - context 'with a match line' do - it 'should not allow commenting on the left side' do - should_not_allow_commenting(find('.match', match: :first).find(:xpath, '..'), 'left') - end - - it 'should not allow commenting on the right side' do - should_not_allow_commenting(find('.match', match: :first).find(:xpath, '..'), 'right') - end - end - - context 'with an unfolded line' do - before(:each) do - find('.js-unfold', match: :first).click - wait_for_ajax - end - - # The first `.js-unfold` unfolds upwards, therefore the first - # `.line_holder` will be an unfolded line. 
- let(:line_holder) { first('.line_holder[id="1"]') } - - it 'should not allow commenting on the left side' do - should_not_allow_commenting(line_holder, 'left') - end - - it 'should not allow commenting on the right side' do - should_not_allow_commenting(line_holder, 'right') - end - end - end - - context 'when hovering over an inline view diff file' do - before do - visit diffs_namespace_project_merge_request_path(@project.namespace, @project, @merge_request, view: 'inline') - end - - context 'with a new line' do - it 'should allow commenting' do - should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"]')) - end - end - - context 'with an old line' do - it 'should allow commenting' do - should_allow_commenting(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_22_22"]')) - end - end - - context 'with an unchanged line' do - it 'should allow commenting' do - should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_7_7"]')) - end - end - - context 'with a match line' do - it 'should not allow commenting' do - should_not_allow_commenting(find('.match', match: :first)) - end - end - - context 'with an unfolded line' do - before(:each) do - find('.js-unfold', match: :first).click - wait_for_ajax - end - - # The first `.js-unfold` unfolds upwards, therefore the first - # `.line_holder` will be an unfolded line. - let(:line_holder) { first('.line_holder[id="1"]') } - - it 'should not allow commenting' do - should_not_allow_commenting line_holder - end - end - - context 'when hovering over a diff discussion' do - before do - visit diffs_namespace_project_merge_request_path(@project.namespace, @project, @merge_request, view: 'inline') - should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_7_7"]')) - visit namespace_project_merge_request_path(@project.namespace, @project, @merge_request) - end - - it 'should not allow commenting' do - should_not_allow_commenting(find('.line_holder', match: :first)) - end - end - end - - context 'when the MR only supports legacy diff notes' do - before do - @merge_request.merge_request_diff.update_attributes(start_commit_sha: nil) - visit diffs_namespace_project_merge_request_path(@project.namespace, @project, @merge_request, view: 'inline') - end - - context 'with a new line' do - it 'should allow commenting' do - should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"]')) - end - end - - context 'with an old line' do - it 'should allow commenting' do - should_allow_commenting(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_22_22"]')) - end - end - - context 'with an unchanged line' do - it 'should allow commenting' do - should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_7_7"]')) - end - end - - context 'with a match line' do - it 'should not allow commenting' do - should_not_allow_commenting(find('.match', match: :first)) - end - end - end - - def should_allow_commenting(line_holder, diff_side = nil) - line = get_line_components(line_holder, diff_side) - line[:content].hover - expect(line[:num]).to have_css comment_button_class - - comment_on_line(line_holder, line) - - assert_comment_persistence(line_holder) - end - - def should_not_allow_commenting(line_holder, diff_side = nil) - line = get_line_components(line_holder, diff_side) - line[:content].hover - expect(line[:num]).not_to have_css comment_button_class - end - - def get_line_components(line_holder, diff_side = nil) - if diff_side.nil? 
- get_inline_line_components(line_holder) - else - get_parallel_line_components(line_holder, diff_side) - end - end - - def get_inline_line_components(line_holder) - { content: line_holder.find('.line_content', match: :first), num: line_holder.find('.diff-line-num', match: :first) } - end - - def get_parallel_line_components(line_holder, diff_side = nil) - side_index = diff_side == 'left' ? 0 : 1 - # Wait for `.line_content` - line_holder.find('.line_content', match: :first) - # Wait for `.diff-line-num` - line_holder.find('.diff-line-num', match: :first) - { content: line_holder.all('.line_content')[side_index], num: line_holder.all('.diff-line-num')[side_index] } - end - - def comment_on_line(line_holder, line) - line[:num].find(comment_button_class).trigger 'click' - line_holder.find(:xpath, notes_holder_input_xpath) - - notes_holder_input = line_holder.find(:xpath, notes_holder_input_xpath) - expect(notes_holder_input[:class]).to include(notes_holder_input_class) - - notes_holder_input.fill_in 'note[note]', with: test_note_comment - click_button 'Comment' - wait_for_ajax - end - - def assert_comment_persistence(line_holder) - expect(line_holder).to have_xpath notes_holder_input_xpath - - notes_holder_saved = line_holder.find(:xpath, notes_holder_input_xpath) - expect(notes_holder_saved[:class]).not_to include(notes_holder_input_class) - expect(notes_holder_saved).to have_content test_note_comment - end - end -end diff --git a/spec/features/merge_requests/diffs_spec.rb b/spec/features/merge_requests/diffs_spec.rb index 4a6c76a5caf..4860a2a7498 100644 --- a/spec/features/merge_requests/diffs_spec.rb +++ b/spec/features/merge_requests/diffs_spec.rb @@ -1,11 +1,8 @@ require 'spec_helper' feature 'Diffs URL', js: true, feature: true do - before do - login_as :admin - @merge_request = create(:merge_request) - @project = @merge_request.source_project - end + let(:project) { create(:project, :public) } + let(:merge_request) { create(:merge_request, source_project: project) } context 'when visit with */* as accept header' do before(:each) do @@ -13,9 +10,9 @@ feature 'Diffs URL', js: true, feature: true do end it 'renders the notes' do - create :note_on_merge_request, project: @project, noteable: @merge_request, note: 'Rebasing with master' + create :note_on_merge_request, project: project, noteable: merge_request, note: 'Rebasing with master' - visit diffs_namespace_project_merge_request_path(@project.namespace, @project, @merge_request) + visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request) # Load notes and diff through AJAX expect(page).to have_css('.note-text', visible: false, text: 'Rebasing with master') @@ -23,12 +20,39 @@ feature 'Diffs URL', js: true, feature: true do end end + context 'when linking to note' do + describe 'with unresolved note' do + let(:note) { create :diff_note_on_merge_request, project: project, noteable: merge_request } + let(:fragment) { "#note_#{note.id}" } + + before do + visit "#{diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)}#{fragment}" + end + + it 'shows expanded note' do + expect(page).to have_selector(fragment, visible: true) + end + end + + describe 'with resolved note' do + let(:note) { create :diff_note_on_merge_request, :resolved, project: project, noteable: merge_request } + let(:fragment) { "#note_#{note.id}" } + + before do + visit "#{diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)}#{fragment}" + end + + it 'shows expanded note' do + 
expect(page).to have_selector(fragment, visible: true) + end + end + end + context 'when merge request has overflow' do it 'displays warning' do - allow_any_instance_of(MergeRequestDiff).to receive(:overflow?).and_return(true) - allow(Commit).to receive(:max_diff_options).and_return(max_files: 20, max_lines: 20) + allow(Commit).to receive(:max_diff_options).and_return(max_files: 3) - visit diffs_namespace_project_merge_request_path(@project.namespace, @project, @merge_request) + visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request) page.within('.alert') do expect(page).to have_text("Too many changes to show. Plain diff Email patch To preserve @@ -36,4 +60,35 @@ feature 'Diffs URL', js: true, feature: true do end end end + + context 'when editing file' do + let(:author_user) { create(:user) } + let(:user) { create(:user) } + let(:forked_project) { Projects::ForkService.new(project, author_user).execute } + let(:merge_request) { create(:merge_request_with_diffs, source_project: forked_project, target_project: project, author: author_user) } + let(:changelog_id) { Digest::SHA1.hexdigest("CHANGELOG") } + + context 'as author' do + it 'shows direct edit link' do + login_as(author_user) + visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request) + + # Throws `Capybara::Poltergeist::InvalidSelector` if we try to use `#hash` syntax + expect(page).to have_selector("[id=\"#{changelog_id}\"] a.js-edit-blob") + end + end + + context 'as user who needs to fork' do + it 'shows fork/cancel confirmation' do + login_as(user) + visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request) + + # Throws `Capybara::Poltergeist::InvalidSelector` if we try to use `#hash` syntax + find("[id=\"#{changelog_id}\"] .js-edit-blob").click + + expect(page).to have_selector('.js-fork-suggestion-button', count: 1) + expect(page).to have_selector('.js-cancel-fork-suggestion-button', count: 1) + end + end + end end diff --git a/spec/features/merge_requests/discussion_spec.rb b/spec/features/merge_requests/discussion_spec.rb new file mode 100644 index 00000000000..f59d0faa274 --- /dev/null +++ b/spec/features/merge_requests/discussion_spec.rb @@ -0,0 +1,51 @@ +require 'spec_helper' + +feature 'Merge Request Discussions', feature: true do + before do + login_as :admin + end + + context "Diff discussions" do + let(:merge_request) { create(:merge_request, importing: true) } + let(:project) { merge_request.source_project } + let!(:old_merge_request_diff) { merge_request.merge_request_diffs.create(diff_refs: outdated_diff_refs) } + let!(:new_merge_request_diff) { merge_request.merge_request_diffs.create } + + let!(:outdated_discussion) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: outdated_position).to_discussion } + let!(:active_discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion } + + let(:outdated_position) do + Gitlab::Diff::Position.new( + old_path: "files/ruby/popen.rb", + new_path: "files/ruby/popen.rb", + old_line: nil, + new_line: 9, + diff_refs: outdated_diff_refs + ) + end + + let(:outdated_diff_refs) { project.commit("874797c3a73b60d2187ed6e2fcabd289ff75171e").diff_refs } + + before(:each) do + visit namespace_project_merge_request_path(project.namespace, project, merge_request) + end + + context 'active discussions' do + it 'shows a link to the diff' do + 
within(".discussion[data-discussion-id='#{active_discussion.id}']") do + path = diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, anchor: active_discussion.line_code) + expect(page).to have_link('the diff', href: path) + end + end + end + + context 'outdated discussions' do + it 'shows a link to the outdated diff' do + within(".discussion[data-discussion-id='#{outdated_discussion.id}']") do + path = diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, diff_id: old_merge_request_diff.id, anchor: outdated_discussion.line_code) + expect(page).to have_link('an outdated diff', href: path) + end + end + end + end +end diff --git a/spec/features/merge_requests/edit_mr_spec.rb b/spec/features/merge_requests/edit_mr_spec.rb index cb3bc392903..ec87a99b3ab 100644 --- a/spec/features/merge_requests/edit_mr_spec.rb +++ b/spec/features/merge_requests/edit_mr_spec.rb @@ -29,18 +29,6 @@ feature 'Edit Merge Request', feature: true do expect(page).to have_content 'Someone edited the merge request the same time you did' end - it 'allows to unselect "Remove source branch"' do - merge_request.update(merge_params: { 'force_remove_source_branch' => '1' }) - expect(merge_request.merge_params['force_remove_source_branch']).to be_truthy - - visit edit_namespace_project_merge_request_path(project.namespace, project, merge_request) - uncheck 'Remove source branch when merge request is accepted' - - click_button 'Save changes' - - expect(page).to have_content 'Remove source branch' - end - it 'should preserve description textarea height', js: true do long_description = %q( Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam ac ornare ligula, ut tempus arcu. Etiam ultricies accumsan dolor vitae faucibus. Donec at elit lacus. Mauris orci ante, aliquam quis lorem eget, convallis faucibus arcu. Aenean at pulvinar lacus. Ut viverra quam massa, molestie ornare tortor dignissim a. Suspendisse tristique pellentesque tellus, id lacinia metus elementum id. Nam tristique, arcu rhoncus faucibus viverra, lacus ipsum sagittis ligula, vitae convallis odio lacus a nibh. Ut tincidunt est purus, ac vestibulum augue maximus in. Suspendisse vel erat et mi ultricies semper. Pellentesque volutpat pellentesque consequat. 
diff --git a/spec/features/merge_requests/filter_by_labels_spec.rb b/spec/features/merge_requests/filter_by_labels_spec.rb index 55f3c1863ff..32a9082b9b9 100644 --- a/spec/features/merge_requests/filter_by_labels_spec.rb +++ b/spec/features/merge_requests/filter_by_labels_spec.rb @@ -3,7 +3,6 @@ require 'rails_helper' feature 'Issue filtering by Labels', feature: true, js: true do include FilteredSearchHelpers include MergeRequestHelpers - include WaitForAjax let(:project) { create(:project, :public) } let!(:user) { create(:user) } diff --git a/spec/features/merge_requests/filter_merge_requests_spec.rb b/spec/features/merge_requests/filter_merge_requests_spec.rb index 70e3997e716..2da60e9f4ad 100644 --- a/spec/features/merge_requests/filter_merge_requests_spec.rb +++ b/spec/features/merge_requests/filter_merge_requests_spec.rb @@ -3,7 +3,6 @@ require 'rails_helper' describe 'Filter merge requests', feature: true do include FilteredSearchHelpers include MergeRequestHelpers - include WaitForAjax let!(:project) { create(:project) } let!(:group) { create(:group) } diff --git a/spec/features/merge_requests/merge_commit_message_toggle_spec.rb b/spec/features/merge_requests/merge_commit_message_toggle_spec.rb index 1bc2a5548dd..221ddb5873c 100644 --- a/spec/features/merge_requests/merge_commit_message_toggle_spec.rb +++ b/spec/features/merge_requests/merge_commit_message_toggle_spec.rb @@ -14,8 +14,6 @@ feature 'Clicking toggle commit message link', feature: true, js: true do ) end let(:textbox) { page.find(:css, '.js-commit-message', visible: false) } - let(:include_link) { page.find(:css, '.js-with-description-link', visible: false) } - let(:do_not_include_link) { page.find(:css, '.js-without-description-link', visible: false) } let(:default_message) do [ "Merge branch 'feature' into 'master'", @@ -40,7 +38,7 @@ feature 'Clicking toggle commit message link', feature: true, js: true do visit namespace_project_merge_request_path(project.namespace, project, merge_request) - expect(textbox).not_to be_visible + expect(page).not_to have_selector('.js-commit-message') click_button "Modify commit message" expect(textbox).to be_visible end @@ -56,19 +54,4 @@ feature 'Clicking toggle commit message link', feature: true, js: true do expect(textbox.value).to eq(default_message) end - - it "toggles link between 'Include description' and 'Don't include description'" do - expect(include_link).to be_visible - expect(do_not_include_link).not_to be_visible - - click_link "Include description in commit message" - - expect(include_link).not_to be_visible - expect(do_not_include_link).to be_visible - - click_link "Don't include description in commit message" - - expect(include_link).to be_visible - expect(do_not_include_link).not_to be_visible - end end diff --git a/spec/features/merge_requests/merge_immediately_with_pipeline_spec.rb b/spec/features/merge_requests/merge_immediately_with_pipeline_spec.rb index 79105b1ee46..c102722d6db 100644 --- a/spec/features/merge_requests/merge_immediately_with_pipeline_spec.rb +++ b/spec/features/merge_requests/merge_immediately_with_pipeline_spec.rb @@ -4,16 +4,18 @@ feature 'Merge immediately', :feature, :js do let(:user) { create(:user) } let(:project) { create(:project, :public) } - let(:merge_request) do + let!(:merge_request) do create(:merge_request_with_diffs, source_project: project, author: user, - title: 'Bug NS-04') + title: 'Bug NS-04', + head_pipeline: pipeline, + source_branch: pipeline.ref) end let(:pipeline) do create(:ci_pipeline, project: project, - sha: 
merge_request.diff_head_sha, - ref: merge_request.source_branch) + ref: 'master', + sha: project.repository.commit('master').id) end before { project.team << [user, :master] } @@ -32,11 +34,13 @@ feature 'Merge immediately', :feature, :js do page.within '.mr-widget-body' do find('.dropdown-toggle').click - click_link 'Merge Immediately' + Sidekiq::Testing.fake! do + click_link 'Merge immediately' - expect(find('.js-merge-when-pipeline-succeeds-button')).to have_content('Merge in progress') + expect(find('.accept-merge-request.btn-info')).to have_content('Merge in progress') - wait_for_ajax + wait_for_vue_resource + end end end end diff --git a/spec/features/merge_requests/merge_when_pipeline_succeeds_spec.rb b/spec/features/merge_requests/merge_when_pipeline_succeeds_spec.rb index ed7193b9777..11b6f0c0a64 100644 --- a/spec/features/merge_requests/merge_when_pipeline_succeeds_spec.rb +++ b/spec/features/merge_requests/merge_when_pipeline_succeeds_spec.rb @@ -16,7 +16,10 @@ feature 'Merge When Pipeline Succeeds', :feature, :js do ref: merge_request.source_branch) end - before { project.team << [user, :master] } + before do + project.add_master(user) + merge_request.update(head_pipeline_id: pipeline.id) + end context 'when there is active pipeline for merge request' do background do @@ -28,25 +31,25 @@ feature 'Merge When Pipeline Succeeds', :feature, :js do visit_merge_request(merge_request) end - it 'displays the Merge When Pipeline Succeeds button' do - expect(page).to have_button "Merge When Pipeline Succeeds" + it 'displays the Merge when pipeline succeeds button' do + expect(page).to have_button "Merge when pipeline succeeds" end - describe 'enabling Merge When Pipeline Succeeds' do - shared_examples 'Merge When Pipeline Succeeds activator' do - it 'activates the Merge When Pipeline Succeeds feature' do - click_button "Merge When Pipeline Succeeds" + describe 'enabling Merge when pipeline succeeds' do + shared_examples 'Merge when pipeline succeeds activator' do + it 'activates the Merge when pipeline succeeds feature' do + click_button "Merge when pipeline succeeds" expect(page).to have_content "Set by #{user.name} to be merged automatically when the pipeline succeeds." - expect(page).to have_content "The source branch will not be removed." - expect(page).to have_link "Cancel Automatic Merge" + expect(page).to have_content "The source branch will be removed." 
+ expect(page).to have_selector ".js-cancel-auto-merge" visit_merge_request(merge_request) # Needed to refresh the page expect(page).to have_content /enabled an automatic merge when the pipeline for \h{8} succeeds/i end end context "when enabled immediately" do - it_behaves_like 'Merge When Pipeline Succeeds activator' + it_behaves_like 'Merge when pipeline succeeds activator' end context 'when enabled after pipeline status changed' do @@ -60,16 +63,16 @@ feature 'Merge When Pipeline Succeeds', :feature, :js do expect(page).to have_content "Pipeline ##{pipeline.id} running" end - it_behaves_like 'Merge When Pipeline Succeeds activator' + it_behaves_like 'Merge when pipeline succeeds activator' end context 'when enabled after it was previously canceled' do before do - click_button "Merge When Pipeline Succeeds" - click_link "Cancel Automatic Merge" + click_button "Merge when pipeline succeeds" + click_link "Cancel automatic merge" end - it_behaves_like 'Merge When Pipeline Succeeds activator' + it_behaves_like 'Merge when pipeline succeeds activator' end context 'when it was enabled and then canceled' do @@ -83,10 +86,21 @@ feature 'Merge When Pipeline Succeeds', :feature, :js do end before do - click_link "Cancel Automatic Merge" + click_link "Cancel automatic merge" end - it_behaves_like 'Merge When Pipeline Succeeds activator' + it_behaves_like 'Merge when pipeline succeeds activator' + end + end + + describe 'enabling Merge when pipeline succeeds via dropdown' do + it 'activates the Merge when pipeline succeeds feature' do + click_button 'Select merge moment' + click_link 'Merge when pipeline succeeds' + + expect(page).to have_content "Set by #{user.name} to be merged automatically when the pipeline succeeds." + expect(page).to have_content "The source branch will be removed." 
+ expect(page).to have_link "Cancel automatic merge" end end end @@ -110,21 +124,14 @@ feature 'Merge When Pipeline Succeeds', :feature, :js do end it 'allows to cancel the automatic merge' do - click_link "Cancel Automatic Merge" + click_link "Cancel automatic merge" - expect(page).to have_button "Merge When Pipeline Succeeds" + expect(page).to have_button "Merge when pipeline succeeds" visit_merge_request(merge_request) # refresh the page expect(page).to have_content "canceled the automatic merge" end - it "allows the user to remove the source branch" do - expect(page).to have_link "Remove Source Branch When Merged" - - click_link "Remove Source Branch When Merged" - expect(page).to have_content "The source branch will be removed" - end - context 'when pipeline succeeds' do background { build.success } @@ -141,7 +148,7 @@ feature 'Merge When Pipeline Succeeds', :feature, :js do it "does not allow to enable merge when pipeline succeeds" do visit_merge_request(merge_request) - expect(page).not_to have_link 'Merge When Pipeline Succeeds' + expect(page).not_to have_link 'Merge when pipeline succeeds' end end diff --git a/spec/features/merge_requests/mini_pipeline_graph_spec.rb b/spec/features/merge_requests/mini_pipeline_graph_spec.rb index 84ad8765d8f..5b2798af32f 100644 --- a/spec/features/merge_requests/mini_pipeline_graph_spec.rb +++ b/spec/features/merge_requests/mini_pipeline_graph_spec.rb @@ -1,11 +1,9 @@ require 'rails_helper' feature 'Mini Pipeline Graph', :js, :feature do - include WaitForAjax - let(:user) { create(:user) } let(:project) { create(:project, :public) } - let(:merge_request) { create(:merge_request, source_project: project) } + let(:merge_request) { create(:merge_request, source_project: project, head_pipeline: pipeline) } let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: 'master', status: 'running', sha: project.commit.id) } let(:build) { create(:ci_build, pipeline: pipeline, stage: 'test', commands: 'test') } diff --git a/spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb b/spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb index 447764566e0..cdda0542c51 100644 --- a/spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb +++ b/spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' -feature 'Only allow merge requests to be merged if the pipeline succeeds', feature: true do +feature 'Only allow merge requests to be merged if the pipeline succeeds', feature: true, js: true do + include WaitForVueResource + let(:merge_request) { create(:merge_request_with_diffs) } let(:project) { merge_request.target_project } @@ -10,15 +12,17 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu project.team << [merge_request.author, :master] end - context 'project does not have CI enabled' do + context 'project does not have CI enabled', js: true do it 'allows MR to be merged' do visit_merge_request(merge_request) - expect(page).to have_button 'Accept Merge Request' + wait_for_vue_resource + + expect(page).to have_button 'Merge' end end - context 'when project has CI enabled' do + context 'when project has CI enabled', js: true do given!(:pipeline) do create(:ci_empty_pipeline, project: project, @@ -27,6 +31,8 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu status: status) end + before { merge_request.update(head_pipeline: pipeline) } + context 'when merge requests can only be merged if the 
pipeline succeeds' do before do project.update_attribute(:only_allow_merge_if_pipeline_succeeds, true) @@ -38,8 +44,10 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu it 'does not allow to merge immediately' do visit_merge_request(merge_request) - expect(page).to have_button 'Merge When Pipeline Succeeds' - expect(page).not_to have_button 'Select Merge Moment' + wait_for_vue_resource + + expect(page).to have_button 'Merge when pipeline succeeds' + expect(page).not_to have_button 'Select merge moment' end end @@ -49,7 +57,9 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu it 'does not allow MR to be merged' do visit_merge_request(merge_request) - expect(page).not_to have_button 'Accept Merge Request' + wait_for_vue_resource + + expect(page).to have_css('button[disabled="disabled"]', text: 'Merge') expect(page).to have_content('Please retry the job or push a new commit to fix the failure.') end end @@ -60,7 +70,9 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu it 'does not allow MR to be merged' do visit_merge_request(merge_request) - expect(page).not_to have_button 'Accept Merge Request' + wait_for_vue_resource + + expect(page).not_to have_button 'Merge' expect(page).to have_content('Please retry the job or push a new commit to fix the failure.') end end @@ -71,7 +83,9 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu it 'allows MR to be merged' do visit_merge_request(merge_request) - expect(page).to have_button 'Accept Merge Request' + wait_for_vue_resource + + expect(page).to have_button 'Merge' end end @@ -81,7 +95,9 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu it 'allows MR to be merged' do visit_merge_request(merge_request) - expect(page).to have_button 'Accept Merge Request' + wait_for_vue_resource + + expect(page).to have_button 'Merge' end end end @@ -94,13 +110,15 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu context 'when CI is running' do given(:status) { :running } - it 'allows MR to be merged immediately', js: true do + it 'allows MR to be merged immediately' do visit_merge_request(merge_request) - expect(page).to have_button 'Merge When Pipeline Succeeds' + wait_for_vue_resource + + expect(page).to have_button 'Merge when pipeline succeeds' - click_button 'Select Merge Moment' - expect(page).to have_content 'Merge Immediately' + click_button 'Select merge moment' + expect(page).to have_content 'Merge immediately' end end @@ -110,7 +128,9 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu it 'allows MR to be merged' do visit_merge_request(merge_request) - expect(page).to have_button 'Accept Merge Request' + wait_for_vue_resource + + expect(page).to have_button 'Merge' end end @@ -120,7 +140,9 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu it 'allows MR to be merged' do visit_merge_request(merge_request) - expect(page).to have_button 'Accept Merge Request' + wait_for_vue_resource + + expect(page).to have_button 'Merge' end end end diff --git a/spec/features/merge_requests/pipelines_spec.rb b/spec/features/merge_requests/pipelines_spec.rb index 9c4c0525267..99e283ac181 100644 --- a/spec/features/merge_requests/pipelines_spec.rb +++ b/spec/features/merge_requests/pipelines_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'Pipelines for Merge Requests', feature: true, js: true do - include 
WaitForAjax - given(:user) { create(:user) } given(:merge_request) { create(:merge_request) } given(:project) { merge_request.target_project } diff --git a/spec/features/merge_requests/reset_filters_spec.rb b/spec/features/merge_requests/reset_filters_spec.rb index 14511707af4..275f81f50dc 100644 --- a/spec/features/merge_requests/reset_filters_spec.rb +++ b/spec/features/merge_requests/reset_filters_spec.rb @@ -3,7 +3,6 @@ require 'rails_helper' feature 'Merge requests filter clear button', feature: true, js: true do include FilteredSearchHelpers include MergeRequestHelpers - include WaitForAjax include IssueHelpers let!(:project) { create(:project, :public) } @@ -14,7 +13,7 @@ feature 'Merge requests filter clear button', feature: true, js: true do let!(:mr2) { create(:merge_request, title: "Bugfix1", source_project: project, target_project: project, source_branch: "Bugfix1") } let(:merge_request_css) { '.merge-request' } - let(:clear_search_css) { '.filtered-search-input-container .clear-search' } + let(:clear_search_css) { '.filtered-search-box .clear-search' } before do mr2.labels << bug diff --git a/spec/features/merge_requests/target_branch_spec.rb b/spec/features/merge_requests/target_branch_spec.rb index b6134540273..c154cf8ade9 100644 --- a/spec/features/merge_requests/target_branch_spec.rb +++ b/spec/features/merge_requests/target_branch_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'Target branch', feature: true do +describe 'Target branch', feature: true, js: true do let(:user) { create(:user) } let(:merge_request) { create(:merge_request) } let(:project) { merge_request.project } @@ -17,11 +17,6 @@ describe 'Target branch', feature: true do project.team << [user, :master] end - it 'shows link to target branch' do - visit path_to_merge_request - expect(page).to have_link('feature', href: namespace_project_commits_path(project.namespace, project, merge_request.target_branch)) - end - context 'when branch was deleted' do before do DeleteBranchService.new(project, user).execute('feature') @@ -30,12 +25,12 @@ describe 'Target branch', feature: true do it 'shows a message about missing target branch' do expect(page).to have_content( - 'Target branch feature does not exist' + 'Target branch does not exist' ) end it 'does not show link to target branch' do - expect(page).not_to have_link('feature') + expect(page).not_to have_selector('.mr-widget-body .js-branch-text a') end end end diff --git a/spec/features/merge_requests/update_merge_requests_spec.rb b/spec/features/merge_requests/update_merge_requests_spec.rb index b56fdfe5611..9ecc998785b 100644 --- a/spec/features/merge_requests/update_merge_requests_spec.rb +++ b/spec/features/merge_requests/update_merge_requests_spec.rb @@ -1,8 +1,6 @@ require 'rails_helper' feature 'Multiple merge requests updating from merge_requests#index', feature: true do - include WaitForAjax - let!(:user) { create(:user)} let!(:project) { create(:project) } let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) } diff --git a/spec/features/merge_requests/user_posts_diff_notes_spec.rb b/spec/features/merge_requests/user_posts_diff_notes_spec.rb new file mode 100644 index 00000000000..7756202e3f5 --- /dev/null +++ b/spec/features/merge_requests/user_posts_diff_notes_spec.rb @@ -0,0 +1,294 @@ +require 'spec_helper' + +feature 'Merge requests > User posts diff notes', :js do + let(:user) { create(:user) } + let(:merge_request) { create(:merge_request) } + let(:project) { merge_request.source_project } + + 
before do + project.add_developer(user) + login_as(user) + end + + let(:comment_button_class) { '.add-diff-note' } + let(:notes_holder_input_class) { 'js-temp-notes-holder' } + let(:notes_holder_input_xpath) { './following-sibling::*[contains(concat(" ", @class, " "), " notes_holder ")]' } + let(:test_note_comment) { 'this is a test note!' } + + context 'when hovering over a parallel view diff file' do + before do + visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, view: 'parallel') + end + + context 'with an old line on the left and no line on the right' do + it 'allows commenting on the left side' do + should_allow_commenting(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_23_22"]').find(:xpath, '..'), 'left') + end + + it 'does not allow commenting on the right side' do + should_not_allow_commenting(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_23_22"]').find(:xpath, '..'), 'right') + end + end + + context 'with no line on the left and a new line on the right' do + it 'does not allow commenting on the left side' do + should_not_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_15_15"]').find(:xpath, '..'), 'left') + end + + it 'allows commenting on the right side' do + should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_15_15"]').find(:xpath, '..'), 'right') + end + end + + context 'with an old line on the left and a new line on the right' do + it 'allows commenting on the left side' do + should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_9_9"]').find(:xpath, '..'), 'left') + end + + it 'allows commenting on the right side' do + should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_9_9"]').find(:xpath, '..'), 'right') + end + end + + context 'with an unchanged line on the left and an unchanged line on the right' do + it 'allows commenting on the left side' do + should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_7_7"]', match: :first).find(:xpath, '..'), 'left') + end + + it 'allows commenting on the right side' do + should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_7_7"]', match: :first).find(:xpath, '..'), 'right') + end + end + + context 'with a match line' do + it 'does not allow commenting on the left side' do + should_not_allow_commenting(find('.match', match: :first).find(:xpath, '..'), 'left') + end + + it 'does not allow commenting on the right side' do + should_not_allow_commenting(find('.match', match: :first).find(:xpath, '..'), 'right') + end + end + + context 'with an unfolded line' do + before(:each) do + find('.js-unfold', match: :first).click + wait_for_ajax + end + + # The first `.js-unfold` unfolds upwards, therefore the first + # `.line_holder` will be an unfolded line. 
+ let(:line_holder) { first('.line_holder[id="1"]') } + + it 'does not allow commenting on the left side' do + should_not_allow_commenting(line_holder, 'left') + end + + it 'does not allow commenting on the right side' do + should_not_allow_commenting(line_holder, 'right') + end + end + end + + context 'when hovering over an inline view diff file' do + before do + visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, view: 'inline') + end + + context 'with a new line' do + it 'allows commenting' do + should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"]')) + end + end + + context 'with an old line' do + it 'allows commenting' do + should_allow_commenting(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_22_22"]')) + end + end + + context 'with an unchanged line' do + it 'allows commenting' do + should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_7_7"]')) + end + end + + context 'with a match line' do + it 'does not allow commenting' do + should_not_allow_commenting(find('.match', match: :first)) + end + end + + context 'with an unfolded line' do + before(:each) do + find('.js-unfold', match: :first).click + wait_for_ajax + end + + # The first `.js-unfold` unfolds upwards, therefore the first + # `.line_holder` will be an unfolded line. + let(:line_holder) { first('.line_holder[id="1"]') } + + it 'does not allow commenting' do + should_not_allow_commenting line_holder + end + end + + context 'when hovering over a diff discussion' do + before do + visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, view: 'inline') + should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_7_7"]')) + visit namespace_project_merge_request_path(project.namespace, project, merge_request) + end + + it 'does not allow commenting' do + should_not_allow_commenting(find('.line_holder', match: :first)) + end + end + end + + context 'when cancelling the comment addition' do + before do + visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, view: 'inline') + end + + context 'with a new line' do + it 'allows dismissing a comment' do + should_allow_dismissing_a_comment(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"]')) + end + end + end + + describe 'with multiple note forms' do + before do + visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, view: 'inline') + click_diff_line(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"]')) + click_diff_line(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_22_22"]')) + end + + describe 'posting a note' do + it 'adds as discussion' do + expect(page).to have_css('.js-temp-notes-holder', count: 2) + + should_allow_commenting(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_22_22"]'), asset_form_reset: false) + expect(page).to have_css('.notes_holder .note', count: 1) + expect(page).to have_css('.js-temp-notes-holder', count: 1) + expect(page).to have_button('Reply...') + end + end + end + + context 'when the MR only supports legacy diff notes' do + before do + merge_request.merge_request_diff.update_attributes(start_commit_sha: nil) + visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, view: 'inline') + end + + context 'with a new line' do + it 'allows commenting' do + should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"]')) + end + end + + context 'with an old line' 
do + it 'allows commenting' do + should_allow_commenting(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_22_22"]')) + end + end + + context 'with an unchanged line' do + it 'allows commenting' do + should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_7_7"]')) + end + end + + context 'with a match line' do + it 'does not allow commenting' do + should_not_allow_commenting(find('.match', match: :first)) + end + end + end + + def should_allow_commenting(line_holder, diff_side = nil, asset_form_reset: true) + write_comment_on_line(line_holder, diff_side) + + click_button 'Comment' + wait_for_ajax + + assert_comment_persistence(line_holder, asset_form_reset: asset_form_reset) + end + + def should_allow_dismissing_a_comment(line_holder, diff_side = nil) + write_comment_on_line(line_holder, diff_side) + + find('.js-close-discussion-note-form').trigger('click') + + assert_comment_dismissal(line_holder) + end + + def should_not_allow_commenting(line_holder, diff_side = nil) + line = get_line_components(line_holder, diff_side) + line[:content].hover + expect(line[:num]).not_to have_css comment_button_class + end + + def get_line_components(line_holder, diff_side = nil) + if diff_side.nil? + get_inline_line_components(line_holder) + else + get_parallel_line_components(line_holder, diff_side) + end + end + + def get_inline_line_components(line_holder) + { content: line_holder.find('.line_content', match: :first), num: line_holder.find('.diff-line-num', match: :first) } + end + + def get_parallel_line_components(line_holder, diff_side = nil) + side_index = diff_side == 'left' ? 0 : 1 + # Wait for `.line_content` + line_holder.find('.line_content', match: :first) + # Wait for `.diff-line-num` + line_holder.find('.diff-line-num', match: :first) + { content: line_holder.all('.line_content')[side_index], num: line_holder.all('.diff-line-num')[side_index] } + end + + def click_diff_line(line_holder, diff_side = nil) + line = get_line_components(line_holder, diff_side) + line[:content].hover + + expect(line[:num]).to have_css comment_button_class + + line[:num].find(comment_button_class).trigger 'click' + end + + def write_comment_on_line(line_holder, diff_side) + click_diff_line(line_holder, diff_side) + + notes_holder_input = line_holder.find(:xpath, notes_holder_input_xpath) + + expect(notes_holder_input[:class]).to include(notes_holder_input_class) + + notes_holder_input.fill_in 'note[note]', with: test_note_comment + end + + def assert_comment_persistence(line_holder, asset_form_reset:) + notes_holder_saved = line_holder.find(:xpath, notes_holder_input_xpath) + + expect(notes_holder_saved[:class]).not_to include(notes_holder_input_class) + expect(notes_holder_saved).to have_content test_note_comment + + assert_form_is_reset if asset_form_reset + end + + def assert_comment_dismissal(line_holder) + expect(line_holder).not_to have_xpath notes_holder_input_xpath + expect(page).not_to have_content test_note_comment + + assert_form_is_reset + end + + def assert_form_is_reset + expect(page).to have_no_css('.js-temp-notes-holder') + end +end diff --git a/spec/features/merge_requests/user_posts_notes_spec.rb b/spec/features/merge_requests/user_posts_notes_spec.rb new file mode 100644 index 00000000000..7fc0e2ce6ec --- /dev/null +++ b/spec/features/merge_requests/user_posts_notes_spec.rb @@ -0,0 +1,146 @@ +require 'spec_helper' + +describe 'Merge requests > User posts notes', :js do + let(:project) { create(:project) } + let(:merge_request) do + create(:merge_request, source_project: 
project, target_project: project) + end + let!(:note) do + create(:note_on_merge_request, :with_attachment, noteable: merge_request, + project: project) + end + + before do + login_as :admin + visit namespace_project_merge_request_path(project.namespace, project, merge_request) + end + + subject { page } + + describe 'the note form' do + it 'is valid' do + is_expected.to have_css('.js-main-target-form', visible: true, count: 1) + expect(find('.js-main-target-form .js-comment-button').value). + to eq('Comment') + page.within('.js-main-target-form') do + expect(page).not_to have_link('Cancel') + end + end + + describe 'with text' do + before do + page.within('.js-main-target-form') do + fill_in 'note[note]', with: 'This is awesome' + end + end + + it 'has enable submit button and preview button' do + page.within('.js-main-target-form') do + expect(page).not_to have_css('.js-comment-button[disabled]') + expect(page).to have_css('.js-md-preview-button', visible: true) + end + end + end + end + + describe 'when posting a note' do + before do + page.within('.js-main-target-form') do + fill_in 'note[note]', with: 'This is awesome!' + find('.js-md-preview-button').click + click_button 'Comment' + end + end + + it 'is added and form reset' do + is_expected.to have_content('This is awesome!') + page.within('.js-main-target-form') do + expect(page).to have_no_field('note[note]', with: 'This is awesome!') + expect(page).to have_css('.js-md-preview', visible: :hidden) + end + page.within('.js-main-target-form') do + is_expected.to have_css('.js-note-text', visible: true) + end + end + end + + describe 'when editing a note' do + it 'there should be a hidden edit form' do + is_expected.to have_css('.note-edit-form:not(.mr-note-edit-form)', visible: false, count: 1) + is_expected.to have_css('.note-edit-form.mr-note-edit-form', visible: false, count: 1) + end + + describe 'editing the note' do + before do + find('.note').hover + find('.js-note-edit').click + end + + it 'shows the note edit form and hide the note body' do + page.within("#note_#{note.id}") do + expect(find('.current-note-edit-form', visible: true)).to be_visible + expect(find('.note-edit-form', visible: true)).to be_visible + expect(find(:css, '.note-body > .note-text', visible: false)).not_to be_visible + end + end + + it 'resets the edit note form textarea with the original content of the note if cancelled' do + within('.current-note-edit-form') do + fill_in 'note[note]', with: 'Some new content' + find('.btn-cancel').click + expect(find('.js-note-text', visible: false).text).to eq '' + end + end + + it 'allows using markdown buttons after saving a note and then trying to edit it again' do + page.within('.current-note-edit-form') do + fill_in 'note[note]', with: 'This is the new content' + find('.btn-save').click + end + + wait_for_ajax + find('.note').hover + find('.js-note-edit').click + + page.within('.current-note-edit-form') do + expect(find('#note_note').value).to eq('This is the new content') + find('.js-md:first-child').click + expect(find('#note_note').value).to eq('This is the new content****') + end + end + + it 'appends the edited at time to the note' do + page.within('.current-note-edit-form') do + fill_in 'note[note]', with: 'Some new content' + find('.btn-save').click + end + + page.within("#note_#{note.id}") do + is_expected.to have_css('.note_edited_ago') + expect(find('.note_edited_ago').text). 
+ to match(/less than a minute ago/) + end + end + end + + describe 'deleting an attachment' do + before do + find('.note').hover + find('.js-note-edit').click + end + + it 'shows the delete link' do + page.within('.note-attachment') do + is_expected.to have_css('.js-note-attachment-delete') + end + end + + it 'removes the attachment div and resets the edit form' do + find('.js-note-attachment-delete').click + is_expected.not_to have_css('.note-attachment') + is_expected.not_to have_css('.current-note-edit-form') + wait_for_ajax + end + end + end +end diff --git a/spec/features/merge_requests/user_sees_system_notes_spec.rb b/spec/features/merge_requests/user_sees_system_notes_spec.rb new file mode 100644 index 00000000000..55d0f9d728c --- /dev/null +++ b/spec/features/merge_requests/user_sees_system_notes_spec.rb @@ -0,0 +1,31 @@ +require 'spec_helper' + +feature 'Merge requests > User sees system notes' do + let(:public_project) { create(:project, :public) } + let(:private_project) { create(:project, :private) } + let(:issue) { create(:issue, project: private_project) } + let(:merge_request) { create(:merge_request, source_project: public_project, source_branch: 'markdown') } + let!(:note) { create(:note_on_merge_request, :system, noteable: merge_request, project: public_project, note: "mentioned in #{issue.to_reference(public_project)}") } + + context 'when logged-in as a member of the private project' do + before do + user = create(:user) + private_project.add_developer(user) + login_as(user) + end + + it 'shows the system note' do + visit namespace_project_merge_request_path(public_project.namespace, public_project, merge_request) + + expect(page).to have_css('.system-note') + end + end + + context 'when not logged-in' do + it 'hides the system note' do + visit namespace_project_merge_request_path(public_project.namespace, public_project, merge_request) + + expect(page).not_to have_css('.system-note') + end + end +end diff --git a/spec/features/merge_requests/user_uses_slash_commands_spec.rb b/spec/features/merge_requests/user_uses_slash_commands_spec.rb index a1f4eb2688b..f0ad57eb92f 100644 --- a/spec/features/merge_requests/user_uses_slash_commands_spec.rb +++ b/spec/features/merge_requests/user_uses_slash_commands_spec.rb @@ -2,7 +2,6 @@ require 'rails_helper' feature 'Merge Requests > User uses slash commands', feature: true, js: true do include SlashCommandsHelpers - include WaitForAjax let(:user) { create(:user) } let(:project) { create(:project, :public) } @@ -161,6 +160,7 @@ feature 'Merge Requests > User uses slash commands', feature: true, js: true do it 'changes target branch from a note' do write_note("message start \n/target_branch merge-test\n message end.") + wait_for_ajax expect(page).not_to have_content('/target_branch') expect(page).to have_content('message start') expect(page).to have_content('message end.') diff --git a/spec/features/merge_requests/merge_request_versions_spec.rb b/spec/features/merge_requests/versions_spec.rb index 04e85ed3f73..2b5b803946c 100644 --- a/spec/features/merge_requests/merge_request_versions_spec.rb +++ b/spec/features/merge_requests/versions_spec.rb @@ -24,7 +24,12 @@ feature 'Merge Request versions', js: true, feature: true do before do page.within '.mr-version-dropdown' do find('.btn-default').click - find(:link, 'version 1').trigger('click') + click_link 'version 1' + end + + # Wait for the page to load + page.within '.mr-version-dropdown' do + expect(page).to have_content 'version 1' end end @@ -36,8 +41,44 @@ feature 'Merge Request 
versions', js: true, feature: true do expect(page).to have_content '5 changed files' end - it 'show the message about disabled comments' do - expect(page).to have_content 'Comments are disabled' + it 'show the message about comments' do + expect(page).to have_content 'Not all comments are displayed' + end + + it 'shows comments that were last relevant at that version' do + position = Gitlab::Diff::Position.new( + old_path: ".gitmodules", + new_path: ".gitmodules", + old_line: nil, + new_line: 4, + diff_refs: merge_request_diff1.diff_refs + ) + outdated_diff_note = create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: position) + outdated_diff_note.position = outdated_diff_note.original_position + outdated_diff_note.save! + + visit current_url + + expect(page).to have_css(".diffs .notes[data-discussion-id='#{outdated_diff_note.discussion_id}']") + end + + it 'allows commenting' do + diff_file_selector = ".diff-file[id='7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44']" + line_code = '7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44_2_2' + + page.within(diff_file_selector) do + find(".line_holder[id='#{line_code}'] td:nth-of-type(1)").trigger 'mouseover' + find(".line_holder[id='#{line_code}'] button").trigger 'click' + + page.within("form[data-line-code='#{line_code}']") do + fill_in "note[note]", with: "Typo, please fix" + find(".js-comment-button").click + end + + wait_for_ajax + + expect(page).to have_content("Typo, please fix") + end end end @@ -45,7 +86,12 @@ feature 'Merge Request versions', js: true, feature: true do before do page.within '.mr-version-compare-dropdown' do find('.btn-default').click - find(:link, 'version 1').trigger('click') + click_link 'version 1' + end + + # Wait for the page to load + page.within '.mr-version-compare-dropdown' do + expect(page).to have_content 'version 1' end end @@ -65,8 +111,43 @@ feature 'Merge Request versions', js: true, feature: true do end end - it 'show the message about disabled comments' do - expect(page).to have_content 'Comments are disabled' + it 'show the message about comments' do + expect(page).to have_content 'Not all comments are displayed' + end + + it 'shows comments that were last relevant at that version' do + position = Gitlab::Diff::Position.new( + old_path: ".gitmodules", + new_path: ".gitmodules", + old_line: 4, + new_line: 4, + diff_refs: merge_request_diff3.compare_with(merge_request_diff1.head_commit_sha).diff_refs + ) + outdated_diff_note = create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: position) + + visit current_url + wait_for_ajax + + expect(page).to have_css(".diffs .notes[data-discussion-id='#{outdated_diff_note.discussion_id}']") + end + + it 'allows commenting' do + diff_file_selector = ".diff-file[id='7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44']" + line_code = '7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44_4_4' + + page.within(diff_file_selector) do + find(".line_holder[id='#{line_code}'] td:nth-of-type(1)").trigger 'mouseover' + find(".line_holder[id='#{line_code}'] button").trigger 'click' + + page.within("form[data-line-code='#{line_code}']") do + fill_in "note[note]", with: "Typo, please fix" + find(".js-comment-button").click + end + + wait_for_ajax + + expect(page).to have_content("Typo, please fix") + end end it 'show diff between new and old version' do @@ -92,14 +173,13 @@ feature 'Merge Request versions', js: true, feature: true do it 'should have 0 chages between versions' do page.within '.mr-version-compare-dropdown' do - expect(page).to 
have_content 'version 1' + expect(find('.dropdown-toggle')).to have_content 'version 1' end page.within '.mr-version-dropdown' do find('.btn-default').click - find(:link, 'version 1').trigger('click') + click_link 'version 1' end - expect(page).to have_content '0 changed files' end end @@ -114,12 +194,12 @@ feature 'Merge Request versions', js: true, feature: true do it 'should set the compared versions to be the same' do page.within '.mr-version-compare-dropdown' do - expect(page).to have_content 'version 2' + expect(find('.dropdown-toggle')).to have_content 'version 2' end page.within '.mr-version-dropdown' do find('.btn-default').click - find(:link, 'version 1').trigger('click') + click_link 'version 1' end page.within '.mr-version-compare-dropdown' do diff --git a/spec/features/merge_requests/widget_deployments_spec.rb b/spec/features/merge_requests/widget_deployments_spec.rb index 6676821b807..8370499f6ed 100644 --- a/spec/features/merge_requests/widget_deployments_spec.rb +++ b/spec/features/merge_requests/widget_deployments_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'Widget Deployments Header', feature: true, js: true do - include WaitForAjax - describe 'when deployed to an environment' do given(:user) { create(:user) } given(:project) { merge_request.target_project } @@ -23,7 +21,7 @@ feature 'Widget Deployments Header', feature: true, js: true do wait_for_ajax expect(page).to have_content("Deployed to #{environment.name}") - expect(find('.ci_widget > span > span')['data-title']).to eq(deployment.created_at.to_time.in_time_zone.to_s(:medium)) + expect(find('.js-deploy-time')['data-title']).to eq(deployment.created_at.to_time.in_time_zone.to_s(:medium)) end context 'with stop action' do @@ -40,11 +38,11 @@ feature 'Widget Deployments Header', feature: true, js: true do end scenario 'does show stop button' do - expect(page).to have_link('Stop environment') + expect(page).to have_button('Stop environment') end scenario 'does start build when stop button clicked' do - click_link('Stop environment') + click_button('Stop environment') expect(page).to have_content('close_app') end @@ -53,7 +51,7 @@ feature 'Widget Deployments Header', feature: true, js: true do given(:role) { :reporter } scenario 'does not show stop button' do - expect(page).not_to have_link('Stop environment') + expect(page).not_to have_button('Stop environment') end end end diff --git a/spec/features/merge_requests/widget_spec.rb b/spec/features/merge_requests/widget_spec.rb index c2db7d8da3c..ae799584c0f 100644 --- a/spec/features/merge_requests/widget_spec.rb +++ b/spec/features/merge_requests/widget_spec.rb @@ -1,8 +1,6 @@ require 'rails_helper' describe 'Merge request', :feature, :js do - include WaitForAjax - let(:user) { create(:user) } let(:project) { create(:project) } let(:merge_request) { create(:merge_request, source_project: project) } @@ -32,6 +30,7 @@ describe 'Merge request', :feature, :js do wait_for_ajax expect(page).to have_selector('.accept-merge-request') + expect(find('.accept-merge-request')['disabled']).not_to be(true) end end @@ -53,14 +52,15 @@ describe 'Merge request', :feature, :js do page.within('.mr-widget-heading') do expect(page).to have_content("Deployed to #{environment.name}") - expect(find('.js-environment-link')[:href]).to include(environment.formatted_external_url) + expect(find('.js-deploy-url')[:href]).to include(environment.formatted_external_url) end end it 'shows green accept merge request button' do # Wait for the `ci_status` and `merge_check` requests wait_for_ajax 
- expect(page).to have_selector('.accept-merge-request.btn-create') + expect(page).to have_selector('.accept-merge-request') + expect(find('.accept-merge-request')['disabled']).not_to be(true) end end @@ -91,6 +91,8 @@ describe 'Merge request', :feature, :js do statuses: [commit_status]) create(:ci_build, :pending, pipeline: pipeline) + merge_request.update(head_pipeline: pipeline) + visit namespace_project_merge_request_path(project.namespace, project, merge_request) end @@ -103,10 +105,15 @@ describe 'Merge request', :feature, :js do context 'when merge request is in the blocked pipeline state' do before do - create(:ci_pipeline, project: project, - sha: merge_request.diff_head_sha, - ref: merge_request.source_branch, - status: :manual) + pipeline = create( + :ci_pipeline, + project: project, + sha: merge_request.diff_head_sha, + ref: merge_request.source_branch, + status: :manual + ) + + merge_request.update(head_pipeline: pipeline) visit namespace_project_merge_request_path(project.namespace, project, @@ -131,13 +138,57 @@ describe 'Merge request', :feature, :js do statuses: [commit_status]) create(:ci_build, :pending, pipeline: pipeline) + merge_request.update(head_pipeline: pipeline) + visit namespace_project_merge_request_path(project.namespace, project, merge_request) end it 'has info button when MWBS button' do # Wait for the `ci_status` and `merge_check` requests wait_for_ajax - expect(page).to have_selector('.merge-when-pipeline-succeeds.btn-info') + expect(page).to have_selector('.accept-merge-request.btn-info') + end + end + + context 'view merge request with MWPS enabled but automatically merge fails' do + before do + merge_request.update( + merge_when_pipeline_succeeds: true, + merge_user: merge_request.author, + merge_error: 'Something went wrong' + ) + + visit namespace_project_merge_request_path(project.namespace, project, merge_request) + end + + it 'shows information about the merge error' do + # Wait for the `ci_status` and `merge_check` requests + wait_for_ajax + + page.within('.mr-widget-body') do + expect(page).to have_content('Something went wrong') + end + end + end + + context 'view merge request with MWPS enabled but automatically merge fails' do + before do + merge_request.update( + merge_when_pipeline_succeeds: true, + merge_user: merge_request.author, + merge_error: 'Something went wrong' + ) + + visit namespace_project_merge_request_path(project.namespace, project, merge_request) + end + + it 'shows information about the merge error' do + # Wait for the `ci_status` and `merge_check` requests + wait_for_ajax + + page.within('.mr-widget-body') do + expect(page).to have_content('Something went wrong') + end end end @@ -145,11 +196,11 @@ describe 'Merge request', :feature, :js do before do allow_any_instance_of(Repository).to receive(:merge).and_return(false) visit namespace_project_merge_request_path(project.namespace, project, merge_request) - click_button 'Accept Merge Request' - wait_for_ajax end it 'updates the MR widget' do + click_button 'Merge' + page.within('.mr-widget-body') do expect(page).to have_content('Conflicts detected during merge') end diff --git a/spec/features/milestone_spec.rb b/spec/features/milestone_spec.rb index c3297de709a..c07de01c594 100644 --- a/spec/features/milestone_spec.rb +++ b/spec/features/milestone_spec.rb @@ -1,8 +1,6 @@ require 'rails_helper' feature 'Milestone', feature: true do - include WaitForAjax - let(:project) { create(:empty_project, :public) } let(:user) { create(:user) } diff --git 
a/spec/features/milestones/milestones_spec.rb b/spec/features/milestones/milestones_spec.rb index 8de9942c54e..9eec3d7f270 100644 --- a/spec/features/milestones/milestones_spec.rb +++ b/spec/features/milestones/milestones_spec.rb @@ -1,7 +1,6 @@ require 'rails_helper' describe 'Milestone draggable', feature: true, js: true do - include WaitForAjax include DragTo let(:milestone) { create(:milestone, project: project, title: 8.14) } @@ -76,6 +75,7 @@ describe 'Milestone draggable', feature: true, js: true do create(:issue, params.merge(title: 'Foo', project: project, milestone: milestone)) visit namespace_project_milestone_path(project.namespace, project, milestone) + scroll_into_view('.milestone-content') drag_to(selector: '.issues-sortable-list', list_to_index: 1) wait_for_ajax @@ -86,8 +86,16 @@ describe 'Milestone draggable', feature: true, js: true do visit namespace_project_milestone_path(project.namespace, project, milestone) page.find("a[href='#tab-merge-requests']").click + + wait_for_ajax + + scroll_into_view('.milestone-content') drag_to(selector: '.merge_requests-sortable-list', list_to_index: 1) wait_for_ajax end + + def scroll_into_view(selector) + page.evaluate_script("document.querySelector('#{selector}').scrollIntoView();") + end end diff --git a/spec/features/milestones/show_spec.rb b/spec/features/milestones/show_spec.rb index 40b4dc63697..227eb04ba72 100644 --- a/spec/features/milestones/show_spec.rb +++ b/spec/features/milestones/show_spec.rb @@ -5,7 +5,7 @@ describe 'Milestone show', feature: true do let(:project) { create(:empty_project) } let(:milestone) { create(:milestone, project: project) } let(:labels) { create_list(:label, 2, project: project) } - let(:issue_params) { { project: project, assignee: user, author: user, milestone: milestone, labels: labels } } + let(:issue_params) { { project: project, assignees: [user], author: user, milestone: milestone, labels: labels } } before do project.add_user(user, :developer) diff --git a/spec/features/notes_on_merge_requests_spec.rb b/spec/features/notes_on_merge_requests_spec.rb deleted file mode 100644 index fab2d532e06..00000000000 --- a/spec/features/notes_on_merge_requests_spec.rb +++ /dev/null @@ -1,285 +0,0 @@ -require 'spec_helper' - -describe 'Comments', feature: true do - include RepoHelpers - include WaitForAjax - - describe 'On a merge request', js: true, feature: true do - let!(:project) { create(:project) } - let!(:merge_request) do - create(:merge_request, source_project: project, target_project: project) - end - - let!(:note) do - create(:note_on_merge_request, :with_attachment, noteable: merge_request, - project: project) - end - - before do - login_as :admin - visit namespace_project_merge_request_path(project.namespace, project, merge_request) - end - - subject { page } - - describe 'the note form' do - it 'is valid' do - is_expected.to have_css('.js-main-target-form', visible: true, count: 1) - expect(find('.js-main-target-form input[type=submit]').value). 
- to eq('Comment') - page.within('.js-main-target-form') do - expect(page).not_to have_link('Cancel') - end - end - - describe 'with text' do - before do - page.within('.js-main-target-form') do - fill_in 'note[note]', with: 'This is awesome' - end - end - - it 'has enable submit button and preview button' do - page.within('.js-main-target-form') do - expect(page).not_to have_css('.js-comment-button[disabled]') - expect(page).to have_css('.js-md-preview-button', visible: true) - end - end - end - end - - describe 'when posting a note' do - before do - page.within('.js-main-target-form') do - fill_in 'note[note]', with: 'This is awsome!' - find('.js-md-preview-button').click - click_button 'Comment' - end - end - - it 'is added and form reset' do - is_expected.to have_content('This is awsome!') - page.within('.js-main-target-form') do - expect(page).to have_no_field('note[note]', with: 'This is awesome!') - expect(page).to have_css('.js-md-preview', visible: :hidden) - end - page.within('.js-main-target-form') do - is_expected.to have_css('.js-note-text', visible: true) - end - end - end - - describe 'when editing a note', js: true do - it 'there should be a hidden edit form' do - is_expected.to have_css('.note-edit-form:not(.mr-note-edit-form)', visible: false, count: 1) - is_expected.to have_css('.note-edit-form.mr-note-edit-form', visible: false, count: 1) - end - - describe 'editing the note' do - before do - find('.note').hover - find('.js-note-edit').click - end - - it 'shows the note edit form and hide the note body' do - page.within("#note_#{note.id}") do - expect(find('.current-note-edit-form', visible: true)).to be_visible - expect(find('.note-edit-form', visible: true)).to be_visible - expect(find(:css, '.note-body > .note-text', visible: false)).not_to be_visible - end - end - - it 'resets the edit note form textarea with the original content of the note if cancelled' do - within('.current-note-edit-form') do - fill_in 'note[note]', with: 'Some new content' - find('.btn-cancel').click - expect(find('.js-note-text', visible: false).text).to eq '' - end - end - - it 'allows using markdown buttons after saving a note and then trying to edit it again' do - page.within('.current-note-edit-form') do - fill_in 'note[note]', with: 'This is the new content' - find('.btn-save').click - end - - find('.note').hover - find('.js-note-edit').click - - page.within('.current-note-edit-form') do - expect(find('#note_note').value).to eq('This is the new content') - find('.js-md:first-child').click - expect(find('#note_note').value).to eq('This is the new content****') - end - end - - it 'appends the edited at time to the note' do - page.within('.current-note-edit-form') do - fill_in 'note[note]', with: 'Some new content' - find('.btn-save').click - end - - page.within("#note_#{note.id}") do - is_expected.to have_css('.note_edited_ago') - expect(find('.note_edited_ago').text). 
- to match(/less than a minute ago/) - end - end - end - - describe 'deleting an attachment' do - before do - find('.note').hover - find('.js-note-edit').click - end - - it 'shows the delete link' do - page.within('.note-attachment') do - is_expected.to have_css('.js-note-attachment-delete') - end - end - - it 'removes the attachment div and resets the edit form' do - find('.js-note-attachment-delete').click - is_expected.not_to have_css('.note-attachment') - is_expected.not_to have_css('.current-note-edit-form') - wait_for_ajax - end - end - end - end - - describe 'Handles cross-project system notes', js: true, feature: true do - let(:user) { create(:user) } - let(:project) { create(:project, :public) } - let(:project2) { create(:project, :private) } - let(:issue) { create(:issue, project: project2) } - let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'markdown') } - let!(:note) { create(:note_on_merge_request, :system, noteable: merge_request, project: project, note: "mentioned in #{issue.to_reference(project)}") } - - it 'shows the system note' do - login_as :admin - visit namespace_project_merge_request_path(project.namespace, project, merge_request) - - expect(page).to have_css('.system-note') - end - - it 'hides redacted system note' do - visit namespace_project_merge_request_path(project.namespace, project, merge_request) - - expect(page).not_to have_css('.system-note') - end - end - - describe 'On a merge request diff', js: true, feature: true do - let(:merge_request) { create(:merge_request) } - let(:project) { merge_request.source_project } - - before do - login_as :admin - visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request) - end - - subject { page } - - describe 'when adding a note' do - before do - click_diff_line - end - - describe 'the notes holder' do - it { is_expected.to have_css('.js-temp-notes-holder') } - - it 'has .new_note css class' do - page.within('.js-temp-notes-holder') do - expect(subject).to have_css('.new-note') - end - end - end - - describe 'the note form' do - it "does not add a second form for same row" do - click_diff_line - - is_expected. 
- to have_css("form[data-line-code='#{line_code}']", - count: 1) - end - - it 'is removed when canceled' do - is_expected.to have_css('.js-temp-notes-holder') - - page.within("form[data-line-code='#{line_code}']") do - find('.js-close-discussion-note-form').trigger('click') - end - - is_expected.to have_no_css('.js-temp-notes-holder') - end - end - end - - describe 'with muliple note forms' do - before do - click_diff_line - click_diff_line(line_code_2) - end - - it { is_expected.to have_css('.js-temp-notes-holder', count: 2) } - - describe 'previewing them separately' do - before do - # add two separate texts and trigger previews on both - page.within("tr[id='#{line_code}'] + .js-temp-notes-holder") do - fill_in 'note[note]', with: 'One comment on line 7' - find('.js-md-preview-button').click - end - page.within("tr[id='#{line_code_2}'] + .js-temp-notes-holder") do - fill_in 'note[note]', with: 'Another comment on line 10' - find('.js-md-preview-button').click - end - end - end - - describe 'posting a note' do - before do - page.within("tr[id='#{line_code_2}'] + .js-temp-notes-holder") do - fill_in 'note[note]', with: 'Another comment on line 10' - click_button('Comment') - end - end - - it 'adds as discussion' do - is_expected.to have_content('Another comment on line 10') - is_expected.to have_css('.notes_holder') - is_expected.to have_css('.notes_holder .note', count: 1) - is_expected.to have_button('Reply...') - end - - it 'adds code to discussion' do - click_button 'Reply...' - - page.within(first('.js-discussion-note-form')) do - fill_in 'note[note]', with: '```{{ test }}```' - - click_button('Comment') - end - - expect(page).to have_content('{{ test }}') - end - end - end - end - - def line_code - sample_compare.changes.first[:line_code] - end - - def line_code_2 - sample_compare.changes.last[:line_code] - end - - def click_diff_line(data = line_code) - find(".line_holder[id='#{data}'] td.line_content").hover - find(".line_holder[id='#{data}'] button").trigger('click') - end -end diff --git a/spec/features/participants_autocomplete_spec.rb b/spec/features/participants_autocomplete_spec.rb index decad589c23..449ce80bc71 100644 --- a/spec/features/participants_autocomplete_spec.rb +++ b/spec/features/participants_autocomplete_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' feature 'Member autocomplete', :js do - let(:project) { create(:project, :public) } + let(:project) { create(:empty_project, :public) } let(:user) { create(:user) } let(:author) { create(:user) } let(:note) { create(:note, noteable: noteable, project: noteable.project) } @@ -36,6 +36,7 @@ feature 'Member autocomplete', :js do end context 'adding a new note on a Merge Request' do + let(:project) { create(:project, :public, :repository) } let(:noteable) do create(:merge_request, source_project: project, target_project: project, author: author) @@ -48,6 +49,7 @@ feature 'Member autocomplete', :js do end context 'adding a new note on a Commit' do + let(:project) { create(:project, :public, :repository) } let(:noteable) { project.commit } let(:note) { create(:note_on_commit, project: project, commit_id: project.commit.id) } diff --git a/spec/features/profiles/account_spec.rb b/spec/features/profiles/account_spec.rb new file mode 100644 index 00000000000..05a7587f8d4 --- /dev/null +++ b/spec/features/profiles/account_spec.rb @@ -0,0 +1,59 @@ +require 'rails_helper' + +feature 'Profile > Account', feature: true do + given(:user) { create(:user, username: 'foo') } + + before do + login_as(user) + end + + describe 'Change 
username' do + given(:new_username) { 'bar' } + given(:new_user_path) { "/#{new_username}" } + given(:old_user_path) { "/#{user.username}" } + + scenario 'the user is accessible via the new path' do + update_username(new_username) + visit new_user_path + expect(current_path).to eq(new_user_path) + expect(find('.user-info')).to have_content(new_username) + end + + scenario 'the old user path redirects to the new path' do + update_username(new_username) + visit old_user_path + expect(current_path).to eq(new_user_path) + expect(find('.user-info')).to have_content(new_username) + end + + context 'with a project' do + given!(:project) { create(:project, namespace: user.namespace, path: 'project') } + given(:new_project_path) { "/#{new_username}/#{project.path}" } + given(:old_project_path) { "/#{user.username}/#{project.path}" } + + before(:context) { TestEnv.clean_test_path } + after(:example) { TestEnv.clean_test_path } + + scenario 'the project is accessible via the new path' do + update_username(new_username) + visit new_project_path + expect(current_path).to eq(new_project_path) + expect(find('h1.project-title')).to have_content(project.name) + end + + scenario 'the old project path redirects to the new path' do + update_username(new_username) + visit old_project_path + expect(current_path).to eq(new_project_path) + expect(find('h1.project-title')).to have_content(project.name) + end + end + end +end + +def update_username(new_username) + allow(user.namespace).to receive(:move_dir) + visit profile_account_path + fill_in 'user_username', with: new_username + click_button 'Update username' +end diff --git a/spec/features/profiles/personal_access_tokens_spec.rb b/spec/features/profiles/personal_access_tokens_spec.rb index 0917d4dc3ef..27a20e78a43 100644 --- a/spec/features/profiles/personal_access_tokens_spec.rb +++ b/spec/features/profiles/personal_access_tokens_spec.rb @@ -27,7 +27,7 @@ describe 'Profile > Personal Access Tokens', feature: true, js: true do describe "token creation" do it "allows creation of a personal access token" do - name = FFaker::Product.brand + name = 'My PAT' visit profile_personal_access_tokens_path fill_in "Name", with: name @@ -41,7 +41,7 @@ describe 'Profile > Personal Access Tokens', feature: true, js: true do check "api" check "read_user" - click_on "Create Personal Access Token" + click_on "Create personal access token" expect(active_personal_access_tokens).to have_text(name) expect(active_personal_access_tokens).to have_text('In') expect(active_personal_access_tokens).to have_text('api') @@ -52,9 +52,9 @@ describe 'Profile > Personal Access Tokens', feature: true, js: true do it "displays an error message" do disallow_personal_access_token_saves! 
visit profile_personal_access_tokens_path - fill_in "Name", with: FFaker::Product.brand + fill_in "Name", with: 'My PAT' - expect { click_on "Create Personal Access Token" }.not_to change { PersonalAccessToken.count } + expect { click_on "Create personal access token" }.not_to change { PersonalAccessToken.count } expect(page).to have_content("Name cannot be nil") end end diff --git a/spec/features/profiles/preferences_spec.rb b/spec/features/profiles/preferences_spec.rb index 15c8677fcd3..d368bc4d753 100644 --- a/spec/features/profiles/preferences_spec.rb +++ b/spec/features/profiles/preferences_spec.rb @@ -44,7 +44,7 @@ describe 'Profile > Preferences', feature: true do expect(page.current_path).to eq starred_dashboard_projects_path end - click_link 'Your projects' + find('.shortcuts-activity').trigger('click') expect(page).not_to have_content("You don't have starred projects yet") expect(page.current_path).to eq dashboard_projects_path diff --git a/spec/features/projects/artifacts/file_spec.rb b/spec/features/projects/artifacts/file_spec.rb new file mode 100644 index 00000000000..74308a7e8dd --- /dev/null +++ b/spec/features/projects/artifacts/file_spec.rb @@ -0,0 +1,59 @@ +require 'spec_helper' + +feature 'Artifact file', :js, feature: true do + let(:project) { create(:project, :public) } + let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.sha, ref: 'master') } + let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) } + + def visit_file(path) + visit file_namespace_project_build_artifacts_path(project.namespace, project, build, path) + end + + context 'Text file' do + before do + visit_file('other_artifacts_0.1.2/doc_sample.txt') + + wait_for_ajax + end + + it 'displays an error' do + aggregate_failures do + # shows an error message + expect(page).to have_content('The source could not be displayed because it is stored as a job artifact. 
You can download it instead.') + + # does not show a viewer switcher + expect(page).not_to have_selector('.js-blob-viewer-switcher') + + # does not show a copy button + expect(page).not_to have_selector('.js-copy-blob-source-btn') + + # shows a download button + expect(page).to have_link('Download') + end + end + end + + context 'JPG file' do + before do + visit_file('rails_sample.jpg') + + wait_for_ajax + end + + it 'displays the blob' do + aggregate_failures do + # shows rendered image + expect(page).to have_selector('.image_file img') + + # does not show a viewer switcher + expect(page).not_to have_selector('.js-blob-viewer-switcher') + + # does not show a copy button + expect(page).not_to have_selector('.js-copy-blob-source-btn') + + # shows a download button + expect(page).to have_link('Download') + end + end + end +end diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb new file mode 100644 index 00000000000..fc242082278 --- /dev/null +++ b/spec/features/projects/blobs/blob_show_spec.rb @@ -0,0 +1,466 @@ +require 'spec_helper' + +feature 'File blob', :js, feature: true do + let(:project) { create(:project, :public) } + + def visit_blob(path, fragment = nil) + visit namespace_project_blob_path(project.namespace, project, File.join('master', path), anchor: fragment) + + wait_for_ajax + end + + context 'Ruby file' do + before do + visit_blob('files/ruby/popen.rb') + end + + it 'displays the blob' do + aggregate_failures do + # shows highlighted Ruby code + expect(page).to have_content("require 'fileutils'") + + # does not show a viewer switcher + expect(page).not_to have_selector('.js-blob-viewer-switcher') + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + + # shows a raw button + expect(page).to have_link('Open raw') + end + end + end + + context 'Markdown file' do + context 'visiting directly' do + before do + visit_blob('files/markdown/ruby-style-guide.md') + end + + it 'displays the blob using the rich viewer' do + aggregate_failures do + # hides the simple viewer + expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false) + expect(page).to have_selector('.blob-viewer[data-type="rich"]') + + # shows rendered Markdown + expect(page).to have_link("PEP-8") + + # shows a viewer switcher + expect(page).to have_selector('.js-blob-viewer-switcher') + + # shows a disabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn.disabled') + + # shows a raw button + expect(page).to have_link('Open raw') + end + end + + context 'switching to the simple viewer' do + before do + find('.js-blob-viewer-switch-btn[data-viewer=simple]').click + + wait_for_ajax + end + + it 'displays the blob using the simple viewer' do + aggregate_failures do + # hides the rich viewer + expect(page).to have_selector('.blob-viewer[data-type="simple"]') + expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false) + + # shows highlighted Markdown code + expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)") + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + end + end + + context 'switching to the rich viewer again' do + before do + find('.js-blob-viewer-switch-btn[data-viewer=rich]').click + + wait_for_ajax + end + + it 'displays the blob using the rich viewer' do + aggregate_failures do + # hides the simple viewer + expect(page).to 
have_selector('.blob-viewer[data-type="simple"]', visible: false) + expect(page).to have_selector('.blob-viewer[data-type="rich"]') + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + end + end + end + end + end + + context 'visiting with a line number anchor' do + before do + visit_blob('files/markdown/ruby-style-guide.md', 'L1') + end + + it 'displays the blob using the simple viewer' do + aggregate_failures do + # hides the rich viewer + expect(page).to have_selector('.blob-viewer[data-type="simple"]') + expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false) + + # highlights the line in question + expect(page).to have_selector('#LC1.hll') + + # shows highlighted Markdown code + expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)") + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + end + end + end + end + + context 'Markdown file (stored in LFS)' do + before do + project.add_master(project.creator) + + Files::CreateService.new( + project, + project.creator, + start_branch: 'master', + branch_name: 'master', + commit_message: "Add Markdown in LFS", + file_path: 'files/lfs/file.md', + file_content: project.repository.blob_at('master', 'files/lfs/lfs_object.iso').data + ).execute + end + + context 'when LFS is enabled on the project' do + before do + allow(Gitlab.config.lfs).to receive(:enabled).and_return(true) + project.update_attribute(:lfs_enabled, true) + + visit_blob('files/lfs/file.md') + end + + it 'displays an error' do + aggregate_failures do + # hides the simple viewer + expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false) + expect(page).to have_selector('.blob-viewer[data-type="rich"]') + + # shows an error message + expect(page).to have_content('The rendered file could not be displayed because it is stored in LFS. You can download it instead.') + + # shows a viewer switcher + expect(page).to have_selector('.js-blob-viewer-switcher') + + # does not show a copy button + expect(page).not_to have_selector('.js-copy-blob-source-btn') + + # shows a download button + expect(page).to have_link('Download') + end + end + + context 'switching to the simple viewer' do + before do + find('.js-blob-viewer-switcher .js-blob-viewer-switch-btn[data-viewer=simple]').click + + wait_for_ajax + end + + it 'displays an error' do + aggregate_failures do + # hides the rich viewer + expect(page).to have_selector('.blob-viewer[data-type="simple"]') + expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false) + + # shows an error message + expect(page).to have_content('The source could not be displayed because it is stored in LFS. 
You can download it instead.') + + # does not show a copy button + expect(page).not_to have_selector('.js-copy-blob-source-btn') + end + end + end + end + + context 'when LFS is disabled on the project' do + before do + visit_blob('files/lfs/file.md') + end + + it 'displays the blob' do + aggregate_failures do + # shows text + expect(page).to have_content('size 1575078') + + # does not show a viewer switcher + expect(page).not_to have_selector('.js-blob-viewer-switcher') + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + + # shows a raw button + expect(page).to have_link('Open raw') + end + end + end + end + + context 'PDF file' do + before do + project.add_master(project.creator) + + Files::CreateService.new( + project, + project.creator, + start_branch: 'master', + branch_name: 'master', + commit_message: "Add PDF", + file_path: 'files/test.pdf', + file_content: project.repository.blob_at('add-pdf-file', 'files/pdf/test.pdf').data + ).execute + + visit_blob('files/test.pdf') + end + + it 'displays the blob' do + aggregate_failures do + # shows rendered PDF + expect(page).to have_selector('.js-pdf-viewer') + + # does not show a viewer switcher + expect(page).not_to have_selector('.js-blob-viewer-switcher') + + # does not show a copy button + expect(page).not_to have_selector('.js-copy-blob-source-btn') + + # shows a download button + expect(page).to have_link('Download') + end + end + end + + context 'ISO file (stored in LFS)' do + context 'when LFS is enabled on the project' do + before do + allow(Gitlab.config.lfs).to receive(:enabled).and_return(true) + project.update_attribute(:lfs_enabled, true) + + visit_blob('files/lfs/lfs_object.iso') + end + + it 'displays the blob' do + aggregate_failures do + # shows a download link + expect(page).to have_link('Download (1.5 MB)') + + # does not show a viewer switcher + expect(page).not_to have_selector('.js-blob-viewer-switcher') + + # does not show a copy button + expect(page).not_to have_selector('.js-copy-blob-source-btn') + + # shows a download button + expect(page).to have_link('Download') + end + end + end + + context 'when LFS is disabled on the project' do + before do + visit_blob('files/lfs/lfs_object.iso') + end + + it 'displays the blob' do + aggregate_failures do + # shows text + expect(page).to have_content('size 1575078') + + # does not show a viewer switcher + expect(page).not_to have_selector('.js-blob-viewer-switcher') + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + + # shows a raw button + expect(page).to have_link('Open raw') + end + end + end + end + + context 'ZIP file' do + before do + visit_blob('Gemfile.zip') + end + + it 'displays the blob' do + aggregate_failures do + # shows a download link + expect(page).to have_link('Download (2.11 KB)') + + # does not show a viewer switcher + expect(page).not_to have_selector('.js-blob-viewer-switcher') + + # does not show a copy button + expect(page).not_to have_selector('.js-copy-blob-source-btn') + + # shows a download button + expect(page).to have_link('Download') + end + end + end + + context 'empty file' do + before do + project.add_master(project.creator) + + Files::CreateService.new( + project, + project.creator, + start_branch: 'master', + branch_name: 'master', + commit_message: "Add empty file", + file_path: 'files/empty.md', + file_content: '' + ).execute + + visit_blob('files/empty.md') + end + + it 'displays an error' do + aggregate_failures do 
+ # shows an error message + expect(page).to have_content('Empty file') + + # does not show a viewer switcher + expect(page).not_to have_selector('.js-blob-viewer-switcher') + + # does not show a copy button + expect(page).not_to have_selector('.js-copy-blob-source-btn') + + # does not show a download or raw button + expect(page).not_to have_link('Download') + expect(page).not_to have_link('Open raw') + end + end + end + + context '.gitlab-ci.yml' do + before do + project.add_master(project.creator) + + Files::CreateService.new( + project, + project.creator, + start_branch: 'master', + branch_name: 'master', + commit_message: "Add .gitlab-ci.yml", + file_path: '.gitlab-ci.yml', + file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) + ).execute + + visit_blob('.gitlab-ci.yml') + end + + it 'displays an auxiliary viewer' do + aggregate_failures do + # shows that configuration is valid + expect(page).to have_content('This GitLab CI configuration is valid.') + + # shows a learn more link + expect(page).to have_link('Learn more') + end + end + end + + context '.gitlab/route-map.yml' do + before do + project.add_master(project.creator) + + Files::CreateService.new( + project, + project.creator, + start_branch: 'master', + branch_name: 'master', + commit_message: "Add .gitlab/route-map.yml", + file_path: '.gitlab/route-map.yml', + file_content: <<-MAP.strip_heredoc + # Team data + - source: 'data/team.yml' + public: 'team/' + MAP + ).execute + + visit_blob('.gitlab/route-map.yml') + end + + it 'displays an auxiliary viewer' do + aggregate_failures do + # shows that map is valid + expect(page).to have_content('This Route Map is valid.') + + # shows a learn more link + expect(page).to have_link('Learn more') + end + end + end + + context 'LICENSE' do + before do + visit_blob('LICENSE') + end + + it 'displays an auxiliary viewer' do + aggregate_failures do + # shows license + expect(page).to have_content('This project is licensed under the MIT License.') + + # shows a learn more link + expect(page).to have_link('Learn more', 'http://choosealicense.com/licenses/mit/') + end + end + end + + context '*.gemspec' do + before do + project.add_master(project.creator) + + Files::CreateService.new( + project, + project.creator, + start_branch: 'master', + branch_name: 'master', + commit_message: "Add activerecord.gemspec", + file_path: 'activerecord.gemspec', + file_content: <<-SPEC.strip_heredoc + Gem::Specification.new do |s| + s.platform = Gem::Platform::RUBY + s.name = "activerecord" + end + SPEC + ).execute + + visit_blob('activerecord.gemspec') + end + + it 'displays an auxiliary viewer' do + aggregate_failures do + # shows names of dependency manager and package + expect(page).to have_content('This project manages its dependencies using RubyGems and defines a gem named activerecord.') + + # shows a link to the gem + expect(page).to have_link('activerecord', 'https://rubygems.org/gems/activerecord') + + # shows a learn more link + expect(page).to have_link('Learn more', 'http://choosealicense.com/licenses/mit/') + end + end + end +end diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb index a820d07ab3b..cc5b1a7e734 100644 --- a/spec/features/projects/blobs/edit_spec.rb +++ b/spec/features/projects/blobs/edit_spec.rb @@ -1,45 +1,135 @@ require 'spec_helper' feature 'Editing file blob', feature: true, js: true do - include WaitForAjax + include TreeHelper - given(:user) { create(:user) } - given(:role) { :developer } - 
given(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master') } - given(:project) { merge_request.target_project } + let(:project) { create(:project, :public, :test_repo) } + let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master') } + let(:branch) { 'master' } + let(:file_path) { project.repository.ls_files(project.repository.root_ref)[1] } - background do - login_as(user) - project.team << [user, role] - end - - def edit_and_commit - wait_for_ajax - first('.file-actions').click_link 'Edit' - execute_script('ace.edit("editor").setValue("class NextFeature\nend\n")') - click_button 'Commit Changes' - end + context 'as a developer' do + let(:user) { create(:user) } + let(:role) { :developer } - context 'from MR diff' do before do - visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request) - edit_and_commit + project.team << [user, role] + login_as(user) + end + + def edit_and_commit + wait_for_ajax + find('.js-edit-blob').click + execute_script('ace.edit("editor").setValue("class NextFeature\nend\n")') + click_button 'Commit changes' + end + + context 'from MR diff' do + before do + visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request) + edit_and_commit + end + + it 'returns me to the mr' do + expect(page).to have_content(merge_request.title) + end end - scenario 'returns me to the mr' do - expect(page).to have_content(merge_request.title) + context 'from blob file path' do + before do + visit namespace_project_blob_path(project.namespace, project, tree_join(branch, file_path)) + edit_and_commit + end + + it 'updates content' do + expect(page).to have_content 'successfully committed' + expect(page).to have_content 'NextFeature' + end end end - context 'from blob file path' do - before do - visit namespace_project_blob_path(project.namespace, project, '/feature/files/ruby/feature.rb') - edit_and_commit + context 'visit blob edit' do + context 'redirects to sign in and returns' do + context 'as developer' do + let(:user) { create(:user) } + + before do + project.team << [user, :developer] + visit namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path)) + end + + it 'redirects to sign in and returns' do + expect(page).to have_current_path(new_user_session_path) + + login_as(user) + + expect(page).to have_current_path(namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path))) + end + end + + context 'as guest' do + let(:user) { create(:user) } + + before do + visit namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path)) + end + + it 'redirects to sign in and returns' do + expect(page).to have_current_path(new_user_session_path) + + login_as(user) + + expect(page).to have_current_path(namespace_project_blob_path(project.namespace, project, tree_join(branch, file_path))) + end + end + end + + context 'as developer' do + let(:user) { create(:user) } + let(:protected_branch) { 'protected-branch' } + + before do + project.team << [user, :developer] + project.repository.add_branch(user, protected_branch, 'master') + create(:protected_branch, project: project, name: protected_branch) + login_as(user) + end + + context 'on some branch' do + before do + visit namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path)) + end + + it 'shows blob editor with same branch' do + expect(page).to 
have_current_path(namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path))) + expect(find('.js-target-branch .dropdown-toggle-text').text).to eq(branch) + end + end + + context 'with protected branch' do + before do + visit namespace_project_edit_blob_path(project.namespace, project, tree_join(protected_branch, file_path)) + end + + it 'shows blob editor with patch branch' do + expect(find('.js-target-branch .dropdown-toggle-text').text).to eq('patch-1') + end + end end - scenario 'updates content' do - expect(page).to have_content 'successfully committed' - expect(page).to have_content 'NextFeature' + context 'as master' do + let(:user) { create(:user) } + + before do + project.team << [user, :master] + login_as(user) + visit namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path)) + end + + it 'shows blob editor with same branch' do + expect(page).to have_current_path(namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path))) + expect(find('.js-target-branch .dropdown-toggle-text').text).to eq(branch) + end end end end diff --git a/spec/features/projects/blobs/user_create_spec.rb b/spec/features/projects/blobs/user_create_spec.rb index 5686868a0c4..d805450e095 100644 --- a/spec/features/projects/blobs/user_create_spec.rb +++ b/spec/features/projects/blobs/user_create_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' feature 'New blob creation', feature: true, js: true do - include WaitForAjax include TargetBranchHelpers given(:user) { create(:user) } @@ -22,7 +21,7 @@ feature 'New blob creation', feature: true, js: true do end def commit_file - click_button 'Commit Changes' + click_button 'Commit changes' end context 'with default target branch' do @@ -77,7 +76,7 @@ feature 'New blob creation', feature: true, js: true do project, user, start_branch: 'master', - target_branch: 'master', + branch_name: 'master', commit_message: 'Create file', file_path: 'feature.rb', file_content: content @@ -87,8 +86,8 @@ feature 'New blob creation', feature: true, js: true do end scenario 'shows error message' do - expect(page).to have_content('Your changes could not be committed because a file with the same name already exists') - expect(page).to have_content('New File') + expect(page).to have_content('A file with this name already exists') + expect(page).to have_content('New file') expect(page).to have_content('NextFeature') end end diff --git a/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb b/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb new file mode 100644 index 00000000000..c5e0a0f0517 --- /dev/null +++ b/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb @@ -0,0 +1,48 @@ +require 'spec_helper' + +describe 'New Branch Ref Dropdown', :js, :feature do + let(:user) { create(:user) } + let(:project) { create(:project, :public) } + let(:toggle) { find('.create-from .dropdown-menu-toggle') } + + before do + project.add_master(user) + + login_as(user) + visit new_namespace_project_branch_path(project.namespace, project) + end + + it 'filters a list of branches and tags' do + toggle.click + + filter_by('v1.0.0') + + expect(items_count).to be(1) + + filter_by('video') + + expect(items_count).to be(1) + + find('.create-from .dropdown-content li').click + + expect(toggle).to have_content 'video' + end + + it 'accepts a manually entered commit SHA' do + toggle.click + + filter_by('somecommitsha') + + find('.create-from input[type=search]').send_keys(:enter) + + 
expect(toggle).to have_content 'somecommitsha' + end + + def items_count + all('.create-from .dropdown-content li').length + end + + def filter_by(filter_text) + fill_in 'Filter by Git revision', with: filter_text + end +end diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb index 8e0306ce83b..7668ce5f8be 100644 --- a/spec/features/projects/branches_spec.rb +++ b/spec/features/projects/branches_spec.rb @@ -4,7 +4,13 @@ describe 'Branches', feature: true do let(:project) { create(:project, :public) } let(:repository) { project.repository } - context 'logged in' do + def set_protected_branch_name(branch_name) + find(".js-protected-branch-select").click + find(".dropdown-input-field").set(branch_name) + click_on("Create wildcard #{branch_name}") + end + + context 'logged in as developer' do before do login_as :user project.team << [@user, :developer] @@ -38,6 +44,83 @@ describe 'Branches', feature: true do expect(find('.all-branches')).to have_selector('li', count: 1) end end + + describe 'Delete unprotected branch' do + it 'removes branch after confirmation', js: true do + visit namespace_project_branches_path(project.namespace, project) + + fill_in 'branch-search', with: 'fix' + + find('#branch-search').native.send_keys(:enter) + + expect(page).to have_content('fix') + expect(find('.all-branches')).to have_selector('li', count: 1) + find('.js-branch-fix .btn-remove').trigger(:click) + + expect(page).not_to have_content('fix') + expect(find('.all-branches')).to have_selector('li', count: 0) + end + end + + describe 'Delete protected branch' do + before do + project.add_user(@user, :master) + visit namespace_project_protected_branches_path(project.namespace, project) + set_protected_branch_name('fix') + click_on "Protect" + + within(".protected-branches-list") { expect(page).to have_content('fix') } + expect(ProtectedBranch.count).to eq(1) + project.add_user(@user, :developer) + end + + it 'does not allow developer to remove protected branch', js: true do + visit namespace_project_branches_path(project.namespace, project) + + fill_in 'branch-search', with: 'fix' + find('#branch-search').native.send_keys(:enter) + + expect(page).to have_css('.btn-remove.disabled') + end + end + end + + context 'logged in as master' do + before do + login_as :user + project.team << [@user, :master] + end + + describe 'Delete protected branch' do + before do + visit namespace_project_protected_branches_path(project.namespace, project) + set_protected_branch_name('fix') + click_on "Protect" + + within(".protected-branches-list") { expect(page).to have_content('fix') } + expect(ProtectedBranch.count).to eq(1) + end + + it 'removes branch after modal confirmation', js: true do + visit namespace_project_branches_path(project.namespace, project) + + fill_in 'branch-search', with: 'fix' + find('#branch-search').native.send_keys(:enter) + + expect(page).to have_content('fix') + expect(find('.all-branches')).to have_selector('li', count: 1) + page.find('[data-target="#modal-delete-branch"]').trigger(:click) + + expect(page).to have_css('.js-delete-branch[disabled]') + fill_in 'delete_branch_input', with: 'fix' + click_link 'Delete protected branch' + + fill_in 'branch-search', with: 'fix' + find('#branch-search').native.send_keys(:enter) + + expect(page).to have_content('No branches to show') + end + end end context 'logged out' do diff --git a/spec/features/projects/builds_spec.rb b/spec/features/projects/builds_spec.rb index 2116721b224..ab10434e10c 100644 --- 
a/spec/features/projects/builds_spec.rb +++ b/spec/features/projects/builds_spec.rb @@ -205,21 +205,13 @@ feature 'Builds', :feature do it 'loads job trace' do expect(page).to have_content 'BUILD TRACE' - build.append_trace(' and more trace', 11) + build.trace.write do |stream| + stream.append(' and more trace', 11) + end expect(page).to have_content 'BUILD TRACE and more trace' end end - - context 'when build does not have an initial trace' do - let(:build) { create(:ci_build, pipeline: pipeline) } - - it 'loads new trace' do - build.append_trace('build trace', 0) - - expect(page).to have_content 'build trace' - end - end end feature 'Variables' do @@ -390,7 +382,7 @@ feature 'Builds', :feature do it 'sends the right headers' do expect(page.status_code).to eq(200) expect(page.response_headers['Content-Type']).to eq('text/plain; charset=utf-8') - expect(page.response_headers['X-Sendfile']).to eq(build.path_to_trace) + expect(page.response_headers['X-Sendfile']).to eq(build.trace.send(:current_path)) end end @@ -409,43 +401,24 @@ feature 'Builds', :feature do context 'storage form' do let(:existing_file) { Tempfile.new('existing-trace-file').path } - let(:non_existing_file) do - file = Tempfile.new('non-existing-trace-file') - path = file.path - file.unlink - path - end - context 'when build has trace in file' do - before do - Capybara.current_session.driver.header('X-Sendfile-Type', 'X-Sendfile') - build.run! - visit namespace_project_build_path(project.namespace, project, build) + before do + Capybara.current_session.driver.header('X-Sendfile-Type', 'X-Sendfile') - allow_any_instance_of(Project).to receive(:ci_id).and_return(nil) - allow_any_instance_of(Ci::Build).to receive(:path_to_trace).and_return(existing_file) - allow_any_instance_of(Ci::Build).to receive(:old_path_to_trace).and_return(non_existing_file) + build.run! - page.within('.js-build-sidebar') { click_link 'Raw' } - end + allow_any_instance_of(Gitlab::Ci::Trace).to receive(:paths) + .and_return(paths) - it 'sends the right headers' do - expect(page.status_code).to eq(200) - expect(page.response_headers['Content-Type']).to eq('text/plain; charset=utf-8') - expect(page.response_headers['X-Sendfile']).to eq(existing_file) - end + visit namespace_project_build_path(project.namespace, project, build) end - context 'when build has trace in old file' do - before do - Capybara.current_session.driver.header('X-Sendfile-Type', 'X-Sendfile') - build.run! - visit namespace_project_build_path(project.namespace, project, build) - - allow_any_instance_of(Project).to receive(:ci_id).and_return(999) - allow_any_instance_of(Ci::Build).to receive(:path_to_trace).and_return(non_existing_file) - allow_any_instance_of(Ci::Build).to receive(:old_path_to_trace).and_return(existing_file) + context 'when build has trace in file' do + let(:paths) do + [existing_file] + end + before do page.within('.js-build-sidebar') { click_link 'Raw' } end @@ -457,20 +430,10 @@ feature 'Builds', :feature do end context 'when build has trace in DB' do - before do - Capybara.current_session.driver.header('X-Sendfile-Type', 'X-Sendfile') - build.run! 
- visit namespace_project_build_path(project.namespace, project, build) - - allow_any_instance_of(Project).to receive(:ci_id).and_return(nil) - allow_any_instance_of(Ci::Build).to receive(:path_to_trace).and_return(non_existing_file) - allow_any_instance_of(Ci::Build).to receive(:old_path_to_trace).and_return(non_existing_file) - - page.within('.js-build-sidebar') { click_link 'Raw' } - end + let(:paths) { [] } it 'sends the right headers' do - expect(page.status_code).to eq(404) + expect(page).not_to have_link('Raw') end end diff --git a/spec/features/projects/commit/cherry_pick_spec.rb b/spec/features/projects/commit/cherry_pick_spec.rb index 0b972d2a439..fa67d390c47 100644 --- a/spec/features/projects/commit/cherry_pick_spec.rb +++ b/spec/features/projects/commit/cherry_pick_spec.rb @@ -1,5 +1,4 @@ require 'spec_helper' -include WaitForAjax describe 'Cherry-pick Commits' do let(:group) { create(:group) } @@ -75,8 +74,10 @@ describe 'Cherry-pick Commits' do wait_for_ajax - page.within('#modal-cherry-pick-commit .dropdown-menu .dropdown-content') do - click_link 'feature' + page.within('#modal-cherry-pick-commit .dropdown-menu') do + find('.dropdown-input input').set('feature') + wait_for_ajax + click_link "feature" end page.within('#modal-cherry-pick-commit') do diff --git a/spec/features/projects/commit/mini_pipeline_graph_spec.rb b/spec/features/projects/commit/mini_pipeline_graph_spec.rb index 30a2b2bcf8c..98c0f2c63b0 100644 --- a/spec/features/projects/commit/mini_pipeline_graph_spec.rb +++ b/spec/features/projects/commit/mini_pipeline_graph_spec.rb @@ -1,8 +1,6 @@ require 'rails_helper' feature 'Mini Pipeline Graph in Commit View', :js, :feature do - include WaitForAjax - let(:user) { create(:user) } let(:project) { create(:project, :public) } diff --git a/spec/features/projects/deploy_keys_spec.rb b/spec/features/projects/deploy_keys_spec.rb index 0b997f130ea..06abfbbc86b 100644 --- a/spec/features/projects/deploy_keys_spec.rb +++ b/spec/features/projects/deploy_keys_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'Project deploy keys', feature: true do +describe 'Project deploy keys', :js, :feature do let(:user) { create(:user) } let(:project) { create(:project_empty_repo) } @@ -17,9 +17,13 @@ describe 'Project deploy keys', feature: true do it 'removes association between project and deploy key' do visit namespace_project_settings_repository_path(project.namespace, project) - page.within '.deploy-keys' do - expect { click_on 'Remove' } - .to change { project.deploy_keys.count }.by(-1) + page.within(find('.deploy-keys')) do + expect(page).to have_selector('.deploy-keys li', count: 1) + + click_on 'Remove' + + expect(page).not_to have_selector('.fa-spinner', count: 0) + expect(page).to have_selector('.deploy-keys li', count: 0) end end end diff --git a/spec/features/projects/edit_spec.rb b/spec/features/projects/edit_spec.rb index 7c319af893b..a263781c43c 100644 --- a/spec/features/projects/edit_spec.rb +++ b/spec/features/projects/edit_spec.rb @@ -1,8 +1,6 @@ require 'rails_helper' feature 'Project edit', feature: true, js: true do - include WaitForAjax - let(:user) { create(:user) } let(:project) { create(:project) } diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb index acc3efe04e6..86ce50c976f 100644 --- a/spec/features/projects/environments/environment_spec.rb +++ b/spec/features/projects/environments/environment_spec.rb @@ -62,6 +62,8 @@ feature 'Environment', :feature do 
name: 'deploy to production') end + given(:role) { :master } + scenario 'does show a play button' do expect(page).to have_link(action.name.humanize) end @@ -132,6 +134,8 @@ feature 'Environment', :feature do on_stop: 'close_app') end + given(:role) { :master } + scenario 'does allow to stop environment' do click_link('Stop') @@ -200,7 +204,7 @@ feature 'Environment', :feature do end scenario 'user deletes the branch with running environment' do - visit namespace_project_branches_path(project.namespace, project) + visit namespace_project_branches_path(project.namespace, project, search: 'feature') remove_branch_with_hooks(project, user, 'feature') do page.within('.js-branch-feature') { find('a.btn-remove').click } diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb index 641e2cf7402..cf393afccbb 100644 --- a/spec/features/projects/environments/environments_spec.rb +++ b/spec/features/projects/environments/environments_spec.rb @@ -23,6 +23,46 @@ feature 'Environments page', :feature, :js do expect(page).to have_link('Available') expect(page).to have_link('Stopped') end + + describe 'with one available environment' do + given(:environment) { create(:environment, project: project, state: :available) } + + describe 'in available tab page' do + it 'should show one environment' do + visit namespace_project_environments_path(project.namespace, project, scope: 'available') + expect(page).to have_css('.environments-container') + expect(page.all('tbody > tr').length).to eq(1) + end + end + + describe 'in stopped tab page' do + it 'should show no environments' do + visit namespace_project_environments_path(project.namespace, project, scope: 'stopped') + expect(page).to have_css('.environments-container') + expect(page).to have_content('You don\'t have any environments right now') + end + end + end + + describe 'with one stopped environment' do + given(:environment) { create(:environment, project: project, state: :stopped) } + + describe 'in available tab page' do + it 'should show no environments' do + visit namespace_project_environments_path(project.namespace, project, scope: 'available') + expect(page).to have_css('.environments-container') + expect(page).to have_content('You don\'t have any environments right now') + end + end + + describe 'in stopped tab page' do + it 'should show one environment' do + visit namespace_project_environments_path(project.namespace, project, scope: 'stopped') + expect(page).to have_css('.environments-container') + expect(page.all('tbody > tr').length).to eq(1) + end + end + end end context 'without environments' do diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb index 9079350186d..4533a6fb144 100644 --- a/spec/features/projects/features_visibility_spec.rb +++ b/spec/features/projects/features_visibility_spec.rb @@ -1,9 +1,6 @@ require 'spec_helper' -include WaitForAjax describe 'Edit Project Settings', feature: true do - include WaitForAjax - let(:member) { create(:user) } let!(:project) { create(:project, :public, path: 'gitlab', name: 'sample') } let!(:issue) { create(:issue, project: project) } @@ -71,20 +68,23 @@ describe 'Edit Project Settings', feature: true do end describe 'project features visibility pages' do - before do - @tools = - { - builds: namespace_project_pipelines_path(project.namespace, project), - issues: namespace_project_issues_path(project.namespace, project), - wiki: 
namespace_project_wiki_path(project.namespace, project, :home), - snippets: namespace_project_snippets_path(project.namespace, project), - merge_requests: namespace_project_merge_requests_path(project.namespace, project), - } + let(:tools) do + { + builds: namespace_project_pipelines_path(project.namespace, project), + issues: namespace_project_issues_path(project.namespace, project), + wiki: namespace_project_wiki_path(project.namespace, project, :home), + snippets: namespace_project_snippets_path(project.namespace, project), + merge_requests: namespace_project_merge_requests_path(project.namespace, project) + } end context 'normal user' do + before do + login_as(member) + end + it 'renders 200 if tool is enabled' do - @tools.each do |method_name, url| + tools.each do |method_name, url| project.project_feature.update_attribute("#{method_name}_access_level", ProjectFeature::ENABLED) visit url expect(page.status_code).to eq(200) @@ -92,7 +92,7 @@ describe 'Edit Project Settings', feature: true do end it 'renders 404 if feature is disabled' do - @tools.each do |method_name, url| + tools.each do |method_name, url| project.project_feature.update_attribute("#{method_name}_access_level", ProjectFeature::DISABLED) visit url expect(page.status_code).to eq(404) @@ -102,21 +102,21 @@ describe 'Edit Project Settings', feature: true do it 'renders 404 if feature is enabled only for team members' do project.team.truncate - @tools.each do |method_name, url| + tools.each do |method_name, url| project.project_feature.update_attribute("#{method_name}_access_level", ProjectFeature::PRIVATE) visit url expect(page.status_code).to eq(404) end end - it 'renders 200 if users is member of group' do + it 'renders 200 if user is member of group' do group = create(:group) project.group = group project.save group.add_owner(member) - @tools.each do |method_name, url| + tools.each do |method_name, url| project.project_feature.update_attribute("#{method_name}_access_level", ProjectFeature::PRIVATE) visit url expect(page.status_code).to eq(200) @@ -131,7 +131,7 @@ describe 'Edit Project Settings', feature: true do end it 'renders 404 if feature is disabled' do - @tools.each do |method_name, url| + tools.each do |method_name, url| project.project_feature.update_attribute("#{method_name}_access_level", ProjectFeature::DISABLED) visit url expect(page.status_code).to eq(404) @@ -141,7 +141,7 @@ describe 'Edit Project Settings', feature: true do it 'renders 200 if feature is enabled only for team members' do project.team.truncate - @tools.each do |method_name, url| + tools.each do |method_name, url| project.project_feature.update_attribute("#{method_name}_access_level", ProjectFeature::PRIVATE) visit url expect(page.status_code).to eq(200) diff --git a/spec/features/projects/files/browse_files_spec.rb b/spec/features/projects/files/browse_files_spec.rb index d281043caa3..4166aec1956 100644 --- a/spec/features/projects/files/browse_files_spec.rb +++ b/spec/features/projects/files/browse_files_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -feature 'user browses project', feature: true do +feature 'user browses project', feature: true, js: true do let(:project) { create(:project) } let(:user) { create(:user) } @@ -13,7 +13,7 @@ feature 'user browses project', feature: true do scenario "can see blame of '.gitignore'" do click_link ".gitignore" click_link 'Blame' - + expect(page).to have_content "*.rb" expect(page).to have_content "Dmitriy Zaporozhets" expect(page).to have_content "Initial commit" @@ -24,10 +24,23 @@ feature 'user 
browses project', feature: true do click_link 'files' click_link 'lfs' click_link 'lfs_object.iso' + wait_for_ajax expect(page).not_to have_content 'Download (1.5 MB)' expect(page).to have_content 'version https://git-lfs.github.com/spec/v1' expect(page).to have_content 'oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897' expect(page).to have_content 'size 1575078' end + + scenario 'can see last commit for current directory' do + last_commit = project.repository.last_commit_for_path(project.default_branch, 'files') + + click_link 'files' + wait_for_ajax + + page.within('.blob-commit-info') do + expect(page).to have_content last_commit.short_id + expect(page).to have_content last_commit.author_name + end + end end diff --git a/spec/features/projects/files/creating_a_file_spec.rb b/spec/features/projects/files/creating_a_file_spec.rb index ae448706130..69744ac3948 100644 --- a/spec/features/projects/files/creating_a_file_spec.rb +++ b/spec/features/projects/files/creating_a_file_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'User wants to create a file', feature: true do - include WaitForAjax - let(:project) { create(:project) } let(:user) { create(:user) } @@ -19,7 +17,7 @@ feature 'User wants to create a file', feature: true do file_content = find('#file-content') file_content.set options[:file_content] || 'Some content' - click_button 'Commit Changes' + click_button 'Commit changes' end scenario 'file name contains Chinese characters' do @@ -29,16 +27,16 @@ feature 'User wants to create a file', feature: true do scenario 'directory name contains Chinese characters' do submit_new_file(file_name: '中文/测试.md') - expect(page).to have_content 'The file has been successfully created.' + expect(page).to have_content 'The file has been successfully created' end scenario 'file name contains invalid characters' do submit_new_file(file_name: '\\') - expect(page).to have_content 'Your changes could not be committed, because the file name can contain only' + expect(page).to have_content 'Path can contain only' end scenario 'file name contains directory traversal' do submit_new_file(file_name: '../README.md') - expect(page).to have_content 'Your changes could not be committed, because the file name cannot include directory traversal.' 
+ expect(page).to have_content 'Path cannot include directory traversal' end end diff --git a/spec/features/projects/files/dockerfile_dropdown_spec.rb b/spec/features/projects/files/dockerfile_dropdown_spec.rb index 32f33a3ca97..548131c7cd4 100644 --- a/spec/features/projects/files/dockerfile_dropdown_spec.rb +++ b/spec/features/projects/files/dockerfile_dropdown_spec.rb @@ -1,13 +1,14 @@ require 'spec_helper' +require 'fileutils' feature 'User wants to add a Dockerfile file', feature: true do - include WaitForAjax - before do user = create(:user) project = create(:project) project.team << [user, :master] + login_as user + visit namespace_project_new_blob_path(project.namespace, project, 'master', file_name: 'Dockerfile') end @@ -17,11 +18,14 @@ feature 'User wants to add a Dockerfile file', feature: true do scenario 'user can pick a Dockerfile file from the dropdown', js: true do find('.js-dockerfile-selector').click + wait_for_ajax + within '.dockerfile-selector' do find('.dropdown-input-field').set('HTTPd') find('.dropdown-content li', text: 'HTTPd').click end + wait_for_ajax expect(page).to have_css('.dockerfile-selector .dropdown-toggle-text', text: 'HTTPd') diff --git a/spec/features/projects/files/editing_a_file_spec.rb b/spec/features/projects/files/editing_a_file_spec.rb index 36a80d7575d..7a3afafec29 100644 --- a/spec/features/projects/files/editing_a_file_spec.rb +++ b/spec/features/projects/files/editing_a_file_spec.rb @@ -1,14 +1,12 @@ require 'spec_helper' feature 'User wants to edit a file', feature: true do - include WaitForAjax - let(:project) { create(:project) } let(:user) { create(:user) } let(:commit_params) do { start_branch: project.default_branch, - target_branch: project.default_branch, + branch_name: project.default_branch, commit_message: "Committing First Update", file_path: ".gitignore", file_content: "First Update", @@ -27,7 +25,7 @@ feature 'User wants to edit a file', feature: true do scenario 'file has been updated since the user opened the edit page' do Files::UpdateService.new(project, user, commit_params).execute - click_button 'Commit Changes' + click_button 'Commit changes' expect(page).to have_content 'Someone edited the file the same time you did.' 
end diff --git a/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb b/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb index 10b91d8990b..5c8105de4cb 100644 --- a/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb +++ b/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'User views files page', feature: true do - include WaitForAjax - let(:user) { create(:user) } let(:project) { create(:forked_project_with_submodules) } diff --git a/spec/features/projects/files/find_file_keyboard_spec.rb b/spec/features/projects/files/find_file_keyboard_spec.rb index 582349d8d5b..e7a6749d8ac 100644 --- a/spec/features/projects/files/find_file_keyboard_spec.rb +++ b/spec/features/projects/files/find_file_keyboard_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'Find file keyboard shortcuts', feature: true, js: true do - include WaitForAjax - let(:user) { create(:user) } let(:project) { create(:project) } diff --git a/spec/features/projects/files/find_files_spec.rb b/spec/features/projects/files/find_files_spec.rb new file mode 100644 index 00000000000..716b7591b95 --- /dev/null +++ b/spec/features/projects/files/find_files_spec.rb @@ -0,0 +1,30 @@ +require 'spec_helper' + +feature 'Find files button in the tree header', feature: true do + given(:user) { create(:user) } + given(:project) { create(:project) } + + background do + login_as(user) + project.team << [user, :developer] + end + + scenario 'project main screen' do + visit namespace_project_path( + project.namespace, + project + ) + + expect(page).to have_selector('.tree-controls .shortcuts-find-file') + end + + scenario 'project tree screen' do + visit namespace_project_tree_path( + project.namespace, + project, + project.default_branch + ) + + expect(page).to have_selector('.tree-controls .shortcuts-find-file') + end +end diff --git a/spec/features/projects/files/gitignore_dropdown_spec.rb b/spec/features/projects/files/gitignore_dropdown_spec.rb index 9ebef505b92..e59428f8b24 100644 --- a/spec/features/projects/files/gitignore_dropdown_spec.rb +++ b/spec/features/projects/files/gitignore_dropdown_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'User wants to add a .gitignore file', feature: true do - include WaitForAjax - before do user = create(:user) project = create(:project) diff --git a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb index fca40f68b01..85b66b93fba 100644 --- a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb +++ b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'User wants to add a .gitlab-ci.yml file', feature: true do - include WaitForAjax - before do user = create(:user) project = create(:project) diff --git a/spec/features/projects/files/project_owner_creates_license_file_spec.rb b/spec/features/projects/files/project_owner_creates_license_file_spec.rb index ccadc936567..249830921ac 100644 --- a/spec/features/projects/files/project_owner_creates_license_file_spec.rb +++ b/spec/features/projects/files/project_owner_creates_license_file_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'project owner creates a license file', feature: true, js: true do - include WaitForAjax - let(:project_master) { create(:user) } let(:project) { create(:project) } background do @@ -29,7 +27,7 @@ feature 'project owner creates a license file', feature: 
true, js: true do expect(file_content).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}") fill_in :commit_message, with: 'Add a LICENSE file', visible: true - click_button 'Commit Changes' + click_button 'Commit changes' expect(current_path).to eq( namespace_project_blob_path(project.namespace, project, 'master/LICENSE')) @@ -40,7 +38,7 @@ feature 'project owner creates a license file', feature: true, js: true do scenario 'project master creates a license file from the "Add license" link' do click_link 'Add License' - expect(page).to have_content('New File') + expect(page).to have_content('New file') expect(current_path).to eq( namespace_project_new_blob_path(project.namespace, project, 'master')) expect(find('#file_name').value).to eq('LICENSE') @@ -53,7 +51,7 @@ feature 'project owner creates a license file', feature: true, js: true do expect(file_content).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}") fill_in :commit_message, with: 'Add a LICENSE file', visible: true - click_button 'Commit Changes' + click_button 'Commit changes' expect(current_path).to eq( namespace_project_blob_path(project.namespace, project, 'master/LICENSE')) @@ -63,7 +61,7 @@ feature 'project owner creates a license file', feature: true, js: true do def select_template(template) page.within('.js-license-selector-wrap') do - click_button 'Choose a License template' + click_button 'Apply a license template' click_link template wait_for_ajax end diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb index 420db962318..70a41886985 100644 --- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb +++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'project owner sees a link to create a license file in empty project', feature: true, js: true do - include WaitForAjax - let(:project_master) { create(:user) } let(:project) { create(:empty_project) } background do @@ -14,7 +12,7 @@ feature 'project owner sees a link to create a license file in empty project', f visit namespace_project_path(project.namespace, project) click_link 'Create empty bare repository' click_on 'LICENSE' - expect(page).to have_content('New File') + expect(page).to have_content('New file') expect(current_path).to eq( namespace_project_new_blob_path(project.namespace, project, 'master')) @@ -30,7 +28,7 @@ feature 'project owner sees a link to create a license file in empty project', f fill_in :commit_message, with: 'Add a LICENSE file', visible: true # Remove pre-receive hook so we can push without auth FileUtils.rm_f(File.join(project.repository.path, 'hooks', 'pre-receive')) - click_button 'Commit Changes' + click_button 'Commit changes' expect(current_path).to eq( namespace_project_blob_path(project.namespace, project, 'master/LICENSE')) @@ -40,7 +38,7 @@ feature 'project owner sees a link to create a license file in empty project', f def select_template(template) page.within('.js-license-selector-wrap') do - click_button 'Choose a License template' + click_button 'Apply a license template' click_link template wait_for_ajax end diff --git a/spec/features/projects/files/template_type_dropdown_spec.rb b/spec/features/projects/files/template_type_dropdown_spec.rb new file mode 100644 
index 00000000000..9fcf12e6cb9 --- /dev/null +++ b/spec/features/projects/files/template_type_dropdown_spec.rb @@ -0,0 +1,135 @@ +require 'spec_helper' + +feature 'Template type dropdown selector', js: true do + let(:project) { create(:project) } + let(:user) { create(:user) } + + before do + project.team << [user, :master] + login_as user + end + + context 'editing a non-matching file' do + before do + create_and_edit_file('.random-file.js') + end + + scenario 'not displayed' do + check_type_selector_display(false) + end + + scenario 'selects every template type correctly' do + fill_in 'file_path', with: '.gitignore' + try_selecting_all_types + end + + scenario 'updates toggle value when input matches' do + fill_in 'file_path', with: '.gitignore' + check_type_selector_toggle_text('.gitignore') + end + end + + context 'editing a matching file' do + before do + visit namespace_project_edit_blob_path(project.namespace, project, File.join(project.default_branch, 'LICENSE')) + end + + scenario 'displayed' do + check_type_selector_display(true) + end + + scenario 'is displayed when input matches' do + check_type_selector_display(true) + end + + scenario 'selects every template type correctly' do + try_selecting_all_types + end + + context 'user previews changes' do + before do + click_link 'Preview changes' + end + + scenario 'type selector is hidden and shown correctly' do + check_type_selector_display(false) + click_link 'Write' + check_type_selector_display(true) + end + end + end + + context 'creating a matching file' do + before do + visit namespace_project_new_blob_path(project.namespace, project, 'master', file_name: '.gitignore') + end + + scenario 'is displayed' do + check_type_selector_display(true) + end + + scenario 'toggle is set to the correct value' do + check_type_selector_toggle_text('.gitignore') + end + + scenario 'selects every template type correctly' do + try_selecting_all_types + end + end + + context 'creating a file' do + before do + visit namespace_project_new_blob_path(project.namespace, project, project.default_branch) + end + + scenario 'type selector is shown' do + check_type_selector_display(true) + end + + scenario 'toggle is set to the proper value' do + check_type_selector_toggle_text('Choose type') + end + + scenario 'selects every template type correctly' do + try_selecting_all_types + end + end +end + +def check_type_selector_display(is_visible) + count = is_visible ? 
1 : 0 + expect(page).to have_css('.js-template-type-selector', count: count) +end + +def try_selecting_all_types + try_selecting_template_type('LICENSE', 'Apply a license template') + try_selecting_template_type('Dockerfile', 'Apply a Dockerfile template') + try_selecting_template_type('.gitlab-ci.yml', 'Apply a GitLab CI Yaml template') + try_selecting_template_type('.gitignore', 'Apply a .gitignore template') +end + +def try_selecting_template_type(template_type, selector_label) + select_template_type(template_type) + check_template_selector_display(selector_label) + check_type_selector_toggle_text(template_type) +end + +def select_template_type(template_type) + find('.js-template-type-selector').click + find('.dropdown-content li', text: template_type).click +end + +def check_template_selector_display(content) + expect(page).to have_content(content) +end + +def check_type_selector_toggle_text(template_type) + dropdown_toggle_button = find('.template-type-selector .dropdown-toggle-text') + expect(dropdown_toggle_button).to have_content(template_type) +end + +def create_and_edit_file(file_name) + visit namespace_project_new_blob_path(project.namespace, project, 'master', file_name: file_name) + click_button "Commit changes" + visit namespace_project_edit_blob_path(project.namespace, project, File.join(project.default_branch, file_name)) +end diff --git a/spec/features/projects/files/undo_template_spec.rb b/spec/features/projects/files/undo_template_spec.rb new file mode 100644 index 00000000000..cd3af0b7d29 --- /dev/null +++ b/spec/features/projects/files/undo_template_spec.rb @@ -0,0 +1,66 @@ +require 'spec_helper' + +feature 'Template Undo Button', js: true do + let(:project) { create(:project) } + let(:user) { create(:user) } + + before do + project.team << [user, :master] + login_as user + end + + context 'editing a matching file and applying a template' do + before do + visit namespace_project_edit_blob_path(project.namespace, project, File.join(project.default_branch, "LICENSE")) + select_file_template('.js-license-selector', 'Apache License 2.0') + end + + scenario 'reverts template application' do + try_template_undo('http://www.apache.org/licenses/', 'Apply a license template') + end + end + + context 'creating a non-matching file' do + before do + visit namespace_project_new_blob_path(project.namespace, project, 'master') + select_file_template_type('LICENSE') + select_file_template('.js-license-selector', 'Apache License 2.0') + end + + scenario 'reverts template application' do + try_template_undo('http://www.apache.org/licenses/', 'Apply a license template') + end + end +end + +def try_template_undo(template_content, toggle_text) + check_undo_button_display + check_content_reverted(template_content) + check_toggle_text_set(toggle_text) +end + +def check_toggle_text_set(neutral_toggle_text) + expect(page).to have_content(neutral_toggle_text) +end + +def check_undo_button_display + expect(page).to have_content('Template applied') + expect(page).to have_css('.template-selectors-undo-menu .btn-info') +end + +def check_content_reverted(template_content) + find('.template-selectors-undo-menu .btn-info').click + expect(page).not_to have_content(template_content) + expect(find('.template-type-selector .dropdown-toggle-text')).to have_content() +end + +def select_file_template(template_selector_selector, template_name) + find(template_selector_selector).click + find('.dropdown-content li', text: template_name).click + wait_for_ajax +end + +def select_file_template_type(template_type) 
+ find('.js-template-type-selector').click + find('.dropdown-content li', text: template_type).click +end diff --git a/spec/features/projects/gfm_autocomplete_load_spec.rb b/spec/features/projects/gfm_autocomplete_load_spec.rb index dd9622f16a0..67bc9142356 100644 --- a/spec/features/projects/gfm_autocomplete_load_spec.rb +++ b/spec/features/projects/gfm_autocomplete_load_spec.rb @@ -10,7 +10,7 @@ describe 'GFM autocomplete loading', feature: true, js: true do end it 'does not load on project#show' do - expect(evaluate_script('gl.GfmAutoComplete.dataSources')).to eq({}) + expect(evaluate_script('gl.GfmAutoComplete')).to eq(nil) end it 'loads on new issue page' do diff --git a/spec/features/projects/import_export/import_file_spec.rb b/spec/features/projects/import_export/import_file_spec.rb index 2d1106ea3e8..583f479ec18 100644 --- a/spec/features/projects/import_export/import_file_spec.rb +++ b/spec/features/projects/import_export/import_file_spec.rb @@ -69,12 +69,8 @@ feature 'Import/Export - project import integration test', feature: true, js: tr select2(namespace.id, from: '#project_namespace_id') - # click on disabled element - find(:link, 'GitLab export').trigger('click') - - page.within('.flash-container') do - expect(page).to have_content('Please enter path and name') - end + # Check for tooltip disabled import button + expect(find('.import_gitlab_project')['title']).to eq('Please enter a valid project name.') end end diff --git a/spec/features/projects/import_export/test_project_export.tar.gz b/spec/features/projects/import_export/test_project_export.tar.gz Binary files differindex 399c1d478c5..4efd5a26a82 100644 --- a/spec/features/projects/import_export/test_project_export.tar.gz +++ b/spec/features/projects/import_export/test_project_export.tar.gz diff --git a/spec/features/projects/issuable_templates_spec.rb b/spec/features/projects/issuable_templates_spec.rb index 62d0aedda48..fa5e30075e3 100644 --- a/spec/features/projects/issuable_templates_spec.rb +++ b/spec/features/projects/issuable_templates_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'issuable templates', feature: true, js: true do - include WaitForAjax - let(:user) { create(:user) } let(:project) { create(:project, :public) } @@ -14,7 +12,7 @@ feature 'issuable templates', feature: true, js: true do context 'user creates an issue using templates' do let(:template_content) { 'this is a test "bug" template' } let(:longtemplate_content) { %Q(this\n\n\n\n\nis\n\n\n\n\na\n\n\n\n\nbug\n\n\n\n\ntemplate) } - let(:issue) { create(:issue, author: user, assignee: user, project: project) } + let(:issue) { create(:issue, author: user, assignees: [user], project: project) } let(:description_addition) { ' appending to description' } background do @@ -74,7 +72,7 @@ feature 'issuable templates', feature: true, js: true do context 'user creates an issue using templates, with a prior description' do let(:prior_description) { 'test issue description' } let(:template_content) { 'this is a test "bug" template' } - let(:issue) { create(:issue, author: user, assignee: user, project: project) } + let(:issue) { create(:issue, author: user, assignees: [user], project: project) } background do project.repository.create_file( @@ -163,12 +161,14 @@ feature 'issuable templates', feature: true, js: true do end def select_template(name) - first('.js-issuable-selector').click - first('.js-issuable-selector-wrap .dropdown-content a', text: name).click + find('.js-issuable-selector').click + + find('.js-issuable-selector-wrap .dropdown-content 
a', text: name, match: :first).click end def select_option(name) - first('.js-issuable-selector').click - first('.js-issuable-selector-wrap .dropdown-footer-list a', text: name).click + find('.js-issuable-selector').click + + find('.js-issuable-selector-wrap .dropdown-footer-list a', text: name, match: :first).click end end diff --git a/spec/features/projects/labels/update_prioritization_spec.rb b/spec/features/projects/labels/update_prioritization_spec.rb index 1e900d7e660..836f81fb16d 100644 --- a/spec/features/projects/labels/update_prioritization_spec.rb +++ b/spec/features/projects/labels/update_prioritization_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' feature 'Prioritize labels', feature: true do - include WaitForAjax include DragTo let(:user) { create(:user) } diff --git a/spec/features/projects/members/group_links_spec.rb b/spec/features/projects/members/group_links_spec.rb index cffb935ad5a..ab2b089db2e 100644 --- a/spec/features/projects/members/group_links_spec.rb +++ b/spec/features/projects/members/group_links_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' feature 'Projects > Members > Anonymous user sees members', feature: true, js: true do - include WaitForAjax - let(:user) { create(:user) } let(:group) { create(:group, :public) } let(:project) { create(:empty_project, :public) } diff --git a/spec/features/projects/members/list_spec.rb b/spec/features/projects/members/list_spec.rb new file mode 100644 index 00000000000..deea34214fb --- /dev/null +++ b/spec/features/projects/members/list_spec.rb @@ -0,0 +1,90 @@ +require 'spec_helper' + +feature 'Project members list', feature: true do + include Select2Helper + + let(:user1) { create(:user, name: 'John Doe') } + let(:user2) { create(:user, name: 'Mary Jane') } + let(:group) { create(:group) } + let(:project) { create(:project, namespace: group) } + + background do + login_as(user1) + group.add_owner(user1) + end + + scenario 'show members from project and group' do + project.add_developer(user2) + + visit_members_page + + expect(first_row.text).to include(user1.name) + expect(second_row.text).to include(user2.name) + end + + scenario 'show user once if member of both group and project' do + project.add_developer(user1) + + visit_members_page + + expect(first_row.text).to include(user1.name) + expect(second_row).to be_blank + end + + scenario 'update user access level', :js do + project.add_developer(user2) + + visit_members_page + + page.within(second_row) do + click_button('Developer') + click_link('Reporter') + + expect(page).to have_button('Reporter') + end + end + + scenario 'add user to project', :js do + visit_members_page + + add_user(user2.id, 'Reporter') + + page.within(second_row) do + expect(page).to have_content(user2.name) + expect(page).to have_button('Reporter') + end + end + + scenario 'invite user to project', :js do + visit_members_page + + add_user('test@example.com', 'Reporter') + + page.within(second_row) do + expect(page).to have_content('test@example.com') + expect(page).to have_content('Invited') + expect(page).to have_button('Reporter') + end + end + + def first_row + page.all('ul.content-list > li')[0] + end + + def second_row + page.all('ul.content-list > li')[1] + end + + def add_user(id, role) + page.within ".users-project-form" do + select2(id, from: "#user_ids", multiple: true) + select(role, from: "access_level") + end + + click_button "Add to project" + end + + def visit_members_page + visit namespace_project_settings_members_path(project.namespace, project) + end +end diff --git
a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb index c3f45be6e4b..19d14ad9af4 100644 --- a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb +++ b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' feature 'Projects > Members > Master adds member with expiration date', feature: true, js: true do - include WaitForAjax include Select2Helper include ActiveSupport::Testing::TimeHelpers diff --git a/spec/features/projects/members/sorting_spec.rb b/spec/features/projects/members/sorting_spec.rb index c7a32a65e49..b7ae5f0b925 100644 --- a/spec/features/projects/members/sorting_spec.rb +++ b/spec/features/projects/members/sorting_spec.rb @@ -68,7 +68,7 @@ feature 'Projects > Members > Sorting', feature: true do expect(page).to have_css('.member-sort-dropdown .dropdown-toggle-text', text: 'Name, descending') end - scenario 'sorts by recent sign in' do + scenario 'sorts by recent sign in', :redis do visit_members_list(sort: :recent_sign_in) expect(first_member).to include(master.name) @@ -76,7 +76,7 @@ feature 'Projects > Members > Sorting', feature: true do expect(page).to have_css('.member-sort-dropdown .dropdown-toggle-text', text: 'Recent sign in') end - scenario 'sorts by oldest sign in' do + scenario 'sorts by oldest sign in', :redis do visit_members_list(sort: :oldest_sign_in) expect(first_member).to include(developer.name) diff --git a/spec/features/projects/members/user_requests_access_spec.rb b/spec/features/projects/members/user_requests_access_spec.rb index de25d45f447..1bf8f710b9f 100644 --- a/spec/features/projects/members/user_requests_access_spec.rb +++ b/spec/features/projects/members/user_requests_access_spec.rb @@ -31,6 +31,17 @@ feature 'Projects > Members > User requests access', feature: true do expect(page).not_to have_content 'Leave Project' end + context 'code access is restricted' do + scenario 'user can request access' do + project.project_feature.update!(repository_access_level: ProjectFeature::PRIVATE, + builds_access_level: ProjectFeature::PRIVATE, + merge_requests_access_level: ProjectFeature::PRIVATE) + visit namespace_project_path(project.namespace, project) + + expect(page).to have_content 'Request Access' + end + end + scenario 'user is not listed in the project members page' do click_link 'Request Access' diff --git a/spec/features/projects/merge_request_button_spec.rb b/spec/features/projects/merge_request_button_spec.rb index b6728960fb8..1370ab1c521 100644 --- a/spec/features/projects/merge_request_button_spec.rb +++ b/spec/features/projects/merge_request_button_spec.rb @@ -1,13 +1,13 @@ require 'spec_helper' feature 'Merge Request button', feature: true do - shared_examples 'Merge Request button only shown when allowed' do + shared_examples 'Merge request button only shown when allowed' do let(:user) { create(:user) } let(:project) { create(:project, :public) } let(:forked_project) { create(:project, :public, forked_from_project: project) } context 'not logged in' do - it 'does not show Create Merge Request button' do + it 'does not show Create merge request button' do visit url within("#content-body") do @@ -22,7 +22,7 @@ feature 'Merge Request button', feature: true do project.team << [user, :developer] end - it 'shows Create Merge Request button' do + it 'shows Create merge request button' do href = 
new_namespace_project_merge_request_path(project.namespace, project, merge_request: { source_branch: 'feature', @@ -40,7 +40,7 @@ feature 'Merge Request button', feature: true do project.project_feature.update!(merge_requests_access_level: ProjectFeature::DISABLED) end - it 'does not show Create Merge Request button' do + it 'does not show Create merge request button' do visit url within("#content-body") do @@ -55,7 +55,7 @@ feature 'Merge Request button', feature: true do login_as(user) end - it 'does not show Create Merge Request button' do + it 'does not show Create merge request button' do visit url within("#content-body") do @@ -66,7 +66,7 @@ feature 'Merge Request button', feature: true do context 'on own fork of project' do let(:user) { forked_project.owner } - it 'shows Create Merge Request button' do + it 'shows Create merge request button' do href = new_namespace_project_merge_request_path(forked_project.namespace, forked_project, merge_request: { source_branch: 'feature', @@ -83,24 +83,24 @@ feature 'Merge Request button', feature: true do end context 'on branches page' do - it_behaves_like 'Merge Request button only shown when allowed' do - let(:label) { 'Merge Request' } - let(:url) { namespace_project_branches_path(project.namespace, project) } - let(:fork_url) { namespace_project_branches_path(forked_project.namespace, forked_project) } + it_behaves_like 'Merge request button only shown when allowed' do + let(:label) { 'Merge request' } + let(:url) { namespace_project_branches_path(project.namespace, project, search: 'feature') } + let(:fork_url) { namespace_project_branches_path(forked_project.namespace, forked_project, search: 'feature') } end end context 'on compare page' do - it_behaves_like 'Merge Request button only shown when allowed' do - let(:label) { 'Create Merge Request' } + it_behaves_like 'Merge request button only shown when allowed' do + let(:label) { 'Create merge request' } let(:url) { namespace_project_compare_path(project.namespace, project, from: 'master', to: 'feature') } let(:fork_url) { namespace_project_compare_path(forked_project.namespace, forked_project, from: 'master', to: 'feature') } end end context 'on commits page' do - it_behaves_like 'Merge Request button only shown when allowed' do - let(:label) { 'Create Merge Request' } + it_behaves_like 'Merge request button only shown when allowed' do + let(:label) { 'Create merge request' } let(:url) { namespace_project_commits_path(project.namespace, project, 'feature') } let(:fork_url) { namespace_project_commits_path(forked_project.namespace, forked_project, 'feature') } end diff --git a/spec/features/projects/merge_requests/list_spec.rb b/spec/features/projects/merge_requests/list_spec.rb index 5dd58ad66a7..7e8a796c55d 100644 --- a/spec/features/projects/merge_requests/list_spec.rb +++ b/spec/features/projects/merge_requests/list_spec.rb @@ -17,4 +17,28 @@ feature 'Merge Requests List' do expect(page).not_to have_selector('.js-new-board-list') end + + it 'should show an empty state' do + visit namespace_project_merge_requests_path(project.namespace, project) + + expect(page).to have_selector('.empty-state') + end + + it 'empty state should have a create merge request button' do + visit namespace_project_merge_requests_path(project.namespace, project) + + expect(page).to have_link 'New merge request', href: new_namespace_project_merge_request_path(project.namespace, project) + end + + context 'if there are merge requests' do + before do + create(:merge_request, assignee: user, source_project: 
project) + + visit namespace_project_merge_requests_path(project.namespace, project) + end + + it 'should not show an empty state' do + expect(page).not_to have_selector('.empty-state') + end + end end diff --git a/spec/features/projects/milestones/milestone_spec.rb b/spec/features/projects/milestones/milestone_spec.rb index dab78fd3571..b4fc0edbde8 100644 --- a/spec/features/projects/milestones/milestone_spec.rb +++ b/spec/features/projects/milestones/milestone_spec.rb @@ -63,4 +63,27 @@ feature 'Project milestone', :feature do expect(page).not_to have_content('Assign some issues to this milestone.') end end + + context 'when project has an issue' do + before do + create(:issue, project: project, milestone: milestone) + + visit namespace_project_milestone_path(project.namespace, project, milestone) + end + + describe 'the collapsed sidebar' do + before do + find('.milestone-sidebar .gutter-toggle').click + end + + it 'shows the total MR and issue counts' do + find('.milestone-sidebar .block', match: :first) + + aggregate_failures 'MR and issue blocks' do + expect(find('.milestone-sidebar .block.issues')).to have_content 1 + expect(find('.milestone-sidebar .block.merge-requests')).to have_content 0 + end + end + end + end end diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb index 52196ce49bd..c66b9a34b86 100644 --- a/spec/features/projects/new_project_spec.rb +++ b/spec/features/projects/new_project_spec.rb @@ -71,6 +71,22 @@ feature "New project", feature: true do end end end + + context "with subgroup namespace" do + let(:group) { create(:group, :private, owner: user) } + let(:subgroup) { create(:group, parent: group) } + + before do + group.add_master(user) + visit new_project_path(namespace_id: subgroup.id) + end + + it "selects the group namespace" do + namespace = find("#project_namespace_id option[selected]") + + expect(namespace.text).to eq subgroup.full_path + end + end end context 'Import project options' do diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb new file mode 100644 index 00000000000..1211b17b3d8 --- /dev/null +++ b/spec/features/projects/pipeline_schedules_spec.rb @@ -0,0 +1,146 @@ +require 'spec_helper' + +feature 'Pipeline Schedules', :feature do + include PipelineSchedulesHelper + include WaitForAjax + + let!(:project) { create(:project) } + let!(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) } + let!(:pipeline) { create(:ci_pipeline, pipeline_schedule: pipeline_schedule) } + let(:scope) { nil } + let!(:user) { create(:user) } + + before do + project.add_master(user) + + login_as(user) + visit_page + end + + describe 'GET /projects/pipeline_schedules' do + let(:visit_page) { visit_pipelines_schedules } + + it 'avoids N + 1 queries' do + control_count = ActiveRecord::QueryRecorder.new { visit_pipelines_schedules }.count + + create_list(:ci_pipeline_schedule, 2, project: project) + + expect { visit_pipelines_schedules }.not_to exceed_query_limit(control_count) + end + + describe 'The view' do + it 'displays the required information description' do + page.within('.pipeline-schedule-table-row') do + expect(page).to have_content('pipeline schedule') + expect(page).to have_link('master') + expect(page).to have_link("##{pipeline.id}") + end + end + + it 'creates a new scheduled pipeline' do + click_link 'New schedule' + + expect(page).to have_content('Schedule a new pipeline') + end + + it 'changes ownership of the pipeline' do + 
click_link 'Take ownership' + page.within('.pipeline-schedule-table-row') do + expect(page).not_to have_content('No owner') + expect(page).to have_link('John Doe') + end + end + + it 'edits the pipeline' do + page.within('.pipeline-schedule-table-row') do + click_link 'Edit' + end + + expect(page).to have_content('Edit Pipeline Schedule') + end + + it 'deletes the pipeline' do + click_link 'Delete' + + expect(page).not_to have_content('pipeline schedule') + end + end + end + + describe 'POST /projects/pipeline_schedules/new', js: true do + let(:visit_page) { visit_new_pipeline_schedule } + + it 'sets defaults for timezone and target branch' do + expect(page).to have_button('master') + expect(page).to have_button('UTC') + end + + it 'creates a new scheduled pipeline' do + fill_in_schedule_form + save_pipeline_schedule + + expect(page).to have_content('my fancy description') + end + + it 'prevents an invalid form from being submitted' do + save_pipeline_schedule + + expect(page).to have_content('This field is required') + end + end + + describe 'PATCH /projects/pipelines_schedules/:id/edit', js: true do + let(:visit_page) do + edit_pipeline_schedule + end + + it 'displays existing properties' do + description = find_field('schedule_description').value + expect(description).to eq('pipeline schedule') + expect(page).to have_button('master') + expect(page).to have_button('UTC') + end + + it 'edits the scheduled pipeline' do + fill_in 'schedule_description', with: 'my brand new description' + + save_pipeline_schedule + + expect(page).to have_content('my brand new description') + end + end + + def visit_new_pipeline_schedule + visit new_namespace_project_pipeline_schedule_path(project.namespace, project, pipeline_schedule) + end + + def edit_pipeline_schedule + visit edit_namespace_project_pipeline_schedule_path(project.namespace, project, pipeline_schedule) + end + + def visit_pipelines_schedules + visit namespace_project_pipeline_schedules_path(project.namespace, project, scope: scope) + end + + def select_timezone + find('.js-timezone-dropdown').click + click_link 'American Samoa' + end + + def select_target_branch + find('.js-target-branch-dropdown').click + click_link 'master' + end + + def save_pipeline_schedule + click_button 'Save pipeline schedule' + end + + def fill_in_schedule_form + fill_in 'schedule_description', with: 'my fancy description' + fill_in 'schedule_cron', with: '* 1 2 3 4' + + select_timezone + select_target_branch + end +end diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb index 5a53e48f5f8..cfac54ef259 100644 --- a/spec/features/projects/pipelines/pipeline_spec.rb +++ b/spec/features/projects/pipelines/pipeline_spec.rb @@ -254,4 +254,57 @@ describe 'Pipeline', :feature, :js do it { expect(build_manual.reload).to be_pending } end end + + describe 'GET /:project/pipelines/:id/failures' do + let(:project) { create(:project) } + let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id) } + let(:pipeline_failures_page) { failures_namespace_project_pipeline_path(project.namespace, project, pipeline) } + let!(:failed_build) { create(:ci_build, :failed, pipeline: pipeline) } + + context 'with failed build' do + before do + failed_build.trace.set('4 examples, 1 failure') + + visit pipeline_failures_page + end + + it 'shows jobs tab pane as active' do + expect(page).to have_content('Failed Jobs') + expect(page).to have_css('#js-tab-failures.active') + end + + it 'lists
failed builds' do + expect(page).to have_content(failed_build.name) + expect(page).to have_content(failed_build.stage) + end + + it 'shows build failure logs' do + expect(page).to have_content('4 examples, 1 failure') + end + end + + context 'when missing build logs' do + before do + visit pipeline_failures_page + end + + it 'includes failed jobs' do + expect(page).to have_content('No job trace') + end + end + + context 'without failures' do + before do + failed_build.update!(status: :success) + + visit pipeline_failures_page + end + + it 'displays the pipeline graph' do + expect(current_path).to eq(pipeline_path(pipeline)) + expect(page).not_to have_content('Failed Jobs') + expect(page).to have_selector('.pipeline-visualization') + end + end + end end diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb index 2272b19bc8f..5f82cf2f5e5 100644 --- a/spec/features/projects/pipelines/pipelines_spec.rb +++ b/spec/features/projects/pipelines/pipelines_spec.rb @@ -22,7 +22,7 @@ describe 'Pipelines', :feature, :js do project: project, ref: 'master', status: 'running', - sha: project.commit.id, + sha: project.commit.id ) end @@ -370,6 +370,58 @@ describe 'Pipelines', :feature, :js do end end + describe 'GET /:project/pipelines/show' do + let(:project) { create(:project) } + + let(:pipeline) do + create(:ci_empty_pipeline, + project: project, + sha: project.commit.id, + user: user) + end + + before do + create_build('build', 0, 'build', :success) + create_build('test', 1, 'rspec 0:2', :pending) + create_build('test', 1, 'rspec 1:2', :running) + create_build('test', 1, 'spinach 0:2', :created) + create_build('test', 1, 'spinach 1:2', :created) + create_build('test', 1, 'audit', :created) + create_build('deploy', 2, 'production', :created) + + create(:generic_commit_status, pipeline: pipeline, stage: 'external', name: 'jenkins', stage_idx: 3) + + visit namespace_project_pipeline_path(project.namespace, project, pipeline) + wait_for_vue_resource + end + + it 'shows a graph with grouped stages' do + expect(page).to have_css('.js-pipeline-graph') + + # header + expect(page).to have_text("##{pipeline.id}") + expect(page).to have_selector(%Q(img[alt$="#{pipeline.user.name}'s avatar"])) + expect(page).to have_link(pipeline.user.name, href: user_path(pipeline.user)) + + # stages + expect(page).to have_text('Build') + expect(page).to have_text('Test') + expect(page).to have_text('Deploy') + expect(page).to have_text('External') + + # builds + expect(page).to have_text('rspec') + expect(page).to have_text('spinach') + expect(page).to have_text('rspec') + expect(page).to have_text('production') + expect(page).to have_text('jenkins') + end + + def create_build(stage, stage_idx, name, status) + create(:ci_build, pipeline: pipeline, stage: stage, stage_idx: stage_idx, name: name, status: status) + end + end + describe 'POST /:project/pipelines' do let(:project) { create(:project) } diff --git a/spec/features/projects/project_settings_spec.rb b/spec/features/projects/project_settings_spec.rb index 5d0314d5c09..11dcab4d737 100644 --- a/spec/features/projects/project_settings_spec.rb +++ b/spec/features/projects/project_settings_spec.rb @@ -1,64 +1,158 @@ require 'spec_helper' describe 'Edit Project Settings', feature: true do + include Select2Helper + let(:user) { create(:user) } - let(:project) { create(:empty_project, path: 'gitlab', name: 'sample') } + let(:project) { create(:empty_project, namespace: user.namespace, path: 'gitlab', name: 'sample') } 
before do login_as(user) - project.team << [user, :master] end - describe 'Project settings', js: true do + describe 'Project settings section', js: true do it 'shows errors for invalid project name' do visit edit_namespace_project_path(project.namespace, project) - fill_in 'project_name_edit', with: 'foo&bar' - click_button 'Save changes' - expect(page).to have_field 'project_name_edit', with: 'foo&bar' expect(page).to have_content "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'." expect(page).to have_button 'Save changes' end - scenario 'shows a successful notice when the project is updated' do + it 'shows a successful notice when the project is updated' do visit edit_namespace_project_path(project.namespace, project) - fill_in 'project_name_edit', with: 'hello world' - click_button 'Save changes' - expect(page).to have_content "Project 'hello world' was successfully updated." end end - describe 'Rename repository' do - it 'shows errors for invalid project path/name' do - visit edit_namespace_project_path(project.namespace, project) - - fill_in 'project_name', with: 'foo&bar' - fill_in 'Path', with: 'foo&bar' + describe 'Rename repository section' do + context 'with invalid characters' do + it 'shows errors for invalid project path/name' do + rename_project(project, name: 'foo&bar', path: 'foo&bar') + expect(page).to have_field 'Project name', with: 'foo&bar' + expect(page).to have_field 'Path', with: 'foo&bar' + expect(page).to have_content "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'." + expect(page).to have_content "Path can contain only letters, digits, '_', '-' and '.'. Cannot start with '-', end in '.git' or end in '.atom'" + end + end - click_button 'Rename project' + context 'when changing project name' do + it 'renames the repository' do + rename_project(project, name: 'bar') + expect(find('h1.title')).to have_content(project.name) + end + + context 'with emojis' do + it 'shows error for invalid project name' do + rename_project(project, name: '🚀 foo bar ☁️') + expect(page).to have_field 'Project name', with: '🚀 foo bar ☁️' + expect(page).not_to have_content "Name can contain only letters, digits, emojis '_', '.', dash and space. It must start with letter, digit, emoji or '_'." + end + end + end - expect(page).to have_field 'Project name', with: 'foo&bar' - expect(page).to have_field 'Path', with: 'foo&bar' - expect(page).to have_content "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'." - expect(page).to have_content "Path can contain only letters, digits, '_', '-' and '.'. 
Cannot start with '-', end in '.git' or end in '.atom'" + context 'when changing project path' do + # Not using empty project because we need a repo to exist + let(:project) { create(:project, namespace: user.namespace, name: 'gitlabhq') } + + before(:context) { TestEnv.clean_test_path } + after(:example) { TestEnv.clean_test_path } + + specify 'the project is accessible via the new path' do + rename_project(project, path: 'bar') + new_path = namespace_project_path(project.namespace, 'bar') + visit new_path + expect(current_path).to eq(new_path) + expect(find('h1.title')).to have_content(project.name) + end + + specify 'the project is accessible via a redirect from the old path' do + old_path = namespace_project_path(project.namespace, project) + rename_project(project, path: 'bar') + new_path = namespace_project_path(project.namespace, 'bar') + visit old_path + expect(current_path).to eq(new_path) + expect(find('h1.title')).to have_content(project.name) + end + + context 'and a new project is added with the same path' do + it 'overrides the redirect' do + old_path = namespace_project_path(project.namespace, project) + rename_project(project, path: 'bar') + new_project = create(:empty_project, namespace: user.namespace, path: 'gitlabhq', name: 'quz') + visit old_path + expect(current_path).to eq(old_path) + expect(find('h1.title')).to have_content(new_project.name) + end + end end end - describe 'Rename repository name with emojis' do - it 'shows error for invalid project name' do - visit edit_namespace_project_path(project.namespace, project) - - fill_in 'project_name', with: '🚀 foo bar ☁️' + describe 'Transfer project section', js: true do + # Not using empty project because we need a repo to exist + let!(:project) { create(:project, namespace: user.namespace, name: 'gitlabhq') } + let!(:group) { create(:group) } + + before(:context) { TestEnv.clean_test_path } + before(:example) { group.add_owner(user) } + after(:example) { TestEnv.clean_test_path } + + specify 'the project is accessible via the new path' do + transfer_project(project, group) + new_path = namespace_project_path(group, project) + visit new_path + expect(current_path).to eq(new_path) + expect(find('h1.title')).to have_content(project.name) + end - click_button 'Rename project' + specify 'the project is accessible via a redirect from the old path' do + old_path = namespace_project_path(project.namespace, project) + transfer_project(project, group) + new_path = namespace_project_path(group, project) + visit old_path + expect(current_path).to eq(new_path) + expect(find('h1.title')).to have_content(project.name) + end - expect(page).to have_field 'Project name', with: '🚀 foo bar ☁️' - expect(page).not_to have_content "Name can contain only letters, digits, emojis '_', '.', dash and space. It must start with letter, digit, emoji or '_'." 
+ context 'and a new project is added with the same path' do + it 'overrides the redirect' do + old_path = namespace_project_path(project.namespace, project) + transfer_project(project, group) + new_project = create(:empty_project, namespace: user.namespace, path: 'gitlabhq', name: 'quz') + visit old_path + expect(current_path).to eq(old_path) + expect(find('h1.title')).to have_content(new_project.name) + end end end end + +def rename_project(project, name: nil, path: nil) + visit edit_namespace_project_path(project.namespace, project) + fill_in('project_name', with: name) if name + fill_in('Path', with: path) if path + click_button('Rename project') + wait_for_edit_project_page_reload + project.reload +end + +def transfer_project(project, namespace) + visit edit_namespace_project_path(project.namespace, project) + select2(namespace.id, from: '#new_namespace_id') + click_button('Transfer project') + confirm_transfer_modal + wait_for_edit_project_page_reload + project.reload +end + +def confirm_transfer_modal + fill_in('confirm_name_input', with: project.path) + click_button 'Confirm' +end + +def wait_for_edit_project_page_reload + expect(find('.project-edit-container')).to have_content('Rename repository') +end diff --git a/spec/features/projects/ref_switcher_spec.rb b/spec/features/projects/ref_switcher_spec.rb index 3b8f0b2d3f8..881ad7910dd 100644 --- a/spec/features/projects/ref_switcher_spec.rb +++ b/spec/features/projects/ref_switcher_spec.rb @@ -1,7 +1,6 @@ require 'rails_helper' feature 'Ref switcher', feature: true, js: true do - include WaitForAjax let(:user) { create(:user) } let(:project) { create(:project, :public) } diff --git a/spec/features/projects/settings/integration_settings_spec.rb b/spec/features/projects/settings/integration_settings_spec.rb new file mode 100644 index 00000000000..d3232f0cc16 --- /dev/null +++ b/spec/features/projects/settings/integration_settings_spec.rb @@ -0,0 +1,96 @@ +require 'spec_helper' + +feature 'Integration settings', feature: true do + let(:project) { create(:empty_project) } + let(:user) { create(:user) } + let(:role) { :developer } + let(:integrations_path) { namespace_project_settings_integrations_path(project.namespace, project) } + + background do + login_as(user) + project.team << [user, role] + end + + context 'for developer' do + given(:role) { :developer } + + scenario 'to be disallowed to view' do + visit integrations_path + + expect(page.status_code).to eq(404) + end + end + + context 'for master' do + given(:role) { :master } + + context 'Webhooks' do + let(:hook) { create(:project_hook, :all_events_enabled, enable_ssl_verification: true, project: project) } + let(:url) { generate(:url) } + + scenario 'show list of webhooks' do + hook + + visit integrations_path + + expect(page.status_code).to eq(200) + expect(page).to have_content(hook.url) + expect(page).to have_content('SSL Verification: enabled') + expect(page).to have_content('Push Events') + expect(page).to have_content('Tag Push Events') + expect(page).to have_content('Issues Events') + expect(page).to have_content('Confidential Issues Events') + expect(page).to have_content('Note Events') + expect(page).to have_content('Merge Requests Events') + expect(page).to have_content('Pipeline Events') + expect(page).to have_content('Wiki Page Events') + end + + scenario 'create webhook' do + visit integrations_path + + fill_in 'hook_url', with: url + check 'Tag push events' + check 'Enable SSL verification' + check 'Job events' + + click_button 'Add webhook' + + expect(page).to 
have_content(url) + expect(page).to have_content('SSL Verification: enabled') + expect(page).to have_content('Push Events') + expect(page).to have_content('Tag Push Events') + expect(page).to have_content('Job events') + end + + scenario 'edit existing webhook' do + hook + visit integrations_path + + click_link 'Edit' + fill_in 'hook_url', with: url + check 'Enable SSL verification' + click_button 'Save changes' + + expect(page).to have_content 'SSL Verification: enabled' + expect(page).to have_content(url) + end + + scenario 'test existing webhook' do + WebMock.stub_request(:post, hook.url) + visit integrations_path + + click_link 'Test' + + expect(current_path).to eq(integrations_path) + end + + scenario 'remove existing webhook' do + hook + visit integrations_path + + expect { click_link 'Remove' }.to change(ProjectHook, :count).by(-1) + end + end + end +end diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb index 76cb240ea98..035c57eaa47 100644 --- a/spec/features/projects/settings/pipelines_settings_spec.rb +++ b/spec/features/projects/settings/pipelines_settings_spec.rb @@ -32,5 +32,16 @@ feature "Pipelines settings", feature: true do expect(page).to have_button('Save changes', disabled: false) expect(page).to have_field('Test coverage parsing', with: 'coverage_regex') end + + scenario 'updates auto_cancel_pending_pipelines' do + page.check('Auto-cancel redundant, pending pipelines') + click_on 'Save changes' + + expect(page.status_code).to eq(200) + expect(page).to have_button('Save changes', disabled: false) + + checkbox = find_field('project_auto_cancel_pending_pipelines') + expect(checkbox).to be_checked + end end end diff --git a/spec/features/projects/snippets/show_spec.rb b/spec/features/projects/snippets/show_spec.rb new file mode 100644 index 00000000000..cedf3778c7e --- /dev/null +++ b/spec/features/projects/snippets/show_spec.rb @@ -0,0 +1,144 @@ +require 'spec_helper' + +feature 'Project snippet', :js, feature: true do + let(:user) { create(:user) } + let(:project) { create(:project, :repository) } + let(:snippet) { create(:project_snippet, project: project, file_name: file_name, content: content) } + + before do + project.team << [user, :master] + login_as(user) + end + + context 'Ruby file' do + let(:file_name) { 'popen.rb' } + let(:content) { project.repository.blob_at('master', 'files/ruby/popen.rb').data } + + before do + visit namespace_project_snippet_path(project.namespace, project, snippet) + + wait_for_ajax + end + + it 'displays the blob' do + aggregate_failures do + # shows highlighted Ruby code + expect(page).to have_content("require 'fileutils'") + + # does not show a viewer switcher + expect(page).not_to have_selector('.js-blob-viewer-switcher') + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + + # shows a raw button + expect(page).to have_link('Open raw') + + # shows a download button + expect(page).to have_link('Download') + end + end + end + + context 'Markdown file' do + let(:file_name) { 'ruby-style-guide.md' } + let(:content) { project.repository.blob_at('master', 'files/markdown/ruby-style-guide.md').data } + + context 'visiting directly' do + before do + visit namespace_project_snippet_path(project.namespace, project, snippet) + + wait_for_ajax + end + + it 'displays the blob using the rich viewer' do + aggregate_failures do + # hides the simple viewer + expect(page).to 
have_selector('.blob-viewer[data-type="simple"]', visible: false) + expect(page).to have_selector('.blob-viewer[data-type="rich"]') + + # shows rendered Markdown + expect(page).to have_link("PEP-8") + + # shows a viewer switcher + expect(page).to have_selector('.js-blob-viewer-switcher') + + # shows a disabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn.disabled') + + # shows a raw button + expect(page).to have_link('Open raw') + + # shows a download button + expect(page).to have_link('Download') + end + end + + context 'switching to the simple viewer' do + before do + find('.js-blob-viewer-switch-btn[data-viewer=simple]').click + + wait_for_ajax + end + + it 'displays the blob using the simple viewer' do + aggregate_failures do + # hides the rich viewer + expect(page).to have_selector('.blob-viewer[data-type="simple"]') + expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false) + + # shows highlighted Markdown code + expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)") + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + end + end + + context 'switching to the rich viewer again' do + before do + find('.js-blob-viewer-switch-btn[data-viewer=rich]').click + + wait_for_ajax + end + + it 'displays the blob using the rich viewer' do + aggregate_failures do + # hides the simple viewer + expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false) + expect(page).to have_selector('.blob-viewer[data-type="rich"]') + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + end + end + end + end + end + + context 'visiting with a line number anchor' do + before do + visit namespace_project_snippet_path(project.namespace, project, snippet, anchor: 'L1') + + wait_for_ajax + end + + it 'displays the blob using the simple viewer' do + aggregate_failures do + # hides the rich viewer + expect(page).to have_selector('.blob-viewer[data-type="simple"]') + expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false) + + # highlights the line in question + expect(page).to have_selector('#LC1.hll') + + # shows highlighted Markdown code + expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)") + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + end + end + end + end +end diff --git a/spec/features/projects/snippets_spec.rb b/spec/features/projects/snippets_spec.rb index d37e8ed4699..18689c17fe9 100644 --- a/spec/features/projects/snippets_spec.rb +++ b/spec/features/projects/snippets_spec.rb @@ -4,11 +4,27 @@ describe 'Project snippets', feature: true do context 'when the project has snippets' do let(:project) { create(:empty_project, :public) } let!(:snippets) { create_list(:project_snippet, 2, :public, author: project.owner, project: project) } - before do - allow(Snippet).to receive(:default_per_page).and_return(1) - visit namespace_project_snippets_path(project.namespace, project) + let!(:other_snippet) { create(:project_snippet) } + + context 'pagination' do + before do + allow(Snippet).to receive(:default_per_page).and_return(1) + + visit namespace_project_snippets_path(project.namespace, project) + end + + it_behaves_like 'paginated snippets' end - it_behaves_like 'paginated snippets' + context 'list content' do + it 'contains all project snippets' do + visit 
namespace_project_snippets_path(project.namespace, project) + + expect(page).to have_selector('.snippet-row', count: 2) + + expect(page).to have_content(snippets[0].title) + expect(page).to have_content(snippets[1].title) + end + end end end diff --git a/spec/features/projects/user_create_dir_spec.rb b/spec/features/projects/user_create_dir_spec.rb index 2065abfb248..5dfdc465d7d 100644 --- a/spec/features/projects/user_create_dir_spec.rb +++ b/spec/features/projects/user_create_dir_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' feature 'New directory creation', feature: true, js: true do - include WaitForAjax include TargetBranchHelpers given(:user) { create(:user) } diff --git a/spec/features/projects/view_on_env_spec.rb b/spec/features/projects/view_on_env_spec.rb index ce5c5f21167..b7a41ca54e6 100644 --- a/spec/features/projects/view_on_env_spec.rb +++ b/spec/features/projects/view_on_env_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe 'View on environment', js: true do - include WaitForAjax - let(:branch_name) { 'feature' } let(:file_path) { 'files/ruby/feature.rb' } let(:project) { create(:project, :repository) } @@ -25,7 +23,7 @@ describe 'View on environment', js: true do project, user, start_branch: branch_name, - target_branch: branch_name, + branch_name: branch_name, commit_message: "Add .gitlab/route-map.yml", file_path: '.gitlab/route-map.yml', file_content: route_map @@ -36,7 +34,7 @@ describe 'View on environment', js: true do project, user, start_branch: branch_name, - target_branch: branch_name, + branch_name: branch_name, commit_message: "Update feature", file_path: file_path, file_content: "# Noop" diff --git a/spec/features/projects/wiki/markdown_preview_spec.rb b/spec/features/projects/wiki/markdown_preview_spec.rb index a1c386ddc18..49d7ef09e64 100644 --- a/spec/features/projects/wiki/markdown_preview_spec.rb +++ b/spec/features/projects/wiki/markdown_preview_spec.rb @@ -17,19 +17,23 @@ feature 'Projects > Wiki > User previews markdown changes', feature: true, js: t login_as(user) visit namespace_project_path(project.namespace, project) - click_link 'Wiki' + find('.shortcuts-wiki').trigger('click') WikiPages::CreateService.new(project, user, title: 'home', content: 'Home page').execute end context "while creating a new wiki page" do context "when there are no spaces or hyphens in the page name" do it "rewrites relative links as expected" do - click_link 'New Page' - fill_in :new_wiki_path, with: 'a/b/c/d' - click_button 'Create Page' + find('.add-new-wiki').trigger('click') + page.within '#modal-new-wiki' do + fill_in :new_wiki_path, with: 'a/b/c/d' + click_button 'Create page' + end - fill_in :wiki_content, with: wiki_content - click_on "Preview" + page.within '.wiki-form' do + fill_in :wiki_content, with: wiki_content + click_on "Preview" + end expect(page).to have_content("regular link") @@ -42,12 +46,16 @@ feature 'Projects > Wiki > User previews markdown changes', feature: true, js: t context "when there are spaces in the page name" do it "rewrites relative links as expected" do - click_link 'New Page' - fill_in :new_wiki_path, with: 'a page/b page/c page/d page' - click_button 'Create Page' + click_link 'New page' + page.within '#modal-new-wiki' do + fill_in :new_wiki_path, with: 'a page/b page/c page/d page' + click_button 'Create page' + end - fill_in :wiki_content, with: wiki_content - click_on "Preview" + page.within '.wiki-form' do + fill_in :wiki_content, with: wiki_content + click_on "Preview" + end expect(page).to have_content("regular link") @@ 
-60,12 +68,16 @@ feature 'Projects > Wiki > User previews markdown changes', feature: true, js: t context "when there are hyphens in the page name" do it "rewrites relative links as expected" do - click_link 'New Page' - fill_in :new_wiki_path, with: 'a-page/b-page/c-page/d-page' - click_button 'Create Page' + click_link 'New page' + page.within '#modal-new-wiki' do + fill_in :new_wiki_path, with: 'a-page/b-page/c-page/d-page' + click_button 'Create page' + end - fill_in :wiki_content, with: wiki_content - click_on "Preview" + page.within '.wiki-form' do + fill_in :wiki_content, with: wiki_content + click_on "Preview" + end expect(page).to have_content("regular link") @@ -79,11 +91,17 @@ feature 'Projects > Wiki > User previews markdown changes', feature: true, js: t context "while editing a wiki page" do def create_wiki_page(path) - click_link 'New Page' - fill_in :new_wiki_path, with: path - click_button 'Create Page' - fill_in :wiki_content, with: 'content' - click_on "Create page" + find('.add-new-wiki').trigger('click') + + page.within '#modal-new-wiki' do + fill_in :new_wiki_path, with: path + click_button 'Create page' + end + + page.within '.wiki-form' do + fill_in :wiki_content, with: 'content' + click_on "Create page" + end end context "when there are no spaces or hyphens in the page name" do diff --git a/spec/features/projects/wiki/shortcuts_spec.rb b/spec/features/projects/wiki/shortcuts_spec.rb new file mode 100644 index 00000000000..c1f6b0cce3b --- /dev/null +++ b/spec/features/projects/wiki/shortcuts_spec.rb @@ -0,0 +1,20 @@ +require 'spec_helper' + +feature 'Wiki shortcuts', :feature, :js do + let(:user) { create(:user) } + let(:project) { create(:empty_project, namespace: user.namespace) } + let(:wiki_page) do + WikiPages::CreateService.new(project, user, title: 'home', content: 'Home page').execute + end + + before do + login_as(user) + visit namespace_project_wiki_path(project.namespace, project, wiki_page) + end + + scenario 'Visit edit wiki page using "e" keyboard shortcut' do + find('body').native.send_key('e') + + expect(find('.wiki-page-title')).to have_content('Edit Page') + end +end diff --git a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb index fff8b9f3447..5c502ce4fb5 100644 --- a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb +++ b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -feature 'Projects > Wiki > User creates wiki page', feature: true do +feature 'Projects > Wiki > User creates wiki page', js: true, feature: true do let(:user) { create(:user) } background do @@ -8,17 +8,22 @@ feature 'Projects > Wiki > User creates wiki page', feature: true do login_as(user) visit namespace_project_path(project.namespace, project) - click_link 'Wiki' + find('.shortcuts-wiki').trigger('click') end context 'in the user namespace' do let(:project) { create(:project, namespace: user.namespace) } context 'when wiki is empty' do + scenario 'commit message field has value "Create home"' do + expect(page).to have_field('wiki[message]', with: 'Create home') + end + scenario 'directly from the wiki home page' do fill_in :wiki_content, with: 'My awesome wiki!' 
- click_button 'Create page' - + page.within '.wiki-form' do + click_button 'Create page' + end expect(page).to have_content('Home') expect(page).to have_content("Last edited by #{user.name}") expect(page).to have_content('My awesome wiki!') @@ -32,13 +37,20 @@ feature 'Projects > Wiki > User creates wiki page', feature: true do context 'via the "new wiki page" page' do scenario 'when the wiki page has a single word name', js: true do - click_link 'New Page' + click_link 'New page' - fill_in :new_wiki_path, with: 'foo' - click_button 'Create Page' + page.within '#modal-new-wiki' do + fill_in :new_wiki_path, with: 'foo' + click_button 'Create page' + end - fill_in :wiki_content, with: 'My awesome wiki!' - click_button 'Create page' + # Commit message field should have correct value. + expect(page).to have_field('wiki[message]', with: 'Create foo') + + page.within '.wiki-form' do + fill_in :wiki_content, with: 'My awesome wiki!' + click_button 'Create page' + end expect(page).to have_content('Foo') expect(page).to have_content("Last edited by #{user.name}") @@ -46,13 +58,20 @@ feature 'Projects > Wiki > User creates wiki page', feature: true do end scenario 'when the wiki page has spaces in the name', js: true do - click_link 'New Page' + click_link 'New page' - fill_in :new_wiki_path, with: 'Spaces in the name' - click_button 'Create Page' + page.within '#modal-new-wiki' do + fill_in :new_wiki_path, with: 'Spaces in the name' + click_button 'Create page' + end - fill_in :wiki_content, with: 'My awesome wiki!' - click_button 'Create page' + # Commit message field should have correct value. + expect(page).to have_field('wiki[message]', with: 'Create spaces in the name') + + page.within '.wiki-form' do + fill_in :wiki_content, with: 'My awesome wiki!' + click_button 'Create page' + end expect(page).to have_content('Spaces in the name') expect(page).to have_content("Last edited by #{user.name}") @@ -60,13 +79,20 @@ feature 'Projects > Wiki > User creates wiki page', feature: true do end scenario 'when the wiki page has hyphens in the name', js: true do - click_link 'New Page' + click_link 'New page' - fill_in :new_wiki_path, with: 'hyphens-in-the-name' - click_button 'Create Page' + page.within '#modal-new-wiki' do + fill_in :new_wiki_path, with: 'hyphens-in-the-name' + click_button 'Create page' + end - fill_in :wiki_content, with: 'My awesome wiki!' - click_button 'Create page' + # Commit message field should have correct value. + expect(page).to have_field('wiki[message]', with: 'Create hyphens in the name') + + page.within '.wiki-form' do + fill_in :wiki_content, with: 'My awesome wiki!' + click_button 'Create page' + end expect(page).to have_content('Hyphens in the name') expect(page).to have_content("Last edited by #{user.name}") @@ -80,9 +106,15 @@ feature 'Projects > Wiki > User creates wiki page', feature: true do let(:project) { create(:project, namespace: create(:group, :public)) } context 'when wiki is empty' do + scenario 'commit message field has value "Create home"' do + expect(page).to have_field('wiki[message]', with: 'Create home') + end + scenario 'directly from the wiki home page' do fill_in :wiki_content, with: 'My awesome wiki!' 
- click_button 'Create page' + page.within '.wiki-form' do + click_button 'Create page' + end expect(page).to have_content('Home') expect(page).to have_content("Last edited by #{user.name}") @@ -96,13 +128,20 @@ feature 'Projects > Wiki > User creates wiki page', feature: true do end scenario 'via the "new wiki page" page', js: true do - click_link 'New Page' + click_link 'New page' - fill_in :new_wiki_path, with: 'foo' - click_button 'Create Page' + page.within '#modal-new-wiki' do + fill_in :new_wiki_path, with: 'foo' + click_button 'Create page' + end - fill_in :wiki_content, with: 'My awesome wiki!' - click_button 'Create page' + # Commit message field should have correct value. + expect(page).to have_field('wiki[message]', with: 'Create foo') + + page.within '.wiki-form' do + fill_in :wiki_content, with: 'My awesome wiki!' + click_button 'Create page' + end expect(page).to have_content('Foo') expect(page).to have_content("Last edited by #{user.name}") diff --git a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb index aedc0333cb9..86cf520ea80 100644 --- a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb +++ b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb @@ -19,6 +19,9 @@ feature 'Projects > Wiki > User updates wiki page', feature: true do scenario 'success when the wiki content is not empty' do click_link 'Edit' + # Commit message field should have correct value. + expect(page).to have_field('wiki[message]', with: 'Update home') + fill_in :wiki_content, with: 'My awesome wiki!' click_button 'Save changes' @@ -48,6 +51,9 @@ feature 'Projects > Wiki > User updates wiki page', feature: true do scenario 'the home page' do click_link 'Edit' + # Commit message field should have correct value. + expect(page).to have_field('wiki[message]', with: 'Update home') + fill_in :wiki_content, with: 'My awesome wiki!' 
click_button 'Save changes' diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb index ba56030e28d..060e19596ae 100644 --- a/spec/features/projects_spec.rb +++ b/spec/features/projects_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' feature 'Project', feature: true do describe 'description' do - let(:project) { create(:project) } + let(:project) { create(:project, :repository) } let(:path) { namespace_project_path(project.namespace, project) } before do @@ -36,7 +36,7 @@ feature 'Project', feature: true do describe 'remove forked relationship', js: true do let(:user) { create(:user) } - let(:project) { create(:project, namespace: user.namespace) } + let(:project) { create(:empty_project, namespace: user.namespace) } before do login_with user @@ -57,7 +57,7 @@ feature 'Project', feature: true do describe 'removal', js: true do let(:user) { create(:user, username: 'test', name: 'test') } - let(:project) { create(:project, namespace: user.namespace, name: 'project1') } + let(:project) { create(:empty_project, namespace: user.namespace, name: 'project1') } before do login_with(user) @@ -75,10 +75,8 @@ feature 'Project', feature: true do end describe 'project title' do - include WaitForAjax - let(:user) { create(:user) } - let(:project) { create(:project, namespace: user.namespace) } + let(:project) { create(:empty_project, namespace: user.namespace) } before do login_with(user) @@ -94,8 +92,8 @@ feature 'Project', feature: true do describe 'project title' do let(:user) { create(:user) } - let(:project) { create(:project, namespace: user.namespace) } - let(:project2) { create(:project, namespace: user.namespace, path: 'test') } + let(:project) { create(:empty_project, namespace: user.namespace) } + let(:project2) { create(:empty_project, namespace: user.namespace, path: 'test') } let(:issue) { create(:issue, project: project) } context 'on issues page', js: true do diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb index 1a3f7b970f6..884d1bbb10c 100644 --- a/spec/features/protected_branches_spec.rb +++ b/spec/features/protected_branches_spec.rb @@ -1,16 +1,13 @@ require 'spec_helper' -Dir["./spec/features/protected_branches/*.rb"].sort.each { |f| require f } feature 'Projected Branches', feature: true, js: true do - include WaitForAjax - let(:user) { create(:user, :admin) } - let(:project) { create(:project) } + let(:project) { create(:project, :repository) } before { login_as(user) } def set_protected_branch_name(branch_name) - find(".js-protected-branch-select").click + find(".js-protected-branch-select").trigger('click') find(".dropdown-input-field").set(branch_name) click_on("Create wildcard #{branch_name}") end diff --git a/spec/features/protected_tags_spec.rb b/spec/features/protected_tags_spec.rb new file mode 100644 index 00000000000..66236dbc7fc --- /dev/null +++ b/spec/features/protected_tags_spec.rb @@ -0,0 +1,92 @@ +require 'spec_helper' + +feature 'Projected Tags', feature: true, js: true do + let(:user) { create(:user, :admin) } + let(:project) { create(:project, :repository) } + + before { login_as(user) } + + def set_protected_tag_name(tag_name) + find(".js-protected-tag-select").click + find(".dropdown-input-field").set(tag_name) + click_on("Create wildcard #{tag_name}") + find('.protected-tags-dropdown .dropdown-menu', visible: false) + end + + describe "explicit protected tags" do + it "allows creating explicit protected tags" do + visit namespace_project_protected_tags_path(project.namespace, project) + 
set_protected_tag_name('some-tag') + click_on "Protect" + + within(".protected-tags-list") { expect(page).to have_content('some-tag') } + expect(ProtectedTag.count).to eq(1) + expect(ProtectedTag.last.name).to eq('some-tag') + end + + it "displays the last commit on the matching tag if it exists" do + commit = create(:commit, project: project) + project.repository.add_tag(user, 'some-tag', commit.id) + + visit namespace_project_protected_tags_path(project.namespace, project) + set_protected_tag_name('some-tag') + click_on "Protect" + + within(".protected-tags-list") { expect(page).to have_content(commit.id[0..7]) } + end + + it "displays an error message if the named tag does not exist" do + visit namespace_project_protected_tags_path(project.namespace, project) + set_protected_tag_name('some-tag') + click_on "Protect" + + within(".protected-tags-list") { expect(page).to have_content('tag was removed') } + end + end + + describe "wildcard protected tags" do + it "allows creating protected tags with a wildcard" do + visit namespace_project_protected_tags_path(project.namespace, project) + set_protected_tag_name('*-stable') + click_on "Protect" + + within(".protected-tags-list") { expect(page).to have_content('*-stable') } + expect(ProtectedTag.count).to eq(1) + expect(ProtectedTag.last.name).to eq('*-stable') + end + + it "displays the number of matching tags" do + project.repository.add_tag(user, 'production-stable', 'master') + project.repository.add_tag(user, 'staging-stable', 'master') + + visit namespace_project_protected_tags_path(project.namespace, project) + set_protected_tag_name('*-stable') + click_on "Protect" + + within(".protected-tags-list") { expect(page).to have_content("2 matching tags") } + end + + it "displays all the tags matching the wildcard" do + project.repository.add_tag(user, 'production-stable', 'master') + project.repository.add_tag(user, 'staging-stable', 'master') + project.repository.add_tag(user, 'development', 'master') + + visit namespace_project_protected_tags_path(project.namespace, project) + set_protected_tag_name('*-stable') + click_on "Protect" + + visit namespace_project_protected_tags_path(project.namespace, project) + click_on "2 matching tags" + + within(".protected-tags-list") do + expect(page).to have_content("production-stable") + expect(page).to have_content("staging-stable") + expect(page).not_to have_content("development") + end + end + end + + describe "access control" do + include_examples "protected tags > access control > CE" + end +end diff --git a/spec/features/raven_js_spec.rb b/spec/features/raven_js_spec.rb new file mode 100644 index 00000000000..e8fa49c18cb --- /dev/null +++ b/spec/features/raven_js_spec.rb @@ -0,0 +1,23 @@ +require 'spec_helper' + +feature 'RavenJS', :feature, :js do + let(:raven_path) { '/raven.bundle.js' } + + it 'should not load raven if sentry is disabled' do + visit new_user_session_path + + expect(has_requested_raven).to eq(false) + end + + it 'should load raven if sentry is enabled' do + stub_application_setting(clientside_sentry_dsn: 'https://key@domain.com/id', clientside_sentry_enabled: true) + + visit new_user_session_path + + expect(has_requested_raven).to eq(true) + end + + def has_requested_raven + page.driver.network_traffic.one? 
{|request| request.url.end_with?(raven_path)} + end +end diff --git a/spec/features/search_spec.rb b/spec/features/search_spec.rb index a6560a81096..2fda7758407 100644 --- a/spec/features/search_spec.rb +++ b/spec/features/search_spec.rb @@ -2,11 +2,10 @@ require 'spec_helper' describe "Search", feature: true do include FilteredSearchHelpers - include WaitForAjax let(:user) { create(:user) } - let(:project) { create(:project, namespace: user.namespace) } - let!(:issue) { create(:issue, project: project, assignee: user) } + let(:project) { create(:empty_project, namespace: user.namespace) } + let!(:issue) { create(:issue, project: project, assignees: [user]) } let!(:issue2) { create(:issue, project: project, author: user) } before do @@ -21,13 +20,14 @@ describe "Search", feature: true do context 'search filters', js: true do let(:group) { create(:group) } + let!(:group_project) { create(:empty_project, group: group) } before do group.add_owner(user) end it 'shows group name after filtering' do - find('.js-search-group-dropdown').click + find('.js-search-group-dropdown').trigger('click') wait_for_ajax page.within '.search-holder' do @@ -37,9 +37,27 @@ describe "Search", feature: true do expect(find('.js-search-group-dropdown')).to have_content(group.name) end + it 'filters by group projects after filtering by group' do + find('.js-search-group-dropdown').trigger('click') + wait_for_ajax + + page.within '.search-holder' do + click_link group.name + end + + expect(find('.js-search-group-dropdown')).to have_content(group.name) + + page.within('.project-filter') do + find('.js-search-project-dropdown').trigger('click') + wait_for_ajax + + expect(page).to have_link(group_project.name_with_namespace) + end + end + it 'shows project name after filtering' do page.within('.project-filter') do - find('.js-search-project-dropdown').click + find('.js-search-project-dropdown').trigger('click') wait_for_ajax click_link project.name_with_namespace @@ -62,6 +80,7 @@ describe "Search", feature: true do context 'search for comments' do context 'when comment belongs to a invalid commit' do + let(:project) { create(:project, :repository) } let(:note) { create(:note_on_commit, author: user, project: project, commit_id: project.repository.commit.id, note: 'Bug here') } before { note.update_attributes(commit_id: 12345678) } @@ -103,6 +122,7 @@ describe "Search", feature: true do end it 'finds a commit' do + project = create(:project, :repository) { |p| p.add_reporter(user) } visit namespace_project_path(project.namespace, project) page.within '.search' do @@ -116,16 +136,19 @@ describe "Search", feature: true do end it 'finds a code' do + project = create(:project, :repository) { |p| p.add_reporter(user) } visit namespace_project_path(project.namespace, project) page.within '.search' do - fill_in 'search', with: 'def' + fill_in 'search', with: 'application.js' click_button 'Go' end click_link "Code" expect(page).to have_selector('.file-content .code') + + expect(page).to have_selector("span.line[lang='javascript']") end end @@ -162,6 +185,8 @@ describe "Search", feature: true do end context 'click the links in the category search dropdown', js: true do + let!(:merge_request) { create(:merge_request, source_project: project, author: user, assignee: user) } + before do page.find('#search').click end @@ -218,6 +243,8 @@ describe "Search", feature: true do end describe 'search for commits' do + let(:project) { create(:project, :repository) } + before do visit search_path(project_id: project.id) end diff --git 
a/spec/features/security/project/internal_access_spec.rb b/spec/features/security/project/internal_access_spec.rb index 1a66d1a6a1e..78a76d9c112 100644 --- a/spec/features/security/project/internal_access_spec.rb +++ b/spec/features/security/project/internal_access_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe "Internal Project Access", feature: true do include AccessMatchers - let(:project) { create(:project, :internal) } + set(:project) { create(:project, :internal) } describe "Project should be internal" do describe '#internal?' do @@ -399,6 +399,58 @@ describe "Internal Project Access", feature: true do end end + describe 'GET /:project_path/builds/:id/trace' do + let(:pipeline) { create(:ci_pipeline, project: project) } + let(:build) { create(:ci_build, pipeline: pipeline) } + subject { trace_namespace_project_build_path(project.namespace, project, build.id) } + + context 'when allowed for public and internal' do + before do + project.update(public_builds: true) + end + + it { is_expected.to be_allowed_for(:admin) } + it { is_expected.to be_allowed_for(:owner).of(project) } + it { is_expected.to be_allowed_for(:master).of(project) } + it { is_expected.to be_allowed_for(:developer).of(project) } + it { is_expected.to be_allowed_for(:reporter).of(project) } + it { is_expected.to be_allowed_for(:guest).of(project) } + it { is_expected.to be_allowed_for(:user) } + it { is_expected.to be_denied_for(:external) } + it { is_expected.to be_denied_for(:visitor) } + end + + context 'when disallowed for public and internal' do + before do + project.update(public_builds: false) + end + + it { is_expected.to be_allowed_for(:admin) } + it { is_expected.to be_allowed_for(:owner).of(project) } + it { is_expected.to be_allowed_for(:master).of(project) } + it { is_expected.to be_allowed_for(:developer).of(project) } + it { is_expected.to be_allowed_for(:reporter).of(project) } + it { is_expected.to be_denied_for(:guest).of(project) } + it { is_expected.to be_denied_for(:user) } + it { is_expected.to be_denied_for(:external) } + it { is_expected.to be_denied_for(:visitor) } + end + end + + describe "GET /:project_path/pipeline_schedules" do + subject { namespace_project_pipeline_schedules_path(project.namespace, project) } + + it { is_expected.to be_allowed_for(:admin) } + it { is_expected.to be_allowed_for(:owner).of(project) } + it { is_expected.to be_allowed_for(:master).of(project) } + it { is_expected.to be_allowed_for(:developer).of(project) } + it { is_expected.to be_allowed_for(:reporter).of(project) } + it { is_expected.to be_allowed_for(:guest).of(project) } + it { is_expected.to be_allowed_for(:user) } + it { is_expected.to be_denied_for(:external) } + it { is_expected.to be_denied_for(:visitor) } + end + describe "GET /:project_path/environments" do subject { namespace_project_environments_path(project.namespace, project) } @@ -428,6 +480,21 @@ describe "Internal Project Access", feature: true do it { is_expected.to be_denied_for(:visitor) } end + describe "GET /:project_path/environments/:id/deployments" do + let(:environment) { create(:environment, project: project) } + subject { namespace_project_environment_deployments_path(project.namespace, project, environment) } + + it { is_expected.to be_allowed_for(:admin) } + it { is_expected.to be_allowed_for(:owner).of(project) } + it { is_expected.to be_allowed_for(:master).of(project) } + it { is_expected.to be_allowed_for(:developer).of(project) } + it { is_expected.to be_allowed_for(:reporter).of(project) } + it { is_expected.to 
be_denied_for(:guest).of(project) } + it { is_expected.to be_denied_for(:user) } + it { is_expected.to be_denied_for(:external) } + it { is_expected.to be_denied_for(:visitor) } + end + describe "GET /:project_path/environments/new" do subject { new_namespace_project_environment_path(project.namespace, project) } @@ -443,9 +510,12 @@ describe "Internal Project Access", feature: true do end describe "GET /:project_path/container_registry" do + let(:container_repository) { create(:container_repository) } + before do - stub_container_registry_tags('latest') + stub_container_registry_tags(repository: :any, tags: ['latest']) stub_container_registry_config(enabled: true) + project.container_repositories << container_repository end subject { namespace_project_container_registry_index_path(project.namespace, project) } diff --git a/spec/features/security/project/private_access_spec.rb b/spec/features/security/project/private_access_spec.rb index ad3bd60a313..a66f6e09055 100644 --- a/spec/features/security/project/private_access_spec.rb +++ b/spec/features/security/project/private_access_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe "Private Project Access", feature: true do include AccessMatchers - let(:project) { create(:project, :private, public_builds: false) } + set(:project) { create(:project, :private, public_builds: false) } describe "Project should be private" do describe '#private?' do @@ -388,6 +388,38 @@ describe "Private Project Access", feature: true do end end + describe 'GET /:project_path/builds/:id/trace' do + let(:pipeline) { create(:ci_pipeline, project: project) } + let(:build) { create(:ci_build, pipeline: pipeline) } + subject { trace_namespace_project_build_path(project.namespace, project, build.id) } + + it { is_expected.to be_allowed_for(:admin) } + it { is_expected.to be_allowed_for(:owner).of(project) } + it { is_expected.to be_allowed_for(:master).of(project) } + it { is_expected.to be_allowed_for(:developer).of(project) } + it { is_expected.to be_allowed_for(:reporter).of(project) } + it { is_expected.to be_denied_for(:guest).of(project) } + it { is_expected.to be_denied_for(:user) } + it { is_expected.to be_denied_for(:external) } + it { is_expected.to be_denied_for(:visitor) } + + context 'when public builds is enabled' do + before do + project.update(public_builds: true) + end + + it { is_expected.to be_allowed_for(:guest).of(project) } + end + + context 'when public builds is disabled' do + before do + project.update(public_builds: false) + end + + it { is_expected.to be_denied_for(:guest).of(project) } + end + end + describe "GET /:project_path/environments" do subject { namespace_project_environments_path(project.namespace, project) } @@ -417,6 +449,21 @@ describe "Private Project Access", feature: true do it { is_expected.to be_denied_for(:visitor) } end + describe "GET /:project_path/environments/:id/deployments" do + let(:environment) { create(:environment, project: project) } + subject { namespace_project_environment_deployments_path(project.namespace, project, environment) } + + it { is_expected.to be_allowed_for(:admin) } + it { is_expected.to be_allowed_for(:owner).of(project) } + it { is_expected.to be_allowed_for(:master).of(project) } + it { is_expected.to be_allowed_for(:developer).of(project) } + it { is_expected.to be_allowed_for(:reporter).of(project) } + it { is_expected.to be_denied_for(:guest).of(project) } + it { is_expected.to be_denied_for(:user) } + it { is_expected.to be_denied_for(:external) } + it { is_expected.to 
be_denied_for(:visitor) } + end + describe "GET /:project_path/environments/new" do subject { new_namespace_project_environment_path(project.namespace, project) } @@ -431,10 +478,55 @@ describe "Private Project Access", feature: true do it { is_expected.to be_denied_for(:visitor) } end + describe "GET /:project_path/pipeline_schedules" do + subject { namespace_project_pipeline_schedules_path(project.namespace, project) } + + it { is_expected.to be_allowed_for(:admin) } + it { is_expected.to be_allowed_for(:owner).of(project) } + it { is_expected.to be_allowed_for(:master).of(project) } + it { is_expected.to be_allowed_for(:developer).of(project) } + it { is_expected.to be_allowed_for(:reporter).of(project) } + it { is_expected.to be_denied_for(:guest).of(project) } + it { is_expected.to be_denied_for(:user) } + it { is_expected.to be_denied_for(:external) } + it { is_expected.to be_denied_for(:visitor) } + end + + describe "GET /:project_path/pipeline_schedules/new" do + subject { new_namespace_project_pipeline_schedule_path(project.namespace, project) } + + it { is_expected.to be_allowed_for(:admin) } + it { is_expected.to be_allowed_for(:owner).of(project) } + it { is_expected.to be_allowed_for(:master).of(project) } + it { is_expected.to be_allowed_for(:developer).of(project) } + it { is_expected.to be_denied_for(:reporter).of(project) } + it { is_expected.to be_denied_for(:guest).of(project) } + it { is_expected.to be_denied_for(:user) } + it { is_expected.to be_denied_for(:external) } + it { is_expected.to be_denied_for(:visitor) } + end + + describe "GET /:project_path/environments/new" do + subject { new_namespace_project_pipeline_schedule_path(project.namespace, project) } + + it { is_expected.to be_allowed_for(:admin) } + it { is_expected.to be_allowed_for(:owner).of(project) } + it { is_expected.to be_allowed_for(:master).of(project) } + it { is_expected.to be_allowed_for(:developer).of(project) } + it { is_expected.to be_denied_for(:reporter).of(project) } + it { is_expected.to be_denied_for(:guest).of(project) } + it { is_expected.to be_denied_for(:user) } + it { is_expected.to be_denied_for(:external) } + it { is_expected.to be_denied_for(:visitor) } + end + describe "GET /:project_path/container_registry" do + let(:container_repository) { create(:container_repository) } + before do - stub_container_registry_tags('latest') + stub_container_registry_tags(repository: :any, tags: ['latest']) stub_container_registry_config(enabled: true) + project.container_repositories << container_repository end subject { namespace_project_container_registry_index_path(project.namespace, project) } diff --git a/spec/features/security/project/public_access_spec.rb b/spec/features/security/project/public_access_spec.rb index e06aab4e0b2..5cd575500c3 100644 --- a/spec/features/security/project/public_access_spec.rb +++ b/spec/features/security/project/public_access_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe "Public Project Access", feature: true do include AccessMatchers - let(:project) { create(:project, :public) } + set(:project) { create(:project, :public) } describe "Project should be public" do describe '#public?' 
do @@ -219,6 +219,58 @@ describe "Public Project Access", feature: true do end end + describe 'GET /:project_path/builds/:id/trace' do + let(:pipeline) { create(:ci_pipeline, project: project) } + let(:build) { create(:ci_build, pipeline: pipeline) } + subject { trace_namespace_project_build_path(project.namespace, project, build.id) } + + context 'when allowed for public' do + before do + project.update(public_builds: true) + end + + it { is_expected.to be_allowed_for(:admin) } + it { is_expected.to be_allowed_for(:owner).of(project) } + it { is_expected.to be_allowed_for(:master).of(project) } + it { is_expected.to be_allowed_for(:developer).of(project) } + it { is_expected.to be_allowed_for(:reporter).of(project) } + it { is_expected.to be_allowed_for(:guest).of(project) } + it { is_expected.to be_allowed_for(:user) } + it { is_expected.to be_allowed_for(:external) } + it { is_expected.to be_allowed_for(:visitor) } + end + + context 'when disallowed for public' do + before do + project.update(public_builds: false) + end + + it { is_expected.to be_allowed_for(:admin) } + it { is_expected.to be_allowed_for(:owner).of(project) } + it { is_expected.to be_allowed_for(:master).of(project) } + it { is_expected.to be_allowed_for(:developer).of(project) } + it { is_expected.to be_allowed_for(:reporter).of(project) } + it { is_expected.to be_denied_for(:guest).of(project) } + it { is_expected.to be_denied_for(:user) } + it { is_expected.to be_denied_for(:external) } + it { is_expected.to be_denied_for(:visitor) } + end + end + + describe "GET /:project_path/pipeline_schedules" do + subject { namespace_project_pipeline_schedules_path(project.namespace, project) } + + it { is_expected.to be_allowed_for(:admin) } + it { is_expected.to be_allowed_for(:owner).of(project) } + it { is_expected.to be_allowed_for(:master).of(project) } + it { is_expected.to be_allowed_for(:developer).of(project) } + it { is_expected.to be_allowed_for(:reporter).of(project) } + it { is_expected.to be_allowed_for(:guest).of(project) } + it { is_expected.to be_allowed_for(:user) } + it { is_expected.to be_allowed_for(:external) } + it { is_expected.to be_allowed_for(:visitor) } + end + describe "GET /:project_path/environments" do subject { namespace_project_environments_path(project.namespace, project) } @@ -248,6 +300,21 @@ describe "Public Project Access", feature: true do it { is_expected.to be_denied_for(:visitor) } end + describe "GET /:project_path/environments/:id/deployments" do + let(:environment) { create(:environment, project: project) } + subject { namespace_project_environment_deployments_path(project.namespace, project, environment) } + + it { is_expected.to be_allowed_for(:admin) } + it { is_expected.to be_allowed_for(:owner).of(project) } + it { is_expected.to be_allowed_for(:master).of(project) } + it { is_expected.to be_allowed_for(:developer).of(project) } + it { is_expected.to be_allowed_for(:reporter).of(project) } + it { is_expected.to be_denied_for(:guest).of(project) } + it { is_expected.to be_denied_for(:user) } + it { is_expected.to be_denied_for(:external) } + it { is_expected.to be_denied_for(:visitor) } + end + describe "GET /:project_path/environments/new" do subject { new_namespace_project_environment_path(project.namespace, project) } @@ -443,9 +510,12 @@ describe "Public Project Access", feature: true do end describe "GET /:project_path/container_registry" do + let(:container_repository) { create(:container_repository) } + before do - stub_container_registry_tags('latest') + 
stub_container_registry_tags(repository: :any, tags: ['latest']) stub_container_registry_config(enabled: true) + project.container_repositories << container_repository end subject { namespace_project_container_registry_index_path(project.namespace, project) } diff --git a/spec/features/signup_spec.rb b/spec/features/signup_spec.rb index 9fde8d6e5cf..d7b6dda4946 100644 --- a/spec/features/signup_spec.rb +++ b/spec/features/signup_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' feature 'Signup', feature: true do describe 'signup with no errors' do context "when sending confirmation email" do - before { allow_any_instance_of(ApplicationSetting).to receive(:send_user_confirmation_email).and_return(true) } + before { stub_application_setting(send_user_confirmation_email: true) } it 'creates the user account and sends a confirmation email' do user = build(:user) @@ -23,7 +23,7 @@ feature 'Signup', feature: true do end context "when not sending confirmation email" do - before { allow_any_instance_of(ApplicationSetting).to receive(:send_user_confirmation_email).and_return(false) } + before { stub_application_setting(send_user_confirmation_email: false) } it 'creates the user account and goes to dashboard' do user = build(:user) diff --git a/spec/features/snippets/create_snippet_spec.rb b/spec/features/snippets/create_snippet_spec.rb index 5470276bf06..9409c323288 100644 --- a/spec/features/snippets/create_snippet_spec.rb +++ b/spec/features/snippets/create_snippet_spec.rb @@ -1,6 +1,6 @@ require 'rails_helper' -feature 'Create Snippet', feature: true do +feature 'Create Snippet', :js, feature: true do before do login_as :user visit new_snippet_path @@ -9,10 +9,11 @@ feature 'Create Snippet', feature: true do scenario 'Authenticated user creates a snippet' do fill_in 'personal_snippet_title', with: 'My Snippet Title' page.within('.file-editor') do - find(:xpath, "//input[@id='personal_snippet_content']").set 'Hello World!' + find('.ace_editor').native.send_keys 'Hello World!' end click_button 'Create snippet' + wait_for_ajax expect(page).to have_content('My Snippet Title') expect(page).to have_content('Hello World!') @@ -22,10 +23,11 @@ feature 'Create Snippet', feature: true do fill_in 'personal_snippet_title', with: 'My Snippet Title' page.within('.file-editor') do find(:xpath, "//input[@id='personal_snippet_file_name']").set 'snippet+file+name' - find(:xpath, "//input[@id='personal_snippet_content']").set 'Hello World!' + find('.ace_editor').native.send_keys 'Hello World!' 
end click_button 'Create snippet' + wait_for_ajax expect(page).to have_content('My Snippet Title') expect(page).to have_content('snippet+file+name') diff --git a/spec/features/snippets/explore_spec.rb b/spec/features/snippets/explore_spec.rb index 10a4597e467..fd097fe2e74 100644 --- a/spec/features/snippets/explore_spec.rb +++ b/spec/features/snippets/explore_spec.rb @@ -1,11 +1,11 @@ require 'rails_helper' feature 'Explore Snippets', feature: true do - scenario 'User should see snippets that are not private' do - public_snippet = create(:personal_snippet, :public) - internal_snippet = create(:personal_snippet, :internal) - private_snippet = create(:personal_snippet, :private) + let!(:public_snippet) { create(:personal_snippet, :public) } + let!(:internal_snippet) { create(:personal_snippet, :internal) } + let!(:private_snippet) { create(:personal_snippet, :private) } + scenario 'User should see snippets that are not private' do login_as create(:user) visit explore_snippets_path @@ -13,4 +13,21 @@ feature 'Explore Snippets', feature: true do expect(page).to have_content(internal_snippet.title) expect(page).not_to have_content(private_snippet.title) end + + scenario 'External user should see only public snippets' do + login_as create(:user, :external) + visit explore_snippets_path + + expect(page).to have_content(public_snippet.title) + expect(page).not_to have_content(internal_snippet.title) + expect(page).not_to have_content(private_snippet.title) + end + + scenario 'Not authenticated user should see only public snippets' do + visit explore_snippets_path + + expect(page).to have_content(public_snippet.title) + expect(page).not_to have_content(internal_snippet.title) + expect(page).not_to have_content(private_snippet.title) + end end diff --git a/spec/features/snippets/internal_snippet_spec.rb b/spec/features/snippets/internal_snippet_spec.rb new file mode 100644 index 00000000000..93382f4c359 --- /dev/null +++ b/spec/features/snippets/internal_snippet_spec.rb @@ -0,0 +1,23 @@ +require 'rails_helper' + +feature 'Internal Snippets', feature: true, js: true do + let(:internal_snippet) { create(:personal_snippet, :internal) } + + describe 'normal user' do + before do + login_as :user + end + + scenario 'sees internal snippets' do + visit snippet_path(internal_snippet) + + expect(page).to have_content(internal_snippet.content) + end + + scenario 'sees raw internal snippets' do + visit raw_snippet_path(internal_snippet) + + expect(page).to have_content(internal_snippet.content) + end + end +end diff --git a/spec/features/snippets/notes_on_personal_snippets_spec.rb b/spec/features/snippets/notes_on_personal_snippets_spec.rb new file mode 100644 index 00000000000..698eb46573f --- /dev/null +++ b/spec/features/snippets/notes_on_personal_snippets_spec.rb @@ -0,0 +1,101 @@ +require 'spec_helper' + +describe 'Comments on personal snippets', :js, feature: true do + let!(:user) { create(:user) } + let!(:snippet) { create(:personal_snippet, :public) } + let!(:snippet_notes) do + [ + create(:note_on_personal_snippet, noteable: snippet, author: user), + create(:note_on_personal_snippet, noteable: snippet) + ] + end + let!(:other_note) { create(:note_on_personal_snippet) } + + before do + login_as user + visit snippet_path(snippet) + end + + subject { page } + + context 'when viewing the snippet detail page' do + it 'contains notes for a snippet with correct action icons' do + expect(page).to have_selector('#notes-list li', count: 2) + + # comment authored by current user + page.within("#notes-list 
li#note_#{snippet_notes[0].id}") do + expect(page).to have_content(snippet_notes[0].note) + expect(page).to have_selector('.js-note-delete') + expect(page).to have_selector('.note-emoji-button') + end + + page.within("#notes-list li#note_#{snippet_notes[1].id}") do + expect(page).to have_content(snippet_notes[1].note) + expect(page).not_to have_selector('.js-note-delete') + expect(page).to have_selector('.note-emoji-button') + end + end + end + + context 'when submitting a note' do + it 'shows a valid form' do + is_expected.to have_css('.js-main-target-form', visible: true, count: 1) + expect(find('.js-main-target-form .js-comment-button').value). + to eq('Comment') + + page.within('.js-main-target-form') do + expect(page).not_to have_link('Cancel') + end + end + + it 'previews a note' do + fill_in 'note[note]', with: 'This is **awesome**!' + find('.js-md-preview-button').click + + page.within('.new-note .md-preview') do + expect(page).to have_content('This is awesome!') + expect(page).to have_selector('strong') + end + end + + it 'creates a note' do + fill_in 'note[note]', with: 'This is **awesome**!' + click_button 'Comment' + + expect(find('div#notes')).to have_content('This is awesome!') + end + end + + context 'when editing a note' do + it 'changes the text' do + page.within("#notes-list li#note_#{snippet_notes[0].id}") do + click_on 'Edit comment' + end + + page.within('.current-note-edit-form') do + fill_in 'note[note]', with: 'new content' + find('.btn-save').click + end + + page.within("#notes-list li#note_#{snippet_notes[0].id}") do + edited_text = find('.edited-text') + + expect(page).to have_css('.note_edited_ago') + expect(page).to have_content('new content') + expect(edited_text).to have_selector('.note_edited_ago') + end + end + end + + context 'when deleting a note' do + it 'removes the note from the snippet detail page' do + page.within("#notes-list li#note_#{snippet_notes[0].id}") do + click_on 'Remove comment' + end + + wait_for_ajax + + expect(page).not_to have_selector("#notes-list li#note_#{snippet_notes[0].id}") + end + end +end diff --git a/spec/features/snippets/public_snippets_spec.rb b/spec/features/snippets/public_snippets_spec.rb index 34300ccb940..2df483818c3 100644 --- a/spec/features/snippets/public_snippets_spec.rb +++ b/spec/features/snippets/public_snippets_spec.rb @@ -1,10 +1,11 @@ require 'rails_helper' -feature 'Public Snippets', feature: true do +feature 'Public Snippets', :js, feature: true do scenario 'Unauthenticated user should see public snippets' do public_snippet = create(:personal_snippet, :public) visit snippet_path(public_snippet) + wait_for_ajax expect(page).to have_content(public_snippet.content) end diff --git a/spec/features/snippets/show_spec.rb b/spec/features/snippets/show_spec.rb new file mode 100644 index 00000000000..e36cf547f80 --- /dev/null +++ b/spec/features/snippets/show_spec.rb @@ -0,0 +1,138 @@ +require 'spec_helper' + +feature 'Snippet', :js, feature: true do + let(:project) { create(:project, :repository) } + let(:snippet) { create(:personal_snippet, :public, file_name: file_name, content: content) } + + context 'Ruby file' do + let(:file_name) { 'popen.rb' } + let(:content) { project.repository.blob_at('master', 'files/ruby/popen.rb').data } + + before do + visit snippet_path(snippet) + + wait_for_ajax + end + + it 'displays the blob' do + aggregate_failures do + # shows highlighted Ruby code + expect(page).to have_content("require 'fileutils'") + + # does not show a viewer switcher + expect(page).not_to 
have_selector('.js-blob-viewer-switcher') + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + + # shows a raw button + expect(page).to have_link('Open raw') + + # shows a download button + expect(page).to have_link('Download') + end + end + end + + context 'Markdown file' do + let(:file_name) { 'ruby-style-guide.md' } + let(:content) { project.repository.blob_at('master', 'files/markdown/ruby-style-guide.md').data } + + context 'visiting directly' do + before do + visit snippet_path(snippet) + + wait_for_ajax + end + + it 'displays the blob using the rich viewer' do + aggregate_failures do + # hides the simple viewer + expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false) + expect(page).to have_selector('.blob-viewer[data-type="rich"]') + + # shows rendered Markdown + expect(page).to have_link("PEP-8") + + # shows a viewer switcher + expect(page).to have_selector('.js-blob-viewer-switcher') + + # shows a disabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn.disabled') + + # shows a raw button + expect(page).to have_link('Open raw') + + # shows a download button + expect(page).to have_link('Download') + end + end + + context 'switching to the simple viewer' do + before do + find('.js-blob-viewer-switch-btn[data-viewer=simple]').click + + wait_for_ajax + end + + it 'displays the blob using the simple viewer' do + aggregate_failures do + # hides the rich viewer + expect(page).to have_selector('.blob-viewer[data-type="simple"]') + expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false) + + # shows highlighted Markdown code + expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)") + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + end + end + + context 'switching to the rich viewer again' do + before do + find('.js-blob-viewer-switch-btn[data-viewer=rich]').click + + wait_for_ajax + end + + it 'displays the blob using the rich viewer' do + aggregate_failures do + # hides the simple viewer + expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false) + expect(page).to have_selector('.blob-viewer[data-type="rich"]') + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + end + end + end + end + end + + context 'visiting with a line number anchor' do + before do + visit snippet_path(snippet, anchor: 'L1') + + wait_for_ajax + end + + it 'displays the blob using the simple viewer' do + aggregate_failures do + # hides the rich viewer + expect(page).to have_selector('.blob-viewer[data-type="simple"]') + expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false) + + # highlights the line in question + expect(page).to have_selector('#LC1.hll') + + # shows highlighted Markdown code + expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)") + + # shows an enabled copy button + expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)') + end + end + end + end +end diff --git a/spec/features/tags/master_creates_tag_spec.rb b/spec/features/tags/master_creates_tag_spec.rb index ca25c696f75..af25eebed13 100644 --- a/spec/features/tags/master_creates_tag_spec.rb +++ b/spec/features/tags/master_creates_tag_spec.rb @@ -51,10 +51,24 @@ feature 'Master creates tag', feature: true do end end + scenario 'opens dropdown for ref', js: true do + click_link 'New 
tag' + ref_row = find('.form-group:nth-of-type(2) .col-sm-10') + page.within ref_row do + ref_input = find('[name="ref"]', visible: false) + expect(ref_input.value).to eq 'master' + expect(find('.dropdown-toggle-text')).to have_content 'master' + + find('.js-branch-select').trigger('click') + + expect(find('.dropdown-menu')).to have_content 'empty-branch' + end + end + def create_tag_in_form(tag:, ref:, message: nil, desc: nil) click_link 'New tag' fill_in 'tag_name', with: tag - fill_in 'ref', with: ref + find('#ref', visible: false).set(ref) fill_in 'message', with: message unless message.nil? fill_in 'release_description', with: desc unless desc.nil? click_button 'Create tag' diff --git a/spec/features/tags/master_views_tags_spec.rb b/spec/features/tags/master_views_tags_spec.rb index 555f84c4772..922ac15a2eb 100644 --- a/spec/features/tags/master_views_tags_spec.rb +++ b/spec/features/tags/master_views_tags_spec.rb @@ -16,7 +16,7 @@ feature 'Master views tags', feature: true do fill_in :commit_message, with: 'Add a README file', visible: true # Remove pre-receive hook so we can push without auth FileUtils.rm_f(File.join(project.repository.path, 'hooks', 'pre-receive')) - click_button 'Commit Changes' + click_button 'Commit changes' visit namespace_project_tags_path(project.namespace, project) end diff --git a/spec/features/task_lists_spec.rb b/spec/features/task_lists_spec.rb index a5d14aa19f1..8bd13caf2b0 100644 --- a/spec/features/task_lists_spec.rb +++ b/spec/features/task_lists_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' feature 'Task Lists', feature: true do include Warden::Test::Helpers - let(:project) { create(:project) } + let(:project) { create(:empty_project) } let(:user) { create(:user) } let(:user2) { create(:user) } @@ -62,12 +62,15 @@ feature 'Task Lists', feature: true do visit namespace_project_issue_path(project.namespace, project, issue) end - describe 'for Issues' do - describe 'multiple tasks' do + describe 'for Issues', feature: true do + describe 'multiple tasks', js: true do + include WaitForVueResource + let!(:issue) { create(:issue, description: markdown, author: user, project: project) } it 'renders' do visit_issue(project, issue) + wait_for_vue_resource expect(page).to have_selector('ul.task-list', count: 1) expect(page).to have_selector('li.task-list-item', count: 6) @@ -76,25 +79,24 @@ feature 'Task Lists', feature: true do it 'contains the required selectors' do visit_issue(project, issue) + wait_for_vue_resource - container = '.detail-page-description .description.js-task-list-container' - - expect(page).to have_selector(container) - expect(page).to have_selector("#{container} .wiki .task-list .task-list-item .task-list-item-checkbox") - expect(page).to have_selector("#{container} .js-task-list-field") - expect(page).to have_selector('form.js-issuable-update') + expect(page).to have_selector(".wiki .task-list .task-list-item .task-list-item-checkbox") expect(page).to have_selector('a.btn-close') end it 'is only editable by author' do visit_issue(project, issue) - expect(page).to have_selector('.js-task-list-container') + wait_for_vue_resource - logout(:user) + expect(page).to have_selector(".wiki .task-list .task-list-item .task-list-item-checkbox") + logout(:user) login_as(user2) visit current_path - expect(page).not_to have_selector('.js-task-list-container') + wait_for_vue_resource + + expect(page).to have_selector(".wiki .task-list .task-list-item .task-list-item-checkbox") end it 'provides a summary on Issues#index' do @@ -103,11 +105,14 @@ feature 
'Task Lists', feature: true do end end - describe 'single incomplete task' do + describe 'single incomplete task', js: true do + include WaitForVueResource + let!(:issue) { create(:issue, description: singleIncompleteMarkdown, author: user, project: project) } it 'renders' do visit_issue(project, issue) + wait_for_vue_resource expect(page).to have_selector('ul.task-list', count: 1) expect(page).to have_selector('li.task-list-item', count: 1) @@ -116,15 +121,18 @@ feature 'Task Lists', feature: true do it 'provides a summary on Issues#index' do visit namespace_project_issues_path(project.namespace, project) + expect(page).to have_content("0 of 1 task completed") end end - describe 'single complete task' do + describe 'single complete task', js: true do + include WaitForVueResource let!(:issue) { create(:issue, description: singleCompleteMarkdown, author: user, project: project) } it 'renders' do visit_issue(project, issue) + wait_for_vue_resource expect(page).to have_selector('ul.task-list', count: 1) expect(page).to have_selector('li.task-list-item', count: 1) @@ -133,6 +141,7 @@ feature 'Task Lists', feature: true do it 'provides a summary on Issues#index' do visit namespace_project_issues_path(project.namespace, project) + expect(page).to have_content("1 of 1 task completed") end end @@ -240,6 +249,7 @@ feature 'Task Lists', feature: true do end describe 'multiple tasks' do + let(:project) { create(:project, :repository) } let!(:merge) { create(:merge_request, :simple, description: markdown, author: user, source_project: project) } it 'renders for description' do diff --git a/spec/features/todos/todos_filtering_spec.rb b/spec/features/todos/todos_filtering_spec.rb index e8f06916d53..f32e70c2c3f 100644 --- a/spec/features/todos/todos_filtering_spec.rb +++ b/spec/features/todos/todos_filtering_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe 'Dashboard > User filters todos', feature: true, js: true do - include WaitForAjax - let(:user_1) { create(:user, username: 'user_1', name: 'user_1') } let(:user_2) { create(:user, username: 'user_2', name: 'user_2') } @@ -47,8 +45,8 @@ describe 'Dashboard > User filters todos', feature: true, js: true do wait_for_ajax - expect(find('.todos-list')).to have_content user_1.name - expect(find('.todos-list')).not_to have_content user_2.name + expect(find('.todos-list')).to have_content 'merge request' + expect(find('.todos-list')).not_to have_content 'issue' end it "shows only authors of existing todos" do diff --git a/spec/features/todos/todos_spec.rb b/spec/features/todos/todos_spec.rb index c270511c903..55b3e3d9424 100644 --- a/spec/features/todos/todos_spec.rb +++ b/spec/features/todos/todos_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe 'Dashboard Todos', feature: true do - include WaitForAjax - let(:user) { create(:user) } let(:author) { create(:user) } let(:project) { create(:project, visibility_level: Gitlab::VisibilityLevel::PUBLIC) } @@ -101,6 +99,83 @@ describe 'Dashboard Todos', feature: true do end end + context 'User created todos for themself' do + before do + login_as(user) + end + + context 'issue assigned todo' do + before do + create(:todo, :assigned, user: user, project: project, target: issue, author: user) + visit dashboard_todos_path + end + + it 'shows issue assigned to yourself message' do + page.within('.js-todos-all') do + expect(page).to have_content("You assigned issue #{issue.to_reference(full: true)} to yourself") + end + end + end + + context 'marked todo' do + before do + create(:todo, :marked, user: user, 
project: project, target: issue, author: user) + visit dashboard_todos_path + end + + it 'shows you added a todo message' do + page.within('.js-todos-all') do + expect(page).to have_content("You added a todo for issue #{issue.to_reference(full: true)}") + expect(page).not_to have_content('to yourself') + end + end + end + + context 'mentioned todo' do + before do + create(:todo, :mentioned, user: user, project: project, target: issue, author: user) + visit dashboard_todos_path + end + + it 'shows you mentioned yourself message' do + page.within('.js-todos-all') do + expect(page).to have_content("You mentioned yourself on issue #{issue.to_reference(full: true)}") + expect(page).not_to have_content('to yourself') + end + end + end + + context 'directly_addressed todo' do + before do + create(:todo, :directly_addressed, user: user, project: project, target: issue, author: user) + visit dashboard_todos_path + end + + it 'shows you directly addressed yourself message' do + page.within('.js-todos-all') do + expect(page).to have_content("You directly addressed yourself on issue #{issue.to_reference(full: true)}") + expect(page).not_to have_content('to yourself') + end + end + end + + context 'approval todo' do + let(:merge_request) { create(:merge_request) } + + before do + create(:todo, :approval_required, user: user, project: project, target: merge_request, author: user) + visit dashboard_todos_path + end + + it 'shows you set yourself as an approver message' do + page.within('.js-todos-all') do + expect(page).to have_content("You set yourself as an approver for merge request #{merge_request.to_reference(full: true)}") + expect(page).not_to have_content('to yourself') + end + end + end + end + context 'User has done todos', js: true do before do create(:todo, :mentioned, :done, user: user, project: project, target: issue, author: author) @@ -176,7 +251,7 @@ describe 'Dashboard Todos', feature: true do describe 'mark all as done', js: true do before do visit dashboard_todos_path - click_link 'Mark all as done' + find('.js-todos-mark-all').trigger('click') end it 'shows "All done" message!' 
do @@ -233,9 +308,9 @@ describe 'Dashboard Todos', feature: true do end def mark_all_and_undo - click_link 'Mark all as done' + find('.js-todos-mark-all').trigger('click') wait_for_ajax - click_link 'Undo mark all as done' + find('.js-todos-undo-all').trigger('click') wait_for_ajax end end diff --git a/spec/features/u2f_spec.rb b/spec/features/u2f_spec.rb index a8d00bb8e5a..544d2dcb87f 100644 --- a/spec/features/u2f_spec.rb +++ b/spec/features/u2f_spec.rb @@ -1,24 +1,21 @@ require 'spec_helper' -feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: true, js: true do - include WaitForAjax - +feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', :js do before { allow_any_instance_of(U2fHelper).to receive(:inject_u2f_api?).and_return(true) } def manage_two_factor_authentication - click_on 'Manage Two-Factor Authentication' - expect(page).to have_content("Setup New U2F Device") + click_on 'Manage two-factor authentication' + expect(page).to have_content("Setup new U2F device") wait_for_ajax end - def register_u2f_device(u2f_device = nil) - name = FFaker::Name.first_name + def register_u2f_device(u2f_device = nil, name: 'My device') u2f_device ||= FakeU2fDevice.new(page, name) u2f_device.respond_to_u2f_registration - click_on 'Setup New U2F Device' + click_on 'Setup new U2F device' expect(page).to have_content('Your device was successfully set up') fill_in "Pick a name", with: name - click_on 'Register U2F Device' + click_on 'Register U2F device' u2f_device end @@ -35,9 +32,9 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: it 'does not allow registering a new device' do visit profile_account_path - click_on 'Enable Two-Factor Authentication' + click_on 'Enable two-factor authentication' - expect(page).to have_button('Setup New U2F Device', disabled: true) + expect(page).to have_button('Setup new U2F device', disabled: true) end end @@ -62,7 +59,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: expect(page).to have_content('Your U2F device was registered') # Second device - second_device = register_u2f_device + second_device = register_u2f_device(name: 'My other device') expect(page).to have_content('Your U2F device was registered') expect(page).to have_content(first_device.name) @@ -76,7 +73,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: expect(page).to have_content("You've already enabled two-factor authentication using mobile") first_u2f_device = register_u2f_device - second_u2f_device = register_u2f_device + second_u2f_device = register_u2f_device(name: 'My other device') click_on "Delete", match: :first @@ -99,7 +96,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: user.update_attribute(:otp_required_for_login, true) visit profile_account_path manage_two_factor_authentication - register_u2f_device(u2f_device) + register_u2f_device(u2f_device, name: 'My other device') expect(page).to have_content('Your U2F device was registered') expect(U2fRegistration.count).to eq(2) @@ -112,9 +109,9 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: # Have the "u2f device" respond with bad data page.execute_script("u2f.register = function(_,_,_,callback) { callback('bad response'); };") - click_on 'Setup New U2F Device' + click_on 'Setup new U2F device' expect(page).to have_content('Your device was successfully set up') - click_on 'Register U2F Device' + click_on 'Register U2F device' 
expect(U2fRegistration.count).to eq(0) expect(page).to have_content("The form contains the following error") @@ -127,9 +124,9 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: # Failed registration page.execute_script("u2f.register = function(_,_,_,callback) { callback('bad response'); };") - click_on 'Setup New U2F Device' + click_on 'Setup new U2F device' expect(page).to have_content('Your device was successfully set up') - click_on 'Register U2F Device' + click_on 'Register U2F device' expect(page).to have_content("The form contains the following error") # Successful registration @@ -198,7 +195,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: current_user.update_attribute(:otp_required_for_login, true) visit profile_account_path manage_two_factor_authentication - register_u2f_device + register_u2f_device(name: 'My other device') logout # Try authenticating user with the old U2F device @@ -231,7 +228,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: describe "when a given U2F device has not been registered" do it "does not allow logging in with that particular device" do - unregistered_device = FakeU2fDevice.new(page, FFaker::Name.first_name) + unregistered_device = FakeU2fDevice.new(page, 'My device') login_as(user) unregistered_device.respond_to_u2f_authentication expect(page).to have_content('We heard back from your U2F device') @@ -252,7 +249,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: # Register second device visit profile_two_factor_auth_path expect(page).to have_content("Your U2F device needs to be set up.") - second_device = register_u2f_device + second_device = register_u2f_device(name: 'My other device') logout # Authenticate as both devices diff --git a/spec/features/unsubscribe_links_spec.rb b/spec/features/unsubscribe_links_spec.rb index e2d9cfdd0b0..a23c4ca2b92 100644 --- a/spec/features/unsubscribe_links_spec.rb +++ b/spec/features/unsubscribe_links_spec.rb @@ -6,7 +6,7 @@ describe 'Unsubscribe links', feature: true do let(:recipient) { create(:user) } let(:author) { create(:user) } let(:project) { create(:empty_project, :public) } - let(:params) { { title: 'A bug!', description: 'Fix it!', assignee: recipient } } + let(:params) { { title: 'A bug!', description: 'Fix it!', assignees: [recipient] } } let(:issue) { Issues::CreateService.new(project, author, params).execute } let(:mail) { ActionMailer::Base.deliveries.last } diff --git a/spec/features/uploads/user_uploads_file_to_note_spec.rb b/spec/features/uploads/user_uploads_file_to_note_spec.rb index 0c160dd74b4..8f03024ea06 100644 --- a/spec/features/uploads/user_uploads_file_to_note_spec.rb +++ b/spec/features/uploads/user_uploads_file_to_note_spec.rb @@ -5,18 +5,78 @@ feature 'User uploads file to note', feature: true do let(:user) { create(:user) } let(:project) { create(:empty_project, creator: user, namespace: user.namespace) } + let(:issue) { create(:issue, project: project, author: user) } - scenario 'they see the attached file', js: true do - issue = create(:issue, project: project, author: user) - + before do login_as(user) visit namespace_project_issue_path(project.namespace, project, issue) + end + + context 'before uploading' do + it 'shows "Attach a file" button', js: true do + expect(page).to have_button('Attach a file') + expect(page).not_to have_selector('.uploading-progress-container', visible: true) + end + end + + context 'uploading is in progress' do + it 
'shows "Cancel" button on uploading', js: true do + dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false) + + expect(page).to have_button('Cancel') + end + + it 'cancels uploading on clicking to "Cancel" button', js: true do + dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false) + + click_button 'Cancel' + + expect(page).to have_button('Attach a file') + expect(page).not_to have_button('Cancel') + expect(page).not_to have_selector('.uploading-progress-container', visible: true) + end + + it 'shows "Attaching a file" message on uploading 1 file', js: true do + dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false) + + expect(page).to have_selector('.attaching-file-message', visible: true, text: 'Attaching a file -') + end + + it 'shows "Attaching 2 files" message on uploading 2 file', js: true do + dropzone_file([Rails.root.join('spec', 'fixtures', 'video_sample.mp4'), + Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false) + + expect(page).to have_selector('.attaching-file-message', visible: true, text: 'Attaching 2 files -') + end + + it 'shows error message, "retry" and "attach a new file" link a if file is too big', js: true do + dropzone_file([Rails.root.join('spec', 'fixtures', 'video_sample.mp4')], 0.01) + + error_text = 'File is too big (0.06MiB). Max filesize: 0.01MiB.' + + expect(page).to have_selector('.uploading-error-message', visible: true, text: error_text) + expect(page).to have_selector('.retry-uploading-link', visible: true, text: 'Try again') + expect(page).to have_selector('.attach-new-file', visible: true, text: 'attach a new file') + expect(page).not_to have_button('Attach a file') + end + end + + context 'uploading is complete' do + it 'shows "Attach a file" button on uploading complete', js: true do + dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')]) + wait_for_ajax + + expect(page).to have_button('Attach a file') + expect(page).not_to have_selector('.uploading-progress-container', visible: true) + end - dropzone_file(Rails.root.join('spec', 'fixtures', 'dk.png')) - click_button 'Comment' - wait_for_ajax + scenario 'they see the attached file', js: true do + dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')]) + click_button 'Comment' + wait_for_ajax - expect(find('a.no-attachment-icon img[alt="dk"]')['src']) - .to match(%r{/#{project.full_path}/uploads/\h{32}/dk\.png$}) + expect(find('a.no-attachment-icon img[alt="dk"]')['src']) + .to match(%r{/#{project.full_path}/uploads/\h{32}/dk\.png$}) + end end end diff --git a/spec/features/user_callout_spec.rb b/spec/features/user_callout_spec.rb index 848af5e3a4d..b84f834ff1e 100644 --- a/spec/features/user_callout_spec.rb +++ b/spec/features/user_callout_spec.rb @@ -20,7 +20,7 @@ describe 'User Callouts', js: true do visit dashboard_projects_path within('.user-callout') do - find('.close').click + find('.close').trigger('click') end visit dashboard_projects_path diff --git a/spec/features/users/projects_spec.rb b/spec/features/users/projects_spec.rb index 1d75fe434b0..373b64808f8 100644 --- a/spec/features/users/projects_spec.rb +++ b/spec/features/users/projects_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe 'Projects tab on a user profile', :feature, :js do - include WaitForAjax - let(:user) { create(:user) } let!(:project) { create(:empty_project, namespace: user.namespace) } let!(:project2) { create(:empty_project, namespace: user.namespace) } diff --git a/spec/features/users/snippets_spec.rb 
b/spec/features/users/snippets_spec.rb index ce7e809ec76..4efbd672322 100644 --- a/spec/features/users/snippets_spec.rb +++ b/spec/features/users/snippets_spec.rb @@ -1,18 +1,48 @@ require 'spec_helper' describe 'Snippets tab on a user profile', feature: true, js: true do - include WaitForAjax - context 'when the user has snippets' do let(:user) { create(:user) } - let!(:snippets) { create_list(:snippet, 2, :public, author: user) } - before do - allow(Snippet).to receive(:default_per_page).and_return(1) - visit user_path(user) - page.within('.user-profile-nav') { click_link 'Snippets' } - wait_for_ajax + + context 'pagination' do + let!(:snippets) { create_list(:snippet, 2, :public, author: user) } + + before do + allow(Snippet).to receive(:default_per_page).and_return(1) + visit user_path(user) + page.within('.user-profile-nav') { click_link 'Snippets' } + wait_for_ajax + end + + it_behaves_like 'paginated snippets', remote: true end - it_behaves_like 'paginated snippets', remote: true + context 'list content' do + let!(:public_snippet) { create(:snippet, :public, author: user) } + let!(:internal_snippet) { create(:snippet, :internal, author: user) } + let!(:private_snippet) { create(:snippet, :private, author: user) } + let!(:other_snippet) { create(:snippet, :public) } + + it 'contains only internal and public snippets of a user when a user is logged in' do + login_as(:user) + visit user_path(user) + page.within('.user-profile-nav') { click_link 'Snippets' } + wait_for_ajax + + expect(page).to have_selector('.snippet-row', count: 2) + + expect(page).to have_content(public_snippet.title) + expect(page).to have_content(internal_snippet.title) + end + + it 'contains only public snippets of a user when a user is not logged in' do + visit user_path(user) + page.within('.user-profile-nav') { click_link 'Snippets' } + wait_for_ajax + + expect(page).to have_selector('.snippet-row', count: 1) + expect(page).to have_content(public_snippet.title) + end + end end end diff --git a/spec/features/users_spec.rb b/spec/features/users_spec.rb index 2de0fbe7ab2..c43feadc808 100644 --- a/spec/features/users_spec.rb +++ b/spec/features/users_spec.rb @@ -68,7 +68,6 @@ feature 'Users', feature: true, js: true do end feature 'username validation' do - include WaitForAjax let(:loading_icon) { '.fa.fa-spinner' } let(:username_input) { 'new_user_username' } diff --git a/spec/features/variables_spec.rb b/spec/features/variables_spec.rb index a362d6fd3b6..b83a230c1f8 100644 --- a/spec/features/variables_spec.rb +++ b/spec/features/variables_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe 'Project variables', js: true do let(:user) { create(:user) } - let(:project) { create(:project) } + let(:project) { create(:empty_project) } let(:variable) { create(:ci_variable, key: 'test_key', value: 'test value') } before do diff --git a/spec/finders/group_projects_finder_spec.rb b/spec/finders/group_projects_finder_spec.rb index ef97b061ca7..3c7c9bdcd08 100644 --- a/spec/finders/group_projects_finder_spec.rb +++ b/spec/finders/group_projects_finder_spec.rb @@ -3,8 +3,9 @@ require 'spec_helper' describe GroupProjectsFinder do let(:group) { create(:group) } let(:current_user) { create(:user) } + let(:options) { {} } - let(:finder) { described_class.new(source_user) } + let(:finder) { described_class.new(group: group, current_user: current_user, options: options) } let!(:public_project) { create(:empty_project, :public, group: group, path: '1') } let!(:private_project) { create(:empty_project, :private, group: group, 
path: '2') } @@ -18,22 +19,27 @@ describe GroupProjectsFinder do shared_project_3.project_group_links.create(group_access: Gitlab::Access::MASTER, group: group) end + subject { finder.execute } + describe 'with a group member current user' do - before { group.add_user(current_user, Gitlab::Access::MASTER) } + before do + group.add_master(current_user) + end context "only shared" do - subject { described_class.new(group, only_shared: true).execute(current_user) } - it { is_expected.to eq([shared_project_3, shared_project_2, shared_project_1]) } + let(:options) { { only_shared: true } } + + it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1]) } end context "only owned" do - subject { described_class.new(group, only_owned: true).execute(current_user) } - it { is_expected.to eq([private_project, public_project]) } + let(:options) { { only_owned: true } } + + it { is_expected.to match_array([private_project, public_project]) } end context "all" do - subject { described_class.new(group).execute(current_user) } - it { is_expected.to eq([shared_project_3, shared_project_2, shared_project_1, private_project, public_project]) } + it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, private_project, public_project]) } end end @@ -44,47 +50,57 @@ describe GroupProjectsFinder do end context "only shared" do + let(:options) { { only_shared: true } } + context "without external user" do - subject { described_class.new(group, only_shared: true).execute(current_user) } - it { is_expected.to eq([shared_project_3, shared_project_2, shared_project_1]) } + it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1]) } end context "with external user" do - before { current_user.update_attributes(external: true) } - subject { described_class.new(group, only_shared: true).execute(current_user) } - it { is_expected.to eq([shared_project_2, shared_project_1]) } + before do + current_user.update_attributes(external: true) + end + + it { is_expected.to match_array([shared_project_2, shared_project_1]) } end end context "only owned" do + let(:options) { { only_owned: true } } + context "without external user" do - before { private_project.team << [current_user, Gitlab::Access::MASTER] } - subject { described_class.new(group, only_owned: true).execute(current_user) } - it { is_expected.to eq([private_project, public_project]) } + before do + private_project.team << [current_user, Gitlab::Access::MASTER] + end + + it { is_expected.to match_array([private_project, public_project]) } end context "with external user" do - before { current_user.update_attributes(external: true) } - subject { described_class.new(group, only_owned: true).execute(current_user) } - it { is_expected.to eq([public_project]) } - end + before do + current_user.update_attributes(external: true) + end - context "all" do - subject { described_class.new(group).execute(current_user) } - it { is_expected.to eq([shared_project_3, shared_project_2, shared_project_1, public_project]) } + it { is_expected.to eq([public_project]) } end end + + context "all" do + it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, public_project]) } + end end describe "no user" do context "only shared" do - subject { described_class.new(group, only_shared: true).execute(current_user) } - it { is_expected.to eq([shared_project_3, shared_project_1]) } + let(:options) { { only_shared: true } } + + it { is_expected.to match_array([shared_project_3, 
shared_project_1]) } end context "only owned" do - subject { described_class.new(group, only_owned: true).execute(current_user) } - it { is_expected.to eq([public_project]) } + let(:options) { { only_owned: true } } + + it { is_expected.to eq([public_project]) } end end end diff --git a/spec/finders/groups_finder_spec.rb b/spec/finders/groups_finder_spec.rb index d5d111e8d15..5b3591550c1 100644 --- a/spec/finders/groups_finder_spec.rb +++ b/spec/finders/groups_finder_spec.rb @@ -3,29 +3,64 @@ require 'spec_helper' describe GroupsFinder do describe '#execute' do let(:user) { create(:user) } - let!(:private_group) { create(:group, :private) } - let!(:internal_group) { create(:group, :internal) } - let!(:public_group) { create(:group, :public) } - let(:finder) { described_class.new } - describe 'execute' do - describe 'without a user' do - subject { finder.execute } + context 'root level groups' do + let!(:private_group) { create(:group, :private) } + let!(:internal_group) { create(:group, :internal) } + let!(:public_group) { create(:group, :public) } + + context 'without a user' do + subject { described_class.new.execute } it { is_expected.to eq([public_group]) } end - describe 'with a user' do - subject { finder.execute(user) } + context 'with a user' do + subject { described_class.new(user).execute } context 'normal user' do - it { is_expected.to eq([public_group, internal_group]) } + it { is_expected.to contain_exactly(public_group, internal_group) } end context 'external user' do let(:user) { create(:user, external: true) } - it { is_expected.to eq([public_group]) } + it { is_expected.to contain_exactly(public_group) } + end + + context 'user is member of the private group' do + before do + private_group.add_guest(user) + end + + it { is_expected.to contain_exactly(public_group, internal_group, private_group) } + end + end + end + + context 'subgroups' do + let!(:parent_group) { create(:group, :public) } + let!(:public_subgroup) { create(:group, :public, parent: parent_group) } + let!(:internal_subgroup) { create(:group, :internal, parent: parent_group) } + let!(:private_subgroup) { create(:group, :private, parent: parent_group) } + + context 'without a user' do + it 'only returns public subgroups' do + expect(described_class.new(nil, parent: parent_group).execute).to contain_exactly(public_subgroup) + end + end + + context 'with a user' do + it 'returns public and internal subgroups' do + expect(described_class.new(user, parent: parent_group).execute).to contain_exactly(public_subgroup, internal_subgroup) + end + + context 'being member' do + it 'returns public subgroups, internal subgroups, and private subgroups user is member of' do + private_subgroup.add_guest(user) + + expect(described_class.new(user, parent: parent_group).execute).to contain_exactly(public_subgroup, internal_subgroup, private_subgroup) + end end end end diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb index ee52dc65175..96151689359 100644 --- a/spec/finders/issues_finder_spec.rb +++ b/spec/finders/issues_finder_spec.rb @@ -1,24 +1,24 @@ require 'spec_helper' describe IssuesFinder do - let(:user) { create(:user) } - let(:user2) { create(:user) } - let(:project1) { create(:empty_project) } - let(:project2) { create(:empty_project) } - let(:milestone) { create(:milestone, project: project1) } - let(:label) { create(:label, project: project2) } - let(:issue1) { create(:issue, author: user, assignee: user, project: project1, milestone: milestone, title: 'gitlab') } - let(:issue2) { 
create(:issue, author: user, assignee: user, project: project2, description: 'gitlab') } - let(:issue3) { create(:issue, author: user2, assignee: user2, project: project2) } + set(:user) { create(:user) } + set(:user2) { create(:user) } + set(:project1) { create(:empty_project) } + set(:project2) { create(:empty_project) } + set(:milestone) { create(:milestone, project: project1) } + set(:label) { create(:label, project: project2) } + set(:issue1) { create(:issue, author: user, assignees: [user], project: project1, milestone: milestone, title: 'gitlab') } + set(:issue2) { create(:issue, author: user, assignees: [user], project: project2, description: 'gitlab') } + set(:issue3) { create(:issue, author: user2, assignees: [user2], project: project2, title: 'tanuki', description: 'tanuki') } describe '#execute' do - let(:closed_issue) { create(:issue, author: user2, assignee: user2, project: project2, state: 'closed') } - let!(:label_link) { create(:label_link, label: label, target: issue2) } + set(:closed_issue) { create(:issue, author: user2, assignees: [user2], project: project2, state: 'closed') } + set(:label_link) { create(:label_link, label: label, target: issue2) } let(:search_user) { user } let(:params) { {} } - let(:issues) { IssuesFinder.new(search_user, params.reverse_merge(scope: scope, state: 'opened')).execute } + let(:issues) { described_class.new(search_user, params.reverse_merge(scope: scope, state: 'opened')).execute } - before do + before(:context) do project1.team << [user, :master] project2.team << [user, :developer] project2.team << [user2, :developer] @@ -91,7 +91,7 @@ describe IssuesFinder do before do milestones.each do |milestone| - create(:issue, project: milestone.project, milestone: milestone, author: user, assignee: user) + create(:issue, project: milestone.project, milestone: milestone, author: user, assignees: [user]) end end @@ -126,7 +126,7 @@ describe IssuesFinder do before do milestones.each do |milestone| - create(:issue, project: milestone.project, milestone: milestone, author: user, assignee: user) + create(:issue, project: milestone.project, milestone: milestone, author: user, assignees: [user]) end end @@ -282,15 +282,15 @@ describe IssuesFinder do let!(:confidential_issue) { create(:issue, project: project, confidential: true) } it 'returns non confidential issues for nil user' do - expect(IssuesFinder.send(:not_restricted_by_confidentiality, nil)).to include(public_issue) + expect(described_class.send(:not_restricted_by_confidentiality, nil)).to include(public_issue) end it 'returns non confidential issues for user not authorized for the issues projects' do - expect(IssuesFinder.send(:not_restricted_by_confidentiality, user)).to include(public_issue) + expect(described_class.send(:not_restricted_by_confidentiality, user)).to include(public_issue) end it 'returns all issues for user authorized for the issues projects' do - expect(IssuesFinder.send(:not_restricted_by_confidentiality, authorized_user)).to include(public_issue, confidential_issue) + expect(described_class.send(:not_restricted_by_confidentiality, authorized_user)).to include(public_issue, confidential_issue) end end end diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb index 21ef94ac5d1..58b7cd5e098 100644 --- a/spec/finders/merge_requests_finder_spec.rb +++ b/spec/finders/merge_requests_finder_spec.rb @@ -23,26 +23,26 @@ describe MergeRequestsFinder do describe "#execute" do it 'filters by scope' do params = { scope: 'authored', state: 
'opened' } - merge_requests = MergeRequestsFinder.new(user, params).execute + merge_requests = described_class.new(user, params).execute expect(merge_requests.size).to eq(3) end it 'filters by project' do params = { project_id: project1.id, scope: 'authored', state: 'opened' } - merge_requests = MergeRequestsFinder.new(user, params).execute + merge_requests = described_class.new(user, params).execute expect(merge_requests.size).to eq(1) end it 'filters by non_archived' do params = { non_archived: true } - merge_requests = MergeRequestsFinder.new(user, params).execute + merge_requests = described_class.new(user, params).execute expect(merge_requests.size).to eq(3) end it 'filters by iid' do params = { project_id: project1.id, iids: merge_request1.iid } - merge_requests = MergeRequestsFinder.new(user, params).execute + merge_requests = described_class.new(user, params).execute expect(merge_requests).to contain_exactly(merge_request1) end diff --git a/spec/finders/notes_finder_spec.rb b/spec/finders/notes_finder_spec.rb index 77a04507be1..ba6bbb3bce0 100644 --- a/spec/finders/notes_finder_spec.rb +++ b/spec/finders/notes_finder_spec.rb @@ -110,6 +110,15 @@ describe NotesFinder do expect(notes.count).to eq(1) end + it 'finds notes on personal snippets' do + note = create(:note_on_personal_snippet) + params = { target_type: 'personal_snippet', target_id: note.noteable_id } + + notes = described_class.new(project, user, params).execute + + expect(notes.count).to eq(1) + end + it 'raises an exception for an invalid target_type' do params[:target_type] = 'invalid' expect { described_class.new(project, user, params).execute }.to raise_error('invalid target_type') @@ -202,4 +211,45 @@ describe NotesFinder do end end end + + describe '#target' do + subject { described_class.new(project, user, params) } + + context 'for an issue target' do + let(:issue) { create(:issue, project: project) } + let(:params) { { target_type: 'issue', target_id: issue.id } } + + it 'returns the issue' do + expect(subject.target).to eq(issue) + end + end + + context 'for a merge request target' do + let(:merge_request) { create(:merge_request, source_project: project) } + let(:params) { { target_type: 'merge_request', target_id: merge_request.id } } + + it 'returns the merge_request' do + expect(subject.target).to eq(merge_request) + end + end + + context 'for a snippet target' do + let(:snippet) { create(:project_snippet, project: project) } + let(:params) { { target_type: 'snippet', target_id: snippet.id } } + + it 'returns the snippet' do + expect(subject.target).to eq(snippet) + end + end + + context 'for a commit target' do + let(:project) { create(:project, :repository) } + let(:commit) { project.commit } + let(:params) { { target_type: 'commit', target_id: commit.id } } + + it 'returns the commit' do + expect(subject.target).to eq(commit) + end + end + end end diff --git a/spec/finders/pipeline_schedules_finder_spec.rb b/spec/finders/pipeline_schedules_finder_spec.rb new file mode 100644 index 00000000000..e184a87c9c7 --- /dev/null +++ b/spec/finders/pipeline_schedules_finder_spec.rb @@ -0,0 +1,41 @@ +require 'spec_helper' + +describe PipelineSchedulesFinder do + let(:project) { create(:empty_project) } + + let!(:active_schedule) { create(:ci_pipeline_schedule, project: project) } + let!(:inactive_schedule) { create(:ci_pipeline_schedule, :inactive, project: project) } + + subject { described_class.new(project).execute(params) } + + describe "#execute" do + context 'when the scope is nil' do + let(:params) { { scope:
nil } } + + it 'selects all pipeline schedules' do + expect(subject.count).to be(2) + expect(subject).to include(active_schedule, inactive_schedule) + end + end + + context 'when the scope is active' do + let(:params) { { scope: 'active' } } + + it 'selects only active pipeline schedules' do + expect(subject.count).to be(1) + expect(subject).to include(active_schedule) + expect(subject).not_to include(inactive_schedule) + end + end + + context 'when the scope is inactive' do + let(:params) { { scope: 'inactive' } } + + it 'selects only inactive pipeline schedules' do + expect(subject.count).to be(1) + expect(subject).not_to include(active_schedule) + expect(subject).to include(inactive_schedule) + end + end + end +end diff --git a/spec/finders/pipelines_finder_spec.rb b/spec/finders/pipelines_finder_spec.rb index 6bada7b3eb9..f2aeda241c1 100644 --- a/spec/finders/pipelines_finder_spec.rb +++ b/spec/finders/pipelines_finder_spec.rb @@ -3,50 +3,205 @@ require 'spec_helper' describe PipelinesFinder do let(:project) { create(:project, :repository) } - let!(:tag_pipeline) { create(:ci_pipeline, project: project, ref: 'v1.0.0') } - let!(:branch_pipeline) { create(:ci_pipeline, project: project) } - - subject { described_class.new(project).execute(params) } + subject { described_class.new(project, params).execute } describe "#execute" do - context 'when a scope is passed' do - context 'when scope is nil' do - let(:params) { { scope: nil } } + context 'when params is empty' do + let(:params) { {} } + let!(:pipelines) { create_list(:ci_pipeline, 2, project: project) } + + it 'returns all pipelines' do + is_expected.to match_array(pipelines) + end + end + + %w[running pending].each do |target| + context "when scope is #{target}" do + let(:params) { { scope: target } } + let!(:pipeline) { create(:ci_pipeline, project: project, status: target) } - it 'selects all pipelines' do - expect(subject.count).to be 2 - expect(subject).to include tag_pipeline - expect(subject).to include branch_pipeline + it 'returns matched pipelines' do + is_expected.to eq([pipeline]) end end + end + + context 'when scope is finished' do + let(:params) { { scope: 'finished' } } + let!(:pipelines) do + [create(:ci_pipeline, project: project, status: 'success'), + create(:ci_pipeline, project: project, status: 'failed'), + create(:ci_pipeline, project: project, status: 'canceled')] + end - context 'when selecting branches' do + it 'returns matched pipelines' do + is_expected.to match_array(pipelines) + end + end + + context 'when scope is branches or tags' do + let!(:pipeline_branch) { create(:ci_pipeline, project: project) } + let!(:pipeline_tag) { create(:ci_pipeline, project: project, ref: 'v1.0.0', tag: true) } + + context 'when scope is branches' do let(:params) { { scope: 'branches' } } - it 'excludes tags' do - expect(subject).not_to include tag_pipeline - expect(subject).to include branch_pipeline + it 'returns matched pipelines' do + is_expected.to eq([pipeline_branch]) end end - context 'when selecting tags' do + context 'when scope is tags' do let(:params) { { scope: 'tags' } } - it 'excludes branches' do - expect(subject).to include tag_pipeline - expect(subject).not_to include branch_pipeline + it 'returns matched pipelines' do + is_expected.to eq([pipeline_tag]) + end + end + end + + HasStatus::AVAILABLE_STATUSES.each do |target| + context "when status is #{target}" do + let(:params) { { status: target } } + let!(:pipeline) { create(:ci_pipeline, project: project, status: target) } + + before do + exception_status =
HasStatus::AVAILABLE_STATUSES - [target] + create(:ci_pipeline, project: project, status: exception_status.first) + end + + it 'returns matched pipelines' do + is_expected.to eq([pipeline]) end end end - # Scoping to pending will speed up the test as it doesn't hit the FS - let(:params) { { scope: 'pending' } } + context 'when ref is specified' do + let!(:pipeline) { create(:ci_pipeline, project: project) } + + context 'when ref exists' do + let(:params) { { ref: 'master' } } + + it 'returns matched pipelines' do + is_expected.to eq([pipeline]) + end + end + + context 'when ref does not exist' do + let(:params) { { ref: 'invalid-ref' } } + + it 'returns empty' do + is_expected.to be_empty + end + end + end + + context 'when name is specified' do + let(:user) { create(:user) } + let!(:pipeline) { create(:ci_pipeline, project: project, user: user) } + + context 'when name exists' do + let(:params) { { name: user.name } } + + it 'returns matched pipelines' do + is_expected.to eq([pipeline]) + end + end + + context 'when name does not exist' do + let(:params) { { name: 'invalid-name' } } + + it 'returns empty' do + is_expected.to be_empty + end + end + end - it 'orders in descending order on ID' do - feature_pipeline = create(:ci_pipeline, project: project, ref: 'feature') + context 'when username is specified' do + let(:user) { create(:user) } + let!(:pipeline) { create(:ci_pipeline, project: project, user: user) } - expected_ids = [feature_pipeline.id, branch_pipeline.id, tag_pipeline.id].sort.reverse - expect(subject.map(&:id)).to eq expected_ids + context 'when username exists' do + let(:params) { { username: user.username } } + + it 'returns matched pipelines' do + is_expected.to eq([pipeline]) + end + end + + context 'when username does not exist' do + let(:params) { { username: 'invalid-username' } } + + it 'returns empty' do + is_expected.to be_empty + end + end + end + + context 'when yaml_errors is specified' do + let!(:pipeline1) { create(:ci_pipeline, project: project, yaml_errors: 'Syntax error') } + let!(:pipeline2) { create(:ci_pipeline, project: project) } + + context 'when yaml_errors is true' do + let(:params) { { yaml_errors: true } } + + it 'returns matched pipelines' do + is_expected.to eq([pipeline1]) + end + end + + context 'when yaml_errors is false' do + let(:params) { { yaml_errors: false } } + + it 'returns matched pipelines' do + is_expected.to eq([pipeline2]) + end + end + + context 'when yaml_errors is invalid' do + let(:params) { { yaml_errors: "invalid-yaml_errors" } } + + it 'returns all pipelines' do + is_expected.to match_array([pipeline1, pipeline2]) + end + end + end + + context 'when order_by and sort are specified' do + context 'when order_by user_id' do + let(:params) { { order_by: 'user_id', sort: 'asc' } } + let!(:pipelines) { create_list(:ci_pipeline, 2, project: project, user: create(:user)) } + + it 'sorts as user_id: :asc' do + is_expected.to match_array(pipelines) + end + + context 'when sort is invalid' do + let(:params) { { order_by: 'user_id', sort: 'invalid_sort' } } + + it 'sorts as user_id: :desc' do + is_expected.to eq(pipelines.sort_by { |p| -p.user.id }) + end + end + end + + context 'when order_by is invalid' do + let(:params) { { order_by: 'invalid_column', sort: 'asc' } } + let!(:pipelines) { create_list(:ci_pipeline, 2, project: project) } + + it 'sorts as id: :asc' do + is_expected.to eq(pipelines.sort_by { |p| p.id }) + end + end + + context 'when both are nil' do + let(:params) { { order_by: nil, sort: nil } } + let!(:pipelines) { 
create_list(:ci_pipeline, 2, project: project) } + + it 'sorts as id: :desc' do + is_expected.to eq(pipelines.sort_by { |p| -p.id }) + end + end end end end diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb index e44e7434c80..148adcffe3b 100644 --- a/spec/finders/projects_finder_spec.rb +++ b/spec/finders/projects_finder_spec.rb @@ -21,38 +21,144 @@ describe ProjectsFinder do create(:empty_project, :private, name: 'D', path: 'D') end - let(:finder) { described_class.new } + let(:params) { {} } + let(:current_user) { user } + let(:project_ids_relation) { nil } + let(:finder) { described_class.new(params: params, current_user: current_user, project_ids_relation: project_ids_relation) } + + subject { finder.execute } describe 'without a user' do - subject { finder.execute } + let(:current_user) { nil } it { is_expected.to eq([public_project]) } end describe 'with a user' do - subject { finder.execute(user) } - describe 'without private projects' do - it { is_expected.to eq([public_project, internal_project]) } + it { is_expected.to match_array([public_project, internal_project]) } end describe 'with private projects' do before do - private_project.add_user(user, Gitlab::Access::MASTER) + private_project.add_master(user) end - it do - is_expected.to eq([public_project, internal_project, private_project]) - end + it { is_expected.to match_array([public_project, internal_project, private_project]) } end end describe 'with project_ids_relation' do let(:project_ids_relation) { Project.where(id: internal_project.id) } - subject { finder.execute(user, project_ids_relation) } - it { is_expected.to eq([internal_project]) } end + + describe 'filter by visibility_level' do + before do + private_project.add_master(user) + end + + context 'private' do + let(:params) { { visibility_level: Gitlab::VisibilityLevel::PRIVATE } } + + it { is_expected.to eq([private_project]) } + end + + context 'internal' do + let(:params) { { visibility_level: Gitlab::VisibilityLevel::INTERNAL } } + + it { is_expected.to eq([internal_project]) } + end + + context 'public' do + let(:params) { { visibility_level: Gitlab::VisibilityLevel::PUBLIC } } + + it { is_expected.to eq([public_project]) } + end + end + + describe 'filter by tags' do + before do + public_project.tag_list.add('foo') + public_project.save! 
+ end + + let(:params) { { tag: 'foo' } } + + it { is_expected.to eq([public_project]) } + end + + describe 'filter by personal' do + let!(:personal_project) { create(:empty_project, namespace: user.namespace) } + let(:params) { { personal: true } } + + it { is_expected.to eq([personal_project]) } + end + + describe 'filter by search' do + let(:params) { { search: 'C' } } + + it { is_expected.to eq([public_project]) } + end + + describe 'filter by name for backward compatibility' do + let(:params) { { name: 'C' } } + + it { is_expected.to eq([public_project]) } + end + + describe 'filter by archived' do + let!(:archived_project) { create(:empty_project, :public, :archived, name: 'E', path: 'E') } + + context 'non_archived=true' do + let(:params) { { non_archived: true } } + + it { is_expected.to match_array([public_project, internal_project]) } + end + + context 'non_archived=false' do + let(:params) { { non_archived: false } } + + it { is_expected.to match_array([public_project, internal_project, archived_project]) } + end + + describe 'filter by archived for backward compatibility' do + let(:params) { { archived: false } } + + it { is_expected.to match_array([public_project, internal_project]) } + end + end + + describe 'filter by trending' do + let!(:trending_project) { create(:trending_project, project: public_project) } + let(:params) { { trending: true } } + + it { is_expected.to eq([public_project]) } + end + + describe 'filter by non_public' do + let(:params) { { non_public: true } } + before do + private_project.add_developer(current_user) + end + + it { is_expected.to eq([private_project]) } + end + + describe 'filter by viewable_starred_projects' do + let(:params) { { starred: true } } + before do + current_user.toggle_star(public_project) + end + + it { is_expected.to eq([public_project]) } + end + + describe 'sorting' do + let(:params) { { sort: 'name_asc' } } + + it { is_expected.to eq([internal_project, public_project]) } + end end end diff --git a/spec/finders/snippets_finder_spec.rb b/spec/finders/snippets_finder_spec.rb index 975e99c5807..35f1683eef9 100644 --- a/spec/finders/snippets_finder_spec.rb +++ b/spec/finders/snippets_finder_spec.rb @@ -8,79 +8,145 @@ describe SnippetsFinder do let(:project1) { create(:empty_project, :public, group: group) } let(:project2) { create(:empty_project, :private, group: group) } - context ':all filter' do + context 'all snippets visible to a user' do let!(:snippet1) { create(:personal_snippet, :private) } let!(:snippet2) { create(:personal_snippet, :internal) } let!(:snippet3) { create(:personal_snippet, :public) } + let!(:project_snippet1) { create(:project_snippet, :private) } + let!(:project_snippet2) { create(:project_snippet, :internal) } + let!(:project_snippet3) { create(:project_snippet, :public) } it "returns all private and internal snippets" do - snippets = SnippetsFinder.new.execute(user, filter: :all) - expect(snippets).to include(snippet2, snippet3) - expect(snippets).not_to include(snippet1) + snippets = described_class.new(user, scope: :all).execute + expect(snippets).to include(snippet2, snippet3, project_snippet2, project_snippet3) + expect(snippets).not_to include(snippet1, project_snippet1) end it "returns all public snippets" do - snippets = SnippetsFinder.new.execute(nil, filter: :all) - expect(snippets).to include(snippet3) - expect(snippets).not_to include(snippet1, snippet2) + snippets = described_class.new(nil, scope: :all).execute + expect(snippets).to include(snippet3, project_snippet3) + 
expect(snippets).not_to include(snippet1, snippet2, project_snippet1, project_snippet2) + end + + it "returns all public and internal snippets for normal user" do + snippets = described_class.new(user).execute + + expect(snippets).to include(snippet2, snippet3, project_snippet2, project_snippet3) + expect(snippets).not_to include(snippet1, project_snippet1) + end + + it "returns all public snippets for non authorized user" do + snippets = described_class.new(nil).execute + + expect(snippets).to include(snippet3, project_snippet3) + expect(snippets).not_to include(snippet1, snippet2, project_snippet1, project_snippet2) + end + + it "returns all public and authored snippets for external user" do + external_user = create(:user, :external) + authored_snippet = create(:personal_snippet, :internal, author: external_user) + + snippets = described_class.new(external_user).execute + + expect(snippets).to include(snippet3, project_snippet3, authored_snippet) + expect(snippets).not_to include(snippet1, snippet2, project_snippet1, project_snippet2) end end - context ':public filter' do + context 'filter by visibility' do let!(:snippet1) { create(:personal_snippet, :private) } let!(:snippet2) { create(:personal_snippet, :internal) } let!(:snippet3) { create(:personal_snippet, :public) } - it "returns public public snippets" do - snippets = SnippetsFinder.new.execute(nil, filter: :public) + it "returns public snippets when visibility is PUBLIC" do + snippets = described_class.new(nil, visibility: Snippet::PUBLIC).execute expect(snippets).to include(snippet3) expect(snippets).not_to include(snippet1, snippet2) end end - context ':by_user filter' do + context 'filter by scope' do + let!(:snippet1) { create(:personal_snippet, :private, author: user) } + let!(:snippet2) { create(:personal_snippet, :internal, author: user) } + let!(:snippet3) { create(:personal_snippet, :public, author: user) } + + it "returns all snippets for 'all' scope" do + snippets = described_class.new(user, scope: :all).execute + + expect(snippets).to include(snippet1, snippet2, snippet3) + end + + it "returns all snippets for 'are_private' scope" do + snippets = described_class.new(user, scope: :are_private).execute + + expect(snippets).to include(snippet1) + expect(snippets).not_to include(snippet2, snippet3) + end + + it "returns all snippets for 'are_internal' scope" do + snippets = described_class.new(user, scope: :are_internal).execute + + expect(snippets).to include(snippet2) + expect(snippets).not_to include(snippet1, snippet3) + end + + it "returns all snippets for 'are_public' scope" do + snippets = described_class.new(user, scope: :are_public).execute + + expect(snippets).to include(snippet3) + expect(snippets).not_to include(snippet1, snippet2) + end + end + + context 'filter by author' do + let!(:snippet1) { create(:personal_snippet, :private, author: user) } + let!(:snippet2) { create(:personal_snippet, :internal, author: user) } + let!(:snippet3) { create(:personal_snippet, :public, author: user) } it "returns all public and internal snippets" do - snippets = SnippetsFinder.new.execute(user1, filter: :by_user, user: user) + snippets = described_class.new(user1, author: user).execute + expect(snippets).to include(snippet2, snippet3) expect(snippets).not_to include(snippet1) end it "returns internal snippets" do - snippets = SnippetsFinder.new.execute(user, filter: :by_user, user: user, scope: "are_internal") + snippets = described_class.new(user, author: user, visibility: Snippet::INTERNAL).execute + expect(snippets).to
include(snippet2) expect(snippets).not_to include(snippet1, snippet3) end it "returns private snippets" do - snippets = SnippetsFinder.new.execute(user, filter: :by_user, user: user, scope: "are_private") + snippets = described_class.new(user, author: user, visibility: Snippet::PRIVATE).execute + expect(snippets).to include(snippet1) expect(snippets).not_to include(snippet2, snippet3) end it "returns public snippets" do - snippets = SnippetsFinder.new.execute(user, filter: :by_user, user: user, scope: "are_public") + snippets = described_class.new(user, author: user, visibility: Snippet::PUBLIC).execute + expect(snippets).to include(snippet3) expect(snippets).not_to include(snippet1, snippet2) end it "returns all snippets" do - snippets = SnippetsFinder.new.execute(user, filter: :by_user, user: user) + snippets = described_class.new(user, author: user).execute + expect(snippets).to include(snippet1, snippet2, snippet3) end it "returns only public snippets if unauthenticated user" do - snippets = SnippetsFinder.new.execute(nil, filter: :by_user, user: user) + snippets = described_class.new(nil, author: user).execute + expect(snippets).to include(snippet3) expect(snippets).not_to include(snippet2, snippet1) end end - context 'by_project filter' do + context 'filter by project' do before do @snippet1 = create(:project_snippet, :private, project: project1) @snippet2 = create(:project_snippet, :internal, project: project1) @@ -88,43 +154,52 @@ describe SnippetsFinder do end it "returns public snippets for unauthorized user" do - snippets = SnippetsFinder.new.execute(nil, filter: :by_project, project: project1) + snippets = described_class.new(nil, project: project1).execute + expect(snippets).to include(@snippet3) expect(snippets).not_to include(@snippet1, @snippet2) end it "returns public and internal snippets for non project members" do - snippets = SnippetsFinder.new.execute(user, filter: :by_project, project: project1) + snippets = described_class.new(user, project: project1).execute + expect(snippets).to include(@snippet2, @snippet3) expect(snippets).not_to include(@snippet1) end it "returns public snippets for non project members" do - snippets = SnippetsFinder.new.execute(user, filter: :by_project, project: project1, scope: "are_public") + snippets = described_class.new(user, project: project1, visibility: Snippet::PUBLIC).execute + expect(snippets).to include(@snippet3) expect(snippets).not_to include(@snippet1, @snippet2) end it "returns internal snippets for non project members" do - snippets = SnippetsFinder.new.execute(user, filter: :by_project, project: project1, scope: "are_internal") + snippets = described_class.new(user, project: project1, visibility: Snippet::INTERNAL).execute + expect(snippets).to include(@snippet2) expect(snippets).not_to include(@snippet1, @snippet3) end it "does not return private snippets for non project members" do - snippets = SnippetsFinder.new.execute(user, filter: :by_project, project: project1, scope: "are_private") + snippets = described_class.new(user, project: project1, visibility: Snippet::PRIVATE).execute + expect(snippets).not_to include(@snippet1, @snippet2, @snippet3) end it "returns all snippets for project members" do project1.team << [user, :developer] - snippets = SnippetsFinder.new.execute(user, filter: :by_project, project: project1) + + snippets = described_class.new(user, project: project1).execute + expect(snippets).to include(@snippet1, @snippet2, @snippet3) end it "returns private snippets for project members" do project1.team << 
[user, :developer] - snippets = SnippetsFinder.new.execute(user, filter: :by_project, project: project1, scope: "are_private") + + snippets = described_class.new(user, project: project1, visibility: Snippet::PRIVATE).execute + expect(snippets).to include(@snippet1) end end diff --git a/spec/finders/users_finder_spec.rb b/spec/finders/users_finder_spec.rb new file mode 100644 index 00000000000..780b309b45e --- /dev/null +++ b/spec/finders/users_finder_spec.rb @@ -0,0 +1,66 @@ +require 'spec_helper' + +describe UsersFinder do + describe '#execute' do + let!(:user1) { create(:user, username: 'johndoe') } + let!(:user2) { create(:user, :blocked, username: 'notsorandom') } + let!(:external_user) { create(:user, :external) } + let!(:omniauth_user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') } + + context 'with a normal user' do + let(:user) { create(:user) } + + it 'returns all users' do + users = described_class.new(user).execute + + expect(users).to contain_exactly(user, user1, user2, omniauth_user) + end + + it 'filters by username' do + users = described_class.new(user, username: 'johndoe').execute + + expect(users).to contain_exactly(user1) + end + + it 'filters by search' do + users = described_class.new(user, search: 'orando').execute + + expect(users).to contain_exactly(user2) + end + + it 'filters by blocked users' do + users = described_class.new(user, blocked: true).execute + + expect(users).to contain_exactly(user2) + end + + it 'filters by active users' do + users = described_class.new(user, active: true).execute + + expect(users).to contain_exactly(user, user1, omniauth_user) + end + + it 'returns no external users' do + users = described_class.new(user, external: true).execute + + expect(users).to contain_exactly(user, user1, user2, omniauth_user) + end + end + + context 'with an admin user' do + let(:admin) { create(:admin) } + + it 'filters by external users' do + users = described_class.new(admin, external: true).execute + + expect(users).to contain_exactly(external_user) + end + + it 'returns all users' do + users = described_class.new(admin).execute + + expect(users).to contain_exactly(admin, user1, user2, external_user, omniauth_user) + end + end + end +end diff --git a/spec/fixtures/api/schemas/branch.json b/spec/fixtures/api/schemas/branch.json new file mode 100644 index 00000000000..0bb74577010 --- /dev/null +++ b/spec/fixtures/api/schemas/branch.json @@ -0,0 +1,12 @@ +{ + "type": "object", + "required" : [ + "name", + "url" + ], + "properties" : { + "name": { "type": "string" }, + "url": { "type": "uri" } + }, + "additionalProperties": false +} diff --git a/spec/fixtures/api/schemas/deployments.json b/spec/fixtures/api/schemas/deployments.json new file mode 100644 index 00000000000..1112f23aab2 --- /dev/null +++ b/spec/fixtures/api/schemas/deployments.json @@ -0,0 +1,58 @@ +{ + "additionalProperties": false, + "properties": { + "deployments": { + "items": { + "additionalProperties": false, + "properties": { + "created_at": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "iid": { + "type": "integer" + }, + "last?": { + "type": "boolean" + }, + "ref": { + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + } + }, + "required": [ + "name" + ], + "type": "object" + }, + "sha": { + "type": "string" + }, + "tag": { + "type": "boolean" + } + }, + "required": [ + "sha", + "created_at", + "iid", + "tag", + "last?", + "ref", + "id" + ], + "type": "object" + }, + "minItems": 1, + "type": "array" + } + }, + 
"required": [ + "deployments" + ], + "type": "object" +} diff --git a/spec/fixtures/api/schemas/entities/merge_request.json b/spec/fixtures/api/schemas/entities/merge_request.json new file mode 100644 index 00000000000..4afbb87453e --- /dev/null +++ b/spec/fixtures/api/schemas/entities/merge_request.json @@ -0,0 +1,98 @@ +{ + "type": "object", + "properties" : { + "id": { "type": "integer" }, + "iid": { "type": "integer" }, + "author_id": { "type": "integer" }, + "description": { "type": ["string", "null"] }, + "lock_version": { "type": ["string", "null"] }, + "milestone_id": { "type": ["string", "null"] }, + "position": { "type": "integer" }, + "state": { "type": "string" }, + "title": { "type": "string" }, + "updated_by_id": { "type": ["string", "null"] }, + "created_at": { "type": "string" }, + "updated_at": { "type": "string" }, + "deleted_at": { "type": ["string", "null"] }, + "time_estimate": { "type": "integer" }, + "total_time_spent": { "type": "integer" }, + "human_time_estimate": { "type": ["integer", "null"] }, + "human_total_time_spent": { "type": ["integer", "null"] }, + "in_progress_merge_commit_sha": { "type": ["string", "null"] }, + "locked_at": { "type": ["string", "null"] }, + "merge_error": { "type": ["string", "null"] }, + "merge_commit_sha": { "type": ["string", "null"] }, + "merge_params": { "type": ["object", "null"] }, + "merge_status": { "type": "string" }, + "merge_user_id": { "type": ["integer", "null"] }, + "merge_when_pipeline_succeeds": { "type": "boolean" }, + "source_branch": { "type": "string" }, + "source_project_id": { "type": "integer" }, + "target_branch": { "type": "string" }, + "target_project_id": { "type": "integer" }, + "merge_event": { "type": ["object", "null"] }, + "closed_event": { "type": ["object", "null"] }, + "author": { "type": ["object", "null"] }, + "merge_user": { "type": ["object", "null"] }, + "diff_head_sha": { "type": ["string", "null"] }, + "diff_head_commit_short_id": { "type": ["string", "null"] }, + "merge_commit_message": { "type": ["string", "null"] }, + "pipeline": { "type": ["object", "null"] }, + "work_in_progress": { "type": "boolean" }, + "source_branch_exists": { "type": "boolean" }, + "mergeable_discussions_state": { "type": "boolean" }, + "conflicts_can_be_resolved_in_ui": { "type": "boolean" }, + "branch_missing": { "type": "boolean" }, + "has_conflicts": { "type": "boolean" }, + "can_be_merged": { "type": "boolean" }, + "project_archived": { "type": "boolean" }, + "only_allow_merge_if_pipeline_succeeds": { "type": "boolean" }, + "has_ci": { "type": "boolean" }, + "ci_status": { "type": ["string", "null"] }, + "issues_links": { + "type": "object", + "required": ["closing", "mentioned_but_not_closing", "assign_to_closing"], + "properties" : { + "closing": { "type": "string" }, + "mentioned_but_not_closing": { "type": "string" }, + "assign_to_closing": { "type": ["string", "null"] } + }, + "additionalProperties": false + }, + "source_branch_with_namespace_link": { "type": "string" }, + "current_user": { + "type": "object", + "required": [ + "can_remove_source_branch", + "can_revert_on_current_merge_request", + "can_cherry_pick_on_current_merge_request" + ], + "properties": { + "can_remove_source_branch": { "type": "boolean" }, + "can_revert_on_current_merge_request": { "type": ["boolean", "null"] }, + "can_cherry_pick_on_current_merge_request": { "type": ["boolean", "null"] } + }, + "additionalProperties": false + }, + "target_branch_commits_path": { "type": "string" }, + "source_branch_path": { "type": "string" }, + 
"conflict_resolution_path": { "type": ["string", "null"] }, + "cancel_merge_when_pipeline_succeeds_path": { "type": "string" }, + "create_issue_to_resolve_discussions_path": { "type": "string" }, + "merge_path": { "type": "string" }, + "cherry_pick_in_fork_path": { "type": ["string", "null"] }, + "revert_in_fork_path": { "type": ["string", "null"] }, + "email_patches_path": { "type": "string" }, + "plain_diff_path": { "type": "string" }, + "status_path": { "type": "string" }, + "new_blob_path": { "type": "string" }, + "merge_check_path": { "type": "string" }, + "ci_environments_status_path": { "type": "string" }, + "merge_commit_message_with_description": { "type": "string" }, + "diverged_commits_count": { "type": "integer" }, + "commit_change_content_path": { "type": "string" }, + "remove_wip_path": { "type": "string" }, + "commits_count": { "type": "integer" } + }, + "additionalProperties": false +} diff --git a/spec/fixtures/api/schemas/entities/merge_request_basic.json b/spec/fixtures/api/schemas/entities/merge_request_basic.json new file mode 100644 index 00000000000..6b14188582a --- /dev/null +++ b/spec/fixtures/api/schemas/entities/merge_request_basic.json @@ -0,0 +1,15 @@ +{ + "type": "object", + "properties" : { + "state": { "type": "string" }, + "merge_status": { "type": "string" }, + "source_branch_exists": { "type": "boolean" }, + "time_estimate": { "type": "integer" }, + "total_time_spent": { "type": "integer" }, + "human_time_estimate": { "type": ["string", "null"] }, + "human_total_time_spent": { "type": ["string", "null"] }, + "merge_error": { "type": ["string", "null"] }, + "assignee_id": { "type": ["integer", "null"] } + }, + "additionalProperties": false +} diff --git a/spec/fixtures/api/schemas/issue.json b/spec/fixtures/api/schemas/issue.json index 21c078e0f44..ff86437fdd5 100644 --- a/spec/fixtures/api/schemas/issue.json +++ b/spec/fixtures/api/schemas/issue.json @@ -46,6 +46,24 @@ "username": { "type": "string" }, "avatar_url": { "type": "uri" } }, + "assignees": { + "type": "array", + "items": { + "type": ["object", "null"], + "required": [ + "id", + "name", + "username", + "avatar_url" + ], + "properties": { + "id": { "type": "integer" }, + "name": { "type": "string" }, + "username": { "type": "string" }, + "avatar_url": { "type": "uri" } + } + } + }, "subscribed": { "type": ["boolean", "null"] } }, "additionalProperties": false diff --git a/spec/fixtures/api/schemas/merge_request.json b/spec/fixtures/api/schemas/merge_request.json new file mode 100644 index 00000000000..36962660cd9 --- /dev/null +++ b/spec/fixtures/api/schemas/merge_request.json @@ -0,0 +1,12 @@ +{ + "type": "object", + "required" : [ + "iid", + "url" + ], + "properties" : { + "iid": { "type": "integer" }, + "url": { "type": "uri" } + }, + "additionalProperties": false +} diff --git a/spec/fixtures/api/schemas/pipeline.json b/spec/fixtures/api/schemas/pipeline.json new file mode 100644 index 00000000000..55511d17b5e --- /dev/null +++ b/spec/fixtures/api/schemas/pipeline.json @@ -0,0 +1,354 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "definitions": {}, + "id": "http://example.com/example.json", + "properties": { + "commit": { + "id": "/properties/commit", + "properties": { + "author": { + "id": "/properties/commit/properties/author", + "type": "null" + }, + "author_email": { + "id": "/properties/commit/properties/author_email", + "type": "string" + }, + "author_gravatar_url": { + "id": "/properties/commit/properties/author_gravatar_url", + "type": "string" + }, + "author_name": 
{ + "id": "/properties/commit/properties/author_name", + "type": "string" + }, + "authored_date": { + "id": "/properties/commit/properties/authored_date", + "type": "string" + }, + "commit_path": { + "id": "/properties/commit/properties/commit_path", + "type": "string" + }, + "commit_url": { + "id": "/properties/commit/properties/commit_url", + "type": "string" + }, + "committed_date": { + "id": "/properties/commit/properties/committed_date", + "type": "string" + }, + "committer_email": { + "id": "/properties/commit/properties/committer_email", + "type": "string" + }, + "committer_name": { + "id": "/properties/commit/properties/committer_name", + "type": "string" + }, + "created_at": { + "id": "/properties/commit/properties/created_at", + "type": "string" + }, + "id": { + "id": "/properties/commit/properties/id", + "type": "string" + }, + "message": { + "id": "/properties/commit/properties/message", + "type": "string" + }, + "parent_ids": { + "id": "/properties/commit/properties/parent_ids", + "items": { + "id": "/properties/commit/properties/parent_ids/items", + "type": "string" + }, + "type": "array" + }, + "short_id": { + "id": "/properties/commit/properties/short_id", + "type": "string" + }, + "title": { + "id": "/properties/commit/properties/title", + "type": "string" + } + }, + "type": "object" + }, + "created_at": { + "id": "/properties/created_at", + "type": "string" + }, + "details": { + "id": "/properties/details", + "properties": { + "artifacts": { + "id": "/properties/details/properties/artifacts", + "items": {}, + "type": "array" + }, + "duration": { + "id": "/properties/details/properties/duration", + "type": "integer" + }, + "finished_at": { + "id": "/properties/details/properties/finished_at", + "type": "string" + }, + "manual_actions": { + "id": "/properties/details/properties/manual_actions", + "items": {}, + "type": "array" + }, + "stages": { + "id": "/properties/details/properties/stages", + "items": { + "id": "/properties/details/properties/stages/items", + "properties": { + "dropdown_path": { + "id": "/properties/details/properties/stages/items/properties/dropdown_path", + "type": "string" + }, + "groups": { + "id": "/properties/details/properties/stages/items/properties/groups", + "items": { + "id": "/properties/details/properties/stages/items/properties/groups/items", + "properties": { + "name": { + "id": "/properties/details/properties/stages/items/properties/groups/items/properties/name", + "type": "string" + }, + "size": { + "id": "/properties/details/properties/stages/items/properties/groups/items/properties/size", + "type": "integer" + }, + "status": { + "id": "/properties/details/properties/stages/items/properties/groups/items/properties/status", + "properties": { + "details_path": { + "id": "/properties/details/properties/stages/items/properties/groups/items/properties/status/properties/details_path", + "type": "null" + }, + "favicon": { + "id": "/properties/details/properties/stages/items/properties/groups/items/properties/status/properties/favicon", + "type": "string" + }, + "group": { + "id": "/properties/details/properties/stages/items/properties/groups/items/properties/status/properties/group", + "type": "string" + }, + "has_details": { + "id": "/properties/details/properties/stages/items/properties/groups/items/properties/status/properties/has_details", + "type": "boolean" + }, + "icon": { + "id": "/properties/details/properties/stages/items/properties/groups/items/properties/status/properties/icon", + "type": "string" + }, + "label": { + "id": 
"/properties/details/properties/stages/items/properties/groups/items/properties/status/properties/label", + "type": "string" + }, + "text": { + "id": "/properties/details/properties/stages/items/properties/groups/items/properties/status/properties/text", + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "type": "array" + }, + "name": { + "id": "/properties/details/properties/stages/items/properties/name", + "type": "string" + }, + "path": { + "id": "/properties/details/properties/stages/items/properties/path", + "type": "string" + }, + "status": { + "id": "/properties/details/properties/stages/items/properties/status", + "properties": { + "details_path": { + "id": "/properties/details/properties/stages/items/properties/status/properties/details_path", + "type": "string" + }, + "favicon": { + "id": "/properties/details/properties/stages/items/properties/status/properties/favicon", + "type": "string" + }, + "group": { + "id": "/properties/details/properties/stages/items/properties/status/properties/group", + "type": "string" + }, + "has_details": { + "id": "/properties/details/properties/stages/items/properties/status/properties/has_details", + "type": "boolean" + }, + "icon": { + "id": "/properties/details/properties/stages/items/properties/status/properties/icon", + "type": "string" + }, + "label": { + "id": "/properties/details/properties/stages/items/properties/status/properties/label", + "type": "string" + }, + "text": { + "id": "/properties/details/properties/stages/items/properties/status/properties/text", + "type": "string" + } + }, + "type": "object" + }, + "title": { + "id": "/properties/details/properties/stages/items/properties/title", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "status": { + "id": "/properties/details/properties/status", + "properties": { + "details_path": { + "id": "/properties/details/properties/status/properties/details_path", + "type": "string" + }, + "favicon": { + "id": "/properties/details/properties/status/properties/favicon", + "type": "string" + }, + "group": { + "id": "/properties/details/properties/status/properties/group", + "type": "string" + }, + "has_details": { + "id": "/properties/details/properties/status/properties/has_details", + "type": "boolean" + }, + "icon": { + "id": "/properties/details/properties/status/properties/icon", + "type": "string" + }, + "label": { + "id": "/properties/details/properties/status/properties/label", + "type": "string" + }, + "text": { + "id": "/properties/details/properties/status/properties/text", + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "flags": { + "id": "/properties/flags", + "properties": { + "cancelable": { + "id": "/properties/flags/properties/cancelable", + "type": "boolean" + }, + "latest": { + "id": "/properties/flags/properties/latest", + "type": "boolean" + }, + "retryable": { + "id": "/properties/flags/properties/retryable", + "type": "boolean" + }, + "stuck": { + "id": "/properties/flags/properties/stuck", + "type": "boolean" + }, + "triggered": { + "id": "/properties/flags/properties/triggered", + "type": "boolean" + }, + "yaml_errors": { + "id": "/properties/flags/properties/yaml_errors", + "type": "boolean" + } + }, + "type": "object" + }, + "id": { + "id": "/properties/id", + "type": "integer" + }, + "path": { + "id": "/properties/path", + "type": "string" + }, + "ref": { + "id": "/properties/ref", + "properties": { + "branch": { + "id": "/properties/ref/properties/branch", + "type": 
"boolean" + }, + "name": { + "id": "/properties/ref/properties/name", + "type": "string" + }, + "path": { + "id": "/properties/ref/properties/path", + "type": "string" + }, + "tag": { + "id": "/properties/ref/properties/tag", + "type": "boolean" + } + }, + "type": "object" + }, + "retry_path": { + "id": "/properties/retry_path", + "type": "string" + }, + "updated_at": { + "id": "/properties/updated_at", + "type": "string" + }, + "user": { + "id": "/properties/user", + "properties": { + "avatar_url": { + "id": "/properties/user/properties/avatar_url", + "type": "string" + }, + "id": { + "id": "/properties/user/properties/id", + "type": "integer" + }, + "name": { + "id": "/properties/user/properties/name", + "type": "string" + }, + "state": { + "id": "/properties/user/properties/state", + "type": "string" + }, + "username": { + "id": "/properties/user/properties/username", + "type": "string" + }, + "web_url": { + "id": "/properties/user/properties/web_url", + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" +} diff --git a/spec/fixtures/api/schemas/public_api/v4/issues.json b/spec/fixtures/api/schemas/public_api/v4/issues.json index 52199e75734..2d1c84ee93d 100644 --- a/spec/fixtures/api/schemas/public_api/v4/issues.json +++ b/spec/fixtures/api/schemas/public_api/v4/issues.json @@ -33,6 +33,21 @@ }, "additionalProperties": false }, + "assignees": { + "type": "array", + "items": { + "type": ["object", "null"], + "properties": { + "name": { "type": "string" }, + "username": { "type": "string" }, + "id": { "type": "integer" }, + "state": { "type": "string" }, + "avatar_url": { "type": "uri" }, + "web_url": { "type": "uri" } + }, + "additionalProperties": false + } + }, "assignee": { "type": ["object", "null"], "properties": { @@ -67,7 +82,7 @@ "required": [ "id", "iid", "project_id", "title", "description", "state", "created_at", "updated_at", "labels", - "milestone", "assignee", "author", "user_notes_count", + "milestone", "assignees", "author", "user_notes_count", "upvotes", "downvotes", "due_date", "confidential", "web_url" ], diff --git a/spec/fixtures/api/schemas/public_api/v4/user/public.json b/spec/fixtures/api/schemas/public_api/v4/user/public.json index 5587cfec61a..faa126b65f2 100644 --- a/spec/fixtures/api/schemas/public_api/v4/user/public.json +++ b/spec/fixtures/api/schemas/public_api/v4/user/public.json @@ -9,7 +9,6 @@ "avatar_url", "web_url", "created_at", - "is_admin", "bio", "location", "skype", @@ -43,7 +42,6 @@ "avatar_url": { "type": "string" }, "web_url": { "type": "string" }, "created_at": { "type": "date" }, - "is_admin": { "type": "boolean" }, "bio": { "type": ["string", "null"] }, "location": { "type": ["string", "null"] }, "skype": { "type": "string" }, diff --git a/spec/fixtures/emails/forwarded_new_issue.eml b/spec/fixtures/emails/forwarded_new_issue.eml new file mode 100644 index 00000000000..258106bb897 --- /dev/null +++ b/spec/fixtures/emails/forwarded_new_issue.eml @@ -0,0 +1,25 @@ +Delivered-To: incoming+gitlabhq/gitlabhq+auth_token@appmail.adventuretime.ooo +Return-Path: <jake@adventuretime.ooo> +Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400 +Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for 
<incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400 +Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700 +Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700 +Date: Thu, 13 Jun 2013 17:03:48 -0400 +From: Jake the Dog <jake@adventuretime.ooo> +Delivered-To: support@adventuretime.ooo +To: support@adventuretime.ooo +Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com> +Subject: New Issue by email +Mime-Version: 1.0 +Content-Type: text/plain; + charset=ISO-8859-1 +Content-Transfer-Encoding: 7bit +X-Sieve: CMU Sieve 2.2 +X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu, + 13 Jun 2013 14:03:48 -0700 (PDT) +X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1 + +The reply by email functionality should be extended to allow creating a new issue by email. + +* Allow an admin to specify which project the issue should be created under by checking the sender domain. +* Possibly allow the use of regular expression matches within the subject/body to specify which project the issue should be created under. diff --git a/spec/fixtures/markdown.md.erb b/spec/fixtures/markdown.md.erb index 0cdbc32431d..51a3e91d201 100644 --- a/spec/fixtures/markdown.md.erb +++ b/spec/fixtures/markdown.md.erb @@ -116,7 +116,7 @@ Linking to a file relative to this project's repository should work. Because life would be :zzz: without Emoji, right? :rocket: -Get ready for the Emoji :bomb:: :+1::-1::ok_hand::wave::v::raised_hand::muscle: +Get ready for the Emoji :bomb: : :+1: :-1: :ok_hand: :wave: :v: :raised_hand: :muscle: ### TableOfContentsFilter diff --git a/spec/fixtures/metrics.json b/spec/fixtures/metrics.json new file mode 100644 index 00000000000..06427adce57 --- /dev/null +++ b/spec/fixtures/metrics.json @@ -0,0 +1 @@ 
+{"success":true,"metrics":{"memory_values":[{"metric":{},"values":[[1490935421.33,"9.832775297619047"],[1490935481.33,"9.8359375"],[1490935541.33,"9.837983630952381"],[1490935601.33,"9.840401785714286"],[1490935661.33,"9.84375"],[1490935721.33,"9.846168154761905"],[1490935781.33,"9.849516369047619"],[1490935841.33,"9.85249255952381"],[1490935901.33,"9.855096726190476"],[1490935961.33,"9.845796130952381"],[1490936021.33,"9.847284226190476"],[1490936081.33,"9.84468005952381"],[1490936141.33,"9.847470238095237"],[1490936201.33,"9.850818452380953"],[1490936261.33,"9.852864583333334"],[1490936321.33,"9.854910714285714"],[1490936381.33,"9.857700892857142"],[1490936441.33,"9.865513392857142"],[1490936501.33,"9.874813988095237"],[1490936561.33,"9.866071428571429"],[1490936621.33,"9.849330357142858"],[1490936681.33,"9.841331845238095"],[1490936741.33,"9.853236607142858"],[1490936801.33,"9.839657738095237"],[1490936861.33,"9.841517857142858"],[1490936921.33,"9.852864583333334"],[1490936981.33,"9.851376488095237"],[1490937041.33,"9.837611607142858"],[1490937101.33,"9.840401785714286"],[1490937161.33,"9.843377976190476"],[1490937221.33,"9.845796130952381"],[1490937281.33,"9.84858630952381"],[1490937341.33,"9.866071428571429"],[1490937401.33,"9.852864583333334"],[1490937461.33,"9.855840773809524"],[1490937521.33,"9.837797619047619"],[1490937581.33,"9.840959821428571"],[1490937641.33,"9.848958333333334"],[1490937701.33,"9.844308035714286"],[1490937761.33,"9.845982142857142"],[1490937821.33,"9.83984375"],[1490937881.33,"9.830171130952381"],[1490937941.33,"9.83686755952381"],[1490938001.33,"9.834263392857142"],[1490938061.33,"9.836309523809524"],[1490938121.33,"9.83984375"],[1490938181.33,"9.832775297619047"],[1490938241.33,"9.818266369047619"],[1490938301.33,"9.820126488095237"],[1490938361.33,"9.824032738095237"],[1490938421.33,"9.826078869047619"],[1490938481.33,"9.817708333333334"],[1490938541.33,"9.811755952380953"],[1490938601.33,"9.811197916666666"],[1490938661.33,"9.81156994047619"],[1490938721.33,"9.812313988095237"],[1490938781.33,"9.813058035714286"],[1490938841.33,"9.81343005952381"],[1490938901.33,"9.81547619047619"],[1490938961.33,"9.818824404761905"],[1490939021.33,"9.819754464285714"],[1490939081.33,"9.820684523809524"],[1490939141.33,"9.824776785714286"],[1490939201.33,"9.826078869047619"],[1490939261.33,"9.828311011904763"],[1490939321.33,"9.820870535714286"],[1490939381.33,"9.823846726190476"],[1490939441.33,"9.824404761904763"],[1490939501.33,"9.82905505952381"],[1490939561.33,"9.832775297619047"],[1490939621.33,"9.835565476190476"],[1490939681.33,"9.833333333333334"],[1490939741.33,"9.835379464285714"],[1490939801.33,"9.837239583333334"],[1490939861.33,"9.839285714285714"],[1490939921.33,"9.829613095238095"],[1490939981.33,"9.832403273809524"],[1490940041.33,"9.835751488095237"],[1490940101.33,"9.837797619047619"],[1490940161.33,"9.840959821428571"],[1490940221.33,"9.84375"],[1490940281.33,"9.846354166666666"],[1490940341.33,"9.853980654761905"],[1490940401.33,"9.852678571428571"],[1490940461.33,"9.861979166666666"],[1490940521.33,"9.857700892857142"],[1490940581.33,"9.861793154761905"],[1490940641.33,"9.86421130952381"],[1490940701.33,"9.867001488095237"],[1490940761.33,"9.867931547619047"],[1490940821.33,"9.859933035714286"],[1490940881.33,"9.86235119047619"],[1490940941.33,"9.865141369047619"],[1490941001.33,"9.866443452380953"],[1490941061.33,"9.868861607142858"],[1490941121.33,"9.871465773809524"],[1490941181.33,"9.873511904761905"],[1490941241.33,"9.875558035714286"],[1490941301
.33,"9.87797619047619"],[1490941361.33,"9.881324404761905"],[1490941421.33,"9.888392857142858"],[1490941481.33,"9.888392857142858"],[1490941541.33,"9.89546130952381"],[1490941601.33,"9.898065476190476"],[1490941661.33,"9.885044642857142"],[1490941721.33,"9.872395833333334"],[1490941781.33,"9.870349702380953"],[1490941841.33,"9.873325892857142"],[1490941901.33,"9.875558035714286"],[1490941961.33,"9.878534226190476"],[1490942021.33,"9.87983630952381"],[1490942081.33,"9.884300595238095"],[1490942141.33,"9.891927083333334"],[1490942201.33,"9.890252976190476"],[1490942261.33,"9.891927083333334"],[1490942321.33,"9.893787202380953"],[1490942381.33,"9.892113095238095"],[1490942441.33,"9.900111607142858"],[1490942501.33,"9.893415178571429"],[1490942561.33,"9.895647321428571"],[1490942621.33,"9.889322916666666"],[1490942681.33,"9.883556547619047"],[1490942741.33,"9.885602678571429"],[1490942801.33,"9.88764880952381"],[1490942861.33,"9.898623511904763"],[1490942921.33,"9.89453125"],[1490942981.33,"9.885044642857142"],[1490943041.33,"9.874813988095237"],[1490943101.33,"9.880766369047619"],[1490943161.33,"9.868675595238095"],[1490943221.33,"9.864769345238095"],[1490943281.33,"9.852864583333334"],[1490943341.33,"9.855096726190476"],[1490943401.33,"9.857514880952381"],[1490943461.33,"9.859747023809524"],[1490943521.33,"9.861793154761905"],[1490943581.33,"9.864025297619047"],[1490943641.33,"9.857514880952381"],[1490943701.33,"9.859002976190476"],[1490943761.33,"9.860677083333334"],[1490943821.33,"9.864025297619047"],[1490943881.33,"9.86625744047619"],[1490943941.33,"9.873325892857142"],[1490944001.33,"9.876674107142858"],[1490944061.33,"9.888950892857142"],[1490944121.33,"9.878534226190476"],[1490944181.33,"9.880766369047619"],[1490944241.33,"9.884858630952381"],[1490944301.33,"9.870535714285714"],[1490944361.33,"9.864769345238095"],[1490944421.33,"9.851190476190476"],[1490944481.33,"9.85249255952381"],[1490944541.33,"9.85844494047619"],[1490944601.33,"9.855840773809524"],[1490944661.33,"9.868303571428571"],[1490944721.33,"9.859188988095237"],[1490944781.33,"9.860491071428571"],[1490944841.33,"9.863467261904763"],[1490944901.33,"9.864025297619047"],[1490944961.33,"9.857514880952381"],[1490945021.33,"9.843377976190476"],[1490945081.33,"9.836123511904763"],[1490945141.33,"9.837983630952381"],[1490945201.33,"9.84077380952381"],[1490945261.33,"9.847284226190476"],[1490945321.33,"9.849702380952381"],[1490945381.33,"9.827380952380953"],[1490945441.33,"9.82124255952381"],[1490945501.33,"9.822916666666666"],[1490945561.33,"9.824962797619047"],[1490945621.33,"9.814546130952381"],[1490945681.33,"9.805989583333334"],[1490945741.33,"9.791294642857142"],[1490945801.33,"9.786458333333334"],[1490945861.33,"9.77641369047619"],[1490945921.33,"9.76655505952381"],[1490945981.33,"9.76953125"],[1490946041.33,"9.742745535714286"],[1490946101.33,"9.753162202380953"],[1490946161.33,"9.739583333333334"],[1490946221.33,"9.742931547619047"],[1490946281.33,"9.743489583333334"],[1490946341.33,"9.746837797619047"],[1490946401.33,"9.749255952380953"],[1490946461.33,"9.737165178571429"],[1490946521.33,"9.739583333333334"],[1490946581.33,"9.74311755952381"],[1490946641.33,"9.751302083333334"],[1490946701.33,"9.761346726190476"],[1490946761.33,"9.747953869047619"],[1490946821.33,"9.75093005952381"],[1490946881.33,"9.755580357142858"],[1490946941.33,"9.759858630952381"],[1490947001.33,"9.761904761904763"],[1490947061.33,"9.77641369047619"],[1490947121.33,"9.768787202380953"],[1490947181.33,"9.772879464285714"],[1490947241.33,"9.777715773809
524"],[1490947301.33,"9.779947916666666"],[1490947361.33,"9.772135416666666"],[1490947421.33,"9.77641369047619"],[1490947481.33,"9.783668154761905"],[1490947541.33,"9.780505952380953"],[1490947601.33,"9.777157738095237"],[1490947661.33,"9.759114583333334"],[1490947721.33,"9.761532738095237"],[1490947781.33,"9.763392857142858"],[1490947841.33,"9.765252976190476"],[1490947901.33,"9.760602678571429"],[1490947961.33,"9.751488095238095"],[1490948021.33,"9.757998511904763"],[1490948081.33,"9.759486607142858"],[1490948141.33,"9.754650297619047"],[1490948201.33,"9.728050595238095"],[1490948261.33,"9.73530505952381"],[1490948321.33,"9.718005952380953"],[1490948381.33,"9.732142857142858"],[1490948441.33,"9.725260416666666"],[1490948501.33,"9.728422619047619"],[1490948561.33,"9.72953869047619"],[1490948621.33,"9.733072916666666"],[1490948681.33,"9.736421130952381"],[1490948741.33,"9.749627976190476"],[1490948801.33,"9.740141369047619"],[1490948861.33,"9.74311755952381"],[1490948921.33,"9.736607142857142"],[1490948981.33,"9.744233630952381"],[1490949041.33,"9.723772321428571"],[1490949101.33,"9.731956845238095"],[1490949161.33,"9.732514880952381"],[1490949221.33,"9.734747023809524"],[1490949281.33,"9.737723214285714"],[1490949341.33,"9.737909226190476"],[1490949401.33,"9.742373511904763"],[1490949461.33,"9.744977678571429"],[1490949521.33,"9.748139880952381"],[1490949581.33,"9.751302083333334"],[1490949641.33,"9.757440476190476"],[1490949701.33,"9.756324404761905"],[1490949761.33,"9.749813988095237"],[1490949821.33,"9.739025297619047"],[1490949881.33,"9.726004464285714"],[1490949941.33,"9.728236607142858"],[1490950001.33,"9.732514880952381"],[1490950061.33,"9.735119047619047"],[1490950121.33,"9.737165178571429"],[1490950181.33,"9.739025297619047"],[1490950241.33,"9.740513392857142"],[1490950301.33,"9.749441964285714"],[1490950361.33,"9.736979166666666"],[1490950421.33,"9.741629464285714"],[1490950481.33,"9.743303571428571"],[1490950541.33,"9.74609375"],[1490950601.33,"9.75093005952381"],[1490950661.33,"9.724330357142858"],[1490950721.33,"9.726748511904763"],[1490950781.33,"9.733258928571429"],[1490950841.33,"9.744233630952381"],[1490950901.33,"9.734375"],[1490950961.33,"9.737537202380953"],[1490951021.33,"9.741071428571429"],[1490951081.33,"9.757254464285714"],[1490951141.33,"9.760044642857142"],[1490951201.33,"9.755952380952381"],[1490951261.33,"9.745349702380953"],[1490951321.33,"9.746651785714286"],[1490951381.33,"9.749441964285714"],[1490951441.33,"9.751674107142858"],[1490951501.33,"9.757998511904763"],[1490951561.33,"9.756510416666666"],[1490951621.33,"9.76264880952381"],[1490951681.33,"9.765625"],[1490951741.33,"9.757254464285714"],[1490951801.33,"9.751674107142858"],[1490951861.33,"9.754278273809524"],[1490951921.33,"9.744233630952381"],[1490951981.33,"9.745349702380953"],[1490952041.33,"9.748883928571429"],[1490952101.33,"9.753162202380953"],[1490952161.33,"9.747953869047619"],[1490952221.33,"9.750186011904763"],[1490952281.33,"9.751116071428571"],[1490952341.33,"9.753162202380953"],[1490952401.33,"9.758928571428571"],[1490952461.33,"9.758928571428571"],[1490952521.33,"9.755394345238095"],[1490952581.33,"9.758928571428571"],[1490952641.33,"9.761160714285714"],[1490952701.33,"9.763206845238095"],[1490952761.33,"9.767857142857142"],[1490952821.33,"9.765438988095237"],[1490952881.33,"9.768229166666666"],[1490952941.33,"9.780877976190476"],[1490953001.33,"9.77250744047619"],[1490953061.33,"9.784412202380953"],[1490953121.33,"9.77827380952381"],[1490953181.33,"9.781063988095237"],[1490953241.33,"9.
783668154761905"],[1490953301.33,"9.787016369047619"],[1490953361.33,"9.784970238095237"],[1490953421.33,"9.787946428571429"],[1490953481.33,"9.788690476190476"],[1490953541.33,"9.790922619047619"],[1490953601.33,"9.792596726190476"],[1490953661.33,"9.79594494047619"],[1490953721.33,"9.79780505952381"],[1490953781.33,"9.800223214285714"],[1490953841.33,"9.794828869047619"],[1490953901.33,"9.799293154761905"],[1490953961.33,"9.801525297619047"],[1490954021.33,"9.786458333333334"],[1490954081.33,"9.773809523809524"],[1490954141.33,"9.767485119047619"],[1490954201.33,"9.760044642857142"],[1490954261.33,"9.751116071428571"],[1490954321.33,"9.752790178571429"],[1490954381.33,"9.753162202380953"],[1490954441.33,"9.744419642857142"],[1490954501.33,"9.73921130952381"],[1490954561.33,"9.74125744047619"],[1490954621.33,"9.743303571428571"],[1490954681.33,"9.745535714285714"],[1490954741.33,"9.746837797619047"],[1490954801.33,"9.749255952380953"],[1490954861.33,"9.744419642857142"],[1490954921.33,"9.745349702380953"],[1490954981.33,"9.74702380952381"],[1490955041.33,"9.738467261904763"],[1490955101.33,"9.740141369047619"],[1490955161.33,"9.747767857142858"],[1490955221.33,"9.750372023809524"],[1490955281.33,"9.747767857142858"],[1490955341.33,"9.739025297619047"],[1490955401.33,"9.745349702380953"],[1490955461.33,"9.730282738095237"],[1490955521.33,"9.73139880952381"],[1490955581.33,"9.722842261904763"],[1490955641.33,"9.725818452380953"],[1490955701.33,"9.72749255952381"],[1490955761.33,"9.72953869047619"],[1490955821.33,"9.731956845238095"],[1490955881.33,"9.735677083333334"],[1490955941.33,"9.738467261904763"],[1490956001.33,"9.735863095238095"],[1490956061.33,"9.743675595238095"],[1490956121.33,"9.730840773809524"],[1490956181.33,"9.734747023809524"],[1490956241.33,"9.736235119047619"],[1490956301.33,"9.736607142857142"],[1490956361.33,"9.73921130952381"],[1490956421.33,"9.742001488095237"],[1490956481.33,"9.743675595238095"],[1490956541.33,"9.744977678571429"],[1490956601.33,"9.748697916666666"],[1490956661.33,"9.760602678571429"],[1490956721.33,"9.751302083333334"],[1490956781.33,"9.754278273809524"],[1490956841.33,"9.756324404761905"],[1490956901.33,"9.758370535714286"],[1490956961.33,"9.760416666666666"],[1490957021.33,"9.763020833333334"],[1490957081.33,"9.766183035714286"],[1490957141.33,"9.764508928571429"],[1490957201.33,"9.767299107142858"],[1490957261.33,"9.768787202380953"],[1490957321.33,"9.771019345238095"],[1490957381.33,"9.773623511904763"],[1490957441.33,"9.775111607142858"],[1490957501.33,"9.779389880952381"],[1490957561.33,"9.780691964285714"],[1490957621.33,"9.788690476190476"],[1490957681.33,"9.794828869047619"],[1490957741.33,"9.779203869047619"],[1490957801.33,"9.787016369047619"],[1490957861.33,"9.783854166666666"],[1490957921.33,"9.78515625"],[1490957981.33,"9.786644345238095"],[1490958041.33,"9.787946428571429"],[1490958101.33,"9.800409226190476"],[1490958161.33,"9.787202380952381"],[1490958221.33,"9.789806547619047"],[1490958281.33,"9.791852678571429"],[1490958341.33,"9.788876488095237"],[1490958401.33,"9.78515625"],[1490958461.33,"9.7890625"],[1490958521.33,"9.791108630952381"],[1490958581.33,"9.792596726190476"],[1490958641.33,"9.794828869047619"],[1490958701.33,"9.793154761904763"],[1490958761.33,"9.799293154761905"],[1490958821.33,"9.797247023809524"],[1490958881.33,"9.794084821428571"],[1490958941.33,"9.796875"],[1490959001.33,"9.763950892857142"],[1490959061.33,"9.765997023809524"],[1490959121.33,"9.767671130952381"],[1490959181.33,"9.77046130952381"],[1490959241.33
,"9.773809523809524"],[1490959301.33,"9.765252976190476"],[1490959361.33,"9.767485119047619"],[1490959421.33,"9.76953125"],[1490959481.33,"9.774553571428571"],[1490959541.33,"9.77734375"],[1490959601.33,"9.778459821428571"],[1490959661.33,"9.780877976190476"],[1490959721.33,"9.783296130952381"],[1490959781.33,"9.794828869047619"],[1490959841.33,"9.787016369047619"],[1490959901.33,"9.798735119047619"],[1490959961.33,"9.803013392857142"],[1490960021.33,"9.801525297619047"],[1490960081.33,"9.804873511904763"],[1490960141.33,"9.80078125"],[1490960201.33,"9.80375744047619"],[1490960261.33,"9.805059523809524"],[1490960321.33,"9.807849702380953"],[1490960381.33,"9.810825892857142"],[1490960441.33,"9.813058035714286"],[1490960501.33,"9.813616071428571"],[1490960561.33,"9.815104166666666"],[1490960621.33,"9.81733630952381"],[1490960681.33,"9.812872023809524"],[1490960741.33,"9.814546130952381"],[1490960801.33,"9.808035714285714"],[1490960861.33,"9.810081845238095"],[1490960921.33,"9.813058035714286"],[1490960981.33,"9.825892857142858"],[1490961041.33,"9.816964285714286"],[1490961101.33,"9.82421875"],[1490961161.33,"9.80952380952381"],[1490961221.33,"9.804315476190476"],[1490961281.33,"9.797619047619047"],[1490961341.33,"9.80078125"],[1490961401.33,"9.802827380952381"],[1490961461.33,"9.803199404761905"],[1490961521.33,"9.80952380952381"],[1490961581.33,"9.806919642857142"],[1490961641.33,"9.808779761904763"],[1490961701.33,"9.811197916666666"],[1490961761.33,"9.813244047619047"],[1490961821.33,"9.815662202380953"],[1490961881.33,"9.819940476190476"],[1490961941.33,"9.822172619047619"],[1490962001.33,"9.82328869047619"],[1490962061.33,"9.826822916666666"],[1490962121.33,"9.829241071428571"],[1490962181.33,"9.832589285714286"],[1490962241.33,"9.835565476190476"],[1490962301.33,"9.839471726190476"],[1490962361.33,"9.825520833333334"],[1490962421.33,"9.829427083333334"],[1490962481.33,"9.832217261904763"],[1490962541.33,"9.839285714285714"],[1490962601.33,"9.837611607142858"],[1490962661.33,"9.841145833333334"],[1490962721.33,"9.834077380952381"],[1490962781.33,"9.837239583333334"],[1490962841.33,"9.841703869047619"],[1490962901.33,"9.844308035714286"],[1490962961.33,"9.838727678571429"],[1490963021.33,"9.840587797619047"],[1490963081.33,"9.849516369047619"],[1490963141.33,"9.845238095238095"],[1490963201.33,"9.84375"],[1490963261.33,"9.838541666666666"],[1490963321.33,"9.841889880952381"],[1490963381.33,"9.846354166666666"],[1490963441.33,"9.832403273809524"],[1490963501.33,"9.833891369047619"],[1490963561.33,"9.808221726190476"],[1490963621.33,"9.812686011904763"],[1490963681.33,"9.814918154761905"],[1490963741.33,"9.817708333333334"],[1490963801.33,"9.80561755952381"],[1490963861.33,"9.80859375"],[1490963921.33,"9.811197916666666"],[1490963981.33,"9.802269345238095"],[1490964041.33,"9.798177083333334"],[1490964101.33,"9.80078125"],[1490964161.33,"9.815104166666666"],[1490964221.33,"9.806361607142858"]]}],"memory_current":[{"metric":{},"value":[1490964221.593,"9.806361607142858"]}],"cpu_values":[{"metric":{},"values":[[1490935421.446,"0.011520035833333402"],[1490935481.446,"0.010738020634921052"],[1490935541.446,"0.011830812658730162"],[1490935601.446,"0.011666519206349292"],[1490935661.446,"0.012397734365079505"],[1490935721.446,"0.012264678253967905"],[1490935781.446,"0.011701125396825458"],[1490935841.446,"0.011413869087301435"],[1490935901.446,"0.011355704404762157"],[1490935961.446,"0.01295611777777756"],[1490936021.446,"0.012283088253968812"],[1490936081.446,"0.011711742103174674"],[1490936141.4
46,"0.011066851150792879"],[1490936201.446,"0.011525933611111726"],[1490936261.446,"0.012260294246031015"],[1490936321.446,"0.011917795238095285"],[1490936381.446,"0.011402582301587626"],[1490936441.446,"0.012311798253968057"],[1490936501.446,"0.011604295476191046"],[1490936561.446,"0.012329014206349137"],[1490936621.446,"0.011401263769840977"],[1490936681.446,"0.012310593492063392"],[1490936741.446,"0.01244334305555575"],[1490936801.446,"0.01176146669320973"],[1490936861.446,"0.011186474629011792"],[1490936921.446,"0.013234800079365536"],[1490936981.446,"0.01217435722222217"],[1490937041.446,"0.011211570753967583"],[1490937101.446,"0.012066252420634934"],[1490937161.446,"0.012175381944444839"],[1490937221.446,"0.011215347936507976"],[1490937281.446,"0.012909065515873003"],[1490937341.446,"0.011718783452381023"],[1490937401.446,"0.011740557499999828"],[1490937461.446,"0.012024899960317205"],[1490937521.446,"0.011518551626984471"],[1490937581.446,"0.013295429607829826"],[1490937641.446,"0.013578758822130006"],[1490937701.446,"0.01170811908668783"],[1490937761.446,"0.011867610238095478"],[1490937821.446,"0.012601599007937034"],[1490937881.446,"0.011028959285714405"],[1490937941.446,"0.011972864523808899"],[1490938001.446,"0.012236090515873134"],[1490938061.446,"0.012468855793650629"],[1490938121.446,"0.012324049999999686"],[1490938181.446,"0.012271810317460288"],[1490938241.446,"0.013109732103174912"],[1490938301.446,"0.01201708535714284"],[1490938361.446,"0.01198280035714318"],[1490938421.446,"0.011631491547618469"],[1490938481.446,"0.012698120317460778"],[1490938541.446,"0.011908042499999686"],[1490938601.446,"0.012941332460317123"],[1490938661.446,"0.012009558055555753"],[1490938721.446,"0.011749238293651211"],[1490938781.446,"0.012597720873015857"],[1490938841.446,"0.012128174365079517"],[1490938901.446,"0.013411003452380428"],[1490938961.446,"0.012712377896825132"],[1490939021.446,"0.0126730261111118"],[1490939081.446,"0.012196438134920173"],[1490939141.446,"0.011617917341270696"],[1490939201.446,"0.012271590992062863"],[1490939261.446,"0.01196238253968261"],[1490939321.446,"0.012446522619048245"],[1490939381.446,"0.013146698134919643"],[1490939441.446,"0.013160663611111774"],[1490939501.446,"0.012921960039682278"],[1490939561.446,"0.012100972380952405"],[1490939621.446,"0.01235039095238153"],[1490939681.446,"0.013303590992062684"],[1490939741.446,"0.012064513055556225"],[1490939801.446,"0.011846763531745252"],[1490939861.446,"0.012280224007936782"],[1490939921.446,"0.012305159166666833"],[1490939981.446,"0.012107076111110887"],[1490940041.446,"0.013109447341269884"],[1490940101.446,"0.011668830198412932"],[1490940161.446,"0.011757771468254286"],[1490940221.446,"0.013607426447330252"],[1490940281.446,"0.012069082212503184"],[1490940341.446,"0.012702448174603309"],[1490940401.446,"0.012915864642857006"],[1490940461.446,"0.012882558941478554"],[1490940521.446,"0.01180430288917485"],[1490940581.446,"0.012561457142856586"],[1490940641.446,"0.013117287261905215"],[1490940701.446,"0.0119707260317455"],[1490940761.446,"0.012110876587301957"],[1490940821.446,"0.012900523174603096"],[1490940881.446,"0.012405300317460836"],[1490940941.446,"0.013397718690476127"],[1490941001.446,"0.011853019404761512"],[1490941061.446,"0.011410178968254279"],[1490941121.446,"0.01385021210317412"],[1490941181.446,"0.012158262658730703"],[1490941241.446,"0.012590782142857021"],[1490941301.446,"0.011902994444444289"],[1490941361.446,"0.012597971468253468"],[1490941421.446,"0.013460530436508394"],[1490941481.446,"0.012
871132936507318"],[1490941541.446,"0.012321937023810644"],[1490941601.446,"0.012861435992063004"],[1490941661.446,"0.011904687658730493"],[1490941721.446,"0.013068603849206292"],[1490941781.446,"0.011558027420635053"],[1490941841.446,"0.011785108134920095"],[1490941901.446,"0.013018491984126938"],[1490941961.446,"0.012803318611111494"],[1490942021.446,"0.011276595873015969"],[1490942081.446,"0.012407365753968128"],[1490942141.446,"0.01261537746031769"],[1490942201.446,"0.011981626507936492"],[1490942261.446,"0.011779192579364465"],[1490942321.446,"0.012944439365080001"],[1490942381.446,"0.012563845515873258"],[1490942441.446,"0.012490993809523204"],[1490942501.446,"0.011721826547619399"],[1490942561.446,"0.012376904523809195"],[1490942621.446,"0.012627997539682608"],[1490942681.446,"0.012353236984126971"],[1490942741.446,"0.012143749162511788"],[1490942801.446,"0.01210106380777602"],[1490942861.446,"0.01323092650793727"],[1490942921.446,"0.01217811805555557"],[1490942981.446,"0.011703709655399819"],[1490943041.446,"0.01140056596399108"],[1490943101.446,"0.011589462460317477"],[1490943161.446,"0.011424534784915178"],[1490943221.446,"0.011720420858480131"],[1490943281.446,"0.011956359603174035"],[1490943341.446,"0.011627974444444375"],[1490943401.446,"0.012056417142857899"],[1490943461.446,"0.012875421865079256"],[1490943521.446,"0.011447757222222438"],[1490943581.446,"0.011686728412698438"],[1490943641.446,"0.012264428214285543"],[1490943701.446,"0.011396086150793258"],[1490943761.446,"0.012637377857143453"],[1490943821.446,"0.012229487817460189"],[1490943881.446,"0.012519327516820155"],[1490943941.446,"0.011632154440677021"],[1490944001.446,"0.0127011905614214"],[1490944061.446,"0.012041664776432408"],[1490944121.446,"0.011550796183789442"],[1490944181.446,"0.012340807579364546"],[1490944241.446,"0.012514561706348858"],[1490944301.446,"0.011591095515873378"],[1490944361.446,"0.011562522896825472"],[1490944421.446,"0.012653687499999684"],[1490944481.446,"0.012597878095237767"],[1490944541.446,"0.011373836746032411"],[1490944601.446,"0.011489111309523512"],[1490944661.446,"0.012365606547618906"],[1490944721.446,"0.011246835793650788"],[1490944781.446,"0.011556645833333596"],[1490944841.446,"0.0114839880952384"],[1490944901.446,"0.011559932103174322"],[1490944961.446,"0.011456621547618827"],[1490945021.446,"0.011137903531746323"],[1490945081.446,"0.011371503134920238"],[1490945141.446,"0.01262392527777806"],[1490945201.446,"0.011231213571428417"],[1490945261.446,"0.011834045595238011"],[1490945321.446,"0.011222574087301793"],[1490945381.446,"0.01139294579365124"],[1490945441.446,"0.011876671865079205"],[1490945501.446,"0.012003088888888104"],[1490945561.446,"0.011232171746032069"],[1490945621.446,"0.01189458067460394"],[1490945681.446,"0.011593709801586787"],[1490945741.446,"0.01179023611111146"],[1490945801.446,"0.012056340952381187"],[1490945861.446,"0.011755026706348978"],[1490945921.446,"0.011906753412698057"],[1490945981.446,"0.011362850850868408"],[1490946041.446,"0.011567284784873766"],[1490946101.446,"0.01159940924603172"],[1490946161.446,"0.01169248444646143"],[1490946221.446,"0.011294826570231075"],[1490946281.446,"0.011797972936507535"],[1490946341.446,"0.011732454126984091"],[1490946401.446,"0.011992103412699077"],[1490946461.446,"0.011787900634920185"],[1490946521.446,"0.01170581265873045"],[1490946581.446,"0.011391009603175007"],[1490946641.446,"0.01205839841269773"],[1490946701.446,"0.01188169805555573"],[1490946761.446,"0.011459351746031153"],[1490946821.446,"0.0120892510714292
55"],[1490946881.446,"0.011159798611111122"],[1490946941.446,"0.012261993650793439"],[1490947001.446,"0.011150941865079526"],[1490947061.446,"0.011784560238095428"],[1490947121.446,"0.01146369333333352"],[1490947181.446,"0.011946112341269969"],[1490947241.446,"0.012244168452380742"],[1490947301.446,"0.01108276087301507"],[1490947361.446,"0.011391418571428976"],[1490947421.446,"0.012042411525379642"],[1490947481.446,"0.012082919141039653"],[1490947541.446,"0.011615924682540189"],[1490947601.446,"0.01218819496031727"],[1490947661.446,"0.011292488293650517"],[1490947721.446,"0.011232974365079479"],[1490947781.446,"0.011638264880952223"],[1490947841.446,"0.0115353722619047"],[1490947901.446,"0.011426710952381045"],[1490947961.446,"0.0121381246428574"],[1490948021.446,"0.011812514087301832"],[1490948081.446,"0.012050580317459442"],[1490948141.446,"0.011855329166666742"],[1490948201.446,"0.011649919960317898"],[1490948261.446,"0.01163187396825391"],[1490948321.446,"0.011266725634920935"],[1490948381.446,"0.011934722460317146"],[1490948441.446,"0.011368148333333088"],[1490948501.446,"0.011662377698413048"],[1490948561.446,"0.011039417341269188"],[1490948621.446,"0.012176113174603589"],[1490948681.446,"0.011265313531746158"],[1490948741.446,"0.01158711781746033"],[1490948801.446,"0.011557390912698215"],[1490948861.446,"0.012131684804188454"],[1490948921.446,"0.011474324082027133"],[1490948981.446,"0.011376334484127639"],[1490949041.446,"0.011627233571428175"],[1490949101.446,"0.012499916785714077"],[1490949161.446,"0.011920621706348947"],[1490949221.446,"0.011574053410790661"],[1490949281.446,"0.011837460242165967"],[1490949341.446,"0.011227153174603937"],[1490949401.446,"0.011635896944444115"],[1490949461.446,"0.011701339047618983"],[1490949521.446,"0.011847283650793895"],[1490949581.446,"0.0116057894841271"],[1490949641.446,"0.011789695753968094"],[1490949701.446,"0.011279284841269992"],[1490949761.446,"0.011470807460317041"],[1490949821.446,"0.012172255515873568"],[1490949881.446,"0.011721892103174175"],[1490949941.446,"0.010727560317460336"],[1490950001.446,"0.011509186269841303"],[1490950061.446,"0.01188623087301566"],[1490950121.446,"0.011476948452380968"],[1490950181.446,"0.01211593166666722"],[1490950241.446,"0.011757469444444444"],[1490950301.446,"0.011519936865079109"],[1490950361.446,"0.01165834781746044"],[1490950421.446,"0.010831068928571068"],[1490950481.446,"0.011977692023809912"],[1490950541.446,"0.011828264880952136"],[1490950601.446,"0.01191921916666625"],[1490950661.446,"0.011901336547619379"],[1490950721.446,"0.011776620238095158"],[1490950781.446,"0.011911536031746153"],[1490950841.446,"0.011467936309523809"],[1490950901.446,"0.012163667023809579"],[1490950961.446,"0.0116551746825399"],[1490951021.446,"0.011799408095237739"],[1490951081.446,"0.011845631309524084"],[1490951141.446,"0.011289116626983809"],[1490951201.446,"0.012258327777777984"],[1490951261.446,"0.012265819682539036"],[1490951321.446,"0.011346034166667811"],[1490951381.446,"0.011996446111110597"],[1490951441.446,"0.011511485714285046"],[1490951501.446,"0.011980616349206635"],[1490951561.446,"0.011565376031746316"],[1490951621.446,"0.010918043373016443"],[1490951681.446,"0.011479107380951632"],[1490951741.446,"0.012467024051997748"],[1490951801.446,"0.01235313125400671"],[1490951861.446,"0.012167793061507889"],[1490951921.446,"0.01249734373015914"],[1490951981.446,"0.011414617499999877"],[1490952041.446,"0.012559693849205949"],[1490952101.446,"0.012135384801587835"],[1490952161.446,"0.01195310698412663"],[149095222
1.446,"0.011996730515873409"],[1490952281.446,"0.012245181626984071"],[1490952341.446,"0.01172794166666644"],[1490952401.446,"0.012153839325397124"],[1490952461.446,"0.01287662682539674"],[1490952521.446,"0.011412833611110576"],[1490952581.446,"0.0115385753968256"],[1490952641.446,"0.011953797142857927"],[1490952701.446,"0.012210606230158325"],[1490952761.446,"0.012193429836568915"],[1490952821.446,"0.01175164000191546"],[1490952881.446,"0.011686968928571266"],[1490952941.446,"0.01204885615079335"],[1490953001.446,"0.010858237182540066"],[1490953061.446,"0.012570554523809901"],[1490953121.446,"0.011606933412697877"],[1490953181.446,"0.011895175039682713"],[1490953241.446,"0.011877423888888992"],[1490953301.446,"0.01134354857142876"],[1490953361.446,"0.011999752857142089"],[1490953421.446,"0.011927079960317739"],[1490953481.446,"0.01172722273809559"],[1490953541.446,"0.0114388174999997"],[1490953601.446,"0.012584772738095138"],[1490953661.446,"0.011858990837323214"],[1490953721.446,"0.011489406427467985"],[1490953781.446,"0.011673106071428765"],[1490953841.446,"0.012389803452380168"],[1490953901.446,"0.010877735714285755"],[1490953961.446,"0.012098601984127518"],[1490954021.446,"0.011876002539682478"],[1490954081.446,"0.0119792138492057"],[1490954141.446,"0.01116768142857198"],[1490954201.446,"0.011819058452381173"],[1490954261.446,"0.011543723055555002"],[1490954321.446,"0.011877097777778114"],[1490954381.446,"0.011255818690476465"],[1490954441.446,"0.011544411269840424"],[1490954501.446,"0.011844739246031948"],[1490954561.446,"0.012498686626984624"],[1490954621.446,"0.011012790753967753"],[1490954681.446,"0.011763483769841236"],[1490954741.446,"0.011742064880952764"],[1490954801.446,"0.011329697023809454"],[1490954861.446,"0.011616721150793869"],[1490954921.446,"0.011935843650793056"],[1490954981.446,"0.012041806150794254"],[1490955041.446,"0.011776362817460298"],[1490955101.446,"0.011507964920634838"],[1490955161.446,"0.012249892380951723"],[1490955221.446,"0.011680689451964254"],[1490955281.446,"0.011966289381797203"],[1490955341.446,"0.011113054447726804"],[1490955401.446,"0.012155607703748966"],[1490955461.446,"0.011851554722222412"],[1490955521.446,"0.011899298531746077"],[1490955581.446,"0.01202313674603201"],[1490955641.446,"0.011739823253968055"],[1490955701.446,"0.011866135595237215"],[1490955761.446,"0.012171682563083994"],[1490955821.446,"0.01125473955952014"],[1490955881.446,"0.011791852817460289"],[1490955941.446,"0.011389896547619342"],[1490956001.446,"0.011801524404761971"],[1490956061.446,"0.011788201388888577"],[1490956121.446,"0.011472721388889214"],[1490956181.446,"0.012352298174603236"],[1490956241.446,"0.011831984404761721"],[1490956301.446,"0.0114478640476188"],[1490956361.446,"0.012315896944444986"],[1490956421.446,"0.01184387992063444"],[1490956481.446,"0.0108170579365078"],[1490956541.446,"0.012441825119047971"],[1490956601.446,"0.011650502579365023"],[1490956661.446,"0.011244622936507553"],[1490956721.446,"0.01138462460317496"],[1490956781.446,"0.012361013348424437"],[1490956841.446,"0.011687763677888905"],[1490956901.446,"0.011387440952381297"],[1490956961.446,"0.012246620039682158"],[1490957021.446,"0.010769535198412467"],[1490957081.446,"0.012311013690477024"],[1490957141.446,"0.011455958968253554"],[1490957201.446,"0.012126715198413286"],[1490957261.446,"0.011078292499999627"],[1490957321.446,"0.012041933253967746"],[1490957381.446,"0.01147051317460329"],[1490957441.446,"0.01173451460317538"],[1490957501.446,"0.011660740317459825"],[1490957561.446,"0.011851131
269840753"],[1490957621.446,"0.012117949444444812"],[1490957681.446,"0.011214277301587397"],[1490957741.446,"0.011935565277777841"],[1490957801.446,"0.011180848809523986"],[1490957861.446,"0.011540955039682404"],[1490957921.446,"0.011678924523809829"],[1490957981.446,"0.01175049698412655"],[1490958041.446,"0.01179233821428546"],[1490958101.446,"0.011217207341269743"],[1490958161.446,"0.011623496111110998"],[1490958221.446,"0.011751017182540137"],[1490958281.446,"0.011548055515872839"],[1490958341.446,"0.01157145297619062"],[1490958401.446,"0.011809365079364814"],[1490958461.446,"0.011367088134920926"],[1490958521.446,"0.011220626785714515"],[1490958581.446,"0.012502413531745657"],[1490958641.446,"0.011674712222222085"],[1490958701.446,"0.010840117777778147"],[1490958761.446,"0.01169669242063464"],[1490958821.446,"0.01206404448412709"],[1490958881.446,"0.011476003253967956"],[1490958941.446,"0.011927363650794281"],[1490959001.446,"0.011834540039682623"],[1490959061.446,"0.011952310396106811"],[1490959121.446,"0.011641002569963536"],[1490959181.446,"0.011215335912698408"],[1490959241.446,"0.011801235515873079"],[1490959301.446,"0.012109150079365269"],[1490959361.446,"0.011696530238095701"],[1490959421.446,"0.01188721699431308"],[1490959481.446,"0.011013023946272025"],[1490959541.446,"0.011927455988174854"],[1490959601.446,"0.011773952156046168"],[1490959661.446,"0.011311449525742057"],[1490959721.446,"0.011926485873016056"],[1490959781.446,"0.012208613174603443"],[1490959841.446,"0.011077256706349554"],[1490959901.446,"0.012141572896825473"],[1490959961.446,"0.011884196547619123"],[1490960021.446,"0.01182910611111061"],[1490960081.446,"0.011089906190476237"],[1490960141.446,"0.011485851349206303"],[1490960201.446,"0.011621675079365073"],[1490960261.446,"0.011420984246031282"],[1490960321.446,"0.011702707664224543"],[1490960381.446,"0.011122996101531552"],[1490960441.446,"0.011923133293650747"],[1490960501.446,"0.012209551587301823"],[1490960561.446,"0.011541768293650705"],[1490960621.446,"0.01133343007936486"],[1490960681.446,"0.011718844880952742"],[1490960741.446,"0.01170618126984048"],[1490960801.446,"0.01158023575396868"],[1490960861.446,"0.012154581865079351"],[1490960921.446,"0.011287024246031918"],[1490960981.446,"0.012035483412697787"],[1490961041.446,"0.01206407186508005"],[1490961101.446,"0.011742228333332922"],[1490961161.446,"0.011460450952381294"],[1490961221.446,"0.011752177539682223"],[1490961281.446,"0.012416623373015778"],[1490961341.446,"0.01134374146825419"],[1490961401.446,"0.011742214642857577"],[1490961461.446,"0.01157076337301528"],[1490961521.446,"0.011251291190475883"],[1490961581.446,"0.010835279404761772"],[1490961641.446,"0.012082314722223412"],[1490961701.446,"0.011244282817460054"],[1490961761.446,"0.012600352738094536"],[1490961821.446,"0.011595374841270692"],[1490961881.446,"0.012047435158729298"],[1490961941.446,"0.012117879285714984"],[1490962001.446,"0.011105805912698236"],[1490962061.446,"0.011228379365079935"],[1490962121.446,"0.012051188888888457"],[1490962181.446,"0.011811605198411965"],[1490962241.446,"0.011438638690477312"],[1490962301.446,"0.011535638928571016"],[1490962361.446,"0.011846252277212543"],[1490962421.446,"0.011137096779830425"],[1490962481.446,"0.011301488807399701"],[1490962541.446,"0.011706436349206364"],[1490962601.446,"0.011607870952381014"],[1490962661.446,"0.01165941666666676"],[1490962721.446,"0.011457761706349363"],[1490962781.446,"0.012004376428571304"],[1490962841.446,"0.012380191230158676"],[1490962901.446,"0.011650816111111262
"],[1490962961.446,"0.011339834484126858"],[1490963021.446,"0.011815001031746352"],[1490963081.446,"0.01215702742063424"],[1490963141.446,"0.011112767612387399"],[1490963201.446,"0.011991515394890143"],[1490963261.446,"0.011573327579365182"],[1490963321.446,"0.011559778809523533"],[1490963381.446,"0.012400119444444207"],[1490963441.446,"0.011127036507936056"],[1490963501.446,"0.012095518055556944"],[1490963561.446,"0.011203742460316668"],[1490963621.446,"0.012493672738095584"],[1490963681.446,"0.012086427023809085"],[1490963741.446,"0.01073350408730215"],[1490963801.446,"0.011784052619047683"],[1490963861.446,"0.011817165277777068"],[1490963921.446,"0.01162805619047661"],[1490963981.446,"0.01141054027777739"],[1490964041.446,"0.012398790952381392"],[1490964101.446,"0.011081906428571691"],[1490964161.446,"0.012049610714285322"],[1490964221.446,"0.011764468492063805"]]}],"cpu_current":[{"metric":{},"value":[1490964221.765,"0.011764468492063801"]}]},"last_update":"2017-03-31T12:43:41.618Z"} diff --git a/spec/fixtures/trace/ansi-sequence-and-unicode b/spec/fixtures/trace/ansi-sequence-and-unicode new file mode 100644 index 00000000000..5d2466f0d0f --- /dev/null +++ b/spec/fixtures/trace/ansi-sequence-and-unicode @@ -0,0 +1,5 @@ +[0m[01;34m.[0m +[30;42m..[0m +😺 +ヾ(´༎ຶД༎ຶ`)ノ +[01;32m許功蓋[0m diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb index 5c07ea8a872..785fb724132 100644 --- a/spec/helpers/application_helper_spec.rb +++ b/spec/helpers/application_helper_spec.rb @@ -3,6 +3,8 @@ require 'spec_helper' describe ApplicationHelper do include UploadHelpers + let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" } + describe 'current_controller?' do it 'returns true when controller matches argument' do stub_controller_name('foo') @@ -56,8 +58,14 @@ describe ApplicationHelper do describe 'project_icon' do it 'returns an url for the avatar' do project = create(:empty_project, avatar: File.open(uploaded_image_temp_path)) + avatar_url = "/uploads/project/avatar/#{project.id}/banana_sample.gif" + + expect(helper.project_icon(project.full_path).to_s). + to eq "<img src=\"#{avatar_url}\" alt=\"Banana sample\" />" + + allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host) + avatar_url = "#{gitlab_host}/uploads/project/avatar/#{project.id}/banana_sample.gif" - avatar_url = "http://#{Gitlab.config.gitlab.host}/uploads/project/avatar/#{project.id}/banana_sample.gif" expect(helper.project_icon(project.full_path).to_s). to eq "<img src=\"#{avatar_url}\" alt=\"Banana sample\" />" end @@ -67,9 +75,8 @@ describe ApplicationHelper do allow_any_instance_of(Project).to receive(:avatar_in_git).and_return(true) - avatar_url = "http://#{Gitlab.config.gitlab.host}#{namespace_project_avatar_path(project.namespace, project)}" - expect(helper.project_icon(project.full_path).to_s).to match( - image_tag(avatar_url)) + avatar_url = "#{gitlab_host}#{namespace_project_avatar_path(project.namespace, project)}" + expect(helper.project_icon(project.full_path).to_s).to match(image_tag(avatar_url)) end end @@ -77,8 +84,14 @@ describe ApplicationHelper do it 'returns an url for the avatar' do user = create(:user, avatar: File.open(uploaded_image_temp_path)) - expect(helper.avatar_icon(user.email).to_s). 
- to match("/uploads/user/avatar/#{user.id}/banana_sample.gif") + avatar_url = "/uploads/user/avatar/#{user.id}/banana_sample.gif" + + expect(helper.avatar_icon(user.email).to_s).to match(avatar_url) + + allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host) + avatar_url = "#{gitlab_host}/uploads/user/avatar/#{user.id}/banana_sample.gif" + + expect(helper.avatar_icon(user.email).to_s).to match(avatar_url) end it 'returns an url for the avatar with relative url' do @@ -239,33 +252,6 @@ describe ApplicationHelper do end end - describe 'render_markup' do - let(:content) { 'Noël' } - let(:user) { create(:user) } - before do - allow(helper).to receive(:current_user).and_return(user) - end - - it 'preserves encoding' do - expect(content.encoding.name).to eq('UTF-8') - expect(helper.render_markup('foo.rst', content).encoding.name).to eq('UTF-8') - end - - it "delegates to #markdown when file name corresponds to Markdown" do - expect(helper).to receive(:gitlab_markdown?).with('foo.md').and_return(true) - expect(helper).to receive(:markdown).and_return('NOEL') - - expect(helper.render_markup('foo.md', content)).to eq('NOEL') - end - - it "delegates to #asciidoc when file name corresponds to AsciiDoc" do - expect(helper).to receive(:asciidoc?).with('foo.adoc').and_return(true) - expect(helper).to receive(:asciidoc).and_return('NOEL') - - expect(helper.render_markup('foo.adoc', content)).to eq('NOEL') - end - end - describe '#active_when' do it { expect(helper.active_when(true)).to eq('active') } it { expect(helper.active_when(false)).to eq(nil) } diff --git a/spec/helpers/auth_helper_spec.rb b/spec/helpers/auth_helper_spec.rb index cd3281d6f51..a0e1265efff 100644 --- a/spec/helpers/auth_helper_spec.rb +++ b/spec/helpers/auth_helper_spec.rb @@ -62,4 +62,18 @@ describe AuthHelper do end end end + + describe 'unlink_allowed?' 
do + [:saml, :cas3].each do |provider| + it "returns false if the provider is #{provider}" do + expect(helper.unlink_allowed?(provider)).to be false + end + end + + [:twitter, :facebook, :google_oauth2, :gitlab, :github, :bitbucket, :crowd, :auth0].each do |provider| + it "returns true if the provider is #{provider}" do + expect(helper.unlink_allowed?(provider)).to be true + end + end + end end diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb index 581726c1d0e..6157abfe339 100644 --- a/spec/helpers/avatars_helper_spec.rb +++ b/spec/helpers/avatars_helper_spec.rb @@ -15,7 +15,7 @@ describe AvatarsHelper do end it "contains the user's avatar image" do - is_expected.to include(CGI.escapeHTML(user.avatar_url(16))) + is_expected.to include(CGI.escapeHTML(user.avatar_url(size: 16))) end end end diff --git a/spec/helpers/award_emoji_helper_spec.rb b/spec/helpers/award_emoji_helper_spec.rb new file mode 100644 index 00000000000..7dfd6a3f6b4 --- /dev/null +++ b/spec/helpers/award_emoji_helper_spec.rb @@ -0,0 +1,61 @@ +require 'spec_helper' + +describe AwardEmojiHelper do + describe '.toggle_award_url' do + context 'note on personal snippet' do + let(:note) { create(:note_on_personal_snippet) } + + it 'returns correct url' do + expected_url = "/snippets/#{note.noteable.id}/notes/#{note.id}/toggle_award_emoji" + + expect(helper.toggle_award_url(note)).to eq(expected_url) + end + end + + context 'note on project item' do + let(:note) { create(:note_on_project_snippet) } + + it 'returns correct url' do + @project = note.noteable.project + + expected_url = "/#{@project.namespace.path}/#{@project.path}/notes/#{note.id}/toggle_award_emoji" + + expect(helper.toggle_award_url(note)).to eq(expected_url) + end + end + + context 'personal snippet' do + let(:snippet) { create(:personal_snippet) } + + it 'returns correct url' do + expected_url = "/snippets/#{snippet.id}/toggle_award_emoji" + + expect(helper.toggle_award_url(snippet)).to eq(expected_url) + end + end + + context 'merge request' do + let(:merge_request) { create(:merge_request) } + + it 'returns correct url' do + @project = merge_request.project + + expected_url = "/#{@project.namespace.path}/#{@project.path}/merge_requests/#{merge_request.id}/toggle_award_emoji" + + expect(helper.toggle_award_url(merge_request)).to eq(expected_url) + end + end + + context 'issue' do + let(:issue) { create(:issue) } + + it 'returns correct url' do + @project = issue.project + + expected_url = "/#{@project.namespace.path}/#{@project.path}/issues/#{issue.id}/toggle_award_emoji" + + expect(helper.toggle_award_url(issue)).to eq(expected_url) + end + end + end +end diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb index bead7948486..41b5df12522 100644 --- a/spec/helpers/blob_helper_spec.rb +++ b/spec/helpers/blob_helper_spec.rb @@ -56,15 +56,14 @@ describe BlobHelper do end end - describe "#sanitize_svg" do + describe "#sanitize_svg_data" do let(:input_svg_path) { File.join(Rails.root, 'spec', 'fixtures', 'unsanitized.svg') } let(:data) { open(input_svg_path).read } let(:expected_svg_path) { File.join(Rails.root, 'spec', 'fixtures', 'sanitized.svg') } let(:expected) { open(expected_svg_path).read } it 'retains essential elements' do - blob = OpenStruct.new(data: data) - expect(sanitize_svg(blob).data).to eq(expected) + expect(sanitize_svg_data(data)).to eq(expected) end end @@ -73,7 +72,7 @@ describe BlobHelper do let(:project) { create(:project, :repository, namespace: namespace) } before do - 
allow(self).to receive(:current_user).and_return(double) + allow(self).to receive(:current_user).and_return(nil) allow(self).to receive(:can_collaborate_with_project?).and_return(true) end @@ -105,4 +104,137 @@ describe BlobHelper do expect(Capybara.string(link).find_link('Edit')[:href]).to eq('/gitlab/gitlabhq/edit/master/README.md?mr_id=10') end end + + context 'viewer related' do + include FakeBlobHelpers + + let(:project) { build(:empty_project, lfs_enabled: true) } + + before do + allow(Gitlab.config.lfs).to receive(:enabled).and_return(true) + end + + let(:viewer_class) do + Class.new(BlobViewer::Base) do + include BlobViewer::ServerSide + + self.overridable_max_size = 1.megabyte + self.max_size = 5.megabytes + self.type = :rich + end + end + + let(:viewer) { viewer_class.new(blob) } + let(:blob) { fake_blob } + + describe '#blob_render_error_reason' do + context 'for error :too_large' do + context 'when the blob size is larger than the absolute max size' do + let(:blob) { fake_blob(size: 10.megabytes) } + + it 'returns an error message' do + expect(helper.blob_render_error_reason(viewer)).to eq('it is larger than 5 MB') + end + end + + context 'when the blob size is larger than the max size' do + let(:blob) { fake_blob(size: 2.megabytes) } + + it 'returns an error message' do + expect(helper.blob_render_error_reason(viewer)).to eq('it is larger than 1 MB') + end + end + end + + context 'for error :server_side_but_stored_externally' do + let(:blob) { fake_blob(lfs: true) } + + it 'returns an error message' do + expect(helper.blob_render_error_reason(viewer)).to eq('it is stored in LFS') + end + end + end + + describe '#blob_render_error_options' do + before do + assign(:project, project) + assign(:blob, blob) + assign(:id, File.join('master', blob.path)) + + controller.params[:controller] = 'projects/blob' + controller.params[:action] = 'show' + controller.params[:namespace_id] = project.namespace.to_param + controller.params[:project_id] = project.to_param + controller.params[:id] = File.join('master', blob.path) + end + + context 'for error :too_large' do + context 'when the max size can be overridden' do + let(:blob) { fake_blob(size: 2.megabytes) } + + it 'includes a "load it anyway" link' do + expect(helper.blob_render_error_options(viewer)).to include(/load it anyway/) + end + end + + context 'when the max size cannot be overridden' do + let(:blob) { fake_blob(size: 10.megabytes) } + + it 'does not include a "load it anyway" link' do + expect(helper.blob_render_error_options(viewer)).not_to include(/load it anyway/) + end + end + + context 'when the viewer is rich' do + context 'the blob is rendered as text' do + let(:blob) { fake_blob(path: 'file.md', size: 2.megabytes) } + + it 'includes a "view the source" link' do + expect(helper.blob_render_error_options(viewer)).to include(/view the source/) + end + end + + context 'the blob is not rendered as text' do + let(:blob) { fake_blob(path: 'file.pdf', binary: true, size: 2.megabytes) } + + it 'does not include a "view the source" link' do + expect(helper.blob_render_error_options(viewer)).not_to include(/view the source/) + end + end + end + + context 'when the viewer is not rich' do + before do + viewer_class.type = :simple + end + + let(:blob) { fake_blob(path: 'file.md', size: 2.megabytes) } + + it 'does not include a "view the source" link' do + expect(helper.blob_render_error_options(viewer)).not_to include(/view the source/) + end + end + + it 'includes a "download it" link' do + 
expect(helper.blob_render_error_options(viewer)).to include(/download it/) + end + end + + context 'for error :server_side_but_stored_externally' do + let(:blob) { fake_blob(path: 'file.md', lfs: true) } + + it 'does not include a "load it anyway" link' do + expect(helper.blob_render_error_options(viewer)).not_to include(/load it anyway/) + end + + it 'does not include a "view the source" link' do + expect(helper.blob_render_error_options(viewer)).not_to include(/view the source/) + end + + it 'includes a "download it" link' do + expect(helper.blob_render_error_options(viewer)).to include(/download it/) + end + end + end + end end diff --git a/spec/helpers/ci_status_helper_spec.rb b/spec/helpers/ci_status_helper_spec.rb index 174cc84a97b..e6bb953e9d8 100644 --- a/spec/helpers/ci_status_helper_spec.rb +++ b/spec/helpers/ci_status_helper_spec.rb @@ -6,20 +6,54 @@ describe CiStatusHelper do let(:success_commit) { double("Ci::Pipeline", status: 'success') } let(:failed_commit) { double("Ci::Pipeline", status: 'failed') } - describe 'ci_icon_for_status' do + describe '#ci_icon_for_status' do it 'renders to correct svg on success' do - expect(helper).to receive(:render).with('shared/icons/icon_status_success.svg', anything) + expect(helper).to receive(:render) + .with('shared/icons/icon_status_success.svg', anything) + helper.ci_icon_for_status(success_commit.status) end + it 'renders the correct svg on failure' do - expect(helper).to receive(:render).with('shared/icons/icon_status_failed.svg', anything) + expect(helper).to receive(:render) + .with('shared/icons/icon_status_failed.svg', anything) + helper.ci_icon_for_status(failed_commit.status) end end + describe '#ci_text_for_status' do + context 'when status is manual' do + it 'changes the status to blocked' do + expect(helper.ci_text_for_status('manual')) + .to eq 'blocked' + end + end + + context 'when status is success' do + it 'changes the status to passed' do + expect(helper.ci_text_for_status('success')) + .to eq 'passed' + end + end + + context 'when status is something else' do + it 'returns status unchanged' do + expect(helper.ci_text_for_status('some-status')) + .to eq 'some-status' + end + end + end + describe "#pipeline_status_cache_key" do it "builds a cache key for pipeline status" do - pipeline_status = Ci::PipelineStatus.new(build(:project), sha: "123abc", status: "success") + pipeline_status = Gitlab::Cache::Ci::ProjectPipelineStatus.new( + build(:project), + pipeline_info: { + sha: "123abc", + status: "success" + } + ) expect(helper.pipeline_status_cache_key(pipeline_status)).to eq("pipeline-status/123abc-success") end end diff --git a/spec/helpers/diff_helper_spec.rb b/spec/helpers/diff_helper_spec.rb index eae097126ce..dd6566d25bb 100644 --- a/spec/helpers/diff_helper_spec.rb +++ b/spec/helpers/diff_helper_spec.rb @@ -122,9 +122,9 @@ describe DiffHelper do it "returns strings with marked inline diffs" do marked_old_line, marked_new_line = mark_inline_diffs(old_line, new_line) - expect(marked_old_line).to eq("abc <span class='idiff left right deletion'>'def'</span>") + expect(marked_old_line).to eq(%q{abc <span class="idiff left right deletion">'def'</span>}) expect(marked_old_line).to be_html_safe - expect(marked_new_line).to eq("abc <span class='idiff left right addition'>"def"</span>") + expect(marked_new_line).to eq(%q{abc <span class="idiff left right addition">"def"</span>}) expect(marked_new_line).to be_html_safe end end diff --git a/spec/helpers/events_helper_spec.rb b/spec/helpers/events_helper_spec.rb index 
70443d27f33..c3bd0cb3542 100644 --- a/spec/helpers/events_helper_spec.rb +++ b/spec/helpers/events_helper_spec.rb @@ -2,8 +2,10 @@ require 'spec_helper' describe EventsHelper do describe '#event_note' do + let(:user) { build(:user) } + before do - allow(helper).to receive(:current_user).and_return(double) + allow(helper).to receive(:current_user).and_return(user) end it 'displays one line of plain text without alteration' do @@ -54,17 +56,32 @@ describe EventsHelper do it 'preserves code color scheme' do input = "```ruby\ndef test\n 'hello world'\nend\n```" - expected = '<pre class="code highlight js-syntax-highlight ruby">' \ + expected = "\n<pre class=\"code highlight js-syntax-highlight ruby\">" \ "<code><span class=\"line\"><span class=\"k\">def</span> <span class=\"nf\">test</span>...</span>\n" \ "</code></pre>" expect(helper.event_note(input)).to eq(expected) end - it 'preserves style attribute within a tag' do - input = '<span class="" style="background-color: #44ad8e; color: #FFFFFF;"></span>' - expected = '<p><span style="background-color: #44ad8e; color: #FFFFFF;"></span></p>' + context 'labels formatting' do + let(:input) { 'this should be ~label_1' } - expect(helper.event_note(input)).to eq(expected) + def format_event_note(project) + create(:label, title: 'label_1', project: project) + + helper.event_note(input, { project: project }) + end + + it 'preserves style attribute for a label that can be accessed by current_user' do + project = create(:empty_project, :public) + + expect(format_event_note(project)).to match(/span class=.*style=.*/) + end + + it 'does not style a label that can not be accessed by current_user' do + project = create(:empty_project, :private) + + expect(format_event_note(project)).to eq("<p>#{input}</p>") + end end end diff --git a/spec/helpers/icons_helper_spec.rb b/spec/helpers/icons_helper_spec.rb index c052981fe73..91c8faea7fd 100644 --- a/spec/helpers/icons_helper_spec.rb +++ b/spec/helpers/icons_helper_spec.rb @@ -1,6 +1,21 @@ require 'spec_helper' describe IconsHelper do + describe 'icon' do + it 'returns aria-hidden by default' do + star = icon('star') + + expect(star['aria-hidden']).to eq 'aria-hidden' + end + + it 'does not return aria-hidden if aria-label is set' do + up = icon('up', 'aria-label' => 'up') + + expect(up['aria-hidden']).to be_nil + expect(up['aria-label']).to eq 'aria-label' + end + end + describe 'file_type_icon_class' do it 'returns folder class' do expect(file_type_icon_class('folder', 0, 'folder_name')).to eq 'folder' diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb index 93bb711f29a..c1ecb46aece 100644 --- a/spec/helpers/issuables_helper_spec.rb +++ b/spec/helpers/issuables_helper_spec.rb @@ -4,6 +4,23 @@ describe IssuablesHelper do let(:label) { build_stubbed(:label) } let(:label2) { build_stubbed(:label) } + describe '#users_dropdown_label' do + let(:user) { build_stubbed(:user) } + let(:user2) { build_stubbed(:user) } + + it 'returns unassigned' do + expect(users_dropdown_label([])).to eq('Unassigned') + end + + it 'returns selected user\'s name' do + expect(users_dropdown_label([user])).to eq(user.name) + end + + it 'returns selected user\'s name and counter' do + expect(users_dropdown_label([user, user2])).to eq("#{user.name} + 1 more") + end + end + describe '#issuable_labels_tooltip' do it 'returns label text' do expect(issuable_labels_tooltip([label])).to eq(label.title) diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb index 
f0554cc068d..540cb0ab1e0 100644 --- a/spec/helpers/issues_helper_spec.rb +++ b/spec/helpers/issues_helper_spec.rb @@ -150,7 +150,7 @@ describe IssuesHelper do describe "when passing a discussion" do let(:diff_note) { create(:diff_note_on_merge_request) } let(:merge_request) { diff_note.noteable } - let(:discussion) { Discussion.new([diff_note]) } + let(:discussion) { diff_note.to_discussion } it "links to the merge request with first note if a single discussion was passed" do expected_path = Gitlab::UrlBuilder.build(diff_note) diff --git a/spec/helpers/gitlab_markdown_helper_spec.rb b/spec/helpers/markup_helper_spec.rb index 6cf3f86680a..2a0de0b0656 100644 --- a/spec/helpers/gitlab_markdown_helper_spec.rb +++ b/spec/helpers/markup_helper_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe GitlabMarkdownHelper do - include ApplicationHelper - +describe MarkupHelper do let!(:project) { create(:project, :repository) } let(:user) { create(:user, username: 'gfm') } @@ -111,9 +109,9 @@ describe GitlabMarkdownHelper do end it 'replaces commit message with emoji to link' do - actual = link_to_gfm(':book:Book', '/foo') + actual = link_to_gfm(':book: Book', '/foo') expect(actual). - to eq '<gl-emoji data-name="book" data-unicode-version="6.0">📖</gl-emoji><a href="/foo">Book</a>' + to eq '<gl-emoji title="open book" data-name="book" data-unicode-version="6.0">📖</gl-emoji><a href="/foo"> Book</a>' end end @@ -128,7 +126,7 @@ describe GitlabMarkdownHelper do it "uses Wiki pipeline for markdown files" do allow(@wiki).to receive(:format).and_return(:markdown) - expect(helper).to receive(:markdown).with('wiki content', pipeline: :wiki, project_wiki: @wiki, page_slug: "nested/page") + expect(helper).to receive(:markdown_unsafe).with('wiki content', pipeline: :wiki, project: project, project_wiki: @wiki, page_slug: "nested/page") helper.render_wiki_content(@wiki) end @@ -136,7 +134,7 @@ describe GitlabMarkdownHelper do it "uses Asciidoctor for asciidoc files" do allow(@wiki).to receive(:format).and_return(:asciidoc) - expect(helper).to receive(:asciidoc).with('wiki content') + expect(helper).to receive(:asciidoc_unsafe).with('wiki content') helper.render_wiki_content(@wiki) end @@ -151,6 +149,29 @@ describe GitlabMarkdownHelper do end end + describe 'markup' do + let(:content) { 'Noël' } + + it 'preserves encoding' do + expect(content.encoding.name).to eq('UTF-8') + expect(helper.markup('foo.rst', content).encoding.name).to eq('UTF-8') + end + + it "delegates to #markdown_unsafe when file name corresponds to Markdown" do + expect(helper).to receive(:gitlab_markdown?).with('foo.md').and_return(true) + expect(helper).to receive(:markdown_unsafe).and_return('NOEL') + + expect(helper.markup('foo.md', content)).to eq('NOEL') + end + + it "delegates to #asciidoc_unsafe when file name corresponds to AsciiDoc" do + expect(helper).to receive(:asciidoc?).with('foo.adoc').and_return(true) + expect(helper).to receive(:asciidoc_unsafe).and_return('NOEL') + + expect(helper.markup('foo.adoc', content)).to eq('NOEL') + end + end + describe '#first_line_in_markdown' do it 'truncates Markdown properly' do text = "@#{user.username}, can you look at this?\nHello world\n" diff --git a/spec/helpers/merge_requests_helper_spec.rb b/spec/helpers/merge_requests_helper_spec.rb index 25f23826648..f2c9d927388 100644 --- a/spec/helpers/merge_requests_helper_spec.rb +++ b/spec/helpers/merge_requests_helper_spec.rb @@ -21,28 +21,6 @@ describe MergeRequestsHelper do end end - describe '#issues_sentence' do - subject { 
issues_sentence(issues) } - let(:issues) do - [build(:issue, iid: 1), build(:issue, iid: 2), build(:issue, iid: 3)] - end - - it { is_expected.to eq('#1, #2, and #3') } - - context 'for JIRA issues' do - let(:project) { create(:empty_project) } - let(:issues) do - [ - ExternalIssue.new('JIRA-123', project), - ExternalIssue.new('JIRA-456', project), - ExternalIssue.new('FOOBAR-7890', project) - ] - end - - it { is_expected.to eq('FOOBAR-7890, JIRA-123, and JIRA-456') } - end - end - describe '#format_mr_branch_names' do describe 'within the same project' do let(:merge_request) { create(:merge_request) } @@ -62,103 +40,4 @@ describe MergeRequestsHelper do it { is_expected.to eq([source_title, target_title]) } end end - - describe '#mr_widget_refresh_url' do - let(:guest) { create(:user) } - let(:project) { create(:project, :public) } - let(:project_fork) { Projects::ForkService.new(project, guest).execute } - let(:merge_request) { create(:merge_request, source_project: project_fork, target_project: project) } - - it 'returns correct url for MR' do - expected_url = "#{project.path_with_namespace}/merge_requests/#{merge_request.iid}/merge_widget_refresh" - - expect(mr_widget_refresh_url(merge_request)).to end_with(expected_url) - end - - it 'returns empty string for nil' do - expect(mr_widget_refresh_url(nil)).to eq('') - end - end - - describe '#mr_closes_issues' do - let(:user_1) { create(:user) } - let(:user_2) { create(:user) } - - let(:project_1) { create(:project, :private, creator: user_1, namespace: user_1.namespace) } - let(:project_2) { create(:project, :private, creator: user_2, namespace: user_2.namespace) } - - let(:issue_1) { create(:issue, project: project_1) } - let(:issue_2) { create(:issue, project: project_2) } - - let(:merge_request) { create(:merge_request, source_project: project_1, target_project: project_1,) } - - let(:merge_request) do - create(:merge_request, - source_project: project_1, target_project: project_1, - description: "Fixes #{issue_1.to_reference} Fixes #{issue_2.to_reference(project_1)}") - end - - before do - project_1.team << [user_2, :developer] - project_2.team << [user_2, :developer] - allow(merge_request.project).to receive(:default_branch).and_return(merge_request.target_branch) - @merge_request = merge_request - end - - context 'user without access to another private project' do - let(:current_user) { user_1 } - - it 'cannot see that project\'s issue that will be closed on acceptance' do - expect(mr_closes_issues).to contain_exactly(issue_1) - end - end - - context 'user with access to another private project' do - let(:current_user) { user_2 } - - it 'can see that project\'s issue that will be closed on acceptance' do - expect(mr_closes_issues).to contain_exactly(issue_1, issue_2) - end - end - end - - describe '#mr_issues_mentioned_but_not_closing' do - let(:user_1) { create(:user) } - let(:user_2) { create(:user) } - - let(:project_1) { create(:project, :private, creator: user_1, namespace: user_1.namespace) } - let(:project_2) { create(:project, :private, creator: user_2, namespace: user_2.namespace) } - - let(:issue_1) { create(:issue, project: project_1) } - let(:issue_2) { create(:issue, project: project_2) } - - let(:merge_request) do - create(:merge_request, - source_project: project_1, target_project: project_1, - description: "#{issue_1.to_reference} #{issue_2.to_reference(project_1)}") - end - - before do - project_1.team << [user_2, :developer] - project_2.team << [user_2, :developer] - allow(merge_request.project).to 
receive(:default_branch).and_return(merge_request.target_branch) - @merge_request = merge_request - end - - context 'user without access to another private project' do - let(:current_user) { user_1 } - - it 'cannot see that project\'s issue that will be closed on acceptance' do - expect(mr_issues_mentioned_but_not_closing).to contain_exactly(issue_1) - end - end - - context 'user with access to another private project' do - let(:current_user) { user_2 } - - it 'can see that project\'s issue that will be closed on acceptance' do - expect(mr_issues_mentioned_but_not_closing).to contain_exactly(issue_1, issue_2) - end - end - end end diff --git a/spec/helpers/notes_helper_spec.rb b/spec/helpers/notes_helper_spec.rb index 9c577501f00..099146678ae 100644 --- a/spec/helpers/notes_helper_spec.rb +++ b/spec/helpers/notes_helper_spec.rb @@ -1,6 +1,8 @@ require "spec_helper" describe NotesHelper do + include RepoHelpers + let(:owner) { create(:owner) } let(:group) { create(:group) } let(:project) { create(:empty_project, namespace: group) } @@ -37,20 +39,215 @@ describe NotesHelper do end end - describe '#preload_max_access_for_authors' do - before do - # This method reads cache from RequestStore, so make sure it's clean. - RequestStore.clear! + describe '#discussion_path' do + let(:project) { create(:project) } + + context 'for a merge request discussion' do + let(:merge_request) { create(:merge_request, source_project: project, target_project: project, importing: true) } + let!(:merge_request_diff1) { merge_request.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') } + let!(:merge_request_diff2) { merge_request.merge_request_diffs.create(head_commit_sha: nil) } + let!(:merge_request_diff3) { merge_request.merge_request_diffs.create(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') } + + context 'for a diff discussion' do + context 'when the discussion is active' do + let(:discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion } + + it 'returns the diff path with the line code' do + expect(helper.discussion_path(discussion)).to eq(diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, anchor: discussion.line_code)) + end + end + + context 'when the discussion is on an older merge request version' do + let(:position) do + Gitlab::Diff::Position.new( + old_path: ".gitmodules", + new_path: ".gitmodules", + old_line: nil, + new_line: 4, + diff_refs: merge_request_diff1.diff_refs + ) + end + + let(:diff_note) { create(:diff_note_on_merge_request, noteable: merge_request, project: project, position: position) } + let(:discussion) { diff_note.to_discussion } + + before do + diff_note.position = diff_note.original_position + diff_note.save!
+ end + + it 'returns the diff version path with the line code' do + expect(helper.discussion_path(discussion)).to eq(diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, diff_id: merge_request_diff1, anchor: discussion.line_code)) + end + end + + context 'when the discussion is on a comparison between merge request versions' do + let(:position) do + Gitlab::Diff::Position.new( + old_path: ".gitmodules", + new_path: ".gitmodules", + old_line: 4, + new_line: 4, + diff_refs: merge_request_diff3.compare_with(merge_request_diff1.head_commit_sha).diff_refs + ) + end + + let(:discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project, position: position).to_discussion } + + it 'returns the diff version comparison path with the line code' do + expect(helper.discussion_path(discussion)).to eq(diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, diff_id: merge_request_diff3, start_sha: merge_request_diff1.head_commit_sha, anchor: discussion.line_code)) + end + end + + context 'when the discussion does not have a merge request version' do + let(:outdated_diff_note) { create(:diff_note_on_merge_request, noteable: merge_request, project: project, diff_refs: project.commit(sample_commit.id).diff_refs) } + let(:discussion) { outdated_diff_note.to_discussion } + + before do + outdated_diff_note.position = outdated_diff_note.original_position + outdated_diff_note.save! + end + + it 'returns nil' do + expect(helper.discussion_path(discussion)).to be_nil + end + end + end + + context 'for a legacy diff discussion' do + let(:discussion) { create(:legacy_diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion } + + context 'when the discussion is active' do + before do + allow(discussion).to receive(:active?).and_return(true) + end + + it 'returns the diff path with the line code' do + expect(helper.discussion_path(discussion)).to eq(diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, anchor: discussion.line_code)) + end + end + + context 'when the discussion is outdated' do + before do + allow(discussion).to receive(:active?).and_return(false) + end + + it 'returns nil' do + expect(helper.discussion_path(discussion)).to be_nil + end + end + end + + context 'for a non-diff discussion' do + let(:discussion) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project).to_discussion } + + it 'returns nil' do + expect(helper.discussion_path(discussion)).to be_nil + end + end + end + + context 'for a commit discussion' do + let(:commit) { discussion.noteable } + + context 'for a diff discussion' do + let(:discussion) { create(:diff_note_on_commit, project: project).to_discussion } + + it 'returns the commit path with the line code' do + expect(helper.discussion_path(discussion)).to eq(namespace_project_commit_path(project.namespace, project, commit, anchor: discussion.line_code)) + end + end + + context 'for a legacy diff discussion' do + let(:discussion) { create(:legacy_diff_note_on_commit, project: project).to_discussion } + + it 'returns the commit path with the line code' do + expect(helper.discussion_path(discussion)).to eq(namespace_project_commit_path(project.namespace, project, commit, anchor: discussion.line_code)) + end + end + + context 'for a non-diff discussion' do + let(:discussion) { create(:discussion_note_on_commit, project: project).to_discussion } + + it 'returns the commit path' do + 
expect(helper.discussion_path(discussion)).to eq(namespace_project_commit_path(project.namespace, project, commit)) + end + end + end + end + + describe '#notes_url' do + it 'returns snippet notes path for personal snippet' do + @snippet = create(:personal_snippet) + + expect(helper.notes_url).to eq("/snippets/#{@snippet.id}/notes") + end + + it 'returns project notes path for project snippet' do + namespace = create(:namespace, path: 'nm') + @project = create(:empty_project, path: 'test', namespace: namespace) + @snippet = create(:project_snippet, project: @project) + @noteable = @snippet + + expect(helper.notes_url).to eq("/nm/test/noteable/project_snippet/#{@noteable.id}/notes") + end + + it 'returns project notes path for other noteables' do + namespace = create(:namespace, path: 'nm') + @project = create(:empty_project, path: 'test', namespace: namespace) + @noteable = create(:issue, project: @project) + + expect(helper.notes_url).to eq("/nm/test/noteable/issue/#{@noteable.id}/notes") + end + end + + describe '#note_url' do + it 'returns snippet notes path for personal snippet' do + note = create(:note_on_personal_snippet) + + expect(helper.note_url(note)).to eq("/snippets/#{note.noteable.id}/notes/#{note.id}") + end + + it 'returns project notes path for project snippet' do + namespace = create(:namespace, path: 'nm') + @project = create(:empty_project, path: 'test', namespace: namespace) + note = create(:note_on_project_snippet, project: @project) + + expect(helper.note_url(note)).to eq("/nm/test/notes/#{note.id}") + end + + it 'returns project notes path for other noteables' do + namespace = create(:namespace, path: 'nm') + @project = create(:empty_project, path: 'test', namespace: namespace) + note = create(:note_on_issue, project: @project) + + expect(helper.note_url(note)).to eq("/nm/test/notes/#{note.id}") + end + end + + describe '#form_resources' do + it 'returns note for personal snippet' do + @snippet = create(:personal_snippet) + @note = create(:note_on_personal_snippet) + + expect(helper.form_resources).to eq([@note]) + end + + it 'returns namespace, project and note for project snippet' do + namespace = create(:namespace, path: 'nm') + @project = create(:empty_project, path: 'test', namespace: namespace) + @snippet = create(:project_snippet, project: @project) + @note = create(:note_on_personal_snippet) + + expect(helper.form_resources).to eq([@project.namespace, @project, @note]) end - it 'loads multiple users' do - expected_access = { - owner.id => Gitlab::Access::OWNER, - master.id => Gitlab::Access::MASTER, - reporter.id => Gitlab::Access::REPORTER - } + it 'returns namespace, project and note path for other noteables' do + namespace = create(:namespace, path: 'nm') + @project = create(:empty_project, path: 'test', namespace: namespace) + @note = create(:note_on_issue, project: @project) - expect(helper.preload_max_access_for_authors(notes, project)).to eq(expected_access) + expect(helper.form_resources).to eq([@project.namespace, @project, @note]) end end end diff --git a/spec/helpers/preferences_helper_spec.rb b/spec/helpers/preferences_helper_spec.rb index f3e79cc7290..2c0e9975f73 100644 --- a/spec/helpers/preferences_helper_spec.rb +++ b/spec/helpers/preferences_helper_spec.rb @@ -86,10 +86,10 @@ describe PreferencesHelper do context 'when repository is not empty' do let(:project) { create(:project, :public, :repository) } - it 'returns readme if user has repository access' do + it 'returns files and readme if user has repository access' do allow(helper).to
receive(:can?).with(nil, :download_code, project).and_return(true) - expect(helper.default_project_view).to eq('readme') + expect(helper.default_project_view).to eq('files') end it 'returns activity if user does not have repository access' do diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb index fc6ad6419ac..54c5ba57bdf 100644 --- a/spec/helpers/projects_helper_spec.rb +++ b/spec/helpers/projects_helper_spec.rb @@ -63,11 +63,11 @@ describe ProjectsHelper do end end - describe "#project_list_cache_key" do + describe "#project_list_cache_key", redis: true do let(:project) { create(:project) } - it "includes the namespace" do - expect(helper.project_list_cache_key(project)).to include(project.namespace.cache_key) + it "includes the route" do + expect(helper.project_list_cache_key(project)).to include(project.route.cache_key) end it "includes the project" do @@ -93,7 +93,7 @@ describe ProjectsHelper do end it "includes a version" do - expect(helper.project_list_cache_key(project)).to include("v2.3") + expect(helper.project_list_cache_key(project).last).to start_with('v') end it "includes the pipeline status when there is a status" do @@ -103,6 +103,18 @@ describe ProjectsHelper do end end + describe '#load_pipeline_status' do + it 'loads the pipeline status in batch' do + project = build(:empty_project) + + helper.load_pipeline_status([project]) + # Skip lazy loading of the `pipeline_status` attribute + pipeline_status = project.instance_variable_get('@pipeline_status') + + expect(pipeline_status).to be_a(Gitlab::Cache::Ci::ProjectPipelineStatus) + end + end + describe 'link_to_member' do let(:group) { create(:group) } let(:project) { create(:empty_project, group: group) } @@ -167,6 +179,7 @@ describe ProjectsHelper do before do allow(project).to receive(:repository_storage_path).and_return('/base/repo/path') + allow(Settings.shared).to receive(:[]).with('path').and_return('/base/repo/export/path') end it 'removes the repo path' do @@ -175,6 +188,13 @@ describe ProjectsHelper do expect(sanitize_repo_path(project, import_error)).to eq('Could not clone [REPOS PATH]/namespace/test.git') end + + it 'removes the temporary repo path used for uploads/exports' do + repo = '/base/repo/export/path/tmp/project_exports/uploads/test.tar.gz' + import_error = "Unable to decompress #{repo}\n" + + expect(sanitize_repo_path(project, import_error)).to eq('Unable to decompress [REPO EXPORT PATH]/uploads/test.tar.gz') + end end describe '#last_push_event' do @@ -257,4 +277,27 @@ describe ProjectsHelper do end end end + + describe "#visibility_select_options" do + let(:project) { create(:project, :repository) } + let(:user) { create(:user) } + + before do + allow(helper).to receive(:current_user).and_return(user) + + stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC]) + end + + it "does not include the Public restricted level" do + expect(helper.send(:visibility_select_options, project, Gitlab::VisibilityLevel::PRIVATE)).not_to include('Public') + end + + it "includes the Internal level" do + expect(helper.send(:visibility_select_options, project, Gitlab::VisibilityLevel::PRIVATE)).to include('Internal') + end + + it "includes the Private level" do + expect(helper.send(:visibility_select_options, project, Gitlab::VisibilityLevel::PRIVATE)).to include('Private') + end + end end diff --git a/spec/helpers/submodule_helper_spec.rb b/spec/helpers/submodule_helper_spec.rb index 28b8def331d..18935be95c9 100644 --- 
a/spec/helpers/submodule_helper_spec.rb +++ b/spec/helpers/submodule_helper_spec.rb @@ -70,15 +70,30 @@ describe SubmoduleHelper do expect(submodule_links(submodule_item)).to eq(['https://github.com/gitlab-org/gitlab-ce', 'https://github.com/gitlab-org/gitlab-ce/tree/hash']) end - it 'returns original with non-standard url' do + it 'handles urls with no .git on the end' do stub_url('http://github.com/gitlab-org/gitlab-ce') - expect(submodule_links(submodule_item)).to eq([repo.submodule_url_for, nil]) + expect(submodule_links(submodule_item)).to eq(['https://github.com/gitlab-org/gitlab-ce', 'https://github.com/gitlab-org/gitlab-ce/tree/hash']) + end + it 'returns original with non-standard url' do stub_url('http://github.com/another/gitlab-org/gitlab-ce.git') expect(submodule_links(submodule_item)).to eq([repo.submodule_url_for, nil]) end end + context 'in-repository submodule' do + let(:group) { create(:group, name: "Master Project", path: "master-project") } + let(:project) { create(:empty_project, group: group) } + before do + self.instance_variable_set(:@project, project) + end + + it 'in-repository' do + stub_url('./') + expect(submodule_links(submodule_item)).to eq(["/master-project/#{project.path}", "/master-project/#{project.path}/tree/hash"]) + end + end + context 'submodule on gitlab.com' do it 'detects ssh' do stub_url('git@gitlab.com:gitlab-org/gitlab-ce.git') @@ -95,16 +110,30 @@ describe SubmoduleHelper do expect(submodule_links(submodule_item)).to eq(['https://gitlab.com/gitlab-org/gitlab-ce', 'https://gitlab.com/gitlab-org/gitlab-ce/tree/hash']) end - it 'returns original with non-standard url' do + it 'handles urls with no .git on the end' do stub_url('http://gitlab.com/gitlab-org/gitlab-ce') - expect(submodule_links(submodule_item)).to eq([repo.submodule_url_for, nil]) + expect(submodule_links(submodule_item)).to eq(['https://gitlab.com/gitlab-org/gitlab-ce', 'https://gitlab.com/gitlab-org/gitlab-ce/tree/hash']) + end + it 'returns original with non-standard url' do stub_url('http://gitlab.com/another/gitlab-org/gitlab-ce.git') expect(submodule_links(submodule_item)).to eq([repo.submodule_url_for, nil]) end end context 'submodule on unsupported' do + it 'sanitizes unsupported protocols' do + stub_url('javascript:alert("XSS");') + + expect(helper.submodule_links(submodule_item)).to eq([nil, nil]) + end + + it 'sanitizes unsupported protocols disguised as a repository URL' do + stub_url('javascript:alert("XSS");foo/bar.git') + + expect(helper.submodule_links(submodule_item)).to eq([nil, nil]) + end + it 'returns original' do stub_url('http://mygitserver.com/gitlab-org/gitlab-ce') expect(submodule_links(submodule_item)).to eq([repo.submodule_url_for, nil]) diff --git a/spec/initializers/trusted_proxies_spec.rb b/spec/initializers/trusted_proxies_spec.rb index ff8b8daa347..70a18f31744 100644 --- a/spec/initializers/trusted_proxies_spec.rb +++ b/spec/initializers/trusted_proxies_spec.rb @@ -56,7 +56,7 @@ describe 'trusted_proxies', lib: true do end def stub_request(headers = {}) - ActionDispatch::RemoteIp.new(Proc.new { }, false, Rails.application.config.action_dispatch.trusted_proxies).call(headers) + ActionDispatch::RemoteIp.new(proc { }, false, Rails.application.config.action_dispatch.trusted_proxies).call(headers) ActionDispatch::Request.new(headers) end diff --git a/spec/javascripts/abuse_reports_spec.js b/spec/javascripts/abuse_reports_spec.js index 76b370b345b..069d857eab6 100644 --- a/spec/javascripts/abuse_reports_spec.js +++ b/spec/javascripts/abuse_reports_spec.js 
@@ -1,5 +1,5 @@ -require('~/lib/utils/text_utility'); -require('~/abuse_reports'); +import '~/lib/utils/text_utility'; +import '~/abuse_reports'; ((global) => { describe('Abuse Reports', () => { diff --git a/spec/javascripts/activities_spec.js b/spec/javascripts/activities_spec.js index e6a6fc36ca1..e8c5f721423 100644 --- a/spec/javascripts/activities_spec.js +++ b/spec/javascripts/activities_spec.js @@ -1,8 +1,8 @@ /* eslint-disable no-unused-expressions, no-prototype-builtins, no-new, no-shadow, max-len */ -require('vendor/jquery.endless-scroll.js'); -require('~/pager'); -require('~/activities'); +import 'vendor/jquery.endless-scroll'; +import '~/pager'; +import '~/activities'; (() => { window.gon || (window.gon = {}); diff --git a/spec/javascripts/ajax_loading_spinner_spec.js b/spec/javascripts/ajax_loading_spinner_spec.js index a68bccb16f4..1518ae68b0d 100644 --- a/spec/javascripts/ajax_loading_spinner_spec.js +++ b/spec/javascripts/ajax_loading_spinner_spec.js @@ -1,7 +1,7 @@ -require('~/extensions/array'); -require('jquery'); -require('jquery-ujs'); -require('~/ajax_loading_spinner'); +import '~/extensions/array'; +import 'jquery'; +import 'jquery-ujs'; +import '~/ajax_loading_spinner'; describe('Ajax Loading Spinner', () => { const fixtureTemplate = 'static/ajax_loading_spinner.html.raw'; diff --git a/spec/javascripts/api_spec.js b/spec/javascripts/api_spec.js new file mode 100644 index 00000000000..867322ce8ae --- /dev/null +++ b/spec/javascripts/api_spec.js @@ -0,0 +1,281 @@ +import Api from '~/api'; + +describe('Api', () => { + const dummyApiVersion = 'v3000'; + const dummyUrlRoot = 'http://host.invalid'; + const dummyGon = { + api_version: dummyApiVersion, + relative_url_root: dummyUrlRoot, + }; + const dummyResponse = 'hello from outer space!'; + const sendDummyResponse = () => { + const deferred = $.Deferred(); + deferred.resolve(dummyResponse); + return deferred.promise(); + }; + let originalGon; + + beforeEach(() => { + originalGon = window.gon; + window.gon = dummyGon; + }); + + afterEach(() => { + window.gon = originalGon; + }); + + describe('buildUrl', () => { + it('adds URL root and fills in API version', () => { + const input = '/api/:version/foo/bar'; + const expectedOutput = `${dummyUrlRoot}/api/${dummyApiVersion}/foo/bar`; + + const builtUrl = Api.buildUrl(input); + + expect(builtUrl).toEqual(expectedOutput); + }); + }); + + describe('group', () => { + it('fetches a group', (done) => { + const groupId = '123456'; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}.json`; + spyOn(jQuery, 'ajax').and.callFake((request) => { + expect(request.url).toEqual(expectedUrl); + expect(request.dataType).toEqual('json'); + return sendDummyResponse(); + }); + + Api.group(groupId, (response) => { + expect(response).toBe(dummyResponse); + done(); + }); + }); + }); + + describe('groups', () => { + it('fetches groups', (done) => { + const query = 'dummy query'; + const options = { unused: 'option' }; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups.json`; + const expectedData = Object.assign({ + search: query, + per_page: 20, + }, options); + spyOn(jQuery, 'ajax').and.callFake((request) => { + expect(request.url).toEqual(expectedUrl); + expect(request.dataType).toEqual('json'); + expect(request.data).toEqual(expectedData); + return sendDummyResponse(); + }); + + Api.groups(query, options, (response) => { + expect(response).toBe(dummyResponse); + done(); + }); + }); + }); + + describe('namespaces', () => { + it('fetches namespaces', 
(done) => { + const query = 'dummy query'; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/namespaces.json`; + const expectedData = { + search: query, + per_page: 20, + }; + spyOn(jQuery, 'ajax').and.callFake((request) => { + expect(request.url).toEqual(expectedUrl); + expect(request.dataType).toEqual('json'); + expect(request.data).toEqual(expectedData); + return sendDummyResponse(); + }); + + Api.namespaces(query, (response) => { + expect(response).toBe(dummyResponse); + done(); + }); + }); + }); + + describe('projects', () => { + it('fetches projects', (done) => { + const query = 'dummy query'; + const options = { unused: 'option' }; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json?simple=true`; + const expectedData = Object.assign({ + search: query, + per_page: 20, + membership: true, + }, options); + spyOn(jQuery, 'ajax').and.callFake((request) => { + expect(request.url).toEqual(expectedUrl); + expect(request.dataType).toEqual('json'); + expect(request.data).toEqual(expectedData); + return sendDummyResponse(); + }); + + Api.projects(query, options, (response) => { + expect(response).toBe(dummyResponse); + done(); + }); + }); + }); + + describe('newLabel', () => { + it('creates a new label', (done) => { + const namespace = 'some namespace'; + const project = 'some project'; + const labelData = { some: 'data' }; + const expectedUrl = `${dummyUrlRoot}/${namespace}/${project}/labels`; + const expectedData = { + label: labelData, + }; + spyOn(jQuery, 'ajax').and.callFake((request) => { + expect(request.url).toEqual(expectedUrl); + expect(request.dataType).toEqual('json'); + expect(request.type).toEqual('POST'); + expect(request.data).toEqual(expectedData); + return sendDummyResponse(); + }); + + Api.newLabel(namespace, project, labelData, (response) => { + expect(response).toBe(dummyResponse); + done(); + }); + }); + }); + + describe('groupProjects', () => { + it('fetches group projects', (done) => { + const groupId = '123456'; + const query = 'dummy query'; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/projects.json`; + const expectedData = { + search: query, + per_page: 20, + }; + spyOn(jQuery, 'ajax').and.callFake((request) => { + expect(request.url).toEqual(expectedUrl); + expect(request.dataType).toEqual('json'); + expect(request.data).toEqual(expectedData); + return sendDummyResponse(); + }); + + Api.groupProjects(groupId, query, (response) => { + expect(response).toBe(dummyResponse); + done(); + }); + }); + }); + + describe('licenseText', () => { + it('fetches a license text', (done) => { + const licenseKey = "driver's license"; + const data = { unused: 'option' }; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/licenses/${licenseKey}`; + spyOn(jQuery, 'ajax').and.callFake((request) => { + expect(request.url).toEqual(expectedUrl); + expect(request.data).toEqual(data); + return sendDummyResponse(); + }); + + Api.licenseText(licenseKey, data, (response) => { + expect(response).toBe(dummyResponse); + done(); + }); + }); + }); + + describe('gitignoreText', () => { + it('fetches a gitignore text', (done) => { + const gitignoreKey = 'ignore git'; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/gitignores/${gitignoreKey}`; + spyOn(jQuery, 'get').and.callFake((url, callback) => { + expect(url).toEqual(expectedUrl); + callback(dummyResponse); + }); + + Api.gitignoreText(gitignoreKey, (response) => { + expect(response).toBe(dummyResponse); + done(); + }); + }); + }); + + 
describe('gitlabCiYml', () => { + it('fetches a .gitlab-ci.yml', (done) => { + const gitlabCiYmlKey = 'Y CI ML'; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/gitlab_ci_ymls/${gitlabCiYmlKey}`; + spyOn(jQuery, 'get').and.callFake((url, callback) => { + expect(url).toEqual(expectedUrl); + callback(dummyResponse); + }); + + Api.gitlabCiYml(gitlabCiYmlKey, (response) => { + expect(response).toBe(dummyResponse); + done(); + }); + }); + }); + + describe('dockerfileYml', () => { + it('fetches a Dockerfile', (done) => { + const dockerfileYmlKey = 'a giant whale'; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/dockerfiles/${dockerfileYmlKey}`; + spyOn(jQuery, 'get').and.callFake((url, callback) => { + expect(url).toEqual(expectedUrl); + callback(dummyResponse); + }); + + Api.dockerfileYml(dockerfileYmlKey, (response) => { + expect(response).toBe(dummyResponse); + done(); + }); + }); + }); + + describe('issueTemplate', () => { + it('fetches an issue template', (done) => { + const namespace = 'some namespace'; + const project = 'some project'; + const templateKey = 'template key'; + const templateType = 'template type'; + const expectedUrl = `${dummyUrlRoot}/${namespace}/${project}/templates/${templateType}/${templateKey}`; + spyOn(jQuery, 'ajax').and.callFake((request) => { + expect(request.url).toEqual(expectedUrl); + return sendDummyResponse(); + }); + + Api.issueTemplate(namespace, project, templateKey, templateType, (error, response) => { + expect(error).toBe(null); + expect(response).toBe(dummyResponse); + done(); + }); + }); + }); + + describe('users', () => { + it('fetches users', (done) => { + const query = 'dummy query'; + const options = { unused: 'option' }; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users.json`; + const expectedData = Object.assign({ + search: query, + per_page: 20, + }, options); + spyOn(jQuery, 'ajax').and.callFake((request) => { + expect(request.url).toEqual(expectedUrl); + expect(request.dataType).toEqual('json'); + expect(request.data).toEqual(expectedData); + return sendDummyResponse(); + }); + + Api.users(query, options) + .then((response) => { + expect(response).toBe(dummyResponse); + }) + .then(done) + .catch(done.fail); + }); + }); +}); diff --git a/spec/javascripts/autosave_spec.js b/spec/javascripts/autosave_spec.js new file mode 100644 index 00000000000..9f9acc392c2 --- /dev/null +++ b/spec/javascripts/autosave_spec.js @@ -0,0 +1,134 @@ +import Autosave from '~/autosave'; +import AccessorUtilities from '~/lib/utils/accessor'; + +describe('Autosave', () => { + let autosave; + + describe('class constructor', () => { + const key = 'key'; + const field = jasmine.createSpyObj('field', ['data', 'on']); + + beforeEach(() => { + spyOn(AccessorUtilities, 'isLocalStorageAccessSafe').and.returnValue(true); + spyOn(Autosave.prototype, 'restore'); + + autosave = new Autosave(field, key); + }); + + it('should set .isLocalStorageAvailable', () => { + expect(AccessorUtilities.isLocalStorageAccessSafe).toHaveBeenCalled(); + expect(autosave.isLocalStorageAvailable).toBe(true); + }); + }); + + describe('restore', () => { + const key = 'key'; + const field = jasmine.createSpyObj('field', ['trigger']); + + beforeEach(() => { + autosave = { + field, + key, + }; + + spyOn(window.localStorage, 'getItem'); + }); + + describe('if .isLocalStorageAvailable is `false`', () => { + beforeEach(() => { + autosave.isLocalStorageAvailable = false; + + Autosave.prototype.restore.call(autosave); + }); + + it('should not call 
.getItem', () => { + expect(window.localStorage.getItem).not.toHaveBeenCalled(); + }); + }); + + describe('if .isLocalStorageAvailable is `true`', () => { + beforeEach(() => { + autosave.isLocalStorageAvailable = true; + + Autosave.prototype.restore.call(autosave); + }); + + it('should call .getItem', () => { + expect(window.localStorage.getItem).toHaveBeenCalledWith(key); + }); + }); + }); + + describe('save', () => { + const field = jasmine.createSpyObj('field', ['val']); + + beforeEach(() => { + autosave = jasmine.createSpyObj('autosave', ['reset']); + autosave.field = field; + + field.val.and.returnValue('value'); + + spyOn(window.localStorage, 'setItem'); + }); + + describe('if .isLocalStorageAvailable is `false`', () => { + beforeEach(() => { + autosave.isLocalStorageAvailable = false; + + Autosave.prototype.save.call(autosave); + }); + + it('should not call .setItem', () => { + expect(window.localStorage.setItem).not.toHaveBeenCalled(); + }); + }); + + describe('if .isLocalStorageAvailable is `true`', () => { + beforeEach(() => { + autosave.isLocalStorageAvailable = true; + + Autosave.prototype.save.call(autosave); + }); + + it('should call .setItem', () => { + expect(window.localStorage.setItem).toHaveBeenCalled(); + }); + }); + }); + + describe('reset', () => { + const key = 'key'; + + beforeEach(() => { + autosave = { + key, + }; + + spyOn(window.localStorage, 'removeItem'); + }); + + describe('if .isLocalStorageAvailable is `false`', () => { + beforeEach(() => { + autosave.isLocalStorageAvailable = false; + + Autosave.prototype.reset.call(autosave); + }); + + it('should not call .removeItem', () => { + expect(window.localStorage.removeItem).not.toHaveBeenCalled(); + }); + }); + + describe('if .isLocalStorageAvailable is `true`', () => { + beforeEach(() => { + autosave.isLocalStorageAvailable = true; + + Autosave.prototype.reset.call(autosave); + }); + + it('should call .removeItem', () => { + expect(window.localStorage.removeItem).toHaveBeenCalledWith(key); + }); + }); + }); +}); diff --git a/spec/javascripts/awards_handler_spec.js b/spec/javascripts/awards_handler_spec.js index ea7753c7a1d..3fc03324d16 100644 --- a/spec/javascripts/awards_handler_spec.js +++ b/spec/javascripts/awards_handler_spec.js @@ -3,6 +3,8 @@ import Cookies from 'js-cookie'; import AwardsHandler from '~/awards_handler'; +import '~/lib/utils/common_utils'; + (function() { var awardsHandler, lazyAssert, urlRoot, openAndWaitForEmojiMenu; @@ -28,7 +30,7 @@ import AwardsHandler from '~/awards_handler'; loadFixtures('issues/issue_with_comment.html.raw'); awardsHandler = new AwardsHandler; spyOn(awardsHandler, 'postEmoji').and.callFake((function(_this) { - return function(url, emoji, cb) { + return function(button, url, emoji, cb) { return cb(); }; })(this)); @@ -63,7 +65,7 @@ import AwardsHandler from '~/awards_handler'; $emojiMenu = $('.emoji-menu'); expect($emojiMenu.length).toBe(1); expect($emojiMenu.hasClass('is-visible')).toBe(true); - expect($emojiMenu.find('#emoji_search').length).toBe(1); + expect($emojiMenu.find('.js-emoji-menu-search').length).toBe(1); return expect($('.js-awards-block.current').length).toBe(1); }); }); @@ -115,6 +117,27 @@ import AwardsHandler from '~/awards_handler'; return expect($emojiButton.next('.js-counter').text()).toBe('4'); }); }); + describe('::userAuthored', function() { + it('should update tooltip to user authored title', function() { + var $thumbsUpEmoji, $votesBlock; + $votesBlock = $('.js-awards-block').eq(0); + $thumbsUpEmoji = 
$votesBlock.find('[data-name=thumbsup]').parent(); + $thumbsUpEmoji.attr('data-title', 'sam'); + awardsHandler.userAuthored($thumbsUpEmoji); + return expect($thumbsUpEmoji.data("original-title")).toBe("You cannot vote on your own issue, MR and note"); + }); + it('should restore tooltip back to initial vote list', function() { + var $thumbsUpEmoji, $votesBlock; + jasmine.clock().install(); + $votesBlock = $('.js-awards-block').eq(0); + $thumbsUpEmoji = $votesBlock.find('[data-name=thumbsup]').parent(); + $thumbsUpEmoji.attr('data-title', 'sam'); + awardsHandler.userAuthored($thumbsUpEmoji); + jasmine.clock().tick(2801); + jasmine.clock().uninstall(); + return expect($thumbsUpEmoji.data("original-title")).toBe("sam"); + }); + }); describe('::getAwardUrl', function() { return it('returns the url for request', function() { return expect(awardsHandler.getAwardUrl()).toBe('http://test.host/frontend-fixtures/issues-project/issues/1/toggle_award_emoji'); @@ -194,16 +217,35 @@ import AwardsHandler from '~/awards_handler'; return expect($thumbsUpEmoji.data("original-title")).toBe('sam'); }); }); - describe('search', function() { - return it('should filter the emoji', function(done) { + describe('::searchEmojis', () => { + it('should filter the emoji', function(done) { return openAndWaitForEmojiMenu() .then(() => { expect($('[data-name=angel]').is(':visible')).toBe(true); expect($('[data-name=anger]').is(':visible')).toBe(true); - $('#emoji_search').val('ali').trigger('input'); + awardsHandler.searchEmojis('ali'); expect($('[data-name=angel]').is(':visible')).toBe(false); expect($('[data-name=anger]').is(':visible')).toBe(false); expect($('[data-name=alien]').is(':visible')).toBe(true); + expect($('.js-emoji-menu-search').val()).toBe('ali'); + }) + .then(done) + .catch((err) => { + done.fail(`Failed to open and build emoji menu: ${err.message}`); + }); + }); + it('should clear the search when searching for nothing', function(done) { + return openAndWaitForEmojiMenu() + .then(() => { + awardsHandler.searchEmojis('ali'); + expect($('[data-name=angel]').is(':visible')).toBe(false); + expect($('[data-name=anger]').is(':visible')).toBe(false); + expect($('[data-name=alien]').is(':visible')).toBe(true); + awardsHandler.searchEmojis(''); + expect($('[data-name=angel]').is(':visible')).toBe(true); + expect($('[data-name=anger]').is(':visible')).toBe(true); + expect($('[data-name=alien]').is(':visible')).toBe(true); + expect($('.js-emoji-menu-search').val()).toBe(''); }) .then(done) .catch((err) => { @@ -211,6 +253,7 @@ import AwardsHandler from '~/awards_handler'; }); }); }); + describe('emoji menu', function() { const emojiSelector = '[data-name="sunglasses"]'; const openEmojiMenuAndAddEmoji = function() { diff --git a/spec/javascripts/behaviors/autosize_spec.js b/spec/javascripts/behaviors/autosize_spec.js index 3deaf258cae..67afba19190 100644 --- a/spec/javascripts/behaviors/autosize_spec.js +++ b/spec/javascripts/behaviors/autosize_spec.js @@ -1,6 +1,6 @@ /* eslint-disable space-before-function-paren, no-var, comma-dangle, no-return-assign, max-len */ -require('~/behaviors/autosize'); +import '~/behaviors/autosize'; (function() { describe('Autosize behavior', function() { diff --git a/spec/javascripts/behaviors/bind_in_out_spec.js b/spec/javascripts/behaviors/bind_in_out_spec.js index dd9ab33289f..5ff66167718 100644 --- a/spec/javascripts/behaviors/bind_in_out_spec.js +++ b/spec/javascripts/behaviors/bind_in_out_spec.js @@ -2,7 +2,7 @@ import BindInOut from '~/behaviors/bind_in_out'; import 
ClassSpecHelper from '../helpers/class_spec_helper'; describe('BindInOut', function () { - describe('.constructor', function () { + describe('constructor', function () { beforeEach(function () { this.in = {}; this.out = {}; @@ -53,7 +53,7 @@ describe('BindInOut', function () { }); }); - describe('.addEvents', function () { + describe('addEvents', function () { beforeEach(function () { this.in = jasmine.createSpyObj('in', ['addEventListener']); @@ -79,7 +79,7 @@ describe('BindInOut', function () { }); }); - describe('.updateOut', function () { + describe('updateOut', function () { beforeEach(function () { this.in = { value: 'the-value' }; this.out = { textContent: 'not-the-value' }; @@ -98,7 +98,7 @@ describe('BindInOut', function () { }); }); - describe('.removeEvents', function () { + describe('removeEvents', function () { beforeEach(function () { this.in = jasmine.createSpyObj('in', ['removeEventListener']); this.updateOut = () => {}; @@ -122,7 +122,7 @@ describe('BindInOut', function () { }); }); - describe('.initAll', function () { + describe('initAll', function () { beforeEach(function () { this.ins = [0, 1, 2]; this.instances = []; @@ -153,7 +153,7 @@ describe('BindInOut', function () { }); }); - describe('.init', function () { + describe('init', function () { beforeEach(function () { spyOn(BindInOut.prototype, 'addEvents').and.callFake(function () { return this; }); spyOn(BindInOut.prototype, 'updateOut').and.callFake(function () { return this; }); diff --git a/spec/javascripts/behaviors/gl_emoji/unicode_support_map_spec.js b/spec/javascripts/behaviors/gl_emoji/unicode_support_map_spec.js new file mode 100644 index 00000000000..1ed96a67478 --- /dev/null +++ b/spec/javascripts/behaviors/gl_emoji/unicode_support_map_spec.js @@ -0,0 +1,47 @@ +import { getUnicodeSupportMap } from '~/behaviors/gl_emoji/unicode_support_map'; +import AccessorUtilities from '~/lib/utils/accessor'; + +describe('Unicode Support Map', () => { + describe('getUnicodeSupportMap', () => { + const stringSupportMap = 'stringSupportMap'; + + beforeEach(() => { + spyOn(AccessorUtilities, 'isLocalStorageAccessSafe'); + spyOn(window.localStorage, 'getItem'); + spyOn(window.localStorage, 'setItem'); + spyOn(JSON, 'parse'); + spyOn(JSON, 'stringify').and.returnValue(stringSupportMap); + }); + + describe('if isLocalStorageAvailable is `true`', function () { + beforeEach(() => { + AccessorUtilities.isLocalStorageAccessSafe.and.returnValue(true); + + getUnicodeSupportMap(); + }); + + it('should call .getItem and .setItem', () => { + const allArgs = window.localStorage.setItem.calls.allArgs(); + + expect(window.localStorage.getItem).toHaveBeenCalledWith('gl-emoji-user-agent'); + expect(allArgs[0][0]).toBe('gl-emoji-user-agent'); + expect(allArgs[0][1]).toBe(navigator.userAgent); + expect(allArgs[1][0]).toBe('gl-emoji-unicode-support-map'); + expect(allArgs[1][1]).toBe(stringSupportMap); + }); + }); + + describe('if isLocalStorageAvailable is `false`', function () { + beforeEach(() => { + AccessorUtilities.isLocalStorageAccessSafe.and.returnValue(false); + + getUnicodeSupportMap(); + }); + + it('should not call .getItem or .setItem', () => { + expect(window.localStorage.getItem.calls.count()).toBe(1); + expect(window.localStorage.setItem).not.toHaveBeenCalled(); + }); + }); + }); +}); diff --git a/spec/javascripts/behaviors/quick_submit_spec.js b/spec/javascripts/behaviors/quick_submit_spec.js index 4820ce41ade..f56b99f8a16 100644 --- a/spec/javascripts/behaviors/quick_submit_spec.js +++ 
b/spec/javascripts/behaviors/quick_submit_spec.js @@ -1,6 +1,6 @@ /* eslint-disable space-before-function-paren, no-var, no-return-assign, comma-dangle, jasmine/no-spec-dupes, new-cap, max-len */ -require('~/behaviors/quick_submit'); +import '~/behaviors/quick_submit'; (function() { describe('Quick Submit behavior', function() { diff --git a/spec/javascripts/behaviors/requires_input_spec.js b/spec/javascripts/behaviors/requires_input_spec.js index 3a84013a2ed..f9fa814b801 100644 --- a/spec/javascripts/behaviors/requires_input_spec.js +++ b/spec/javascripts/behaviors/requires_input_spec.js @@ -1,6 +1,6 @@ /* eslint-disable space-before-function-paren, no-var */ -require('~/behaviors/requires_input'); +import '~/behaviors/requires_input'; (function() { describe('requiresInput', function() { diff --git a/spec/javascripts/blob/3d_viewer/mesh_object_spec.js b/spec/javascripts/blob/3d_viewer/mesh_object_spec.js new file mode 100644 index 00000000000..d1ebae33dab --- /dev/null +++ b/spec/javascripts/blob/3d_viewer/mesh_object_spec.js @@ -0,0 +1,42 @@ +import { + BoxGeometry, +} from 'three/build/three.module'; +import MeshObject from '~/blob/3d_viewer/mesh_object'; + +describe('Mesh object', () => { + it('defaults to non-wireframe material', () => { + const object = new MeshObject( + new BoxGeometry(10, 10, 10), + ); + + expect(object.material.wireframe).toBeFalsy(); + }); + + it('changes to wireframe material', () => { + const object = new MeshObject( + new BoxGeometry(10, 10, 10), + ); + + object.changeMaterial('wireframe'); + + expect(object.material.wireframe).toBeTruthy(); + }); + + it('scales object down', () => { + const object = new MeshObject( + new BoxGeometry(10, 10, 10), + ); + const radius = object.geometry.boundingSphere.radius; + + expect(radius).not.toBeGreaterThan(4); + }); + + it('does not scale object down', () => { + const object = new MeshObject( + new BoxGeometry(1, 1, 1), + ); + const radius = object.geometry.boundingSphere.radius; + + expect(radius).toBeLessThan(1); + }); +}); diff --git a/spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js b/spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js new file mode 100644 index 00000000000..acd0aaf2a86 --- /dev/null +++ b/spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js @@ -0,0 +1,51 @@ +/* eslint-disable import/no-unresolved */ + +import BalsamiqViewer from '~/blob/balsamiq/balsamiq_viewer'; +import bmprPath from '../../fixtures/blob/balsamiq/test.bmpr'; + +describe('Balsamiq integration spec', () => { + let container; + let endpoint; + let balsamiqViewer; + + preloadFixtures('static/balsamiq_viewer.html.raw'); + + beforeEach(() => { + loadFixtures('static/balsamiq_viewer.html.raw'); + + container = document.getElementById('js-balsamiq-viewer'); + balsamiqViewer = new BalsamiqViewer(container); + }); + + describe('successful response', () => { + beforeEach((done) => { + endpoint = bmprPath; + + balsamiqViewer.loadFile(endpoint).then(done).catch(done.fail); + }); + + it('does not show loading icon', () => { + expect(document.querySelector('.loading')).toBeNull(); + }); + + it('renders the balsamiq previews', () => { + expect(document.querySelectorAll('.previews .preview').length).not.toEqual(0); + }); + }); + + describe('error getting file', () => { + beforeEach((done) => { + endpoint = 'invalid/path/to/file.bmpr'; + + balsamiqViewer.loadFile(endpoint).then(done.fail, null).catch(done); + }); + + it('does not show loading icon', () => { +
expect(document.querySelector('.loading')).toBeNull(); + }); + + it('does not render the balsamiq previews', () => { + expect(document.querySelectorAll('.previews .preview').length).toEqual(0); + }); + }); +}); diff --git a/spec/javascripts/blob/balsamiq/balsamiq_viewer_spec.js b/spec/javascripts/blob/balsamiq/balsamiq_viewer_spec.js new file mode 100644 index 00000000000..aa87956109f --- /dev/null +++ b/spec/javascripts/blob/balsamiq/balsamiq_viewer_spec.js @@ -0,0 +1,326 @@ +import sqljs from 'sql.js'; +import BalsamiqViewer from '~/blob/balsamiq/balsamiq_viewer'; +import ClassSpecHelper from '../../helpers/class_spec_helper'; + +describe('BalsamiqViewer', () => { + let balsamiqViewer; + let viewer; + + describe('class constructor', () => { + beforeEach(() => { + viewer = {}; + + balsamiqViewer = new BalsamiqViewer(viewer); + }); + + it('should set .viewer', () => { + expect(balsamiqViewer.viewer).toBe(viewer); + }); + }); + + describe('fileLoaded', () => { + + }); + + describe('loadFile', () => { + let xhr; + let loadFile; + const endpoint = 'endpoint'; + + beforeEach(() => { + xhr = jasmine.createSpyObj('xhr', ['open', 'send']); + + balsamiqViewer = jasmine.createSpyObj('balsamiqViewer', ['renderFile']); + + spyOn(window, 'XMLHttpRequest').and.returnValue(xhr); + + loadFile = BalsamiqViewer.prototype.loadFile.call(balsamiqViewer, endpoint); + }); + + it('should call .open', () => { + expect(xhr.open).toHaveBeenCalledWith('GET', endpoint, true); + }); + + it('should set .responseType', () => { + expect(xhr.responseType).toBe('arraybuffer'); + }); + + it('should call .send', () => { + expect(xhr.send).toHaveBeenCalled(); + }); + + it('should return a promise', () => { + expect(loadFile).toEqual(jasmine.any(Promise)); + }); + }); + + describe('renderFile', () => { + let container; + let loadEvent; + let previews; + + beforeEach(() => { + loadEvent = { target: { response: {} } }; + viewer = jasmine.createSpyObj('viewer', ['appendChild']); + previews = [document.createElement('ul'), document.createElement('ul')]; + + balsamiqViewer = jasmine.createSpyObj('balsamiqViewer', ['initDatabase', 'getPreviews', 'renderPreview']); + balsamiqViewer.viewer = viewer; + + balsamiqViewer.getPreviews.and.returnValue(previews); + balsamiqViewer.renderPreview.and.callFake(preview => preview); + viewer.appendChild.and.callFake((containerElement) => { + container = containerElement; + }); + + BalsamiqViewer.prototype.renderFile.call(balsamiqViewer, loadEvent); + }); + + it('should call .initDatabase', () => { + expect(balsamiqViewer.initDatabase).toHaveBeenCalledWith(loadEvent.target.response); + }); + + it('should call .getPreviews', () => { + expect(balsamiqViewer.getPreviews).toHaveBeenCalled(); + }); + + it('should call .renderPreview for each preview', () => { + const allArgs = balsamiqViewer.renderPreview.calls.allArgs(); + + expect(allArgs.length).toBe(2); + + previews.forEach((preview, i) => { + expect(allArgs[i][0]).toBe(preview); + }); + }); + + it('should set the container HTML', () => { + expect(container.innerHTML).toBe('<ul></ul><ul></ul>'); + }); + + it('should add inline preview classes', () => { + expect(container.classList[0]).toBe('list-inline'); + expect(container.classList[1]).toBe('previews'); + }); + + it('should call viewer.appendChild', () => { + expect(viewer.appendChild).toHaveBeenCalledWith(container); + }); + }); + + describe('initDatabase', () => { + let database; + let uint8Array; + let data; + + beforeEach(() => { + uint8Array = {}; + database = {}; + data = 'data'; + + 
balsamiqViewer = {}; + + spyOn(window, 'Uint8Array').and.returnValue(uint8Array); + spyOn(sqljs, 'Database').and.returnValue(database); + + BalsamiqViewer.prototype.initDatabase.call(balsamiqViewer, data); + }); + + it('should instantiate Uint8Array', () => { + expect(window.Uint8Array).toHaveBeenCalledWith(data); + }); + + it('should call sqljs.Database', () => { + expect(sqljs.Database).toHaveBeenCalledWith(uint8Array); + }); + + it('should set .database', () => { + expect(balsamiqViewer.database).toBe(database); + }); + }); + + describe('getPreviews', () => { + let database; + let thumbnails; + let getPreviews; + + beforeEach(() => { + database = jasmine.createSpyObj('database', ['exec']); + thumbnails = [{ values: [0, 1, 2] }]; + + balsamiqViewer = { + database, + }; + + spyOn(BalsamiqViewer, 'parsePreview').and.callFake(preview => preview.toString()); + database.exec.and.returnValue(thumbnails); + + getPreviews = BalsamiqViewer.prototype.getPreviews.call(balsamiqViewer); + }); + + it('should call database.exec', () => { + expect(database.exec).toHaveBeenCalledWith('SELECT * FROM thumbnails'); + }); + + it('should call .parsePreview for each value', () => { + const allArgs = BalsamiqViewer.parsePreview.calls.allArgs(); + + expect(allArgs.length).toBe(3); + + thumbnails[0].values.forEach((value, i) => { + expect(allArgs[i][0]).toBe(value); + }); + }); + + it('should return an array of parsed values', () => { + expect(getPreviews).toEqual(['0', '1', '2']); + }); + }); + + describe('getResource', () => { + let database; + let resourceID; + let resource; + let getResource; + + beforeEach(() => { + database = jasmine.createSpyObj('database', ['exec']); + resourceID = 4; + resource = ['resource']; + + balsamiqViewer = { + database, + }; + + database.exec.and.returnValue(resource); + + getResource = BalsamiqViewer.prototype.getResource.call(balsamiqViewer, resourceID); + }); + + it('should call database.exec', () => { + expect(database.exec).toHaveBeenCalledWith(`SELECT * FROM resources WHERE id = '${resourceID}'`); + }); + + it('should return the selected resource', () => { + expect(getResource).toBe(resource[0]); + }); + }); + + describe('renderPreview', () => { + let previewElement; + let innerHTML; + let preview; + let renderPreview; + + beforeEach(() => { + innerHTML = '<a>innerHTML</a>'; + previewElement = { + outerHTML: '<p>outerHTML</p>', + classList: jasmine.createSpyObj('classList', ['add']), + }; + preview = {}; + + balsamiqViewer = jasmine.createSpyObj('balsamiqViewer', ['renderTemplate']); + + spyOn(document, 'createElement').and.returnValue(previewElement); + balsamiqViewer.renderTemplate.and.returnValue(innerHTML); + + renderPreview = BalsamiqViewer.prototype.renderPreview.call(balsamiqViewer, preview); + }); + + it('should call classList.add', () => { + expect(previewElement.classList.add).toHaveBeenCalledWith('preview'); + }); + + it('should call .renderTemplate', () => { + expect(balsamiqViewer.renderTemplate).toHaveBeenCalledWith(preview); + }); + + it('should set .innerHTML', () => { + expect(previewElement.innerHTML).toBe(innerHTML); + }); + + it('should return element', () => { + expect(renderPreview).toBe(previewElement); + }); + }); + + describe('renderTemplate', () => { + let preview; + let name; + let resource; + let template; + let renderTemplate; + + beforeEach(() => { + preview = { resourceID: 1, image: 'image' }; + name = 'name'; + resource = 'resource'; + template = ` + <div class="panel panel-default"> + <div class="panel-heading">name</div> + <div 
class="panel-body"> + <img class="img-thumbnail" src="data:image/png;base64,image"/> + </div> + </div> + `; + + balsamiqViewer = jasmine.createSpyObj('balsamiqViewer', ['getResource']); + + spyOn(BalsamiqViewer, 'parseTitle').and.returnValue(name); + balsamiqViewer.getResource.and.returnValue(resource); + + renderTemplate = BalsamiqViewer.prototype.renderTemplate.call(balsamiqViewer, preview); + }); + + it('should call .getResource', () => { + expect(balsamiqViewer.getResource).toHaveBeenCalledWith(preview.resourceID); + }); + + it('should call .parseTitle', () => { + expect(BalsamiqViewer.parseTitle).toHaveBeenCalledWith(resource); + }); + + it('should return the template string', function () { + expect(renderTemplate.replace(/\s/g, '')).toEqual(template.replace(/\s/g, '')); + }); + }); + + describe('parsePreview', () => { + let preview; + let parsePreview; + + beforeEach(() => { + preview = ['{}', '{ "id": 1 }']; + + spyOn(JSON, 'parse').and.callThrough(); + + parsePreview = BalsamiqViewer.parsePreview(preview); + }); + + ClassSpecHelper.itShouldBeAStaticMethod(BalsamiqViewer, 'parsePreview'); + + it('should return the parsed JSON', () => { + expect(parsePreview).toEqual(JSON.parse('{ "id": 1 }')); + }); + }); + + describe('parseTitle', () => { + let title; + let parseTitle; + + beforeEach(() => { + title = { values: [['{}', '{}', '{"name":"name"}']] }; + + spyOn(JSON, 'parse').and.callThrough(); + + parseTitle = BalsamiqViewer.parseTitle(title); + }); + + ClassSpecHelper.itShouldBeAStaticMethod(BalsamiqViewer, 'parsePreview'); + + it('should return the name value', () => { + expect(parseTitle).toBe('name'); + }); + }); +}); diff --git a/spec/javascripts/blob/blob_fork_suggestion_spec.js b/spec/javascripts/blob/blob_fork_suggestion_spec.js new file mode 100644 index 00000000000..d1ab0a32f85 --- /dev/null +++ b/spec/javascripts/blob/blob_fork_suggestion_spec.js @@ -0,0 +1,38 @@ +import BlobForkSuggestion from '~/blob/blob_fork_suggestion'; + +describe('BlobForkSuggestion', () => { + let blobForkSuggestion; + + const openButton = document.createElement('div'); + const forkButton = document.createElement('a'); + const cancelButton = document.createElement('div'); + const suggestionSection = document.createElement('div'); + const actionTextPiece = document.createElement('div'); + + beforeEach(() => { + blobForkSuggestion = new BlobForkSuggestion({ + openButtons: openButton, + forkButtons: forkButton, + cancelButtons: cancelButton, + suggestionSections: suggestionSection, + actionTextPieces: actionTextPiece, + }) + .init(); + }); + + afterEach(() => { + blobForkSuggestion.destroy(); + }); + + it('showSuggestionSection', () => { + blobForkSuggestion.showSuggestionSection('/foo', 'foo'); + expect(suggestionSection.classList.contains('hidden')).toEqual(false); + expect(forkButton.getAttribute('href')).toEqual('/foo'); + expect(actionTextPiece.textContent).toEqual('foo'); + }); + + it('hideSuggestionSection', () => { + blobForkSuggestion.hideSuggestionSection(); + expect(suggestionSection.classList.contains('hidden')).toEqual(true); + }); +}); diff --git a/spec/javascripts/blob/create_branch_dropdown_spec.js b/spec/javascripts/blob/create_branch_dropdown_spec.js index c1179e572ae..6dbaa47c544 100644 --- a/spec/javascripts/blob/create_branch_dropdown_spec.js +++ b/spec/javascripts/blob/create_branch_dropdown_spec.js @@ -1,7 +1,6 @@ -require('~/gl_dropdown'); -require('~/lib/utils/type_utility'); -require('~/blob/create_branch_dropdown'); -require('~/blob/target_branch_dropdown'); +import 
'~/gl_dropdown'; +import '~/blob/create_branch_dropdown'; +import '~/blob/target_branch_dropdown'; describe('CreateBranchDropdown', () => { const fixtureTemplate = 'static/target_branch_dropdown.html.raw'; diff --git a/spec/javascripts/blob/pdf/index_spec.js b/spec/javascripts/blob/pdf/index_spec.js new file mode 100644 index 00000000000..bbeaf95e68d --- /dev/null +++ b/spec/javascripts/blob/pdf/index_spec.js @@ -0,0 +1,82 @@ +/* eslint-disable import/no-unresolved */ + +import renderPDF from '~/blob/pdf'; +import testPDF from '../../fixtures/blob/pdf/test.pdf'; + +describe('PDF renderer', () => { + let viewer; + let app; + + const checkLoaded = (done) => { + if (app.loading) { + setTimeout(() => { + checkLoaded(done); + }, 100); + } else { + done(); + } + }; + + preloadFixtures('static/pdf_viewer.html.raw'); + + beforeEach(() => { + loadFixtures('static/pdf_viewer.html.raw'); + viewer = document.getElementById('js-pdf-viewer'); + viewer.dataset.endpoint = testPDF; + }); + + it('shows loading icon', () => { + renderPDF(); + + expect( + document.querySelector('.loading'), + ).not.toBeNull(); + }); + + describe('successful response', () => { + beforeEach((done) => { + app = renderPDF(); + + checkLoaded(done); + }); + + it('does not show loading icon', () => { + expect( + document.querySelector('.loading'), + ).toBeNull(); + }); + + it('renders the PDF', () => { + expect( + document.querySelector('.pdf-viewer'), + ).not.toBeNull(); + }); + + it('renders the PDF page', () => { + expect( + document.querySelector('.pdf-page'), + ).not.toBeNull(); + }); + }); + + describe('error getting file', () => { + beforeEach((done) => { + viewer.dataset.endpoint = 'invalid/path/to/file.pdf'; + app = renderPDF(); + + checkLoaded(done); + }); + + it('does not show loading icon', () => { + expect( + document.querySelector('.loading'), + ).toBeNull(); + }); + + it('shows error message', () => { + expect( + document.querySelector('.md').textContent.trim(), + ).toBe('An error occured whilst loading the file. 
Please try again later.'); + }); + }); +}); diff --git a/spec/javascripts/blob/sketch/index_spec.js b/spec/javascripts/blob/sketch/index_spec.js new file mode 100644 index 00000000000..79f40559817 --- /dev/null +++ b/spec/javascripts/blob/sketch/index_spec.js @@ -0,0 +1,118 @@ +/* eslint-disable no-new, promise/catch-or-return */ +import JSZip from 'jszip'; +import SketchLoader from '~/blob/sketch'; + +describe('Sketch viewer', () => { + const generateZipFileArrayBuffer = (zipFile, resolve, done) => { + zipFile + .generateAsync({ type: 'arrayBuffer' }) + .then((content) => { + resolve(content); + + setTimeout(() => { + done(); + }, 100); + }); + }; + + preloadFixtures('static/sketch_viewer.html.raw'); + + beforeEach(() => { + loadFixtures('static/sketch_viewer.html.raw'); + }); + + describe('with error message', () => { + beforeEach((done) => { + spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(() => new Promise((resolve, reject) => { + reject(); + + setTimeout(() => { + done(); + }); + })); + + new SketchLoader(document.getElementById('js-sketch-viewer')); + }); + + it('renders error message', () => { + expect( + document.querySelector('#js-sketch-viewer p'), + ).not.toBeNull(); + + expect( + document.querySelector('#js-sketch-viewer p').textContent.trim(), + ).toContain('Cannot show preview.'); + }); + + it('removes the loading icon', () => { + expect( + document.querySelector('.js-loading-icon'), + ).toBeNull(); + }); + }); + + describe('success', () => { + beforeEach((done) => { + spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(() => new Promise((resolve) => { + const zipFile = new JSZip(); + zipFile.folder('previews') + .file('preview.png', 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAMAAAAoyzS7AAAAA1BMVEUAAACnej3aAAAAAXRSTlMAQObYZgAAAA1JREFUeNoBAgD9/wAAAAIAAVMrnDAAAAAASUVORK5CYII=', { + base64: true, + }); + + generateZipFileArrayBuffer(zipFile, resolve, done); + })); + + new SketchLoader(document.getElementById('js-sketch-viewer')); + }); + + it('does not render error message', () => { + expect( + document.querySelector('#js-sketch-viewer p'), + ).toBeNull(); + }); + + it('removes the loading icon', () => { + expect( + document.querySelector('.js-loading-icon'), + ).toBeNull(); + }); + + it('renders preview img', () => { + const img = document.querySelector('#js-sketch-viewer img'); + + expect(img).not.toBeNull(); + expect(img.classList.contains('img-responsive')).toBeTruthy(); + }); + + it('renders link to image', () => { + const img = document.querySelector('#js-sketch-viewer img'); + const link = document.querySelector('#js-sketch-viewer a'); + + expect(link.href).toBe(img.src); + expect(link.target).toBe('_blank'); + }); + }); + + describe('incorrect file', () => { + beforeEach((done) => { + spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(() => new Promise((resolve) => { + const zipFile = new JSZip(); + + generateZipFileArrayBuffer(zipFile, resolve, done); + })); + + new SketchLoader(document.getElementById('js-sketch-viewer')); + }); + + it('renders error message', () => { + expect( + document.querySelector('#js-sketch-viewer p'), + ).not.toBeNull(); + + expect( + document.querySelector('#js-sketch-viewer p').textContent.trim(), + ).toContain('Cannot show preview.'); + }); + }); +}); diff --git a/spec/javascripts/blob/target_branch_dropdown_spec.js b/spec/javascripts/blob/target_branch_dropdown_spec.js index 4fb79663c51..99c9537d2ec 100644 --- a/spec/javascripts/blob/target_branch_dropdown_spec.js +++
b/spec/javascripts/blob/target_branch_dropdown_spec.js @@ -1,7 +1,6 @@ -require('~/gl_dropdown'); -require('~/lib/utils/type_utility'); -require('~/blob/create_branch_dropdown'); -require('~/blob/target_branch_dropdown'); +import '~/gl_dropdown'; +import '~/blob/create_branch_dropdown'; +import '~/blob/target_branch_dropdown'; describe('TargetBranchDropdown', () => { const fixtureTemplate = 'static/target_branch_dropdown.html.raw'; @@ -63,7 +62,7 @@ describe('TargetBranchDropdown', () => { expect('change.branch').toHaveBeenTriggeredOn(dropdown.$dropdown); }); - describe('#dropdownData', () => { + describe('dropdownData', () => { it('cache the refs', () => { const refs = dropdown.cachedRefs; dropdown.cachedRefs = null; @@ -88,7 +87,7 @@ describe('TargetBranchDropdown', () => { }); }); - describe('#setNewBranch', () => { + describe('setNewBranch', () => { it('adds the new branch and select it', () => { const branchName = 'new_branch'; diff --git a/spec/javascripts/blob/viewer/index_spec.js b/spec/javascripts/blob/viewer/index_spec.js new file mode 100644 index 00000000000..af04e7c1e72 --- /dev/null +++ b/spec/javascripts/blob/viewer/index_spec.js @@ -0,0 +1,184 @@ +/* eslint-disable no-new */ +import BlobViewer from '~/blob/viewer/index'; + +describe('Blob viewer', () => { + let blob; + preloadFixtures('blob/show.html.raw'); + + beforeEach(() => { + loadFixtures('blob/show.html.raw'); + $('#modal-upload-blob').remove(); + + blob = new BlobViewer(); + + spyOn($, 'ajax').and.callFake(() => { + const d = $.Deferred(); + + d.resolve({ + html: '<div>testing</div>', + }); + + return d.promise(); + }); + }); + + afterEach(() => { + location.hash = ''; + }); + + it('loads source file after switching views', (done) => { + document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click(); + + setTimeout(() => { + expect($.ajax).toHaveBeenCalled(); + expect( + document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]') + .classList.contains('hidden'), + ).toBeFalsy(); + + done(); + }); + }); + + it('loads source file when line number is in hash', (done) => { + location.hash = '#L1'; + + new BlobViewer(); + + setTimeout(() => { + expect($.ajax).toHaveBeenCalled(); + expect( + document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]') + .classList.contains('hidden'), + ).toBeFalsy(); + + done(); + }); + }); + + it('doesnt reload file if already loaded', (done) => { + const asyncClick = () => new Promise((resolve) => { + document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click(); + + setTimeout(resolve); + }); + + asyncClick() + .then(() => { + expect($.ajax).toHaveBeenCalled(); + return asyncClick(); + }) + .then(() => { + expect($.ajax.calls.count()).toBe(1); + expect( + document.querySelector('.blob-viewer[data-type="simple"]').getAttribute('data-loaded'), + ).toBe('true'); + + done(); + }) + .catch(() => { + fail(); + done(); + }); + }); + + describe('copy blob button', () => { + let copyButton; + + beforeEach(() => { + copyButton = document.querySelector('.js-copy-blob-source-btn'); + }); + + it('disabled on load', () => { + expect( + copyButton.classList.contains('disabled'), + ).toBeTruthy(); + }); + + it('has tooltip when disabled', () => { + expect( + copyButton.getAttribute('data-original-title'), + ).toBe('Switch to the source to copy it to the clipboard'); + }); + + it('is blurred when clicked and disabled', () => { + spyOn(copyButton, 'blur'); + + copyButton.click(); + + expect(copyButton.blur).toHaveBeenCalled(); + }); + 
+ it('is not blurred when clicked and not disabled', () => { + spyOn(copyButton, 'blur'); + + copyButton.classList.remove('disabled'); + copyButton.click(); + + expect(copyButton.blur).not.toHaveBeenCalled(); + }); + + it('enables after switching to simple view', (done) => { + document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click(); + + setTimeout(() => { + expect($.ajax).toHaveBeenCalled(); + expect( + copyButton.classList.contains('disabled'), + ).toBeFalsy(); + + done(); + }); + }); + + it('updates tooltip after switching to simple view', (done) => { + document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click(); + + setTimeout(() => { + expect($.ajax).toHaveBeenCalled(); + + expect( + copyButton.getAttribute('data-original-title'), + ).toBe('Copy source to clipboard'); + + done(); + }); + }); + }); + + describe('switchToViewer', () => { + it('removes active class from old viewer button', () => { + blob.switchToViewer('simple'); + + expect( + document.querySelector('.js-blob-viewer-switch-btn.active[data-viewer="rich"]'), + ).toBeNull(); + }); + + it('adds active class to new viewer button', () => { + const simpleBtn = document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]'); + + spyOn(simpleBtn, 'blur'); + + blob.switchToViewer('simple'); + + expect( + simpleBtn.classList.contains('active'), + ).toBeTruthy(); + expect(simpleBtn.blur).toHaveBeenCalled(); + }); + + it('sends AJAX request when switching to simple view', () => { + blob.switchToViewer('simple'); + + expect($.ajax).toHaveBeenCalled(); + }); + + it('does not send AJAX request when switching to rich view', () => { + blob.switchToViewer('simple'); + blob.switchToViewer('rich'); + + expect($.ajax.calls.count()).toBe(1); + }); + }); +}); diff --git a/spec/javascripts/boards/board_card_spec.js b/spec/javascripts/boards/board_card_spec.js index de072e7e470..447b244c71f 100644 --- a/spec/javascripts/boards/board_card_spec.js +++ b/spec/javascripts/boards/board_card_spec.js @@ -1,18 +1,18 @@ /* global List */ -/* global ListUser */ +/* global ListAssignee */ /* global ListLabel */ /* global listObj */ /* global boardsMockInterceptor */ /* global BoardService */ import Vue from 'vue'; -import '~/boards/models/user'; +import '~/boards/models/assignee'; -require('~/boards/models/list'); -require('~/boards/models/label'); -require('~/boards/stores/boards_store'); -const boardCard = require('~/boards/components/board_card').default; -require('./mock_data'); +import '~/boards/models/list'; +import '~/boards/models/label'; +import '~/boards/stores/boards_store'; +import boardCard from '~/boards/components/board_card'; +import './mock_data'; describe('Issue card', () => { let vm; @@ -133,12 +133,12 @@ describe('Issue card', () => { }); it('does not set detail issue if img is clicked', (done) => { - vm.issue.assignee = new ListUser({ + vm.issue.assignees = [new ListAssignee({ id: 1, name: 'testing 123', username: 'test', avatar: 'test_image', - }); + })]; Vue.nextTick(() => { triggerEvent('mouseup', vm.$el.querySelector('img')); diff --git a/spec/javascripts/boards/board_list_spec.js b/spec/javascripts/boards/board_list_spec.js new file mode 100644 index 00000000000..a89be911667 --- /dev/null +++ b/spec/javascripts/boards/board_list_spec.js @@ -0,0 +1,202 @@ +/* global BoardService */ +/* global boardsMockInterceptor */ +/* global List */ +/* global listObj */ +/* global ListIssue */ +import Vue from 'vue'; +import _ from 'underscore'; +import Sortable from 'vendor/Sortable'; 
+import BoardList from '~/boards/components/board_list'; +import eventHub from '~/boards/eventhub'; +import '~/boards/mixins/sortable_default_options'; +import '~/boards/models/issue'; +import '~/boards/models/list'; +import '~/boards/stores/boards_store'; +import './mock_data'; + +window.Sortable = Sortable; + +describe('Board list component', () => { + let component; + + beforeEach((done) => { + const el = document.createElement('div'); + + document.body.appendChild(el); + Vue.http.interceptors.push(boardsMockInterceptor); + gl.boardService = new BoardService('/test/issue-boards/board', '', '1'); + gl.issueBoards.BoardsStore.create(); + gl.IssueBoardsApp = new Vue(); + + const BoardListComp = Vue.extend(BoardList); + const list = new List(listObj); + const issue = new ListIssue({ + title: 'Testing', + iid: 1, + confidential: false, + labels: [], + assignees: [], + }); + list.issuesSize = 1; + list.issues.push(issue); + + component = new BoardListComp({ + el, + propsData: { + disabled: false, + list, + issues: list.issues, + loading: false, + issueLinkBase: '/issues', + rootPath: '/', + }, + }).$mount(); + + Vue.nextTick(() => { + done(); + }); + }); + + afterEach(() => { + Vue.http.interceptors = _.without(Vue.http.interceptors, boardsMockInterceptor); + }); + + it('renders component', () => { + expect( + component.$el.classList.contains('board-list-component'), + ).toBe(true); + }); + + it('renders loading icon', (done) => { + component.loading = true; + + Vue.nextTick(() => { + expect( + component.$el.querySelector('.board-list-loading'), + ).not.toBeNull(); + + done(); + }); + }); + + it('renders issues', () => { + expect( + component.$el.querySelectorAll('.card').length, + ).toBe(1); + }); + + it('sets data attribute with issue id', () => { + expect( + component.$el.querySelector('.card').getAttribute('data-issue-id'), + ).toBe('1'); + }); + + it('shows new issue form', (done) => { + component.toggleForm(); + + Vue.nextTick(() => { + expect( + component.$el.querySelector('.board-new-issue-form'), + ).not.toBeNull(); + + expect( + component.$el.querySelector('.is-smaller'), + ).not.toBeNull(); + + done(); + }); + }); + + it('shows new issue form after eventhub event', (done) => { + eventHub.$emit(`hide-issue-form-${component.list.id}`); + + Vue.nextTick(() => { + expect( + component.$el.querySelector('.board-new-issue-form'), + ).not.toBeNull(); + + expect( + component.$el.querySelector('.is-smaller'), + ).not.toBeNull(); + + done(); + }); + }); + + it('does not show new issue form for closed list', (done) => { + component.list.type = 'closed'; + component.toggleForm(); + + Vue.nextTick(() => { + expect( + component.$el.querySelector('.board-new-issue-form'), + ).toBeNull(); + + done(); + }); + }); + + it('shows count list item', (done) => { + component.showCount = true; + + Vue.nextTick(() => { + expect( + component.$el.querySelector('.board-list-count'), + ).not.toBeNull(); + + expect( + component.$el.querySelector('.board-list-count').textContent.trim(), + ).toBe('Showing all issues'); + + done(); + }); + }); + + it('shows how many more issues to load', (done) => { + component.showCount = true; + component.list.issuesSize = 20; + + Vue.nextTick(() => { + expect( + component.$el.querySelector('.board-list-count').textContent.trim(), + ).toBe('Showing 1 of 20 issues'); + + done(); + }); + }); + + it('loads more issues after scrolling', (done) => { + spyOn(component.list, 'nextPage'); + component.$refs.list.style.height = '100px'; + component.$refs.list.style.overflow = 'scroll'; + + 
for (let i = 0; i < 19; i += 1) { + const issue = component.list.issues[0]; + issue.id += 1; + component.list.issues.push(issue); + } + + Vue.nextTick(() => { + component.$refs.list.scrollTop = 20000; + + setTimeout(() => { + expect(component.list.nextPage).toHaveBeenCalled(); + + done(); + }); + }); + }); + + it('shows loading more spinner', (done) => { + component.showCount = true; + component.list.loadingMore = true; + + Vue.nextTick(() => { + expect( + component.$el.querySelector('.board-list-count .fa-spinner'), + ).not.toBeNull(); + + done(); + }); + }); +}); diff --git a/spec/javascripts/boards/board_new_issue_spec.js b/spec/javascripts/boards/board_new_issue_spec.js index 4999933c0c1..45d12e252c4 100644 --- a/spec/javascripts/boards/board_new_issue_spec.js +++ b/spec/javascripts/boards/board_new_issue_spec.js @@ -6,8 +6,8 @@ import Vue from 'vue'; import boardNewIssue from '~/boards/components/board_new_issue'; -require('~/boards/models/list'); -require('./mock_data'); +import '~/boards/models/list'; +import './mock_data'; describe('Issue boards new issue form', () => { let vm; diff --git a/spec/javascripts/boards/boards_store_spec.js b/spec/javascripts/boards/boards_store_spec.js index b55ff2f473a..5ea160b7790 100644 --- a/spec/javascripts/boards/boards_store_spec.js +++ b/spec/javascripts/boards/boards_store_spec.js @@ -8,14 +8,14 @@ import Vue from 'vue'; import Cookies from 'js-cookie'; -require('~/lib/utils/url_utility'); -require('~/boards/models/issue'); -require('~/boards/models/label'); -require('~/boards/models/list'); -require('~/boards/models/user'); -require('~/boards/services/board_service'); -require('~/boards/stores/boards_store'); -require('./mock_data'); +import '~/lib/utils/url_utility'; +import '~/boards/models/issue'; +import '~/boards/models/label'; +import '~/boards/models/list'; +import '~/boards/models/assignee'; +import '~/boards/services/board_service'; +import '~/boards/stores/boards_store'; +import './mock_data'; describe('Store', () => { beforeEach(() => { @@ -212,7 +212,8 @@ describe('Store', () => { title: 'Testing', iid: 2, confidential: false, - labels: [] + labels: [], + assignees: [], }); const list = gl.issueBoards.BoardsStore.addList(listObj); diff --git a/spec/javascripts/boards/issue_card_spec.js b/spec/javascripts/boards/issue_card_spec.js index 1a5e9e9fd07..bd9b4fbfdd3 100644 --- a/spec/javascripts/boards/issue_card_spec.js +++ b/spec/javascripts/boards/issue_card_spec.js @@ -1,20 +1,20 @@ -/* global ListUser */ +/* global ListAssignee */ /* global ListLabel */ /* global listObj */ /* global ListIssue */ import Vue from 'vue'; -require('~/boards/models/issue'); -require('~/boards/models/label'); -require('~/boards/models/list'); -require('~/boards/models/user'); -require('~/boards/stores/boards_store'); -require('~/boards/components/issue_card_inner'); -require('./mock_data'); +import '~/boards/models/issue'; +import '~/boards/models/label'; +import '~/boards/models/list'; +import '~/boards/models/assignee'; +import '~/boards/stores/boards_store'; +import '~/boards/components/issue_card_inner'; +import './mock_data'; describe('Issue card component', () => { - const user = new ListUser({ + const user = new ListAssignee({ id: 1, name: 'testing 123', username: 'test', @@ -40,6 +40,7 @@ describe('Issue card component', () => { iid: 1, confidential: false, labels: [list.label], + assignees: [], }); component = new Vue({ @@ -92,12 +93,12 @@ describe('Issue card component', () => { it('renders confidential icon', (done) => { 
component.issue.confidential = true; - setTimeout(() => { + Vue.nextTick(() => { expect( component.$el.querySelector('.confidential-icon'), ).not.toBeNull(); done(); - }, 0); + }); }); it('renders issue ID with #', () => { @@ -109,34 +110,32 @@ describe('Issue card component', () => { describe('assignee', () => { it('does not render assignee', () => { expect( - component.$el.querySelector('.card-assignee'), + component.$el.querySelector('.card-assignee .avatar'), ).toBeNull(); }); describe('exists', () => { beforeEach((done) => { - component.issue.assignee = user; + component.issue.assignees = [user]; - setTimeout(() => { - done(); - }, 0); + Vue.nextTick(() => done()); }); it('renders assignee', () => { expect( - component.$el.querySelector('.card-assignee'), + component.$el.querySelector('.card-assignee .avatar'), ).not.toBeNull(); }); it('sets title', () => { expect( - component.$el.querySelector('.card-assignee').getAttribute('title'), + component.$el.querySelector('.card-assignee img').getAttribute('data-original-title'), ).toContain(`Assigned to ${user.name}`); }); it('sets users path', () => { expect( - component.$el.querySelector('.card-assignee').getAttribute('href'), + component.$el.querySelector('.card-assignee a').getAttribute('href'), ).toBe('/test'); }); @@ -146,6 +145,96 @@ describe('Issue card component', () => { ).not.toBeNull(); }); }); + + describe('assignee default avatar', () => { + beforeEach((done) => { + component.issue.assignees = [new ListAssignee({ + id: 1, + name: 'testing 123', + username: 'test', + }, 'default_avatar')]; + + Vue.nextTick(done); + }); + + it('displays defaults avatar if users avatar is null', () => { + expect( + component.$el.querySelector('.card-assignee img'), + ).not.toBeNull(); + expect( + component.$el.querySelector('.card-assignee img').getAttribute('src'), + ).toBe('default_avatar'); + }); + }); + }); + + describe('multiple assignees', () => { + beforeEach((done) => { + component.issue.assignees = [ + user, + new ListAssignee({ + id: 2, + name: 'user2', + username: 'user2', + avatar: 'test_image', + }), + new ListAssignee({ + id: 3, + name: 'user3', + username: 'user3', + avatar: 'test_image', + }), + new ListAssignee({ + id: 4, + name: 'user4', + username: 'user4', + avatar: 'test_image', + })]; + + Vue.nextTick(() => done()); + }); + + it('renders all four assignees', () => { + expect(component.$el.querySelectorAll('.card-assignee .avatar').length).toEqual(4); + }); + + describe('more than four assignees', () => { + beforeEach((done) => { + component.issue.assignees.push(new ListAssignee({ + id: 5, + name: 'user5', + username: 'user5', + avatar: 'test_image', + })); + + Vue.nextTick(() => done()); + }); + + it('renders more avatar counter', () => { + expect(component.$el.querySelector('.card-assignee .avatar-counter').innerText).toEqual('+2'); + }); + + it('renders three assignees', () => { + expect(component.$el.querySelectorAll('.card-assignee .avatar').length).toEqual(3); + }); + + it('renders 99+ avatar counter', (done) => { + for (let i = 5; i < 104; i += 1) { + const u = new ListAssignee({ + id: i, + name: 'name', + username: 'username', + avatar: 'test_image', + }); + component.issue.assignees.push(u); + } + + Vue.nextTick(() => { + expect(component.$el.querySelector('.card-assignee .avatar-counter').innerText).toEqual('99+'); + done(); + }); + }); + }); }); describe('labels', () => { @@ -159,9 +248,7 @@ describe('Issue card component', () => { beforeEach((done) => { component.issue.addLabel(label1); - setTimeout(() => { - 
done(); - }, 0); + Vue.nextTick(() => done()); }); it('does not render list label', () => { diff --git a/spec/javascripts/boards/issue_spec.js b/spec/javascripts/boards/issue_spec.js index c96dfe94a4a..cd1497bc5e6 100644 --- a/spec/javascripts/boards/issue_spec.js +++ b/spec/javascripts/boards/issue_spec.js @@ -2,14 +2,15 @@ /* global BoardService */ /* global ListIssue */ -require('~/lib/utils/url_utility'); -require('~/boards/models/issue'); -require('~/boards/models/label'); -require('~/boards/models/list'); -require('~/boards/models/user'); -require('~/boards/services/board_service'); -require('~/boards/stores/boards_store'); -require('./mock_data'); +import Vue from 'vue'; +import '~/lib/utils/url_utility'; +import '~/boards/models/issue'; +import '~/boards/models/label'; +import '~/boards/models/list'; +import '~/boards/models/assignee'; +import '~/boards/services/board_service'; +import '~/boards/stores/boards_store'; +import './mock_data'; describe('Issue model', () => { let issue; @@ -27,7 +28,13 @@ describe('Issue model', () => { title: 'test', color: 'red', description: 'testing' - }] + }], + assignees: [{ + id: 1, + name: 'name', + username: 'username', + avatar_url: 'http://avatar_url', + }], }); }); @@ -80,6 +87,33 @@ describe('Issue model', () => { expect(issue.labels.length).toBe(0); }); + it('adds assignee', () => { + issue.addAssignee({ + id: 2, + name: 'Bruce Wayne', + username: 'batman', + avatar_url: 'http://batman', + }); + + expect(issue.assignees.length).toBe(2); + }); + + it('finds assignee', () => { + const assignee = issue.findAssignee(issue.assignees[0]); + expect(assignee).toBeDefined(); + }); + + it('removes assignee', () => { + const assignee = issue.findAssignee(issue.assignees[0]); + issue.removeAssignee(assignee); + expect(issue.assignees.length).toBe(0); + }); + + it('removes all assignees', () => { + issue.removeAllAssignees(); + expect(issue.assignees.length).toBe(0); + }); + it('sets position to infinity if no position is stored', () => { expect(issue.position).toBe(Infinity); }); @@ -90,9 +124,31 @@ describe('Issue model', () => { iid: 1, confidential: false, relative_position: 1, - labels: [] + labels: [], + assignees: [], }); expect(relativePositionIssue.position).toBe(1); }); + + describe('update', () => { + it('passes assignee ids when there are assignees', (done) => { + spyOn(Vue.http, 'patch').and.callFake((url, data) => { + expect(data.issue.assignee_ids).toEqual([1]); + done(); + }); + + issue.update('url'); + }); + + it('passes assignee ids of [0] when there are no assignees', (done) => { + spyOn(Vue.http, 'patch').and.callFake((url, data) => { + expect(data.issue.assignee_ids).toEqual([0]); + done(); + }); + + issue.removeAllAssignees(); + issue.update('url'); + }); + }); }); diff --git a/spec/javascripts/boards/list_spec.js b/spec/javascripts/boards/list_spec.js index a9d4c6ef76f..8e3d9fd77a0 100644 --- a/spec/javascripts/boards/list_spec.js +++ b/spec/javascripts/boards/list_spec.js @@ -8,14 +8,14 @@ import Vue from 'vue'; -require('~/lib/utils/url_utility'); -require('~/boards/models/issue'); -require('~/boards/models/label'); -require('~/boards/models/list'); -require('~/boards/models/user'); -require('~/boards/services/board_service'); -require('~/boards/stores/boards_store'); -require('./mock_data'); +import '~/lib/utils/url_utility'; +import '~/boards/models/issue'; +import '~/boards/models/label'; +import '~/boards/models/list'; +import '~/boards/models/assignee'; +import '~/boards/services/board_service'; +import 
'~/boards/stores/boards_store'; +import './mock_data'; describe('List model', () => { let list; @@ -94,7 +94,8 @@ describe('List model', () => { title: 'Testing', iid: _.random(10000), confidential: false, - labels: [list.label, listDup.label] + labels: [list.label, listDup.label], + assignees: [], }); list.issues.push(issue); @@ -107,4 +108,46 @@ describe('List model', () => { expect(gl.boardService.moveIssue) .toHaveBeenCalledWith(issue.id, list.id, listDup.id, undefined, undefined); }); + + describe('page number', () => { + beforeEach(() => { + spyOn(list, 'getIssues'); + }); + + it('increase page number if current issue count is more than the page size', () => { + for (let i = 0; i < 30; i += 1) { + list.issues.push(new ListIssue({ + title: 'Testing', + iid: _.random(10000) + i, + confidential: false, + labels: [list.label], + assignees: [], + })); + } + list.issuesSize = 50; + + expect(list.issues.length).toBe(30); + + list.nextPage(); + + expect(list.page).toBe(2); + expect(list.getIssues).toHaveBeenCalled(); + }); + + it('does not increase page number if issue count is less than the page size', () => { + list.issues.push(new ListIssue({ + title: 'Testing', + iid: _.random(10000), + confidential: false, + labels: [list.label], + assignees: [], + })); + list.issuesSize = 2; + + list.nextPage(); + + expect(list.page).toBe(1); + expect(list.getIssues).toHaveBeenCalled(); + }); + }); }); diff --git a/spec/javascripts/boards/mock_data.js b/spec/javascripts/boards/mock_data.js index a4fa694eebe..a64c3964ee3 100644 --- a/spec/javascripts/boards/mock_data.js +++ b/spec/javascripts/boards/mock_data.js @@ -33,7 +33,8 @@ const BoardsMockData = { title: 'Testing', iid: 1, confidential: false, - labels: [] + labels: [], + assignees: [], }], size: 1 } diff --git a/spec/javascripts/boards/modal_store_spec.js b/spec/javascripts/boards/modal_store_spec.js index 80db816aff8..32e6d04df9f 100644 --- a/spec/javascripts/boards/modal_store_spec.js +++ b/spec/javascripts/boards/modal_store_spec.js @@ -1,10 +1,10 @@ /* global ListIssue */ -require('~/boards/models/issue'); -require('~/boards/models/label'); -require('~/boards/models/list'); -require('~/boards/models/user'); -require('~/boards/stores/modal_store'); +import '~/boards/models/issue'; +import '~/boards/models/label'; +import '~/boards/models/list'; +import '~/boards/models/assignee'; +import '~/boards/stores/modal_store'; describe('Modal store', () => { let issue; @@ -21,12 +21,14 @@ describe('Modal store', () => { iid: 1, confidential: false, labels: [], + assignees: [], }); issue2 = new ListIssue({ title: 'Testing', iid: 2, confidential: false, labels: [], + assignees: [], }); Store.store.issues.push(issue); Store.store.issues.push(issue2); diff --git a/spec/javascripts/bootstrap_linked_tabs_spec.js b/spec/javascripts/bootstrap_linked_tabs_spec.js index fa9f95e16cd..a27dc48b3fd 100644 --- a/spec/javascripts/bootstrap_linked_tabs_spec.js +++ b/spec/javascripts/bootstrap_linked_tabs_spec.js @@ -1,4 +1,4 @@ -require('~/lib/utils/bootstrap_linked_tabs'); +import LinkedTabs from '~/lib/utils/bootstrap_linked_tabs'; (() => { // TODO: remove this hack! 
@@ -25,7 +25,7 @@ require('~/lib/utils/bootstrap_linked_tabs'); }); it('should activate the tab correspondent to the given action', () => { - const linkedTabs = new window.gl.LinkedTabs({ // eslint-disable-line + const linkedTabs = new LinkedTabs({ // eslint-disable-line action: 'tab1', defaultAction: 'tab1', parentEl: '.linked-tabs', @@ -35,7 +35,7 @@ require('~/lib/utils/bootstrap_linked_tabs'); }); it('should active the default tab action when the action is show', () => { - const linkedTabs = new window.gl.LinkedTabs({ // eslint-disable-line + const linkedTabs = new LinkedTabs({ // eslint-disable-line action: 'show', defaultAction: 'tab1', parentEl: '.linked-tabs', @@ -49,7 +49,7 @@ require('~/lib/utils/bootstrap_linked_tabs'); it('should change the url according to the clicked tab', () => { const historySpy = !phantomjs && spyOn(history, 'replaceState').and.callFake(() => {}); - const linkedTabs = new window.gl.LinkedTabs({ // eslint-disable-line + const linkedTabs = new LinkedTabs({ action: 'show', defaultAction: 'tab1', parentEl: '.linked-tabs', diff --git a/spec/javascripts/build_spec.js b/spec/javascripts/build_spec.js index 549c7af8ea8..8ec96bdb583 100644 --- a/spec/javascripts/build_spec.js +++ b/spec/javascripts/build_spec.js @@ -1,11 +1,11 @@ /* eslint-disable no-new */ /* global Build */ - -require('~/lib/utils/datetime_utility'); -require('~/lib/utils/url_utility'); -require('~/build'); -require('~/breakpoints'); -require('vendor/jquery.nicescroll'); +import { bytesToKiB } from '~/lib/utils/number_utils'; +import '~/lib/utils/datetime_utility'; +import '~/lib/utils/url_utility'; +import '~/build'; +import '~/breakpoints'; +import 'vendor/jquery.nicescroll'; describe('Build', () => { const BUILD_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/builds/1`; @@ -64,54 +64,33 @@ describe('Build', () => { }); }); - describe('initial build trace', () => { - beforeEach(() => { - new Build(); - }); - - it('displays the initial build trace', () => { - expect($.ajax.calls.count()).toBe(1); - const [{ url, dataType, success, context }] = $.ajax.calls.argsFor(0); - expect(url).toBe(`${BUILD_URL}.json`); - expect(dataType).toBe('json'); - expect(success).toEqual(jasmine.any(Function)); - - success.call(context, { trace_html: '<span>Example</span>', status: 'running' }); - - expect($('#build-trace .js-build-output').text()).toMatch(/Example/); - }); - - it('removes the spinner', () => { - const [{ success, context }] = $.ajax.calls.argsFor(0); - success.call(context, { trace_html: '<span>Example</span>', status: 'success' }); - - expect($('.js-build-refresh').length).toBe(0); - }); - }); - describe('running build', () => { beforeEach(function () { - $('.js-build-options').data('buildStatus', 'running'); this.build = new Build(); - spyOn(this.build, 'location').and.returnValue(BUILD_URL); }); it('updates the build trace on an interval', function () { + spyOn(gl.utils, 'visitUrl'); + jasmine.clock().tick(4001); - expect($.ajax.calls.count()).toBe(2); - let [{ url, dataType, success, context }] = $.ajax.calls.argsFor(1); - expect(url).toBe( - `${BUILD_URL}/trace.json?state=`, - ); - expect(dataType).toBe('json'); - expect(success).toEqual(jasmine.any(Function)); + expect($.ajax.calls.count()).toBe(1); + + // We have to do it this way to prevent Webpack to fail to compile + // when destructuring assignments and reusing + // the same variables names inside the same scope + let args = $.ajax.calls.argsFor(0)[0]; - success.call(context, { + expect(args.url).toBe(`${BUILD_URL}/trace.json`); + 
expect(args.dataType).toBe('json'); + expect(args.success).toEqual(jasmine.any(Function)); + + args.success.call($, { html: '<span>Update<span>', status: 'running', state: 'newstate', append: true, + complete: false, }); expect($('#build-trace .js-build-output').text()).toMatch(/Update/); @@ -120,16 +99,19 @@ describe('Build', () => { jasmine.clock().tick(4001); expect($.ajax.calls.count()).toBe(3); - [{ url, dataType, success, context }] = $.ajax.calls.argsFor(2); - expect(url).toBe(`${BUILD_URL}/trace.json?state=newstate`); - expect(dataType).toBe('json'); - expect(success).toEqual(jasmine.any(Function)); - success.call(context, { + args = $.ajax.calls.argsFor(2)[0]; + expect(args.url).toBe(`${BUILD_URL}/trace.json`); + expect(args.dataType).toBe('json'); + expect(args.data.state).toBe('newstate'); + expect(args.success).toEqual(jasmine.any(Function)); + + args.success.call($, { html: '<span>More</span>', status: 'running', state: 'finalstate', append: true, + complete: true, }); expect($('#build-trace .js-build-output').text()).toMatch(/UpdateMore/); @@ -137,19 +119,22 @@ describe('Build', () => { }); it('replaces the entire build trace', () => { + spyOn(gl.utils, 'visitUrl'); + jasmine.clock().tick(4001); - let [{ success, context }] = $.ajax.calls.argsFor(1); - success.call(context, { + let args = $.ajax.calls.argsFor(0)[0]; + args.success.call($, { html: '<span>Update</span>', status: 'running', - append: true, + append: false, + complete: false, }); expect($('#build-trace .js-build-output').text()).toMatch(/Update/); jasmine.clock().tick(4001); - [{ success, context }] = $.ajax.calls.argsFor(2); - success.call(context, { + args = $.ajax.calls.argsFor(2)[0]; + args.success.call($, { html: '<span>Different</span>', status: 'running', append: false, @@ -163,15 +148,117 @@ describe('Build', () => { spyOn(gl.utils, 'visitUrl'); jasmine.clock().tick(4001); - const [{ success, context }] = $.ajax.calls.argsFor(1); - success.call(context, { + const [{ success }] = $.ajax.calls.argsFor(0); + success.call($, { html: '<span>Final</span>', status: 'passed', append: true, + complete: true, }); expect(gl.utils.visitUrl).toHaveBeenCalledWith(BUILD_URL); }); + + describe('truncated information', () => { + describe('when size is less than total', () => { + it('shows information about truncated log', () => { + jasmine.clock().tick(4001); + const [{ success }] = $.ajax.calls.argsFor(0); + + success.call($, { + html: '<span>Update</span>', + status: 'success', + append: false, + size: 50, + total: 100, + }); + + expect(document.querySelector('.js-truncated-info').classList).not.toContain('hidden'); + }); + + it('shows the size in KiB', () => { + jasmine.clock().tick(4001); + const [{ success }] = $.ajax.calls.argsFor(0); + const size = 50; + + success.call($, { + html: '<span>Update</span>', + status: 'success', + append: false, + size, + total: 100, + }); + + expect( + document.querySelector('.js-truncated-info-size').textContent.trim(), + ).toEqual(`${bytesToKiB(size)}`); + }); + + it('shows incremented size', () => { + jasmine.clock().tick(4001); + let args = $.ajax.calls.argsFor(0)[0]; + args.success.call($, { + html: '<span>Update</span>', + status: 'success', + append: false, + size: 50, + total: 100, + }); + + expect( + document.querySelector('.js-truncated-info-size').textContent.trim(), + ).toEqual(`${bytesToKiB(50)}`); + + jasmine.clock().tick(4001); + args = $.ajax.calls.argsFor(2)[0]; + args.success.call($, { + html: '<span>Update</span>', + status: 'success', + append: true, + size: 10, + 
total: 100, + }); + + expect( + document.querySelector('.js-truncated-info-size').textContent.trim(), + ).toEqual(`${bytesToKiB(60)}`); + }); + + it('renders the raw link', () => { + jasmine.clock().tick(4001); + const [{ success }] = $.ajax.calls.argsFor(0); + + success.call($, { + html: '<span>Update</span>', + status: 'success', + append: false, + size: 50, + total: 100, + }); + + expect( + document.querySelector('.js-raw-link').textContent.trim(), + ).toContain('Complete Raw'); + }); + }); + + describe('when size is equal than total', () => { + it('does not show the trunctated information', () => { + jasmine.clock().tick(4001); + const [{ success }] = $.ajax.calls.argsFor(0); + + success.call($, { + html: '<span>Update</span>', + status: 'success', + append: false, + size: 100, + total: 100, + }); + + expect(document.querySelector('.js-truncated-info').classList).toContain('hidden'); + }); + }); + }); }); }); }); diff --git a/spec/javascripts/comment_type_toggle_spec.js b/spec/javascripts/comment_type_toggle_spec.js new file mode 100644 index 00000000000..dfd0810d52e --- /dev/null +++ b/spec/javascripts/comment_type_toggle_spec.js @@ -0,0 +1,157 @@ +import CommentTypeToggle from '~/comment_type_toggle'; +import * as dropLabSrc from '~/droplab/drop_lab'; +import InputSetter from '~/droplab/plugins/input_setter'; + +describe('CommentTypeToggle', function () { + describe('class constructor', function () { + beforeEach(function () { + this.dropdownTrigger = {}; + this.dropdownList = {}; + this.noteTypeInput = {}; + this.submitButton = {}; + this.closeButton = {}; + + this.commentTypeToggle = new CommentTypeToggle({ + dropdownTrigger: this.dropdownTrigger, + dropdownList: this.dropdownList, + noteTypeInput: this.noteTypeInput, + submitButton: this.submitButton, + closeButton: this.closeButton, + }); + }); + + it('should set .dropdownTrigger', function () { + expect(this.commentTypeToggle.dropdownTrigger).toBe(this.dropdownTrigger); + }); + + it('should set .dropdownList', function () { + expect(this.commentTypeToggle.dropdownList).toBe(this.dropdownList); + }); + + it('should set .noteTypeInput', function () { + expect(this.commentTypeToggle.noteTypeInput).toBe(this.noteTypeInput); + }); + + it('should set .submitButton', function () { + expect(this.commentTypeToggle.submitButton).toBe(this.submitButton); + }); + + it('should set .closeButton', function () { + expect(this.commentTypeToggle.closeButton).toBe(this.closeButton); + }); + + it('should set .reopenButton', function () { + expect(this.commentTypeToggle.reopenButton).toBe(this.reopenButton); + }); + }); + + describe('initDroplab', function () { + beforeEach(function () { + this.commentTypeToggle = { + dropdownTrigger: {}, + dropdownList: {}, + noteTypeInput: {}, + submitButton: {}, + closeButton: {}, + setConfig: () => {}, + }; + this.config = {}; + + this.droplab = jasmine.createSpyObj('droplab', ['init']); + + spyOn(dropLabSrc, 'default').and.returnValue(this.droplab); + spyOn(this.commentTypeToggle, 'setConfig').and.returnValue(this.config); + + CommentTypeToggle.prototype.initDroplab.call(this.commentTypeToggle); + }); + + it('should instantiate a DropLab instance', function () { + expect(dropLabSrc.default).toHaveBeenCalled(); + }); + + it('should set .droplab', function () { + expect(this.commentTypeToggle.droplab).toBe(this.droplab); + }); + + it('should call .setConfig', function () { + expect(this.commentTypeToggle.setConfig).toHaveBeenCalled(); + }); + + it('should call DropLab.prototype.init', function () { + 
expect(this.droplab.init).toHaveBeenCalledWith( + this.commentTypeToggle.dropdownTrigger, + this.commentTypeToggle.dropdownList, + [InputSetter], + this.config, + ); + }); + }); + + describe('setConfig', function () { + describe('if no .closeButton is provided', function () { + beforeEach(function () { + this.commentTypeToggle = { + dropdownTrigger: {}, + dropdownList: {}, + noteTypeInput: {}, + submitButton: {}, + reopenButton: {}, + }; + + this.setConfig = CommentTypeToggle.prototype.setConfig.call(this.commentTypeToggle); + }); + + it('should not add .closeButton related InputSetter config', function () { + expect(this.setConfig).toEqual({ + InputSetter: [{ + input: this.commentTypeToggle.noteTypeInput, + valueAttribute: 'data-value', + }, { + input: this.commentTypeToggle.submitButton, + valueAttribute: 'data-submit-text', + }, { + input: this.commentTypeToggle.reopenButton, + valueAttribute: 'data-reopen-text', + }, { + input: this.commentTypeToggle.reopenButton, + valueAttribute: 'data-reopen-text', + inputAttribute: 'data-alternative-text', + }], + }); + }); + }); + + describe('if no .reopenButton is provided', function () { + beforeEach(function () { + this.commentTypeToggle = { + dropdownTrigger: {}, + dropdownList: {}, + noteTypeInput: {}, + submitButton: {}, + closeButton: {}, + }; + + this.setConfig = CommentTypeToggle.prototype.setConfig.call(this.commentTypeToggle); + }); + + it('should not add .reopenButton related InputSetter config', function () { + expect(this.setConfig).toEqual({ + InputSetter: [{ + input: this.commentTypeToggle.noteTypeInput, + valueAttribute: 'data-value', + }, { + input: this.commentTypeToggle.submitButton, + valueAttribute: 'data-submit-text', + }, { + input: this.commentTypeToggle.closeButton, + valueAttribute: 'data-close-text', + }, { + input: this.commentTypeToggle.closeButton, + valueAttribute: 'data-close-text', + inputAttribute: 'data-alternative-text', + }], + }); + }); + }); + }); +}); diff --git a/spec/javascripts/commit/pipelines/mock_data.js b/spec/javascripts/commit/pipelines/mock_data.js deleted file mode 100644 index 82b00b4c1ec..00000000000 --- a/spec/javascripts/commit/pipelines/mock_data.js +++ /dev/null @@ -1,89 +0,0 @@ -export default { - id: 73, - user: { - name: 'Administrator', - username: 'root', - id: 1, - state: 'active', - avatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', - web_url: 'http://localhost:3000/root', - }, - path: '/root/review-app/pipelines/73', - details: { - status: { - icon: 'icon_status_failed', - text: 'failed', - label: 'failed', - group: 'failed', - has_details: true, - details_path: '/root/review-app/pipelines/73', - }, - duration: null, - finished_at: '2017-01-25T00:00:17.130Z', - stages: [{ - name: 'build', - title: 'build: failed', - status: { - icon: 'icon_status_failed', - text: 'failed', - label: 'failed', - group: 'failed', - has_details: true, - details_path: '/root/review-app/pipelines/73#build', - }, - path: '/root/review-app/pipelines/73#build', - dropdown_path: '/root/review-app/pipelines/73/stage.json?stage=build', - }], - artifacts: [], - manual_actions: [ - { - name: 'stop_review', - path: '/root/review-app/builds/1463/play', - }, - { - name: 'name', - path: '/root/review-app/builds/1490/play', - }, - ], - }, - flags: { - latest: true, - triggered: false, - stuck: false, - yaml_errors: false, - retryable: true, - cancelable: false, - }, - ref: - { - name: 'master', - path: '/root/review-app/tree/master', - tag: false, - branch: true, - }, - 
commit: { - id: 'fbd79f04fa98717641deaaeb092a4d417237c2e4', - short_id: 'fbd79f04', - title: 'Update .gitlab-ci.yml', - author_name: 'Administrator', - author_email: 'admin@example.com', - created_at: '2017-01-16T12:13:57.000-05:00', - committer_name: 'Administrator', - committer_email: 'admin@example.com', - message: 'Update .gitlab-ci.yml', - author: { - name: 'Administrator', - username: 'root', - id: 1, - state: 'active', - avatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', - web_url: 'http://localhost:3000/root', - }, - author_gravatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', - commit_url: 'http://localhost:3000/root/review-app/commit/fbd79f04fa98717641deaaeb092a4d417237c2e4', - commit_path: '/root/review-app/commit/fbd79f04fa98717641deaaeb092a4d417237c2e4', - }, - retry_path: '/root/review-app/pipelines/73/retry', - created_at: '2017-01-16T17:13:59.800Z', - updated_at: '2017-01-25T00:00:17.132Z', -}; diff --git a/spec/javascripts/commit/pipelines/pipelines_spec.js b/spec/javascripts/commit/pipelines/pipelines_spec.js index 8cac3cad232..398c593eec2 100644 --- a/spec/javascripts/commit/pipelines/pipelines_spec.js +++ b/spec/javascripts/commit/pipelines/pipelines_spec.js @@ -1,12 +1,17 @@ import Vue from 'vue'; import PipelinesTable from '~/commit/pipelines/pipelines_table'; -import pipeline from './mock_data'; describe('Pipelines table in Commits and Merge requests', () => { + const jsonFixtureName = 'pipelines/pipelines.json'; + let pipeline; + preloadFixtures('static/pipelines_table.html.raw'); + preloadFixtures(jsonFixtureName); beforeEach(() => { loadFixtures('static/pipelines_table.html.raw'); + const pipelines = getJSONFixture(jsonFixtureName).pipelines; + pipeline = pipelines.find(p => p.id === 1); }); describe('successful request', () => { @@ -36,6 +41,7 @@ describe('Pipelines table in Commits and Merge requests', () => { setTimeout(() => { expect(this.component.$el.querySelector('.empty-state')).toBeDefined(); expect(this.component.$el.querySelector('.realtime-loading')).toBe(null); + expect(this.component.$el.querySelector('.js-pipelines-error-state')).toBe(null); done(); }, 1); }); @@ -67,6 +73,8 @@ describe('Pipelines table in Commits and Merge requests', () => { setTimeout(() => { expect(this.component.$el.querySelectorAll('table > tbody > tr').length).toEqual(1); expect(this.component.$el.querySelector('.realtime-loading')).toBe(null); + expect(this.component.$el.querySelector('.empty-state')).toBe(null); + expect(this.component.$el.querySelector('.js-pipelines-error-state')).toBe(null); done(); }, 0); }); @@ -95,10 +103,12 @@ describe('Pipelines table in Commits and Merge requests', () => { this.component.$destroy(); }); - it('should render empty state', function (done) { + it('should render error state', function (done) { setTimeout(() => { expect(this.component.$el.querySelector('.js-pipelines-error-state')).toBeDefined(); expect(this.component.$el.querySelector('.realtime-loading')).toBe(null); + expect(this.component.$el.querySelector('.js-empty-state')).toBe(null); + expect(this.component.$el.querySelector('table')).toBe(null); done(); }, 0); }); diff --git a/spec/javascripts/commits_spec.js b/spec/javascripts/commits_spec.js index 05260760c43..187db7485a5 100644 --- a/spec/javascripts/commits_spec.js +++ b/spec/javascripts/commits_spec.js @@ -1,8 +1,8 @@ /* global CommitsList */ -require('vendor/jquery.endless-scroll'); -require('~/pager'); -require('~/commits'); +import 
'vendor/jquery.endless-scroll'; +import '~/pager'; +import '~/commits'; (() => { // TODO: remove this hack! diff --git a/spec/javascripts/cycle_analytics/limit_warning_component_spec.js b/spec/javascripts/cycle_analytics/limit_warning_component_spec.js index 50000c5a5f5..2fb9eb0ca85 100644 --- a/spec/javascripts/cycle_analytics/limit_warning_component_spec.js +++ b/spec/javascripts/cycle_analytics/limit_warning_component_spec.js @@ -1,6 +1,9 @@ import Vue from 'vue'; +import Translate from '~/vue_shared/translate'; import limitWarningComp from '~/cycle_analytics/components/limit_warning_component'; +Vue.use(Translate); + describe('Limit warning component', () => { let component; let LimitWarningComponent; diff --git a/spec/javascripts/datetime_utility_spec.js b/spec/javascripts/datetime_utility_spec.js index d5eec10be42..e347c980c78 100644 --- a/spec/javascripts/datetime_utility_spec.js +++ b/spec/javascripts/datetime_utility_spec.js @@ -1,4 +1,4 @@ -require('~/lib/utils/datetime_utility'); +import '~/lib/utils/datetime_utility'; (() => { describe('Date time utils', () => { diff --git a/spec/javascripts/deploy_keys/components/action_btn_spec.js b/spec/javascripts/deploy_keys/components/action_btn_spec.js new file mode 100644 index 00000000000..5b93fbc5575 --- /dev/null +++ b/spec/javascripts/deploy_keys/components/action_btn_spec.js @@ -0,0 +1,70 @@ +import Vue from 'vue'; +import eventHub from '~/deploy_keys/eventhub'; +import actionBtn from '~/deploy_keys/components/action_btn.vue'; + +describe('Deploy keys action btn', () => { + const data = getJSONFixture('deploy_keys/keys.json'); + const deployKey = data.enabled_keys[0]; + let vm; + + beforeEach((done) => { + const ActionBtnComponent = Vue.extend(actionBtn); + + vm = new ActionBtnComponent({ + propsData: { + deployKey, + type: 'enable', + }, + }).$mount(); + + setTimeout(done); + }); + + it('renders the type as uppercase', () => { + expect( + vm.$el.textContent.trim(), + ).toBe('Enable'); + }); + + it('sends eventHub event with btn type', (done) => { + spyOn(eventHub, '$emit'); + + vm.$el.click(); + + setTimeout(() => { + expect( + eventHub.$emit, + ).toHaveBeenCalledWith('enable.key', deployKey); + + done(); + }); + }); + + it('shows loading spinner after click', (done) => { + vm.$el.click(); + + setTimeout(() => { + expect( + vm.$el.querySelector('.fa'), + ).toBeDefined(); + + done(); + }); + }); + + it('disables button after click', (done) => { + vm.$el.click(); + + setTimeout(() => { + expect( + vm.$el.classList.contains('disabled'), + ).toBeTruthy(); + + expect( + vm.$el.getAttribute('disabled'), + ).toBe('disabled'); + + done(); + }); + }); +}); diff --git a/spec/javascripts/deploy_keys/components/app_spec.js b/spec/javascripts/deploy_keys/components/app_spec.js new file mode 100644 index 00000000000..700897f50b0 --- /dev/null +++ b/spec/javascripts/deploy_keys/components/app_spec.js @@ -0,0 +1,142 @@ +import Vue from 'vue'; +import eventHub from '~/deploy_keys/eventhub'; +import deployKeysApp from '~/deploy_keys/components/app.vue'; + +describe('Deploy keys app component', () => { + const data = getJSONFixture('deploy_keys/keys.json'); + let vm; + + const deployKeysResponse = (request, next) => { + next(request.respondWith(JSON.stringify(data), { + status: 200, + })); + }; + + beforeEach((done) => { + const Component = Vue.extend(deployKeysApp); + + Vue.http.interceptors.push(deployKeysResponse); + + vm = new Component({ + propsData: { + endpoint: '/test', + }, + }).$mount(); + + setTimeout(done); + }); + + afterEach(() => { + 
Vue.http.interceptors = _.without(Vue.http.interceptors, deployKeysResponse); + }); + + it('renders loading icon', (done) => { + vm.store.keys = {}; + vm.isLoading = false; + + Vue.nextTick(() => { + expect( + vm.$el.querySelectorAll('.deploy-keys-panel').length, + ).toBe(0); + + expect( + vm.$el.querySelector('.fa-spinner'), + ).toBeDefined(); + + done(); + }); + }); + + it('renders keys panels', () => { + expect( + vm.$el.querySelectorAll('.deploy-keys-panel').length, + ).toBe(3); + }); + + it('does not render key panels when keys object is empty', (done) => { + vm.store.keys = {}; + + Vue.nextTick(() => { + expect( + vm.$el.querySelectorAll('.deploy-keys-panel').length, + ).toBe(0); + + done(); + }); + }); + + it('does not render public panel when empty', (done) => { + vm.store.keys.public_keys = []; + + Vue.nextTick(() => { + expect( + vm.$el.querySelectorAll('.deploy-keys-panel').length, + ).toBe(2); + + done(); + }); + }); + + it('re-fetches deploy keys when enabling a key', (done) => { + const key = data.public_keys[0]; + + spyOn(vm.service, 'getKeys'); + spyOn(vm.service, 'enableKey').and.callFake(() => new Promise((resolve) => { + resolve(); + + setTimeout(() => { + expect(vm.service.getKeys).toHaveBeenCalled(); + + done(); + }); + })); + + eventHub.$emit('enable.key', key); + + expect(vm.service.enableKey).toHaveBeenCalledWith(key.id); + }); + + it('re-fetches deploy keys when disabling a key', (done) => { + const key = data.public_keys[0]; + + spyOn(window, 'confirm').and.returnValue(true); + spyOn(vm.service, 'getKeys'); + spyOn(vm.service, 'disableKey').and.callFake(() => new Promise((resolve) => { + resolve(); + + setTimeout(() => { + expect(vm.service.getKeys).toHaveBeenCalled(); + + done(); + }); + })); + + eventHub.$emit('disable.key', key); + + expect(vm.service.disableKey).toHaveBeenCalledWith(key.id); + }); + + it('calls disableKey when removing a key', (done) => { + const key = data.public_keys[0]; + + spyOn(window, 'confirm').and.returnValue(true); + spyOn(vm.service, 'getKeys'); + spyOn(vm.service, 'disableKey').and.callFake(() => new Promise((resolve) => { + resolve(); + + setTimeout(() => { + expect(vm.service.getKeys).toHaveBeenCalled(); + + done(); + }); + })); + + eventHub.$emit('remove.key', key); + + expect(vm.service.disableKey).toHaveBeenCalledWith(key.id); + }); + + it('hasKeys returns true when there are keys', () => { + expect(vm.hasKeys).toEqual(3); + }); +}); diff --git a/spec/javascripts/deploy_keys/components/key_spec.js b/spec/javascripts/deploy_keys/components/key_spec.js new file mode 100644 index 00000000000..793ab8c451d --- /dev/null +++ b/spec/javascripts/deploy_keys/components/key_spec.js @@ -0,0 +1,92 @@ +import Vue from 'vue'; +import DeployKeysStore from '~/deploy_keys/store'; +import key from '~/deploy_keys/components/key.vue'; + +describe('Deploy keys key', () => { + let vm; + const KeyComponent = Vue.extend(key); + const data = getJSONFixture('deploy_keys/keys.json'); + const createComponent = (deployKey) => { + const store = new DeployKeysStore(); + store.keys = data; + + vm = new KeyComponent({ + propsData: { + deployKey, + store, + }, + }).$mount(); + }; + + describe('enabled key', () => { + const deployKey = data.enabled_keys[0]; + + beforeEach((done) => { + createComponent(deployKey); + + setTimeout(done); + }); + + it('renders the keys title', () => { + expect( + vm.$el.querySelector('.title').textContent.trim(), + ).toContain('My title'); + }); + + it('renders human friendly formatted created date', () => { + expect( + 
vm.$el.querySelector('.key-created-at').textContent.trim(), + ).toBe(`created ${gl.utils.getTimeago().format(deployKey.created_at)}`); + }); + + it('shows remove button', () => { + expect( + vm.$el.querySelector('.btn').textContent.trim(), + ).toBe('Remove'); + }); + + it('shows write access text when key has write access', (done) => { + vm.deployKey.can_push = true; + + Vue.nextTick(() => { + expect( + vm.$el.querySelector('.write-access-allowed'), + ).not.toBeNull(); + + expect( + vm.$el.querySelector('.write-access-allowed').textContent.trim(), + ).toBe('Write access allowed'); + + done(); + }); + }); + }); + + describe('public keys', () => { + const deployKey = data.public_keys[0]; + + beforeEach((done) => { + createComponent(deployKey); + + setTimeout(done); + }); + + it('shows enable button', () => { + expect( + vm.$el.querySelector('.btn').textContent.trim(), + ).toBe('Enable'); + }); + + it('shows disable button when key is enabled', (done) => { + vm.store.keys.enabled_keys.push(deployKey); + + Vue.nextTick(() => { + expect( + vm.$el.querySelector('.btn').textContent.trim(), + ).toBe('Disable'); + + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/deploy_keys/components/keys_panel_spec.js b/spec/javascripts/deploy_keys/components/keys_panel_spec.js new file mode 100644 index 00000000000..a69b39c35c4 --- /dev/null +++ b/spec/javascripts/deploy_keys/components/keys_panel_spec.js @@ -0,0 +1,70 @@ +import Vue from 'vue'; +import DeployKeysStore from '~/deploy_keys/store'; +import deployKeysPanel from '~/deploy_keys/components/keys_panel.vue'; + +describe('Deploy keys panel', () => { + const data = getJSONFixture('deploy_keys/keys.json'); + let vm; + + beforeEach((done) => { + const DeployKeysPanelComponent = Vue.extend(deployKeysPanel); + const store = new DeployKeysStore(); + store.keys = data; + + vm = new DeployKeysPanelComponent({ + propsData: { + title: 'test', + keys: data.enabled_keys, + showHelpBox: true, + store, + }, + }).$mount(); + + setTimeout(done); + }); + + it('renders the title with keys count', () => { + expect( + vm.$el.querySelector('h5').textContent.trim(), + ).toContain('test'); + + expect( + vm.$el.querySelector('h5').textContent.trim(), + ).toContain(`(${vm.keys.length})`); + }); + + it('renders list of keys', () => { + expect( + vm.$el.querySelectorAll('li').length, + ).toBe(vm.keys.length); + }); + + it('renders help box if keys are empty', (done) => { + vm.keys = []; + + Vue.nextTick(() => { + expect( + vm.$el.querySelector('.settings-message'), + ).toBeDefined(); + + expect( + vm.$el.querySelector('.settings-message').textContent.trim(), + ).toBe('No deploy keys found. 
Create one with the form above.'); + + done(); + }); + }); + + it('does not render help box if keys are empty & showHelpBox is false', (done) => { + vm.keys = []; + vm.showHelpBox = false; + + Vue.nextTick(() => { + expect( + vm.$el.querySelector('.settings-message'), + ).toBeNull(); + + done(); + }); + }); +}); diff --git a/spec/javascripts/diff_comments_store_spec.js b/spec/javascripts/diff_comments_store_spec.js index 84cf98c930a..d6fc6b56b82 100644 --- a/spec/javascripts/diff_comments_store_spec.js +++ b/spec/javascripts/diff_comments_store_spec.js @@ -1,133 +1,131 @@ /* eslint-disable jasmine/no-global-setup, dot-notation, jasmine/no-expect-in-setup-teardown, max-len */ /* global CommentsStore */ -require('~/diff_notes/models/discussion'); -require('~/diff_notes/models/note'); -require('~/diff_notes/stores/comments'); - -(() => { - function createDiscussion(noteId = 1, resolved = true) { - CommentsStore.create({ - discussionId: 'a', - noteId, - canResolve: true, - resolved, - resolvedBy: 'test', - authorName: 'test', - authorAvatar: 'test', - noteTruncated: 'test...', - }); - } - - beforeEach(() => { - CommentsStore.state = {}; +import '~/diff_notes/models/discussion'; +import '~/diff_notes/models/note'; +import '~/diff_notes/stores/comments'; + +function createDiscussion(noteId = 1, resolved = true) { + CommentsStore.create({ + discussionId: 'a', + noteId, + canResolve: true, + resolved, + resolvedBy: 'test', + authorName: 'test', + authorAvatar: 'test', + noteTruncated: 'test...', }); +} - describe('New discussion', () => { - it('creates new discussion', () => { - expect(Object.keys(CommentsStore.state).length).toBe(0); - createDiscussion(); - expect(Object.keys(CommentsStore.state).length).toBe(1); - }); +beforeEach(() => { + CommentsStore.state = {}; +}); - it('creates new note in discussion', () => { - createDiscussion(); - createDiscussion(2); +describe('New discussion', () => { + it('creates new discussion', () => { + expect(Object.keys(CommentsStore.state).length).toBe(0); + createDiscussion(); + expect(Object.keys(CommentsStore.state).length).toBe(1); + }); - const discussion = CommentsStore.state['a']; - expect(Object.keys(discussion.notes).length).toBe(2); - }); + it('creates new note in discussion', () => { + createDiscussion(); + createDiscussion(2); + + const discussion = CommentsStore.state['a']; + expect(Object.keys(discussion.notes).length).toBe(2); }); +}); - describe('Get note', () => { - beforeEach(() => { - expect(Object.keys(CommentsStore.state).length).toBe(0); - createDiscussion(); - }); +describe('Get note', () => { + beforeEach(() => { + expect(Object.keys(CommentsStore.state).length).toBe(0); + createDiscussion(); + }); - it('gets note by ID', () => { - const note = CommentsStore.get('a', 1); - expect(note).toBeDefined(); - expect(note.id).toBe(1); - }); + it('gets note by ID', () => { + const note = CommentsStore.get('a', 1); + expect(note).toBeDefined(); + expect(note.id).toBe(1); }); +}); - describe('Delete discussion', () => { - beforeEach(() => { - expect(Object.keys(CommentsStore.state).length).toBe(0); - createDiscussion(); - }); +describe('Delete discussion', () => { + beforeEach(() => { + expect(Object.keys(CommentsStore.state).length).toBe(0); + createDiscussion(); + }); - it('deletes discussion by ID', () => { - CommentsStore.delete('a', 1); - expect(Object.keys(CommentsStore.state).length).toBe(0); - }); + it('deletes discussion by ID', () => { + CommentsStore.delete('a', 1); + expect(Object.keys(CommentsStore.state).length).toBe(0); + }); - 
it('deletes discussion when no more notes', () => { - createDiscussion(); - createDiscussion(2); - expect(Object.keys(CommentsStore.state).length).toBe(1); - expect(Object.keys(CommentsStore.state['a'].notes).length).toBe(2); + it('deletes discussion when no more notes', () => { + createDiscussion(); + createDiscussion(2); + expect(Object.keys(CommentsStore.state).length).toBe(1); + expect(Object.keys(CommentsStore.state['a'].notes).length).toBe(2); - CommentsStore.delete('a', 1); - CommentsStore.delete('a', 2); - expect(Object.keys(CommentsStore.state).length).toBe(0); - }); + CommentsStore.delete('a', 1); + CommentsStore.delete('a', 2); + expect(Object.keys(CommentsStore.state).length).toBe(0); }); +}); - describe('Update note', () => { - beforeEach(() => { - expect(Object.keys(CommentsStore.state).length).toBe(0); - createDiscussion(); - }); +describe('Update note', () => { + beforeEach(() => { + expect(Object.keys(CommentsStore.state).length).toBe(0); + createDiscussion(); + }); - it('updates note to be unresolved', () => { - CommentsStore.update('a', 1, false, 'test'); + it('updates note to be unresolved', () => { + CommentsStore.update('a', 1, false, 'test'); - const note = CommentsStore.get('a', 1); - expect(note.resolved).toBe(false); - }); + const note = CommentsStore.get('a', 1); + expect(note.resolved).toBe(false); }); +}); - describe('Discussion resolved', () => { - beforeEach(() => { - expect(Object.keys(CommentsStore.state).length).toBe(0); - createDiscussion(); - }); +describe('Discussion resolved', () => { + beforeEach(() => { + expect(Object.keys(CommentsStore.state).length).toBe(0); + createDiscussion(); + }); - it('is resolved with single note', () => { - const discussion = CommentsStore.state['a']; - expect(discussion.isResolved()).toBe(true); - }); + it('is resolved with single note', () => { + const discussion = CommentsStore.state['a']; + expect(discussion.isResolved()).toBe(true); + }); - it('is unresolved with 2 notes', () => { - const discussion = CommentsStore.state['a']; - createDiscussion(2, false); + it('is unresolved with 2 notes', () => { + const discussion = CommentsStore.state['a']; + createDiscussion(2, false); - expect(discussion.isResolved()).toBe(false); - }); + expect(discussion.isResolved()).toBe(false); + }); - it('is resolved with 2 notes', () => { - const discussion = CommentsStore.state['a']; - createDiscussion(2); + it('is resolved with 2 notes', () => { + const discussion = CommentsStore.state['a']; + createDiscussion(2); - expect(discussion.isResolved()).toBe(true); - }); + expect(discussion.isResolved()).toBe(true); + }); - it('resolve all notes', () => { - const discussion = CommentsStore.state['a']; - createDiscussion(2, false); + it('resolve all notes', () => { + const discussion = CommentsStore.state['a']; + createDiscussion(2, false); - discussion.resolveAllNotes(); - expect(discussion.isResolved()).toBe(true); - }); + discussion.resolveAllNotes(); + expect(discussion.isResolved()).toBe(true); + }); - it('unresolve all notes', () => { - const discussion = CommentsStore.state['a']; - createDiscussion(2); + it('unresolve all notes', () => { + const discussion = CommentsStore.state['a']; + createDiscussion(2); - discussion.unResolveAllNotes(); - expect(discussion.isResolved()).toBe(false); - }); + discussion.unResolveAllNotes(); + expect(discussion.isResolved()).toBe(false); }); -})(); +}); diff --git a/spec/javascripts/droplab/constants_spec.js b/spec/javascripts/droplab/constants_spec.js new file mode 100644 index 00000000000..b9d28db74cc 
--- /dev/null +++ b/spec/javascripts/droplab/constants_spec.js @@ -0,0 +1,41 @@ +/* eslint-disable */ + +import * as constants from '~/droplab/constants'; + +describe('constants', function () { + describe('DATA_TRIGGER', function () { + it('should be `data-dropdown-trigger`', function() { + expect(constants.DATA_TRIGGER).toBe('data-dropdown-trigger'); + }); + }); + + describe('DATA_DROPDOWN', function () { + it('should be `data-dropdown`', function() { + expect(constants.DATA_DROPDOWN).toBe('data-dropdown'); + }); + }); + + describe('SELECTED_CLASS', function () { + it('should be `droplab-item-selected`', function() { + expect(constants.SELECTED_CLASS).toBe('droplab-item-selected'); + }); + }); + + describe('ACTIVE_CLASS', function () { + it('should be `droplab-item-active`', function() { + expect(constants.ACTIVE_CLASS).toBe('droplab-item-active'); + }); + }); + + describe('TEMPLATE_REGEX', function () { + it('should be a handlebars templating syntax regex', function() { + expect(constants.TEMPLATE_REGEX).toEqual(/\{\{(.+?)\}\}/g); + }); + }); + + describe('IGNORE_CLASS', function () { + it('should be `droplab-item-ignore`', function() { + expect(constants.IGNORE_CLASS).toBe('droplab-item-ignore'); + }); + }); +}); diff --git a/spec/javascripts/droplab/drop_down_spec.js b/spec/javascripts/droplab/drop_down_spec.js new file mode 100644 index 00000000000..2bbcebeeac0 --- /dev/null +++ b/spec/javascripts/droplab/drop_down_spec.js @@ -0,0 +1,582 @@ +import DropDown from '~/droplab/drop_down'; +import utils from '~/droplab/utils'; +import { SELECTED_CLASS, IGNORE_CLASS } from '~/droplab/constants'; + +describe('DropDown', function () { + describe('class constructor', function () { + beforeEach(function () { + spyOn(DropDown.prototype, 'getItems'); + spyOn(DropDown.prototype, 'initTemplateString'); + spyOn(DropDown.prototype, 'addEvents'); + + this.list = { innerHTML: 'innerHTML' }; + this.dropdown = new DropDown(this.list); + }); + + it('sets the .hidden property to true', function () { + expect(this.dropdown.hidden).toBe(true); + }); + + it('sets the .list property', function () { + expect(this.dropdown.list).toBe(this.list); + }); + + it('calls .getItems', function () { + expect(DropDown.prototype.getItems).toHaveBeenCalled(); + }); + + it('calls .initTemplateString', function () { + expect(DropDown.prototype.initTemplateString).toHaveBeenCalled(); + }); + + it('calls .addEvents', function () { + expect(DropDown.prototype.addEvents).toHaveBeenCalled(); + }); + + it('sets the .initialState property to the .list.innerHTML', function () { + expect(this.dropdown.initialState).toBe(this.list.innerHTML); + }); + + describe('if the list argument is a string', function () { + beforeEach(function () { + this.element = {}; + this.selector = '.selector'; + + spyOn(Document.prototype, 'querySelector').and.returnValue(this.element); + + this.dropdown = new DropDown(this.selector); + }); + + it('calls .querySelector with the selector string', function () { + expect(Document.prototype.querySelector).toHaveBeenCalledWith(this.selector); + }); + + it('sets the .list property element', function () { + expect(this.dropdown.list).toBe(this.element); + }); + }); + }); + + describe('getItems', function () { + beforeEach(function () { + this.list = { querySelectorAll: () => {} }; + this.dropdown = { list: this.list }; + this.nodeList = []; + + spyOn(this.list, 'querySelectorAll').and.returnValue(this.nodeList); + + this.getItems = DropDown.prototype.getItems.call(this.dropdown); + }); + + it('calls 
.querySelectorAll with a list item query', function () { + expect(this.list.querySelectorAll).toHaveBeenCalledWith('li'); + }); + + it('sets the .items property to the returned list items', function () { + expect(this.dropdown.items).toEqual(jasmine.any(Array)); + }); + + it('returns the .items', function () { + expect(this.getItems).toEqual(jasmine.any(Array)); + }); + }); + + describe('initTemplateString', function () { + beforeEach(function () { + this.items = [{ outerHTML: '<a></a>' }, { outerHTML: '<img>' }]; + this.dropdown = { items: this.items }; + + DropDown.prototype.initTemplateString.call(this.dropdown); + }); + + it('should set .templateString to the last items .outerHTML', function () { + expect(this.dropdown.templateString).toBe(this.items[1].outerHTML); + }); + + it('should not set .templateString to a non-last items .outerHTML', function () { + expect(this.dropdown.templateString).not.toBe(this.items[0].outerHTML); + }); + + describe('if .items is not set', function () { + beforeEach(function () { + this.dropdown = { getItems: () => {} }; + + spyOn(this.dropdown, 'getItems').and.returnValue([]); + + DropDown.prototype.initTemplateString.call(this.dropdown); + }); + + it('should call .getItems', function () { + expect(this.dropdown.getItems).toHaveBeenCalled(); + }); + }); + + describe('if items array is empty', function () { + beforeEach(function () { + this.dropdown = { items: [] }; + + DropDown.prototype.initTemplateString.call(this.dropdown); + }); + + it('should set .templateString to an empty string', function () { + expect(this.dropdown.templateString).toBe(''); + }); + }); + }); + + describe('clickEvent', function () { + beforeEach(function () { + this.classList = jasmine.createSpyObj('classList', ['contains']); + this.list = { dispatchEvent: () => {} }; + this.dropdown = { hide: () => {}, list: this.list, addSelectedClass: () => {} }; + this.event = { preventDefault: () => {}, target: { classList: this.classList } }; + this.customEvent = {}; + this.closestElement = {}; + + spyOn(this.dropdown, 'hide'); + spyOn(this.dropdown, 'addSelectedClass'); + spyOn(this.list, 'dispatchEvent'); + spyOn(this.event, 'preventDefault'); + spyOn(window, 'CustomEvent').and.returnValue(this.customEvent); + spyOn(utils, 'closest').and.returnValues(this.closestElement, undefined); + this.classList.contains.and.returnValue(false); + + DropDown.prototype.clickEvent.call(this.dropdown, this.event); + }); + + it('should call utils.closest', function () { + expect(utils.closest).toHaveBeenCalledWith(this.event.target, 'LI'); + }); + + it('should call addSelectedClass', function () { + expect(this.dropdown.addSelectedClass).toHaveBeenCalledWith(this.closestElement); + }); + + it('should call .preventDefault', function () { + expect(this.event.preventDefault).toHaveBeenCalled(); + }); + + it('should call .hide', function () { + expect(this.dropdown.hide).toHaveBeenCalled(); + }); + + it('should construct CustomEvent', function () { + expect(window.CustomEvent).toHaveBeenCalledWith('click.dl', jasmine.any(Object)); + }); + + it('should call .classList.contains checking for IGNORE_CLASS', function () { + expect(this.classList.contains).toHaveBeenCalledWith(IGNORE_CLASS); + }); + + it('should call .dispatchEvent with the customEvent', function () { + expect(this.list.dispatchEvent).toHaveBeenCalledWith(this.customEvent); + }); + + describe('if the target is a UL element', function () { + beforeEach(function () { + this.event = { preventDefault: () => {}, target: { tagName: 'UL', classList: 
this.classList } }; + + spyOn(this.event, 'preventDefault'); + utils.closest.calls.reset(); + + DropDown.prototype.clickEvent.call(this.dropdown, this.event); + }); + + it('should return immediately', function () { + expect(utils.closest).not.toHaveBeenCalled(); + }); + }); + + describe('if the target has the IGNORE_CLASS class', function () { + beforeEach(function () { + this.event = { preventDefault: () => {}, target: { tagName: 'LI', classList: this.classList } }; + + spyOn(this.event, 'preventDefault'); + this.classList.contains.and.returnValue(true); + utils.closest.calls.reset(); + + DropDown.prototype.clickEvent.call(this.dropdown, this.event); + }); + + it('should return immediately', function () { + expect(utils.closest).not.toHaveBeenCalled(); + }); + }); + + describe('if no selected element exists', function () { + beforeEach(function () { + this.event.preventDefault.calls.reset(); + this.clickEvent = DropDown.prototype.clickEvent.call(this.dropdown, this.event); + }); + + it('should return undefined', function () { + expect(this.clickEvent).toBe(undefined); + }); + + it('should return before .preventDefault is called', function () { + expect(this.event.preventDefault).not.toHaveBeenCalled(); + }); + }); + }); + + describe('addSelectedClass', function () { + beforeEach(function () { + this.items = Array(4).forEach((item, i) => { + this.items[i] = { classList: { add: () => {} } }; + spyOn(this.items[i].classList, 'add'); + }); + this.selected = { classList: { add: () => {} } }; + this.dropdown = { removeSelectedClasses: () => {} }; + + spyOn(this.dropdown, 'removeSelectedClasses'); + spyOn(this.selected.classList, 'add'); + + DropDown.prototype.addSelectedClass.call(this.dropdown, this.selected); + }); + + it('should call .removeSelectedClasses', function () { + expect(this.dropdown.removeSelectedClasses).toHaveBeenCalled(); + }); + + it('should call .classList.add', function () { + expect(this.selected.classList.add).toHaveBeenCalledWith(SELECTED_CLASS); + }); + }); + + describe('removeSelectedClasses', function () { + beforeEach(function () { + this.items = Array(4); + this.items.forEach((item, i) => { + this.items[i] = { classList: { add: () => {} } }; + spyOn(this.items[i].classList, 'add'); + }); + this.dropdown = { items: this.items }; + + DropDown.prototype.removeSelectedClasses.call(this.dropdown); + }); + + it('should call .classList.remove for all items', function () { + this.items.forEach((item, i) => { + expect(this.items[i].classList.add).toHaveBeenCalledWith(SELECTED_CLASS); + }); + }); + + describe('if .items is not set', function () { + beforeEach(function () { + this.dropdown = { getItems: () => {} }; + + spyOn(this.dropdown, 'getItems').and.returnValue([]); + + DropDown.prototype.removeSelectedClasses.call(this.dropdown); + }); + + it('should call .getItems', function () { + expect(this.dropdown.getItems).toHaveBeenCalled(); + }); + }); + }); + + describe('addEvents', function () { + beforeEach(function () { + this.list = { addEventListener: () => {} }; + this.dropdown = { list: this.list, clickEvent: () => {}, eventWrapper: {} }; + + spyOn(this.list, 'addEventListener'); + + DropDown.prototype.addEvents.call(this.dropdown); + }); + + it('should call .addEventListener', function () { + expect(this.list.addEventListener).toHaveBeenCalledWith('click', jasmine.any(Function)); + }); + }); + + describe('setData', function () { + beforeEach(function () { + this.dropdown = { render: () => {} }; + this.data = ['data']; + + spyOn(this.dropdown, 'render'); + + 
DropDown.prototype.setData.call(this.dropdown, this.data); + }); + + it('should set .data', function () { + expect(this.dropdown.data).toBe(this.data); + }); + + it('should call .render with the .data', function () { + expect(this.dropdown.render).toHaveBeenCalledWith(this.data); + }); + }); + + describe('addData', function () { + beforeEach(function () { + this.dropdown = { render: () => {}, data: ['data1'] }; + this.data = ['data2']; + + spyOn(this.dropdown, 'render'); + spyOn(Array.prototype, 'concat').and.callThrough(); + + DropDown.prototype.addData.call(this.dropdown, this.data); + }); + + it('should call .concat with data', function () { + expect(Array.prototype.concat).toHaveBeenCalledWith(this.data); + }); + + it('should set .data with concatenation', function () { + expect(this.dropdown.data).toEqual(['data1', 'data2']); + }); + + it('should call .render with the .data', function () { + expect(this.dropdown.render).toHaveBeenCalledWith(['data1', 'data2']); + }); + + describe('if .data is undefined', function () { + beforeEach(function () { + this.dropdown = { render: () => {}, data: undefined }; + this.data = ['data2']; + + spyOn(this.dropdown, 'render'); + + DropDown.prototype.addData.call(this.dropdown, this.data); + }); + + it('should set .data with concatenation', function () { + expect(this.dropdown.data).toEqual(['data2']); + }); + }); + }); + + describe('render', function () { + beforeEach(function () { + this.list = { querySelector: () => {} }; + this.dropdown = { renderChildren: () => {}, list: this.list }; + this.renderableList = {}; + this.data = [0, 1]; + + spyOn(this.dropdown, 'renderChildren').and.callFake(data => data); + spyOn(this.list, 'querySelector').and.returnValue(this.renderableList); + spyOn(this.data, 'map').and.callThrough(); + + DropDown.prototype.render.call(this.dropdown, this.data); + }); + + it('should call .map', function () { + expect(this.data.map).toHaveBeenCalledWith(jasmine.any(Function)); + }); + + it('should call .renderChildren for each data item', function () { + expect(this.dropdown.renderChildren.calls.count()).toBe(this.data.length); + }); + + it('sets the renderableList .innerHTML', function () { + expect(this.renderableList.innerHTML).toBe('01'); + }); + + describe('if no data argument is passed', function () { + beforeEach(function () { + this.data.map.calls.reset(); + this.dropdown.renderChildren.calls.reset(); + + DropDown.prototype.render.call(this.dropdown, undefined); + }); + + it('should not call .map', function () { + expect(this.data.map).not.toHaveBeenCalled(); + }); + + it('should not call .renderChildren', function () { + expect(this.dropdown.renderChildren).not.toHaveBeenCalled(); + }); + }); + + describe('if no dynamic list is present', function () { + beforeEach(function () { + this.list = { querySelector: () => {} }; + this.dropdown = { renderChildren: () => {}, list: this.list }; + this.data = [0, 1]; + + spyOn(this.dropdown, 'renderChildren').and.callFake(data => data); + spyOn(this.list, 'querySelector'); + spyOn(this.data, 'map').and.callThrough(); + + DropDown.prototype.render.call(this.dropdown, this.data); + }); + + it('sets the .list .innerHTML', function () { + expect(this.list.innerHTML).toBe('01'); + }); + }); + }); + + describe('renderChildren', function () { + beforeEach(function () { + this.templateString = 'templateString'; + this.dropdown = { templateString: this.templateString }; + this.data = { droplab_hidden: true }; + this.html = 'html'; + this.template = { firstChild: { outerHTML: 'outerHTML',
style: {} } }; + + spyOn(utils, 'template').and.returnValue(this.html); + spyOn(document, 'createElement').and.returnValue(this.template); + spyOn(DropDown, 'setImagesSrc'); + + this.renderChildren = DropDown.prototype.renderChildren.call(this.dropdown, this.data); + }); + + it('should call utils.t with .templateString and data', function () { + expect(utils.template).toHaveBeenCalledWith(this.templateString, this.data); + }); + + it('should call document.createElement', function () { + expect(document.createElement).toHaveBeenCalledWith('div'); + }); + + it('should set the templates .innerHTML to the HTML', function () { + expect(this.template.innerHTML).toBe(this.html); + }); + + it('should call .setImagesSrc with the template', function () { + expect(DropDown.setImagesSrc).toHaveBeenCalledWith(this.template); + }); + + it('should set the template display to none', function () { + expect(this.template.firstChild.style.display).toBe('none'); + }); + + it('should return the templates .firstChild.outerHTML', function () { + expect(this.renderChildren).toBe(this.template.firstChild.outerHTML); + }); + + describe('if droplab_hidden is false', function () { + beforeEach(function () { + this.data = { droplab_hidden: false }; + this.renderChildren = DropDown.prototype.renderChildren.call(this.dropdown, this.data); + }); + + it('should set the template display to block', function () { + expect(this.template.firstChild.style.display).toBe('block'); + }); + }); + }); + + describe('setImagesSrc', function () { + beforeEach(function () { + this.template = { querySelectorAll: () => {} }; + + spyOn(this.template, 'querySelectorAll').and.returnValue([]); + + DropDown.setImagesSrc(this.template); + }); + + it('should call .querySelectorAll', function () { + expect(this.template.querySelectorAll).toHaveBeenCalledWith('img[data-src]'); + }); + }); + + describe('show', function () { + beforeEach(function () { + this.list = { style: {} }; + this.dropdown = { list: this.list, hidden: true }; + + DropDown.prototype.show.call(this.dropdown); + }); + + it('it should set .list display to block', function () { + expect(this.list.style.display).toBe('block'); + }); + + it('it should set .hidden to false', function () { + expect(this.dropdown.hidden).toBe(false); + }); + + describe('if .hidden is false', function () { + beforeEach(function () { + this.list = { style: {} }; + this.dropdown = { list: this.list, hidden: false }; + + this.show = DropDown.prototype.show.call(this.dropdown); + }); + + it('should return undefined', function () { + expect(this.show).toEqual(undefined); + }); + + it('should not set .list display to block', function () { + expect(this.list.style.display).not.toEqual('block'); + }); + }); + }); + + describe('hide', function () { + beforeEach(function () { + this.list = { style: {} }; + this.dropdown = { list: this.list }; + + DropDown.prototype.hide.call(this.dropdown); + }); + + it('it should set .list display to none', function () { + expect(this.list.style.display).toBe('none'); + }); + + it('it should set .hidden to true', function () { + expect(this.dropdown.hidden).toBe(true); + }); + }); + + describe('toggle', function () { + beforeEach(function () { + this.hidden = true; + this.dropdown = { hidden: this.hidden, show: () => {}, hide: () => {} }; + + spyOn(this.dropdown, 'show'); + spyOn(this.dropdown, 'hide'); + + DropDown.prototype.toggle.call(this.dropdown); + }); + + it('should call .show', function () { + expect(this.dropdown.show).toHaveBeenCalled(); + }); + + describe('if .hidden 
is false', function () { + beforeEach(function () { + this.hidden = false; + this.dropdown = { hidden: this.hidden, show: () => {}, hide: () => {} }; + + spyOn(this.dropdown, 'show'); + spyOn(this.dropdown, 'hide'); + + DropDown.prototype.toggle.call(this.dropdown); + }); + + it('should call .hide', function () { + expect(this.dropdown.hide).toHaveBeenCalled(); + }); + }); + }); + + describe('destroy', function () { + beforeEach(function () { + this.list = { removeEventListener: () => {} }; + this.eventWrapper = { clickEvent: 'clickEvent' }; + this.dropdown = { list: this.list, hide: () => {}, eventWrapper: this.eventWrapper }; + + spyOn(this.list, 'removeEventListener'); + spyOn(this.dropdown, 'hide'); + + DropDown.prototype.destroy.call(this.dropdown); + }); + + it('it should call .hide', function () { + expect(this.dropdown.hide).toHaveBeenCalled(); + }); + + it('it should call .removeEventListener', function () { + expect(this.list.removeEventListener).toHaveBeenCalledWith('click', this.eventWrapper.clickEvent); + }); + }); +}); diff --git a/spec/javascripts/droplab/hook_spec.js b/spec/javascripts/droplab/hook_spec.js new file mode 100644 index 00000000000..75bf5f3d611 --- /dev/null +++ b/spec/javascripts/droplab/hook_spec.js @@ -0,0 +1,74 @@ +import Hook from '~/droplab/hook'; +import * as dropdownSrc from '~/droplab/drop_down'; + +describe('Hook', function () { + describe('class constructor', function () { + beforeEach(function () { + this.trigger = { id: 'id' }; + this.list = {}; + this.plugins = {}; + this.config = {}; + this.dropdown = {}; + + spyOn(dropdownSrc, 'default').and.returnValue(this.dropdown); + + this.hook = new Hook(this.trigger, this.list, this.plugins, this.config); + }); + + it('should set .trigger', function () { + expect(this.hook.trigger).toBe(this.trigger); + }); + + it('should set .list', function () { + expect(this.hook.list).toBe(this.dropdown); + }); + + it('should call DropDown constructor', function () { + expect(dropdownSrc.default).toHaveBeenCalledWith(this.list); + }); + + it('should set .type', function () { + expect(this.hook.type).toBe('Hook'); + }); + + it('should set .event', function () { + expect(this.hook.event).toBe('click'); + }); + + it('should set .plugins', function () { + expect(this.hook.plugins).toBe(this.plugins); + }); + + it('should set .config', function () { + expect(this.hook.config).toBe(this.config); + }); + + it('should set .id', function () { + expect(this.hook.id).toBe(this.trigger.id); + }); + + describe('if config argument is undefined', function () { + beforeEach(function () { + this.config = undefined; + + this.hook = new Hook(this.trigger, this.list, this.plugins, this.config); + }); + + it('should set .config to an empty object', function () { + expect(this.hook.config).toEqual({}); + }); + }); + + describe('if plugins argument is undefined', function () { + beforeEach(function () { + this.plugins = undefined; + + this.hook = new Hook(this.trigger, this.list, this.plugins, this.config); + }); + + it('should set .plugins to an empty array', function () { + expect(this.hook.plugins).toEqual([]); + }); + }); + }); +}); diff --git a/spec/javascripts/droplab/plugins/input_setter_spec.js b/spec/javascripts/droplab/plugins/input_setter_spec.js new file mode 100644 index 00000000000..bd625f4ae80 --- /dev/null +++ b/spec/javascripts/droplab/plugins/input_setter_spec.js @@ -0,0 +1,212 @@ +/* eslint-disable */ + +import InputSetter from '~/droplab/plugins/input_setter'; + +describe('InputSetter', function () { + describe('init', 
function () { + beforeEach(function () { + this.config = { InputSetter: {} }; + this.hook = { config: this.config }; + this.inputSetter = jasmine.createSpyObj('inputSetter', ['addEvents']); + + InputSetter.init.call(this.inputSetter, this.hook); + }); + + it('should set .hook', function () { + expect(this.inputSetter.hook).toBe(this.hook); + }); + + it('should set .config', function () { + expect(this.inputSetter.config).toBe(this.config.InputSetter); + }); + + it('should set .eventWrapper', function () { + expect(this.inputSetter.eventWrapper).toEqual({}); + }); + + it('should call .addEvents', function () { + expect(this.inputSetter.addEvents).toHaveBeenCalled(); + }); + + describe('if config.InputSetter is not set', function () { + beforeEach(function () { + this.config = { InputSetter: undefined }; + this.hook = { config: this.config }; + + InputSetter.init.call(this.inputSetter, this.hook); + }); + + it('should set .config to an empty object', function () { + expect(this.inputSetter.config).toEqual({}); + }); + + it('should set hook.config to an empty object', function () { + expect(this.hook.config.InputSetter).toEqual({}); + }); + }) + }); + + describe('addEvents', function () { + beforeEach(function () { + this.hook = { list: { list: jasmine.createSpyObj('list', ['addEventListener']) } }; + this.inputSetter = { eventWrapper: {}, hook: this.hook, setInputs: () => {} }; + + InputSetter.addEvents.call(this.inputSetter); + }); + + it('should set .eventWrapper.setInputs', function () { + expect(this.inputSetter.eventWrapper.setInputs).toEqual(jasmine.any(Function)); + }); + + it('should call .addEventListener', function () { + expect(this.hook.list.list.addEventListener) + .toHaveBeenCalledWith('click.dl', this.inputSetter.eventWrapper.setInputs); + }); + }); + + describe('removeEvents', function () { + beforeEach(function () { + this.hook = { list: { list: jasmine.createSpyObj('list', ['removeEventListener']) } }; + this.eventWrapper = jasmine.createSpyObj('eventWrapper', ['setInputs']); + this.inputSetter = { eventWrapper: this.eventWrapper, hook: this.hook }; + + InputSetter.removeEvents.call(this.inputSetter); + }); + + it('should call .removeEventListener', function () { + expect(this.hook.list.list.removeEventListener) + .toHaveBeenCalledWith('click.dl', this.eventWrapper.setInputs); + }); + }); + + describe('setInputs', function () { + beforeEach(function () { + this.event = { detail: { selected: {} } }; + this.config = [0, 1]; + this.inputSetter = { config: this.config, setInput: () => {} }; + + spyOn(this.inputSetter, 'setInput'); + + InputSetter.setInputs.call(this.inputSetter, this.event); + }); + + it('should call .setInput for each config element', function () { + const allArgs = this.inputSetter.setInput.calls.allArgs(); + + expect(allArgs.length).toEqual(2); + + allArgs.forEach((args, i) => { + expect(args[0]).toBe(this.config[i]); + expect(args[1]).toBe(this.event.detail.selected); + }); + }); + + describe('if config isnt an array', function () { + beforeEach(function () { + this.inputSetter = { config: {}, setInput: () => {} }; + + InputSetter.setInputs.call(this.inputSetter, this.event); + }); + + it('should set .config to an array with .config as the first element', function () { + expect(this.inputSetter.config).toEqual([{}]); + }); + }); + }); + + describe('setInput', function () { + beforeEach(function () { + this.selectedItem = { getAttribute: () => {} }; + this.input = { value: 'oldValue', tagName: 'INPUT', hasAttribute: () => {} }; + this.config = { 
valueAttribute: {}, input: this.input }; + this.inputSetter = { hook: { trigger: {} } }; + this.newValue = 'newValue'; + + spyOn(this.selectedItem, 'getAttribute').and.returnValue(this.newValue); + spyOn(this.input, 'hasAttribute').and.returnValue(false); + + InputSetter.setInput.call(this.inputSetter, this.config, this.selectedItem); + }); + + it('should call .getAttribute', function () { + expect(this.selectedItem.getAttribute).toHaveBeenCalledWith(this.config.valueAttribute); + }); + + it('should call .hasAttribute', function () { + expect(this.input.hasAttribute).toHaveBeenCalledWith(undefined); + }); + + it('should set the value of the input', function () { + expect(this.input.value).toBe(this.newValue); + }); + + describe('if no config.input is provided', function () { + beforeEach(function () { + this.config = { valueAttribute: {} }; + this.trigger = { value: 'oldValue', tagName: 'INPUT', hasAttribute: () => {} }; + this.inputSetter = { hook: { trigger: this.trigger } }; + + InputSetter.setInput.call(this.inputSetter, this.config, this.selectedItem); + }); + + it('should set the value of the hook.trigger', function () { + expect(this.trigger.value).toBe(this.newValue); + }); + }); + + describe('if the input tag is not INPUT', function () { + beforeEach(function () { + this.input = { textContent: 'oldValue', tagName: 'SPAN', hasAttribute: () => {} }; + this.config = { valueAttribute: {}, input: this.input }; + + InputSetter.setInput.call(this.inputSetter, this.config, this.selectedItem); + }); + + it('should set the textContent of the input', function () { + expect(this.input.textContent).toBe(this.newValue); + }); + }); + + describe('if there is an inputAttribute', function () { + beforeEach(function () { + this.selectedItem = { getAttribute: () => {} }; + this.input = { id: 'oldValue', hasAttribute: () => {}, setAttribute: () => {} }; + this.inputSetter = { hook: { trigger: {} } }; + this.newValue = 'newValue'; + this.inputAttribute = 'id'; + this.config = { + valueAttribute: {}, + input: this.input, + inputAttribute: this.inputAttribute, + }; + + spyOn(this.selectedItem, 'getAttribute').and.returnValue(this.newValue); + spyOn(this.input, 'hasAttribute').and.returnValue(true); + spyOn(this.input, 'setAttribute'); + + InputSetter.setInput.call(this.inputSetter, this.config, this.selectedItem); + }); + + it('should call setAttribute', function () { + expect(this.input.setAttribute).toHaveBeenCalledWith(this.inputAttribute, this.newValue); + }); + + it('should not set the value or textContent of the input', function () { + expect(this.input.value).not.toBe('newValue'); + expect(this.input.textContent).not.toBe('newValue'); + }); + }); + }); + + describe('destroy', function () { + beforeEach(function () { + this.inputSetter = jasmine.createSpyObj('inputSetter', ['removeEvents']); + + InputSetter.destroy.call(this.inputSetter); + }); + + it('should call .removeEvents', function () { + expect(this.inputSetter.removeEvents).toHaveBeenCalled(); + }); + }); +}); diff --git a/spec/javascripts/environments/environment_actions_spec.js b/spec/javascripts/environments/environment_actions_spec.js index 13840b42bd6..596d812c724 100644 --- a/spec/javascripts/environments/environment_actions_spec.js +++ b/spec/javascripts/environments/environment_actions_spec.js @@ -1,10 +1,9 @@ import Vue from 'vue'; -import actionsComp from '~/environments/components/environment_actions'; +import actionsComp from '~/environments/components/environment_actions.vue'; describe('Actions Component', () => { let 
ActionsComponent; let actionsMock; - let spy; let component; beforeEach(() => { @@ -19,15 +18,16 @@ describe('Actions Component', () => { name: 'foo', play_path: '#', }, + { + name: 'foo bar', + play_path: 'url', + playable: false, + }, ]; - spy = jasmine.createSpy('spy').and.returnValue(Promise.resolve()); component = new ActionsComponent({ propsData: { actions: actionsMock, - service: { - postAction: spy, - }, }, }).$mount(); }); @@ -43,10 +43,13 @@ describe('Actions Component', () => { ).toEqual(actionsMock.length); }); - it('should call the service when an action is clicked', () => { - component.$el.querySelector('.dropdown').click(); - component.$el.querySelector('.js-manual-action-link').click(); + it('should render a disabled action when it\'s not playable', () => { + expect( + component.$el.querySelector('.dropdown-menu li:last-child button').getAttribute('disabled'), + ).toEqual('disabled'); - expect(spy).toHaveBeenCalledWith(actionsMock[0].play_path); + expect( + component.$el.querySelector('.dropdown-menu li:last-child button').classList.contains('disabled'), + ).toEqual(true); }); }); diff --git a/spec/javascripts/environments/environment_external_url_spec.js b/spec/javascripts/environments/environment_external_url_spec.js index 9af218a27ff..056d68a26e9 100644 --- a/spec/javascripts/environments/environment_external_url_spec.js +++ b/spec/javascripts/environments/environment_external_url_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import externalUrlComp from '~/environments/components/environment_external_url'; +import externalUrlComp from '~/environments/components/environment_external_url.vue'; describe('External URL Component', () => { let ExternalUrlComponent; diff --git a/spec/javascripts/environments/environment_item_spec.js b/spec/javascripts/environments/environment_item_spec.js index 4d42de4d549..0e141adb628 100644 --- a/spec/javascripts/environments/environment_item_spec.js +++ b/spec/javascripts/environments/environment_item_spec.js @@ -1,6 +1,6 @@ import 'timeago.js'; import Vue from 'vue'; -import environmentItemComp from '~/environments/components/environment_item'; +import environmentItemComp from '~/environments/components/environment_item.vue'; describe('Environment item', () => { let EnvironmentItem; diff --git a/spec/javascripts/environments/environment_monitoring_spec.js b/spec/javascripts/environments/environment_monitoring_spec.js index fc451cce641..0f3dba66230 100644 --- a/spec/javascripts/environments/environment_monitoring_spec.js +++ b/spec/javascripts/environments/environment_monitoring_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import monitoringComp from '~/environments/components/environment_monitoring'; +import monitoringComp from '~/environments/components/environment_monitoring.vue'; describe('Monitoring Component', () => { let MonitoringComponent; diff --git a/spec/javascripts/environments/environment_rollback_spec.js b/spec/javascripts/environments/environment_rollback_spec.js index 7cb39d9df03..eb8e49d81fe 100644 --- a/spec/javascripts/environments/environment_rollback_spec.js +++ b/spec/javascripts/environments/environment_rollback_spec.js @@ -1,14 +1,12 @@ import Vue from 'vue'; -import rollbackComp from '~/environments/components/environment_rollback'; +import rollbackComp from '~/environments/components/environment_rollback.vue'; describe('Rollback Component', () => { const retryURL = 'https://gitlab.com/retry'; let RollbackComponent; - let spy; beforeEach(() => { RollbackComponent = Vue.extend(rollbackComp); - spy = 
jasmine.createSpy('spy').and.returnValue(Promise.resolve()); }); it('Should render Re-deploy label when isLastDeployment is true', () => { @@ -17,9 +15,6 @@ describe('Rollback Component', () => { propsData: { retryUrl: retryURL, isLastDeployment: true, - service: { - postAction: spy, - }, }, }).$mount(); @@ -32,28 +27,9 @@ describe('Rollback Component', () => { propsData: { retryUrl: retryURL, isLastDeployment: false, - service: { - postAction: spy, - }, }, }).$mount(); expect(component.$el.querySelector('span').textContent).toContain('Rollback'); }); - - it('should call the service when the button is clicked', () => { - const component = new RollbackComponent({ - propsData: { - retryUrl: retryURL, - isLastDeployment: false, - service: { - postAction: spy, - }, - }, - }).$mount(); - - component.$el.click(); - - expect(spy).toHaveBeenCalledWith(retryURL); - }); }); diff --git a/spec/javascripts/environments/environment_spec.js b/spec/javascripts/environments/environment_spec.js index 9601575577e..1c54cc3054c 100644 --- a/spec/javascripts/environments/environment_spec.js +++ b/spec/javascripts/environments/environment_spec.js @@ -1,15 +1,18 @@ import Vue from 'vue'; import '~/flash'; -import EnvironmentsComponent from '~/environments/components/environment'; -import { environment } from './mock_data'; +import environmentsComponent from '~/environments/components/environment.vue'; +import { environment, folder } from './mock_data'; describe('Environment', () => { preloadFixtures('static/environments/environments.html.raw'); + let EnvironmentsComponent; let component; beforeEach(() => { loadFixtures('static/environments/environments.html.raw'); + + EnvironmentsComponent = Vue.extend(environmentsComponent); }); describe('successfull request', () => { @@ -83,14 +86,19 @@ describe('Environment', () => { it('should render a table with environments', (done) => { setTimeout(() => { + expect(component.$el.querySelectorAll('table')).toBeDefined(); expect( - component.$el.querySelectorAll('table tbody tr').length, - ).toEqual(1); + component.$el.querySelector('.environment-name').textContent.trim(), + ).toEqual(environment.name); done(); }, 0); }); describe('pagination', () => { + afterEach(() => { + window.history.pushState({}, null, ''); + }); + it('should render pagination', (done) => { setTimeout(() => { expect( @@ -175,4 +183,101 @@ describe('Environment', () => { }, 0); }); }); + + describe('expandable folders', () => { + const environmentsResponseInterceptor = (request, next) => { + next(request.respondWith(JSON.stringify({ + environments: [folder], + stopped_count: 0, + available_count: 1, + }), { + status: 200, + headers: { + 'X-nExt-pAge': '2', + 'x-page': '1', + 'X-Per-Page': '1', + 'X-Prev-Page': '', + 'X-TOTAL': '37', + 'X-Total-Pages': '2', + }, + })); + }; + + beforeEach(() => { + Vue.http.interceptors.push(environmentsResponseInterceptor); + component = new EnvironmentsComponent({ + el: document.querySelector('#environments-list-view'), + }); + }); + + afterEach(() => { + Vue.http.interceptors = _.without( + Vue.http.interceptors, environmentsResponseInterceptor, + ); + }); + + it('should open a closed folder', (done) => { + setTimeout(() => { + component.$el.querySelector('.folder-name').click(); + + Vue.nextTick(() => { + expect( + component.$el.querySelector('.folder-icon i.fa-caret-right').getAttribute('style'), + ).toContain('display: none'); + expect( + component.$el.querySelector('.folder-icon i.fa-caret-down').getAttribute('style'), + ).not.toContain('display: none'); + done(); 
+ }); + }); + }); + + it('should close an opened folder', (done) => { + setTimeout(() => { + // open folder + component.$el.querySelector('.folder-name').click(); + + Vue.nextTick(() => { + // close folder + component.$el.querySelector('.folder-name').click(); + + Vue.nextTick(() => { + expect( + component.$el.querySelector('.folder-icon i.fa-caret-down').getAttribute('style'), + ).toContain('display: none'); + expect( + component.$el.querySelector('.folder-icon i.fa-caret-right').getAttribute('style'), + ).not.toContain('display: none'); + done(); + }); + }); + }); + }); + + it('should show children environments and a button to show all environments', (done) => { + setTimeout(() => { + // open folder + component.$el.querySelector('.folder-name').click(); + + Vue.nextTick(() => { + const folderInterceptor = (request, next) => { + next(request.respondWith(JSON.stringify({ + environments: [environment], + }), { status: 200 })); + }; + + Vue.http.interceptors.push(folderInterceptor); + + // wait for next async request + setTimeout(() => { + expect(component.$el.querySelectorAll('.js-child-row').length).toEqual(1); + expect(component.$el.querySelector('td.text-center > a.btn').textContent).toContain('Show all'); + + Vue.http.interceptors = _.without(Vue.http.interceptors, folderInterceptor); + done(); + }); + }); + }); + }); + }); }); diff --git a/spec/javascripts/environments/environment_stop_spec.js b/spec/javascripts/environments/environment_stop_spec.js index 01055e3f255..8131f1e5b11 100644 --- a/spec/javascripts/environments/environment_stop_spec.js +++ b/spec/javascripts/environments/environment_stop_spec.js @@ -1,23 +1,18 @@ import Vue from 'vue'; -import stopComp from '~/environments/components/environment_stop'; +import stopComp from '~/environments/components/environment_stop.vue'; describe('Stop Component', () => { let StopComponent; let component; - let spy; const stopURL = '/stop'; beforeEach(() => { StopComponent = Vue.extend(stopComp); - spy = jasmine.createSpy('spy').and.returnValue(Promise.resolve()); spyOn(window, 'confirm').and.returnValue(true); component = new StopComponent({ propsData: { stopUrl: stopURL, - service: { - postAction: spy, - }, }, }).$mount(); }); @@ -26,9 +21,4 @@ describe('Stop Component', () => { expect(component.$el.tagName).toEqual('BUTTON'); expect(component.$el.getAttribute('title')).toEqual('Stop'); }); - - it('should call the service when an action is clicked', () => { - component.$el.click(); - expect(spy).toHaveBeenCalled(); - }); }); diff --git a/spec/javascripts/environments/environment_table_spec.js b/spec/javascripts/environments/environment_table_spec.js index 3df967848a7..effbc6c3ee1 100644 --- a/spec/javascripts/environments/environment_table_spec.js +++ b/spec/javascripts/environments/environment_table_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import environmentTableComp from '~/environments/components/environments_table'; +import environmentTableComp from '~/environments/components/environments_table.vue'; describe('Environment item', () => { preloadFixtures('static/environments/element.html.raw'); diff --git a/spec/javascripts/environments/environment_terminal_button_spec.js b/spec/javascripts/environments/environment_terminal_button_spec.js index be2289edc2b..858472af4b6 100644 --- a/spec/javascripts/environments/environment_terminal_button_spec.js +++ b/spec/javascripts/environments/environment_terminal_button_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import terminalComp from 
'~/environments/components/environment_terminal_button'; +import terminalComp from '~/environments/components/environment_terminal_button.vue'; describe('Stop Component', () => { let TerminalComponent; diff --git a/spec/javascripts/environments/environments_store_spec.js b/spec/javascripts/environments/environments_store_spec.js index 115d84b50f5..f617c4bdffe 100644 --- a/spec/javascripts/environments/environments_store_spec.js +++ b/spec/javascripts/environments/environments_store_spec.js @@ -1,38 +1,106 @@ import Store from '~/environments/stores/environments_store'; import { environmentsList, serverData } from './mock_data'; -(() => { - describe('Store', () => { - let store; +describe('Store', () => { + let store; - beforeEach(() => { - store = new Store(); - }); + beforeEach(() => { + store = new Store(); + }); - it('should start with a blank state', () => { - expect(store.state.environments.length).toEqual(0); - expect(store.state.stoppedCounter).toEqual(0); - expect(store.state.availableCounter).toEqual(0); - expect(store.state.paginationInformation).toEqual({}); - }); + it('should start with a blank state', () => { + expect(store.state.environments.length).toEqual(0); + expect(store.state.stoppedCounter).toEqual(0); + expect(store.state.availableCounter).toEqual(0); + expect(store.state.paginationInformation).toEqual({}); + }); + it('should store environments', () => { + store.storeEnvironments(serverData); + expect(store.state.environments.length).toEqual(serverData.length); + expect(store.state.environments[0]).toEqual(environmentsList[0]); + }); + + it('should store available count', () => { + store.storeAvailableCount(2); + expect(store.state.availableCounter).toEqual(2); + }); + + it('should store stopped count', () => { + store.storeStoppedCount(2); + expect(store.state.stoppedCounter).toEqual(2); + }); + + describe('store environments', () => { it('should store environments', () => { store.storeEnvironments(serverData); expect(store.state.environments.length).toEqual(serverData.length); - expect(store.state.environments[0]).toEqual(environmentsList[0]); }); - it('should store available count', () => { - store.storeAvailableCount(2); - expect(store.state.availableCounter).toEqual(2); + it('should add folder keys when environment is a folder', () => { + const environment = { + name: 'bar', + size: 3, + id: 2, + }; + + store.storeEnvironments([environment]); + expect(store.state.environments[0].isFolder).toEqual(true); + expect(store.state.environments[0].folderName).toEqual('bar'); + }); + + it('should extract content of `latest` key when provided', () => { + const environment = { + name: 'bar', + size: 3, + id: 2, + latest: { + last_deployment: {}, + isStoppable: true, + }, + }; + + store.storeEnvironments([environment]); + expect(store.state.environments[0].last_deployment).toEqual({}); + expect(store.state.environments[0].isStoppable).toEqual(true); }); - it('should store stopped count', () => { - store.storeStoppedCount(2); - expect(store.state.stoppedCounter).toEqual(2); + it('should store latest.name when the environment is not a folder', () => { + store.storeEnvironments(serverData); + expect(store.state.environments[0].name).toEqual(serverData[0].latest.name); }); - it('should store pagination information', () => { + it('should store root level name when environment is a folder', () => { + store.storeEnvironments(serverData); + expect(store.state.environments[1].folderName).toEqual(serverData[1].name); + }); + }); + + describe('toggleFolder', () => { + it('should toggle 
folder', () => { + store.storeEnvironments(serverData); + + store.toggleFolder(store.state.environments[1]); + expect(store.state.environments[1].isOpen).toEqual(true); + + store.toggleFolder(store.state.environments[1]); + expect(store.state.environments[1].isOpen).toEqual(false); + }); + }); + + describe('setfolderContent', () => { + it('should store folder content', () => { + store.storeEnvironments(serverData); + + store.setfolderContent(store.state.environments[1], serverData); + + expect(store.state.environments[1].children.length).toEqual(serverData.length); + expect(store.state.environments[1].children[0].isChildren).toEqual(true); + }); + }); + + describe('store pagination', () => { + it('should store normalized and integer pagination information', () => { const pagination = { 'X-nExt-pAge': '2', 'X-page': '1', @@ -55,4 +123,4 @@ import { environmentsList, serverData } from './mock_data'; expect(store.state.paginationInformation).toEqual(expectedResult); }); }); -})(); +}); diff --git a/spec/javascripts/environments/folder/environments_folder_view_spec.js b/spec/javascripts/environments/folder/environments_folder_view_spec.js index 43a217a67f5..350078ad5f5 100644 --- a/spec/javascripts/environments/folder/environments_folder_view_spec.js +++ b/spec/javascripts/environments/folder/environments_folder_view_spec.js @@ -1,13 +1,15 @@ import Vue from 'vue'; import '~/flash'; -import EnvironmentsFolderViewComponent from '~/environments/folder/environments_folder_view'; +import environmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue'; import { environmentsList } from '../mock_data'; describe('Environments Folder View', () => { preloadFixtures('static/environments/environments_folder_view.html.raw'); + let EnvironmentsFolderViewComponent; beforeEach(() => { loadFixtures('static/environments/environments_folder_view.html.raw'); + EnvironmentsFolderViewComponent = Vue.extend(environmentsFolderViewComponent); window.history.pushState({}, null, 'environments/folders/build'); }); @@ -47,9 +49,10 @@ describe('Environments Folder View', () => { it('should render a table with environments', (done) => { setTimeout(() => { + expect(component.$el.querySelectorAll('table')).toBeDefined(); expect( - component.$el.querySelectorAll('table tbody tr').length, - ).toEqual(2); + component.$el.querySelector('.environment-name').textContent.trim(), + ).toEqual(environmentsList[0].name); done(); }, 0); }); diff --git a/spec/javascripts/environments/mock_data.js b/spec/javascripts/environments/mock_data.js index 30861481cc5..15e11aa686b 100644 --- a/spec/javascripts/environments/mock_data.js +++ b/spec/javascripts/environments/mock_data.js @@ -84,3 +84,19 @@ export const environment = { updated_at: '2017-01-31T10:53:46.894Z', }, }; + +export const folder = { + folderName: 'build', + size: 5, + id: 12, + name: 'build/update-README', + state: 'available', + external_url: null, + environment_type: 'build', + last_deployment: null, + 'stop_action?': false, + environment_path: '/root/review-app/environments/12', + stop_path: '/root/review-app/environments/12/stop', + created_at: '2017-02-01T19:42:18.400Z', + updated_at: '2017-02-01T19:42:18.400Z', +}; diff --git a/spec/javascripts/extensions/array_spec.js b/spec/javascripts/extensions/array_spec.js index 4b871fe967d..b1b81b4efc2 100644 --- a/spec/javascripts/extensions/array_spec.js +++ b/spec/javascripts/extensions/array_spec.js @@ -1,6 +1,6 @@ /* eslint-disable space-before-function-paren, no-var */ -require('~/extensions/array'); 
+import '~/extensions/array'; (function() { describe('Array extensions', function() { diff --git a/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js b/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js new file mode 100644 index 00000000000..d0f09a561d5 --- /dev/null +++ b/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js @@ -0,0 +1,186 @@ +import Vue from 'vue'; +import eventHub from '~/filtered_search/event_hub'; +import RecentSearchesDropdownContent from '~/filtered_search/components/recent_searches_dropdown_content'; + +const createComponent = (propsData) => { + const Component = Vue.extend(RecentSearchesDropdownContent); + + return new Component({ + el: document.createElement('div'), + propsData, + }); +}; + +// Remove all the newlines and whitespace from the formatted markup +const trimMarkupWhitespace = text => text.replace(/(\n|\s)+/gm, ' ').trim(); + +describe('RecentSearchesDropdownContent', () => { + const propsDataWithoutItems = { + items: [], + }; + const propsDataWithItems = { + items: [ + 'foo', + 'author:@root label:~foo bar', + ], + }; + + let vm; + afterEach(() => { + if (vm) { + vm.$destroy(); + } + }); + + describe('with no items', () => { + let el; + + beforeEach(() => { + vm = createComponent(propsDataWithoutItems); + el = vm.$el; + }); + + it('should render empty state', () => { + expect(el.querySelector('.dropdown-info-note')).toBeDefined(); + + const items = el.querySelectorAll('.filtered-search-history-dropdown-item'); + expect(items.length).toEqual(propsDataWithoutItems.items.length); + }); + }); + + describe('with items', () => { + let el; + + beforeEach(() => { + vm = createComponent(propsDataWithItems); + el = vm.$el; + }); + + it('should render clear recent searches button', () => { + expect(el.querySelector('.filtered-search-history-clear-button')).toBeDefined(); + }); + + it('should render recent search items', () => { + const items = el.querySelectorAll('.filtered-search-history-dropdown-item'); + expect(items.length).toEqual(propsDataWithItems.items.length); + + expect(trimMarkupWhitespace(items[0].querySelector('.filtered-search-history-dropdown-search-token').textContent)).toEqual('foo'); + + const item1Tokens = items[1].querySelectorAll('.filtered-search-history-dropdown-token'); + expect(item1Tokens.length).toEqual(2); + expect(item1Tokens[0].querySelector('.name').textContent).toEqual('author:'); + expect(item1Tokens[0].querySelector('.value').textContent).toEqual('@root'); + expect(item1Tokens[1].querySelector('.name').textContent).toEqual('label:'); + expect(item1Tokens[1].querySelector('.value').textContent).toEqual('~foo'); + expect(trimMarkupWhitespace(items[1].querySelector('.filtered-search-history-dropdown-search-token').textContent)).toEqual('bar'); + }); + }); + + describe('if isLocalStorageAvailable is `false`', () => { + let el; + + beforeEach(() => { + const props = Object.assign({ isLocalStorageAvailable: false }, propsDataWithItems); + + vm = createComponent(props); + el = vm.$el; + }); + + it('should render an info note', () => { + const note = el.querySelector('.dropdown-info-note'); + const items = el.querySelectorAll('.filtered-search-history-dropdown-item'); + + expect(note).toBeDefined(); + expect(note.innerText.trim()).toBe('This feature requires local storage to be enabled'); + expect(items.length).toEqual(propsDataWithoutItems.items.length); + }); + }); + + describe('computed', () => { + describe('processedItems', () => { + 
it('with items', () => { + vm = createComponent(propsDataWithItems); + const processedItems = vm.processedItems; + + expect(processedItems.length).toEqual(2); + + expect(processedItems[0].text).toEqual(propsDataWithItems.items[0]); + expect(processedItems[0].tokens).toEqual([]); + expect(processedItems[0].searchToken).toEqual('foo'); + + expect(processedItems[1].text).toEqual(propsDataWithItems.items[1]); + expect(processedItems[1].tokens.length).toEqual(2); + expect(processedItems[1].tokens[0].prefix).toEqual('author:'); + expect(processedItems[1].tokens[0].suffix).toEqual('@root'); + expect(processedItems[1].tokens[1].prefix).toEqual('label:'); + expect(processedItems[1].tokens[1].suffix).toEqual('~foo'); + expect(processedItems[1].searchToken).toEqual('bar'); + }); + + it('with no items', () => { + vm = createComponent(propsDataWithoutItems); + const processedItems = vm.processedItems; + + expect(processedItems.length).toEqual(0); + }); + }); + + describe('hasItems', () => { + it('with items', () => { + vm = createComponent(propsDataWithItems); + const hasItems = vm.hasItems; + expect(hasItems).toEqual(true); + }); + + it('with no items', () => { + vm = createComponent(propsDataWithoutItems); + const hasItems = vm.hasItems; + expect(hasItems).toEqual(false); + }); + }); + }); + + describe('methods', () => { + describe('onItemActivated', () => { + let onRecentSearchesItemSelectedSpy; + + beforeEach(() => { + onRecentSearchesItemSelectedSpy = jasmine.createSpy('spy'); + eventHub.$on('recentSearchesItemSelected', onRecentSearchesItemSelectedSpy); + + vm = createComponent(propsDataWithItems); + }); + + afterEach(() => { + eventHub.$off('recentSearchesItemSelected', onRecentSearchesItemSelectedSpy); + }); + + it('emits event', () => { + expect(onRecentSearchesItemSelectedSpy).not.toHaveBeenCalled(); + vm.onItemActivated('something'); + expect(onRecentSearchesItemSelectedSpy).toHaveBeenCalledWith('something'); + }); + }); + + describe('onRequestClearRecentSearches', () => { + let onRequestClearRecentSearchesSpy; + + beforeEach(() => { + onRequestClearRecentSearchesSpy = jasmine.createSpy('spy'); + eventHub.$on('requestClearRecentSearches', onRequestClearRecentSearchesSpy); + + vm = createComponent(propsDataWithItems); + }); + + afterEach(() => { + eventHub.$off('requestClearRecentSearches', onRequestClearRecentSearchesSpy); + }); + + it('emits event', () => { + expect(onRequestClearRecentSearchesSpy).not.toHaveBeenCalled(); + vm.onRequestClearRecentSearches({ stopPropagation: () => {} }); + expect(onRequestClearRecentSearchesSpy).toHaveBeenCalled(); + }); + }); + }); +}); diff --git a/spec/javascripts/filtered_search/dropdown_user_spec.js b/spec/javascripts/filtered_search/dropdown_user_spec.js index c16f77c53a2..0d8bdf4c8e7 100644 --- a/spec/javascripts/filtered_search/dropdown_user_spec.js +++ b/spec/javascripts/filtered_search/dropdown_user_spec.js @@ -1,71 +1,69 @@ -require('~/filtered_search/dropdown_utils'); -require('~/filtered_search/filtered_search_tokenizer'); -require('~/filtered_search/filtered_search_dropdown'); -require('~/filtered_search/dropdown_user'); +import '~/filtered_search/dropdown_utils'; +import '~/filtered_search/filtered_search_tokenizer'; +import '~/filtered_search/filtered_search_dropdown'; +import '~/filtered_search/dropdown_user'; -(() => { - describe('Dropdown User', () => { - describe('getSearchInput', () => { - let dropdownUser; +describe('Dropdown User', () => { + describe('getSearchInput', () => { + let dropdownUser; - beforeEach(() => { - 
spyOn(gl.DropdownUser.prototype, 'bindEvents').and.callFake(() => {}); - spyOn(gl.DropdownUser.prototype, 'getProjectId').and.callFake(() => {}); - spyOn(gl.DropdownUtils, 'getSearchInput').and.callFake(() => {}); + beforeEach(() => { + spyOn(gl.DropdownUser.prototype, 'bindEvents').and.callFake(() => {}); + spyOn(gl.DropdownUser.prototype, 'getProjectId').and.callFake(() => {}); + spyOn(gl.DropdownUtils, 'getSearchInput').and.callFake(() => {}); - dropdownUser = new gl.DropdownUser(); - }); - - it('should not return the double quote found in value', () => { - spyOn(gl.FilteredSearchTokenizer, 'processTokens').and.returnValue({ - lastToken: '"johnny appleseed', - }); + dropdownUser = new gl.DropdownUser(); + }); - expect(dropdownUser.getSearchInput()).toBe('johnny appleseed'); + it('should not return the double quote found in value', () => { + spyOn(gl.FilteredSearchTokenizer, 'processTokens').and.returnValue({ + lastToken: '"johnny appleseed', }); - it('should not return the single quote found in value', () => { - spyOn(gl.FilteredSearchTokenizer, 'processTokens').and.returnValue({ - lastToken: '\'larry boy', - }); + expect(dropdownUser.getSearchInput()).toBe('johnny appleseed'); + }); - expect(dropdownUser.getSearchInput()).toBe('larry boy'); + it('should not return the single quote found in value', () => { + spyOn(gl.FilteredSearchTokenizer, 'processTokens').and.returnValue({ + lastToken: '\'larry boy', }); + + expect(dropdownUser.getSearchInput()).toBe('larry boy'); }); + }); - describe('config droplabAjaxFilter\'s endpoint', () => { - beforeEach(() => { - spyOn(gl.DropdownUser.prototype, 'bindEvents').and.callFake(() => {}); - spyOn(gl.DropdownUser.prototype, 'getProjectId').and.callFake(() => {}); - }); + describe('config AjaxFilter\'s endpoint', () => { + beforeEach(() => { + spyOn(gl.DropdownUser.prototype, 'bindEvents').and.callFake(() => {}); + spyOn(gl.DropdownUser.prototype, 'getProjectId').and.callFake(() => {}); + }); - it('should return endpoint', () => { - window.gon = { - relative_url_root: '', - }; - const dropdown = new gl.DropdownUser(); + it('should return endpoint', () => { + window.gon = { + relative_url_root: '', + }; + const dropdown = new gl.DropdownUser(); - expect(dropdown.config.droplabAjaxFilter.endpoint).toBe('/autocomplete/users.json'); - }); + expect(dropdown.config.AjaxFilter.endpoint).toBe('/autocomplete/users.json'); + }); - it('should return endpoint when relative_url_root is undefined', () => { - const dropdown = new gl.DropdownUser(); + it('should return endpoint when relative_url_root is undefined', () => { + const dropdown = new gl.DropdownUser(); - expect(dropdown.config.droplabAjaxFilter.endpoint).toBe('/autocomplete/users.json'); - }); + expect(dropdown.config.AjaxFilter.endpoint).toBe('/autocomplete/users.json'); + }); - it('should return endpoint with relative url when available', () => { - window.gon = { - relative_url_root: '/gitlab_directory', - }; - const dropdown = new gl.DropdownUser(); + it('should return endpoint with relative url when available', () => { + window.gon = { + relative_url_root: '/gitlab_directory', + }; + const dropdown = new gl.DropdownUser(); - expect(dropdown.config.droplabAjaxFilter.endpoint).toBe('/gitlab_directory/autocomplete/users.json'); - }); + expect(dropdown.config.AjaxFilter.endpoint).toBe('/gitlab_directory/autocomplete/users.json'); + }); - afterEach(() => { - window.gon = {}; - }); + afterEach(() => { + window.gon = {}; }); }); -})(); +}); diff --git 
a/spec/javascripts/filtered_search/dropdown_utils_spec.js b/spec/javascripts/filtered_search/dropdown_utils_spec.js index e6538020896..a68e315e3e4 100644 --- a/spec/javascripts/filtered_search/dropdown_utils_spec.js +++ b/spec/javascripts/filtered_search/dropdown_utils_spec.js @@ -1,310 +1,308 @@ -require('~/extensions/array'); -require('~/filtered_search/dropdown_utils'); -require('~/filtered_search/filtered_search_tokenizer'); -require('~/filtered_search/filtered_search_dropdown_manager'); - -(() => { - describe('Dropdown Utils', () => { - describe('getEscapedText', () => { - it('should return same word when it has no space', () => { - const escaped = gl.DropdownUtils.getEscapedText('textWithoutSpace'); - expect(escaped).toBe('textWithoutSpace'); - }); +import '~/extensions/array'; +import '~/filtered_search/dropdown_utils'; +import '~/filtered_search/filtered_search_tokenizer'; +import '~/filtered_search/filtered_search_dropdown_manager'; + +describe('Dropdown Utils', () => { + describe('getEscapedText', () => { + it('should return same word when it has no space', () => { + const escaped = gl.DropdownUtils.getEscapedText('textWithoutSpace'); + expect(escaped).toBe('textWithoutSpace'); + }); - it('should escape with double quotes', () => { - let escaped = gl.DropdownUtils.getEscapedText('text with space'); - expect(escaped).toBe('"text with space"'); + it('should escape with double quotes', () => { + let escaped = gl.DropdownUtils.getEscapedText('text with space'); + expect(escaped).toBe('"text with space"'); - escaped = gl.DropdownUtils.getEscapedText('won\'t fix'); - expect(escaped).toBe('"won\'t fix"'); - }); + escaped = gl.DropdownUtils.getEscapedText('won\'t fix'); + expect(escaped).toBe('"won\'t fix"'); + }); - it('should escape with single quotes', () => { - const escaped = gl.DropdownUtils.getEscapedText('won"t fix'); - expect(escaped).toBe('\'won"t fix\''); - }); + it('should escape with single quotes', () => { + const escaped = gl.DropdownUtils.getEscapedText('won"t fix'); + expect(escaped).toBe('\'won"t fix\''); + }); - it('should escape with single quotes by default', () => { - const escaped = gl.DropdownUtils.getEscapedText('won"t\' fix'); - expect(escaped).toBe('\'won"t\' fix\''); - }); + it('should escape with single quotes by default', () => { + const escaped = gl.DropdownUtils.getEscapedText('won"t\' fix'); + expect(escaped).toBe('\'won"t\' fix\''); }); + }); - describe('filterWithSymbol', () => { - let input; - const item = { - title: '@root', - }; + describe('filterWithSymbol', () => { + let input; + const item = { + title: '@root', + }; - beforeEach(() => { - setFixtures(` - <input type="text" id="test" /> - `); + beforeEach(() => { + setFixtures(` + <input type="text" id="test" /> + `); - input = document.getElementById('test'); - }); + input = document.getElementById('test'); + }); - it('should filter without symbol', () => { - input.value = 'roo'; + it('should filter without symbol', () => { + input.value = 'roo'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('@', input, item); - expect(updatedItem.droplab_hidden).toBe(false); - }); + const updatedItem = gl.DropdownUtils.filterWithSymbol('@', input, item); + expect(updatedItem.droplab_hidden).toBe(false); + }); - it('should filter with symbol', () => { - input.value = '@roo'; + it('should filter with symbol', () => { + input.value = '@roo'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('@', input, item); - expect(updatedItem.droplab_hidden).toBe(false); - }); + const updatedItem = 
gl.DropdownUtils.filterWithSymbol('@', input, item); + expect(updatedItem.droplab_hidden).toBe(false); + }); - describe('filters multiple word title', () => { - const multipleWordItem = { - title: 'Community Contributions', - }; + describe('filters multiple word title', () => { + const multipleWordItem = { + title: 'Community Contributions', + }; - it('should filter with double quote', () => { - input.value = '"'; + it('should filter with double quote', () => { + input.value = '"'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); - expect(updatedItem.droplab_hidden).toBe(false); - }); + const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + expect(updatedItem.droplab_hidden).toBe(false); + }); - it('should filter with double quote and symbol', () => { - input.value = '~"'; + it('should filter with double quote and symbol', () => { + input.value = '~"'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); - expect(updatedItem.droplab_hidden).toBe(false); - }); + const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + expect(updatedItem.droplab_hidden).toBe(false); + }); - it('should filter with double quote and multiple words', () => { - input.value = '"community con'; + it('should filter with double quote and multiple words', () => { + input.value = '"community con'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); - expect(updatedItem.droplab_hidden).toBe(false); - }); + const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + expect(updatedItem.droplab_hidden).toBe(false); + }); - it('should filter with double quote, symbol and multiple words', () => { - input.value = '~"community con'; + it('should filter with double quote, symbol and multiple words', () => { + input.value = '~"community con'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); - expect(updatedItem.droplab_hidden).toBe(false); - }); + const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + expect(updatedItem.droplab_hidden).toBe(false); + }); - it('should filter with single quote', () => { - input.value = '\''; + it('should filter with single quote', () => { + input.value = '\''; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); - expect(updatedItem.droplab_hidden).toBe(false); - }); + const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + expect(updatedItem.droplab_hidden).toBe(false); + }); - it('should filter with single quote and symbol', () => { - input.value = '~\''; + it('should filter with single quote and symbol', () => { + input.value = '~\''; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); - expect(updatedItem.droplab_hidden).toBe(false); - }); + const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + expect(updatedItem.droplab_hidden).toBe(false); + }); - it('should filter with single quote and multiple words', () => { - input.value = '\'community con'; + it('should filter with single quote and multiple words', () => { + input.value = '\'community con'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); - expect(updatedItem.droplab_hidden).toBe(false); - }); + const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + 
expect(updatedItem.droplab_hidden).toBe(false); + }); - it('should filter with single quote, symbol and multiple words', () => { - input.value = '~\'community con'; + it('should filter with single quote, symbol and multiple words', () => { + input.value = '~\'community con'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); - expect(updatedItem.droplab_hidden).toBe(false); - }); + const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + expect(updatedItem.droplab_hidden).toBe(false); }); }); + }); - describe('filterHint', () => { - let input; - - beforeEach(() => { - setFixtures(` - <ul class="tokens-container"> - <li class="input-token"> - <input class="filtered-search" type="text" id="test" /> - </li> - </ul> - `); - - input = document.getElementById('test'); - }); + describe('filterHint', () => { + let input; - it('should filter', () => { - input.value = 'l'; - let updatedItem = gl.DropdownUtils.filterHint(input, { - hint: 'label', - }); - expect(updatedItem.droplab_hidden).toBe(false); + beforeEach(() => { + setFixtures(` + <ul class="tokens-container"> + <li class="input-token"> + <input class="filtered-search" type="text" id="test" /> + </li> + </ul> + `); - input.value = 'o'; - updatedItem = gl.DropdownUtils.filterHint(input, { - hint: 'label', - }); - expect(updatedItem.droplab_hidden).toBe(true); - }); + input = document.getElementById('test'); + }); - it('should return droplab_hidden false when item has no hint', () => { - const updatedItem = gl.DropdownUtils.filterHint(input, {}, ''); - expect(updatedItem.droplab_hidden).toBe(false); + it('should filter', () => { + input.value = 'l'; + let updatedItem = gl.DropdownUtils.filterHint(input, { + hint: 'label', }); + expect(updatedItem.droplab_hidden).toBe(false); - it('should allow multiple if item.type is array', () => { - input.value = 'label:~first la'; - const updatedItem = gl.DropdownUtils.filterHint(input, { - hint: 'label', - type: 'array', - }); - expect(updatedItem.droplab_hidden).toBe(false); + input.value = 'o'; + updatedItem = gl.DropdownUtils.filterHint(input, { + hint: 'label', }); + expect(updatedItem.droplab_hidden).toBe(true); + }); - it('should prevent multiple if item.type is not array', () => { - input.value = 'milestone:~first mile'; - let updatedItem = gl.DropdownUtils.filterHint(input, { - hint: 'milestone', - }); - expect(updatedItem.droplab_hidden).toBe(true); + it('should return droplab_hidden false when item has no hint', () => { + const updatedItem = gl.DropdownUtils.filterHint(input, {}, ''); + expect(updatedItem.droplab_hidden).toBe(false); + }); - updatedItem = gl.DropdownUtils.filterHint(input, { - hint: 'milestone', - type: 'string', - }); - expect(updatedItem.droplab_hidden).toBe(true); + it('should allow multiple if item.type is array', () => { + input.value = 'label:~first la'; + const updatedItem = gl.DropdownUtils.filterHint(input, { + hint: 'label', + type: 'array', }); + expect(updatedItem.droplab_hidden).toBe(false); }); - describe('setDataValueIfSelected', () => { - beforeEach(() => { - spyOn(gl.FilteredSearchDropdownManager, 'addWordToInput') - .and.callFake(() => {}); + it('should prevent multiple if item.type is not array', () => { + input.value = 'milestone:~first mile'; + let updatedItem = gl.DropdownUtils.filterHint(input, { + hint: 'milestone', }); + expect(updatedItem.droplab_hidden).toBe(true); - it('calls addWordToInput when dataValue exists', () => { - const selected = { - getAttribute: () => 'value', - }; - - 
gl.DropdownUtils.setDataValueIfSelected(null, selected); - expect(gl.FilteredSearchDropdownManager.addWordToInput.calls.count()).toEqual(1); + updatedItem = gl.DropdownUtils.filterHint(input, { + hint: 'milestone', + type: 'string', }); + expect(updatedItem.droplab_hidden).toBe(true); + }); + }); - it('returns true when dataValue exists', () => { - const selected = { - getAttribute: () => 'value', - }; + describe('setDataValueIfSelected', () => { + beforeEach(() => { + spyOn(gl.FilteredSearchDropdownManager, 'addWordToInput') + .and.callFake(() => {}); + }); - const result = gl.DropdownUtils.setDataValueIfSelected(null, selected); - expect(result).toBe(true); - }); + it('calls addWordToInput when dataValue exists', () => { + const selected = { + getAttribute: () => 'value', + }; - it('returns false when dataValue does not exist', () => { - const selected = { - getAttribute: () => null, - }; + gl.DropdownUtils.setDataValueIfSelected(null, selected); + expect(gl.FilteredSearchDropdownManager.addWordToInput.calls.count()).toEqual(1); + }); - const result = gl.DropdownUtils.setDataValueIfSelected(null, selected); - expect(result).toBe(false); - }); + it('returns true when dataValue exists', () => { + const selected = { + getAttribute: () => 'value', + }; + + const result = gl.DropdownUtils.setDataValueIfSelected(null, selected); + expect(result).toBe(true); }); - describe('getInputSelectionPosition', () => { - describe('word with trailing spaces', () => { - const value = 'label:none '; + it('returns false when dataValue does not exist', () => { + const selected = { + getAttribute: () => null, + }; + + const result = gl.DropdownUtils.setDataValueIfSelected(null, selected); + expect(result).toBe(false); + }); + }); - it('should return selectionStart when cursor is at the trailing space', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ - selectionStart: 11, - value, - }); + describe('getInputSelectionPosition', () => { + describe('word with trailing spaces', () => { + const value = 'label:none '; - expect(left).toBe(11); - expect(right).toBe(11); + it('should return selectionStart when cursor is at the trailing space', () => { + const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + selectionStart: 11, + value, }); - it('should return input when cursor is at the start of input', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ - selectionStart: 0, - value, - }); + expect(left).toBe(11); + expect(right).toBe(11); + }); - expect(left).toBe(0); - expect(right).toBe(10); + it('should return input when cursor is at the start of input', () => { + const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + selectionStart: 0, + value, }); - it('should return input when cursor is at the middle of input', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ - selectionStart: 7, - value, - }); + expect(left).toBe(0); + expect(right).toBe(10); + }); - expect(left).toBe(0); - expect(right).toBe(10); + it('should return input when cursor is at the middle of input', () => { + const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + selectionStart: 7, + value, }); - it('should return input when cursor is at the end of input', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ - selectionStart: 10, - value, - }); + expect(left).toBe(0); + expect(right).toBe(10); + }); - expect(left).toBe(0); - expect(right).toBe(10); + it('should return input when cursor is at 
the end of input', () => { + const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + selectionStart: 10, + value, }); - }); - describe('multiple words', () => { - const value = 'label:~"Community Contribution"'; + expect(left).toBe(0); + expect(right).toBe(10); + }); + }); - it('should return input when cursor is after the first word', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ - selectionStart: 17, - value, - }); + describe('multiple words', () => { + const value = 'label:~"Community Contribution"'; - expect(left).toBe(0); - expect(right).toBe(31); + it('should return input when cursor is after the first word', () => { + const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + selectionStart: 17, + value, }); - it('should return input when cursor is before the second word', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ - selectionStart: 18, - value, - }); + expect(left).toBe(0); + expect(right).toBe(31); + }); - expect(left).toBe(0); - expect(right).toBe(31); + it('should return input when cursor is before the second word', () => { + const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + selectionStart: 18, + value, }); - }); - describe('incomplete multiple words', () => { - const value = 'label:~"Community Contribution'; + expect(left).toBe(0); + expect(right).toBe(31); + }); + }); - it('should return entire input when cursor is at the start of input', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ - selectionStart: 0, - value, - }); + describe('incomplete multiple words', () => { + const value = 'label:~"Community Contribution'; - expect(left).toBe(0); - expect(right).toBe(30); + it('should return entire input when cursor is at the start of input', () => { + const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + selectionStart: 0, + value, }); - it('should return entire input when cursor is at the end of input', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ - selectionStart: 30, - value, - }); + expect(left).toBe(0); + expect(right).toBe(30); + }); - expect(left).toBe(0); - expect(right).toBe(30); + it('should return entire input when cursor is at the end of input', () => { + const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + selectionStart: 30, + value, }); + + expect(left).toBe(0); + expect(right).toBe(30); }); }); }); -})(); +}); diff --git a/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js index a1da3396d7b..c92a147b937 100644 --- a/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js +++ b/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js @@ -1,101 +1,99 @@ -require('~/extensions/array'); -require('~/filtered_search/filtered_search_visual_tokens'); -require('~/filtered_search/filtered_search_tokenizer'); -require('~/filtered_search/filtered_search_dropdown_manager'); - -(() => { - describe('Filtered Search Dropdown Manager', () => { - describe('addWordToInput', () => { - function getInputValue() { - return document.querySelector('.filtered-search').value; - } - - function setInputValue(value) { - document.querySelector('.filtered-search').value = value; - } - - beforeEach(() => { - setFixtures(` - <ul class="tokens-container"> - <li class="input-token"> - <input class="filtered-search"> - </li> - </ul> - `); - }); +import 
'~/extensions/array'; +import '~/filtered_search/filtered_search_visual_tokens'; +import '~/filtered_search/filtered_search_tokenizer'; +import '~/filtered_search/filtered_search_dropdown_manager'; + +describe('Filtered Search Dropdown Manager', () => { + describe('addWordToInput', () => { + function getInputValue() { + return document.querySelector('.filtered-search').value; + } + + function setInputValue(value) { + document.querySelector('.filtered-search').value = value; + } + + beforeEach(() => { + setFixtures(` + <ul class="tokens-container"> + <li class="input-token"> + <input class="filtered-search"> + </li> + </ul> + `); + }); - describe('input has no existing value', () => { - it('should add just tokenName', () => { - gl.FilteredSearchDropdownManager.addWordToInput('milestone'); + describe('input has no existing value', () => { + it('should add just tokenName', () => { + gl.FilteredSearchDropdownManager.addWordToInput('milestone'); - const token = document.querySelector('.tokens-container .js-visual-token'); + const token = document.querySelector('.tokens-container .js-visual-token'); - expect(token.classList.contains('filtered-search-token')).toEqual(true); - expect(token.querySelector('.name').innerText).toBe('milestone'); - expect(getInputValue()).toBe(''); - }); + expect(token.classList.contains('filtered-search-token')).toEqual(true); + expect(token.querySelector('.name').innerText).toBe('milestone'); + expect(getInputValue()).toBe(''); + }); - it('should add tokenName and tokenValue', () => { - gl.FilteredSearchDropdownManager.addWordToInput('label'); + it('should add tokenName and tokenValue', () => { + gl.FilteredSearchDropdownManager.addWordToInput('label'); - let token = document.querySelector('.tokens-container .js-visual-token'); + let token = document.querySelector('.tokens-container .js-visual-token'); - expect(token.classList.contains('filtered-search-token')).toEqual(true); - expect(token.querySelector('.name').innerText).toBe('label'); - expect(getInputValue()).toBe(''); + expect(token.classList.contains('filtered-search-token')).toEqual(true); + expect(token.querySelector('.name').innerText).toBe('label'); + expect(getInputValue()).toBe(''); - gl.FilteredSearchDropdownManager.addWordToInput('label', 'none'); - // We have to get that reference again - // Because gl.FilteredSearchDropdownManager deletes the previous token - token = document.querySelector('.tokens-container .js-visual-token'); + gl.FilteredSearchDropdownManager.addWordToInput('label', 'none'); + // We have to get that reference again + // Because gl.FilteredSearchDropdownManager deletes the previous token + token = document.querySelector('.tokens-container .js-visual-token'); - expect(token.classList.contains('filtered-search-token')).toEqual(true); - expect(token.querySelector('.name').innerText).toBe('label'); - expect(token.querySelector('.value').innerText).toBe('none'); - expect(getInputValue()).toBe(''); - }); + expect(token.classList.contains('filtered-search-token')).toEqual(true); + expect(token.querySelector('.name').innerText).toBe('label'); + expect(token.querySelector('.value').innerText).toBe('none'); + expect(getInputValue()).toBe(''); }); + }); - describe('input has existing value', () => { - it('should be able to just add tokenName', () => { - setInputValue('a'); - gl.FilteredSearchDropdownManager.addWordToInput('author'); + describe('input has existing value', () => { + it('should be able to just add tokenName', () => { + setInputValue('a'); + 
gl.FilteredSearchDropdownManager.addWordToInput('author'); - const token = document.querySelector('.tokens-container .js-visual-token'); + const token = document.querySelector('.tokens-container .js-visual-token'); - expect(token.classList.contains('filtered-search-token')).toEqual(true); - expect(token.querySelector('.name').innerText).toBe('author'); - expect(getInputValue()).toBe(''); - }); + expect(token.classList.contains('filtered-search-token')).toEqual(true); + expect(token.querySelector('.name').innerText).toBe('author'); + expect(getInputValue()).toBe(''); + }); - it('should replace tokenValue', () => { - gl.FilteredSearchDropdownManager.addWordToInput('author'); + it('should replace tokenValue', () => { + gl.FilteredSearchDropdownManager.addWordToInput('author'); - setInputValue('roo'); - gl.FilteredSearchDropdownManager.addWordToInput(null, '@root'); + setInputValue('roo'); + gl.FilteredSearchDropdownManager.addWordToInput(null, '@root'); - const token = document.querySelector('.tokens-container .js-visual-token'); + const token = document.querySelector('.tokens-container .js-visual-token'); - expect(token.classList.contains('filtered-search-token')).toEqual(true); - expect(token.querySelector('.name').innerText).toBe('author'); - expect(token.querySelector('.value').innerText).toBe('@root'); - expect(getInputValue()).toBe(''); - }); + expect(token.classList.contains('filtered-search-token')).toEqual(true); + expect(token.querySelector('.name').innerText).toBe('author'); + expect(token.querySelector('.value').innerText).toBe('@root'); + expect(getInputValue()).toBe(''); + }); - it('should add tokenValues containing spaces', () => { - gl.FilteredSearchDropdownManager.addWordToInput('label'); + it('should add tokenValues containing spaces', () => { + gl.FilteredSearchDropdownManager.addWordToInput('label'); - setInputValue('"test '); - gl.FilteredSearchDropdownManager.addWordToInput('label', '~\'"test me"\''); + setInputValue('"test '); + gl.FilteredSearchDropdownManager.addWordToInput('label', '~\'"test me"\''); - const token = document.querySelector('.tokens-container .js-visual-token'); + const token = document.querySelector('.tokens-container .js-visual-token'); - expect(token.classList.contains('filtered-search-token')).toEqual(true); - expect(token.querySelector('.name').innerText).toBe('label'); - expect(token.querySelector('.value').innerText).toBe('~\'"test me"\''); - expect(getInputValue()).toBe(''); - }); + expect(token.classList.contains('filtered-search-token')).toEqual(true); + expect(token.querySelector('.name').innerText).toBe('label'); + expect(token.querySelector('.value').innerText).toBe('~\'"test me"\''); + expect(getInputValue()).toBe(''); }); }); }); -})(); +}); diff --git a/spec/javascripts/filtered_search/filtered_search_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_manager_spec.js index 5f7c05e9014..7c7def3470d 100644 --- a/spec/javascripts/filtered_search/filtered_search_manager_spec.js +++ b/spec/javascripts/filtered_search/filtered_search_manager_spec.js @@ -1,276 +1,362 @@ -require('~/lib/utils/url_utility'); -require('~/lib/utils/common_utils'); -require('~/filtered_search/filtered_search_token_keys'); -require('~/filtered_search/filtered_search_tokenizer'); -require('~/filtered_search/filtered_search_dropdown_manager'); -require('~/filtered_search/filtered_search_manager'); -const FilteredSearchSpecHelper = require('../helpers/filtered_search_spec_helper'); - -(() => { - describe('Filtered Search Manager', () => { - let input; - 
let manager; - let tokensContainer; - const placeholder = 'Search or filter results...'; - - function dispatchBackspaceEvent(element, eventType) { - const backspaceKey = 8; - const event = new Event(eventType); - event.keyCode = backspaceKey; - element.dispatchEvent(event); - } - - function dispatchDeleteEvent(element, eventType) { - const deleteKey = 46; - const event = new Event(eventType); - event.keyCode = deleteKey; - element.dispatchEvent(event); - } +import * as recentSearchesStoreSrc from '~/filtered_search/stores/recent_searches_store'; +import RecentSearchesService from '~/filtered_search/services/recent_searches_service'; +import RecentSearchesServiceError from '~/filtered_search/services/recent_searches_service_error'; +import '~/lib/utils/url_utility'; +import '~/lib/utils/common_utils'; +import '~/filtered_search/filtered_search_token_keys'; +import '~/filtered_search/filtered_search_tokenizer'; +import '~/filtered_search/filtered_search_dropdown_manager'; +import '~/filtered_search/filtered_search_manager'; +import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper'; + +describe('Filtered Search Manager', () => { + let input; + let manager; + let tokensContainer; + const placeholder = 'Search or filter results...'; + + function dispatchBackspaceEvent(element, eventType) { + const backspaceKey = 8; + const event = new Event(eventType); + event.keyCode = backspaceKey; + element.dispatchEvent(event); + } + + function dispatchDeleteEvent(element, eventType) { + const deleteKey = 46; + const event = new Event(eventType); + event.keyCode = deleteKey; + element.dispatchEvent(event); + } + + function getVisualTokens() { + return tokensContainer.querySelectorAll('.js-visual-token'); + } + + beforeEach(() => { + setFixtures(` + <div class="filtered-search-box"> + <form> + <ul class="tokens-container list-unstyled"> + ${FilteredSearchSpecHelper.createInputHTML(placeholder)} + </ul> + <button class="clear-search" type="button"> + <i class="fa fa-times"></i> + </button> + </form> + </div> + `); + + spyOn(gl.FilteredSearchManager.prototype, 'loadSearchParamsFromURL').and.callFake(() => {}); + spyOn(gl.FilteredSearchManager.prototype, 'tokenChange').and.callFake(() => {}); + spyOn(gl.FilteredSearchDropdownManager.prototype, 'setDropdown').and.callFake(() => {}); + spyOn(gl.FilteredSearchDropdownManager.prototype, 'updateDropdownOffset').and.callFake(() => {}); + spyOn(gl.utils, 'getParameterByName').and.returnValue(null); + spyOn(gl.FilteredSearchVisualTokens, 'unselectTokens').and.callThrough(); + + input = document.querySelector('.filtered-search'); + tokensContainer = document.querySelector('.tokens-container'); + manager = new gl.FilteredSearchManager(); + }); + + afterEach(() => { + manager.cleanup(); + }); + + describe('class constructor', () => { + const isLocalStorageAvailable = 'isLocalStorageAvailable'; + let filteredSearchManager; beforeEach(() => { - setFixtures(` - <div class="filtered-search-input-container"> - <form> - <ul class="tokens-container list-unstyled"> - ${FilteredSearchSpecHelper.createInputHTML(placeholder)} - </ul> - <button class="clear-search" type="button"> - <i class="fa fa-times"></i> - </button> - </form> - </div> - `); + spyOn(RecentSearchesService, 'isAvailable').and.returnValue(isLocalStorageAvailable); + spyOn(recentSearchesStoreSrc, 'default'); - spyOn(gl.FilteredSearchManager.prototype, 'loadSearchParamsFromURL').and.callFake(() => {}); - spyOn(gl.FilteredSearchManager.prototype, 'tokenChange').and.callFake(() => {}); - 
spyOn(gl.FilteredSearchDropdownManager.prototype, 'setDropdown').and.callFake(() => {}); - spyOn(gl.FilteredSearchDropdownManager.prototype, 'updateDropdownOffset').and.callFake(() => {}); - spyOn(gl.utils, 'getParameterByName').and.returnValue(null); - spyOn(gl.FilteredSearchVisualTokens, 'unselectTokens').and.callThrough(); + filteredSearchManager = new gl.FilteredSearchManager(); - input = document.querySelector('.filtered-search'); - tokensContainer = document.querySelector('.tokens-container'); - manager = new gl.FilteredSearchManager(); + return filteredSearchManager; }); - afterEach(() => { - manager.cleanup(); + it('should instantiate RecentSearchesStore with isLocalStorageAvailable', () => { + expect(RecentSearchesService.isAvailable).toHaveBeenCalled(); + expect(recentSearchesStoreSrc.default).toHaveBeenCalledWith({ + isLocalStorageAvailable, + }); }); - describe('search', () => { - const defaultParams = '?scope=all&utf8=%E2%9C%93&state=opened'; + it('should not instantiate Flash if an RecentSearchesServiceError is caught', () => { + spyOn(RecentSearchesService.prototype, 'fetch').and.callFake(() => Promise.reject(new RecentSearchesServiceError())); + spyOn(window, 'Flash'); - it('should search with a single word', (done) => { - input.value = 'searchTerm'; + filteredSearchManager = new gl.FilteredSearchManager(); - spyOn(gl.utils, 'visitUrl').and.callFake((url) => { - expect(url).toEqual(`${defaultParams}&search=searchTerm`); - done(); - }); - - manager.search(); - }); + expect(window.Flash).not.toHaveBeenCalled(); + }); + }); - it('should search with multiple words', (done) => { - input.value = 'awesome search terms'; + describe('search', () => { + const defaultParams = '?scope=all&utf8=%E2%9C%93&state=opened'; - spyOn(gl.utils, 'visitUrl').and.callFake((url) => { - expect(url).toEqual(`${defaultParams}&search=awesome+search+terms`); - done(); - }); + it('should search with a single word', (done) => { + input.value = 'searchTerm'; - manager.search(); + spyOn(gl.utils, 'visitUrl').and.callFake((url) => { + expect(url).toEqual(`${defaultParams}&search=searchTerm`); + done(); }); - it('should search with special characters', (done) => { - input.value = '~!@#$%^&*()_+{}:<>,.?/'; + manager.search(); + }); - spyOn(gl.utils, 'visitUrl').and.callFake((url) => { - expect(url).toEqual(`${defaultParams}&search=~!%40%23%24%25%5E%26*()_%2B%7B%7D%3A%3C%3E%2C.%3F%2F`); - done(); - }); + it('should search with multiple words', (done) => { + input.value = 'awesome search terms'; - manager.search(); + spyOn(gl.utils, 'visitUrl').and.callFake((url) => { + expect(url).toEqual(`${defaultParams}&search=awesome+search+terms`); + done(); }); - it('removes duplicated tokens', (done) => { - tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(` - ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug')} - ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug')} - `); + manager.search(); + }); - spyOn(gl.utils, 'visitUrl').and.callFake((url) => { - expect(url).toEqual(`${defaultParams}&label_name[]=bug`); - done(); - }); + it('should search with special characters', (done) => { + input.value = '~!@#$%^&*()_+{}:<>,.?/'; - manager.search(); + spyOn(gl.utils, 'visitUrl').and.callFake((url) => { + expect(url).toEqual(`${defaultParams}&search=~!%40%23%24%25%5E%26*()_%2B%7B%7D%3A%3C%3E%2C.%3F%2F`); + done(); }); + + manager.search(); }); - describe('handleInputPlaceholder', () => { - it('should render placeholder when there is no input', () => { - 
expect(input.placeholder).toEqual(placeholder); - }); + it('removes duplicated tokens', (done) => { + tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(` + ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug')} + ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug')} + `); - it('should not render placeholder when there is input', () => { - input.value = 'test words'; + spyOn(gl.utils, 'visitUrl').and.callFake((url) => { + expect(url).toEqual(`${defaultParams}&label_name[]=bug`); + done(); + }); - const event = new Event('input'); - input.dispatchEvent(event); + manager.search(); + }); + }); - expect(input.placeholder).toEqual(''); - }); + describe('handleInputPlaceholder', () => { + it('should render placeholder when there is no input', () => { + expect(input.placeholder).toEqual(placeholder); + }); - it('should not render placeholder when there are tokens and no input', () => { - tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML( - FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug'), - ); + it('should not render placeholder when there is input', () => { + input.value = 'test words'; - const event = new Event('input'); - input.dispatchEvent(event); + const event = new Event('input'); + input.dispatchEvent(event); - expect(input.placeholder).toEqual(''); - }); + expect(input.placeholder).toEqual(''); }); - describe('checkForBackspace', () => { - describe('tokens and no input', () => { - beforeEach(() => { - tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML( - FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug'), - ); - }); + it('should not render placeholder when there are tokens and no input', () => { + tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML( + FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug'), + ); - it('removes last token', () => { - spyOn(gl.FilteredSearchVisualTokens, 'removeLastTokenPartial').and.callThrough(); - dispatchBackspaceEvent(input, 'keyup'); + const event = new Event('input'); + input.dispatchEvent(event); - expect(gl.FilteredSearchVisualTokens.removeLastTokenPartial).toHaveBeenCalled(); - }); - - it('sets the input', () => { - spyOn(gl.FilteredSearchVisualTokens, 'getLastTokenPartial').and.callThrough(); - dispatchDeleteEvent(input, 'keyup'); + expect(input.placeholder).toEqual(''); + }); + }); - expect(gl.FilteredSearchVisualTokens.getLastTokenPartial).toHaveBeenCalled(); - expect(input.value).toEqual('~bug'); - }); + describe('checkForBackspace', () => { + describe('tokens and no input', () => { + beforeEach(() => { + tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML( + FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug'), + ); }); - it('does not remove token or change input when there is existing input', () => { + it('removes last token', () => { spyOn(gl.FilteredSearchVisualTokens, 'removeLastTokenPartial').and.callThrough(); - spyOn(gl.FilteredSearchVisualTokens, 'getLastTokenPartial').and.callThrough(); + dispatchBackspaceEvent(input, 'keyup'); - input.value = 'text'; + expect(gl.FilteredSearchVisualTokens.removeLastTokenPartial).toHaveBeenCalled(); + }); + + it('sets the input', () => { + spyOn(gl.FilteredSearchVisualTokens, 'getLastTokenPartial').and.callThrough(); dispatchDeleteEvent(input, 'keyup'); - expect(gl.FilteredSearchVisualTokens.removeLastTokenPartial).not.toHaveBeenCalled(); - 
expect(gl.FilteredSearchVisualTokens.getLastTokenPartial).not.toHaveBeenCalled(); - expect(input.value).toEqual('text'); + expect(gl.FilteredSearchVisualTokens.getLastTokenPartial).toHaveBeenCalled(); + expect(input.value).toEqual('~bug'); }); }); - describe('removeSelectedToken', () => { - function getVisualTokens() { - return tokensContainer.querySelectorAll('.js-visual-token'); - } + it('does not remove token or change input when there is existing input', () => { + spyOn(gl.FilteredSearchVisualTokens, 'removeLastTokenPartial').and.callThrough(); + spyOn(gl.FilteredSearchVisualTokens, 'getLastTokenPartial').and.callThrough(); + + input.value = 'text'; + dispatchDeleteEvent(input, 'keyup'); + + expect(gl.FilteredSearchVisualTokens.removeLastTokenPartial).not.toHaveBeenCalled(); + expect(gl.FilteredSearchVisualTokens.getLastTokenPartial).not.toHaveBeenCalled(); + expect(input.value).toEqual('text'); + }); + }); + + describe('removeToken', () => { + it('removes token even when it is already selected', () => { + tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML( + FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', 'none', true), + ); + + tokensContainer.querySelector('.js-visual-token .remove-token').click(); + expect(tokensContainer.querySelector('.js-visual-token')).toEqual(null); + }); + describe('unselected token', () => { beforeEach(() => { + spyOn(gl.FilteredSearchManager.prototype, 'removeSelectedToken').and.callThrough(); + tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML( - FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', 'none', true), + FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', 'none'), ); + tokensContainer.querySelector('.js-visual-token .remove-token').click(); }); - it('removes selected token when the backspace key is pressed', () => { - expect(getVisualTokens().length).toEqual(1); - - dispatchBackspaceEvent(document, 'keydown'); + it('removes token when remove button is selected', () => { + expect(tokensContainer.querySelector('.js-visual-token')).toEqual(null); + }); - expect(getVisualTokens().length).toEqual(0); + it('calls removeSelectedToken', () => { + expect(manager.removeSelectedToken).toHaveBeenCalled(); }); + }); + }); - it('removes selected token when the delete key is pressed', () => { - expect(getVisualTokens().length).toEqual(1); + describe('removeSelectedTokenKeydown', () => { + beforeEach(() => { + tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML( + FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', 'none', true), + ); + }); - dispatchDeleteEvent(document, 'keydown'); + it('removes selected token when the backspace key is pressed', () => { + expect(getVisualTokens().length).toEqual(1); - expect(getVisualTokens().length).toEqual(0); - }); + dispatchBackspaceEvent(document, 'keydown'); - it('updates the input placeholder after removal', () => { - manager.handleInputPlaceholder(); + expect(getVisualTokens().length).toEqual(0); + }); - expect(input.placeholder).toEqual(''); - expect(getVisualTokens().length).toEqual(1); + it('removes selected token when the delete key is pressed', () => { + expect(getVisualTokens().length).toEqual(1); - dispatchBackspaceEvent(document, 'keydown'); + dispatchDeleteEvent(document, 'keydown'); - expect(input.placeholder).not.toEqual(''); - expect(getVisualTokens().length).toEqual(0); - }); + expect(getVisualTokens().length).toEqual(0); + }); - it('updates the clear button after 
removal', () => { - manager.toggleClearSearchButton(); + it('updates the input placeholder after removal', () => { + manager.handleInputPlaceholder(); - const clearButton = document.querySelector('.clear-search'); + expect(input.placeholder).toEqual(''); + expect(getVisualTokens().length).toEqual(1); - expect(clearButton.classList.contains('hidden')).toEqual(false); - expect(getVisualTokens().length).toEqual(1); + dispatchBackspaceEvent(document, 'keydown'); - dispatchBackspaceEvent(document, 'keydown'); + expect(input.placeholder).not.toEqual(''); + expect(getVisualTokens().length).toEqual(0); + }); - expect(clearButton.classList.contains('hidden')).toEqual(true); - expect(getVisualTokens().length).toEqual(0); - }); + it('updates the clear button after removal', () => { + manager.toggleClearSearchButton(); + + const clearButton = document.querySelector('.clear-search'); + + expect(clearButton.classList.contains('hidden')).toEqual(false); + expect(getVisualTokens().length).toEqual(1); + + dispatchBackspaceEvent(document, 'keydown'); + + expect(clearButton.classList.contains('hidden')).toEqual(true); + expect(getVisualTokens().length).toEqual(0); }); + }); - describe('unselects token', () => { - beforeEach(() => { - tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(` - ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug', true)} - ${FilteredSearchSpecHelper.createSearchVisualTokenHTML('search term')} - ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~awesome')} - `); - }); + describe('removeSelectedToken', () => { + beforeEach(() => { + spyOn(gl.FilteredSearchVisualTokens, 'removeSelectedToken').and.callThrough(); + spyOn(gl.FilteredSearchManager.prototype, 'handleInputPlaceholder').and.callThrough(); + spyOn(gl.FilteredSearchManager.prototype, 'toggleClearSearchButton').and.callThrough(); + manager.removeSelectedToken(); + }); - it('unselects token when input is clicked', () => { - const selectedToken = tokensContainer.querySelector('.js-visual-token .selected'); + it('calls FilteredSearchVisualTokens.removeSelectedToken', () => { + expect(gl.FilteredSearchVisualTokens.removeSelectedToken).toHaveBeenCalled(); + }); - expect(selectedToken.classList.contains('selected')).toEqual(true); - expect(gl.FilteredSearchVisualTokens.unselectTokens).not.toHaveBeenCalled(); + it('calls handleInputPlaceholder', () => { + expect(manager.handleInputPlaceholder).toHaveBeenCalled(); + }); - // Click directly on input attached to document - // so that the click event will propagate properly - document.querySelector('.filtered-search').click(); + it('calls toggleClearSearchButton', () => { + expect(manager.toggleClearSearchButton).toHaveBeenCalled(); + }); - expect(gl.FilteredSearchVisualTokens.unselectTokens).toHaveBeenCalled(); - expect(selectedToken.classList.contains('selected')).toEqual(false); - }); + it('calls update dropdown offset', () => { + expect(manager.dropdownManager.updateDropdownOffset).toHaveBeenCalled(); + }); + }); - it('unselects token when document.body is clicked', () => { - const selectedToken = tokensContainer.querySelector('.js-visual-token .selected'); + describe('unselects token', () => { + beforeEach(() => { + tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(` + ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug', true)} + ${FilteredSearchSpecHelper.createSearchVisualTokenHTML('search term')} + ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~awesome')} 
+ `); + }); - expect(selectedToken.classList.contains('selected')).toEqual(true); - expect(gl.FilteredSearchVisualTokens.unselectTokens).not.toHaveBeenCalled(); + it('unselects token when input is clicked', () => { + const selectedToken = tokensContainer.querySelector('.js-visual-token .selected'); - document.body.click(); + expect(selectedToken.classList.contains('selected')).toEqual(true); + expect(gl.FilteredSearchVisualTokens.unselectTokens).not.toHaveBeenCalled(); - expect(selectedToken.classList.contains('selected')).toEqual(false); - expect(gl.FilteredSearchVisualTokens.unselectTokens).toHaveBeenCalled(); - }); + // Click directly on input attached to document + // so that the click event will propagate properly + document.querySelector('.filtered-search').click(); + + expect(gl.FilteredSearchVisualTokens.unselectTokens).toHaveBeenCalled(); + expect(selectedToken.classList.contains('selected')).toEqual(false); }); - describe('toggleInputContainerFocus', () => { - it('toggles on focus', () => { - input.focus(); - expect(document.querySelector('.filtered-search-input-container').classList.contains('focus')).toEqual(true); - }); + it('unselects token when document.body is clicked', () => { + const selectedToken = tokensContainer.querySelector('.js-visual-token .selected'); - it('toggles on blur', () => { - input.blur(); - expect(document.querySelector('.filtered-search-input-container').classList.contains('focus')).toEqual(false); - }); + expect(selectedToken.classList.contains('selected')).toEqual(true); + expect(gl.FilteredSearchVisualTokens.unselectTokens).not.toHaveBeenCalled(); + + document.body.click(); + + expect(selectedToken.classList.contains('selected')).toEqual(false); + expect(gl.FilteredSearchVisualTokens.unselectTokens).toHaveBeenCalled(); + }); + }); + + describe('toggleInputContainerFocus', () => { + it('toggles on focus', () => { + input.focus(); + expect(document.querySelector('.filtered-search-box').classList.contains('focus')).toEqual(true); + }); + + it('toggles on blur', () => { + input.blur(); + expect(document.querySelector('.filtered-search-box').classList.contains('focus')).toEqual(false); }); }); -})(); +}); diff --git a/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js b/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js index cf409a7e509..1a7631994b4 100644 --- a/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js +++ b/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js @@ -1,110 +1,108 @@ -require('~/extensions/array'); -require('~/filtered_search/filtered_search_token_keys'); +import '~/extensions/array'; +import '~/filtered_search/filtered_search_token_keys'; -(() => { - describe('Filtered Search Token Keys', () => { - describe('get', () => { - let tokenKeys; - - beforeEach(() => { - tokenKeys = gl.FilteredSearchTokenKeys.get(); - }); - - it('should return tokenKeys', () => { - expect(tokenKeys !== null).toBe(true); - }); - - it('should return tokenKeys as an array', () => { - expect(tokenKeys instanceof Array).toBe(true); - }); - }); - - describe('getConditions', () => { - let conditions; - - beforeEach(() => { - conditions = gl.FilteredSearchTokenKeys.getConditions(); - }); - - it('should return conditions', () => { - expect(conditions !== null).toBe(true); - }); - - it('should return conditions as an array', () => { - expect(conditions instanceof Array).toBe(true); - }); - }); - - describe('searchByKey', () => { - it('should return null when key not found', () => { - const tokenKey = 
gl.FilteredSearchTokenKeys.searchByKey('notakey'); - expect(tokenKey === null).toBe(true); - }); - - it('should return tokenKey when found by key', () => { - const tokenKeys = gl.FilteredSearchTokenKeys.get(); - const result = gl.FilteredSearchTokenKeys.searchByKey(tokenKeys[0].key); - expect(result).toEqual(tokenKeys[0]); - }); - }); - - describe('searchBySymbol', () => { - it('should return null when symbol not found', () => { - const tokenKey = gl.FilteredSearchTokenKeys.searchBySymbol('notasymbol'); - expect(tokenKey === null).toBe(true); - }); - - it('should return tokenKey when found by symbol', () => { - const tokenKeys = gl.FilteredSearchTokenKeys.get(); - const result = gl.FilteredSearchTokenKeys.searchBySymbol(tokenKeys[0].symbol); - expect(result).toEqual(tokenKeys[0]); - }); - }); - - describe('searchByKeyParam', () => { - it('should return null when key param not found', () => { - const tokenKey = gl.FilteredSearchTokenKeys.searchByKeyParam('notakeyparam'); - expect(tokenKey === null).toBe(true); - }); - - it('should return tokenKey when found by key param', () => { - const tokenKeys = gl.FilteredSearchTokenKeys.get(); - const result = gl.FilteredSearchTokenKeys.searchByKeyParam(`${tokenKeys[0].key}_${tokenKeys[0].param}`); - expect(result).toEqual(tokenKeys[0]); - }); - - it('should return alternative tokenKey when found by key param', () => { - const tokenKeys = gl.FilteredSearchTokenKeys.getAlternatives(); - const result = gl.FilteredSearchTokenKeys.searchByKeyParam(`${tokenKeys[0].key}_${tokenKeys[0].param}`); - expect(result).toEqual(tokenKeys[0]); - }); - }); - - describe('searchByConditionUrl', () => { - it('should return null when condition url not found', () => { - const condition = gl.FilteredSearchTokenKeys.searchByConditionUrl(null); - expect(condition === null).toBe(true); - }); - - it('should return condition when found by url', () => { - const conditions = gl.FilteredSearchTokenKeys.getConditions(); - const result = gl.FilteredSearchTokenKeys.searchByConditionUrl(conditions[0].url); - expect(result).toBe(conditions[0]); - }); - }); - - describe('searchByConditionKeyValue', () => { - it('should return null when condition tokenKey and value not found', () => { - const condition = gl.FilteredSearchTokenKeys.searchByConditionKeyValue(null, null); - expect(condition === null).toBe(true); - }); - - it('should return condition when found by tokenKey and value', () => { - const conditions = gl.FilteredSearchTokenKeys.getConditions(); - const result = gl.FilteredSearchTokenKeys - .searchByConditionKeyValue(conditions[0].tokenKey, conditions[0].value); - expect(result).toEqual(conditions[0]); - }); +describe('Filtered Search Token Keys', () => { + describe('get', () => { + let tokenKeys; + + beforeEach(() => { + tokenKeys = gl.FilteredSearchTokenKeys.get(); + }); + + it('should return tokenKeys', () => { + expect(tokenKeys !== null).toBe(true); + }); + + it('should return tokenKeys as an array', () => { + expect(tokenKeys instanceof Array).toBe(true); + }); + }); + + describe('getConditions', () => { + let conditions; + + beforeEach(() => { + conditions = gl.FilteredSearchTokenKeys.getConditions(); + }); + + it('should return conditions', () => { + expect(conditions !== null).toBe(true); + }); + + it('should return conditions as an array', () => { + expect(conditions instanceof Array).toBe(true); + }); + }); + + describe('searchByKey', () => { + it('should return null when key not found', () => { + const tokenKey = gl.FilteredSearchTokenKeys.searchByKey('notakey'); + 
expect(tokenKey === null).toBe(true); + }); + + it('should return tokenKey when found by key', () => { + const tokenKeys = gl.FilteredSearchTokenKeys.get(); + const result = gl.FilteredSearchTokenKeys.searchByKey(tokenKeys[0].key); + expect(result).toEqual(tokenKeys[0]); + }); + }); + + describe('searchBySymbol', () => { + it('should return null when symbol not found', () => { + const tokenKey = gl.FilteredSearchTokenKeys.searchBySymbol('notasymbol'); + expect(tokenKey === null).toBe(true); + }); + + it('should return tokenKey when found by symbol', () => { + const tokenKeys = gl.FilteredSearchTokenKeys.get(); + const result = gl.FilteredSearchTokenKeys.searchBySymbol(tokenKeys[0].symbol); + expect(result).toEqual(tokenKeys[0]); + }); + }); + + describe('searchByKeyParam', () => { + it('should return null when key param not found', () => { + const tokenKey = gl.FilteredSearchTokenKeys.searchByKeyParam('notakeyparam'); + expect(tokenKey === null).toBe(true); + }); + + it('should return tokenKey when found by key param', () => { + const tokenKeys = gl.FilteredSearchTokenKeys.get(); + const result = gl.FilteredSearchTokenKeys.searchByKeyParam(`${tokenKeys[0].key}_${tokenKeys[0].param}`); + expect(result).toEqual(tokenKeys[0]); + }); + + it('should return alternative tokenKey when found by key param', () => { + const tokenKeys = gl.FilteredSearchTokenKeys.getAlternatives(); + const result = gl.FilteredSearchTokenKeys.searchByKeyParam(`${tokenKeys[0].key}_${tokenKeys[0].param}`); + expect(result).toEqual(tokenKeys[0]); + }); + }); + + describe('searchByConditionUrl', () => { + it('should return null when condition url not found', () => { + const condition = gl.FilteredSearchTokenKeys.searchByConditionUrl(null); + expect(condition === null).toBe(true); + }); + + it('should return condition when found by url', () => { + const conditions = gl.FilteredSearchTokenKeys.getConditions(); + const result = gl.FilteredSearchTokenKeys.searchByConditionUrl(conditions[0].url); + expect(result).toBe(conditions[0]); + }); + }); + + describe('searchByConditionKeyValue', () => { + it('should return null when condition tokenKey and value not found', () => { + const condition = gl.FilteredSearchTokenKeys.searchByConditionKeyValue(null, null); + expect(condition === null).toBe(true); + }); + + it('should return condition when found by tokenKey and value', () => { + const conditions = gl.FilteredSearchTokenKeys.getConditions(); + const result = gl.FilteredSearchTokenKeys + .searchByConditionKeyValue(conditions[0].tokenKey, conditions[0].value); + expect(result).toEqual(conditions[0]); }); }); -})(); +}); diff --git a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js index cabbc694ec4..9561580c839 100644 --- a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js +++ b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js @@ -1,135 +1,133 @@ -require('~/extensions/array'); -require('~/filtered_search/filtered_search_token_keys'); -require('~/filtered_search/filtered_search_tokenizer'); - -(() => { - describe('Filtered Search Tokenizer', () => { - describe('processTokens', () => { - it('returns for input containing only search value', () => { - const results = gl.FilteredSearchTokenizer.processTokens('searchTerm'); - expect(results.searchToken).toBe('searchTerm'); - expect(results.tokens.length).toBe(0); - expect(results.lastToken).toBe(results.searchToken); - }); - - it('returns for input containing only 
tokens', () => { - const results = gl.FilteredSearchTokenizer - .processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none'); - expect(results.searchToken).toBe(''); - expect(results.tokens.length).toBe(4); - expect(results.tokens[3]).toBe(results.lastToken); - - expect(results.tokens[0].key).toBe('author'); - expect(results.tokens[0].value).toBe('root'); - expect(results.tokens[0].symbol).toBe('@'); - - expect(results.tokens[1].key).toBe('label'); - expect(results.tokens[1].value).toBe('"Very Important"'); - expect(results.tokens[1].symbol).toBe('~'); - - expect(results.tokens[2].key).toBe('milestone'); - expect(results.tokens[2].value).toBe('v1.0'); - expect(results.tokens[2].symbol).toBe('%'); - - expect(results.tokens[3].key).toBe('assignee'); - expect(results.tokens[3].value).toBe('none'); - expect(results.tokens[3].symbol).toBe(''); - }); - - it('returns for input starting with search value and ending with tokens', () => { - const results = gl.FilteredSearchTokenizer - .processTokens('searchTerm anotherSearchTerm milestone:none'); - expect(results.searchToken).toBe('searchTerm anotherSearchTerm'); - expect(results.tokens.length).toBe(1); - expect(results.tokens[0]).toBe(results.lastToken); - expect(results.tokens[0].key).toBe('milestone'); - expect(results.tokens[0].value).toBe('none'); - expect(results.tokens[0].symbol).toBe(''); - }); - - it('returns for input starting with tokens and ending with search value', () => { - const results = gl.FilteredSearchTokenizer - .processTokens('assignee:@user searchTerm'); - - expect(results.searchToken).toBe('searchTerm'); - expect(results.tokens.length).toBe(1); - expect(results.tokens[0].key).toBe('assignee'); - expect(results.tokens[0].value).toBe('user'); - expect(results.tokens[0].symbol).toBe('@'); - expect(results.lastToken).toBe(results.searchToken); - }); - - it('returns for input containing search value wrapped between tokens', () => { - const results = gl.FilteredSearchTokenizer - .processTokens('author:@root label:~"Won\'t fix" searchTerm anotherSearchTerm milestone:none'); - - expect(results.searchToken).toBe('searchTerm anotherSearchTerm'); - expect(results.tokens.length).toBe(3); - expect(results.tokens[2]).toBe(results.lastToken); - - expect(results.tokens[0].key).toBe('author'); - expect(results.tokens[0].value).toBe('root'); - expect(results.tokens[0].symbol).toBe('@'); - - expect(results.tokens[1].key).toBe('label'); - expect(results.tokens[1].value).toBe('"Won\'t fix"'); - expect(results.tokens[1].symbol).toBe('~'); - - expect(results.tokens[2].key).toBe('milestone'); - expect(results.tokens[2].value).toBe('none'); - expect(results.tokens[2].symbol).toBe(''); - }); - - it('returns for input containing search value in between tokens', () => { - const results = gl.FilteredSearchTokenizer - .processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing'); - expect(results.searchToken).toBe('searchTerm anotherSearchTerm'); - expect(results.tokens.length).toBe(3); - expect(results.tokens[2]).toBe(results.lastToken); - - expect(results.tokens[0].key).toBe('author'); - expect(results.tokens[0].value).toBe('root'); - expect(results.tokens[0].symbol).toBe('@'); - - expect(results.tokens[1].key).toBe('assignee'); - expect(results.tokens[1].value).toBe('none'); - expect(results.tokens[1].symbol).toBe(''); - - expect(results.tokens[2].key).toBe('label'); - expect(results.tokens[2].value).toBe('Doing'); - expect(results.tokens[2].symbol).toBe('~'); - }); - - it('returns search value for 
invalid tokens', () => { - const results = gl.FilteredSearchTokenizer.processTokens('fake:token'); - expect(results.lastToken).toBe('fake:token'); - expect(results.searchToken).toBe('fake:token'); - expect(results.tokens.length).toEqual(0); - }); - - it('returns search value and token for mix of valid and invalid tokens', () => { - const results = gl.FilteredSearchTokenizer.processTokens('label:real fake:token'); - expect(results.tokens.length).toEqual(1); - expect(results.tokens[0].key).toBe('label'); - expect(results.tokens[0].value).toBe('real'); - expect(results.tokens[0].symbol).toBe(''); - expect(results.lastToken).toBe('fake:token'); - expect(results.searchToken).toBe('fake:token'); - }); - - it('returns search value for invalid symbols', () => { - const results = gl.FilteredSearchTokenizer.processTokens('std::includes'); - expect(results.lastToken).toBe('std::includes'); - expect(results.searchToken).toBe('std::includes'); - }); - - it('removes duplicated values', () => { - const results = gl.FilteredSearchTokenizer.processTokens('label:~foo label:~foo'); - expect(results.tokens.length).toBe(1); - expect(results.tokens[0].key).toBe('label'); - expect(results.tokens[0].value).toBe('foo'); - expect(results.tokens[0].symbol).toBe('~'); - }); +import '~/extensions/array'; +import '~/filtered_search/filtered_search_token_keys'; +import '~/filtered_search/filtered_search_tokenizer'; + +describe('Filtered Search Tokenizer', () => { + describe('processTokens', () => { + it('returns for input containing only search value', () => { + const results = gl.FilteredSearchTokenizer.processTokens('searchTerm'); + expect(results.searchToken).toBe('searchTerm'); + expect(results.tokens.length).toBe(0); + expect(results.lastToken).toBe(results.searchToken); + }); + + it('returns for input containing only tokens', () => { + const results = gl.FilteredSearchTokenizer + .processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none'); + expect(results.searchToken).toBe(''); + expect(results.tokens.length).toBe(4); + expect(results.tokens[3]).toBe(results.lastToken); + + expect(results.tokens[0].key).toBe('author'); + expect(results.tokens[0].value).toBe('root'); + expect(results.tokens[0].symbol).toBe('@'); + + expect(results.tokens[1].key).toBe('label'); + expect(results.tokens[1].value).toBe('"Very Important"'); + expect(results.tokens[1].symbol).toBe('~'); + + expect(results.tokens[2].key).toBe('milestone'); + expect(results.tokens[2].value).toBe('v1.0'); + expect(results.tokens[2].symbol).toBe('%'); + + expect(results.tokens[3].key).toBe('assignee'); + expect(results.tokens[3].value).toBe('none'); + expect(results.tokens[3].symbol).toBe(''); + }); + + it('returns for input starting with search value and ending with tokens', () => { + const results = gl.FilteredSearchTokenizer + .processTokens('searchTerm anotherSearchTerm milestone:none'); + expect(results.searchToken).toBe('searchTerm anotherSearchTerm'); + expect(results.tokens.length).toBe(1); + expect(results.tokens[0]).toBe(results.lastToken); + expect(results.tokens[0].key).toBe('milestone'); + expect(results.tokens[0].value).toBe('none'); + expect(results.tokens[0].symbol).toBe(''); + }); + + it('returns for input starting with tokens and ending with search value', () => { + const results = gl.FilteredSearchTokenizer + .processTokens('assignee:@user searchTerm'); + + expect(results.searchToken).toBe('searchTerm'); + expect(results.tokens.length).toBe(1); + expect(results.tokens[0].key).toBe('assignee'); + 
expect(results.tokens[0].value).toBe('user'); + expect(results.tokens[0].symbol).toBe('@'); + expect(results.lastToken).toBe(results.searchToken); + }); + + it('returns for input containing search value wrapped between tokens', () => { + const results = gl.FilteredSearchTokenizer + .processTokens('author:@root label:~"Won\'t fix" searchTerm anotherSearchTerm milestone:none'); + + expect(results.searchToken).toBe('searchTerm anotherSearchTerm'); + expect(results.tokens.length).toBe(3); + expect(results.tokens[2]).toBe(results.lastToken); + + expect(results.tokens[0].key).toBe('author'); + expect(results.tokens[0].value).toBe('root'); + expect(results.tokens[0].symbol).toBe('@'); + + expect(results.tokens[1].key).toBe('label'); + expect(results.tokens[1].value).toBe('"Won\'t fix"'); + expect(results.tokens[1].symbol).toBe('~'); + + expect(results.tokens[2].key).toBe('milestone'); + expect(results.tokens[2].value).toBe('none'); + expect(results.tokens[2].symbol).toBe(''); + }); + + it('returns for input containing search value in between tokens', () => { + const results = gl.FilteredSearchTokenizer + .processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing'); + expect(results.searchToken).toBe('searchTerm anotherSearchTerm'); + expect(results.tokens.length).toBe(3); + expect(results.tokens[2]).toBe(results.lastToken); + + expect(results.tokens[0].key).toBe('author'); + expect(results.tokens[0].value).toBe('root'); + expect(results.tokens[0].symbol).toBe('@'); + + expect(results.tokens[1].key).toBe('assignee'); + expect(results.tokens[1].value).toBe('none'); + expect(results.tokens[1].symbol).toBe(''); + + expect(results.tokens[2].key).toBe('label'); + expect(results.tokens[2].value).toBe('Doing'); + expect(results.tokens[2].symbol).toBe('~'); + }); + + it('returns search value for invalid tokens', () => { + const results = gl.FilteredSearchTokenizer.processTokens('fake:token'); + expect(results.lastToken).toBe('fake:token'); + expect(results.searchToken).toBe('fake:token'); + expect(results.tokens.length).toEqual(0); + }); + + it('returns search value and token for mix of valid and invalid tokens', () => { + const results = gl.FilteredSearchTokenizer.processTokens('label:real fake:token'); + expect(results.tokens.length).toEqual(1); + expect(results.tokens[0].key).toBe('label'); + expect(results.tokens[0].value).toBe('real'); + expect(results.tokens[0].symbol).toBe(''); + expect(results.lastToken).toBe('fake:token'); + expect(results.searchToken).toBe('fake:token'); + }); + + it('returns search value for invalid symbols', () => { + const results = gl.FilteredSearchTokenizer.processTokens('std::includes'); + expect(results.lastToken).toBe('std::includes'); + expect(results.searchToken).toBe('std::includes'); + }); + + it('removes duplicated values', () => { + const results = gl.FilteredSearchTokenizer.processTokens('label:~foo label:~foo'); + expect(results.tokens.length).toBe(1); + expect(results.tokens[0].key).toBe('label'); + expect(results.tokens[0].value).toBe('foo'); + expect(results.tokens[0].symbol).toBe('~'); }); }); -})(); +}); diff --git a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js index bbda1476fed..c5fa2b17106 100644 --- a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js +++ b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js @@ -1,5 +1,7 @@ -require('~/filtered_search/filtered_search_visual_tokens'); -const 
FilteredSearchSpecHelper = require('../helpers/filtered_search_spec_helper'); +import AjaxCache from '~/lib/utils/ajax_cache'; + +import '~/filtered_search/filtered_search_visual_tokens'; +import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper'; describe('Filtered Search Visual Tokens', () => { let tokensContainer; @@ -214,8 +216,12 @@ describe('Filtered Search Visual Tokens', () => { expect(tokenElement.querySelector('.name')).toEqual(jasmine.anything()); }); + it('contains value container div', () => { + expect(tokenElement.querySelector('.value-container')).toEqual(jasmine.anything()); + }); + it('contains value div', () => { - expect(tokenElement.querySelector('.value')).toEqual(jasmine.anything()); + expect(tokenElement.querySelector('.value-container .value')).toEqual(jasmine.anything()); }); it('contains selectable class', () => { @@ -225,6 +231,16 @@ describe('Filtered Search Visual Tokens', () => { it('contains button role', () => { expect(tokenElement.getAttribute('role')).toEqual('button'); }); + + describe('remove token', () => { + it('contains remove-token button', () => { + expect(tokenElement.querySelector('.value-container .remove-token')).toEqual(jasmine.anything()); + }); + + it('contains fa-close icon', () => { + expect(tokenElement.querySelector('.remove-token .fa-close')).toEqual(jasmine.anything()); + }); + }); }); describe('addVisualTokenElement', () => { @@ -597,4 +613,103 @@ describe('Filtered Search Visual Tokens', () => { expect(token.querySelector('.value').innerText).toEqual('~bug'); }); }); + + describe('renderVisualTokenValue', () => { + let searchTokens; + + beforeEach(() => { + tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(` + ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', 'none')} + ${FilteredSearchSpecHelper.createSearchVisualTokenHTML('search')} + ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', 'upcoming')} + `); + + searchTokens = document.querySelectorAll('.filtered-search-token'); + }); + + it('renders a token value element', () => { + spyOn(gl.FilteredSearchVisualTokens, 'updateLabelTokenColor'); + const updateLabelTokenColorSpy = gl.FilteredSearchVisualTokens.updateLabelTokenColor; + + expect(searchTokens.length).toBe(2); + Array.prototype.forEach.call(searchTokens, (token) => { + updateLabelTokenColorSpy.calls.reset(); + + const tokenName = token.querySelector('.name').innerText; + const tokenValue = 'new value'; + gl.FilteredSearchVisualTokens.renderVisualTokenValue(token, tokenName, tokenValue); + + const tokenValueElement = token.querySelector('.value'); + expect(tokenValueElement.innerText).toBe(tokenValue); + + if (tokenName.toLowerCase() === 'label') { + const tokenValueContainer = token.querySelector('.value-container'); + expect(updateLabelTokenColorSpy.calls.count()).toBe(1); + const expectedArgs = [tokenValueContainer, tokenValue]; + expect(updateLabelTokenColorSpy.calls.argsFor(0)).toEqual(expectedArgs); + } else { + expect(updateLabelTokenColorSpy.calls.count()).toBe(0); + } + }); + }); + }); + + describe('updateLabelTokenColor', () => { + const jsonFixtureName = 'labels/project_labels.json'; + const dummyEndpoint = '/dummy/endpoint'; + + preloadFixtures(jsonFixtureName); + const labelData = getJSONFixture(jsonFixtureName); + const findLabel = tokenValue => labelData.find( + label => tokenValue === `~${gl.DropdownUtils.getEscapedText(label.title)}`, + ); + + const bugLabelToken = FilteredSearchSpecHelper.createFilterVisualToken('label', '~bug'); 
+ const missingLabelToken = FilteredSearchSpecHelper.createFilterVisualToken('label', '~doesnotexist'); + const spaceLabelToken = FilteredSearchSpecHelper.createFilterVisualToken('label', '~"some space"'); + + const parseColor = (color) => { + const dummyElement = document.createElement('div'); + dummyElement.style.color = color; + return dummyElement.style.color; + }; + + beforeEach(() => { + tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(` + ${bugLabelToken.outerHTML} + ${missingLabelToken.outerHTML} + ${spaceLabelToken.outerHTML} + `); + + const filteredSearchInput = document.querySelector('.filtered-search'); + filteredSearchInput.dataset.baseEndpoint = dummyEndpoint; + + AjaxCache.internalStorage = { }; + AjaxCache.internalStorage[`${dummyEndpoint}/labels.json`] = labelData; + }); + + const testCase = (token, done) => { + const tokenValueContainer = token.querySelector('.value-container'); + const tokenValue = token.querySelector('.value').innerText; + const label = findLabel(tokenValue); + + gl.FilteredSearchVisualTokens.updateLabelTokenColor(tokenValueContainer, tokenValue) + .then(() => { + if (label) { + expect(tokenValueContainer.getAttribute('style')).not.toBe(null); + expect(tokenValueContainer.style.backgroundColor).toBe(parseColor(label.color)); + expect(tokenValueContainer.style.color).toBe(parseColor(label.text_color)); + } else { + expect(token).toBe(missingLabelToken); + expect(tokenValueContainer.getAttribute('style')).toBe(null); + } + }) + .then(done) + .catch(fail); + }; + + it('updates the color of a label token', done => testCase(bugLabelToken, done)); + it('updates the color of a label token with spaces', done => testCase(spaceLabelToken, done)); + it('does not change color of a missing label', done => testCase(missingLabelToken, done)); + }); }); diff --git a/spec/javascripts/filtered_search/recent_searches_root_spec.js b/spec/javascripts/filtered_search/recent_searches_root_spec.js new file mode 100644 index 00000000000..d8ba6de5f45 --- /dev/null +++ b/spec/javascripts/filtered_search/recent_searches_root_spec.js @@ -0,0 +1,31 @@ +import RecentSearchesRoot from '~/filtered_search/recent_searches_root'; +import * as vueSrc from 'vue'; + +describe('RecentSearchesRoot', () => { + describe('render', () => { + let recentSearchesRoot; + let data; + let template; + + beforeEach(() => { + recentSearchesRoot = { + store: { + state: 'state', + }, + }; + + spyOn(vueSrc, 'default').and.callFake((options) => { + data = options.data; + template = options.template; + }); + + RecentSearchesRoot.prototype.render.call(recentSearchesRoot); + }); + + it('should instantiate Vue', () => { + expect(vueSrc.default).toHaveBeenCalled(); + expect(data()).toBe(recentSearchesRoot.store.state); + expect(template).toContain(':is-local-storage-available="isLocalStorageAvailable"'); + }); + }); +}); diff --git a/spec/javascripts/filtered_search/services/recent_searches_service_error_spec.js b/spec/javascripts/filtered_search/services/recent_searches_service_error_spec.js new file mode 100644 index 00000000000..ea7c146fa4f --- /dev/null +++ b/spec/javascripts/filtered_search/services/recent_searches_service_error_spec.js @@ -0,0 +1,18 @@ +import RecentSearchesServiceError from '~/filtered_search/services/recent_searches_service_error'; + +describe('RecentSearchesServiceError', () => { + let recentSearchesServiceError; + + beforeEach(() => { + recentSearchesServiceError = new RecentSearchesServiceError(); + }); + + it('instantiates an instance of 
RecentSearchesServiceError and not an Error', () => { + expect(recentSearchesServiceError).toEqual(jasmine.any(RecentSearchesServiceError)); + expect(recentSearchesServiceError.name).toBe('RecentSearchesServiceError'); + }); + + it('should set a default message', () => { + expect(recentSearchesServiceError.message).toBe('Recent Searches Service is unavailable'); + }); +}); diff --git a/spec/javascripts/filtered_search/services/recent_searches_service_spec.js b/spec/javascripts/filtered_search/services/recent_searches_service_spec.js new file mode 100644 index 00000000000..31fa478804a --- /dev/null +++ b/spec/javascripts/filtered_search/services/recent_searches_service_spec.js @@ -0,0 +1,147 @@ +/* eslint-disable promise/catch-or-return */ + +import RecentSearchesService from '~/filtered_search/services/recent_searches_service'; +import AccessorUtilities from '~/lib/utils/accessor'; + +describe('RecentSearchesService', () => { + let service; + + beforeEach(() => { + service = new RecentSearchesService(); + window.localStorage.removeItem(service.localStorageKey); + }); + + describe('fetch', () => { + beforeEach(() => { + spyOn(RecentSearchesService, 'isAvailable').and.returnValue(true); + }); + + it('should default to empty array', (done) => { + const fetchItemsPromise = service.fetch(); + + fetchItemsPromise + .then((items) => { + expect(items).toEqual([]); + done(); + }) + .catch((err) => { + done.fail('Shouldn\'t reject with empty localStorage key', err); + }); + }); + + it('should reject when unable to parse', (done) => { + window.localStorage.setItem(service.localStorageKey, 'fail'); + const fetchItemsPromise = service.fetch(); + + fetchItemsPromise + .catch((error) => { + expect(error).toEqual(jasmine.any(SyntaxError)); + done(); + }); + }); + + it('should reject when service is unavailable', (done) => { + RecentSearchesService.isAvailable.and.returnValue(false); + + service.fetch().catch((error) => { + expect(error).toEqual(jasmine.any(Error)); + done(); + }); + }); + + it('should return items from localStorage', (done) => { + window.localStorage.setItem(service.localStorageKey, '["foo", "bar"]'); + const fetchItemsPromise = service.fetch(); + + fetchItemsPromise + .then((items) => { + expect(items).toEqual(['foo', 'bar']); + done(); + }); + }); + + describe('if .isAvailable returns `false`', () => { + beforeEach(() => { + RecentSearchesService.isAvailable.and.returnValue(false); + + spyOn(window.localStorage, 'getItem'); + + RecentSearchesService.prototype.fetch(); + }); + + it('should not call .getItem', () => { + expect(window.localStorage.getItem).not.toHaveBeenCalled(); + }); + }); + }); + + describe('setRecentSearches', () => { + beforeEach(() => { + spyOn(RecentSearchesService, 'isAvailable').and.returnValue(true); + }); + + it('should save things in localStorage', () => { + const items = ['foo', 'bar']; + service.save(items); + const newLocalStorageValue = window.localStorage.getItem(service.localStorageKey); + expect(JSON.parse(newLocalStorageValue)).toEqual(items); + }); + }); + + describe('save', () => { + beforeEach(() => { + spyOn(window.localStorage, 'setItem'); + spyOn(RecentSearchesService, 'isAvailable'); + }); + + describe('if .isAvailable returns `true`', () => { + const searchesString = 'searchesString'; + const localStorageKey = 'localStorageKey'; + const recentSearchesService = { + localStorageKey, + }; + + beforeEach(() => { + RecentSearchesService.isAvailable.and.returnValue(true); + + spyOn(JSON, 'stringify').and.returnValue(searchesString); + + 
RecentSearchesService.prototype.save.call(recentSearchesService); + }); + + it('should call .setItem', () => { + expect(window.localStorage.setItem).toHaveBeenCalledWith(localStorageKey, searchesString); + }); + }); + + describe('if .isAvailable returns `false`', () => { + beforeEach(() => { + RecentSearchesService.isAvailable.and.returnValue(false); + + RecentSearchesService.prototype.save(); + }); + + it('should not call .setItem', () => { + expect(window.localStorage.setItem).not.toHaveBeenCalled(); + }); + }); + }); + + describe('isAvailable', () => { + let isAvailable; + + beforeEach(() => { + spyOn(AccessorUtilities, 'isLocalStorageAccessSafe').and.callThrough(); + + isAvailable = RecentSearchesService.isAvailable(); + }); + + it('should call .isLocalStorageAccessSafe', () => { + expect(AccessorUtilities.isLocalStorageAccessSafe).toHaveBeenCalled(); + }); + + it('should return a boolean', () => { + expect(typeof isAvailable).toBe('boolean'); + }); + }); +}); diff --git a/spec/javascripts/filtered_search/stores/recent_searches_store_spec.js b/spec/javascripts/filtered_search/stores/recent_searches_store_spec.js new file mode 100644 index 00000000000..1eebc6f2367 --- /dev/null +++ b/spec/javascripts/filtered_search/stores/recent_searches_store_spec.js @@ -0,0 +1,59 @@ +import RecentSearchesStore from '~/filtered_search/stores/recent_searches_store'; + +describe('RecentSearchesStore', () => { + let store; + + beforeEach(() => { + store = new RecentSearchesStore(); + }); + + describe('addRecentSearch', () => { + it('should add to the front of the list', () => { + store.addRecentSearch('foo'); + store.addRecentSearch('bar'); + + expect(store.state.recentSearches).toEqual(['bar', 'foo']); + }); + + it('should deduplicate', () => { + store.addRecentSearch('foo'); + store.addRecentSearch('bar'); + store.addRecentSearch('foo'); + + expect(store.state.recentSearches).toEqual(['foo', 'bar']); + }); + + it('only keeps track of 5 items', () => { + store.addRecentSearch('1'); + store.addRecentSearch('2'); + store.addRecentSearch('3'); + store.addRecentSearch('4'); + store.addRecentSearch('5'); + store.addRecentSearch('6'); + store.addRecentSearch('7'); + + expect(store.state.recentSearches).toEqual(['7', '6', '5', '4', '3']); + }); + }); + + describe('setRecentSearches', () => { + it('should override list', () => { + store.setRecentSearches([ + 'foo', + 'bar', + ]); + store.setRecentSearches([ + 'baz', + 'qux', + ]); + + expect(store.state.recentSearches).toEqual(['baz', 'qux']); + }); + + it('only keeps track of 5 items', () => { + store.setRecentSearches(['1', '2', '3', '4', '5', '6', '7']); + + expect(store.state.recentSearches).toEqual(['1', '2', '3', '4', '5']); + }); + }); +}); diff --git a/spec/javascripts/fixtures/balsamiq.rb b/spec/javascripts/fixtures/balsamiq.rb new file mode 100644 index 00000000000..b5372821bf5 --- /dev/null +++ b/spec/javascripts/fixtures/balsamiq.rb @@ -0,0 +1,18 @@ +require 'spec_helper' + +describe 'Balsamiq file', '(JavaScript fixtures)', type: :controller do + include JavaScriptFixturesHelpers + + let(:namespace) { create(:namespace, name: 'frontend-fixtures' )} + let(:project) { create(:project, namespace: namespace, path: 'balsamiq-project') } + + before(:all) do + clean_frontend_fixtures('blob/balsamiq/') + end + + it 'blob/balsamiq/test.bmpr' do |example| + blob = project.repository.blob_at('b89b56d79', 'files/images/balsamiq.bmpr') + + store_frontend_fixture(blob.data.force_encoding('utf-8'), example.description) + end +end diff --git 
a/spec/javascripts/fixtures/balsamiq_viewer.html.haml b/spec/javascripts/fixtures/balsamiq_viewer.html.haml new file mode 100644 index 00000000000..18166ba4901 --- /dev/null +++ b/spec/javascripts/fixtures/balsamiq_viewer.html.haml @@ -0,0 +1 @@ +.file-content.balsamiq-viewer#js-balsamiq-viewer{ data: { endpoint: '/test' } } diff --git a/spec/javascripts/fixtures/blob.rb b/spec/javascripts/fixtures/blob.rb new file mode 100644 index 00000000000..16490ad5039 --- /dev/null +++ b/spec/javascripts/fixtures/blob.rb @@ -0,0 +1,29 @@ +require 'spec_helper' + +describe Projects::BlobController, '(JavaScript fixtures)', type: :controller do + include JavaScriptFixturesHelpers + + let(:admin) { create(:admin) } + let(:namespace) { create(:namespace, name: 'frontend-fixtures' )} + let(:project) { create(:project, :repository, namespace: namespace, path: 'branches-project') } + + render_views + + before(:all) do + clean_frontend_fixtures('blob/') + end + + before(:each) do + sign_in(admin) + end + + it 'blob/show.html.raw' do |example| + get(:show, + namespace_id: project.namespace, + project_id: project, + id: 'add-ipython-files/files/ipython/basic.ipynb') + + expect(response).to be_success + store_frontend_fixture(response, example.description) + end +end diff --git a/spec/javascripts/fixtures/deploy_keys.rb b/spec/javascripts/fixtures/deploy_keys.rb new file mode 100644 index 00000000000..16e598a4b29 --- /dev/null +++ b/spec/javascripts/fixtures/deploy_keys.rb @@ -0,0 +1,36 @@ +require 'spec_helper' + +describe Projects::DeployKeysController, '(JavaScript fixtures)', type: :controller do + include JavaScriptFixturesHelpers + + let(:admin) { create(:admin) } + let(:namespace) { create(:namespace, name: 'frontend-fixtures' )} + let(:project) { create(:project_empty_repo, namespace: namespace, path: 'todos-project') } + let(:project2) { create(:empty_project, :internal)} + + before(:all) do + clean_frontend_fixtures('deploy_keys/') + end + + before(:each) do + sign_in(admin) + end + + render_views + + it 'deploy_keys/keys.json' do |example| + create(:deploy_key, public: true) + project_key = create(:deploy_key, key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQCdMHEHyhRjbhEZVddFn6lTWdgEy5Q6Bz4nwGB76xWZI5YT/1WJOMEW+sL5zYd31kk7sd3FJ5L9ft8zWMWrr/iWXQikC2cqZK24H1xy+ZUmrRuJD4qGAaIVoyyzBL+avL+lF8J5lg6YSw8gwJY/lX64/vnJHUlWw2n5BF8IFOWhiw== dummy@gitlab.com') + internal_key = create(:deploy_key, key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQDNd/UJWhPrpb+b/G5oL109y57yKuCxE+WUGJGYaj7WQKsYRJmLYh1mgjrl+KVyfsWpq4ylOxIfFSnN9xBBFN8mlb0Fma5DC7YsSsibJr3MZ19ZNBprwNcdogET7aW9I0In7Wu5f2KqI6e5W/spJHCy4JVxzVMUvk6Myab0LnJ2iQ== dummy@gitlab.com') + create(:deploy_keys_project, project: project, deploy_key: project_key) + create(:deploy_keys_project, project: project2, deploy_key: internal_key) + + get :index, + namespace_id: project.namespace.to_param, + project_id: project, + format: :json + + expect(response).to be_success + store_frontend_fixture(response, example.description) + end +end diff --git a/spec/javascripts/fixtures/environments.rb b/spec/javascripts/fixtures/environments.rb new file mode 100644 index 00000000000..3474f4696ef --- /dev/null +++ b/spec/javascripts/fixtures/environments.rb @@ -0,0 +1,30 @@ +require 'spec_helper' + +describe Projects::EnvironmentsController, '(JavaScript fixtures)', type: :controller do + include JavaScriptFixturesHelpers + + let(:admin) { create(:admin) } + let(:namespace) { create(:namespace, name: 'frontend-fixtures' )} + let(:project) { create(:project_empty_repo, namespace: namespace, 
path: 'environments-project') } + let(:environment) { create(:environment, name: 'production', project: project) } + + render_views + + before(:all) do + clean_frontend_fixtures('environments/metrics') + end + + before(:each) do + sign_in(admin) + end + + it 'environments/metrics/metrics.html.raw' do |example| + get :metrics, + namespace_id: project.namespace, + project_id: project, + id: environment.id + + expect(response).to be_success + store_frontend_fixture(response, example.description) + end +end diff --git a/spec/javascripts/fixtures/environments/metrics.html.haml b/spec/javascripts/fixtures/environments/metrics.html.haml deleted file mode 100644 index 483063fb889..00000000000 --- a/spec/javascripts/fixtures/environments/metrics.html.haml +++ /dev/null @@ -1,12 +0,0 @@ -%div - .top-area - .row - .col-sm-6 - %h3.page-title - Metrics for environment - .row - .col-sm-12 - %svg.prometheus-graph{ 'graph-type' => 'cpu_values' } - .row - .col-sm-12 - %svg.prometheus-graph{ 'graph-type' => 'memory_values' }
\ No newline at end of file diff --git a/spec/javascripts/fixtures/graph.html.haml b/spec/javascripts/fixtures/graph.html.haml new file mode 100644 index 00000000000..4fedb0f1ded --- /dev/null +++ b/spec/javascripts/fixtures/graph.html.haml @@ -0,0 +1 @@ +#js-pipeline-graph-vue{ data: { endpoint: "foo" } } diff --git a/spec/javascripts/fixtures/labels.rb b/spec/javascripts/fixtures/labels.rb new file mode 100644 index 00000000000..2e4811b64a4 --- /dev/null +++ b/spec/javascripts/fixtures/labels.rb @@ -0,0 +1,56 @@ +require 'spec_helper' + +describe 'Labels (JavaScript fixtures)' do + include JavaScriptFixturesHelpers + + let(:admin) { create(:admin) } + let(:group) { create(:group, name: 'frontend-fixtures-group' )} + let(:project) { create(:project_empty_repo, namespace: group, path: 'labels-project') } + + let!(:project_label_bug) { create(:label, project: project, title: 'bug', color: '#FF0000') } + let!(:project_label_enhancement) { create(:label, project: project, title: 'enhancement', color: '#00FF00') } + let!(:project_label_feature) { create(:label, project: project, title: 'feature', color: '#0000FF') } + + let!(:group_label_roses) { create(:group_label, group: group, title: 'roses', color: '#FF0000') } + let!(:groub_label_space) { create(:group_label, group: group, title: 'some space', color: '#FFFFFF') } + let!(:groub_label_violets) { create(:group_label, group: group, title: 'violets', color: '#0000FF') } + + before(:all) do + clean_frontend_fixtures('labels/') + end + + describe Groups::LabelsController, '(JavaScript fixtures)', type: :controller do + render_views + + before(:each) do + sign_in(admin) + end + + it 'labels/group_labels.json' do |example| + get :index, + group_id: group, + format: 'json' + + expect(response).to be_success + store_frontend_fixture(response, example.description) + end + end + + describe Projects::LabelsController, '(JavaScript fixtures)', type: :controller do + render_views + + before(:each) do + sign_in(admin) + end + + it 'labels/project_labels.json' do |example| + get :index, + namespace_id: group, + project_id: project, + format: 'json' + + expect(response).to be_success + store_frontend_fixture(response, example.description) + end + end +end diff --git a/spec/javascripts/fixtures/line_highlighter.html.haml b/spec/javascripts/fixtures/line_highlighter.html.haml index 514877340e4..2782c50e298 100644 --- a/spec/javascripts/fixtures/line_highlighter.html.haml +++ b/spec/javascripts/fixtures/line_highlighter.html.haml @@ -1,4 +1,4 @@ -#blob-content-holder +.file-holder .file-content .line-numbers - 1.upto(25) do |i| diff --git a/spec/javascripts/fixtures/merge_requests.rb b/spec/javascripts/fixtures/merge_requests.rb index fddeaaf504d..a746a776548 100644 --- a/spec/javascripts/fixtures/merge_requests.rb +++ b/spec/javascripts/fixtures/merge_requests.rb @@ -7,6 +7,7 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont let(:namespace) { create(:namespace, name: 'frontend-fixtures' )} let(:project) { create(:project, namespace: namespace, path: 'merge-requests-project') } let(:merge_request) { create(:merge_request, :with_diffs, source_project: project, target_project: project, description: '- [ ] Task List Item') } + let(:merged_merge_request) { create(:merge_request, :merged, source_project: project, target_project: project) } let(:pipeline) do create( :ci_pipeline, @@ -15,6 +16,16 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont sha: merge_request.diff_head_sha ) end + let(:path) { 
"files/ruby/popen.rb" } + let(:position) do + Gitlab::Diff::Position.new( + old_path: path, + new_path: path, + old_line: nil, + new_line: 14, + diff_refs: merge_request.diff_refs + ) + end render_views @@ -32,6 +43,18 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont render_merge_request(example.description, merge_request) end + it 'merge_requests/merged_merge_request.html.raw' do |example| + allow_any_instance_of(MergeRequest).to receive(:source_branch_exists?).and_return(true) + allow_any_instance_of(MergeRequest).to receive(:can_remove_source_branch?).and_return(true) + render_merge_request(example.description, merged_merge_request) + end + + it 'merge_requests/diff_comment.html.raw' do |example| + create(:diff_note_on_merge_request, project: project, author: admin, position: position, noteable: merge_request) + create(:note_on_merge_request, author: admin, project: project, noteable: merge_request) + render_merge_request(example.description, merge_request) + end + private def render_merge_request(fixture_file_name, merge_request) diff --git a/spec/javascripts/fixtures/mini_dropdown_graph.html.haml b/spec/javascripts/fixtures/mini_dropdown_graph.html.haml index 29370b974af..b532b48a95b 100644 --- a/spec/javascripts/fixtures/mini_dropdown_graph.html.haml +++ b/spec/javascripts/fixtures/mini_dropdown_graph.html.haml @@ -3,7 +3,7 @@ Dropdown %ul.dropdown-menu.mini-pipeline-graph-dropdown-menu.js-builds-dropdown-container - .js-builds-dropdown-list.scrollable-menu + %li.js-builds-dropdown-list.scrollable-menu - .js-builds-dropdown-loading.builds-dropdown-loading.hidden - %span.fa.fa-spinner.fa-spin + %li.js-builds-dropdown-loading.hidden + %span.fa.fa-spinner diff --git a/spec/javascripts/fixtures/pdf.rb b/spec/javascripts/fixtures/pdf.rb new file mode 100644 index 00000000000..6b2422a7986 --- /dev/null +++ b/spec/javascripts/fixtures/pdf.rb @@ -0,0 +1,18 @@ +require 'spec_helper' + +describe 'PDF file', '(JavaScript fixtures)', type: :controller do + include JavaScriptFixturesHelpers + + let(:namespace) { create(:namespace, name: 'frontend-fixtures' )} + let(:project) { create(:project, namespace: namespace, path: 'pdf-project') } + + before(:all) do + clean_frontend_fixtures('blob/pdf/') + end + + it 'blob/pdf/test.pdf' do |example| + blob = project.repository.blob_at('e774ebd33', 'files/pdf/test.pdf') + + store_frontend_fixture(blob.data.force_encoding("utf-8"), example.description) + end +end diff --git a/spec/javascripts/fixtures/pdf_viewer.html.haml b/spec/javascripts/fixtures/pdf_viewer.html.haml new file mode 100644 index 00000000000..2e57beae54b --- /dev/null +++ b/spec/javascripts/fixtures/pdf_viewer.html.haml @@ -0,0 +1 @@ +.file-content#js-pdf-viewer{ data: { endpoint: '/test' } } diff --git a/spec/javascripts/fixtures/pipelines.rb b/spec/javascripts/fixtures/pipelines.rb new file mode 100644 index 00000000000..daafbac86db --- /dev/null +++ b/spec/javascripts/fixtures/pipelines.rb @@ -0,0 +1,35 @@ +require 'spec_helper' + +describe Projects::PipelinesController, '(JavaScript fixtures)', type: :controller do + include JavaScriptFixturesHelpers + + let(:admin) { create(:admin) } + let(:namespace) { create(:namespace, name: 'frontend-fixtures' )} + let(:project) { create(:project, :repository, namespace: namespace, path: 'pipelines-project') } + let(:commit) { create(:commit, project: project) } + let(:commit_without_author) { RepoHelpers.another_sample_commit } + let!(:user) { create(:user, email: commit.author_email) } + let!(:pipeline) { 
create(:ci_pipeline, project: project, sha: commit.id, user: user) } + let!(:pipeline_without_author) { create(:ci_pipeline, project: project, sha: commit_without_author.id) } + let!(:pipeline_without_commit) { create(:ci_pipeline, project: project, sha: '0000') } + + render_views + + before(:all) do + clean_frontend_fixtures('pipelines/') + end + + before(:each) do + sign_in(admin) + end + + it 'pipelines/pipelines.json' do |example| + get :index, + namespace_id: namespace, + project_id: project, + format: :json + + expect(response).to be_success + store_frontend_fixture(response, example.description) + end +end diff --git a/spec/javascripts/fixtures/raw.rb b/spec/javascripts/fixtures/raw.rb new file mode 100644 index 00000000000..1ce622fc836 --- /dev/null +++ b/spec/javascripts/fixtures/raw.rb @@ -0,0 +1,24 @@ +require 'spec_helper' + +describe 'Raw files', '(JavaScript fixtures)', type: :controller do + include JavaScriptFixturesHelpers + + let(:namespace) { create(:namespace, name: 'frontend-fixtures' )} + let(:project) { create(:project, namespace: namespace, path: 'raw-project') } + + before(:all) do + clean_frontend_fixtures('blob/notebook/') + end + + it 'blob/notebook/basic.json' do |example| + blob = project.repository.blob_at('6d85bb69', 'files/ipython/basic.ipynb') + + store_frontend_fixture(blob.data, example.description) + end + + it 'blob/notebook/worksheets.json' do |example| + blob = project.repository.blob_at('6d85bb69', 'files/ipython/worksheets.ipynb') + + store_frontend_fixture(blob.data, example.description) + end +end diff --git a/spec/javascripts/fixtures/sketch_viewer.html.haml b/spec/javascripts/fixtures/sketch_viewer.html.haml new file mode 100644 index 00000000000..f01bd00925a --- /dev/null +++ b/spec/javascripts/fixtures/sketch_viewer.html.haml @@ -0,0 +1,2 @@ +.file-content#js-sketch-viewer{ data: { endpoint: '/test_sketch_file.sketch' } } + .js-loading-icon diff --git a/spec/javascripts/gfm_auto_complete_spec.js b/spec/javascripts/gfm_auto_complete_spec.js index 5dfa4008fbd..ad0c7264616 100644 --- a/spec/javascripts/gfm_auto_complete_spec.js +++ b/spec/javascripts/gfm_auto_complete_spec.js @@ -1,13 +1,15 @@ /* eslint no-param-reassign: "off" */ -require('~/gfm_auto_complete'); -require('vendor/jquery.caret'); -require('vendor/jquery.atwho'); +import GfmAutoComplete from '~/gfm_auto_complete'; -const global = window.gl || (window.gl = {}); -const GfmAutoComplete = global.GfmAutoComplete; +import 'vendor/jquery.caret'; +import 'vendor/jquery.atwho'; describe('GfmAutoComplete', function () { + const gfmAutoCompleteCallbacks = GfmAutoComplete.prototype.getDefaultCallbacks.call({ + fetchData: () => {}, + }); + describe('DefaultOptions.sorter', function () { describe('assets loading', function () { beforeEach(function () { @@ -16,7 +18,7 @@ describe('GfmAutoComplete', function () { this.atwhoInstance = { setting: {} }; this.items = []; - this.sorterValue = GfmAutoComplete.DefaultOptions.sorter + this.sorterValue = gfmAutoCompleteCallbacks.sorter .call(this.atwhoInstance, '', this.items); }); @@ -38,7 +40,7 @@ describe('GfmAutoComplete', function () { it('should enable highlightFirst if alwaysHighlightFirst is set', function () { const atwhoInstance = { setting: { alwaysHighlightFirst: true } }; - GfmAutoComplete.DefaultOptions.sorter.call(atwhoInstance); + gfmAutoCompleteCallbacks.sorter.call(atwhoInstance); expect(atwhoInstance.setting.highlightFirst).toBe(true); }); @@ -46,7 +48,7 @@ describe('GfmAutoComplete', function () { it('should enable highlightFirst if a 
query is present', function () { const atwhoInstance = { setting: {} }; - GfmAutoComplete.DefaultOptions.sorter.call(atwhoInstance, 'query'); + gfmAutoCompleteCallbacks.sorter.call(atwhoInstance, 'query'); expect(atwhoInstance.setting.highlightFirst).toBe(true); }); @@ -58,7 +60,7 @@ describe('GfmAutoComplete', function () { const items = []; const searchKey = 'searchKey'; - GfmAutoComplete.DefaultOptions.sorter.call(atwhoInstance, query, items, searchKey); + gfmAutoCompleteCallbacks.sorter.call(atwhoInstance, query, items, searchKey); expect($.fn.atwho.default.callbacks.sorter).toHaveBeenCalledWith(query, items, searchKey); }); @@ -67,7 +69,7 @@ describe('GfmAutoComplete', function () { describe('DefaultOptions.matcher', function () { const defaultMatcher = (context, flag, subtext) => ( - GfmAutoComplete.DefaultOptions.matcher.call(context, flag, subtext) + gfmAutoCompleteCallbacks.matcher.call(context, flag, subtext) ); const flagsUseDefaultMatcher = ['@', '#', '!', '~', '%']; diff --git a/spec/javascripts/gl_dropdown_spec.js b/spec/javascripts/gl_dropdown_spec.js index c207fb00a47..3292590b9ed 100644 --- a/spec/javascripts/gl_dropdown_spec.js +++ b/spec/javascripts/gl_dropdown_spec.js @@ -1,9 +1,8 @@ /* eslint-disable comma-dangle, no-param-reassign, no-unused-expressions, max-len */ -require('~/gl_dropdown'); -require('~/lib/utils/common_utils'); -require('~/lib/utils/type_utility'); -require('~/lib/utils/url_utility'); +import '~/gl_dropdown'; +import '~/lib/utils/common_utils'; +import '~/lib/utils/url_utility'; (() => { const NON_SELECTABLE_CLASSES = '.divider, .separator, .dropdown-header, .dropdown-menu-empty-link'; @@ -44,21 +43,18 @@ require('~/lib/utils/url_utility'); preloadFixtures('static/gl_dropdown.html.raw'); loadJSONFixtures('projects.json'); - function initDropDown(hasRemote, isFilterable) { - this.dropdownButtonElement = $('#js-project-dropdown', this.dropdownContainerElement).glDropdown({ + function initDropDown(hasRemote, isFilterable, extraOpts = {}) { + const options = Object.assign({ selectable: true, filterable: isFilterable, data: hasRemote ? 
remoteMock.bind({}, this.projectsData) : this.projectsData, search: { fields: ['name'] }, - text: (project) => { - (project.name_with_namespace || project.name); - }, - id: (project) => { - project.id; - } - }); + text: project => (project.name_with_namespace || project.name), + id: project => project.id, + }, extraOpts); + this.dropdownButtonElement = $('#js-project-dropdown', this.dropdownContainerElement).glDropdown(options); } beforeEach(() => { @@ -80,6 +76,37 @@ require('~/lib/utils/url_utility'); expect(this.dropdownContainerElement).toHaveClass('open'); }); + it('escapes HTML as text', () => { + this.projectsData[0].name_with_namespace = '<script>alert("testing");</script>'; + + initDropDown.call(this, false); + + this.dropdownButtonElement.click(); + + expect( + $('.dropdown-content li:first-child').text(), + ).toBe('<script>alert("testing");</script>'); + }); + + it('should output HTML when highlighting', () => { + this.projectsData[0].name_with_namespace = 'testing'; + $('.dropdown-input .dropdown-input-field').val('test'); + + initDropDown.call(this, false, true, { + highlight: true, + }); + + this.dropdownButtonElement.click(); + + expect( + $('.dropdown-content li:first-child').text(), + ).toBe('testing'); + + expect( + $('.dropdown-content li:first-child a').html(), + ).toBe('<b>t</b><b>e</b><b>s</b><b>t</b>ing'); + }); + describe('that is open', () => { beforeEach(() => { initDropDown.call(this, false, false); diff --git a/spec/javascripts/gl_field_errors_spec.js b/spec/javascripts/gl_field_errors_spec.js index 733023481f5..fa24aa426b6 100644 --- a/spec/javascripts/gl_field_errors_spec.js +++ b/spec/javascripts/gl_field_errors_spec.js @@ -1,6 +1,6 @@ /* eslint-disable space-before-function-paren, arrow-body-style */ -require('~/gl_field_errors'); +import '~/gl_field_errors'; ((global) => { preloadFixtures('static/gl_field_errors.html.raw'); diff --git a/spec/javascripts/gl_form_spec.js b/spec/javascripts/gl_form_spec.js index 71d6e2a7e22..837feacec1d 100644 --- a/spec/javascripts/gl_form_spec.js +++ b/spec/javascripts/gl_form_spec.js @@ -1,9 +1,9 @@ -/* global autosize */ +import autosize from 'vendor/autosize'; +import '~/gl_form'; +import '~/lib/utils/text_utility'; +import '~/lib/utils/common_utils'; -window.autosize = require('vendor/autosize'); -require('~/gl_form'); -require('~/lib/utils/text_utility'); -require('~/lib/utils/common_utils'); +window.autosize = autosize; describe('GLForm', () => { const global = window.gl || (window.gl = {}); @@ -27,12 +27,12 @@ describe('GLForm', () => { $.prototype.off.calls.reset(); $.prototype.on.calls.reset(); $.prototype.css.calls.reset(); - autosize.calls.reset(); + window.autosize.calls.reset(); done(); }); }); - describe('.setupAutosize', () => { + describe('setupAutosize', () => { beforeEach((done) => { this.glForm.setupAutosize(); setTimeout(() => { @@ -51,7 +51,7 @@ describe('GLForm', () => { }); it('should autosize the textarea', () => { - expect(autosize).toHaveBeenCalledWith(jasmine.any(Object)); + expect(window.autosize).toHaveBeenCalledWith(jasmine.any(Object)); }); it('should set the resize css property to vertical', () => { @@ -59,7 +59,7 @@ describe('GLForm', () => { }); }); - describe('.setHeightData', () => { + describe('setHeightData', () => { beforeEach(() => { spyOn($.prototype, 'data'); spyOn($.prototype, 'outerHeight').and.returnValue(200); @@ -75,13 +75,13 @@ describe('GLForm', () => { }); }); - describe('.destroyAutosize', () => { + describe('destroyAutosize', () => { describe('when called', () => { 
beforeEach(() => { spyOn($.prototype, 'data'); spyOn($.prototype, 'outerHeight').and.returnValue(200); spyOn(window, 'outerHeight').and.returnValue(400); - spyOn(autosize, 'destroy'); + spyOn(window.autosize, 'destroy'); this.glForm.destroyAutosize(); }); @@ -95,7 +95,7 @@ describe('GLForm', () => { }); it('should call autosize destroy', () => { - expect(autosize.destroy).toHaveBeenCalledWith(this.textarea); + expect(window.autosize.destroy).toHaveBeenCalledWith(this.textarea); }); it('should set the data-height attribute', () => { @@ -114,9 +114,9 @@ describe('GLForm', () => { it('should return undefined if the data-height equals the outerHeight', () => { spyOn($.prototype, 'outerHeight').and.returnValue(200); spyOn($.prototype, 'data').and.returnValue(200); - spyOn(autosize, 'destroy'); + spyOn(window.autosize, 'destroy'); expect(this.glForm.destroyAutosize()).toBeUndefined(); - expect(autosize.destroy).not.toHaveBeenCalled(); + expect(window.autosize.destroy).not.toHaveBeenCalled(); }); }); }); diff --git a/spec/javascripts/header_spec.js b/spec/javascripts/header_spec.js index b5dde5525e5..0e01934d3a3 100644 --- a/spec/javascripts/header_spec.js +++ b/spec/javascripts/header_spec.js @@ -1,7 +1,7 @@ /* eslint-disable space-before-function-paren, no-var */ -require('~/header'); -require('~/lib/utils/text_utility'); +import '~/header'; +import '~/lib/utils/text_utility'; (function() { describe('Header', function() { diff --git a/spec/javascripts/helpers/class_spec_helper.js b/spec/javascripts/helpers/class_spec_helper.js index 61db27a8fcc..7a60d33b471 100644 --- a/spec/javascripts/helpers/class_spec_helper.js +++ b/spec/javascripts/helpers/class_spec_helper.js @@ -1,4 +1,4 @@ -class ClassSpecHelper { +export default class ClassSpecHelper { static itShouldBeAStaticMethod(base, method) { return it('should be a static method', () => { expect(Object.prototype.hasOwnProperty.call(base, method)).toBeTruthy(); @@ -7,5 +7,3 @@ class ClassSpecHelper { } window.ClassSpecHelper = ClassSpecHelper; - -module.exports = ClassSpecHelper; diff --git a/spec/javascripts/helpers/class_spec_helper_spec.js b/spec/javascripts/helpers/class_spec_helper_spec.js index 0a61e561640..686b8eaed31 100644 --- a/spec/javascripts/helpers/class_spec_helper_spec.js +++ b/spec/javascripts/helpers/class_spec_helper_spec.js @@ -1,9 +1,9 @@ /* global ClassSpecHelper */ -require('./class_spec_helper'); +import './class_spec_helper'; describe('ClassSpecHelper', () => { - describe('.itShouldBeAStaticMethod', function () { + describe('itShouldBeAStaticMethod', function () { beforeEach(() => { class TestClass { instanceMethod() { this.prop = 'val'; } diff --git a/spec/javascripts/helpers/filtered_search_spec_helper.js b/spec/javascripts/helpers/filtered_search_spec_helper.js index ce83a256ddd..0d7092a2357 100644 --- a/spec/javascripts/helpers/filtered_search_spec_helper.js +++ b/spec/javascripts/helpers/filtered_search_spec_helper.js @@ -1,4 +1,4 @@ -class FilteredSearchSpecHelper { +export default class FilteredSearchSpecHelper { static createFilterVisualTokenHTML(name, value, isSelected) { return FilteredSearchSpecHelper.createFilterVisualToken(name, value, isSelected).outerHTML; } @@ -10,7 +10,12 @@ class FilteredSearchSpecHelper { li.innerHTML = ` <div class="selectable ${isSelected ? 
'selected' : ''}" role="button"> <div class="name">${name}</div> - <div class="value">${value}</div> + <div class="value-container"> + <div class="value">${value}</div> + <div class="remove-token" role="button"> + <i class="fa fa-close"></i> + </div> + </div> </div> `; @@ -48,5 +53,3 @@ class FilteredSearchSpecHelper { `; } } - -module.exports = FilteredSearchSpecHelper; diff --git a/spec/javascripts/helpers/user_mock_data_helper.js b/spec/javascripts/helpers/user_mock_data_helper.js new file mode 100644 index 00000000000..a9783ea065c --- /dev/null +++ b/spec/javascripts/helpers/user_mock_data_helper.js @@ -0,0 +1,16 @@ +export default { + createNumberRandomUsers(numberUsers) { + const users = []; + for (let i = 0; i < numberUsers; i = i += 1) { + users.push( + { + avatar: 'http://gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + id: (i + 1), + name: `GitLab User ${i}`, + username: `gitlab${i}`, + }, + ); + } + return users; + }, +}; diff --git a/spec/javascripts/issuable_spec.js b/spec/javascripts/issuable_spec.js index 26d87cc5931..49fa2cb8367 100644 --- a/spec/javascripts/issuable_spec.js +++ b/spec/javascripts/issuable_spec.js @@ -1,7 +1,7 @@ /* global Issuable */ -require('~/lib/utils/url_utility'); -require('~/issuable'); +import '~/lib/utils/url_utility'; +import '~/issuable'; (() => { const BASE_URL = '/user/project/issues?scope=all&state=closed'; diff --git a/spec/javascripts/issuable_time_tracker_spec.js b/spec/javascripts/issuable_time_tracker_spec.js index 0a830f25e29..8ff93c4f918 100644 --- a/spec/javascripts/issuable_time_tracker_spec.js +++ b/spec/javascripts/issuable_time_tracker_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; -require('~/issuable/time_tracking/components/time_tracker'); +import timeTracker from '~/sidebar/components/time_tracking/time_tracker'; function initTimeTrackingComponent(opts) { setFixtures(` @@ -16,187 +16,185 @@ function initTimeTrackingComponent(opts) { time_spent: opts.timeSpent, human_time_estimate: opts.timeEstimateHumanReadable, human_time_spent: opts.timeSpentHumanReadable, - docsUrl: '/help/workflow/time_tracking.md', + rootPath: '/', }; - const TimeTrackingComponent = Vue.component('issuable-time-tracker'); + const TimeTrackingComponent = Vue.extend(timeTracker); this.timeTracker = new TimeTrackingComponent({ el: '#mock-container', propsData: this.initialData, }); } -((gl) => { - describe('Issuable Time Tracker', function() { - describe('Initialization', function() { - beforeEach(function() { - initTimeTrackingComponent.call(this, { timeEstimate: 100000, timeSpent: 5000, timeEstimateHumanReadable: '2h 46m', timeSpentHumanReadable: '1h 23m' }); - }); +describe('Issuable Time Tracker', function() { + describe('Initialization', function() { + beforeEach(function() { + initTimeTrackingComponent.call(this, { timeEstimate: 100000, timeSpent: 5000, timeEstimateHumanReadable: '2h 46m', timeSpentHumanReadable: '1h 23m' }); + }); - it('should return something defined', function() { - expect(this.timeTracker).toBeDefined(); - }); + it('should return something defined', function() { + expect(this.timeTracker).toBeDefined(); + }); - it ('should correctly set timeEstimate', function(done) { - Vue.nextTick(() => { - expect(this.timeTracker.timeEstimate).toBe(this.initialData.time_estimate); - done(); - }); + it ('should correctly set timeEstimate', function(done) { + Vue.nextTick(() => { + expect(this.timeTracker.timeEstimate).toBe(this.initialData.time_estimate); + done(); }); - it ('should correctly set time_spent', 
function(done) { - Vue.nextTick(() => { - expect(this.timeTracker.timeSpent).toBe(this.initialData.time_spent); - done(); - }); + }); + it ('should correctly set time_spent', function(done) { + Vue.nextTick(() => { + expect(this.timeTracker.timeSpent).toBe(this.initialData.time_spent); + done(); }); }); + }); - describe('Content Display', function() { - describe('Panes', function() { - describe('Comparison pane', function() { - beforeEach(function() { - initTimeTrackingComponent.call(this, { timeEstimate: 100000, timeSpent: 5000, timeEstimateHumanReadable: '', timeSpentHumanReadable: '' }); + describe('Content Display', function() { + describe('Panes', function() { + describe('Comparison pane', function() { + beforeEach(function() { + initTimeTrackingComponent.call(this, { timeEstimate: 100000, timeSpent: 5000, timeEstimateHumanReadable: '', timeSpentHumanReadable: '' }); + }); + + it('should show the "Comparison" pane when timeEstimate and time_spent are truthy', function(done) { + Vue.nextTick(() => { + const $comparisonPane = this.timeTracker.$el.querySelector('.time-tracking-comparison-pane'); + expect(this.timeTracker.showComparisonState).toBe(true); + done(); }); + }); - it('should show the "Comparison" pane when timeEstimate and time_spent are truthy', function(done) { + describe('Remaining meter', function() { + it('should display the remaining meter with the correct width', function(done) { Vue.nextTick(() => { - const $comparisonPane = this.timeTracker.$el.querySelector('.time-tracking-comparison-pane'); - expect(this.timeTracker.showComparisonState).toBe(true); + const meterWidth = this.timeTracker.$el.querySelector('.time-tracking-comparison-pane .meter-fill').style.width; + const correctWidth = '5%'; + + expect(meterWidth).toBe(correctWidth); done(); - }); + }) }); - describe('Remaining meter', function() { - it('should display the remaining meter with the correct width', function(done) { - Vue.nextTick(() => { - const meterWidth = this.timeTracker.$el.querySelector('.time-tracking-comparison-pane .meter-fill').style.width; - const correctWidth = '5%'; - - expect(meterWidth).toBe(correctWidth); - done(); - }) - }); - - it('should display the remaining meter with the correct background color when within estimate', function(done) { - Vue.nextTick(() => { - const styledMeter = $(this.timeTracker.$el).find('.time-tracking-comparison-pane .within_estimate .meter-fill'); - expect(styledMeter.length).toBe(1); - done() - }); + it('should display the remaining meter with the correct background color when within estimate', function(done) { + Vue.nextTick(() => { + const styledMeter = $(this.timeTracker.$el).find('.time-tracking-comparison-pane .within_estimate .meter-fill'); + expect(styledMeter.length).toBe(1); + done() }); + }); - it('should display the remaining meter with the correct background color when over estimate', function(done) { - this.timeTracker.time_estimate = 100000; - this.timeTracker.time_spent = 20000000; - Vue.nextTick(() => { - const styledMeter = $(this.timeTracker.$el).find('.time-tracking-comparison-pane .over_estimate .meter-fill'); - expect(styledMeter.length).toBe(1); - done(); - }); + it('should display the remaining meter with the correct background color when over estimate', function(done) { + this.timeTracker.time_estimate = 100000; + this.timeTracker.time_spent = 20000000; + Vue.nextTick(() => { + const styledMeter = $(this.timeTracker.$el).find('.time-tracking-comparison-pane .over_estimate .meter-fill'); + expect(styledMeter.length).toBe(1); + done(); 
}); }); }); + }); - describe("Estimate only pane", function() { - beforeEach(function() { - initTimeTrackingComponent.call(this, { timeEstimate: 100000, timeSpent: 0, timeEstimateHumanReadable: '2h 46m', timeSpentHumanReadable: '' }); - }); + describe("Estimate only pane", function() { + beforeEach(function() { + initTimeTrackingComponent.call(this, { timeEstimate: 100000, timeSpent: 0, timeEstimateHumanReadable: '2h 46m', timeSpentHumanReadable: '' }); + }); - it('should display the human readable version of time estimated', function(done) { - Vue.nextTick(() => { - const estimateText = this.timeTracker.$el.querySelector('.time-tracking-estimate-only-pane').innerText; - const correctText = 'Estimated: 2h 46m'; + it('should display the human readable version of time estimated', function(done) { + Vue.nextTick(() => { + const estimateText = this.timeTracker.$el.querySelector('.time-tracking-estimate-only-pane').innerText; + const correctText = 'Estimated: 2h 46m'; - expect(estimateText).toBe(correctText); - done(); - }); + expect(estimateText).toBe(correctText); + done(); }); }); + }); - describe('Spent only pane', function() { - beforeEach(function() { - initTimeTrackingComponent.call(this, { timeEstimate: 0, timeSpent: 5000, timeEstimateHumanReadable: '2h 46m', timeSpentHumanReadable: '1h 23m' }); - }); + describe('Spent only pane', function() { + beforeEach(function() { + initTimeTrackingComponent.call(this, { timeEstimate: 0, timeSpent: 5000, timeEstimateHumanReadable: '2h 46m', timeSpentHumanReadable: '1h 23m' }); + }); - it('should display the human readable version of time spent', function(done) { - Vue.nextTick(() => { - const spentText = this.timeTracker.$el.querySelector('.time-tracking-spend-only-pane').innerText; - const correctText = 'Spent: 1h 23m'; + it('should display the human readable version of time spent', function(done) { + Vue.nextTick(() => { + const spentText = this.timeTracker.$el.querySelector('.time-tracking-spend-only-pane').innerText; + const correctText = 'Spent: 1h 23m'; - expect(spentText).toBe(correctText); - done(); - }); + expect(spentText).toBe(correctText); + done(); }); }); + }); - describe('No time tracking pane', function() { - beforeEach(function() { - initTimeTrackingComponent.call(this, { timeEstimate: 0, timeSpent: 0, timeEstimateHumanReadable: 0, timeSpentHumanReadable: 0 }); - }); + describe('No time tracking pane', function() { + beforeEach(function() { + initTimeTrackingComponent.call(this, { timeEstimate: 0, timeSpent: 0, timeEstimateHumanReadable: '', timeSpentHumanReadable: '' }); + }); - it('should only show the "No time tracking" pane when both timeEstimate and time_spent are falsey', function(done) { - Vue.nextTick(() => { - const $noTrackingPane = this.timeTracker.$el.querySelector('.time-tracking-no-tracking-pane'); - const noTrackingText =$noTrackingPane.innerText; - const correctText = 'No estimate or time spent'; + it('should only show the "No time tracking" pane when both timeEstimate and time_spent are falsey', function(done) { + Vue.nextTick(() => { + const $noTrackingPane = this.timeTracker.$el.querySelector('.time-tracking-no-tracking-pane'); + const noTrackingText =$noTrackingPane.innerText; + const correctText = 'No estimate or time spent'; - expect(this.timeTracker.showNoTimeTrackingState).toBe(true); - expect($noTrackingPane).toBeVisible(); - expect(noTrackingText).toBe(correctText); - done(); - }); + expect(this.timeTracker.showNoTimeTrackingState).toBe(true); + expect($noTrackingPane).toBeVisible(); + 
expect(noTrackingText).toBe(correctText); + done(); }); }); + }); - describe("Help pane", function() { - beforeEach(function() { - initTimeTrackingComponent.call(this, { timeEstimate: 0, timeSpent: 0 }); - }); + describe("Help pane", function() { + beforeEach(function() { + initTimeTrackingComponent.call(this, { timeEstimate: 0, timeSpent: 0 }); + }); - it('should not show the "Help" pane by default', function(done) { - Vue.nextTick(() => { - const $helpPane = this.timeTracker.$el.querySelector('.time-tracking-help-state'); + it('should not show the "Help" pane by default', function(done) { + Vue.nextTick(() => { + const $helpPane = this.timeTracker.$el.querySelector('.time-tracking-help-state'); - expect(this.timeTracker.showHelpState).toBe(false); - expect($helpPane).toBeNull(); - done(); - }); + expect(this.timeTracker.showHelpState).toBe(false); + expect($helpPane).toBeNull(); + done(); }); + }); - it('should show the "Help" pane when help button is clicked', function(done) { - Vue.nextTick(() => { - $(this.timeTracker.$el).find('.help-button').click(); + it('should show the "Help" pane when help button is clicked', function(done) { + Vue.nextTick(() => { + $(this.timeTracker.$el).find('.help-button').click(); - setTimeout(() => { - const $helpPane = this.timeTracker.$el.querySelector('.time-tracking-help-state'); - expect(this.timeTracker.showHelpState).toBe(true); - expect($helpPane).toBeVisible(); - done(); - }, 10); - }); + setTimeout(() => { + const $helpPane = this.timeTracker.$el.querySelector('.time-tracking-help-state'); + expect(this.timeTracker.showHelpState).toBe(true); + expect($helpPane).toBeVisible(); + done(); + }, 10); }); + }); - it('should not show the "Help" pane when help button is clicked and then closed', function(done) { - Vue.nextTick(() => { - $(this.timeTracker.$el).find('.help-button').click(); + it('should not show the "Help" pane when help button is clicked and then closed', function(done) { + Vue.nextTick(() => { + $(this.timeTracker.$el).find('.help-button').click(); - setTimeout(() => { + setTimeout(() => { - $(this.timeTracker.$el).find('.close-help-button').click(); + $(this.timeTracker.$el).find('.close-help-button').click(); - setTimeout(() => { - const $helpPane = this.timeTracker.$el.querySelector('.time-tracking-help-state'); + setTimeout(() => { + const $helpPane = this.timeTracker.$el.querySelector('.time-tracking-help-state'); - expect(this.timeTracker.showHelpState).toBe(false); - expect($helpPane).toBeNull(); + expect(this.timeTracker.showHelpState).toBe(false); + expect($helpPane).toBeNull(); - done(); - }, 1000); + done(); }, 1000); - }); + }, 1000); }); }); }); }); }); -})(window.gl || (window.gl = {})); +}); diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js new file mode 100644 index 00000000000..ee456869c53 --- /dev/null +++ b/spec/javascripts/issue_show/components/app_spec.js @@ -0,0 +1,60 @@ +import Vue from 'vue'; +import '~/render_math'; +import '~/render_gfm'; +import issuableApp from '~/issue_show/components/app.vue'; +import issueShowData from '../mock_data'; + +const issueShowInterceptor = data => (request, next) => { + next(request.respondWith(JSON.stringify(data), { + status: 200, + headers: { + 'POLL-INTERVAL': 1, + }, + })); +}; + +describe('Issuable output', () => { + document.body.innerHTML = '<span id="task_status"></span>'; + + let vm; + + beforeEach(() => { + const IssuableDescriptionComponent = Vue.extend(issuableApp); + 
Vue.http.interceptors.push(issueShowInterceptor(issueShowData.initialRequest)); + + vm = new IssuableDescriptionComponent({ + propsData: { + canUpdate: true, + endpoint: '/gitlab-org/gitlab-shell/issues/9/realtime_changes', + issuableRef: '#1', + initialTitle: '', + initialDescriptionHtml: '', + initialDescriptionText: '', + }, + }).$mount(); + }); + + afterEach(() => { + Vue.http.interceptors = _.without(Vue.http.interceptors, issueShowInterceptor); + }); + + it('should render a title/description and update title/description on update', (done) => { + setTimeout(() => { + expect(document.querySelector('title').innerText).toContain('this is a title (#1)'); + expect(vm.$el.querySelector('.title').innerHTML).toContain('<p>this is a title</p>'); + expect(vm.$el.querySelector('.wiki').innerHTML).toContain('<p>this is a description!</p>'); + expect(vm.$el.querySelector('.js-task-list-field').value).toContain('this is a description'); + + Vue.http.interceptors.push(issueShowInterceptor(issueShowData.secondRequest)); + + setTimeout(() => { + expect(document.querySelector('title').innerText).toContain('2 (#1)'); + expect(vm.$el.querySelector('.title').innerHTML).toContain('<p>2</p>'); + expect(vm.$el.querySelector('.wiki').innerHTML).toContain('<p>42</p>'); + expect(vm.$el.querySelector('.js-task-list-field').value).toContain('42'); + + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/issue_show/components/description_spec.js b/spec/javascripts/issue_show/components/description_spec.js new file mode 100644 index 00000000000..408349cc42d --- /dev/null +++ b/spec/javascripts/issue_show/components/description_spec.js @@ -0,0 +1,99 @@ +import Vue from 'vue'; +import descriptionComponent from '~/issue_show/components/description.vue'; + +describe('Description component', () => { + let vm; + + beforeEach(() => { + const Component = Vue.extend(descriptionComponent); + + if (!document.querySelector('.issuable-meta')) { + const metaData = document.createElement('div'); + metaData.classList.add('issuable-meta'); + metaData.innerHTML = '<span id="task_status"></span><span id="task_status_short"></span>'; + + document.body.appendChild(metaData); + } + + vm = new Component({ + propsData: { + canUpdate: true, + descriptionHtml: 'test', + descriptionText: 'test', + updatedAt: new Date().toString(), + taskStatus: '', + }, + }).$mount(); + }); + + it('animates description changes', (done) => { + vm.descriptionHtml = 'changed'; + + Vue.nextTick(() => { + expect( + vm.$el.querySelector('.wiki').classList.contains('issue-realtime-pre-pulse'), + ).toBeTruthy(); + + setTimeout(() => { + expect( + vm.$el.querySelector('.wiki').classList.contains('issue-realtime-trigger-pulse'), + ).toBeTruthy(); + + done(); + }); + }); + }); + + it('re-inits the TaskList when description changed', (done) => { + spyOn(gl, 'TaskList'); + vm.descriptionHtml = 'changed'; + + setTimeout(() => { + expect( + gl.TaskList, + ).toHaveBeenCalled(); + + done(); + }); + }); + + it('does not re-init the TaskList when canUpdate is false', (done) => { + spyOn(gl, 'TaskList'); + vm.canUpdate = false; + vm.descriptionHtml = 'changed'; + + setTimeout(() => { + expect( + gl.TaskList, + ).not.toHaveBeenCalled(); + + done(); + }); + }); + + describe('taskStatus', () => { + it('adds full taskStatus', (done) => { + vm.taskStatus = '1 of 1'; + + setTimeout(() => { + expect( + document.querySelector('.issuable-meta #task_status').textContent.trim(), + ).toBe('1 of 1'); + + done(); + }); + }); + + it('adds short taskStatus', (done) => { + vm.taskStatus 
= '1 of 1'; + + setTimeout(() => { + expect( + document.querySelector('.issuable-meta #task_status_short').textContent.trim(), + ).toBe('1/1 task'); + + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/issue_show/components/title_spec.js b/spec/javascripts/issue_show/components/title_spec.js new file mode 100644 index 00000000000..2f953e7e92e --- /dev/null +++ b/spec/javascripts/issue_show/components/title_spec.js @@ -0,0 +1,67 @@ +import Vue from 'vue'; +import titleComponent from '~/issue_show/components/title.vue'; + +describe('Title component', () => { + let vm; + + beforeEach(() => { + const Component = Vue.extend(titleComponent); + vm = new Component({ + propsData: { + issuableRef: '#1', + titleHtml: 'Testing <img />', + titleText: 'Testing', + }, + }).$mount(); + }); + + it('renders title HTML', () => { + expect( + vm.$el.innerHTML.trim(), + ).toBe('Testing <img>'); + }); + + it('updates page title when changing titleHtml', (done) => { + spyOn(vm, 'setPageTitle'); + vm.titleHtml = 'test'; + + Vue.nextTick(() => { + expect( + vm.setPageTitle, + ).toHaveBeenCalled(); + + done(); + }); + }); + + it('animates title changes', (done) => { + vm.titleHtml = 'test'; + + Vue.nextTick(() => { + expect( + vm.$el.classList.contains('issue-realtime-pre-pulse'), + ).toBeTruthy(); + + setTimeout(() => { + expect( + vm.$el.classList.contains('issue-realtime-trigger-pulse'), + ).toBeTruthy(); + + done(); + }); + }); + }); + + it('updates page title after changing title', (done) => { + vm.titleHtml = 'changed'; + vm.titleText = 'changed'; + + Vue.nextTick(() => { + expect( + document.querySelector('title').textContent.trim(), + ).toContain('changed'); + + done(); + }); + }); +}); diff --git a/spec/javascripts/issue_show/mock_data.js b/spec/javascripts/issue_show/mock_data.js new file mode 100644 index 00000000000..6683d581bc5 --- /dev/null +++ b/spec/javascripts/issue_show/mock_data.js @@ -0,0 +1,26 @@ +export default { + initialRequest: { + title: '<p>this is a title</p>', + title_text: 'this is a title', + description: '<p>this is a description!</p>', + description_text: 'this is a description', + task_status: '2 of 4 completed', + updated_at: new Date().toString(), + }, + secondRequest: { + title: '<p>2</p>', + title_text: '2', + description: '<p>42</p>', + description_text: '42', + task_status: '0 of 0 completed', + updated_at: new Date().toString(), + }, + issueSpecRequest: { + title: '<p>this is a title</p>', + title_text: 'this is a title', + description: '<li class="task-list-item enabled"><input type="checkbox" class="task-list-item-checkbox">Task List Item</li>', + description_text: '- [ ] Task List Item', + task_status: '0 of 1 completed', + updated_at: new Date().toString(), + }, +}; diff --git a/spec/javascripts/issue_spec.js b/spec/javascripts/issue_spec.js index aabc8bea12f..df97a100b0d 100644 --- a/spec/javascripts/issue_spec.js +++ b/spec/javascripts/issue_spec.js @@ -1,18 +1,17 @@ -/* eslint-disable space-before-function-paren, no-var, one-var, one-var-declaration-per-line, no-use-before-define, comma-dangle, max-len */ +/* eslint-disable space-before-function-paren, one-var, one-var-declaration-per-line, no-use-before-define, comma-dangle, max-len */ import Issue from '~/issue'; -require('~/lib/utils/text_utility'); +import '~/lib/utils/text_utility'; describe('Issue', function() { - var INVALID_URL = 'http://goesnowhere.nothing/whereami'; - var $boxClosed, $boxOpen, $btnClose, $btnReopen; + let $boxClosed, $boxOpen, $btnClose, $btnReopen; 
preloadFixtures('issues/closed-issue.html.raw'); preloadFixtures('issues/issue-with-task-list.html.raw'); preloadFixtures('issues/open-issue.html.raw'); function expectErrorMessage() { - var $flashMessage = $('div.flash-alert'); + const $flashMessage = $('div.flash-alert'); expect($flashMessage).toExist(); expect($flashMessage).toBeVisible(); expect($flashMessage).toHaveText('Unable to update this issue at this time.'); @@ -26,10 +25,28 @@ describe('Issue', function() { expectVisibility($btnReopen, !isIssueOpen); } - function expectPendingRequest(req, $triggeredButton) { - expect(req.type).toBe('PUT'); - expect(req.url).toBe($triggeredButton.attr('href')); - expect($triggeredButton).toHaveProp('disabled', true); + function expectNewBranchButtonState(isPending, canCreate) { + if (Issue.$btnNewBranch.length === 0) { + return; + } + + const $available = Issue.$btnNewBranch.find('.available'); + expect($available).toHaveText('New branch'); + + if (!isPending && canCreate) { + expect($available).toBeVisible(); + } else { + expect($available).toBeHidden(); + } + + const $unavailable = Issue.$btnNewBranch.find('.unavailable'); + expect($unavailable).toHaveText('New branch unavailable'); + + if (!isPending && !canCreate) { + expect($unavailable).toBeVisible(); + } else { + expect($unavailable).toBeHidden(); + } } function expectVisibility($element, shouldBeVisible) { @@ -64,12 +81,6 @@ describe('Issue', function() { this.issue = new Issue(); }); - it('modifies the Markdown field', function() { - spyOn(jQuery, 'ajax').and.stub(); - $('input[type=checkbox]').attr('checked', true).trigger('change'); - expect($('.js-task-list-field').val()).toBe('- [x] Task List Item'); - }); - it('submits an ajax request on tasklist:changed', function() { spyOn(jQuery, 'ajax').and.callFake(function(req) { expect(req.type).toBe('PATCH'); @@ -81,100 +92,107 @@ describe('Issue', function() { }); }); - describe('close issue', function() { - beforeEach(function() { - loadFixtures('issues/open-issue.html.raw'); - findElements(); - this.issue = new Issue(); - - expectIssueState(true); - }); + [true, false].forEach((isIssueInitiallyOpen) => { + describe(`with ${isIssueInitiallyOpen ? 'open' : 'closed'} issue`, function() { + const action = isIssueInitiallyOpen ? 'close' : 'reopen'; + + function ajaxSpy(req) { + if (req.url === this.$triggeredButton.attr('href')) { + expect(req.type).toBe('PUT'); + expect(this.$triggeredButton).toHaveProp('disabled', true); + expectNewBranchButtonState(true, false); + return this.issueStateDeferred; + } else if (req.url === Issue.createMrDropdownWrap.dataset.canCreatePath) { + expect(req.type).toBe('GET'); + expectNewBranchButtonState(true, false); + return this.canCreateBranchDeferred; + } + + expect(req.url).toBe('unexpected'); + return null; + } + + beforeEach(function() { + if (isIssueInitiallyOpen) { + loadFixtures('issues/open-issue.html.raw'); + } else { + loadFixtures('issues/closed-issue.html.raw'); + } + + findElements(); + this.issue = new Issue(); + expectIssueState(isIssueInitiallyOpen); + this.$triggeredButton = isIssueInitiallyOpen ? 
$btnClose : $btnReopen; + + this.$projectIssuesCounter = $('.issue_counter'); + this.$projectIssuesCounter.text('1,001'); + + this.issueStateDeferred = new jQuery.Deferred(); + this.canCreateBranchDeferred = new jQuery.Deferred(); + + spyOn(jQuery, 'ajax').and.callFake(ajaxSpy.bind(this)); + }); - it('closes an issue', function() { - spyOn(jQuery, 'ajax').and.callFake(function(req) { - expectPendingRequest(req, $btnClose); - req.success({ + it(`${action}s the issue`, function() { + this.$triggeredButton.trigger('click'); + this.issueStateDeferred.resolve({ id: 34 }); - }); - - $btnClose.trigger('click'); + this.canCreateBranchDeferred.resolve({ + can_create_branch: !isIssueInitiallyOpen + }); - expectIssueState(false); - expect($btnClose).toHaveProp('disabled', false); - expect($('.issue_counter')).toHaveText(0); - }); + expectIssueState(!isIssueInitiallyOpen); + expect(this.$triggeredButton).toHaveProp('disabled', false); + expect(this.$projectIssuesCounter.text()).toBe(isIssueInitiallyOpen ? '1,000' : '1,002'); + expectNewBranchButtonState(false, !isIssueInitiallyOpen); + }); - it('fails to close an issue with success:false', function() { - spyOn(jQuery, 'ajax').and.callFake(function(req) { - expectPendingRequest(req, $btnClose); - req.success({ + it(`fails to ${action} the issue if saved:false`, function() { + this.$triggeredButton.trigger('click'); + this.issueStateDeferred.resolve({ saved: false }); - }); - - $btnClose.attr('href', INVALID_URL); - $btnClose.trigger('click'); - - expectIssueState(true); - expect($btnClose).toHaveProp('disabled', false); - expectErrorMessage(); - expect($('.issue_counter')).toHaveText(1); - }); + this.canCreateBranchDeferred.resolve({ + can_create_branch: isIssueInitiallyOpen + }); - it('fails to closes an issue with HTTP error', function() { - spyOn(jQuery, 'ajax').and.callFake(function(req) { - expectPendingRequest(req, $btnClose); - req.error(); + expectIssueState(isIssueInitiallyOpen); + expect(this.$triggeredButton).toHaveProp('disabled', false); + expectErrorMessage(); + expect(this.$projectIssuesCounter.text()).toBe('1,001'); + expectNewBranchButtonState(false, isIssueInitiallyOpen); }); - $btnClose.attr('href', INVALID_URL); - $btnClose.trigger('click'); - - expectIssueState(true); - expect($btnClose).toHaveProp('disabled', true); - expectErrorMessage(); - expect($('.issue_counter')).toHaveText(1); - }); - - it('updates counter', () => { - spyOn(jQuery, 'ajax').and.callFake(function(req) { - expectPendingRequest(req, $btnClose); - req.success({ - id: 34 + it(`fails to ${action} the issue if HTTP error occurs`, function() { + this.$triggeredButton.trigger('click'); + this.issueStateDeferred.reject(); + this.canCreateBranchDeferred.resolve({ + can_create_branch: isIssueInitiallyOpen }); - }); - expect($('.issue_counter')).toHaveText(1); - $('.issue_counter').text('1,001'); - expect($('.issue_counter').text()).toEqual('1,001'); - $btnClose.trigger('click'); - expect($('.issue_counter').text()).toEqual('1,000'); - }); - }); + expectIssueState(isIssueInitiallyOpen); + expect(this.$triggeredButton).toHaveProp('disabled', true); + expectErrorMessage(); + expect(this.$projectIssuesCounter.text()).toBe('1,001'); + expectNewBranchButtonState(false, isIssueInitiallyOpen); + }); - describe('reopen issue', function() { - beforeEach(function() { - loadFixtures('issues/closed-issue.html.raw'); - findElements(); - this.issue = new Issue(); + it('disables the new branch button if Ajax call fails', function() { + this.$triggeredButton.trigger('click'); + 
this.issueStateDeferred.reject(); + this.canCreateBranchDeferred.reject(); - expectIssueState(false); - }); - - it('reopens an issue', function() { - spyOn(jQuery, 'ajax').and.callFake(function(req) { - expectPendingRequest(req, $btnReopen); - req.success({ - id: 34 - }); + expectNewBranchButtonState(false, false); }); - $btnReopen.trigger('click'); + it('does not trigger Ajax call if new branch button is missing', function() { + Issue.$btnNewBranch = $(); + this.canCreateBranchDeferred = null; - expectIssueState(true); - expect($btnReopen).toHaveProp('disabled', false); - expect($('.issue_counter')).toHaveText(1); + this.$triggeredButton.trigger('click'); + this.issueStateDeferred.reject(); + }); }); }); }); diff --git a/spec/javascripts/labels_issue_sidebar_spec.js b/spec/javascripts/labels_issue_sidebar_spec.js index 37e038c16da..c99f379b871 100644 --- a/spec/javascripts/labels_issue_sidebar_spec.js +++ b/spec/javascripts/labels_issue_sidebar_spec.js @@ -2,15 +2,14 @@ /* global IssuableContext */ /* global LabelsSelect */ -require('~/lib/utils/type_utility'); -require('~/gl_dropdown'); -require('select2'); -require('vendor/jquery.nicescroll'); -require('~/api'); -require('~/create_label'); -require('~/issuable_context'); -require('~/users_select'); -require('~/labels_select'); +import '~/gl_dropdown'; +import 'select2'; +import 'vendor/jquery.nicescroll'; +import '~/api'; +import '~/create_label'; +import '~/issuable_context'; +import '~/users_select'; +import '~/labels_select'; (() => { let saveLabelCount = 0; diff --git a/spec/javascripts/landing_spec.js b/spec/javascripts/landing_spec.js new file mode 100644 index 00000000000..7916073190a --- /dev/null +++ b/spec/javascripts/landing_spec.js @@ -0,0 +1,160 @@ +import Landing from '~/landing'; +import Cookies from 'js-cookie'; + +describe('Landing', function () { + describe('class constructor', function () { + beforeEach(function () { + this.landingElement = {}; + this.dismissButton = {}; + this.cookieName = 'cookie_name'; + + this.landing = new Landing(this.landingElement, this.dismissButton, this.cookieName); + }); + + it('should set .landing', function () { + expect(this.landing.landingElement).toBe(this.landingElement); + }); + + it('should set .cookieName', function () { + expect(this.landing.cookieName).toBe(this.cookieName); + }); + + it('should set .dismissButton', function () { + expect(this.landing.dismissButton).toBe(this.dismissButton); + }); + + it('should set .eventWrapper', function () { + expect(this.landing.eventWrapper).toEqual({}); + }); + }); + + describe('toggle', function () { + beforeEach(function () { + this.isDismissed = false; + this.landingElement = { classList: jasmine.createSpyObj('classList', ['toggle']) }; + this.landing = { + isDismissed: () => {}, + addEvents: () => {}, + landingElement: this.landingElement, + }; + + spyOn(this.landing, 'isDismissed').and.returnValue(this.isDismissed); + spyOn(this.landing, 'addEvents'); + + Landing.prototype.toggle.call(this.landing); + }); + + it('should call .isDismissed', function () { + expect(this.landing.isDismissed).toHaveBeenCalled(); + }); + + it('should call .classList.toggle', function () { + expect(this.landingElement.classList.toggle).toHaveBeenCalledWith('hidden', this.isDismissed); + }); + + it('should call .addEvents', function () { + expect(this.landing.addEvents).toHaveBeenCalled(); + }); + + describe('if isDismissed is true', function () { + beforeEach(function () { + this.isDismissed = true; + this.landingElement = { classList: 
jasmine.createSpyObj('classList', ['toggle']) }; + this.landing = { + isDismissed: () => {}, + addEvents: () => {}, + landingElement: this.landingElement, + }; + + spyOn(this.landing, 'isDismissed').and.returnValue(this.isDismissed); + spyOn(this.landing, 'addEvents'); + + this.landing.isDismissed.calls.reset(); + + Landing.prototype.toggle.call(this.landing); + }); + + it('should not call .addEvents', function () { + expect(this.landing.addEvents).not.toHaveBeenCalled(); + }); + }); + }); + + describe('addEvents', function () { + beforeEach(function () { + this.dismissButton = jasmine.createSpyObj('dismissButton', ['addEventListener']); + this.eventWrapper = {}; + this.landing = { + eventWrapper: this.eventWrapper, + dismissButton: this.dismissButton, + dismissLanding: () => {}, + }; + + Landing.prototype.addEvents.call(this.landing); + }); + + it('should set .eventWrapper.dismissLanding', function () { + expect(this.eventWrapper.dismissLanding).toEqual(jasmine.any(Function)); + }); + + it('should call .addEventListener', function () { + expect(this.dismissButton.addEventListener).toHaveBeenCalledWith('click', this.eventWrapper.dismissLanding); + }); + }); + + describe('removeEvents', function () { + beforeEach(function () { + this.dismissButton = jasmine.createSpyObj('dismissButton', ['removeEventListener']); + this.eventWrapper = { dismissLanding: () => {} }; + this.landing = { + eventWrapper: this.eventWrapper, + dismissButton: this.dismissButton, + }; + + Landing.prototype.removeEvents.call(this.landing); + }); + + it('should call .removeEventListener', function () { + expect(this.dismissButton.removeEventListener).toHaveBeenCalledWith('click', this.eventWrapper.dismissLanding); + }); + }); + + describe('dismissLanding', function () { + beforeEach(function () { + this.landingElement = { classList: jasmine.createSpyObj('classList', ['add']) }; + this.cookieName = 'cookie_name'; + this.landing = { landingElement: this.landingElement, cookieName: this.cookieName }; + + spyOn(Cookies, 'set'); + + Landing.prototype.dismissLanding.call(this.landing); + }); + + it('should call .classList.add', function () { + expect(this.landingElement.classList.add).toHaveBeenCalledWith('hidden'); + }); + + it('should call Cookies.set', function () { + expect(Cookies.set).toHaveBeenCalledWith(this.cookieName, 'true', { expires: 365 }); + }); + }); + + describe('isDismissed', function () { + beforeEach(function () { + this.cookieName = 'cookie_name'; + this.landing = { cookieName: this.cookieName }; + + spyOn(Cookies, 'get').and.returnValue('true'); + + this.isDismissed = Landing.prototype.isDismissed.call(this.landing); + }); + + it('should call Cookies.get', function () { + expect(Cookies.get).toHaveBeenCalledWith(this.cookieName); + }); + + it('should return a boolean', function () { + expect(typeof this.isDismissed).toEqual('boolean'); + }); + }); +}); diff --git a/spec/javascripts/lib/utils/accessor_spec.js b/spec/javascripts/lib/utils/accessor_spec.js new file mode 100644 index 00000000000..b768d6f2a68 --- /dev/null +++ b/spec/javascripts/lib/utils/accessor_spec.js @@ -0,0 +1,78 @@ +import AccessorUtilities from '~/lib/utils/accessor'; + +describe('AccessorUtilities', () => { + const testError = new Error('test error'); + + describe('isPropertyAccessSafe', () => { + let base; + + it('should return `true` if access is safe', () => { + base = { testProp: 'testProp' }; + + expect(AccessorUtilities.isPropertyAccessSafe(base, 'testProp')).toBe(true); + }); + + it('should return `false` if access throws an 
error', () => { + base = { get testProp() { throw testError; } }; + + expect(AccessorUtilities.isPropertyAccessSafe(base, 'testProp')).toBe(false); + }); + + it('should return `false` if property is undefined', () => { + base = {}; + + expect(AccessorUtilities.isPropertyAccessSafe(base, 'testProp')).toBe(false); + }); + }); + + describe('isFunctionCallSafe', () => { + const base = {}; + + it('should return `true` if calling is safe', () => { + base.func = () => {}; + + expect(AccessorUtilities.isFunctionCallSafe(base, 'func')).toBe(true); + }); + + it('should return `false` if calling throws an error', () => { + base.func = () => { throw new Error('test error'); }; + + expect(AccessorUtilities.isFunctionCallSafe(base, 'func')).toBe(false); + }); + + it('should return `false` if function is undefined', () => { + base.func = undefined; + + expect(AccessorUtilities.isFunctionCallSafe(base, 'func')).toBe(false); + }); + }); + + describe('isLocalStorageAccessSafe', () => { + beforeEach(() => { + spyOn(window.localStorage, 'setItem'); + spyOn(window.localStorage, 'removeItem'); + }); + + it('should return `true` if access is safe', () => { + expect(AccessorUtilities.isLocalStorageAccessSafe()).toBe(true); + }); + + it('should return `false` if access to .setItem isnt safe', () => { + window.localStorage.setItem.and.callFake(() => { throw testError; }); + + expect(AccessorUtilities.isLocalStorageAccessSafe()).toBe(false); + }); + + it('should set a test item if access is safe', () => { + AccessorUtilities.isLocalStorageAccessSafe(); + + expect(window.localStorage.setItem).toHaveBeenCalledWith('isLocalStorageAccessSafe', 'true'); + }); + + it('should remove the test item if access is safe', () => { + AccessorUtilities.isLocalStorageAccessSafe(); + + expect(window.localStorage.removeItem).toHaveBeenCalledWith('isLocalStorageAccessSafe'); + }); + }); +}); diff --git a/spec/javascripts/lib/utils/ajax_cache_spec.js b/spec/javascripts/lib/utils/ajax_cache_spec.js new file mode 100644 index 00000000000..e1747a82329 --- /dev/null +++ b/spec/javascripts/lib/utils/ajax_cache_spec.js @@ -0,0 +1,158 @@ +import AjaxCache from '~/lib/utils/ajax_cache'; + +describe('AjaxCache', () => { + const dummyEndpoint = '/AjaxCache/dummyEndpoint'; + const dummyResponse = { + important: 'dummy data', + }; + + beforeEach(() => { + AjaxCache.internalStorage = { }; + AjaxCache.pendingRequests = { }; + }); + + describe('get', () => { + it('returns undefined if cache is empty', () => { + const data = AjaxCache.get(dummyEndpoint); + + expect(data).toBe(undefined); + }); + + it('returns undefined if cache contains no matching data', () => { + AjaxCache.internalStorage['not matching'] = dummyResponse; + + const data = AjaxCache.get(dummyEndpoint); + + expect(data).toBe(undefined); + }); + + it('returns matching data', () => { + AjaxCache.internalStorage[dummyEndpoint] = dummyResponse; + + const data = AjaxCache.get(dummyEndpoint); + + expect(data).toBe(dummyResponse); + }); + }); + + describe('hasData', () => { + it('returns false if cache is empty', () => { + expect(AjaxCache.hasData(dummyEndpoint)).toBe(false); + }); + + it('returns false if cache contains no matching data', () => { + AjaxCache.internalStorage['not matching'] = dummyResponse; + + expect(AjaxCache.hasData(dummyEndpoint)).toBe(false); + }); + + it('returns true if data is available', () => { + AjaxCache.internalStorage[dummyEndpoint] = dummyResponse; + + expect(AjaxCache.hasData(dummyEndpoint)).toBe(true); + }); + }); + + describe('remove', () => { + it('does 
nothing if cache is empty', () => { + AjaxCache.remove(dummyEndpoint); + + expect(AjaxCache.internalStorage).toEqual({ }); + }); + + it('does nothing if cache contains no matching data', () => { + AjaxCache.internalStorage['not matching'] = dummyResponse; + + AjaxCache.remove(dummyEndpoint); + + expect(AjaxCache.internalStorage['not matching']).toBe(dummyResponse); + }); + + it('removes matching data', () => { + AjaxCache.internalStorage[dummyEndpoint] = dummyResponse; + + AjaxCache.remove(dummyEndpoint); + + expect(AjaxCache.internalStorage).toEqual({ }); + }); + }); + + describe('retrieve', () => { + let ajaxSpy; + + beforeEach(() => { + spyOn(jQuery, 'ajax').and.callFake(url => ajaxSpy(url)); + }); + + it('stores and returns data from Ajax call if cache is empty', (done) => { + ajaxSpy = (url) => { + expect(url).toBe(dummyEndpoint); + const deferred = $.Deferred(); + deferred.resolve(dummyResponse); + return deferred.promise(); + }; + + AjaxCache.retrieve(dummyEndpoint) + .then((data) => { + expect(data).toBe(dummyResponse); + expect(AjaxCache.internalStorage[dummyEndpoint]).toBe(dummyResponse); + }) + .then(done) + .catch(fail); + }); + + it('makes no Ajax call if request is pending', () => { + const responseDeferred = $.Deferred(); + + ajaxSpy = (url) => { + expect(url).toBe(dummyEndpoint); + // neither reject nor resolve to keep request pending + return responseDeferred.promise(); + }; + + const unexpectedResponse = data => fail(`Did not expect response: ${data}`); + + AjaxCache.retrieve(dummyEndpoint) + .then(unexpectedResponse) + .catch(fail); + + AjaxCache.retrieve(dummyEndpoint) + .then(unexpectedResponse) + .catch(fail); + + expect($.ajax.calls.count()).toBe(1); + }); + + it('returns undefined if Ajax call fails and cache is empty', (done) => { + const dummyStatusText = 'exploded'; + const dummyErrorMessage = 'server exploded'; + ajaxSpy = (url) => { + expect(url).toBe(dummyEndpoint); + const deferred = $.Deferred(); + deferred.reject(null, dummyStatusText, dummyErrorMessage); + return deferred.promise(); + }; + + AjaxCache.retrieve(dummyEndpoint) + .then(data => fail(`Received unexpected data: ${JSON.stringify(data)}`)) + .catch((error) => { + expect(error.message).toBe(`${dummyEndpoint}: ${dummyErrorMessage}`); + expect(error.textStatus).toBe(dummyStatusText); + done(); + }) + .catch(fail); + }); + + it('makes no Ajax call if matching data exists', (done) => { + AjaxCache.internalStorage[dummyEndpoint] = dummyResponse; + ajaxSpy = () => fail(new Error('expected no Ajax call!')); + + AjaxCache.retrieve(dummyEndpoint) + .then((data) => { + expect(data).toBe(dummyResponse); + }) + .then(done) + .catch(fail); + }); + }); +}); diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js index 7cf39d37181..e9bffd74d90 100644 --- a/spec/javascripts/lib/utils/common_utils_spec.js +++ b/spec/javascripts/lib/utils/common_utils_spec.js @@ -1,4 +1,6 @@ -require('~/lib/utils/common_utils'); +/* eslint-disable promise/catch-or-return */ + +import '~/lib/utils/common_utils'; (() => { describe('common_utils', () => { @@ -39,6 +41,16 @@ require('~/lib/utils/common_utils'); const paramsArray = gl.utils.getUrlParamsArray(); expect(paramsArray[0][0] !== '?').toBe(true); }); + + it('should decode params', () => { + history.pushState('', '', '?label_name%5B%5D=test'); + + expect( + gl.utils.getUrlParamsArray()[0], + ).toBe('label_name[]=test'); + + history.pushState('', '', '?'); + }); }); describe('gl.utils.handleLocationHash', () => { @@ -46,6 
+58,10 @@ require('~/lib/utils/common_utils'); spyOn(window.document, 'getElementById').and.callThrough(); }); + afterEach(() => { + window.history.pushState({}, null, ''); + }); + function expectGetElementIdToHaveBeenCalledWith(elementId) { expect(window.document.getElementById).toHaveBeenCalledWith(elementId); } @@ -75,11 +91,56 @@ require('~/lib/utils/common_utils'); }); }); + describe('gl.utils.setParamInURL', () => { + afterEach(() => { + window.history.pushState({}, null, ''); + }); + + it('should return the parameter', () => { + window.history.replaceState({}, null, ''); + + expect(gl.utils.setParamInURL('page', 156)).toBe('?page=156'); + expect(gl.utils.setParamInURL('page', '156')).toBe('?page=156'); + }); + + it('should update the existing parameter when its a number', () => { + window.history.pushState({}, null, '?page=15'); + + expect(gl.utils.setParamInURL('page', 16)).toBe('?page=16'); + expect(gl.utils.setParamInURL('page', '16')).toBe('?page=16'); + expect(gl.utils.setParamInURL('page', true)).toBe('?page=true'); + }); + + it('should update the existing parameter when its a string', () => { + window.history.pushState({}, null, '?scope=all'); + + expect(gl.utils.setParamInURL('scope', 'finished')).toBe('?scope=finished'); + }); + + it('should update the existing parameter when more than one parameter exists', () => { + window.history.pushState({}, null, '?scope=all&page=15'); + + expect(gl.utils.setParamInURL('scope', 'finished')).toBe('?scope=finished&page=15'); + }); + + it('should add a new parameter to the end of the existing ones', () => { + window.history.pushState({}, null, '?scope=all'); + + expect(gl.utils.setParamInURL('page', 16)).toBe('?scope=all&page=16'); + expect(gl.utils.setParamInURL('page', '16')).toBe('?scope=all&page=16'); + expect(gl.utils.setParamInURL('page', true)).toBe('?scope=all&page=true'); + }); + }); + describe('gl.utils.getParameterByName', () => { beforeEach(() => { window.history.pushState({}, null, '?scope=all&p=2'); }); + afterEach(() => { + window.history.replaceState({}, null, null); + }); + it('should return valid parameter', () => { const value = gl.utils.getParameterByName('scope'); expect(value).toBe('all'); @@ -261,5 +322,66 @@ require('~/lib/utils/common_utils'); }); }, 10000); }); + + describe('gl.utils.setFavicon', () => { + it('should set page favicon to provided favicon', () => { + const faviconPath = '//custom_favicon'; + const fakeLink = { + setAttribute() {}, + }; + + spyOn(window.document, 'getElementById').and.callFake(() => fakeLink); + spyOn(fakeLink, 'setAttribute').and.callFake((attr, val) => { + expect(attr).toEqual('href'); + expect(val.indexOf(faviconPath) > -1).toBe(true); + }); + gl.utils.setFavicon(faviconPath); + }); + }); + + describe('gl.utils.resetFavicon', () => { + it('should reset page favicon to tanuki', () => { + const fakeLink = { + setAttribute() {}, + }; + + spyOn(window.document, 'getElementById').and.callFake(() => fakeLink); + spyOn(fakeLink, 'setAttribute').and.callFake((attr, val) => { + expect(attr).toEqual('href'); + expect(val).toMatch(/favicon/); + }); + gl.utils.resetFavicon(); + }); + }); + + describe('gl.utils.setCiStatusFavicon', () => { + it('should set page favicon to CI status favicon based on provided status', () => { + const BUILD_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/builds/1/status.json`; + const FAVICON_PATH = '//icon_status_success'; + const spySetFavicon = spyOn(gl.utils, 'setFavicon').and.stub(); + const spyResetFavicon = spyOn(gl.utils, 
'resetFavicon').and.stub(); + spyOn($, 'ajax').and.callFake(function (options) { + options.success({ favicon: FAVICON_PATH }); + expect(spySetFavicon).toHaveBeenCalledWith(FAVICON_PATH); + options.success(); + expect(spyResetFavicon).toHaveBeenCalled(); + options.error(); + expect(spyResetFavicon).toHaveBeenCalled(); + }); + + gl.utils.setCiStatusFavicon(BUILD_URL); + }); + }); + + describe('gl.utils.ajaxPost', () => { + it('should perform `$.ajax` call and do `POST` request', () => { + const requestURL = '/some/random/api'; + const data = { keyname: 'value' }; + const ajaxSpy = spyOn($, 'ajax').and.callFake(() => {}); + + gl.utils.ajaxPost(requestURL, data); + expect(ajaxSpy.calls.allArgs()[0][0].type).toEqual('POST'); + }); + }); }); })(); diff --git a/spec/javascripts/lib/utils/number_utility_spec.js b/spec/javascripts/lib/utils/number_utility_spec.js new file mode 100644 index 00000000000..90b12c9f115 --- /dev/null +++ b/spec/javascripts/lib/utils/number_utility_spec.js @@ -0,0 +1,48 @@ +import { formatRelevantDigits, bytesToKiB } from '~/lib/utils/number_utils'; + +describe('Number Utils', () => { + describe('formatRelevantDigits', () => { + it('returns an empty string when the number is NaN', () => { + expect(formatRelevantDigits('fail')).toBe(''); + }); + + it('returns 4 decimals when there is 4 plus digits to the left', () => { + const formattedNumber = formatRelevantDigits('1000.1234567'); + const rightFromDecimal = formattedNumber.split('.')[1]; + const leftFromDecimal = formattedNumber.split('.')[0]; + expect(rightFromDecimal.length).toBe(4); + expect(leftFromDecimal.length).toBe(4); + }); + + it('returns 3 decimals when there is 1 digit to the left', () => { + const formattedNumber = formatRelevantDigits('0.1234567'); + const rightFromDecimal = formattedNumber.split('.')[1]; + const leftFromDecimal = formattedNumber.split('.')[0]; + expect(rightFromDecimal.length).toBe(3); + expect(leftFromDecimal.length).toBe(1); + }); + + it('returns 2 decimals when there is 2 digits to the left', () => { + const formattedNumber = formatRelevantDigits('10.1234567'); + const rightFromDecimal = formattedNumber.split('.')[1]; + const leftFromDecimal = formattedNumber.split('.')[0]; + expect(rightFromDecimal.length).toBe(2); + expect(leftFromDecimal.length).toBe(2); + }); + + it('returns 1 decimal when there is 3 digits to the left', () => { + const formattedNumber = formatRelevantDigits('100.1234567'); + const rightFromDecimal = formattedNumber.split('.')[1]; + const leftFromDecimal = formattedNumber.split('.')[0]; + expect(rightFromDecimal.length).toBe(1); + expect(leftFromDecimal.length).toBe(3); + }); + }); + + describe('bytesToKiB', () => { + it('calculates KiB for the given bytes', () => { + expect(bytesToKiB(1024)).toEqual(1); + expect(bytesToKiB(1000)).toEqual(0.9765625); + }); + }); +}); diff --git a/spec/javascripts/lib/utils/poll_spec.js b/spec/javascripts/lib/utils/poll_spec.js index e3429c2a1cb..22f30191ab9 100644 --- a/spec/javascripts/lib/utils/poll_spec.js +++ b/spec/javascripts/lib/utils/poll_spec.js @@ -1,140 +1,118 @@ -import Vue from 'vue'; -import VueResource from 'vue-resource'; import Poll from '~/lib/utils/poll'; -Vue.use(VueResource); +const waitForAllCallsToFinish = (service, waitForCount, successCallback) => { + const timer = () => { + setTimeout(() => { + if (service.fetch.calls.count() === waitForCount) { + successCallback(); + } else { + timer(); + } + }, 0); + }; + + timer(); +}; + +function mockServiceCall(service, response, shouldFail = false) { + const action = 
shouldFail ? Promise.reject : Promise.resolve; + const responseObject = response; -class ServiceMock { - constructor(endpoint) { - this.service = Vue.resource(endpoint); - } + if (!responseObject.headers) responseObject.headers = {}; - fetch() { - return this.service.get(); - } + service.fetch.and.callFake(action.bind(Promise, responseObject)); } describe('Poll', () => { - let callbacks; + const service = jasmine.createSpyObj('service', ['fetch']); + const callbacks = jasmine.createSpyObj('callbacks', ['success', 'error']); - beforeEach(() => { - callbacks = { - success: () => {}, - error: () => {}, - }; - - spyOn(callbacks, 'success'); - spyOn(callbacks, 'error'); + afterEach(() => { + callbacks.success.calls.reset(); + callbacks.error.calls.reset(); + service.fetch.calls.reset(); }); it('calls the success callback when no header for interval is provided', (done) => { - const successInterceptor = (request, next) => { - next(request.respondWith(JSON.stringify([]), { status: 200 })); - }; - - Vue.http.interceptors.push(successInterceptor); + mockServiceCall(service, { status: 200 }); new Poll({ - resource: new ServiceMock('endpoint'), + resource: service, method: 'fetch', successCallback: callbacks.success, errorCallback: callbacks.error, }).makeRequest(); - setTimeout(() => { + waitForAllCallsToFinish(service, 1, () => { expect(callbacks.success).toHaveBeenCalled(); expect(callbacks.error).not.toHaveBeenCalled(); - done(); - }, 0); - Vue.http.interceptors = _.without(Vue.http.interceptors, successInterceptor); + done(); + }); }); it('calls the error callback whe the http request returns an error', (done) => { - const errorInterceptor = (request, next) => { - next(request.respondWith(JSON.stringify([]), { status: 500 })); - }; - - Vue.http.interceptors.push(errorInterceptor); + mockServiceCall(service, { status: 500 }, true); new Poll({ - resource: new ServiceMock('endpoint'), + resource: service, method: 'fetch', successCallback: callbacks.success, errorCallback: callbacks.error, }).makeRequest(); - setTimeout(() => { + waitForAllCallsToFinish(service, 1, () => { expect(callbacks.success).not.toHaveBeenCalled(); expect(callbacks.error).toHaveBeenCalled(); - done(); - }, 0); - Vue.http.interceptors = _.without(Vue.http.interceptors, errorInterceptor); + done(); + }); }); it('should call the success callback when the interval header is -1', (done) => { - const intervalInterceptor = (request, next) => { - next(request.respondWith(JSON.stringify([]), { status: 200, headers: { 'poll-interval': -1 } })); - }; - - Vue.http.interceptors.push(intervalInterceptor); + mockServiceCall(service, { status: 200, headers: { 'poll-interval': -1 } }); new Poll({ - resource: new ServiceMock('endpoint'), + resource: service, method: 'fetch', successCallback: callbacks.success, errorCallback: callbacks.error, - }).makeRequest(); - - setTimeout(() => { + }).makeRequest().then(() => { expect(callbacks.success).toHaveBeenCalled(); expect(callbacks.error).not.toHaveBeenCalled(); - done(); - }, 0); - Vue.http.interceptors = _.without(Vue.http.interceptors, intervalInterceptor); + done(); + }).catch(done.fail); }); it('starts polling when http status is 200 and interval header is provided', (done) => { - const pollInterceptor = (request, next) => { - next(request.respondWith(JSON.stringify([]), { status: 200, headers: { 'poll-interval': 2 } })); - }; - - Vue.http.interceptors.push(pollInterceptor); - - const service = new ServiceMock('endpoint'); - spyOn(service, 'fetch').and.callThrough(); + mockServiceCall(service, 
{ status: 200, headers: { 'poll-interval': 1 } }); - new Poll({ + const Polling = new Poll({ resource: service, method: 'fetch', data: { page: 1 }, successCallback: callbacks.success, errorCallback: callbacks.error, - }).makeRequest(); + }); + + Polling.makeRequest(); + + waitForAllCallsToFinish(service, 2, () => { + Polling.stop(); - setTimeout(() => { expect(service.fetch.calls.count()).toEqual(2); expect(service.fetch).toHaveBeenCalledWith({ page: 1 }); expect(callbacks.success).toHaveBeenCalled(); expect(callbacks.error).not.toHaveBeenCalled(); - done(); - }, 5); - Vue.http.interceptors = _.without(Vue.http.interceptors, pollInterceptor); + done(); + }); }); describe('stop', () => { it('stops polling when method is called', (done) => { - const pollInterceptor = (request, next) => { - next(request.respondWith(JSON.stringify([]), { status: 200, headers: { 'poll-interval': 2 } })); - }; - - Vue.http.interceptors.push(pollInterceptor); - - const service = new ServiceMock('endpoint'); - spyOn(service, 'fetch').and.callThrough(); + mockServiceCall(service, { status: 200, headers: { 'poll-interval': 1 } }); const Polling = new Poll({ resource: service, @@ -150,28 +128,19 @@ describe('Poll', () => { Polling.makeRequest(); - setTimeout(() => { + waitForAllCallsToFinish(service, 1, () => { expect(service.fetch.calls.count()).toEqual(1); expect(service.fetch).toHaveBeenCalledWith({ page: 1 }); expect(Polling.stop).toHaveBeenCalled(); - done(); - }, 100); - Vue.http.interceptors = _.without(Vue.http.interceptors, pollInterceptor); + done(); + }); }); }); describe('restart', () => { it('should restart polling when its called', (done) => { - const pollInterceptor = (request, next) => { - next(request.respondWith(JSON.stringify([]), { status: 200, headers: { 'poll-interval': 2 } })); - }; - - Vue.http.interceptors.push(pollInterceptor); - - const service = new ServiceMock('endpoint'); - - spyOn(service, 'fetch').and.callThrough(); + mockServiceCall(service, { status: 200, headers: { 'poll-interval': 1 } }); const Polling = new Poll({ resource: service, @@ -187,17 +156,20 @@ describe('Poll', () => { }); spyOn(Polling, 'stop').and.callThrough(); + spyOn(Polling, 'restart').and.callThrough(); Polling.makeRequest(); - setTimeout(() => { + waitForAllCallsToFinish(service, 2, () => { + Polling.stop(); + expect(service.fetch.calls.count()).toEqual(2); expect(service.fetch).toHaveBeenCalledWith({ page: 1 }); expect(Polling.stop).toHaveBeenCalled(); - done(); - }, 10); + expect(Polling.restart).toHaveBeenCalled(); - Vue.http.interceptors = _.without(Vue.http.interceptors, pollInterceptor); + done(); + }); }); }); }); diff --git a/spec/javascripts/lib/utils/text_utility_spec.js b/spec/javascripts/lib/utils/text_utility_spec.js index 4200e943121..ca1b1b7cc3c 100644 --- a/spec/javascripts/lib/utils/text_utility_spec.js +++ b/spec/javascripts/lib/utils/text_utility_spec.js @@ -1,110 +1,108 @@ -require('~/lib/utils/text_utility'); +import '~/lib/utils/text_utility'; -(() => { - describe('text_utility', () => { - describe('gl.text.getTextWidth', () => { - it('returns zero width when no text is passed', () => { - expect(gl.text.getTextWidth('')).toBe(0); - }); +describe('text_utility', () => { + describe('gl.text.getTextWidth', () => { + it('returns zero width when no text is passed', () => { + expect(gl.text.getTextWidth('')).toBe(0); + }); - it('returns zero width when no text is passed and font is passed', () => { - expect(gl.text.getTextWidth('', '100px sans-serif')).toBe(0); - }); + it('returns zero width when 
no text is passed and font is passed', () => { + expect(gl.text.getTextWidth('', '100px sans-serif')).toBe(0); + }); - it('returns width when text is passed', () => { - expect(gl.text.getTextWidth('foo') > 0).toBe(true); - }); + it('returns width when text is passed', () => { + expect(gl.text.getTextWidth('foo') > 0).toBe(true); + }); - it('returns bigger width when font is larger', () => { - const largeFont = gl.text.getTextWidth('foo', '100px sans-serif'); - const regular = gl.text.getTextWidth('foo', '10px sans-serif'); - expect(largeFont > regular).toBe(true); - }); + it('returns bigger width when font is larger', () => { + const largeFont = gl.text.getTextWidth('foo', '100px sans-serif'); + const regular = gl.text.getTextWidth('foo', '10px sans-serif'); + expect(largeFont > regular).toBe(true); }); + }); - describe('gl.text.pluralize', () => { - it('returns pluralized', () => { - expect(gl.text.pluralize('test', 2)).toBe('tests'); - }); + describe('gl.text.pluralize', () => { + it('returns pluralized', () => { + expect(gl.text.pluralize('test', 2)).toBe('tests'); + }); - it('returns pluralized when count is 0', () => { - expect(gl.text.pluralize('test', 0)).toBe('tests'); - }); + it('returns pluralized when count is 0', () => { + expect(gl.text.pluralize('test', 0)).toBe('tests'); + }); - it('does not return pluralized', () => { - expect(gl.text.pluralize('test', 1)).toBe('test'); - }); + it('does not return pluralized', () => { + expect(gl.text.pluralize('test', 1)).toBe('test'); }); + }); - describe('gl.text.highCountTrim', () => { - it('returns 99+ for count >= 100', () => { - expect(gl.text.highCountTrim(105)).toBe('99+'); - expect(gl.text.highCountTrim(100)).toBe('99+'); - }); + describe('gl.text.highCountTrim', () => { + it('returns 99+ for count >= 100', () => { + expect(gl.text.highCountTrim(105)).toBe('99+'); + expect(gl.text.highCountTrim(100)).toBe('99+'); + }); - it('returns exact number for count < 100', () => { - expect(gl.text.highCountTrim(45)).toBe(45); - }); + it('returns exact number for count < 100', () => { + expect(gl.text.highCountTrim(45)).toBe(45); }); + }); - describe('gl.text.insertText', () => { - let textArea; + describe('gl.text.insertText', () => { + let textArea; - beforeAll(() => { - textArea = document.createElement('textarea'); - document.querySelector('body').appendChild(textArea); - }); + beforeAll(() => { + textArea = document.createElement('textarea'); + document.querySelector('body').appendChild(textArea); + }); - afterAll(() => { - textArea.parentNode.removeChild(textArea); - }); + afterAll(() => { + textArea.parentNode.removeChild(textArea); + }); - describe('without selection', () => { - it('inserts the tag on an empty line', () => { - const initialValue = ''; + describe('without selection', () => { + it('inserts the tag on an empty line', () => { + const initialValue = ''; - textArea.value = initialValue; - textArea.selectionStart = 0; - textArea.selectionEnd = 0; + textArea.value = initialValue; + textArea.selectionStart = 0; + textArea.selectionEnd = 0; - gl.text.insertText(textArea, textArea.value, '*', null, '', false); + gl.text.insertText(textArea, textArea.value, '*', null, '', false); - expect(textArea.value).toEqual(`${initialValue}* `); - }); + expect(textArea.value).toEqual(`${initialValue}* `); + }); - it('inserts the tag on a new line if the current one is not empty', () => { - const initialValue = 'some text'; + it('inserts the tag on a new line if the current one is not empty', () => { + const initialValue = 'some text'; - 
textArea.value = initialValue; - textArea.setSelectionRange(initialValue.length, initialValue.length); + textArea.value = initialValue; + textArea.setSelectionRange(initialValue.length, initialValue.length); - gl.text.insertText(textArea, textArea.value, '*', null, '', false); + gl.text.insertText(textArea, textArea.value, '*', null, '', false); - expect(textArea.value).toEqual(`${initialValue}\n* `); - }); + expect(textArea.value).toEqual(`${initialValue}\n* `); + }); - it('inserts the tag on the same line if the current line only contains spaces', () => { - const initialValue = ' '; + it('inserts the tag on the same line if the current line only contains spaces', () => { + const initialValue = ' '; - textArea.value = initialValue; - textArea.setSelectionRange(initialValue.length, initialValue.length); + textArea.value = initialValue; + textArea.setSelectionRange(initialValue.length, initialValue.length); - gl.text.insertText(textArea, textArea.value, '*', null, '', false); + gl.text.insertText(textArea, textArea.value, '*', null, '', false); - expect(textArea.value).toEqual(`${initialValue}* `); - }); + expect(textArea.value).toEqual(`${initialValue}* `); + }); - it('inserts the tag on the same line if the current line only contains tabs', () => { - const initialValue = '\t\t\t'; + it('inserts the tag on the same line if the current line only contains tabs', () => { + const initialValue = '\t\t\t'; - textArea.value = initialValue; - textArea.setSelectionRange(initialValue.length, initialValue.length); + textArea.value = initialValue; + textArea.setSelectionRange(initialValue.length, initialValue.length); - gl.text.insertText(textArea, textArea.value, '*', null, '', false); + gl.text.insertText(textArea, textArea.value, '*', null, '', false); - expect(textArea.value).toEqual(`${initialValue}* `); - }); + expect(textArea.value).toEqual(`${initialValue}* `); }); }); }); -})(); +}); diff --git a/spec/javascripts/line_highlighter_spec.js b/spec/javascripts/line_highlighter_spec.js index a1fd2d38968..aee274641e8 100644 --- a/spec/javascripts/line_highlighter_spec.js +++ b/spec/javascripts/line_highlighter_spec.js @@ -1,7 +1,7 @@ /* eslint-disable space-before-function-paren, no-var, no-param-reassign, quotes, prefer-template, no-else-return, new-cap, dot-notation, no-return-assign, comma-dangle, no-new, one-var, one-var-declaration-per-line, jasmine/no-spec-dupes, no-underscore-dangle, max-len */ /* global LineHighlighter */ -require('~/line_highlighter'); +import '~/line_highlighter'; (function() { describe('LineHighlighter', function() { @@ -58,7 +58,7 @@ require('~/line_highlighter'); return expect(func).not.toThrow(); }); }); - describe('#clickHandler', function() { + describe('clickHandler', function() { it('handles clicking on a child icon element', function() { var spy; spy = spyOn(this["class"], 'setHash').and.callThrough(); @@ -176,7 +176,7 @@ require('~/line_highlighter'); }); }); }); - describe('#hashToRange', function() { + describe('hashToRange', function() { beforeEach(function() { return this.subject = this["class"].hashToRange; }); @@ -190,7 +190,7 @@ require('~/line_highlighter'); return expect(this.subject('#foo')).toEqual([null, null]); }); }); - describe('#highlightLine', function() { + describe('highlightLine', function() { beforeEach(function() { return this.subject = this["class"].highlightLine; }); @@ -203,7 +203,7 @@ require('~/line_highlighter'); return expect($('#LC13')).toHaveClass(this.css); }); }); - return describe('#setHash', function() { + return 
describe('setHash', function() { beforeEach(function() { return this.subject = this["class"].setHash; }); diff --git a/spec/javascripts/merge_request_notes_spec.js b/spec/javascripts/merge_request_notes_spec.js new file mode 100644 index 00000000000..e54acfa8e44 --- /dev/null +++ b/spec/javascripts/merge_request_notes_spec.js @@ -0,0 +1,61 @@ +/* global Notes */ + +import 'vendor/autosize'; +import '~/gl_form'; +import '~/lib/utils/text_utility'; +import '~/render_gfm'; +import '~/render_math'; +import '~/notes'; + +describe('Merge request notes', () => { + window.gon = window.gon || {}; + window.gl = window.gl || {}; + gl.utils = gl.utils || {}; + + const fixture = 'merge_requests/diff_comment.html.raw'; + preloadFixtures(fixture); + + beforeEach(() => { + loadFixtures(fixture); + gl.utils.disableButtonIfEmptyField = _.noop; + window.project_uploads_path = 'http://test.host/uploads'; + $('body').data('page', 'projects:merge_requests:show'); + window.gon.current_user_id = $('.note:last').data('author-id'); + + return new Notes('', []); + }); + + describe('up arrow', () => { + it('edits last comment when triggered in main form', () => { + const upArrowEvent = $.Event('keydown'); + upArrowEvent.which = 38; + + spyOnEvent('.note:last .js-note-edit', 'click'); + + $('.js-note-text').trigger(upArrowEvent); + + expect('click').toHaveBeenTriggeredOn('.note:last .js-note-edit'); + }); + + it('edits last comment in discussion when triggered in discussion form', (done) => { + const upArrowEvent = $.Event('keydown'); + upArrowEvent.which = 38; + + spyOnEvent('.note-discussion .js-note-edit', 'click'); + + $('.js-discussion-reply-button').click(); + + setTimeout(() => { + expect( + $('.note-discussion .js-note-text'), + ).toExist(); + + $('.note-discussion .js-note-text').trigger(upArrowEvent); + + expect('click').toHaveBeenTriggeredOn('.note-discussion .js-note-edit'); + + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/merge_request_spec.js b/spec/javascripts/merge_request_spec.js index fd97dced870..1173fa40947 100644 --- a/spec/javascripts/merge_request_spec.js +++ b/spec/javascripts/merge_request_spec.js @@ -1,7 +1,7 @@ /* eslint-disable space-before-function-paren, no-return-assign */ /* global MergeRequest */ -require('~/merge_request'); +import '~/merge_request'; (function() { describe('MergeRequest', function() { diff --git a/spec/javascripts/merge_request_tabs_spec.js b/spec/javascripts/merge_request_tabs_spec.js index 7b9632be84e..3d1706aab68 100644 --- a/spec/javascripts/merge_request_tabs_spec.js +++ b/spec/javascripts/merge_request_tabs_spec.js @@ -1,9 +1,13 @@ /* eslint-disable no-var, comma-dangle, object-shorthand */ -require('~/merge_request_tabs'); -require('~/breakpoints'); -require('~/lib/utils/common_utils'); -require('vendor/jquery.scrollTo'); +import '~/merge_request_tabs'; +import '~/commit/pipelines/pipelines_bundle'; +import '~/breakpoints'; +import '~/lib/utils/common_utils'; +import '~/diff'; +import '~/single_file_diff'; +import '~/files_comment_button'; +import 'vendor/jquery.scrollTo'; (function () { // TODO: remove this hack! 
@@ -39,10 +43,11 @@ require('vendor/jquery.scrollTo'); }); afterEach(function () { - this.class.destroy(); + this.class.unbindEvents(); + this.class.destroyPipelinesView(); }); - describe('#activateTab', function () { + describe('activateTab', function () { beforeEach(function () { spyOn($, 'ajax').and.callFake(function () {}); loadFixtures('merge_requests/merge_request_with_task_list.html.raw'); @@ -65,7 +70,8 @@ require('vendor/jquery.scrollTo'); expect($('#diffs')).toHaveClass('active'); }); }); - describe('#opensInNewTab', function () { + + describe('opensInNewTab', function () { var tabUrl; var windowTarget = '_blank'; @@ -116,6 +122,7 @@ require('vendor/jquery.scrollTo'); stopImmediatePropagation: function () {} }); }); + it('opens page tab in a new browser tab with Cmd+Click - Mac', function () { spyOn(window, 'open').and.callFake(function (url, name) { expect(url).toEqual(tabUrl); @@ -129,6 +136,7 @@ require('vendor/jquery.scrollTo'); stopImmediatePropagation: function () {} }); }); + it('opens page tab in a new browser tab with Middle-click - Mac/PC', function () { spyOn(window, 'open').and.callFake(function (url, name) { expect(url).toEqual(tabUrl); @@ -144,11 +152,12 @@ require('vendor/jquery.scrollTo'); }); }); - describe('#setCurrentAction', function () { + describe('setCurrentAction', function () { beforeEach(function () { spyOn($, 'ajax').and.callFake(function () {}); this.subject = this.class.setCurrentAction; }); + it('changes from commits', function () { setLocation({ pathname: '/foo/bar/merge_requests/1/commits' @@ -156,13 +165,16 @@ require('vendor/jquery.scrollTo'); expect(this.subject('notes')).toBe('/foo/bar/merge_requests/1'); expect(this.subject('diffs')).toBe('/foo/bar/merge_requests/1/diffs'); }); + it('changes from diffs', function () { setLocation({ pathname: '/foo/bar/merge_requests/1/diffs' }); + expect(this.subject('notes')).toBe('/foo/bar/merge_requests/1'); expect(this.subject('commits')).toBe('/foo/bar/merge_requests/1/commits'); }); + it('changes from diffs.html', function () { setLocation({ pathname: '/foo/bar/merge_requests/1/diffs.html' @@ -170,6 +182,7 @@ require('vendor/jquery.scrollTo'); expect(this.subject('notes')).toBe('/foo/bar/merge_requests/1'); expect(this.subject('commits')).toBe('/foo/bar/merge_requests/1/commits'); }); + it('changes from notes', function () { setLocation({ pathname: '/foo/bar/merge_requests/1' @@ -177,6 +190,7 @@ require('vendor/jquery.scrollTo'); expect(this.subject('diffs')).toBe('/foo/bar/merge_requests/1/diffs'); expect(this.subject('commits')).toBe('/foo/bar/merge_requests/1/commits'); }); + it('includes search parameters and hash string', function () { setLocation({ pathname: '/foo/bar/merge_requests/1/diffs', @@ -185,6 +199,7 @@ require('vendor/jquery.scrollTo'); }); expect(this.subject('show')).toBe('/foo/bar/merge_requests/1?view=parallel#L15-35'); }); + it('replaces the current history state', function () { var newState; setLocation({ @@ -197,6 +212,7 @@ require('vendor/jquery.scrollTo'); }, document.title, newState); } }); + it('treats "show" like "notes"', function () { setLocation({ pathname: '/foo/bar/merge_requests/1/commits' @@ -205,14 +221,18 @@ require('vendor/jquery.scrollTo'); }); }); - describe('#tabShown', () => { + describe('tabShown', () => { beforeEach(function () { + spyOn($, 'ajax').and.callFake(function (options) { + options.success({ html: '' }); + }); loadFixtures('merge_requests/merge_request_with_task_list.html.raw'); }); describe('with "Side-by-side"/parallel diff view', () => { 
beforeEach(function () { this.class.diffViewType = () => 'parallel'; + gl.Diff.prototype.diffViewType = () => 'parallel'; }); it('maintains `container-limited` for pipelines tab', function (done) { @@ -224,7 +244,6 @@ require('vendor/jquery.scrollTo'); }); }); }; - asyncClick('.merge-request-tabs .pipelines-tab a') .then(() => asyncClick('.merge-request-tabs .diffs-tab a')) .then(() => asyncClick('.merge-request-tabs .pipelines-tab a')) @@ -237,10 +256,32 @@ require('vendor/jquery.scrollTo'); done.fail(`Something went wrong clicking MR tabs: ${err.message}\n${err.stack}`); }); }); + + it('maintains `container-limited` when switching from "Changes" tab before it loads', function (done) { + const asyncClick = function (selector) { + return new Promise((resolve) => { + setTimeout(() => { + document.querySelector(selector).click(); + resolve(); + }); + }); + }; + + asyncClick('.merge-request-tabs .diffs-tab a') + .then(() => asyncClick('.merge-request-tabs .notes-tab a')) + .then(() => { + const hasContainerLimitedClass = document.querySelector('.content-wrapper .container-fluid').classList.contains('container-limited'); + expect(hasContainerLimitedClass).toBe(true); + }) + .then(done) + .catch((err) => { + done.fail(`Something went wrong clicking MR tabs: ${err.message}\n${err.stack}`); + }); + }); }); }); - describe('#loadDiff', function () { + describe('loadDiff', function () { it('requires an absolute pathname', function () { spyOn($, 'ajax').and.callFake(function (options) { expect(options.url).toEqual('/foo/bar/merge_requests/1/diffs.json'); diff --git a/spec/javascripts/merge_request_widget_spec.js b/spec/javascripts/merge_request_widget_spec.js deleted file mode 100644 index d5193b41c33..00000000000 --- a/spec/javascripts/merge_request_widget_spec.js +++ /dev/null @@ -1,192 +0,0 @@ -/* eslint-disable space-before-function-paren, quotes, comma-dangle, dot-notation, quote-props, no-var, max-len */ - -require('~/merge_request_widget'); -require('~/smart_interval'); -require('~/lib/utils/datetime_utility'); - -(function() { - describe('MergeRequestWidget', function() { - beforeEach(function() { - window.notifyPermissions = function() {}; - window.notify = function() {}; - this.opts = { - ci_status_url: "http://sampledomain.local/ci/getstatus", - ci_environments_status_url: "http://sampledomain.local/ci/getenvironmentsstatus", - ci_status: "", - ci_message: { - normal: "Build {{status}} for \"{{title}}\"", - preparing: "{{status}} build for \"{{title}}\"" - }, - ci_title: { - preparing: "{{status}} build", - normal: "Build {{status}}" - }, - gitlab_icon: "gitlab_logo.png", - ci_pipeline: 80, - ci_sha: "12a34bc5", - builds_path: "http://sampledomain.local/sampleBuildsPath", - commits_path: "http://sampledomain.local/commits", - pipeline_path: "http://sampledomain.local/pipelines" - }; - this["class"] = new window.gl.MergeRequestWidget(this.opts); - }); - - describe('getCIEnvironmentsStatus', function() { - beforeEach(function() { - this.ciEnvironmentsStatusData = [{ - created_at: '2016-09-12T13:38:30.636Z', - environment_id: 1, - environment_name: 'env1', - external_url: 'https://test-url.com', - external_url_formatted: 'test-url.com' - }]; - - spyOn(jQuery, 'getJSON').and.callFake(function(req, cb) { - cb(this.ciEnvironmentsStatusData); - }.bind(this)); - }); - - it('should call renderEnvironments when the environments property is set', function() { - const spy = spyOn(this.class, 'renderEnvironments').and.stub(); - this.class.getCIEnvironmentsStatus(); - 
expect(spy).toHaveBeenCalledWith(this.ciEnvironmentsStatusData); - }); - - it('should not call renderEnvironments when the environments property is not set', function() { - this.ciEnvironmentsStatusData = null; - const spy = spyOn(this.class, 'renderEnvironments').and.stub(); - this.class.getCIEnvironmentsStatus(); - expect(spy).not.toHaveBeenCalled(); - }); - }); - - describe('renderEnvironments', function() { - describe('should render correct timeago', function() { - beforeEach(function() { - this.environments = [{ - id: 'test-environment-id', - url: 'testurl', - deployed_at: new Date().toISOString(), - deployed_at_formatted: true - }]; - }); - - function getTimeagoText(template) { - var el = document.createElement('html'); - el.innerHTML = template; - return el.querySelector('.js-environment-timeago').innerText.trim(); - } - - it('should render less than a minute ago text', function() { - spyOn(this.class.$widgetBody, 'before').and.callFake(function(template) { - expect(getTimeagoText(template)).toBe('less than a minute ago.'); - }); - - this.class.renderEnvironments(this.environments); - }); - - it('should render about an hour ago text', function() { - var oneHourAgo = new Date(); - oneHourAgo.setHours(oneHourAgo.getHours() - 1); - - this.environments[0].deployed_at = oneHourAgo.toISOString(); - spyOn(this.class.$widgetBody, 'before').and.callFake(function(template) { - expect(getTimeagoText(template)).toBe('about an hour ago.'); - }); - - this.class.renderEnvironments(this.environments); - }); - - it('should render about 2 hours ago text', function() { - var twoHoursAgo = new Date(); - twoHoursAgo.setHours(twoHoursAgo.getHours() - 2); - - this.environments[0].deployed_at = twoHoursAgo.toISOString(); - spyOn(this.class.$widgetBody, 'before').and.callFake(function(template) { - expect(getTimeagoText(template)).toBe('about 2 hours ago.'); - }); - - this.class.renderEnvironments(this.environments); - }); - }); - }); - - describe('mergeInProgress', function() { - it('should display error with h4 tag', function() { - spyOn(this.class.$widgetBody, 'html').and.callFake(function(html) { - expect(html).toBe('<h4>Sorry, something went wrong.</h4>'); - }); - spyOn($, 'ajax').and.callFake(function(e) { - e.success({ merge_error: 'Sorry, something went wrong.' 
}); - }); - this.class.mergeInProgress(null); - }); - }); - - describe('getCIStatus', function() { - beforeEach(function() { - this.ciStatusData = { - "title": "Sample MR title", - "pipeline": 80, - "sha": "12a34bc5", - "status": "success", - "coverage": 98 - }; - - spyOn(jQuery, 'getJSON').and.callFake((function(_this) { - return function(req, cb) { - return cb(_this.ciStatusData); - }; - })(this)); - }); - it('should call showCIStatus even if a notification should not be displayed', function() { - var spy; - spy = spyOn(this["class"], 'showCIStatus').and.stub(); - this["class"].getCIStatus(false); - return expect(spy).toHaveBeenCalledWith(this.ciStatusData.status); - }); - it('should call showCIStatus when a notification should be displayed', function() { - var spy; - spy = spyOn(this["class"], 'showCIStatus').and.stub(); - this["class"].getCIStatus(true); - return expect(spy).toHaveBeenCalledWith(this.ciStatusData.status); - }); - it('should call showCICoverage when the coverage rate is set', function() { - var spy; - spy = spyOn(this["class"], 'showCICoverage').and.stub(); - this["class"].getCIStatus(false); - return expect(spy).toHaveBeenCalledWith(this.ciStatusData.coverage); - }); - it('should not call showCICoverage when the coverage rate is not set', function() { - var spy; - this.ciStatusData.coverage = null; - spy = spyOn(this["class"], 'showCICoverage').and.stub(); - this["class"].getCIStatus(false); - return expect(spy).not.toHaveBeenCalled(); - }); - it('should not display a notification on the first check after the widget has been created', function() { - var spy; - spy = spyOn(window, 'notify'); - this["class"] = new window.gl.MergeRequestWidget(this.opts); - this["class"].getCIStatus(true); - return expect(spy).not.toHaveBeenCalled(); - }); - it('should update the pipeline URL when the pipeline changes', function() { - var spy; - spy = spyOn(this["class"], 'updatePipelineUrls').and.stub(); - this["class"].getCIStatus(false); - this.ciStatusData.pipeline += 1; - this["class"].getCIStatus(false); - return expect(spy).toHaveBeenCalled(); - }); - it('should update the commit URL when the sha changes', function() { - var spy; - spy = spyOn(this["class"], 'updateCommitUrls').and.stub(); - this["class"].getCIStatus(false); - this.ciStatusData.sha = "9b50b99a"; - this["class"].getCIStatus(false); - return expect(spy).toHaveBeenCalled(); - }); - }); - }); -}).call(window); diff --git a/spec/javascripts/mini_pipeline_graph_dropdown_spec.js b/spec/javascripts/mini_pipeline_graph_dropdown_spec.js index e504d41d4d4..481b46c3ac6 100644 --- a/spec/javascripts/mini_pipeline_graph_dropdown_spec.js +++ b/spec/javascripts/mini_pipeline_graph_dropdown_spec.js @@ -3,70 +3,84 @@ import MiniPipelineGraph from '~/mini_pipeline_graph_dropdown'; import '~/flash'; -(() => { - describe('Mini Pipeline Graph Dropdown', () => { - preloadFixtures('static/mini_dropdown_graph.html.raw'); +describe('Mini Pipeline Graph Dropdown', () => { + preloadFixtures('static/mini_dropdown_graph.html.raw'); - beforeEach(() => { - loadFixtures('static/mini_dropdown_graph.html.raw'); - }); + beforeEach(() => { + loadFixtures('static/mini_dropdown_graph.html.raw'); + }); - describe('When is initialized', () => { - it('should initialize without errors when no options are given', () => { - const miniPipelineGraph = new MiniPipelineGraph(); + describe('When is initialized', () => { + it('should initialize without errors when no options are given', () => { + const miniPipelineGraph = new MiniPipelineGraph(); - 
expect(miniPipelineGraph.dropdownListSelector).toEqual('.js-builds-dropdown-container'); - }); + expect(miniPipelineGraph.dropdownListSelector).toEqual('.js-builds-dropdown-container'); + }); - it('should set the container as the given prop', () => { - const container = '.foo'; + it('should set the container as the given prop', () => { + const container = '.foo'; - const miniPipelineGraph = new MiniPipelineGraph({ container }); + const miniPipelineGraph = new MiniPipelineGraph({ container }); - expect(miniPipelineGraph.container).toEqual(container); - }); + expect(miniPipelineGraph.container).toEqual(container); }); + }); - describe('When dropdown is clicked', () => { - it('should call getBuildsList', () => { - const getBuildsListSpy = spyOn(MiniPipelineGraph.prototype, 'getBuildsList').and.callFake(function () {}); + describe('When dropdown is clicked', () => { + it('should call getBuildsList', () => { + const getBuildsListSpy = spyOn( + MiniPipelineGraph.prototype, + 'getBuildsList', + ).and.callFake(function () {}); - new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); + new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); - document.querySelector('.js-builds-dropdown-button').click(); + document.querySelector('.js-builds-dropdown-button').click(); - expect(getBuildsListSpy).toHaveBeenCalled(); - }); + expect(getBuildsListSpy).toHaveBeenCalled(); + }); - it('should make a request to the endpoint provided in the html', () => { - const ajaxSpy = spyOn($, 'ajax').and.callFake(function () {}); + it('should make a request to the endpoint provided in the html', () => { + const ajaxSpy = spyOn($, 'ajax').and.callFake(function () {}); - new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); + new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); - document.querySelector('.js-builds-dropdown-button').click(); - expect(ajaxSpy.calls.allArgs()[0][0].url).toEqual('foobar'); - }); + document.querySelector('.js-builds-dropdown-button').click(); + expect(ajaxSpy.calls.allArgs()[0][0].url).toEqual('foobar'); + }); - it('should not close when user uses cmd/ctrl + click', () => { - spyOn($, 'ajax').and.callFake(function (params) { - params.success({ - html: `<li> - <a class="mini-pipeline-graph-dropdown-item" href="#"> - <span class="ci-status-icon ci-status-icon-failed"></span> - <span class="ci-build-text">build</span> - </a> - <a class="ci-action-icon-wrapper js-ci-action-icon" href="#"></a> - </li>`, - }); + it('should not close when user uses cmd/ctrl + click', () => { + spyOn($, 'ajax').and.callFake(function (params) { + params.success({ + html: `<li> + <a class="mini-pipeline-graph-dropdown-item" href="#"> + <span class="ci-status-icon ci-status-icon-failed"></span> + <span class="ci-build-text">build</span> + </a> + <a class="ci-action-icon-wrapper js-ci-action-icon" href="#"></a> + </li>`, }); - new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); + }); + new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); - document.querySelector('.js-builds-dropdown-button').click(); + document.querySelector('.js-builds-dropdown-button').click(); - document.querySelector('a.mini-pipeline-graph-dropdown-item').click(); + document.querySelector('a.mini-pipeline-graph-dropdown-item').click(); - expect($('.js-builds-dropdown-list').is(':visible')).toEqual(true); - }); + expect($('.js-builds-dropdown-list').is(':visible')).toEqual(true); }); }); -})(); + + 
it('should close the dropdown when request returns an error', (done) => { + spyOn($, 'ajax').and.callFake(options => options.error()); + + new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); + + document.querySelector('.js-builds-dropdown-button').click(); + + setTimeout(() => { + expect($('.js-builds-dropdown-tests .dropdown').hasClass('open')).toEqual(false); + done(); + }, 0); + }); +}); diff --git a/spec/javascripts/monitoring/deployments_spec.js b/spec/javascripts/monitoring/deployments_spec.js new file mode 100644 index 00000000000..19bc11d0f24 --- /dev/null +++ b/spec/javascripts/monitoring/deployments_spec.js @@ -0,0 +1,133 @@ +import d3 from 'd3'; +import PrometheusGraph from '~/monitoring/prometheus_graph'; +import Deployments from '~/monitoring/deployments'; +import { prometheusMockData } from './prometheus_mock_data'; + +describe('Metrics deployments', () => { + const fixtureName = 'environments/metrics/metrics.html.raw'; + let deployment; + let prometheusGraph; + + const graphElement = () => document.querySelector('.prometheus-graph'); + + preloadFixtures(fixtureName); + + beforeEach((done) => { + // Setup the view + loadFixtures(fixtureName); + + d3.selectAll('.prometheus-graph') + .append('g') + .attr('class', 'graph-container'); + + prometheusGraph = new PrometheusGraph(); + deployment = new Deployments(1000, 500); + + spyOn(prometheusGraph, 'init'); + spyOn($, 'ajax').and.callFake(() => { + const d = $.Deferred(); + d.resolve({ + deployments: [{ + id: 1, + created_at: deployment.chartData[10].time, + sha: 'testing', + tag: false, + ref: { + name: 'testing', + }, + }, { + id: 2, + created_at: deployment.chartData[15].time, + sha: '', + tag: true, + ref: { + name: 'tag', + }, + }], + }); + + setTimeout(done); + + return d.promise(); + }); + + prometheusGraph.configureGraph(); + prometheusGraph.transformData(prometheusMockData.metrics); + + deployment.init(prometheusGraph.graphSpecificProperties.memory_values.data); + }); + + it('creates line on graph for deploment', () => { + expect( + graphElement().querySelectorAll('.deployment-line').length, + ).toBe(2); + }); + + it('creates hidden deploy boxes', () => { + expect( + graphElement().querySelectorAll('.prometheus-graph .js-deploy-info-box').length, + ).toBe(2); + }); + + it('hides the info boxes by default', () => { + expect( + graphElement().querySelectorAll('.prometheus-graph .js-deploy-info-box.hidden').length, + ).toBe(2); + }); + + it('shows sha short code when tag is false', () => { + expect( + graphElement().querySelector('.deploy-info-1-cpu_values .js-deploy-info-box').textContent.trim(), + ).toContain('testin'); + }); + + it('shows ref name when tag is true', () => { + expect( + graphElement().querySelector('.deploy-info-2-cpu_values .js-deploy-info-box').textContent.trim(), + ).toContain('tag'); + }); + + it('shows info box when moving mouse over line', () => { + deployment.mouseOverDeployInfo(deployment.data[0].xPos, 'cpu_values'); + + expect( + graphElement().querySelectorAll('.prometheus-graph .js-deploy-info-box.hidden').length, + ).toBe(1); + + expect( + graphElement().querySelector('.deploy-info-1-cpu_values .js-deploy-info-box.hidden'), + ).toBeNull(); + }); + + it('hides previously visible info box when moving mouse away', () => { + deployment.mouseOverDeployInfo(500, 'cpu_values'); + + expect( + graphElement().querySelectorAll('.prometheus-graph .js-deploy-info-box.hidden').length, + ).toBe(2); + + expect( + graphElement().querySelector('.deploy-info-1-cpu_values 
.js-deploy-info-box.hidden'), + ).not.toBeNull(); + }); + + describe('refText', () => { + it('returns shortened SHA', () => { + expect( + Deployments.refText({ + tag: false, + sha: '123456789', + }), + ).toBe('123456'); + }); + + it('returns tag name', () => { + expect( + Deployments.refText({ + tag: true, + ref: 'v1.0', + }), + ).toBe('v1.0'); + }); + }); +}); diff --git a/spec/javascripts/monitoring/prometheus_graph_spec.js b/spec/javascripts/monitoring/prometheus_graph_spec.js index a3c1c5e1b7c..25578bf1c6e 100644 --- a/spec/javascripts/monitoring/prometheus_graph_spec.js +++ b/spec/javascripts/monitoring/prometheus_graph_spec.js @@ -1,10 +1,9 @@ import 'jquery'; -import '~/lib/utils/common_utils'; import PrometheusGraph from '~/monitoring/prometheus_graph'; import { prometheusMockData } from './prometheus_mock_data'; describe('PrometheusGraph', () => { - const fixtureName = 'static/environments/metrics.html.raw'; + const fixtureName = 'environments/metrics/metrics.html.raw'; const prometheusGraphContainer = '.prometheus-graph'; const prometheusGraphContents = `${prometheusGraphContainer}[graph-type=cpu_values]`; @@ -12,6 +11,7 @@ describe('PrometheusGraph', () => { beforeEach(() => { loadFixtures(fixtureName); + $('.prometheus-container').data('has-metrics', 'true'); this.prometheusGraph = new PrometheusGraph(); const self = this; const fakeInit = (metricsResponse) => { @@ -37,9 +37,11 @@ describe('PrometheusGraph', () => { it('transforms the data', () => { this.prometheusGraph.init(prometheusMockData.metrics); - expect(this.prometheusGraph.data).toBeDefined(); - expect(this.prometheusGraph.data.cpu_values.length).toBe(121); - expect(this.prometheusGraph.data.memory_values.length).toBe(121); + Object.keys(this.prometheusGraph.graphSpecificProperties, (key) => { + const graphProps = this.prometheusGraph.graphSpecificProperties[key]; + expect(graphProps.data).toBeDefined(); + expect(graphProps.data.length).toBe(121); + }); }); it('creates two graphs', () => { @@ -68,8 +70,29 @@ describe('PrometheusGraph', () => { expect($prometheusGraphContents.find('.label-y-axis-line')).toBeDefined(); expect($prometheusGraphContents.find('.label-axis-text')).toBeDefined(); expect($prometheusGraphContents.find('.rect-axis-text')).toBeDefined(); - expect($axisLabelContainer.find('rect').length).toBe(2); + expect($axisLabelContainer.find('rect').length).toBe(3); expect($axisLabelContainer.find('text').length).toBe(4); }); }); }); + +describe('PrometheusGraphs UX states', () => { + const fixtureName = 'environments/metrics/metrics.html.raw'; + preloadFixtures(fixtureName); + + beforeEach(() => { + loadFixtures(fixtureName); + this.prometheusGraph = new PrometheusGraph(); + }); + + it('shows a specified state', () => { + this.prometheusGraph.state = '.js-getting-started'; + this.prometheusGraph.updateState(); + const $state = $('.js-getting-started'); + expect($state).toBeDefined(); + expect($('.state-title', $state)).toBeDefined(); + expect($('.state-svg', $state)).toBeDefined(); + expect($('.state-description', $state)).toBeDefined(); + expect($('.state-button', $state)).toBeDefined(); + }); +}); diff --git a/spec/javascripts/new_branch_spec.js b/spec/javascripts/new_branch_spec.js index 90a429beeca..c57f44dae17 100644 --- a/spec/javascripts/new_branch_spec.js +++ b/spec/javascripts/new_branch_spec.js @@ -1,7 +1,7 @@ /* eslint-disable space-before-function-paren, one-var, no-var, one-var-declaration-per-line, no-return-assign, quotes, max-len */ /* global NewBranchForm */ -require('~/new_branch_form'); 
+import '~/new_branch_form'; (function() { describe('Branch', function() { diff --git a/spec/javascripts/notebook/cells/code_spec.js b/spec/javascripts/notebook/cells/code_spec.js new file mode 100644 index 00000000000..0c432d73f67 --- /dev/null +++ b/spec/javascripts/notebook/cells/code_spec.js @@ -0,0 +1,55 @@ +import Vue from 'vue'; +import CodeComponent from '~/notebook/cells/code.vue'; + +const Component = Vue.extend(CodeComponent); + +describe('Code component', () => { + let vm; + let json; + + beforeEach(() => { + json = getJSONFixture('blob/notebook/basic.json'); + }); + + describe('without output', () => { + beforeEach((done) => { + vm = new Component({ + propsData: { + cell: json.cells[0], + }, + }); + vm.$mount(); + + setTimeout(() => { + done(); + }); + }); + + it('does not render output prompt', () => { + expect(vm.$el.querySelectorAll('.prompt').length).toBe(1); + }); + }); + + describe('with output', () => { + beforeEach((done) => { + vm = new Component({ + propsData: { + cell: json.cells[2], + }, + }); + vm.$mount(); + + setTimeout(() => { + done(); + }); + }); + + it('does not render output prompt', () => { + expect(vm.$el.querySelectorAll('.prompt').length).toBe(2); + }); + + it('renders output cell', () => { + expect(vm.$el.querySelector('.output')).toBeDefined(); + }); + }); +}); diff --git a/spec/javascripts/notebook/cells/markdown_spec.js b/spec/javascripts/notebook/cells/markdown_spec.js new file mode 100644 index 00000000000..38c976f38d8 --- /dev/null +++ b/spec/javascripts/notebook/cells/markdown_spec.js @@ -0,0 +1,41 @@ +import Vue from 'vue'; +import MarkdownComponent from '~/notebook/cells/markdown.vue'; + +const Component = Vue.extend(MarkdownComponent); + +describe('Markdown component', () => { + let vm; + let cell; + let json; + + beforeEach((done) => { + json = getJSONFixture('blob/notebook/basic.json'); + + cell = json.cells[1]; + + vm = new Component({ + propsData: { + cell, + }, + }); + vm.$mount(); + + setTimeout(() => { + done(); + }); + }); + + it('does not render promot', () => { + expect(vm.$el.querySelector('.prompt span')).toBeNull(); + }); + + it('does not render the markdown text', () => { + expect( + vm.$el.querySelector('.markdown').innerHTML.trim(), + ).not.toEqual(cell.source.join('')); + }); + + it('renders the markdown HTML', () => { + expect(vm.$el.querySelector('.markdown h1')).not.toBeNull(); + }); +}); diff --git a/spec/javascripts/notebook/cells/output/index_spec.js b/spec/javascripts/notebook/cells/output/index_spec.js new file mode 100644 index 00000000000..dbf79f85c7c --- /dev/null +++ b/spec/javascripts/notebook/cells/output/index_spec.js @@ -0,0 +1,126 @@ +import Vue from 'vue'; +import CodeComponent from '~/notebook/cells/output/index.vue'; + +const Component = Vue.extend(CodeComponent); + +describe('Output component', () => { + let vm; + let json; + + const createComponent = (output) => { + vm = new Component({ + propsData: { + output, + count: 1, + }, + }); + vm.$mount(); + }; + + beforeEach(() => { + json = getJSONFixture('blob/notebook/basic.json'); + }); + + describe('text output', () => { + beforeEach((done) => { + createComponent(json.cells[2].outputs[0]); + + setTimeout(() => { + done(); + }); + }); + + it('renders as plain text', () => { + expect(vm.$el.querySelector('pre')).not.toBeNull(); + }); + + it('renders promot', () => { + expect(vm.$el.querySelector('.prompt span')).not.toBeNull(); + }); + }); + + describe('image output', () => { + beforeEach((done) => { + createComponent(json.cells[3].outputs[0]); + + 
setTimeout(() => { + done(); + }); + }); + + it('renders as an image', () => { + expect(vm.$el.querySelector('img')).not.toBeNull(); + }); + + it('does not render the prompt', () => { + expect(vm.$el.querySelector('.prompt span')).toBeNull(); + }); + }); + + describe('html output', () => { + beforeEach((done) => { + createComponent(json.cells[4].outputs[0]); + + setTimeout(() => { + done(); + }); + }); + + it('renders raw HTML', () => { + expect(vm.$el.querySelector('p')).not.toBeNull(); + expect(vm.$el.textContent.trim()).toBe('test'); + }); + + it('does not render the prompt', () => { + expect(vm.$el.querySelector('.prompt span')).toBeNull(); + }); + }); + + describe('svg output', () => { + beforeEach((done) => { + createComponent(json.cells[5].outputs[0]); + + setTimeout(() => { + done(); + }); + }); + + it('renders as an svg', () => { + expect(vm.$el.querySelector('svg')).not.toBeNull(); + }); + + it('does not render the prompt', () => { + expect(vm.$el.querySelector('.prompt span')).toBeNull(); + }); + }); + + describe('default to plain text', () => { + beforeEach((done) => { + createComponent(json.cells[6].outputs[0]); + + setTimeout(() => { + done(); + }); + }); + + it('renders as plain text', () => { + expect(vm.$el.querySelector('pre')).not.toBeNull(); + expect(vm.$el.textContent.trim()).toContain('testing'); + }); + + it('renders promot', () => { + expect(vm.$el.querySelector('.prompt span')).not.toBeNull(); + }); + + it('renders as plain text when doesn\'t recognise other types', (done) => { + createComponent(json.cells[7].outputs[0]); + + setTimeout(() => { + expect(vm.$el.querySelector('pre')).not.toBeNull(); + expect(vm.$el.textContent.trim()).toContain('testing'); + + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/notebook/cells/prompt_spec.js b/spec/javascripts/notebook/cells/prompt_spec.js new file mode 100644 index 00000000000..207fa433a59 --- /dev/null +++ b/spec/javascripts/notebook/cells/prompt_spec.js @@ -0,0 +1,56 @@ +import Vue from 'vue'; +import PromptComponent from '~/notebook/cells/prompt.vue'; + +const Component = Vue.extend(PromptComponent); + +describe('Prompt component', () => { + let vm; + + describe('input', () => { + beforeEach((done) => { + vm = new Component({ + propsData: { + type: 'In', + count: 1, + }, + }); + vm.$mount(); + + setTimeout(() => { + done(); + }); + }); + + it('renders in label', () => { + expect(vm.$el.textContent.trim()).toContain('In'); + }); + + it('renders count', () => { + expect(vm.$el.textContent.trim()).toContain('1'); + }); + }); + + describe('output', () => { + beforeEach((done) => { + vm = new Component({ + propsData: { + type: 'Out', + count: 1, + }, + }); + vm.$mount(); + + setTimeout(() => { + done(); + }); + }); + + it('renders in label', () => { + expect(vm.$el.textContent.trim()).toContain('Out'); + }); + + it('renders count', () => { + expect(vm.$el.textContent.trim()).toContain('1'); + }); + }); +}); diff --git a/spec/javascripts/notebook/index_spec.js b/spec/javascripts/notebook/index_spec.js new file mode 100644 index 00000000000..bd63ab35426 --- /dev/null +++ b/spec/javascripts/notebook/index_spec.js @@ -0,0 +1,98 @@ +import Vue from 'vue'; +import Notebook from '~/notebook/index.vue'; + +const Component = Vue.extend(Notebook); + +describe('Notebook component', () => { + let vm; + let json; + let jsonWithWorksheet; + + beforeEach(() => { + json = getJSONFixture('blob/notebook/basic.json'); + jsonWithWorksheet = getJSONFixture('blob/notebook/worksheets.json'); + }); + + describe('without JSON', () => 
{ + beforeEach((done) => { + vm = new Component({ + propsData: { + notebook: {}, + }, + }); + vm.$mount(); + + setTimeout(() => { + done(); + }); + }); + + it('does not render', () => { + expect(vm.$el.tagName).toBeUndefined(); + }); + }); + + describe('with JSON', () => { + beforeEach((done) => { + vm = new Component({ + propsData: { + notebook: json, + codeCssClass: 'js-code-class', + }, + }); + vm.$mount(); + + setTimeout(() => { + done(); + }); + }); + + it('renders cells', () => { + expect(vm.$el.querySelectorAll('.cell').length).toBe(json.cells.length); + }); + + it('renders markdown cell', () => { + expect(vm.$el.querySelector('.markdown')).not.toBeNull(); + }); + + it('renders code cell', () => { + expect(vm.$el.querySelector('pre')).not.toBeNull(); + }); + + it('add code class to code blocks', () => { + expect(vm.$el.querySelector('.js-code-class')).not.toBeNull(); + }); + }); + + describe('with worksheets', () => { + beforeEach((done) => { + vm = new Component({ + propsData: { + notebook: jsonWithWorksheet, + codeCssClass: 'js-code-class', + }, + }); + vm.$mount(); + + setTimeout(() => { + done(); + }); + }); + + it('renders cells', () => { + expect(vm.$el.querySelectorAll('.cell').length).toBe(jsonWithWorksheet.worksheets[0].cells.length); + }); + + it('renders markdown cell', () => { + expect(vm.$el.querySelector('.markdown')).not.toBeNull(); + }); + + it('renders code cell', () => { + expect(vm.$el.querySelector('pre')).not.toBeNull(); + }); + + it('add code class to code blocks', () => { + expect(vm.$el.querySelector('.js-code-class')).not.toBeNull(); + }); + }); +}); diff --git a/spec/javascripts/notebook/lib/highlight_spec.js b/spec/javascripts/notebook/lib/highlight_spec.js new file mode 100644 index 00000000000..d71c5718858 --- /dev/null +++ b/spec/javascripts/notebook/lib/highlight_spec.js @@ -0,0 +1,15 @@ +import Prism from '~/notebook/lib/highlight'; + +describe('Highlight library', () => { + it('imports python language', () => { + expect(Prism.languages.python).toBeDefined(); + }); + + it('uses custom CSS classes', () => { + const el = document.createElement('div'); + el.innerHTML = Prism.highlight('console.log("a");', Prism.languages.javascript); + + expect(el.querySelector('.s')).not.toBeNull(); + expect(el.querySelector('.nf')).not.toBeNull(); + }); +}); diff --git a/spec/javascripts/notes_spec.js b/spec/javascripts/notes_spec.js index d81a5bbb6a5..025f08ee332 100644 --- a/spec/javascripts/notes_spec.js +++ b/spec/javascripts/notes_spec.js @@ -1,10 +1,12 @@ /* eslint-disable space-before-function-paren, no-unused-expressions, no-var, object-shorthand, comma-dangle, max-len */ /* global Notes */ -require('~/notes'); -require('vendor/autosize'); -require('~/gl_form'); -require('~/lib/utils/text_utility'); +import 'vendor/autosize'; +import '~/gl_form'; +import '~/lib/utils/text_utility'; +import '~/render_gfm'; +import '~/render_math'; +import '~/notes'; (function() { window.gon || (window.gon = {}); @@ -12,6 +14,7 @@ require('~/lib/utils/text_utility'); gl.utils = gl.utils || {}; describe('Notes', function() { + const FLASH_TYPE_ALERT = 'alert'; var commentsTemplate = 'issues/issue_with_comment.html.raw'; preloadFixtures(commentsTemplate); @@ -24,10 +27,10 @@ require('~/lib/utils/text_utility'); describe('task lists', function() { beforeEach(function() { - $('form').on('submit', function(e) { + $('.js-comment-button').on('click', function(e) { e.preventDefault(); }); - this.notes = new Notes(); + this.notes = new Notes('', []); }); it('modifies the Markdown field', 
function() { @@ -49,7 +52,7 @@ require('~/lib/utils/text_utility'); var textarea = '.js-note-text'; beforeEach(function() { - this.notes = new Notes(); + this.notes = new Notes('', []); this.autoSizeSpy = spyOnEvent($(textarea), 'autosize:update'); spyOn(this.notes, 'renderNote').and.stub(); @@ -58,9 +61,12 @@ require('~/lib/utils/text_utility'); reset: function() {} }); - $('form').on('submit', function(e) { + $('.js-comment-button').on('click', (e) => { + const $form = $(this); e.preventDefault(); - $('.js-main-target-form').trigger('ajax:success'); + this.notes.addNote($form); + this.notes.reenableTargetFormSubmitButton(e); + this.notes.resetMainTargetForm(e); }); }); @@ -72,5 +78,540 @@ require('~/lib/utils/text_utility'); expect(this.autoSizeSpy).toHaveBeenTriggered(); }); }); + + describe('updateNote', () => { + let sampleComment; + let noteEntity; + let $form; + let $notesContainer; + + beforeEach(() => { + this.notes = new Notes('', []); + window.gon.current_username = 'root'; + window.gon.current_user_fullname = 'Administrator'; + sampleComment = 'foo'; + noteEntity = { + id: 1234, + html: `<li class="note note-row-1234 timeline-entry" id="note_1234"> + <div class="note-text">${sampleComment}</div> + </li>`, + note: sampleComment, + valid: true + }; + $form = $('form.js-main-target-form'); + $notesContainer = $('ul.main-notes-list'); + $form.find('textarea.js-note-text').val(sampleComment); + }); + + it('updates note and resets edit form', () => { + const deferred = $.Deferred(); + spyOn($, 'ajax').and.returnValue(deferred.promise()); + spyOn(this.notes, 'revertNoteEditForm'); + + $('.js-comment-button').click(); + deferred.resolve(noteEntity); + + const $targetNote = $notesContainer.find(`#note_${noteEntity.id}`); + const updatedNote = Object.assign({}, noteEntity); + updatedNote.note = 'bar'; + this.notes.updateNote(updatedNote, $targetNote); + + expect(this.notes.revertNoteEditForm).toHaveBeenCalledWith($targetNote); + }); + }); + + describe('renderNote', () => { + let notes; + let note; + let $notesList; + + beforeEach(() => { + note = { + id: 1, + discussion_html: null, + valid: true, + note: 'heya', + html: '<div>heya</div>', + }; + $notesList = jasmine.createSpyObj('$notesList', [ + 'find', + 'append', + ]); + + notes = jasmine.createSpyObj('notes', [ + 'setupNewNote', + 'refresh', + 'collapseLongCommitList', + 'updateNotesCount', + 'putConflictEditWarningInPlace' + ]); + notes.taskList = jasmine.createSpyObj('tasklist', ['init']); + notes.note_ids = []; + notes.updatedNotesTrackingMap = {}; + + spyOn(gl.utils, 'localTimeAgo'); + spyOn(Notes, 'isNewNote').and.callThrough(); + spyOn(Notes, 'isUpdatedNote').and.callThrough(); + spyOn(Notes, 'animateAppendNote').and.callThrough(); + spyOn(Notes, 'animateUpdateNote').and.callThrough(); + }); + + describe('when adding note', () => { + it('should call .animateAppendNote', () => { + Notes.isNewNote.and.returnValue(true); + Notes.prototype.renderNote.call(notes, note, null, $notesList); + + expect(Notes.animateAppendNote).toHaveBeenCalledWith(note.html, $notesList); + }); + }); + + describe('when note was edited', () => { + it('should call .animateUpdateNote', () => { + Notes.isNewNote.and.returnValue(false); + Notes.isUpdatedNote.and.returnValue(true); + const $note = $('<div>'); + $notesList.find.and.returnValue($note); + Notes.prototype.renderNote.call(notes, note, null, $notesList); + + expect(Notes.animateUpdateNote).toHaveBeenCalledWith(note.html, $note); + }); + + describe('while editing', () => { + it('should update 
textarea if nothing has been touched', () => { + Notes.isNewNote.and.returnValue(false); + Notes.isUpdatedNote.and.returnValue(true); + const $note = $(`<div class="is-editing"> + <div class="original-note-content">initial</div> + <textarea class="js-note-text">initial</textarea> + </div>`); + $notesList.find.and.returnValue($note); + Notes.prototype.renderNote.call(notes, note, null, $notesList); + + expect($note.find('.js-note-text').val()).toEqual(note.note); + }); + + it('should call .putConflictEditWarningInPlace', () => { + Notes.isNewNote.and.returnValue(false); + Notes.isUpdatedNote.and.returnValue(true); + const $note = $(`<div class="is-editing"> + <div class="original-note-content">initial</div> + <textarea class="js-note-text">different</textarea> + </div>`); + $notesList.find.and.returnValue($note); + Notes.prototype.renderNote.call(notes, note, null, $notesList); + + expect(notes.putConflictEditWarningInPlace).toHaveBeenCalledWith(note, $note); + }); + }); + }); + }); + + describe('isUpdatedNote', () => { + it('should consider same note text as the same', () => { + const result = Notes.isUpdatedNote( + { + note: 'initial' + }, + $(`<div> + <div class="original-note-content">initial</div> + </div>`) + ); + + expect(result).toEqual(false); + }); + + it('should consider same note with trailing newline as the same', () => { + const result = Notes.isUpdatedNote( + { + note: 'initial\n' + }, + $(`<div> + <div class="original-note-content">initial\n</div> + </div>`) + ); + + expect(result).toEqual(false); + }); + + it('should consider different notes as different', () => { + const result = Notes.isUpdatedNote( + { + note: 'foo' + }, + $(`<div> + <div class="original-note-content">bar</div> + </div>`) + ); + + expect(result).toEqual(true); + }); + }); + + describe('renderDiscussionNote', () => { + let discussionContainer; + let note; + let notes; + let $form; + let row; + + beforeEach(() => { + note = { + html: '<li></li>', + discussion_html: '<div></div>', + discussion_id: 1, + discussion_resolvable: false, + diff_discussion_html: false, + }; + $form = jasmine.createSpyObj('$form', ['closest', 'find']); + row = jasmine.createSpyObj('row', ['prevAll', 'first', 'find']); + + notes = jasmine.createSpyObj('notes', [ + 'isParallelView', + 'updateNotesCount', + ]); + notes.note_ids = []; + + spyOn(gl.utils, 'localTimeAgo'); + spyOn(Notes, 'isNewNote'); + spyOn(Notes, 'animateAppendNote'); + Notes.isNewNote.and.returnValue(true); + notes.isParallelView.and.returnValue(false); + row.prevAll.and.returnValue(row); + row.first.and.returnValue(row); + row.find.and.returnValue(row); + }); + + describe('Discussion root note', () => { + let body; + + beforeEach(() => { + body = jasmine.createSpyObj('body', ['attr']); + discussionContainer = { length: 0 }; + + $form.closest.and.returnValues(row, $form); + $form.find.and.returnValues(discussionContainer); + body.attr.and.returnValue(''); + + Notes.prototype.renderDiscussionNote.call(notes, note, $form); + }); + + it('should call Notes.animateAppendNote', () => { + expect(Notes.animateAppendNote).toHaveBeenCalledWith(note.discussion_html, $('.main-notes-list')); + }); + }); + + describe('Discussion sub note', () => { + beforeEach(() => { + discussionContainer = { length: 1 }; + + $form.closest.and.returnValues(row, $form); + $form.find.and.returnValues(discussionContainer); + + Notes.prototype.renderDiscussionNote.call(notes, note, $form); + }); + + it('should call Notes.animateAppendNote', () => { + 
expect(Notes.animateAppendNote).toHaveBeenCalledWith(note.html, discussionContainer); + }); + }); + }); + + describe('animateAppendNote', () => { + let noteHTML; + let $notesList; + let $resultantNote; + + beforeEach(() => { + noteHTML = '<div></div>'; + $notesList = jasmine.createSpyObj('$notesList', ['append']); + + $resultantNote = Notes.animateAppendNote(noteHTML, $notesList); + }); + + it('should have `fade-in-full` class', () => { + expect($resultantNote.hasClass('fade-in-full')).toEqual(true); + }); + + it('should append note to the notes list', () => { + expect($notesList.append).toHaveBeenCalledWith($resultantNote); + }); + }); + + describe('animateUpdateNote', () => { + let noteHTML; + let $note; + let $updatedNote; + + beforeEach(() => { + noteHTML = '<div></div>'; + $note = jasmine.createSpyObj('$note', [ + 'replaceWith' + ]); + + $updatedNote = Notes.animateUpdateNote(noteHTML, $note); + }); + + it('should have `fade-in` class', () => { + expect($updatedNote.hasClass('fade-in')).toEqual(true); + }); + + it('should call replaceWith on $note', () => { + expect($note.replaceWith).toHaveBeenCalledWith($updatedNote); + }); + }); + + describe('postComment & updateComment', () => { + const sampleComment = 'foo'; + const updatedComment = 'bar'; + const note = { + id: 1234, + html: `<li class="note note-row-1234 timeline-entry" id="note_1234"> + <div class="note-text">${sampleComment}</div> + </li>`, + note: sampleComment, + valid: true + }; + let $form; + let $notesContainer; + + beforeEach(() => { + this.notes = new Notes('', []); + window.gon.current_username = 'root'; + window.gon.current_user_fullname = 'Administrator'; + $form = $('form.js-main-target-form'); + $notesContainer = $('ul.main-notes-list'); + $form.find('textarea.js-note-text').val(sampleComment); + }); + + it('should show placeholder note while new comment is being posted', () => { + $('.js-comment-button').click(); + expect($notesContainer.find('.note.being-posted').length > 0).toEqual(true); + }); + + it('should remove placeholder note when new comment is done posting', () => { + const deferred = $.Deferred(); + spyOn($, 'ajax').and.returnValue(deferred.promise()); + $('.js-comment-button').click(); + + deferred.resolve(note); + expect($notesContainer.find('.note.being-posted').length).toEqual(0); + }); + + it('should show actual note element when new comment is done posting', () => { + const deferred = $.Deferred(); + spyOn($, 'ajax').and.returnValue(deferred.promise()); + $('.js-comment-button').click(); + + deferred.resolve(note); + expect($notesContainer.find(`#note_${note.id}`).length > 0).toEqual(true); + }); + + it('should reset Form when new comment is done posting', () => { + const deferred = $.Deferred(); + spyOn($, 'ajax').and.returnValue(deferred.promise()); + $('.js-comment-button').click(); + + deferred.resolve(note); + expect($form.find('textarea.js-note-text').val()).toEqual(''); + }); + + it('should show flash error message when new comment failed to be posted', () => { + const deferred = $.Deferred(); + spyOn($, 'ajax').and.returnValue(deferred.promise()); + $('.js-comment-button').click(); + + deferred.reject(); + expect($notesContainer.parent().find('.flash-container .flash-text').is(':visible')).toEqual(true); + }); + + it('should show flash error message when comment failed to be updated', () => { + const deferred = $.Deferred(); + spyOn($, 'ajax').and.returnValue(deferred.promise()); + $('.js-comment-button').click(); + + deferred.resolve(note); + const $noteEl = 
$notesContainer.find(`#note_${note.id}`);
+        $noteEl.find('.js-note-edit').click();
+        $noteEl.find('textarea.js-note-text').val(updatedComment);
+        $noteEl.find('.js-comment-save-button').click();
+
+        deferred.reject();
+        const $updatedNoteEl = $notesContainer.find(`#note_${note.id}`);
+        expect($updatedNoteEl.hasClass('.being-posted')).toEqual(false); // Remove being-posted visuals
+        expect($updatedNoteEl.find('.note-text').text().trim()).toEqual(sampleComment); // See if comment reverted back to original
+        expect($('.flash-container').is(':visible')).toEqual(true); // Flash error message shown
+      });
+    });
+
+    describe('getFormData', () => {
+      it('should return form metadata object from form reference', () => {
+        this.notes = new Notes('', []);
+
+        const $form = $('form');
+        const sampleComment = 'foobar';
+        $form.find('textarea.js-note-text').val(sampleComment);
+        const { formData, formContent, formAction } = this.notes.getFormData($form);
+
+        expect(formData.indexOf(sampleComment) > -1).toBe(true);
+        expect(formContent).toEqual(sampleComment);
+        expect(formAction).toEqual($form.attr('action'));
+      });
+    });
+
+    describe('hasSlashCommands', () => {
+      beforeEach(() => {
+        this.notes = new Notes('', []);
+      });
+
+      it('should return true when comment begins with a slash command', () => {
+        const sampleComment = '/wip\n/milestone %1.0\n/merge\n/unassign Merging this';
+        const hasSlashCommands = this.notes.hasSlashCommands(sampleComment);
+
+        expect(hasSlashCommands).toBeTruthy();
+      });
+
+      it('should return false when comment does NOT begin with a slash command', () => {
+        const sampleComment = 'Hey, /unassign Merging this';
+        const hasSlashCommands = this.notes.hasSlashCommands(sampleComment);
+
+        expect(hasSlashCommands).toBeFalsy();
+      });
+
+      it('should return false when comment does NOT have any slash commands', () => {
+        const sampleComment = 'Looking good, Awesome!';
+        const hasSlashCommands = this.notes.hasSlashCommands(sampleComment);
+
+        expect(hasSlashCommands).toBeFalsy();
+      });
+    });
+
+    describe('stripSlashCommands', () => {
+      it('should strip slash commands from the comment which begins with a slash command', () => {
+        this.notes = new Notes();
+        const sampleComment = '/wip\n/milestone %1.0\n/merge\n/unassign Merging this';
+        const stripedComment = this.notes.stripSlashCommands(sampleComment);
+
+        expect(stripedComment).toBe('');
+      });
+
+      it('should strip slash commands from the comment but leaves plain comment if it is present', () => {
+        this.notes = new Notes();
+        const sampleComment = '/wip\n/milestone %1.0\n/merge\n/unassign\nMerging this';
+        const stripedComment = this.notes.stripSlashCommands(sampleComment);
+
+        expect(stripedComment).toBe('Merging this');
+      });
+
+      it('should NOT strip string that has slashes within', () => {
+        this.notes = new Notes();
+        const sampleComment = 'http://127.0.0.1:3000/root/gitlab-shell/issues/1';
+        const stripedComment = this.notes.stripSlashCommands(sampleComment);
+
+        expect(stripedComment).toBe(sampleComment);
+      });
+    });
+
+    describe('createPlaceholderNote', () => {
+      const sampleComment = 'foobar';
+      const uniqueId = 'b1234-a4567';
+      const currentUsername = 'root';
+      const currentUserFullname = 'Administrator';
+
+      beforeEach(() => {
+        this.notes = new Notes('', []);
+        spyOn(_, 'escape').and.callFake((comment) => {
+          const escapedString = comment.replace(/["&'<>]/g, (a) => {
+            const escapedToken = {
+              '&': '&amp;',
+              '<': '&lt;',
+              '>': '&gt;',
+              '"': '&quot;',
+              "'": '&#x27;',
+              '`': '&#x60;'
+            }[a];
+
+            return escapedToken;
+          });
+
+          return escapedString;
+        });
+      });
+
+      it('should return constructed placeholder element for regular note based on form contents', () => {
+        const $tempNote = this.notes.createPlaceholderNote({
+          formContent: sampleComment,
+          uniqueId,
+          isDiscussionNote: false,
+          currentUsername,
+          currentUserFullname
+        });
+        const $tempNoteHeader = $tempNote.find('.note-header');
+
+        expect($tempNote.prop('nodeName')).toEqual('LI');
+        expect($tempNote.attr('id')).toEqual(uniqueId);
+        $tempNote.find('.timeline-icon > a, .note-header-info > a').each(function() {
+          expect($(this).attr('href')).toEqual(`/${currentUsername}`);
+        });
+        expect($tempNote.find('.timeline-content').hasClass('discussion')).toBeFalsy();
+        expect($tempNoteHeader.find('.hidden-xs').text().trim()).toEqual(currentUserFullname);
+        expect($tempNoteHeader.find('.note-headline-light').text().trim()).toEqual(`@${currentUsername}`);
+        expect($tempNote.find('.note-body .note-text p').text().trim()).toEqual(sampleComment);
+      });
+
+      it('should escape HTML characters from note based on form contents', () => {
+        const commentWithHtml = '<script>alert("Boom!");</script>';
+        const $tempNote = this.notes.createPlaceholderNote({
+          formContent: commentWithHtml,
+          uniqueId,
+          isDiscussionNote: false,
+          currentUsername,
+          currentUserFullname
+        });
+
+        expect(_.escape).toHaveBeenCalledWith(commentWithHtml);
+        expect($tempNote.find('.note-body .note-text p').html()).toEqual('&lt;script&gt;alert("Boom!");&lt;/script&gt;');
+      });
+
+      it('should return constructed placeholder element for discussion note based on form contents', () => {
+        const $tempNote = this.notes.createPlaceholderNote({
+          formContent: sampleComment,
+          uniqueId,
+          isDiscussionNote: true,
+          currentUsername,
+          currentUserFullname
+        });
+
+        expect($tempNote.prop('nodeName')).toEqual('LI');
+        expect($tempNote.find('.timeline-content').hasClass('discussion')).toBeTruthy();
+      });
+    });
+
+    describe('appendFlash', () => {
+      beforeEach(() => {
+        this.notes = new Notes();
+      });
+
+      it('shows a flash message', () => {
+        this.notes.addFlash('Error message', FLASH_TYPE_ALERT, this.notes.parentTimeline);
+
+        expect(document.querySelectorAll('.flash-alert').length).toBe(1);
+      });
+    });
+
+    describe('clearFlash', () => {
+      beforeEach(() => {
+        $(document).off('ajax:success');
+        this.notes = new Notes();
+      });
+
+      it('removes all the associated flash messages', () => {
+        this.notes.addFlash('Error message 1', FLASH_TYPE_ALERT, this.notes.parentTimeline);
+        this.notes.addFlash('Error message 2', FLASH_TYPE_ALERT, this.notes.parentTimeline);
+
+        this.notes.clearFlash();
+
+        expect(document.querySelectorAll('.flash-alert').length).toBe(0);
+      });
+    });
   });
 }).call(window);
diff --git a/spec/javascripts/pager_spec.js b/spec/javascripts/pager_spec.js
index d966226909b..1d3e1263371 100644
--- a/spec/javascripts/pager_spec.js
+++ b/spec/javascripts/pager_spec.js
@@ -1,6 +1,6 @@
 /* global fixture */

-require('~/pager');
+import '~/pager';

 describe('pager', () => {
   const Pager = window.Pager;
diff --git a/spec/javascripts/pdf/index_spec.js b/spec/javascripts/pdf/index_spec.js
new file mode 100644
index 00000000000..f661fae5fe2
--- /dev/null
+++ b/spec/javascripts/pdf/index_spec.js
@@ -0,0 +1,61 @@
+/* eslint-disable import/no-unresolved */
+
+import Vue from 'vue';
+import { PDFJS } from 'pdfjs-dist';
+import workerSrc from 'vendor/pdf.worker';
+
+import PDFLab from '~/pdf/index.vue';
+import pdf from '../fixtures/blob/pdf/test.pdf';
+
+PDFJS.workerSrc = workerSrc;
+const Component = Vue.extend(PDFLab);
+
+describe('PDF component', () => {
+  let vm;
+ const checkLoaded = (done) => { + if (vm.loading) { + setTimeout(() => { + checkLoaded(done); + }, 100); + } else { + done(); + } + }; + + describe('without PDF data', () => { + beforeEach((done) => { + vm = new Component({ + propsData: { + pdf: '', + }, + }); + + vm.$mount(); + + checkLoaded(done); + }); + + it('does not render', () => { + expect(vm.$el.tagName).toBeUndefined(); + }); + }); + + describe('with PDF data', () => { + beforeEach((done) => { + vm = new Component({ + propsData: { + pdf, + }, + }); + + vm.$mount(); + + checkLoaded(done); + }); + + it('renders pdf component', () => { + expect(vm.$el.tagName).toBeDefined(); + }); + }); +}); diff --git a/spec/javascripts/pdf/page_spec.js b/spec/javascripts/pdf/page_spec.js new file mode 100644 index 00000000000..ac76ebbfbe6 --- /dev/null +++ b/spec/javascripts/pdf/page_spec.js @@ -0,0 +1,57 @@ +/* eslint-disable import/no-unresolved */ + +import Vue from 'vue'; +import pdfjsLib from 'pdfjs-dist'; +import workerSrc from 'vendor/pdf.worker'; + +import PageComponent from '~/pdf/page/index.vue'; +import testPDF from '../fixtures/blob/pdf/test.pdf'; + +const Component = Vue.extend(PageComponent); + +describe('Page component', () => { + let vm; + let testPage; + pdfjsLib.PDFJS.workerSrc = workerSrc; + + const checkRendered = (done) => { + if (vm.rendering) { + setTimeout(() => { + checkRendered(done); + }, 100); + } else { + done(); + } + }; + + beforeEach((done) => { + pdfjsLib.getDocument(testPDF) + .then(pdf => pdf.getPage(1)) + .then((page) => { + testPage = page; + done(); + }) + .catch((error) => { + console.error(error); + }); + }); + + describe('render', () => { + beforeEach((done) => { + vm = new Component({ + propsData: { + page: testPage, + number: 1, + }, + }); + + vm.$mount(); + + checkRendered(done); + }); + + it('renders first page', () => { + expect(vm.$el.tagName).toBeDefined(); + }); + }); +}); diff --git a/spec/javascripts/pipeline_schedules/interval_pattern_input_spec.js b/spec/javascripts/pipeline_schedules/interval_pattern_input_spec.js new file mode 100644 index 00000000000..845b371d90c --- /dev/null +++ b/spec/javascripts/pipeline_schedules/interval_pattern_input_spec.js @@ -0,0 +1,175 @@ +import Vue from 'vue'; +import IntervalPatternInput from '~/pipeline_schedules/components/interval_pattern_input'; + +const IntervalPatternInputComponent = Vue.extend(IntervalPatternInput); +const inputNameAttribute = 'schedule[cron]'; + +const cronIntervalPresets = { + everyDay: '0 4 * * *', + everyWeek: '0 4 * * 0', + everyMonth: '0 4 1 * *', +}; + +window.gl = window.gl || {}; + +window.gl.pipelineScheduleFieldErrors = { + updateFormValidityState: () => {}, +}; + +describe('Interval Pattern Input Component', function () { + describe('when prop initialCronInterval is passed (edit)', function () { + describe('when prop initialCronInterval is custom', function () { + beforeEach(function () { + this.initialCronInterval = '1 2 3 4 5'; + this.intervalPatternComponent = new IntervalPatternInputComponent({ + propsData: { + initialCronInterval: this.initialCronInterval, + }, + }).$mount(); + }); + + it('is initialized as a Vue component', function () { + expect(this.intervalPatternComponent).toBeDefined(); + }); + + it('prop initialCronInterval is set', function () { + expect(this.intervalPatternComponent.initialCronInterval).toBe(this.initialCronInterval); + }); + + it('sets isEditable to true', function (done) { + Vue.nextTick(() => { + expect(this.intervalPatternComponent.isEditable).toBe(true); + done(); + }); + }); + }); + + 
describe('when prop initialCronInterval is preset', function () { + beforeEach(function () { + this.intervalPatternComponent = new IntervalPatternInputComponent({ + propsData: { + inputNameAttribute, + initialCronInterval: '0 4 * * *', + }, + }).$mount(); + }); + + it('is initialized as a Vue component', function () { + expect(this.intervalPatternComponent).toBeDefined(); + }); + + it('sets isEditable to false', function (done) { + Vue.nextTick(() => { + expect(this.intervalPatternComponent.isEditable).toBe(false); + done(); + }); + }); + }); + }); + + describe('when prop initialCronInterval is not passed (new)', function () { + beforeEach(function () { + this.intervalPatternComponent = new IntervalPatternInputComponent({ + propsData: { + inputNameAttribute, + }, + }).$mount(); + }); + + it('is initialized as a Vue component', function () { + expect(this.intervalPatternComponent).toBeDefined(); + }); + + it('prop initialCronInterval is set', function () { + const defaultInitialCronInterval = ''; + expect(this.intervalPatternComponent.initialCronInterval).toBe(defaultInitialCronInterval); + }); + + it('sets isEditable to true', function (done) { + Vue.nextTick(() => { + expect(this.intervalPatternComponent.isEditable).toBe(true); + done(); + }); + }); + }); + + describe('User Actions', function () { + beforeEach(function () { + // For an unknown reason, Phantom.js doesn't trigger click events + // on radio buttons in a way Vue can register. So, we have to mount + // to a fixture. + setFixtures('<div id="my-mount"></div>'); + + this.initialCronInterval = '1 2 3 4 5'; + this.intervalPatternComponent = new IntervalPatternInputComponent({ + propsData: { + initialCronInterval: this.initialCronInterval, + }, + }).$mount('#my-mount'); + }); + + it('cronInterval is updated when everyday preset interval is selected', function (done) { + this.intervalPatternComponent.$el.querySelector('#every-day').click(); + + Vue.nextTick(() => { + expect(this.intervalPatternComponent.cronInterval).toBe(cronIntervalPresets.everyDay); + expect(this.intervalPatternComponent.$el.querySelector('.cron-interval-input').value).toBe(cronIntervalPresets.everyDay); + done(); + }); + }); + + it('cronInterval is updated when everyweek preset interval is selected', function (done) { + this.intervalPatternComponent.$el.querySelector('#every-week').click(); + + Vue.nextTick(() => { + expect(this.intervalPatternComponent.cronInterval).toBe(cronIntervalPresets.everyWeek); + expect(this.intervalPatternComponent.$el.querySelector('.cron-interval-input').value).toBe(cronIntervalPresets.everyWeek); + + done(); + }); + }); + + it('cronInterval is updated when everymonth preset interval is selected', function (done) { + this.intervalPatternComponent.$el.querySelector('#every-month').click(); + + Vue.nextTick(() => { + expect(this.intervalPatternComponent.cronInterval).toBe(cronIntervalPresets.everyMonth); + expect(this.intervalPatternComponent.$el.querySelector('.cron-interval-input').value).toBe(cronIntervalPresets.everyMonth); + done(); + }); + }); + + it('only a space is added to cronInterval (trimmed later) when custom radio is selected', function (done) { + this.intervalPatternComponent.$el.querySelector('#every-month').click(); + this.intervalPatternComponent.$el.querySelector('#custom').click(); + + Vue.nextTick(() => { + const intervalWithSpaceAppended = `${cronIntervalPresets.everyMonth} `; + expect(this.intervalPatternComponent.cronInterval).toBe(intervalWithSpaceAppended); + 
expect(this.intervalPatternComponent.$el.querySelector('.cron-interval-input').value).toBe(intervalWithSpaceAppended); + done(); + }); + }); + + it('text input is disabled when preset interval is selected', function (done) { + this.intervalPatternComponent.$el.querySelector('#every-month').click(); + + Vue.nextTick(() => { + expect(this.intervalPatternComponent.isEditable).toBe(false); + expect(this.intervalPatternComponent.$el.querySelector('.cron-interval-input').disabled).toBe(true); + done(); + }); + }); + + it('text input is enabled when custom is selected', function (done) { + this.intervalPatternComponent.$el.querySelector('#every-month').click(); + this.intervalPatternComponent.$el.querySelector('#custom').click(); + + Vue.nextTick(() => { + expect(this.intervalPatternComponent.isEditable).toBe(true); + expect(this.intervalPatternComponent.$el.querySelector('.cron-interval-input').disabled).toBe(false); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/pipeline_schedules/pipeline_schedule_callout_spec.js b/spec/javascripts/pipeline_schedules/pipeline_schedule_callout_spec.js new file mode 100644 index 00000000000..6120d224ac0 --- /dev/null +++ b/spec/javascripts/pipeline_schedules/pipeline_schedule_callout_spec.js @@ -0,0 +1,106 @@ +import Vue from 'vue'; +import Cookies from 'js-cookie'; +import PipelineSchedulesCallout from '~/pipeline_schedules/components/pipeline_schedules_callout'; + +const PipelineSchedulesCalloutComponent = Vue.extend(PipelineSchedulesCallout); +const cookieKey = 'pipeline_schedules_callout_dismissed'; +const docsUrl = 'help/ci/scheduled_pipelines'; + +describe('Pipeline Schedule Callout', () => { + beforeEach(() => { + setFixtures(` + <div id='pipeline-schedules-callout' data-docs-url=${docsUrl}></div> + `); + }); + + describe('independent of cookies', () => { + beforeEach(() => { + this.calloutComponent = new PipelineSchedulesCalloutComponent().$mount(); + }); + + it('the component can be initialized', () => { + expect(this.calloutComponent).toBeDefined(); + }); + + it('correctly sets illustrationSvg', () => { + expect(this.calloutComponent.illustrationSvg).toContain('<svg'); + }); + + it('correctly sets docsUrl', () => { + expect(this.calloutComponent.docsUrl).toContain(docsUrl); + }); + }); + + describe(`when ${cookieKey} cookie is set`, () => { + beforeEach(() => { + Cookies.set(cookieKey, true); + this.calloutComponent = new PipelineSchedulesCalloutComponent().$mount(); + }); + + it('correctly sets calloutDismissed to true', () => { + expect(this.calloutComponent.calloutDismissed).toBe(true); + }); + + it('does not render the callout', () => { + expect(this.calloutComponent.$el.childNodes.length).toBe(0); + }); + }); + + describe('when cookie is not set', () => { + beforeEach(() => { + Cookies.remove(cookieKey); + this.calloutComponent = new PipelineSchedulesCalloutComponent().$mount(); + }); + + it('correctly sets calloutDismissed to false', () => { + expect(this.calloutComponent.calloutDismissed).toBe(false); + }); + + it('renders the callout container', () => { + expect(this.calloutComponent.$el.querySelector('.bordered-box')).not.toBeNull(); + }); + + it('renders the callout svg', () => { + expect(this.calloutComponent.$el.outerHTML).toContain('<svg'); + }); + + it('renders the callout title', () => { + expect(this.calloutComponent.$el.outerHTML).toContain('Scheduling Pipelines'); + }); + + it('renders the callout text', () => { + expect(this.calloutComponent.$el.outerHTML).toContain('runs pipelines in the future'); + }); + + 
it('renders the documentation url', () => { + expect(this.calloutComponent.$el.outerHTML).toContain(docsUrl); + }); + + it('updates calloutDismissed when close button is clicked', (done) => { + this.calloutComponent.$el.querySelector('#dismiss-callout-btn').click(); + + Vue.nextTick(() => { + expect(this.calloutComponent.calloutDismissed).toBe(true); + done(); + }); + }); + + it('#dismissCallout updates calloutDismissed', (done) => { + this.calloutComponent.dismissCallout(); + + Vue.nextTick(() => { + expect(this.calloutComponent.calloutDismissed).toBe(true); + done(); + }); + }); + + it('is hidden when close button is clicked', (done) => { + this.calloutComponent.$el.querySelector('#dismiss-callout-btn').click(); + + Vue.nextTick(() => { + expect(this.calloutComponent.$el.childNodes.length).toBe(0); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_pipelines_index/async_button_spec.js b/spec/javascripts/pipelines/async_button_spec.js index bc8e504c413..28c9c7ab282 100644 --- a/spec/javascripts/vue_pipelines_index/async_button_spec.js +++ b/spec/javascripts/pipelines/async_button_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import asyncButtonComp from '~/vue_pipelines_index/components/async_button'; +import asyncButtonComp from '~/pipelines/components/async_button.vue'; describe('Pipelines Async Button', () => { let component; diff --git a/spec/javascripts/vue_pipelines_index/empty_state_spec.js b/spec/javascripts/pipelines/empty_state_spec.js index 733337168dc..bb47a28d9fe 100644 --- a/spec/javascripts/vue_pipelines_index/empty_state_spec.js +++ b/spec/javascripts/pipelines/empty_state_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import emptyStateComp from '~/vue_pipelines_index/components/empty_state'; +import emptyStateComp from '~/pipelines/components/empty_state.vue'; describe('Pipelines Empty State', () => { let component; diff --git a/spec/javascripts/vue_pipelines_index/error_state_spec.js b/spec/javascripts/pipelines/error_state_spec.js index 524e018b1fa..f667d351f72 100644 --- a/spec/javascripts/vue_pipelines_index/error_state_spec.js +++ b/spec/javascripts/pipelines/error_state_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import errorStateComp from '~/vue_pipelines_index/components/error_state'; +import errorStateComp from '~/pipelines/components/error_state.vue'; describe('Pipelines Error State', () => { let component; diff --git a/spec/javascripts/pipelines/graph/action_component_spec.js b/spec/javascripts/pipelines/graph/action_component_spec.js new file mode 100644 index 00000000000..f033956c071 --- /dev/null +++ b/spec/javascripts/pipelines/graph/action_component_spec.js @@ -0,0 +1,40 @@ +import Vue from 'vue'; +import actionComponent from '~/pipelines/components/graph/action_component.vue'; + +describe('pipeline graph action component', () => { + let component; + + beforeEach(() => { + const ActionComponent = Vue.extend(actionComponent); + component = new ActionComponent({ + propsData: { + tooltipText: 'bar', + link: 'foo', + actionMethod: 'post', + actionIcon: 'icon_action_cancel', + }, + }).$mount(); + }); + + it('should render a link', () => { + expect(component.$el.getAttribute('href')).toEqual('foo'); + }); + + it('should render the provided title as a bootstrap tooltip', () => { + expect(component.$el.getAttribute('data-original-title')).toEqual('bar'); + }); + + it('should update bootstrap tooltip when title changes', (done) => { + component.tooltipText = 'changed'; + + Vue.nextTick(() => { + 
expect(component.$el.getAttribute('data-original-title')).toBe('changed'); + done(); + }); + }); + + it('should render an svg', () => { + expect(component.$el.querySelector('.ci-action-icon-wrapper')).toBeDefined(); + expect(component.$el.querySelector('svg')).toBeDefined(); + }); +}); diff --git a/spec/javascripts/pipelines/graph/dropdown_action_component_spec.js b/spec/javascripts/pipelines/graph/dropdown_action_component_spec.js new file mode 100644 index 00000000000..14ff1b0d25c --- /dev/null +++ b/spec/javascripts/pipelines/graph/dropdown_action_component_spec.js @@ -0,0 +1,30 @@ +import Vue from 'vue'; +import dropdownActionComponent from '~/pipelines/components/graph/dropdown_action_component.vue'; + +describe('action component', () => { + let component; + + beforeEach(() => { + const DropdownActionComponent = Vue.extend(dropdownActionComponent); + component = new DropdownActionComponent({ + propsData: { + tooltipText: 'bar', + link: 'foo', + actionMethod: 'post', + actionIcon: 'icon_action_cancel', + }, + }).$mount(); + }); + + it('should render a link', () => { + expect(component.$el.getAttribute('href')).toEqual('foo'); + }); + + it('should render the provided title as a bootstrap tooltip', () => { + expect(component.$el.getAttribute('data-original-title')).toEqual('bar'); + }); + + it('should render an svg', () => { + expect(component.$el.querySelector('svg')).toBeDefined(); + }); +}); diff --git a/spec/javascripts/pipelines/graph/graph_component_spec.js b/spec/javascripts/pipelines/graph/graph_component_spec.js new file mode 100644 index 00000000000..6bd0eb86263 --- /dev/null +++ b/spec/javascripts/pipelines/graph/graph_component_spec.js @@ -0,0 +1,62 @@ +import Vue from 'vue'; +import graphComponent from '~/pipelines/components/graph/graph_component.vue'; +import graphJSON from './mock_data'; + +describe('graph component', () => { + preloadFixtures('static/graph.html.raw'); + + let GraphComponent; + + beforeEach(() => { + loadFixtures('static/graph.html.raw'); + GraphComponent = Vue.extend(graphComponent); + }); + + describe('while is loading', () => { + it('should render a loading icon', () => { + const component = new GraphComponent().$mount('#js-pipeline-graph-vue'); + expect(component.$el.querySelector('.loading-icon')).toBeDefined(); + }); + }); + + describe('with a successfull response', () => { + const interceptor = (request, next) => { + next(request.respondWith(JSON.stringify(graphJSON), { + status: 200, + })); + }; + + beforeEach(() => { + Vue.http.interceptors.push(interceptor); + }); + + afterEach(() => { + Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor); + }); + + it('should render the graph', (done) => { + const component = new GraphComponent().$mount('#js-pipeline-graph-vue'); + + setTimeout(() => { + expect(component.$el.classList.contains('js-pipeline-graph')).toEqual(true); + + expect( + component.$el.querySelector('.stage-column:first-child').classList.contains('no-margin'), + ).toEqual(true); + + expect( + component.$el.querySelector('.stage-column:nth-child(2)').classList.contains('left-margin'), + ).toEqual(true); + + expect( + component.$el.querySelector('.stage-column:nth-child(2) .build:nth-child(1)').classList.contains('left-connector'), + ).toEqual(true); + + expect(component.$el.querySelector('loading-icon')).toBe(null); + + expect(component.$el.querySelector('.stage-column-list')).toBeDefined(); + done(); + }, 0); + }); + }); +}); diff --git a/spec/javascripts/pipelines/graph/job_component_spec.js 
b/spec/javascripts/pipelines/graph/job_component_spec.js new file mode 100644 index 00000000000..63986b6c0db --- /dev/null +++ b/spec/javascripts/pipelines/graph/job_component_spec.js @@ -0,0 +1,117 @@ +import Vue from 'vue'; +import jobComponent from '~/pipelines/components/graph/job_component.vue'; + +describe('pipeline graph job component', () => { + let JobComponent; + + const mockJob = { + id: 4256, + name: 'test', + status: { + icon: 'icon_status_success', + text: 'passed', + label: 'passed', + group: 'success', + details_path: '/root/ci-mock/builds/4256', + action: { + icon: 'icon_action_retry', + title: 'Retry', + path: '/root/ci-mock/builds/4256/retry', + method: 'post', + }, + }, + }; + + beforeEach(() => { + JobComponent = Vue.extend(jobComponent); + }); + + describe('name with link', () => { + it('should render the job name and status with a link', () => { + const component = new JobComponent({ + propsData: { + job: mockJob, + }, + }).$mount(); + + const link = component.$el.querySelector('a'); + + expect(link.getAttribute('href')).toEqual(mockJob.status.details_path); + + expect( + link.getAttribute('data-original-title'), + ).toEqual(`${mockJob.name} - ${mockJob.status.label}`); + + expect(component.$el.querySelector('.js-status-icon-success')).toBeDefined(); + + expect( + component.$el.querySelector('.ci-status-text').textContent.trim(), + ).toEqual(mockJob.name); + }); + }); + + describe('name without link', () => { + it('it should render status and name', () => { + const component = new JobComponent({ + propsData: { + job: { + id: 4256, + name: 'test', + status: { + icon: 'icon_status_success', + text: 'passed', + label: 'passed', + group: 'success', + details_path: '/root/ci-mock/builds/4256', + }, + }, + }, + }).$mount(); + + expect(component.$el.querySelector('.js-status-icon-success')).toBeDefined(); + + expect( + component.$el.querySelector('.ci-status-text').textContent.trim(), + ).toEqual(mockJob.name); + }); + }); + + describe('action icon', () => { + it('it should render the action icon', () => { + const component = new JobComponent({ + propsData: { + job: mockJob, + }, + }).$mount(); + + expect(component.$el.querySelector('a.ci-action-icon-container')).toBeDefined(); + expect(component.$el.querySelector('i.ci-action-icon-wrapper')).toBeDefined(); + }); + }); + + describe('dropdown', () => { + it('should render the dropdown action icon', () => { + const component = new JobComponent({ + propsData: { + job: mockJob, + isDropdown: true, + }, + }).$mount(); + + expect(component.$el.querySelector('a.ci-action-icon-wrapper')).toBeDefined(); + }); + }); + + it('should render provided class name', () => { + const component = new JobComponent({ + propsData: { + job: mockJob, + cssClassJobName: 'css-class-job-name', + }, + }).$mount(); + + expect( + component.$el.querySelector('a').classList.contains('css-class-job-name'), + ).toBe(true); + }); +}); diff --git a/spec/javascripts/pipelines/graph/job_name_component_spec.js b/spec/javascripts/pipelines/graph/job_name_component_spec.js new file mode 100644 index 00000000000..8e2071ba0b3 --- /dev/null +++ b/spec/javascripts/pipelines/graph/job_name_component_spec.js @@ -0,0 +1,27 @@ +import Vue from 'vue'; +import jobNameComponent from '~/pipelines/components/graph/job_name_component.vue'; + +describe('job name component', () => { + let component; + + beforeEach(() => { + const JobNameComponent = Vue.extend(jobNameComponent); + component = new JobNameComponent({ + propsData: { + name: 'foo', + status: { + icon: 
'icon_status_success', + }, + }, + }).$mount(); + }); + + it('should render the provided name', () => { + expect(component.$el.querySelector('.ci-status-text').textContent.trim()).toEqual('foo'); + }); + + it('should render an icon with the provided status', () => { + expect(component.$el.querySelector('.ci-status-icon-success')).toBeDefined(); + expect(component.$el.querySelector('.ci-status-icon-success svg')).toBeDefined(); + }); +}); diff --git a/spec/javascripts/pipelines/graph/mock_data.js b/spec/javascripts/pipelines/graph/mock_data.js new file mode 100644 index 00000000000..56c522b7f77 --- /dev/null +++ b/spec/javascripts/pipelines/graph/mock_data.js @@ -0,0 +1,232 @@ +/* eslint-disable quote-props, quotes, comma-dangle */ +export default { + "id": 123, + "user": { + "name": "Root", + "username": "root", + "id": 1, + "state": "active", + "avatar_url": null, + "web_url": "http://localhost:3000/root" + }, + "active": false, + "coverage": null, + "path": "/root/ci-mock/pipelines/123", + "details": { + "status": { + "icon": "icon_status_success", + "text": "passed", + "label": "passed", + "group": "success", + "has_details": true, + "details_path": "/root/ci-mock/pipelines/123", + "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico" + }, + "duration": 9, + "finished_at": "2017-04-19T14:30:27.542Z", + "stages": [{ + "name": "test", + "title": "test: passed", + "groups": [{ + "name": "test", + "size": 1, + "status": { + "icon": "icon_status_success", + "text": "passed", + "label": "passed", + "group": "success", + "has_details": true, + "details_path": "/root/ci-mock/builds/4153", + "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico", + "action": { + "icon": "icon_action_retry", + "title": "Retry", + "path": "/root/ci-mock/builds/4153/retry", + "method": "post" + } + }, + "jobs": [{ + "id": 4153, + "name": "test", + "build_path": "/root/ci-mock/builds/4153", + "retry_path": "/root/ci-mock/builds/4153/retry", + "playable": false, + "created_at": "2017-04-13T09:25:18.959Z", + "updated_at": "2017-04-13T09:25:23.118Z", + "status": { + "icon": "icon_status_success", + "text": "passed", + "label": "passed", + "group": "success", + "has_details": true, + "details_path": "/root/ci-mock/builds/4153", + "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico", + "action": { + "icon": "icon_action_retry", + "title": "Retry", + "path": "/root/ci-mock/builds/4153/retry", + "method": "post" + } + } + }] + }], + "status": { + "icon": "icon_status_success", + "text": "passed", + "label": "passed", + "group": "success", + "has_details": true, + "details_path": "/root/ci-mock/pipelines/123#test", + "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico" + }, + "path": "/root/ci-mock/pipelines/123#test", + "dropdown_path": "/root/ci-mock/pipelines/123/stage.json?stage=test" + }, { + "name": "deploy", + "title": "deploy: passed", + "groups": [{ + "name": "deploy to production", + "size": 1, + "status": { + "icon": "icon_status_success", + "text": "passed", + "label": "passed", + "group": "success", + "has_details": true, + "details_path": "/root/ci-mock/builds/4166", + "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico", + "action": { + 
"icon": "icon_action_retry", + "title": "Retry", + "path": "/root/ci-mock/builds/4166/retry", + "method": "post" + } + }, + "jobs": [{ + "id": 4166, + "name": "deploy to production", + "build_path": "/root/ci-mock/builds/4166", + "retry_path": "/root/ci-mock/builds/4166/retry", + "playable": false, + "created_at": "2017-04-19T14:29:46.463Z", + "updated_at": "2017-04-19T14:30:27.498Z", + "status": { + "icon": "icon_status_success", + "text": "passed", + "label": "passed", + "group": "success", + "has_details": true, + "details_path": "/root/ci-mock/builds/4166", + "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico", + "action": { + "icon": "icon_action_retry", + "title": "Retry", + "path": "/root/ci-mock/builds/4166/retry", + "method": "post" + } + } + }] + }, { + "name": "deploy to staging", + "size": 1, + "status": { + "icon": "icon_status_success", + "text": "passed", + "label": "passed", + "group": "success", + "has_details": true, + "details_path": "/root/ci-mock/builds/4159", + "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico", + "action": { + "icon": "icon_action_retry", + "title": "Retry", + "path": "/root/ci-mock/builds/4159/retry", + "method": "post" + } + }, + "jobs": [{ + "id": 4159, + "name": "deploy to staging", + "build_path": "/root/ci-mock/builds/4159", + "retry_path": "/root/ci-mock/builds/4159/retry", + "playable": false, + "created_at": "2017-04-18T16:32:08.420Z", + "updated_at": "2017-04-18T16:32:12.631Z", + "status": { + "icon": "icon_status_success", + "text": "passed", + "label": "passed", + "group": "success", + "has_details": true, + "details_path": "/root/ci-mock/builds/4159", + "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico", + "action": { + "icon": "icon_action_retry", + "title": "Retry", + "path": "/root/ci-mock/builds/4159/retry", + "method": "post" + } + } + }] + }], + "status": { + "icon": "icon_status_success", + "text": "passed", + "label": "passed", + "group": "success", + "has_details": true, + "details_path": "/root/ci-mock/pipelines/123#deploy", + "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico" + }, + "path": "/root/ci-mock/pipelines/123#deploy", + "dropdown_path": "/root/ci-mock/pipelines/123/stage.json?stage=deploy" + }], + "artifacts": [], + "manual_actions": [{ + "name": "deploy to production", + "path": "/root/ci-mock/builds/4166/play", + "playable": false + }] + }, + "flags": { + "latest": true, + "triggered": false, + "stuck": false, + "yaml_errors": false, + "retryable": false, + "cancelable": false + }, + "ref": { + "name": "master", + "path": "/root/ci-mock/tree/master", + "tag": false, + "branch": true + }, + "commit": { + "id": "798e5f902592192afaba73f4668ae30e56eae492", + "short_id": "798e5f90", + "title": "Merge branch 'new-branch' into 'master'\r", + "created_at": "2017-04-13T10:25:17.000+01:00", + "parent_ids": ["54d483b1ed156fbbf618886ddf7ab023e24f8738", "c8e2d38a6c538822e81c57022a6e3a0cfedebbcc"], + "message": "Merge branch 'new-branch' into 'master'\r\n\r\nAdd new file\r\n\r\nSee merge request !1", + "author_name": "Root", + "author_email": "admin@example.com", + "authored_date": "2017-04-13T10:25:17.000+01:00", + "committer_name": "Root", + "committer_email": "admin@example.com", + "committed_date": 
"2017-04-13T10:25:17.000+01:00", + "author": { + "name": "Root", + "username": "root", + "id": 1, + "state": "active", + "avatar_url": null, + "web_url": "http://localhost:3000/root" + }, + "author_gravatar_url": null, + "commit_url": "http://localhost:3000/root/ci-mock/commit/798e5f902592192afaba73f4668ae30e56eae492", + "commit_path": "/root/ci-mock/commit/798e5f902592192afaba73f4668ae30e56eae492" + }, + "created_at": "2017-04-13T09:25:18.881Z", + "updated_at": "2017-04-19T14:30:27.561Z" +}; diff --git a/spec/javascripts/pipelines/graph/stage_column_component_spec.js b/spec/javascripts/pipelines/graph/stage_column_component_spec.js new file mode 100644 index 00000000000..aa4d6eedaf4 --- /dev/null +++ b/spec/javascripts/pipelines/graph/stage_column_component_spec.js @@ -0,0 +1,42 @@ +import Vue from 'vue'; +import stageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue'; + +describe('stage column component', () => { + let component; + const mockJob = { + id: 4256, + name: 'test', + status: { + icon: 'icon_status_success', + text: 'passed', + label: 'passed', + group: 'success', + details_path: '/root/ci-mock/builds/4256', + action: { + icon: 'icon_action_retry', + title: 'Retry', + path: '/root/ci-mock/builds/4256/retry', + method: 'post', + }, + }, + }; + + beforeEach(() => { + const StageColumnComponent = Vue.extend(stageColumnComponent); + + component = new StageColumnComponent({ + propsData: { + title: 'foo', + jobs: [mockJob, mockJob, mockJob], + }, + }).$mount(); + }); + + it('should render provided title', () => { + expect(component.$el.querySelector('.stage-name').textContent.trim()).toEqual('foo'); + }); + + it('should render the provided jobs', () => { + expect(component.$el.querySelectorAll('.builds-container > ul > li').length).toEqual(3); + }); +}); diff --git a/spec/javascripts/vue_pipelines_index/nav_controls_spec.js b/spec/javascripts/pipelines/nav_controls_spec.js index 659c4854a56..601eebce38a 100644 --- a/spec/javascripts/vue_pipelines_index/nav_controls_spec.js +++ b/spec/javascripts/pipelines/nav_controls_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import navControlsComp from '~/vue_pipelines_index/components/nav_controls'; +import navControlsComp from '~/pipelines/components/nav_controls'; describe('Pipelines Nav Controls', () => { let NavControlsComponent; diff --git a/spec/javascripts/vue_pipelines_index/pipeline_url_spec.js b/spec/javascripts/pipelines/pipeline_url_spec.js index 96a2a37b5f7..0bcc3905702 100644 --- a/spec/javascripts/vue_pipelines_index/pipeline_url_spec.js +++ b/spec/javascripts/pipelines/pipeline_url_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import pipelineUrlComp from '~/vue_pipelines_index/components/pipeline_url'; +import pipelineUrlComp from '~/pipelines/components/pipeline_url'; describe('Pipeline Url Component', () => { let PipelineUrlComponent; @@ -60,7 +60,7 @@ describe('Pipeline Url Component', () => { expect( component.$el.querySelector('.js-pipeline-url-user').getAttribute('href'), ).toEqual(mockData.pipeline.user.web_url); - expect(image.getAttribute('title')).toEqual(mockData.pipeline.user.name); + expect(image.getAttribute('data-original-title')).toEqual(mockData.pipeline.user.name); expect(image.getAttribute('src')).toEqual(mockData.pipeline.user.avatar_url); }); diff --git a/spec/javascripts/vue_pipelines_index/pipelines_actions_spec.js b/spec/javascripts/pipelines/pipelines_actions_spec.js index dba998c7688..c89dacbcd93 100644 --- a/spec/javascripts/vue_pipelines_index/pipelines_actions_spec.js 
+++ b/spec/javascripts/pipelines/pipelines_actions_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import pipelinesActionsComp from '~/vue_pipelines_index/components/pipelines_actions'; +import pipelinesActionsComp from '~/pipelines/components/pipelines_actions'; describe('Pipelines Actions dropdown', () => { let component; @@ -15,6 +15,11 @@ describe('Pipelines Actions dropdown', () => { name: 'stop_review', path: '/root/review-app/builds/1893/play', }, + { + name: 'foo', + path: '#', + playable: false, + }, ]; spy = jasmine.createSpy('spy').and.returnValue(Promise.resolve()); @@ -59,4 +64,14 @@ describe('Pipelines Actions dropdown', () => { expect(component.$el.querySelector('.fa-spinner')).toEqual(null); }); + + it('should render a disabled action when it\'s not playable', () => { + expect( + component.$el.querySelector('.dropdown-menu li:last-child button').getAttribute('disabled'), + ).toEqual('disabled'); + + expect( + component.$el.querySelector('.dropdown-menu li:last-child button').classList.contains('disabled'), + ).toEqual(true); + }); }); diff --git a/spec/javascripts/vue_pipelines_index/pipelines_artifacts_spec.js b/spec/javascripts/pipelines/pipelines_artifacts_spec.js index f7f49649c1c..9724b63d957 100644 --- a/spec/javascripts/vue_pipelines_index/pipelines_artifacts_spec.js +++ b/spec/javascripts/pipelines/pipelines_artifacts_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import artifactsComp from '~/vue_pipelines_index/components/pipelines_artifacts'; +import artifactsComp from '~/pipelines/components/pipelines_artifacts'; describe('Pipelines Artifacts dropdown', () => { let component; diff --git a/spec/javascripts/vue_pipelines_index/pipelines_spec.js b/spec/javascripts/pipelines/pipelines_spec.js index 725f6cb2d7a..3a56156358b 100644 --- a/spec/javascripts/vue_pipelines_index/pipelines_spec.js +++ b/spec/javascripts/pipelines/pipelines_spec.js @@ -1,15 +1,20 @@ import Vue from 'vue'; -import pipelinesComp from '~/vue_pipelines_index/pipelines'; -import Store from '~/vue_pipelines_index/stores/pipelines_store'; -import pipelinesData from './mock_data'; +import pipelinesComp from '~/pipelines/pipelines'; +import Store from '~/pipelines/stores/pipelines_store'; describe('Pipelines', () => { + const jsonFixtureName = 'pipelines/pipelines.json'; + preloadFixtures('static/pipelines.html.raw'); + preloadFixtures(jsonFixtureName); let PipelinesComponent; + let pipeline; beforeEach(() => { loadFixtures('static/pipelines.html.raw'); + const pipelines = getJSONFixture(jsonFixtureName).pipelines; + pipeline = pipelines.find(p => p.id === 1); PipelinesComponent = Vue.extend(pipelinesComp); }); @@ -17,7 +22,7 @@ describe('Pipelines', () => { describe('successfull request', () => { describe('with pipelines', () => { const pipelinesInterceptor = (request, next) => { - next(request.respondWith(JSON.stringify(pipelinesData), { + next(request.respondWith(JSON.stringify(pipeline), { status: 200, })); }; diff --git a/spec/javascripts/vue_pipelines_index/pipelines_store_spec.js b/spec/javascripts/pipelines/pipelines_store_spec.js index 5c0934404bb..10ff0c6bb84 100644 --- a/spec/javascripts/vue_pipelines_index/pipelines_store_spec.js +++ b/spec/javascripts/pipelines/pipelines_store_spec.js @@ -1,4 +1,4 @@ -import PipelineStore from '~/vue_pipelines_index/stores/pipelines_store'; +import PipelineStore from '~/pipelines/stores/pipelines_store'; describe('Pipelines Store', () => { let store; diff --git a/spec/javascripts/pipelines/stage_spec.js b/spec/javascripts/pipelines/stage_spec.js new file 
mode 100644 index 00000000000..a4f32a1faed --- /dev/null +++ b/spec/javascripts/pipelines/stage_spec.js @@ -0,0 +1,86 @@ +import Vue from 'vue'; +import stage from '~/pipelines/components/stage.vue'; + +describe('Pipelines stage component', () => { + let StageComponent; + let component; + + beforeEach(() => { + StageComponent = Vue.extend(stage); + + component = new StageComponent({ + propsData: { + stage: { + status: { + group: 'success', + icon: 'icon_status_success', + title: 'success', + }, + dropdown_path: 'foo', + }, + updateDropdown: false, + }, + }).$mount(); + }); + + it('should render a dropdown with the status icon', () => { + expect(component.$el.getAttribute('class')).toEqual('dropdown'); + expect(component.$el.querySelector('svg')).toBeDefined(); + expect(component.$el.querySelector('button').getAttribute('data-toggle')).toEqual('dropdown'); + }); + + describe('with successfull request', () => { + const interceptor = (request, next) => { + next(request.respondWith(JSON.stringify({ html: 'foo' }), { + status: 200, + })); + }; + + beforeEach(() => { + Vue.http.interceptors.push(interceptor); + }); + + afterEach(() => { + Vue.http.interceptors = _.without( + Vue.http.interceptors, interceptor, + ); + }); + + it('should render the received data', (done) => { + component.$el.querySelector('button').click(); + + setTimeout(() => { + expect( + component.$el.querySelector('.js-builds-dropdown-container ul').textContent.trim(), + ).toEqual('foo'); + done(); + }, 0); + }); + }); + + describe('when request fails', () => { + const interceptor = (request, next) => { + next(request.respondWith(JSON.stringify({}), { + status: 500, + })); + }; + + beforeEach(() => { + Vue.http.interceptors.push(interceptor); + }); + + afterEach(() => { + Vue.http.interceptors = _.without( + Vue.http.interceptors, interceptor, + ); + }); + + it('should close the dropdown', () => { + component.$el.click(); + + setTimeout(() => { + expect(component.$el.classList.contains('open')).toEqual(false); + }, 0); + }); + }); +}); diff --git a/spec/javascripts/pipelines/time_ago_spec.js b/spec/javascripts/pipelines/time_ago_spec.js new file mode 100644 index 00000000000..24581e8c672 --- /dev/null +++ b/spec/javascripts/pipelines/time_ago_spec.js @@ -0,0 +1,64 @@ +import Vue from 'vue'; +import timeAgo from '~/pipelines/components/time_ago'; + +describe('Timeago component', () => { + let TimeAgo; + beforeEach(() => { + TimeAgo = Vue.extend(timeAgo); + }); + + describe('with duration', () => { + it('should render duration and timer svg', () => { + const component = new TimeAgo({ + propsData: { + duration: 10, + finishedTime: '', + }, + }).$mount(); + + expect(component.$el.querySelector('.duration')).toBeDefined(); + expect(component.$el.querySelector('.duration svg')).toBeDefined(); + }); + }); + + describe('without duration', () => { + it('should not render duration and timer svg', () => { + const component = new TimeAgo({ + propsData: { + duration: 0, + finishedTime: '', + }, + }).$mount(); + + expect(component.$el.querySelector('.duration')).toBe(null); + }); + }); + + describe('with finishedTime', () => { + it('should render time and calendar icon', () => { + const component = new TimeAgo({ + propsData: { + duration: 0, + finishedTime: '2017-04-26T12:40:23.277Z', + }, + }).$mount(); + + expect(component.$el.querySelector('.finished-at')).toBeDefined(); + expect(component.$el.querySelector('.finished-at i.fa-calendar')).toBeDefined(); + expect(component.$el.querySelector('.finished-at time')).toBeDefined(); + }); + }); 
+ + describe('without finishedTime', () => { + it('should not render time and calendar icon', () => { + const component = new TimeAgo({ + propsData: { + duration: 0, + finishedTime: '', + }, + }).$mount(); + + expect(component.$el.querySelector('.finished-at')).toBe(null); + }); + }); +}); diff --git a/spec/javascripts/pipelines_spec.js b/spec/javascripts/pipelines_spec.js index 72770a702d3..81ac589f4e6 100644 --- a/spec/javascripts/pipelines_spec.js +++ b/spec/javascripts/pipelines_spec.js @@ -1,30 +1,22 @@ -require('~/pipelines'); +import Pipelines from '~/pipelines'; // Fix for phantomJS if (!Element.prototype.matches && Element.prototype.webkitMatchesSelector) { Element.prototype.matches = Element.prototype.webkitMatchesSelector; } -(() => { - describe('Pipelines', () => { - preloadFixtures('static/pipeline_graph.html.raw'); +describe('Pipelines', () => { + preloadFixtures('static/pipeline_graph.html.raw'); - beforeEach(() => { - loadFixtures('static/pipeline_graph.html.raw'); - }); - - it('should be defined', () => { - expect(window.gl.Pipelines).toBeDefined(); - }); - - it('should create a `Pipelines` instance without options', () => { - expect(() => { new window.gl.Pipelines(); }).not.toThrow(); //eslint-disable-line - }); + beforeEach(() => { + loadFixtures('static/pipeline_graph.html.raw'); + }); - it('should create a `Pipelines` instance with options', () => { - const pipelines = new window.gl.Pipelines({ foo: 'bar' }); + it('should be defined', () => { + expect(Pipelines).toBeDefined(); + }); - expect(pipelines.pipelineGraph).toBeDefined(); - }); + it('should create a `Pipelines` instance without options', () => { + expect(() => { new Pipelines(); }).not.toThrow(); //eslint-disable-line }); -})(); +}); diff --git a/spec/javascripts/pretty_time_spec.js b/spec/javascripts/pretty_time_spec.js index a4662cfb557..de99e7e3894 100644 --- a/spec/javascripts/pretty_time_spec.js +++ b/spec/javascripts/pretty_time_spec.js @@ -1,4 +1,4 @@ -require('~/lib/utils/pretty_time'); +import '~/lib/utils/pretty_time'; (() => { const prettyTime = gl.utils.prettyTime; diff --git a/spec/javascripts/project_title_spec.js b/spec/javascripts/project_title_spec.js index 3a1d4e2440f..3dba2e817ff 100644 --- a/spec/javascripts/project_title_spec.js +++ b/spec/javascripts/project_title_spec.js @@ -1,12 +1,11 @@ /* eslint-disable space-before-function-paren, no-unused-expressions, no-return-assign, no-param-reassign, no-var, new-cap, wrap-iife, no-unused-vars, quotes, jasmine/no-expect-in-setup-teardown, max-len */ /* global Project */ -require('select2/select2.js'); -require('~/lib/utils/type_utility'); -require('~/gl_dropdown'); -require('~/api'); -require('~/project_select'); -require('~/project'); +import 'select2/select2'; +import '~/gl_dropdown'; +import '~/api'; +import '~/project_select'; +import '~/project'; (function() { describe('Project Title', function() { diff --git a/spec/javascripts/raven/index_spec.js b/spec/javascripts/raven/index_spec.js new file mode 100644 index 00000000000..b5662cd0331 --- /dev/null +++ b/spec/javascripts/raven/index_spec.js @@ -0,0 +1,42 @@ +import RavenConfig from '~/raven/raven_config'; +import index from '~/raven/index'; + +describe('RavenConfig options', () => { + let sentryDsn; + let currentUserId; + let gitlabUrl; + let isProduction; + let indexReturnValue; + + beforeEach(() => { + sentryDsn = 'sentryDsn'; + currentUserId = 'currentUserId'; + gitlabUrl = 'gitlabUrl'; + isProduction = 'isProduction'; + + window.gon = { + sentry_dsn: sentryDsn, + current_user_id: 
currentUserId, + gitlab_url: gitlabUrl, + }; + + process.env.NODE_ENV = isProduction; + + spyOn(RavenConfig, 'init'); + + indexReturnValue = index(); + }); + + it('should init with .sentryDsn, .currentUserId, .whitelistUrls and .isProduction', () => { + expect(RavenConfig.init).toHaveBeenCalledWith({ + sentryDsn, + currentUserId, + whitelistUrls: [gitlabUrl], + isProduction, + }); + }); + + it('should return RavenConfig', () => { + expect(indexReturnValue).toBe(RavenConfig); + }); +}); diff --git a/spec/javascripts/raven/raven_config_spec.js b/spec/javascripts/raven/raven_config_spec.js new file mode 100644 index 00000000000..a2d720760fc --- /dev/null +++ b/spec/javascripts/raven/raven_config_spec.js @@ -0,0 +1,276 @@ +import Raven from 'raven-js'; +import RavenConfig from '~/raven/raven_config'; + +describe('RavenConfig', () => { + describe('IGNORE_ERRORS', () => { + it('should be an array of strings', () => { + const areStrings = RavenConfig.IGNORE_ERRORS.every(error => typeof error === 'string'); + + expect(areStrings).toBe(true); + }); + }); + + describe('IGNORE_URLS', () => { + it('should be an array of regexps', () => { + const areRegExps = RavenConfig.IGNORE_URLS.every(url => url instanceof RegExp); + + expect(areRegExps).toBe(true); + }); + }); + + describe('SAMPLE_RATE', () => { + it('should be a finite number', () => { + expect(typeof RavenConfig.SAMPLE_RATE).toEqual('number'); + }); + }); + + describe('init', () => { + let options; + + beforeEach(() => { + options = { + sentryDsn: '//sentryDsn', + ravenAssetUrl: '//ravenAssetUrl', + currentUserId: 1, + whitelistUrls: ['//gitlabUrl'], + isProduction: true, + }; + + spyOn(RavenConfig, 'configure'); + spyOn(RavenConfig, 'bindRavenErrors'); + spyOn(RavenConfig, 'setUser'); + + RavenConfig.init(options); + }); + + it('should set the options property', () => { + expect(RavenConfig.options).toEqual(options); + }); + + it('should call the configure method', () => { + expect(RavenConfig.configure).toHaveBeenCalled(); + }); + + it('should call the error bindings method', () => { + expect(RavenConfig.bindRavenErrors).toHaveBeenCalled(); + }); + + it('should call setUser', () => { + expect(RavenConfig.setUser).toHaveBeenCalled(); + }); + + it('should not call setUser if there is no current user ID', () => { + RavenConfig.setUser.calls.reset(); + + RavenConfig.init({ + sentryDsn: '//sentryDsn', + ravenAssetUrl: '//ravenAssetUrl', + currentUserId: undefined, + whitelistUrls: ['//gitlabUrl'], + isProduction: true, + }); + + expect(RavenConfig.setUser).not.toHaveBeenCalled(); + }); + }); + + describe('configure', () => { + let options; + let raven; + let ravenConfig; + + beforeEach(() => { + options = { + sentryDsn: '//sentryDsn', + whitelistUrls: ['//gitlabUrl'], + isProduction: true, + }; + + ravenConfig = jasmine.createSpyObj('ravenConfig', ['shouldSendSample']); + raven = jasmine.createSpyObj('raven', ['install']); + + spyOn(Raven, 'config').and.returnValue(raven); + + ravenConfig.options = options; + ravenConfig.IGNORE_ERRORS = 'ignore_errors'; + ravenConfig.IGNORE_URLS = 'ignore_urls'; + + RavenConfig.configure.call(ravenConfig); + }); + + it('should call Raven.config', () => { + expect(Raven.config).toHaveBeenCalledWith(options.sentryDsn, { + whitelistUrls: options.whitelistUrls, + environment: 'production', + ignoreErrors: ravenConfig.IGNORE_ERRORS, + ignoreUrls: ravenConfig.IGNORE_URLS, + shouldSendCallback: jasmine.any(Function), + }); + }); + + it('should call Raven.install', () => { + expect(raven.install).toHaveBeenCalled(); + }); 
+ + it('should set .environment to development if isProduction is false', () => { + ravenConfig.options.isProduction = false; + + RavenConfig.configure.call(ravenConfig); + + expect(Raven.config).toHaveBeenCalledWith(options.sentryDsn, { + whitelistUrls: options.whitelistUrls, + environment: 'development', + ignoreErrors: ravenConfig.IGNORE_ERRORS, + ignoreUrls: ravenConfig.IGNORE_URLS, + shouldSendCallback: jasmine.any(Function), + }); + }); + }); + + describe('setUser', () => { + let ravenConfig; + + beforeEach(() => { + ravenConfig = { options: { currentUserId: 1 } }; + spyOn(Raven, 'setUserContext'); + + RavenConfig.setUser.call(ravenConfig); + }); + + it('should call .setUserContext', function () { + expect(Raven.setUserContext).toHaveBeenCalledWith({ + id: ravenConfig.options.currentUserId, + }); + }); + }); + + describe('bindRavenErrors', () => { + let $document; + let $; + + beforeEach(() => { + $document = jasmine.createSpyObj('$document', ['on']); + $ = jasmine.createSpy('$').and.returnValue($document); + + window.$ = $; + + RavenConfig.bindRavenErrors(); + }); + + it('should call .on', function () { + expect($document.on).toHaveBeenCalledWith('ajaxError.raven', RavenConfig.handleRavenErrors); + }); + }); + + describe('handleRavenErrors', () => { + let event; + let req; + let config; + let err; + + beforeEach(() => { + event = {}; + req = { status: 'status', responseText: 'responseText', statusText: 'statusText' }; + config = { type: 'type', url: 'url', data: 'data' }; + err = {}; + + spyOn(Raven, 'captureMessage'); + + RavenConfig.handleRavenErrors(event, req, config, err); + }); + + it('should call Raven.captureMessage', () => { + expect(Raven.captureMessage).toHaveBeenCalledWith(err, { + extra: { + type: config.type, + url: config.url, + data: config.data, + status: req.status, + response: req.responseText, + error: err, + event, + }, + }); + }); + + describe('if no err is provided', () => { + beforeEach(() => { + Raven.captureMessage.calls.reset(); + + RavenConfig.handleRavenErrors(event, req, config); + }); + + it('should use req.statusText as the error value', () => { + expect(Raven.captureMessage).toHaveBeenCalledWith(req.statusText, { + extra: { + type: config.type, + url: config.url, + data: config.data, + status: req.status, + response: req.responseText, + error: req.statusText, + event, + }, + }); + }); + }); + + describe('if no req.responseText is provided', () => { + beforeEach(() => { + req.responseText = undefined; + + Raven.captureMessage.calls.reset(); + + RavenConfig.handleRavenErrors(event, req, config, err); + }); + + it('should use `Unknown response text` as the response', () => { + expect(Raven.captureMessage).toHaveBeenCalledWith(err, { + extra: { + type: config.type, + url: config.url, + data: config.data, + status: req.status, + response: 'Unknown response text', + error: err, + event, + }, + }); + }); + }); + }); + + describe('shouldSendSample', () => { + let randomNumber; + + beforeEach(() => { + RavenConfig.SAMPLE_RATE = 50; + + spyOn(Math, 'random').and.callFake(() => randomNumber); + }); + + it('should call Math.random', () => { + RavenConfig.shouldSendSample(); + + expect(Math.random).toHaveBeenCalled(); + }); + + it('should return true if the sample rate is greater than the random number * 100', () => { + randomNumber = 0.1; + + expect(RavenConfig.shouldSendSample()).toBe(true); + }); + + it('should return false if the sample rate is less than the random number * 100', () => { + randomNumber = 0.9; + + 
expect(RavenConfig.shouldSendSample()).toBe(false); + }); + + it('should return true if the sample rate is equal to the random number * 100', () => { + randomNumber = 0.5; + + expect(RavenConfig.shouldSendSample()).toBe(true); + }); + }); +}); diff --git a/spec/javascripts/search_autocomplete_spec.js b/spec/javascripts/search_autocomplete_spec.js index aaf058bd755..a53f58b5d0d 100644 --- a/spec/javascripts/search_autocomplete_spec.js +++ b/spec/javascripts/search_autocomplete_spec.js @@ -1,10 +1,9 @@ /* eslint-disable space-before-function-paren, max-len, no-var, one-var, one-var-declaration-per-line, no-unused-expressions, consistent-return, no-param-reassign, default-case, no-return-assign, comma-dangle, object-shorthand, prefer-template, quotes, new-parens, vars-on-top, new-cap, max-len */ -require('~/gl_dropdown'); -require('~/search_autocomplete'); -require('~/lib/utils/common_utils'); -require('~/lib/utils/type_utility'); -require('vendor/fuzzaldrin-plus'); +import '~/gl_dropdown'; +import '~/search_autocomplete'; +import '~/lib/utils/common_utils'; +import 'vendor/fuzzaldrin-plus'; (function() { var addBodyAttributes, assertLinks, dashboardIssuesPath, dashboardMRsPath, groupIssuesPath, groupMRsPath, groupName, mockDashboardOptions, mockGroupOptions, mockProjectOptions, projectIssuesPath, projectMRsPath, projectName, userId, widget; diff --git a/spec/javascripts/shortcuts_issuable_spec.js b/spec/javascripts/shortcuts_issuable_spec.js index 9e19dabd0e3..3515dfbc60b 100644 --- a/spec/javascripts/shortcuts_issuable_spec.js +++ b/spec/javascripts/shortcuts_issuable_spec.js @@ -1,8 +1,8 @@ /* eslint-disable space-before-function-paren, no-return-assign, no-var, quotes */ /* global ShortcutsIssuable */ -require('~/copy_as_gfm'); -require('~/shortcuts_issuable'); +import '~/copy_as_gfm'; +import '~/shortcuts_issuable'; (function() { describe('ShortcutsIssuable', function() { @@ -13,7 +13,7 @@ require('~/shortcuts_issuable'); document.querySelector('.js-new-note-form').classList.add('js-main-target-form'); this.shortcut = new ShortcutsIssuable(); }); - describe('#replyWithSelectedText', function() { + describe('replyWithSelectedText', function() { var stubSelection; // Stub window.gl.utils.getSelectedFragment to return a node with the provided HTML. 
stubSelection = function(html) { diff --git a/spec/javascripts/shortcuts_spec.js b/spec/javascripts/shortcuts_spec.js new file mode 100644 index 00000000000..9b8373df29e --- /dev/null +++ b/spec/javascripts/shortcuts_spec.js @@ -0,0 +1,45 @@ +/* global Shortcuts */ +describe('Shortcuts', () => { + const fixtureName = 'issues/issue_with_comment.html.raw'; + const createEvent = (type, target) => $.Event(type, { + target, + }); + + preloadFixtures(fixtureName); + + describe('toggleMarkdownPreview', () => { + let sc; + + beforeEach(() => { + loadFixtures(fixtureName); + + spyOnEvent('.js-new-note-form .js-md-preview-button', 'focus'); + spyOnEvent('.edit-note .js-md-preview-button', 'focus'); + + sc = new Shortcuts(); + }); + + it('focuses preview button in form', () => { + sc.toggleMarkdownPreview( + createEvent('KeyboardEvent', document.querySelector('.js-new-note-form .js-note-text'), + )); + + expect('focus').toHaveBeenTriggeredOn('.js-new-note-form .js-md-preview-button'); + }); + + it('focues preview button inside edit comment form', (done) => { + document.querySelector('.js-note-edit').click(); + + setTimeout(() => { + sc.toggleMarkdownPreview( + createEvent('KeyboardEvent', document.querySelector('.edit-note .js-note-text'), + )); + + expect('focus').not.toHaveBeenTriggeredOn('.js-new-note-form .js-md-preview-button'); + expect('focus').toHaveBeenTriggeredOn('.edit-note .js-md-preview-button'); + + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/sidebar/assignee_title_spec.js b/spec/javascripts/sidebar/assignee_title_spec.js new file mode 100644 index 00000000000..5b5b1bf4140 --- /dev/null +++ b/spec/javascripts/sidebar/assignee_title_spec.js @@ -0,0 +1,80 @@ +import Vue from 'vue'; +import AssigneeTitle from '~/sidebar/components/assignees/assignee_title'; + +describe('AssigneeTitle component', () => { + let component; + let AssigneeTitleComponent; + + beforeEach(() => { + AssigneeTitleComponent = Vue.extend(AssigneeTitle); + }); + + describe('assignee title', () => { + it('renders assignee', () => { + component = new AssigneeTitleComponent({ + propsData: { + numberOfAssignees: 1, + editable: false, + }, + }).$mount(); + + expect(component.$el.innerText.trim()).toEqual('Assignee'); + }); + + it('renders 2 assignees', () => { + component = new AssigneeTitleComponent({ + propsData: { + numberOfAssignees: 2, + editable: false, + }, + }).$mount(); + + expect(component.$el.innerText.trim()).toEqual('2 Assignees'); + }); + }); + + it('does not render spinner by default', () => { + component = new AssigneeTitleComponent({ + propsData: { + numberOfAssignees: 0, + editable: false, + }, + }).$mount(); + + expect(component.$el.querySelector('.fa')).toBeNull(); + }); + + it('renders spinner when loading', () => { + component = new AssigneeTitleComponent({ + propsData: { + loading: true, + numberOfAssignees: 0, + editable: false, + }, + }).$mount(); + + expect(component.$el.querySelector('.fa')).not.toBeNull(); + }); + + it('does not render edit link when not editable', () => { + component = new AssigneeTitleComponent({ + propsData: { + numberOfAssignees: 0, + editable: false, + }, + }).$mount(); + + expect(component.$el.querySelector('.edit-link')).toBeNull(); + }); + + it('renders edit link when editable', () => { + component = new AssigneeTitleComponent({ + propsData: { + numberOfAssignees: 0, + editable: true, + }, + }).$mount(); + + expect(component.$el.querySelector('.edit-link')).not.toBeNull(); + }); +}); diff --git a/spec/javascripts/sidebar/assignees_spec.js 
b/spec/javascripts/sidebar/assignees_spec.js new file mode 100644 index 00000000000..c9453a21189 --- /dev/null +++ b/spec/javascripts/sidebar/assignees_spec.js @@ -0,0 +1,272 @@ +import Vue from 'vue'; +import Assignee from '~/sidebar/components/assignees/assignees'; +import UsersMock from './mock_data'; +import UsersMockHelper from '../helpers/user_mock_data_helper'; + +describe('Assignee component', () => { + let component; + let AssigneeComponent; + + beforeEach(() => { + AssigneeComponent = Vue.extend(Assignee); + }); + + describe('No assignees/users', () => { + it('displays no assignee icon when collapsed', () => { + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000', + users: [], + editable: false, + }, + }).$mount(); + + const collapsed = component.$el.querySelector('.sidebar-collapsed-icon'); + expect(collapsed.childElementCount).toEqual(1); + expect(collapsed.children[0].getAttribute('aria-label')).toEqual('No Assignee'); + expect(collapsed.children[0].classList.contains('fa')).toEqual(true); + expect(collapsed.children[0].classList.contains('fa-user')).toEqual(true); + }); + + it('displays only "No assignee" when no users are assigned and the issue is read-only', () => { + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000', + users: [], + editable: false, + }, + }).$mount(); + const componentTextNoUsers = component.$el.querySelector('.assign-yourself').innerText.trim(); + + expect(componentTextNoUsers).toBe('No assignee'); + expect(componentTextNoUsers.indexOf('assign yourself')).toEqual(-1); + }); + + it('displays only "No assignee" when no users are assigned and the issue can be edited', () => { + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000', + users: [], + editable: true, + }, + }).$mount(); + const componentTextNoUsers = component.$el.querySelector('.assign-yourself').innerText.trim(); + + expect(componentTextNoUsers.indexOf('No assignee')).toEqual(0); + expect(componentTextNoUsers.indexOf('assign yourself')).toBeGreaterThan(0); + }); + + it('emits the assign-self event when "assign yourself" is clicked', () => { + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000', + users: [], + editable: true, + }, + }).$mount(); + + spyOn(component, '$emit'); + component.$el.querySelector('.assign-yourself .btn-link').click(); + expect(component.$emit).toHaveBeenCalledWith('assign-self'); + }); + }); + + describe('One assignee/user', () => { + it('displays one assignee icon when collapsed', () => { + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000', + users: [ + UsersMock.user, + ], + editable: false, + }, + }).$mount(); + + const collapsed = component.$el.querySelector('.sidebar-collapsed-icon'); + const assignee = collapsed.children[0]; + expect(collapsed.childElementCount).toEqual(1); + expect(assignee.querySelector('.avatar').getAttribute('src')).toEqual(UsersMock.user.avatar); + expect(assignee.querySelector('.avatar').getAttribute('alt')).toEqual(`${UsersMock.user.name}'s avatar`); + expect(assignee.querySelector('.author').innerText.trim()).toEqual(UsersMock.user.name); + }); + + it('Shows one user with avatar, username and author name', () => { + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000/', + users: [ + UsersMock.user, + ], + editable: true, + }, + }).$mount(); + + expect(component.$el.querySelector('.author_link')).not.toBeNull(); + // The image + 
expect(component.$el.querySelector('.author_link img').getAttribute('src')).toEqual(UsersMock.user.avatar); + // Author name + expect(component.$el.querySelector('.author_link .author').innerText.trim()).toEqual(UsersMock.user.name); + // Username + expect(component.$el.querySelector('.author_link .username').innerText.trim()).toEqual(`@${UsersMock.user.username}`); + }); + + it('has the root url present in the assigneeUrl method', () => { + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000/', + users: [ + UsersMock.user, + ], + editable: true, + }, + }).$mount(); + + expect(component.assigneeUrl(UsersMock.user).indexOf('http://localhost:3000/')).not.toEqual(-1); + }); + }); + + describe('Two or more assignees/users', () => { + it('displays two assignee icons when collapsed', () => { + const users = UsersMockHelper.createNumberRandomUsers(2); + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000', + users, + editable: false, + }, + }).$mount(); + + const collapsed = component.$el.querySelector('.sidebar-collapsed-icon'); + expect(collapsed.childElementCount).toEqual(2); + + const first = collapsed.children[0]; + expect(first.querySelector('.avatar').getAttribute('src')).toEqual(users[0].avatar); + expect(first.querySelector('.avatar').getAttribute('alt')).toEqual(`${users[0].name}'s avatar`); + expect(first.querySelector('.author').innerText.trim()).toEqual(users[0].name); + + const second = collapsed.children[1]; + expect(second.querySelector('.avatar').getAttribute('src')).toEqual(users[1].avatar); + expect(second.querySelector('.avatar').getAttribute('alt')).toEqual(`${users[1].name}'s avatar`); + expect(second.querySelector('.author').innerText.trim()).toEqual(users[1].name); + }); + + it('displays one assignee icon and counter when collapsed', () => { + const users = UsersMockHelper.createNumberRandomUsers(3); + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000', + users, + editable: false, + }, + }).$mount(); + + const collapsed = component.$el.querySelector('.sidebar-collapsed-icon'); + expect(collapsed.childElementCount).toEqual(2); + + const first = collapsed.children[0]; + expect(first.querySelector('.avatar').getAttribute('src')).toEqual(users[0].avatar); + expect(first.querySelector('.avatar').getAttribute('alt')).toEqual(`${users[0].name}'s avatar`); + expect(first.querySelector('.author').innerText.trim()).toEqual(users[0].name); + + const second = collapsed.children[1]; + expect(second.querySelector('.avatar-counter').innerText.trim()).toEqual('+2'); + }); + + it('Shows two assignees', () => { + const users = UsersMockHelper.createNumberRandomUsers(2); + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000', + users, + editable: true, + }, + }).$mount(); + + expect(component.$el.querySelectorAll('.user-item').length).toEqual(users.length); + expect(component.$el.querySelector('.user-list-more')).toBe(null); + }); + + it('Shows the "show-less" assignees label', (done) => { + const users = UsersMockHelper.createNumberRandomUsers(6); + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000', + users, + editable: true, + }, + }).$mount(); + + expect(component.$el.querySelectorAll('.user-item').length).toEqual(component.defaultRenderCount); + expect(component.$el.querySelector('.user-list-more')).not.toBe(null); + const usersLabelExpectation = users.length - component.defaultRenderCount; + 
expect(component.$el.querySelector('.user-list-more .btn-link').innerText.trim()) + .not.toBe(`+${usersLabelExpectation} more`); + component.toggleShowLess(); + Vue.nextTick(() => { + expect(component.$el.querySelector('.user-list-more .btn-link').innerText.trim()) + .toBe('- show less'); + done(); + }); + }); + + it('Shows the "show-less" when "n+ more " label is clicked', (done) => { + const users = UsersMockHelper.createNumberRandomUsers(6); + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000', + users, + editable: true, + }, + }).$mount(); + + component.$el.querySelector('.user-list-more .btn-link').click(); + Vue.nextTick(() => { + expect(component.$el.querySelector('.user-list-more .btn-link').innerText.trim()) + .toBe('- show less'); + done(); + }); + }); + + it('gets the count of avatar via a computed property ', () => { + const users = UsersMockHelper.createNumberRandomUsers(6); + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000', + users, + editable: true, + }, + }).$mount(); + + expect(component.sidebarAvatarCounter).toEqual(`+${users.length - 1}`); + }); + + describe('n+ more label', () => { + beforeEach(() => { + const users = UsersMockHelper.createNumberRandomUsers(6); + component = new AssigneeComponent({ + propsData: { + rootPath: 'http://localhost:3000', + users, + editable: true, + }, + }).$mount(); + }); + + it('shows "+1 more" label', () => { + expect(component.$el.querySelector('.user-list-more .btn-link').innerText.trim()) + .toBe('+ 1 more'); + }); + + it('shows "show less" label', (done) => { + component.toggleShowLess(); + + Vue.nextTick(() => { + expect(component.$el.querySelector('.user-list-more .btn-link').innerText.trim()) + .toBe('- show less'); + done(); + }); + }); + }); + }); +}); diff --git a/spec/javascripts/sidebar/mock_data.js b/spec/javascripts/sidebar/mock_data.js new file mode 100644 index 00000000000..9fc8667ecc9 --- /dev/null +++ b/spec/javascripts/sidebar/mock_data.js @@ -0,0 +1,109 @@ +/* eslint-disable quote-props*/ + +const sidebarMockData = { + 'GET': { + '/gitlab-org/gitlab-shell/issues/5.json': { + id: 45, + iid: 5, + author_id: 23, + description: 'Nulla ullam commodi delectus adipisci quis sit.', + lock_version: null, + milestone_id: 21, + position: 0, + state: 'closed', + title: 'Vel et nulla voluptatibus corporis dolor iste saepe laborum.', + updated_by_id: 1, + created_at: '2017-02-02T21: 49: 49.664Z', + updated_at: '2017-05-03T22: 26: 03.760Z', + deleted_at: null, + time_estimate: 0, + total_time_spent: 0, + human_time_estimate: null, + human_total_time_spent: null, + branch_name: null, + confidential: false, + assignees: [ + { + name: 'User 0', + username: 'user0', + id: 22, + state: 'active', + avatar_url: 'http: //www.gravatar.com/avatar/52e4ce24a915fb7e51e1ad3b57f4b00a?s=80\u0026d=identicon', + web_url: 'http: //localhost:3001/user0', + }, + { + name: 'Marguerite Bartell', + username: 'tajuana', + id: 18, + state: 'active', + avatar_url: 'http: //www.gravatar.com/avatar/4852a41fb41616bf8f140d3701673f53?s=80\u0026d=identicon', + web_url: 'http: //localhost:3001/tajuana', + }, + { + name: 'Laureen Ritchie', + username: 'michaele.will', + id: 16, + state: 'active', + avatar_url: 'http: //www.gravatar.com/avatar/e301827eb03be955c9c172cb9a8e4e8a?s=80\u0026d=identicon', + web_url: 'http: //localhost:3001/michaele.will', + }, + ], + due_date: null, + moved_to_id: null, + project_id: 4, + weight: null, + milestone: { + id: 21, + iid: 1, + project_id: 4, + title: 
'v0.0', + description: 'Molestiae commodi laboriosam odio sunt eaque reprehenderit.', + state: 'active', + created_at: '2017-02-02T21: 49: 30.530Z', + updated_at: '2017-02-02T21: 49: 30.530Z', + due_date: null, + start_date: null, + }, + labels: [], + }, + }, + 'PUT': { + '/gitlab-org/gitlab-shell/issues/5.json': { + data: {}, + }, + }, +}; + +export default { + mediator: { + endpoint: '/gitlab-org/gitlab-shell/issues/5.json', + editable: true, + currentUser: { + id: 1, + name: 'Administrator', + username: 'root', + avatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + }, + rootPath: '/', + }, + time: { + time_estimate: 3600, + total_time_spent: 0, + human_time_estimate: '1h', + human_total_time_spent: null, + }, + user: { + avatar: 'http://gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + id: 1, + name: 'Administrator', + username: 'root', + }, + + sidebarMockInterceptor(request, next) { + const body = sidebarMockData[request.method.toUpperCase()][request.url]; + + next(request.respondWith(JSON.stringify(body), { + status: 200, + })); + }, +}; diff --git a/spec/javascripts/sidebar/sidebar_assignees_spec.js b/spec/javascripts/sidebar/sidebar_assignees_spec.js new file mode 100644 index 00000000000..865951b2ad7 --- /dev/null +++ b/spec/javascripts/sidebar/sidebar_assignees_spec.js @@ -0,0 +1,46 @@ +import Vue from 'vue'; +import SidebarAssignees from '~/sidebar/components/assignees/sidebar_assignees'; +import SidebarMediator from '~/sidebar/sidebar_mediator'; +import SidebarService from '~/sidebar/services/sidebar_service'; +import SidebarStore from '~/sidebar/stores/sidebar_store'; +import Mock from './mock_data'; + +describe('sidebar assignees', () => { + let component; + let SidebarAssigneeComponent; + preloadFixtures('issues/open-issue.html.raw'); + + beforeEach(() => { + Vue.http.interceptors.push(Mock.sidebarMockInterceptor); + SidebarAssigneeComponent = Vue.extend(SidebarAssignees); + spyOn(SidebarMediator.prototype, 'saveAssignees').and.callThrough(); + spyOn(SidebarMediator.prototype, 'assignYourself').and.callThrough(); + this.mediator = new SidebarMediator(Mock.mediator); + loadFixtures('issues/open-issue.html.raw'); + this.sidebarAssigneesEl = document.querySelector('#js-vue-sidebar-assignees'); + }); + + afterEach(() => { + SidebarService.singleton = null; + SidebarStore.singleton = null; + SidebarMediator.singleton = null; + Vue.http.interceptors = _.without(Vue.http.interceptors, Mock.sidebarMockInterceptor); + }); + + it('calls the mediator when saves the assignees', () => { + component = new SidebarAssigneeComponent() + .$mount(this.sidebarAssigneesEl); + component.saveAssignees(); + + expect(SidebarMediator.prototype.saveAssignees).toHaveBeenCalled(); + }); + + it('calls the mediator when "assignSelf" method is called', () => { + component = new SidebarAssigneeComponent() + .$mount(this.sidebarAssigneesEl); + component.assignSelf(); + + expect(SidebarMediator.prototype.assignYourself).toHaveBeenCalled(); + expect(this.mediator.store.assignees.length).toEqual(1); + }); +}); diff --git a/spec/javascripts/sidebar/sidebar_mediator_spec.js b/spec/javascripts/sidebar/sidebar_mediator_spec.js new file mode 100644 index 00000000000..e246f41ee82 --- /dev/null +++ b/spec/javascripts/sidebar/sidebar_mediator_spec.js @@ -0,0 +1,41 @@ +import Vue from 'vue'; +import SidebarMediator from '~/sidebar/sidebar_mediator'; +import SidebarStore from '~/sidebar/stores/sidebar_store'; +import SidebarService from 
'~/sidebar/services/sidebar_service'; +import Mock from './mock_data'; + +describe('Sidebar mediator', () => { + beforeEach(() => { + Vue.http.interceptors.push(Mock.sidebarMockInterceptor); + this.mediator = new SidebarMediator(Mock.mediator); + }); + + afterEach(() => { + SidebarService.singleton = null; + SidebarStore.singleton = null; + SidebarMediator.singleton = null; + Vue.http.interceptors = _.without(Vue.http.interceptors, Mock.sidebarMockInterceptor); + }); + + it('assigns yourself ', () => { + this.mediator.assignYourself(); + + expect(this.mediator.store.currentUser).toEqual(Mock.mediator.currentUser); + expect(this.mediator.store.assignees[0]).toEqual(Mock.mediator.currentUser); + }); + + it('saves assignees', (done) => { + this.mediator.saveAssignees('issue[assignee_ids]') + .then((resp) => { + expect(resp.status).toEqual(200); + done(); + }) + .catch(() => {}); + }); + + it('fetches the data', () => { + spyOn(this.mediator.service, 'get').and.callThrough(); + this.mediator.fetch(); + expect(this.mediator.service.get).toHaveBeenCalled(); + }); +}); diff --git a/spec/javascripts/sidebar/sidebar_service_spec.js b/spec/javascripts/sidebar/sidebar_service_spec.js new file mode 100644 index 00000000000..91a4dd669a7 --- /dev/null +++ b/spec/javascripts/sidebar/sidebar_service_spec.js @@ -0,0 +1,33 @@ +import Vue from 'vue'; +import SidebarService from '~/sidebar/services/sidebar_service'; +import Mock from './mock_data'; + +describe('Sidebar service', () => { + beforeEach(() => { + Vue.http.interceptors.push(Mock.sidebarMockInterceptor); + this.service = new SidebarService('/gitlab-org/gitlab-shell/issues/5.json'); + }); + + afterEach(() => { + SidebarService.singleton = null; + Vue.http.interceptors = _.without(Vue.http.interceptors, Mock.sidebarMockInterceptor); + }); + + it('gets the data', (done) => { + this.service.get() + .then((resp) => { + expect(resp).toBeDefined(); + done(); + }) + .catch(() => {}); + }); + + it('updates the data', (done) => { + this.service.update('issue[assignee_ids]', [1]) + .then((resp) => { + expect(resp).toBeDefined(); + done(); + }) + .catch(() => {}); + }); +}); diff --git a/spec/javascripts/sidebar/sidebar_store_spec.js b/spec/javascripts/sidebar/sidebar_store_spec.js new file mode 100644 index 00000000000..29facf483b5 --- /dev/null +++ b/spec/javascripts/sidebar/sidebar_store_spec.js @@ -0,0 +1,80 @@ +import SidebarStore from '~/sidebar/stores/sidebar_store'; +import Mock from './mock_data'; +import UsersMockHelper from '../helpers/user_mock_data_helper'; + +describe('Sidebar store', () => { + const assignee = { + id: 2, + name: 'gitlab user 2', + username: 'gitlab2', + avatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + }; + + const anotherAssignee = { + id: 3, + name: 'gitlab user 3', + username: 'gitlab3', + avatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + }; + + beforeEach(() => { + this.store = new SidebarStore({ + currentUser: { + id: 1, + name: 'Administrator', + username: 'root', + avatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + }, + editable: true, + rootPath: '/', + endpoint: '/gitlab-org/gitlab-shell/issues/5.json', + }); + }); + + afterEach(() => { + SidebarStore.singleton = null; + }); + + it('adds a new assignee', () => { + this.store.addAssignee(assignee); + expect(this.store.assignees.length).toEqual(1); + }); + + it('removes an assignee', () => { + this.store.removeAssignee(assignee); + 
expect(this.store.assignees.length).toEqual(0); + }); + + it('finds an existent assignee', () => { + let foundAssignee; + + this.store.addAssignee(assignee); + foundAssignee = this.store.findAssignee(assignee); + expect(foundAssignee).toBeDefined(); + expect(foundAssignee).toEqual(assignee); + foundAssignee = this.store.findAssignee(anotherAssignee); + expect(foundAssignee).toBeUndefined(); + }); + + it('removes all assignees', () => { + this.store.removeAllAssignees(); + expect(this.store.assignees.length).toEqual(0); + }); + + it('set assigned data', () => { + const users = { + assignees: UsersMockHelper.createNumberRandomUsers(3), + }; + + this.store.setAssigneeData(users); + expect(this.store.assignees.length).toEqual(3); + }); + + it('set time tracking data', () => { + this.store.setTimeTrackingData(Mock.time); + expect(this.store.timeEstimate).toEqual(Mock.time.time_estimate); + expect(this.store.totalTimeSpent).toEqual(Mock.time.total_time_spent); + expect(this.store.humanTimeEstimate).toEqual(Mock.time.human_time_estimate); + expect(this.store.humanTotalTimeSpent).toEqual(Mock.time.human_total_time_spent); + }); +}); diff --git a/spec/javascripts/signin_tabs_memoizer_spec.js b/spec/javascripts/signin_tabs_memoizer_spec.js index d83d9a57b42..0a32797c3e2 100644 --- a/spec/javascripts/signin_tabs_memoizer_spec.js +++ b/spec/javascripts/signin_tabs_memoizer_spec.js @@ -1,4 +1,6 @@ -require('~/signin_tabs_memoizer'); +import AccessorUtilities from '~/lib/utils/accessor'; + +import '~/signin_tabs_memoizer'; ((global) => { describe('SigninTabsMemoizer', () => { @@ -19,6 +21,8 @@ require('~/signin_tabs_memoizer'); beforeEach(() => { loadFixtures(fixtureTemplate); + + spyOn(AccessorUtilities, 'isLocalStorageAccessSafe').and.returnValue(true); }); it('does nothing if no tab was previously selected', () => { @@ -49,5 +53,91 @@ require('~/signin_tabs_memoizer'); expect(memo.readData()).toEqual('#standard'); }); + + describe('class constructor', () => { + beforeEach(() => { + memo = createMemoizer(); + }); + + it('should set .isLocalStorageAvailable', () => { + expect(AccessorUtilities.isLocalStorageAccessSafe).toHaveBeenCalled(); + expect(memo.isLocalStorageAvailable).toBe(true); + }); + }); + + describe('saveData', () => { + beforeEach(() => { + memo = { + currentTabKey, + }; + + spyOn(localStorage, 'setItem'); + }); + + describe('if .isLocalStorageAvailable is `false`', () => { + beforeEach(function () { + memo.isLocalStorageAvailable = false; + + global.ActiveTabMemoizer.prototype.saveData.call(memo); + }); + + it('should not call .setItem', () => { + expect(localStorage.setItem).not.toHaveBeenCalled(); + }); + }); + + describe('if .isLocalStorageAvailable is `true`', () => { + const value = 'value'; + + beforeEach(function () { + memo.isLocalStorageAvailable = true; + + global.ActiveTabMemoizer.prototype.saveData.call(memo, value); + }); + + it('should call .setItem', () => { + expect(localStorage.setItem).toHaveBeenCalledWith(currentTabKey, value); + }); + }); + }); + + describe('readData', () => { + const itemValue = 'itemValue'; + let readData; + + beforeEach(() => { + memo = { + currentTabKey, + }; + + spyOn(localStorage, 'getItem').and.returnValue(itemValue); + }); + + describe('if .isLocalStorageAvailable is `false`', () => { + beforeEach(function () { + memo.isLocalStorageAvailable = false; + + readData = global.ActiveTabMemoizer.prototype.readData.call(memo); + }); + + it('should not call .getItem and should return `null`', () => { + 
expect(localStorage.getItem).not.toHaveBeenCalled(); + expect(readData).toBe(null); + }); + }); + + describe('if .isLocalStorageAvailable is `true`', () => { + beforeEach(function () { + memo.isLocalStorageAvailable = true; + + readData = global.ActiveTabMemoizer.prototype.readData.call(memo); + }); + + it('should call .getItem and return the localStorage value', () => { + expect(window.localStorage.getItem).toHaveBeenCalledWith(currentTabKey); + expect(readData).toBe(itemValue); + }); + }); + }); }); })(window); diff --git a/spec/javascripts/smart_interval_spec.js b/spec/javascripts/smart_interval_spec.js index 4366ec2a5b8..7833bf3fb04 100644 --- a/spec/javascripts/smart_interval_spec.js +++ b/spec/javascripts/smart_interval_spec.js @@ -1,4 +1,4 @@ -require('~/smart_interval'); +import '~/smart_interval'; (() => { const DEFAULT_MAX_INTERVAL = 100; diff --git a/spec/javascripts/subbable_resource_spec.js b/spec/javascripts/subbable_resource_spec.js deleted file mode 100644 index 454386697f5..00000000000 --- a/spec/javascripts/subbable_resource_spec.js +++ /dev/null @@ -1,63 +0,0 @@ -/* eslint-disable max-len, arrow-parens, comma-dangle */ - -require('~/subbable_resource'); - -/* -* Test that each rest verb calls the publish and subscribe function and passes the correct value back -* -* -* */ -((global) => { - describe('Subbable Resource', function () { - describe('PubSub', function () { - beforeEach(function () { - this.MockResource = new global.SubbableResource('https://example.com'); - }); - it('should successfully add a single subscriber', function () { - const callback = () => {}; - this.MockResource.subscribe(callback); - - expect(this.MockResource.subscribers.length).toBe(1); - expect(this.MockResource.subscribers[0]).toBe(callback); - }); - - it('should successfully add multiple subscribers', function () { - const callbackOne = () => {}; - const callbackTwo = () => {}; - const callbackThree = () => {}; - - this.MockResource.subscribe(callbackOne); - this.MockResource.subscribe(callbackTwo); - this.MockResource.subscribe(callbackThree); - - expect(this.MockResource.subscribers.length).toBe(3); - }); - - it('should successfully publish an update to a single subscriber', function () { - const state = { myprop: 1 }; - - const callbacks = { - one: (data) => expect(data.myprop).toBe(2), - two: (data) => expect(data.myprop).toBe(2), - three: (data) => expect(data.myprop).toBe(2) - }; - - const spyOne = spyOn(callbacks, 'one'); - const spyTwo = spyOn(callbacks, 'two'); - const spyThree = spyOn(callbacks, 'three'); - - this.MockResource.subscribe(callbacks.one); - this.MockResource.subscribe(callbacks.two); - this.MockResource.subscribe(callbacks.three); - - state.myprop += 1; - - this.MockResource.publish(state); - - expect(spyOne).toHaveBeenCalled(); - expect(spyTwo).toHaveBeenCalled(); - expect(spyThree).toHaveBeenCalled(); - }); - }); - }); -})(window.gl || (window.gl = {})); diff --git a/spec/javascripts/syntax_highlight_spec.js b/spec/javascripts/syntax_highlight_spec.js index cea223bd243..946f98379ce 100644 --- a/spec/javascripts/syntax_highlight_spec.js +++ b/spec/javascripts/syntax_highlight_spec.js @@ -1,6 +1,6 @@ /* eslint-disable space-before-function-paren, no-var, no-return-assign, quotes */ -require('~/syntax_highlight'); +import '~/syntax_highlight'; (function() { describe('Syntax Highlighter', function() { diff --git a/spec/javascripts/test_bundle.js b/spec/javascripts/test_bundle.js index b30c5da8822..13827a26571 100644 --- a/spec/javascripts/test_bundle.js +++ 
b/spec/javascripts/test_bundle.js @@ -1,13 +1,15 @@ -// enable test fixtures -require('jasmine-jquery'); +import $ from 'jquery'; +import _ from 'underscore'; +import 'jasmine-jquery'; +import '~/commons'; -jasmine.getFixtures().fixturesPath = 'base/spec/javascripts/fixtures'; -jasmine.getJSONFixtures().fixturesPath = 'base/spec/javascripts/fixtures'; +// enable test fixtures +jasmine.getFixtures().fixturesPath = '/base/spec/javascripts/fixtures'; +jasmine.getJSONFixtures().fixturesPath = '/base/spec/javascripts/fixtures'; -// include common libraries -require('~/commons/index.js'); -window.$ = window.jQuery = require('jquery'); -window._ = require('underscore'); +// globalize common libraries +window.$ = window.jQuery = $; +window._ = _; // stub expected globals window.gl = window.gl || {}; @@ -55,7 +57,6 @@ if (process.env.BABEL_ENV === 'coverage') { './merge_conflicts/merge_conflicts_bundle.js', './merge_conflicts/components/inline_conflict_lines.js', './merge_conflicts/components/parallel_conflict_lines.js', - './merge_request_widget/ci_bundle.js', './monitoring/monitoring_bundle.js', './network/network_bundle.js', './network/branch_graph.js', @@ -64,6 +65,7 @@ if (process.env.BABEL_ENV === 'coverage') { './snippet/snippet_bundle.js', './terminal/terminal_bundle.js', './users/users_bundle.js', + './issue_show/index.js', ]; describe('Uncovered files', function () { diff --git a/spec/javascripts/todos_spec.js b/spec/javascripts/todos_spec.js index 66e4fbd6304..cd74aba4a4e 100644 --- a/spec/javascripts/todos_spec.js +++ b/spec/javascripts/todos_spec.js @@ -1,5 +1,5 @@ -require('~/todos'); -require('~/lib/utils/common_utils'); +import '~/todos'; +import '~/lib/utils/common_utils'; describe('Todos', () => { preloadFixtures('todos/todos.html.raw'); diff --git a/spec/javascripts/u2f/authenticate_spec.js b/spec/javascripts/u2f/authenticate_spec.js index af2d02b6b29..a160c86308d 100644 --- a/spec/javascripts/u2f/authenticate_spec.js +++ b/spec/javascripts/u2f/authenticate_spec.js @@ -2,11 +2,11 @@ /* global MockU2FDevice */ /* global U2FAuthenticate */ -require('~/u2f/authenticate'); -require('~/u2f/util'); -require('~/u2f/error'); -require('vendor/u2f'); -require('./mock_u2f_device'); +import '~/u2f/authenticate'; +import '~/u2f/util'; +import '~/u2f/error'; +import 'vendor/u2f'; +import './mock_u2f_device'; (function() { describe('U2FAuthenticate', function() { diff --git a/spec/javascripts/u2f/mock_u2f_device.js b/spec/javascripts/u2f/mock_u2f_device.js index 6677fe9c1ee..4eb8ad3d9e4 100644 --- a/spec/javascripts/u2f/mock_u2f_device.js +++ b/spec/javascripts/u2f/mock_u2f_device.js @@ -1,12 +1,10 @@ /* eslint-disable space-before-function-paren, no-var, prefer-rest-params, wrap-iife, no-unused-expressions, no-return-assign, no-param-reassign, max-len */ (function() { - var bind = function(fn, me) { return function() { return fn.apply(me, arguments); }; }; - this.MockU2FDevice = (function() { function MockU2FDevice() { - this.respondToAuthenticateRequest = bind(this.respondToAuthenticateRequest, this); - this.respondToRegisterRequest = bind(this.respondToRegisterRequest, this); + this.respondToAuthenticateRequest = this.respondToAuthenticateRequest.bind(this); + this.respondToRegisterRequest = this.respondToRegisterRequest.bind(this); window.u2f || (window.u2f = {}); window.u2f.register = (function(_this) { return function(appId, registerRequests, signRequests, callback) { diff --git a/spec/javascripts/u2f/register_spec.js b/spec/javascripts/u2f/register_spec.js index 0f390c8b980..a445c80f2af 
100644 --- a/spec/javascripts/u2f/register_spec.js +++ b/spec/javascripts/u2f/register_spec.js @@ -2,11 +2,11 @@ /* global MockU2FDevice */ /* global U2FRegister */ -require('~/u2f/register'); -require('~/u2f/util'); -require('~/u2f/error'); -require('vendor/u2f'); -require('./mock_u2f_device'); +import '~/u2f/register'; +import '~/u2f/util'; +import '~/u2f/error'; +import 'vendor/u2f'; +import './mock_u2f_device'; (function() { describe('U2FRegister', function() { @@ -22,7 +22,7 @@ require('./mock_u2f_device'); it('allows registering a U2F device', function() { var deviceResponse, inProgressMessage, registeredMessage, setupButton; setupButton = this.container.find("#js-setup-u2f-device"); - expect(setupButton.text()).toBe('Setup New U2F Device'); + expect(setupButton.text()).toBe('Setup new U2F device'); setupButton.trigger('click'); inProgressMessage = this.container.children("p"); expect(inProgressMessage.text()).toContain("Trying to communicate with your device"); diff --git a/spec/javascripts/user_callout_spec.js b/spec/javascripts/user_callout_spec.js index c0375ebc61c..28d0c7dcd99 100644 --- a/spec/javascripts/user_callout_spec.js +++ b/spec/javascripts/user_callout_spec.js @@ -14,7 +14,6 @@ describe('UserCallout', function () { this.userCallout = new UserCallout(); this.closeButton = $('.js-close-callout.close'); this.userCalloutBtn = $('.js-close-callout:not(.close)'); - this.userCalloutContainer = $('.user-callout'); }); it('hides when user clicks on the dismiss-icon', (done) => { diff --git a/spec/javascripts/version_check_image_spec.js b/spec/javascripts/version_check_image_spec.js index 464c1fce210..9637bd0414a 100644 --- a/spec/javascripts/version_check_image_spec.js +++ b/spec/javascripts/version_check_image_spec.js @@ -1,9 +1,8 @@ -const ClassSpecHelper = require('./helpers/class_spec_helper'); -const VersionCheckImage = require('~/version_check_image'); -require('jquery'); +import VersionCheckImage from '~/version_check_image'; +import ClassSpecHelper from './helpers/class_spec_helper'; describe('VersionCheckImage', function () { - describe('.bindErrorEvent', function () { + describe('bindErrorEvent', function () { ClassSpecHelper.itShouldBeAStaticMethod(VersionCheckImage, 'bindErrorEvent'); beforeEach(function () { diff --git a/spec/javascripts/visibility_select_spec.js b/spec/javascripts/visibility_select_spec.js index 9727c03c91e..c2eaea7c2ed 100644 --- a/spec/javascripts/visibility_select_spec.js +++ b/spec/javascripts/visibility_select_spec.js @@ -1,4 +1,4 @@ -require('~/visibility_select'); +import '~/visibility_select'; (() => { const VisibilitySelect = gl.VisibilitySelect; @@ -22,7 +22,7 @@ require('~/visibility_select'); spyOn(Element.prototype, 'querySelector').and.callFake(selector => mockElements[selector]); }); - describe('#constructor', function () { + describe('constructor', function () { beforeEach(function () { this.visibilitySelect = new VisibilitySelect(mockElements.container); }); @@ -48,7 +48,7 @@ require('~/visibility_select'); }); }); - describe('#init', function () { + describe('init', function () { describe('if there is a select', function () { beforeEach(function () { this.visibilitySelect = new VisibilitySelect(mockElements.container); @@ -85,7 +85,7 @@ require('~/visibility_select'); }); }); - describe('#updateHelpText', function () { + describe('updateHelpText', function () { beforeEach(function () { this.visibilitySelect = new VisibilitySelect(mockElements.container); this.visibilitySelect.init(); diff --git 
a/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js new file mode 100644 index 00000000000..a750bc78f36 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js @@ -0,0 +1,39 @@ +import Vue from 'vue'; +import authorComponent from '~/vue_merge_request_widget/components/mr_widget_author'; + +const author = { + webUrl: 'http://foo.bar', + avatarUrl: 'http://gravatar.com/foo', + name: 'fatihacet', +}; +const createComponent = () => { + const Component = Vue.extend(authorComponent); + + return new Component({ + el: document.createElement('div'), + propsData: { author }, + }); +}; + +describe('MRWidgetAuthor', () => { + describe('props', () => { + it('should have props', () => { + const authorProp = authorComponent.props.author; + + expect(authorProp).toBeDefined(); + expect(authorProp.type instanceof Object).toBeTruthy(); + expect(authorProp.required).toBeTruthy(); + }); + }); + + describe('template', () => { + it('should have correct elements', () => { + const el = createComponent().$el; + + expect(el.tagName).toEqual('A'); + expect(el.getAttribute('href')).toEqual(author.webUrl); + expect(el.querySelector('img').getAttribute('src')).toEqual(author.avatarUrl); + expect(el.querySelector('.author').innerText.trim()).toEqual(author.name); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js new file mode 100644 index 00000000000..515ddcbb875 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js @@ -0,0 +1,61 @@ +import Vue from 'vue'; +import authorTimeComponent from '~/vue_merge_request_widget/components/mr_widget_author_time'; + +const props = { + actionText: 'Merged by', + author: { + webUrl: 'http://foo.bar', + avatarUrl: 'http://gravatar.com/foo', + name: 'fatihacet', + }, + dateTitle: '2017-03-23T23:02:00.807Z', + dateReadable: '12 hours ago', +}; +const createComponent = () => { + const Component = Vue.extend(authorTimeComponent); + + return new Component({ + el: document.createElement('div'), + propsData: props, + }); +}; + +describe('MRWidgetAuthorTime', () => { + describe('props', () => { + it('should have props', () => { + const { actionText, author, dateTitle, dateReadable } = authorTimeComponent.props; + const ActionTextClass = actionText.type; + const DateTitleClass = dateTitle.type; + const DateReadableClass = dateReadable.type; + + expect(new ActionTextClass() instanceof String).toBeTruthy(); + expect(actionText.required).toBeTruthy(); + + expect(author.type instanceof Object).toBeTruthy(); + expect(author.required).toBeTruthy(); + + expect(new DateTitleClass() instanceof String).toBeTruthy(); + expect(dateTitle.required).toBeTruthy(); + + expect(new DateReadableClass() instanceof String).toBeTruthy(); + expect(dateReadable.required).toBeTruthy(); + }); + }); + + describe('components', () => { + it('should have components', () => { + expect(authorTimeComponent.components['mr-widget-author']).toBeDefined(); + }); + }); + + describe('template', () => { + it('should have correct elements', () => { + const el = createComponent().$el; + + expect(el.tagName).toEqual('H4'); + expect(el.querySelector('a').getAttribute('href')).toEqual(props.author.webUrl); + expect(el.querySelector('time').innerText).toContain(props.dateReadable); + expect(el.querySelector('time').getAttribute('title')).toEqual(props.dateTitle); + 
}); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js new file mode 100644 index 00000000000..d4b200875df --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js @@ -0,0 +1,188 @@ +import Vue from 'vue'; +import deploymentComponent from '~/vue_merge_request_widget/components/mr_widget_deployment'; +import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service'; +import { statusIconEntityMap } from '~/vue_shared/ci_status_icons'; + +const deploymentMockData = [ + { + id: 15, + name: 'review/diplo', + url: '/root/acets-review-apps/environments/15', + stop_url: '/root/acets-review-apps/environments/15/stop', + metrics_url: '/root/acets-review-apps/environments/15/deployments/1/metrics', + external_url: 'http://diplo.', + external_url_formatted: 'diplo.', + deployed_at: '2017-03-22T22:44:42.258Z', + deployed_at_formatted: 'Mar 22, 2017 10:44pm', + }, +]; +const createComponent = () => { + const Component = Vue.extend(deploymentComponent); + const mr = { + deployments: deploymentMockData, + }; + const service = {}; + + return new Component({ + el: document.createElement('div'), + propsData: { mr, service }, + }); +}; + +describe('MRWidgetDeployment', () => { + describe('props', () => { + it('should have props', () => { + const { mr, service } = deploymentComponent.props; + + expect(mr.type instanceof Object).toBeTruthy(); + expect(mr.required).toBeTruthy(); + + expect(service.type instanceof Object).toBeTruthy(); + expect(service.required).toBeTruthy(); + }); + }); + + describe('computed', () => { + describe('svg', () => { + it('should have the proper SVG icon', () => { + const vm = createComponent(deploymentMockData); + expect(vm.svg).toEqual(statusIconEntityMap.icon_status_success); + }); + }); + }); + + describe('methods', () => { + let vm = createComponent(); + const deployment = deploymentMockData[0]; + + describe('formatDate', () => { + it('should work', () => { + const readable = gl.utils.getTimeago().format(deployment.deployed_at); + expect(vm.formatDate(deployment.deployed_at)).toEqual(readable); + }); + }); + + describe('hasExternalUrls', () => { + it('should return true', () => { + expect(vm.hasExternalUrls(deployment)).toBeTruthy(); + }); + + it('should return false when there is not enough information', () => { + expect(vm.hasExternalUrls()).toBeFalsy(); + expect(vm.hasExternalUrls({ external_url: 'Diplo' })).toBeFalsy(); + expect(vm.hasExternalUrls({ external_url_formatted: 'Diplo' })).toBeFalsy(); + }); + }); + + describe('hasDeploymentTime', () => { + it('should return true', () => { + expect(vm.hasDeploymentTime(deployment)).toBeTruthy(); + }); + + it('should return false when there is not enough information', () => { + expect(vm.hasDeploymentTime()).toBeFalsy(); + expect(vm.hasDeploymentTime({ deployed_at: 'Diplo' })).toBeFalsy(); + expect(vm.hasDeploymentTime({ deployed_at_formatted: 'Diplo' })).toBeFalsy(); + }); + }); + + describe('hasDeploymentMeta', () => { + it('should return true', () => { + expect(vm.hasDeploymentMeta(deployment)).toBeTruthy(); + }); + + it('should return false when there is not enough information', () => { + expect(vm.hasDeploymentMeta()).toBeFalsy(); + expect(vm.hasDeploymentMeta({ url: 'Diplo' })).toBeFalsy(); + expect(vm.hasDeploymentMeta({ name: 'Diplo' })).toBeFalsy(); + }); + }); + + describe('stopEnvironment', () => { + const url = '/foo/bar'; + const returnPromise = () => 
new Promise((resolve) => { + resolve({ + json() { + return { + redirect_url: url, + }; + }, + }); + }); + const mockStopEnvironment = () => { + vm.stopEnvironment(deploymentMockData); + return vm; + }; + + it('should show a confirm dialog and call service.stopEnvironment when confirmed', (done) => { + spyOn(window, 'confirm').and.returnValue(true); + spyOn(MRWidgetService, 'stopEnvironment').and.returnValue(returnPromise(true)); + spyOn(gl.utils, 'visitUrl').and.returnValue(true); + vm = mockStopEnvironment(); + + expect(window.confirm).toHaveBeenCalled(); + expect(MRWidgetService.stopEnvironment).toHaveBeenCalledWith(deploymentMockData.stop_url); + setTimeout(() => { + expect(gl.utils.visitUrl).toHaveBeenCalledWith(url); + done(); + }, 333); + }); + + it('should show a confirm dialog but should not work if the dialog is rejected', () => { + spyOn(window, 'confirm').and.returnValue(false); + spyOn(MRWidgetService, 'stopEnvironment').and.returnValue(returnPromise(false)); + vm = mockStopEnvironment(); + + expect(window.confirm).toHaveBeenCalled(); + expect(MRWidgetService.stopEnvironment).not.toHaveBeenCalled(); + }); + }); + }); + + describe('template', () => { + let vm; + let el; + const [deployment] = deploymentMockData; + + beforeEach(() => { + vm = createComponent(deploymentMockData); + el = vm.$el; + }); + + it('should render template elements correctly', () => { + expect(el.classList.contains('mr-widget-heading')).toBeTruthy(); + expect(el.querySelector('.js-icon-link')).toBeDefined(); + expect(el.querySelector('.js-deploy-meta').getAttribute('href')).toEqual(deployment.url); + expect(el.querySelector('.js-deploy-meta').innerText).toContain(deployment.name); + expect(el.querySelector('.js-deploy-url').getAttribute('href')).toEqual(deployment.external_url); + expect(el.querySelector('.js-deploy-url').innerText).toContain(deployment.external_url_formatted); + expect(el.querySelector('.js-deploy-time').innerText).toContain(vm.formatDate(deployment.deployed_at)); + expect(el.querySelector('.js-mr-memory-usage')).toBeDefined(); + expect(el.querySelector('button')).toBeDefined(); + }); + + it('should list multiple deployments', (done) => { + vm.mr.deployments.push(deployment); + vm.mr.deployments.push(deployment); + + Vue.nextTick(() => { + expect(el.querySelectorAll('.ci-widget').length).toEqual(3); + expect(el.querySelectorAll('.js-mr-memory-usage').length).toEqual(3); + done(); + }); + }); + + it('should not have some elements when there is not enough data', (done) => { + vm.mr.deployments = [{}]; + + Vue.nextTick(() => { + expect(el.querySelectorAll('.js-deploy-meta').length).toEqual(0); + expect(el.querySelectorAll('.js-deploy-url').length).toEqual(0); + expect(el.querySelectorAll('.js-deploy-time').length).toEqual(0); + expect(el.querySelectorAll('.js-mr-memory-usage').length).toEqual(0); + expect(el.querySelectorAll('.button').length).toEqual(0); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js new file mode 100644 index 00000000000..7f3eea7d2e5 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js @@ -0,0 +1,102 @@ +import Vue from 'vue'; +import headerComponent from '~/vue_merge_request_widget/components/mr_widget_header'; + +const createComponent = (mr) => { + const Component = Vue.extend(headerComponent); + return new Component({ + el: document.createElement('div'), + propsData: { mr }, + }); +}; + 
+describe('MRWidgetHeader', () => { + describe('props', () => { + it('should have props', () => { + const { mr } = headerComponent.props; + + expect(mr.type instanceof Object).toBeTruthy(); + expect(mr.required).toBeTruthy(); + }); + }); + + describe('computed', () => { + let vm; + beforeEach(() => { + vm = createComponent({ + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '/foo/bar/mr-widget-refactor', + targetBranch: 'master', + }); + }); + + it('shouldShowCommitsBehindText', () => { + expect(vm.shouldShowCommitsBehindText).toBeTruthy(); + + vm.mr.divergedCommitsCount = 0; + expect(vm.shouldShowCommitsBehindText).toBeFalsy(); + }); + + it('commitsText', () => { + expect(vm.commitsText).toEqual('commits'); + + vm.mr.divergedCommitsCount = 1; + expect(vm.commitsText).toEqual('commit'); + }); + }); + + describe('template', () => { + let vm; + let el; + const sourceBranchPath = '/foo/bar/mr-widget-refactor'; + const mr = { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: `<a href="${sourceBranchPath}">mr-widget-refactor</a>`, + targetBranchPath: 'foo/bar/commits-path', + targetBranch: 'master', + isOpen: true, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + }; + + beforeEach(() => { + vm = createComponent(mr); + el = vm.$el; + }); + + it('should render template elements correctly', () => { + expect(el.classList.contains('mr-source-target')).toBeTruthy(); + const sourceBranchLink = el.querySelectorAll('.label-branch')[0]; + const targetBranchLink = el.querySelectorAll('.label-branch')[1]; + + expect(sourceBranchLink.textContent).toContain(mr.sourceBranch); + expect(targetBranchLink.textContent).toContain(mr.targetBranch); + expect(sourceBranchLink.querySelector('a').getAttribute('href')).toEqual(sourceBranchPath); + expect(targetBranchLink.querySelector('a').getAttribute('href')).toEqual(mr.targetBranchPath); + expect(el.querySelector('.diverged-commits-count').textContent).toContain('12 commits behind'); + + expect(el.textContent).toContain('Check out branch'); + expect(el.querySelectorAll('.dropdown li a')[0].getAttribute('href')).toEqual(mr.emailPatchesPath); + expect(el.querySelectorAll('.dropdown li a')[1].getAttribute('href')).toEqual(mr.plainDiffPath); + }); + + it('should not have right action links if the MR state is not open', (done) => { + vm.mr.isOpen = false; + Vue.nextTick(() => { + expect(el.textContent).not.toContain('Check out branch'); + expect(el.querySelectorAll('.dropdown li a').length).toEqual(0); + done(); + }); + }); + + it('should not render diverged commits count if the MR has no diverged commits', (done) => { + vm.mr.divergedCommitsCount = null; + Vue.nextTick(() => { + expect(el.textContent).not.toContain('commits behind'); + expect(el.querySelectorAll('.diverged-commits-count').length).toEqual(0); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js new file mode 100644 index 00000000000..da9dff18ada --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js @@ -0,0 +1,184 @@ +import Vue from 'vue'; +import memoryUsageComponent from '~/vue_merge_request_widget/components/mr_widget_memory_usage'; +import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service'; + +const url = '/root/acets-review-apps/environments/15/deployments/1/metrics'; + +const metricsMockData = { 
+ success: true, + metrics: { + memory_values: [ + { + metric: {}, + values: [ + [1493716685, '4.30859375'], + ], + }, + ], + }, + last_update: '2017-05-02T12:34:49.628Z', + deployment_time: 1493718485, +}; + +const createComponent = () => { + const Component = Vue.extend(memoryUsageComponent); + + return new Component({ + el: document.createElement('div'), + propsData: { + metricsUrl: url, + memoryMetrics: [], + deploymentTime: 0, + hasMetrics: false, + loadFailed: false, + loadingMetrics: true, + backOffRequestCounter: 0, + }, + }); +}; + +const messages = { + loadingMetrics: 'Loading deployment statistics.', + hasMetrics: 'Deployment memory usage:', + loadFailed: 'Failed to load deployment statistics.', + metricsUnavailable: 'Deployment statistics are not available currently.', +}; + +describe('MemoryUsage', () => { + let vm; + let el; + + beforeEach(() => { + vm = createComponent(); + el = vm.$el; + }); + + describe('props', () => { + it('should have props with defaults', () => { + const { metricsUrl } = memoryUsageComponent.props; + const MetricsUrlTypeClass = metricsUrl.type; + + Vue.nextTick(() => { + expect(new MetricsUrlTypeClass() instanceof String).toBeTruthy(); + expect(metricsUrl.required).toBeTruthy(); + }); + }); + }); + + describe('data', () => { + it('should have default data', () => { + const data = memoryUsageComponent.data(); + + expect(Array.isArray(data.memoryMetrics)).toBeTruthy(); + expect(data.memoryMetrics.length).toBe(0); + + expect(typeof data.deploymentTime).toBe('number'); + expect(data.deploymentTime).toBe(0); + + expect(typeof data.hasMetrics).toBe('boolean'); + expect(data.hasMetrics).toBeFalsy(); + + expect(typeof data.loadFailed).toBe('boolean'); + expect(data.loadFailed).toBeFalsy(); + + expect(typeof data.loadingMetrics).toBe('boolean'); + expect(data.loadingMetrics).toBeTruthy(); + + expect(typeof data.backOffRequestCounter).toBe('number'); + expect(data.backOffRequestCounter).toBe(0); + }); + }); + + describe('methods', () => { + const { metrics, deployment_time } = metricsMockData; + + describe('computeGraphData', () => { + it('should populate sparkline graph', () => { + vm.computeGraphData(metrics, deployment_time); + const { hasMetrics, memoryMetrics, deploymentTime } = vm; + + expect(hasMetrics).toBeTruthy(); + expect(memoryMetrics.length > 0).toBeTruthy(); + expect(deploymentTime).toEqual(deployment_time); + }); + }); + + describe('loadMetrics', () => { + const returnServicePromise = () => new Promise((resolve) => { + resolve({ + json() { + return metricsMockData; + }, + }); + }); + + it('should load metrics data using MRWidgetService', (done) => { + spyOn(MRWidgetService, 'fetchMetrics').and.returnValue(returnServicePromise(true)); + spyOn(vm, 'computeGraphData'); + + vm.loadMetrics(); + setTimeout(() => { + expect(MRWidgetService.fetchMetrics).toHaveBeenCalledWith(url); + expect(vm.computeGraphData).toHaveBeenCalledWith(metrics, deployment_time); + done(); + }, 333); + }); + }); + }); + + describe('template', () => { + it('should render template elements correctly', () => { + expect(el.classList.contains('mr-memory-usage')).toBeTruthy(); + expect(el.querySelector('.js-usage-info')).toBeDefined(); + }); + + it('should show loading metrics message while metrics are being loaded', (done) => { + vm.loadingMetrics = true; + vm.hasMetrics = false; + vm.loadFailed = false; + + Vue.nextTick(() => { + expect(el.querySelector('.js-usage-info.usage-info-loading')).toBeDefined(); + expect(el.querySelector('.js-usage-info 
.usage-info-load-spinner')).toBeDefined(); + expect(el.querySelector('.js-usage-info').innerText).toContain(messages.loadingMetrics); + done(); + }); + }); + + it('should show deployment memory usage when metrics are loaded', (done) => { + vm.loadingMetrics = false; + vm.hasMetrics = true; + vm.loadFailed = false; + + Vue.nextTick(() => { + expect(el.querySelector('.memory-graph-container')).toBeDefined(); + expect(el.querySelector('.js-usage-info').innerText).toContain(messages.hasMetrics); + done(); + }); + }); + + it('should show failure message when metrics loading failed', (done) => { + vm.loadingMetrics = false; + vm.hasMetrics = false; + vm.loadFailed = true; + + Vue.nextTick(() => { + expect(el.querySelector('.js-usage-info.usage-info-failed')).toBeDefined(); + expect(el.querySelector('.js-usage-info').innerText).toContain(messages.loadFailed); + done(); + }); + }); + + it('should show metrics unavailable message when metrics loading failed', (done) => { + vm.loadingMetrics = false; + vm.hasMetrics = false; + vm.loadFailed = false; + + Vue.nextTick(() => { + expect(el.querySelector('.js-usage-info.usage-info-unavailable')).toBeDefined(); + expect(el.querySelector('.js-usage-info').innerText).toContain(messages.metricsUnavailable); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js new file mode 100644 index 00000000000..4da4fc82c26 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js @@ -0,0 +1,51 @@ +import Vue from 'vue'; +import mergeHelpComponent from '~/vue_merge_request_widget/components/mr_widget_merge_help'; + +const props = { + missingBranch: 'this-is-not-the-branch-you-are-looking-for', +}; +const text = `If the ${props.missingBranch} branch exists in your local repository`; + +const createComponent = () => { + const Component = Vue.extend(mergeHelpComponent); + return new Component({ + el: document.createElement('div'), + propsData: props, + }); +}; + +describe('MRWidgetMergeHelp', () => { + describe('props', () => { + it('should have props', () => { + const { missingBranch } = mergeHelpComponent.props; + const MissingBranchTypeClass = missingBranch.type; + + expect(new MissingBranchTypeClass() instanceof String).toBeTruthy(); + expect(missingBranch.required).toBeFalsy(); + expect(missingBranch.default).toEqual(''); + }); + }); + + describe('template', () => { + let vm; + let el; + + beforeEach(() => { + vm = createComponent(); + el = vm.$el; + }); + + it('should have the correct elements', () => { + expect(el.classList.contains('mr-widget-help')).toBeTruthy(); + expect(el.textContent).toContain(text); + }); + + it('should not show missing branch name if missingBranch props is not provided', (done) => { + vm.missingBranch = null; + Vue.nextTick(() => { + expect(el.textContent).not.toContain(text); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js new file mode 100644 index 00000000000..647b59520f8 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js @@ -0,0 +1,131 @@ +import Vue from 'vue'; +import { statusIconEntityMap } from '~/vue_shared/ci_status_icons'; +import pipelineComponent from '~/vue_merge_request_widget/components/mr_widget_pipeline'; +import mockData from '../mock_data'; + +const createComponent = (mr) => { + const 
Component = Vue.extend(pipelineComponent); + return new Component({ + el: document.createElement('div'), + propsData: { mr }, + }); +}; + +describe('MRWidgetPipeline', () => { + describe('props', () => { + it('should have props', () => { + const { mr } = pipelineComponent.props; + + expect(mr.type instanceof Object).toBeTruthy(); + expect(mr.required).toBeTruthy(); + }); + }); + + describe('components', () => { + it('should have components added', () => { + expect(pipelineComponent.components['pipeline-stage']).toBeDefined(); + expect(pipelineComponent.components.ciIcon).toBeDefined(); + }); + }); + + describe('computed', () => { + describe('svg', () => { + it('should have the proper SVG icon', () => { + const vm = createComponent({ pipeline: mockData.pipeline }); + + expect(vm.svg).toEqual(statusIconEntityMap.icon_status_failed); + }); + }); + + describe('hasCIError', () => { + it('should return false when there is no CI error', () => { + const vm = createComponent({ + pipeline: mockData.pipeline, + hasCI: true, + ciStatus: 'success', + }); + + expect(vm.hasCIError).toBeFalsy(); + }); + + it('should return true when there is a CI error', () => { + const vm = createComponent({ + pipeline: mockData.pipeline, + hasCI: true, + ciStatus: null, + }); + + expect(vm.hasCIError).toBeTruthy(); + }); + }); + }); + + describe('template', () => { + let vm; + let el; + const { pipeline } = mockData; + const mr = { + hasCI: true, + ciStatus: 'success', + pipelineDetailedStatus: pipeline.details.status, + pipeline, + }; + + beforeEach(() => { + vm = createComponent(mr); + el = vm.$el; + }); + + it('should render template elements correctly', () => { + expect(el.classList.contains('mr-widget-heading')).toBeTruthy(); + expect(el.querySelectorAll('.ci-status-icon.ci-status-icon-success').length).toEqual(1); + expect(el.querySelector('.pipeline-id').textContent).toContain(`#${pipeline.id}`); + expect(el.innerText).toContain('passed'); + expect(el.innerText).toContain('with stages'); + expect(el.querySelector('.pipeline-id').getAttribute('href')).toEqual(pipeline.path); + expect(el.querySelectorAll('.stage-container').length).toEqual(2); + expect(el.querySelector('.js-ci-error')).toEqual(null); + expect(el.querySelector('.js-commit-link').getAttribute('href')).toEqual(pipeline.commit.commit_path); + expect(el.querySelector('.js-commit-link').textContent).toContain(pipeline.commit.short_id); + expect(el.querySelector('.js-mr-coverage').textContent).toContain(`Coverage ${pipeline.coverage}%.`); + }); + + it('should list single stage', (done) => { + pipeline.details.stages.splice(0, 1); + + Vue.nextTick(() => { + expect(el.querySelectorAll('.stage-container button').length).toEqual(1); + expect(el.innerText).toContain('with stage'); + done(); + }); + }); + + it('should not have stages when there is no stage', (done) => { + vm.mr.pipeline.details.stages = []; + + Vue.nextTick(() => { + expect(el.querySelectorAll('.stage-container button').length).toEqual(0); + done(); + }); + }); + + it('should not have coverage text when pipeline has no coverage info', (done) => { + vm.mr.pipeline.coverage = null; + + Vue.nextTick(() => { + expect(el.querySelector('.js-mr-coverage')).toEqual(null); + done(); + }); + }); + + it('should show CI error when there is a CI error', (done) => { + vm.mr.ciStatus = null; + + Vue.nextTick(() => { + expect(el.querySelectorAll('.js-ci-error').length).toEqual(1); + expect(el.innerText).toContain('Could not connect to the CI server'); + done(); + }); + }); + }); +}); diff --git 
a/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js new file mode 100644 index 00000000000..f6e0c3dfb74 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js @@ -0,0 +1,138 @@ +import Vue from 'vue'; +import relatedLinksComponent from '~/vue_merge_request_widget/components/mr_widget_related_links'; + +const createComponent = (data) => { + const Component = Vue.extend(relatedLinksComponent); + + return new Component({ + el: document.createElement('div'), + propsData: data, + }); +}; + +describe('MRWidgetRelatedLinks', () => { + describe('props', () => { + it('should have props', () => { + const { relatedLinks } = relatedLinksComponent.props; + + expect(relatedLinks).toBeDefined(); + expect(relatedLinks.type instanceof Object).toBeTruthy(); + expect(relatedLinks.required).toBeTruthy(); + }); + }); + + describe('computed', () => { + describe('hasLinks', () => { + it('should return correct value when we have links reference', () => { + const data = { + relatedLinks: { + closing: '/foo', + mentioned: '/foo', + assignToMe: '/foo', + }, + }; + const vm = createComponent(data); + expect(vm.hasLinks).toBeTruthy(); + + vm.relatedLinks.closing = null; + expect(vm.hasLinks).toBeTruthy(); + + vm.relatedLinks.mentioned = null; + expect(vm.hasLinks).toBeTruthy(); + + vm.relatedLinks.assignToMe = null; + expect(vm.hasLinks).toBeFalsy(); + }); + }); + }); + + describe('methods', () => { + const data = { + relatedLinks: { + closing: '<a href="#">#23</a> and <a>#42</a>', + mentioned: '<a href="#">#7</a>', + }, + }; + const vm = createComponent(data); + + describe('hasMultipleIssues', () => { + it('should return true if the given text has multiple issues', () => { + expect(vm.hasMultipleIssues(data.relatedLinks.closing)).toBeTruthy(); + }); + + it('should return false if the given text has one issue', () => { + expect(vm.hasMultipleIssues(data.relatedLinks.mentioned)).toBeFalsy(); + }); + }); + + describe('issueLabel', () => { + it('should return true if the given text has multiple issues', () => { + expect(vm.issueLabel('closing')).toEqual('issues'); + }); + + it('should return false if the given text has one issue', () => { + expect(vm.issueLabel('mentioned')).toEqual('issue'); + }); + }); + + describe('verbLabel', () => { + it('should return true if the given text has multiple issues', () => { + expect(vm.verbLabel('closing')).toEqual('are'); + }); + + it('should return false if the given text has one issue', () => { + expect(vm.verbLabel('mentioned')).toEqual('is'); + }); + }); + }); + + describe('template', () => { + it('should have only have closing issues text', () => { + const vm = createComponent({ + relatedLinks: { + closing: '<a href="#">#23</a> and <a>#42</a>', + }, + }); + const content = vm.$el.textContent.replace(/\n(\s)+/g, ' ').trim(); + + expect(content).toContain('Closes issues #23 and #42'); + expect(content).not.toContain('mentioned'); + }); + + it('should have only have mentioned issues text', () => { + const vm = createComponent({ + relatedLinks: { + mentioned: '<a href="#">#7</a>', + }, + }); + + expect(vm.$el.innerText).toContain('issue #7'); + expect(vm.$el.innerText).toContain('is mentioned but will not be closed.'); + expect(vm.$el.innerText).not.toContain('Closes'); + }); + + it('should have closing and mentioned issues at the same time', () => { + const vm = createComponent({ + relatedLinks: { + closing: '<a href="#">#7</a>', + mentioned: '<a 
href="#">#23</a> and <a>#42</a>', + }, + }); + const content = vm.$el.textContent.replace(/\n(\s)+/g, ' ').trim(); + + expect(content).toContain('Closes issue #7.'); + expect(content).toContain('issues #23 and #42'); + expect(content).toContain('are mentioned but will not be closed.'); + }); + + it('should have assing issues link', () => { + const vm = createComponent({ + relatedLinks: { + assignToMe: '<a href="#">Assign yourself to these issues</a>', + }, + }); + + expect(vm.$el.innerText).toContain('Assign yourself to these issues'); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_archived_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_archived_spec.js new file mode 100644 index 00000000000..cac2f561a0b --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_archived_spec.js @@ -0,0 +1,18 @@ +import Vue from 'vue'; +import archivedComponent from '~/vue_merge_request_widget/components/states/mr_widget_archived'; + +describe('MRWidgetArchived', () => { + describe('template', () => { + it('should have correct elements', () => { + const Component = Vue.extend(archivedComponent); + const el = new Component({ + el: document.createElement('div'), + }).$el; + + expect(el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(el.querySelector('button').classList.contains('btn-success')).toBeTruthy(); + expect(el.querySelector('button').disabled).toBeTruthy(); + expect(el.innerText).toContain('This project is archived, write access has been disabled.'); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js new file mode 100644 index 00000000000..47b4ba893e0 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js @@ -0,0 +1,32 @@ +import Vue from 'vue'; +import autoMergeFailedComponent from '~/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed'; + +const mergeError = 'This is the merge error'; + +describe('MRWidgetAutoMergeFailed', () => { + describe('props', () => { + it('should have props', () => { + const mrProp = autoMergeFailedComponent.props.mr; + + expect(mrProp.type instanceof Object).toBeTruthy(); + expect(mrProp.required).toBeTruthy(); + }); + }); + + describe('template', () => { + const Component = Vue.extend(autoMergeFailedComponent); + const vm = new Component({ + el: document.createElement('div'), + propsData: { + mr: { mergeError }, + }, + }); + + it('should have correct elements', () => { + expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy(); + expect(vm.$el.innerText).toContain('This merge request failed to be merged automatically.'); + expect(vm.$el.innerText).toContain(mergeError); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js new file mode 100644 index 00000000000..3be11d47227 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js @@ -0,0 +1,19 @@ +import Vue from 'vue'; +import checkingComponent from '~/vue_merge_request_widget/components/states/mr_widget_checking'; + +describe('MRWidgetChecking', () => { + describe('template', () => { + it('should have correct elements', () => { + const Component = 
Vue.extend(checkingComponent); + const el = new Component({ + el: document.createElement('div'), + }).$el; + + expect(el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(el.querySelector('button').classList.contains('btn-success')).toBeTruthy(); + expect(el.querySelector('button').disabled).toBeTruthy(); + expect(el.innerText).toContain('Checking ability to merge automatically.'); + expect(el.querySelector('i')).toBeDefined(); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js new file mode 100644 index 00000000000..47303d1e80f --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js @@ -0,0 +1,51 @@ +import Vue from 'vue'; +import closedComponent from '~/vue_merge_request_widget/components/states/mr_widget_closed'; + +const mr = { + targetBranch: 'good-branch', + targetBranchPath: '/good-branch', + closedBy: { + name: 'Fatih Acet', + username: 'fatihacet', + }, + updatedAt: '2017-03-23T20:08:08.845Z', + closedAt: '1 day ago', +}; + +const createComponent = () => { + const Component = Vue.extend(closedComponent); + + return new Component({ + el: document.createElement('div'), + propsData: { mr }, + }).$el; +}; + +describe('MRWidgetClosed', () => { + describe('props', () => { + it('should have props', () => { + const mrProp = closedComponent.props.mr; + + expect(mrProp.type instanceof Object).toBeTruthy(); + expect(mrProp.required).toBeTruthy(); + }); + }); + + describe('components', () => { + it('should have components added', () => { + expect(closedComponent.components['mr-widget-author-and-time']).toBeDefined(); + }); + }); + + describe('template', () => { + it('should have correct elements', () => { + const el = createComponent(); + + expect(el.querySelector('h4').textContent).toContain('Closed by'); + expect(el.querySelector('h4').textContent).toContain(mr.closedBy.name); + expect(el.textContent).toContain('The changes were not merged into'); + expect(el.querySelector('.label-branch').getAttribute('href')).toEqual(mr.targetBranchPath); + expect(el.querySelector('.label-branch').textContent).toContain(mr.targetBranch); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js new file mode 100644 index 00000000000..e7ae85caec4 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js @@ -0,0 +1,69 @@ +import Vue from 'vue'; +import conflictsComponent from '~/vue_merge_request_widget/components/states/mr_widget_conflicts'; + +const path = '/conflicts'; +const createComponent = () => { + const Component = Vue.extend(conflictsComponent); + + return new Component({ + el: document.createElement('div'), + propsData: { + mr: { + canMerge: true, + conflictResolutionPath: path, + }, + }, + }); +}; + +describe('MRWidgetConflicts', () => { + describe('props', () => { + it('should have props', () => { + const { mr } = conflictsComponent.props; + + expect(mr.type instanceof Object).toBeTruthy(); + expect(mr.required).toBeTruthy(); + }); + }); + + describe('template', () => { + it('should have correct elements', () => { + const el = createComponent().$el; + const resolveButton = el.querySelectorAll('.btn-group .btn')[0]; + const mergeLocallyButton = el.querySelectorAll('.btn-group .btn')[1]; + + expect(el.textContent).toContain('There are merge conflicts.'); + 
expect(el.textContent).not.toContain('ask someone with write access'); + expect(el.querySelector('.btn-success').disabled).toBeTruthy(); + expect(el.querySelectorAll('.btn-group .btn').length).toBe(2); + expect(resolveButton.textContent).toContain('Resolve conflicts'); + expect(resolveButton.getAttribute('href')).toEqual(path); + expect(mergeLocallyButton.textContent).toContain('Merge locally'); + }); + + describe('when user does not have permission to merge', () => { + let vm; + + beforeEach(() => { + vm = createComponent(); + vm.mr.canMerge = false; + }); + + it('should show proper message', (done) => { + Vue.nextTick(() => { + expect(vm.$el.textContent).toContain('ask someone with write access'); + done(); + }); + }); + + it('should not have action buttons', (done) => { + Vue.nextTick(() => { + expect(vm.$el.querySelectorAll('.btn').length).toBe(1); + expect(vm.$el.querySelector('a.js-resolve-conflicts-button')).toEqual(null); + expect(vm.$el.querySelector('a.js-merge-locally-button')).toEqual(null); + done(); + }); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js new file mode 100644 index 00000000000..587b83430d9 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js @@ -0,0 +1,122 @@ +import Vue from 'vue'; +import failedToMergeComponent from '~/vue_merge_request_widget/components/states/mr_widget_failed_to_merge'; +import eventHub from '~/vue_merge_request_widget/event_hub'; + +const mr = { + mergeError: 'Merge error happened.', +}; +const createComponent = () => { + const Component = Vue.extend(failedToMergeComponent); + return new Component({ + el: document.createElement('div'), + propsData: { mr }, + }); +}; + +describe('MRWidgetFailedToMerge', () => { + describe('data', () => { + it('should have default data', () => { + const data = failedToMergeComponent.data(); + + expect(data.timer).toEqual(10); + expect(data.isRefreshing).toBeFalsy(); + }); + }); + + describe('computed', () => { + describe('timerText', () => { + it('should return correct timer text', () => { + const vm = createComponent(); + expect(vm.timerText).toEqual('10 seconds'); + + vm.timer = 1; + expect(vm.timerText).toEqual('a second'); + }); + }); + }); + + describe('created', () => { + it('should disable polling', () => { + spyOn(eventHub, '$emit'); + createComponent(); + + expect(eventHub.$emit).toHaveBeenCalledWith('DisablePolling'); + }); + }); + + describe('methods', () => { + describe('refresh', () => { + it('should emit event to request component refresh', () => { + spyOn(eventHub, '$emit'); + const vm = createComponent(); + + expect(vm.isRefreshing).toBeFalsy(); + + vm.refresh(); + expect(vm.isRefreshing).toBeTruthy(); + expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested'); + expect(eventHub.$emit).toHaveBeenCalledWith('EnablePolling'); + }); + }); + + describe('updateTimer', () => { + it('should update timer and emit event when timer end', () => { + const vm = createComponent(); + spyOn(vm, 'refresh'); + + expect(vm.timer).toEqual(10); + + for (let i = 0; i < 10; i++) { // eslint-disable-line + expect(vm.timer).toEqual(10 - i); + vm.updateTimer(); + } + + expect(vm.refresh).toHaveBeenCalled(); + }); + }); + }); + + describe('template', () => { + let vm; + let el; + + beforeEach(() => { + vm = createComponent(); + el = vm.$el; + }); + + it('should have correct elements', (done) => { + 
expect(el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(el.innerText).toContain('Merge error happened.'); + expect(el.innerText).toContain('Refreshing in 10 seconds'); + expect(el.innerText).not.toContain('Merge failed.'); + expect(el.querySelector('button').getAttribute('disabled')).toBeTruthy(); + expect(el.querySelector('button').innerText).toContain('Merge'); + expect(el.querySelector('.js-refresh-button').innerText).toContain('Refresh now'); + expect(el.querySelector('.js-refresh-label')).toEqual(null); + expect(el.innerText).not.toContain('Refreshing now...'); + setTimeout(() => { + expect(el.innerText).toContain('Refreshing in 9 seconds'); + done(); + }, 1010); + }); + + it('should just generic merge failed message if merge_error is not available', (done) => { + vm.mr.mergeError = null; + + Vue.nextTick(() => { + expect(el.innerText).toContain('Merge failed.'); + expect(el.innerText).not.toContain('Merge error happened.'); + done(); + }); + }); + + it('should show refresh label when refresh requested', () => { + vm.refresh(); + Vue.nextTick(() => { + expect(el.innerText).not.toContain('Merge failed. Refreshing'); + expect(el.innerText).toContain('Refreshing now...'); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_locked_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_locked_spec.js new file mode 100644 index 00000000000..fb2ef606604 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_locked_spec.js @@ -0,0 +1,33 @@ +import Vue from 'vue'; +import lockedComponent from '~/vue_merge_request_widget/components/states/mr_widget_locked'; + +describe('MRWidgetLocked', () => { + describe('props', () => { + it('should have props', () => { + const { mr } = lockedComponent.props; + + expect(mr.type instanceof Object).toBeTruthy(); + expect(mr.required).toBeTruthy(); + }); + }); + + describe('template', () => { + it('should have correct elements', () => { + const Component = Vue.extend(lockedComponent); + const mr = { + targetBranchPath: '/branch-path', + targetBranch: 'branch', + }; + const el = new Component({ + el: document.createElement('div'), + propsData: { mr }, + }).$el; + + expect(el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(el.innerText).toContain('it is locked'); + expect(el.innerText).toContain('changes will be merged into'); + expect(el.querySelector('.label-branch a').getAttribute('href')).toEqual(mr.targetBranchPath); + expect(el.querySelector('.label-branch a').textContent).toContain(mr.targetBranch); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js new file mode 100644 index 00000000000..8d8b90cea16 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js @@ -0,0 +1,213 @@ +import Vue from 'vue'; +import mwpsComponent from '~/vue_merge_request_widget/components/states/mr_widget_merge_when_pipeline_succeeds'; +import eventHub from '~/vue_merge_request_widget/event_hub'; + +const targetBranchPath = '/foo/bar'; +const targetBranch = 'foo'; +const sha = '1EA2EZ34'; + +const createComponent = () => { + const Component = Vue.extend(mwpsComponent); + const mr = { + shouldRemoveSourceBranch: false, + canRemoveSourceBranch: true, + canCancelAutomaticMerge: true, + mergeUserId: 1, + currentUserId: 1, + setToMWPSBy: {}, + sha, + 
targetBranchPath, + targetBranch, + }; + + const service = { + cancelAutomaticMerge() {}, + mergeResource: { + save() {}, + }, + }; + + return new Component({ + el: document.createElement('div'), + propsData: { mr, service }, + }); +}; + +describe('MRWidgetMergeWhenPipelineSucceeds', () => { + describe('props', () => { + it('should have props', () => { + const { mr, service } = mwpsComponent.props; + + expect(mr.type instanceof Object).toBeTruthy(); + expect(mr.required).toBeTruthy(); + + expect(service.type instanceof Object).toBeTruthy(); + expect(service.required).toBeTruthy(); + }); + }); + + describe('components', () => { + it('should have components added', () => { + expect(mwpsComponent.components['mr-widget-author']).toBeDefined(); + }); + }); + + describe('data', () => { + it('should have default data', () => { + const data = mwpsComponent.data(); + + expect(data.isCancellingAutoMerge).toBeFalsy(); + expect(data.isRemovingSourceBranch).toBeFalsy(); + }); + }); + + describe('computed', () => { + describe('canRemoveSourceBranch', () => { + it('should return true when user is able to remove source branch', () => { + const vm = createComponent(); + + expect(vm.canRemoveSourceBranch).toBeTruthy(); + }); + + it('should return false when user id is not the same with who set the MWPS', () => { + const vm = createComponent(); + + vm.mr.mergeUserId = 2; + expect(vm.canRemoveSourceBranch).toBeFalsy(); + + vm.mr.currentUserId = 2; + expect(vm.canRemoveSourceBranch).toBeTruthy(); + + vm.mr.currentUserId = 3; + expect(vm.canRemoveSourceBranch).toBeFalsy(); + }); + + it('should return false when shouldRemoveSourceBranch set to false', () => { + const vm = createComponent(); + + vm.mr.shouldRemoveSourceBranch = true; + expect(vm.canRemoveSourceBranch).toBeFalsy(); + }); + + it('should return false if user is not able to remove the source branch', () => { + const vm = createComponent(); + + vm.mr.canRemoveSourceBranch = false; + expect(vm.canRemoveSourceBranch).toBeFalsy(); + }); + }); + }); + + describe('methods', () => { + describe('cancelAutomaticMerge', () => { + it('should set flag and call service then tell main component to update the widget with data', (done) => { + const vm = createComponent(); + const mrObj = { + is_new_mr_data: true, + }; + spyOn(eventHub, '$emit'); + spyOn(vm.service, 'cancelAutomaticMerge').and.returnValue(new Promise((resolve) => { + resolve({ + json() { + return mrObj; + }, + }); + })); + + vm.cancelAutomaticMerge(); + setTimeout(() => { + expect(vm.isCancellingAutoMerge).toBeTruthy(); + expect(eventHub.$emit).toHaveBeenCalledWith('UpdateWidgetData', mrObj); + done(); + }, 333); + }); + }); + + describe('removeSourceBranch', () => { + it('should set flag and call service then request main component to update the widget', (done) => { + const vm = createComponent(); + spyOn(eventHub, '$emit'); + spyOn(vm.service.mergeResource, 'save').and.returnValue(new Promise((resolve) => { + resolve({ + json() { + return { + status: 'merge_when_pipeline_succeeds', + }; + }, + }); + })); + + vm.removeSourceBranch(); + setTimeout(() => { + expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested'); + expect(vm.service.mergeResource.save).toHaveBeenCalledWith({ + sha, + merge_when_pipeline_succeeds: true, + should_remove_source_branch: true, + }); + done(); + }, 333); + }); + }); + }); + + describe('template', () => { + let vm; + let el; + + beforeEach(() => { + vm = createComponent(); + el = vm.$el; + }); + + it('should have correct elements', () => { + 
expect(el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(el.innerText).toContain('to be merged automatically when the pipeline succeeds.'); + expect(el.innerText).toContain('The changes will be merged into'); + expect(el.innerText).toContain(targetBranch); + expect(el.innerText).toContain('The source branch will not be removed.'); + expect(el.querySelector('.js-cancel-auto-merge').innerText).toContain('Cancel automatic merge'); + expect(el.querySelector('.js-cancel-auto-merge').getAttribute('disabled')).toBeFalsy(); + expect(el.querySelector('.js-remove-source-branch').innerText).toContain('Remove source branch'); + expect(el.querySelector('.js-remove-source-branch').getAttribute('disabled')).toBeFalsy(); + }); + + it('should disable cancel auto merge button when the action is in progress', (done) => { + vm.isCancellingAutoMerge = true; + + Vue.nextTick(() => { + expect(el.querySelector('.js-cancel-auto-merge').getAttribute('disabled')).toBeTruthy(); + done(); + }); + }); + + it('should show source branch will be removed text when it source branch set to remove', (done) => { + vm.mr.shouldRemoveSourceBranch = true; + + Vue.nextTick(() => { + const normalizedText = el.innerText.replace(/\s+/g, ' '); + expect(normalizedText).toContain('The source branch will be removed.'); + expect(normalizedText).not.toContain('The source branch will not be removed.'); + done(); + }); + }); + + it('should not show remove source branch button when user not able to remove source branch', (done) => { + vm.mr.currentUserId = 4; + + Vue.nextTick(() => { + expect(el.querySelector('.js-remove-source-branch')).toEqual(null); + done(); + }); + }); + + it('should disable remove source branch button when the action is in progress', (done) => { + vm.isRemovingSourceBranch = true; + + Vue.nextTick(() => { + expect(el.querySelector('.js-remove-source-branch').getAttribute('disabled')).toBeTruthy(); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js new file mode 100644 index 00000000000..6628010112d --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js @@ -0,0 +1,174 @@ +import Vue from 'vue'; +import mergedComponent from '~/vue_merge_request_widget/components/states/mr_widget_merged'; +import eventHub from '~/vue_merge_request_widget/event_hub'; + +const targetBranch = 'foo'; + +const createComponent = () => { + const Component = Vue.extend(mergedComponent); + const mr = { + isRemovingSourceBranch: false, + cherryPickInForkPath: false, + canCherryPickInCurrentMR: true, + revertInForkPath: false, + canRevertInCurrentMR: true, + canRemoveSourceBranch: true, + sourceBranchRemoved: true, + mergedBy: {}, + mergedAt: '', + updatedAt: '', + targetBranch, + }; + + const service = { + removeSourceBranch() {}, + }; + + return new Component({ + el: document.createElement('div'), + propsData: { mr, service }, + }); +}; + +describe('MRWidgetMerged', () => { + describe('props', () => { + it('should have props', () => { + const { mr, service } = mergedComponent.props; + + expect(mr.type instanceof Object).toBeTruthy(); + expect(mr.required).toBeTruthy(); + + expect(service.type instanceof Object).toBeTruthy(); + expect(service.required).toBeTruthy(); + }); + }); + + describe('components', () => { + it('should have components added', () => { + expect(mergedComponent.components['mr-widget-author-and-time']).toBeDefined(); + }); + }); + + 
describe('data', () => { + it('should have default data', () => { + const data = mergedComponent.data(); + + expect(data.isMakingRequest).toBeFalsy(); + }); + }); + + describe('computed', () => { + describe('shouldShowRemoveSourceBranch', () => { + it('should correct value when fields changed', () => { + const vm = createComponent(); + vm.mr.sourceBranchRemoved = false; + expect(vm.shouldShowRemoveSourceBranch).toBeTruthy(); + + vm.mr.sourceBranchRemoved = true; + expect(vm.shouldShowRemoveSourceBranch).toBeFalsy(); + + vm.mr.sourceBranchRemoved = false; + vm.mr.canRemoveSourceBranch = false; + expect(vm.shouldShowRemoveSourceBranch).toBeFalsy(); + + vm.mr.canRemoveSourceBranch = true; + vm.isMakingRequest = true; + expect(vm.shouldShowRemoveSourceBranch).toBeFalsy(); + + vm.mr.isRemovingSourceBranch = true; + vm.mr.canRemoveSourceBranch = true; + vm.isMakingRequest = true; + expect(vm.shouldShowRemoveSourceBranch).toBeFalsy(); + }); + }); + describe('shouldShowSourceBranchRemoving', () => { + it('should correct value when fields changed', () => { + const vm = createComponent(); + vm.mr.sourceBranchRemoved = false; + expect(vm.shouldShowSourceBranchRemoving).toBeFalsy(); + + vm.mr.sourceBranchRemoved = true; + expect(vm.shouldShowRemoveSourceBranch).toBeFalsy(); + + vm.mr.sourceBranchRemoved = false; + vm.isMakingRequest = true; + expect(vm.shouldShowSourceBranchRemoving).toBeTruthy(); + + vm.isMakingRequest = false; + vm.mr.isRemovingSourceBranch = true; + expect(vm.shouldShowSourceBranchRemoving).toBeTruthy(); + }); + }); + }); + + describe('methods', () => { + describe('removeSourceBranch', () => { + it('should set flag and call service then request main component to update the widget', (done) => { + const vm = createComponent(); + spyOn(eventHub, '$emit'); + spyOn(vm.service, 'removeSourceBranch').and.returnValue(new Promise((resolve) => { + resolve({ + json() { + return { + message: 'Branch was removed', + }; + }, + }); + })); + + vm.removeSourceBranch(); + setTimeout(() => { + const args = eventHub.$emit.calls.argsFor(0); + expect(vm.isMakingRequest).toBeTruthy(); + expect(args[0]).toEqual('MRWidgetUpdateRequested'); + expect(args[1]).not.toThrow(); + done(); + }, 333); + }); + }); + }); + + describe('template', () => { + let vm; + let el; + + beforeEach(() => { + vm = createComponent(); + el = vm.$el; + }); + + it('should have correct elements', () => { + expect(el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(el.querySelector('.js-mr-widget-author')).toBeDefined(); + expect(el.innerText).toContain('The changes were merged into'); + expect(el.innerText).toContain(targetBranch); + expect(el.innerText).toContain('The source branch has been removed.'); + expect(el.innerText).toContain('Revert'); + expect(el.innerText).toContain('Cherry-pick'); + expect(el.innerText).not.toContain('You can remove source branch now.'); + expect(el.innerText).not.toContain('The source branch is being removed.'); + }); + + it('should not show source branch removed text', (done) => { + vm.mr.sourceBranchRemoved = false; + + Vue.nextTick(() => { + expect(el.innerText).toContain('You can remove source branch now.'); + expect(el.innerText).not.toContain('The source branch has been removed.'); + done(); + }); + }); + + it('should show source branch removing text', (done) => { + vm.mr.isRemovingSourceBranch = true; + vm.mr.sourceBranchRemoved = false; + + Vue.nextTick(() => { + expect(el.innerText).toContain('The source branch is being removed.'); + expect(el.innerText).not.toContain('You can 
remove source branch now.'); + expect(el.innerText).not.toContain('The source branch has been removed.'); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js new file mode 100644 index 00000000000..98674d12afb --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js @@ -0,0 +1,55 @@ +import Vue from 'vue'; +import missingBranchComponent from '~/vue_merge_request_widget/components/states/mr_widget_missing_branch'; + +const createComponent = () => { + const Component = Vue.extend(missingBranchComponent); + const mr = { + sourceBranchRemoved: true, + }; + + return new Component({ + el: document.createElement('div'), + propsData: { mr }, + }); +}; + +describe('MRWidgetMissingBranch', () => { + describe('props', () => { + it('should have props', () => { + const mrProp = missingBranchComponent.props.mr; + + expect(mrProp.type instanceof Object).toBeTruthy(); + expect(mrProp.required).toBeTruthy(); + }); + }); + + describe('components', () => { + it('should have components added', () => { + expect(missingBranchComponent.components['mr-widget-merge-help']).toBeDefined(); + }); + }); + + describe('computed', () => { + describe('missingBranchName', () => { + it('should return proper branch name', () => { + const vm = createComponent(); + expect(vm.missingBranchName).toEqual('source'); + + vm.mr.sourceBranchRemoved = false; + expect(vm.missingBranchName).toEqual('target'); + }); + }); + }); + + describe('template', () => { + it('should have correct elements', () => { + const el = createComponent().$el; + const content = el.textContent.replace(/\n(\s)+/g, ' ').trim(); + + expect(el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(el.querySelector('button').getAttribute('disabled')).toBeTruthy(); + expect(content).toContain('source branch does not exist.'); + expect(content).toContain('Please restore the source branch or use a different source branch.'); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js new file mode 100644 index 00000000000..61e00f4cf79 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js @@ -0,0 +1,17 @@ +import Vue from 'vue'; +import notAllowedComponent from '~/vue_merge_request_widget/components/states/mr_widget_not_allowed'; + +describe('MRWidgetNotAllowed', () => { + describe('template', () => { + const Component = Vue.extend(notAllowedComponent); + const vm = new Component({ + el: document.createElement('div'), + }); + it('should have correct elements', () => { + expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy(); + expect(vm.$el.innerText).toContain('Ready to be merged automatically.'); + expect(vm.$el.innerText).toContain('Ask someone with write access to this repository to merge this request.'); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js new file mode 100644 index 00000000000..a8a02fa6b66 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js @@ -0,0 +1,29 @@ +import Vue from 'vue'; 
+import nothingToMergeComponent from '~/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge'; + +describe('MRWidgetNothingToMerge', () => { + describe('template', () => { + const Component = Vue.extend(nothingToMergeComponent); + const newBlobPath = '/foo'; + const vm = new Component({ + el: document.createElement('div'), + propsData: { + mr: { newBlobPath }, + }, + }); + + it('should have correct elements', () => { + expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(vm.$el.querySelector('a').href).toContain(newBlobPath); + expect(vm.$el.innerText).toContain('Currently there are no changes in this merge request\'s source branch'); + expect(vm.$el.innerText).toContain('Please push new commits or use a different branch.'); + }); + + it('should not show new blob link if there is no link available', () => { + vm.mr.newBlobPath = null; + Vue.nextTick(() => { + expect(vm.$el.querySelector('a')).toEqual(null); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js new file mode 100644 index 00000000000..b293d118571 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js @@ -0,0 +1,16 @@ +import Vue from 'vue'; +import pipelineBlockedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked'; + +describe('MRWidgetPipelineBlocked', () => { + describe('template', () => { + const Component = Vue.extend(pipelineBlockedComponent); + const vm = new Component({ + el: document.createElement('div'), + }); + it('should have correct elements', () => { + expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy(); + expect(vm.$el.innerText).toContain('Pipeline blocked. The pipeline for this merge request requires a manual action to proceed.'); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js new file mode 100644 index 00000000000..807fba705d4 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js @@ -0,0 +1,16 @@ +import Vue from 'vue'; +import pipelineFailedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_failed'; + +describe('MRWidgetPipelineFailed', () => { + describe('template', () => { + const Component = Vue.extend(pipelineFailedComponent); + const vm = new Component({ + el: document.createElement('div'), + }); + it('should have correct elements', () => { + expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy(); + expect(vm.$el.innerText).toContain('The pipeline for this merge request failed. 
Please retry the job or push a new commit to fix the failure.');
+    });
+  });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
new file mode 100644
index 00000000000..d043ad38b8b
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -0,0 +1,389 @@
+import Vue from 'vue';
+import readyToMergeComponent from '~/vue_merge_request_widget/components/states/mr_widget_ready_to_merge';
+import eventHub from '~/vue_merge_request_widget/event_hub';
+import * as simplePoll from '~/lib/utils/simple_poll';
+
+const commitMessage = 'This is the commit message';
+const commitMessageWithDescription = 'This is the commit message description';
+const createComponent = () => {
+  const Component = Vue.extend(readyToMergeComponent);
+  const mr = {
+    isPipelineActive: false,
+    pipeline: null,
+    isPipelineFailed: false,
+    onlyAllowMergeIfPipelineSucceeds: false,
+    hasCI: false,
+    ciStatus: null,
+    sha: '12345678',
+    commitMessage,
+    commitMessageWithDescription,
+  };
+
+  const service = {
+    merge() {},
+    poll() {},
+  };
+
+  return new Component({
+    el: document.createElement('div'),
+    propsData: { mr, service },
+  });
+};
+
+describe('MRWidgetReadyToMerge', () => {
+  let vm;
+
+  beforeEach(() => {
+    vm = createComponent();
+  });
+
+  describe('props', () => {
+    it('should have props', () => {
+      const { mr, service } = readyToMergeComponent.props;
+
+      expect(mr.type instanceof Object).toBeTruthy();
+      expect(mr.required).toBeTruthy();
+
+      expect(service.type instanceof Object).toBeTruthy();
+      expect(service.required).toBeTruthy();
+    });
+  });
+
+  describe('data', () => {
+    it('should have default data', () => {
+      expect(vm.removeSourceBranch).toBeTruthy();
+      expect(vm.mergeWhenBuildSucceeds).toBeFalsy();
+      expect(vm.useCommitMessageWithDescription).toBeFalsy();
+      expect(vm.setToMergeWhenPipelineSucceeds).toBeFalsy();
+      expect(vm.showCommitMessageEditor).toBeFalsy();
+      expect(vm.isMakingRequest).toBeFalsy();
+      expect(vm.isMergingImmediately).toBeFalsy();
+      expect(vm.commitMessage).toBe(vm.mr.commitMessage);
+      expect(vm.successSvg).toBeDefined();
+      expect(vm.warningSvg).toBeDefined();
+    });
+  });
+
+  describe('computed', () => {
+    describe('commitMessageLinkTitle', () => {
+      const withDesc = 'Include description in commit message';
+      const withoutDesc = "Don't include description in commit message";
+
+      it('should return message with description', () => {
+        expect(vm.commitMessageLinkTitle).toEqual(withDesc);
+      });
+
+      it('should return message without description', () => {
+        vm.useCommitMessageWithDescription = true;
+        expect(vm.commitMessageLinkTitle).toEqual(withoutDesc);
+      });
+    });
+
+    describe('mergeButtonClass', () => {
+      const defaultClass = 'btn btn-small btn-success accept-merge-request';
+      const failedClass = `${defaultClass} btn-danger`;
+      const inActionClass = `${defaultClass} btn-info`;
+
+      it('should return default class', () => {
+        vm.mr.pipeline = true;
+        expect(vm.mergeButtonClass).toEqual(defaultClass);
+      });
+
+      it('should return failed class when MR has CI but also has an unknown status', () => {
+        vm.mr.hasCI = true;
+        expect(vm.mergeButtonClass).toEqual(failedClass);
+      });
+
+      it('should return default class when MR has no pipeline', () => {
+        expect(vm.mergeButtonClass).toEqual(defaultClass);
+      });
+
+      it('should return in action class when pipeline is active', () => {
+        vm.mr.pipeline = {};
+        vm.mr.isPipelineActive = true;
+        expect(vm.mergeButtonClass).toEqual(inActionClass);
+      });
+
+      it('should return failed class when pipeline is failed', () => {
+        vm.mr.pipeline = {};
+        vm.mr.isPipelineFailed = true;
+        expect(vm.mergeButtonClass).toEqual(failedClass);
+      });
+    });
+
+    describe('mergeButtonText', () => {
+      it('should return Merge', () => {
+        expect(vm.mergeButtonText).toEqual('Merge');
+      });
+
+      it('should return Merge in progress', () => {
+        vm.isMergingImmediately = true;
+        expect(vm.mergeButtonText).toEqual('Merge in progress');
+      });
+
+      it('should return Merge when pipeline succeeds', () => {
+        vm.isMergingImmediately = false;
+        vm.mr.isPipelineActive = true;
+        expect(vm.mergeButtonText).toEqual('Merge when pipeline succeeds');
+      });
+    });
+
+    describe('shouldShowMergeOptionsDropdown', () => {
+      it('should return false with initial data', () => {
+        expect(vm.shouldShowMergeOptionsDropdown).toBeFalsy();
+      });
+
+      it('should return true when pipeline active', () => {
+        vm.mr.isPipelineActive = true;
+        expect(vm.shouldShowMergeOptionsDropdown).toBeTruthy();
+      });
+
+      it('should return false when pipeline active but only merge when pipeline succeeds set in project options', () => {
+        vm.mr.isPipelineActive = true;
+        vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
+        expect(vm.shouldShowMergeOptionsDropdown).toBeFalsy();
+      });
+    });
+
+    describe('isMergeButtonDisabled', () => {
+      it('should return false with initial data', () => {
+        expect(vm.isMergeButtonDisabled).toBeFalsy();
+      });
+
+      it('should return true when there is no commit message', () => {
+        vm.commitMessage = '';
+        expect(vm.isMergeButtonDisabled).toBeTruthy();
+      });
+
+      it('should return true if merge is not allowed', () => {
+        vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
+        vm.mr.isPipelineFailed = true;
+        expect(vm.isMergeButtonDisabled).toBeTruthy();
+      });
+
+      it('should return true when the vm instance is making a request', () => {
+        vm.isMakingRequest = true;
+        expect(vm.isMergeButtonDisabled).toBeTruthy();
+      });
+    });
+  });
+
+  describe('methods', () => {
+    describe('isMergeAllowed', () => {
+      it('should return true with initial data', () => {
+        expect(vm.isMergeAllowed()).toBeTruthy();
+      });
+
+      it('should return true when only merge if pipeline succeeds is set and the pipeline has not failed', () => {
+        vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
+        expect(vm.isMergeAllowed()).toBeTruthy();
+      });
+
+      it('should return false when only merge if pipeline succeeds is set and the pipeline has failed', () => {
+        vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
+        vm.mr.isPipelineFailed = true;
+        expect(vm.isMergeAllowed()).toBeFalsy();
+      });
+    });
+
+    describe('updateCommitMessage', () => {
+      it('should revert flag and change commitMessage', () => {
+        expect(vm.useCommitMessageWithDescription).toBeFalsy();
+        expect(vm.commitMessage).toEqual(commitMessage);
+        vm.updateCommitMessage();
+        expect(vm.useCommitMessageWithDescription).toBeTruthy();
+        expect(vm.commitMessage).toEqual(commitMessageWithDescription);
+        vm.updateCommitMessage();
+        expect(vm.useCommitMessageWithDescription).toBeFalsy();
+        expect(vm.commitMessage).toEqual(commitMessage);
+      });
+    });
+
+    describe('toggleCommitMessageEditor', () => {
+      it('should toggle showCommitMessageEditor flag', () => {
+        expect(vm.showCommitMessageEditor).toBeFalsy();
+        vm.toggleCommitMessageEditor();
+        expect(vm.showCommitMessageEditor).toBeTruthy();
+      });
+    });
+
+    describe('handleMergeButtonClick', () => {
+      const returnPromise = status => new Promise((resolve) => {
+        resolve({
+          json() {
+            return { status };
+          },
+        });
+      });
+
+      it('should handle merge when
pipeline succeeds', (done) => { + spyOn(eventHub, '$emit'); + spyOn(vm.service, 'merge').and.returnValue(returnPromise('merge_when_pipeline_succeeds')); + vm.removeSourceBranch = false; + vm.handleMergeButtonClick(true); + + setTimeout(() => { + expect(vm.setToMergeWhenPipelineSucceeds).toBeTruthy(); + expect(vm.isMakingRequest).toBeTruthy(); + expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested'); + + const params = vm.service.merge.calls.argsFor(0)[0]; + expect(params.sha).toEqual(vm.mr.sha); + expect(params.commit_message).toEqual(vm.mr.commitMessage); + expect(params.should_remove_source_branch).toBeFalsy(); + expect(params.merge_when_pipeline_succeeds).toBeTruthy(); + done(); + }, 333); + }); + + it('should handle merge failed', (done) => { + spyOn(eventHub, '$emit'); + spyOn(vm.service, 'merge').and.returnValue(returnPromise('failed')); + vm.handleMergeButtonClick(false, true); + + setTimeout(() => { + expect(vm.setToMergeWhenPipelineSucceeds).toBeFalsy(); + expect(vm.isMakingRequest).toBeTruthy(); + expect(eventHub.$emit).toHaveBeenCalledWith('FailedToMerge', undefined); + + const params = vm.service.merge.calls.argsFor(0)[0]; + expect(params.should_remove_source_branch).toBeTruthy(); + expect(params.merge_when_pipeline_succeeds).toBeFalsy(); + done(); + }, 333); + }); + + it('should handle merge action accepted case', (done) => { + spyOn(vm.service, 'merge').and.returnValue(returnPromise('success')); + spyOn(vm, 'initiateMergePolling'); + vm.handleMergeButtonClick(); + + setTimeout(() => { + expect(vm.setToMergeWhenPipelineSucceeds).toBeFalsy(); + expect(vm.isMakingRequest).toBeTruthy(); + expect(vm.initiateMergePolling).toHaveBeenCalled(); + + const params = vm.service.merge.calls.argsFor(0)[0]; + expect(params.should_remove_source_branch).toBeTruthy(); + expect(params.merge_when_pipeline_succeeds).toBeFalsy(); + done(); + }, 333); + }); + }); + + describe('initiateMergePolling', () => { + it('should call simplePoll', () => { + spyOn(simplePoll, 'default'); + vm.initiateMergePolling(); + expect(simplePoll.default).toHaveBeenCalled(); + }); + }); + + describe('handleMergePolling', () => { + const returnPromise = state => new Promise((resolve) => { + resolve({ + json() { + return { state, source_branch_exists: true }; + }, + }); + }); + + it('should call start and stop polling when MR merged', (done) => { + spyOn(eventHub, '$emit'); + spyOn(vm.service, 'poll').and.returnValue(returnPromise('merged')); + spyOn(vm, 'initiateRemoveSourceBranchPolling'); + + let cpc = false; // continuePollingCalled + let spc = false; // stopPollingCalled + + vm.handleMergePolling(() => { cpc = true; }, () => { spc = true; }); + setTimeout(() => { + expect(vm.service.poll).toHaveBeenCalled(); + expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested'); + expect(eventHub.$emit).toHaveBeenCalledWith('FetchActionsContent'); + expect(vm.initiateRemoveSourceBranchPolling).toHaveBeenCalled(); + expect(cpc).toBeFalsy(); + expect(spc).toBeTruthy(); + + done(); + }, 333); + }); + + it('should continue polling until MR is merged', (done) => { + spyOn(vm.service, 'poll').and.returnValue(returnPromise('some_other_state')); + spyOn(vm, 'initiateRemoveSourceBranchPolling'); + + let cpc = false; // continuePollingCalled + let spc = false; // stopPollingCalled + + vm.handleMergePolling(() => { cpc = true; }, () => { spc = true; }); + setTimeout(() => { + expect(cpc).toBeTruthy(); + expect(spc).toBeFalsy(); + + done(); + }, 333); + }); + }); + + describe('initiateRemoveSourceBranchPolling', 
() => { + it('should emit event and call simplePoll', () => { + spyOn(eventHub, '$emit'); + spyOn(simplePoll, 'default'); + + vm.initiateRemoveSourceBranchPolling(); + expect(eventHub.$emit).toHaveBeenCalledWith('SetBranchRemoveFlag', [true]); + expect(simplePoll.default).toHaveBeenCalled(); + }); + }); + + describe('handleRemoveBranchPolling', () => { + const returnPromise = state => new Promise((resolve) => { + resolve({ + json() { + return { source_branch_exists: state }; + }, + }); + }); + + it('should call start and stop polling when MR merged', (done) => { + spyOn(eventHub, '$emit'); + spyOn(vm.service, 'poll').and.returnValue(returnPromise(false)); + + let cpc = false; // continuePollingCalled + let spc = false; // stopPollingCalled + + vm.handleRemoveBranchPolling(() => { cpc = true; }, () => { spc = true; }); + setTimeout(() => { + expect(vm.service.poll).toHaveBeenCalled(); + + const args = eventHub.$emit.calls.argsFor(0); + expect(args[0]).toEqual('MRWidgetUpdateRequested'); + expect(args[1]).toBeDefined(); + args[1](); + expect(eventHub.$emit).toHaveBeenCalledWith('SetBranchRemoveFlag', [false]); + + expect(cpc).toBeFalsy(); + expect(spc).toBeTruthy(); + + done(); + }, 333); + }); + + it('should continue polling until MR is merged', (done) => { + spyOn(vm.service, 'poll').and.returnValue(returnPromise(true)); + + let cpc = false; // continuePollingCalled + let spc = false; // stopPollingCalled + + vm.handleRemoveBranchPolling(() => { cpc = true; }, () => { spc = true; }); + setTimeout(() => { + expect(cpc).toBeTruthy(); + expect(spc).toBeFalsy(); + + done(); + }, 333); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js new file mode 100644 index 00000000000..5fb1d69a8b3 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js @@ -0,0 +1,16 @@ +import Vue from 'vue'; +import shaMismatchComponent from '~/vue_merge_request_widget/components/states/mr_widget_sha_mismatch'; + +describe('MRWidgetSHAMismatch', () => { + describe('template', () => { + const Component = Vue.extend(shaMismatchComponent); + const vm = new Component({ + el: document.createElement('div'), + }); + it('should have correct elements', () => { + expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy(); + expect(vm.$el.innerText).toContain('The source branch HEAD has recently changed. 
Please reload the page and review the changes before merging.'); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js new file mode 100644 index 00000000000..fe87f110354 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js @@ -0,0 +1,47 @@ +import Vue from 'vue'; +import unresolvedDiscussionsComponent from '~/vue_merge_request_widget/components/states/mr_widget_unresolved_discussions'; + +describe('MRWidgetUnresolvedDiscussions', () => { + describe('props', () => { + it('should have props', () => { + const { mr } = unresolvedDiscussionsComponent.props; + + expect(mr.type instanceof Object).toBeTruthy(); + expect(mr.required).toBeTruthy(); + }); + }); + + describe('template', () => { + let el; + let vm; + const path = 'foo/bar'; + + beforeEach(() => { + const Component = Vue.extend(unresolvedDiscussionsComponent); + const mr = { + createIssueToResolveDiscussionsPath: path, + }; + vm = new Component({ + el: document.createElement('div'), + propsData: { mr }, + }); + el = vm.$el; + }); + + it('should have correct elements', () => { + expect(el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(el.innerText).toContain('There are unresolved discussions. Please resolve these discussions'); + expect(el.innerText).toContain('Create an issue to resolve them later'); + expect(el.querySelector('.js-create-issue').getAttribute('href')).toEqual(path); + }); + + it('should not show create issue button if user cannot create issue', (done) => { + vm.mr.createIssueToResolveDiscussionsPath = ''; + + Vue.nextTick(() => { + expect(el.querySelector('.js-create-issue')).toEqual(null); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js new file mode 100644 index 00000000000..45bd1a69964 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js @@ -0,0 +1,96 @@ +import Vue from 'vue'; +import wipComponent from '~/vue_merge_request_widget/components/states/mr_widget_wip'; +import eventHub from '~/vue_merge_request_widget/event_hub'; + +const createComponent = () => { + const Component = Vue.extend(wipComponent); + const mr = { + title: 'The best MR ever', + removeWIPPath: '/path/to/remove/wip', + }; + const service = { + removeWIP() {}, + }; + return new Component({ + el: document.createElement('div'), + propsData: { mr, service }, + }); +}; + +describe('MRWidgetWIP', () => { + describe('props', () => { + it('should have props', () => { + const { mr, service } = wipComponent.props; + + expect(mr.type instanceof Object).toBeTruthy(); + expect(mr.required).toBeTruthy(); + + expect(service.type instanceof Object).toBeTruthy(); + expect(service.required).toBeTruthy(); + }); + }); + + describe('data', () => { + it('should have default data', () => { + const vm = createComponent(); + expect(vm.isMakingRequest).toBeFalsy(); + }); + }); + + describe('methods', () => { + const mrObj = { + is_new_mr_data: true, + }; + + describe('removeWIP', () => { + it('should make a request to service and handle response', (done) => { + const vm = createComponent(); + + spyOn(window, 'Flash').and.returnValue(true); + spyOn(eventHub, '$emit'); + spyOn(vm.service, 'removeWIP').and.returnValue(new Promise((resolve) => { + resolve({ + json() { + 
return mrObj; + }, + }); + })); + + vm.removeWIP(); + setTimeout(() => { + expect(vm.isMakingRequest).toBeTruthy(); + expect(eventHub.$emit).toHaveBeenCalledWith('UpdateWidgetData', mrObj); + expect(window.Flash).toHaveBeenCalledWith('The merge request can now be merged.', 'notice'); + done(); + }, 333); + }); + }); + }); + + describe('template', () => { + let vm; + let el; + + beforeEach(() => { + vm = createComponent(); + el = vm.$el; + }); + + it('should have correct elements', () => { + expect(el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(el.innerText).toContain('This merge request is currently Work In Progress and therefore unable to merge'); + expect(el.querySelector('button').getAttribute('disabled')).toBeTruthy(); + expect(el.querySelector('button').innerText).toContain('Merge'); + expect(el.querySelector('.js-remove-wip').innerText).toContain('Resolve WIP status'); + }); + + it('should not show removeWIP button is user cannot update MR', (done) => { + vm.mr.removeWIPPath = ''; + + Vue.nextTick(() => { + expect(el.querySelector('.js-remove-wip')).toEqual(null); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/mock_data.js b/spec/javascripts/vue_mr_widget/mock_data.js new file mode 100644 index 00000000000..e6f96d5588b --- /dev/null +++ b/spec/javascripts/vue_mr_widget/mock_data.js @@ -0,0 +1,214 @@ +/* eslint-disable */ + +export default { + "id": 132, + "iid": 22, + "assignee_id": null, + "author_id": 1, + "description": "", + "lock_version": null, + "milestone_id": null, + "position": 0, + "state": "merged", + "title": "Update README.md", + "updated_by_id": null, + "created_at": "2017-04-07T12:27:26.718Z", + "updated_at": "2017-04-07T15:39:25.852Z", + "deleted_at": null, + "time_estimate": 0, + "total_time_spent": 0, + "human_time_estimate": null, + "human_total_time_spent": null, + "in_progress_merge_commit_sha": null, + "locked_at": null, + "merge_commit_sha": "53027d060246c8f47e4a9310fb332aa52f221775", + "merge_error": null, + "merge_params": { + "force_remove_source_branch": null + }, + "merge_status": "can_be_merged", + "merge_user_id": null, + "merge_when_pipeline_succeeds": false, + "source_branch": "daaaa", + "source_project_id": 19, + "target_branch": "master", + "target_project_id": 19, + "merge_event": { + "author": { + "name": "Administrator", + "username": "root", + "id": 1, + "state": "active", + "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "web_url": "http://localhost:3000/root" + }, + "updated_at": "2017-04-07T15:39:25.696Z" + }, + "closed_event": null, + "author": { + "name": "Administrator", + "username": "root", + "id": 1, + "state": "active", + "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "web_url": "http://localhost:3000/root" + }, + "merge_user": null, + "diff_head_sha": "104096c51715e12e7ae41f9333e9fa35b73f385d", + "diff_head_commit_short_id": "104096c5", + "merge_commit_message": "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22", + "pipeline": { + "id": 172, + "user": { + "name": "Administrator", + "username": "root", + "id": 1, + "state": "active", + "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "web_url": "http://localhost:3000/root" + }, + "active": false, + "coverage": "92.16", + "path": "/root/acets-app/pipelines/172", + "details": { + "status": { + "icon": "icon_status_success", + "favicon": 
"favicon_status_success", + "text": "passed", + "label": "passed", + "group": "success", + "has_details": true, + "details_path": "/root/acets-app/pipelines/172" + }, + "duration": null, + "finished_at": "2017-04-07T14:00:14.256Z", + "stages": [ + { + "name": "build", + "title": "build: failed", + "status": { + "icon": "icon_status_failed", + "favicon": "favicon_status_failed", + "text": "failed", + "label": "failed", + "group": "failed", + "has_details": true, + "details_path": "/root/acets-app/pipelines/172#build" + }, + "path": "/root/acets-app/pipelines/172#build", + "dropdown_path": "/root/acets-app/pipelines/172/stage.json?stage=build" + }, + { + "name": "review", + "title": "review: skipped", + "status": { + "icon": "icon_status_skipped", + "favicon": "favicon_status_skipped", + "text": "skipped", + "label": "skipped", + "group": "skipped", + "has_details": true, + "details_path": "/root/acets-app/pipelines/172#review" + }, + "path": "/root/acets-app/pipelines/172#review", + "dropdown_path": "/root/acets-app/pipelines/172/stage.json?stage=review" + } + ], + "artifacts": [ + + ], + "manual_actions": [ + { + "name": "stop_review", + "path": "/root/acets-app/builds/1427/play", + "playable": false + } + ] + }, + "flags": { + "latest": false, + "triggered": false, + "stuck": false, + "yaml_errors": false, + "retryable": true, + "cancelable": false + }, + "ref": { + "name": "daaaa", + "path": "/root/acets-app/tree/daaaa", + "tag": false, + "branch": true + }, + "commit": { + "id": "104096c51715e12e7ae41f9333e9fa35b73f385d", + "short_id": "104096c5", + "title": "Update README.md", + "created_at": "2017-04-07T15:27:18.000+03:00", + "parent_ids": [ + "2396536178668d8930c29d904e53bd4d06228b32" + ], + "message": "Update README.md", + "author_name": "Administrator", + "author_email": "admin@example.com", + "authored_date": "2017-04-07T15:27:18.000+03:00", + "committer_name": "Administrator", + "committer_email": "admin@example.com", + "committed_date": "2017-04-07T15:27:18.000+03:00", + "author": { + "name": "Administrator", + "username": "root", + "id": 1, + "state": "active", + "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "web_url": "http://localhost:3000/root" + }, + "author_gravatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "commit_url": "http://localhost:3000/root/acets-app/commit/104096c51715e12e7ae41f9333e9fa35b73f385d", + "commit_path": "/root/acets-app/commit/104096c51715e12e7ae41f9333e9fa35b73f385d" + }, + "retry_path": "/root/acets-app/pipelines/172/retry", + "created_at": "2017-04-07T12:27:19.520Z", + "updated_at": "2017-04-07T15:28:44.800Z" + }, + "work_in_progress": false, + "source_branch_exists": false, + "mergeable_discussions_state": true, + "conflicts_can_be_resolved_in_ui": false, + "branch_missing": true, + "commits_count": 1, + "has_conflicts": false, + "can_be_merged": true, + "has_ci": true, + "ci_status": "success", + "pipeline_status_path": "/root/acets-app/merge_requests/22/pipeline_status", + "issues_links": { + "closing": "", + "mentioned_but_not_closing": "" + }, + "current_user": { + "can_resolve_conflicts": true, + "can_remove_source_branch": false, + "can_revert_on_current_merge_request": true, + "can_cherry_pick_on_current_merge_request": true + }, + "target_branch_path": "/root/acets-app/branches/master", + "source_branch_path": "/root/acets-app/branches/daaaa", + "conflict_resolution_ui_path": "/root/acets-app/merge_requests/22/conflicts", + 
"remove_wip_path": "/root/acets-app/merge_requests/22/remove_wip", + "cancel_merge_when_pipeline_succeeds_path": "/root/acets-app/merge_requests/22/cancel_merge_when_pipeline_succeeds", + "create_issue_to_resolve_discussions_path": "/root/acets-app/issues/new?merge_request_to_resolve_discussions_of=22", + "merge_path": "/root/acets-app/merge_requests/22/merge", + "cherry_pick_in_fork_path": "/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+revert+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1", + "revert_in_fork_path": "/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+cherry-pick+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1", + "email_patches_path": "/root/acets-app/merge_requests/22.patch", + "plain_diff_path": "/root/acets-app/merge_requests/22.diff", + "ci_status_path": "/root/acets-app/merge_requests/22/ci_status", + "status_path": "/root/acets-app/merge_requests/22.json", + "merge_check_path": "/root/acets-app/merge_requests/22/merge_check", + "ci_environments_status_url": "/root/acets-app/merge_requests/22/ci_environments_status", + "project_archived": false, + "merge_commit_message_with_description": "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22", + "diverged_commits_count": 0, + "only_allow_merge_if_pipeline_succeeds": false, + "commit_change_content_path": "/root/acets-app/merge_requests/22/commit_change_content" +} diff --git a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js new file mode 100644 index 00000000000..bdc18243a15 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js @@ -0,0 +1,324 @@ +import Vue from 'vue'; +import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service'; +import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options'; +import eventHub from '~/vue_merge_request_widget/event_hub'; +import mockData from './mock_data'; + +const createComponent = () => { + delete mrWidgetOptions.el; // Prevent component mounting + gl.mrWidgetData = mockData; + const Component = Vue.extend(mrWidgetOptions); + return new Component(); +}; + +const returnPromise = data => new Promise((resolve) => { + resolve({ + json() { + return data; + }, + body: data, + }); +}); + +describe('mrWidgetOptions', () => { + let vm; + + beforeEach(() => { + vm = createComponent(); + }); + + describe('data', () => { + it('should instantiate Store and Service', () => { + expect(vm.mr).toBeDefined(); + expect(vm.service).toBeDefined(); + }); + }); + + describe('computed', () => { + describe('componentName', () => { + it('should return merged component', () => { + expect(vm.componentName).toEqual('mr-widget-merged'); + }); + + 
it('should return conflicts component', () => { + vm.mr.state = 'conflicts'; + expect(vm.componentName).toEqual('mr-widget-conflicts'); + }); + }); + + describe('shouldRenderMergeHelp', () => { + it('should return false for the initial merged state', () => { + expect(vm.shouldRenderMergeHelp).toBeFalsy(); + }); + + it('should return true for a state which requires help widget', () => { + vm.mr.state = 'conflicts'; + expect(vm.shouldRenderMergeHelp).toBeTruthy(); + }); + }); + + describe('shouldRenderPipelines', () => { + it('should return true for the initial data', () => { + expect(vm.shouldRenderPipelines).toBeTruthy(); + }); + + it('should return true when pipeline is empty but MR.hasCI is set to true', () => { + vm.mr.pipeline = {}; + expect(vm.shouldRenderPipelines).toBeTruthy(); + }); + + it('should return true when pipeline available', () => { + vm.mr.hasCI = false; + expect(vm.shouldRenderPipelines).toBeTruthy(); + }); + + it('should return false when there is no pipeline', () => { + vm.mr.pipeline = {}; + vm.mr.hasCI = false; + expect(vm.shouldRenderPipelines).toBeFalsy(); + }); + }); + + describe('shouldRenderRelatedLinks', () => { + it('should return false for the initial data', () => { + expect(vm.shouldRenderRelatedLinks).toBeFalsy(); + }); + + it('should return true if there is relatedLinks in MR', () => { + vm.mr.relatedLinks = {}; + expect(vm.shouldRenderRelatedLinks).toBeTruthy(); + }); + }); + + describe('shouldRenderDeployments', () => { + it('should return false for the initial data', () => { + expect(vm.shouldRenderDeployments).toBeFalsy(); + }); + + it('should return true if there is deployments', () => { + vm.mr.deployments.push({}, {}); + expect(vm.shouldRenderDeployments).toBeTruthy(); + }); + }); + }); + + describe('methods', () => { + describe('checkStatus', () => { + it('should tell service to check status', (done) => { + spyOn(vm.service, 'checkStatus').and.returnValue(returnPromise(mockData)); + spyOn(vm.mr, 'setData'); + let isCbExecuted = false; + const cb = () => { + isCbExecuted = true; + }; + + vm.checkStatus(cb); + + setTimeout(() => { + expect(vm.service.checkStatus).toHaveBeenCalled(); + expect(vm.mr.setData).toHaveBeenCalled(); + expect(isCbExecuted).toBeTruthy(); + done(); + }, 333); + }); + }); + + describe('initPolling', () => { + it('should call SmartInterval', () => { + spyOn(gl, 'SmartInterval').and.returnValue({ + resume() {}, + stopTimer() {}, + }); + vm.initPolling(); + + expect(vm.pollingInterval).toBeDefined(); + expect(gl.SmartInterval).toHaveBeenCalled(); + }); + }); + + describe('initDeploymentsPolling', () => { + it('should call SmartInterval', () => { + spyOn(gl, 'SmartInterval'); + vm.initDeploymentsPolling(); + + expect(vm.deploymentsInterval).toBeDefined(); + expect(gl.SmartInterval).toHaveBeenCalled(); + }); + }); + + describe('fetchDeployments', () => { + it('should fetch deployments', (done) => { + spyOn(vm.service, 'fetchDeployments').and.returnValue(returnPromise([{ deployment: 1 }])); + + vm.fetchDeployments(); + + setTimeout(() => { + expect(vm.service.fetchDeployments).toHaveBeenCalled(); + expect(vm.mr.deployments.length).toEqual(1); + expect(vm.mr.deployments[0].deployment).toEqual(1); + done(); + }, 333); + }); + }); + + describe('fetchActionsContent', () => { + it('should fetch content of Cherry Pick and Revert modals', (done) => { + spyOn(vm.service, 'fetchMergeActionsContent').and.returnValue(returnPromise('hello world')); + + vm.fetchActionsContent(); + + setTimeout(() => { + 
expect(vm.service.fetchMergeActionsContent).toHaveBeenCalled(); + expect(document.body.textContent).toContain('hello world'); + done(); + }, 333); + }); + }); + + describe('bindEventHubListeners', () => { + it('should bind eventHub listeners', () => { + spyOn(vm, 'checkStatus').and.returnValue(() => {}); + spyOn(vm.service, 'checkStatus').and.returnValue(returnPromise(mockData)); + spyOn(vm, 'fetchActionsContent'); + spyOn(vm.mr, 'setData'); + spyOn(vm, 'resumePolling'); + spyOn(vm, 'stopPolling'); + spyOn(eventHub, '$on'); + + vm.bindEventHubListeners(); + + eventHub.$emit('SetBranchRemoveFlag', ['flag']); + expect(vm.mr.isRemovingSourceBranch).toEqual('flag'); + + eventHub.$emit('FailedToMerge'); + expect(vm.mr.state).toEqual('failedToMerge'); + + eventHub.$emit('UpdateWidgetData', mockData); + expect(vm.mr.setData).toHaveBeenCalledWith(mockData); + + eventHub.$emit('EnablePolling'); + expect(vm.resumePolling).toHaveBeenCalled(); + + eventHub.$emit('DisablePolling'); + expect(vm.stopPolling).toHaveBeenCalled(); + + const listenersWithServiceRequest = { + MRWidgetUpdateRequested: true, + FetchActionsContent: true, + }; + + const allArgs = eventHub.$on.calls.allArgs(); + allArgs.forEach((params) => { + const eventName = params[0]; + const callback = params[1]; + + if (listenersWithServiceRequest[eventName]) { + listenersWithServiceRequest[eventName] = callback; + } + }); + + listenersWithServiceRequest.MRWidgetUpdateRequested(); + expect(vm.checkStatus).toHaveBeenCalled(); + + listenersWithServiceRequest.FetchActionsContent(); + expect(vm.fetchActionsContent).toHaveBeenCalled(); + }); + }); + + describe('handleMounted', () => { + it('should call required methods to do the initial kick-off', () => { + spyOn(vm, 'initDeploymentsPolling'); + spyOn(vm, 'setFavicon'); + + vm.handleMounted(); + + expect(vm.setFavicon).toHaveBeenCalled(); + expect(vm.initDeploymentsPolling).toHaveBeenCalled(); + }); + }); + + describe('setFavicon', () => { + it('should call setFavicon method', () => { + spyOn(gl.utils, 'setFavicon'); + vm.setFavicon(); + + expect(gl.utils.setFavicon).toHaveBeenCalledWith(vm.mr.ciStatusFaviconPath); + }); + + it('should not call setFavicon when there is no ciStatusFaviconPath', () => { + spyOn(gl.utils, 'setFavicon'); + vm.mr.ciStatusFaviconPath = null; + vm.setFavicon(); + + expect(gl.utils.setFavicon).not.toHaveBeenCalled(); + }); + }); + + describe('resumePolling', () => { + it('should call stopTimer on pollingInterval', () => { + spyOn(vm.pollingInterval, 'resume'); + + vm.resumePolling(); + expect(vm.pollingInterval.resume).toHaveBeenCalled(); + }); + }); + + describe('stopPolling', () => { + it('should call stopTimer on pollingInterval', () => { + spyOn(vm.pollingInterval, 'stopTimer'); + + vm.stopPolling(); + expect(vm.pollingInterval.stopTimer).toHaveBeenCalled(); + }); + }); + + describe('createService', () => { + it('should instantiate a Service', () => { + const endpoints = { + mergePath: '/nice/path', + mergeCheckPath: '/nice/path', + cancelAutoMergePath: '/nice/path', + removeWIPPath: '/nice/path', + sourceBranchPath: '/nice/path', + ciEnvironmentsStatusPath: '/nice/path', + statusPath: '/nice/path', + mergeActionsContentPath: '/nice/path', + }; + + const serviceInstance = vm.createService(endpoints); + const isInstanceOfMRService = serviceInstance instanceof MRWidgetService; + expect(isInstanceOfMRService).toBe(true); + Object.keys(serviceInstance).forEach((key) => { + expect(serviceInstance[key]).toBeDefined(); + }); + }); + }); + }); + + describe('components', () 
=> { + it('should register all components', () => { + const comps = mrWidgetOptions.components; + expect(comps['mr-widget-header']).toBeDefined(); + expect(comps['mr-widget-merge-help']).toBeDefined(); + expect(comps['mr-widget-pipeline']).toBeDefined(); + expect(comps['mr-widget-deployment']).toBeDefined(); + expect(comps['mr-widget-related-links']).toBeDefined(); + expect(comps['mr-widget-merged']).toBeDefined(); + expect(comps['mr-widget-closed']).toBeDefined(); + expect(comps['mr-widget-locked']).toBeDefined(); + expect(comps['mr-widget-failed-to-merge']).toBeDefined(); + expect(comps['mr-widget-wip']).toBeDefined(); + expect(comps['mr-widget-archived']).toBeDefined(); + expect(comps['mr-widget-conflicts']).toBeDefined(); + expect(comps['mr-widget-nothing-to-merge']).toBeDefined(); + expect(comps['mr-widget-not-allowed']).toBeDefined(); + expect(comps['mr-widget-missing-branch']).toBeDefined(); + expect(comps['mr-widget-ready-to-merge']).toBeDefined(); + expect(comps['mr-widget-checking']).toBeDefined(); + expect(comps['mr-widget-unresolved-discussions']).toBeDefined(); + expect(comps['mr-widget-pipeline-blocked']).toBeDefined(); + expect(comps['mr-widget-pipeline-failed']).toBeDefined(); + expect(comps['mr-widget-merge-when-pipeline-succeeds']).toBeDefined(); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js b/spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js new file mode 100644 index 00000000000..b63633c03b8 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js @@ -0,0 +1,46 @@ +import Vue from 'vue'; +import VueResource from 'vue-resource'; +import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service'; + +Vue.use(VueResource); + +describe('MRWidgetService', () => { + const mr = { + mergePath: './', + mergeCheckPath: './', + cancelAutoMergePath: './', + removeWIPPath: './', + sourceBranchPath: './', + ciEnvironmentsStatusPath: './', + statusPath: './', + mergeActionsContentPath: './', + isServiceStore: true, + }; + + it('should have store and resources created in constructor', () => { + const service = new MRWidgetService(mr); + + expect(service.mergeResource).toBeDefined(); + expect(service.mergeCheckResource).toBeDefined(); + expect(service.cancelAutoMergeResource).toBeDefined(); + expect(service.removeWIPResource).toBeDefined(); + expect(service.removeSourceBranchResource).toBeDefined(); + expect(service.deploymentsResource).toBeDefined(); + expect(service.pollResource).toBeDefined(); + expect(service.mergeActionsContentResource).toBeDefined(); + }); + + it('should have methods defined', () => { + const service = new MRWidgetService(mr); + + expect(service.merge()).toBeDefined(); + expect(service.cancelAutomaticMerge()).toBeDefined(); + expect(service.removeWIP()).toBeDefined(); + expect(service.removeSourceBranch()).toBeDefined(); + expect(service.fetchDeployments()).toBeDefined(); + expect(service.poll()).toBeDefined(); + expect(service.checkStatus()).toBeDefined(); + expect(service.fetchMergeActionsContent()).toBeDefined(); + expect(MRWidgetService.stopEnvironment()).toBeDefined(); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/stores/get_state_key_spec.js b/spec/javascripts/vue_mr_widget/stores/get_state_key_spec.js new file mode 100644 index 00000000000..9a331d99865 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/stores/get_state_key_spec.js @@ -0,0 +1,65 @@ +import getStateKey from '~/vue_merge_request_widget/stores/get_state_key'; + 
+describe('getStateKey', () => { + it('should return proper state name', () => { + const context = { + mergeStatus: 'checked', + mergeWhenPipelineSucceeds: false, + canMerge: true, + onlyAllowMergeIfPipelineSucceeds: false, + isPipelineFailed: false, + hasMergeableDiscussionsState: false, + isPipelineBlocked: false, + canBeMerged: false, + }; + const data = { + project_archived: false, + branch_missing: false, + commits_count: 2, + has_conflicts: false, + work_in_progress: false, + }; + const bound = getStateKey.bind(context, data); + expect(bound()).toEqual(null); + + context.canBeMerged = true; + expect(bound()).toEqual('readyToMerge'); + + context.hasSHAChanged = true; + expect(bound()).toEqual('shaMismatch'); + + context.isPipelineBlocked = true; + expect(bound()).toEqual('pipelineBlocked'); + + context.hasMergeableDiscussionsState = true; + expect(bound()).toEqual('unresolvedDiscussions'); + + context.onlyAllowMergeIfPipelineSucceeds = true; + context.isPipelineFailed = true; + expect(bound()).toEqual('pipelineFailed'); + + context.canMerge = false; + expect(bound()).toEqual('notAllowedToMerge'); + + context.mergeWhenPipelineSucceeds = true; + expect(bound()).toEqual('mergeWhenPipelineSucceeds'); + + data.work_in_progress = true; + expect(bound()).toEqual('workInProgress'); + + data.has_conflicts = true; + expect(bound()).toEqual('conflicts'); + + context.mergeStatus = 'unchecked'; + expect(bound()).toEqual('checking'); + + data.commits_count = 0; + expect(bound()).toEqual('nothingToMerge'); + + data.branch_missing = true; + expect(bound()).toEqual('missingBranch'); + + data.project_archived = true; + expect(bound()).toEqual('archived'); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js b/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js new file mode 100644 index 00000000000..56dd0198ae2 --- /dev/null +++ b/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js @@ -0,0 +1,22 @@ +import MergeRequestStore from '~/vue_merge_request_widget/stores/mr_widget_store'; +import mockData from '../mock_data'; + +describe('MergeRequestStore', () => { + describe('setData', () => { + let store; + + beforeEach(() => { + store = new MergeRequestStore(mockData); + }); + + it('should set hasSHAChanged when the diff SHA changes', () => { + store.setData({ ...mockData, diff_head_sha: 'a-different-string' }); + expect(store.hasSHAChanged).toBe(true); + }); + + it('should not set hasSHAChanged when other data changes', () => { + store.setData({ ...mockData, work_in_progress: !mockData.work_in_progress }); + expect(store.hasSHAChanged).toBe(false); + }); + }); +}); diff --git a/spec/javascripts/vue_pipelines_index/mock_data.js b/spec/javascripts/vue_pipelines_index/mock_data.js deleted file mode 100644 index 2365a662b9f..00000000000 --- a/spec/javascripts/vue_pipelines_index/mock_data.js +++ /dev/null @@ -1,107 +0,0 @@ -export default { - pipelines: [{ - id: 115, - user: { - name: 'Root', - username: 'root', - id: 1, - state: 'active', - avatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon', - web_url: 'http://localhost:3000/root', - }, - path: '/root/review-app/pipelines/115', - details: { - status: { - icon: 'icon_status_failed', - text: 'failed', - label: 'failed', - group: 'failed', - has_details: true, - details_path: '/root/review-app/pipelines/115', - }, - duration: null, - finished_at: '2017-03-17T19:00:15.996Z', - stages: [{ - name: 'build', - title: 'build: failed', - status: { - icon: 
'icon_status_failed', - text: 'failed', - label: 'failed', - group: 'failed', - has_details: true, - details_path: '/root/review-app/pipelines/115#build', - }, - path: '/root/review-app/pipelines/115#build', - dropdown_path: '/root/review-app/pipelines/115/stage.json?stage=build', - }, - { - name: 'review', - title: 'review: skipped', - status: { - icon: 'icon_status_skipped', - text: 'skipped', - label: 'skipped', - group: 'skipped', - has_details: true, - details_path: '/root/review-app/pipelines/115#review', - }, - path: '/root/review-app/pipelines/115#review', - dropdown_path: '/root/review-app/pipelines/115/stage.json?stage=review', - }], - artifacts: [], - manual_actions: [{ - name: 'stop_review', - path: '/root/review-app/builds/3766/play', - }], - }, - flags: { - latest: true, - triggered: false, - stuck: false, - yaml_errors: false, - retryable: true, - cancelable: false, - }, - ref: { - name: 'thisisabranch', - path: '/root/review-app/tree/thisisabranch', - tag: false, - branch: true, - }, - commit: { - id: '9e87f87625b26c42c59a2ee0398f81d20cdfe600', - short_id: '9e87f876', - title: 'Update README.md', - created_at: '2017-03-15T22:58:28.000+00:00', - parent_ids: ['3744f9226e699faec2662a8b267e5d3fd0bfff0e'], - message: 'Update README.md', - author_name: 'Root', - author_email: 'admin@example.com', - authored_date: '2017-03-15T22:58:28.000+00:00', - committer_name: 'Root', - committer_email: 'admin@example.com', - committed_date: '2017-03-15T22:58:28.000+00:00', - author: { - name: 'Root', - username: 'root', - id: 1, - state: 'active', - avatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon', - web_url: 'http://localhost:3000/root', - }, - author_gravatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon', - commit_url: 'http://localhost:3000/root/review-app/commit/9e87f87625b26c42c59a2ee0398f81d20cdfe600', - commit_path: '/root/review-app/commit/9e87f87625b26c42c59a2ee0398f81d20cdfe600', - }, - retry_path: '/root/review-app/pipelines/115/retry', - created_at: '2017-03-15T22:58:33.436Z', - updated_at: '2017-03-17T19:00:15.997Z', - }], - count: { - all: 52, - running: 0, - pending: 0, - finished: 52, - }, -}; diff --git a/spec/javascripts/vue_shared/ci_action_icons_spec.js b/spec/javascripts/vue_shared/ci_action_icons_spec.js new file mode 100644 index 00000000000..3d53a5ab24d --- /dev/null +++ b/spec/javascripts/vue_shared/ci_action_icons_spec.js @@ -0,0 +1,27 @@ +import getActionIcon from '~/vue_shared/ci_action_icons'; +import cancelSVG from 'icons/_icon_action_cancel.svg'; +import retrySVG from 'icons/_icon_action_retry.svg'; +import playSVG from 'icons/_icon_action_play.svg'; +import stopSVG from 'icons/_icon_action_stop.svg'; + +describe('getActionIcon', () => { + it('should return an empty string', () => { + expect(getActionIcon()).toEqual(''); + }); + + it('should return cancel svg', () => { + expect(getActionIcon('icon_action_cancel')).toEqual(cancelSVG); + }); + + it('should return retry svg', () => { + expect(getActionIcon('icon_action_retry')).toEqual(retrySVG); + }); + + it('should return play svg', () => { + expect(getActionIcon('icon_action_play')).toEqual(playSVG); + }); + + it('should render stop svg', () => { + expect(getActionIcon('icon_action_stop')).toEqual(stopSVG); + }); +}); diff --git a/spec/javascripts/vue_shared/ci_status_icon_spec.js b/spec/javascripts/vue_shared/ci_status_icon_spec.js new file mode 100644 index 00000000000..b6621d6054d --- /dev/null +++ 
b/spec/javascripts/vue_shared/ci_status_icon_spec.js @@ -0,0 +1,27 @@ +import { borderlessStatusIconEntityMap, statusIconEntityMap } from '~/vue_shared/ci_status_icons'; + +describe('CI status icons', () => { + const statuses = [ + 'icon_status_canceled', + 'icon_status_created', + 'icon_status_failed', + 'icon_status_manual', + 'icon_status_pending', + 'icon_status_running', + 'icon_status_skipped', + 'icon_status_success', + 'icon_status_warning', + ]; + + it('should have a dictionary for borderless icons', () => { + statuses.forEach((status) => { + expect(borderlessStatusIconEntityMap[status]).toBeDefined(); + }); + }); + + it('should have a dictionary for icons', () => { + statuses.forEach((status) => { + expect(statusIconEntityMap[status]).toBeDefined(); + }); + }); +}); diff --git a/spec/javascripts/vue_shared/components/ci_badge_link_spec.js b/spec/javascripts/vue_shared/components/ci_badge_link_spec.js new file mode 100644 index 00000000000..daed4da3e15 --- /dev/null +++ b/spec/javascripts/vue_shared/components/ci_badge_link_spec.js @@ -0,0 +1,89 @@ +import Vue from 'vue'; +import ciBadge from '~/vue_shared/components/ci_badge_link.vue'; + +describe('CI Badge Link Component', () => { + let CIBadge; + + const statuses = { + canceled: { + text: 'canceled', + label: 'canceled', + group: 'canceled', + icon: 'icon_status_canceled', + details_path: 'status/canceled', + }, + created: { + text: 'created', + label: 'created', + group: 'created', + icon: 'icon_status_created', + details_path: 'status/created', + }, + failed: { + text: 'failed', + label: 'failed', + group: 'failed', + icon: 'icon_status_failed', + details_path: 'status/failed', + }, + manual: { + text: 'manual', + label: 'manual action', + group: 'manual', + icon: 'icon_status_manual', + details_path: 'status/manual', + }, + pending: { + text: 'pending', + label: 'pending', + group: 'pending', + icon: 'icon_status_pending', + details_path: 'status/pending', + }, + running: { + text: 'running', + label: 'running', + group: 'running', + icon: 'icon_status_running', + details_path: 'status/running', + }, + skipped: { + text: 'skipped', + label: 'skipped', + group: 'skipped', + icon: 'icon_status_skipped', + details_path: 'status/skipped', + }, + success_warining: { + text: 'passed', + label: 'passed', + group: 'success_with_warnings', + icon: 'icon_status_warning', + details_path: 'status/warning', + }, + success: { + text: 'passed', + label: 'passed', + group: 'passed', + icon: 'icon_status_success', + details_path: 'status/passed', + }, + }; + + it('should render each status badge', () => { + CIBadge = Vue.extend(ciBadge); + Object.keys(statuses).map((status) => { + const vm = new CIBadge({ + propsData: { + status: statuses[status], + }, + }).$mount(); + + expect(vm.$el.getAttribute('href')).toEqual(statuses[status].details_path); + expect(vm.$el.textContent.trim()).toEqual(statuses[status].text); + expect(vm.$el.getAttribute('class')).toEqual(`ci-status ci-${statuses[status].group}`); + expect(vm.$el.querySelector('svg')).toBeDefined(); + return vm; + }); + }); +}); diff --git a/spec/javascripts/vue_shared/components/ci_icon_spec.js b/spec/javascripts/vue_shared/components/ci_icon_spec.js new file mode 100644 index 00000000000..d8664408595 --- /dev/null +++ b/spec/javascripts/vue_shared/components/ci_icon_spec.js @@ -0,0 +1,139 @@ +import Vue from 'vue'; +import ciIcon from '~/vue_shared/components/ci_icon.vue'; + +describe('CI Icon component', () => { + let CiIcon; + beforeEach(() => { + CiIcon = Vue.extend(ciIcon); + }); + 
+ it('should render a span element with an svg', () => { + const component = new CiIcon({ + propsData: { + status: { + icon: 'icon_status_success', + }, + }, + }).$mount(); + + expect(component.$el.tagName).toEqual('SPAN'); + expect(component.$el.querySelector('span > svg')).toBeDefined(); + }); + + it('should render a success status', () => { + const component = new CiIcon({ + propsData: { + status: { + icon: 'icon_status_success', + group: 'success', + }, + }, + }).$mount(); + + expect(component.$el.classList.contains('ci-status-icon-success')).toEqual(true); + }); + + it('should render a failed status', () => { + const component = new CiIcon({ + propsData: { + status: { + icon: 'icon_status_failed', + group: 'failed', + }, + }, + }).$mount(); + + expect(component.$el.classList.contains('ci-status-icon-failed')).toEqual(true); + }); + + it('should render success with warnings status', () => { + const component = new CiIcon({ + propsData: { + status: { + icon: 'icon_status_warning', + group: 'warning', + }, + }, + }).$mount(); + + expect(component.$el.classList.contains('ci-status-icon-warning')).toEqual(true); + }); + + it('should render pending status', () => { + const component = new CiIcon({ + propsData: { + status: { + icon: 'icon_status_pending', + group: 'pending', + }, + }, + }).$mount(); + + expect(component.$el.classList.contains('ci-status-icon-pending')).toEqual(true); + }); + + it('should render running status', () => { + const component = new CiIcon({ + propsData: { + status: { + icon: 'icon_status_running', + group: 'running', + }, + }, + }).$mount(); + + expect(component.$el.classList.contains('ci-status-icon-running')).toEqual(true); + }); + + it('should render created status', () => { + const component = new CiIcon({ + propsData: { + status: { + icon: 'icon_status_created', + group: 'created', + }, + }, + }).$mount(); + + expect(component.$el.classList.contains('ci-status-icon-created')).toEqual(true); + }); + + it('should render skipped status', () => { + const component = new CiIcon({ + propsData: { + status: { + icon: 'icon_status_skipped', + group: 'skipped', + }, + }, + }).$mount(); + + expect(component.$el.classList.contains('ci-status-icon-skipped')).toEqual(true); + }); + + it('should render canceled status', () => { + const component = new CiIcon({ + propsData: { + status: { + icon: 'icon_status_canceled', + group: 'canceled', + }, + }, + }).$mount(); + + expect(component.$el.classList.contains('ci-status-icon-canceled')).toEqual(true); + }); + + it('should render status for manual action', () => { + const component = new CiIcon({ + propsData: { + status: { + icon: 'icon_status_manual', + group: 'manual', + }, + }, + }).$mount(); + + expect(component.$el.classList.contains('ci-status-icon-manual')).toEqual(true); + }); +}); diff --git a/spec/javascripts/vue_shared/components/commit_spec.js b/spec/javascripts/vue_shared/components/commit_spec.js index df547299d75..0638483e7aa 100644 --- a/spec/javascripts/vue_shared/components/commit_spec.js +++ b/spec/javascripts/vue_shared/components/commit_spec.js @@ -61,16 +61,16 @@ describe('Commit component', () => { }); it('should render a link to the ref url', () => { - expect(component.$el.querySelector('.branch-name').getAttribute('href')).toEqual(props.commitRef.ref_url); + expect(component.$el.querySelector('.ref-name').getAttribute('href')).toEqual(props.commitRef.ref_url); }); it('should render the ref name', () => { - expect(component.$el.querySelector('.branch-name').textContent).toContain(props.commitRef.name); + 
expect(component.$el.querySelector('.ref-name').textContent).toContain(props.commitRef.name); }); it('should render the commit short sha with a link to the commit url', () => { - expect(component.$el.querySelector('.commit-id').getAttribute('href')).toEqual(props.commitUrl); - expect(component.$el.querySelector('.commit-id').textContent).toContain(props.shortSha); + expect(component.$el.querySelector('.commit-sha').getAttribute('href')).toEqual(props.commitUrl); + expect(component.$el.querySelector('.commit-sha').textContent).toContain(props.shortSha); }); it('should render the given commitIconSvg', () => { @@ -86,7 +86,7 @@ describe('Commit component', () => { it('Should render the author avatar with title and alt attributes', () => { expect( - component.$el.querySelector('.commit-title .avatar-image-container img').getAttribute('title'), + component.$el.querySelector('.commit-title .avatar-image-container img').getAttribute('data-original-title'), ).toContain(props.author.username); expect( component.$el.querySelector('.commit-title .avatar-image-container img').getAttribute('alt'), diff --git a/spec/javascripts/vue_shared/components/loading_icon_spec.js b/spec/javascripts/vue_shared/components/loading_icon_spec.js new file mode 100644 index 00000000000..1baf3537741 --- /dev/null +++ b/spec/javascripts/vue_shared/components/loading_icon_spec.js @@ -0,0 +1,53 @@ +import Vue from 'vue'; +import loadingIcon from '~/vue_shared/components/loading_icon.vue'; + +describe('Loading Icon Component', () => { + let LoadingIconComponent; + + beforeEach(() => { + LoadingIconComponent = Vue.extend(loadingIcon); + }); + + it('should render a spinner font awesome icon', () => { + const component = new LoadingIconComponent().$mount(); + + expect( + component.$el.querySelector('i').getAttribute('class'), + ).toEqual('fa fa-spin fa-spinner fa-1x'); + + expect(component.$el.tagName).toEqual('DIV'); + expect(component.$el.classList.contains('text-center')).toEqual(true); + }); + + it('should render accessibility attributes', () => { + const component = new LoadingIconComponent().$mount(); + + const icon = component.$el.querySelector('i'); + expect(icon.getAttribute('aria-hidden')).toEqual('true'); + expect(icon.getAttribute('aria-label')).toEqual('Loading'); + }); + + it('should render the provided label', () => { + const component = new LoadingIconComponent({ + propsData: { + label: 'This is a loading icon', + }, + }).$mount(); + + expect( + component.$el.querySelector('i').getAttribute('aria-label'), + ).toEqual('This is a loading icon'); + }); + + it('should render the provided size', () => { + const component = new LoadingIconComponent({ + propsData: { + size: '2', + }, + }).$mount(); + + expect( + component.$el.querySelector('i').classList.contains('fa-2x'), + ).toEqual(true); + }); +}); diff --git a/spec/javascripts/vue_shared/components/memory_graph_spec.js b/spec/javascripts/vue_shared/components/memory_graph_spec.js new file mode 100644 index 00000000000..d46a3f2328e --- /dev/null +++ b/spec/javascripts/vue_shared/components/memory_graph_spec.js @@ -0,0 +1,143 @@ +import Vue from 'vue'; +import memoryGraphComponent from '~/vue_shared/components/memory_graph'; +import { mockMetrics, mockMedian, mockMedianIndex } from './mock_data'; + +const defaultHeight = '25'; +const defaultWidth = '100'; + +const createComponent = () => { + const Component = Vue.extend(memoryGraphComponent); + + return new Component({ + el: document.createElement('div'), + propsData: { + metrics: [], + deploymentTime: 0, + width: 
'', + height: '', + pathD: '', + pathViewBox: '', + dotX: '', + dotY: '', + }, + }); +}; + +describe('MemoryGraph', () => { + let vm; + let el; + + beforeEach(() => { + vm = createComponent(); + el = vm.$el; + }); + + describe('props', () => { + it('should have props with defaults', (done) => { + const { metrics, deploymentTime, width, height } = memoryGraphComponent.props; + + Vue.nextTick(() => { + const typeClassMatcher = (propItem, expectedType) => { + const PropItemTypeClass = propItem.type; + expect(new PropItemTypeClass() instanceof expectedType).toBeTruthy(); + expect(propItem.required).toBeTruthy(); + }; + + typeClassMatcher(metrics, Array); + typeClassMatcher(deploymentTime, Number); + typeClassMatcher(width, String); + typeClassMatcher(height, String); + done(); + }); + }); + }); + + describe('data', () => { + it('should have default data', () => { + const data = memoryGraphComponent.data(); + const dataValidator = (dataItem, expectedType, defaultVal) => { + expect(typeof dataItem).toBe(expectedType); + expect(dataItem).toBe(defaultVal); + }; + + dataValidator(data.pathD, 'string', ''); + dataValidator(data.pathViewBox, 'string', ''); + dataValidator(data.dotX, 'string', ''); + dataValidator(data.dotY, 'string', ''); + }); + }); + + describe('computed', () => { + describe('getFormattedMedian', () => { + it('should show human readable median value based on provided median timestamp', () => { + vm.deploymentTime = mockMedian; + const formattedMedian = vm.getFormattedMedian; + expect(formattedMedian.indexOf('Deployed') > -1).toBeTruthy(); + expect(formattedMedian.indexOf('ago') > -1).toBeTruthy(); + }); + }); + }); + + describe('methods', () => { + describe('getMedianMetricIndex', () => { + it('should return index of closest metric timestamp to that of median', () => { + const matchingIndex = vm.getMedianMetricIndex(mockMedian, mockMetrics); + expect(matchingIndex).toBe(mockMedianIndex); + }); + }); + + describe('getGraphPlotValues', () => { + it('should return Object containing values to plot graph', () => { + const plotValues = vm.getGraphPlotValues(mockMedian, mockMetrics); + expect(plotValues.pathD).toBeDefined(); + expect(Array.isArray(plotValues.pathD)).toBeTruthy(); + + expect(plotValues.pathViewBox).toBeDefined(); + expect(typeof plotValues.pathViewBox).toBe('object'); + + expect(plotValues.dotX).toBeDefined(); + expect(typeof plotValues.dotX).toBe('number'); + + expect(plotValues.dotY).toBeDefined(); + expect(typeof plotValues.dotY).toBe('number'); + }); + }); + }); + + describe('template', () => { + it('should render template elements correctly', () => { + expect(el.classList.contains('memory-graph-container')).toBeTruthy(); + expect(el.querySelector('svg')).toBeDefined(); + }); + + it('should render graph when renderGraph is called internally', (done) => { + const { pathD, pathViewBox, dotX, dotY } = vm.getGraphPlotValues(mockMedian, mockMetrics); + vm.height = defaultHeight; + vm.width = defaultWidth; + vm.pathD = `M ${pathD}`; + vm.pathViewBox = `0 0 ${pathViewBox.lineWidth} ${pathViewBox.diff}`; + vm.dotX = dotX; + vm.dotY = dotY; + + Vue.nextTick(() => { + const svgEl = el.querySelector('svg'); + expect(svgEl).toBeDefined(); + expect(svgEl.getAttribute('height')).toBe(defaultHeight); + expect(svgEl.getAttribute('width')).toBe(defaultWidth); + + const pathEl = el.querySelector('path'); + expect(pathEl).toBeDefined(); + expect(pathEl.getAttribute('d')).toBe(`M ${pathD}`); + expect(pathEl.getAttribute('viewBox')).toBe(`0 0 ${pathViewBox.lineWidth} ${pathViewBox.diff}`); 
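+ // The <circle> asserted below appears to be the deployment marker dot the component plots at the computed dotX/dotY.
+ // Note the expectation reads a 'tranform' attribute rather than the standard SVG 'transform';
+ // this presumably mirrors whatever attribute name the component template actually emits.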
+ + const circleEl = el.querySelector('circle'); + expect(circleEl).toBeDefined(); + expect(circleEl.getAttribute('r')).toBe('1.5'); + expect(circleEl.getAttribute('tranform')).toBe('translate(0 -1)'); + expect(circleEl.getAttribute('cx')).toBe(`${dotX}`); + expect(circleEl.getAttribute('cy')).toBe(`${dotY}`); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/vue_shared/components/mock_data.js b/spec/javascripts/vue_shared/components/mock_data.js new file mode 100644 index 00000000000..0d781bdca74 --- /dev/null +++ b/spec/javascripts/vue_shared/components/mock_data.js @@ -0,0 +1,69 @@ +/* eslint-disable */ + +export const mockMetrics = [ + [1493716685, '4.30859375'], + [1493716745, '4.30859375'], + [1493716805, '4.30859375'], + [1493716865, '4.30859375'], + [1493716925, '4.30859375'], + [1493716985, '4.30859375'], + [1493717045, '4.30859375'], + [1493717105, '4.30859375'], + [1493717165, '4.30859375'], + [1493717225, '4.30859375'], + [1493717285, '4.30859375'], + [1493717345, '4.30859375'], + [1493717405, '4.30859375'], + [1493717465, '4.30859375'], + [1493717525, '4.30859375'], + [1493717585, '4.30859375'], + [1493717645, '4.30859375'], + [1493717705, '4.30859375'], + [1493717765, '4.30859375'], + [1493717825, '4.30859375'], + [1493717885, '4.30859375'], + [1493717945, '4.30859375'], + [1493718005, '4.30859375'], + [1493718065, '4.30859375'], + [1493718125, '4.30859375'], + [1493718185, '4.30859375'], + [1493718245, '4.30859375'], + [1493718305, '4.234375'], + [1493718365, '4.234375'], + [1493718425, '4.234375'], + [1493718485, '4.234375'], + [1493718545, '4.243489583333333'], + [1493718605, '4.2109375'], + [1493718665, '4.2109375'], + [1493718725, '4.2109375'], + [1493718785, '4.26171875'], + [1493718845, '4.26171875'], + [1493718905, '4.26171875'], + [1493718965, '4.26171875'], + [1493719025, '4.26171875'], + [1493719085, '4.26171875'], + [1493719145, '4.26171875'], + [1493719205, '4.26171875'], + [1493719265, '4.26171875'], + [1493719325, '4.26171875'], + [1493719385, '4.26171875'], + [1493719445, '4.26171875'], + [1493719505, '4.26171875'], + [1493719565, '4.26171875'], + [1493719625, '4.26171875'], + [1493719685, '4.26171875'], + [1493719745, '4.26171875'], + [1493719805, '4.26171875'], + [1493719865, '4.26171875'], + [1493719925, '4.26171875'], + [1493719985, '4.26171875'], + [1493720045, '4.26171875'], + [1493720105, '4.26171875'], + [1493720165, '4.26171875'], + [1493720225, '4.26171875'], + [1493720285, '4.26171875'], +]; + +export const mockMedian = 1493718485; + +export const mockMedianIndex = 30; diff --git a/spec/javascripts/vue_shared/components/pipelines_table_row_spec.js b/spec/javascripts/vue_shared/components/pipelines_table_row_spec.js index 699625cdbb7..286118917e8 100644 --- a/spec/javascripts/vue_shared/components/pipelines_table_row_spec.js +++ b/spec/javascripts/vue_shared/components/pipelines_table_row_spec.js @@ -1,27 +1,47 @@ import Vue from 'vue'; import tableRowComp from '~/vue_shared/components/pipelines_table_row'; -import pipeline from '../../commit/pipelines/mock_data'; describe('Pipelines Table Row', () => { - let component; - - beforeEach(() => { + const jsonFixtureName = 'pipelines/pipelines.json'; + const buildComponent = (pipeline) => { const PipelinesTableRowComponent = Vue.extend(tableRowComp); - - component = new PipelinesTableRowComponent({ + return new PipelinesTableRowComponent({ el: document.querySelector('.test-dom-element'), propsData: { pipeline, service: {}, }, }).$mount(); + }; + + let component; + let pipeline; + let 
pipelineWithoutAuthor; + let pipelineWithoutCommit; + + preloadFixtures(jsonFixtureName); + + beforeEach(() => { + const pipelines = getJSONFixture(jsonFixtureName).pipelines; + pipeline = pipelines.find(p => p.id === 1); + pipelineWithoutAuthor = pipelines.find(p => p.id === 2); + pipelineWithoutCommit = pipelines.find(p => p.id === 3); + }); + + afterEach(() => { + component.$destroy(); }); it('should render a table row', () => { + component = buildComponent(pipeline); expect(component.$el).toEqual('TR'); }); describe('status column', () => { + beforeEach(() => { + component = buildComponent(pipeline); + }); + it('should render a pipeline link', () => { expect( component.$el.querySelector('td.commit-link a').getAttribute('href'), @@ -36,6 +56,10 @@ describe('Pipelines Table Row', () => { }); describe('information column', () => { + beforeEach(() => { + component = buildComponent(pipeline); + }); + it('should render a pipeline link', () => { expect( component.$el.querySelector('td:nth-child(2) a').getAttribute('href'), @@ -55,7 +79,7 @@ describe('Pipelines Table Row', () => { ).toEqual(pipeline.user.web_url); expect( - component.$el.querySelector('td:nth-child(2) img').getAttribute('title'), + component.$el.querySelector('td:nth-child(2) img').getAttribute('data-original-title'), ).toEqual(pipeline.user.name); }); }); @@ -63,13 +87,59 @@ describe('Pipelines Table Row', () => { describe('commit column', () => { it('should render link to commit', () => { - expect( - component.$el.querySelector('td:nth-child(3) .commit-id').getAttribute('href'), - ).toEqual(pipeline.commit.commit_path); + component = buildComponent(pipeline); + + const commitLink = component.$el.querySelector('.branch-commit .commit-sha'); + expect(commitLink.getAttribute('href')).toEqual(pipeline.commit.commit_path); + }); + + const findElements = () => { + const commitTitleElement = component.$el.querySelector('.branch-commit .commit-title'); + const commitAuthorElement = commitTitleElement.querySelector('a.avatar-image-container'); + + if (!commitAuthorElement) { + return { commitAuthorElement }; + } + + const commitAuthorLink = commitAuthorElement.getAttribute('href'); + const commitAuthorName = commitAuthorElement.querySelector('img.avatar').getAttribute('data-original-title'); + + return { commitAuthorElement, commitAuthorLink, commitAuthorName }; + }; + + it('renders nothing without commit', () => { + expect(pipelineWithoutCommit.commit).toBe(null); + component = buildComponent(pipelineWithoutCommit); + + const { commitAuthorElement } = findElements(); + + expect(commitAuthorElement).toBe(null); + }); + + it('renders commit author', () => { + component = buildComponent(pipeline); + const { commitAuthorLink, commitAuthorName } = findElements(); + + expect(commitAuthorLink).toEqual(pipeline.commit.author.web_url); + expect(commitAuthorName).toEqual(pipeline.commit.author.username); + }); + + it('renders commit with unregistered author', () => { + expect(pipelineWithoutAuthor.commit.author).toBe(null); + component = buildComponent(pipelineWithoutAuthor); + + const { commitAuthorLink, commitAuthorName } = findElements(); + + expect(commitAuthorLink).toEqual(`mailto:${pipelineWithoutAuthor.commit.author_email}`); + expect(commitAuthorName).toEqual(pipelineWithoutAuthor.commit.author_name); }); }); describe('stages column', () => { + beforeEach(() => { + component = buildComponent(pipeline); + }); + it('should render an icon for each stage', () => { expect( component.$el.querySelectorAll('td:nth-child(4) 
.js-builds-dropdown-button').length, @@ -78,6 +148,10 @@ describe('Pipelines Table Row', () => { }); describe('actions column', () => { + beforeEach(() => { + component = buildComponent(pipeline); + }); + it('should render the provided actions', () => { expect( component.$el.querySelectorAll('td:nth-child(6) ul li').length, diff --git a/spec/javascripts/vue_shared/components/pipelines_table_spec.js b/spec/javascripts/vue_shared/components/pipelines_table_spec.js index 4d3ced944d7..6cc178b8f1d 100644 --- a/spec/javascripts/vue_shared/components/pipelines_table_spec.js +++ b/spec/javascripts/vue_shared/components/pipelines_table_spec.js @@ -1,13 +1,19 @@ import Vue from 'vue'; import pipelinesTableComp from '~/vue_shared/components/pipelines_table'; import '~/lib/utils/datetime_utility'; -import pipeline from '../../commit/pipelines/mock_data'; describe('Pipelines Table', () => { + const jsonFixtureName = 'pipelines/pipelines.json'; + + let pipeline; let PipelinesTableComponent; + preloadFixtures(jsonFixtureName); + beforeEach(() => { PipelinesTableComponent = Vue.extend(pipelinesTableComp); + const pipelines = getJSONFixture(jsonFixtureName).pipelines; + pipeline = pipelines.find(p => p.id === 1); }); describe('table', () => { diff --git a/spec/javascripts/vue_shared/components/table_pagination_spec.js b/spec/javascripts/vue_shared/components/table_pagination_spec.js index d1640ffed99..895e1c585b4 100644 --- a/spec/javascripts/vue_shared/components/table_pagination_spec.js +++ b/spec/javascripts/vue_shared/components/table_pagination_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import paginationComp from '~/vue_shared/components/table_pagination'; +import paginationComp from '~/vue_shared/components/table_pagination.vue'; import '~/lib/utils/common_utils'; describe('Pagination component', () => { @@ -124,6 +124,10 @@ describe('Pagination component', () => { }); describe('paramHelper', () => { + afterEach(() => { + window.history.pushState({}, null, ''); + }); + it('can parse url parameters correctly', () => { window.history.pushState({}, null, '?scope=all&p=2'); diff --git a/spec/javascripts/vue_shared/components/user_avatar_image_spec.js b/spec/javascripts/vue_shared/components/user_avatar_image_spec.js new file mode 100644 index 00000000000..8daa7610274 --- /dev/null +++ b/spec/javascripts/vue_shared/components/user_avatar_image_spec.js @@ -0,0 +1,54 @@ +import Vue from 'vue'; +import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue'; + +const UserAvatarImageComponent = Vue.extend(UserAvatarImage); + +describe('User Avatar Image Component', function () { + describe('Initialization', function () { + beforeEach(function () { + this.propsData = { + size: 99, + imgSrc: 'myavatarurl.com', + imgAlt: 'mydisplayname', + cssClasses: 'myextraavatarclass', + tooltipText: 'tooltip text', + tooltipPlacement: 'bottom', + }; + + this.userAvatarImage = new UserAvatarImageComponent({ + propsData: this.propsData, + }).$mount(); + }); + + it('should return a defined Vue component', function () { + expect(this.userAvatarImage).toBeDefined(); + }); + + it('should have <img> as a child element', function () { + expect(this.userAvatarImage.$el.tagName).toBe('IMG'); + }); + + it('should properly compute tooltipContainer', function () { + expect(this.userAvatarImage.tooltipContainer).toBe('body'); + }); + + it('should properly render tooltipContainer', function () { + expect(this.userAvatarImage.$el.getAttribute('data-container')).toBe('body'); + }); + + it('should properly 
compute avatarSizeClass', function () { + expect(this.userAvatarImage.avatarSizeClass).toBe('s99'); + }); + + it('should properly render img css', function () { + const classList = this.userAvatarImage.$el.classList; + const containsAvatar = classList.contains('avatar'); + const containsSizeClass = classList.contains('s99'); + const containsCustomClass = classList.contains('myextraavatarclass'); + + expect(containsAvatar).toBe(true); + expect(containsSizeClass).toBe(true); + expect(containsCustomClass).toBe(true); + }); + }); +}); diff --git a/spec/javascripts/vue_shared/components/user_avatar_link_spec.js b/spec/javascripts/vue_shared/components/user_avatar_link_spec.js new file mode 100644 index 00000000000..52e450e9ba5 --- /dev/null +++ b/spec/javascripts/vue_shared/components/user_avatar_link_spec.js @@ -0,0 +1,50 @@ +import Vue from 'vue'; +import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue'; + +describe('User Avatar Link Component', function () { + beforeEach(function () { + this.propsData = { + linkHref: 'myavatarurl.com', + imgSize: 99, + imgSrc: 'myavatarurl.com', + imgAlt: 'mydisplayname', + imgCssClasses: 'myextraavatarclass', + tooltipText: 'tooltip text', + tooltipPlacement: 'bottom', + }; + + const UserAvatarLinkComponent = Vue.extend(UserAvatarLink); + + this.userAvatarLink = new UserAvatarLinkComponent({ + propsData: this.propsData, + }).$mount(); + + this.userAvatarImage = this.userAvatarLink.$children[0]; + }); + + it('should return a defined Vue component', function () { + expect(this.userAvatarLink).toBeDefined(); + }); + + it('should have user-avatar-image registered as child component', function () { + expect(this.userAvatarLink.$options.components.userAvatarImage).toBeDefined(); + }); + + it('user-avatar-link should have user-avatar-image as child component', function () { + expect(this.userAvatarImage).toBeDefined(); + }); + + it('should render <a> as a child element', function () { + expect(this.userAvatarLink.$el.tagName).toBe('A'); + }); + + it('should have <img> as a child element', function () { + expect(this.userAvatarLink.$el.querySelector('img')).not.toBeNull(); + }); + + it('should return neccessary props as defined', function () { + _.each(this.propsData, (val, key) => { + expect(this.userAvatarLink[key]).toBeDefined(); + }); + }); +}); diff --git a/spec/javascripts/vue_shared/components/user_avatar_svg_spec.js b/spec/javascripts/vue_shared/components/user_avatar_svg_spec.js new file mode 100644 index 00000000000..b8d639ffbec --- /dev/null +++ b/spec/javascripts/vue_shared/components/user_avatar_svg_spec.js @@ -0,0 +1,29 @@ +import Vue from 'vue'; +import UserAvatarSvg from '~/vue_shared/components/user_avatar/user_avatar_svg.vue'; +import avatarSvg from 'icons/_icon_random.svg'; + +const UserAvatarSvgComponent = Vue.extend(UserAvatarSvg); + +describe('User Avatar Svg Component', function () { + describe('Initialization', function () { + beforeEach(function () { + this.propsData = { + size: 99, + svg: avatarSvg, + }; + + this.userAvatarSvg = new UserAvatarSvgComponent({ + propsData: this.propsData, + }).$mount(); + }); + + it('should return a defined Vue component', function () { + expect(this.userAvatarSvg).toBeDefined(); + }); + + it('should have <svg> as a child element', function () { + expect(this.userAvatarSvg.$el.tagName).toEqual('svg'); + expect(this.userAvatarSvg.$el.innerHTML).toContain('<path'); + }); + }); +}); diff --git a/spec/javascripts/vue_shared/translate_spec.js 
b/spec/javascripts/vue_shared/translate_spec.js new file mode 100644 index 00000000000..cbb3cbdff46 --- /dev/null +++ b/spec/javascripts/vue_shared/translate_spec.js @@ -0,0 +1,90 @@ +import Vue from 'vue'; +import Translate from '~/vue_shared/translate'; + +Vue.use(Translate); + +describe('Vue translate filter', () => { + let el; + + beforeEach(() => { + el = document.createElement('div'); + + document.body.appendChild(el); + }); + + it('translate single text', (done) => { + const comp = new Vue({ + el, + template: ` + <span> + {{ __('testing') }} + </span> + `, + }).$mount(); + + Vue.nextTick(() => { + expect( + comp.$el.textContent.trim(), + ).toBe('testing'); + + done(); + }); + }); + + it('translate plural text with single count', (done) => { + const comp = new Vue({ + el, + template: ` + <span> + {{ n__('%d day', '%d days', 1) }} + </span> + `, + }).$mount(); + + Vue.nextTick(() => { + expect( + comp.$el.textContent.trim(), + ).toBe('1 day'); + + done(); + }); + }); + + it('translate plural text with multiple count', (done) => { + const comp = new Vue({ + el, + template: ` + <span> + {{ n__('%d day', '%d days', 2) }} + </span> + `, + }).$mount(); + + Vue.nextTick(() => { + expect( + comp.$el.textContent.trim(), + ).toBe('2 days'); + + done(); + }); + }); + + it('translate plural without replacing any text', (done) => { + const comp = new Vue({ + el, + template: ` + <span> + {{ n__('day', 'days', 2) }} + </span> + `, + }).$mount(); + + Vue.nextTick(() => { + expect( + comp.$el.textContent.trim(), + ).toBe('days'); + + done(); + }); + }); +}); diff --git a/spec/javascripts/zen_mode_spec.js b/spec/javascripts/zen_mode_spec.js index 99515f2e5f2..4399c8b2025 100644 --- a/spec/javascripts/zen_mode_spec.js +++ b/spec/javascripts/zen_mode_spec.js @@ -3,7 +3,7 @@ /* global Mousetrap */ /* global ZenMode */ -require('~/zen_mode'); +import '~/zen_mode'; (function() { var enterZen, escapeKeydown, exitZen; diff --git a/spec/lib/banzai/filter/emoji_filter_spec.rb b/spec/lib/banzai/filter/emoji_filter_spec.rb index 707212e07fd..086a006c45f 100644 --- a/spec/lib/banzai/filter/emoji_filter_spec.rb +++ b/spec/lib/banzai/filter/emoji_filter_spec.rb @@ -68,9 +68,9 @@ describe Banzai::Filter::EmojiFilter, lib: true do expect(doc.css('gl-emoji').size).to eq 1 end - it 'matches multiple emoji in a row' do + it 'does not match multiple emoji in a row' do doc = filter(':see_no_evil::hear_no_evil::speak_no_evil:') - expect(doc.css('gl-emoji').size).to eq 3 + expect(doc.css('gl-emoji').size).to eq 0 end it 'unicode matches multiple emoji in a row' do @@ -83,6 +83,12 @@ describe Banzai::Filter::EmojiFilter, lib: true do expect(doc.css('gl-emoji').size).to eq 6 end + it 'does not match emoji in a string' do + doc = filter("'2a00:a4c0:100::1'") + + expect(doc.css('gl-emoji').size).to eq 0 + end + it 'has a data-name attribute' do doc = filter(':-1:') expect(doc.css('gl-emoji').first.attr('data-name')).to eq 'thumbsdown' diff --git a/spec/lib/banzai/filter/external_link_filter_spec.rb b/spec/lib/banzai/filter/external_link_filter_spec.rb index d9e4525cb28..0f8ec8de7a0 100644 --- a/spec/lib/banzai/filter/external_link_filter_spec.rb +++ b/spec/lib/banzai/filter/external_link_filter_spec.rb @@ -1,5 +1,22 @@ require 'spec_helper' +shared_examples 'an external link with rel attribute' do + it 'adds rel="nofollow" to external links' do + expect(doc.at_css('a')).to have_attribute('rel') + expect(doc.at_css('a')['rel']).to include 'nofollow' + end + + it 'adds rel="noreferrer" to external links' do + 
expect(doc.at_css('a')).to have_attribute('rel') + expect(doc.at_css('a')['rel']).to include 'noreferrer' + end + + it 'adds rel="noopener" to external links' do + expect(doc.at_css('a')).to have_attribute('rel') + expect(doc.at_css('a')['rel']).to include 'noopener' + end +end + describe Banzai::Filter::ExternalLinkFilter, lib: true do include FilterSpecHelper @@ -22,49 +39,58 @@ describe Banzai::Filter::ExternalLinkFilter, lib: true do context 'for root links on document' do let(:doc) { filter %q(<a href="https://google.com/">Google</a>) } - it 'adds rel="nofollow" to external links' do - expect(doc.at_css('a')).to have_attribute('rel') - expect(doc.at_css('a')['rel']).to include 'nofollow' + it_behaves_like 'an external link with rel attribute' + end + + context 'for nested links on document' do + let(:doc) { filter %q(<p><a href="https://google.com/">Google</a></p>) } + + it_behaves_like 'an external link with rel attribute' + end + + context 'for invalid urls' do + it 'skips broken hrefs' do + doc = filter %q(<p><a href="don't crash on broken urls">Google</a></p>) + expected = %q(<p><a href="don't%20crash%20on%20broken%20urls">Google</a></p>) + + expect(doc.to_html).to eq(expected) end - it 'adds rel="noreferrer" to external links' do - expect(doc.at_css('a')).to have_attribute('rel') - expect(doc.at_css('a')['rel']).to include 'noreferrer' + it 'skips improperly formatted mailtos' do + doc = filter %q(<p><a href="mailto://jblogs@example.com">Email</a></p>) + expected = %q(<p><a href="mailto://jblogs@example.com">Email</a></p>) + + expect(doc.to_html).to eq(expected) end end - context 'for nested links on document' do - let(:doc) { filter %q(<p><a href="https://google.com/">Google</a></p>) } + context 'for links with a username' do + context 'with a valid username' do + let(:doc) { filter %q(<a href="https://user@google.com/">Google</a>) } - it 'adds rel="nofollow" to external links' do - expect(doc.at_css('a')).to have_attribute('rel') - expect(doc.at_css('a')['rel']).to include 'nofollow' + it_behaves_like 'an external link with rel attribute' end - it 'adds rel="noreferrer" to external links' do - expect(doc.at_css('a')).to have_attribute('rel') - expect(doc.at_css('a')['rel']).to include 'noreferrer' + context 'with an impersonated username' do + let(:internal) { Gitlab.config.gitlab.url } + + let(:doc) { filter %Q(<a href="https://#{internal}@example.com" target="_blank">Reverse Tabnabbing</a>) } + + it_behaves_like 'an external link with rel attribute' end end context 'for non-lowercase scheme links' do - let(:doc_with_http) { filter %q(<p><a href="httP://google.com/">Google</a></p>) } - let(:doc_with_https) { filter %q(<p><a href="hTTpS://google.com/">Google</a></p>) } - - it 'adds rel="nofollow" to external links' do - expect(doc_with_http.at_css('a')).to have_attribute('rel') - expect(doc_with_https.at_css('a')).to have_attribute('rel') + context 'with http' do + let(:doc) { filter %q(<p><a href="httP://google.com/">Google</a></p>) } - expect(doc_with_http.at_css('a')['rel']).to include 'nofollow' - expect(doc_with_https.at_css('a')['rel']).to include 'nofollow' + it_behaves_like 'an external link with rel attribute' end - it 'adds rel="noreferrer" to external links' do - expect(doc_with_http.at_css('a')).to have_attribute('rel') - expect(doc_with_https.at_css('a')).to have_attribute('rel') + context 'with https' do + let(:doc) { filter %q(<p><a href="hTTpS://google.com/">Google</a></p>) } - expect(doc_with_http.at_css('a')['rel']).to include 'noreferrer' - 
expect(doc_with_https.at_css('a')['rel']).to include 'noreferrer' + it_behaves_like 'an external link with rel attribute' end it 'skips internal links' do @@ -84,14 +110,6 @@ describe Banzai::Filter::ExternalLinkFilter, lib: true do context 'for protocol-relative links' do let(:doc) { filter %q(<p><a href="//google.com/">Google</a></p>) } - it 'adds rel="nofollow" to external links' do - expect(doc.at_css('a')).to have_attribute('rel') - expect(doc.at_css('a')['rel']).to include 'nofollow' - end - - it 'adds rel="noreferrer" to external links' do - expect(doc.at_css('a')).to have_attribute('rel') - expect(doc.at_css('a')['rel']).to include 'noreferrer' - end + it_behaves_like 'an external link with rel attribute' end end diff --git a/spec/lib/banzai/filter/issuable_state_filter_spec.rb b/spec/lib/banzai/filter/issuable_state_filter_spec.rb new file mode 100644 index 00000000000..9c2399815b9 --- /dev/null +++ b/spec/lib/banzai/filter/issuable_state_filter_spec.rb @@ -0,0 +1,197 @@ +require 'spec_helper' + +describe Banzai::Filter::IssuableStateFilter, lib: true do + include ActionView::Helpers::UrlHelper + include FilterSpecHelper + + let(:user) { create(:user) } + let(:context) { { current_user: user, issuable_state_filter_enabled: true } } + let(:closed_issue) { create_issue(:closed) } + let(:project) { create(:empty_project, :public) } + let(:other_project) { create(:empty_project, :public) } + + def create_link(text, data) + link_to(text, '', class: 'gfm has-tooltip', data: data) + end + + def create_issue(state) + create(:issue, state, project: project) + end + + def create_merge_request(state) + create(:merge_request, state, + source_project: project, target_project: project) + end + + it 'ignores non-GFM links' do + html = %(See <a href="https://google.com/">Google</a>) + doc = filter(html, current_user: user) + + expect(doc.css('a').last.text).to eq('Google') + end + + it 'ignores non-issuable links' do + link = create_link('text', project: project, reference_type: 'issue') + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq('text') + end + + it 'ignores issuable links with empty content' do + link = create_link('', issue: closed_issue.id, reference_type: 'issue') + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq('') + end + + it 'ignores issuable links with custom anchor' do + link = create_link('something', issue: closed_issue.id, reference_type: 'issue') + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq('something') + end + + it 'ignores issuable links to specific comments' do + link = create_link("#{closed_issue.to_reference} (comment 1)", issue: closed_issue.id, reference_type: 'issue') + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq("#{closed_issue.to_reference} (comment 1)") + end + + it 'ignores merge request links to diffs tab' do + merge_request = create(:merge_request, :closed) + link = create_link( + "#{merge_request.to_reference} (diffs)", + merge_request: merge_request.id, + reference_type: 'merge_request' + ) + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq("#{merge_request.to_reference} (diffs)") + end + + it 'handles cross project references' do + link = create_link(closed_issue.to_reference(other_project), issue: closed_issue.id, reference_type: 'issue') + doc = filter(link, context.merge(project: other_project)) + + expect(doc.css('a').last.text).to eq("#{closed_issue.to_reference(other_project)} (closed)") + end + + it 'does not append state when filter is 
not enabled' do + link = create_link('text', issue: closed_issue.id, reference_type: 'issue') + context = { current_user: user } + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq('text') + end + + context 'when project is in pending delete' do + before do + project.update!(pending_delete: true) + end + + it 'does not append issue state' do + link = create_link('text', issue: closed_issue.id, reference_type: 'issue') + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq('text') + end + end + + context 'for issue references' do + it 'ignores open issue references' do + issue = create_issue(:opened) + link = create_link(issue.to_reference, issue: issue.id, reference_type: 'issue') + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq(issue.to_reference) + end + + it 'ignores reopened issue references' do + issue = create_issue(:reopened) + link = create_link(issue.to_reference, issue: issue.id, reference_type: 'issue') + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq(issue.to_reference) + end + + it 'appends state to closed issue references' do + link = create_link(closed_issue.to_reference, issue: closed_issue.id, reference_type: 'issue') + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq("#{closed_issue.to_reference} (closed)") + end + end + + context 'for merge request references' do + it 'ignores open merge request references' do + merge_request = create_merge_request(:opened) + + link = create_link( + merge_request.to_reference, + merge_request: merge_request.id, + reference_type: 'merge_request' + ) + + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq(merge_request.to_reference) + end + + it 'ignores reopened merge request references' do + merge_request = create_merge_request(:reopened) + + link = create_link( + merge_request.to_reference, + merge_request: merge_request.id, + reference_type: 'merge_request' + ) + + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq(merge_request.to_reference) + end + + it 'ignores locked merge request references' do + merge_request = create_merge_request(:locked) + + link = create_link( + merge_request.to_reference, + merge_request: merge_request.id, + reference_type: 'merge_request' + ) + + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq(merge_request.to_reference) + end + + it 'appends state to closed merge request references' do + merge_request = create_merge_request(:closed) + + link = create_link( + merge_request.to_reference, + merge_request: merge_request.id, + reference_type: 'merge_request' + ) + + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq("#{merge_request.to_reference} (closed)") + end + + it 'appends state to merged merge request references' do + merge_request = create_merge_request(:merged) + + link = create_link( + merge_request.to_reference, + merge_request: merge_request.id, + reference_type: 'merge_request' + ) + + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq("#{merge_request.to_reference} (merged)") + end + end +end diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb new file mode 100644 index 00000000000..897288b8ad5 --- /dev/null +++ b/spec/lib/banzai/filter/markdown_filter_spec.rb @@ -0,0 +1,19 @@ +require 'spec_helper' + +describe Banzai::Filter::MarkdownFilter, lib: true do + include FilterSpecHelper + + context 'code block' do + it 'adds language to lang attribute when 
specified' do + result = filter("```html\nsome code\n```") + + expect(result).to start_with("\n<pre><code lang=\"html\">") + end + + it 'does not add language to lang attribute when not specified' do + result = filter("```\nsome code\n```") + + expect(result).to start_with("\n<pre><code>") + end + end +end diff --git a/spec/lib/banzai/filter/plantuml_filter_spec.rb b/spec/lib/banzai/filter/plantuml_filter_spec.rb index f85a5dcbd8b..9b8ecb201f3 100644 --- a/spec/lib/banzai/filter/plantuml_filter_spec.rb +++ b/spec/lib/banzai/filter/plantuml_filter_spec.rb @@ -5,7 +5,7 @@ describe Banzai::Filter::PlantumlFilter, lib: true do it 'should replace plantuml pre tag with img tag' do stub_application_setting(plantuml_enabled: true, plantuml_url: "http://localhost:8080") - input = '<pre class="plantuml"><code>Bob -> Sara : Hello</code><pre>' + input = '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>' output = '<div class="imageblock"><div class="content"><img class="plantuml" src="http://localhost:8080/png/U9npoazIqBLJ24uiIbImKl18pSd91m0rkGMq"></div></div>' doc = filter(input) @@ -14,8 +14,8 @@ describe Banzai::Filter::PlantumlFilter, lib: true do it 'should not replace plantuml pre tag with img tag if disabled' do stub_application_setting(plantuml_enabled: false) - input = '<pre class="plantuml"><code>Bob -> Sara : Hello</code><pre>' - output = '<pre class="plantuml"><code>Bob -> Sara : Hello</code><pre></pre></pre>' + input = '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>' + output = '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>' doc = filter(input) expect(doc.to_s).to eq output @@ -23,7 +23,7 @@ describe Banzai::Filter::PlantumlFilter, lib: true do it 'should not replace plantuml pre tag with img tag if url is invalid' do stub_application_setting(plantuml_enabled: true, plantuml_url: "invalid") - input = '<pre class="plantuml"><code>Bob -> Sara : Hello</code><pre>' + input = '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>' output = '<div class="listingblock"><div class="content"><pre class="plantuml plantuml-error"> PlantUML Error: cannot connect to PlantUML server at "invalid"</pre></div></div>' doc = filter(input) diff --git a/spec/lib/banzai/filter/redactor_filter_spec.rb b/spec/lib/banzai/filter/redactor_filter_spec.rb index 0140a91c7ba..7c4a0f32c7b 100644 --- a/spec/lib/banzai/filter/redactor_filter_spec.rb +++ b/spec/lib/banzai/filter/redactor_filter_spec.rb @@ -15,6 +15,16 @@ describe Banzai::Filter::RedactorFilter, lib: true do link_to('text', '', class: 'gfm', data: data) end + it 'skips when the skip_redaction flag is set' do + user = create(:user) + project = create(:empty_project) + + link = reference_link(project: project.id, reference_type: 'test') + doc = filter(link, current_user: user, skip_redaction: true) + + expect(doc.css('a').length).to eq 1 + end + context 'with data-project' do let(:parser_class) do Class.new(Banzai::ReferenceParser::BaseParser) do @@ -103,7 +113,7 @@ describe Banzai::Filter::RedactorFilter, lib: true do it 'allows references for assignee' do assignee = create(:user) project = create(:empty_project, :public) - issue = create(:issue, :confidential, project: project, assignee: assignee) + issue = create(:issue, :confidential, project: project, assignees: [assignee]) link = reference_link(project: project.id, issue: issue.id, reference_type: 'issue') doc = filter(link, current_user: assignee) diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb 
b/spec/lib/banzai/filter/sanitization_filter_spec.rb index b4cd5f63a15..fdbc65b5e00 100644 --- a/spec/lib/banzai/filter/sanitization_filter_spec.rb +++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb @@ -49,11 +49,12 @@ describe Banzai::Filter::SanitizationFilter, lib: true do instance = described_class.new('Foo') 3.times { instance.whitelist } - expect(instance.whitelist[:transformers].size).to eq 5 + expect(instance.whitelist[:transformers].size).to eq 4 end - it 'allows syntax highlighting' do - exp = act = %q{<pre class="code highlight white c"><code><span class="k">def</span></code></pre>} + it 'sanitizes `class` attribute from all elements' do + act = %q{<pre class="code highlight white c"><code><span class="k">def</span></code></pre>} + exp = %q{<pre><code><span class="k">def</span></code></pre>} expect(filter(act).to_html).to eq exp end diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb index 63fb1bb25c4..f61fc8ceb9e 100644 --- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb +++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb @@ -12,14 +12,14 @@ describe Banzai::Filter::SyntaxHighlightFilter, lib: true do context "when a valid language is specified" do it "highlights as that language" do - result = filter('<pre><code class="ruby">def fun end</code></pre>') + result = filter('<pre><code lang="ruby">def fun end</code></pre>') expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre>') end end context "when an invalid language is specified" do it "highlights as plaintext" do - result = filter('<pre><code class="gnuplot">This is a test</code></pre>') + result = filter('<pre><code lang="gnuplot">This is a test</code></pre>') expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>') end end @@ -30,7 +30,7 @@ describe Banzai::Filter::SyntaxHighlightFilter, lib: true do end it "highlights as plaintext" do - result = filter('<pre><code class="ruby">This is a test</code></pre>') + result = filter('<pre><code lang="ruby">This is a test</code></pre>') expect(result.to_html).to eq('<pre class="code highlight" lang="" v-pre="true"><code>This is a test</code></pre>') end end diff --git a/spec/lib/banzai/issuable_extractor_spec.rb b/spec/lib/banzai/issuable_extractor_spec.rb new file mode 100644 index 00000000000..e5d332efb08 --- /dev/null +++ b/spec/lib/banzai/issuable_extractor_spec.rb @@ -0,0 +1,52 @@ +require 'spec_helper' + +describe Banzai::IssuableExtractor, lib: true do + let(:project) { create(:empty_project) } + let(:user) { create(:user) } + let(:extractor) { described_class.new(project, user) } + let(:issue) { create(:issue, project: project) } + let(:merge_request) { create(:merge_request, source_project: project) } + let(:issue_link) do + html_to_node( + "<a href='' data-issue='#{issue.id}' data-reference-type='issue' class='gfm'>text</a>" + ) + end + let(:merge_request_link) do + html_to_node( + "<a href='' data-merge-request='#{merge_request.id}' data-reference-type='merge_request' class='gfm'>text</a>" + ) + end + + def html_to_node(html) + Nokogiri::HTML.fragment( + html + ).children[0] + end + + it 'returns instances of issuables for nodes with 
references' do + result = extractor.extract([issue_link, merge_request_link]) + + expect(result).to eq(issue_link => issue, merge_request_link => merge_request) + end + + describe 'caching' do + before do + RequestStore.begin! + end + + after do + RequestStore.end! + RequestStore.clear! + end + + it 'saves records to cache' do + extractor.extract([issue_link, merge_request_link]) + + second_call_queries = ActiveRecord::QueryRecorder.new do + extractor.extract([issue_link, merge_request_link]) + end.count + + expect(second_call_queries).to eq 0 + end + end +end diff --git a/spec/lib/banzai/object_renderer_spec.rb b/spec/lib/banzai/object_renderer_spec.rb index 6bcda87c999..dd2674f9f20 100644 --- a/spec/lib/banzai/object_renderer_spec.rb +++ b/spec/lib/banzai/object_renderer_spec.rb @@ -3,128 +3,51 @@ require 'spec_helper' describe Banzai::ObjectRenderer do let(:project) { create(:empty_project) } let(:user) { project.owner } - - def fake_object(attrs = {}) - object = double(attrs.merge("new_record?" => true, "destroyed?" => true)) - allow(object).to receive(:markdown_cache_field_for).with(:note).and_return(:note_html) - allow(object).to receive(:banzai_render_context).with(:note).and_return(project: nil, author: nil) - allow(object).to receive(:update_column).with(:note_html, anything).and_return(true) - object - end + let(:renderer) { described_class.new(project, user, custom_value: 'value') } + let(:object) { Note.new(note: 'hello', note_html: '<p dir="auto">hello</p>', cached_markdown_version: CacheMarkdownField::CACHE_VERSION) } describe '#render' do it 'renders and redacts an Array of objects' do - renderer = described_class.new(project, user) - object = fake_object(note: 'hello', note_html: nil) - - expect(renderer).to receive(:render_objects).with([object], :note). - and_call_original - - expect(renderer).to receive(:redact_documents). - with(an_instance_of(Array)). - and_call_original - - expect(object).to receive(:redacted_note_html=).with('<p dir="auto">hello</p>') - expect(object).to receive(:user_visible_reference_count=).with(0) - renderer.render([object], :note) - end - end - - describe '#render_objects' do - it 'renders an Array of objects' do - object = fake_object(note: 'hello', note_html: nil) - - renderer = described_class.new(project, user) - expect(renderer).to receive(:render_attributes).with([object], :note). - and_call_original - - rendered = renderer.render_objects([object], :note) - - expect(rendered).to be_an_instance_of(Array) - expect(rendered[0]).to be_an_instance_of(Nokogiri::HTML::DocumentFragment) - end - end - - describe '#redact_documents' do - it 'redacts a set of documents and returns them as an Array of Hashes' do - doc = Nokogiri::HTML.fragment('<p>hello</p>') - renderer = described_class.new(project, user) - - expect_any_instance_of(Banzai::Redactor).to receive(:redact). - with([doc]). 
- and_call_original - - redacted = renderer.redact_documents([doc]) - - expect(redacted.count).to eq(1) - expect(redacted.first[:visible_reference_count]).to eq(0) - expect(redacted.first[:document].to_html).to eq('<p>hello</p>') + expect(object.redacted_note_html).to eq '<p dir="auto">hello</p>' + expect(object.user_visible_reference_count).to eq 0 end - end - describe '#context_for' do - let(:object) { fake_object(note: 'hello') } - let(:renderer) { described_class.new(project, user) } + it 'calls Banzai::Redactor to perform redaction' do + expect_any_instance_of(Banzai::Redactor).to receive(:redact).and_call_original - it 'returns a Hash' do - expect(renderer.context_for(object, :note)).to be_an_instance_of(Hash) - end - - it 'includes the banzai render context for the object' do - expect(object).to receive(:banzai_render_context).with(:note).and_return(foo: :bar) - context = renderer.context_for(object, :note) - expect(context).to have_key(:foo) - expect(context[:foo]).to eq(:bar) - end - end - - describe '#render_attributes' do - it 'renders the attribute of a list of objects' do - objects = [fake_object(note: 'hello', note_html: nil), fake_object(note: 'bye', note_html: nil)] - renderer = described_class.new(project, user) - - objects.each do |object| - expect(Banzai).to receive(:render_field).with(object, :note).and_call_original - end - - docs = renderer.render_attributes(objects, :note) - - expect(docs[0]).to be_an_instance_of(Nokogiri::HTML::DocumentFragment) - expect(docs[0].to_html).to eq('<p dir="auto">hello</p>') - - expect(docs[1]).to be_an_instance_of(Nokogiri::HTML::DocumentFragment) - expect(docs[1].to_html).to eq('<p dir="auto">bye</p>') - end - - it 'returns when no objects to render' do - objects = [] - renderer = described_class.new(project, user, pipeline: :note) - - expect(renderer.render_attributes(objects, :note)).to eq([]) + renderer.render([object], :note) end - end - describe '#base_context' do - let(:context) do - described_class.new(project, user, foo: :bar).base_context - end + it 'retrieves field content using Banzai.render_field' do + expect(Banzai).to receive(:render_field).with(object, :note).and_call_original - it 'returns a Hash' do - expect(context).to be_an_instance_of(Hash) - end - - it 'includes the custom attributes' do - expect(context[:foo]).to eq(:bar) + renderer.render([object], :note) end - it 'includes the current user' do - expect(context[:current_user]).to eq(user) - end + it 'passes context to PostProcessPipeline' do + another_user = create(:user) + another_project = create(:empty_project) + object = Note.new( + note: 'hello', + note_html: 'hello', + author: another_user, + project: another_project + ) + + expect(Banzai::Pipeline::PostProcessPipeline).to receive(:to_document).with( + anything, + hash_including( + skip_redaction: true, + current_user: user, + project: another_project, + author: another_user, + custom_value: 'value' + ) + ).and_call_original - it 'includes the current project' do - expect(context[:project]).to eq(project) + renderer.render([object], :note) end end end diff --git a/spec/lib/banzai/redactor_spec.rb b/spec/lib/banzai/redactor_spec.rb index 6d2c141e18b..e6f2963193c 100644 --- a/spec/lib/banzai/redactor_spec.rb +++ b/spec/lib/banzai/redactor_spec.rb @@ -42,6 +42,31 @@ describe Banzai::Redactor do end end + context 'when project is in pending delete' do + let!(:issue) { create(:issue, project: project) } + let(:redactor) { described_class.new(project, user) } + + before do + project.update(pending_delete: true) + 
end + + it 'redacts an issue attached' do + doc = Nokogiri::HTML.fragment("<a class='gfm' data-reference-type='issue' data-issue='#{issue.id}'>foo</a>") + + redactor.redact([doc]) + + expect(doc.to_html).to eq('foo') + end + + it 'redacts an external issue' do + doc = Nokogiri::HTML.fragment("<a class='gfm' data-reference-type='issue' data-external-issue='#{issue.id}' data-project='#{project.id}'>foo</a>") + + redactor.redact([doc]) + + expect(doc.to_html).to eq('foo') + end + end + context 'when reference visible to user' do it 'does not redact an array of documents' do doc1_html = '<a class="gfm" data-reference-type="issue">foo</a>' diff --git a/spec/lib/banzai/reference_parser/base_parser_spec.rb b/spec/lib/banzai/reference_parser/base_parser_spec.rb index aa127f0179d..d5746107ee1 100644 --- a/spec/lib/banzai/reference_parser/base_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/base_parser_spec.rb @@ -92,20 +92,49 @@ describe Banzai::ReferenceParser::BaseParser, lib: true do end describe '#grouped_objects_for_nodes' do - it 'returns a Hash grouping objects per ID' do - nodes = [double(:node)] + it 'returns a Hash grouping objects per node' do + link = double(:link) + + expect(link).to receive(:has_attribute?). + with('data-user'). + and_return(true) + + expect(link).to receive(:attr). + with('data-user'). + and_return(user.id.to_s) + + nodes = [link] expect(subject).to receive(:unique_attribute_values). with(nodes, 'data-user'). - and_return([user.id]) + and_return([user.id.to_s]) hash = subject.grouped_objects_for_nodes(nodes, User, 'data-user') - expect(hash).to eq({ user.id => user }) + expect(hash).to eq({ link => user }) end - it 'returns an empty Hash when the list of nodes is empty' do - expect(subject.grouped_objects_for_nodes([], User, 'data-user')).to eq({}) + it 'returns an empty Hash when entry does not exist in the database' do + link = double(:link) + + expect(link).to receive(:has_attribute?). + with('data-user'). + and_return(true) + + expect(link).to receive(:attr). + with('data-user'). + and_return('1') + + nodes = [link] + bad_id = user.id + 100 + + expect(subject).to receive(:unique_attribute_values). + with(nodes, 'data-user'). 
+ and_return([bad_id.to_s]) + + hash = subject.grouped_objects_for_nodes(nodes, User, 'data-user') + + expect(hash).to eq({}) end end diff --git a/spec/lib/banzai/reference_parser/issue_parser_spec.rb b/spec/lib/banzai/reference_parser/issue_parser_spec.rb index 6873b7b85f9..7031c47231c 100644 --- a/spec/lib/banzai/reference_parser/issue_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/issue_parser_spec.rb @@ -67,6 +67,16 @@ describe Banzai::ReferenceParser::IssueParser, lib: true do expect(subject.referenced_by([])).to eq([]) end end + + context 'when issue with given ID does not exist' do + before do + link['data-issue'] = '-1' + end + + it 'returns an empty Array' do + expect(subject.referenced_by([link])).to eq([]) + end + end end end @@ -75,7 +85,7 @@ describe Banzai::ReferenceParser::IssueParser, lib: true do link['data-issue'] = issue.id.to_s nodes = [link] - expect(subject.issues_for_nodes(nodes)).to eq({ issue.id => issue }) + expect(subject.issues_for_nodes(nodes)).to eq({ link => issue }) end end end diff --git a/spec/lib/banzai/reference_parser/user_parser_spec.rb b/spec/lib/banzai/reference_parser/user_parser_spec.rb index 31ca9d27b0b..4ec998efe53 100644 --- a/spec/lib/banzai/reference_parser/user_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/user_parser_spec.rb @@ -180,6 +180,15 @@ describe Banzai::ReferenceParser::UserParser, lib: true do expect(subject.nodes_user_can_reference(user, [link])).to eq([]) end + + it 'returns the nodes if the project attribute value equals the current project ID' do + other_user = create(:user) + + link['data-project'] = project.id.to_s + link['data-author'] = other_user.id.to_s + + expect(subject.nodes_user_can_reference(user, [link])).to eq([link]) + end end context 'when the link does not have a data-author attribute' do diff --git a/spec/lib/banzai/renderer_spec.rb b/spec/lib/banzai/renderer_spec.rb index aaa6b12e67e..0e094405e33 100644 --- a/spec/lib/banzai/renderer_spec.rb +++ b/spec/lib/banzai/renderer_spec.rb @@ -1,73 +1,36 @@ require 'spec_helper' describe Banzai::Renderer do - def expect_render(project = :project) - expected_context = { project: project } - expect(renderer).to receive(:cacheless_render) { :html }.with(:markdown, expected_context) - end - - def expect_cache_update - expect(object).to receive(:update_column).with("field_html", :html) - end - - def fake_object(*features) - markdown = :markdown if features.include?(:markdown) - html = :html if features.include?(:html) - - object = double( - "object", - banzai_render_context: { project: :project }, - field: markdown, - field_html: html - ) + def fake_object(fresh:) + object = double('object') - allow(object).to receive(:markdown_cache_field_for).with(:field).and_return("field_html") - allow(object).to receive(:new_record?).and_return(features.include?(:new)) - allow(object).to receive(:destroyed?).and_return(features.include?(:destroyed)) + allow(object).to receive(:cached_html_up_to_date?).with(:field).and_return(fresh) + allow(object).to receive(:cached_html_for).with(:field).and_return('field_html') object end - describe "#render_field" do - let(:renderer) { Banzai::Renderer } - let(:subject) { renderer.render_field(object, :field) } + describe '#render_field' do + let(:renderer) { described_class } + subject { renderer.render_field(object, :field) } - context "with an empty cache" do - let(:object) { fake_object(:markdown) } - it "caches and returns the result" do - expect_render - expect_cache_update - expect(subject).to eq(:html) - end - end + context 
'with a stale cache' do + let(:object) { fake_object(fresh: false) } - context "with a filled cache" do - let(:object) { fake_object(:markdown, :html) } + it 'caches and returns the result' do + expect(object).to receive(:refresh_markdown_cache!).with(do_update: true) - it "uses the cache" do - expect_render.never - expect_cache_update.never - should eq(:html) + is_expected.to eq('field_html') end end - context "new object" do - let(:object) { fake_object(:new, :markdown) } - - it "doesn't cache the result" do - expect_render - expect_cache_update.never - expect(subject).to eq(:html) - end - end + context 'with an up-to-date cache' do + let(:object) { fake_object(fresh: true) } - context "destroyed object" do - let(:object) { fake_object(:destroyed, :markdown) } + it 'uses the cache' do + expect(object).to receive(:refresh_markdown_cache!).never - it "doesn't cache the result" do - expect_render - expect_cache_update.never - expect(subject).to eq(:html) + is_expected.to eq('field_html') end end end diff --git a/spec/lib/ci/ansi2html_spec.rb b/spec/lib/ci/ansi2html_spec.rb index 0762fd7e56a..a5dfb49478a 100644 --- a/spec/lib/ci/ansi2html_spec.rb +++ b/spec/lib/ci/ansi2html_spec.rb @@ -1,159 +1,160 @@ require 'spec_helper' describe Ci::Ansi2html, lib: true do - subject { Ci::Ansi2html } + subject { described_class } it "prints non-ansi as-is" do - expect(subject.convert("Hello")[:html]).to eq('Hello') + expect(convert_html("Hello")).to eq('Hello') end it "strips non-color-changing controll sequences" do - expect(subject.convert("Hello \e[2Kworld")[:html]).to eq('Hello world') + expect(convert_html("Hello \e[2Kworld")).to eq('Hello world') end it "prints simply red" do - expect(subject.convert("\e[31mHello\e[0m")[:html]).to eq('<span class="term-fg-red">Hello</span>') + expect(convert_html("\e[31mHello\e[0m")).to eq('<span class="term-fg-red">Hello</span>') end it "prints simply red without trailing reset" do - expect(subject.convert("\e[31mHello")[:html]).to eq('<span class="term-fg-red">Hello</span>') + expect(convert_html("\e[31mHello")).to eq('<span class="term-fg-red">Hello</span>') end it "prints simply yellow" do - expect(subject.convert("\e[33mHello\e[0m")[:html]).to eq('<span class="term-fg-yellow">Hello</span>') + expect(convert_html("\e[33mHello\e[0m")).to eq('<span class="term-fg-yellow">Hello</span>') end it "prints default on blue" do - expect(subject.convert("\e[39;44mHello")[:html]).to eq('<span class="term-bg-blue">Hello</span>') + expect(convert_html("\e[39;44mHello")).to eq('<span class="term-bg-blue">Hello</span>') end it "prints red on blue" do - expect(subject.convert("\e[31;44mHello")[:html]).to eq('<span class="term-fg-red term-bg-blue">Hello</span>') + expect(convert_html("\e[31;44mHello")).to eq('<span class="term-fg-red term-bg-blue">Hello</span>') end it "resets colors after red on blue" do - expect(subject.convert("\e[31;44mHello\e[0m world")[:html]).to eq('<span class="term-fg-red term-bg-blue">Hello</span> world') + expect(convert_html("\e[31;44mHello\e[0m world")).to eq('<span class="term-fg-red term-bg-blue">Hello</span> world') end it "performs color change from red/blue to yellow/blue" do - expect(subject.convert("\e[31;44mHello \e[33mworld")[:html]).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-fg-yellow term-bg-blue">world</span>') + expect(convert_html("\e[31;44mHello \e[33mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-fg-yellow term-bg-blue">world</span>') end it "performs color 
change from red/blue to yellow/green" do - expect(subject.convert("\e[31;44mHello \e[33;42mworld")[:html]).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-fg-yellow term-bg-green">world</span>') + expect(convert_html("\e[31;44mHello \e[33;42mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-fg-yellow term-bg-green">world</span>') end it "performs color change from red/blue to reset to yellow/green" do - expect(subject.convert("\e[31;44mHello\e[0m \e[33;42mworld")[:html]).to eq('<span class="term-fg-red term-bg-blue">Hello</span> <span class="term-fg-yellow term-bg-green">world</span>') + expect(convert_html("\e[31;44mHello\e[0m \e[33;42mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello</span> <span class="term-fg-yellow term-bg-green">world</span>') end it "ignores unsupported codes" do - expect(subject.convert("\e[51mHello\e[0m")[:html]).to eq('Hello') + expect(convert_html("\e[51mHello\e[0m")).to eq('Hello') end it "prints light red" do - expect(subject.convert("\e[91mHello\e[0m")[:html]).to eq('<span class="term-fg-l-red">Hello</span>') + expect(convert_html("\e[91mHello\e[0m")).to eq('<span class="term-fg-l-red">Hello</span>') end it "prints default on light red" do - expect(subject.convert("\e[101mHello\e[0m")[:html]).to eq('<span class="term-bg-l-red">Hello</span>') + expect(convert_html("\e[101mHello\e[0m")).to eq('<span class="term-bg-l-red">Hello</span>') end it "performs color change from red/blue to default/blue" do - expect(subject.convert("\e[31;44mHello \e[39mworld")[:html]).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-bg-blue">world</span>') + expect(convert_html("\e[31;44mHello \e[39mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-bg-blue">world</span>') end it "performs color change from light red/blue to default/blue" do - expect(subject.convert("\e[91;44mHello \e[39mworld")[:html]).to eq('<span class="term-fg-l-red term-bg-blue">Hello </span><span class="term-bg-blue">world</span>') + expect(convert_html("\e[91;44mHello \e[39mworld")).to eq('<span class="term-fg-l-red term-bg-blue">Hello </span><span class="term-bg-blue">world</span>') end it "prints bold text" do - expect(subject.convert("\e[1mHello")[:html]).to eq('<span class="term-bold">Hello</span>') + expect(convert_html("\e[1mHello")).to eq('<span class="term-bold">Hello</span>') end it "resets bold text" do - expect(subject.convert("\e[1mHello\e[21m world")[:html]).to eq('<span class="term-bold">Hello</span> world') - expect(subject.convert("\e[1mHello\e[22m world")[:html]).to eq('<span class="term-bold">Hello</span> world') + expect(convert_html("\e[1mHello\e[21m world")).to eq('<span class="term-bold">Hello</span> world') + expect(convert_html("\e[1mHello\e[22m world")).to eq('<span class="term-bold">Hello</span> world') end it "prints italic text" do - expect(subject.convert("\e[3mHello")[:html]).to eq('<span class="term-italic">Hello</span>') + expect(convert_html("\e[3mHello")).to eq('<span class="term-italic">Hello</span>') end it "resets italic text" do - expect(subject.convert("\e[3mHello\e[23m world")[:html]).to eq('<span class="term-italic">Hello</span> world') + expect(convert_html("\e[3mHello\e[23m world")).to eq('<span class="term-italic">Hello</span> world') end it "prints underlined text" do - expect(subject.convert("\e[4mHello")[:html]).to eq('<span class="term-underline">Hello</span>') + expect(convert_html("\e[4mHello")).to eq('<span 
class="term-underline">Hello</span>') end it "resets underlined text" do - expect(subject.convert("\e[4mHello\e[24m world")[:html]).to eq('<span class="term-underline">Hello</span> world') + expect(convert_html("\e[4mHello\e[24m world")).to eq('<span class="term-underline">Hello</span> world') end it "prints concealed text" do - expect(subject.convert("\e[8mHello")[:html]).to eq('<span class="term-conceal">Hello</span>') + expect(convert_html("\e[8mHello")).to eq('<span class="term-conceal">Hello</span>') end it "resets concealed text" do - expect(subject.convert("\e[8mHello\e[28m world")[:html]).to eq('<span class="term-conceal">Hello</span> world') + expect(convert_html("\e[8mHello\e[28m world")).to eq('<span class="term-conceal">Hello</span> world') end it "prints crossed-out text" do - expect(subject.convert("\e[9mHello")[:html]).to eq('<span class="term-cross">Hello</span>') + expect(convert_html("\e[9mHello")).to eq('<span class="term-cross">Hello</span>') end it "resets crossed-out text" do - expect(subject.convert("\e[9mHello\e[29m world")[:html]).to eq('<span class="term-cross">Hello</span> world') + expect(convert_html("\e[9mHello\e[29m world")).to eq('<span class="term-cross">Hello</span> world') end it "can print 256 xterm fg colors" do - expect(subject.convert("\e[38;5;16mHello")[:html]).to eq('<span class="xterm-fg-16">Hello</span>') + expect(convert_html("\e[38;5;16mHello")).to eq('<span class="xterm-fg-16">Hello</span>') end it "can print 256 xterm fg colors on normal magenta background" do - expect(subject.convert("\e[38;5;16;45mHello")[:html]).to eq('<span class="xterm-fg-16 term-bg-magenta">Hello</span>') + expect(convert_html("\e[38;5;16;45mHello")).to eq('<span class="xterm-fg-16 term-bg-magenta">Hello</span>') end it "can print 256 xterm bg colors" do - expect(subject.convert("\e[48;5;240mHello")[:html]).to eq('<span class="xterm-bg-240">Hello</span>') + expect(convert_html("\e[48;5;240mHello")).to eq('<span class="xterm-bg-240">Hello</span>') end it "can print 256 xterm bg colors on normal magenta foreground" do - expect(subject.convert("\e[48;5;16;35mHello")[:html]).to eq('<span class="term-fg-magenta xterm-bg-16">Hello</span>') + expect(convert_html("\e[48;5;16;35mHello")).to eq('<span class="term-fg-magenta xterm-bg-16">Hello</span>') end it "prints bold colored text vividly" do - expect(subject.convert("\e[1;31mHello\e[0m")[:html]).to eq('<span class="term-fg-l-red term-bold">Hello</span>') + expect(convert_html("\e[1;31mHello\e[0m")).to eq('<span class="term-fg-l-red term-bold">Hello</span>') end it "prints bold light colored text correctly" do - expect(subject.convert("\e[1;91mHello\e[0m")[:html]).to eq('<span class="term-fg-l-red term-bold">Hello</span>') + expect(convert_html("\e[1;91mHello\e[0m")).to eq('<span class="term-fg-l-red term-bold">Hello</span>') end it "prints <" do - expect(subject.convert("<")[:html]).to eq('<') + expect(convert_html("<")).to eq('<') end it "replaces newlines with line break tags" do - expect(subject.convert("\n")[:html]).to eq('<br>') + expect(convert_html("\n")).to eq('<br>') end it "groups carriage returns with newlines" do - expect(subject.convert("\r\n")[:html]).to eq('<br>') + expect(convert_html("\r\n")).to eq('<br>') end describe "incremental update" do shared_examples 'stateable converter' do - let(:pass1) { subject.convert(pre_text) } - let(:pass2) { subject.convert(pre_text + text, pass1[:state]) } + let(:pass1_stream) { StringIO.new(pre_text) } + let(:pass2_stream) { StringIO.new(pre_text + text) } + let(:pass1) { 
subject.convert(pass1_stream) } + let(:pass2) { subject.convert(pass2_stream, pass1.state) } it "to returns html to append" do - expect(pass2[:append]).to be_truthy - expect(pass2[:html]).to eq(html) - expect(pass1[:text] + pass2[:text]).to eq(pre_text + text) - expect(pass1[:html] + pass2[:html]).to eq(pre_html + html) + expect(pass2.append).to be_truthy + expect(pass2.html).to eq(html) + expect(pass1.html + pass2.html).to eq(pre_html + html) end end @@ -193,4 +194,27 @@ describe Ci::Ansi2html, lib: true do it_behaves_like 'stateable converter' end end + + describe "truncates" do + let(:text) { "Hello World" } + let(:stream) { StringIO.new(text) } + let(:subject) { described_class.convert(stream) } + + before do + stream.seek(3, IO::SEEK_SET) + end + + it "returns truncated output" do + expect(subject.truncated).to be_truthy + end + + it "does not append output" do + expect(subject.append).to be_falsey + end + end + + def convert_html(data) + stream = StringIO.new(data) + subject.convert(stream).html + end end diff --git a/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb b/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb index 53abc056602..fe2c00bb2ca 100644 --- a/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb +++ b/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb @@ -225,7 +225,7 @@ module Ci before_script: ["pwd"], rspec: { script: "rspec", type: "test", only: %w(master deploy) }, staging: { script: "deploy", type: "deploy", only: %w(master deploy) }, - production: { script: "deploy", type: "deploy", only: ["master@path", "deploy"] }, + production: { script: "deploy", type: "deploy", only: ["master@path", "deploy"] } }) config_processor = GitlabCiYamlProcessor.new(config, 'fork') @@ -381,7 +381,7 @@ module Ci before_script: ["pwd"], rspec: { script: "rspec", type: "test", except: ["master", "deploy", "test@fork"] }, staging: { script: "deploy", type: "deploy", except: ["master"] }, - production: { script: "deploy", type: "deploy", except: ["master@fork"] }, + production: { script: "deploy", type: "deploy", except: ["master@fork"] } }) config_processor = GitlabCiYamlProcessor.new(config, 'fork') @@ -716,7 +716,7 @@ module Ci expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq( paths: ["logs/", "binaries/"], untracked: true, - key: 'key', + key: 'key' ) end @@ -734,7 +734,7 @@ module Ci expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq( paths: ["logs/", "binaries/"], untracked: true, - key: 'key', + key: 'key' ) end @@ -743,7 +743,7 @@ module Ci cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' }, rspec: { script: "rspec", - cache: { paths: ["test/"], untracked: false, key: 'local' }, + cache: { paths: ["test/"], untracked: false, key: 'local' } } }) @@ -753,7 +753,7 @@ module Ci expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq( paths: ["test/"], untracked: false, - key: 'local', + key: 'local' ) end end diff --git a/spec/lib/constraints/group_url_constrainer_spec.rb b/spec/lib/constraints/group_url_constrainer_spec.rb index 96dacdc5cd2..db680489a8d 100644 --- a/spec/lib/constraints/group_url_constrainer_spec.rb +++ b/spec/lib/constraints/group_url_constrainer_spec.rb @@ -17,14 +17,49 @@ describe GroupUrlConstrainer, lib: true do it { expect(subject.matches?(request)).to be_truthy } end + context 'valid request for nested group with reserved top level name' do + let!(:nested_group) { create(:group, path: 'api', parent: group) } + 
let!(:request) { build_request('gitlab/api') } + + it { expect(subject.matches?(request)).to be_truthy } + end + context 'invalid request' do let(:request) { build_request('foo') } it { expect(subject.matches?(request)).to be_falsey } end + + context 'when the request matches a redirect route' do + context 'for a root group' do + let!(:redirect_route) { group.redirect_routes.create!(path: 'gitlabb') } + + context 'and is a GET request' do + let(:request) { build_request(redirect_route.path) } + + it { expect(subject.matches?(request)).to be_truthy } + end + + context 'and is NOT a GET request' do + let(:request) { build_request(redirect_route.path, 'POST') } + + it { expect(subject.matches?(request)).to be_falsey } + end + end + + context 'for a nested group' do + let!(:nested_group) { create(:group, path: 'nested', parent: group) } + let!(:redirect_route) { nested_group.redirect_routes.create!(path: 'gitlabb/nested') } + let(:request) { build_request(redirect_route.path) } + + it { expect(subject.matches?(request)).to be_truthy } + end + end end - def build_request(path) - double(:request, params: { id: path }) + def build_request(path, method = 'GET') + double(:request, + 'get?': (method == 'GET'), + params: { id: path }) end end diff --git a/spec/lib/constraints/project_url_constrainer_spec.rb b/spec/lib/constraints/project_url_constrainer_spec.rb index 4f25ad88960..b6884e37aa3 100644 --- a/spec/lib/constraints/project_url_constrainer_spec.rb +++ b/spec/lib/constraints/project_url_constrainer_spec.rb @@ -24,9 +24,26 @@ describe ProjectUrlConstrainer, lib: true do it { expect(subject.matches?(request)).to be_falsey } end end + + context 'when the request matches a redirect route' do + let(:old_project_path) { 'old_project_path' } + let!(:redirect_route) { project.redirect_routes.create!(path: "#{namespace.full_path}/#{old_project_path}") } + + context 'and is a GET request' do + let(:request) { build_request(namespace.full_path, old_project_path) } + it { expect(subject.matches?(request)).to be_truthy } + end + + context 'and is NOT a GET request' do + let(:request) { build_request(namespace.full_path, old_project_path, 'POST') } + it { expect(subject.matches?(request)).to be_falsey } + end + end end - def build_request(namespace, project) - double(:request, params: { namespace_id: namespace, id: project }) + def build_request(namespace, project, method = 'GET') + double(:request, + 'get?': (method == 'GET'), + params: { namespace_id: namespace, id: project }) end end diff --git a/spec/lib/constraints/user_url_constrainer_spec.rb b/spec/lib/constraints/user_url_constrainer_spec.rb index 207b6fe6c9e..ed69b830979 100644 --- a/spec/lib/constraints/user_url_constrainer_spec.rb +++ b/spec/lib/constraints/user_url_constrainer_spec.rb @@ -15,9 +15,26 @@ describe UserUrlConstrainer, lib: true do it { expect(subject.matches?(request)).to be_falsey } end + + context 'when the request matches a redirect route' do + let(:old_project_path) { 'old_project_path' } + let!(:redirect_route) { user.namespace.redirect_routes.create!(path: 'foo') } + + context 'and is a GET request' do + let(:request) { build_request(redirect_route.path) } + it { expect(subject.matches?(request)).to be_truthy } + end + + context 'and is NOT a GET request' do + let(:request) { build_request(redirect_route.path, 'POST') } + it { expect(subject.matches?(request)).to be_falsey } + end + end end - def build_request(username) - double(:request, params: { username: username }) + def build_request(username, method = 'GET') + 
double(:request, + 'get?': (method == 'GET'), + params: { username: username }) end end diff --git a/spec/lib/container_registry/blob_spec.rb b/spec/lib/container_registry/blob_spec.rb index bbacdc67ebd..ab010c6dfeb 100644 --- a/spec/lib/container_registry/blob_spec.rb +++ b/spec/lib/container_registry/blob_spec.rb @@ -1,110 +1,121 @@ require 'spec_helper' describe ContainerRegistry::Blob do - let(:digest) { 'sha256:0123456789012345' } + let(:group) { create(:group, name: 'group') } + let(:project) { create(:empty_project, path: 'test', group: group) } + + let(:repository) do + create(:container_repository, name: 'image', + tags: %w[latest rc1], + project: project) + end + let(:config) do - { - 'digest' => digest, + { 'digest' => 'sha256:0123456789012345', 'mediaType' => 'binary', - 'size' => 1000 - } + 'size' => 1000 } + end + + let(:blob) { described_class.new(repository, config) } + + before do + stub_container_registry_config(enabled: true, + api_url: 'http://registry.gitlab', + host_port: 'registry.gitlab') end - let(:token) { 'authorization-token' } - - let(:registry) { ContainerRegistry::Registry.new('http://example.com', token: token) } - let(:repository) { registry.repository('group/test') } - let(:blob) { repository.blob(config) } it { expect(blob).to respond_to(:repository) } it { expect(blob).to delegate_method(:registry).to(:repository) } it { expect(blob).to delegate_method(:client).to(:repository) } - context '#path' do - subject { blob.path } - - it { is_expected.to eq('example.com/group/test@sha256:0123456789012345') } + describe '#path' do + it 'returns a valid path to the blob' do + expect(blob.path).to eq('group/test/image@sha256:0123456789012345') + end end - context '#digest' do - subject { blob.digest } - - it { is_expected.to eq(digest) } + describe '#digest' do + it 'return correct digest value' do + expect(blob.digest).to eq 'sha256:0123456789012345' + end end - context '#type' do - subject { blob.type } - - it { is_expected.to eq('binary') } + describe '#type' do + it 'returns a correct type' do + expect(blob.type).to eq 'binary' + end end - context '#revision' do - subject { blob.revision } - - it { is_expected.to eq('0123456789012345') } + describe '#revision' do + it 'returns a correct blob SHA' do + expect(blob.revision).to eq '0123456789012345' + end end - context '#short_revision' do - subject { blob.short_revision } - - it { is_expected.to eq('012345678') } + describe '#short_revision' do + it 'return a short SHA' do + expect(blob.short_revision).to eq '012345678' + end end - context '#delete' do + describe '#delete' do before do - stub_request(:delete, 'http://example.com/v2/group/test/blobs/sha256:0123456789012345'). - to_return(status: 200) + stub_request(:delete, 'http://registry.gitlab/v2/group/test/image/blobs/sha256:0123456789012345') + .to_return(status: 200) end - subject { blob.delete } - - it { is_expected.to be_truthy } + it 'returns true when blob has been successfuly deleted' do + expect(blob.delete).to be_truthy + end end - context '#data' do - let(:data) { '{"key":"value"}' } - - subject { blob.data } - + describe '#data' do context 'when locally stored' do before do - stub_request(:get, 'http://example.com/v2/group/test/blobs/sha256:0123456789012345'). + stub_request(:get, 'http://registry.gitlab/v2/group/test/image/blobs/sha256:0123456789012345'). 
to_return( status: 200, headers: { 'Content-Type' => 'application/json' }, - body: data) + body: '{"key":"value"}') end - it { is_expected.to eq(data) } + it 'returns a correct blob data' do + expect(blob.data).to eq '{"key":"value"}' + end end context 'when externally stored' do + let(:location) { 'http://external.com/blob/file' } + before do - stub_request(:get, 'http://example.com/v2/group/test/blobs/sha256:0123456789012345'). - with(headers: { 'Authorization' => "bearer #{token}" }). - to_return( + stub_request(:get, 'http://registry.gitlab/v2/group/test/image/blobs/sha256:0123456789012345') + .with(headers: { 'Authorization' => 'bearer token' }) + .to_return( status: 307, headers: { 'Location' => location }) end context 'for a valid address' do - let(:location) { 'http://external.com/blob/file' } - before do stub_request(:get, location). - with(headers: { 'Authorization' => nil }). + with { |request| !request.headers.include?('Authorization') }. to_return( status: 200, headers: { 'Content-Type' => 'application/json' }, - body: data) + body: '{"key":"value"}') end - it { is_expected.to eq(data) } + it 'returns correct data' do + expect(blob.data).to eq '{"key":"value"}' + end end context 'for invalid file' do let(:location) { 'file:///etc/passwd' } - it { expect{ subject }.to raise_error(ArgumentError, 'invalid address') } + it 'raises an error' do + expect { blob.data }.to raise_error(ArgumentError, 'invalid address') + end end end end diff --git a/spec/lib/container_registry/client_spec.rb b/spec/lib/container_registry/client_spec.rb new file mode 100644 index 00000000000..ec03b533383 --- /dev/null +++ b/spec/lib/container_registry/client_spec.rb @@ -0,0 +1,39 @@ +# coding: utf-8 +require 'spec_helper' + +describe ContainerRegistry::Client do + let(:token) { '12345' } + let(:options) { { token: token } } + let(:client) { described_class.new("http://container-registry", options) } + + describe '#blob' do + it 'GET /v2/:name/blobs/:digest' do + stub_request(:get, "http://container-registry/v2/group/test/blobs/sha256:0123456789012345"). + with(headers: { + 'Accept' => 'application/octet-stream', + 'Authorization' => "bearer #{token}" + }). + to_return(status: 200, body: "Blob") + + expect(client.blob('group/test', 'sha256:0123456789012345')).to eq('Blob') + end + + it 'follows 307 redirect for GET /v2/:name/blobs/:digest' do + stub_request(:get, "http://container-registry/v2/group/test/blobs/sha256:0123456789012345"). + with(headers: { + 'Accept' => 'application/octet-stream', + 'Authorization' => "bearer #{token}" + }). + to_return(status: 307, body: "", headers: { Location: 'http://redirected' }) + # We should probably use hash_excluding here, but that requires an update to WebMock: + # https://github.com/bblimke/webmock/blob/master/lib/webmock/matchers/hash_excluding_matcher.rb + stub_request(:get, "http://redirected/"). + with { |request| !request.headers.include?('Authorization') }. 
+ to_return(status: 200, body: "Successfully redirected") + + response = client.blob('group/test', 'sha256:0123456789012345') + + expect(response).to eq('Successfully redirected') + end + end +end diff --git a/spec/lib/container_registry/path_spec.rb b/spec/lib/container_registry/path_spec.rb new file mode 100644 index 00000000000..c2bcb54210b --- /dev/null +++ b/spec/lib/container_registry/path_spec.rb @@ -0,0 +1,246 @@ +require 'spec_helper' + +describe ContainerRegistry::Path do + subject { described_class.new(path) } + + describe '#components' do + let(:path) { 'path/to/some/project' } + + it 'splits components by a forward slash' do + expect(subject.components).to eq %w[path to some project] + end + end + + describe '#nodes' do + context 'when repository path is valid' do + let(:path) { 'path/to/some/project' } + + it 'returns all project-path-like nodes in reverse order' do + expect(subject.nodes).to eq %w[path/to/some/project + path/to/some + path/to] + end + end + + context 'when repository path is invalid' do + let(:path) { '' } + + it 'raises an error' do + expect { subject.nodes } + .to raise_error described_class::InvalidRegistryPathError + end + end + end + + describe '#to_s' do + context 'when path does not have uppercase characters' do + let(:path) { 'some/image' } + + it 'returns a string with the repository path' do + expect(subject.to_s).to eq 'some/image' + end + end + + context 'when path has uppercase characters' do + let(:path) { 'SoMe/ImAgE' } + + it 'returns a string with the repository path' do + expect(subject.to_s).to eq 'some/image' + end + end + end + + describe '#valid?' do + context 'when path has less than two components' do + let(:path) { 'something/' } + + it { is_expected.not_to be_valid } + end + + context 'when path has more than allowed number of components' do + let(:path) { 'a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/r/s/t/u/w/y/z' } + + it { is_expected.not_to be_valid } + end + + context 'when path has invalid characters' do + let(:path) { 'some\path' } + + it { is_expected.not_to be_valid } + end + + context 'when path has two or more components' do + let(:path) { 'some/path' } + + it { is_expected.to be_valid } + end + + context 'when path is related to multi-level image' do + let(:path) { 'some/path/my/image' } + + it { is_expected.to be_valid } + end + + context 'when path contains uppercase letters' do + let(:path) { 'Some/Registry' } + + it { is_expected.to be_valid } + end + end + + describe '#has_repository?'
do + context 'when project exists' do + let(:project) { create(:empty_project) } + let(:path) { "#{project.full_path}/my/image" } + + context 'when path already has matching repository' do + before do + create(:container_repository, project: project, name: 'my/image') + end + + it { is_expected.to have_repository } + it { is_expected.to have_project } + end + + context 'when path does not have matching repository' do + it { is_expected.not_to have_repository } + it { is_expected.to have_project } + end + end + + context 'when project does not exist' do + let(:path) { 'some/project/my/image' } + + it { is_expected.not_to have_repository } + it { is_expected.not_to have_project } + end + end + + describe '#repository_project' do + let(:group) { create(:group, path: 'some_group') } + + context 'when project for given path exists' do + let(:path) { 'some_group/some_project' } + + before do + create(:empty_project, group: group, name: 'some_project') + create(:empty_project, name: 'some_project') + end + + it 'returns a correct project' do + expect(subject.repository_project.group).to eq group + end + end + + context 'when project for given path does not exist' do + let(:path) { 'not/matching' } + + it 'returns nil' do + expect(subject.repository_project).to be_nil + end + end + + context 'when matching multi-level path' do + let(:project) do + create(:empty_project, group: group, name: 'some_project') + end + + context 'when using the zero-level path' do + let(:path) { project.full_path } + + it 'supports zero-level path' do + expect(subject.repository_project).to eq project + end + end + + context 'when using first-level path' do + let(:path) { "#{project.full_path}/repository" } + + it 'supports first-level path' do + expect(subject.repository_project).to eq project + end + end + + context 'when using second-level path' do + let(:path) { "#{project.full_path}/repository/name" } + + it 'supports second-level path' do + expect(subject.repository_project).to eq project + end + end + + context 'when using too deep nesting in the path' do + let(:path) { "#{project.full_path}/repository/name/invalid" } + + it 'does not support three-levels of nesting' do + expect(subject.repository_project).to be_nil + end + end + end + end + + describe '#repository_name' do + context 'when project does not exist' do + let(:path) { 'some/name' } + + it 'returns nil' do + expect(subject.repository_name).to be_nil + end + end + + context 'when project exists' do + let(:group) { create(:group, path: 'Some_Group') } + + before do + create(:empty_project, group: group, name: 'some_project') + end + + context 'when project path equal repository path' do + let(:path) { 'some_group/some_project' } + + it 'returns an empty string' do + expect(subject.repository_name).to eq '' + end + end + + context 'when repository path has one additional level' do + let(:path) { 'some_group/some_project/repository' } + + it 'returns a correct repository name' do + expect(subject.repository_name).to eq 'repository' + end + end + + context 'when repository path has two additional levels' do + let(:path) { 'some_group/some_project/repository/image' } + + it 'returns a correct repository name' do + expect(subject.repository_name).to eq 'repository/image' + end + end + end + end + + describe '#project_path' do + context 'when project does not exist' do + let(:path) { 'some/name' } + + it 'returns nil' do + expect(subject.project_path).to be_nil + end + end + + context 'when project with uppercase characters in path exists' do + let(:path) { 
'somegroup/myproject/my/image' } + let(:group) { create(:group, path: 'SomeGroup') } + + before do + create(:empty_project, group: group, name: 'MyProject') + end + + it 'returns downcased project path' do + expect(subject.project_path).to eq 'somegroup/myproject' + end + end + end +end diff --git a/spec/lib/container_registry/registry_spec.rb b/spec/lib/container_registry/registry_spec.rb index 4f3f8b24fc4..4d6eea94bf0 100644 --- a/spec/lib/container_registry/registry_spec.rb +++ b/spec/lib/container_registry/registry_spec.rb @@ -10,7 +10,7 @@ describe ContainerRegistry::Registry do it { is_expected.to respond_to(:uri) } it { is_expected.to respond_to(:path) } - it { expect(subject.repository('test')).not_to be_nil } + it { expect(subject).not_to be_nil } context '#path' do subject { registry.path } diff --git a/spec/lib/container_registry/repository_spec.rb b/spec/lib/container_registry/repository_spec.rb deleted file mode 100644 index c364e759108..00000000000 --- a/spec/lib/container_registry/repository_spec.rb +++ /dev/null @@ -1,65 +0,0 @@ -require 'spec_helper' - -describe ContainerRegistry::Repository do - let(:registry) { ContainerRegistry::Registry.new('http://example.com') } - let(:repository) { registry.repository('group/test') } - - it { expect(repository).to respond_to(:registry) } - it { expect(repository).to delegate_method(:client).to(:registry) } - it { expect(repository.tag('test')).not_to be_nil } - - context '#path' do - subject { repository.path } - - it { is_expected.to eq('example.com/group/test') } - end - - context 'manifest processing' do - before do - stub_request(:get, 'http://example.com/v2/group/test/tags/list'). - with(headers: { 'Accept' => 'application/vnd.docker.distribution.manifest.v2+json' }). - to_return( - status: 200, - body: JSON.dump(tags: ['test']), - headers: { 'Content-Type' => 'application/json' }) - end - - context '#manifest' do - subject { repository.manifest } - - it { is_expected.not_to be_nil } - end - - context '#valid?' do - subject { repository.valid? 
} - - it { is_expected.to be_truthy } - end - - context '#tags' do - subject { repository.tags } - - it { is_expected.not_to be_empty } - end - end - - context '#delete_tags' do - let(:tag) { ContainerRegistry::Tag.new(repository, 'tag') } - - before { expect(repository).to receive(:tags).twice.and_return([tag]) } - - subject { repository.delete_tags } - - context 'succeeds' do - before { expect(tag).to receive(:delete).and_return(true) } - - it { is_expected.to be_truthy } - end - - context 'any fails' do - before { expect(tag).to receive(:delete).and_return(false) } - - it { is_expected.to be_falsey } - end - end -end diff --git a/spec/lib/container_registry/tag_spec.rb b/spec/lib/container_registry/tag_spec.rb index c5e31ae82b6..f8fffbdca41 100644 --- a/spec/lib/container_registry/tag_spec.rb +++ b/spec/lib/container_registry/tag_spec.rb @@ -1,25 +1,66 @@ require 'spec_helper' describe ContainerRegistry::Tag do - let(:registry) { ContainerRegistry::Registry.new('http://example.com') } - let(:repository) { registry.repository('group/test') } - let(:tag) { repository.tag('tag') } - let(:headers) { { 'Accept' => 'application/vnd.docker.distribution.manifest.v2+json' } } + let(:group) { create(:group, name: 'group') } + let(:project) { create(:project, path: 'test', group: group) } + + let(:repository) do + create(:container_repository, name: '', project: project) + end + + let(:headers) do + { 'Accept' => 'application/vnd.docker.distribution.manifest.v2+json' } + end + + let(:tag) { described_class.new(repository, 'tag') } + + before do + stub_container_registry_config(enabled: true, + api_url: 'http://registry.gitlab', + host_port: 'registry.gitlab') + end it { expect(tag).to respond_to(:repository) } it { expect(tag).to delegate_method(:registry).to(:repository) } it { expect(tag).to delegate_method(:client).to(:repository) } - context '#path' do - subject { tag.path } + describe '#path' do + context 'when tag belongs to zero-level repository' do + let(:repository) do + create(:container_repository, name: '', + tags: %w[rc1], + project: project) + end + + it 'returns path to the image' do + expect(tag.path).to eq('group/test:tag') + end + end - it { is_expected.to eq('example.com/group/test:tag') } + context 'when tag belongs to first-level repository' do + let(:repository) do + create(:container_repository, name: 'my_image', + tags: %w[tag], + project: project) + end + + it 'returns path to the image' do + expect(tag.path).to eq('group/test/my_image:tag') + end + end + end + + describe '#location' do + it 'returns a full location of the tag' do + expect(tag.location) + .to eq 'registry.gitlab/group/test:tag' + end end context 'manifest processing' do context 'schema v1' do before do - stub_request(:get, 'http://example.com/v2/group/test/manifests/tag'). + stub_request(:get, 'http://registry.gitlab/v2/group/test/manifests/tag'). with(headers: headers). to_return( status: 200, @@ -56,7 +97,7 @@ describe ContainerRegistry::Tag do context 'schema v2' do before do - stub_request(:get, 'http://example.com/v2/group/test/manifests/tag'). + stub_request(:get, 'http://registry.gitlab/v2/group/test/manifests/tag'). with(headers: headers). to_return( status: 200, @@ -93,7 +134,7 @@ describe ContainerRegistry::Tag do context 'when locally stored' do before do - stub_request(:get, 'http://example.com/v2/group/test/blobs/sha256:d7a513a663c1a6dcdba9ed832ca53c02ac2af0c333322cd6ca92936d1d9917ac'). 
+ stub_request(:get, 'http://registry.gitlab/v2/group/test/blobs/sha256:d7a513a663c1a6dcdba9ed832ca53c02ac2af0c333322cd6ca92936d1d9917ac'). with(headers: { 'Accept' => 'application/octet-stream' }). to_return( status: 200, @@ -105,7 +146,7 @@ describe ContainerRegistry::Tag do context 'when externally stored' do before do - stub_request(:get, 'http://example.com/v2/group/test/blobs/sha256:d7a513a663c1a6dcdba9ed832ca53c02ac2af0c333322cd6ca92936d1d9917ac'). + stub_request(:get, 'http://registry.gitlab/v2/group/test/blobs/sha256:d7a513a663c1a6dcdba9ed832ca53c02ac2af0c333322cd6ca92936d1d9917ac'). with(headers: { 'Accept' => 'application/octet-stream' }). to_return( status: 307, @@ -123,29 +164,29 @@ describe ContainerRegistry::Tag do end end - context 'manifest digest' do + context 'with stubbed digest' do before do - stub_request(:head, 'http://example.com/v2/group/test/manifests/tag'). - with(headers: headers). - to_return(status: 200, headers: { 'Docker-Content-Digest' => 'sha256:digest' }) + stub_request(:head, 'http://registry.gitlab/v2/group/test/manifests/tag') + .with(headers: headers) + .to_return(status: 200, headers: { 'Docker-Content-Digest' => 'sha256:digest' }) end - context '#digest' do - subject { tag.digest } - - it { is_expected.to eq('sha256:digest') } + describe '#digest' do + it 'returns a correct tag digest' do + expect(tag.digest).to eq 'sha256:digest' + end end - context '#delete' do + describe '#delete' do before do - stub_request(:delete, 'http://example.com/v2/group/test/manifests/sha256:digest'). - with(headers: headers). - to_return(status: 200) + stub_request(:delete, 'http://registry.gitlab/v2/group/test/manifests/sha256:digest') + .with(headers: headers) + .to_return(status: 200) end - subject { tag.delete } - - it { is_expected.to be_truthy } + it 'correctly deletes the tag' do + expect(tag.delete).to be_truthy + end end end end diff --git a/spec/lib/expand_variables_spec.rb b/spec/lib/expand_variables_spec.rb index 90628917943..7faa0f31b68 100644 --- a/spec/lib/expand_variables_spec.rb +++ b/spec/lib/expand_variables_spec.rb @@ -25,7 +25,7 @@ describe ExpandVariables do result: 'keyvalueresult', variables: [ { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'result' }, + { key: 'variable2', value: 'result' } ] }, { value: 'key${variable}${variable2}', result: 'keyvalueresult', @@ -37,7 +37,7 @@ describe ExpandVariables do result: 'keyresultvalue', variables: [ { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'result' }, + { key: 'variable2', value: 'result' } ] }, { value: 'key${variable2}${variable}', result: 'keyresultvalue', @@ -49,7 +49,7 @@ describe ExpandVariables do result: 'review/feature/add-review-apps', variables: [ { key: 'CI_COMMIT_REF_NAME', value: 'feature/add-review-apps' } - ] }, + ] } ] tests.each do |test| diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb index bca57105d1d..2c7ebb15fd7 100644 --- a/spec/lib/gitlab/asciidoc_spec.rb +++ b/spec/lib/gitlab/asciidoc_spec.rb @@ -22,26 +22,24 @@ module Gitlab expect(Asciidoctor).to receive(:convert) .with(input, expected_asciidoc_opts).and_return(html) - expect( render(input, context) ).to eql html + expect(render(input, context)).to eq(html) end context "with asciidoc_opts" do - let(:asciidoc_opts) { { safe: :safe, attributes: ['foo'] } } - it "merges the options with default ones" do expected_asciidoc_opts = { - safe: :safe, + safe: :secure, backend: :gitlab_html5, - attributes: described_class::DEFAULT_ADOC_ATTRS + ['foo'] + 
attributes: described_class::DEFAULT_ADOC_ATTRS } expect(Asciidoctor).to receive(:convert) .with(input, expected_asciidoc_opts).and_return(html) - render(input, context, asciidoc_opts) + render(input, context) end end - + context "XSS" do links = { 'links' => { @@ -50,7 +48,7 @@ module Gitlab }, 'images' => { input: 'image:https://localhost.com/image.png[Alt text" onerror="alert(7)]', - output: "<div>\n<p><span><img src=\"https://localhost.com/image.png\" alt=\"Alt text\"></span></p>\n</div>" + output: "<img src=\"https://localhost.com/image.png\" alt=\"Alt text\">" }, 'pre' => { input: '```mypre"><script>alert(3)</script>', @@ -60,10 +58,18 @@ module Gitlab links.each do |name, data| it "does not convert dangerous #{name} into HTML" do - expect(render(data[:input], context)).to eql data[:output] + expect(render(data[:input], context)).to include(data[:output]) end end end + + context 'external links' do + it 'adds the `rel` attribute to the link' do + output = render('link:https://google.com[Google]', context) + + expect(output).to include('rel="nofollow noreferrer noopener"') + end + end end def render(*args) diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb index 03c4879ed6f..50bc3ef1b7c 100644 --- a/spec/lib/gitlab/auth_spec.rb +++ b/spec/lib/gitlab/auth_spec.rb @@ -118,7 +118,7 @@ describe Gitlab::Auth, lib: true do it 'succeeds for OAuth tokens with the `api` scope' do expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: 'oauth2') - expect(gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(user, nil, :oauth, read_authentication_abilities)) + expect(gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(user, nil, :oauth, full_authentication_abilities)) end it 'fails for OAuth tokens with other scopes' do @@ -175,7 +175,7 @@ describe Gitlab::Auth, lib: true do user = create( :user, username: 'normal_user', - password: 'my-secret', + password: 'my-secret' ) expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip')) @@ -186,7 +186,7 @@ describe Gitlab::Auth, lib: true do user = create( :user, username: 'oauth2', - password: 'my-secret', + password: 'my-secret' ) expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip')) diff --git a/spec/lib/gitlab/backup/manager_spec.rb b/spec/lib/gitlab/backup/manager_spec.rb index f84782ab440..c59ff7fb290 100644 --- a/spec/lib/gitlab/backup/manager_spec.rb +++ b/spec/lib/gitlab/backup/manager_spec.rb @@ -151,7 +151,7 @@ describe Backup::Manager, lib: true do allow(Dir).to receive(:glob).and_return( [ '1451606400_2016_01_01_gitlab_backup.tar', - '1451520000_2015_12_31_gitlab_backup.tar', + '1451520000_2015_12_31_gitlab_backup.tar' ] ) end diff --git a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb new file mode 100644 index 00000000000..b386852b196 --- /dev/null +++ b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb @@ -0,0 +1,304 @@ +require 'spec_helper' + +describe Gitlab::Cache::Ci::ProjectPipelineStatus, :redis do + let(:project) { create(:project) } + let(:pipeline_status) { described_class.new(project) } + let(:cache_key) { "projects/#{project.id}/pipeline_status" } + + describe '.load_for_project' do + it "loads the status" do + expect_any_instance_of(described_class).to receive(:load_status) + + 
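# The pipeline-status examples in this new spec assert a cache-or-compute flow
# around Redis. A rough sketch of the behaviour being specified follows; only
# the method names stubbed in these examples come from the source, the body
# below is an assumption for illustration.
def load_status
  return if loaded?

  if has_cache?
    load_from_cache     # one Redis read of sha/status/ref
  else
    load_from_project   # derive the status from the latest pipeline for HEAD
    store_in_cache      # write it back so the next lookup is a cache hit
  end

  self.loaded = true
end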
described_class.load_for_project(project) + end + end + + describe 'loading in batches' do + let(:status) { 'success' } + let(:sha) { '424d1b73bc0d3cb726eb7dc4ce17a4d48552f8c6' } + let(:ref) { 'master' } + let(:pipeline_info) { { sha: sha, status: status, ref: ref } } + let(:project_without_status) { create(:project) } + + describe '.load_in_batch_for_projects' do + it 'preloads pipeline_status on projects' do + described_class.load_in_batch_for_projects([project]) + + # Don't call the accessor that would lazy load the variable + expect(project.instance_variable_get('@pipeline_status')).to be_a(described_class) + end + + describe 'without a status in redis' do + it 'loads the status from a commit when it was not in redis' do + empty_status = { sha: nil, status: nil, ref: nil } + fake_pipeline = described_class.new( + project_without_status, + pipeline_info: empty_status, + loaded_from_cache: false + ) + + expect(described_class).to receive(:new). + with(project_without_status, + pipeline_info: empty_status, + loaded_from_cache: false). + and_return(fake_pipeline) + expect(fake_pipeline).to receive(:load_from_project) + expect(fake_pipeline).to receive(:store_in_cache) + + described_class.load_in_batch_for_projects([project_without_status]) + end + + it 'only connects to redis twice' do + # Once to load, once to store in the cache + expect(Gitlab::Redis).to receive(:with).exactly(2).and_call_original + + described_class.load_in_batch_for_projects([project_without_status]) + + expect(project_without_status.pipeline_status).not_to be_nil + end + end + + describe 'when a status was cached in redis' do + before do + Gitlab::Redis.with do |redis| + redis.mapped_hmset(cache_key, + { sha: sha, status: status, ref: ref }) + end + end + + it 'loads the correct status' do + described_class.load_in_batch_for_projects([project]) + + pipeline_status = project.instance_variable_get('@pipeline_status') + + expect(pipeline_status.sha).to eq(sha) + expect(pipeline_status.status).to eq(status) + expect(pipeline_status.ref).to eq(ref) + end + + it 'only connects to redis once' do + expect(Gitlab::Redis).to receive(:with).exactly(1).and_call_original + + described_class.load_in_batch_for_projects([project]) + + expect(project.pipeline_status).not_to be_nil + end + + it "doesn't load the status separately" do + expect_any_instance_of(described_class).not_to receive(:load_from_project) + expect_any_instance_of(described_class).not_to receive(:load_from_cache) + + described_class.load_in_batch_for_projects([project]) + end + end + end + + describe '.cached_results_for_projects' do + it 'loads a status from redis for all projects' do + Gitlab::Redis.with do |redis| + redis.mapped_hmset(cache_key, { sha: sha, status: status, ref: ref }) + end + + result = [{ loaded_from_cache: false, pipeline_info: { sha: nil, status: nil, ref: nil } }, + { loaded_from_cache: true, pipeline_info: pipeline_info }] + + expect(described_class.cached_results_for_projects([project_without_status, project])).to eq(result) + end + end + end + + describe '.update_for_pipeline' do + it 'refreshes the cache if necessary' do + pipeline = build_stubbed(:ci_pipeline, + sha: '123456', status: 'success', ref: 'master') + fake_status = double + expect(described_class).to receive(:new). + with(pipeline.project, + pipeline_info: { + sha: '123456', status: 'success', ref: 'master' + }).
+ and_return(fake_status) + + expect(fake_status).to receive(:store_in_cache_if_needed) + + described_class.update_for_pipeline(pipeline) + end + end + + describe '#has_status?' do + it "is false when the status wasn't loaded yet" do + expect(pipeline_status.has_status?).to be_falsy + end + + it 'is true when all status information was loaded' do + fake_commit = double + allow(fake_commit).to receive(:status).and_return('failed') + allow(fake_commit).to receive(:sha).and_return('failed424d1b73bc0d3cb726eb7dc4ce17a4d48552f8c6') + allow(pipeline_status).to receive(:commit).and_return(fake_commit) + allow(pipeline_status).to receive(:has_cache?).and_return(false) + + pipeline_status.load_status + + expect(pipeline_status.has_status?).to be_truthy + end + end + + describe '#load_status' do + it 'loads the status from the cache when there is one' do + expect(pipeline_status).to receive(:has_cache?).and_return(true) + expect(pipeline_status).to receive(:load_from_cache) + + pipeline_status.load_status + end + + it 'loads the status from the project commit when there is no cache' do + allow(pipeline_status).to receive(:has_cache?).and_return(false) + + expect(pipeline_status).to receive(:load_from_project) + + pipeline_status.load_status + end + + it 'stores the status in the cache when it loading it from the project' do + allow(pipeline_status).to receive(:has_cache?).and_return(false) + allow(pipeline_status).to receive(:load_from_project) + + expect(pipeline_status).to receive(:store_in_cache) + + pipeline_status.load_status + end + + it 'sets the state to loaded' do + pipeline_status.load_status + + expect(pipeline_status).to be_loaded + end + + it 'only loads the status once' do + expect(pipeline_status).to receive(:has_cache?).and_return(true).exactly(1) + expect(pipeline_status).to receive(:load_from_cache).exactly(1) + + pipeline_status.load_status + pipeline_status.load_status + end + end + + describe "#load_from_project" do + let!(:pipeline) { create(:ci_pipeline, :success, project: project, sha: project.commit.sha) } + + it 'reads the status from the pipeline for the commit' do + pipeline_status.load_from_project + + expect(pipeline_status.status).to eq('success') + expect(pipeline_status.sha).to eq(project.commit.sha) + expect(pipeline_status.ref).to eq(project.default_branch) + end + + it "doesn't fail for an empty project" do + status_for_empty_commit = described_class.new(create(:empty_project)) + + status_for_empty_commit.load_status + + expect(status_for_empty_commit).to be_loaded + end + end + + describe "#store_in_cache", :redis do + it "sets the object in redis" do + pipeline_status.sha = '123456' + pipeline_status.status = 'failed' + + pipeline_status.store_in_cache + read_sha, read_status = Gitlab::Redis.with { |redis| redis.hmget(cache_key, :sha, :status) } + + expect(read_sha).to eq('123456') + expect(read_status).to eq('failed') + end + end + + describe '#store_in_cache_if_needed', :redis do + it 'stores the state in the cache when the sha is the HEAD of the project' do + create(:ci_pipeline, :success, project: project, sha: project.commit.sha) + pipeline_status = described_class.load_for_project(project) + + pipeline_status.store_in_cache_if_needed + sha, status, ref = Gitlab::Redis.with { |redis| redis.hmget(cache_key, :sha, :status, :ref) } + + expect(sha).not_to be_nil + expect(status).not_to be_nil + expect(ref).not_to be_nil + end + + it "doesn't store the status in redis when the sha is not the head of the project" do + other_status = described_class.new( + 
project, + pipeline_info: { sha: "123456", status: "failed" } + ) + + other_status.store_in_cache_if_needed + sha, status = Gitlab::Redis.with { |redis| redis.hmget(cache_key, :sha, :status) } + + expect(sha).to be_nil + expect(status).to be_nil + end + + it "deletes the cache if the repository doesn't have a head commit" do + empty_project = create(:empty_project) + Gitlab::Redis.with do |redis| + redis.mapped_hmset(cache_key, + { sha: 'sha', status: 'pending', ref: 'master' }) + end + + other_status = described_class.new(empty_project, + pipeline_info: { + sha: "123456", status: "failed" + }) + + other_status.store_in_cache_if_needed + sha, status, ref = Gitlab::Redis.with { |redis| redis.hmget("projects/#{empty_project.id}/pipeline_status", :sha, :status, :ref) } + + expect(sha).to be_nil + expect(status).to be_nil + expect(ref).to be_nil + end + end + + describe "with a status in redis", :redis do + let(:status) { 'success' } + let(:sha) { '424d1b73bc0d3cb726eb7dc4ce17a4d48552f8c6' } + let(:ref) { 'master' } + + before do + Gitlab::Redis.with do |redis| + redis.mapped_hmset(cache_key, + { sha: sha, status: status, ref: ref }) + end + end + + describe '#load_from_cache' do + it 'reads the status from redis' do + pipeline_status.load_from_cache + + expect(pipeline_status.sha).to eq(sha) + expect(pipeline_status.status).to eq(status) + expect(pipeline_status.ref).to eq(ref) + end + end + + describe '#has_cache?' do + it 'knows the status is cached' do + expect(pipeline_status.has_cache?).to be_truthy + end + end + + describe '#delete_from_cache' do + it 'deletes values from redis' do + pipeline_status.delete_from_cache + + key_exists = Gitlab::Redis.with { |redis| redis.exists(cache_key) } + + expect(key_exists).to be_falsy + end + end + end +end diff --git a/spec/lib/gitlab/changes_list_spec.rb b/spec/lib/gitlab/changes_list_spec.rb index 69d86144e32..464508fcd73 100644 --- a/spec/lib/gitlab/changes_list_spec.rb +++ b/spec/lib/gitlab/changes_list_spec.rb @@ -5,7 +5,7 @@ describe Gitlab::ChangesList do let(:invalid_changes) { 1 } context 'when changes is a valid string' do - let(:changes_list) { Gitlab::ChangesList.new(valid_changes_string) } + let(:changes_list) { described_class.new(valid_changes_string) } it 'splits elements by newline character' do expect(changes_list).to contain_exactly({ diff --git a/spec/lib/gitlab/chat_commands/command_spec.rb b/spec/lib/gitlab/chat_commands/command_spec.rb index b6e924d67be..eb4f06b371c 100644 --- a/spec/lib/gitlab/chat_commands/command_spec.rb +++ b/spec/lib/gitlab/chat_commands/command_spec.rb @@ -40,11 +40,15 @@ describe Gitlab::ChatCommands::Command, service: true do context 'when trying to do deployment' do let(:params) { { text: 'deploy staging to production' } } - let!(:build) { create(:ci_build, project: project) } + let!(:build) { create(:ci_build, pipeline: pipeline) } + let!(:pipeline) { create(:ci_pipeline, project: project) } let!(:staging) { create(:environment, name: 'staging', project: project) } let!(:deployment) { create(:deployment, environment: staging, deployable: build) } + let!(:manual) do - create(:ci_build, :manual, project: project, pipeline: build.pipeline, name: 'first', environment: 'production') + create(:ci_build, :manual, pipeline: pipeline, + name: 'first', + environment: 'production') end context 'and user can not create deployment' do @@ -56,7 +60,7 @@ describe Gitlab::ChatCommands::Command, service: true do context 'and user does have deployment permission' do before do - project.team << [user, :developer] + 
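# In these chat-command specs, ci_build records are now created through an
# explicit pipeline rather than with project: directly, and membership is
# granted via project.add_master(user). The assumed factory pattern, shown
# here only for illustration, is:
pipeline = create(:ci_pipeline, project: project)
build    = create(:ci_build, :manual, pipeline: pipeline,
                  name: 'first', environment: 'production')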
build.project.add_master(user) end it 'returns action' do @@ -66,7 +70,9 @@ describe Gitlab::ChatCommands::Command, service: true do context 'when duplicate action exists' do let!(:manual2) do - create(:ci_build, :manual, project: project, pipeline: build.pipeline, name: 'second', environment: 'production') + create(:ci_build, :manual, pipeline: pipeline, + name: 'second', + environment: 'production') end it 'returns error' do diff --git a/spec/lib/gitlab/chat_commands/deploy_spec.rb b/spec/lib/gitlab/chat_commands/deploy_spec.rb index b3358a32161..b33389d959e 100644 --- a/spec/lib/gitlab/chat_commands/deploy_spec.rb +++ b/spec/lib/gitlab/chat_commands/deploy_spec.rb @@ -7,7 +7,7 @@ describe Gitlab::ChatCommands::Deploy, service: true do let(:regex_match) { described_class.match('deploy staging to production') } before do - project.team << [user, :master] + project.add_master(user) end subject do @@ -23,7 +23,8 @@ describe Gitlab::ChatCommands::Deploy, service: true do context 'with environment' do let!(:staging) { create(:environment, name: 'staging', project: project) } - let!(:build) { create(:ci_build, project: project) } + let!(:pipeline) { create(:ci_pipeline, project: project) } + let!(:build) { create(:ci_build, pipeline: pipeline) } let!(:deployment) { create(:deployment, environment: staging, deployable: build) } context 'without actions' do @@ -35,7 +36,9 @@ describe Gitlab::ChatCommands::Deploy, service: true do context 'with action' do let!(:manual1) do - create(:ci_build, :manual, project: project, pipeline: build.pipeline, name: 'first', environment: 'production') + create(:ci_build, :manual, pipeline: pipeline, + name: 'first', + environment: 'production') end it 'returns success result' do @@ -45,7 +48,9 @@ describe Gitlab::ChatCommands::Deploy, service: true do context 'when duplicate action exists' do let!(:manual2) do - create(:ci_build, :manual, project: project, pipeline: build.pipeline, name: 'second', environment: 'production') + create(:ci_build, :manual, pipeline: pipeline, + name: 'second', + environment: 'production') end it 'returns error' do @@ -57,8 +62,7 @@ describe Gitlab::ChatCommands::Deploy, service: true do context 'when teardown action exists' do let!(:teardown) do create(:ci_build, :manual, :teardown_environment, - project: project, pipeline: build.pipeline, - name: 'teardown', environment: 'production') + pipeline: pipeline, name: 'teardown', environment: 'production') end it 'returns the success message' do diff --git a/spec/lib/gitlab/checks/change_access_spec.rb b/spec/lib/gitlab/checks/change_access_spec.rb index e22f88b7a32..8d81ed5856e 100644 --- a/spec/lib/gitlab/checks/change_access_spec.rb +++ b/spec/lib/gitlab/checks/change_access_spec.rb @@ -5,13 +5,10 @@ describe Gitlab::Checks::ChangeAccess, lib: true do let(:user) { create(:user) } let(:project) { create(:project, :repository) } let(:user_access) { Gitlab::UserAccess.new(user, project: project) } - let(:changes) do - { - oldrev: 'be93687618e4b132087f430a4d8fc3a609c9b77c', - newrev: '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51', - ref: 'refs/heads/master' - } - end + let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' } + let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' } + let(:ref) { 'refs/heads/master' } + let(:changes) { { oldrev: oldrev, newrev: newrev, ref: ref } } let(:protocol) { 'ssh' } subject do @@ -23,7 +20,7 @@ describe Gitlab::Checks::ChangeAccess, lib: true do ).exec end - before { allow(user_access).to receive(:can_do_action?).with(:push_code).and_return(true) 
} + before { project.add_developer(user) } context 'without failed checks' do it "doesn't return any error" do @@ -41,62 +38,135 @@ describe Gitlab::Checks::ChangeAccess, lib: true do end context 'tags check' do - let(:changes) do - { - oldrev: 'be93687618e4b132087f430a4d8fc3a609c9b77c', - newrev: '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51', - ref: 'refs/tags/v1.0.0' - } - end + let(:ref) { 'refs/tags/v1.0.0' } it 'returns an error if the user is not allowed to update tags' do + allow(user_access).to receive(:can_do_action?).with(:push_code).and_return(true) expect(user_access).to receive(:can_do_action?).with(:admin_project).and_return(false) expect(subject.status).to be(false) expect(subject.message).to eq('You are not allowed to change existing tags on this project.') end - end - context 'protected branches check' do - before do - allow(project).to receive(:protected_branch?).with('master').and_return(true) - end + context 'with protected tag' do + let!(:protected_tag) { create(:protected_tag, project: project, name: 'v*') } - it 'returns an error if the user is not allowed to do forced pushes to protected branches' do - expect(Gitlab::Checks::ForcePush).to receive(:force_push?).and_return(true) + context 'as master' do + before { project.add_master(user) } - expect(subject.status).to be(false) - expect(subject.message).to eq('You are not allowed to force push code to a protected branch on this project.') - end + context 'deletion' do + let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' } + let(:newrev) { '0000000000000000000000000000000000000000' } - it 'returns an error if the user is not allowed to merge to protected branches' do - expect_any_instance_of(Gitlab::Checks::MatchingMergeRequest).to receive(:match?).and_return(true) - expect(user_access).to receive(:can_merge_to_branch?).and_return(false) - expect(user_access).to receive(:can_push_to_branch?).and_return(false) + it 'is prevented' do + expect(subject.status).to be(false) + expect(subject.message).to include('cannot be deleted') + end + end - expect(subject.status).to be(false) - expect(subject.message).to eq('You are not allowed to merge code into protected branches on this project.') + context 'update' do + let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' } + let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' } + + it 'is prevented' do + expect(subject.status).to be(false) + expect(subject.message).to include('cannot be updated') + end + end + end + + context 'creation' do + let(:oldrev) { '0000000000000000000000000000000000000000' } + let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' } + let(:ref) { 'refs/tags/v9.1.0' } + + it 'prevents creation below access level' do + expect(subject.status).to be(false) + expect(subject.message).to include('allowed to create this tag as it is protected') + end + + context 'when user has access' do + let!(:protected_tag) { create(:protected_tag, :developers_can_create, project: project, name: 'v*') } + + it 'allows tag creation' do + expect(subject.status).to be(true) + end + end + end end + end - it 'returns an error if the user is not allowed to push to protected branches' do - expect(user_access).to receive(:can_push_to_branch?).and_return(false) + context 'branches check' do + context 'trying to delete the default branch' do + let(:newrev) { '0000000000000000000000000000000000000000' } + let(:ref) { 'refs/heads/master' } - expect(subject.status).to be(false) - expect(subject.message).to eq('You are not allowed to push code to protected branches on this 
project.') + it 'returns an error' do + expect(subject.status).to be(false) + expect(subject.message).to eq('The default branch of a project cannot be deleted.') + end end - context 'branch deletion' do - let(:changes) do - { - oldrev: 'be93687618e4b132087f430a4d8fc3a609c9b77c', - newrev: '0000000000000000000000000000000000000000', - ref: 'refs/heads/master' - } + context 'protected branches check' do + before do + allow(ProtectedBranch).to receive(:protected?).with(project, 'master').and_return(true) + allow(ProtectedBranch).to receive(:protected?).with(project, 'feature').and_return(true) + end + + it 'returns an error if the user is not allowed to do forced pushes to protected branches' do + expect(Gitlab::Checks::ForcePush).to receive(:force_push?).and_return(true) + + expect(subject.status).to be(false) + expect(subject.message).to eq('You are not allowed to force push code to a protected branch on this project.') + end + + it 'returns an error if the user is not allowed to merge to protected branches' do + expect_any_instance_of(Gitlab::Checks::MatchingMergeRequest).to receive(:match?).and_return(true) + expect(user_access).to receive(:can_merge_to_branch?).and_return(false) + expect(user_access).to receive(:can_push_to_branch?).and_return(false) + + expect(subject.status).to be(false) + expect(subject.message).to eq('You are not allowed to merge code into protected branches on this project.') end - it 'returns an error if the user is not allowed to delete protected branches' do + it 'returns an error if the user is not allowed to push to protected branches' do + expect(user_access).to receive(:can_push_to_branch?).and_return(false) + expect(subject.status).to be(false) - expect(subject.message).to eq('You are not allowed to delete protected branches from this project.') + expect(subject.message).to eq('You are not allowed to push code to protected branches on this project.') + end + + context 'branch deletion' do + let(:newrev) { '0000000000000000000000000000000000000000' } + let(:ref) { 'refs/heads/feature' } + + context 'if the user is not allowed to delete protected branches' do + it 'returns an error' do + expect(subject.status).to be(false) + expect(subject.message).to eq('You are not allowed to delete protected branches from this project. 
Only a project master or owner can delete a protected branch.') + end + end + + context 'if the user is allowed to delete protected branches' do + before do + project.add_master(user) + end + + context 'through the web interface' do + let(:protocol) { 'web' } + + it 'allows branch deletion' do + expect(subject.status).to be(true) + end + end + + context 'over SSH or HTTP' do + it 'returns an error' do + expect(subject.status).to be(false) + expect(subject.message).to eq('You can only delete protected branches using the web interface.') + end + end + end end end end diff --git a/spec/lib/gitlab/checks/force_push_spec.rb b/spec/lib/gitlab/checks/force_push_spec.rb index 7a84bbebd02..bc66ce83d4a 100644 --- a/spec/lib/gitlab/checks/force_push_spec.rb +++ b/spec/lib/gitlab/checks/force_push_spec.rb @@ -1,19 +1,19 @@ require 'spec_helper' -describe Gitlab::Checks::ChangeAccess, lib: true do +describe Gitlab::Checks::ForcePush, lib: true do let(:project) { create(:project, :repository) } context "exit code checking" do it "does not raise a runtime error if the `popen` call to git returns a zero exit code" do allow(Gitlab::Popen).to receive(:popen).and_return(['normal output', 0]) - expect { Gitlab::Checks::ForcePush.force_push?(project, 'oldrev', 'newrev') }.not_to raise_error + expect { described_class.force_push?(project, 'oldrev', 'newrev') }.not_to raise_error end it "raises a runtime error if the `popen` call to git returns a non-zero exit code" do allow(Gitlab::Popen).to receive(:popen).and_return(['error', 1]) - expect { Gitlab::Checks::ForcePush.force_push?(project, 'oldrev', 'newrev') }.to raise_error(RuntimeError) + expect { described_class.force_push?(project, 'oldrev', 'newrev') }.to raise_error(RuntimeError) end end end diff --git a/spec/lib/gitlab/ci/build/artifacts/metadata/entry_spec.rb b/spec/lib/gitlab/ci/build/artifacts/metadata/entry_spec.rb index abc93e1b44a..3b905611467 100644 --- a/spec/lib/gitlab/ci/build/artifacts/metadata/entry_spec.rb +++ b/spec/lib/gitlab/ci/build/artifacts/metadata/entry_spec.rb @@ -135,6 +135,17 @@ describe Gitlab::Ci::Build::Artifacts::Metadata::Entry do subject { |example| path(example).nodes } it { is_expected.to eq 4 } end + + describe '#blob' do + let(:file_entry) { |example| path(example) } + subject { file_entry.blob } + + it 'returns a blob representing the entry data' do + expect(subject).to be_a(Blob) + expect(subject.path).to eq(file_entry.path) + expect(subject.size).to eq(file_entry.metadata[:size]) + end + end end describe 'non-existent/', path: 'non-existent/' do diff --git a/spec/lib/gitlab/ci/build/credentials/factory_spec.rb b/spec/lib/gitlab/ci/build/credentials/factory_spec.rb index 10b4b7a8826..d53db05e5e6 100644 --- a/spec/lib/gitlab/ci/build/credentials/factory_spec.rb +++ b/spec/lib/gitlab/ci/build/credentials/factory_spec.rb @@ -3,14 +3,14 @@ require 'spec_helper' describe Gitlab::Ci::Build::Credentials::Factory do let(:build) { create(:ci_build, name: 'spinach', stage: 'test', stage_idx: 0) } - subject { Gitlab::Ci::Build::Credentials::Factory.new(build).create! } + subject { described_class.new(build).create! 
} class TestProvider def initialize(build); end end before do - allow_any_instance_of(Gitlab::Ci::Build::Credentials::Factory).to receive(:providers).and_return([TestProvider]) + allow_any_instance_of(described_class).to receive(:providers).and_return([TestProvider]) end context 'when provider is valid' do diff --git a/spec/lib/gitlab/ci/build/credentials/registry_spec.rb b/spec/lib/gitlab/ci/build/credentials/registry_spec.rb index 84e44dd53e2..c6054138cde 100644 --- a/spec/lib/gitlab/ci/build/credentials/registry_spec.rb +++ b/spec/lib/gitlab/ci/build/credentials/registry_spec.rb @@ -4,14 +4,14 @@ describe Gitlab::Ci::Build::Credentials::Registry do let(:build) { create(:ci_build, name: 'spinach', stage: 'test', stage_idx: 0) } let(:registry_url) { 'registry.example.com:5005' } - subject { Gitlab::Ci::Build::Credentials::Registry.new(build) } + subject { described_class.new(build) } before do stub_container_registry_config(host_port: registry_url) end it 'contains valid DockerRegistry credentials' do - expect(subject).to be_kind_of(Gitlab::Ci::Build::Credentials::Registry) + expect(subject).to be_kind_of(described_class) expect(subject.username).to eq 'gitlab-ci-token' expect(subject.password).to eq build.token @@ -20,7 +20,7 @@ describe Gitlab::Ci::Build::Credentials::Registry do end describe '.valid?' do - subject { Gitlab::Ci::Build::Credentials::Registry.new(build).valid? } + subject { described_class.new(build).valid? } context 'when registry is enabled' do before do diff --git a/spec/lib/gitlab/ci/config/entry/global_spec.rb b/spec/lib/gitlab/ci/config/entry/global_spec.rb index 684d01e9056..23270ad5053 100644 --- a/spec/lib/gitlab/ci/config/entry/global_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/global_spec.rb @@ -113,7 +113,7 @@ describe Gitlab::Ci::Config::Entry::Global do describe '#variables_value' do it 'returns variables' do - expect(global.variables_value).to eq(VAR: 'value') + expect(global.variables_value).to eq('VAR' => 'value') end end @@ -154,7 +154,7 @@ describe Gitlab::Ci::Config::Entry::Global do services: ['postgres:9.1', 'mysql:5.5'], stage: 'test', cache: { key: 'k', untracked: true, paths: ['public/'] }, - variables: { VAR: 'value' }, + variables: { 'VAR' => 'value' }, ignore: false, after_script: ['make clean'] }, spinach: { name: :spinach, @@ -167,7 +167,7 @@ describe Gitlab::Ci::Config::Entry::Global do cache: { key: 'k', untracked: true, paths: ['public/'] }, variables: {}, ignore: false, - after_script: ['make clean'] }, + after_script: ['make clean'] } ) end end diff --git a/spec/lib/gitlab/ci/config/entry/variables_spec.rb b/spec/lib/gitlab/ci/config/entry/variables_spec.rb index f15f02f403e..84bfef9e8ad 100644 --- a/spec/lib/gitlab/ci/config/entry/variables_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/variables_spec.rb @@ -13,6 +13,14 @@ describe Gitlab::Ci::Config::Entry::Variables do it 'returns hash with key value strings' do expect(entry.value).to eq config end + + context 'with numeric keys and values in the config' do + let(:config) { { 10 => 20 } } + + it 'converts numeric key and numeric value into strings' do + expect(entry.value).to eq('10' => '20') + end + end end describe '#errors' do diff --git a/spec/lib/gitlab/ci/cron_parser_spec.rb b/spec/lib/gitlab/ci/cron_parser_spec.rb new file mode 100644 index 00000000000..809fda11879 --- /dev/null +++ b/spec/lib/gitlab/ci/cron_parser_spec.rb @@ -0,0 +1,186 @@ +require 'spec_helper' + +describe Gitlab::Ci::CronParser do + shared_examples_for "returns time in the future" do + it { is_expected.to 
be > Time.now } + end + + describe '#next_time_from' do + subject { described_class.new(cron, cron_timezone).next_time_from(Time.now) } + + context 'when cron and cron_timezone are valid' do + context 'when specific time' do + let(:cron) { '3 4 5 6 *' } + let(:cron_timezone) { 'UTC' } + + it_behaves_like "returns time in the future" + + it 'returns exact time' do + expect(subject.min).to eq(3) + expect(subject.hour).to eq(4) + expect(subject.day).to eq(5) + expect(subject.month).to eq(6) + end + end + + context 'when specific day of week' do + let(:cron) { '* * * * 0' } + let(:cron_timezone) { 'UTC' } + + it_behaves_like "returns time in the future" + + it 'returns exact day of week' do + expect(subject.wday).to eq(0) + end + end + + context 'when slash used' do + let(:cron) { '*/10 */6 */10 */10 *' } + let(:cron_timezone) { 'UTC' } + + it_behaves_like "returns time in the future" + + it 'returns specific time' do + expect(subject.min).to be_in([0, 10, 20, 30, 40, 50]) + expect(subject.hour).to be_in([0, 6, 12, 18]) + expect(subject.day).to be_in([1, 11, 21, 31]) + expect(subject.month).to be_in([1, 11]) + end + end + + context 'when range used' do + let(:cron) { '0,20,40 * 1-5 * *' } + let(:cron_timezone) { 'UTC' } + + it_behaves_like "returns time in the future" + + it 'returns specific time' do + expect(subject.min).to be_in([0, 20, 40]) + expect(subject.day).to be_in((1..5).to_a) + end + end + + context 'when cron_timezone is TZInfo format' do + before do + allow(Time).to receive(:zone) + .and_return(ActiveSupport::TimeZone['UTC']) + end + + let(:hour_in_utc) do + ActiveSupport::TimeZone[cron_timezone] + .now.change(hour: 0).in_time_zone('UTC').hour + end + + context 'when cron_timezone is US/Pacific' do + let(:cron) { '* 0 * * *' } + let(:cron_timezone) { 'US/Pacific' } + + it_behaves_like "returns time in the future" + + it 'converts time in server time zone' do + expect(subject.hour).to eq(hour_in_utc) + end + end + end + + context 'when cron_timezone is ActiveSupport::TimeZone format' do + before do + allow(Time).to receive(:zone) + .and_return(ActiveSupport::TimeZone['UTC']) + end + + let(:hour_in_utc) do + ActiveSupport::TimeZone[cron_timezone] + .now.change(hour: 0).in_time_zone('UTC').hour + end + + context 'when cron_timezone is Berlin' do + let(:cron) { '* 0 * * *' } + let(:cron_timezone) { 'Berlin' } + + it_behaves_like "returns time in the future" + + it 'converts time in server time zone' do + expect(subject.hour).to eq(hour_in_utc) + end + end + + context 'when cron_timezone is Eastern Time (US & Canada)' do + let(:cron) { '* 0 * * *' } + let(:cron_timezone) { 'Eastern Time (US & Canada)' } + + it_behaves_like "returns time in the future" + + it 'converts time in server time zone' do + expect(subject.hour).to eq(hour_in_utc) + end + end + end + end + + context 'when cron and cron_timezone are invalid' do + let(:cron) { 'invalid_cron' } + let(:cron_timezone) { 'invalid_cron_timezone' } + + it { is_expected.to be_nil } + end + + context 'when cron syntax is quoted' do + let(:cron) { "'0 * * * *'" } + let(:cron_timezone) { 'UTC' } + + it { expect(subject).to be_nil } + end + + context 'when cron syntax is rufus-scheduler syntax' do + let(:cron) { 'every 3h' } + let(:cron_timezone) { 'UTC' } + + it { expect(subject).to be_nil } + end + end + + describe '#cron_valid?' do + subject { described_class.new(cron, Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE).cron_valid? 
} + + context 'when cron is valid' do + let(:cron) { '* * * * *' } + + it { is_expected.to eq(true) } + end + + context 'when cron is invalid' do + let(:cron) { '*********' } + + it { is_expected.to eq(false) } + end + + context 'when cron syntax is quoted' do + let(:cron) { "'0 * * * *'" } + + it { is_expected.to eq(false) } + end + end + + describe '#cron_timezone_valid?' do + subject { described_class.new(Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_CRON, cron_timezone).cron_timezone_valid? } + + context 'when cron is valid' do + let(:cron_timezone) { 'Europe/Istanbul' } + + it { is_expected.to eq(true) } + end + + context 'when cron is invalid' do + let(:cron_timezone) { 'Invalid-zone' } + + it { is_expected.to eq(false) } + end + + context 'when cron_timezone is ActiveSupport::TimeZone format' do + let(:cron_timezone) { 'Eastern Time (US & Canada)' } + + it { is_expected.to eq(true) } + end + end +end diff --git a/spec/lib/gitlab/ci/status/build/action_spec.rb b/spec/lib/gitlab/ci/status/build/action_spec.rb new file mode 100644 index 00000000000..8c25f72804b --- /dev/null +++ b/spec/lib/gitlab/ci/status/build/action_spec.rb @@ -0,0 +1,56 @@ +require 'spec_helper' + +describe Gitlab::Ci::Status::Build::Action do + let(:status) { double('core status') } + let(:user) { double('user') } + + subject do + described_class.new(status) + end + + describe '#label' do + before do + allow(status).to receive(:label).and_return('label') + end + + context 'when status has action' do + before do + allow(status).to receive(:has_action?).and_return(true) + end + + it 'does not append text' do + expect(subject.label).to eq 'label' + end + end + + context 'when status does not have action' do + before do + allow(status).to receive(:has_action?).and_return(false) + end + + it 'appends text about action not allowed' do + expect(subject.label).to eq 'label (not allowed)' + end + end + end + + describe '.matches?' 
do + subject { described_class.matches?(build, user) } + + context 'when build is an action' do + let(:build) { create(:ci_build, :manual) } + + it 'is a correct match' do + expect(subject).to be true + end + end + + context 'when build is not manual' do + let(:build) { create(:ci_build) } + + it 'does not match' do + expect(subject).to be false + end + end + end +end diff --git a/spec/lib/gitlab/ci/status/build/factory_spec.rb b/spec/lib/gitlab/ci/status/build/factory_spec.rb index e648a3ac3a2..185bb9098da 100644 --- a/spec/lib/gitlab/ci/status/build/factory_spec.rb +++ b/spec/lib/gitlab/ci/status/build/factory_spec.rb @@ -204,11 +204,12 @@ describe Gitlab::Ci::Status::Build::Factory do it 'matches correct extended statuses' do expect(factory.extended_statuses) - .to eq [Gitlab::Ci::Status::Build::Play] + .to eq [Gitlab::Ci::Status::Build::Play, + Gitlab::Ci::Status::Build::Action] end - it 'fabricates a play detailed status' do - expect(status).to be_a Gitlab::Ci::Status::Build::Play + it 'fabricates action detailed status' do + expect(status).to be_a Gitlab::Ci::Status::Build::Action end it 'fabricates status with correct details' do @@ -216,11 +217,26 @@ describe Gitlab::Ci::Status::Build::Factory do expect(status.group).to eq 'manual' expect(status.icon).to eq 'icon_status_manual' expect(status.favicon).to eq 'favicon_status_manual' - expect(status.label).to eq 'manual play action' + expect(status.label).to include 'manual play action' expect(status).to have_details - expect(status).to have_action expect(status.action_path).to include 'play' end + + context 'when user has ability to play action' do + before do + build.project.add_master(user) + end + + it 'fabricates status that has action' do + expect(status).to have_action + end + end + + context 'when user does not have ability to play action' do + it 'fabricates status that has no action' do + expect(status).not_to have_action + end + end end context 'when build is an environment stop action' do @@ -232,21 +248,24 @@ describe Gitlab::Ci::Status::Build::Factory do it 'matches correct extended statuses' do expect(factory.extended_statuses) - .to eq [Gitlab::Ci::Status::Build::Stop] + .to eq [Gitlab::Ci::Status::Build::Stop, + Gitlab::Ci::Status::Build::Action] end - it 'fabricates a stop detailed status' do - expect(status).to be_a Gitlab::Ci::Status::Build::Stop + it 'fabricates action detailed status' do + expect(status).to be_a Gitlab::Ci::Status::Build::Action end - it 'fabricates status with correct details' do - expect(status.text).to eq 'manual' - expect(status.group).to eq 'manual' - expect(status.icon).to eq 'icon_status_manual' - expect(status.favicon).to eq 'favicon_status_manual' - expect(status.label).to eq 'manual stop action' - expect(status).to have_details - expect(status).to have_action + context 'when user is not allowed to execute manual action' do + it 'fabricates status with correct details' do + expect(status.text).to eq 'manual' + expect(status.group).to eq 'manual' + expect(status.icon).to eq 'icon_status_manual' + expect(status.favicon).to eq 'favicon_status_manual' + expect(status.label).to eq 'manual stop action (not allowed)' + expect(status).to have_details + expect(status).not_to have_action + end end end end diff --git a/spec/lib/gitlab/ci/status/build/play_spec.rb b/spec/lib/gitlab/ci/status/build/play_spec.rb index 6c97a4fe5ca..f5d0f977768 100644 --- a/spec/lib/gitlab/ci/status/build/play_spec.rb +++ b/spec/lib/gitlab/ci/status/build/play_spec.rb @@ -1,43 +1,48 @@ require 'spec_helper' describe 
Gitlab::Ci::Status::Build::Play do - let(:status) { double('core') } - let(:user) { double('user') } + let(:user) { create(:user) } + let(:build) { create(:ci_build, :manual) } + let(:status) { Gitlab::Ci::Status::Core.new(build, user) } subject { described_class.new(status) } describe '#label' do - it { expect(subject.label).to eq 'manual play action' } + it 'has a label that says it is a manual action' do + expect(subject.label).to eq 'manual play action' + end end - describe 'action details' do - let(:user) { create(:user) } - let(:build) { create(:ci_build) } - let(:status) { Gitlab::Ci::Status::Core.new(build, user) } - - describe '#has_action?' do - context 'when user is allowed to update build' do - before { build.project.team << [user, :developer] } + describe '#has_action?' do + context 'when user is allowed to update build' do + context 'when user can push to branch' do + before { build.project.add_master(user) } it { is_expected.to have_action } end - context 'when user is not allowed to update build' do + context 'when user can not push to the branch' do + before { build.project.add_developer(user) } + it { is_expected.not_to have_action } end end - describe '#action_path' do - it { expect(subject.action_path).to include "#{build.id}/play" } + context 'when user is not allowed to update build' do + it { is_expected.not_to have_action } end + end - describe '#action_icon' do - it { expect(subject.action_icon).to eq 'icon_action_play' } - end + describe '#action_path' do + it { expect(subject.action_path).to include "#{build.id}/play" } + end - describe '#action_title' do - it { expect(subject.action_title).to eq 'Play' } - end + describe '#action_icon' do + it { expect(subject.action_icon).to eq 'icon_action_play' } + end + + describe '#action_title' do + it { expect(subject.action_title).to eq 'Play' } end describe '.matches?' 
do diff --git a/spec/lib/gitlab/ci/status/extended_spec.rb b/spec/lib/gitlab/ci/status/extended_spec.rb index c2d74ca5cde..6eacb07078b 100644 --- a/spec/lib/gitlab/ci/status/extended_spec.rb +++ b/spec/lib/gitlab/ci/status/extended_spec.rb @@ -1,12 +1,8 @@ require 'spec_helper' describe Gitlab::Ci::Status::Extended do - subject do - Class.new.include(described_class) - end - it 'requires subclass to implement matcher' do - expect { subject.matches?(double, double) } + expect { described_class.matches?(double, double) } .to raise_error(NotImplementedError) end end diff --git a/spec/lib/gitlab/ci/status/group/common_spec.rb b/spec/lib/gitlab/ci/status/group/common_spec.rb new file mode 100644 index 00000000000..c0ca05881f5 --- /dev/null +++ b/spec/lib/gitlab/ci/status/group/common_spec.rb @@ -0,0 +1,20 @@ +require 'spec_helper' + +describe Gitlab::Ci::Status::Group::Common do + subject do + Gitlab::Ci::Status::Core.new(double, double) + .extend(described_class) + end + + it 'does not have action' do + expect(subject).not_to have_action + end + + it 'has details' do + expect(subject).not_to have_details + end + + it 'has no details_path' do + expect(subject.details_path).to be_falsy + end +end diff --git a/spec/lib/gitlab/ci/status/group/factory_spec.rb b/spec/lib/gitlab/ci/status/group/factory_spec.rb new file mode 100644 index 00000000000..0cd83123938 --- /dev/null +++ b/spec/lib/gitlab/ci/status/group/factory_spec.rb @@ -0,0 +1,13 @@ +require 'spec_helper' + +describe Gitlab::Ci::Status::Group::Factory do + it 'inherits from the core factory' do + expect(described_class) + .to be < Gitlab::Ci::Status::Factory + end + + it 'exposes group helpers' do + expect(described_class.common_helpers) + .to eq Gitlab::Ci::Status::Group::Common + end +end diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb new file mode 100644 index 00000000000..40ac5a3ed37 --- /dev/null +++ b/spec/lib/gitlab/ci/trace/stream_spec.rb @@ -0,0 +1,256 @@ +require 'spec_helper' + +describe Gitlab::Ci::Trace::Stream do + describe 'delegates' do + subject { described_class.new { nil } } + + it { is_expected.to delegate_method(:close).to(:stream) } + it { is_expected.to delegate_method(:tell).to(:stream) } + it { is_expected.to delegate_method(:seek).to(:stream) } + it { is_expected.to delegate_method(:size).to(:stream) } + it { is_expected.to delegate_method(:path).to(:stream) } + it { is_expected.to delegate_method(:truncate).to(:stream) } + it { is_expected.to delegate_method(:valid?).to(:stream).as(:present?) } + it { is_expected.to delegate_method(:file?).to(:path).as(:present?) 
} + end + + describe '#limit' do + let(:stream) do + described_class.new do + StringIO.new((1..8).to_a.join("\n")) + end + end + + it 'if size is larger we start from beginning' do + stream.limit(20) + + expect(stream.tell).to eq(0) + end + + it 'if size is smaller we start from the end' do + stream.limit(2) + + expect(stream.raw).to eq("8") + end + + context 'when the trace contains ANSI sequence and Unicode' do + let(:stream) do + described_class.new do + File.open(expand_fixture_path('trace/ansi-sequence-and-unicode')) + end + end + + it 'forwards to the next linefeed, case 1' do + stream.limit(7) + + result = stream.raw + + expect(result).to eq('') + expect(result.encoding).to eq(Encoding.default_external) + end + + it 'forwards to the next linefeed, case 2' do + stream.limit(29) + + result = stream.raw + + expect(result).to eq("\e[01;32m許功蓋\e[0m\n") + expect(result.encoding).to eq(Encoding.default_external) + end + + # See https://gitlab.com/gitlab-org/gitlab-ce/issues/30796 + it 'reads in binary, output as Encoding.default_external' do + stream.limit(52) + + result = stream.html + + expect(result).to eq("ヾ(´༎ຶД༎ຶ`)ノ<br><span class=\"term-fg-green\">許功蓋</span><br>") + expect(result.encoding).to eq(Encoding.default_external) + end + end + end + + describe '#append' do + let(:tempfile) { Tempfile.new } + + let(:stream) do + described_class.new do + tempfile.write("12345678") + tempfile.rewind + tempfile + end + end + + after do + tempfile.unlink + end + + it "truncates and append content" do + stream.append("89", 4) + stream.seek(0) + + expect(stream.size).to eq(6) + expect(stream.raw).to eq("123489") + end + + it 'appends in binary mode' do + '😺'.force_encoding('ASCII-8BIT').each_char.with_index do |byte, offset| + stream.append(byte, offset) + end + + stream.seek(0) + + expect(stream.size).to eq(4) + expect(stream.raw).to eq('😺') + end + end + + describe '#set' do + let(:stream) do + described_class.new do + StringIO.new("12345678") + end + end + + before do + stream.set("8901") + end + + it "overwrite content" do + stream.seek(0) + + expect(stream.size).to eq(4) + expect(stream.raw).to eq("8901") + end + end + + describe '#raw' do + let(:path) { __FILE__ } + let(:lines) { File.readlines(path) } + let(:stream) do + described_class.new do + File.open(path) + end + end + + it 'returns all contents if last_lines is not specified' do + result = stream.raw + + expect(result).to eq(lines.join) + expect(result.encoding).to eq(Encoding.default_external) + end + + context 'limit max lines' do + before do + # specifying BUFFER_SIZE forces to seek backwards + allow(described_class).to receive(:BUFFER_SIZE) + .and_return(2) + end + + it 'returns last few lines' do + result = stream.raw(last_lines: 2) + + expect(result).to eq(lines.last(2).join) + expect(result.encoding).to eq(Encoding.default_external) + end + + it 'returns everything if trying to get too many lines' do + result = stream.raw(last_lines: lines.size * 2) + + expect(result).to eq(lines.join) + expect(result.encoding).to eq(Encoding.default_external) + end + end + end + + describe '#html_with_state' do + let(:stream) do + described_class.new do + StringIO.new("1234") + end + end + + it 'returns html content with state' do + result = stream.html_with_state + + expect(result.html).to eq("1234") + end + + context 'follow-up state' do + let!(:last_result) { stream.html_with_state } + + before do + stream.append("5678", 4) + stream.seek(0) + end + + it "returns appended trace" do + result = stream.html_with_state(last_result.state) + + 
expect(result.append).to be_truthy + expect(result.html).to eq("5678") + end + end + end + + describe '#html' do + let(:stream) do + described_class.new do + StringIO.new("12\n34\n56") + end + end + + it "returns html" do + expect(stream.html).to eq("12<br>34<br>56") + end + + it "returns html for last line only" do + expect(stream.html(last_lines: 1)).to eq("56") + end + end + + describe '#extract_coverage' do + let(:stream) do + described_class.new do + StringIO.new(data) + end + end + + subject { stream.extract_coverage(regex) } + + context 'valid content & regex' do + let(:data) { 'Coverage 1033 / 1051 LOC (98.29%) covered' } + let(:regex) { '\(\d+.\d+\%\) covered' } + + it { is_expected.to eq("98.29") } + end + + context 'valid content & bad regex' do + let(:data) { 'Coverage 1033 / 1051 LOC (98.29%) covered\n' } + let(:regex) { 'very covered' } + + it { is_expected.to be_nil } + end + + context 'no coverage content & regex' do + let(:data) { 'No coverage for today :sad:' } + let(:regex) { '\(\d+.\d+\%\) covered' } + + it { is_expected.to be_nil } + end + + context 'multiple results in content & regex' do + let(:data) { ' (98.39%) covered. (98.29%) covered' } + let(:regex) { '\(\d+.\d+\%\) covered' } + + it { is_expected.to eq("98.29") } + end + + context 'using a regex capture' do + let(:data) { 'TOTAL 9926 3489 65%' } + let(:regex) { 'TOTAL\s+\d+\s+\d+\s+(\d{1,3}\%)' } + + it { is_expected.to eq("65") } + end + end +end diff --git a/spec/lib/gitlab/ci/trace_reader_spec.rb b/spec/lib/gitlab/ci/trace_reader_spec.rb deleted file mode 100644 index ff5551bf703..00000000000 --- a/spec/lib/gitlab/ci/trace_reader_spec.rb +++ /dev/null @@ -1,52 +0,0 @@ -require 'spec_helper' - -describe Gitlab::Ci::TraceReader do - let(:path) { __FILE__ } - let(:lines) { File.readlines(path) } - let(:bytesize) { lines.sum(&:bytesize) } - - it 'returns last few lines' do - 10.times do - subject = build_subject - last_lines = random_lines - - expected = lines.last(last_lines).join - result = subject.read(last_lines: last_lines) - - expect(result).to eq(expected) - expect(result.encoding).to eq(Encoding.default_external) - end - end - - it 'returns everything if trying to get too many lines' do - result = build_subject.read(last_lines: lines.size * 2) - - expect(result).to eq(lines.join) - expect(result.encoding).to eq(Encoding.default_external) - end - - it 'returns all contents if last_lines is not specified' do - result = build_subject.read - - expect(result).to eq(lines.join) - expect(result.encoding).to eq(Encoding.default_external) - end - - it 'raises an error if not passing an integer for last_lines' do - expect do - build_subject.read(last_lines: lines) - end.to raise_error(ArgumentError) - end - - def random_lines - Random.rand(lines.size) + 1 - end - - def random_buffer - Random.rand(bytesize) + 1 - end - - def build_subject - described_class.new(__FILE__, buffer_size: random_buffer) - end -end diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb new file mode 100644 index 00000000000..9cb0b62590a --- /dev/null +++ b/spec/lib/gitlab/ci/trace_spec.rb @@ -0,0 +1,228 @@ +require 'spec_helper' + +describe Gitlab::Ci::Trace do + let(:build) { create(:ci_build) } + let(:trace) { described_class.new(build) } + + describe "associations" do + it { expect(trace).to respond_to(:job) } + it { expect(trace).to delegate_method(:old_trace).to(:job) } + end + + describe '#html' do + before do + trace.set("12\n34") + end + + it "returns formatted html" do + expect(trace.html).to 
eq("12<br>34") + end + + it "returns last line of formatted html" do + expect(trace.html(last_lines: 1)).to eq("34") + end + end + + describe '#raw' do + before do + trace.set("12\n34") + end + + it "returns raw output" do + expect(trace.raw).to eq("12\n34") + end + + it "returns last line of raw output" do + expect(trace.raw(last_lines: 1)).to eq("34") + end + end + + describe '#extract_coverage' do + let(:regex) { '\(\d+.\d+\%\) covered' } + + context 'matching coverage' do + before do + trace.set('Coverage 1033 / 1051 LOC (98.29%) covered') + end + + it "returns valid coverage" do + expect(trace.extract_coverage(regex)).to eq("98.29") + end + end + + context 'no coverage' do + before do + trace.set('No coverage') + end + + it 'returs nil' do + expect(trace.extract_coverage(regex)).to be_nil + end + end + end + + describe '#set' do + before do + trace.set("12") + end + + it "returns trace" do + expect(trace.raw).to eq("12") + end + + context 'overwrite trace' do + before do + trace.set("34") + end + + it "returns new trace" do + expect(trace.raw).to eq("34") + end + end + + context 'runners token' do + let(:token) { 'my_secret_token' } + + before do + build.project.update(runners_token: token) + trace.set(token) + end + + it "hides token" do + expect(trace.raw).not_to include(token) + end + end + + context 'hides build token' do + let(:token) { 'my_secret_token' } + + before do + build.update(token: token) + trace.set(token) + end + + it "hides token" do + expect(trace.raw).not_to include(token) + end + end + end + + describe '#append' do + before do + trace.set("1234") + end + + it "returns correct trace" do + expect(trace.append("56", 4)).to eq(6) + expect(trace.raw).to eq("123456") + end + + context 'tries to append trace at different offset' do + it "fails with append" do + expect(trace.append("56", 2)).to eq(-4) + expect(trace.raw).to eq("1234") + end + end + + context 'runners token' do + let(:token) { 'my_secret_token' } + + before do + build.project.update(runners_token: token) + trace.append(token, 0) + end + + it "hides token" do + expect(trace.raw).not_to include(token) + end + end + + context 'build token' do + let(:token) { 'my_secret_token' } + + before do + build.update(token: token) + trace.append(token, 0) + end + + it "hides token" do + expect(trace.raw).not_to include(token) + end + end + end + + describe 'trace handling' do + context 'trace does not exist' do + it { expect(trace.exist?).to be(false) } + end + + context 'new trace path is used' do + before do + trace.send(:ensure_directory) + + File.open(trace.send(:default_path), "w") do |file| + file.write("data") + end + end + + it "trace exist" do + expect(trace.exist?).to be(true) + end + + it "can be erased" do + trace.erase! + expect(trace.exist?).to be(false) + end + end + + context 'deprecated path' do + let(:path) { trace.send(:deprecated_path) } + + context 'with valid ci_id' do + before do + build.project.update(ci_id: 1000) + + FileUtils.mkdir_p(File.dirname(path)) + + File.open(path, "w") do |file| + file.write("data") + end + end + + it "trace exist" do + expect(trace.exist?).to be(true) + end + + it "can be erased" do + trace.erase! + expect(trace.exist?).to be(false) + end + end + + context 'without valid ci_id' do + it "does not return deprecated path" do + expect(path).to be_nil + end + end + end + + context 'stored in database' do + before do + build.send(:write_attribute, :trace, "data") + end + + it "trace exist" do + expect(trace.exist?).to be(true) + end + + it "can be erased" do + trace.erase! 
+ expect(trace.exist?).to be(false) + end + + it "returns database data" do + expect(trace.raw).to eq("data") + end + end + end +end diff --git a/spec/lib/gitlab/conflict/file_collection_spec.rb b/spec/lib/gitlab/conflict/file_collection_spec.rb index 39d892c18c0..27f23ea70dc 100644 --- a/spec/lib/gitlab/conflict/file_collection_spec.rb +++ b/spec/lib/gitlab/conflict/file_collection_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe Gitlab::Conflict::FileCollection, lib: true do let(:merge_request) { create(:merge_request, source_branch: 'conflict-resolvable', target_branch: 'conflict-start') } - let(:file_collection) { Gitlab::Conflict::FileCollection.new(merge_request) } + let(:file_collection) { described_class.read_only(merge_request) } describe '#files' do it 'returns an array of Conflict::Files' do diff --git a/spec/lib/gitlab/contributions_calendar_spec.rb b/spec/lib/gitlab/contributions_calendar_spec.rb index e18a219ef36..79632e2b6a3 100644 --- a/spec/lib/gitlab/contributions_calendar_spec.rb +++ b/spec/lib/gitlab/contributions_calendar_spec.rb @@ -47,7 +47,7 @@ describe Gitlab::ContributionsCalendar do action: Event::CREATED, target: @targets[project], author: contributor, - created_at: day, + created_at: day ) end diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb index b01c4805a34..c796c98ec9f 100644 --- a/spec/lib/gitlab/current_settings_spec.rb +++ b/spec/lib/gitlab/current_settings_spec.rb @@ -10,7 +10,7 @@ describe Gitlab::CurrentSettings do describe '#current_application_settings' do context 'with DB available' do before do - allow_any_instance_of(Gitlab::CurrentSettings).to receive(:connect_to_db?).and_return(true) + allow_any_instance_of(described_class).to receive(:connect_to_db?).and_return(true) end it 'attempts to use cached values first' do @@ -36,7 +36,7 @@ describe Gitlab::CurrentSettings do context 'with DB unavailable' do before do - allow_any_instance_of(Gitlab::CurrentSettings).to receive(:connect_to_db?).and_return(false) + allow_any_instance_of(described_class).to receive(:connect_to_db?).and_return(false) end it 'returns an in-memory ApplicationSetting object' do diff --git a/spec/lib/gitlab/cycle_analytics/base_event_fetcher_spec.rb b/spec/lib/gitlab/cycle_analytics/base_event_fetcher_spec.rb index c455cd9b942..d8757c601ab 100644 --- a/spec/lib/gitlab/cycle_analytics/base_event_fetcher_spec.rb +++ b/spec/lib/gitlab/cycle_analytics/base_event_fetcher_spec.rb @@ -20,7 +20,7 @@ describe Gitlab::CycleAnalytics::BaseEventFetcher do before do allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return(Issue.all) - allow_any_instance_of(Gitlab::CycleAnalytics::BaseEventFetcher).to receive(:serialize) do |event| + allow_any_instance_of(described_class).to receive(:serialize) do |event| event end diff --git a/spec/lib/gitlab/cycle_analytics/events_spec.rb b/spec/lib/gitlab/cycle_analytics/events_spec.rb index 9d2ba481919..3610a0354e8 100644 --- a/spec/lib/gitlab/cycle_analytics/events_spec.rb +++ b/spec/lib/gitlab/cycle_analytics/events_spec.rb @@ -11,8 +11,6 @@ describe 'cycle analytics events' do end before do - allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([context]) - setup(context) end @@ -132,6 +130,8 @@ describe 'cycle analytics events' do end before do + merge_request.update(head_pipeline: pipeline) + create(:ci_build, pipeline: pipeline, status: :success, author: user) create(:ci_build, pipeline: pipeline, status: :success, author: user) @@ -228,6 
+228,8 @@ describe 'cycle analytics events' do end before do + merge_request.update(head_pipeline: pipeline) + create(:ci_build, pipeline: pipeline, status: :success, author: user) create(:ci_build, pipeline: pipeline, status: :success, author: user) @@ -332,7 +334,7 @@ describe 'cycle analytics events' do def setup(context) milestone = create(:milestone, project: project) context.update(milestone: milestone) - mr = create_merge_request_closing_issue(context) + mr = create_merge_request_closing_issue(context, commit_message: "References #{context.to_reference}") ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash) end diff --git a/spec/lib/gitlab/data_builder/push_spec.rb b/spec/lib/gitlab/data_builder/push_spec.rb index dbcfb9b7400..e59cba35b2f 100644 --- a/spec/lib/gitlab/data_builder/push_spec.rb +++ b/spec/lib/gitlab/data_builder/push_spec.rb @@ -35,6 +35,7 @@ describe Gitlab::DataBuilder::Push, lib: true do it { expect(data[:ref]).to eq('refs/tags/v1.1.0') } it { expect(data[:user_id]).to eq(user.id) } it { expect(data[:user_name]).to eq(user.name) } + it { expect(data[:user_username]).to eq(user.username) } it { expect(data[:user_email]).to eq(user.email) } it { expect(data[:user_avatar]).to eq(user.avatar_url) } it { expect(data[:project_id]).to eq(project.id) } diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb index e007044868c..dfa3ae9142e 100644 --- a/spec/lib/gitlab/database/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers_spec.rb @@ -58,6 +58,48 @@ describe Gitlab::Database::MigrationHelpers, lib: true do end end + describe '#remove_concurrent_index' do + context 'outside a transaction' do + before do + allow(model).to receive(:transaction_open?).and_return(false) + end + + context 'using PostgreSQL' do + before do + allow(Gitlab::Database).to receive(:postgresql?).and_return(true) + allow(model).to receive(:disable_statement_timeout) + end + + it 'removes the index concurrently' do + expect(model).to receive(:remove_index). + with(:users, { algorithm: :concurrently, column: :foo }) + + model.remove_concurrent_index(:users, :foo) + end + end + + context 'using MySQL' do + it 'removes an index' do + expect(Gitlab::Database).to receive(:postgresql?).and_return(false) + + expect(model).to receive(:remove_index). + with(:users, { column: :foo }) + + model.remove_concurrent_index(:users, :foo) + end + end + end + + context 'inside a transaction' do + it 'raises RuntimeError' do + expect(model).to receive(:transaction_open?).and_return(true) + + expect { model.remove_concurrent_index(:users, :foo) }. 
+ to raise_error(RuntimeError) + end + end + end + describe '#add_concurrent_foreign_key' do context 'inside a transaction' do it 'raises an error' do @@ -133,6 +175,50 @@ describe Gitlab::Database::MigrationHelpers, lib: true do end end + describe '#true_value' do + context 'using PostgreSQL' do + before do + expect(Gitlab::Database).to receive(:postgresql?).and_return(true) + end + + it 'returns the appropriate value' do + expect(model.true_value).to eq("'t'") + end + end + + context 'using MySQL' do + before do + expect(Gitlab::Database).to receive(:postgresql?).and_return(false) + end + + it 'returns the appropriate value' do + expect(model.true_value).to eq(1) + end + end + end + + describe '#false_value' do + context 'using PostgreSQL' do + before do + expect(Gitlab::Database).to receive(:postgresql?).and_return(true) + end + + it 'returns the appropriate value' do + expect(model.false_value).to eq("'f'") + end + end + + context 'using MySQL' do + before do + expect(Gitlab::Database).to receive(:postgresql?).and_return(false) + end + + it 'returns the appropriate value' do + expect(model.false_value).to eq(0) + end + end + end + describe '#update_column_in_batches' do before do create_list(:empty_project, 5) @@ -252,4 +338,431 @@ describe Gitlab::Database::MigrationHelpers, lib: true do end end end + + describe '#rename_column_concurrently' do + context 'in a transaction' do + it 'raises RuntimeError' do + allow(model).to receive(:transaction_open?).and_return(true) + + expect { model.rename_column_concurrently(:users, :old, :new) }. + to raise_error(RuntimeError) + end + end + + context 'outside a transaction' do + let(:old_column) do + double(:column, + type: :integer, + limit: 8, + default: 0, + null: false, + precision: 5, + scale: 1) + end + + let(:trigger_name) { model.rename_trigger_name(:users, :old, :new) } + + before do + allow(model).to receive(:transaction_open?).and_return(false) + allow(model).to receive(:column_for).and_return(old_column) + + # Since MySQL and PostgreSQL use different quoting styles we'll just + # stub the methods used for this to make testing easier. + allow(model).to receive(:quote_column_name) { |name| name.to_s } + allow(model).to receive(:quote_table_name) { |name| name.to_s } + end + + context 'using MySQL' do + it 'renames a column concurrently' do + allow(Gitlab::Database).to receive(:postgresql?).and_return(false) + + expect(model).to receive(:install_rename_triggers_for_mysql). + with(trigger_name, 'users', 'old', 'new') + + expect(model).to receive(:add_column). + with(:users, :new, :integer, + limit: old_column.limit, + precision: old_column.precision, + scale: old_column.scale) + + expect(model).to receive(:change_column_default). + with(:users, :new, old_column.default) + + expect(model).to receive(:update_column_in_batches) + + expect(model).to receive(:change_column_null).with(:users, :new, false) + + expect(model).to receive(:copy_indexes).with(:users, :old, :new) + expect(model).to receive(:copy_foreign_keys).with(:users, :old, :new) + + model.rename_column_concurrently(:users, :old, :new) + end + end + + context 'using PostgreSQL' do + it 'renames a column concurrently' do + allow(Gitlab::Database).to receive(:postgresql?).and_return(true) + + expect(model).to receive(:install_rename_triggers_for_postgresql). + with(trigger_name, 'users', 'old', 'new') + + expect(model).to receive(:add_column). 
+ with(:users, :new, :integer, + limit: old_column.limit, + precision: old_column.precision, + scale: old_column.scale) + + expect(model).to receive(:change_column_default). + with(:users, :new, old_column.default) + + expect(model).to receive(:update_column_in_batches) + + expect(model).to receive(:change_column_null).with(:users, :new, false) + + expect(model).to receive(:copy_indexes).with(:users, :old, :new) + expect(model).to receive(:copy_foreign_keys).with(:users, :old, :new) + + model.rename_column_concurrently(:users, :old, :new) + end + end + end + end + + describe '#cleanup_concurrent_column_rename' do + it 'cleans up the renaming procedure for PostgreSQL' do + allow(Gitlab::Database).to receive(:postgresql?).and_return(true) + + expect(model).to receive(:remove_rename_triggers_for_postgresql). + with(:users, /trigger_.{12}/) + + expect(model).to receive(:remove_column).with(:users, :old) + + model.cleanup_concurrent_column_rename(:users, :old, :new) + end + + it 'cleans up the renaming procedure for MySQL' do + allow(Gitlab::Database).to receive(:postgresql?).and_return(false) + + expect(model).to receive(:remove_rename_triggers_for_mysql). + with(/trigger_.{12}/) + + expect(model).to receive(:remove_column).with(:users, :old) + + model.cleanup_concurrent_column_rename(:users, :old, :new) + end + end + + describe '#change_column_type_concurrently' do + it 'changes the column type' do + expect(model).to receive(:rename_column_concurrently). + with('users', 'username', 'username_for_type_change', type: :text) + + model.change_column_type_concurrently('users', 'username', :text) + end + end + + describe '#cleanup_concurrent_column_type_change' do + it 'cleans up the type changing procedure' do + expect(model).to receive(:cleanup_concurrent_column_rename). + with('users', 'username', 'username_for_type_change') + + expect(model).to receive(:rename_column). + with('users', 'username_for_type_change', 'username') + + model.cleanup_concurrent_column_type_change('users', 'username') + end + end + + describe '#install_rename_triggers_for_postgresql' do + it 'installs the triggers for PostgreSQL' do + expect(model).to receive(:execute). + with(/CREATE OR REPLACE FUNCTION foo()/m) + + expect(model).to receive(:execute). + with(/CREATE TRIGGER foo/m) + + model.install_rename_triggers_for_postgresql('foo', :users, :old, :new) + end + end + + describe '#install_rename_triggers_for_mysql' do + it 'installs the triggers for MySQL' do + expect(model).to receive(:execute). + with(/CREATE TRIGGER foo_insert.+ON users/m) + + expect(model).to receive(:execute). + with(/CREATE TRIGGER foo_update.+ON users/m) + + model.install_rename_triggers_for_mysql('foo', :users, :old, :new) + end + end + + describe '#remove_rename_triggers_for_postgresql' do + it 'removes the function and trigger' do + expect(model).to receive(:execute).with('DROP TRIGGER foo ON bar') + expect(model).to receive(:execute).with('DROP FUNCTION foo()') + + model.remove_rename_triggers_for_postgresql('bar', 'foo') + end + end + + describe '#remove_rename_triggers_for_mysql' do + it 'removes the triggers' do + expect(model).to receive(:execute).with('DROP TRIGGER foo_insert') + expect(model).to receive(:execute).with('DROP TRIGGER foo_update') + + model.remove_rename_triggers_for_mysql('foo') + end + end + + describe '#rename_trigger_name' do + it 'returns a String' do + expect(model.rename_trigger_name(:users, :foo, :bar)). 
+ to match(/trigger_.{12}/) + end + end + + describe '#indexes_for' do + it 'returns the indexes for a column' do + idx1 = double(:idx, columns: %w(project_id)) + idx2 = double(:idx, columns: %w(user_id)) + + allow(model).to receive(:indexes).with('table').and_return([idx1, idx2]) + + expect(model.indexes_for('table', :user_id)).to eq([idx2]) + end + end + + describe '#foreign_keys_for' do + it 'returns the foreign keys for a column' do + fk1 = double(:fk, column: 'project_id') + fk2 = double(:fk, column: 'user_id') + + allow(model).to receive(:foreign_keys).with('table').and_return([fk1, fk2]) + + expect(model.foreign_keys_for('table', :user_id)).to eq([fk2]) + end + end + + describe '#copy_indexes' do + context 'using a regular index using a single column' do + it 'copies the index' do + index = double(:index, + columns: %w(project_id), + name: 'index_on_issues_project_id', + using: nil, + where: nil, + opclasses: {}, + unique: false, + lengths: [], + orders: []) + + allow(model).to receive(:indexes_for).with(:issues, 'project_id'). + and_return([index]) + + expect(model).to receive(:add_concurrent_index). + with(:issues, + %w(gl_project_id), + unique: false, + name: 'index_on_issues_gl_project_id', + length: [], + order: []) + + model.copy_indexes(:issues, :project_id, :gl_project_id) + end + end + + context 'using a regular index with multiple columns' do + it 'copies the index' do + index = double(:index, + columns: %w(project_id foobar), + name: 'index_on_issues_project_id_foobar', + using: nil, + where: nil, + opclasses: {}, + unique: false, + lengths: [], + orders: []) + + allow(model).to receive(:indexes_for).with(:issues, 'project_id'). + and_return([index]) + + expect(model).to receive(:add_concurrent_index). + with(:issues, + %w(gl_project_id foobar), + unique: false, + name: 'index_on_issues_gl_project_id_foobar', + length: [], + order: []) + + model.copy_indexes(:issues, :project_id, :gl_project_id) + end + end + + context 'using an index with a WHERE clause' do + it 'copies the index' do + index = double(:index, + columns: %w(project_id), + name: 'index_on_issues_project_id', + using: nil, + where: 'foo', + opclasses: {}, + unique: false, + lengths: [], + orders: []) + + allow(model).to receive(:indexes_for).with(:issues, 'project_id'). + and_return([index]) + + expect(model).to receive(:add_concurrent_index). + with(:issues, + %w(gl_project_id), + unique: false, + name: 'index_on_issues_gl_project_id', + length: [], + order: [], + where: 'foo') + + model.copy_indexes(:issues, :project_id, :gl_project_id) + end + end + + context 'using an index with a USING clause' do + it 'copies the index' do + index = double(:index, + columns: %w(project_id), + name: 'index_on_issues_project_id', + where: nil, + using: 'foo', + opclasses: {}, + unique: false, + lengths: [], + orders: []) + + allow(model).to receive(:indexes_for).with(:issues, 'project_id'). + and_return([index]) + + expect(model).to receive(:add_concurrent_index). 
+ with(:issues, + %w(gl_project_id), + unique: false, + name: 'index_on_issues_gl_project_id', + length: [], + order: [], + using: 'foo') + + model.copy_indexes(:issues, :project_id, :gl_project_id) + end + end + + context 'using an index with custom operator classes' do + it 'copies the index' do + index = double(:index, + columns: %w(project_id), + name: 'index_on_issues_project_id', + using: nil, + where: nil, + opclasses: { 'project_id' => 'bar' }, + unique: false, + lengths: [], + orders: []) + + allow(model).to receive(:indexes_for).with(:issues, 'project_id'). + and_return([index]) + + expect(model).to receive(:add_concurrent_index). + with(:issues, + %w(gl_project_id), + unique: false, + name: 'index_on_issues_gl_project_id', + length: [], + order: [], + opclasses: { 'gl_project_id' => 'bar' }) + + model.copy_indexes(:issues, :project_id, :gl_project_id) + end + end + + describe 'using an index of which the name does not contain the source column' do + it 'raises RuntimeError' do + index = double(:index, + columns: %w(project_id), + name: 'index_foobar_index', + using: nil, + where: nil, + opclasses: {}, + unique: false, + lengths: [], + orders: []) + + allow(model).to receive(:indexes_for).with(:issues, 'project_id'). + and_return([index]) + + expect { model.copy_indexes(:issues, :project_id, :gl_project_id) }. + to raise_error(RuntimeError) + end + end + end + + describe '#copy_foreign_keys' do + it 'copies foreign keys from one column to another' do + fk = double(:fk, + from_table: 'issues', + to_table: 'projects', + on_delete: :cascade) + + allow(model).to receive(:foreign_keys_for).with(:issues, :project_id). + and_return([fk]) + + expect(model).to receive(:add_concurrent_foreign_key). + with('issues', 'projects', column: :gl_project_id, on_delete: :cascade) + + model.copy_foreign_keys(:issues, :project_id, :gl_project_id) + end + end + + describe '#column_for' do + it 'returns a column object for an existing column' do + column = model.column_for(:users, :id) + + expect(column.name).to eq('id') + end + + it 'returns nil when a column does not exist' do + expect(model.column_for(:users, :kittens)).to be_nil + end + end + + describe '#replace_sql' do + context 'using postgres' do + before do + allow(Gitlab::Database).to receive(:mysql?).and_return(false) + end + + it 'builds the sql with correct functions' do + expect(model.replace_sql(Arel::Table.new(:users)[:first_name], "Alice", "Eve").to_s). + to include('regexp_replace') + end + end + + context 'using mysql' do + before do + allow(Gitlab::Database).to receive(:mysql?).and_return(true) + end + + it 'builds the sql with the correct functions' do + expect(model.replace_sql(Arel::Table.new(:users)[:first_name], "Alice", "Eve").to_s). 
+ to include('locate', 'insert')
+ end
+ end
+
+ describe 'results' do
+ let!(:user) { create(:user, name: 'Kathy Alice Aliceson') }
+
+ it 'replaces the correct part of the string' do
+ model.update_column_in_batches(:users, :name, model.replace_sql(Arel::Table.new(:users)[:name], 'Alice', 'Eve'))
+ expect(user.reload.name).to eq('Kathy Eve Aliceson')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/multi_threaded_migration_spec.rb b/spec/lib/gitlab/database/multi_threaded_migration_spec.rb
new file mode 100644
index 00000000000..6c45f13bb5a
--- /dev/null
+++ b/spec/lib/gitlab/database/multi_threaded_migration_spec.rb
@@ -0,0 +1,41 @@
+require 'spec_helper'
+
+describe Gitlab::Database::MultiThreadedMigration do
+ let(:migration) do
+ Class.new { include Gitlab::Database::MultiThreadedMigration }.new
+ end
+
+ describe '#connection' do
+ after do
+ Thread.current[described_class::MULTI_THREAD_AR_CONNECTION] = nil
+ end
+
+ it 'returns the thread-local connection if present' do
+ Thread.current[described_class::MULTI_THREAD_AR_CONNECTION] = 10
+
+ expect(migration.connection).to eq(10)
+ end
+
+ it 'returns the global connection if no thread-local connection was set' do
+ expect(migration.connection).to eq(ActiveRecord::Base.connection)
+ end
+ end
+
+ describe '#with_multiple_threads' do
+ it 'starts multiple threads and yields the supplied block in every thread' do
+ output = Queue.new
+
+ migration.with_multiple_threads(2) do
+ output << migration.connection.execute('SELECT 1')
+ end
+
+ expect(output.size).to eq(2)
+ end
+
+ it 'joins the threads when the join parameter is set' do
+ expect_any_instance_of(Thread).to receive(:join).and_call_original
+
+ migration.with_multiple_threads(1) { }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
new file mode 100644
index 00000000000..a3ab4e3dd9e
--- /dev/null
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
@@ -0,0 +1,206 @@
+require 'spec_helper'
+
+describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase do
+ let(:migration) { FakeRenameReservedPathMigrationV1.new }
+ let(:subject) { described_class.new(['the-path'], migration) }
+
+ before do
+ allow(migration).to receive(:say)
+ end
+
+ def migration_namespace(namespace)
+ Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses::
+ Namespace.find(namespace.id)
+ end
+
+ def migration_project(project)
+ Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses::
+ Project.find(project.id)
+ end
+
+ describe "#remove_last_occurrence" do
+ it "removes only the last occurrence of a string" do
+ input = "this/is/a-word-to-replace/namespace/with/a-word-to-replace"
+
+ expect(subject.remove_last_occurrence(input, "a-word-to-replace"))
+ .to eq("this/is/a-word-to-replace/namespace/with/")
+ end
+ end
+
+ describe '#remove_cached_html_for_projects' do
+ let(:project) { create(:empty_project, description_html: 'Project description') }
+
+ it 'removes description_html from projects' do
+ subject.remove_cached_html_for_projects([project.id])
+
+ expect(project.reload.description_html).to be_nil
+ end
+
+ it 'removes issue descriptions' do
+ issue = create(:issue, project: project, description_html: 'Issue description')
+
+ subject.remove_cached_html_for_projects([project.id])
+
+ expect(issue.reload.description_html).to be_nil
+ end
+
+ it 'removes merge request descriptions'
do + merge_request = create(:merge_request, + source_project: project, + target_project: project, + description_html: 'MergeRequest description') + + subject.remove_cached_html_for_projects([project.id]) + + expect(merge_request.reload.description_html).to be_nil + end + + it 'removes note html' do + note = create(:note, + project: project, + noteable: create(:issue, project: project), + note_html: 'note description') + + subject.remove_cached_html_for_projects([project.id]) + + expect(note.reload.note_html).to be_nil + end + + it 'removes milestone description' do + milestone = create(:milestone, + project: project, + description_html: 'milestone description') + + subject.remove_cached_html_for_projects([project.id]) + + expect(milestone.reload.description_html).to be_nil + end + end + + describe '#rename_path_for_routable' do + context 'for namespaces' do + let(:namespace) { create(:namespace, path: 'the-path') } + it "renames namespaces called the-path" do + subject.rename_path_for_routable(migration_namespace(namespace)) + + expect(namespace.reload.path).to eq("the-path0") + end + + it "renames the route to the namespace" do + subject.rename_path_for_routable(migration_namespace(namespace)) + + expect(Namespace.find(namespace.id).full_path).to eq("the-path0") + end + + it "renames the route for projects of the namespace" do + project = create(:project, path: "project-path", namespace: namespace) + + subject.rename_path_for_routable(migration_namespace(namespace)) + + expect(project.route.reload.path).to eq("the-path0/project-path") + end + + it 'returns the old & the new path' do + old_path, new_path = subject.rename_path_for_routable(migration_namespace(namespace)) + + expect(old_path).to eq('the-path') + expect(new_path).to eq('the-path0') + end + + it "doesn't rename routes that start with a similar name" do + other_namespace = create(:namespace, path: 'the-path-but-not-really') + project = create(:empty_project, path: 'the-project', namespace: other_namespace) + + subject.rename_path_for_routable(migration_namespace(namespace)) + + expect(project.route.reload.path).to eq('the-path-but-not-really/the-project') + end + + context "the-path namespace -> subgroup -> the-path0 project" do + it "updates the route of the project correctly" do + subgroup = create(:group, path: "subgroup", parent: namespace) + project = create(:project, path: "the-path0", namespace: subgroup) + + subject.rename_path_for_routable(migration_namespace(namespace)) + + expect(project.route.reload.path).to eq("the-path0/subgroup/the-path0") + end + end + end + + context 'for projects' do + let(:parent) { create(:namespace, path: 'the-parent') } + let(:project) { create(:empty_project, path: 'the-path', namespace: parent) } + + it 'renames the project called `the-path`' do + subject.rename_path_for_routable(migration_project(project)) + + expect(project.reload.path).to eq('the-path0') + end + + it 'renames the route for the project' do + subject.rename_path_for_routable(project) + + expect(project.reload.route.path).to eq('the-parent/the-path0') + end + + it 'returns the old & new path' do + old_path, new_path = subject.rename_path_for_routable(migration_project(project)) + + expect(old_path).to eq('the-parent/the-path') + expect(new_path).to eq('the-parent/the-path0') + end + end + end + + describe '#move_pages' do + it 'moves the pages directory' do + expect(subject).to receive(:move_folders) + .with(TestEnv.pages_path, 'old-path', 'new-path') + + subject.move_pages('old-path', 'new-path') + end + end + + 
describe "#move_uploads" do + let(:test_dir) { File.join(Rails.root, 'tmp', 'tests', 'rename_reserved_paths') } + let(:uploads_dir) { File.join(test_dir, 'public', 'uploads') } + + it 'moves subdirectories in the uploads folder' do + expect(subject).to receive(:uploads_dir).and_return(uploads_dir) + expect(subject).to receive(:move_folders).with(uploads_dir, 'old_path', 'new_path') + + subject.move_uploads('old_path', 'new_path') + end + + it "doesn't move uploads when they are stored in object storage" do + expect(subject).to receive(:file_storage?).and_return(false) + expect(subject).not_to receive(:move_folders) + + subject.move_uploads('old_path', 'new_path') + end + end + + describe '#move_folders' do + let(:test_dir) { File.join(Rails.root, 'tmp', 'tests', 'rename_reserved_paths') } + let(:uploads_dir) { File.join(test_dir, 'public', 'uploads') } + + before do + FileUtils.remove_dir(test_dir) if File.directory?(test_dir) + FileUtils.mkdir_p(uploads_dir) + allow(subject).to receive(:uploads_dir).and_return(uploads_dir) + end + + it 'moves a folder with files' do + source = File.join(uploads_dir, 'parent-group', 'sub-group') + FileUtils.mkdir_p(source) + destination = File.join(uploads_dir, 'parent-group', 'moved-group') + FileUtils.touch(File.join(source, 'test.txt')) + expected_file = File.join(destination, 'test.txt') + + subject.move_folders(uploads_dir, File.join('parent-group', 'sub-group'), File.join('parent-group', 'moved-group')) + + expect(File.exist?(expected_file)).to be(true) + end + end +end diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb new file mode 100644 index 00000000000..c56fded7516 --- /dev/null +++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb @@ -0,0 +1,227 @@ +require 'spec_helper' + +describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces do + let(:migration) { FakeRenameReservedPathMigrationV1.new } + let(:subject) { described_class.new(['the-path'], migration) } + + before do + allow(migration).to receive(:say) + end + + def migration_namespace(namespace) + Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses:: + Namespace.find(namespace.id) + end + + describe '#namespaces_for_paths' do + context 'nested namespaces' do + let(:subject) { described_class.new(['parent/the-Path'], migration) } + + it 'includes the namespace' do + parent = create(:namespace, path: 'parent') + child = create(:namespace, path: 'the-path', parent: parent) + + found_ids = subject.namespaces_for_paths(type: :child). + map(&:id) + + expect(found_ids).to contain_exactly(child.id) + end + end + + context 'for child namespaces' do + it 'only returns child namespaces with the correct path' do + _root_namespace = create(:namespace, path: 'THE-path') + _other_path = create(:namespace, + path: 'other', + parent: create(:namespace)) + namespace = create(:namespace, + path: 'the-path', + parent: create(:namespace)) + + found_ids = subject.namespaces_for_paths(type: :child). 
+ map(&:id) + + expect(found_ids).to contain_exactly(namespace.id) + end + + it 'has no namespaces that look the same' do + _root_namespace = create(:namespace, path: 'THE-path') + _similar_path = create(:namespace, + path: 'not-really-the-path', + parent: create(:namespace)) + namespace = create(:namespace, + path: 'the-path', + parent: create(:namespace)) + + found_ids = subject.namespaces_for_paths(type: :child). + map(&:id) + + expect(found_ids).to contain_exactly(namespace.id) + end + end + + context 'for top level namespaces' do + it 'only returns top level namespaces with the correct path' do + root_namespace = create(:namespace, path: 'the-path') + _other_path = create(:namespace, path: 'other') + _child_namespace = create(:namespace, + path: 'the-path', + parent: create(:namespace)) + + found_ids = subject.namespaces_for_paths(type: :top_level). + map(&:id) + + expect(found_ids).to contain_exactly(root_namespace.id) + end + + it 'has no namespaces that just look the same' do + root_namespace = create(:namespace, path: 'the-path') + _similar_path = create(:namespace, path: 'not-really-the-path') + _child_namespace = create(:namespace, + path: 'the-path', + parent: create(:namespace)) + + found_ids = subject.namespaces_for_paths(type: :top_level). + map(&:id) + + expect(found_ids).to contain_exactly(root_namespace.id) + end + end + end + + describe '#move_repositories' do + let(:namespace) { create(:group, name: 'hello-group') } + it 'moves a project for a namespace' do + create(:project, namespace: namespace, path: 'hello-project') + expected_path = File.join(TestEnv.repos_path, 'bye-group', 'hello-project.git') + + subject.move_repositories(namespace, 'hello-group', 'bye-group') + + expect(File.directory?(expected_path)).to be(true) + end + + it 'moves a namespace in a subdirectory correctly' do + child_namespace = create(:group, name: 'sub-group', parent: namespace) + create(:project, namespace: child_namespace, path: 'hello-project') + + expected_path = File.join(TestEnv.repos_path, 'hello-group', 'renamed-sub-group', 'hello-project.git') + + subject.move_repositories(child_namespace, 'hello-group/sub-group', 'hello-group/renamed-sub-group') + + expect(File.directory?(expected_path)).to be(true) + end + + it 'moves a parent namespace with subdirectories' do + child_namespace = create(:group, name: 'sub-group', parent: namespace) + create(:project, namespace: child_namespace, path: 'hello-project') + expected_path = File.join(TestEnv.repos_path, 'renamed-group', 'sub-group', 'hello-project.git') + + subject.move_repositories(child_namespace, 'hello-group', 'renamed-group') + + expect(File.directory?(expected_path)).to be(true) + end + end + + describe "#child_ids_for_parent" do + it "collects child ids for all levels" do + parent = create(:namespace) + first_child = create(:namespace, parent: parent) + second_child = create(:namespace, parent: parent) + third_child = create(:namespace, parent: second_child) + all_ids = [parent.id, first_child.id, second_child.id, third_child.id] + + collected_ids = subject.child_ids_for_parent(parent, ids: [parent.id]) + + expect(collected_ids).to contain_exactly(*all_ids) + end + end + + describe "#rename_namespace" do + let(:namespace) { create(:group, name: 'the-path') } + + it 'renames paths & routes for the namespace' do + expect(subject).to receive(:rename_path_for_routable). + with(namespace).
+ and_call_original + + subject.rename_namespace(namespace) + + expect(namespace.reload.path).to eq('the-path0') + end + + it "moves the repository for a project in the namespace" do + create(:project, namespace: namespace, path: "the-path-project") + expected_repo = File.join(TestEnv.repos_path, "the-path0", "the-path-project.git") + + subject.rename_namespace(namespace) + + expect(File.directory?(expected_repo)).to be(true) + end + + it "moves the uploads for the namespace" do + expect(subject).to receive(:move_uploads).with("the-path", "the-path0") + + subject.rename_namespace(namespace) + end + + it "moves the pages for the namespace" do + expect(subject).to receive(:move_pages).with("the-path", "the-path0") + + subject.rename_namespace(namespace) + end + + it 'invalidates the markdown cache of related projects' do + project = create(:empty_project, namespace: namespace, path: "the-path-project") + + expect(subject).to receive(:remove_cached_html_for_projects).with([project.id]) + + subject.rename_namespace(namespace) + end + + it "doesn't rename users for other namespaces" do + expect(subject).not_to receive(:rename_user) + + subject.rename_namespace(namespace) + end + + it 'renames the username of a namespace for a user' do + user = create(:user, username: 'the-path') + + expect(subject).to receive(:rename_user).with('the-path', 'the-path0') + + subject.rename_namespace(user.namespace) + end + end + + describe '#rename_user' do + it 'renames a username' do + subject = described_class.new([], migration) + user = create(:user, username: 'broken') + + subject.rename_user('broken', 'broken0') + + expect(user.reload.username).to eq('broken0') + end + end + + describe '#rename_namespaces' do + let!(:top_level_namespace) { create(:namespace, path: 'the-path') } + let!(:child_namespace) do + create(:namespace, path: 'the-path', parent: create(:namespace)) + end + + it 'renames top level namespaces' do + expect(subject).to receive(:rename_namespace). + with(migration_namespace(top_level_namespace)) + + subject.rename_namespaces(type: :top_level) + end + + it 'renames child namespaces' do + expect(subject).to receive(:rename_namespace). + with(migration_namespace(child_namespace)) + + subject.rename_namespaces(type: :child) + end + end +end diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb new file mode 100644 index 00000000000..59e8de2712d --- /dev/null +++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb @@ -0,0 +1,102 @@ +require 'spec_helper' + +describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects do + let(:migration) { FakeRenameReservedPathMigrationV1.new } + let(:subject) { described_class.new(['the-path'], migration) } + + before do + allow(migration).to receive(:say) + end + + describe '#projects_for_paths' do + it 'searches using nested paths' do + namespace = create(:namespace, path: 'hello') + project = create(:empty_project, path: 'THE-path', namespace: namespace) + + result_ids = described_class.new(['Hello/the-path'], migration).
+ projects_for_paths.map(&:id) + + expect(result_ids).to contain_exactly(project.id) + end + + it 'includes the correct projects' do + project = create(:empty_project, path: 'THE-path') + _other_project = create(:empty_project) + + result_ids = subject.projects_for_paths.map(&:id) + + expect(result_ids).to contain_exactly(project.id) + end + end + + describe '#rename_projects' do + let!(:projects) { create_list(:empty_project, 2, path: 'the-path') } + + it 'renames each project' do + expect(subject).to receive(:rename_project).twice + + subject.rename_projects + end + + it 'invalidates the markdown cache of related projects' do + expect(subject).to receive(:remove_cached_html_for_projects). + with(projects.map(&:id)) + + subject.rename_projects + end + end + + describe '#rename_project' do + let(:project) do + create(:empty_project, + path: 'the-path', + namespace: create(:namespace, path: 'known-parent' )) + end + + it 'renames path & route for the project' do + expect(subject).to receive(:rename_path_for_routable). + with(project). + and_call_original + + subject.rename_project(project) + + expect(project.reload.path).to eq('the-path0') + end + + it 'moves the wiki & the repo' do + expect(subject).to receive(:move_repository). + with(project, 'known-parent/the-path.wiki', 'known-parent/the-path0.wiki') + expect(subject).to receive(:move_repository). + with(project, 'known-parent/the-path', 'known-parent/the-path0') + + subject.rename_project(project) + end + + it 'moves uploads' do + expect(subject).to receive(:move_uploads). + with('known-parent/the-path', 'known-parent/the-path0') + + subject.rename_project(project) + end + + it 'moves pages' do + expect(subject).to receive(:move_pages). + with('known-parent/the-path', 'known-parent/the-path0') + + subject.rename_project(project) + end + end + + describe '#move_repository' do + let(:known_parent) { create(:namespace, path: 'known-parent') } + let(:project) { create(:project, path: 'the-path', namespace: known_parent) } + + it 'moves the repository for a project' do + expected_path = File.join(TestEnv.repos_path, 'known-parent', 'new-repo.git') + + subject.move_repository(project, 'known-parent/the-path', 'known-parent/new-repo') + + expect(File.directory?(expected_path)).to be(true) + end + end +end diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb new file mode 100644 index 00000000000..f8cc1eb91ec --- /dev/null +++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb @@ -0,0 +1,54 @@ +require 'spec_helper' + +shared_examples 'renames child namespaces' do |type| + it 'renames namespaces' do + rename_namespaces = double + expect(described_class::RenameNamespaces). + to receive(:new).with(['first-path', 'second-path'], subject). + and_return(rename_namespaces) + expect(rename_namespaces).to receive(:rename_namespaces). + with(type: :child) + + subject.rename_wildcard_paths(['first-path', 'second-path']) + end +end + +describe Gitlab::Database::RenameReservedPathsMigration::V1 do + let(:subject) { FakeRenameReservedPathMigrationV1.new } + + before do + allow(subject).to receive(:say) + end + + describe '#rename_child_paths' do + it_behaves_like 'renames child namespaces' + end + + describe '#rename_wildcard_paths' do + it_behaves_like 'renames child namespaces' + + it 'should rename projects' do + rename_projects = double + expect(described_class::RenameProjects). + to receive(:new).with(['the-path'], subject). 
+ and_return(rename_projects) + + expect(rename_projects).to receive(:rename_projects) + + subject.rename_wildcard_paths(['the-path']) + end + end + + describe '#rename_root_paths' do + it 'should rename namespaces' do + rename_namespaces = double + expect(described_class::RenameNamespaces). + to receive(:new).with(['the-path'], subject). + and_return(rename_namespaces) + expect(rename_namespaces).to receive(:rename_namespaces). + with(type: :top_level) + + subject.rename_root_paths('the-path') + end + end +end diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb index 4ce4e6e1034..9b1d66a1b1c 100644 --- a/spec/lib/gitlab/database_spec.rb +++ b/spec/lib/gitlab/database_spec.rb @@ -150,13 +150,13 @@ describe Gitlab::Database, lib: true do it 'returns correct value for PostgreSQL' do expect(described_class).to receive(:postgresql?).and_return(true) - expect(MigrationTest.new.true_value).to eq "'t'" + expect(described_class.true_value).to eq "'t'" end it 'returns correct value for MySQL' do expect(described_class).to receive(:postgresql?).and_return(false) - expect(MigrationTest.new.true_value).to eq 1 + expect(described_class.true_value).to eq 1 end end @@ -164,13 +164,13 @@ describe Gitlab::Database, lib: true do it 'returns correct value for PostgreSQL' do expect(described_class).to receive(:postgresql?).and_return(true) - expect(MigrationTest.new.false_value).to eq "'f'" + expect(described_class.false_value).to eq "'f'" end it 'returns correct value for MySQL' do expect(described_class).to receive(:postgresql?).and_return(false) - expect(MigrationTest.new.false_value).to eq 0 + expect(described_class.false_value).to eq 0 end end end diff --git a/spec/lib/gitlab/dependency_linker/gemfile_linker_spec.rb b/spec/lib/gitlab/dependency_linker/gemfile_linker_spec.rb new file mode 100644 index 00000000000..2e52097a946 --- /dev/null +++ b/spec/lib/gitlab/dependency_linker/gemfile_linker_spec.rb @@ -0,0 +1,60 @@ +require 'rails_helper' + +describe Gitlab::DependencyLinker::GemfileLinker, lib: true do + describe '.support?' 
do + it 'supports Gemfile' do + expect(described_class.support?('Gemfile')).to be_truthy + end + + it 'supports gems.rb' do + expect(described_class.support?('gems.rb')).to be_truthy + end + + it 'does not support other files' do + expect(described_class.support?('Gemfile.lock')).to be_falsey + end + end + + describe '#link' do + let(:file_name) { 'Gemfile' } + + let(:file_content) do + <<-CONTENT.strip_heredoc + source 'https://rubygems.org' + + gem "rails", '4.2.6', github: "rails/rails" + gem 'rails-deprecated_sanitizer', '~> 1.0.3' + gem 'responders', '~> 2.0', :github => 'rails/responders' + gem 'sprockets', '~> 3.6.0', git: 'https://gitlab.example.com/gems/sprockets' + gem 'default_value_for', '~> 3.0.0' + CONTENT + end + + subject { Gitlab::Highlight.highlight(file_name, file_content) } + + def link(name, url) + %{<a href="#{url}" rel="noopener noreferrer" target="_blank">#{name}</a>} + end + + it 'links sources' do + expect(subject).to include(link('https://rubygems.org', 'https://rubygems.org')) + end + + it 'links dependencies' do + expect(subject).to include(link('rails', 'https://rubygems.org/gems/rails')) + expect(subject).to include(link('rails-deprecated_sanitizer', 'https://rubygems.org/gems/rails-deprecated_sanitizer')) + expect(subject).to include(link('responders', 'https://rubygems.org/gems/responders')) + expect(subject).to include(link('sprockets', 'https://rubygems.org/gems/sprockets')) + expect(subject).to include(link('default_value_for', 'https://rubygems.org/gems/default_value_for')) + end + + it 'links GitHub repos' do + expect(subject).to include(link('rails/rails', 'https://github.com/rails/rails')) + expect(subject).to include(link('rails/responders', 'https://github.com/rails/responders')) + end + + it 'links Git repos' do + expect(subject).to include(link('https://gitlab.example.com/gems/sprockets', 'https://gitlab.example.com/gems/sprockets')) + end + end +end diff --git a/spec/lib/gitlab/dependency_linker_spec.rb b/spec/lib/gitlab/dependency_linker_spec.rb new file mode 100644 index 00000000000..03d5b61d70c --- /dev/null +++ b/spec/lib/gitlab/dependency_linker_spec.rb @@ -0,0 +1,13 @@ +require 'rails_helper' + +describe Gitlab::DependencyLinker, lib: true do + describe '.link' do + it 'links using GemfileLinker' do + blob_name = 'Gemfile' + + expect(described_class::GemfileLinker).to receive(:link) + + described_class.link(blob_name, nil, nil) + end + end +end diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb index c6bd4e81f4f..7d7d4a55e63 100644 --- a/spec/lib/gitlab/diff/highlight_spec.rb +++ b/spec/lib/gitlab/diff/highlight_spec.rb @@ -34,7 +34,7 @@ describe Gitlab::Diff::Highlight, lib: true do end it 'highlights and marks added lines' do - code = %Q{+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class='idiff left'>RuntimeError</span></span><span class="p"><span class='idiff'>,</span></span><span class='idiff right'> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n} + code = %Q{+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class="idiff left">RuntimeError</span></span><span class="p"><span class="idiff">,</span></span><span class="idiff right"> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n} expect(subject[5].text).to eq(code) end @@ -67,7 +67,7 @@ describe Gitlab::Diff::Highlight, lib: true do end it 'marks added lines' do 
- code = %q{+ raise <span class='idiff left right'>RuntimeError, </span>"System commands must be given as an array of strings"} + code = %q{+ raise <span class="idiff left right">RuntimeError, </span>"System commands must be given as an array of strings"} expect(subject[5].text).to eq(code) expect(subject[5].text).to be_html_safe diff --git a/spec/lib/gitlab/diff/inline_diff_markdown_marker_spec.rb b/spec/lib/gitlab/diff/inline_diff_markdown_marker_spec.rb new file mode 100644 index 00000000000..d6e8b8ac4b2 --- /dev/null +++ b/spec/lib/gitlab/diff/inline_diff_markdown_marker_spec.rb @@ -0,0 +1,14 @@ +require 'spec_helper' + +describe Gitlab::Diff::InlineDiffMarkdownMarker, lib: true do + describe '#mark' do + let(:raw) { "abc 'def'" } + let(:inline_diffs) { [2..5] } + let(:subject) { described_class.new(raw).mark(inline_diffs, mode: :deletion) } + + it 'marks the range' do + expect(subject).to eq("ab{-c 'd-}ef'") + expect(subject).to be_html_safe + end + end +end diff --git a/spec/lib/gitlab/diff/inline_diff_marker_spec.rb b/spec/lib/gitlab/diff/inline_diff_marker_spec.rb index 198ff977f24..95da344802d 100644 --- a/spec/lib/gitlab/diff/inline_diff_marker_spec.rb +++ b/spec/lib/gitlab/diff/inline_diff_marker_spec.rb @@ -1,26 +1,26 @@ require 'spec_helper' describe Gitlab::Diff::InlineDiffMarker, lib: true do - describe '#inline_diffs' do + describe '#mark' do context "when the rich text is html safe" do - let(:raw) { "abc 'def'" } + let(:raw) { "abc 'def'" } let(:rich) { %{<span class="abc">abc</span><span class="space"> </span><span class="def">'def'</span>}.html_safe } let(:inline_diffs) { [2..5] } - let(:subject) { Gitlab::Diff::InlineDiffMarker.new(raw, rich).mark(inline_diffs) } + let(:subject) { described_class.new(raw, rich).mark(inline_diffs) } - it 'marks the inline diffs' do - expect(subject).to eq(%{<span class="abc">ab<span class='idiff left'>c</span></span><span class="space"><span class='idiff'> </span></span><span class="def"><span class='idiff right'>'d</span>ef'</span>}) + it 'marks the range' do + expect(subject).to eq(%{<span class="abc">ab<span class="idiff left">c</span></span><span class="space"><span class="idiff"> </span></span><span class="def"><span class="idiff right">'d</span>ef'</span>}) expect(subject).to be_html_safe end end context "when the text text is not html safe" do - let(:raw) { "abc 'def'" } + let(:raw) { "abc 'def'" } let(:inline_diffs) { [2..5] } - let(:subject) { Gitlab::Diff::InlineDiffMarker.new(raw).mark(inline_diffs) } + let(:subject) { described_class.new(raw).mark(inline_diffs) } - it 'marks the inline diffs' do - expect(subject).to eq(%{ab<span class='idiff left right'>c 'd</span>ef'}) + it 'marks the range' do + expect(subject).to eq(%{ab<span class="idiff left right">c 'd</span>ef'}) expect(subject).to be_html_safe end end diff --git a/spec/lib/gitlab/diff/position_tracer_spec.rb b/spec/lib/gitlab/diff/position_tracer_spec.rb index 994995b57b8..4d202a76e1b 100644 --- a/spec/lib/gitlab/diff/position_tracer_spec.rb +++ b/spec/lib/gitlab/diff/position_tracer_spec.rb @@ -100,7 +100,7 @@ describe Gitlab::Diff::PositionTracer, lib: true do project, current_user, start_branch: branch_name, - target_branch: branch_name, + branch_name: branch_name, commit_message: "Create file", file_path: file_name, file_content: content @@ -113,7 +113,7 @@ describe Gitlab::Diff::PositionTracer, lib: true do project, current_user, start_branch: branch_name, - target_branch: branch_name, + branch_name: branch_name, commit_message: "Update file", file_path: 
file_name, file_content: content @@ -122,11 +122,11 @@ describe Gitlab::Diff::PositionTracer, lib: true do end def delete_file(branch_name, file_name) - Files::DestroyService.new( + Files::DeleteService.new( project, current_user, start_branch: branch_name, - target_branch: branch_name, + branch_name: branch_name, commit_message: "Delete file", file_path: file_name ).execute @@ -569,13 +569,8 @@ describe Gitlab::Diff::PositionTracer, lib: true do # 1 1 BB # 2 2 A - it "returns the new position" do - expect_new_position( - old_path: file_name, - new_path: new_file_name, - old_line: old_position.new_line, - new_line: old_position.new_line - ) + it "returns nil since the line doesn't exist in the new diffs anymore" do + expect(subject).to be_nil end end @@ -1377,7 +1372,7 @@ describe Gitlab::Diff::PositionTracer, lib: true do nil, { old_path: file_name, new_path: file_name, old_line: 5, new_line: 5 }, { old_path: file_name, old_line: 6 }, - { new_path: file_name, new_line: 7 }, + { new_path: file_name, new_line: 7 } ] expect_positions(old_position_attrs, new_position_attrs) @@ -1449,7 +1444,7 @@ describe Gitlab::Diff::PositionTracer, lib: true do nil, { old_path: file_name, new_path: file_name, old_line: 5, new_line: 5 }, { old_path: file_name, old_line: 6 }, - { new_path: file_name, new_line: 7 }, + { new_path: file_name, new_line: 7 } ] expect_positions(old_position_attrs, new_position_attrs) @@ -1503,7 +1498,7 @@ describe Gitlab::Diff::PositionTracer, lib: true do { old_path: file_name, new_path: file_name, old_line: 5, new_line: 4 }, { old_path: file_name, new_path: file_name, old_line: 6, new_line: 5 }, nil, - { new_path: file_name, new_line: 6 }, + { new_path: file_name, new_line: 6 } ] expect_positions(old_position_attrs, new_position_attrs) @@ -1751,7 +1746,7 @@ describe Gitlab::Diff::PositionTracer, lib: true do { old_path: file_name, new_path: file_name, old_line: 4, new_line: 5 }, { old_path: file_name, old_line: 5 }, { new_path: file_name, new_line: 6 }, - { new_path: file_name, new_line: 7 }, + { new_path: file_name, new_line: 7 } ] expect_positions(old_position_attrs, new_position_attrs) diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb index b300feaabe1..3f79eaf7afb 100644 --- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb +++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb @@ -143,6 +143,7 @@ describe Gitlab::Email::Handler::CreateNoteHandler, lib: true do expect(new_note.author).to eq(sent_notification.recipient) expect(new_note.position).to eq(note.position) expect(new_note.note).to include("I could not disagree more.") + expect(new_note.in_reply_to?(note)).to be_truthy end it "adds all attachments" do diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb index 2a86b427806..c6e3524f743 100644 --- a/spec/lib/gitlab/email/receiver_spec.rb +++ b/spec/lib/gitlab/email/receiver_spec.rb @@ -4,12 +4,38 @@ require_relative 'email_shared_blocks' describe Gitlab::Email::Receiver, lib: true do include_context :email_shared_context + context "when the email contains a valid email address in a Delivered-To header" do + let(:email_raw) { fixture_file('emails/forwarded_new_issue.eml') } + let(:handler) { double(:handler) } + + before do + stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.adventuretime.ooo") + + allow(handler).to receive(:execute) + allow(handler).to receive(:metrics_params) + end + + it "finds the mail 
key" do + expect(Gitlab::Email::Handler).to receive(:for).with(an_instance_of(Mail::Message), 'gitlabhq/gitlabhq+auth_token').and_return(handler) + + receiver.execute + end + end + context "when we cannot find a capable handler" do let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub(mail_key, "!!!") } - it "raises a UnknownIncomingEmail" do + it "raises an UnknownIncomingEmail error" do expect { receiver.execute }.to raise_error(Gitlab::Email::UnknownIncomingEmail) end + + context "and the email contains no references header" do + let(:email_raw) { fixture_file("emails/auto_reply.eml").gsub(mail_key, "!!!") } + + it "raises an UnknownIncomingEmail error" do + expect { receiver.execute }.to raise_error(Gitlab::Email::UnknownIncomingEmail) + end + end end context "when the email is blank" do diff --git a/spec/lib/gitlab/etag_caching/middleware_spec.rb b/spec/lib/gitlab/etag_caching/middleware_spec.rb index 8b5bfc4dbb0..24df04e985a 100644 --- a/spec/lib/gitlab/etag_caching/middleware_spec.rb +++ b/spec/lib/gitlab/etag_caching/middleware_spec.rb @@ -47,9 +47,9 @@ describe Gitlab::EtagCaching::Middleware do it 'tracks "etag_caching_key_not_found" event' do expect(Gitlab::Metrics).to receive(:add_event) - .with(:etag_caching_middleware_used) + .with(:etag_caching_middleware_used, endpoint: 'issue_notes') expect(Gitlab::Metrics).to receive(:add_event) - .with(:etag_caching_key_not_found) + .with(:etag_caching_key_not_found, endpoint: 'issue_notes') middleware.call(build_env(path, if_none_match)) end @@ -91,14 +91,33 @@ describe Gitlab::EtagCaching::Middleware do expect(status).to eq 304 end + it 'returns empty body' do + _, _, body = middleware.call(build_env(path, if_none_match)) + + expect(body).to be_empty + end + it 'tracks "etag_caching_cache_hit" event' do expect(Gitlab::Metrics).to receive(:add_event) - .with(:etag_caching_middleware_used) + .with(:etag_caching_middleware_used, endpoint: 'issue_notes') expect(Gitlab::Metrics).to receive(:add_event) - .with(:etag_caching_cache_hit) + .with(:etag_caching_cache_hit, endpoint: 'issue_notes') middleware.call(build_env(path, if_none_match)) end + + context 'when polling is disabled' do + before do + allow(Gitlab::PollingInterval).to receive(:polling_enabled?). 
+ and_return(false) + end + + it 'returns status code 429' do + status, _, _ = middleware.call(build_env(path, if_none_match)) + + expect(status).to eq 429 + end + end end context 'when If-None-Match header does not match ETag in store' do @@ -119,9 +138,9 @@ describe Gitlab::EtagCaching::Middleware do mock_app_response expect(Gitlab::Metrics).to receive(:add_event) - .with(:etag_caching_middleware_used) + .with(:etag_caching_middleware_used, endpoint: 'issue_notes') expect(Gitlab::Metrics).to receive(:add_event) - .with(:etag_caching_resource_changed) + .with(:etag_caching_resource_changed, endpoint: 'issue_notes') middleware.call(build_env(path, if_none_match)) end @@ -137,9 +156,9 @@ describe Gitlab::EtagCaching::Middleware do it 'tracks "etag_caching_header_missing" event' do expect(Gitlab::Metrics).to receive(:add_event) - .with(:etag_caching_middleware_used) + .with(:etag_caching_middleware_used, endpoint: 'issue_notes') expect(Gitlab::Metrics).to receive(:add_event) - .with(:etag_caching_header_missing) + .with(:etag_caching_header_missing, endpoint: 'issue_notes') middleware.call(build_env(path, if_none_match)) end diff --git a/spec/lib/gitlab/etag_caching/router_spec.rb b/spec/lib/gitlab/etag_caching/router_spec.rb new file mode 100644 index 00000000000..5ae4a19263c --- /dev/null +++ b/spec/lib/gitlab/etag_caching/router_spec.rb @@ -0,0 +1,83 @@ +require 'spec_helper' + +describe Gitlab::EtagCaching::Router do + it 'matches issue notes endpoint' do + env = build_env( + '/my-group/and-subgroup/here-comes-the-project/noteable/issue/1/notes' + ) + + result = described_class.match(env) + + expect(result).to be_present + expect(result.name).to eq 'issue_notes' + end + + it 'matches issue title endpoint' do + env = build_env( + '/my-group/my-project/issues/123/realtime_changes' + ) + + result = described_class.match(env) + + expect(result).to be_present + expect(result.name).to eq 'issue_title' + end + + it 'matches project pipelines endpoint' do + env = build_env( + '/my-group/my-project/pipelines.json' + ) + + result = described_class.match(env) + + expect(result).to be_present + expect(result.name).to eq 'project_pipelines' + end + + it 'matches commit pipelines endpoint' do + env = build_env( + '/my-group/my-project/commit/aa8260d253a53f73f6c26c734c72fdd600f6e6d4/pipelines.json' + ) + + result = described_class.match(env) + + expect(result).to be_present + expect(result.name).to eq 'commit_pipelines' + end + + it 'matches new merge request pipelines endpoint' do + env = build_env( + '/my-group/my-project/merge_requests/new.json' + ) + + result = described_class.match(env) + + expect(result).to be_present + expect(result.name).to eq 'new_merge_request_pipelines' + end + + it 'matches merge request pipelines endpoint' do + env = build_env( + '/my-group/my-project/merge_requests/234/pipelines.json' + ) + + result = described_class.match(env) + + expect(result).to be_present + expect(result.name).to eq 'merge_request_pipelines' + end + + it 'does not match blob with confusing name' do + env = build_env( + '/my-group/my-project/blob/master/pipelines.json' + ) + + result = described_class.match(env) + + expect(result).to be_blank + end + + def build_env(path) + { 'PATH_INFO' => path } + end +end diff --git a/spec/lib/gitlab/file_finder_spec.rb b/spec/lib/gitlab/file_finder_spec.rb new file mode 100644 index 00000000000..5a32ffd462c --- /dev/null +++ b/spec/lib/gitlab/file_finder_spec.rb @@ -0,0 +1,21 @@ +require 'spec_helper' + +describe Gitlab::FileFinder, lib: true do + describe '#find' 
do + let(:project) { create(:project, :public, :repository) } + let(:finder) { described_class.new(project, project.default_branch) } + + it 'finds by name' do + results = finder.find('files') + expect(results.map(&:first)).to include('files/images/wm.svg') + end + + it 'finds by content' do + results = finder.find('files') + + blob = results.select { |result| result.first == "CHANGELOG" }.flatten.last + + expect(blob.filename).to eq("CHANGELOG") + end + end +end diff --git a/spec/lib/gitlab/git/attributes_spec.rb b/spec/lib/gitlab/git/attributes_spec.rb index 9c011e34c11..1cfd8db09a5 100644 --- a/spec/lib/gitlab/git/attributes_spec.rb +++ b/spec/lib/gitlab/git/attributes_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe Gitlab::Git::Attributes, seed_helper: true do let(:path) do - File.join(SEED_REPOSITORY_PATH, 'with-git-attributes.git') + File.join(SEED_STORAGE_PATH, 'with-git-attributes.git') end subject { described_class.new(path) } @@ -141,7 +141,7 @@ describe Gitlab::Git::Attributes, seed_helper: true do end it 'does not yield when the attributes file has an unsupported encoding' do - path = File.join(SEED_REPOSITORY_PATH, 'with-invalid-git-attributes.git') + path = File.join(SEED_STORAGE_PATH, 'with-invalid-git-attributes.git') attrs = described_class.new(path) expect { |b| attrs.each_line(&b) }.not_to yield_control diff --git a/spec/lib/gitlab/git/blame_spec.rb b/spec/lib/gitlab/git/blame_spec.rb index e169f5af6b6..8b041ac69b1 100644 --- a/spec/lib/gitlab/git/blame_spec.rb +++ b/spec/lib/gitlab/git/blame_spec.rb @@ -2,7 +2,7 @@ require "spec_helper" describe Gitlab::Git::Blame, seed_helper: true do - let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) } + let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) } let(:blame) do Gitlab::Git::Blame.new(repository, SeedRepo::Commit::ID, "CONTRIBUTING.md") end diff --git a/spec/lib/gitlab/git/blob_spec.rb b/spec/lib/gitlab/git/blob_spec.rb index b883526151e..e6a07a58d73 100644 --- a/spec/lib/gitlab/git/blob_spec.rb +++ b/spec/lib/gitlab/git/blob_spec.rb @@ -3,7 +3,7 @@ require "spec_helper" describe Gitlab::Git::Blob, seed_helper: true do - let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) } + let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) } describe 'initialize' do let(:blob) { Gitlab::Git::Blob.new(name: 'test') } @@ -234,7 +234,7 @@ describe Gitlab::Git::Blob, seed_helper: true do it { expect(blob.lfs_pointer?).to eq(true) } it { expect(blob.lfs_oid).to eq("4206f951d2691c78aac4c0ce9f2b23580b2c92cdcc4336e1028742c0274938e0") } - it { expect(blob.lfs_size).to eq("19548") } + it { expect(blob.lfs_size).to eq(19548) } it { expect(blob.id).to eq("f4d76af13003d1106be7ac8c5a2a3d37ddf32c2a") } it { expect(blob.name).to eq("image.jpg") } it { expect(blob.path).to eq("files/lfs/image.jpg") } @@ -273,7 +273,7 @@ describe Gitlab::Git::Blob, seed_helper: true do it { expect(blob.lfs_pointer?).to eq(false) } it { expect(blob.lfs_oid).to eq(nil) } - it { expect(blob.lfs_size).to eq("1575078") } + it { expect(blob.lfs_size).to eq(1575078) } it { expect(blob.id).to eq("5ae35296e1f95c1ef9feda1241477ed29a448572") } it { expect(blob.name).to eq("picture-invalid.png") } it { expect(blob.path).to eq("files/lfs/picture-invalid.png") } diff --git a/spec/lib/gitlab/git/branch_spec.rb b/spec/lib/gitlab/git/branch_spec.rb index 78234b396c5..9eac7660cd1 100644 --- a/spec/lib/gitlab/git/branch_spec.rb +++ b/spec/lib/gitlab/git/branch_spec.rb @@ -1,12 +1,57 @@ require "spec_helper" describe 
Gitlab::Git::Branch, seed_helper: true do - let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) } + let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) } subject { repository.branches } it { is_expected.to be_kind_of Array } + describe 'initialize' do + let(:commit_id) { 'f00' } + let(:commit_subject) { "My commit".force_encoding('ASCII-8BIT') } + let(:committer) do + Gitaly::FindLocalBranchCommitAuthor.new( + name: generate(:name), + email: generate(:email), + date: Google::Protobuf::Timestamp.new(seconds: 123) + ) + end + let(:author) do + Gitaly::FindLocalBranchCommitAuthor.new( + name: generate(:name), + email: generate(:email), + date: Google::Protobuf::Timestamp.new(seconds: 456) + ) + end + let(:gitaly_branch) do + Gitaly::FindLocalBranchResponse.new( + name: 'foo', commit_id: commit_id, commit_subject: commit_subject, + commit_author: author, commit_committer: committer + ) + end + let(:attributes) do + { + id: commit_id, + message: commit_subject, + authored_date: Time.at(author.date.seconds), + author_name: author.name, + author_email: author.email, + committed_date: Time.at(committer.date.seconds), + committer_name: committer.name, + committer_email: committer.email + } + end + let(:branch) { described_class.new(repository, 'foo', gitaly_branch) } + + it 'parses Gitaly::FindLocalBranchResponse correctly' do + expect(Gitlab::Git::Commit).to receive(:decorate). + with(hash_including(attributes)).and_call_original + + expect(branch.dereferenced_target.message.encoding).to be(Encoding::UTF_8) + end + end + describe '#size' do subject { super().size } it { is_expected.to eq(SeedRepo::Repo::BRANCHES.size) } diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb index 5cf4631fbfc..3e44c577643 100644 --- a/spec/lib/gitlab/git/commit_spec.rb +++ b/spec/lib/gitlab/git/commit_spec.rb @@ -1,7 +1,7 @@ require "spec_helper" describe Gitlab::Git::Commit, seed_helper: true do - let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) } + let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) } let(:commit) { Gitlab::Git::Commit.find(repository, SeedRepo::Commit::ID) } let(:rugged_commit) do repository.rugged.lookup(SeedRepo::Commit::ID) @@ -9,7 +9,7 @@ describe Gitlab::Git::Commit, seed_helper: true do describe "Commit info" do before do - repo = Gitlab::Git::Repository.new(TEST_REPO_PATH).rugged + repo = Gitlab::Git::Repository.new('default', TEST_REPO_PATH).rugged @committer = { email: 'mike@smith.com', @@ -59,7 +59,7 @@ describe Gitlab::Git::Commit, seed_helper: true do after do # Erase the new commit so other tests get the original repo - repo = Gitlab::Git::Repository.new(TEST_REPO_PATH).rugged + repo = Gitlab::Git::Repository.new('default', TEST_REPO_PATH).rugged repo.references.update("refs/heads/master", SeedRepo::LastCommit::ID) end end @@ -95,7 +95,7 @@ describe Gitlab::Git::Commit, seed_helper: true do end context 'with broken repo' do - let(:repository) { Gitlab::Git::Repository.new(TEST_BROKEN_REPO_PATH) } + let(:repository) { Gitlab::Git::Repository.new('default', TEST_BROKEN_REPO_PATH) } it 'returns nil' do expect(Gitlab::Git::Commit.find(repository, SeedRepo::Commit::ID)).to be_nil diff --git a/spec/lib/gitlab/git/compare_spec.rb b/spec/lib/gitlab/git/compare_spec.rb index e28debe1494..7c45071ec45 100644 --- a/spec/lib/gitlab/git/compare_spec.rb +++ b/spec/lib/gitlab/git/compare_spec.rb @@ -1,7 +1,7 @@ require "spec_helper" describe Gitlab::Git::Compare, seed_helper: true do - 
let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) } + let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) } let(:compare) { Gitlab::Git::Compare.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, false) } let(:compare_straight) { Gitlab::Git::Compare.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, true) } diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb index 992126ef153..4189aaef643 100644 --- a/spec/lib/gitlab/git/diff_spec.rb +++ b/spec/lib/gitlab/git/diff_spec.rb @@ -1,7 +1,7 @@ require "spec_helper" describe Gitlab::Git::Diff, seed_helper: true do - let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) } + let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) } before do @raw_diff_hash = { @@ -120,7 +120,7 @@ EOT new_mode: 0100644, from_id: '357406f3075a57708d0163752905cc1576fceacc', to_id: '8e5177d718c561d36efde08bad36b43687ee6bf0', - raw_chunks: raw_chunks, + raw_chunks: raw_chunks ) ) end diff --git a/spec/lib/gitlab/git/encoding_helper_spec.rb b/spec/lib/gitlab/git/encoding_helper_spec.rb index 83311536893..1a3bf802a07 100644 --- a/spec/lib/gitlab/git/encoding_helper_spec.rb +++ b/spec/lib/gitlab/git/encoding_helper_spec.rb @@ -2,7 +2,7 @@ require "spec_helper" describe Gitlab::Git::EncodingHelper do let(:ext_class) { Class.new { extend Gitlab::Git::EncodingHelper } } - let(:binary_string) { File.join(SEED_REPOSITORY_PATH, 'gitlab_logo.png') } + let(:binary_string) { File.join(SEED_STORAGE_PATH, 'gitlab_logo.png') } describe '#encode!' do [ @@ -19,8 +19,8 @@ describe Gitlab::Git::EncodingHelper do [ 'removes invalid bytes from ASCII-8bit encoded multibyte string. This can occur when a git diff match line truncates in the middle of a multibyte character. This occurs after the second word in this example. The test string is as short as we can get while still triggering the error condition when not looking at `detect[:confidence]`.', "mu ns\xC3\n Lorem ipsum dolor sit amet, consectetur adipisicing ut\xC3\xA0y\xC3\xB9abcd\xC3\xB9efg kia elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non p\n {: .normal_pn}\n \n-Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in\n# *Lorem ipsum\xC3\xB9l\xC3\xB9l\xC3\xA0 dolor\xC3\xB9k\xC3\xB9 sit\xC3\xA8b\xC3\xA8 N\xC3\xA8 amet b\xC3\xA0d\xC3\xAC*\n+# *consectetur\xC3\xB9l\xC3\xB9l\xC3\xA0 adipisicing\xC3\xB9k\xC3\xB9 elit\xC3\xA8b\xC3\xA8 N\xC3\xA8 sed do\xC3\xA0d\xC3\xAC*{: .italic .smcaps}\n \n \xEF\x9B\xA1 eiusmod tempor incididunt, ut\xC3\xAAn\xC3\xB9 labore et dolore. Tw\xC4\x83nj\xC3\xAC magna aliqua. Ut enim ad minim veniam\n {: .normal}\n@@ -9,5 +9,5 @@ quis nostrud\xC3\xAAt\xC3\xB9 exercitiation ullamco laboris m\xC3\xB9s\xC3\xB9k\xC3\xB9abc\xC3\xB9 nisi ".force_encoding('ASCII-8BIT'), - "mu ns\n Lorem ipsum dolor sit amet, consectetur adipisicing ut\xC3\xA0y\xC3\xB9abcd\xC3\xB9efg kia elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. 
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non p\n {: .normal_pn}\n \n-Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in\n# *Lorem ipsum\xC3\xB9l\xC3\xB9l\xC3\xA0 dolor\xC3\xB9k\xC3\xB9 sit\xC3\xA8b\xC3\xA8 N\xC3\xA8 amet b\xC3\xA0d\xC3\xAC*\n+# *consectetur\xC3\xB9l\xC3\xB9l\xC3\xA0 adipisicing\xC3\xB9k\xC3\xB9 elit\xC3\xA8b\xC3\xA8 N\xC3\xA8 sed do\xC3\xA0d\xC3\xAC*{: .italic .smcaps}\n \n \xEF\x9B\xA1 eiusmod tempor incididunt, ut\xC3\xAAn\xC3\xB9 labore et dolore. Tw\xC4\x83nj\xC3\xAC magna aliqua. Ut enim ad minim veniam\n {: .normal}\n@@ -9,5 +9,5 @@ quis nostrud\xC3\xAAt\xC3\xB9 exercitiation ullamco laboris m\xC3\xB9s\xC3\xB9k\xC3\xB9abc\xC3\xB9 nisi ", - ], + "mu ns\n Lorem ipsum dolor sit amet, consectetur adipisicing ut\xC3\xA0y\xC3\xB9abcd\xC3\xB9efg kia elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non p\n {: .normal_pn}\n \n-Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in\n# *Lorem ipsum\xC3\xB9l\xC3\xB9l\xC3\xA0 dolor\xC3\xB9k\xC3\xB9 sit\xC3\xA8b\xC3\xA8 N\xC3\xA8 amet b\xC3\xA0d\xC3\xAC*\n+# *consectetur\xC3\xB9l\xC3\xB9l\xC3\xA0 adipisicing\xC3\xB9k\xC3\xB9 elit\xC3\xA8b\xC3\xA8 N\xC3\xA8 sed do\xC3\xA0d\xC3\xAC*{: .italic .smcaps}\n \n \xEF\x9B\xA1 eiusmod tempor incididunt, ut\xC3\xAAn\xC3\xB9 labore et dolore. Tw\xC4\x83nj\xC3\xAC magna aliqua. Ut enim ad minim veniam\n {: .normal}\n@@ -9,5 +9,5 @@ quis nostrud\xC3\xAAt\xC3\xB9 exercitiation ullamco laboris m\xC3\xB9s\xC3\xB9k\xC3\xB9abc\xC3\xB9 nisi " + ] ].each do |description, test_string, xpect| it description do expect(ext_class.encode!(test_string)).to eq(xpect) @@ -37,18 +37,18 @@ describe Gitlab::Git::EncodingHelper do [ "encodes valid utf8 encoded string to utf8", "λ, λ, λ".encode("UTF-8"), - "λ, λ, λ".encode("UTF-8"), + "λ, λ, λ".encode("UTF-8") ], [ "encodes valid ASCII-8BIT encoded string to utf8", "ascii only".encode("ASCII-8BIT"), - "ascii only".encode("UTF-8"), + "ascii only".encode("UTF-8") ], [ "encodes valid ISO-8859-1 encoded string to utf8", "Rüby ist eine Programmiersprache. Wir verlängern den text damit ICU die Sprache erkennen kann.".encode("ISO-8859-1", "UTF-8"), - "Rüby ist eine Programmiersprache. Wir verlängern den text damit ICU die Sprache erkennen kann.".encode("UTF-8"), - ], + "Rüby ist eine Programmiersprache. 
Wir verlängern den text damit ICU die Sprache erkennen kann.".encode("UTF-8") + ] ].each do |description, test_string, xpect| it description do r = ext_class.encode_utf8(test_string.force_encoding('UTF-8')) @@ -56,6 +56,10 @@ describe Gitlab::Git::EncodingHelper do expect(r.encoding.name).to eq('UTF-8') end end + + it 'returns empty string on conversion errors' do + expect { ext_class.encode_utf8('') }.not_to raise_error(ArgumentError) + end end describe '#clean' do @@ -73,8 +77,8 @@ describe Gitlab::Git::EncodingHelper do [ 'removes invalid bytes from ASCII-8bit encoded multibyte string.', "Lorem ipsum\xC3\n dolor sit amet, xy\xC3\xA0y\xC3\xB9abcd\xC3\xB9efg".force_encoding('ASCII-8BIT'), - "Lorem ipsum\n dolor sit amet, xyàyùabcdùefg", - ], + "Lorem ipsum\n dolor sit amet, xyàyùabcdùefg" + ] ].each do |description, test_string, xpect| it description do expect(ext_class.encode!(test_string)).to eq(xpect) diff --git a/spec/lib/gitlab/git/env_spec.rb b/spec/lib/gitlab/git/env_spec.rb new file mode 100644 index 00000000000..d9df99bfe05 --- /dev/null +++ b/spec/lib/gitlab/git/env_spec.rb @@ -0,0 +1,102 @@ +require 'spec_helper' + +describe Gitlab::Git::Env do + describe "#set" do + context 'with RequestStore.store disabled' do + before do + allow(RequestStore).to receive(:active?).and_return(false) + end + + it 'does not store anything' do + described_class.set(GIT_OBJECT_DIRECTORY: 'foo') + + expect(described_class.all).to be_empty + end + end + + context 'with RequestStore.store enabled' do + before do + allow(RequestStore).to receive(:active?).and_return(true) + end + + it 'whitelist some `GIT_*` variables and stores them using RequestStore' do + described_class.set( + GIT_OBJECT_DIRECTORY: 'foo', + GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar', + GIT_EXEC_PATH: 'baz', + PATH: '~/.bin:/bin') + + expect(described_class[:GIT_OBJECT_DIRECTORY]).to eq('foo') + expect(described_class[:GIT_ALTERNATE_OBJECT_DIRECTORIES]).to eq('bar') + expect(described_class[:GIT_EXEC_PATH]).to be_nil + expect(described_class[:bar]).to be_nil + end + end + end + + describe "#all" do + context 'with RequestStore.store enabled' do + before do + allow(RequestStore).to receive(:active?).and_return(true) + described_class.set( + GIT_OBJECT_DIRECTORY: 'foo', + GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar') + end + + it 'returns an env hash' do + expect(described_class.all).to eq({ + 'GIT_OBJECT_DIRECTORY' => 'foo', + 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar' + }) + end + end + end + + describe "#[]" do + context 'with RequestStore.store enabled' do + before do + allow(RequestStore).to receive(:active?).and_return(true) + end + + before do + described_class.set( + GIT_OBJECT_DIRECTORY: 'foo', + GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar') + end + + it 'returns a stored value for an existing key' do + expect(described_class[:GIT_OBJECT_DIRECTORY]).to eq('foo') + end + + it 'returns nil for an non-existing key' do + expect(described_class[:foo]).to be_nil + end + end + end + + describe 'thread-safety' do + context 'with RequestStore.store enabled' do + before do + allow(RequestStore).to receive(:active?).and_return(true) + described_class.set(GIT_OBJECT_DIRECTORY: 'foo') + end + + it 'is thread-safe' do + another_thread = Thread.new do + described_class.set(GIT_OBJECT_DIRECTORY: 'bar') + + Thread.stop + described_class[:GIT_OBJECT_DIRECTORY] + end + + # Ensure another_thread runs first + sleep 0.1 until another_thread.stop? 
+ + expect(described_class[:GIT_OBJECT_DIRECTORY]).to eq('foo') + + another_thread.run + expect(another_thread.value).to eq('bar') + end + end + end +end diff --git a/spec/lib/gitlab/git/index_spec.rb b/spec/lib/gitlab/git/index_spec.rb index d0c7ca60ddc..21b71654251 100644 --- a/spec/lib/gitlab/git/index_spec.rb +++ b/spec/lib/gitlab/git/index_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe Gitlab::Git::Index, seed_helper: true do - let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) } + let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) } let(:index) { described_class.new(repository) } before do @@ -33,7 +33,7 @@ describe Gitlab::Git::Index, seed_helper: true do end it 'raises an error' do - expect { index.create(options) }.to raise_error('Filename already exists') + expect { index.create(options) }.to raise_error('A file with this name already exists') end end @@ -89,7 +89,7 @@ describe Gitlab::Git::Index, seed_helper: true do end it 'raises an error' do - expect { index.create_dir(options) }.to raise_error('Directory already exists as a file') + expect { index.create_dir(options) }.to raise_error('A file with this name already exists') end end @@ -99,7 +99,7 @@ describe Gitlab::Git::Index, seed_helper: true do end it 'raises an error' do - expect { index.create_dir(options) }.to raise_error('Directory already exists') + expect { index.create_dir(options) }.to raise_error('A directory with this name already exists') end end end @@ -118,7 +118,7 @@ describe Gitlab::Git::Index, seed_helper: true do end it 'raises an error' do - expect { index.update(options) }.to raise_error("File doesn't exist") + expect { index.update(options) }.to raise_error("A file with this name doesn't exist") end end @@ -156,7 +156,15 @@ describe Gitlab::Git::Index, seed_helper: true do it 'raises an error' do options[:previous_path] = 'documents/story.txt' - expect { index.move(options) }.to raise_error("File doesn't exist") + expect { index.move(options) }.to raise_error("A file with this name doesn't exist") + end + end + + context 'when a file at the new path already exists' do + it 'raises an error' do + options[:file_path] = 'CHANGELOG' + + expect { index.move(options) }.to raise_error("A file with this name already exists") end end @@ -203,7 +211,7 @@ describe Gitlab::Git::Index, seed_helper: true do end it 'raises an error' do - expect { index.delete(options) }.to raise_error("File doesn't exist") + expect { index.delete(options) }.to raise_error("A file with this name doesn't exist") end end diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index d4b7684adfd..cb107c6d1f9 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -3,7 +3,7 @@ require "spec_helper" describe Gitlab::Git::Repository, seed_helper: true do include Gitlab::Git::EncodingHelper - let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) } + let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) } describe "Respond to" do subject { repository } @@ -14,6 +14,69 @@ describe Gitlab::Git::Repository, seed_helper: true do it { is_expected.to respond_to(:tags) } end + describe '#root_ref' do + context 'with gitaly disabled' do + before { allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(false) } + + it 'calls #discover_default_branch' do + expect(repository).to receive(:discover_default_branch) + repository.root_ref + end + end + + context 'with gitaly enabled' do + before 
{ stub_gitaly } + after { Gitlab::GitalyClient.clear_stubs! } + + it 'gets the branch name from GitalyClient' do + expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:default_branch_name) + repository.root_ref + end + + it 'wraps GRPC not found' do + expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:default_branch_name). + and_raise(GRPC::NotFound) + expect { repository.root_ref }.to raise_error(Gitlab::Git::Repository::NoRepository) + end + + it 'wraps GRPC exceptions' do + expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:default_branch_name). + and_raise(GRPC::Unknown) + expect { repository.root_ref }.to raise_error(Gitlab::Git::CommandError) + end + end + end + + describe "#rugged" do + context 'with no Git env stored' do + before do + expect(Gitlab::Git::Env).to receive(:all).and_return({}) + end + + it "whitelist some variables and pass them via the alternates keyword argument" do + expect(Rugged::Repository).to receive(:new).with(repository.path, alternates: []) + + repository.rugged + end + end + + context 'with some Git env stored' do + before do + expect(Gitlab::Git::Env).to receive(:all).and_return({ + 'GIT_OBJECT_DIRECTORY' => 'foo', + 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar', + 'GIT_OTHER' => 'another_env' + }) + end + + it "whitelist some variables and pass them via the alternates keyword argument" do + expect(Rugged::Repository).to receive(:new).with(repository.path, alternates: %w[foo bar]) + + repository.rugged + end + end + end + describe "#discover_default_branch" do let(:master) { 'master' } let(:feature) { 'feature' } @@ -55,6 +118,28 @@ describe Gitlab::Git::Repository, seed_helper: true do end it { is_expected.to include("master") } it { is_expected.not_to include("branch-from-space") } + + context 'with gitaly enabled' do + before { stub_gitaly } + after { Gitlab::GitalyClient.clear_stubs! } + + it 'gets the branch names from GitalyClient' do + expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:branch_names) + subject + end + + it 'wraps GRPC not found' do + expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:branch_names). + and_raise(GRPC::NotFound) + expect { subject }.to raise_error(Gitlab::Git::Repository::NoRepository) + end + + it 'wraps GRPC other exceptions' do + expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:branch_names). + and_raise(GRPC::Unknown) + expect { subject }.to raise_error(Gitlab::Git::CommandError) + end + end end describe '#tag_names' do @@ -71,6 +156,28 @@ describe Gitlab::Git::Repository, seed_helper: true do end it { is_expected.to include("v1.0.0") } it { is_expected.not_to include("v5.0.0") } + + context 'with gitaly enabled' do + before { stub_gitaly } + after { Gitlab::GitalyClient.clear_stubs! } + + it 'gets the tag names from GitalyClient' do + expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:tag_names) + subject + end + + it 'wraps GRPC not found' do + expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:tag_names). + and_raise(GRPC::NotFound) + expect { subject }.to raise_error(Gitlab::Git::Repository::NoRepository) + end + + it 'wraps GRPC exceptions' do + expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:tag_names). 
+ and_raise(GRPC::Unknown) + expect { subject }.to raise_error(Gitlab::Git::CommandError) + end + end end shared_examples 'archive check' do |extenstion| @@ -221,7 +328,7 @@ describe Gitlab::Git::Repository, seed_helper: true do end context '#submodules' do - let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) } + let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) } context 'where repo has submodules' do let(:submodules) { repository.submodules('master') } @@ -290,9 +397,9 @@ describe Gitlab::Git::Repository, seed_helper: true do end describe "#reset" do - change_path = File.join(TEST_NORMAL_REPO_PATH, "CHANGELOG") - untracked_path = File.join(TEST_NORMAL_REPO_PATH, "UNTRACKED") - tracked_path = File.join(TEST_NORMAL_REPO_PATH, "files", "ruby", "popen.rb") + change_path = File.join(SEED_STORAGE_PATH, TEST_NORMAL_REPO_PATH, "CHANGELOG") + untracked_path = File.join(SEED_STORAGE_PATH, TEST_NORMAL_REPO_PATH, "UNTRACKED") + tracked_path = File.join(SEED_STORAGE_PATH, TEST_NORMAL_REPO_PATH, "files", "ruby", "popen.rb") change_text = "New changelog text" untracked_text = "This file is untracked" @@ -311,7 +418,7 @@ describe Gitlab::Git::Repository, seed_helper: true do f.write(untracked_text) end - @normal_repo = Gitlab::Git::Repository.new(TEST_NORMAL_REPO_PATH) + @normal_repo = Gitlab::Git::Repository.new('default', TEST_NORMAL_REPO_PATH) @normal_repo.reset("HEAD", :hard) end @@ -354,7 +461,7 @@ describe Gitlab::Git::Repository, seed_helper: true do context "-b" do before(:all) do - @normal_repo = Gitlab::Git::Repository.new(TEST_NORMAL_REPO_PATH) + @normal_repo = Gitlab::Git::Repository.new('default', TEST_NORMAL_REPO_PATH) @normal_repo.checkout(new_branch, { b: true }, "origin/feature") end @@ -382,7 +489,7 @@ describe Gitlab::Git::Repository, seed_helper: true do context "without -b" do context "and specifying a nonexistent branch" do it "should not do anything" do - normal_repo = Gitlab::Git::Repository.new(TEST_NORMAL_REPO_PATH) + normal_repo = Gitlab::Git::Repository.new('default', TEST_NORMAL_REPO_PATH) expect { normal_repo.checkout(new_branch) }.to raise_error(Rugged::ReferenceError) expect(normal_repo.rugged.branches[new_branch]).to be_nil @@ -402,7 +509,7 @@ describe Gitlab::Git::Repository, seed_helper: true do context "and with a valid branch" do before(:all) do - @normal_repo = Gitlab::Git::Repository.new(TEST_NORMAL_REPO_PATH) + @normal_repo = Gitlab::Git::Repository.new('default', TEST_NORMAL_REPO_PATH) @normal_repo.rugged.branches.create("feature", "origin/feature") @normal_repo.checkout("feature") end @@ -414,13 +521,13 @@ describe Gitlab::Git::Repository, seed_helper: true do end it "should update the working directory" do - File.open(File.join(TEST_NORMAL_REPO_PATH, ".gitignore"), "r") do |f| + File.open(File.join(SEED_STORAGE_PATH, TEST_NORMAL_REPO_PATH, ".gitignore"), "r") do |f| expect(f.read.each_line.to_a).not_to include(".DS_Store\n") end end after(:all) do - FileUtils.rm_rf(TEST_NORMAL_REPO_PATH) + FileUtils.rm_rf(SEED_STORAGE_PATH, TEST_NORMAL_REPO_PATH) ensure_seeds end end @@ -429,7 +536,7 @@ describe Gitlab::Git::Repository, seed_helper: true do describe "#delete_branch" do before(:all) do - @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH) + @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH) @repo.delete_branch("feature") end @@ -449,7 +556,7 @@ describe Gitlab::Git::Repository, seed_helper: true do describe "#create_branch" do before(:all) do - @repo = 
Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH) + @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH) end it "should create a new branch" do @@ -496,7 +603,7 @@ describe Gitlab::Git::Repository, seed_helper: true do describe "#remote_delete" do before(:all) do - @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH) + @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH) @repo.remote_delete("expendable") end @@ -512,7 +619,7 @@ describe Gitlab::Git::Repository, seed_helper: true do describe "#remote_add" do before(:all) do - @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH) + @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH) @repo.remote_add("new_remote", SeedHelper::GITLAB_GIT_TEST_REPO_URL) end @@ -528,7 +635,7 @@ describe Gitlab::Git::Repository, seed_helper: true do describe "#remote_update" do before(:all) do - @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH) + @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH) @repo.remote_update("expendable", url: TEST_NORMAL_REPO_PATH) end @@ -551,7 +658,7 @@ describe Gitlab::Git::Repository, seed_helper: true do before(:context) do # Add new commits so that there's a renamed file in the commit history - repo = Gitlab::Git::Repository.new(TEST_REPO_PATH).rugged + repo = Gitlab::Git::Repository.new('default', TEST_REPO_PATH).rugged commit_with_old_name = new_commit_edit_old_file(repo) rename_commit = new_commit_move_file(repo) @@ -560,7 +667,7 @@ describe Gitlab::Git::Repository, seed_helper: true do after(:context) do # Erase our commits so other tests get the original repo - repo = Gitlab::Git::Repository.new(TEST_REPO_PATH).rugged + repo = Gitlab::Git::Repository.new('default', TEST_REPO_PATH).rugged repo.references.update("refs/heads/master", SeedRepo::LastCommit::ID) end @@ -885,7 +992,7 @@ describe Gitlab::Git::Repository, seed_helper: true do describe '#autocrlf' do before(:all) do - @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH) + @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH) @repo.rugged.config['core.autocrlf'] = true end @@ -900,14 +1007,14 @@ describe Gitlab::Git::Repository, seed_helper: true do describe '#autocrlf=' do before(:all) do - @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH) + @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH) @repo.rugged.config['core.autocrlf'] = false end it 'should set the autocrlf option to the provided option' do @repo.autocrlf = :input - File.open(File.join(TEST_MUTABLE_REPO_PATH, '.git', 'config')) do |config_file| + File.open(File.join(SEED_STORAGE_PATH, TEST_MUTABLE_REPO_PATH, '.git', 'config')) do |config_file| expect(config_file.read).to match('autocrlf = input') end end @@ -942,12 +1049,65 @@ describe Gitlab::Git::Repository, seed_helper: true do end end + describe '#ref_name_for_sha' do + let(:ref_path) { 'refs/heads' } + let(:sha) { repository.find_branch('master').dereferenced_target.id } + let(:ref_name) { 'refs/heads/master' } + + it 'returns the ref name for the given sha' do + expect(repository.ref_name_for_sha(ref_path, sha)).to eq(ref_name) + end + + it "returns an empty name if the ref doesn't exist" do + expect(repository.ref_name_for_sha(ref_path, "000000")).to eq("") + end + + it "raise an exception if the ref is empty" do + expect { repository.ref_name_for_sha(ref_path, "") }.to raise_error(ArgumentError) + end + + it "raise an exception if the ref is nil" do + expect { repository.ref_name_for_sha(ref_path, 
nil) }.to raise_error(ArgumentError) + end + end + + describe '#find_commits' do + it 'should return a return a collection of commits' do + commits = repository.find_commits + + expect(commits).not_to be_empty + expect(commits).to all( be_a_kind_of(Gitlab::Git::Commit) ) + end + + context 'while applying a sort order based on the `order` option' do + it "allows ordering topologically (no parents shown before their children)" do + expect_any_instance_of(Rugged::Walker).to receive(:sorting).with(Rugged::SORT_TOPO) + + repository.find_commits(order: :topo) + end + + it "allows ordering by date" do + expect_any_instance_of(Rugged::Walker).to receive(:sorting).with(Rugged::SORT_DATE | Rugged::SORT_TOPO) + + repository.find_commits(order: :date) + end + + it "applies no sorting by default" do + expect_any_instance_of(Rugged::Walker).to receive(:sorting).with(Rugged::SORT_NONE) + + repository.find_commits + end + end + end + describe '#branches with deleted branch' do before(:each) do ref = double() allow(ref).to receive(:name) { 'bad-branch' } allow(ref).to receive(:target) { raise Rugged::ReferenceError } - allow(repository.rugged).to receive(:branches) { [ref] } + branches = double() + allow(branches).to receive(:each) { [ref].each } + allow(repository.rugged).to receive(:branches) { branches } end it 'should return empty branches' do @@ -956,20 +1116,8 @@ describe Gitlab::Git::Repository, seed_helper: true do end describe '#branch_count' do - before(:each) do - valid_ref = double(:ref) - invalid_ref = double(:ref) - - allow(valid_ref).to receive_messages(name: 'master', target: double(:target)) - - allow(invalid_ref).to receive_messages(name: 'bad-branch') - allow(invalid_ref).to receive(:target) { raise Rugged::ReferenceError } - - allow(repository.rugged).to receive_messages(branches: [valid_ref, invalid_ref]) - end - it 'returns the number of branches' do - expect(repository.branch_count).to eq(1) + expect(repository.branch_count).to eq(9) end end @@ -999,7 +1147,7 @@ describe Gitlab::Git::Repository, seed_helper: true do end describe "#copy_gitattributes" do - let(:attributes_path) { File.join(TEST_REPO_PATH, 'info/attributes') } + let(:attributes_path) { File.join(SEED_STORAGE_PATH, TEST_REPO_PATH, 'info/attributes') } it "raises an error with invalid ref" do expect { repository.copy_gitattributes("invalid") }.to raise_error(Gitlab::Git::Repository::InvalidRef) @@ -1075,7 +1223,7 @@ describe Gitlab::Git::Repository, seed_helper: true do end describe '#diffable' do - info_dir_path = attributes_path = File.join(TEST_REPO_PATH, 'info') + info_dir_path = attributes_path = File.join(SEED_STORAGE_PATH, TEST_REPO_PATH, 'info') attributes_path = File.join(info_dir_path, 'attributes') before(:all) do @@ -1143,7 +1291,7 @@ describe Gitlab::Git::Repository, seed_helper: true do describe '#local_branches' do before(:all) do - @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH) + @repo = Gitlab::Git::Repository.new('default', File.join(TEST_MUTABLE_REPO_PATH, '.git')) end after(:all) do @@ -1158,6 +1306,29 @@ describe Gitlab::Git::Repository, seed_helper: true do expect(@repo.local_branches.any? { |branch| branch.name == 'remote_branch' }).to eq(false) expect(@repo.local_branches.any? { |branch| branch.name == 'local_branch' }).to eq(true) end + + context 'with gitaly enabled' do + before { stub_gitaly } + after { Gitlab::GitalyClient.clear_stubs! } + + it 'gets the branches from GitalyClient' do + expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches). 
+ and_return([]) + @repo.local_branches + end + + it 'wraps GRPC not found' do + expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches). + and_raise(GRPC::NotFound) + expect { @repo.local_branches }.to raise_error(Gitlab::Git::Repository::NoRepository) + end + + it 'wraps GRPC exceptions' do + expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches). + and_raise(GRPC::Unknown) + expect { @repo.local_branches }.to raise_error(Gitlab::Git::CommandError) + end + end end def create_remote_branch(remote_name, branch_name, source_branch_name) @@ -1235,4 +1406,11 @@ describe Gitlab::Git::Repository, seed_helper: true do sha = Rugged::Commit.create(repo, options) repo.lookup(sha) end + + def stub_gitaly + allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(true) + + stub = double(:stub) + allow(Gitaly::Ref::Stub).to receive(:new).and_return(stub) + end end diff --git a/spec/lib/gitlab/git/rev_list_spec.rb b/spec/lib/gitlab/git/rev_list_spec.rb index d48629a296d..78894ba9409 100644 --- a/spec/lib/gitlab/git/rev_list_spec.rb +++ b/spec/lib/gitlab/git/rev_list_spec.rb @@ -3,58 +3,54 @@ require 'spec_helper' describe Gitlab::Git::RevList, lib: true do let(:project) { create(:project, :repository) } - context "validations" do - described_class::ALLOWED_VARIABLES.each do |var| - context var do - it "accepts values starting with the project repo path" do - env = { var => "#{project.repository.path_to_repo}/objects" } - rev_list = described_class.new('oldrev', 'newrev', project: project, env: env) - - expect(rev_list).to be_valid - end - - it "rejects values starting not with the project repo path" do - env = { var => "/some/other/path" } - rev_list = described_class.new('oldrev', 'newrev', project: project, env: env) - - expect(rev_list).not_to be_valid - end - - it "rejects values containing the project repo path but not starting with it" do - env = { var => "/some/other/path/#{project.repository.path_to_repo}" } - rev_list = described_class.new('oldrev', 'newrev', project: project, env: env) - - expect(rev_list).not_to be_valid - end - - it "ignores nil values" do - env = { var => nil } - rev_list = described_class.new('oldrev', 'newrev', project: project, env: env) - - expect(rev_list).to be_valid - end - end - end + before do + expect(Gitlab::Git::Env).to receive(:all).and_return({ + GIT_OBJECT_DIRECTORY: 'foo', + GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar' + }) end - context "#execute" do - let(:env) { { "GIT_OBJECT_DIRECTORY" => project.repository.path_to_repo } } - let(:rev_list) { Gitlab::Git::RevList.new('oldrev', 'newrev', project: project, env: env) } - - it "calls out to `popen` without environment variables if the record is invalid" do - allow(rev_list).to receive(:valid?).and_return(false) - - expect(Open3).to receive(:popen3).with(hash_excluding(env), any_args) - - rev_list.execute + context "#new_refs" do + let(:rev_list) { Gitlab::Git::RevList.new(newrev: 'newrev', path_to_repo: project.repository.path_to_repo) } + + it 'calls out to `popen`' do + expect(Gitlab::Popen).to receive(:popen).with([ + Gitlab.config.git.bin_path, + "--git-dir=#{project.repository.path_to_repo}", + 'rev-list', + 'newrev', + '--not', + '--all' + ], + nil, + { + 'GIT_OBJECT_DIRECTORY' => 'foo', + 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar' + }).and_return(["sha1\nsha2", 0]) + + expect(rev_list.new_refs).to eq(%w[sha1 sha2]) end + end - it "calls out to `popen` with environment variables if the record is valid" do - allow(rev_list).to 
receive(:valid?).and_return(true) - - expect(Open3).to receive(:popen3).with(hash_including(env), any_args) - - rev_list.execute + context "#missed_ref" do + let(:rev_list) { Gitlab::Git::RevList.new(oldrev: 'oldrev', newrev: 'newrev', path_to_repo: project.repository.path_to_repo) } + + it 'calls out to `popen`' do + expect(Gitlab::Popen).to receive(:popen).with([ + Gitlab.config.git.bin_path, + "--git-dir=#{project.repository.path_to_repo}", + 'rev-list', + '--max-count=1', + 'oldrev', + '^newrev' + ], + nil, + { + 'GIT_OBJECT_DIRECTORY' => 'foo', + 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar' + }).and_return(["sha1\nsha2", 0]) + + expect(rev_list.missed_ref).to eq(%w[sha1 sha2]) end end end diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb index ad469e94735..67a9c974298 100644 --- a/spec/lib/gitlab/git/tag_spec.rb +++ b/spec/lib/gitlab/git/tag_spec.rb @@ -1,7 +1,7 @@ require "spec_helper" describe Gitlab::Git::Tag, seed_helper: true do - let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) } + let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) } describe 'first tag' do let(:tag) { repository.tags.first } diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb index 83d2ff8f9b3..4b76a43e6b5 100644 --- a/spec/lib/gitlab/git/tree_spec.rb +++ b/spec/lib/gitlab/git/tree_spec.rb @@ -2,7 +2,7 @@ require "spec_helper" describe Gitlab::Git::Tree, seed_helper: true do context :repo do - let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) } + let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) } let(:tree) { Gitlab::Git::Tree.where(repository, SeedRepo::Commit::ID) } it { expect(tree).to be_kind_of Array } @@ -19,6 +19,7 @@ describe Gitlab::Git::Tree, seed_helper: true do it { expect(dir.commit_id).to eq(SeedRepo::Commit::ID) } it { expect(dir.name).to eq('encoding') } it { expect(dir.path).to eq('encoding') } + it { expect(dir.mode).to eq('40000') } context :subdir do let(:subdir) { Gitlab::Git::Tree.where(repository, SeedRepo::Commit::ID, 'files').first } diff --git a/spec/lib/gitlab/git/util_spec.rb b/spec/lib/gitlab/git/util_spec.rb index bcca4d4c746..88c871855df 100644 --- a/spec/lib/gitlab/git/util_spec.rb +++ b/spec/lib/gitlab/git/util_spec.rb @@ -6,10 +6,10 @@ describe Gitlab::Git::Util do ["", 0], ["foo", 1], ["foo\n", 1], - ["foo\n\n", 2], + ["foo\n\n", 2] ].each do |string, line_count| it "counts #{line_count} lines in #{string.inspect}" do - expect(Gitlab::Git::Util.count_lines(string)).to eq(line_count) + expect(described_class.count_lines(string)).to eq(line_count) end end end diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb index 48f7754bed8..25769977f24 100644 --- a/spec/lib/gitlab/git_access_spec.rb +++ b/spec/lib/gitlab/git_access_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe Gitlab::GitAccess, lib: true do - let(:access) { Gitlab::GitAccess.new(actor, project, 'web', authentication_abilities: authentication_abilities) } + let(:access) { Gitlab::GitAccess.new(actor, project, 'ssh', authentication_abilities: authentication_abilities) } let(:project) { create(:project, :repository) } let(:user) { create(:user) } let(:actor) { user } @@ -183,7 +183,7 @@ describe Gitlab::GitAccess, lib: true do describe '#check_push_access!' 
do before { merge_into_protected_branch } - let(:unprotected_branch) { FFaker::Internet.user_name } + let(:unprotected_branch) { 'unprotected_branch' } let(:changes) do { push_new_branch: "#{Gitlab::Git::BLANK_SHA} 570e7b2ab refs/heads/wow", @@ -211,9 +211,9 @@ describe Gitlab::GitAccess, lib: true do target_branch = project.repository.lookup('feature') source_branch = project.repository.create_file( user, - FFaker::InternetSE.login_user_name, - FFaker::HipsterIpsum.paragraph, - message: FFaker::HipsterIpsum.sentence, + 'filename', + 'This is the file content', + message: 'This is a good commit message', branch_name: unprotected_branch) rugged = project.repository.rugged author = { email: "email@example.com", time: Time.now, name: "Example Git User" } diff --git a/spec/lib/git_ref_validator_spec.rb b/spec/lib/gitlab/git_ref_validator_spec.rb index cc8daa535d6..cc8daa535d6 100644 --- a/spec/lib/git_ref_validator_spec.rb +++ b/spec/lib/gitlab/git_ref_validator_spec.rb diff --git a/spec/lib/gitlab/git_spec.rb b/spec/lib/gitlab/git_spec.rb index 8eaf7aac264..36f0e6507c8 100644 --- a/spec/lib/gitlab/git_spec.rb +++ b/spec/lib/gitlab/git_spec.rb @@ -1,21 +1,8 @@ require 'spec_helper' describe Gitlab::Git, lib: true do - let(:committer_email) { FFaker::Internet.email } - - # I have to remove periods from the end of the name - # This happened when the user's name had a suffix (i.e. "Sr.") - # This seems to be what git does under the hood. For example, this commit: - # - # $ git commit --author='Foo Sr. <foo@example.com>' -m 'Where's my trailing period?' - # - # results in this: - # - # $ git show --pretty - # ... - # Author: Foo Sr <foo@example.com> - # ... - let(:committer_name) { FFaker::Name.name.chomp("\.") } + let(:committer_email) { 'user@example.org' } + let(:committer_name) { 'John Doe' } describe 'committer_hash' do it "returns a hash containing the given email and name" do diff --git a/spec/lib/gitlab/gitaly_client/commit_spec.rb b/spec/lib/gitlab/gitaly_client/commit_spec.rb index 4684b1d1ac0..cf1bc74779e 100644 --- a/spec/lib/gitlab/gitaly_client/commit_spec.rb +++ b/spec/lib/gitlab/gitaly_client/commit_spec.rb @@ -1,28 +1,24 @@ require 'spec_helper' describe Gitlab::GitalyClient::Commit do - describe '.diff_from_parent' do - let(:diff_stub) { double('Gitaly::Diff::Stub') } - let(:project) { create(:project, :repository) } - let(:repository_message) { Gitaly::Repository.new(path: project.repository.path) } - let(:commit) { project.commit('913c66a37b4a45b9769037c55c2d238bd0942d2e') } - - before do - allow(Gitaly::Diff::Stub).to receive(:new).and_return(diff_stub) - allow(diff_stub).to receive(:commit_diff).and_return([]) - end + let(:diff_stub) { double('Gitaly::Diff::Stub') } + let(:project) { create(:project, :repository) } + let(:repository) { project.repository } + let(:repository_message) { repository.gitaly_repository } + let(:commit) { project.commit('913c66a37b4a45b9769037c55c2d238bd0942d2e') } + describe '#diff_from_parent' do context 'when a commit has a parent' do it 'sends an RPC request with the parent ID as left commit' do request = Gitaly::CommitDiffRequest.new( repository: repository_message, left_commit_id: 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660', - right_commit_id: commit.id, + right_commit_id: commit.id ) - expect(diff_stub).to receive(:commit_diff).with(request) + expect_any_instance_of(Gitaly::Diff::Stub).to receive(:commit_diff).with(request) - described_class.diff_from_parent(commit) + described_class.new(repository).diff_from_parent(commit) end end @@ -32,17 
+28,17 @@ describe Gitlab::GitalyClient::Commit do request = Gitaly::CommitDiffRequest.new( repository: repository_message, left_commit_id: '4b825dc642cb6eb9a060e54bf8d69288fbee4904', - right_commit_id: initial_commit.id, + right_commit_id: initial_commit.id ) - expect(diff_stub).to receive(:commit_diff).with(request) + expect_any_instance_of(Gitaly::Diff::Stub).to receive(:commit_diff).with(request) - described_class.diff_from_parent(initial_commit) + described_class.new(repository).diff_from_parent(initial_commit) end end it 'returns a Gitlab::Git::DiffCollection' do - ret = described_class.diff_from_parent(commit) + ret = described_class.new(repository).diff_from_parent(commit) expect(ret).to be_kind_of(Gitlab::Git::DiffCollection) end @@ -50,9 +46,40 @@ describe Gitlab::GitalyClient::Commit do it 'passes options to Gitlab::Git::DiffCollection' do options = { max_files: 31, max_lines: 13 } - expect(Gitlab::Git::DiffCollection).to receive(:new).with([], options) + expect(Gitlab::Git::DiffCollection).to receive(:new).with(kind_of(Enumerable), options) + + described_class.new(repository).diff_from_parent(commit, options) + end + end + + describe '#commit_deltas' do + context 'when a commit has a parent' do + it 'sends an RPC request with the parent ID as left commit' do + request = Gitaly::CommitDeltaRequest.new( + repository: repository_message, + left_commit_id: 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660', + right_commit_id: commit.id + ) + + expect_any_instance_of(Gitaly::Diff::Stub).to receive(:commit_delta).with(request).and_return([]) - described_class.diff_from_parent(commit, options) + described_class.new(repository).commit_deltas(commit) + end + end + + context 'when a commit does not have a parent' do + it 'sends an RPC request with empty tree ref as left commit' do + initial_commit = project.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863') + request = Gitaly::CommitDeltaRequest.new( + repository: repository_message, + left_commit_id: '4b825dc642cb6eb9a060e54bf8d69288fbee4904', + right_commit_id: initial_commit.id + ) + + expect_any_instance_of(Gitaly::Diff::Stub).to receive(:commit_delta).with(request).and_return([]) + + described_class.new(repository).commit_deltas(initial_commit) + end end end end diff --git a/spec/lib/gitlab/gitaly_client/notifications_spec.rb b/spec/lib/gitlab/gitaly_client/notifications_spec.rb index bb5d93994ad..b87dacb175b 100644 --- a/spec/lib/gitlab/gitaly_client/notifications_spec.rb +++ b/spec/lib/gitlab/gitaly_client/notifications_spec.rb @@ -2,12 +2,15 @@ require 'spec_helper' describe Gitlab::GitalyClient::Notifications do describe '#post_receive' do + let(:project) { create(:empty_project) } + let(:repo_path) { project.repository.path_to_repo } + subject { described_class.new(project.repository) } + it 'sends a post_receive message' do - repo_path = create(:empty_project).repository.path_to_repo expect_any_instance_of(Gitaly::Notifications::Stub). 
- to receive(:post_receive).with(post_receive_request_with_repo_path(repo_path)) + to receive(:post_receive).with(gitaly_request_with_repo_path(repo_path)) - described_class.new(repo_path).post_receive + subject.post_receive end end end diff --git a/spec/lib/gitlab/gitaly_client/ref_spec.rb b/spec/lib/gitlab/gitaly_client/ref_spec.rb new file mode 100644 index 00000000000..d8cd2dcbd2a --- /dev/null +++ b/spec/lib/gitlab/gitaly_client/ref_spec.rb @@ -0,0 +1,71 @@ +require 'spec_helper' + +describe Gitlab::GitalyClient::Ref do + let(:project) { create(:empty_project) } + let(:repo_path) { project.repository.path_to_repo } + let(:client) { described_class.new(project.repository) } + + before do + allow(Gitlab.config.gitaly).to receive(:enabled).and_return(true) + end + + after do + # When we say `expect_any_instance_of(Gitaly::Ref::Stub)` a double is created, + # and because GitalyClient shares stubs these will get passed from example to + # example, which will cause an error, so we clean the stubs after each example. + Gitlab::GitalyClient.clear_stubs! + end + + describe '#branch_names' do + it 'sends a find_all_branch_names message' do + expect_any_instance_of(Gitaly::Ref::Stub). + to receive(:find_all_branch_names).with(gitaly_request_with_repo_path(repo_path)). + and_return([]) + + client.branch_names + end + end + + describe '#tag_names' do + it 'sends a find_all_tag_names message' do + expect_any_instance_of(Gitaly::Ref::Stub). + to receive(:find_all_tag_names).with(gitaly_request_with_repo_path(repo_path)). + and_return([]) + + client.tag_names + end + end + + describe '#default_branch_name' do + it 'sends a find_default_branch_name message' do + expect_any_instance_of(Gitaly::Ref::Stub). + to receive(:find_default_branch_name).with(gitaly_request_with_repo_path(repo_path)). + and_return(double(name: 'foo')) + + client.default_branch_name + end + end + + describe '#local_branches' do + it 'sends a find_local_branches message' do + expect_any_instance_of(Gitaly::Ref::Stub). + to receive(:find_local_branches).with(gitaly_request_with_repo_path(repo_path)). + and_return([]) + + client.local_branches + end + + it 'parses and sends the sort parameter' do + expect_any_instance_of(Gitaly::Ref::Stub). + to receive(:find_local_branches). + with(gitaly_request_with_params(sort_by: :UPDATED_DESC)). + and_return([]) + + client.local_branches(sort_by: 'updated_desc') + end + + it 'raises an argument error if an invalid sort_by parameter is passed' do + expect { client.local_branches(sort_by: 'invalid_sort') }.to raise_error(ArgumentError) + end + end +end diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb new file mode 100644 index 00000000000..08ee0dff6b2 --- /dev/null +++ b/spec/lib/gitlab/gitaly_client_spec.rb @@ -0,0 +1,35 @@ +require 'spec_helper' + +describe Gitlab::GitalyClient, lib: true do + describe '.stub' do + before { described_class.clear_stubs! 
} + + context 'when passed a UNIX socket address' do + it 'passes the address as-is to GRPC' do + address = 'unix:/tmp/gitaly.sock' + allow(Gitlab.config.repositories).to receive(:storages).and_return({ + 'default' => { 'gitaly_address' => address } + }) + + expect(Gitaly::Commit::Stub).to receive(:new).with(address, any_args) + + described_class.stub(:commit, 'default') + end + end + + context 'when passed a TCP address' do + it 'strips tcp:// prefix before passing it to GRPC::Core::Channel initializer' do + address = 'localhost:9876' + prefixed_address = "tcp://#{address}" + + allow(Gitlab.config.repositories).to receive(:storages).and_return({ + 'default' => { 'gitaly_address' => prefixed_address } + }) + + expect(Gitaly::Commit::Stub).to receive(:new).with(address, any_args) + + described_class.stub(:commit, 'default') + end + end + end +end diff --git a/spec/lib/gitlab/github_import/importer_spec.rb b/spec/lib/gitlab/github_import/importer_spec.rb index 8b867fbe322..9d5e20841b5 100644 --- a/spec/lib/gitlab/github_import/importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer_spec.rb @@ -215,9 +215,9 @@ describe Gitlab::GithubImport::Importer, lib: true do let(:updated_at) { DateTime.strptime('2011-01-27T19:01:12Z') } let(:repository) { double(id: 1, fork: false) } let(:source_sha) { create(:commit, project: project).id } - let(:source_branch) { double(ref: 'branch-merged', repo: repository, sha: source_sha) } + let(:source_branch) { double(ref: 'branch-merged', repo: repository, sha: source_sha, user: octocat) } let(:target_sha) { create(:commit, project: project, git_commit: RepoHelpers.another_sample_commit).id } - let(:target_branch) { double(ref: 'master', repo: repository, sha: target_sha) } + let(:target_branch) { double(ref: 'master', repo: repository, sha: target_sha, user: octocat) } let(:pull_request) do double( number: 1347, diff --git a/spec/lib/gitlab/github_import/issue_formatter_spec.rb b/spec/lib/gitlab/github_import/issue_formatter_spec.rb index f34d09f2c1d..a4089592cf2 100644 --- a/spec/lib/gitlab/github_import/issue_formatter_spec.rb +++ b/spec/lib/gitlab/github_import/issue_formatter_spec.rb @@ -43,7 +43,7 @@ describe Gitlab::GithubImport::IssueFormatter, lib: true do description: "*Created by: octocat*\n\nI'm having a problem with this.", state: 'opened', author_id: project.creator_id, - assignee_id: nil, + assignee_ids: [], created_at: created_at, updated_at: updated_at } @@ -64,7 +64,7 @@ describe Gitlab::GithubImport::IssueFormatter, lib: true do description: "*Created by: octocat*\n\nI'm having a problem with this.", state: 'closed', author_id: project.creator_id, - assignee_id: nil, + assignee_ids: [], created_at: created_at, updated_at: updated_at } @@ -77,19 +77,19 @@ describe Gitlab::GithubImport::IssueFormatter, lib: true do let(:raw_data) { double(base_data.merge(assignee: octocat)) } it 'returns nil as assignee_id when is not a GitLab user' do - expect(issue.attributes.fetch(:assignee_id)).to be_nil + expect(issue.attributes.fetch(:assignee_ids)).to be_empty end it 'returns GitLab user id associated with GitHub id as assignee_id' do gl_user = create(:omniauth_user, extern_uid: octocat.id, provider: 'github') - expect(issue.attributes.fetch(:assignee_id)).to eq gl_user.id + expect(issue.attributes.fetch(:assignee_ids)).to eq [gl_user.id] end it 'returns GitLab user id associated with GitHub email as assignee_id' do gl_user = create(:user, email: octocat.email) - expect(issue.attributes.fetch(:assignee_id)).to eq gl_user.id + 
expect(issue.attributes.fetch(:assignee_ids)).to eq [gl_user.id] end end diff --git a/spec/lib/gitlab/github_import/pull_request_formatter_spec.rb b/spec/lib/gitlab/github_import/pull_request_formatter_spec.rb index 44423917944..b7c59918a76 100644 --- a/spec/lib/gitlab/github_import/pull_request_formatter_spec.rb +++ b/spec/lib/gitlab/github_import/pull_request_formatter_spec.rb @@ -4,15 +4,18 @@ describe Gitlab::GithubImport::PullRequestFormatter, lib: true do let(:client) { double } let(:project) { create(:project, :repository) } let(:source_sha) { create(:commit, project: project).id } - let(:target_sha) { create(:commit, project: project, git_commit: RepoHelpers.another_sample_commit).id } + let(:target_commit) { create(:commit, project: project, git_commit: RepoHelpers.another_sample_commit) } + let(:target_sha) { target_commit.id } + let(:target_short_sha) { target_commit.id.to_s[0..7] } let(:repository) { double(id: 1, fork: false) } let(:source_repo) { repository } let(:source_branch) { double(ref: 'branch-merged', repo: source_repo, sha: source_sha) } let(:forked_source_repo) { double(id: 2, fork: true, name: 'otherproject', full_name: 'company/otherproject') } let(:target_repo) { repository } - let(:target_branch) { double(ref: 'master', repo: target_repo, sha: target_sha) } - let(:removed_branch) { double(ref: 'removed-branch', repo: source_repo, sha: '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b') } - let(:forked_branch) { double(ref: 'master', repo: forked_source_repo, sha: '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b') } + let(:target_branch) { double(ref: 'master', repo: target_repo, sha: target_sha, user: octocat) } + let(:removed_branch) { double(ref: 'removed-branch', repo: source_repo, sha: '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b', user: octocat) } + let(:forked_branch) { double(ref: 'master', repo: forked_source_repo, sha: '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b', user: octocat) } + let(:branch_deleted_repo) { double(ref: 'master', repo: nil, sha: '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b', user: octocat) } let(:octocat) { double(id: 123456, login: 'octocat', email: 'octocat@example.com') } let(:created_at) { DateTime.strptime('2011-01-26T19:01:12Z') } let(:updated_at) { DateTime.strptime('2011-01-27T19:01:12Z') } @@ -61,7 +64,8 @@ describe Gitlab::GithubImport::PullRequestFormatter, lib: true do author_id: project.creator_id, assignee_id: nil, created_at: created_at, - updated_at: updated_at + updated_at: updated_at, + imported: true } expect(pull_request.attributes).to eq(expected) @@ -87,7 +91,8 @@ describe Gitlab::GithubImport::PullRequestFormatter, lib: true do author_id: project.creator_id, assignee_id: nil, created_at: created_at, - updated_at: updated_at + updated_at: updated_at, + imported: true } expect(pull_request.attributes).to eq(expected) @@ -114,7 +119,8 @@ describe Gitlab::GithubImport::PullRequestFormatter, lib: true do author_id: project.creator_id, assignee_id: nil, created_at: created_at, - updated_at: updated_at + updated_at: updated_at, + imported: true } expect(pull_request.attributes).to eq(expected) @@ -203,16 +209,24 @@ describe Gitlab::GithubImport::PullRequestFormatter, lib: true do context 'when source branch does not exist' do let(:raw_data) { double(base_data.merge(head: removed_branch)) } - it 'prefixes branch name with pull request number' do - expect(pull_request.source_branch_name).to eq 'pull/1347/removed-branch' + it 'prefixes branch name with gh-:short_sha/:number/:user pattern to avoid collision' do + 
expect(pull_request.source_branch_name).to eq "gh-#{target_short_sha}/1347/octocat/removed-branch" end end context 'when source branch is from a fork' do let(:raw_data) { double(base_data.merge(head: forked_branch)) } - it 'prefixes branch name with pull request number and project with namespace to avoid collision' do - expect(pull_request.source_branch_name).to eq 'pull/1347/company/otherproject/master' + it 'prefixes branch name with gh-:short_sha/:number/:user pattern to avoid collision' do + expect(pull_request.source_branch_name).to eq "gh-#{target_short_sha}/1347/octocat/master" + end + end + + context 'when source branch is from a deleted fork' do + let(:raw_data) { double(base_data.merge(head: branch_deleted_repo)) } + + it 'prefixes branch name with gh-:short_sha/:number/:user pattern to avoid collision' do + expect(pull_request.source_branch_name).to eq "gh-#{target_short_sha}/1347/octocat/master" end end end @@ -229,8 +243,8 @@ describe Gitlab::GithubImport::PullRequestFormatter, lib: true do context 'when target branch does not exist' do let(:raw_data) { double(base_data.merge(base: removed_branch)) } - it 'prefixes branch name with pull request number' do - expect(pull_request.target_branch_name).to eq 'pull/1347/removed-branch' + it 'prefixes branch name with gh-:short_sha/:number/:user pattern to avoid collision' do + expect(pull_request.target_branch_name).to eq 'gl-2e5d3239/1347/octocat/removed-branch' end end end @@ -290,6 +304,14 @@ describe Gitlab::GithubImport::PullRequestFormatter, lib: true do end end + context 'when source repository does not exist anymore' do + let(:raw_data) { double(base_data.merge(head: branch_deleted_repo)) } + + it 'returns true' do + expect(pull_request.cross_project?).to eq true + end + end + context 'when source and target repositories are the same' do let(:raw_data) { double(base_data.merge(head: source_branch)) } @@ -299,6 +321,14 @@ describe Gitlab::GithubImport::PullRequestFormatter, lib: true do end end + describe '#source_branch_exists?' 
do + let(:raw_data) { double(base_data.merge(head: forked_branch)) } + + it 'returns false when is a cross_project' do + expect(pull_request.source_branch_exists?).to eq false + end + end + describe '#url' do let(:raw_data) { double(base_data) } diff --git a/spec/lib/gitlab/gl_repository_spec.rb b/spec/lib/gitlab/gl_repository_spec.rb new file mode 100644 index 00000000000..ac3558ab386 --- /dev/null +++ b/spec/lib/gitlab/gl_repository_spec.rb @@ -0,0 +1,19 @@ +require 'spec_helper' + +describe ::Gitlab::GlRepository do + describe '.parse' do + set(:project) { create(:project) } + + it 'parses a project gl_repository' do + expect(described_class.parse("project-#{project.id}")).to eq([project, false]) + end + + it 'parses a wiki gl_repository' do + expect(described_class.parse("wiki-#{project.id}")).to eq([project, true]) + end + + it 'throws an argument error on an invalid gl_repository' do + expect { described_class.parse("badformat-#{project.id}") }.to raise_error(ArgumentError) + end + end +end diff --git a/spec/lib/gitlab/google_code_import/importer_spec.rb b/spec/lib/gitlab/google_code_import/importer_spec.rb index ccaa88a5c79..622a0f513f4 100644 --- a/spec/lib/gitlab/google_code_import/importer_spec.rb +++ b/spec/lib/gitlab/google_code_import/importer_spec.rb @@ -49,7 +49,7 @@ describe Gitlab::GoogleCodeImport::Importer, lib: true do expect(issue).not_to be_nil expect(issue.iid).to eq(169) expect(issue.author).to eq(project.creator) - expect(issue.assignee).to eq(mapped_user) + expect(issue.assignees).to eq([mapped_user]) expect(issue.state).to eq("closed") expect(issue.label_names).to include("Priority: Medium") expect(issue.label_names).to include("Status: Fixed") diff --git a/spec/lib/gitlab/health_checks/db_check_spec.rb b/spec/lib/gitlab/health_checks/db_check_spec.rb new file mode 100644 index 00000000000..33c6c24449c --- /dev/null +++ b/spec/lib/gitlab/health_checks/db_check_spec.rb @@ -0,0 +1,6 @@ +require 'spec_helper' +require_relative './simple_check_shared' + +describe Gitlab::HealthChecks::DbCheck do + include_examples 'simple_check', 'db_ping', 'Db', '1' +end diff --git a/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb b/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb new file mode 100644 index 00000000000..45ccd3d6459 --- /dev/null +++ b/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb @@ -0,0 +1,127 @@ +require 'spec_helper' + +describe Gitlab::HealthChecks::FsShardsCheck do + let(:metric_class) { Gitlab::HealthChecks::Metric } + let(:result_class) { Gitlab::HealthChecks::Result } + let(:repository_storages) { [:default] } + let(:tmp_dir) { Dir.mktmpdir } + + let(:storages_paths) do + { + default: { path: tmp_dir } + }.with_indifferent_access + end + + before do + allow(described_class).to receive(:repository_storages) { repository_storages } + allow(described_class).to receive(:storages_paths) { storages_paths } + end + + after do + FileUtils.remove_entry_secure(tmp_dir) if Dir.exist?(tmp_dir) + end + + shared_examples 'filesystem checks' do + describe '#readiness' do + subject { described_class.readiness } + + context 'storage points to not existing folder' do + let(:storages_paths) do + { + default: { path: 'tmp/this/path/doesnt/exist' } + }.with_indifferent_access + end + + it { is_expected.to include(result_class.new(false, 'cannot stat storage', shard: :default)) } + end + + context 'storage points to directory that has both read and write rights' do + before do + FileUtils.chmod_R(0755, tmp_dir) + end + + it { is_expected.to 
include(result_class.new(true, nil, shard: :default)) } + + it 'cleans up files used for testing' do + expect(described_class).to receive(:storage_write_test).with(any_args).and_call_original + + subject + + expect(Dir.entries(tmp_dir).count).to eq(2) + end + + context 'read test fails' do + before do + allow(described_class).to receive(:storage_read_test).with(any_args).and_return(false) + end + + it { is_expected.to include(result_class.new(false, 'cannot read from storage', shard: :default)) } + end + + context 'write test fails' do + before do + allow(described_class).to receive(:storage_write_test).with(any_args).and_return(false) + end + + it { is_expected.to include(result_class.new(false, 'cannot write to storage', shard: :default)) } + end + end + end + + describe '#metrics' do + subject { described_class.metrics } + + context 'storage points to not existing folder' do + let(:storages_paths) do + { + default: { path: 'tmp/this/path/doesnt/exist' } + }.with_indifferent_access + end + + it { is_expected.to include(metric_class.new(:filesystem_accessible, 0, shard: :default)) } + it { is_expected.to include(metric_class.new(:filesystem_readable, 0, shard: :default)) } + it { is_expected.to include(metric_class.new(:filesystem_writable, 0, shard: :default)) } + + it { is_expected.to include(have_attributes(name: :filesystem_access_latency, value: be >= 0, labels: { shard: :default })) } + it { is_expected.to include(have_attributes(name: :filesystem_read_latency, value: be >= 0, labels: { shard: :default })) } + it { is_expected.to include(have_attributes(name: :filesystem_write_latency, value: be >= 0, labels: { shard: :default })) } + end + + context 'storage points to directory that has both read and write rights' do + before do + FileUtils.chmod_R(0755, tmp_dir) + end + + it { is_expected.to include(metric_class.new(:filesystem_accessible, 1, shard: :default)) } + it { is_expected.to include(metric_class.new(:filesystem_readable, 1, shard: :default)) } + it { is_expected.to include(metric_class.new(:filesystem_writable, 1, shard: :default)) } + + it { is_expected.to include(have_attributes(name: :filesystem_access_latency, value: be >= 0, labels: { shard: :default })) } + it { is_expected.to include(have_attributes(name: :filesystem_read_latency, value: be >= 0, labels: { shard: :default })) } + it { is_expected.to include(have_attributes(name: :filesystem_write_latency, value: be >= 0, labels: { shard: :default })) } + end + end + end + + context 'when popen always finds required binaries' do + before do + allow(Gitlab::Popen).to receive(:popen).and_wrap_original do |method, *args, &block| + begin + method.call(*args, &block) + rescue RuntimeError + raise 'expected not to happen' + end + end + end + + it_behaves_like 'filesystem checks' + end + + context 'when popen never finds required binaries' do + before do + allow(Gitlab::Popen).to receive(:popen).and_raise(Errno::ENOENT) + end + + it_behaves_like 'filesystem checks' + end +end diff --git a/spec/lib/gitlab/health_checks/redis_check_spec.rb b/spec/lib/gitlab/health_checks/redis_check_spec.rb new file mode 100644 index 00000000000..734cdcb893e --- /dev/null +++ b/spec/lib/gitlab/health_checks/redis_check_spec.rb @@ -0,0 +1,6 @@ +require 'spec_helper' +require_relative './simple_check_shared' + +describe Gitlab::HealthChecks::RedisCheck do + include_examples 'simple_check', 'redis_ping', 'Redis', 'PONG' +end diff --git a/spec/lib/gitlab/health_checks/simple_check_shared.rb b/spec/lib/gitlab/health_checks/simple_check_shared.rb 
new file mode 100644 index 00000000000..3f871d66034 --- /dev/null +++ b/spec/lib/gitlab/health_checks/simple_check_shared.rb @@ -0,0 +1,66 @@ +shared_context 'simple_check' do |metrics_prefix, check_name, success_result| + describe '#metrics' do + subject { described_class.metrics } + context 'Check is passing' do + before do + allow(described_class).to receive(:check).and_return success_result + end + + it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_success", value: 1)) } + it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_timeout", value: 0)) } + it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_latency", value: be >= 0)) } + end + + context 'Check is misbehaving' do + before do + allow(described_class).to receive(:check).and_return 'error!' + end + + it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_success", value: 0)) } + it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_timeout", value: 0)) } + it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_latency", value: be >= 0)) } + end + + context 'Check is timeouting' do + before do + allow(described_class).to receive(:check).and_return Timeout::Error.new + end + + it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_success", value: 0)) } + it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_timeout", value: 1)) } + it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_latency", value: be >= 0)) } + end + end + + describe '#readiness' do + subject { described_class.readiness } + context 'Check returns ok' do + before do + allow(described_class).to receive(:check).and_return success_result + end + + it { is_expected.to have_attributes(success: true) } + end + + context 'Check is misbehaving' do + before do + allow(described_class).to receive(:check).and_return 'error!' + end + + it { is_expected.to have_attributes(success: false, message: "unexpected #{check_name} check result: error!") } + end + + context 'Check is timeouting' do + before do + allow(described_class).to receive(:check ).and_return Timeout::Error.new + end + + it { is_expected.to have_attributes(success: false, message: "#{check_name} check timed out") } + end + end + + describe '#liveness' do + subject { described_class.readiness } + it { is_expected.to eq(Gitlab::HealthChecks::Result.new(true)) } + end +end diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb index e49799ad105..e57b3053871 100644 --- a/spec/lib/gitlab/highlight_spec.rb +++ b/spec/lib/gitlab/highlight_spec.rb @@ -57,4 +57,15 @@ describe Gitlab::Highlight, lib: true do end end end + + describe '#highlight' do + subject { described_class.highlight(file_name, file_content, nowrap: false) } + + it 'links dependencies via DependencyLinker' do + expect(Gitlab::DependencyLinker).to receive(:link). 
+ with('file.name', 'Contents', anything).and_call_original + + described_class.highlight('file.name', 'Contents') + end + end end diff --git a/spec/lib/gitlab/i18n_spec.rb b/spec/lib/gitlab/i18n_spec.rb new file mode 100644 index 00000000000..52f2614d5ca --- /dev/null +++ b/spec/lib/gitlab/i18n_spec.rb @@ -0,0 +1,27 @@ +require 'spec_helper' + +module Gitlab + describe I18n, lib: true do + let(:user) { create(:user, preferred_language: 'es') } + + describe '.set_locale' do + it 'sets the locale based on current user preferred language' do + Gitlab::I18n.set_locale(user) + + expect(FastGettext.locale).to eq('es') + expect(::I18n.locale).to eq(:es) + end + end + + describe '.reset_locale' do + it 'resets the locale to the default language' do + Gitlab::I18n.set_locale(user) + + Gitlab::I18n.reset_locale + + expect(FastGettext.locale).to eq('en') + expect(::I18n.locale).to eq(:en) + end + end + end +end diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index 002cffd3062..34f617e23a5 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -3,12 +3,13 @@ issues: - subscriptions - award_emoji - author -- assignee +- assignees - updated_by - milestone - notes - label_links - labels +- last_edited_by - todos - user_agent_detail - moved_to @@ -16,6 +17,7 @@ issues: - merge_requests_closing_issues - metrics - timelogs +- issue_assignees events: - author - project @@ -26,6 +28,7 @@ notes: - noteable - author - updated_by +- last_edited_by - resolved_by - todos - events @@ -71,6 +74,7 @@ merge_requests: - notes - label_links - labels +- last_edited_by - todos - target_project - source_project @@ -81,6 +85,7 @@ merge_requests: - merge_requests_closing_issues - metrics - timelogs +- head_pipeline merge_request_diff: - merge_request pipelines: @@ -89,16 +94,34 @@ pipelines: - statuses - builds - trigger_requests +- auto_canceled_by +- auto_canceled_pipelines +- auto_canceled_jobs +- pending_builds +- retryable_builds +- cancelable_statuses +- manual_actions +- artifacts +- pipeline_schedule +- merge_requests statuses: - project - pipeline - user +- auto_canceled_by variables: - project triggers: - project - trigger_requests - owner +pipeline_schedules: +- project +- owner +- pipelines +- last_pipeline +pipeline_schedule: +- pipelines deploy_keys: - user - deploy_keys_projects @@ -112,10 +135,18 @@ protected_branches: - project - merge_access_levels - push_access_levels +protected_tags: +- project +- create_access_levels merge_access_levels: - protected_branch push_access_levels: - protected_branch +create_access_levels: +- protected_tag +container_repositories: +- project +- name project: - taggings - base_tags @@ -143,6 +174,7 @@ project: - asana_service - gemnasium_service - slack_service +- microsoft_teams_service - mattermost_service - buildkite_service - bamboo_service @@ -156,6 +188,8 @@ project: - external_wiki_service - kubernetes_service - mock_ci_service +- mock_deployment_service +- mock_monitoring_service - forked_project_link - forked_from_project - forked_project_links @@ -170,6 +204,7 @@ project: - snippets - hooks - protected_branches +- protected_tags - project_members - users - requesters @@ -190,8 +225,10 @@ project: - builds - runner_projects - runners +- active_runners - variables - triggers +- pipeline_schedules - environments - deployments - project_feature @@ -199,7 +236,9 @@ project: - authorized_users - project_authorizations - route +- redirect_routes - statistics +- 
container_repositories - uploads award_emoji: - awardable diff --git a/spec/lib/gitlab/import_export/fork_spec.rb b/spec/lib/gitlab/import_export/fork_spec.rb new file mode 100644 index 00000000000..42f3fc59f04 --- /dev/null +++ b/spec/lib/gitlab/import_export/fork_spec.rb @@ -0,0 +1,49 @@ +require 'spec_helper' + +describe 'forked project import', services: true do + let(:user) { create(:user) } + let!(:project_with_repo) { create(:project, :test_repo, name: 'test-repo-restorer', path: 'test-repo-restorer') } + let!(:project) { create(:empty_project, name: 'test-repo-restorer-no-repo', path: 'test-repo-restorer-no-repo') } + let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" } + let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.path_with_namespace) } + let(:forked_from_project) { create(:project) } + let(:fork_link) { create(:forked_project_link, forked_from_project: project_with_repo) } + let(:repo_saver) { Gitlab::ImportExport::RepoSaver.new(project: project_with_repo, shared: shared) } + let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename) } + + let(:repo_restorer) do + Gitlab::ImportExport::RepoRestorer.new(path_to_bundle: bundle_path, shared: shared, project: project) + end + + let!(:merge_request) do + create(:merge_request, source_project: fork_link.forked_to_project, target_project: project_with_repo) + end + + let(:saver) do + Gitlab::ImportExport::ProjectTreeSaver.new(project: project_with_repo, current_user: user, shared: shared) + end + + let(:restorer) do + Gitlab::ImportExport::ProjectTreeRestorer.new(user: user, shared: shared, project: project) + end + + before do + allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) + + saver.save + repo_saver.save + + repo_restorer.restore + restorer.restore + end + + after do + FileUtils.rm_rf(export_path) + FileUtils.rm_rf(project_with_repo.repository.path_to_repo) + FileUtils.rm_rf(project.repository.path_to_repo) + end + + it 'can access the MR' do + expect(project.merge_requests.first.ensure_ref_fetched.first).to include('refs/merge-requests/1/head') + end +end diff --git a/spec/lib/gitlab/import_export/hash_util_spec.rb b/spec/lib/gitlab/import_export/hash_util_spec.rb new file mode 100644 index 00000000000..1c3a0b23ece --- /dev/null +++ b/spec/lib/gitlab/import_export/hash_util_spec.rb @@ -0,0 +1,28 @@ +require 'spec_helper' + +describe Gitlab::ImportExport::HashUtil, lib: true do + let(:stringified_array) { [{ 'test' => 1 }] } + let(:stringified_array_with_date) { [{ 'test_date' => '2016-04-06 06:17:44 +0200' }] } + + describe '.deep_symbolize_array!' do + it 'symbolizes keys' do + expect { described_class.deep_symbolize_array!(stringified_array) }.to change { + stringified_array.first.keys.first + }.from('test').to(:test) + end + end + + describe '.deep_symbolize_array_with_date!' 
do + it 'symbolizes keys' do + expect { described_class.deep_symbolize_array_with_date!(stringified_array_with_date) }.to change { + stringified_array_with_date.first.keys.first + }.from('test_date').to(:test_date) + end + + it 'transforms date strings into Time objects' do + expect { described_class.deep_symbolize_array_with_date!(stringified_array_with_date) }.to change { + stringified_array_with_date.first.values.first.class + }.from(String).to(ActiveSupport::TimeWithZone) + end + end +end diff --git a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb new file mode 100644 index 00000000000..349be4596b6 --- /dev/null +++ b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb @@ -0,0 +1,31 @@ +require 'spec_helper' + +describe Gitlab::ImportExport::MergeRequestParser do + let(:user) { create(:user) } + let!(:project) { create(:project, :test_repo, name: 'test-repo-restorer', path: 'test-repo-restorer') } + let(:forked_from_project) { create(:project) } + let(:fork_link) { create(:forked_project_link, forked_from_project: project) } + + let!(:merge_request) do + create(:merge_request, source_project: fork_link.forked_to_project, target_project: project) + end + + let(:parsed_merge_request) do + described_class.new(project, + merge_request.diff_head_sha, + merge_request, + merge_request.as_json).parse! + end + + after do + FileUtils.rm_rf(project.repository.path_to_repo) + end + + it 'has a source branch' do + expect(project.repository.branch_exists?(parsed_merge_request.source_branch)).to be true + end + + it 'has a target branch' do + expect(project.repository.branch_exists?(parsed_merge_request.target_branch)).to be true + end +end diff --git a/spec/lib/gitlab/import_export/project.json b/spec/lib/gitlab/import_export/project.json index d9b67426818..e3599d6fe59 100644 --- a/spec/lib/gitlab/import_export/project.json +++ b/spec/lib/gitlab/import_export/project.json @@ -2,6 +2,7 @@ "description": "Nisi et repellendus ut enim quo accusamus vel magnam.", "visibility_level": 10, "archived": false, + "description_html": "description", "labels": [ { "id": 2, @@ -6981,28 +6982,6 @@ ], "services": [ { - "id": 164, - "title": null, - "project_id": 5, - "created_at": "2016-06-14T15:02:07.372Z", - "updated_at": "2016-06-14T15:02:07.372Z", - "active": false, - "properties": { - - }, - "template": false, - "push_events": true, - "issues_events": true, - "merge_requests_events": true, - "tag_push_events": true, - "note_events": true, - "build_events": true, - "category": "issue_tracker", - "type": "CustomIssueTrackerService", - "default": true, - "wiki_page_events": true - }, - { "id": 100, "title": "JetBrains TeamCity CI", "project_id": 5, @@ -7018,7 +6997,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "TeamcityService", "category": "ci", "default": false, "wiki_page_events": true @@ -7040,6 +7020,7 @@ "tag_push_events": true, "note_events": true, "pipeline_events": true, + "type": "SlackService", "category": "common", "default": false, "wiki_page_events": true @@ -7060,7 +7041,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "RedmineService", "category": "issue_tracker", "default": false, "wiki_page_events": true @@ -7081,7 +7063,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": 
true, + "type": "PushoverService", "category": "common", "default": false, "wiki_page_events": true @@ -7102,7 +7085,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "PivotalTrackerService", "category": "common", "default": false, "wiki_page_events": true @@ -7124,7 +7108,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "JiraService", "category": "issue_tracker", "default": false, "wiki_page_events": true @@ -7145,7 +7130,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "IrkerService", "category": "common", "default": false, "wiki_page_events": true @@ -7167,6 +7153,7 @@ "tag_push_events": true, "note_events": true, "pipeline_events": true, + "type": "HipchatService", "category": "common", "default": false, "wiki_page_events": true @@ -7187,7 +7174,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "GemnasiumService", "category": "common", "default": false, "wiki_page_events": true @@ -7208,7 +7196,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "FlowdockService", "category": "common", "default": false, "wiki_page_events": true @@ -7229,7 +7218,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "ExternalWikiService", "category": "common", "default": false, "wiki_page_events": true @@ -7250,7 +7240,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "EmailsOnPushService", "category": "common", "default": false, "wiki_page_events": true @@ -7271,7 +7262,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "DroneCiService", "category": "ci", "default": false, "wiki_page_events": true @@ -7292,7 +7284,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "CustomIssueTrackerService", "category": "issue_tracker", "default": false, "wiki_page_events": true @@ -7313,7 +7306,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "CampfireService", "category": "common", "default": false, "wiki_page_events": true @@ -7334,7 +7328,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "BuildkiteService", "category": "ci", "default": false, "wiki_page_events": true @@ -7355,7 +7350,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "BambooService", "category": "ci", "default": false, "wiki_page_events": true @@ -7376,7 +7372,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, + "type": "AssemblaService", "category": "common", "default": false, "wiki_page_events": true @@ -7397,7 +7394,8 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": 
true, + "type": "AssemblaService", "category": "common", "default": false, "wiki_page_events": true @@ -7418,7 +7416,7 @@ "merge_requests_events": true, "tag_push_events": true, "note_events": true, - "build_events": true, + "job_events": true, "category": "common", "default": false, "wiki_page_events": true, @@ -7455,6 +7453,24 @@ ] } ], + "protected_tags": [ + { + "id": 1, + "project_id": 9, + "name": "v*", + "created_at": "2017-04-04T13:48:13.426Z", + "updated_at": "2017-04-04T13:48:13.426Z", + "create_access_levels": [ + { + "id": 1, + "protected_tag_id": 1, + "access_level": 40, + "created_at": "2017-04-04T13:48:13.458Z", + "updated_at": "2017-04-04T13:48:13.458Z" + } + ] + } + ], "project_feature": { "builds_access_level": 0, "created_at": "2014-12-26T09:26:45.000Z", diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb index c36f12dbd82..14338515892 100644 --- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb @@ -30,6 +30,10 @@ describe Gitlab::ImportExport::ProjectTreeRestorer, services: true do expect(project.project_feature.merge_requests_access_level).to eq(ProjectFeature::ENABLED) end + it 'has the project html description' do + expect(Project.find_by_path('project').description_html).to eq('description') + end + it 'has the same label associated to two issues' do expect(ProjectLabel.find_by_title('test2').issues.count).to eq(2) end @@ -64,6 +68,10 @@ describe Gitlab::ImportExport::ProjectTreeRestorer, services: true do expect(ProtectedBranch.first.push_access_levels).not_to be_empty end + it 'contains the create access levels on a protected tag' do + expect(ProtectedTag.first.create_access_levels).not_to be_empty + end + context 'event at forth level of the tree' do let(:event) { Event.where(title: 'test levels').first } @@ -82,6 +90,12 @@ describe Gitlab::ImportExport::ProjectTreeRestorer, services: true do expect(MergeRequestDiff.where.not(st_diffs: nil).count).to eq(9) end + it 'has the correct time for merge request st_commits' do + st_commits = MergeRequestDiff.where.not(st_commits: nil).first.st_commits + + expect(st_commits.first[:committed_date]).to be_kind_of(Time) + end + it 'has labels associated to label links, associated to issues' do expect(Label.first.label_links.first.target).not_to be_nil end diff --git a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb index 012c22ec5ad..5aeb29b7fec 100644 --- a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb +++ b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb @@ -6,7 +6,7 @@ describe Gitlab::ImportExport::ProjectTreeSaver, services: true do let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared) } let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" } let(:user) { create(:user) } - let(:project) { setup_project } + let!(:project) { setup_project } before do project.team << [user, :master] @@ -79,6 +79,10 @@ describe Gitlab::ImportExport::ProjectTreeSaver, services: true do expect(saved_project_json['merge_requests'].first['merge_request_diff']).not_to be_empty end + it 'has merge requests diff st_diffs' do + expect(saved_project_json['merge_requests'].first['merge_request_diff']['utf8_st_diffs']).not_to be_nil + end + it 'has merge requests comments' do expect(saved_project_json['merge_requests'].first['notes']).not_to be_empty end 
@@ -185,11 +189,21 @@ describe Gitlab::ImportExport::ProjectTreeSaver, services: true do end end end + + context 'project attributes' do + it 'contains the html description' do + expect(saved_project_json).to include("description_html" => 'description') + end + + it 'does not contain the runners token' do + expect(saved_project_json).not_to include("runners_token" => 'token') + end + end end end def setup_project - issue = create(:issue, assignee: user) + issue = create(:issue, assignees: [user]) snippet = create(:project_snippet) release = create(:release) group = create(:group) @@ -205,6 +219,7 @@ describe Gitlab::ImportExport::ProjectTreeSaver, services: true do releases: [release], group: group ) + project.update_column(:description_html, 'description') project_label = create(:label, project: project) group_label = create(:group_label, group: group) create(:label_link, label: project_label, target: issue) diff --git a/spec/lib/gitlab/import_export/reader_spec.rb b/spec/lib/gitlab/import_export/reader_spec.rb index 48d74b07e27..d700af142be 100644 --- a/spec/lib/gitlab/import_export/reader_spec.rb +++ b/spec/lib/gitlab/import_export/reader_spec.rb @@ -5,7 +5,7 @@ describe Gitlab::ImportExport::Reader, lib: true do let(:test_config) { 'spec/support/import_export/import_export.yml' } let(:project_tree_hash) do { - only: [:name, :path], + except: [:id, :created_at], include: [:issues, :labels, { merge_requests: { only: [:id], diff --git a/spec/lib/gitlab/import_export/relation_factory_spec.rb b/spec/lib/gitlab/import_export/relation_factory_spec.rb index 57e412b0cef..5417c7534ea 100644 --- a/spec/lib/gitlab/import_export/relation_factory_spec.rb +++ b/spec/lib/gitlab/import_export/relation_factory_spec.rb @@ -9,7 +9,7 @@ describe Gitlab::ImportExport::RelationFactory, lib: true do relation_hash: relation_hash, members_mapper: members_mapper, user: user, - project_id: project.id) + project: project) end context 'hook object' do @@ -33,7 +33,7 @@ describe Gitlab::ImportExport::RelationFactory, lib: true do 'tag_push_events' => false, 'note_events' => true, 'enable_ssl_verification' => true, - 'build_events' => false, + 'job_events' => false, 'wiki_page_events' => true, 'token' => token } @@ -60,7 +60,7 @@ describe Gitlab::ImportExport::RelationFactory, lib: true do end context 'original service exists' do - let(:service_id) { Service.create(project: project).id } + let(:service_id) { create(:service, project: project).id } it 'does not have the original service_id' do expect(created_object.service_id).not_to eq(service_id) @@ -95,7 +95,7 @@ describe Gitlab::ImportExport::RelationFactory, lib: true do 'random_id' => 99, 'milestone_id' => 99, 'project_id' => 99, - 'user_id' => 99, + 'user_id' => 99 } end diff --git a/spec/lib/gitlab/import_export/repo_bundler_spec.rb b/spec/lib/gitlab/import_export/repo_saver_spec.rb index a7f4e11271e..a7f4e11271e 100644 --- a/spec/lib/gitlab/import_export/repo_bundler_spec.rb +++ b/spec/lib/gitlab/import_export/repo_saver_spec.rb diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml index 1ad16a9b57d..c22fba11225 100644 --- a/spec/lib/gitlab/import_export/safe_model_attributes.yml +++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml @@ -23,6 +23,8 @@ Issue: - weight - time_estimate - relative_position +- last_edited_at +- last_edited_by_id Event: - id - target_type @@ -154,6 +156,9 @@ MergeRequest: - approvals_before_merge - rebase_commit_sha - time_estimate +- last_edited_at +- 
last_edited_by_id +- head_pipeline_id MergeRequestDiff: - id - state @@ -183,6 +188,8 @@ Ci::Pipeline: - duration - user_id - lock_version +- auto_canceled_by_id +- pipeline_schedule_id CommitStatus: - id - project_id @@ -223,6 +230,8 @@ CommitStatus: - token - lock_version - coverage_regex +- auto_canceled_by_id +- retried Ci::Variable: - id - project_id @@ -240,6 +249,20 @@ Ci::Trigger: - updated_at - owner_id - description +- ref +Ci::PipelineSchedule: +- id +- description +- ref +- cron +- cron_timezone +- next_run_at +- project_id +- owner_id +- active +- deleted_at +- created_at +- updated_at DeployKey: - id - user_id @@ -269,7 +292,7 @@ Service: - tag_push_events - note_events - pipeline_events -- build_events +- job_events - category - default - wiki_page_events @@ -289,17 +312,24 @@ ProjectHook: - note_events - pipeline_events - enable_ssl_verification -- build_events +- job_events - wiki_page_events - token - group_id - confidential_issues_events +- repository_update_events ProtectedBranch: - id - project_id - name - created_at - updated_at +ProtectedTag: +- id +- project_id +- name +- created_at +- updated_at Project: - description - issues_enabled @@ -308,6 +338,29 @@ Project: - snippets_enabled - visibility_level - archived +- created_at +- updated_at +- last_activity_at +- star_count +- ci_id +- shared_runners_enabled +- build_coverage_regex +- build_allow_git_fetchs +- build_timeout +- pending_delete +- public_builds +- last_repository_check_failed +- last_repository_check_at +- container_registry_enabled +- only_allow_merge_if_pipeline_succeeds +- has_external_issue_tracker +- request_access_enabled +- has_external_wiki +- only_allow_merge_if_all_discussions_are_resolved +- auto_cancel_pending_pipelines +- printing_merge_request_link_enabled +- build_allow_git_fetch +- last_repository_updated_at Author: - name ProjectFeature: @@ -333,6 +386,14 @@ ProtectedBranch::PushAccessLevel: - access_level - created_at - updated_at +ProtectedTag::CreateAccessLevel: +- id +- protected_tag_id +- access_level +- created_at +- updated_at +- user_id +- group_id AwardEmoji: - id - user_id diff --git a/spec/lib/gitlab/import_export/wiki_repo_bundler_spec.rb b/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb index 071e5fac3f0..071e5fac3f0 100644 --- a/spec/lib/gitlab/import_export/wiki_repo_bundler_spec.rb +++ b/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb diff --git a/spec/lib/gitlab/issuable_sorter_spec.rb b/spec/lib/gitlab/issuable_sorter_spec.rb new file mode 100644 index 00000000000..c9a434b2bcf --- /dev/null +++ b/spec/lib/gitlab/issuable_sorter_spec.rb @@ -0,0 +1,62 @@ +require 'spec_helper' + +describe Gitlab::IssuableSorter, lib: true do + let(:namespace1) { build(:namespace, id: 1) } + let(:project1) { build(:project, id: 1, namespace: namespace1) } + + let(:project2) { build(:project, id: 2, path: "a", namespace: project1.namespace) } + let(:project3) { build(:project, id: 3, path: "b", namespace: project1.namespace) } + + let(:namespace2) { build(:namespace, id: 2, path: "a") } + let(:namespace3) { build(:namespace, id: 3, path: "b") } + let(:project4) { build(:project, id: 4, path: "a", namespace: namespace2) } + let(:project5) { build(:project, id: 5, path: "b", namespace: namespace2) } + let(:project6) { build(:project, id: 6, path: "a", namespace: namespace3) } + + let(:unsorted) { [sorted[2], sorted[3], sorted[0], sorted[1]] } + + let(:sorted) do + [build(:issue, iid: 1, project: project1), + build(:issue, iid: 2, project: project1), + build(:issue, iid: 10, 
project: project1), + build(:issue, iid: 20, project: project1)] + end + + it 'sorts references by a given key' do + expect(described_class.sort(project1, unsorted)).to eq(sorted) + end + + context 'for JIRA issues' do + let(:sorted) do + [ExternalIssue.new('JIRA-1', project1), + ExternalIssue.new('JIRA-2', project1), + ExternalIssue.new('JIRA-10', project1), + ExternalIssue.new('JIRA-20', project1)] + end + + it 'sorts references by a given key' do + expect(described_class.sort(project1, unsorted)).to eq(sorted) + end + end + + context 'for references from multiple projects and namespaces' do + let(:sorted) do + [build(:issue, iid: 1, project: project1), + build(:issue, iid: 2, project: project1), + build(:issue, iid: 10, project: project1), + build(:issue, iid: 1, project: project2), + build(:issue, iid: 1, project: project3), + build(:issue, iid: 1, project: project4), + build(:issue, iid: 1, project: project5), + build(:issue, iid: 1, project: project6)] + end + let(:unsorted) do + [sorted[3], sorted[1], sorted[4], sorted[2], + sorted[6], sorted[5], sorted[0], sorted[7]] + end + + it 'sorts references by project and then by a given key' do + expect(subject.sort(project1, unsorted)).to eq(sorted) + end + end +end diff --git a/spec/lib/gitlab/ldap/person_spec.rb b/spec/lib/gitlab/ldap/person_spec.rb index 9a556cde5d5..087c4d8c92c 100644 --- a/spec/lib/gitlab/ldap/person_spec.rb +++ b/spec/lib/gitlab/ldap/person_spec.rb @@ -20,7 +20,7 @@ describe Gitlab::LDAP::Person do it 'uses the configured name attribute and handles values as an array' do name = 'John Doe' entry['cn'] = [name] - person = Gitlab::LDAP::Person.new(entry, 'ldapmain') + person = described_class.new(entry, 'ldapmain') expect(person.name).to eq(name) end @@ -30,7 +30,7 @@ describe Gitlab::LDAP::Person do it 'returns the value of mail, if present' do mail = 'john@example.com' entry['mail'] = mail - person = Gitlab::LDAP::Person.new(entry, 'ldapmain') + person = described_class.new(entry, 'ldapmain') expect(person.email).to eq([mail]) end @@ -38,7 +38,7 @@ describe Gitlab::LDAP::Person do it 'returns the value of userPrincipalName, if mail and email are not present' do user_principal_name = 'john.doe@example.com' entry['userPrincipalName'] = user_principal_name - person = Gitlab::LDAP::Person.new(entry, 'ldapmain') + person = described_class.new(entry, 'ldapmain') expect(person.email).to eq([user_principal_name]) end diff --git a/spec/lib/gitlab/ldap/user_spec.rb b/spec/lib/gitlab/ldap/user_spec.rb index 346cf0d117c..f4aab429931 100644 --- a/spec/lib/gitlab/ldap/user_spec.rb +++ b/spec/lib/gitlab/ldap/user_spec.rb @@ -108,6 +108,31 @@ describe Gitlab::LDAP::User, lib: true do it "creates a new user if not found" do expect{ ldap_user.save }.to change{ User.count }.by(1) end + + context 'when signup is disabled' do + before do + stub_application_setting signup_enabled: false + end + + it 'creates the user' do + ldap_user.save + + expect(gl_user).to be_persisted + end + end + + context 'when user confirmation email is enabled' do + before do + stub_application_setting send_user_confirmation_email: true + end + + it 'creates and confirms the user anyway' do + ldap_user.save + + expect(gl_user).to be_persisted + expect(gl_user).to be_confirmed + end + end end describe 'updating email' do diff --git a/spec/lib/gitlab/metrics_spec.rb b/spec/lib/gitlab/metrics_spec.rb index ab6e311b1e8..208a8d028cd 100644 --- a/spec/lib/gitlab/metrics_spec.rb +++ b/spec/lib/gitlab/metrics_spec.rb @@ -20,7 +20,7 @@ describe Gitlab::Metrics do 
expect(pool).to receive(:with).and_yield(connection) expect(connection).to receive(:write_points).with(an_instance_of(Array)) - expect(Gitlab::Metrics).to receive(:pool).and_return(pool) + expect(described_class).to receive(:pool).and_return(pool) described_class.submit_metrics([{ 'series' => 'kittens', 'tags' => {} }]) end @@ -64,7 +64,7 @@ describe Gitlab::Metrics do describe '.measure' do context 'without a transaction' do it 'returns the return value of the block' do - val = Gitlab::Metrics.measure(:foo) { 10 } + val = described_class.measure(:foo) { 10 } expect(val).to eq(10) end @@ -74,7 +74,7 @@ describe Gitlab::Metrics do let(:transaction) { Gitlab::Metrics::Transaction.new } before do - allow(Gitlab::Metrics).to receive(:current_transaction). + allow(described_class).to receive(:current_transaction). and_return(transaction) end @@ -88,11 +88,11 @@ describe Gitlab::Metrics do expect(transaction).to receive(:increment). with('foo_call_count', 1) - Gitlab::Metrics.measure(:foo) { 10 } + described_class.measure(:foo) { 10 } end it 'returns the return value of the block' do - val = Gitlab::Metrics.measure(:foo) { 10 } + val = described_class.measure(:foo) { 10 } expect(val).to eq(10) end @@ -105,7 +105,7 @@ describe Gitlab::Metrics do expect_any_instance_of(Gitlab::Metrics::Transaction). not_to receive(:add_tag) - Gitlab::Metrics.tag_transaction(:foo, 'bar') + described_class.tag_transaction(:foo, 'bar') end end @@ -113,13 +113,13 @@ describe Gitlab::Metrics do let(:transaction) { Gitlab::Metrics::Transaction.new } it 'adds the tag to the transaction' do - expect(Gitlab::Metrics).to receive(:current_transaction). + expect(described_class).to receive(:current_transaction). and_return(transaction) expect(transaction).to receive(:add_tag). with(:foo, 'bar') - Gitlab::Metrics.tag_transaction(:foo, 'bar') + described_class.tag_transaction(:foo, 'bar') end end end @@ -130,7 +130,7 @@ describe Gitlab::Metrics do expect_any_instance_of(Gitlab::Metrics::Transaction). not_to receive(:action=) - Gitlab::Metrics.action = 'foo' + described_class.action = 'foo' end end @@ -138,12 +138,12 @@ describe Gitlab::Metrics do it 'sets the action of a transaction' do trans = Gitlab::Metrics::Transaction.new - expect(Gitlab::Metrics).to receive(:current_transaction). + expect(described_class).to receive(:current_transaction). and_return(trans) expect(trans).to receive(:action=).with('foo') - Gitlab::Metrics.action = 'foo' + described_class.action = 'foo' end end end @@ -160,7 +160,7 @@ describe Gitlab::Metrics do expect_any_instance_of(Gitlab::Metrics::Transaction). not_to receive(:add_event) - Gitlab::Metrics.add_event(:meow) + described_class.add_event(:meow) end end @@ -170,10 +170,10 @@ describe Gitlab::Metrics do expect(transaction).to receive(:add_event).with(:meow) - expect(Gitlab::Metrics).to receive(:current_transaction). + expect(described_class).to receive(:current_transaction). 
and_return(transaction) - Gitlab::Metrics.add_event(:meow) + described_class.add_event(:meow) end end end diff --git a/spec/lib/gitlab/o_auth/user_spec.rb b/spec/lib/gitlab/o_auth/user_spec.rb index 6c84a4c8b73..828c953197d 100644 --- a/spec/lib/gitlab/o_auth/user_spec.rb +++ b/spec/lib/gitlab/o_auth/user_spec.rb @@ -40,6 +40,44 @@ describe Gitlab::OAuth::User, lib: true do let(:provider) { 'twitter' } describe 'signup' do + context 'when signup is disabled' do + before do + stub_application_setting signup_enabled: false + end + + it 'creates the user' do + stub_omniauth_config(allow_single_sign_on: ['twitter']) + + oauth_user.save + + expect(gl_user).to be_persisted + end + end + + context 'when user confirmation email is enabled' do + before do + stub_application_setting send_user_confirmation_email: true + end + + it 'creates and confirms the user anyway' do + stub_omniauth_config(allow_single_sign_on: ['twitter']) + + oauth_user.save + + expect(gl_user).to be_persisted + expect(gl_user).to be_confirmed + end + end + + it 'marks user as having password_automatically_set' do + stub_omniauth_config(allow_single_sign_on: ['twitter'], external_providers: ['twitter']) + + oauth_user.save + + expect(gl_user).to be_persisted + expect(gl_user).to be_password_automatically_set + end + shared_examples 'to verify compliance with allow_single_sign_on' do context 'provider is marked as external' do it 'marks user as external' do diff --git a/spec/lib/gitlab/other_markup.rb b/spec/lib/gitlab/other_markup_spec.rb index 8f5a353b381..c0f5fa9dc1f 100644 --- a/spec/lib/gitlab/other_markup.rb +++ b/spec/lib/gitlab/other_markup_spec.rb @@ -1,17 +1,19 @@ require 'spec_helper' describe Gitlab::OtherMarkup, lib: true do + let(:context) { {} } + context "XSS Checks" do links = { 'links' => { file: 'file.rdoc', input: 'XSS[JaVaScriPt:alert(1)]', - output: '<p><a>XSS</a></p>' + output: "\n" + '<p><a>XSS</a></p>' + "\n" } } links.each do |name, data| it "does not convert dangerous #{name} into HTML" do - expect(render(data[:file], data[:input], context)).to eql data[:output] + expect(render(data[:file], data[:input], context)).to eq(data[:output]) end end end diff --git a/spec/lib/gitlab/polling_interval_spec.rb b/spec/lib/gitlab/polling_interval_spec.rb new file mode 100644 index 00000000000..5ea8ecb1c30 --- /dev/null +++ b/spec/lib/gitlab/polling_interval_spec.rb @@ -0,0 +1,34 @@ +require 'spec_helper' + +describe Gitlab::PollingInterval, lib: true do + let(:polling_interval) { described_class } + + describe '.set_header' do + let(:headers) { {} } + let(:response) { double(headers: headers) } + + context 'when polling is disabled' do + before do + stub_application_setting(polling_interval_multiplier: 0) + end + + it 'sets value to -1' do + polling_interval.set_header(response, interval: 10_000) + + expect(headers['Poll-Interval']).to eq('-1') + end + end + + context 'when polling is enabled' do + before do + stub_application_setting(polling_interval_multiplier: 0.33333) + end + + it 'applies modifier to base interval' do + polling_interval.set_header(response, interval: 10_000) + + expect(headers['Poll-Interval']).to eq('3333') + end + end + end +end diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb index 9a8096208db..1b8690ba613 100644 --- a/spec/lib/gitlab/project_search_results_spec.rb +++ b/spec/lib/gitlab/project_search_results_spec.rb @@ -22,11 +22,40 @@ describe Gitlab::ProjectSearchResults, lib: true do end describe 'blob search' do - 
let(:project) { create(:project, :repository) } - let(:results) { described_class.new(user, project, 'files').objects('blobs') } + let(:project) { create(:project, :public, :repository) } + + subject(:results) { described_class.new(user, project, 'files').objects('blobs') } + + context 'when repository is disabled' do + let(:project) { create(:project, :public, :repository, :repository_disabled) } + + it 'hides blobs from members' do + project.add_reporter(user) + + is_expected.to be_empty + end + + it 'hides blobs from non-members' do + is_expected.to be_empty + end + end + + context 'when repository is internal' do + let(:project) { create(:project, :public, :repository, :repository_private) } + + it 'finds blobs for members' do + project.add_reporter(user) + + is_expected.not_to be_empty + end + + it 'hides blobs from non-members' do + is_expected.to be_empty + end + end it 'finds by name' do - expect(results).to include(["files/images/wm.svg", nil]) + expect(results.map(&:first)).to include('files/images/wm.svg') end it 'finds by content' do @@ -41,8 +70,10 @@ describe Gitlab::ProjectSearchResults, lib: true do subject { described_class.parse_search_result(search_result) } - it "returns a valid OpenStruct object" do - is_expected.to be_an OpenStruct + it "returns a valid FoundBlob" do + is_expected.to be_an Gitlab::SearchResults::FoundBlob + expect(subject.id).to be_nil + expect(subject.path).to eq('CHANGELOG') expect(subject.filename).to eq('CHANGELOG') expect(subject.basename).to eq('CHANGELOG') expect(subject.ref).to eq('master') @@ -53,6 +84,7 @@ describe Gitlab::ProjectSearchResults, lib: true do context "when filename has extension" do let(:search_result) { "master:CONTRIBUTE.md:5:- [Contribute to GitLab](#contribute-to-gitlab)\n" } + it { expect(subject.path).to eq('CONTRIBUTE.md') } it { expect(subject.filename).to eq('CONTRIBUTE.md') } it { expect(subject.basename).to eq('CONTRIBUTE') } end @@ -60,12 +92,53 @@ describe Gitlab::ProjectSearchResults, lib: true do context "when file under directory" do let(:search_result) { "master:a/b/c.md:5:a b c\n" } + it { expect(subject.path).to eq('a/b/c.md') } it { expect(subject.filename).to eq('a/b/c.md') } it { expect(subject.basename).to eq('a/b/c') } end end end + describe 'wiki search' do + let(:project) { create(:project, :public) } + let(:wiki) { build(:project_wiki, project: project) } + let!(:wiki_page) { wiki.create_page('Title', 'Content') } + + subject(:results) { described_class.new(user, project, 'Content').objects('wiki_blobs') } + + context 'when wiki is disabled' do + let(:project) { create(:project, :public, :wiki_disabled) } + + it 'hides wiki blobs from members' do + project.add_reporter(user) + + is_expected.to be_empty + end + + it 'hides wiki blobs from non-members' do + is_expected.to be_empty + end + end + + context 'when wiki is internal' do + let(:project) { create(:project, :public, :wiki_private) } + + it 'finds wiki blobs for members' do + project.add_reporter(user) + + is_expected.not_to be_empty + end + + it 'hides wiki blobs from non-members' do + is_expected.to be_empty + end + end + + it 'finds by content' do + expect(results).to include("master:Title.md:1:Content\n") + end + end + it 'does not list issues on private projects' do issue = create(:issue, project: project) @@ -75,7 +148,6 @@ describe Gitlab::ProjectSearchResults, lib: true do end describe 'confidential issues' do - let(:project) { create(:empty_project) } let(:query) { 'issue' } let(:author) { create(:user) } let(:assignee) { create(:user) 
} @@ -85,7 +157,7 @@ describe Gitlab::ProjectSearchResults, lib: true do let(:project) { create(:empty_project, :internal) } let!(:issue) { create(:issue, project: project, title: 'Issue 1') } let!(:security_issue_1) { create(:issue, :confidential, project: project, title: 'Security issue 1', author: author) } - let!(:security_issue_2) { create(:issue, :confidential, title: 'Security issue 2', project: project, assignee: assignee) } + let!(:security_issue_2) { create(:issue, :confidential, title: 'Security issue 2', project: project, assignees: [assignee]) } it 'does not list project confidential issues for non project members' do results = described_class.new(non_member, project, query) @@ -273,6 +345,7 @@ describe Gitlab::ProjectSearchResults, lib: true do context 'by commit hash' do let(:project) { create(:project, :public, :repository) } let(:commit) { project.repository.commit('0b4bc9a') } + commit_hashes = { short: '0b4bc9a', full: '0b4bc9a49b562e85de7cc9e834518ea6828729b9' } commit_hashes.each do |type, commit_hash| diff --git a/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb new file mode 100644 index 00000000000..d957dd932c4 --- /dev/null +++ b/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb @@ -0,0 +1,37 @@ +require 'spec_helper' + +describe Gitlab::Prometheus::Queries::DeploymentQuery, lib: true do + let(:environment) { create(:environment, slug: 'environment-slug') } + let(:deployment) { create(:deployment, environment: environment) } + + let(:client) { double('prometheus_client') } + subject { described_class.new(client) } + + around do |example| + time_without_subsecond_values = Time.local(2008, 9, 1, 12, 0, 0) + Timecop.freeze(time_without_subsecond_values) { example.run } + end + + it 'sends appropriate queries to prometheus' do + start_time = (deployment.created_at - 30.minutes).to_f + stop_time = (deployment.created_at + 30.minutes).to_f + created_at = deployment.created_at.to_f + + expect(client).to receive(:query_range).with('avg(container_memory_usage_bytes{container_name!="POD",environment="environment-slug"}) / 2^20', + start: start_time, stop: stop_time) + expect(client).to receive(:query).with('avg(avg_over_time(container_memory_usage_bytes{container_name!="POD",environment="environment-slug"}[30m]))', + time: created_at) + expect(client).to receive(:query).with('avg(avg_over_time(container_memory_usage_bytes{container_name!="POD",environment="environment-slug"}[30m]))', + time: stop_time) + + expect(client).to receive(:query_range).with('avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="environment-slug"}[2m])) * 100', + start: start_time, stop: stop_time) + expect(client).to receive(:query).with('avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="environment-slug"}[30m])) * 100', + time: created_at) + expect(client).to receive(:query).with('avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="environment-slug"}[30m])) * 100', + time: stop_time) + + expect(subject.query(deployment.id)).to eq(memory_values: nil, memory_before: nil, memory_after: nil, + cpu_values: nil, cpu_before: nil, cpu_after: nil) + end +end diff --git a/spec/lib/gitlab/prometheus_spec.rb b/spec/lib/gitlab/prometheus_client_spec.rb index 280264188e2..2d8bd2f6b97 100644 --- a/spec/lib/gitlab/prometheus_spec.rb +++ b/spec/lib/gitlab/prometheus_client_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::Prometheus, lib: 
true do +describe Gitlab::PrometheusClient, lib: true do include PrometheusHelpers subject { described_class.new(api_url: 'https://prometheus.example.com') } @@ -49,9 +49,43 @@ describe Gitlab::Prometheus, lib: true do end end + describe 'failure to reach a provided prometheus url' do + let(:prometheus_url) {"https://prometheus.invalid.example.com"} + + context 'exceptions are raised' do + it 'raises a Gitlab::PrometheusError error when a SocketError is rescued' do + req_stub = stub_prometheus_request_with_exception(prometheus_url, SocketError) + + expect { subject.send(:get, prometheus_url) } + .to raise_error(Gitlab::PrometheusError, "Can't connect to #{prometheus_url}") + expect(req_stub).to have_been_requested + end + + it 'raises a Gitlab::PrometheusError error when a SSLError is rescued' do + req_stub = stub_prometheus_request_with_exception(prometheus_url, OpenSSL::SSL::SSLError) + + expect { subject.send(:get, prometheus_url) } + .to raise_error(Gitlab::PrometheusError, "#{prometheus_url} contains invalid SSL data") + expect(req_stub).to have_been_requested + end + + it 'raises a Gitlab::PrometheusError error when a HTTParty::Error is rescued' do + req_stub = stub_prometheus_request_with_exception(prometheus_url, HTTParty::Error) + + expect { subject.send(:get, prometheus_url) } + .to raise_error(Gitlab::PrometheusError, "Network connection error") + expect(req_stub).to have_been_requested + end + end + end + describe '#query' do let(:prometheus_query) { prometheus_cpu_query('env-slug') } - let(:query_url) { prometheus_query_url(prometheus_query) } + let(:query_url) { prometheus_query_with_time_url(prometheus_query, Time.now.utc) } + + around do |example| + Timecop.freeze { example.run } + end context 'when request returns vector results' do it 'returns data from the API call' do @@ -93,6 +127,20 @@ describe Gitlab::Prometheus, lib: true do Timecop.freeze { example.run } end + context 'when non utc time is passed' do + let(:time_stop) { Time.now.in_time_zone("Warsaw") } + let(:time_start) { time_stop - 8.hours } + + let(:query_url) { prometheus_query_range_url(prometheus_query, start: time_start.utc.to_f, stop: time_stop.utc.to_f) } + + it 'passed dates are properly converted to utc' do + req_stub = stub_prometheus_request(query_url, body: prometheus_values_body('vector')) + + subject.query_range(prometheus_query, start: time_start, stop: time_stop) + expect(req_stub).to have_been_requested + end + end + context 'when a start time is passed' do let(:query_url) { prometheus_query_range_url(prometheus_query, start: 2.hours.ago) } diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb index ba45e2d758c..72e947f2cc2 100644 --- a/spec/lib/gitlab/regex_spec.rb +++ b/spec/lib/gitlab/regex_spec.rb @@ -32,12 +32,6 @@ describe Gitlab::Regex, lib: true do it { is_expected.to match('foo@bar') } end - describe '.file_path_regex' do - subject { described_class.file_path_regex } - - it { is_expected.to match('foo@/bar') } - end - describe '.environment_slug_regex' do subject { described_class.environment_slug_regex } @@ -51,8 +45,8 @@ describe Gitlab::Regex, lib: true do it { is_expected.not_to match('foo-') } end - describe 'FULL_NAMESPACE_REGEX_STR' do - subject { %r{\A#{Gitlab::Regex::FULL_NAMESPACE_REGEX_STR}\z} } + describe '.full_namespace_regex' do + subject { described_class.full_namespace_regex } it { is_expected.to match('gitlab.org') } it { is_expected.to match('gitlab.org/gitlab-git') } diff --git a/spec/lib/gitlab/repo_path_spec.rb 
b/spec/lib/gitlab/repo_path_spec.rb index 0fb5d7646f2..f9025397107 100644 --- a/spec/lib/gitlab/repo_path_spec.rb +++ b/spec/lib/gitlab/repo_path_spec.rb @@ -1,11 +1,35 @@ require 'spec_helper' describe ::Gitlab::RepoPath do + describe '.parse' do + set(:project) { create(:project) } + + it 'parses a full repository path' do + expect(described_class.parse(project.repository.path)).to eq([project, false]) + end + + it 'parses a full wiki path' do + expect(described_class.parse(project.wiki.repository.path)).to eq([project, true]) + end + + it 'parses a relative repository path' do + expect(described_class.parse(project.full_path + '.git')).to eq([project, false]) + end + + it 'parses a relative wiki path' do + expect(described_class.parse(project.full_path + '.wiki.git')).to eq([project, true]) + end + + it 'parses a relative path starting with /' do + expect(described_class.parse('/' + project.full_path + '.git')).to eq([project, false]) + end + end + describe '.strip_storage_path' do before do allow(Gitlab.config.repositories).to receive(:storages).and_return({ 'storage1' => { 'path' => '/foo' }, - 'storage2' => { 'path' => '/bar' }, + 'storage2' => { 'path' => '/bar' } }) end diff --git a/spec/lib/gitlab/request_profiler_spec.rb b/spec/lib/gitlab/request_profiler_spec.rb new file mode 100644 index 00000000000..ae9c06ebb7d --- /dev/null +++ b/spec/lib/gitlab/request_profiler_spec.rb @@ -0,0 +1,27 @@ +require 'spec_helper' + +describe Gitlab::RequestProfiler, lib: true do + describe '.profile_token' do + it 'returns a token' do + expect(described_class.profile_token).to be_present + end + + it 'caches the token' do + expect(Rails.cache).to receive(:fetch).with('profile-token') + + described_class.profile_token + end + end + + describe '.remove_all_profiles' do + it 'removes Gitlab::RequestProfiler::PROFILES_DIR directory' do + dir = described_class::PROFILES_DIR + FileUtils.mkdir_p(dir) + + expect(Dir.exist?(dir)).to be true + + described_class.remove_all_profiles + expect(Dir.exist?(dir)).to be false + end + end +end diff --git a/spec/lib/gitlab/saml/user_spec.rb b/spec/lib/gitlab/saml/user_spec.rb index 4f6ef3c10fc..b106d156b75 100644 --- a/spec/lib/gitlab/saml/user_spec.rb +++ b/spec/lib/gitlab/saml/user_spec.rb @@ -211,6 +211,31 @@ describe Gitlab::Saml::User, lib: true do end end end + + context 'when signup is disabled' do + before do + stub_application_setting signup_enabled: false + end + + it 'creates the user' do + saml_user.save + + expect(gl_user).to be_persisted + end + end + + context 'when user confirmation email is enabled' do + before do + stub_application_setting send_user_confirmation_email: true + end + + it 'creates and confirms the user anyway' do + saml_user.save + + expect(gl_user).to be_persisted + expect(gl_user).to be_confirmed + end + end end describe 'blocking' do diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb index 847fb977400..31c3cd4d53c 100644 --- a/spec/lib/gitlab/search_results_spec.rb +++ b/spec/lib/gitlab/search_results_spec.rb @@ -72,9 +72,9 @@ describe Gitlab::SearchResults do let(:admin) { create(:admin) } let!(:issue) { create(:issue, project: project_1, title: 'Issue 1') } let!(:security_issue_1) { create(:issue, :confidential, project: project_1, title: 'Security issue 1', author: author) } - let!(:security_issue_2) { create(:issue, :confidential, title: 'Security issue 2', project: project_1, assignee: assignee) } + let!(:security_issue_2) { create(:issue, :confidential, title: 'Security issue 2', 
project: project_1, assignees: [assignee]) } let!(:security_issue_3) { create(:issue, :confidential, project: project_2, title: 'Security issue 3', author: author) } - let!(:security_issue_4) { create(:issue, :confidential, project: project_3, title: 'Security issue 4', assignee: assignee) } + let!(:security_issue_4) { create(:issue, :confidential, project: project_3, title: 'Security issue 4', assignees: [assignee]) } let!(:security_issue_5) { create(:issue, :confidential, project: project_4, title: 'Security issue 5') } it 'does not list confidential issues for non project members' do diff --git a/spec/lib/gitlab/backend/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb index 4b08a02ec73..a97a0f8452b 100644 --- a/spec/lib/gitlab/backend/shell_spec.rb +++ b/spec/lib/gitlab/shell_spec.rb @@ -69,6 +69,15 @@ describe Gitlab::Shell, lib: true do expect(io).to have_received(:puts).with("key-42\tssh-rsa foo") end + it 'handles multiple spaces in the key' do + io = spy(:io) + adder = described_class.new(io) + + adder.add_key('key-42', "ssh-rsa foo") + + expect(io).to have_received(:puts).with("key-42\tssh-rsa foo") + end + it 'raises an exception if the key contains a tab' do expect do described_class.new(StringIO.new).add_key('key-42', "ssh-rsa\tfoobar") @@ -82,4 +91,45 @@ describe Gitlab::Shell, lib: true do end end end + + describe 'projects commands' do + let(:projects_path) { 'tmp/tests/shell-projects-test/bin/gitlab-projects' } + + before do + allow(Gitlab.config.gitlab_shell).to receive(:path).and_return('tmp/tests/shell-projects-test') + allow(Gitlab.config.gitlab_shell).to receive(:git_timeout).and_return(800) + end + + describe '#fetch_remote' do + it 'returns true when the command succeeds' do + expect(Gitlab::Popen).to receive(:popen) + .with([projects_path, 'fetch-remote', 'current/storage', 'project/path.git', 'new/storage', '800']).and_return([nil, 0]) + + expect(gitlab_shell.fetch_remote('current/storage', 'project/path', 'new/storage')).to be true + end + + it 'raises an exception when the command fails' do + expect(Gitlab::Popen).to receive(:popen) + .with([projects_path, 'fetch-remote', 'current/storage', 'project/path.git', 'new/storage', '800']).and_return(["error", 1]) + + expect { gitlab_shell.fetch_remote('current/storage', 'project/path', 'new/storage') }.to raise_error(Gitlab::Shell::Error, "error") + end + end + + describe '#import_repository' do + it 'returns true when the command succeeds' do + expect(Gitlab::Popen).to receive(:popen) + .with([projects_path, 'import-project', 'current/storage', 'project/path.git', 'https://gitlab.com/gitlab-org/gitlab-ce.git', "800"]).and_return([nil, 0]) + + expect(gitlab_shell.import_repository('current/storage', 'project/path', 'https://gitlab.com/gitlab-org/gitlab-ce.git')).to be true + end + + it 'raises an exception when the command fails' do + expect(Gitlab::Popen).to receive(:popen) + .with([projects_path, 'import-project', 'current/storage', 'project/path.git', 'https://gitlab.com/gitlab-org/gitlab-ce.git', "800"]).and_return(["error", 1]) + + expect { gitlab_shell.import_repository('current/storage', 'project/path', 'https://gitlab.com/gitlab-org/gitlab-ce.git') }.to raise_error(Gitlab::Shell::Error, "error") + end + end + end end diff --git a/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb b/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb index 287bf62d9bd..6307f8c16a3 100644 --- a/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb +++ b/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb @@ -3,7 
+3,7 @@ require 'spec_helper' describe Gitlab::SidekiqStatus::ClientMiddleware do describe '#call' do it 'tracks the job in Redis' do - expect(Gitlab::SidekiqStatus).to receive(:set).with('123') + expect(Gitlab::SidekiqStatus).to receive(:set).with('123', Gitlab::SidekiqStatus::DEFAULT_EXPIRATION) described_class.new. call('Foo', { 'jid' => '123' }, double(:queue), double(:pool)) { nil } diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb index 56f06b61afb..496e50fbae4 100644 --- a/spec/lib/gitlab/sidekiq_status_spec.rb +++ b/spec/lib/gitlab/sidekiq_status_spec.rb @@ -73,4 +73,17 @@ describe Gitlab::SidekiqStatus do expect(key).to include('123') end end + + describe 'completed', :redis do + it 'returns the completed job' do + expect(described_class.completed_jids(%w(123))).to eq(['123']) + end + + it 'returns only the jobs completed' do + described_class.set('123') + described_class.set('456') + + expect(described_class.completed_jids(%w(123 456 789))).to eq(['789']) + end + end end diff --git a/spec/lib/gitlab/sidekiq_throttler_spec.rb b/spec/lib/gitlab/sidekiq_throttler_spec.rb index ff32e0e699d..6374ac80207 100644 --- a/spec/lib/gitlab/sidekiq_throttler_spec.rb +++ b/spec/lib/gitlab/sidekiq_throttler_spec.rb @@ -13,14 +13,14 @@ describe Gitlab::SidekiqThrottler do describe '#execute!' do it 'sets limits on the selected queues' do - Gitlab::SidekiqThrottler.execute! + described_class.execute! expect(Sidekiq::Queue['build'].limit).to eq 4 expect(Sidekiq::Queue['project_cache'].limit).to eq 4 end it 'does not set limits on other queues' do - Gitlab::SidekiqThrottler.execute! + described_class.execute! expect(Sidekiq::Queue['merge'].limit).to be_nil end diff --git a/spec/lib/gitlab/slash_commands/command_definition_spec.rb b/spec/lib/gitlab/slash_commands/command_definition_spec.rb index c9c2f314e57..5b9173d3d3f 100644 --- a/spec/lib/gitlab/slash_commands/command_definition_spec.rb +++ b/spec/lib/gitlab/slash_commands/command_definition_spec.rb @@ -167,6 +167,58 @@ describe Gitlab::SlashCommands::CommandDefinition do end end end + + context 'when the command defines parse_params block' do + before do + subject.parse_params_block = ->(raw) { raw.strip } + subject.action_block = ->(parsed) { self.received_arg = parsed } + end + + it 'executes the command passing the parsed param' do + subject.execute(context, {}, 'something ') + + expect(context.received_arg).to eq('something') + end + end + end + end + end + + describe '#explain' do + context 'when the command is not available' do + before do + subject.condition_block = proc { false } + subject.explanation = 'Explanation' + end + + it 'returns nil' do + result = subject.explain({}, {}, nil) + + expect(result).to be_nil + end + end + + context 'when the explanation is a static string' do + before do + subject.explanation = 'Explanation' + end + + it 'returns this static string' do + result = subject.explain({}, {}, nil) + + expect(result).to eq 'Explanation' + end + end + + context 'when the explanation is dynamic' do + before do + subject.explanation = proc { |arg| "Dynamic #{arg}" } + end + + it 'invokes the proc' do + result = subject.explain({}, {}, 'explanation') + + expect(result).to eq 'Dynamic explanation' end end end diff --git a/spec/lib/gitlab/slash_commands/dsl_spec.rb b/spec/lib/gitlab/slash_commands/dsl_spec.rb index 26217a0e3b2..33b49a5ddf9 100644 --- a/spec/lib/gitlab/slash_commands/dsl_spec.rb +++ b/spec/lib/gitlab/slash_commands/dsl_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' 
describe Gitlab::SlashCommands::Dsl do before :all do DummyClass = Struct.new(:project) do - include Gitlab::SlashCommands::Dsl + include Gitlab::SlashCommands::Dsl # rubocop:disable RSpec/DescribedClass desc 'A command with no args' command :no_args, :none do @@ -11,67 +11,99 @@ describe Gitlab::SlashCommands::Dsl do end params 'The first argument' - command :one_arg, :once, :first do |arg1| - arg1 + explanation 'Static explanation' + command :explanation_with_aliases, :once, :first do |arg| + arg end desc do "A dynamic description for #{noteable.upcase}" end params 'The first argument', 'The second argument' - command :two_args do |arg1, arg2| - [arg1, arg2] + command :dynamic_description do |args| + args.split end command :cc + explanation do |arg| + "Action does something with #{arg}" + end condition do project == 'foo' end command :cond_action do |arg| arg end + + parse_params do |raw_arg| + raw_arg.strip + end + command :with_params_parsing do |parsed| + parsed + end end end describe '.command_definitions' do it 'returns an array with commands definitions' do - no_args_def, one_arg_def, two_args_def, cc_def, cond_action_def = DummyClass.command_definitions + no_args_def, explanation_with_aliases_def, dynamic_description_def, + cc_def, cond_action_def, with_params_parsing_def = + DummyClass.command_definitions expect(no_args_def.name).to eq(:no_args) expect(no_args_def.aliases).to eq([:none]) expect(no_args_def.description).to eq('A command with no args') + expect(no_args_def.explanation).to eq('') expect(no_args_def.params).to eq([]) expect(no_args_def.condition_block).to be_nil expect(no_args_def.action_block).to be_a_kind_of(Proc) + expect(no_args_def.parse_params_block).to be_nil - expect(one_arg_def.name).to eq(:one_arg) - expect(one_arg_def.aliases).to eq([:once, :first]) - expect(one_arg_def.description).to eq('') - expect(one_arg_def.params).to eq(['The first argument']) - expect(one_arg_def.condition_block).to be_nil - expect(one_arg_def.action_block).to be_a_kind_of(Proc) + expect(explanation_with_aliases_def.name).to eq(:explanation_with_aliases) + expect(explanation_with_aliases_def.aliases).to eq([:once, :first]) + expect(explanation_with_aliases_def.description).to eq('') + expect(explanation_with_aliases_def.explanation).to eq('Static explanation') + expect(explanation_with_aliases_def.params).to eq(['The first argument']) + expect(explanation_with_aliases_def.condition_block).to be_nil + expect(explanation_with_aliases_def.action_block).to be_a_kind_of(Proc) + expect(explanation_with_aliases_def.parse_params_block).to be_nil - expect(two_args_def.name).to eq(:two_args) - expect(two_args_def.aliases).to eq([]) - expect(two_args_def.to_h(noteable: "issue")[:description]).to eq('A dynamic description for ISSUE') - expect(two_args_def.params).to eq(['The first argument', 'The second argument']) - expect(two_args_def.condition_block).to be_nil - expect(two_args_def.action_block).to be_a_kind_of(Proc) + expect(dynamic_description_def.name).to eq(:dynamic_description) + expect(dynamic_description_def.aliases).to eq([]) + expect(dynamic_description_def.to_h(noteable: 'issue')[:description]).to eq('A dynamic description for ISSUE') + expect(dynamic_description_def.explanation).to eq('') + expect(dynamic_description_def.params).to eq(['The first argument', 'The second argument']) + expect(dynamic_description_def.condition_block).to be_nil + expect(dynamic_description_def.action_block).to be_a_kind_of(Proc) + expect(dynamic_description_def.parse_params_block).to be_nil 
expect(cc_def.name).to eq(:cc) expect(cc_def.aliases).to eq([]) expect(cc_def.description).to eq('') + expect(cc_def.explanation).to eq('') expect(cc_def.params).to eq([]) expect(cc_def.condition_block).to be_nil expect(cc_def.action_block).to be_nil + expect(cc_def.parse_params_block).to be_nil expect(cond_action_def.name).to eq(:cond_action) expect(cond_action_def.aliases).to eq([]) expect(cond_action_def.description).to eq('') + expect(cond_action_def.explanation).to be_a_kind_of(Proc) expect(cond_action_def.params).to eq([]) expect(cond_action_def.condition_block).to be_a_kind_of(Proc) expect(cond_action_def.action_block).to be_a_kind_of(Proc) + expect(cond_action_def.parse_params_block).to be_nil + + expect(with_params_parsing_def.name).to eq(:with_params_parsing) + expect(with_params_parsing_def.aliases).to eq([]) + expect(with_params_parsing_def.description).to eq('') + expect(with_params_parsing_def.explanation).to eq('') + expect(with_params_parsing_def.params).to eq([]) + expect(with_params_parsing_def.condition_block).to be_nil + expect(with_params_parsing_def.action_block).to be_a_kind_of(Proc) + expect(with_params_parsing_def.parse_params_block).to be_a_kind_of(Proc) end end end diff --git a/spec/lib/gitlab/string_range_marker_spec.rb b/spec/lib/gitlab/string_range_marker_spec.rb new file mode 100644 index 00000000000..7c77772b3f6 --- /dev/null +++ b/spec/lib/gitlab/string_range_marker_spec.rb @@ -0,0 +1,36 @@ +require 'spec_helper' + +describe Gitlab::StringRangeMarker, lib: true do + describe '#mark' do + context "when the rich text is html safe" do + let(:raw) { "abc <def>" } + let(:rich) { %{<span class="abc">abc</span><span class="space"> </span><span class="def"><def></span>}.html_safe } + let(:inline_diffs) { [2..5] } + subject do + described_class.new(raw, rich).mark(inline_diffs) do |text, left:, right:| + "LEFT#{text}RIGHT" + end + end + + it 'marks the inline diffs' do + expect(subject).to eq(%{<span class="abc">abLEFTcRIGHT</span><span class="space">LEFT RIGHT</span><span class="def">LEFT<dRIGHTef></span>}) + expect(subject).to be_html_safe + end + end + + context "when the rich text is not html safe" do + let(:raw) { "abc <def>" } + let(:inline_diffs) { [2..5] } + subject do + described_class.new(raw).mark(inline_diffs) do |text, left:, right:| + "LEFT#{text}RIGHT" + end + end + + it 'marks the inline diffs' do + expect(subject).to eq(%{abLEFTc <dRIGHTef>}) + expect(subject).to be_html_safe + end + end + end +end diff --git a/spec/lib/gitlab/string_regex_marker_spec.rb b/spec/lib/gitlab/string_regex_marker_spec.rb new file mode 100644 index 00000000000..2f5cf6c6e3b --- /dev/null +++ b/spec/lib/gitlab/string_regex_marker_spec.rb @@ -0,0 +1,18 @@ +require 'spec_helper' + +describe Gitlab::StringRegexMarker, lib: true do + describe '#mark' do + let(:raw) { %{"name": "AFNetworking"} } + let(:rich) { %{<span class="key">"name"</span><span class="punctuation">: </span><span class="value">"AFNetworking"</span>}.html_safe } + subject do + described_class.new(raw, rich).mark(/"[^"]+":\s*"(?<name>[^"]+)"/, group: :name) do |text, left:, right:| + %{<a href="#">#{text}</a>} + end + end + + it 'marks the inline diffs' do + expect(subject).to eq(%{<span class="key">"name"</span><span class="punctuation">: </span><span class="value">"<a href="#">AFNetworking</a>"</span>}) + expect(subject).to be_html_safe + end + end +end diff --git a/spec/lib/gitlab/template/gitignore_template_spec.rb b/spec/lib/gitlab/template/gitignore_template_spec.rb index 9750a012e22..97797f42aaa 100644 
--- a/spec/lib/gitlab/template/gitignore_template_spec.rb +++ b/spec/lib/gitlab/template/gitignore_template_spec.rb @@ -24,7 +24,7 @@ describe Gitlab::Template::GitignoreTemplate do it 'returns the Gitignore object of a valid file' do ruby = subject.find('Ruby') - expect(ruby).to be_a Gitlab::Template::GitignoreTemplate + expect(ruby).to be_a described_class expect(ruby.name).to eq('Ruby') end end diff --git a/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb b/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb index e3b8321eda3..6541326d1de 100644 --- a/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb +++ b/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb @@ -25,7 +25,7 @@ describe Gitlab::Template::GitlabCiYmlTemplate do it 'returns the GitlabCiYml object of a valid file' do ruby = subject.find('Ruby') - expect(ruby).to be_a Gitlab::Template::GitlabCiYmlTemplate + expect(ruby).to be_a described_class expect(ruby.name).to eq('Ruby') end end diff --git a/spec/lib/gitlab/template/issue_template_spec.rb b/spec/lib/gitlab/template/issue_template_spec.rb index 9213ced7b19..329d1d74970 100644 --- a/spec/lib/gitlab/template/issue_template_spec.rb +++ b/spec/lib/gitlab/template/issue_template_spec.rb @@ -37,7 +37,7 @@ describe Gitlab::Template::IssueTemplate do it 'returns the issue object of a valid file' do ruby = subject.find('bug', project) - expect(ruby).to be_a Gitlab::Template::IssueTemplate + expect(ruby).to be_a described_class expect(ruby.name).to eq('bug') end end diff --git a/spec/lib/gitlab/template/merge_request_template_spec.rb b/spec/lib/gitlab/template/merge_request_template_spec.rb index 77dd3079e22..2b0056d9bab 100644 --- a/spec/lib/gitlab/template/merge_request_template_spec.rb +++ b/spec/lib/gitlab/template/merge_request_template_spec.rb @@ -37,7 +37,7 @@ describe Gitlab::Template::MergeRequestTemplate do it 'returns the merge request object of a valid file' do ruby = subject.find('bug', project) - expect(ruby).to be_a Gitlab::Template::MergeRequestTemplate + expect(ruby).to be_a described_class expect(ruby.name).to eq('bug') end end diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb new file mode 100644 index 00000000000..b47e1b56fa9 --- /dev/null +++ b/spec/lib/gitlab/usage_data_spec.rb @@ -0,0 +1,71 @@ +require 'spec_helper' + +describe Gitlab::UsageData do + let!(:project) { create(:empty_project) } + let!(:project2) { create(:empty_project) } + let!(:board) { create(:board, project: project) } + + describe '#data' do + subject { described_class.data } + + it "gathers usage data" do + expect(subject.keys).to match_array(%i( + active_user_count + counts + recorded_at + mattermost_enabled + edition + version + uuid + hostname + )) + end + + it "gathers usage counts" do + count_data = subject[:counts] + + expect(count_data[:boards]).to eq(1) + expect(count_data[:projects]).to eq(2) + + expect(count_data.keys).to match_array(%i( + boards + ci_builds + ci_pipelines + ci_runners + ci_triggers + ci_pipeline_schedules + deploy_keys + deployments + environments + groups + issues + keys + labels + lfs_objects + merge_requests + milestones + notes + projects + projects_prometheus_active + pages_domains + protected_branches + releases + snippets + todos + uploads + web_hooks + )) + end + end + + describe '#license_usage_data' do + subject { described_class.license_usage_data } + + it "gathers license data" do + expect(subject[:uuid]).to eq(current_application_settings.uuid) + expect(subject[:version]).to eq(Gitlab::VERSION) + 
expect(subject[:active_user_count]).to eq(User.active.count) + expect(subject[:recorded_at]).to be_a(Time) + end + end +end diff --git a/spec/lib/gitlab/user_access_spec.rb b/spec/lib/gitlab/user_access_spec.rb index 369e55f61f1..0d87cf25dbb 100644 --- a/spec/lib/gitlab/user_access_spec.rb +++ b/spec/lib/gitlab/user_access_spec.rb @@ -5,7 +5,7 @@ describe Gitlab::UserAccess, lib: true do let(:project) { create(:project) } let(:user) { create(:user) } - describe 'can_push_to_branch?' do + describe '#can_push_to_branch?' do describe 'push to none protected branch' do it 'returns true if user is a master' do project.team << [user, :master] @@ -87,10 +87,10 @@ describe Gitlab::UserAccess, lib: true do expect(access.can_push_to_branch?(branch.name)).to be_falsey end - it 'returns true if branch does not exist and user has permission to merge' do + it 'returns false if branch does not exist' do project.team << [user, :developer] - expect(access.can_push_to_branch?(not_existing_branch.name)).to be_truthy + expect(access.can_push_to_branch?(not_existing_branch.name)).to be_falsey end end @@ -142,4 +142,117 @@ describe Gitlab::UserAccess, lib: true do end end end + + describe '#can_create_tag?' do + describe 'push to none protected tag' do + it 'returns true if user is a master' do + project.add_user(user, :master) + + expect(access.can_create_tag?('random_tag')).to be_truthy + end + + it 'returns true if user is a developer' do + project.add_user(user, :developer) + + expect(access.can_create_tag?('random_tag')).to be_truthy + end + + it 'returns false if user is a reporter' do + project.add_user(user, :reporter) + + expect(access.can_create_tag?('random_tag')).to be_falsey + end + end + + describe 'push to protected tag' do + let(:tag) { create(:protected_tag, project: project, name: "test") } + let(:not_existing_tag) { create :protected_tag, project: project } + + it 'returns true if user is a master' do + project.add_user(user, :master) + + expect(access.can_create_tag?(tag.name)).to be_truthy + end + + it 'returns false if user is a developer' do + project.add_user(user, :developer) + + expect(access.can_create_tag?(tag.name)).to be_falsey + end + + it 'returns false if user is a reporter' do + project.add_user(user, :reporter) + + expect(access.can_create_tag?(tag.name)).to be_falsey + end + end + + describe 'push to protected tag if allowed for developers' do + before do + @tag = create(:protected_tag, :developers_can_create, project: project) + end + + it 'returns true if user is a master' do + project.add_user(user, :master) + + expect(access.can_create_tag?(@tag.name)).to be_truthy + end + + it 'returns true if user is a developer' do + project.add_user(user, :developer) + + expect(access.can_create_tag?(@tag.name)).to be_truthy + end + + it 'returns false if user is a reporter' do + project.add_user(user, :reporter) + + expect(access.can_create_tag?(@tag.name)).to be_falsey + end + end + end + + describe '#can_delete_branch?' 
do + describe 'delete unprotected branch' do + it 'returns true if user is a master' do + project.add_user(user, :master) + + expect(access.can_delete_branch?('random_branch')).to be_truthy + end + + it 'returns true if user is a developer' do + project.add_user(user, :developer) + + expect(access.can_delete_branch?('random_branch')).to be_truthy + end + + it 'returns false if user is a reporter' do + project.add_user(user, :reporter) + + expect(access.can_delete_branch?('random_branch')).to be_falsey + end + end + + describe 'delete protected branch' do + let(:branch) { create(:protected_branch, project: project, name: "test") } + + it 'returns true if user is a master' do + project.add_user(user, :master) + + expect(access.can_delete_branch?(branch.name)).to be_truthy + end + + it 'returns false if user is a developer' do + project.add_user(user, :developer) + + expect(access.can_delete_branch?(branch.name)).to be_falsey + end + + it 'returns false if user is a reporter' do + project.add_user(user, :reporter) + + expect(access.can_delete_branch?(branch.name)).to be_falsey + end + end + end end diff --git a/spec/lib/gitlab/user_activities_spec.rb b/spec/lib/gitlab/user_activities_spec.rb new file mode 100644 index 00000000000..187d88c8c58 --- /dev/null +++ b/spec/lib/gitlab/user_activities_spec.rb @@ -0,0 +1,127 @@ +require 'spec_helper' + +describe Gitlab::UserActivities, :redis, lib: true do + let(:now) { Time.now } + + describe '.record' do + context 'with no time given' do + it 'uses Time.now and records an activity in Redis' do + Timecop.freeze do + now # eager-load now + described_class.record(42) + end + + Gitlab::Redis.with do |redis| + expect(redis.hscan(described_class::KEY, 0)).to eq(['0', [['42', now.to_i.to_s]]]) + end + end + end + + context 'with a time given' do + it 'uses the given time and records an activity in Redis' do + described_class.record(42, now) + + Gitlab::Redis.with do |redis| + expect(redis.hscan(described_class::KEY, 0)).to eq(['0', [['42', now.to_i.to_s]]]) + end + end + end + end + + describe '.delete' do + context 'with a single key' do + context 'and key exists' do + it 'removes the pair from Redis' do + described_class.record(42, now) + + Gitlab::Redis.with do |redis| + expect(redis.hscan(described_class::KEY, 0)).to eq(['0', [['42', now.to_i.to_s]]]) + end + + subject.delete(42) + + Gitlab::Redis.with do |redis| + expect(redis.hscan(described_class::KEY, 0)).to eq(['0', []]) + end + end + end + + context 'and key does not exist' do + it 'removes the pair from Redis' do + Gitlab::Redis.with do |redis| + expect(redis.hscan(described_class::KEY, 0)).to eq(['0', []]) + end + + subject.delete(42) + + Gitlab::Redis.with do |redis| + expect(redis.hscan(described_class::KEY, 0)).to eq(['0', []]) + end + end + end + end + + context 'with multiple keys' do + context 'and all keys exist' do + it 'removes the pair from Redis' do + described_class.record(41, now) + described_class.record(42, now) + + Gitlab::Redis.with do |redis| + expect(redis.hscan(described_class::KEY, 0)).to eq(['0', [['41', now.to_i.to_s], ['42', now.to_i.to_s]]]) + end + + subject.delete(41, 42) + + Gitlab::Redis.with do |redis| + expect(redis.hscan(described_class::KEY, 0)).to eq(['0', []]) + end + end + end + + context 'and some keys does not exist' do + it 'removes the existing pair from Redis' do + described_class.record(42, now) + + Gitlab::Redis.with do |redis| + expect(redis.hscan(described_class::KEY, 0)).to eq(['0', [['42', now.to_i.to_s]]]) + end + + subject.delete(41, 42) + + 
Gitlab::Redis.with do |redis| + expect(redis.hscan(described_class::KEY, 0)).to eq(['0', []]) + end + end + end + end + end + + describe 'Enumerable' do + before do + described_class.record(40, now) + described_class.record(41, now) + described_class.record(42, now) + end + + it 'allows to read the activities sequentially' do + expected = { '40' => now.to_i.to_s, '41' => now.to_i.to_s, '42' => now.to_i.to_s } + + actual = described_class.new.each_with_object({}) do |(key, time), actual| + actual[key] = time + end + + expect(actual).to eq(expected) + end + + context 'with many records' do + before do + 1_000.times { |i| described_class.record(i, now) } + end + + it 'is possible to loop through all the records' do + expect(described_class.new.count).to eq(1_000) + end + end + end +end diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb index 535c96eeee9..fdbb55fc874 100644 --- a/spec/lib/gitlab/workhorse_spec.rb +++ b/spec/lib/gitlab/workhorse_spec.rb @@ -179,19 +179,84 @@ describe Gitlab::Workhorse, lib: true do describe '.git_http_ok' do let(:user) { create(:user) } + let(:repo_path) { repository.path_to_repo } + let(:action) { 'info_refs' } + let(:params) do + { GL_ID: "user-#{user.id}", GL_REPOSITORY: "project-#{project.id}", RepoPath: repo_path } + end + + subject { described_class.git_http_ok(repository, false, user, action) } - subject { described_class.git_http_ok(repository, user) } + it { expect(subject).to include(params) } + + context 'when is_wiki' do + let(:params) do + { GL_ID: "user-#{user.id}", GL_REPOSITORY: "wiki-#{project.id}", RepoPath: repo_path } + end - it { expect(subject).to eq({ GL_ID: "user-#{user.id}", RepoPath: repository.path_to_repo }) } + subject { described_class.git_http_ok(repository, true, user, action) } + + it { expect(subject).to include(params) } + end context 'when Gitaly is enabled' do + let(:gitaly_params) do + { + GitalyAddress: Gitlab::GitalyClient.address('default') + } + end + before do allow(Gitlab.config.gitaly).to receive(:enabled).and_return(true) end - it 'includes Gitaly params in the returned value' do - gitaly_socket_path = URI(Gitlab::GitalyClient.get_address('default')).path - expect(subject).to include({ GitalySocketPath: gitaly_socket_path }) + it 'includes a Repository param' do + repo_param = { Repository: { + path: repo_path, + storage_name: 'default', + relative_path: project.full_path + '.git' + } } + + expect(subject).to include(repo_param) + end + + context "when git_upload_pack action is passed" do + let(:action) { 'git_upload_pack' } + let(:feature_flag) { :post_upload_pack } + + context 'when action is enabled by feature flag' do + it 'includes Gitaly params in the returned value' do + allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(feature_flag).and_return(true) + + expect(subject).to include(gitaly_params) + end + end + + context 'when action is not enabled by feature flag' do + it 'does not include Gitaly params in the returned value' do + allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(feature_flag).and_return(false) + + expect(subject).not_to include(gitaly_params) + end + end + end + + context "when git_receive_pack action is passed" do + let(:action) { 'git_receive_pack' } + + it { expect(subject).not_to include(gitaly_params) } + end + + context "when info_refs action is passed" do + let(:action) { 'info_refs' } + + it { expect(subject).to include(gitaly_params) } + end + + context 'when action passed is not supported by Gitaly' do + let(:action) { 
'download' } + + it { expect { subject }.to raise_exception('Unsupported action: download') } end end end diff --git a/spec/lib/light_url_builder_spec.rb b/spec/lib/light_url_builder_spec.rb deleted file mode 100644 index 3fe8cf43934..00000000000 --- a/spec/lib/light_url_builder_spec.rb +++ /dev/null @@ -1,119 +0,0 @@ -require 'spec_helper' - -describe Gitlab::UrlBuilder, lib: true do - describe '.build' do - context 'when passing a Commit' do - it 'returns a proper URL' do - commit = build_stubbed(:commit) - - url = described_class.build(commit) - - expect(url).to eq "#{Settings.gitlab['url']}/#{commit.project.path_with_namespace}/commit/#{commit.id}" - end - end - - context 'when passing an Issue' do - it 'returns a proper URL' do - issue = build_stubbed(:issue, iid: 42) - - url = described_class.build(issue) - - expect(url).to eq "#{Settings.gitlab['url']}/#{issue.project.path_with_namespace}/issues/#{issue.iid}" - end - end - - context 'when passing a MergeRequest' do - it 'returns a proper URL' do - merge_request = build_stubbed(:merge_request, iid: 42) - - url = described_class.build(merge_request) - - expect(url).to eq "#{Settings.gitlab['url']}/#{merge_request.project.path_with_namespace}/merge_requests/#{merge_request.iid}" - end - end - - context 'when passing a Note' do - context 'on a Commit' do - it 'returns a proper URL' do - note = build_stubbed(:note_on_commit) - - url = described_class.build(note) - - expect(url).to eq "#{Settings.gitlab['url']}/#{note.project.path_with_namespace}/commit/#{note.commit_id}#note_#{note.id}" - end - end - - context 'on a Commit Diff' do - it 'returns a proper URL' do - note = build_stubbed(:diff_note_on_commit) - - url = described_class.build(note) - - expect(url).to eq "#{Settings.gitlab['url']}/#{note.project.path_with_namespace}/commit/#{note.commit_id}#note_#{note.id}" - end - end - - context 'on an Issue' do - it 'returns a proper URL' do - issue = create(:issue, iid: 42) - note = build_stubbed(:note_on_issue, noteable: issue) - - url = described_class.build(note) - - expect(url).to eq "#{Settings.gitlab['url']}/#{issue.project.path_with_namespace}/issues/#{issue.iid}#note_#{note.id}" - end - end - - context 'on a MergeRequest' do - it 'returns a proper URL' do - merge_request = create(:merge_request, iid: 42) - note = build_stubbed(:note_on_merge_request, noteable: merge_request) - - url = described_class.build(note) - - expect(url).to eq "#{Settings.gitlab['url']}/#{merge_request.project.path_with_namespace}/merge_requests/#{merge_request.iid}#note_#{note.id}" - end - end - - context 'on a MergeRequest Diff' do - it 'returns a proper URL' do - merge_request = create(:merge_request, iid: 42) - note = build_stubbed(:diff_note_on_merge_request, noteable: merge_request) - - url = described_class.build(note) - - expect(url).to eq "#{Settings.gitlab['url']}/#{merge_request.project.path_with_namespace}/merge_requests/#{merge_request.iid}#note_#{note.id}" - end - end - - context 'on a ProjectSnippet' do - it 'returns a proper URL' do - project_snippet = create(:project_snippet) - note = build_stubbed(:note_on_project_snippet, noteable: project_snippet) - - url = described_class.build(note) - - expect(url).to eq "#{Settings.gitlab['url']}/#{project_snippet.project.path_with_namespace}/snippets/#{note.noteable_id}#note_#{note.id}" - end - end - - context 'on another object' do - it 'returns a proper URL' do - project = build_stubbed(:empty_project) - - expect { described_class.build(project) }. 
- to raise_error(NotImplementedError, 'No URL builder defined for Project') - end - end - end - - context 'when passing a WikiPage' do - it 'returns a proper URL' do - wiki_page = build(:wiki_page) - url = described_class.build(wiki_page) - - expect(url).to eq "#{Gitlab.config.gitlab.url}#{wiki_page.wiki.wiki_base_path}/#{wiki_page.slug}" - end - end - end -end diff --git a/spec/lib/microsoft_teams/activity_spec.rb b/spec/lib/microsoft_teams/activity_spec.rb new file mode 100644 index 00000000000..7890ae2e7b0 --- /dev/null +++ b/spec/lib/microsoft_teams/activity_spec.rb @@ -0,0 +1,16 @@ +require 'spec_helper' + +describe MicrosoftTeams::Activity do + subject { described_class.new(title: 'title', subtitle: 'subtitle', text: 'text', image: 'image') } + + describe '#prepare' do + it 'returns the correct JSON object' do + expect(subject.prepare).to eq({ + 'activityTitle' => 'title', + 'activitySubtitle' => 'subtitle', + 'activityText' => 'text', + 'activityImage' => 'image' + }) + end + end +end diff --git a/spec/lib/microsoft_teams/notifier_spec.rb b/spec/lib/microsoft_teams/notifier_spec.rb new file mode 100644 index 00000000000..3035693812f --- /dev/null +++ b/spec/lib/microsoft_teams/notifier_spec.rb @@ -0,0 +1,55 @@ +require 'spec_helper' + +describe MicrosoftTeams::Notifier do + subject { described_class.new(webhook_url) } + + let(:webhook_url) { 'https://example.gitlab.com/'} + let(:header) { { 'Content-Type' => 'application/json' } } + let(:options) do + { + title: 'JohnDoe4/project2', + pretext: '[[JohnDoe4/project2](http://localhost/namespace2/gitlabhq)] Issue [#1 Awesome issue](http://localhost/namespace2/gitlabhq/issues/1) opened by user6', + activity: { + title: 'Issue opened by user6', + subtitle: 'in [JohnDoe4/project2](http://localhost/namespace2/gitlabhq)', + text: '[#1 Awesome issue](http://localhost/namespace2/gitlabhq/issues/1)', + image: 'http://someimage.com' + }, + attachments: 'please fix' + } + end + + let(:body) do + { + 'sections' => [ + { + 'activityTitle' => 'Issue opened by user6', + 'activitySubtitle' => 'in [JohnDoe4/project2](http://localhost/namespace2/gitlabhq)', + 'activityText' => '[#1 Awesome issue](http://localhost/namespace2/gitlabhq/issues/1)', + 'activityImage' => 'http://someimage.com' + }, + { + 'title' => 'Details', + 'facts' => [ + { + 'name' => 'Attachments', + 'value' => 'please fix' + } + ] + } + ], + 'title' => 'JohnDoe4/project2', + 'summary' => '[[JohnDoe4/project2](http://localhost/namespace2/gitlabhq)] Issue [#1 Awesome issue](http://localhost/namespace2/gitlabhq/issues/1) opened by user6' + } + end + + describe '#ping' do + before do + stub_request(:post, webhook_url).with(body: JSON(body), headers: { 'Content-Type' => 'application/json' }).to_return(status: 200, body: "", headers: {}) + end + + it 'expects to receive successfull answer' do + expect(subject.ping(options)).to be true + end + end +end diff --git a/spec/mailers/emails/merge_requests_spec.rb b/spec/mailers/emails/merge_requests_spec.rb index e22858d1d8f..2ad572bb5c7 100644 --- a/spec/mailers/emails/merge_requests_spec.rb +++ b/spec/mailers/emails/merge_requests_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' require 'email_spec' -describe Notify, "merge request notifications" do +describe Emails::MergeRequests do include EmailSpec::Matchers describe "#resolved_all_discussions_email" do diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb index 5ca936f28f0..8c1c9bf135f 100644 --- a/spec/mailers/emails/profile_spec.rb +++ 
b/spec/mailers/emails/profile_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' require 'email_spec' -describe Notify do +describe Emails::Profile do include EmailSpec::Matchers include_context 'gitlab email notification' @@ -15,106 +15,104 @@ describe Notify do end end - describe 'profile notifications' do - describe 'for new users, the email' do - let(:example_site_path) { root_path } - let(:new_user) { create(:user, email: new_user_address, created_by_id: 1) } - let(:token) { 'kETLwRaayvigPq_x3SNM' } + describe 'for new users, the email' do + let(:example_site_path) { root_path } + let(:new_user) { create(:user, email: new_user_address, created_by_id: 1) } + let(:token) { 'kETLwRaayvigPq_x3SNM' } - subject { Notify.new_user_email(new_user.id, token) } + subject { Notify.new_user_email(new_user.id, token) } - it_behaves_like 'an email sent from GitLab' - it_behaves_like 'a new user email' - it_behaves_like 'it should not have Gmail Actions links' - it_behaves_like 'a user cannot unsubscribe through footer link' + it_behaves_like 'an email sent from GitLab' + it_behaves_like 'a new user email' + it_behaves_like 'it should not have Gmail Actions links' + it_behaves_like 'a user cannot unsubscribe through footer link' - it 'contains the password text' do - is_expected.to have_body_text /Click here to set your password/ - end + it 'contains the password text' do + is_expected.to have_body_text /Click here to set your password/ + end - it 'includes a link for user to set password' do - params = "reset_password_token=#{token}" - is_expected.to have_body_text( - %r{http://#{Gitlab.config.gitlab.host}(:\d+)?/users/password/edit\?#{params}} - ) - end + it 'includes a link for user to set password' do + params = "reset_password_token=#{token}" + is_expected.to have_body_text( + %r{http://#{Gitlab.config.gitlab.host}(:\d+)?/users/password/edit\?#{params}} + ) + end - it 'explains the reset link expiration' do - is_expected.to have_body_text(/This link is valid for \d+ (hours?|days?)/) - is_expected.to have_body_text(new_user_password_url) - is_expected.to have_body_text(/\?user_email=.*%40.*/) - end + it 'explains the reset link expiration' do + is_expected.to have_body_text(/This link is valid for \d+ (hours?|days?)/) + is_expected.to have_body_text(new_user_password_url) + is_expected.to have_body_text(/\?user_email=.*%40.*/) end + end - describe 'for users that signed up, the email' do - let(:example_site_path) { root_path } - let(:new_user) { create(:user, email: new_user_address, password: "securePassword") } + describe 'for users that signed up, the email' do + let(:example_site_path) { root_path } + let(:new_user) { create(:user, email: new_user_address, password: "securePassword") } - subject { Notify.new_user_email(new_user.id) } + subject { Notify.new_user_email(new_user.id) } - it_behaves_like 'an email sent from GitLab' - it_behaves_like 'a new user email' - it_behaves_like 'it should not have Gmail Actions links' - it_behaves_like 'a user cannot unsubscribe through footer link' + it_behaves_like 'an email sent from GitLab' + it_behaves_like 'a new user email' + it_behaves_like 'it should not have Gmail Actions links' + it_behaves_like 'a user cannot unsubscribe through footer link' - it 'does not contain the new user\'s password' do - is_expected.not_to have_body_text /password/ - end + it 'does not contain the new user\'s password' do + is_expected.not_to have_body_text /password/ end + end - describe 'user added ssh key' do - let(:key) { create(:personal_key) } + describe 'user added ssh 
key' do + let(:key) { create(:personal_key) } - subject { Notify.new_ssh_key_email(key.id) } + subject { Notify.new_ssh_key_email(key.id) } - it_behaves_like 'an email sent from GitLab' - it_behaves_like 'it should not have Gmail Actions links' - it_behaves_like 'a user cannot unsubscribe through footer link' + it_behaves_like 'an email sent from GitLab' + it_behaves_like 'it should not have Gmail Actions links' + it_behaves_like 'a user cannot unsubscribe through footer link' - it 'is sent to the new user' do - is_expected.to deliver_to key.user.email - end + it 'is sent to the new user' do + is_expected.to deliver_to key.user.email + end - it 'has the correct subject' do - is_expected.to have_subject /^SSH key was added to your account$/i - end + it 'has the correct subject' do + is_expected.to have_subject /^SSH key was added to your account$/i + end - it 'contains the new ssh key title' do - is_expected.to have_body_text /#{key.title}/ - end + it 'contains the new ssh key title' do + is_expected.to have_body_text /#{key.title}/ + end - it 'includes a link to ssh keys page' do - is_expected.to have_body_text /#{profile_keys_path}/ - end + it 'includes a link to ssh keys page' do + is_expected.to have_body_text /#{profile_keys_path}/ + end - context 'with SSH key that does not exist' do - it { expect { Notify.new_ssh_key_email('foo') }.not_to raise_error } - end + context 'with SSH key that does not exist' do + it { expect { Notify.new_ssh_key_email('foo') }.not_to raise_error } end + end - describe 'user added email' do - let(:email) { create(:email) } + describe 'user added email' do + let(:email) { create(:email) } - subject { Notify.new_email_email(email.id) } + subject { Notify.new_email_email(email.id) } - it_behaves_like 'it should not have Gmail Actions links' - it_behaves_like 'a user cannot unsubscribe through footer link' + it_behaves_like 'it should not have Gmail Actions links' + it_behaves_like 'a user cannot unsubscribe through footer link' - it 'is sent to the new user' do - is_expected.to deliver_to email.user.email - end + it 'is sent to the new user' do + is_expected.to deliver_to email.user.email + end - it 'has the correct subject' do - is_expected.to have_subject /^Email was added to your account$/i - end + it 'has the correct subject' do + is_expected.to have_subject /^Email was added to your account$/i + end - it 'contains the new email address' do - is_expected.to have_body_text /#{email.email}/ - end + it 'contains the new email address' do + is_expected.to have_body_text /#{email.email}/ + end - it 'includes a link to emails page' do - is_expected.to have_body_text /#{profile_emails_path}/ - end + it 'includes a link to emails page' do + is_expected.to have_body_text /#{profile_emails_path}/ end end end diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb index f60c5ffb32a..1e6260270fe 100644 --- a/spec/mailers/notify_spec.rb +++ b/spec/mailers/notify_spec.rb @@ -36,11 +36,11 @@ describe Notify do end context 'for issues' do - let(:issue) { create(:issue, author: current_user, assignee: assignee, project: project) } - let(:issue_with_description) { create(:issue, author: current_user, assignee: assignee, project: project, description: FFaker::Lorem.sentence) } + let(:issue) { create(:issue, author: current_user, assignees: [assignee], project: project) } + let(:issue_with_description) { create(:issue, author: current_user, assignees: [assignee], project: project, description: 'My awesome description') } describe 'that are new' do - subject { 
Notify.new_issue_email(issue.assignee_id, issue.id) } + subject { described_class.new_issue_email(issue.assignees.first.id, issue.id) } it_behaves_like 'an assignee email' it_behaves_like 'an email starting a new thread with reply-by-email enabled' do @@ -63,13 +63,13 @@ describe Notify do it 'contains a link to note author' do is_expected.to have_html_escaped_body_text(issue.author_name) - is_expected.to have_body_text 'wrote:' + is_expected.to have_body_text 'created an issue:' end end end describe 'that are new with a description' do - subject { Notify.new_issue_email(issue_with_description.assignee_id, issue_with_description.id) } + subject { described_class.new_issue_email(issue_with_description.assignees.first.id, issue_with_description.id) } it_behaves_like 'it should show Gmail Actions View Issue link' @@ -79,7 +79,7 @@ describe Notify do end describe 'that have been reassigned' do - subject { Notify.reassigned_issue_email(recipient.id, issue.id, previous_assignee.id, current_user.id) } + subject { described_class.reassigned_issue_email(recipient.id, issue.id, [previous_assignee.id], current_user.id) } it_behaves_like 'a multiple recipients email' it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do @@ -105,7 +105,7 @@ describe Notify do end describe 'that have been relabeled' do - subject { Notify.relabeled_issue_email(recipient.id, issue.id, %w[foo bar baz], current_user.id) } + subject { described_class.relabeled_issue_email(recipient.id, issue.id, %w[foo bar baz], current_user.id) } it_behaves_like 'a multiple recipients email' it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do @@ -132,7 +132,7 @@ describe Notify do describe 'status changed' do let(:status) { 'closed' } - subject { Notify.issue_status_changed_email(recipient.id, issue.id, status, current_user.id) } + subject { described_class.issue_status_changed_email(recipient.id, issue.id, status, current_user.id) } it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do let(:model) { issue } @@ -158,7 +158,7 @@ describe Notify do describe 'moved to another project' do let(:new_issue) { create(:issue) } - subject { Notify.issue_moved_email(recipient, issue, new_issue, current_user) } + subject { described_class.issue_moved_email(recipient, issue, new_issue, current_user) } it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do let(:model) { issue } @@ -187,10 +187,10 @@ describe Notify do let(:project) { create(:project, :repository) } let(:merge_author) { create(:user) } let(:merge_request) { create(:merge_request, author: current_user, assignee: assignee, source_project: project, target_project: project) } - let(:merge_request_with_description) { create(:merge_request, author: current_user, assignee: assignee, source_project: project, target_project: project, description: FFaker::Lorem.sentence) } + let(:merge_request_with_description) { create(:merge_request, author: current_user, assignee: assignee, source_project: project, target_project: project, description: 'My awesome description') } describe 'that are new' do - subject { Notify.new_merge_request_email(merge_request.assignee_id, merge_request.id) } + subject { described_class.new_merge_request_email(merge_request.assignee_id, merge_request.id) } it_behaves_like 'an assignee email' it_behaves_like 'an email starting a new thread with reply-by-email enabled' do @@ -215,13 +215,13 @@ describe Notify do it 'contains a link to note author' do is_expected.to 
have_html_escaped_body_text merge_request.author_name - is_expected.to have_body_text 'wrote:' + is_expected.to have_body_text 'created a merge request:' end end end describe 'that are new with a description' do - subject { Notify.new_merge_request_email(merge_request_with_description.assignee_id, merge_request_with_description.id) } + subject { described_class.new_merge_request_email(merge_request_with_description.assignee_id, merge_request_with_description.id) } it_behaves_like 'it should show Gmail Actions View Merge request link' it_behaves_like "an unsubscribeable thread" @@ -232,7 +232,7 @@ describe Notify do end describe 'that are reassigned' do - subject { Notify.reassigned_merge_request_email(recipient.id, merge_request.id, previous_assignee.id, current_user.id) } + subject { described_class.reassigned_merge_request_email(recipient.id, merge_request.id, previous_assignee.id, current_user.id) } it_behaves_like 'a multiple recipients email' it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do @@ -258,7 +258,7 @@ describe Notify do end describe 'that have been relabeled' do - subject { Notify.relabeled_merge_request_email(recipient.id, merge_request.id, %w[foo bar baz], current_user.id) } + subject { described_class.relabeled_merge_request_email(recipient.id, merge_request.id, %w[foo bar baz], current_user.id) } it_behaves_like 'a multiple recipients email' it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do @@ -283,7 +283,7 @@ describe Notify do describe 'status changed' do let(:status) { 'reopened' } - subject { Notify.merge_request_status_email(recipient.id, merge_request.id, status, current_user.id) } + subject { described_class.merge_request_status_email(recipient.id, merge_request.id, status, current_user.id) } it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do let(:model) { merge_request } @@ -308,7 +308,7 @@ describe Notify do end describe 'that are merged' do - subject { Notify.merged_merge_request_email(recipient.id, merge_request.id, merge_author.id) } + subject { described_class.merged_merge_request_email(recipient.id, merge_request.id, merge_author.id) } it_behaves_like 'a multiple recipients email' it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do @@ -337,7 +337,7 @@ describe Notify do describe 'project was moved' do let(:project) { create(:empty_project) } let(:user) { create(:user) } - subject { Notify.project_was_moved_email(project.id, user.id, "gitlab/gitlab") } + subject { described_class.project_was_moved_email(project.id, user.id, "gitlab/gitlab") } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -363,7 +363,7 @@ describe Notify do project.request_access(user) project.requesters.find_by(user_id: user.id) end - subject { Notify.member_access_requested_email('project', project_member.id) } + subject { described_class.member_access_requested_email('project', project_member.id) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -390,7 +390,7 @@ describe Notify do project.request_access(user) project.requesters.find_by(user_id: user.id) end - subject { Notify.member_access_requested_email('project', project_member.id) } + subject { described_class.member_access_requested_email('project', project_member.id) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -416,7 +416,7 @@ describe Notify 
do project.request_access(user) project.requesters.find_by(user_id: user.id) end - subject { Notify.member_access_denied_email('project', project.id, user.id) } + subject { described_class.member_access_denied_email('project', project.id, user.id) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -434,7 +434,7 @@ describe Notify do let(:project) { create(:empty_project, :public, :access_requestable, namespace: owner.namespace) } let(:user) { create(:user) } let(:project_member) { create(:project_member, project: project, user: user) } - subject { Notify.member_access_granted_email('project', project_member.id) } + subject { described_class.member_access_granted_email('project', project_member.id) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -465,7 +465,7 @@ describe Notify do let(:master) { create(:user).tap { |u| project.team << [u, :master] } } let(:project_member) { invite_to_project(project, inviter: master) } - subject { Notify.member_invited_email('project', project_member.id, project_member.invite_token) } + subject { described_class.member_invited_email('project', project_member.id, project_member.invite_token) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -490,7 +490,7 @@ describe Notify do invitee end - subject { Notify.member_invite_accepted_email('project', project_member.id) } + subject { described_class.member_invite_accepted_email('project', project_member.id) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -514,7 +514,7 @@ describe Notify do invitee end - subject { Notify.member_invite_declined_email('project', project.id, project_member.invite_email, master.id) } + subject { described_class.member_invite_declined_email('project', project.id, project_member.invite_email, master.id) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -554,7 +554,7 @@ describe Notify do end it 'does not contain note author' do - is_expected.not_to have_body_text 'wrote:' + is_expected.not_to have_body_text note.author_name end context 'when enabled email_author_in_body' do @@ -564,7 +564,6 @@ describe Notify do it 'contains a link to note author' do is_expected.to have_html_escaped_body_text note.author_name - is_expected.to have_body_text 'wrote:' end end end @@ -575,7 +574,7 @@ describe Notify do before(:each) { allow(note).to receive(:noteable).and_return(commit) } - subject { Notify.note_commit_email(recipient.id, note.id) } + subject { described_class.note_commit_email(recipient.id, note.id) } it_behaves_like 'a note email' it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do @@ -597,7 +596,7 @@ describe Notify do let(:note_on_merge_request_path) { namespace_project_merge_request_path(project.namespace, project, merge_request, anchor: "note_#{note.id}") } before(:each) { allow(note).to receive(:noteable).and_return(merge_request) } - subject { Notify.note_merge_request_email(recipient.id, note.id) } + subject { described_class.note_merge_request_email(recipient.id, note.id) } it_behaves_like 'a note email' it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do @@ -619,7 +618,7 @@ describe Notify do let(:note_on_issue_path) { namespace_project_issue_path(project.namespace, project, issue, anchor: "note_#{note.id}") } before(:each) { allow(note).to 
receive(:noteable).and_return(issue) } - subject { Notify.note_issue_email(recipient.id, note.id) } + subject { described_class.note_issue_email(recipient.id, note.id) } it_behaves_like 'a note email' it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do @@ -637,7 +636,7 @@ describe Notify do end end - context 'items that are noteable, emails for a note on a diff' do + context 'items that are noteable, the email for a discussion note' do let(:project) { create(:project, :repository) } let(:note_author) { create(:user, name: 'author_name') } @@ -645,8 +644,118 @@ describe Notify do allow(Note).to receive(:find).with(note.id).and_return(note) end - shared_examples 'a note email on a diff' do |model| - let(:note) { create(model, project: project, author: note_author) } + shared_examples 'a discussion note email' do |model| + it_behaves_like 'it should have Gmail Actions links' + + it 'is sent to the given recipient as the author' do + sender = subject.header[:from].addrs[0] + + aggregate_failures do + expect(sender.display_name).to eq(note_author.name) + expect(sender.address).to eq(gitlab_sender) + expect(subject).to deliver_to(recipient.notification_email) + end + end + + it 'contains the message from the note' do + is_expected.to have_body_text note.note + end + + it 'contains an introduction' do + is_expected.to have_body_text 'started a new discussion' + end + + context 'when a comment on an existing discussion' do + let!(:second_note) { create(model, author: note_author, noteable: nil, in_reply_to: note) } + + it 'contains an introduction' do + is_expected.to have_body_text 'commented on a' + end + end + end + + describe 'on a commit' do + let(:commit) { project.commit } + let(:note) { create(:discussion_note_on_commit, commit_id: commit.id, project: project, author: note_author) } + + before(:each) { allow(note).to receive(:noteable).and_return(commit) } + + subject { described_class.note_commit_email(recipient.id, note.id) } + + it_behaves_like 'a discussion note email', :discussion_note_on_commit + it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do + let(:model) { commit } + end + it_behaves_like 'it should show Gmail Actions View Commit link' + it_behaves_like 'a user cannot unsubscribe through footer link' + + it 'has the correct subject' do + is_expected.to have_subject "Re: #{project.name} | #{commit.title.strip} (#{commit.short_id})" + end + + it 'contains a link to the commit' do + is_expected.to have_body_text commit.short_id + end + end + + describe 'on a merge request' do + let(:merge_request) { create(:merge_request, source_project: project, target_project: project) } + let(:note) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project, author: note_author) } + let(:note_on_merge_request_path) { namespace_project_merge_request_path(project.namespace, project, merge_request, anchor: "note_#{note.id}") } + before(:each) { allow(note).to receive(:noteable).and_return(merge_request) } + + subject { described_class.note_merge_request_email(recipient.id, note.id) } + + it_behaves_like 'a discussion note email', :discussion_note_on_merge_request + it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do + let(:model) { merge_request } + end + it_behaves_like 'it should show Gmail Actions View Merge request link' + it_behaves_like 'an unsubscribeable thread' + + it 'has the correct subject' do + is_expected.to have_referable_subject(merge_request, reply: true) + end + + it 
'contains a link to the merge request note' do + is_expected.to have_body_text note_on_merge_request_path + end + end + + describe 'on an issue' do + let(:issue) { create(:issue, project: project) } + let(:note) { create(:discussion_note_on_issue, noteable: issue, project: project, author: note_author) } + let(:note_on_issue_path) { namespace_project_issue_path(project.namespace, project, issue, anchor: "note_#{note.id}") } + before(:each) { allow(note).to receive(:noteable).and_return(issue) } + + subject { described_class.note_issue_email(recipient.id, note.id) } + + it_behaves_like 'a discussion note email', :discussion_note_on_issue + it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do + let(:model) { issue } + end + it_behaves_like 'it should show Gmail Actions View Issue link' + it_behaves_like 'an unsubscribeable thread' + + it 'has the correct subject' do + is_expected.to have_referable_subject(issue, reply: true) + end + + it 'contains a link to the issue note' do + is_expected.to have_body_text note_on_issue_path + end + end + end + + context 'items that are noteable, the email for a diff discussion note' do + let(:note_author) { create(:user, name: 'author_name') } + + before :each do + allow(Note).to receive(:find).with(note.id).and_return(note) + end + + shared_examples 'an email for a note on a diff discussion' do |model| + let(:note) { create(model, author: note_author) } it "includes diffs with character-level highlighting" do is_expected.to have_body_text '<span class="p">}</span></span>' @@ -672,18 +781,15 @@ describe Notify do is_expected.to have_html_escaped_body_text note.note end - it 'does not contain note author' do - is_expected.not_to have_body_text 'wrote:' + it 'contains an introduction' do + is_expected.to have_body_text 'started a new discussion on' end - context 'when enabled email_author_in_body' do - before do - stub_application_setting(email_author_in_body: true) - end + context 'when a comment on an existing discussion' do + let!(:second_note) { create(model, author: note_author, noteable: nil, in_reply_to: note) } - it 'contains a link to note author' do - is_expected.to have_html_escaped_body_text note.author_name - is_expected.to have_body_text 'wrote:' + it 'contains an introduction' do + is_expected.to have_body_text 'commented on a discussion on' end end end @@ -692,9 +798,9 @@ describe Notify do let(:commit) { project.commit } let(:note) { create(:diff_note_on_commit) } - subject { Notify.note_commit_email(recipient.id, note.id) } + subject { described_class.note_commit_email(recipient.id, note.id) } - it_behaves_like 'a note email on a diff', :diff_note_on_commit + it_behaves_like 'an email for a note on a diff discussion', :diff_note_on_commit it_behaves_like 'it should show Gmail Actions View Commit link' it_behaves_like 'a user cannot unsubscribe through footer link' end @@ -703,9 +809,9 @@ describe Notify do let(:merge_request) { create(:merge_request, source_project: project, target_project: project) } let(:note) { create(:diff_note_on_merge_request) } - subject { Notify.note_merge_request_email(recipient.id, note.id) } + subject { described_class.note_merge_request_email(recipient.id, note.id) } - it_behaves_like 'a note email on a diff', :diff_note_on_merge_request + it_behaves_like 'an email for a note on a diff discussion', :diff_note_on_merge_request it_behaves_like 'it should show Gmail Actions View Merge request link' it_behaves_like 'an unsubscribeable thread' end @@ -720,7 +826,7 @@ describe Notify do 
group.request_access(user) group.requesters.find_by(user_id: user.id) end - subject { Notify.member_access_requested_email('group', group_member.id) } + subject { described_class.member_access_requested_email('group', group_member.id) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -741,7 +847,7 @@ describe Notify do group.request_access(user) group.requesters.find_by(user_id: user.id) end - subject { Notify.member_access_denied_email('group', group.id, user.id) } + subject { described_class.member_access_denied_email('group', group.id, user.id) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -759,7 +865,7 @@ describe Notify do let(:user) { create(:user) } let(:group_member) { create(:group_member, group: group, user: user) } - subject { Notify.member_access_granted_email('group', group_member.id) } + subject { described_class.member_access_granted_email('group', group_member.id) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -790,7 +896,7 @@ describe Notify do let(:owner) { create(:user).tap { |u| group.add_user(u, Gitlab::Access::OWNER) } } let(:group_member) { invite_to_group(group, inviter: owner) } - subject { Notify.member_invited_email('group', group_member.id, group_member.invite_token) } + subject { described_class.member_invited_email('group', group_member.id, group_member.invite_token) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -815,7 +921,7 @@ describe Notify do invitee end - subject { Notify.member_invite_accepted_email('group', group_member.id) } + subject { described_class.member_invite_accepted_email('group', group_member.id) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -839,7 +945,7 @@ describe Notify do invitee end - subject { Notify.member_invite_declined_email('group', group.id, group_member.invite_email, owner.id) } + subject { described_class.member_invite_declined_email('group', group.id, group_member.invite_email, owner.id) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' @@ -888,7 +994,7 @@ describe Notify do let(:user) { create(:user) } let(:tree_path) { namespace_project_tree_path(project.namespace, project, "empty-branch") } - subject { Notify.repository_push_email(project.id, author_id: user.id, ref: 'refs/heads/empty-branch', action: :create) } + subject { described_class.repository_push_email(project.id, author_id: user.id, ref: 'refs/heads/empty-branch', action: :create) } it_behaves_like 'it should not have Gmail Actions links' it_behaves_like 'a user cannot unsubscribe through footer link' @@ -914,7 +1020,7 @@ describe Notify do let(:user) { create(:user) } let(:tree_path) { namespace_project_tree_path(project.namespace, project, "v1.0") } - subject { Notify.repository_push_email(project.id, author_id: user.id, ref: 'refs/tags/v1.0', action: :create) } + subject { described_class.repository_push_email(project.id, author_id: user.id, ref: 'refs/tags/v1.0', action: :create) } it_behaves_like 'it should not have Gmail Actions links' it_behaves_like "a user cannot unsubscribe through footer link" @@ -939,7 +1045,7 @@ describe Notify do let(:example_site_path) { root_path } let(:user) { create(:user) } - subject { Notify.repository_push_email(project.id, author_id: user.id, ref: 'refs/heads/master', action: 
:delete) } + subject { described_class.repository_push_email(project.id, author_id: user.id, ref: 'refs/heads/master', action: :delete) } it_behaves_like 'it should not have Gmail Actions links' it_behaves_like 'a user cannot unsubscribe through footer link' @@ -961,7 +1067,7 @@ describe Notify do let(:example_site_path) { root_path } let(:user) { create(:user) } - subject { Notify.repository_push_email(project.id, author_id: user.id, ref: 'refs/tags/v1.0', action: :delete) } + subject { described_class.repository_push_email(project.id, author_id: user.id, ref: 'refs/tags/v1.0', action: :delete) } it_behaves_like 'it should not have Gmail Actions links' it_behaves_like 'a user cannot unsubscribe through footer link' @@ -990,7 +1096,7 @@ describe Notify do let(:send_from_committer_email) { false } let(:diff_refs) { Gitlab::Diff::DiffRefs.new(base_sha: project.merge_base_commit(sample_image_commit.id, sample_commit.id).id, head_sha: sample_commit.id) } - subject { Notify.repository_push_email(project.id, author_id: user.id, ref: 'refs/heads/master', action: :push, compare: compare, reverse_compare: false, diff_refs: diff_refs, send_from_committer_email: send_from_committer_email) } + subject { described_class.repository_push_email(project.id, author_id: user.id, ref: 'refs/heads/master', action: :push, compare: compare, reverse_compare: false, diff_refs: diff_refs, send_from_committer_email: send_from_committer_email) } it_behaves_like 'it should not have Gmail Actions links' it_behaves_like 'a user cannot unsubscribe through footer link' @@ -1083,7 +1189,7 @@ describe Notify do let(:diff_path) { namespace_project_commit_path(project.namespace, project, commits.first) } let(:diff_refs) { Gitlab::Diff::DiffRefs.new(base_sha: project.merge_base_commit(sample_image_commit.id, sample_commit.id).id, head_sha: sample_commit.id) } - subject { Notify.repository_push_email(project.id, author_id: user.id, ref: 'refs/heads/master', action: :push, compare: compare, diff_refs: diff_refs) } + subject { described_class.repository_push_email(project.id, author_id: user.id, ref: 'refs/heads/master', action: :push, compare: compare, diff_refs: diff_refs) } it_behaves_like 'it should show Gmail Actions View Commit link' it_behaves_like 'a user cannot unsubscribe through footer link' @@ -1109,7 +1215,7 @@ describe Notify do describe 'HTML emails setting' do let(:project) { create(:empty_project) } let(:user) { create(:user) } - let(:multipart_mail) { Notify.project_was_moved_email(project.id, user.id, "gitlab/gitlab") } + let(:multipart_mail) { described_class.project_was_moved_email(project.id, user.id, "gitlab/gitlab") } context 'when disabled' do it 'only sends the text template' do diff --git a/spec/mailers/previews/notify_preview.rb b/spec/mailers/previews/notify_preview.rb new file mode 100644 index 00000000000..580f0d56a92 --- /dev/null +++ b/spec/mailers/previews/notify_preview.rb @@ -0,0 +1,107 @@ +class NotifyPreview < ActionMailer::Preview + def note_merge_request_email_for_individual_note + note_email(:note_merge_request_email) do + note = <<-MD.strip_heredoc + This is an individual note on a merge request :smiley: + + In this notification email, we expect to see: + + - The note contents (that's what you're looking at) + - A link to view this note on Gitlab + - An explanation for why the user is receiving this notification + MD + + create_note(noteable_type: 'merge_request', noteable_id: merge_request.id, note: note) + end + end + + def note_merge_request_email_for_discussion + 
note_email(:note_merge_request_email) do + note = <<-MD.strip_heredoc + This is a new discussion on a merge request :smiley: + + In this notification email, we expect to see: + + - A line saying who started this discussion + - The note contents (that's what you're looking at) + - A link to view this discussion on Gitlab + - An explanation for why the user is receiving this notification + MD + + create_note(noteable_type: 'merge_request', noteable_id: merge_request.id, type: 'DiscussionNote', note: note) + end + end + + def note_merge_request_email_for_diff_discussion + note_email(:note_merge_request_email) do + note = <<-MD.strip_heredoc + This is a new discussion on a merge request :smiley: + + In this notification email, we expect to see: + + - A line saying who started this discussion and on what file + - The diff + - The note contents (that's what you're looking at) + - A link to view this discussion on Gitlab + - An explanation for why the user is receiving this notification + MD + + position = Gitlab::Diff::Position.new( + old_path: "files/ruby/popen.rb", + new_path: "files/ruby/popen.rb", + old_line: nil, + new_line: 14, + diff_refs: merge_request.diff_refs + ) + + create_note(noteable_type: 'merge_request', noteable_id: merge_request.id, type: 'DiffNote', position: position, note: note) + end + end + + private + + def project + @project ||= Project.find_by_full_path('gitlab-org/gitlab-test') + end + + def merge_request + @merge_request ||= project.merge_requests.find_by(source_branch: 'master', target_branch: 'feature') + end + + def user + @user ||= User.last + end + + def create_note(params) + Notes::CreateService.new(project, user, params).execute + end + + def note_email(method) + cleanup do + note = yield + + Notify.public_send(method, user.id, note) + end + end + + def cleanup + email = nil + + ActiveRecord::Base.transaction do + email = yield + raise ActiveRecord::Rollback + end + + email + end + + def pipeline_success_email + pipeline = Ci::Pipeline.last + Notify.pipeline_success_email(pipeline, pipeline.user.try(:email)) + end + + def pipeline_failed_email + pipeline = Ci::Pipeline.last + Notify.pipeline_failed_email(pipeline, pipeline.user.try(:email)) + end +end diff --git a/spec/migrations/active_record/schema_spec.rb b/spec/migrations/active_record/schema_spec.rb new file mode 100644 index 00000000000..e132529d8d8 --- /dev/null +++ b/spec/migrations/active_record/schema_spec.rb @@ -0,0 +1,23 @@ +require 'spec_helper' + +# Check consistency of db/schema.rb version, migrations' timestamps, and the latest migration timestamp +# stored in the database's schema_migrations table. 
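For context, the version this new spec extracts from db/schema.rb is the timestamp Rails writes into the generated schema file, and the same timestamp prefixes every file under db/migrate and db/post_migrate. A minimal sketch of the three sources being compared, using a timestamp such as 20170518231126 purely for illustration:

    # db/schema.rb, as regenerated by `rake db:migrate`, carries the newest timestamp:
    #   ActiveRecord::Schema.define(version: 20170518231126) do ... end
    #
    # Migration file names carry the same timestamp as a prefix:
    #   db/post_migrate/20170518231126_fix_wrongly_renamed_routes.rb
    migrations = Dir[Rails.root.join('db', '{migrate,post_migrate}', '*.rb')]
    latest_file_version = migrations.map { |path| File.basename(path)[/\A\d+/].to_i }.max
    # The third source, the schema_migrations table, is read through
    # ActiveRecord::Migrator.current_version, exactly as the spec does below.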
+ +describe ActiveRecord::Schema do + let(:latest_migration_timestamp) do + migrations = Dir[Rails.root.join('db', 'migrate', '*'), Rails.root.join('db', 'post_migrate', '*')] + migrations.map { |migration| File.basename(migration).split('_').first.to_i }.max + end + + it '> schema version equals last migration timestamp' do + defined_schema_version = File.open(Rails.root.join('db', 'schema.rb')) do |file| + file.find { |line| line =~ /ActiveRecord::Schema.define/ } + end.match(/(\d+)/)[0].to_i + + expect(defined_schema_version).to eq(latest_migration_timestamp) + end + + it '> schema version should equal the latest migration timestamp stored in schema_migrations table' do + expect(latest_migration_timestamp).to eq(ActiveRecord::Migrator.current_version.to_i) + end +end diff --git a/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb b/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb new file mode 100644 index 00000000000..bd5f85b901d --- /dev/null +++ b/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb @@ -0,0 +1,33 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20170508170547_add_head_pipeline_for_each_merge_request.rb') + +describe AddHeadPipelineForEachMergeRequest do + let(:migration) { described_class.new } + + let!(:project) { create(:empty_project) } + let!(:forked_project_link) { create(:forked_project_link, forked_from_project: project) } + let!(:other_project) { forked_project_link.forked_to_project } + + let!(:pipeline_1) { create(:ci_pipeline, project: project, ref: "branch_1") } + let!(:pipeline_2) { create(:ci_pipeline, project: other_project, ref: "branch_1") } + let!(:pipeline_3) { create(:ci_pipeline, project: other_project, ref: "branch_1") } + let!(:pipeline_4) { create(:ci_pipeline, project: project, ref: "branch_2") } + + let!(:mr_1) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_1", target_branch: "target_1") } + let!(:mr_2) { create(:merge_request, source_project: other_project, target_project: project, source_branch: "branch_1", target_branch: "target_2") } + let!(:mr_3) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_2", target_branch: "master") } + let!(:mr_4) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_3", target_branch: "master") } + + context "#up" do + context "when source_project and source_branch of pipeline are the same of merge request" do + it "sets head_pipeline_id of given merge requests" do + migration.up + + expect(mr_1.reload.head_pipeline_id).to eq(pipeline_1.id) + expect(mr_2.reload.head_pipeline_id).to eq(pipeline_3.id) + expect(mr_3.reload.head_pipeline_id).to eq(pipeline_4.id) + expect(mr_4.reload.head_pipeline_id).to be_nil + end + end + end +end diff --git a/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb b/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb new file mode 100644 index 00000000000..49e750a3f4d --- /dev/null +++ b/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb @@ -0,0 +1,32 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20170502101023_cleanup_namespaceless_pending_delete_projects.rb') + +describe CleanupNamespacelessPendingDeleteProjects do + before do + # Stub after_save callbacks that will fail when Project has no namespace + allow_any_instance_of(Project).to receive(:ensure_dir_exist).and_return(nil) + 
allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil) + end + + describe '#up' do + it 'only cleans up pending delete projects' do + create(:empty_project) + create(:empty_project, pending_delete: true) + project = build(:empty_project, pending_delete: true, namespace_id: nil) + project.save(validate: false) + + expect(NamespacelessProjectDestroyWorker).to receive(:bulk_perform_async).with([[project.id]]) + + described_class.new.up + end + + it 'does nothing when no pending delete projects without namespace found' do + create(:empty_project) + create(:empty_project, pending_delete: true) + + expect(NamespacelessProjectDestroyWorker).not_to receive(:bulk_perform_async) + + described_class.new.up + end + end +end diff --git a/spec/migrations/fix_wrongly_renamed_routes_spec.rb b/spec/migrations/fix_wrongly_renamed_routes_spec.rb new file mode 100644 index 00000000000..148290b0e7d --- /dev/null +++ b/spec/migrations/fix_wrongly_renamed_routes_spec.rb @@ -0,0 +1,73 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20170518231126_fix_wrongly_renamed_routes.rb') + +describe FixWronglyRenamedRoutes, truncate: true do + let(:subject) { described_class.new } + let(:broken_namespace) do + namespace = create(:group, name: 'apiis') + namespace.route.update_attribute(:path, 'api0is') + namespace + end + + describe '#wrongly_renamed' do + it "includes routes that have names that don't match their namespace" do + broken_namespace + _other_namespace = create(:group, name: 'api0') + + expect(subject.wrongly_renamed.map(&:id)) + .to contain_exactly(broken_namespace.route.id) + end + end + + describe "#paths_and_corrections" do + it 'finds the wrong path and gets the correction from the namespace' do + broken_namespace + namespace = create(:group, name: 'uploads-test') + namespace.route.update_attribute(:path, 'uploads0-test') + + expected_result = [ + { 'namespace_path' => 'apiis', 'path' => 'api0is' }, + { 'namespace_path' => 'uploads-test', 'path' => 'uploads0-test' } + ] + + expect(subject.paths_and_corrections).to include(*expected_result) + end + end + + describe '#routes_in_namespace_query' do + it 'includes only the required routes' do + namespace = create(:group, path: 'hello') + project = create(:empty_project, namespace: namespace) + _other_namespace = create(:group, path: 'hello0') + + result = Route.where(subject.routes_in_namespace_query('hello')) + + expect(result).to contain_exactly(namespace.route, project.route) + end + end + + describe '#up' do + let(:broken_project) do + project = create(:empty_project, namespace: broken_namespace, path: 'broken-project') + project.route.update_attribute(:path, 'api0is/broken-project') + project + end + + it 'renames incorrectly named routes' do + broken_project + + subject.up + + expect(broken_project.route.reload.path).to eq('apiis/broken-project') + expect(broken_namespace.route.reload.path).to eq('apiis') + end + + it "doesn't touch namespaces that look like something that should be renamed" do + namespace = create(:group, path: 'api0') + + subject.up + + expect(namespace.route.reload.path).to eq('api0') + end + end +end diff --git a/spec/migrations/migrate_build_events_to_pipeline_events_spec.rb b/spec/migrations/migrate_build_events_to_pipeline_events_spec.rb deleted file mode 100644 index 57eb03e3c80..00000000000 --- a/spec/migrations/migrate_build_events_to_pipeline_events_spec.rb +++ /dev/null @@ -1,74 +0,0 @@ -require 'spec_helper' -require Rails.root.join('db', 'post_migrate', 
'20170301205640_migrate_build_events_to_pipeline_events.rb') - -# This migration uses multiple threads, and thus different transactions. This -# means data created in this spec may not be visible to some threads. To work -# around this we use the TRUNCATE cleaning strategy. -describe MigrateBuildEventsToPipelineEvents, truncate: true do - let(:migration) { described_class.new } - let(:project_with_pipeline_service) { create(:empty_project) } - let(:project_with_build_service) { create(:empty_project) } - - before do - ActiveRecord::Base.connection.execute <<-SQL - INSERT INTO services (properties, build_events, pipeline_events, type) - VALUES - ('{"notify_only_broken_builds":true}', true, false, 'SlackService') - , ('{"notify_only_broken_builds":true}', true, false, 'MattermostService') - , ('{"notify_only_broken_builds":true}', true, false, 'HipchatService') - ; - SQL - - ActiveRecord::Base.connection.execute <<-SQL - INSERT INTO services - (properties, build_events, pipeline_events, type, project_id) - VALUES - ('{"notify_only_broken_builds":true}', true, false, - 'BuildsEmailService', #{project_with_pipeline_service.id}) - , ('{"notify_only_broken_pipelines":true}', false, true, - 'PipelinesEmailService', #{project_with_pipeline_service.id}) - , ('{"notify_only_broken_builds":true}', true, false, - 'BuildsEmailService', #{project_with_build_service.id}) - ; - SQL - end - - describe '#up' do - before do - silence_migration = Module.new do - # rubocop:disable Rails/Delegate - def execute(query) - connection.execute(query) - end - end - - migration.extend(silence_migration) - migration.up - end - - it 'migrates chat service properly' do - [SlackService, MattermostService, HipchatService].each do |service| - expect(service.count).to eq(1) - - verify_service_record(service.first) - end - end - - it 'migrates pipelines email service only if it has none before' do - Project.find_each do |project| - pipeline_service_count = - project.services.where(type: 'PipelinesEmailService').count - - expect(pipeline_service_count).to eq(1) - - verify_service_record(project.pipelines_email_service) - end - end - - def verify_service_record(service) - expect(service.notify_only_broken_pipelines).to be(true) - expect(service.build_events).to be(false) - expect(service.pipeline_events).to be(true) - end - end -end diff --git a/spec/migrations/migrate_user_activities_to_users_last_activity_on_spec.rb b/spec/migrations/migrate_user_activities_to_users_last_activity_on_spec.rb new file mode 100644 index 00000000000..1db9bc002ae --- /dev/null +++ b/spec/migrations/migrate_user_activities_to_users_last_activity_on_spec.rb @@ -0,0 +1,49 @@ +# encoding: utf-8 + +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20170324160416_migrate_user_activities_to_users_last_activity_on.rb') + +describe MigrateUserActivitiesToUsersLastActivityOn, :redis do + let(:migration) { described_class.new } + let!(:user_active_1) { create(:user) } + let!(:user_active_2) { create(:user) } + + def record_activity(user, time) + Gitlab::Redis.with do |redis| + redis.zadd(described_class::USER_ACTIVITY_SET_KEY, time.to_i, user.username) + end + end + + around do |example| + Timecop.freeze { example.run } + end + + before do + record_activity(user_active_1, described_class::TIME_WHEN_ACTIVITY_SET_WAS_INTRODUCED + 2.months) + record_activity(user_active_2, described_class::TIME_WHEN_ACTIVITY_SET_WAS_INTRODUCED + 3.months) + mute_stdout { migration.up } + end + + describe '#up' do + it 'fills last_activity_on from the legacy 
Redis Sorted Set' do + expect(user_active_1.reload.last_activity_on).to eq((described_class::TIME_WHEN_ACTIVITY_SET_WAS_INTRODUCED + 2.months).to_date) + expect(user_active_2.reload.last_activity_on).to eq((described_class::TIME_WHEN_ACTIVITY_SET_WAS_INTRODUCED + 3.months).to_date) + end + end + + describe '#down' do + it 'sets last_activity_on to NULL for all users' do + mute_stdout { migration.down } + + expect(user_active_1.reload.last_activity_on).to be_nil + expect(user_active_2.reload.last_activity_on).to be_nil + end + end + + def mute_stdout + orig_stdout = $stdout + $stdout = StringIO.new + yield + $stdout = orig_stdout + end +end diff --git a/spec/migrations/migrate_user_project_view_spec.rb b/spec/migrations/migrate_user_project_view_spec.rb new file mode 100644 index 00000000000..70f8e0d6082 --- /dev/null +++ b/spec/migrations/migrate_user_project_view_spec.rb @@ -0,0 +1,22 @@ +# encoding: utf-8 + +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20170406142253_migrate_user_project_view.rb') + +describe MigrateUserProjectView do + let(:migration) { described_class.new } + let!(:user) { create(:user) } + + before do + # 0 is the numeric value for the old 'readme' option + user.update_column(:project_view, 0) + end + + describe '#up' do + it 'updates project view setting with new value' do + migration.up + + expect(user.reload.project_view).to eq('files') + end + end +end diff --git a/spec/migrations/rename_users_with_renamed_namespace_spec.rb b/spec/migrations/rename_users_with_renamed_namespace_spec.rb new file mode 100644 index 00000000000..1e9aab3d9a1 --- /dev/null +++ b/spec/migrations/rename_users_with_renamed_namespace_spec.rb @@ -0,0 +1,22 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20170518200835_rename_users_with_renamed_namespace.rb') + +describe RenameUsersWithRenamedNamespace, truncate: true do + it 'renames a user that had their namespace renamed to the namespace path' do + other_user = create(:user, username: 'kodingu') + other_user1 = create(:user, username: 'api0') + + user = create(:user, username: "Users0") + user.update_attribute(:username, 'Users') + user1 = create(:user, username: "import0") + user1.update_attribute(:username, 'import') + + described_class.new.up + + expect(user.reload.username).to eq('Users0') + expect(user1.reload.username).to eq('import0') + + expect(other_user.reload.username).to eq('kodingu') + expect(other_user1.reload.username).to eq('api0') + end +end diff --git a/spec/migrations/upate_retried_for_ci_builds_spec.rb b/spec/migrations/upate_retried_for_ci_builds_spec.rb new file mode 100644 index 00000000000..5cdb8a3c7da --- /dev/null +++ b/spec/migrations/upate_retried_for_ci_builds_spec.rb @@ -0,0 +1,17 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20170503004427_upate_retried_for_ci_build.rb') + +describe UpateRetriedForCiBuild, truncate: true do + let(:pipeline) { create(:ci_pipeline) } + let!(:build_old) { create(:ci_build, pipeline: pipeline, name: 'test') } + let!(:build_new) { create(:ci_build, pipeline: pipeline, name: 'test') } + + before do + described_class.new.up + end + + it 'updates ci_builds.is_retried' do + expect(build_old.reload).to be_retried + expect(build_new.reload).not_to be_retried + end +end diff --git a/spec/models/abuse_report_spec.rb b/spec/models/abuse_report_spec.rb index 4e71597521d..ced93c8f762 100644 --- a/spec/models/abuse_report_spec.rb +++ b/spec/models/abuse_report_spec.rb @@ -29,7 +29,8 @@ RSpec.describe AbuseReport, 
type: :model do it 'lets a worker delete the user' do expect(DeleteUserWorker).to receive(:perform_async).with(user.id, subject.user.id, - delete_solo_owned_groups: true) + delete_solo_owned_groups: true, + hard_delete: true) subject.remove_user(deleted_by: user) end diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb index 01ca1584ed2..fa229542f70 100644 --- a/spec/models/application_setting_spec.rb +++ b/spec/models/application_setting_spec.rb @@ -4,6 +4,7 @@ describe ApplicationSetting, models: true do let(:setting) { ApplicationSetting.create_from_defaults } it { expect(setting).to be_valid } + it { expect(setting.uuid).to be_present } describe 'validations' do let(:http) { 'http://example.com' } @@ -88,7 +89,7 @@ describe ApplicationSetting, models: true do storages = { 'custom1' => 'tmp/tests/custom_repositories_1', 'custom2' => 'tmp/tests/custom_repositories_2', - 'custom3' => 'tmp/tests/custom_repositories_3', + 'custom3' => 'tmp/tests/custom_repositories_3' } allow(Gitlab.config.repositories).to receive(:storages).and_return(storages) @@ -210,4 +211,66 @@ describe ApplicationSetting, models: true do expect(setting.domain_blacklist).to contain_exactly('example.com', 'test.com', 'foo.bar') end end + + describe 'usage ping settings' do + context 'when the usage ping is disabled in gitlab.yml' do + before do + allow(Settings.gitlab).to receive(:usage_ping_enabled).and_return(false) + end + + it 'does not allow the usage ping to be configured' do + expect(setting.usage_ping_can_be_configured?).to be_falsey + end + + context 'when the usage ping is disabled in the DB' do + before do + setting.usage_ping_enabled = false + end + + it 'returns false for usage_ping_enabled' do + expect(setting.usage_ping_enabled).to be_falsey + end + end + + context 'when the usage ping is enabled in the DB' do + before do + setting.usage_ping_enabled = true + end + + it 'returns false for usage_ping_enabled' do + expect(setting.usage_ping_enabled).to be_falsey + end + end + end + + context 'when the usage ping is enabled in gitlab.yml' do + before do + allow(Settings.gitlab).to receive(:usage_ping_enabled).and_return(true) + end + + it 'allows the usage ping to be configured' do + expect(setting.usage_ping_can_be_configured?).to be_truthy + end + + context 'when the usage ping is disabled in the DB' do + before do + setting.usage_ping_enabled = false + end + + it 'returns false for usage_ping_enabled' do + expect(setting.usage_ping_enabled).to be_falsey + end + end + + context 'when the usage ping is enabled in the DB' do + before do + setting.usage_ping_enabled = true + end + + it 'returns true for usage_ping_enabled' do + expect(setting.usage_ping_enabled).to be_truthy + end + end + end + end end diff --git a/spec/models/award_emoji_spec.rb b/spec/models/award_emoji_spec.rb index cb3c592f8cd..2a9a27752c1 100644 --- a/spec/models/award_emoji_spec.rb +++ b/spec/models/award_emoji_spec.rb @@ -25,6 +25,20 @@ describe AwardEmoji, models: true do expect(new_award).not_to be_valid end + + # Assume User A and User B both created award emoji of the same name + # on the same awardable. When User A is deleted, User A's award emoji + # is moved to the ghost user. When User B is deleted, User B's award emoji + # also needs to be moved to the ghost user - this cannot happen unless + # the uniqueness validation is disabled for ghost users. 
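The spec below only exercises the behaviour; on the model side this is usually achieved with a conditional uniqueness validation. A minimal sketch of what such a validation could look like, assuming hypothetical scope columns and a ghost_user? predicate (the real AwardEmoji model may differ):

    class AwardEmoji < ActiveRecord::Base
      belongs_to :awardable, polymorphic: true
      belongs_to :user

      # Skip the uniqueness check for the ghost user so that awards migrated
      # from several deleted users can coexist on the same awardable.
      validates :name, uniqueness: { scope: [:user, :awardable_type, :awardable_id] },
                       unless: :ghost_user?

      def ghost_user?
        user && user.ghost?
      end
    end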
+ it "allows duplicate award emoji for ghost users" do + user = create(:user, :ghost) + issue = create(:issue) + create(:award_emoji, user: user, awardable: issue) + new_award = build(:award_emoji, user: user, awardable: issue) + + expect(new_award).to be_valid + end end end end diff --git a/spec/models/blob_spec.rb b/spec/models/blob_spec.rb index 552229e9b07..f19e1af65a6 100644 --- a/spec/models/blob_spec.rb +++ b/spec/models/blob_spec.rb @@ -2,6 +2,14 @@ require 'rails_helper' describe Blob do + include FakeBlobHelpers + + let(:project) { build(:empty_project, lfs_enabled: true) } + + before do + allow(Gitlab.config.lfs).to receive(:enabled).and_return(true) + end + describe '.decorate' do it 'returns NilClass when given nil' do expect(described_class.decorate(nil)).to be_nil @@ -12,7 +20,7 @@ describe Blob do context 'using a binary blob' do it 'returns the data as-is' do data = "\n\xFF\xB9\xC3" - blob = described_class.new(double(binary?: true, data: data)) + blob = fake_blob(binary: true, data: data) expect(blob.data).to eq(data) end @@ -20,142 +28,330 @@ describe Blob do context 'using a text blob' do it 'converts the data to UTF-8' do - blob = described_class.new(double(binary?: false, data: "\n\xFF\xB9\xC3")) + blob = fake_blob(binary: false, data: "\n\xFF\xB9\xC3") expect(blob.data).to eq("\n���") end end end - describe '#svg?' do - it 'is falsey when not text' do - git_blob = double(text?: false) + describe '#external_storage_error?' do + context 'if the blob is stored in LFS' do + let(:blob) { fake_blob(path: 'file.pdf', lfs: true) } - expect(described_class.decorate(git_blob)).not_to be_svg + context 'when the project has LFS enabled' do + it 'returns false' do + expect(blob.external_storage_error?).to be_falsey + end + end + + context 'when the project does not have LFS enabled' do + before do + project.lfs_enabled = false + end + + it 'returns true' do + expect(blob.external_storage_error?).to be_truthy + end + end end - it 'is falsey when no language is detected' do - git_blob = double(text?: true, language: nil) + context 'if the blob is not stored in LFS' do + let(:blob) { fake_blob(path: 'file.md') } - expect(described_class.decorate(git_blob)).not_to be_svg + it 'returns false' do + expect(blob.external_storage_error?).to be_falsey + end end + end + + describe '#stored_externally?' do + context 'if the blob is stored in LFS' do + let(:blob) { fake_blob(path: 'file.pdf', lfs: true) } + + context 'when the project has LFS enabled' do + it 'returns true' do + expect(blob.stored_externally?).to be_truthy + end + end - it' is falsey when language is not SVG' do - git_blob = double(text?: true, language: double(name: 'XML')) + context 'when the project does not have LFS enabled' do + before do + project.lfs_enabled = false + end - expect(described_class.decorate(git_blob)).not_to be_svg + it 'returns false' do + expect(blob.stored_externally?).to be_falsey + end + end end - it 'is truthy when language is SVG' do - git_blob = double(text?: true, language: double(name: 'SVG')) + context 'if the blob is not stored in LFS' do + let(:blob) { fake_blob(path: 'file.md') } - expect(described_class.decorate(git_blob)).to be_svg + it 'returns false' do + expect(blob.stored_externally?).to be_falsey + end end end - describe '#ipython_notebook?' do - it 'is falsey when language is not Jupyter Notebook' do - git_blob = double(text?: true, language: double(name: 'JSON')) + describe '#raw_binary?' 
do + context 'if the blob is stored externally' do + context 'if the extension has a rich viewer' do + context 'if the viewer is binary' do + it 'returns true' do + blob = fake_blob(path: 'file.pdf', lfs: true) + + expect(blob.raw_binary?).to be_truthy + end + end + + context 'if the viewer is text-based' do + it 'return false' do + blob = fake_blob(path: 'file.md', lfs: true) - expect(described_class.decorate(git_blob)).not_to be_ipython_notebook + expect(blob.raw_binary?).to be_falsey + end + end + end + + context "if the extension doesn't have a rich viewer" do + context 'if the extension has a text mime type' do + context 'if the extension is for a programming language' do + it 'returns false' do + blob = fake_blob(path: 'file.txt', lfs: true) + + expect(blob.raw_binary?).to be_falsey + end + end + + context 'if the extension is not for a programming language' do + it 'returns false' do + blob = fake_blob(path: 'file.ics', lfs: true) + + expect(blob.raw_binary?).to be_falsey + end + end + end + + context 'if the extension has a binary mime type' do + context 'if the extension is for a programming language' do + it 'returns false' do + blob = fake_blob(path: 'file.rb', lfs: true) + + expect(blob.raw_binary?).to be_falsey + end + end + + context 'if the extension is not for a programming language' do + it 'returns true' do + blob = fake_blob(path: 'file.exe', lfs: true) + + expect(blob.raw_binary?).to be_truthy + end + end + end + + context 'if the extension has an unknown mime type' do + context 'if the extension is for a programming language' do + it 'returns false' do + blob = fake_blob(path: 'file.ini', lfs: true) + + expect(blob.raw_binary?).to be_falsey + end + end + + context 'if the extension is not for a programming language' do + it 'returns true' do + blob = fake_blob(path: 'file.wtf', lfs: true) + + expect(blob.raw_binary?).to be_truthy + end + end + end + end + end + + context 'if the blob is not stored externally' do + context 'if the blob is binary' do + it 'returns true' do + blob = fake_blob(path: 'file.pdf', binary: true) + + expect(blob.raw_binary?).to be_truthy + end + end + + context 'if the blob is text-based' do + it 'return false' do + blob = fake_blob(path: 'file.md') + + expect(blob.raw_binary?).to be_falsey + end + end end + end - it 'is truthy when language is Jupyter Notebook' do - git_blob = double(text?: true, language: double(name: 'Jupyter Notebook')) + describe '#extension' do + it 'returns the extension' do + blob = fake_blob(path: 'file.md') - expect(described_class.decorate(git_blob)).to be_ipython_notebook + expect(blob.extension).to eq('md') end end - describe '#video?' 
do - it 'is falsey with image extension' do - git_blob = Gitlab::Git::Blob.new(name: 'image.png') + describe '#simple_viewer' do + context 'when the blob is empty' do + it 'returns an empty viewer' do + blob = fake_blob(data: '', size: 0) + + expect(blob.simple_viewer).to be_a(BlobViewer::Empty) + end + end - expect(described_class.decorate(git_blob)).not_to be_video + context 'when the file represented by the blob is binary' do + it 'returns a download viewer' do + blob = fake_blob(binary: true) + + expect(blob.simple_viewer).to be_a(BlobViewer::Download) + end end - UploaderHelper::VIDEO_EXT.each do |ext| - it "is truthy when extension is .#{ext}" do - git_blob = Gitlab::Git::Blob.new(name: "video.#{ext}") + context 'when the file represented by the blob is text-based' do + it 'returns a text viewer' do + blob = fake_blob - expect(described_class.decorate(git_blob)).to be_video + expect(blob.simple_viewer).to be_a(BlobViewer::Text) end end end - describe '#to_partial_path' do - let(:project) { double(lfs_enabled?: true) } + describe '#rich_viewer' do + context 'when the blob has an external storage error' do + before do + project.lfs_enabled = false + end + + it 'returns nil' do + blob = fake_blob(path: 'file.pdf', lfs: true) + + expect(blob.rich_viewer).to be_nil + end + end + + context 'when the blob is empty' do + it 'returns nil' do + blob = fake_blob(data: '') + + expect(blob.rich_viewer).to be_nil + end + end - def stubbed_blob(overrides = {}) - overrides.reverse_merge!( - image?: false, - language: nil, - lfs_pointer?: false, - svg?: false, - text?: false - ) + context 'when the blob is stored externally' do + it 'returns a matching viewer' do + blob = fake_blob(path: 'file.pdf', lfs: true) - described_class.decorate(double).tap do |blob| - allow(blob).to receive_messages(overrides) + expect(blob.rich_viewer).to be_a(BlobViewer::PDF) end end - it 'handles LFS pointers with LFS enabled' do - blob = stubbed_blob(lfs_pointer?: true, text?: true) - expect(blob.to_partial_path(project)).to eq 'download' + context 'when the blob is binary' do + it 'returns a matching binary viewer' do + blob = fake_blob(path: 'file.pdf', binary: true) + + expect(blob.rich_viewer).to be_a(BlobViewer::PDF) + end end - it 'handles LFS pointers with LFS disabled' do - blob = stubbed_blob(lfs_pointer?: true, text?: true) - project = double(lfs_enabled?: false) - expect(blob.to_partial_path(project)).to eq 'text' + context 'when the blob is text-based' do + it 'returns a matching text-based viewer' do + blob = fake_blob(path: 'file.md') + + expect(blob.rich_viewer).to be_a(BlobViewer::Markup) + end end + end + + describe '#auxiliary_viewer' do + context 'when the blob has an external storage error' do + before do + project.lfs_enabled = false + end - it 'handles SVGs' do - blob = stubbed_blob(text?: true, svg?: true) - expect(blob.to_partial_path(project)).to eq 'image' + it 'returns nil' do + blob = fake_blob(path: 'LICENSE', lfs: true) + + expect(blob.auxiliary_viewer).to be_nil + end end - it 'handles images' do - blob = stubbed_blob(image?: true) - expect(blob.to_partial_path(project)).to eq 'image' + context 'when the blob is empty' do + it 'returns nil' do + blob = fake_blob(data: '') + + expect(blob.auxiliary_viewer).to be_nil + end end - it 'handles text' do - blob = stubbed_blob(text?: true) - expect(blob.to_partial_path(project)).to eq 'text' + context 'when the blob is stored externally' do + it 'returns a matching viewer' do + blob = fake_blob(path: 'LICENSE', lfs: true) + + 
expect(blob.auxiliary_viewer).to be_a(BlobViewer::License) + end end - it 'defaults to download' do - blob = stubbed_blob - expect(blob.to_partial_path(project)).to eq 'download' + context 'when the blob is binary' do + it 'returns nil' do + blob = fake_blob(path: 'LICENSE', binary: true) + + expect(blob.auxiliary_viewer).to be_nil + end end - it 'handles iPython notebooks' do - blob = stubbed_blob(text?: true, ipython_notebook?: true) - expect(blob.to_partial_path(project)).to eq 'notebook' + context 'when the blob is text-based' do + it 'returns a matching text-based viewer' do + blob = fake_blob(path: 'LICENSE') + + expect(blob.auxiliary_viewer).to be_a(BlobViewer::License) + end end end - describe '#size_within_svg_limits?' do - let(:blob) { described_class.decorate(double(:blob)) } + describe '#rendered_as_text?' do + context 'when ignoring errors' do + context 'when the simple viewer is text-based' do + it 'returns true' do + blob = fake_blob(path: 'file.md', size: 100.megabytes) + + expect(blob.rendered_as_text?).to be_truthy + end + end - it 'returns true when the blob size is smaller than the SVG limit' do - expect(blob).to receive(:size).and_return(42) + context 'when the simple viewer is binary' do + it 'returns false' do + blob = fake_blob(path: 'file.pdf', binary: true, size: 100.megabytes) - expect(blob.size_within_svg_limits?).to eq(true) + expect(blob.rendered_as_text?).to be_falsey + end + end end - it 'returns true when the blob size is equal to the SVG limit' do - expect(blob).to receive(:size).and_return(Blob::MAXIMUM_SVG_SIZE) + context 'when not ignoring errors' do + context 'when the viewer has render errors' do + it 'returns false' do + blob = fake_blob(path: 'file.md', size: 100.megabytes) - expect(blob.size_within_svg_limits?).to eq(true) - end + expect(blob.rendered_as_text?(ignore_errors: false)).to be_falsey + end + end - it 'returns false when the blob size is larger than the SVG limit' do - expect(blob).to receive(:size).and_return(1.terabyte) + context "when the viewer doesn't have render errors" do + it 'returns true' do + blob = fake_blob(path: 'file.md') - expect(blob.size_within_svg_limits?).to eq(false) + expect(blob.rendered_as_text?(ignore_errors: false)).to be_truthy + end + end end end end diff --git a/spec/models/blob_viewer/base_spec.rb b/spec/models/blob_viewer/base_spec.rb new file mode 100644 index 00000000000..92fbf64a6b7 --- /dev/null +++ b/spec/models/blob_viewer/base_spec.rb @@ -0,0 +1,177 @@ +require 'spec_helper' + +describe BlobViewer::Base, model: true do + include FakeBlobHelpers + + let(:project) { build(:empty_project) } + + let(:viewer_class) do + Class.new(described_class) do + include BlobViewer::ServerSide + + self.extensions = %w(pdf) + self.binary = true + self.overridable_max_size = 1.megabyte + self.max_size = 5.megabytes + end + end + + let(:viewer) { viewer_class.new(blob) } + + describe '.can_render?' 
do + context 'when the extension is supported' do + context 'when the binaryness matches' do + let(:blob) { fake_blob(path: 'file.pdf', binary: true) } + + it 'returns true' do + expect(viewer_class.can_render?(blob)).to be_truthy + end + end + + context 'when the binaryness does not match' do + let(:blob) { fake_blob(path: 'file.pdf', binary: false) } + + it 'returns false' do + expect(viewer_class.can_render?(blob)).to be_falsey + end + end + end + + context 'when the file type is supported' do + before do + viewer_class.file_types = %i(license) + viewer_class.binary = false + end + + context 'when the binaryness matches' do + let(:blob) { fake_blob(path: 'LICENSE', binary: false) } + + it 'returns true' do + expect(viewer_class.can_render?(blob)).to be_truthy + end + end + + context 'when the binaryness does not match' do + let(:blob) { fake_blob(path: 'LICENSE', binary: true) } + + it 'returns false' do + expect(viewer_class.can_render?(blob)).to be_falsey + end + end + end + + context 'when the extension and file type are not supported' do + let(:blob) { fake_blob(path: 'file.txt') } + + it 'returns false' do + expect(viewer_class.can_render?(blob)).to be_falsey + end + end + end + + describe '#exceeds_overridable_max_size?' do + context 'when the blob size is larger than the overridable max size' do + let(:blob) { fake_blob(path: 'file.pdf', size: 2.megabytes) } + + it 'returns true' do + expect(viewer.exceeds_overridable_max_size?).to be_truthy + end + end + + context 'when the blob size is smaller than the overridable max size' do + let(:blob) { fake_blob(path: 'file.pdf', size: 10.kilobytes) } + + it 'returns false' do + expect(viewer.exceeds_overridable_max_size?).to be_falsey + end + end + end + + describe '#exceeds_max_size?' do + context 'when the blob size is larger than the max size' do + let(:blob) { fake_blob(path: 'file.pdf', size: 10.megabytes) } + + it 'returns true' do + expect(viewer.exceeds_max_size?).to be_truthy + end + end + + context 'when the blob size is smaller than the max size' do + let(:blob) { fake_blob(path: 'file.pdf', size: 2.megabytes) } + + it 'returns false' do + expect(viewer.exceeds_max_size?).to be_falsey + end + end + end + + describe '#can_override_max_size?' 
do + context 'when the blob size is larger than the overridable max size' do + context 'when the blob size is larger than the max size' do + let(:blob) { fake_blob(path: 'file.pdf', size: 10.megabytes) } + + it 'returns false' do + expect(viewer.can_override_max_size?).to be_falsey + end + end + + context 'when the blob size is smaller than the max size' do + let(:blob) { fake_blob(path: 'file.pdf', size: 2.megabytes) } + + it 'returns true' do + expect(viewer.can_override_max_size?).to be_truthy + end + end + end + + context 'when the blob size is smaller than the overridable max size' do + let(:blob) { fake_blob(path: 'file.pdf', size: 10.kilobytes) } + + it 'returns false' do + expect(viewer.can_override_max_size?).to be_falsey + end + end + end + + describe '#render_error' do + context 'when the max size is overridden' do + before do + viewer.override_max_size = true + end + + context 'when the blob size is larger than the max size' do + let(:blob) { fake_blob(path: 'file.pdf', size: 10.megabytes) } + + it 'returns :too_large' do + expect(viewer.render_error).to eq(:too_large) + end + end + + context 'when the blob size is smaller than the max size' do + let(:blob) { fake_blob(path: 'file.pdf', size: 2.megabytes) } + + it 'returns nil' do + expect(viewer.render_error).to be_nil + end + end + end + + context 'when the max size is not overridden' do + context 'when the blob size is larger than the overridable max size' do + let(:blob) { fake_blob(path: 'file.pdf', size: 2.megabytes) } + + it 'returns :too_large' do + expect(viewer.render_error).to eq(:too_large) + end + end + + context 'when the blob size is smaller than the overridable max size' do + let(:blob) { fake_blob(path: 'file.pdf', size: 10.kilobytes) } + + it 'returns nil' do + expect(viewer.render_error).to be_nil + end + end + end + end +end diff --git a/spec/models/blob_viewer/changelog_spec.rb b/spec/models/blob_viewer/changelog_spec.rb new file mode 100644 index 00000000000..9066c5a05ac --- /dev/null +++ b/spec/models/blob_viewer/changelog_spec.rb @@ -0,0 +1,27 @@ +require 'spec_helper' + +describe BlobViewer::Changelog, model: true do + include FakeBlobHelpers + + let(:project) { create(:project, :repository) } + let(:blob) { fake_blob(path: 'CHANGELOG') } + subject { described_class.new(blob) } + + describe '#render_error' do + context 'when there are no tags' do + before do + allow(project.repository).to receive(:tag_count).and_return(0) + end + + it 'returns :no_tags' do + expect(subject.render_error).to eq(:no_tags) + end + end + + context 'when there are tags' do + it 'returns nil' do + expect(subject.render_error).to be_nil + end + end + end +end diff --git a/spec/models/blob_viewer/composer_json_spec.rb b/spec/models/blob_viewer/composer_json_spec.rb new file mode 100644 index 00000000000..df4f1f4815c --- /dev/null +++ b/spec/models/blob_viewer/composer_json_spec.rb @@ -0,0 +1,25 @@ +require 'spec_helper' + +describe BlobViewer::ComposerJson, model: true do + include FakeBlobHelpers + + let(:project) { build(:project) } + let(:data) do + <<-SPEC.strip_heredoc + { + "name": "laravel/laravel", + "homepage": "https://laravel.com/" + } + SPEC + end + let(:blob) { fake_blob(path: 'composer.json', data: data) } + subject { described_class.new(blob) } + + describe '#package_name' do + it 'returns the package name' do + expect(subject).to receive(:prepare!) 
+ + expect(subject.package_name).to eq('laravel/laravel') + end + end +end diff --git a/spec/models/blob_viewer/gemspec_spec.rb b/spec/models/blob_viewer/gemspec_spec.rb new file mode 100644 index 00000000000..81e932de290 --- /dev/null +++ b/spec/models/blob_viewer/gemspec_spec.rb @@ -0,0 +1,25 @@ +require 'spec_helper' + +describe BlobViewer::Gemspec, model: true do + include FakeBlobHelpers + + let(:project) { build(:project) } + let(:data) do + <<-SPEC.strip_heredoc + Gem::Specification.new do |s| + s.platform = Gem::Platform::RUBY + s.name = "activerecord" + end + SPEC + end + let(:blob) { fake_blob(path: 'activerecord.gemspec', data: data) } + subject { described_class.new(blob) } + + describe '#package_name' do + it 'returns the package name' do + expect(subject).to receive(:prepare!) + + expect(subject.package_name).to eq('activerecord') + end + end +end diff --git a/spec/models/blob_viewer/gitlab_ci_yml_spec.rb b/spec/models/blob_viewer/gitlab_ci_yml_spec.rb new file mode 100644 index 00000000000..0c6c24ece21 --- /dev/null +++ b/spec/models/blob_viewer/gitlab_ci_yml_spec.rb @@ -0,0 +1,32 @@ +require 'spec_helper' + +describe BlobViewer::GitlabCiYml, model: true do + include FakeBlobHelpers + + let(:project) { build(:project) } + let(:data) { File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) } + let(:blob) { fake_blob(path: '.gitlab-ci.yml', data: data) } + subject { described_class.new(blob) } + + describe '#validation_message' do + it 'calls prepare! on the viewer' do + expect(subject).to receive(:prepare!) + + subject.validation_message + end + + context 'when the configuration is valid' do + it 'returns nil' do + expect(subject.validation_message).to be_nil + end + end + + context 'when the configuration is invalid' do + let(:data) { 'oof' } + + it 'returns the error message' do + expect(subject.validation_message).to eq('Invalid configuration format') + end + end + end +end diff --git a/spec/models/blob_viewer/license_spec.rb b/spec/models/blob_viewer/license_spec.rb new file mode 100644 index 00000000000..944ddd32b92 --- /dev/null +++ b/spec/models/blob_viewer/license_spec.rb @@ -0,0 +1,34 @@ +require 'spec_helper' + +describe BlobViewer::License, model: true do + include FakeBlobHelpers + + let(:project) { create(:project, :repository) } + let(:blob) { fake_blob(path: 'LICENSE') } + subject { described_class.new(blob) } + + describe '#license' do + it 'returns the blob project repository license' do + expect(subject.license).not_to be_nil + expect(subject.license).to eq(project.repository.license) + end + end + + describe '#render_error' do + context 'when there is no license' do + before do + allow(project.repository).to receive(:license).and_return(nil) + end + + it 'returns :unknown_license' do + expect(subject.render_error).to eq(:unknown_license) + end + end + + context 'when there is a license' do + it 'returns nil' do + expect(subject.render_error).to be_nil + end + end + end +end diff --git a/spec/models/blob_viewer/package_json_spec.rb b/spec/models/blob_viewer/package_json_spec.rb new file mode 100644 index 00000000000..5c9a9c81963 --- /dev/null +++ b/spec/models/blob_viewer/package_json_spec.rb @@ -0,0 +1,25 @@ +require 'spec_helper' + +describe BlobViewer::PackageJson, model: true do + include FakeBlobHelpers + + let(:project) { build(:project) } + let(:data) do + <<-SPEC.strip_heredoc + { + "name": "module-name", + "version": "10.3.1" + } + SPEC + end + let(:blob) { fake_blob(path: 'package.json', data: data) } + subject { 
described_class.new(blob) } + + describe '#package_name' do + it 'returns the package name' do + expect(subject).to receive(:prepare!) + + expect(subject.package_name).to eq('module-name') + end + end +end diff --git a/spec/models/blob_viewer/podspec_json_spec.rb b/spec/models/blob_viewer/podspec_json_spec.rb new file mode 100644 index 00000000000..42a00940bc5 --- /dev/null +++ b/spec/models/blob_viewer/podspec_json_spec.rb @@ -0,0 +1,25 @@ +require 'spec_helper' + +describe BlobViewer::PodspecJson, model: true do + include FakeBlobHelpers + + let(:project) { build(:project) } + let(:data) do + <<-SPEC.strip_heredoc + { + "name": "AFNetworking", + "version": "2.0.0" + } + SPEC + end + let(:blob) { fake_blob(path: 'AFNetworking.podspec.json', data: data) } + subject { described_class.new(blob) } + + describe '#package_name' do + it 'returns the package name' do + expect(subject).to receive(:prepare!) + + expect(subject.package_name).to eq('AFNetworking') + end + end +end diff --git a/spec/models/blob_viewer/podspec_spec.rb b/spec/models/blob_viewer/podspec_spec.rb new file mode 100644 index 00000000000..6c9f0f42d53 --- /dev/null +++ b/spec/models/blob_viewer/podspec_spec.rb @@ -0,0 +1,25 @@ +require 'spec_helper' + +describe BlobViewer::Podspec, model: true do + include FakeBlobHelpers + + let(:project) { build(:project) } + let(:data) do + <<-SPEC.strip_heredoc + Pod::Spec.new do |spec| + spec.name = 'Reachability' + spec.version = '3.1.0' + end + SPEC + end + let(:blob) { fake_blob(path: 'Reachability.podspec', data: data) } + subject { described_class.new(blob) } + + describe '#package_name' do + it 'returns the package name' do + expect(subject).to receive(:prepare!) + + expect(subject.package_name).to eq('Reachability') + end + end +end diff --git a/spec/models/blob_viewer/route_map_spec.rb b/spec/models/blob_viewer/route_map_spec.rb new file mode 100644 index 00000000000..4854e0262d9 --- /dev/null +++ b/spec/models/blob_viewer/route_map_spec.rb @@ -0,0 +1,38 @@ +require 'spec_helper' + +describe BlobViewer::RouteMap, model: true do + include FakeBlobHelpers + + let(:project) { build(:project) } + let(:data) do + <<-MAP.strip_heredoc + # Team data + - source: 'data/team.yml' + public: 'team/' + MAP + end + let(:blob) { fake_blob(path: '.gitlab/route-map.yml', data: data) } + subject { described_class.new(blob) } + + describe '#validation_message' do + it 'calls prepare! on the viewer' do + expect(subject).to receive(:prepare!) + + subject.validation_message + end + + context 'when the configuration is valid' do + it 'returns nil' do + expect(subject.validation_message).to be_nil + end + end + + context 'when the configuration is invalid' do + let(:data) { 'oof' } + + it 'returns the error message' do + expect(subject.validation_message).to eq('Route map is not an array') + end + end + end +end diff --git a/spec/models/blob_viewer/server_side_spec.rb b/spec/models/blob_viewer/server_side_spec.rb new file mode 100644 index 00000000000..f047953d540 --- /dev/null +++ b/spec/models/blob_viewer/server_side_spec.rb @@ -0,0 +1,41 @@ +require 'spec_helper' + +describe BlobViewer::ServerSide, model: true do + include FakeBlobHelpers + + let(:project) { build(:empty_project) } + + let(:viewer_class) do + Class.new(BlobViewer::Base) do + include BlobViewer::ServerSide + end + end + + subject { viewer_class.new(blob) } + + describe '#prepare!' do + let(:blob) { fake_blob(path: 'file.txt') } + + it 'loads all blob data' do + expect(blob).to receive(:load_all_data!) + + subject.prepare! 
+ end + end + + describe '#render_error' do + context 'when the blob is stored externally' do + let(:project) { build(:empty_project, lfs_enabled: true) } + + let(:blob) { fake_blob(path: 'file.pdf', lfs: true) } + + before do + allow(Gitlab.config.lfs).to receive(:enabled).and_return(true) + end + + it 'return :server_side_but_stored_externally' do + expect(subject.render_error).to eq(:server_side_but_stored_externally) + end + end + end +end diff --git a/spec/models/ci/artifact_blob_spec.rb b/spec/models/ci/artifact_blob_spec.rb new file mode 100644 index 00000000000..968593d7e9b --- /dev/null +++ b/spec/models/ci/artifact_blob_spec.rb @@ -0,0 +1,44 @@ +require 'spec_helper' + +describe Ci::ArtifactBlob, models: true do + let(:build) { create(:ci_build, :artifacts) } + let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/another-subdirectory/banana_sample.gif') } + + subject { described_class.new(entry) } + + describe '#id' do + it 'returns a hash of the path' do + expect(subject.id).to eq(Digest::SHA1.hexdigest(entry.path)) + end + end + + describe '#name' do + it 'returns the entry name' do + expect(subject.name).to eq(entry.name) + end + end + + describe '#path' do + it 'returns the entry path' do + expect(subject.path).to eq(entry.path) + end + end + + describe '#size' do + it 'returns the entry size' do + expect(subject.size).to eq(entry.metadata[:size]) + end + end + + describe '#mode' do + it 'returns the entry mode' do + expect(subject.mode).to eq(entry.metadata[:mode]) + end + end + + describe '#external_storage' do + it 'returns :build_artifact' do + expect(subject.external_storage).to eq(:build_artifact) + end + end +end diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb index 8dbcf50ee0c..e971b4bc3f9 100644 --- a/spec/models/ci/build_spec.rb +++ b/spec/models/ci/build_spec.rb @@ -17,8 +17,9 @@ describe Ci::Build, :models do it { is_expected.to belong_to(:trigger_request) } it { is_expected.to belong_to(:erased_by) } it { is_expected.to have_many(:deployments) } - it { is_expected.to validate_presence_of :ref } - it { is_expected.to respond_to :trace_html } + it { is_expected.to validate_presence_of(:ref) } + it { is_expected.to respond_to(:has_trace?) } + it { is_expected.to respond_to(:trace) } describe '#actionize' do context 'when build is a created' do @@ -78,32 +79,6 @@ describe Ci::Build, :models do end end - describe '#append_trace' do - subject { build.trace_html } - - context 'when build.trace hides runners token' do - let(:token) { 'my_secret_token' } - - before do - build.project.update(runners_token: token) - build.append_trace(token, 0) - end - - it { is_expected.not_to include(token) } - end - - context 'when build.trace hides build token' do - let(:token) { 'my_secret_token' } - - before do - build.update(token: token) - build.append_trace(token, 0) - end - - it { is_expected.not_to include(token) } - end - end - describe '#artifacts?' do subject { build.artifacts? 
} @@ -272,15 +247,101 @@ describe Ci::Build, :models do describe '#update_coverage' do context "regarding coverage_regex's value," do - it "saves the correct extracted coverage value" do + before do build.coverage_regex = '\(\d+.\d+\%\) covered' - allow(build).to receive(:trace) { 'Coverage 1033 / 1051 LOC (98.29%) covered' } - expect(build).to receive(:update_attributes).with(coverage: 98.29) { true } - expect(build.update_coverage).to be true + build.trace.set('Coverage 1033 / 1051 LOC (98.29%) covered') + end + + it "saves the correct extracted coverage value" do + expect(build.update_coverage).to be(true) + expect(build.coverage).to eq(98.29) end end end + describe '#trace' do + subject { build.trace } + + it { is_expected.to be_a(Gitlab::Ci::Trace) } + end + + describe '#has_trace?' do + subject { build.has_trace? } + + it "expect to call exist? method" do + expect_any_instance_of(Gitlab::Ci::Trace).to receive(:exist?) + .and_return(true) + + is_expected.to be(true) + end + end + + describe '#trace=' do + it "expect to fail trace=" do + expect { build.trace = "new" }.to raise_error(NotImplementedError) + end + end + + describe '#old_trace' do + subject { build.old_trace } + + before do + build.update_column(:trace, 'old trace') + end + + it "expect to receive data from database" do + is_expected.to eq('old trace') + end + end + + describe '#erase_old_trace!' do + subject { build.send(:read_attribute, :trace) } + + before do + build.send(:write_attribute, :trace, 'old trace') + end + + it "expect to receive data from database" do + build.erase_old_trace! + + is_expected.to be_nil + end + end + + describe '#hide_secrets' do + let(:subject) { build.hide_secrets(data) } + + context 'hide runners token' do + let(:data) { 'new token data'} + + before do + build.project.update(runners_token: 'token') + end + + it { is_expected.to eq('new xxxxx data') } + end + + context 'hide build token' do + let(:data) { 'new token data'} + + before do + build.update(token: 'token') + end + + it { is_expected.to eq('new xxxxx data') } + end + + context 'hide build token' do + let(:data) { 'new token data'} + + before do + build.update(token: 'token') + end + + it { is_expected.to eq('new xxxxx data') } + end + end + describe 'deployment' do describe '#last_deployment' do subject { build.last_deployment } @@ -438,7 +499,7 @@ describe Ci::Build, :models do end it 'erases build trace in trace file' do - expect(build.trace).to be_empty + expect(build).not_to have_trace end it 'sets erased to true' do @@ -532,38 +593,6 @@ describe Ci::Build, :models do end end - describe '#extract_coverage' do - context 'valid content & regex' do - subject { build.extract_coverage('Coverage 1033 / 1051 LOC (98.29%) covered', '\(\d+.\d+\%\) covered') } - - it { is_expected.to eq(98.29) } - end - - context 'valid content & bad regex' do - subject { build.extract_coverage('Coverage 1033 / 1051 LOC (98.29%) covered', 'very covered') } - - it { is_expected.to be_nil } - end - - context 'no coverage content & regex' do - subject { build.extract_coverage('No coverage for today :sad:', '\(\d+.\d+\%\) covered') } - - it { is_expected.to be_nil } - end - - context 'multiple results in content & regex' do - subject { build.extract_coverage(' (98.39%) covered. 
(98.29%) covered', '\(\d+.\d+\%\) covered') } - - it { is_expected.to eq(98.29) } - end - - context 'using a regex capture' do - subject { build.extract_coverage('TOTAL 9926 3489 65%', 'TOTAL\s+\d+\s+\d+\s+(\d{1,3}\%)') } - - it { is_expected.to eq(65) } - end - end - describe '#first_pending' do let!(:first) { create(:ci_build, pipeline: pipeline, status: 'pending', created_at: Date.yesterday) } let!(:second) { create(:ci_build, pipeline: pipeline, status: 'pending') } @@ -735,40 +764,6 @@ describe Ci::Build, :models do end end - describe '#has_commands?' do - context 'when build has commands' do - let(:build) do - create(:ci_build, commands: 'rspec') - end - - it 'has commands' do - expect(build).to have_commands - end - end - - context 'when does not have commands' do - context 'when commands are an empty string' do - let(:build) do - create(:ci_build, commands: '') - end - - it 'has no commands' do - expect(build).not_to have_commands - end - end - - context 'when commands are not set at all' do - let(:build) do - create(:ci_build, commands: nil) - end - - it 'has no commands' do - expect(build).not_to have_commands - end - end - end - end - describe '#has_tags?' do context 'when build has tags' do subject { create(:ci_build, tag_list: ['tag']) } @@ -902,22 +897,26 @@ describe Ci::Build, :models do end describe '#persisted_environment' do - before do - @environment = create(:environment, project: project, name: "foo-#{project.default_branch}") + let!(:environment) do + create(:environment, project: project, name: "foo-#{project.default_branch}") end subject { build.persisted_environment } - context 'referenced literally' do - let(:build) { create(:ci_build, pipeline: pipeline, environment: "foo-#{project.default_branch}") } + context 'when referenced literally' do + let(:build) do + create(:ci_build, pipeline: pipeline, environment: "foo-#{project.default_branch}") + end - it { is_expected.to eq(@environment) } + it { is_expected.to eq(environment) } end - context 'referenced with a variable' do - let(:build) { create(:ci_build, pipeline: pipeline, environment: "foo-$CI_COMMIT_REF_NAME") } + context 'when referenced with a variable' do + let(:build) do + create(:ci_build, pipeline: pipeline, environment: "foo-$CI_COMMIT_REF_NAME") + end - it { is_expected.to eq(@environment) } + it { is_expected.to eq(environment) } end end @@ -928,26 +927,8 @@ describe Ci::Build, :models do project.add_developer(user) end - context 'when build is manual' do - it 'enqueues a build' do - new_build = build.play(user) - - expect(new_build).to be_pending - expect(new_build).to eq(build) - end - end - - context 'when build is passed' do - before do - build.update(status: 'success') - end - - it 'creates a new build' do - new_build = build.play(user) - - expect(new_build).to be_pending - expect(new_build).not_to eq(build) - end + it 'enqueues the build' do + expect(build.play(user)).to be_pending end end @@ -983,32 +964,6 @@ describe Ci::Build, :models do it { is_expected.to eq(project.name) } end - describe '#raw_trace' do - subject { build.raw_trace } - - context 'when build.trace hides runners token' do - let(:token) { 'my_secret_token' } - - before do - build.project.update(runners_token: token) - build.update(trace: token) - end - - it { is_expected.not_to include(token) } - end - - context 'when build.trace hides build token' do - let(:token) { 'my_secret_token' } - - before do - build.update(token: token) - build.update(trace: token) - end - - it { is_expected.not_to include(token) } - end - end - 
describe '#ref_slug' do { 'master' => 'master', @@ -1017,7 +972,7 @@ describe Ci::Build, :models do 'fix-1-foo' => 'fix-1-foo', 'a' * 63 => 'a' * 63, 'a' * 64 => 'a' * 63, - 'FOO' => 'foo', + 'FOO' => 'foo' }.each do |ref, slug| it "transforms #{ref} to #{slug}" do build.ref = ref @@ -1074,61 +1029,6 @@ describe Ci::Build, :models do end end - describe '#trace' do - it 'obfuscates project runners token' do - allow(build).to receive(:raw_trace).and_return("Test: #{build.project.runners_token}") - - expect(build.trace).to eq("Test: xxxxxxxxxxxxxxxxxxxx") - end - - it 'empty project runners token' do - allow(build).to receive(:raw_trace).and_return(test_trace) - # runners_token can't normally be set to nil - allow(build.project).to receive(:runners_token).and_return(nil) - - expect(build.trace).to eq(test_trace) - end - - context 'when build does not have trace' do - it 'is is empty' do - expect(build.trace).to be_nil - end - end - - context 'when trace contains text' do - let(:text) { 'example output' } - before do - build.trace = text - end - - it { expect(build.trace).to eq(text) } - end - - context 'when trace hides runners token' do - let(:token) { 'my_secret_token' } - - before do - build.update(trace: token) - build.project.update(runners_token: token) - end - - it { expect(build.trace).not_to include(token) } - it { expect(build.raw_trace).to include(token) } - end - - context 'when build.trace hides build token' do - let(:token) { 'my_secret_token' } - - before do - build.update(trace: token) - build.update(token: token) - end - - it { expect(build.trace).not_to include(token) } - it { expect(build.raw_trace).to include(token) } - end - end - describe '#has_expiring_artifacts?' do context 'when artifacts have expiration date set' do before { build.update(artifacts_expire_at: 1.day.from_now) } @@ -1147,66 +1047,6 @@ describe Ci::Build, :models do end end - describe '#has_trace_file?' 
do - context 'when there is no trace' do - it { expect(build.has_trace_file?).to be_falsey } - it { expect(build.trace).to be_nil } - end - - context 'when there is a trace' do - context 'when trace is stored in file' do - let(:build_with_trace) { create(:ci_build, :trace) } - - it { expect(build_with_trace.has_trace_file?).to be_truthy } - it { expect(build_with_trace.trace).to eq('BUILD TRACE') } - end - - context 'when trace is stored in old file' do - before do - allow(build.project).to receive(:ci_id).and_return(999) - allow(File).to receive(:exist?).with(build.path_to_trace).and_return(false) - allow(File).to receive(:exist?).with(build.old_path_to_trace).and_return(true) - allow(File).to receive(:read).with(build.old_path_to_trace).and_return(test_trace) - end - - it { expect(build.has_trace_file?).to be_truthy } - it { expect(build.trace).to eq(test_trace) } - end - - context 'when trace is stored in DB' do - before do - allow(build.project).to receive(:ci_id).and_return(nil) - allow(build).to receive(:read_attribute).with(:trace).and_return(test_trace) - allow(File).to receive(:exist?).with(build.path_to_trace).and_return(false) - allow(File).to receive(:exist?).with(build.old_path_to_trace).and_return(false) - end - - it { expect(build.has_trace_file?).to be_falsey } - it { expect(build.trace).to eq(test_trace) } - end - end - end - - describe '#trace_file_path' do - context 'when trace is stored in file' do - before do - allow(build).to receive(:has_trace_file?).and_return(true) - allow(build).to receive(:has_old_trace_file?).and_return(false) - end - - it { expect(build.trace_file_path).to eq(build.path_to_trace) } - end - - context 'when trace is stored in old file' do - before do - allow(build).to receive(:has_trace_file?).and_return(true) - allow(build).to receive(:has_old_trace_file?).and_return(true) - end - - it { expect(build.trace_file_path).to eq(build.old_path_to_trace) } - end - end - describe '#update_project_statistics' do let!(:build) { create(:ci_build, artifacts_size: 23) } @@ -1304,7 +1144,7 @@ describe Ci::Build, :models do { key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true }, { key: 'CI_REGISTRY_USER', value: 'gitlab-ci-token', public: true }, { key: 'CI_REGISTRY_PASSWORD', value: build.token, public: false }, - { key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false }, + { key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false } ] end @@ -1460,7 +1300,7 @@ describe Ci::Build, :models do { key: 'CI_REGISTRY', value: 'registry.example.com', public: true } end let(:ci_registry_image) do - { key: 'CI_REGISTRY_IMAGE', value: project.container_registry_repository_url, public: true } + { key: 'CI_REGISTRY_IMAGE', value: project.container_registry_url, public: true } end context 'and is disabled for project' do diff --git a/spec/models/ci/group_spec.rb b/spec/models/ci/group_spec.rb new file mode 100644 index 00000000000..62e15093089 --- /dev/null +++ b/spec/models/ci/group_spec.rb @@ -0,0 +1,44 @@ +require 'spec_helper' + +describe Ci::Group, models: true do + subject do + described_class.new('test', name: 'rspec', jobs: jobs) + end + + let!(:jobs) { build_list(:ci_build, 1, :success) } + + it { is_expected.to include_module(StaticModel) } + + it { is_expected.to respond_to(:stage) } + it { is_expected.to respond_to(:name) } + it { is_expected.to respond_to(:jobs) } + it { is_expected.to respond_to(:status) } + + describe '#size' do + it 'returns the number of statuses in the group' do + expect(subject.size).to eq(1) + end + end + + 
describe '#detailed_status' do + context 'when there is only one item in the group' do + it 'calls the status from the object itself' do + expect(jobs.first).to receive(:detailed_status) + + expect(subject.detailed_status(double(:user))) + end + end + + context 'when there are more than one commit status in the group' do + let(:jobs) do + [create(:ci_build, :failed), + create(:ci_build, :success)] + end + + it 'fabricates a new detailed status object' do + expect(subject.detailed_status(double(:user))) + .to be_a(Gitlab::Ci::Status::Failed) + end + end + end +end diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb new file mode 100644 index 00000000000..822b98c5f6c --- /dev/null +++ b/spec/models/ci/pipeline_schedule_spec.rb @@ -0,0 +1,112 @@ +require 'spec_helper' + +describe Ci::PipelineSchedule, models: true do + it { is_expected.to belong_to(:project) } + it { is_expected.to belong_to(:owner) } + + it { is_expected.to have_many(:pipelines) } + + it { is_expected.to respond_to(:ref) } + it { is_expected.to respond_to(:cron) } + it { is_expected.to respond_to(:cron_timezone) } + it { is_expected.to respond_to(:description) } + it { is_expected.to respond_to(:next_run_at) } + it { is_expected.to respond_to(:deleted_at) } + + describe 'validations' do + it 'does not allow invalid cron patterns' do + pipeline_schedule = build(:ci_pipeline_schedule, cron: '0 0 0 * *') + + expect(pipeline_schedule).not_to be_valid + end + + it 'does not allow invalid cron timezones' do + pipeline_schedule = build(:ci_pipeline_schedule, cron_timezone: 'invalid') + + expect(pipeline_schedule).not_to be_valid + end + end + + describe '#set_next_run_at' do + let!(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly) } + + context 'when creating a new pipeline schedule' do + let(:expected_next_run_at) do + Gitlab::Ci::CronParser.new(pipeline_schedule.cron, pipeline_schedule.cron_timezone). + next_time_from(Time.now) + end + + it 'updates next_run_at automatically' do + expect(Ci::PipelineSchedule.last.next_run_at).to eq(expected_next_run_at) + end + end + + context 'when updating the cron of an existing pipeline schedule' do + let(:new_cron) { '0 0 1 1 *' } + + let(:expected_next_run_at) do + Gitlab::Ci::CronParser.new(new_cron, pipeline_schedule.cron_timezone). + next_time_from(Time.now) + end + + it 'updates next_run_at automatically' do + pipeline_schedule.update!(cron: new_cron) + + expect(Ci::PipelineSchedule.last.next_run_at).to eq(expected_next_run_at) + end + end + end + + describe '#schedule_next_run!' do + let!(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly) } + + context 'when rescheduling 10 days from now' do + let(:future_time) { 10.days.from_now } + + let(:expected_next_run_at) do + Gitlab::Ci::CronParser.new(pipeline_schedule.cron, pipeline_schedule.cron_timezone). + next_time_from(future_time) + end + + it 'points to proper next_run_at' do + Timecop.freeze(future_time) do + pipeline_schedule.schedule_next_run!
+ + expect(pipeline_schedule.next_run_at).to eq(expected_next_run_at) + end + end + end + end + + describe '#real_next_run' do + subject do + described_class.last.real_next_run(worker_cron: worker_cron, + worker_time_zone: worker_time_zone) + end + + context 'when GitLab time_zone is UTC' do + before do + allow(Time).to receive(:zone) + .and_return(ActiveSupport::TimeZone[worker_time_zone]) + end + + let(:worker_time_zone) { 'UTC' } + + context 'when cron_timezone is Eastern Time (US & Canada)' do + before do + create(:ci_pipeline_schedule, :nightly, + cron_timezone: 'Eastern Time (US & Canada)') + end + + let(:worker_cron) { '0 1 2 3 *' } + + it 'returns the next time worker executes' do + expect(subject.min).to eq(0) + expect(subject.hour).to eq(1) + expect(subject.day).to eq(2) + expect(subject.month).to eq(3) + end + end + end + end +end diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb index 53282b999dc..56b24ce62f3 100644 --- a/spec/models/ci/pipeline_spec.rb +++ b/spec/models/ci/pipeline_spec.rb @@ -12,10 +12,14 @@ describe Ci::Pipeline, models: true do it { is_expected.to belong_to(:project) } it { is_expected.to belong_to(:user) } + it { is_expected.to belong_to(:auto_canceled_by) } + it { is_expected.to belong_to(:pipeline_schedule) } it { is_expected.to have_many(:statuses) } it { is_expected.to have_many(:trigger_requests) } it { is_expected.to have_many(:builds) } + it { is_expected.to have_many(:auto_canceled_pipelines) } + it { is_expected.to have_many(:auto_canceled_jobs) } it { is_expected.to validate_presence_of :sha } it { is_expected.to validate_presence_of :status } @@ -56,8 +60,8 @@ describe Ci::Pipeline, models: true do subject { pipeline.retried } before do - @build1 = FactoryGirl.create :ci_build, pipeline: pipeline, name: 'deploy' - @build2 = FactoryGirl.create :ci_build, pipeline: pipeline, name: 'deploy' + @build1 = create(:ci_build, pipeline: pipeline, name: 'deploy', retried: true) + @build2 = create(:ci_build, pipeline: pipeline, name: 'deploy') end it 'returns old builds' do @@ -66,31 +70,31 @@ describe Ci::Pipeline, models: true do end describe "coverage" do - let(:project) { FactoryGirl.create :empty_project, build_coverage_regex: "/.*/" } - let(:pipeline) { FactoryGirl.create :ci_empty_pipeline, project: project } + let(:project) { create(:empty_project, build_coverage_regex: "/.*/") } + let(:pipeline) { create(:ci_empty_pipeline, project: project) } it "calculates average when there are two builds with coverage" do - FactoryGirl.create :ci_build, name: "rspec", coverage: 30, pipeline: pipeline - FactoryGirl.create :ci_build, name: "rubocop", coverage: 40, pipeline: pipeline + create(:ci_build, name: "rspec", coverage: 30, pipeline: pipeline) + create(:ci_build, name: "rubocop", coverage: 40, pipeline: pipeline) expect(pipeline.coverage).to eq("35.00") end it "calculates average when there are two builds with coverage and one with nil" do - FactoryGirl.create :ci_build, name: "rspec", coverage: 30, pipeline: pipeline - FactoryGirl.create :ci_build, name: "rubocop", coverage: 40, pipeline: pipeline - FactoryGirl.create :ci_build, pipeline: pipeline + create(:ci_build, name: "rspec", coverage: 30, pipeline: pipeline) + create(:ci_build, name: "rubocop", coverage: 40, pipeline: pipeline) + create(:ci_build, pipeline: pipeline) expect(pipeline.coverage).to eq("35.00") end it "calculates average when there are two builds with coverage and one is retried" do - FactoryGirl.create :ci_build, name: "rspec", coverage: 30, pipeline: 
pipeline - FactoryGirl.create :ci_build, name: "rubocop", coverage: 30, pipeline: pipeline - FactoryGirl.create :ci_build, name: "rubocop", coverage: 40, pipeline: pipeline + create(:ci_build, name: "rspec", coverage: 30, pipeline: pipeline) + create(:ci_build, name: "rubocop", coverage: 30, pipeline: pipeline, retried: true) + create(:ci_build, name: "rubocop", coverage: 40, pipeline: pipeline) expect(pipeline.coverage).to eq("35.00") end it "calculates average when there is one build without coverage" do - FactoryGirl.create :ci_build, pipeline: pipeline + FactoryGirl.create(:ci_build, pipeline: pipeline) expect(pipeline.coverage).to be_nil end end @@ -134,6 +138,43 @@ describe Ci::Pipeline, models: true do end end + describe '#auto_canceled?' do + subject { pipeline.auto_canceled? } + + context 'when it is canceled' do + before do + pipeline.cancel + end + + context 'when there is auto_canceled_by' do + before do + pipeline.update(auto_canceled_by: create(:ci_empty_pipeline)) + end + + it 'is auto canceled' do + is_expected.to be_truthy + end + end + + context 'when there is no auto_canceled_by' do + it 'is not auto canceled' do + is_expected.to be_falsey + end + end + + context 'when it is retried and canceled manually' do + before do + pipeline.enqueue + pipeline.cancel + end + + it 'is not auto canceled' do + is_expected.to be_falsey + end + end + end + end + describe 'pipeline stages' do before do create(:commit_status, pipeline: pipeline, @@ -181,13 +222,15 @@ describe Ci::Pipeline, models: true do %w(deploy running)]) end - context 'when commit status is retried' do + context 'when commit status is retried' do before do create(:commit_status, pipeline: pipeline, stage: 'build', name: 'mac', stage_idx: 0, status: 'success') + + pipeline.process! end it 'ignores the previous state' do @@ -256,32 +299,56 @@ describe Ci::Pipeline, models: true do describe 'state machine' do let(:current) { Time.now.change(usec: 0) } - let(:build) { create_build('build1', 0) } - let(:build_b) { create_build('build2', 0) } - let(:build_c) { create_build('build3', 0) } + let(:build) { create_build('build1', queued_at: 0) } + let(:build_b) { create_build('build2', queued_at: 0) } + let(:build_c) { create_build('build3', queued_at: 0) } describe '#duration' do - before do - travel_to(current + 30) do - build.run! - build.success! - build_b.run! - build_c.run! - end + context 'when multiple builds are finished' do + before do + travel_to(current + 30) do + build.run! + build.success! + build_b.run! + build_c.run! + end - travel_to(current + 40) do - build_b.drop! + travel_to(current + 40) do + build_b.drop! + end + + travel_to(current + 70) do + build_c.success! + end end - travel_to(current + 70) do - build_c.success! + it 'matches sum of builds duration' do + pipeline.reload + + expect(pipeline.duration).to eq(40) end end - it 'matches sum of builds duration' do - pipeline.reload + context 'when pipeline becomes blocked' do + let!(:build) { create_build('build:1') } + let!(:action) { create_build('manual:action', :manual) } + + before do + travel_to(current + 1.minute) do + build.run! + end + + travel_to(current + 5.minutes) do + build.success! 
+ end + end + + it 'recalculates pipeline duration' do + pipeline.reload - expect(pipeline.duration).to eq(40) + expect(pipeline).to be_manual + expect(pipeline.duration).to eq 4.minutes + end end end @@ -335,12 +402,21 @@ describe Ci::Pipeline, models: true do end end - def create_build(name, queued_at = current, started_from = 0) - create(:ci_build, + describe 'pipeline caching' do + it 'performs ExpirePipelinesCacheWorker' do + expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id) + + pipeline.cancel + end + end + + def create_build(name, *traits, queued_at: current, started_from: 0, **opts) + create(:ci_build, *traits, name: name, pipeline: pipeline, queued_at: queued_at, - started_at: queued_at + started_from) + started_at: queued_at + started_from, + **opts) end end @@ -415,6 +491,10 @@ describe Ci::Pipeline, models: true do context 'there are multiple of the same name' do let!(:manual2) { create(:ci_build, :manual, pipeline: pipeline, name: 'deploy') } + before do + manual.update(retried: true) + end + it 'returns latest one' do is_expected.to contain_exactly(manual2) end @@ -774,6 +854,16 @@ describe Ci::Pipeline, models: true do end end end + + context 'when there is a manual action present in the pipeline' do + before do + create(:ci_build, :manual, pipeline: pipeline) + end + + it 'is not cancelable' do + expect(pipeline).not_to be_cancelable + end + end end describe '#cancel_running' do @@ -966,11 +1056,12 @@ describe Ci::Pipeline, models: true do end describe "#merge_requests" do - let(:project) { create(:project, :repository) } - let(:pipeline) { FactoryGirl.create(:ci_empty_pipeline, status: 'created', project: project, ref: 'master', sha: project.repository.commit('master').id) } + let(:project) { create(:empty_project) } + let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: 'master', sha: 'a288a022a53a5a944fae87bcec6efc87b7061808') } it "returns merge requests whose `diff_head_sha` matches the pipeline's SHA" do - merge_request = create(:merge_request, source_project: project, source_branch: pipeline.ref) + allow_any_instance_of(MergeRequest).to receive(:diff_head_sha) { 'a288a022a53a5a944fae87bcec6efc87b7061808' } + merge_request = create(:merge_request, source_project: project, head_pipeline: pipeline, source_branch: pipeline.ref) expect(pipeline.merge_requests).to eq([merge_request]) end @@ -989,6 +1080,23 @@ describe Ci::Pipeline, models: true do end end + describe "#all_merge_requests" do + let(:project) { create(:empty_project) } + let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: 'master') } + + it "returns all merge requests having the same source branch" do + merge_request = create(:merge_request, source_project: project, source_branch: pipeline.ref) + + expect(pipeline.all_merge_requests).to eq([merge_request]) + end + + it "doesn't return merge requests having a different source branch" do + create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master') + + expect(pipeline.all_merge_requests).to be_empty + end + end + describe '#stuck?' 
do before do create(:ci_build, :pending, pipeline: pipeline) @@ -1031,19 +1139,6 @@ describe Ci::Pipeline, models: true do end end - describe '#update_status' do - let(:pipeline) { create(:ci_pipeline, sha: '123456') } - - it 'updates the cached status' do - fake_status = double - # after updating the status, the status is set to `skipped` for this pipeline's builds - expect(Ci::PipelineStatus).to receive(:new).with(pipeline.project, sha: '123456', status: 'skipped').and_return(fake_status) - expect(fake_status).to receive(:store_in_cache_if_needed) - - pipeline.update_status - end - end - describe 'notifications when pipeline success or failed' do let(:project) { create(:project, :repository) } @@ -1055,10 +1150,13 @@ describe Ci::Pipeline, models: true do end before do - reset_delivered_emails! - project.team << [pipeline.user, Gitlab::Access::DEVELOPER] + pipeline.user.global_notification_setting. + update(level: 'custom', failed_pipeline: true, success_pipeline: true) + + reset_delivered_emails! + perform_enqueued_jobs do pipeline.enqueue pipeline.run diff --git a/spec/models/ci/pipeline_status_spec.rb b/spec/models/ci/pipeline_status_spec.rb deleted file mode 100644 index bc5b71666c2..00000000000 --- a/spec/models/ci/pipeline_status_spec.rb +++ /dev/null @@ -1,173 +0,0 @@ -require 'spec_helper' - -describe Ci::PipelineStatus do - let(:project) { create(:project) } - let(:pipeline_status) { described_class.new(project) } - - describe '.load_for_project' do - it "loads the status" do - expect_any_instance_of(described_class).to receive(:load_status) - - described_class.load_for_project(project) - end - end - - describe '#has_status?' do - it "is false when the status wasn't loaded yet" do - expect(pipeline_status.has_status?).to be_falsy - end - - it 'is true when all status information was loaded' do - fake_commit = double - allow(fake_commit).to receive(:status).and_return('failed') - allow(fake_commit).to receive(:sha).and_return('failed424d1b73bc0d3cb726eb7dc4ce17a4d48552f8c6') - allow(pipeline_status).to receive(:commit).and_return(fake_commit) - allow(pipeline_status).to receive(:has_cache?).and_return(false) - - pipeline_status.load_status - - expect(pipeline_status.has_status?).to be_truthy - end - end - - describe '#load_status' do - it 'loads the status from the cache when there is one' do - expect(pipeline_status).to receive(:has_cache?).and_return(true) - expect(pipeline_status).to receive(:load_from_cache) - - pipeline_status.load_status - end - - it 'loads the status from the project commit when there is no cache' do - allow(pipeline_status).to receive(:has_cache?).and_return(false) - - expect(pipeline_status).to receive(:load_from_commit) - - pipeline_status.load_status - end - - it 'stores the status in the cache when it loading it from the project' do - allow(pipeline_status).to receive(:has_cache?).and_return(false) - allow(pipeline_status).to receive(:load_from_commit) - - expect(pipeline_status).to receive(:store_in_cache) - - pipeline_status.load_status - end - - it 'sets the state to loaded' do - pipeline_status.load_status - - expect(pipeline_status).to be_loaded - end - - it 'only loads the status once' do - expect(pipeline_status).to receive(:has_cache?).and_return(true).exactly(1) - expect(pipeline_status).to receive(:load_from_cache).exactly(1) - - pipeline_status.load_status - pipeline_status.load_status - end - end - - describe "#load_from_commit" do - let!(:pipeline) { create(:ci_pipeline, :success, project: project, sha: project.commit.sha) } - - it 'reads 
the status from the pipeline for the commit' do - pipeline_status.load_from_commit - - expect(pipeline_status.status).to eq('success') - expect(pipeline_status.sha).to eq(project.commit.sha) - end - - it "doesn't fail for an empty project" do - status_for_empty_commit = described_class.new(create(:empty_project)) - - status_for_empty_commit.load_status - - expect(status_for_empty_commit).to be_loaded - end - end - - describe "#store_in_cache", :redis do - it "sets the object in redis" do - pipeline_status.sha = '123456' - pipeline_status.status = 'failed' - - pipeline_status.store_in_cache - read_sha, read_status = Gitlab::Redis.with { |redis| redis.hmget("projects/#{project.id}/build_status", :sha, :status) } - - expect(read_sha).to eq('123456') - expect(read_status).to eq('failed') - end - end - - describe '#store_in_cache_if_needed', :redis do - it 'stores the state in the cache when the sha is the HEAD of the project' do - create(:ci_pipeline, :success, project: project, sha: project.commit.sha) - build_status = described_class.load_for_project(project) - - build_status.store_in_cache_if_needed - sha, status = Gitlab::Redis.with { |redis| redis.hmget("projects/#{project.id}/build_status", :sha, :status) } - - expect(sha).not_to be_nil - expect(status).not_to be_nil - end - - it "doesn't store the status in redis when the sha is not the head of the project" do - other_status = described_class.new(project, sha: "123456", status: "failed") - - other_status.store_in_cache_if_needed - sha, status = Gitlab::Redis.with { |redis| redis.hmget("projects/#{project.id}/build_status", :sha, :status) } - - expect(sha).to be_nil - expect(status).to be_nil - end - - it "deletes the cache if the repository doesn't have a head commit" do - empty_project = create(:empty_project) - Gitlab::Redis.with { |redis| redis.mapped_hmset("projects/#{empty_project.id}/build_status", { sha: "sha", status: "pending" }) } - other_status = described_class.new(empty_project, sha: "123456", status: "failed") - - other_status.store_in_cache_if_needed - sha, status = Gitlab::Redis.with { |redis| redis.hmget("projects/#{empty_project.id}/build_status", :sha, :status) } - - expect(sha).to be_nil - expect(status).to be_nil - end - end - - describe "with a status in redis", :redis do - let(:status) { 'success' } - let(:sha) { '424d1b73bc0d3cb726eb7dc4ce17a4d48552f8c6' } - - before do - Gitlab::Redis.with { |redis| redis.mapped_hmset("projects/#{project.id}/build_status", { sha: sha, status: status }) } - end - - describe '#load_from_cache' do - it 'reads the status from redis' do - pipeline_status.load_from_cache - - expect(pipeline_status.sha).to eq(sha) - expect(pipeline_status.status).to eq(status) - end - end - - describe '#has_cache?' 
do - it 'knows the status is cached' do - expect(pipeline_status.has_cache?).to be_truthy - end - end - - describe '#delete_from_cache' do - it 'deletes values from redis' do - pipeline_status.delete_from_cache - - key_exists = Gitlab::Redis.with { |redis| redis.exists("projects/#{project.id}/build_status") } - - expect(key_exists).to be_falsy - end - end - end -end diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb index c38faf32f7d..8f6ab908987 100644 --- a/spec/models/ci/stage_spec.rb +++ b/spec/models/ci/stage_spec.rb @@ -28,6 +28,35 @@ describe Ci::Stage, models: true do end end + describe '#groups' do + before do + create_job(:ci_build, name: 'rspec 0 2') + create_job(:ci_build, name: 'rspec 0 1') + create_job(:ci_build, name: 'spinach 0 1') + create_job(:commit_status, name: 'aaaaa') + end + + it 'returns an array of three groups' do + expect(stage.groups).to be_a Array + expect(stage.groups).to all(be_a Ci::Group) + expect(stage.groups.size).to eq 3 + end + + it 'returns groups with correctly ordered statuses' do + expect(stage.groups.first.jobs.map(&:name)) + .to eq ['aaaaa'] + expect(stage.groups.second.jobs.map(&:name)) + .to eq ['rspec 0 1', 'rspec 0 2'] + expect(stage.groups.third.jobs.map(&:name)) + .to eq ['spinach 0 1'] + end + + it 'returns groups with correct names' do + expect(stage.groups.map(&:name)) + .to eq %w[aaaaa rspec spinach] + end + end + describe '#statuses_count' do before do create_job(:ci_build) @@ -73,6 +102,10 @@ describe Ci::Stage, models: true do context 'and builds are retried' do let!(:new_build) { create_job(:ci_build, status: :success) } + before do + stage_build.update(retried: true) + end + it "returns status of latest build" do is_expected.to eq('success') end @@ -223,7 +256,7 @@ describe Ci::Stage, models: true do end end - def create_job(type, status: 'success', stage: stage_name) - create(type, pipeline: pipeline, stage: stage, status: status) + def create_job(type, status: 'success', stage: stage_name, **opts) + create(type, pipeline: pipeline, stage: stage, status: status, **opts) end end diff --git a/spec/models/ci/trigger_spec.rb b/spec/models/ci/trigger_spec.rb index 1bcb673cb16..92c15c13c18 100644 --- a/spec/models/ci/trigger_spec.rb +++ b/spec/models/ci/trigger_spec.rb @@ -16,8 +16,8 @@ describe Ci::Trigger, models: true do expect(trigger.token).not_to be_nil end - it 'does not set an random token if one provided' do - trigger = create(:ci_trigger, project: project) + it 'does not set a random token if one provided' do + trigger = create(:ci_trigger, project: project, token: 'token') expect(trigger.token).to eq('token') end diff --git a/spec/models/ci/variable_spec.rb b/spec/models/ci/variable_spec.rb index 048d25869bc..fe8c52d5353 100644 --- a/spec/models/ci/variable_spec.rb +++ b/spec/models/ci/variable_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe Ci::Variable, models: true do - subject { Ci::Variable.new } + subject { build(:ci_variable) } let(:secret_value) { 'secret' } diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb index befafcf457c..a239f8e165c 100644 --- a/spec/models/commit_spec.rb +++ b/spec/models/commit_spec.rb @@ -212,7 +212,7 @@ eos end end - describe '#latest_pipeline' do + describe '#last_pipeline' do let!(:first_pipeline) do create(:ci_empty_pipeline, project: project, @@ -226,8 +226,8 @@ eos status: 'success') end - it 'returns latest pipeline' do - expect(commit.latest_pipeline).to eq second_pipeline + it 'returns last pipeline' do + expect(commit.last_pipeline).to 
eq second_pipeline end end @@ -388,32 +388,4 @@ eos expect(described_class.valid_hash?('a' * 41)).to be false end end - - describe '#raw_diffs' do - context 'Gitaly commit_raw_diffs feature enabled' do - before do - allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:commit_raw_diffs).and_return(true) - end - - context 'when a truthy deltas_only is not passed to args' do - it 'fetches diffs from Gitaly server' do - expect(Gitlab::GitalyClient::Commit).to receive(:diff_from_parent). - with(commit) - - commit.raw_diffs - end - end - - context 'when a truthy deltas_only is passed to args' do - it 'fetches diffs using Rugged' do - opts = { deltas_only: true } - - expect(Gitlab::GitalyClient::Commit).not_to receive(:diff_from_parent) - expect(commit.raw).to receive(:diffs).with(opts) - - commit.raw_diffs(opts) - end - end - end - end end diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb index 7343b735a74..6947affcc1e 100644 --- a/spec/models/commit_status_spec.rb +++ b/spec/models/commit_status_spec.rb @@ -16,6 +16,7 @@ describe CommitStatus, :models do it { is_expected.to belong_to(:pipeline) } it { is_expected.to belong_to(:user) } it { is_expected.to belong_to(:project) } + it { is_expected.to belong_to(:auto_canceled_by) } it { is_expected.to validate_presence_of(:name) } it { is_expected.to validate_inclusion_of(:status).in_array(%w(pending running failed success canceled)) } @@ -101,6 +102,32 @@ describe CommitStatus, :models do end end + describe '#auto_canceled?' do + subject { commit_status.auto_canceled? } + + context 'when it is canceled' do + before do + commit_status.update(status: 'canceled') + end + + context 'when there is auto_canceled_by' do + before do + commit_status.update(auto_canceled_by: create(:ci_empty_pipeline)) + end + + it 'is auto canceled' do + is_expected.to be_truthy + end + end + + context 'when there is no auto_canceled_by' do + it 'is not auto canceled' do + is_expected.to be_falsey + end + end + end + end + describe '#duration' do subject { commit_status.duration } @@ -130,9 +157,9 @@ describe CommitStatus, :models do subject { described_class.latest.order(:id) } let(:statuses) do - [create_status(name: 'aa', ref: 'bb', status: 'running'), - create_status(name: 'cc', ref: 'cc', status: 'pending'), - create_status(name: 'aa', ref: 'cc', status: 'success'), + [create_status(name: 'aa', ref: 'bb', status: 'running', retried: true), + create_status(name: 'cc', ref: 'cc', status: 'pending', retried: true), + create_status(name: 'aa', ref: 'cc', status: 'success', retried: true), create_status(name: 'cc', ref: 'bb', status: 'success'), create_status(name: 'aa', ref: 'bb', status: 'success')] end @@ -142,6 +169,22 @@ describe CommitStatus, :models do end end + describe '.retried' do + subject { described_class.retried.order(:id) } + + let(:statuses) do + [create_status(name: 'aa', ref: 'bb', status: 'running', retried: true), + create_status(name: 'cc', ref: 'cc', status: 'pending', retried: true), + create_status(name: 'aa', ref: 'cc', status: 'success', retried: true), + create_status(name: 'cc', ref: 'bb', status: 'success'), + create_status(name: 'aa', ref: 'bb', status: 'success')] + end + + it 'returns unique statuses' do + is_expected.to contain_exactly(*statuses.values_at(0, 1, 2)) + end + end + describe '.running_or_pending' do subject { described_class.running_or_pending.order(:id) } @@ -154,7 +197,7 @@ describe CommitStatus, :models do end it 'returns statuses that are running or pending' do - is_expected.to 
eq(statuses.values_at(0, 1)) + is_expected.to contain_exactly(*statuses.values_at(0, 1)) end end diff --git a/spec/models/concerns/awardable_spec.rb b/spec/models/concerns/awardable_spec.rb index de791abdf3d..63ad3a3630b 100644 --- a/spec/models/concerns/awardable_spec.rb +++ b/spec/models/concerns/awardable_spec.rb @@ -1,10 +1,12 @@ require 'spec_helper' -describe Issue, "Awardable" do +describe Awardable do let!(:issue) { create(:issue) } let!(:award_emoji) { create(:award_emoji, :downvote, awardable: issue) } describe "Associations" do + subject { build(:issue) } + it { is_expected.to have_many(:award_emoji).dependent(:destroy) } end diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb index 6151d53cd91..40bbb10eaac 100644 --- a/spec/models/concerns/cache_markdown_field_spec.rb +++ b/spec/models/concerns/cache_markdown_field_spec.rb @@ -1,9 +1,6 @@ require 'spec_helper' describe CacheMarkdownField do - caching_classes = CacheMarkdownField::CACHING_CLASSES - CacheMarkdownField::CACHING_CLASSES = ["ThingWithMarkdownFields"].freeze - # The minimum necessary ActiveModel to test this concern class ThingWithMarkdownFields include ActiveModel::Model @@ -21,24 +18,25 @@ describe CacheMarkdownField do end extend ActiveModel::Callbacks - define_model_callbacks :save + define_model_callbacks :create, :update include CacheMarkdownField cache_markdown_field :foo cache_markdown_field :baz, pipeline: :single_line - def self.add_attr(attr_name) - self.attribute_names += [attr_name] - define_attribute_methods(attr_name) - attr_reader(attr_name) - define_method("#{attr_name}=") do |val| - send("#{attr_name}_will_change!") unless val == send(attr_name) - instance_variable_set("@#{attr_name}", val) + def self.add_attr(name) + self.attribute_names += [name] + define_attribute_methods(name) + attr_reader(name) + define_method("#{name}=") do |value| + write_attribute(name, value) end end - [:foo, :foo_html, :bar, :baz, :baz_html].each do |attr_name| - add_attr(attr_name) + add_attr :cached_markdown_version + + [:foo, :foo_html, :bar, :baz, :baz_html].each do |name| + add_attr(name) end def initialize(*) @@ -48,134 +46,258 @@ describe CacheMarkdownField do clear_changes_information end + def read_attribute(name) + instance_variable_get("@#{name}") + end + + def write_attribute(name, value) + send("#{name}_will_change!") unless value == read_attribute(name) + instance_variable_set("@#{name}", value) + end + def save - run_callbacks :save do + run_callbacks :update do changes_applied end end end - CacheMarkdownField::CACHING_CLASSES = caching_classes - def thing_subclass(new_attr) Class.new(ThingWithMarkdownFields) { add_attr(new_attr) } end - let(:markdown) { "`Foo`" } - let(:html) { "<p><code>Foo</code></p>" } + let(:markdown) { '`Foo`' } + let(:html) { '<p dir="auto"><code>Foo</code></p>' } - let(:updated_markdown) { "`Bar`" } - let(:updated_html) { "<p dir=\"auto\"><code>Bar</code></p>" } + let(:updated_markdown) { '`Bar`' } + let(:updated_html) { '<p dir="auto"><code>Bar</code></p>' } - subject { ThingWithMarkdownFields.new(foo: markdown, foo_html: html) } + let(:thing) { ThingWithMarkdownFields.new(foo: markdown, foo_html: html, cached_markdown_version: CacheMarkdownField::CACHE_VERSION) } - describe ".attributes" do - it "excludes cache attributes" do - expect(thing_subclass(:qux).new.attributes.keys.sort).to eq(%w[bar baz foo qux]) + describe '.attributes' do + it 'excludes cache attributes' do + expect(thing.attributes.keys.sort).to eq(%w[bar 
baz foo]) end end - describe ".cache_markdown_field" do - it "refuses to allow untracked classes" do - expect { thing_subclass(:qux).__send__(:cache_markdown_field, :qux) }.to raise_error(RuntimeError) + context 'an unchanged markdown field' do + before do + thing.foo = thing.foo + thing.save end + + it { expect(thing.foo).to eq(markdown) } + it { expect(thing.foo_html).to eq(html) } + it { expect(thing.foo_html_changed?).not_to be_truthy } + it { expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) } end - context "an unchanged markdown field" do + context 'a changed markdown field' do before do - subject.foo = subject.foo - subject.save + thing.foo = updated_markdown + thing.save end - it { expect(subject.foo).to eq(markdown) } - it { expect(subject.foo_html).to eq(html) } - it { expect(subject.foo_html_changed?).not_to be_truthy } + it { expect(thing.foo_html).to eq(updated_html) } + it { expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) } end - context "a changed markdown field" do + context 'a non-markdown field changed' do before do - subject.foo = updated_markdown - subject.save + thing.bar = 'OK' + thing.save end - it { expect(subject.foo_html).to eq(updated_html) } + it { expect(thing.bar).to eq('OK') } + it { expect(thing.foo).to eq(markdown) } + it { expect(thing.foo_html).to eq(html) } + it { expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) } end - context "a non-markdown field changed" do + context 'version is out of date' do + let(:thing) { ThingWithMarkdownFields.new(foo: updated_markdown, foo_html: html, cached_markdown_version: nil) } + before do - subject.bar = "OK" - subject.save + thing.save end - it { expect(subject.bar).to eq("OK") } - it { expect(subject.foo).to eq(markdown) } - it { expect(subject.foo_html).to eq(html) } + it { expect(thing.foo_html).to eq(updated_html) } + it { expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) } + end + + describe '#cached_html_up_to_date?' do + subject { thing.cached_html_up_to_date?(:foo) } + + it 'returns false when the version is absent' do + thing.cached_markdown_version = nil + + is_expected.to be_falsy + end + + it 'returns false when the version is too early' do + thing.cached_markdown_version -= 1 + + is_expected.to be_falsy + end + + it 'returns false when the version is too late' do + thing.cached_markdown_version += 1 + + is_expected.to be_falsy + end + + it 'returns true when the version is just right' do + thing.cached_markdown_version = CacheMarkdownField::CACHE_VERSION + + is_expected.to be_truthy + end + + it 'returns false if markdown has been changed but html has not' do + thing.foo = updated_html + + is_expected.to be_falsy + end + + it 'returns true if markdown has not been changed but html has' do + thing.foo_html = updated_html + + is_expected.to be_truthy + end + + it 'returns true if markdown and html have both been changed' do + thing.foo = updated_markdown + thing.foo_html = updated_html + + is_expected.to be_truthy + end + + it 'returns false if the markdown field is set but the html is not' do + thing.foo_html = nil + + is_expected.to be_falsy + end + end + + describe '#refresh_markdown_cache!' do + before do + thing.foo = updated_markdown + end + + context 'do_update: false' do + it 'fills all html fields' do + thing.refresh_markdown_cache! 
+ + expect(thing.foo_html).to eq(updated_html) + expect(thing.foo_html_changed?).to be_truthy + expect(thing.baz_html_changed?).to be_truthy + end + + it 'does not save the result' do + expect(thing).not_to receive(:update_columns) + + thing.refresh_markdown_cache! + end + + it 'updates the markdown cache version' do + thing.cached_markdown_version = nil + thing.refresh_markdown_cache! + + expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) + end + end + + context 'do_update: true' do + it 'fills all html fields' do + thing.refresh_markdown_cache!(do_update: true) + + expect(thing.foo_html).to eq(updated_html) + expect(thing.foo_html_changed?).to be_truthy + expect(thing.baz_html_changed?).to be_truthy + end + + it 'skips saving if not persisted' do + expect(thing).to receive(:persisted?).and_return(false) + expect(thing).not_to receive(:update_columns) + + thing.refresh_markdown_cache!(do_update: true) + end + + it 'saves the changes using #update_columns' do + expect(thing).to receive(:persisted?).and_return(true) + expect(thing).to receive(:update_columns) + .with("foo_html" => updated_html, "baz_html" => "", "cached_markdown_version" => CacheMarkdownField::CACHE_VERSION) + + thing.refresh_markdown_cache!(do_update: true) + end + end end describe '#banzai_render_context' do - it "sets project to nil if the object lacks a project" do - context = subject.banzai_render_context(:foo) - expect(context).to have_key(:project) + subject(:context) { thing.banzai_render_context(:foo) } + + it 'sets project to nil if the object lacks a project' do + is_expected.to have_key(:project) expect(context[:project]).to be_nil end - it "excludes author if the object lacks an author" do - context = subject.banzai_render_context(:foo) - expect(context).not_to have_key(:author) + it 'excludes author if the object lacks an author' do + is_expected.not_to have_key(:author) end - it "raises if the context for an unrecognised field is requested" do - expect{subject.banzai_render_context(:not_found)}.to raise_error(ArgumentError) + it 'raises if the context for an unrecognised field is requested' do + expect { thing.banzai_render_context(:not_found) }.to raise_error(ArgumentError) end - it "includes the pipeline" do - context = subject.banzai_render_context(:baz) - expect(context[:pipeline]).to eq(:single_line) + it 'includes the pipeline' do + baz = thing.banzai_render_context(:baz) + + expect(baz[:pipeline]).to eq(:single_line) end - it "returns copies of the context template" do - template = subject.cached_markdown_fields[:baz] - copy = subject.banzai_render_context(:baz) + it 'returns copies of the context template' do + template = thing.cached_markdown_fields[:baz] + copy = thing.banzai_render_context(:baz) + expect(copy).not_to be(template) end - context "with a project" do - subject { thing_subclass(:project).new(foo: markdown, foo_html: html, project: :project) } + context 'with a project' do + let(:thing) { thing_subclass(:project).new(foo: markdown, foo_html: html, project: :project_value) } - it "sets the project in the context" do - context = subject.banzai_render_context(:foo) - expect(context).to have_key(:project) - expect(context[:project]).to eq(:project) + it 'sets the project in the context' do + is_expected.to have_key(:project) + expect(context[:project]).to eq(:project_value) end - it "invalidates the cache when project changes" do - subject.project = :new_project + it 'invalidates the cache when project changes' do + thing.project = :new_project 
allow(Banzai::Renderer).to receive(:cacheless_render_field).and_return(updated_html) - subject.save + thing.save - expect(subject.foo_html).to eq(updated_html) - expect(subject.baz_html).to eq(updated_html) + expect(thing.foo_html).to eq(updated_html) + expect(thing.baz_html).to eq(updated_html) + expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) end end - context "with an author" do - subject { thing_subclass(:author).new(foo: markdown, foo_html: html, author: :author) } + context 'with an author' do + let(:thing) { thing_subclass(:author).new(foo: markdown, foo_html: html, author: :author_value) } - it "sets the author in the context" do - context = subject.banzai_render_context(:foo) - expect(context).to have_key(:author) - expect(context[:author]).to eq(:author) + it 'sets the author in the context' do + is_expected.to have_key(:author) + expect(context[:author]).to eq(:author_value) end - it "invalidates the cache when author changes" do - subject.author = :new_author + it 'invalidates the cache when author changes' do + thing.author = :new_author allow(Banzai::Renderer).to receive(:cacheless_render_field).and_return(updated_html) - subject.save + thing.save - expect(subject.foo_html).to eq(updated_html) - expect(subject.baz_html).to eq(updated_html) + expect(thing.foo_html).to eq(updated_html) + expect(thing.baz_html).to eq(updated_html) + expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) end end end diff --git a/spec/models/concerns/discussion_on_diff_spec.rb b/spec/models/concerns/discussion_on_diff_spec.rb new file mode 100644 index 00000000000..8571e85627c --- /dev/null +++ b/spec/models/concerns/discussion_on_diff_spec.rb @@ -0,0 +1,24 @@ +require 'spec_helper' + +describe DiscussionOnDiff, model: true do + subject { create(:diff_note_on_merge_request).to_discussion } + + describe "#truncated_diff_lines" do + let(:truncated_lines) { subject.truncated_diff_lines } + + context "when diff is greater than allowed number of truncated diff lines " do + it "returns fewer lines" do + expect(subject.diff_lines.count).to be > DiffDiscussion::NUMBER_OF_TRUNCATED_DIFF_LINES + + expect(truncated_lines.count).to be <= DiffDiscussion::NUMBER_OF_TRUNCATED_DIFF_LINES + end + end + + context "when some diff lines are meta" do + it "returns no meta lines" do + expect(subject.diff_lines).to include(be_meta) + expect(truncated_lines).not_to include(be_meta) + end + end + end +end diff --git a/spec/models/concerns/has_status_spec.rb b/spec/models/concerns/has_status_spec.rb index 82abad0e2f6..67dae7cf4c0 100644 --- a/spec/models/concerns/has_status_spec.rb +++ b/spec/models/concerns/has_status_spec.rb @@ -231,6 +231,18 @@ describe HasStatus do end end + describe '.created_or_pending' do + subject { CommitStatus.created_or_pending } + + %i[created pending].each do |status| + it_behaves_like 'containing the job', status + end + + %i[running failed success].each do |status| + it_behaves_like 'not containing the job', status + end + end + describe '.finished' do subject { CommitStatus.finished } diff --git a/spec/models/concerns/ignorable_column_spec.rb b/spec/models/concerns/ignorable_column_spec.rb new file mode 100644 index 00000000000..dba9fe43327 --- /dev/null +++ b/spec/models/concerns/ignorable_column_spec.rb @@ -0,0 +1,38 @@ +require 'spec_helper' + +describe IgnorableColumn do + let :base_class do + Class.new do + def self.columns + # This method does not have access to "double" + [Struct.new(:name).new('id'), 
Struct.new(:name).new('title')] + end + end + end + + let :model do + Class.new(base_class) do + include IgnorableColumn + end + end + + describe '.columns' do + it 'returns the columns, excluding the ignored ones' do + model.ignore_column(:title) + + expect(model.columns.map(&:name)).to eq(%w(id)) + end + end + + describe '.ignored_columns' do + it 'returns a Set' do + expect(model.ignored_columns).to be_an_instance_of(Set) + end + + it 'returns the names of the ignored columns' do + model.ignore_column(:title) + + expect(model.ignored_columns).to eq(Set.new(%w(title))) + end + end +end diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb index 4522206fab1..27890e33b49 100644 --- a/spec/models/concerns/issuable_spec.rb +++ b/spec/models/concerns/issuable_spec.rb @@ -1,13 +1,15 @@ require 'spec_helper' -describe Issue, "Issuable" do +describe Issuable do + let(:issuable_class) { Issue } let(:issue) { create(:issue) } let(:user) { create(:user) } describe "Associations" do + subject { build(:issue) } + it { is_expected.to belong_to(:project) } it { is_expected.to belong_to(:author) } - it { is_expected.to belong_to(:assignee) } it { is_expected.to have_many(:notes).dependent(:destroy) } it { is_expected.to have_many(:todos).dependent(:destroy) } @@ -23,10 +25,14 @@ describe Issue, "Issuable" do end describe 'Included modules' do + let(:described_class) { issuable_class } + it { is_expected.to include_module(Awardable) } end describe "Validation" do + subject { build(:issue) } + before do allow(subject).to receive(:set_iid).and_return(false) end @@ -39,9 +45,11 @@ describe Issue, "Issuable" do end describe "Scope" do - it { expect(described_class).to respond_to(:opened) } - it { expect(described_class).to respond_to(:closed) } - it { expect(described_class).to respond_to(:assigned) } + subject { build(:issue) } + + it { expect(issuable_class).to respond_to(:opened) } + it { expect(issuable_class).to respond_to(:closed) } + it { expect(issuable_class).to respond_to(:assigned) } end describe 'author_name' do @@ -57,74 +65,20 @@ describe Issue, "Issuable" do end end - describe 'assignee_name' do - it 'is delegated to assignee' do - issue.update!(assignee: create(:user)) - - expect(issue.assignee_name).to eq issue.assignee.name - end - - it 'returns nil when assignee is nil' do - issue.assignee_id = nil - issue.save(validate: false) - - expect(issue.assignee_name).to eq nil - end - end - - describe "before_save" do - describe "#update_cache_counts" do - context "when previous assignee exists" do - before do - assignee = create(:user) - issue.project.team << [assignee, :developer] - issue.update(assignee: assignee) - end - - it "updates cache counts for new assignee" do - user = create(:user) - - expect(user).to receive(:update_cache_counts) - - issue.update(assignee: user) - end - - it "updates cache counts for previous assignee" do - old_assignee = issue.assignee - allow(User).to receive(:find_by_id).with(old_assignee.id).and_return(old_assignee) - - expect(old_assignee).to receive(:update_cache_counts) - - issue.update(assignee: nil) - end - end - - context "when previous assignee does not exist" do - before{ issue.update(assignee: nil) } - - it "updates cache count for the new assignee" do - expect_any_instance_of(User).to receive(:update_cache_counts) - - issue.update(assignee: user) - end - end - end - end - describe ".search" do let!(:searchable_issue) { create(:issue, title: "Searchable issue") } it 'returns notes with a matching title' do - 
expect(described_class.search(searchable_issue.title)). + expect(issuable_class.search(searchable_issue.title)). to eq([searchable_issue]) end it 'returns notes with a partially matching title' do - expect(described_class.search('able')).to eq([searchable_issue]) + expect(issuable_class.search('able')).to eq([searchable_issue]) end it 'returns notes with a matching title regardless of the casing' do - expect(described_class.search(searchable_issue.title.upcase)). + expect(issuable_class.search(searchable_issue.title.upcase)). to eq([searchable_issue]) end end @@ -135,31 +89,31 @@ describe Issue, "Issuable" do end it 'returns notes with a matching title' do - expect(described_class.full_search(searchable_issue.title)). + expect(issuable_class.full_search(searchable_issue.title)). to eq([searchable_issue]) end it 'returns notes with a partially matching title' do - expect(described_class.full_search('able')).to eq([searchable_issue]) + expect(issuable_class.full_search('able')).to eq([searchable_issue]) end it 'returns notes with a matching title regardless of the casing' do - expect(described_class.full_search(searchable_issue.title.upcase)). + expect(issuable_class.full_search(searchable_issue.title.upcase)). to eq([searchable_issue]) end it 'returns notes with a matching description' do - expect(described_class.full_search(searchable_issue.description)). + expect(issuable_class.full_search(searchable_issue.description)). to eq([searchable_issue]) end it 'returns notes with a partially matching description' do - expect(described_class.full_search(searchable_issue.description)). + expect(issuable_class.full_search(searchable_issue.description)). to eq([searchable_issue]) end it 'returns notes with a matching description regardless of the casing' do - expect(described_class.full_search(searchable_issue.description.upcase)). + expect(issuable_class.full_search(searchable_issue.description.upcase)). to eq([searchable_issue]) end end @@ -298,7 +252,20 @@ describe Issue, "Issuable" do end context "issue is assigned" do - before { issue.update_attribute(:assignee, user) } + before { issue.assignees << user } + + it "returns correct hook data" do + expect(data[:assignees].first).to eq(user.hook_attrs) + end + end + + context "merge_request is assigned" do + let(:merge_request) { create(:merge_request) } + let(:data) { merge_request.to_hook_data(user) } + + before do + merge_request.update_attribute(:assignee, user) + end it "returns correct hook data" do expect(data[:object_attributes]['assignee_id']).to eq(user.id) @@ -320,24 +287,6 @@ describe Issue, "Issuable" do include_examples 'deprecated repository hook data' end - describe '#card_attributes' do - it 'includes the author name' do - allow(issue).to receive(:author).and_return(double(name: 'Robert')) - allow(issue).to receive(:assignee).and_return(nil) - - expect(issue.card_attributes). - to eq({ 'Author' => 'Robert', 'Assignee' => nil }) - end - - it 'includes the assignee name' do - allow(issue).to receive(:author).and_return(double(name: 'Robert')) - allow(issue).to receive(:assignee).and_return(double(name: 'Douwe')) - - expect(issue.card_attributes). - to eq({ 'Author' => 'Robert', 'Assignee' => 'Douwe' }) - end - end - describe '#labels_array' do let(:project) { create(:empty_project) } let(:bug) { create(:label, project: project, title: 'bug') } @@ -466,27 +415,6 @@ describe Issue, "Issuable" do end end - describe '#assignee_or_author?' 
do - let(:user) { build(:user, id: 1) } - let(:issue) { build(:issue) } - - it 'returns true for a user that is assigned to an issue' do - issue.assignee = user - - expect(issue.assignee_or_author?(user)).to eq(true) - end - - it 'returns true for a user that is the author of an issue' do - issue.author = user - - expect(issue.assignee_or_author?(user)).to eq(true) - end - - it 'returns false for a user that is not the assignee or author' do - expect(issue.assignee_or_author?(user)).to eq(false) - end - end - describe '#spend_time' do let(:user) { create(:user) } let(:issue) { create(:issue) } diff --git a/spec/models/concerns/mentionable_spec.rb b/spec/models/concerns/mentionable_spec.rb index 2092576e981..e382c7120de 100644 --- a/spec/models/concerns/mentionable_spec.rb +++ b/spec/models/concerns/mentionable_spec.rb @@ -163,3 +163,52 @@ describe Issue, "Mentionable" do end end end + +describe Commit, 'Mentionable' do + let(:project) { create(:project, :public, :repository) } + let(:commit) { project.commit } + + describe '#matches_cross_reference_regex?' do + it "is false when message doesn't reference anything" do + allow(commit.raw).to receive(:message).and_return "WIP: Do something" + + expect(commit.matches_cross_reference_regex?).to be false + end + + it 'is true if issue #number mentioned in title' do + allow(commit.raw).to receive(:message).and_return "#1" + + expect(commit.matches_cross_reference_regex?).to be true + end + + it 'is true if references an MR' do + allow(commit.raw).to receive(:message).and_return "See merge request !12" + + expect(commit.matches_cross_reference_regex?).to be true + end + + it 'is true if references a commit' do + allow(commit.raw).to receive(:message).and_return "a1b2c3d4" + + expect(commit.matches_cross_reference_regex?).to be true + end + + it 'is true if issue referenced by url' do + issue = create(:issue, project: project) + + allow(commit.raw).to receive(:message).and_return Gitlab::UrlBuilder.build(issue) + + expect(commit.matches_cross_reference_regex?).to be true + end + + context 'with external issue tracker' do + let(:project) { create(:jira_project) } + + it 'is true if external issues referenced' do + allow(commit.raw).to receive(:message).and_return 'JIRA-123' + + expect(commit.matches_cross_reference_regex?).to be true + end + end + end +end diff --git a/spec/models/concerns/milestoneish_spec.rb b/spec/models/concerns/milestoneish_spec.rb index 68e4c0a522b..675b730c557 100644 --- a/spec/models/concerns/milestoneish_spec.rb +++ b/spec/models/concerns/milestoneish_spec.rb @@ -11,13 +11,13 @@ describe Milestone, 'Milestoneish' do let(:milestone) { create(:milestone, project: project) } let!(:issue) { create(:issue, project: project, milestone: milestone) } let!(:security_issue_1) { create(:issue, :confidential, project: project, author: author, milestone: milestone) } - let!(:security_issue_2) { create(:issue, :confidential, project: project, assignee: assignee, milestone: milestone) } + let!(:security_issue_2) { create(:issue, :confidential, project: project, assignees: [assignee], milestone: milestone) } let!(:closed_issue_1) { create(:issue, :closed, project: project, milestone: milestone) } let!(:closed_issue_2) { create(:issue, :closed, project: project, milestone: milestone) } let!(:closed_security_issue_1) { create(:issue, :confidential, :closed, project: project, author: author, milestone: milestone) } - let!(:closed_security_issue_2) { create(:issue, :confidential, :closed, project: project, assignee: assignee, milestone: 
milestone) } + let!(:closed_security_issue_2) { create(:issue, :confidential, :closed, project: project, assignees: [assignee], milestone: milestone) } let!(:closed_security_issue_3) { create(:issue, :confidential, :closed, project: project, author: author, milestone: milestone) } - let!(:closed_security_issue_4) { create(:issue, :confidential, :closed, project: project, assignee: assignee, milestone: milestone) } + let!(:closed_security_issue_4) { create(:issue, :confidential, :closed, project: project, assignees: [assignee], milestone: milestone) } let!(:merge_request) { create(:merge_request, source_project: project, target_project: project, milestone: milestone) } before do diff --git a/spec/models/concerns/noteable_spec.rb b/spec/models/concerns/noteable_spec.rb new file mode 100644 index 00000000000..bdae742ff1d --- /dev/null +++ b/spec/models/concerns/noteable_spec.rb @@ -0,0 +1,261 @@ +require 'spec_helper' + +describe Noteable, model: true do + let!(:active_diff_note1) { create(:diff_note_on_merge_request) } + let(:project) { active_diff_note1.project } + subject { active_diff_note1.noteable } + let!(:active_diff_note2) { create(:diff_note_on_merge_request, project: project, noteable: subject, in_reply_to: active_diff_note1) } + let!(:active_diff_note3) { create(:diff_note_on_merge_request, project: project, noteable: subject, position: active_position2) } + let!(:outdated_diff_note1) { create(:diff_note_on_merge_request, project: project, noteable: subject, position: outdated_position) } + let!(:outdated_diff_note2) { create(:diff_note_on_merge_request, project: project, noteable: subject, in_reply_to: outdated_diff_note1) } + let!(:discussion_note1) { create(:discussion_note_on_merge_request, project: project, noteable: subject) } + let!(:discussion_note2) { create(:discussion_note_on_merge_request, in_reply_to: discussion_note1) } + let!(:commit_diff_note1) { create(:diff_note_on_commit, project: project) } + let!(:commit_diff_note2) { create(:diff_note_on_commit, project: project, in_reply_to: commit_diff_note1) } + let!(:commit_note1) { create(:note_on_commit, project: project) } + let!(:commit_note2) { create(:note_on_commit, project: project) } + let!(:commit_discussion_note1) { create(:discussion_note_on_commit, project: project) } + let!(:commit_discussion_note2) { create(:discussion_note_on_commit, in_reply_to: commit_discussion_note1) } + let!(:commit_discussion_note3) { create(:discussion_note_on_commit, project: project) } + let!(:note1) { create(:note, project: project, noteable: subject) } + let!(:note2) { create(:note, project: project, noteable: subject) } + + let(:active_position2) do + Gitlab::Diff::Position.new( + old_path: "files/ruby/popen.rb", + new_path: "files/ruby/popen.rb", + old_line: 16, + new_line: 22, + diff_refs: subject.diff_refs + ) + end + + let(:outdated_position) do + Gitlab::Diff::Position.new( + old_path: "files/ruby/popen.rb", + new_path: "files/ruby/popen.rb", + old_line: nil, + new_line: 9, + diff_refs: project.commit("874797c3a73b60d2187ed6e2fcabd289ff75171e").diff_refs + ) + end + + describe '#discussions' do + let(:discussions) { subject.discussions } + + it 'includes discussions for diff notes, commit diff notes, commit notes, and regular notes' do + expect(discussions).to eq([ + DiffDiscussion.new([active_diff_note1, active_diff_note2], subject), + DiffDiscussion.new([active_diff_note3], subject), + DiffDiscussion.new([outdated_diff_note1, outdated_diff_note2], subject), + Discussion.new([discussion_note1, discussion_note2], subject), 
+ DiffDiscussion.new([commit_diff_note1, commit_diff_note2], subject), + OutOfContextDiscussion.new([commit_note1, commit_note2], subject), + Discussion.new([commit_discussion_note1, commit_discussion_note2], subject), + Discussion.new([commit_discussion_note3], subject), + IndividualNoteDiscussion.new([note1], subject), + IndividualNoteDiscussion.new([note2], subject) + ]) + end + end + + describe '#grouped_diff_discussions' do + let(:grouped_diff_discussions) { subject.grouped_diff_discussions } + + it "includes active discussions" do + discussions = grouped_diff_discussions.values.flatten + + expect(discussions.count).to eq(2) + expect(discussions.map(&:id)).to eq([active_diff_note1.discussion_id, active_diff_note3.discussion_id]) + expect(discussions.all?(&:active?)).to be true + + expect(discussions.first.notes).to eq([active_diff_note1, active_diff_note2]) + expect(discussions.last.notes).to eq([active_diff_note3]) + end + + it "doesn't include outdated discussions" do + expect(grouped_diff_discussions.values.flatten.map(&:id)).not_to include(outdated_diff_note1.discussion_id) + end + + it "groups the discussions by line code" do + expect(grouped_diff_discussions[active_diff_note1.line_code].first.id).to eq(active_diff_note1.discussion_id) + expect(grouped_diff_discussions[active_diff_note3.line_code].first.id).to eq(active_diff_note3.discussion_id) + end + end + + context "discussion status" do + let(:first_discussion) { build_stubbed(:discussion_note_on_merge_request, noteable: subject, project: project).to_discussion } + let(:second_discussion) { build_stubbed(:discussion_note_on_merge_request, noteable: subject, project: project).to_discussion } + let(:third_discussion) { build_stubbed(:discussion_note_on_merge_request, noteable: subject, project: project).to_discussion } + + before do + allow(subject).to receive(:resolvable_discussions).and_return([first_discussion, second_discussion, third_discussion]) + end + + describe "#discussions_resolvable?" do + context "when all discussions are unresolvable" do + before do + allow(first_discussion).to receive(:resolvable?).and_return(false) + allow(second_discussion).to receive(:resolvable?).and_return(false) + allow(third_discussion).to receive(:resolvable?).and_return(false) + end + + it "returns false" do + expect(subject.discussions_resolvable?).to be false + end + end + + context "when some discussions are unresolvable and some discussions are resolvable" do + before do + allow(first_discussion).to receive(:resolvable?).and_return(true) + allow(second_discussion).to receive(:resolvable?).and_return(false) + allow(third_discussion).to receive(:resolvable?).and_return(true) + end + + it "returns true" do + expect(subject.discussions_resolvable?).to be true + end + end + + context "when all discussions are resolvable" do + before do + allow(first_discussion).to receive(:resolvable?).and_return(true) + allow(second_discussion).to receive(:resolvable?).and_return(true) + allow(third_discussion).to receive(:resolvable?).and_return(true) + end + + it "returns true" do + expect(subject.discussions_resolvable?).to be true + end + end + end + + describe "#discussions_resolved?" 
do + context "when discussions are not resolvable" do + before do + allow(subject).to receive(:discussions_resolvable?).and_return(false) + end + + it "returns false" do + expect(subject.discussions_resolved?).to be false + end + end + + context "when discussions are resolvable" do + before do + allow(subject).to receive(:discussions_resolvable?).and_return(true) + + allow(first_discussion).to receive(:resolvable?).and_return(true) + allow(second_discussion).to receive(:resolvable?).and_return(false) + allow(third_discussion).to receive(:resolvable?).and_return(true) + end + + context "when all resolvable discussions are resolved" do + before do + allow(first_discussion).to receive(:resolved?).and_return(true) + allow(third_discussion).to receive(:resolved?).and_return(true) + end + + it "returns true" do + expect(subject.discussions_resolved?).to be true + end + end + + context "when some resolvable discussions are not resolved" do + before do + allow(first_discussion).to receive(:resolved?).and_return(true) + allow(third_discussion).to receive(:resolved?).and_return(false) + end + + it "returns false" do + expect(subject.discussions_resolved?).to be false + end + end + end + end + + describe "#discussions_to_be_resolved?" do + context "when discussions are not resolvable" do + before do + allow(subject).to receive(:discussions_resolvable?).and_return(false) + end + + it "returns false" do + expect(subject.discussions_to_be_resolved?).to be false + end + end + + context "when discussions are resolvable" do + before do + allow(subject).to receive(:discussions_resolvable?).and_return(true) + + allow(first_discussion).to receive(:resolvable?).and_return(true) + allow(second_discussion).to receive(:resolvable?).and_return(false) + allow(third_discussion).to receive(:resolvable?).and_return(true) + end + + context "when all resolvable discussions are resolved" do + before do + allow(first_discussion).to receive(:resolved?).and_return(true) + allow(third_discussion).to receive(:resolved?).and_return(true) + end + + it "returns false" do + expect(subject.discussions_to_be_resolved?).to be false + end + end + + context "when some resolvable discussions are not resolved" do + before do + allow(first_discussion).to receive(:resolved?).and_return(true) + allow(third_discussion).to receive(:resolved?).and_return(false) + end + + it "returns true" do + expect(subject.discussions_to_be_resolved?).to be true + end + end + end + end + + describe "#discussions_to_be_resolved" do + before do + allow(first_discussion).to receive(:to_be_resolved?).and_return(true) + allow(second_discussion).to receive(:to_be_resolved?).and_return(false) + allow(third_discussion).to receive(:to_be_resolved?).and_return(false) + end + + it 'includes only discussions that need to be resolved' do + expect(subject.discussions_to_be_resolved).to eq([first_discussion]) + end + end + + describe '#discussions_can_be_resolved_by?' 
do + let(:user) { build(:user) } + + context 'all discussions can be resolved by the user' do + before do + allow(first_discussion).to receive(:can_resolve?).with(user).and_return(true) + allow(second_discussion).to receive(:can_resolve?).with(user).and_return(true) + allow(third_discussion).to receive(:can_resolve?).with(user).and_return(true) + end + + it 'allows a user to resolve the discussions' do + expect(subject.discussions_can_be_resolved_by?(user)).to be(true) + end + end + + context 'one discussion cannot be resolved by the user' do + before do + allow(first_discussion).to receive(:can_resolve?).with(user).and_return(true) + allow(second_discussion).to receive(:can_resolve?).with(user).and_return(true) + allow(third_discussion).to receive(:can_resolve?).with(user).and_return(false) + end + + it 'does not allow a user to resolve the discussions' do + expect(subject.discussions_can_be_resolved_by?(user)).to be(false) + end + end + end + end +end diff --git a/spec/models/concerns/relative_positioning_spec.rb b/spec/models/concerns/relative_positioning_spec.rb index 255b584a85e..494e6f1b6f6 100644 --- a/spec/models/concerns/relative_positioning_spec.rb +++ b/spec/models/concerns/relative_positioning_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Issue, 'RelativePositioning' do +describe RelativePositioning do let(:project) { create(:empty_project) } let(:issue) { create(:issue, project: project) } let(:issue1) { create(:issue, project: project) } diff --git a/spec/models/concerns/resolvable_discussion_spec.rb b/spec/models/concerns/resolvable_discussion_spec.rb new file mode 100644 index 00000000000..18327fe262d --- /dev/null +++ b/spec/models/concerns/resolvable_discussion_spec.rb @@ -0,0 +1,548 @@ +require 'spec_helper' + +describe Discussion, ResolvableDiscussion, models: true do + subject { described_class.new([first_note, second_note, third_note]) } + + let(:first_note) { create(:discussion_note_on_merge_request) } + let(:merge_request) { first_note.noteable } + let(:project) { first_note.project } + let(:second_note) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project, in_reply_to: first_note) } + let(:third_note) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) } + + describe "#resolvable?" 
do + context "when potentially resolvable" do + before do + allow(subject).to receive(:potentially_resolvable?).and_return(true) + end + + context "when all notes are unresolvable" do + before do + allow(first_note).to receive(:resolvable?).and_return(false) + allow(second_note).to receive(:resolvable?).and_return(false) + allow(third_note).to receive(:resolvable?).and_return(false) + end + + it "returns false" do + expect(subject.resolvable?).to be false + end + end + + context "when some notes are unresolvable and some notes are resolvable" do + before do + allow(first_note).to receive(:resolvable?).and_return(true) + allow(second_note).to receive(:resolvable?).and_return(false) + allow(third_note).to receive(:resolvable?).and_return(true) + end + + it "returns true" do + expect(subject.resolvable?).to be true + end + end + + context "when all notes are resolvable" do + before do + allow(first_note).to receive(:resolvable?).and_return(true) + allow(second_note).to receive(:resolvable?).and_return(true) + allow(third_note).to receive(:resolvable?).and_return(true) + end + + it "returns true" do + expect(subject.resolvable?).to be true + end + end + end + + context "when not potentially resolvable" do + before do + allow(subject).to receive(:potentially_resolvable?).and_return(false) + end + + it "returns false" do + expect(subject.resolvable?).to be false + end + end + end + + describe "#resolved?" do + context "when not resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(false) + end + + it "returns false" do + expect(subject.resolved?).to be false + end + end + + context "when resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(true) + + allow(first_note).to receive(:resolvable?).and_return(true) + allow(second_note).to receive(:resolvable?).and_return(false) + allow(third_note).to receive(:resolvable?).and_return(true) + end + + context "when all resolvable notes are resolved" do + before do + allow(first_note).to receive(:resolved?).and_return(true) + allow(third_note).to receive(:resolved?).and_return(true) + end + + it "returns true" do + expect(subject.resolved?).to be true + end + end + + context "when some resolvable notes are not resolved" do + before do + allow(first_note).to receive(:resolved?).and_return(true) + allow(third_note).to receive(:resolved?).and_return(false) + end + + it "returns false" do + expect(subject.resolved?).to be false + end + end + end + end + + describe "#to_be_resolved?" 
do + context "when not resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(false) + end + + it "returns false" do + expect(subject.to_be_resolved?).to be false + end + end + + context "when resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(true) + + allow(first_note).to receive(:resolvable?).and_return(true) + allow(second_note).to receive(:resolvable?).and_return(false) + allow(third_note).to receive(:resolvable?).and_return(true) + end + + context "when all resolvable notes are resolved" do + before do + allow(first_note).to receive(:resolved?).and_return(true) + allow(third_note).to receive(:resolved?).and_return(true) + end + + it "returns false" do + expect(subject.to_be_resolved?).to be false + end + end + + context "when some resolvable notes are not resolved" do + before do + allow(first_note).to receive(:resolved?).and_return(true) + allow(third_note).to receive(:resolved?).and_return(false) + end + + it "returns true" do + expect(subject.to_be_resolved?).to be true + end + end + end + end + + describe "#can_resolve?" do + let(:current_user) { create(:user) } + + context "when not resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(false) + end + + it "returns false" do + expect(subject.can_resolve?(current_user)).to be false + end + end + + context "when resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(true) + end + + context "when not signed in" do + let(:current_user) { nil } + + it "returns false" do + expect(subject.can_resolve?(current_user)).to be false + end + end + + context "when signed in" do + context "when the signed in user is the noteable author" do + before do + subject.noteable.author = current_user + end + + it "returns true" do + expect(subject.can_resolve?(current_user)).to be true + end + end + + context "when the signed in user can push to the project" do + before do + subject.project.team << [current_user, :master] + end + + it "returns true" do + expect(subject.can_resolve?(current_user)).to be true + end + end + + context "when the signed in user is a random user" do + it "returns false" do + expect(subject.can_resolve?(current_user)).to be false + end + end + end + end + end + + describe "#resolve!" 
do + let(:current_user) { create(:user) } + + context "when not resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(false) + end + + it "returns nil" do + expect(subject.resolve!(current_user)).to be_nil + end + + it "doesn't set resolved_at" do + subject.resolve!(current_user) + + expect(subject.resolved_at).to be_nil + end + + it "doesn't set resolved_by" do + subject.resolve!(current_user) + + expect(subject.resolved_by).to be_nil + end + + it "doesn't mark as resolved" do + subject.resolve!(current_user) + + expect(subject.resolved?).to be false + end + end + + context "when resolvable" do + let(:user) { create(:user) } + let(:second_note) { create(:diff_note_on_commit) } # unresolvable + + before do + allow(subject).to receive(:resolvable?).and_return(true) + end + + context "when all resolvable notes are resolved" do + before do + first_note.resolve!(user) + third_note.resolve!(user) + + first_note.reload + third_note.reload + end + + it "doesn't change resolved_at on the resolved notes" do + expect(first_note.resolved_at).not_to be_nil + expect(third_note.resolved_at).not_to be_nil + + expect { subject.resolve!(current_user) }.not_to change { first_note.resolved_at } + expect { subject.resolve!(current_user) }.not_to change { third_note.resolved_at } + end + + it "doesn't change resolved_by on the resolved notes" do + expect(first_note.resolved_by).to eq(user) + expect(third_note.resolved_by).to eq(user) + + expect { subject.resolve!(current_user) }.not_to change { first_note.resolved_by } + expect { subject.resolve!(current_user) }.not_to change { third_note.resolved_by } + end + + it "doesn't change the resolved state on the resolved notes" do + expect(first_note.resolved?).to be true + expect(third_note.resolved?).to be true + + expect { subject.resolve!(current_user) }.not_to change { first_note.resolved? } + expect { subject.resolve!(current_user) }.not_to change { third_note.resolved? } + end + + it "doesn't change resolved_at" do + expect(subject.resolved_at).not_to be_nil + + expect { subject.resolve!(current_user) }.not_to change { subject.resolved_at } + end + + it "doesn't change resolved_by" do + expect(subject.resolved_by).to eq(user) + + expect { subject.resolve!(current_user) }.not_to change { subject.resolved_by } + end + + it "doesn't change resolved state" do + expect(subject.resolved?).to be true + + expect { subject.resolve!(current_user) }.not_to change { subject.resolved? } + end + end + + context "when some resolvable notes are resolved" do + before do + first_note.resolve!(user) + end + + it "doesn't change resolved_at on the resolved note" do + expect(first_note.resolved_at).not_to be_nil + + expect { subject.resolve!(current_user) }. + not_to change { first_note.reload.resolved_at } + end + + it "doesn't change resolved_by on the resolved note" do + expect(first_note.resolved_by).to eq(user) + + expect { subject.resolve!(current_user) }. + not_to change { first_note.reload && first_note.resolved_by } + end + + it "doesn't change the resolved state on the resolved note" do + expect(first_note.resolved?).to be true + + expect { subject.resolve!(current_user) }. + not_to change { first_note.reload && first_note.resolved? 
} + end + + it "sets resolved_at on the unresolved note" do + subject.resolve!(current_user) + third_note.reload + + expect(third_note.resolved_at).not_to be_nil + end + + it "sets resolved_by on the unresolved note" do + subject.resolve!(current_user) + third_note.reload + + expect(third_note.resolved_by).to eq(current_user) + end + + it "marks the unresolved note as resolved" do + subject.resolve!(current_user) + third_note.reload + + expect(third_note.resolved?).to be true + end + + it "sets resolved_at" do + subject.resolve!(current_user) + + expect(subject.resolved_at).not_to be_nil + end + + it "sets resolved_by" do + subject.resolve!(current_user) + + expect(subject.resolved_by).to eq(current_user) + end + + it "marks as resolved" do + subject.resolve!(current_user) + + expect(subject.resolved?).to be true + end + end + + context "when no resolvable notes are resolved" do + it "sets resolved_at on the unresolved notes" do + subject.resolve!(current_user) + first_note.reload + third_note.reload + + expect(first_note.resolved_at).not_to be_nil + expect(third_note.resolved_at).not_to be_nil + end + + it "sets resolved_by on the unresolved notes" do + subject.resolve!(current_user) + first_note.reload + third_note.reload + + expect(first_note.resolved_by).to eq(current_user) + expect(third_note.resolved_by).to eq(current_user) + end + + it "marks the unresolved notes as resolved" do + subject.resolve!(current_user) + first_note.reload + third_note.reload + + expect(first_note.resolved?).to be true + expect(third_note.resolved?).to be true + end + + it "sets resolved_at" do + subject.resolve!(current_user) + first_note.reload + third_note.reload + + expect(subject.resolved_at).not_to be_nil + end + + it "sets resolved_by" do + subject.resolve!(current_user) + first_note.reload + third_note.reload + + expect(subject.resolved_by).to eq(current_user) + end + + it "marks as resolved" do + subject.resolve!(current_user) + first_note.reload + third_note.reload + + expect(subject.resolved?).to be true + end + end + end + end + + describe "#unresolve!" do + context "when not resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(false) + end + + it "returns nil" do + expect(subject.unresolve!).to be_nil + end + end + + context "when resolvable" do + let(:user) { create(:user) } + + before do + allow(subject).to receive(:resolvable?).and_return(true) + + allow(first_note).to receive(:resolvable?).and_return(true) + allow(second_note).to receive(:resolvable?).and_return(false) + allow(third_note).to receive(:resolvable?).and_return(true) + end + + context "when all resolvable notes are resolved" do + before do + first_note.resolve!(user) + third_note.resolve!(user) + end + + it "unsets resolved_at on the resolved notes" do + subject.unresolve! + first_note.reload + third_note.reload + + expect(first_note.resolved_at).to be_nil + expect(third_note.resolved_at).to be_nil + end + + it "unsets resolved_by on the resolved notes" do + subject.unresolve! + first_note.reload + third_note.reload + + expect(first_note.resolved_by).to be_nil + expect(third_note.resolved_by).to be_nil + end + + it "unmarks the resolved notes as resolved" do + subject.unresolve! + first_note.reload + third_note.reload + + expect(first_note.resolved?).to be false + expect(third_note.resolved?).to be false + end + + it "unsets resolved_at" do + subject.unresolve! + first_note.reload + third_note.reload + + expect(subject.resolved_at).to be_nil + end + + it "unsets resolved_by" do + subject.unresolve! 
+ first_note.reload + third_note.reload + + expect(subject.resolved_by).to be_nil + end + + it "unmarks as resolved" do + subject.unresolve! + + expect(subject.resolved?).to be false + end + end + + context "when some resolvable notes are resolved" do + before do + first_note.resolve!(user) + end + + it "unsets resolved_at on the resolved note" do + subject.unresolve! + + expect(subject.first_note.resolved_at).to be_nil + end + + it "unsets resolved_by on the resolved note" do + subject.unresolve! + + expect(subject.first_note.resolved_by).to be_nil + end + + it "unmarks the resolved note as resolved" do + subject.unresolve! + + expect(subject.first_note.resolved?).to be false + end + end + end + end + + describe "#first_note_to_resolve" do + it "returns the first note that still needs to be resolved" do + allow(first_note).to receive(:to_be_resolved?).and_return(false) + allow(second_note).to receive(:to_be_resolved?).and_return(true) + + expect(subject.first_note_to_resolve).to eq(second_note) + end + end + + describe "#last_resolved_note" do + let(:current_user) { create(:user) } + + before do + first_note.resolve!(current_user) + third_note.resolve!(current_user) + second_note.resolve!(current_user) + end + + it "returns the last note that was resolved" do + expect(subject.last_resolved_note).to eq(second_note) + end + end +end diff --git a/spec/models/concerns/resolvable_note_spec.rb b/spec/models/concerns/resolvable_note_spec.rb new file mode 100644 index 00000000000..1503ccdff11 --- /dev/null +++ b/spec/models/concerns/resolvable_note_spec.rb @@ -0,0 +1,329 @@ +require 'spec_helper' + +describe Note, ResolvableNote, models: true do + let(:project) { create(:project) } + let(:merge_request) { create(:merge_request, source_project: project) } + subject { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) } + + context 'resolvability scopes' do + let!(:note1) { create(:note, project: project) } + let!(:note2) { create(:diff_note_on_commit, project: project) } + let!(:note3) { create(:diff_note_on_merge_request, :resolved, noteable: merge_request, project: project) } + let!(:note4) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) } + let!(:note5) { create(:discussion_note_on_issue, project: project) } + let!(:note6) { create(:discussion_note_on_merge_request, :system, noteable: merge_request, project: project) } + + describe '.potentially_resolvable' do + it 'includes diff and discussion notes on merge requests' do + expect(Note.potentially_resolvable).to match_array([note3, note4, note6]) + end + end + + describe '.resolvable' do + it 'includes non-system diff and discussion notes on merge requests' do + expect(Note.resolvable).to match_array([note3, note4]) + end + end + + describe '.resolved' do + it 'includes resolved non-system diff and discussion notes on merge requests' do + expect(Note.resolved).to match_array([note3]) + end + end + + describe '.unresolved' do + it 'includes non-resolved non-system diff and discussion notes on merge requests' do + expect(Note.unresolved).to match_array([note4]) + end + end + end + + describe ".resolve!" 
do + let(:current_user) { create(:user) } + let!(:commit_note) { create(:diff_note_on_commit, project: project) } + let!(:resolved_note) { create(:discussion_note_on_merge_request, :resolved, noteable: merge_request, project: project) } + let!(:unresolved_note) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) } + + before do + described_class.resolve!(current_user) + + commit_note.reload + resolved_note.reload + unresolved_note.reload + end + + it 'resolves only the resolvable, not yet resolved notes' do + expect(commit_note.resolved_at).to be_nil + expect(resolved_note.resolved_by).not_to eq(current_user) + expect(unresolved_note.resolved_at).not_to be_nil + expect(unresolved_note.resolved_by).to eq(current_user) + end + end + + describe ".unresolve!" do + let!(:resolved_note) { create(:discussion_note_on_merge_request, :resolved, noteable: merge_request, project: project) } + + before do + described_class.unresolve! + + resolved_note.reload + end + + it 'unresolves the resolved notes' do + expect(resolved_note.resolved_by).to be_nil + expect(resolved_note.resolved_at).to be_nil + end + end + + describe '#resolvable?' do + context "when potentially resolvable" do + before do + allow(subject).to receive(:potentially_resolvable?).and_return(true) + end + + context "when a system note" do + before do + subject.system = true + end + + it "returns false" do + expect(subject.resolvable?).to be false + end + end + + context "when a regular note" do + it "returns true" do + expect(subject.resolvable?).to be true + end + end + end + + context "when not potentially resolvable" do + before do + allow(subject).to receive(:potentially_resolvable?).and_return(false) + end + + it "returns false" do + expect(subject.resolvable?).to be false + end + end + end + + describe "#to_be_resolved?" do + context "when not resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(false) + end + + it "returns false" do + expect(subject.to_be_resolved?).to be false + end + end + + context "when resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(true) + end + + context "when resolved" do + before do + allow(subject).to receive(:resolved?).and_return(true) + end + + it "returns false" do + expect(subject.to_be_resolved?).to be false + end + end + + context "when not resolved" do + before do + allow(subject).to receive(:resolved?).and_return(false) + end + + it "returns true" do + expect(subject.to_be_resolved?).to be true + end + end + end + end + + describe "#resolved?" do + let(:current_user) { create(:user) } + + context 'when not resolvable' do + before do + subject.resolve!(current_user) + + allow(subject).to receive(:resolvable?).and_return(false) + end + + it 'returns false' do + expect(subject.resolved?).to be_falsey + end + end + + context 'when resolvable' do + context 'when the note has been resolved' do + before do + subject.resolve!(current_user) + end + + it 'returns true' do + expect(subject.resolved?).to be_truthy + end + end + + context 'when the note has not been resolved' do + it 'returns false' do + expect(subject.resolved?).to be_falsey + end + end + end + end + + describe "#resolve!" 
do + let(:current_user) { create(:user) } + + context "when not resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(false) + end + + it "returns nil" do + expect(subject.resolve!(current_user)).to be_nil + end + + it "doesn't set resolved_at" do + subject.resolve!(current_user) + + expect(subject.resolved_at).to be_nil + end + + it "doesn't set resolved_by" do + subject.resolve!(current_user) + + expect(subject.resolved_by).to be_nil + end + + it "doesn't mark as resolved" do + subject.resolve!(current_user) + + expect(subject.resolved?).to be false + end + end + + context "when resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(true) + end + + context "when already resolved" do + let(:user) { create(:user) } + + before do + subject.resolve!(user) + end + + it "returns nil" do + expect(subject.resolve!(current_user)).to be_nil + end + + it "doesn't change resolved_at" do + expect(subject.resolved_at).not_to be_nil + + expect { subject.resolve!(current_user) }.not_to change { subject.resolved_at } + end + + it "doesn't change resolved_by" do + expect(subject.resolved_by).to eq(user) + + expect { subject.resolve!(current_user) }.not_to change { subject.resolved_by } + end + + it "doesn't change resolved status" do + expect(subject.resolved?).to be true + + expect { subject.resolve!(current_user) }.not_to change { subject.resolved? } + end + end + + context "when not yet resolved" do + it "returns true" do + expect(subject.resolve!(current_user)).to be true + end + + it "sets resolved_at" do + subject.resolve!(current_user) + + expect(subject.resolved_at).not_to be_nil + end + + it "sets resolved_by" do + subject.resolve!(current_user) + + expect(subject.resolved_by).to eq(current_user) + end + + it "marks as resolved" do + subject.resolve!(current_user) + + expect(subject.resolved?).to be true + end + end + end + end + + describe "#unresolve!" do + context "when not resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(false) + end + + it "returns nil" do + expect(subject.unresolve!).to be_nil + end + end + + context "when resolvable" do + before do + allow(subject).to receive(:resolvable?).and_return(true) + end + + context "when resolved" do + let(:user) { create(:user) } + + before do + subject.resolve!(user) + end + + it "returns true" do + expect(subject.unresolve!).to be true + end + + it "unsets resolved_at" do + subject.unresolve! + + expect(subject.resolved_at).to be_nil + end + + it "unsets resolved_by" do + subject.unresolve! + + expect(subject.resolved_by).to be_nil + end + + it "unmarks as resolved" do + subject.unresolve! 
+ + expect(subject.resolved?).to be false + end + end + + context "when not resolved" do + it "returns nil" do + expect(subject.unresolve!).to be_nil + end + end + end + end +end diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb index 677e60e1282..49a4132f763 100644 --- a/spec/models/concerns/routable_spec.rb +++ b/spec/models/concerns/routable_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe Group, 'Routable' do - let!(:group) { create(:group) } + let!(:group) { create(:group, name: 'foo') } describe 'Validations' do it { is_expected.to validate_presence_of(:route) } @@ -9,6 +9,7 @@ describe Group, 'Routable' do describe 'Associations' do it { is_expected.to have_one(:route).dependent(:destroy) } + it { is_expected.to have_many(:redirect_routes).dependent(:destroy) } end describe 'Callbacks' do @@ -35,10 +36,53 @@ describe Group, 'Routable' do describe '.find_by_full_path' do let!(:nested_group) { create(:group, parent: group) } - it { expect(described_class.find_by_full_path(group.to_param)).to eq(group) } - it { expect(described_class.find_by_full_path(group.to_param.upcase)).to eq(group) } - it { expect(described_class.find_by_full_path(nested_group.to_param)).to eq(nested_group) } - it { expect(described_class.find_by_full_path('unknown')).to eq(nil) } + context 'without any redirect routes' do + it { expect(described_class.find_by_full_path(group.to_param)).to eq(group) } + it { expect(described_class.find_by_full_path(group.to_param.upcase)).to eq(group) } + it { expect(described_class.find_by_full_path(nested_group.to_param)).to eq(nested_group) } + it { expect(described_class.find_by_full_path('unknown')).to eq(nil) } + end + + context 'with redirect routes' do + let!(:group_redirect_route) { group.redirect_routes.create!(path: 'bar') } + let!(:nested_group_redirect_route) { nested_group.redirect_routes.create!(path: nested_group.path.sub('foo', 'bar')) } + + context 'without follow_redirects option' do + context 'with the given path not matching any route' do + it { expect(described_class.find_by_full_path('unknown')).to eq(nil) } + end + + context 'with the given path matching the canonical route' do + it { expect(described_class.find_by_full_path(group.to_param)).to eq(group) } + it { expect(described_class.find_by_full_path(group.to_param.upcase)).to eq(group) } + it { expect(described_class.find_by_full_path(nested_group.to_param)).to eq(nested_group) } + end + + context 'with the given path matching a redirect route' do + it { expect(described_class.find_by_full_path(group_redirect_route.path)).to eq(nil) } + it { expect(described_class.find_by_full_path(group_redirect_route.path.upcase)).to eq(nil) } + it { expect(described_class.find_by_full_path(nested_group_redirect_route.path)).to eq(nil) } + end + end + + context 'with follow_redirects option set to true' do + context 'with the given path not matching any route' do + it { expect(described_class.find_by_full_path('unknown', follow_redirects: true)).to eq(nil) } + end + + context 'with the given path matching the canonical route' do + it { expect(described_class.find_by_full_path(group.to_param, follow_redirects: true)).to eq(group) } + it { expect(described_class.find_by_full_path(group.to_param.upcase, follow_redirects: true)).to eq(group) } + it { expect(described_class.find_by_full_path(nested_group.to_param, follow_redirects: true)).to eq(nested_group) } + end + + context 'with the given path matching a redirect route' do + it { 
expect(described_class.find_by_full_path(group_redirect_route.path, follow_redirects: true)).to eq(group) } + it { expect(described_class.find_by_full_path(group_redirect_route.path.upcase, follow_redirects: true)).to eq(group) } + it { expect(described_class.find_by_full_path(nested_group_redirect_route.path, follow_redirects: true)).to eq(nested_group) } + end + end + end end describe '.where_full_path_in' do @@ -81,12 +125,137 @@ describe Group, 'Routable' do it { is_expected.to eq([nested_group]) } end + describe '.member_self_and_descendants' do + let!(:user) { create(:user) } + let!(:nested_group) { create(:group, parent: group) } + + before { group.add_owner(user) } + subject { described_class.member_self_and_descendants(user.id) } + + it { is_expected.to match_array [group, nested_group] } + end + + describe '.member_hierarchy' do + # foo/bar would also match foo/barbaz instead of just foo/bar and foo/bar/baz + let!(:user) { create(:user) } + + # group + # _______ (foo) _______ + # | | + # | | + # nested_group_1 nested_group_2 + # (bar) (barbaz) + # | | + # | | + # nested_group_1_1 nested_group_2_1 + # (baz) (baz) + # + let!(:nested_group_1) { create :group, parent: group, name: 'bar' } + let!(:nested_group_1_1) { create :group, parent: nested_group_1, name: 'baz' } + let!(:nested_group_2) { create :group, parent: group, name: 'barbaz' } + let!(:nested_group_2_1) { create :group, parent: nested_group_2, name: 'baz' } + + context 'user is not a member of any group' do + subject { described_class.member_hierarchy(user.id) } + + it 'returns an empty array' do + is_expected.to eq [] + end + end + + context 'user is member of all groups' do + before do + group.add_owner(user) + nested_group_1.add_owner(user) + nested_group_1_1.add_owner(user) + nested_group_2.add_owner(user) + nested_group_2_1.add_owner(user) + end + subject { described_class.member_hierarchy(user.id) } + + it 'returns all groups' do + is_expected.to match_array [ + group, + nested_group_1, nested_group_1_1, + nested_group_2, nested_group_2_1 + ] + end + end + + context 'user is member of the top group' do + before { group.add_owner(user) } + subject { described_class.member_hierarchy(user.id) } + + it 'returns all groups' do + is_expected.to match_array [ + group, + nested_group_1, nested_group_1_1, + nested_group_2, nested_group_2_1 + ] + end + end + + context 'user is member of the first child (internal node), branch 1' do + before { nested_group_1.add_owner(user) } + subject { described_class.member_hierarchy(user.id) } + + it 'returns the groups in the hierarchy' do + is_expected.to match_array [ + group, + nested_group_1, nested_group_1_1 + ] + end + end + + context 'user is member of the first child (internal node), branch 2' do + before { nested_group_2.add_owner(user) } + subject { described_class.member_hierarchy(user.id) } + + it 'returns the groups in the hierarchy' do + is_expected.to match_array [ + group, + nested_group_2, nested_group_2_1 + ] + end + end + + context 'user is member of the last child (leaf node)' do + before { nested_group_1_1.add_owner(user) } + subject { described_class.member_hierarchy(user.id) } + + it 'returns the groups in the hierarchy' do + is_expected.to match_array [ + group, + nested_group_1, nested_group_1_1 + ] + end + end + end + describe '#full_path' do let(:group) { create(:group) } let(:nested_group) { create(:group, parent: group) } it { expect(group.full_path).to eq(group.path) } it { expect(nested_group.full_path).to eq("#{group.full_path}/#{nested_group.path}") } + + 
context 'with RequestStore active' do + before do + RequestStore.begin! + end + + after do + RequestStore.end! + RequestStore.clear! + end + + it 'does not load the route table more than once' do + expect(group).to receive(:uncached_full_path).once.and_call_original + + 3.times { group.full_path } + expect(group.full_path).to eq(group.path) + end + end end describe '#full_name' do diff --git a/spec/models/concerns/spammable_spec.rb b/spec/models/concerns/spammable_spec.rb index fd3b8307571..e698207166c 100644 --- a/spec/models/concerns/spammable_spec.rb +++ b/spec/models/concerns/spammable_spec.rb @@ -1,9 +1,11 @@ require 'spec_helper' -describe Issue, 'Spammable' do +describe Spammable do let(:issue) { create(:issue, description: 'Test Desc.') } describe 'Associations' do + subject { build(:issue) } + it { is_expected.to have_one(:user_agent_detail).dependent(:destroy) } end diff --git a/spec/models/concerns/strip_attribute_spec.rb b/spec/models/concerns/strip_attribute_spec.rb index c3af7a0960f..8c945686b66 100644 --- a/spec/models/concerns/strip_attribute_spec.rb +++ b/spec/models/concerns/strip_attribute_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Milestone, "StripAttribute" do +describe StripAttribute do let(:milestone) { create(:milestone) } describe ".strip_attributes" do diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb new file mode 100644 index 00000000000..eff41d85972 --- /dev/null +++ b/spec/models/container_repository_spec.rb @@ -0,0 +1,234 @@ +require 'spec_helper' + +describe ContainerRepository do + let(:group) { create(:group, name: 'group') } + let(:project) { create(:project, path: 'test', group: group) } + + let(:repository) do + create(:container_repository, name: 'my_image', project: project) + end + + before do + stub_container_registry_config(enabled: true, + api_url: 'http://registry.gitlab', + host_port: 'registry.gitlab') + + stub_request(:get, 'http://registry.gitlab/v2/group/test/my_image/tags/list') + .with(headers: { 'Accept' => 'application/vnd.docker.distribution.manifest.v2+json' }) + .to_return( + status: 200, + body: JSON.dump(tags: ['test_tag']), + headers: { 'Content-Type' => 'application/json' }) + end + + describe 'associations' do + it 'belongs to the project' do + expect(repository).to belong_to(:project) + end + end + + describe '#tag' do + it 'has a test tag' do + expect(repository.tag('test')).not_to be_nil + end + end + + describe '#path' do + context 'when project path does not contain uppercase letters' do + it 'returns a full path to the repository' do + expect(repository.path).to eq('group/test/my_image') + end + end + + context 'when path contains uppercase letters' do + let(:project) { create(:project, path: 'MY_PROJECT', group: group) } + + it 'returns a full path without capital letters' do + expect(repository.path).to eq('group/my_project/my_image') + end + end + end + + describe '#manifest' do + it 'returns non-empty manifest' do + expect(repository.manifest).not_to be_nil + end + end + + describe '#valid?' do + it 'is a valid repository' do + expect(repository).to be_valid + end + end + + describe '#tags' do + it 'returns non-empty tags list' do + expect(repository.tags).not_to be_empty + end + end + + describe '#has_tags?' do + it 'has tags' do + expect(repository).to have_tags + end + end + + describe '#delete_tags!' 
do + let(:repository) do + create(:container_repository, name: 'my_image', + tags: %w[latest rc1], + project: project) + end + + context 'when action succeeds' do + it 'returns status that indicates success' do + expect(repository.client) + .to receive(:delete_repository_tag) + .and_return(true) + + expect(repository.delete_tags!).to be_truthy + end + end + + context 'when action fails' do + it 'returns status that indicates failure' do + expect(repository.client) + .to receive(:delete_repository_tag) + .and_return(false) + + expect(repository.delete_tags!).to be_falsey + end + end + end + + describe '#location' do + context 'when registry is running on a custom port' do + before do + stub_container_registry_config(enabled: true, + api_url: 'http://registry.gitlab:5000', + host_port: 'registry.gitlab:5000') + end + + it 'returns a full location of the repository' do + expect(repository.location) + .to eq 'registry.gitlab:5000/group/test/my_image' + end + end + end + + describe '#root_repository?' do + context 'when repository is a root repository' do + let(:repository) { create(:container_repository, :root) } + + it 'returns true' do + expect(repository).to be_root_repository + end + end + + context 'when repository is not a root repository' do + it 'returns false' do + expect(repository).not_to be_root_repository + end + end + end + + describe '.build_from_path' do + let(:registry_path) do + ContainerRegistry::Path.new(project.full_path + '/some/image') + end + + let(:repository) do + described_class.build_from_path(registry_path) + end + + it 'fabricates repository assigned to a correct project' do + expect(repository.project).to eq project + end + + it 'fabricates repository with a correct name' do + expect(repository.name).to eq 'some/image' + end + + it 'is not persisted' do + expect(repository).not_to be_persisted + end + end + + describe '.create_from_path!' 
do + let(:repository) do + described_class.create_from_path!(ContainerRegistry::Path.new(path)) + end + + let(:repository_path) { ContainerRegistry::Path.new(path) } + + context 'when received multi-level repository path' do + let(:path) { project.full_path + '/some/image' } + + it 'fabricates repository assigned to a correct project' do + expect(repository.project).to eq project + end + + it 'fabricates repository with a correct name' do + expect(repository.name).to eq 'some/image' + end + end + + context 'when path is too long' do + let(:path) do + project.full_path + '/a/b/c/d/e/f/g/h/i/j/k/l/n/o/p/s/t/u/x/y/z' + end + + it 'does not create repository and raises error' do + expect { repository }.to raise_error( + ContainerRegistry::Path::InvalidRegistryPathError) + end + end + + context 'when received multi-level repository with nested groups' do + let(:group) { create(:group, :nested, name: 'nested') } + let(:path) { project.full_path + '/some/image' } + + it 'fabricates repository assigned to a correct project' do + expect(repository.project).to eq project + end + + it 'fabricates repository with a correct name' do + expect(repository.name).to eq 'some/image' + end + + it 'has path including a nested group' do + expect(repository.path).to include 'nested/test/some/image' + end + end + + context 'when received root repository path' do + let(:path) { project.full_path } + + it 'fabricates repository assigned to a correct project' do + expect(repository.project).to eq project + end + + it 'fabricates repository with an empty name' do + expect(repository.name).to be_empty + end + end + end + + describe '.build_root_repository' do + let(:repository) do + described_class.build_root_repository(project) + end + + it 'fabricates a root repository object' do + expect(repository).to be_root_repository + end + + it 'assignes it to the correct project' do + expect(repository.project).to eq project + end + + it 'does not persist it' do + expect(repository).not_to be_persisted + end + end +end diff --git a/spec/models/cycle_analytics/plan_spec.rb b/spec/models/cycle_analytics/plan_spec.rb index 55483fc876a..4f33f3c6d69 100644 --- a/spec/models/cycle_analytics/plan_spec.rb +++ b/spec/models/cycle_analytics/plan_spec.rb @@ -13,7 +13,7 @@ describe 'CycleAnalytics#plan', feature: true do data_fn: -> (context) do { issue: context.create(:issue, project: context.project), - branch_name: context.random_git_name + branch_name: context.generate(:branch) } end, start_time_conditions: [["issue associated with a milestone", @@ -35,7 +35,7 @@ describe 'CycleAnalytics#plan', feature: true do context "when a regular label (instead of a list label) is added to the issue" do it "returns nil" do - branch_name = random_git_name + branch_name = generate(:branch) label = create(:label) issue = create(:issue, project: project) issue.update(label_ids: [label.id]) diff --git a/spec/models/cycle_analytics/production_spec.rb b/spec/models/cycle_analytics/production_spec.rb index e6a826a9418..4744b9e05ea 100644 --- a/spec/models/cycle_analytics/production_spec.rb +++ b/spec/models/cycle_analytics/production_spec.rb @@ -23,7 +23,7 @@ describe 'CycleAnalytics#production', feature: true do # Make other changes on master sha = context.project.repository.create_file( context.user, - context.random_git_name, + context.generate(:branch), 'content', message: 'commit message', branch_name: 'master') diff --git a/spec/models/cycle_analytics/staging_spec.rb b/spec/models/cycle_analytics/staging_spec.rb index 3a02ed81adb..f78d7a23105 
100644 --- a/spec/models/cycle_analytics/staging_spec.rb +++ b/spec/models/cycle_analytics/staging_spec.rb @@ -28,7 +28,7 @@ describe 'CycleAnalytics#staging', feature: true do # Make other changes on master sha = context.project.repository.create_file( context.user, - context.random_git_name, + context.generate(:branch), 'content', message: 'commit message', branch_name: 'master') diff --git a/spec/models/cycle_analytics/test_spec.rb b/spec/models/cycle_analytics/test_spec.rb index c2ba012a0e6..d0b919efcf9 100644 --- a/spec/models/cycle_analytics/test_spec.rb +++ b/spec/models/cycle_analytics/test_spec.rb @@ -14,6 +14,7 @@ describe 'CycleAnalytics#test', feature: true do issue = context.create(:issue, project: context.project) merge_request = context.create_merge_request_closing_issue(issue) pipeline = context.create(:ci_pipeline, ref: merge_request.source_branch, sha: merge_request.diff_head_sha, project: context.project) + merge_request.update(head_pipeline: pipeline) { pipeline: pipeline, issue: issue } end, start_time_conditions: [["pipeline is started", -> (context, data) { data[:pipeline].run! }]], diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb index 080ff2f3f43..4bda7d4314a 100644 --- a/spec/models/deployment_spec.rb +++ b/spec/models/deployment_spec.rb @@ -49,6 +49,34 @@ describe Deployment, models: true do end end + describe '#metrics' do + let(:deployment) { create(:deployment) } + + subject { deployment.metrics } + + context 'metrics are disabled' do + it { is_expected.to eq({}) } + end + + context 'metrics are enabled' do + let(:simple_metrics) do + { + success: true, + metrics: {}, + last_update: 42, + deployment_time: 1494408956 + } + end + + before do + allow(deployment.project).to receive_message_chain(:monitoring_service, :deployment_metrics) + .with(any_args).and_return(simple_metrics) + end + + it { is_expected.to eq(simple_metrics) } + end + end + describe '#stop_action' do let(:build) { create(:ci_build) } diff --git a/spec/models/diff_discussion_spec.rb b/spec/models/diff_discussion_spec.rb new file mode 100644 index 00000000000..81f338745b1 --- /dev/null +++ b/spec/models/diff_discussion_spec.rb @@ -0,0 +1,86 @@ +require 'spec_helper' + +describe DiffDiscussion, model: true do + include RepoHelpers + + subject { described_class.new([diff_note]) } + + let(:project) { create(:project) } + let(:merge_request) { create(:merge_request, source_project: project, target_project: project) } + let(:diff_note) { create(:diff_note_on_merge_request, noteable: merge_request, project: project) } + + describe '#reply_attributes' do + it 'includes position and original_position' do + attributes = subject.reply_attributes + expect(attributes[:position]).to eq(diff_note.position.to_json) + expect(attributes[:original_position]).to eq(diff_note.original_position.to_json) + end + end + + describe '#merge_request_version_params' do + let(:merge_request) { create(:merge_request, source_project: project, target_project: project, importing: true) } + let!(:merge_request_diff1) { merge_request.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') } + let!(:merge_request_diff2) { merge_request.merge_request_diffs.create(head_commit_sha: nil) } + let!(:merge_request_diff3) { merge_request.merge_request_diffs.create(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') } + + context 'when the discussion is active' do + it 'returns an empty hash, which will end up showing the latest version' do + 
expect(subject.merge_request_version_params).to eq({})
+      end
+    end
+
+    context 'when the discussion is on an older merge request version' do
+      let(:position) do
+        Gitlab::Diff::Position.new(
+          old_path: ".gitmodules",
+          new_path: ".gitmodules",
+          old_line: nil,
+          new_line: 4,
+          diff_refs: merge_request_diff1.diff_refs
+        )
+      end
+
+      let(:diff_note) { create(:diff_note_on_merge_request, noteable: merge_request, project: project, position: position) }
+
+      before do
+        diff_note.position = diff_note.original_position
+        diff_note.save!
+      end
+
+      it 'returns the diff ID for the version to show' do
+        expect(subject.merge_request_version_params).to eq(diff_id: merge_request_diff1.id)
+      end
+    end
+
+    context 'when the discussion is on a comparison between merge request versions' do
+      let(:position) do
+        Gitlab::Diff::Position.new(
+          old_path: ".gitmodules",
+          new_path: ".gitmodules",
+          old_line: 4,
+          new_line: 4,
+          diff_refs: merge_request_diff3.compare_with(merge_request_diff1.head_commit_sha).diff_refs
+        )
+      end
+
+      let(:diff_note) { create(:diff_note_on_merge_request, noteable: merge_request, project: project, position: position) }
+
+      it 'returns the diff ID and start sha of the versions to compare' do
+        expect(subject.merge_request_version_params).to eq(diff_id: merge_request_diff3.id, start_sha: merge_request_diff1.head_commit_sha)
+      end
+    end
+
+    context 'when the discussion does not have a merge request version' do
+      let(:diff_note) { create(:diff_note_on_merge_request, noteable: merge_request, project: project, diff_refs: project.commit(sample_commit.id).diff_refs) }
+
+      before do
+        diff_note.position = diff_note.original_position
+        diff_note.save!
+      end
+
+      it 'returns nil' do
+        expect(subject.merge_request_version_params).to be_nil
+      end
+    end
+  end
+end
diff --git a/spec/models/diff_note_spec.rb b/spec/models/diff_note_spec.rb
index 9ea3a4b7020..ab4c51a87b0 100644
--- a/spec/models/diff_note_spec.rb
+++ b/spec/models/diff_note_spec.rb
@@ -31,43 +31,6 @@ describe DiffNote, models: true do
   subject { create(:diff_note_on_merge_request, project: project, position: position, noteable: merge_request) }

-  describe ".resolve!" do
-    let(:current_user) { create(:user) }
-    let!(:commit_note) { create(:diff_note_on_commit) }
-    let!(:resolved_note) { create(:diff_note_on_merge_request, :resolved) }
-    let!(:unresolved_note) { create(:diff_note_on_merge_request) }
-
-    before do
-      described_class.resolve!(current_user)
-
-      commit_note.reload
-      resolved_note.reload
-      unresolved_note.reload
-    end
-
-    it 'resolves only the resolvable, not yet resolved notes' do
-      expect(commit_note.resolved_at).to be_nil
-      expect(resolved_note.resolved_by).not_to eq(current_user)
-      expect(unresolved_note.resolved_at).not_to be_nil
-      expect(unresolved_note.resolved_by).to eq(current_user)
-    end
-  end
-
-  describe ".unresolve!" do
-    let!(:resolved_note) { create(:diff_note_on_merge_request, :resolved) }
-
-    before do
-      described_class.unresolve!
- - resolved_note.reload - end - - it 'unresolves the resolved notes' do - expect(resolved_note.resolved_by).to be_nil - expect(resolved_note.resolved_at).to be_nil - end - end - describe "#position=" do context "when provided a string" do it "sets the position" do @@ -94,6 +57,32 @@ describe DiffNote, models: true do end end + describe "#original_position=" do + context "when provided a string" do + it "sets the original position" do + subject.original_position = new_position.to_json + + expect(subject.original_position).to eq(new_position) + end + end + + context "when provided a hash" do + it "sets the original position" do + subject.original_position = new_position.to_h + + expect(subject.original_position).to eq(new_position) + end + end + + context "when provided a position object" do + it "sets the original position" do + subject.original_position = new_position + + expect(subject.original_position).to eq(new_position) + end + end + end + describe "#diff_file" do it "returns the correct diff file" do diff_file = subject.diff_file @@ -226,252 +215,6 @@ describe DiffNote, models: true do end end - describe "#resolvable?" do - context "when noteable is a commit" do - subject { create(:diff_note_on_commit, project: project, position: position) } - - it "returns false" do - expect(subject.resolvable?).to be false - end - end - - context "when noteable is a merge request" do - context "when a system note" do - before do - subject.system = true - end - - it "returns false" do - expect(subject.resolvable?).to be false - end - end - - context "when a regular note" do - it "returns true" do - expect(subject.resolvable?).to be true - end - end - end - end - - describe "#to_be_resolved?" do - context "when not resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(false) - end - - it "returns false" do - expect(subject.to_be_resolved?).to be false - end - end - - context "when resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(true) - end - - context "when resolved" do - before do - allow(subject).to receive(:resolved?).and_return(true) - end - - it "returns false" do - expect(subject.to_be_resolved?).to be false - end - end - - context "when not resolved" do - before do - allow(subject).to receive(:resolved?).and_return(false) - end - - it "returns true" do - expect(subject.to_be_resolved?).to be true - end - end - end - end - - describe "#resolve!" 
do - let(:current_user) { create(:user) } - - context "when not resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(false) - end - - it "returns nil" do - expect(subject.resolve!(current_user)).to be_nil - end - - it "doesn't set resolved_at" do - subject.resolve!(current_user) - - expect(subject.resolved_at).to be_nil - end - - it "doesn't set resolved_by" do - subject.resolve!(current_user) - - expect(subject.resolved_by).to be_nil - end - - it "doesn't mark as resolved" do - subject.resolve!(current_user) - - expect(subject.resolved?).to be false - end - end - - context "when resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(true) - end - - context "when already resolved" do - let(:user) { create(:user) } - - before do - subject.resolve!(user) - end - - it "returns nil" do - expect(subject.resolve!(current_user)).to be_nil - end - - it "doesn't change resolved_at" do - expect(subject.resolved_at).not_to be_nil - - expect { subject.resolve!(current_user) }.not_to change { subject.resolved_at } - end - - it "doesn't change resolved_by" do - expect(subject.resolved_by).to eq(user) - - expect { subject.resolve!(current_user) }.not_to change { subject.resolved_by } - end - - it "doesn't change resolved status" do - expect(subject.resolved?).to be true - - expect { subject.resolve!(current_user) }.not_to change { subject.resolved? } - end - end - - context "when not yet resolved" do - it "returns true" do - expect(subject.resolve!(current_user)).to be true - end - - it "sets resolved_at" do - subject.resolve!(current_user) - - expect(subject.resolved_at).not_to be_nil - end - - it "sets resolved_by" do - subject.resolve!(current_user) - - expect(subject.resolved_by).to eq(current_user) - end - - it "marks as resolved" do - subject.resolve!(current_user) - - expect(subject.resolved?).to be true - end - end - end - end - - describe "#unresolve!" do - context "when not resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(false) - end - - it "returns nil" do - expect(subject.unresolve!).to be_nil - end - end - - context "when resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(true) - end - - context "when resolved" do - let(:user) { create(:user) } - - before do - subject.resolve!(user) - end - - it "returns true" do - expect(subject.unresolve!).to be true - end - - it "unsets resolved_at" do - subject.unresolve! - - expect(subject.resolved_at).to be_nil - end - - it "unsets resolved_by" do - subject.unresolve! - - expect(subject.resolved_by).to be_nil - end - - it "unmarks as resolved" do - subject.unresolve! 
- - expect(subject.resolved?).to be false - end - end - - context "when not resolved" do - it "returns nil" do - expect(subject.unresolve!).to be_nil - end - end - end - end - - describe "#discussion" do - context "when not resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(false) - end - - it "returns nil" do - expect(subject.discussion).to be_nil - end - end - - context "when resolvable" do - let!(:diff_note2) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: subject.position) } - let!(:diff_note3) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: active_position2) } - - let(:active_position2) do - Gitlab::Diff::Position.new( - old_path: "files/ruby/popen.rb", - new_path: "files/ruby/popen.rb", - old_line: 16, - new_line: 22, - diff_refs: merge_request.diff_refs - ) - end - - it "returns the discussion this note is in" do - discussion = subject.discussion - - expect(discussion.id).to eq(subject.discussion_id) - expect(discussion.notes).to eq([subject, diff_note2]) - end - end - end - describe "#discussion_id" do let(:note) { create(:diff_note_on_merge_request) } @@ -497,27 +240,37 @@ describe DiffNote, models: true do end end - describe "#original_discussion_id" do - let(:note) { create(:diff_note_on_merge_request) } + describe '#created_at_diff?' do + let(:diff_refs) { project.commit(sample_commit.id).diff_refs } + let(:position) do + Gitlab::Diff::Position.new( + old_path: "files/ruby/popen.rb", + new_path: "files/ruby/popen.rb", + old_line: nil, + new_line: 14, + diff_refs: diff_refs + ) + end - context "when it is newly created" do - it "has a discussion id" do - expect(note.original_discussion_id).not_to be_nil - expect(note.original_discussion_id).to match(/\A\h{40}\z/) + context "when noteable is a commit" do + subject { build(:diff_note_on_commit, project: project, position: position) } + + it "returns true" do + expect(subject.created_at_diff?(diff_refs)).to be true end end - context "when it didn't store a discussion id before" do - before do - note.update_column(:original_discussion_id, nil) + context "when noteable is a merge request" do + context "when the diff refs match the original one of the diff note" do + it "returns true" do + expect(subject.created_at_diff?(diff_refs)).to be true + end end - it "has a discussion id" do - # The original_discussion_id is set in `after_initialize`, so `reload` won't work - reloaded_note = Note.find(note.id) - - expect(reloaded_note.original_discussion_id).not_to be_nil - expect(reloaded_note.original_discussion_id).to match(/\A\h{40}\z/) + context "when the diff refs don't match the original one of the diff note" do + it "returns false" do + expect(subject.created_at_diff?(merge_request.diff_refs)).to be false + end end end end diff --git a/spec/models/discussion_spec.rb b/spec/models/discussion_spec.rb index bc32fadd391..0221e23ced8 100644 --- a/spec/models/discussion_spec.rb +++ b/spec/models/discussion_spec.rb @@ -4,618 +4,27 @@ describe Discussion, model: true do subject { described_class.new([first_note, second_note, third_note]) } let(:first_note) { create(:diff_note_on_merge_request) } - let(:second_note) { create(:diff_note_on_merge_request) } + let(:merge_request) { first_note.noteable } + let(:second_note) { create(:diff_note_on_merge_request, in_reply_to: first_note) } let(:third_note) { create(:diff_note_on_merge_request) } - describe "#resolvable?" 
do - context "when a diff discussion" do - before do - allow(subject).to receive(:diff_discussion?).and_return(true) - end - - context "when all notes are unresolvable" do - before do - allow(first_note).to receive(:resolvable?).and_return(false) - allow(second_note).to receive(:resolvable?).and_return(false) - allow(third_note).to receive(:resolvable?).and_return(false) - end - - it "returns false" do - expect(subject.resolvable?).to be false - end - end - - context "when some notes are unresolvable and some notes are resolvable" do - before do - allow(first_note).to receive(:resolvable?).and_return(true) - allow(second_note).to receive(:resolvable?).and_return(false) - allow(third_note).to receive(:resolvable?).and_return(true) - end - - it "returns true" do - expect(subject.resolvable?).to be true - end - end - - context "when all notes are resolvable" do - before do - allow(first_note).to receive(:resolvable?).and_return(true) - allow(second_note).to receive(:resolvable?).and_return(true) - allow(third_note).to receive(:resolvable?).and_return(true) - end - - it "returns true" do - expect(subject.resolvable?).to be true - end - end - end - - context "when not a diff discussion" do - before do - allow(subject).to receive(:diff_discussion?).and_return(false) - end - - it "returns false" do - expect(subject.resolvable?).to be false - end - end - end - - describe "#resolved?" do - context "when not resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(false) - end - - it "returns false" do - expect(subject.resolved?).to be false - end - end - - context "when resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(true) - - allow(first_note).to receive(:resolvable?).and_return(true) - allow(second_note).to receive(:resolvable?).and_return(false) - allow(third_note).to receive(:resolvable?).and_return(true) - end - - context "when all resolvable notes are resolved" do - before do - allow(first_note).to receive(:resolved?).and_return(true) - allow(third_note).to receive(:resolved?).and_return(true) - end - - it "returns true" do - expect(subject.resolved?).to be true - end - end - - context "when some resolvable notes are not resolved" do - before do - allow(first_note).to receive(:resolved?).and_return(true) - allow(third_note).to receive(:resolved?).and_return(false) - end - - it "returns false" do - expect(subject.resolved?).to be false - end - end - end - end - - describe "#to_be_resolved?" 
do - context "when not resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(false) - end - - it "returns false" do - expect(subject.to_be_resolved?).to be false - end - end - - context "when resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(true) - - allow(first_note).to receive(:resolvable?).and_return(true) - allow(second_note).to receive(:resolvable?).and_return(false) - allow(third_note).to receive(:resolvable?).and_return(true) - end - - context "when all resolvable notes are resolved" do - before do - allow(first_note).to receive(:resolved?).and_return(true) - allow(third_note).to receive(:resolved?).and_return(true) - end - - it "returns false" do - expect(subject.to_be_resolved?).to be false - end - end - - context "when some resolvable notes are not resolved" do - before do - allow(first_note).to receive(:resolved?).and_return(true) - allow(third_note).to receive(:resolved?).and_return(false) - end - - it "returns true" do - expect(subject.to_be_resolved?).to be true - end - end - end - end - - describe "#can_resolve?" do - let(:current_user) { create(:user) } - - context "when not resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(false) - end - - it "returns false" do - expect(subject.can_resolve?(current_user)).to be false - end - end - - context "when resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(true) - end - - context "when not signed in" do - let(:current_user) { nil } - - it "returns false" do - expect(subject.can_resolve?(current_user)).to be false - end - end - - context "when signed in" do - context "when the signed in user is the noteable author" do - before do - subject.noteable.author = current_user - end - - it "returns true" do - expect(subject.can_resolve?(current_user)).to be true - end - end - - context "when the signed in user can push to the project" do - before do - subject.project.team << [current_user, :master] - end - - it "returns true" do - expect(subject.can_resolve?(current_user)).to be true - end - end - - context "when the signed in user is a random user" do - it "returns false" do - expect(subject.can_resolve?(current_user)).to be false - end - end - end - end - end - - describe "#resolve!" 
do - let(:current_user) { create(:user) } - - context "when not resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(false) - end - - it "returns nil" do - expect(subject.resolve!(current_user)).to be_nil - end - - it "doesn't set resolved_at" do - subject.resolve!(current_user) - - expect(subject.resolved_at).to be_nil - end - - it "doesn't set resolved_by" do - subject.resolve!(current_user) - - expect(subject.resolved_by).to be_nil - end - - it "doesn't mark as resolved" do - subject.resolve!(current_user) - - expect(subject.resolved?).to be false - end - end - - context "when resolvable" do - let(:user) { create(:user) } - let(:second_note) { create(:diff_note_on_commit) } # unresolvable - - before do - allow(subject).to receive(:resolvable?).and_return(true) - end - - context "when all resolvable notes are resolved" do - before do - first_note.resolve!(user) - third_note.resolve!(user) - - first_note.reload - third_note.reload - end - - it "doesn't change resolved_at on the resolved notes" do - expect(first_note.resolved_at).not_to be_nil - expect(third_note.resolved_at).not_to be_nil - - expect { subject.resolve!(current_user) }.not_to change { first_note.resolved_at } - expect { subject.resolve!(current_user) }.not_to change { third_note.resolved_at } - end - - it "doesn't change resolved_by on the resolved notes" do - expect(first_note.resolved_by).to eq(user) - expect(third_note.resolved_by).to eq(user) - - expect { subject.resolve!(current_user) }.not_to change { first_note.resolved_by } - expect { subject.resolve!(current_user) }.not_to change { third_note.resolved_by } - end - - it "doesn't change the resolved state on the resolved notes" do - expect(first_note.resolved?).to be true - expect(third_note.resolved?).to be true - - expect { subject.resolve!(current_user) }.not_to change { first_note.resolved? } - expect { subject.resolve!(current_user) }.not_to change { third_note.resolved? } - end - - it "doesn't change resolved_at" do - expect(subject.resolved_at).not_to be_nil - - expect { subject.resolve!(current_user) }.not_to change { subject.resolved_at } - end - - it "doesn't change resolved_by" do - expect(subject.resolved_by).to eq(user) - - expect { subject.resolve!(current_user) }.not_to change { subject.resolved_by } - end - - it "doesn't change resolved state" do - expect(subject.resolved?).to be true - - expect { subject.resolve!(current_user) }.not_to change { subject.resolved? } - end - end - - context "when some resolvable notes are resolved" do - before do - first_note.resolve!(user) - end - - it "doesn't change resolved_at on the resolved note" do - expect(first_note.resolved_at).not_to be_nil - - expect { subject.resolve!(current_user) }. - not_to change { first_note.reload.resolved_at } - end - - it "doesn't change resolved_by on the resolved note" do - expect(first_note.resolved_by).to eq(user) - - expect { subject.resolve!(current_user) }. - not_to change { first_note.reload && first_note.resolved_by } - end - - it "doesn't change the resolved state on the resolved note" do - expect(first_note.resolved?).to be true - - expect { subject.resolve!(current_user) }. - not_to change { first_note.reload && first_note.resolved? 
} - end - - it "sets resolved_at on the unresolved note" do - subject.resolve!(current_user) - third_note.reload - - expect(third_note.resolved_at).not_to be_nil - end - - it "sets resolved_by on the unresolved note" do - subject.resolve!(current_user) - third_note.reload - - expect(third_note.resolved_by).to eq(current_user) - end - - it "marks the unresolved note as resolved" do - subject.resolve!(current_user) - third_note.reload - - expect(third_note.resolved?).to be true - end - - it "sets resolved_at" do - subject.resolve!(current_user) - - expect(subject.resolved_at).not_to be_nil - end - - it "sets resolved_by" do - subject.resolve!(current_user) - - expect(subject.resolved_by).to eq(current_user) - end - - it "marks as resolved" do - subject.resolve!(current_user) - - expect(subject.resolved?).to be true - end - end - - context "when no resolvable notes are resolved" do - it "sets resolved_at on the unresolved notes" do - subject.resolve!(current_user) - first_note.reload - third_note.reload - - expect(first_note.resolved_at).not_to be_nil - expect(third_note.resolved_at).not_to be_nil - end - - it "sets resolved_by on the unresolved notes" do - subject.resolve!(current_user) - first_note.reload - third_note.reload - - expect(first_note.resolved_by).to eq(current_user) - expect(third_note.resolved_by).to eq(current_user) - end - - it "marks the unresolved notes as resolved" do - subject.resolve!(current_user) - first_note.reload - third_note.reload - - expect(first_note.resolved?).to be true - expect(third_note.resolved?).to be true - end - - it "sets resolved_at" do - subject.resolve!(current_user) - first_note.reload - third_note.reload - - expect(subject.resolved_at).not_to be_nil - end - - it "sets resolved_by" do - subject.resolve!(current_user) - first_note.reload - third_note.reload - - expect(subject.resolved_by).to eq(current_user) - end - - it "marks as resolved" do - subject.resolve!(current_user) - first_note.reload - third_note.reload - - expect(subject.resolved?).to be true - end - end - end - end - - describe "#unresolve!" do - context "when not resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(false) - end - - it "returns nil" do - expect(subject.unresolve!).to be_nil - end - end - - context "when resolvable" do - let(:user) { create(:user) } - - before do - allow(subject).to receive(:resolvable?).and_return(true) - - allow(first_note).to receive(:resolvable?).and_return(true) - allow(second_note).to receive(:resolvable?).and_return(false) - allow(third_note).to receive(:resolvable?).and_return(true) - end - - context "when all resolvable notes are resolved" do - before do - first_note.resolve!(user) - third_note.resolve!(user) - end - - it "unsets resolved_at on the resolved notes" do - subject.unresolve! - first_note.reload - third_note.reload - - expect(first_note.resolved_at).to be_nil - expect(third_note.resolved_at).to be_nil - end - - it "unsets resolved_by on the resolved notes" do - subject.unresolve! - first_note.reload - third_note.reload - - expect(first_note.resolved_by).to be_nil - expect(third_note.resolved_by).to be_nil - end - - it "unmarks the resolved notes as resolved" do - subject.unresolve! - first_note.reload - third_note.reload - - expect(first_note.resolved?).to be false - expect(third_note.resolved?).to be false - end - - it "unsets resolved_at" do - subject.unresolve! - first_note.reload - third_note.reload - - expect(subject.resolved_at).to be_nil - end - - it "unsets resolved_by" do - subject.unresolve! 
- first_note.reload - third_note.reload - - expect(subject.resolved_by).to be_nil - end - - it "unmarks as resolved" do - subject.unresolve! - - expect(subject.resolved?).to be false - end - end - - context "when some resolvable notes are resolved" do - before do - first_note.resolve!(user) - end - - it "unsets resolved_at on the resolved note" do - subject.unresolve! - - expect(subject.first_note.resolved_at).to be_nil - end - - it "unsets resolved_by on the resolved note" do - subject.unresolve! - - expect(subject.first_note.resolved_by).to be_nil - end - - it "unmarks the resolved note as resolved" do - subject.unresolve! - - expect(subject.first_note.resolved?).to be false - end - end + describe '.build' do + it 'returns a discussion of the right type' do + discussion = described_class.build([first_note, second_note], merge_request) + expect(discussion).to be_a(DiffDiscussion) + expect(discussion.notes.count).to be(2) + expect(discussion.first_note).to be(first_note) + expect(discussion.noteable).to be(merge_request) end end - describe "#first_note_to_resolve" do - it "returns the first not that still needs to be resolved" do - allow(first_note).to receive(:to_be_resolved?).and_return(false) - allow(second_note).to receive(:to_be_resolved?).and_return(true) - - expect(subject.first_note_to_resolve).to eq(second_note) - end - end - - describe "#collapsed?" do - context "when a diff discussion" do - before do - allow(subject).to receive(:diff_discussion?).and_return(true) - end - - context "when resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(true) - end - - context "when resolved" do - before do - allow(subject).to receive(:resolved?).and_return(true) - end - - it "returns true" do - expect(subject.collapsed?).to be true - end - end - - context "when not resolved" do - before do - allow(subject).to receive(:resolved?).and_return(false) - end - - it "returns false" do - expect(subject.collapsed?).to be false - end - end - end - - context "when not resolvable" do - before do - allow(subject).to receive(:resolvable?).and_return(false) - end - - context "when active" do - before do - allow(subject).to receive(:active?).and_return(true) - end - - it "returns false" do - expect(subject.collapsed?).to be false - end - end - - context "when outdated" do - before do - allow(subject).to receive(:active?).and_return(false) - end - - it "returns true" do - expect(subject.collapsed?).to be true - end - end - end - end - - context "when not a diff discussion" do - before do - allow(subject).to receive(:diff_discussion?).and_return(false) - end - - it "returns false" do - expect(subject.collapsed?).to be false - end - end - end - - describe "#truncated_diff_lines" do - let(:truncated_lines) { subject.truncated_diff_lines } - - context "when diff is greater than allowed number of truncated diff lines " do - it "returns fewer lines" do - expect(subject.diff_lines.count).to be > described_class::NUMBER_OF_TRUNCATED_DIFF_LINES - - expect(truncated_lines.count).to be <= described_class::NUMBER_OF_TRUNCATED_DIFF_LINES - end - end - - context "when some diff lines are meta" do - it "returns no meta lines" do - expect(subject.diff_lines).to include(be_meta) - expect(truncated_lines).not_to include(be_meta) - end + describe '.build_collection' do + it 'returns an array of discussions of the right type' do + discussions = described_class.build_collection([first_note, second_note, third_note], merge_request) + expect(discussions).to eq([ + DiffDiscussion.new([first_note, 
second_note], merge_request), + DiffDiscussion.new([third_note], merge_request) + ]) end end end diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb index 9f0e7fbbe26..12519de8636 100644 --- a/spec/models/environment_spec.rb +++ b/spec/models/environment_spec.rb @@ -100,13 +100,28 @@ describe Environment, models: true do let(:head_commit) { project.commit } let(:commit) { project.commit.parent } - it 'returns deployment id for the environment' do - expect(environment.first_deployment_for(commit)).to eq deployment1 - end + context 'Gitaly find_ref_name feature disabled' do + it 'returns deployment id for the environment' do + expect(environment.first_deployment_for(commit)).to eq deployment1 + end - it 'return nil when no deployment is found' do - expect(environment.first_deployment_for(head_commit)).to eq nil + it 'return nil when no deployment is found' do + expect(environment.first_deployment_for(head_commit)).to eq nil + end end + + # TODO: Uncomment when feature is reenabled + # context 'Gitaly find_ref_name feature enabled' do + # before do + # allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:find_ref_name).and_return(true) + # end + # + # it 'calls GitalyClient' do + # expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:find_ref_name) + # + # environment.first_deployment_for(commit) + # end + # end end describe '#environment_type' do @@ -191,25 +206,52 @@ describe Environment, models: true do end context 'when matching action is defined' do - let(:build) { create(:ci_build) } - let!(:deployment) { create(:deployment, environment: environment, deployable: build, on_stop: 'close_app') } + let(:pipeline) { create(:ci_pipeline, project: project) } + let(:build) { create(:ci_build, pipeline: pipeline) } - context 'when action did not yet finish' do - let!(:close_action) { create(:ci_build, :manual, pipeline: build.pipeline, name: 'close_app') } + let!(:deployment) do + create(:deployment, environment: environment, + deployable: build, + on_stop: 'close_app') + end - it 'returns the same action' do - expect(subject).to eq(close_action) - expect(subject.user).to eq(user) + context 'when user is not allowed to stop environment' do + let!(:close_action) do + create(:ci_build, :manual, pipeline: pipeline, name: 'close_app') + end + + it 'raises an exception' do + expect { subject }.to raise_error(Gitlab::Access::AccessDeniedError) end end - context 'if action did finish' do - let!(:close_action) { create(:ci_build, :manual, :success, pipeline: build.pipeline, name: 'close_app') } + context 'when user is allowed to stop environment' do + before do + project.add_master(user) + end + + context 'when action did not yet finish' do + let!(:close_action) do + create(:ci_build, :manual, pipeline: pipeline, name: 'close_app') + end + + it 'returns the same action' do + expect(subject).to eq(close_action) + expect(subject.user).to eq(user) + end + end - it 'returns a new action of the same type' do - is_expected.to be_persisted - expect(subject.name).to eq(close_action.name) - expect(subject.user).to eq(user) + context 'if action did finish' do + let!(:close_action) do + create(:ci_build, :manual, :success, + pipeline: pipeline, name: 'close_app') + end + + it 'returns a new action of the same type' do + expect(subject).to be_persisted + expect(subject.name).to eq(close_action.name) + expect(subject.user).to eq(user) + end end end end @@ -351,7 +393,7 @@ describe Environment, models: true do it 'returns the metrics from the deployment service' do 
expect(project.monitoring_service) - .to receive(:metrics).with(environment) + .to receive(:environment_metrics).with(environment) .and_return(:fake_metrics) is_expected.to eq(:fake_metrics) @@ -396,7 +438,7 @@ describe Environment, models: true do "foo**bar" => "foo-bar" + SUFFIX, "*-foo" => "env-foo" + SUFFIX, "staging-12345678-" => "staging-12345678" + SUFFIX, - "staging-12345678-01234567" => "staging-12345678" + SUFFIX, + "staging-12345678-01234567" => "staging-12345678" + SUFFIX }.each do |name, matcher| it "returns a slug matching #{matcher}, given #{name}" do slug = described_class.new(name: name).generate_slug diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb index 8c90a538f57..b8cb967c4cc 100644 --- a/spec/models/event_spec.rb +++ b/spec/models/event_spec.rb @@ -15,13 +15,39 @@ describe Event, models: true do end describe 'Callbacks' do - describe 'after_create :reset_project_activity' do - let(:project) { create(:empty_project) } + let(:project) { create(:empty_project) } + describe 'after_create :reset_project_activity' do it 'calls the reset_project_activity method' do expect_any_instance_of(described_class).to receive(:reset_project_activity) - create_event(project, project.owner) + create_push_event(project, project.owner) + end + end + + describe 'after_create :set_last_repository_updated_at' do + context 'with a push event' do + it 'updates the project last_repository_updated_at' do + project.update(last_repository_updated_at: 1.year.ago) + + create_push_event(project, project.owner) + + project.reload + + expect(project.last_repository_updated_at).to be_within(1.minute).of(Time.now) + end + end + + context 'without a push event' do + it 'does not update the project last_repository_updated_at' do + project.update(last_repository_updated_at: 1.year.ago) + + create(:closed_issue_event, project: project, author: project.owner) + + project.reload + + expect(project.last_repository_updated_at).to be_within(1.minute).of(1.year.ago) + end end end end @@ -29,7 +55,7 @@ describe Event, models: true do describe "Push event" do let(:project) { create(:empty_project, :private) } let(:user) { project.owner } - let(:event) { create_event(project, user) } + let(:event) { create_push_event(project, user) } it do expect(event.push?).to be_truthy @@ -92,8 +118,8 @@ describe Event, models: true do let(:author) { create(:author) } let(:assignee) { create(:user) } let(:admin) { create(:admin) } - let(:issue) { create(:issue, project: project, author: author, assignee: assignee) } - let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignee: assignee) } + let(:issue) { create(:issue, project: project, author: author, assignees: [assignee]) } + let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee]) } let(:note_on_commit) { create(:note_on_commit, project: project) } let(:note_on_issue) { create(:note_on_issue, noteable: issue, project: project) } let(:note_on_confidential_issue) { create(:note_on_issue, noteable: confidential_issue, project: project) } @@ -243,7 +269,7 @@ describe Event, models: true do expect(project).not_to receive(:update_column). 
with(:last_activity_at, a_kind_of(Time)) - create_event(project, project.owner) + create_push_event(project, project.owner) end end @@ -251,11 +277,11 @@ describe Event, models: true do it 'updates the project' do project.update(last_activity_at: 1.year.ago) - create_event(project, project.owner) + create_push_event(project, project.owner) project.reload - project.last_activity_at <= 1.minute.ago + expect(project.last_activity_at).to be_within(1.minute).of(Time.now) end end end @@ -278,7 +304,7 @@ describe Event, models: true do end end - def create_event(project, user, attrs = {}) + def create_push_event(project, user, attrs = {}) data = { before: Gitlab::Git::BLANK_SHA, after: "0220c11b9a3e6c69dc8fd35321254ca9a7b98f7e", diff --git a/spec/models/global_milestone_spec.rb b/spec/models/global_milestone_spec.rb index 55b87d1c48a..a14efda3eda 100644 --- a/spec/models/global_milestone_spec.rb +++ b/spec/models/global_milestone_spec.rb @@ -137,7 +137,7 @@ describe GlobalMilestone, models: true do [ milestone1_project1, milestone1_project2, - milestone1_project3, + milestone1_project3 ] milestones_relation = Milestone.where(id: milestones.map(&:id)) diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb index 5d87938235a..6ca1eb0374d 100644 --- a/spec/models/group_spec.rb +++ b/spec/models/group_spec.rb @@ -55,6 +55,34 @@ describe Group, models: true do it { is_expected.to validate_uniqueness_of(:name).scoped_to(:parent_id) } it { is_expected.to validate_presence_of :path } it { is_expected.not_to validate_presence_of :owner } + it { is_expected.to validate_presence_of :two_factor_grace_period } + it { is_expected.to validate_numericality_of(:two_factor_grace_period).is_greater_than_or_equal_to(0) } + + describe 'path validation' do + it 'rejects paths reserved on the root namespace when the group has no parent' do + group = build(:group, path: 'api') + + expect(group).not_to be_valid + end + + it 'allows root paths when the group has a parent' do + group = build(:group, path: 'api', parent: create(:group)) + + expect(group).to be_valid + end + + it 'rejects any wildcard paths when not a top level group' do + group = build(:group, path: 'tree', parent: create(:group)) + + expect(group).not_to be_valid + end + + it 'rejects reserved group paths' do + group = build(:group, path: 'activity', parent: create(:group)) + + expect(group).not_to be_valid + end + end end describe '.visible_to_user' do @@ -147,6 +175,26 @@ describe Group, models: true do end end + describe '#avatar_url' do + let!(:group) { create(:group, :access_requestable, :with_avatar) } + let(:user) { create(:user) } + let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" } + let(:avatar_path) { "/uploads/group/avatar/#{group.id}/dk.png" } + + context 'when avatar file is uploaded' do + before { group.add_master(user) } + + it 'shows correct avatar url' do + expect(group.avatar_url).to eq(avatar_path) + expect(group.avatar_url(only_path: false)).to eq([gitlab_host, avatar_path].join) + + allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host) + + expect(group.avatar_url).to eq([gitlab_host, avatar_path].join) + end + end + end + describe '.search' do it 'returns groups with a matching name' do expect(described_class.search(group.name)).to eq([group]) @@ -315,4 +363,44 @@ describe Group, models: true do to include(master.id, developer.id) end end + + describe '#update_two_factor_requirement' do + let(:user) { create(:user) } + + before do + group.add_user(user, GroupMember::OWNER) + end + + it 'is 
called when require_two_factor_authentication is changed' do + expect_any_instance_of(User).to receive(:update_two_factor_requirement) + + group.update!(require_two_factor_authentication: true) + end + + it 'is called when two_factor_grace_period is changed' do + expect_any_instance_of(User).to receive(:update_two_factor_requirement) + + group.update!(two_factor_grace_period: 23) + end + + it 'is not called when other attributes are changed' do + expect_any_instance_of(User).not_to receive(:update_two_factor_requirement) + + group.update!(description: 'foobar') + end + + it 'calls #update_two_factor_requirement on each group member' do + other_user = create(:user) + group.add_user(other_user, GroupMember::OWNER) + + calls = 0 + allow_any_instance_of(User).to receive(:update_two_factor_requirement) do + calls += 1 + end + + group.update!(require_two_factor_authentication: true, two_factor_grace_period: 23) + + expect(calls).to eq 2 + end + end end diff --git a/spec/models/hooks/system_hook_spec.rb b/spec/models/hooks/system_hook_spec.rb index 8acec805584..4340170888d 100644 --- a/spec/models/hooks/system_hook_spec.rb +++ b/spec/models/hooks/system_hook_spec.rb @@ -1,6 +1,19 @@ require "spec_helper" describe SystemHook, models: true do + context 'default attributes' do + let(:system_hook) { build(:system_hook) } + + it 'sets defined default parameters' do + attrs = { + push_events: false, + repository_update_events: true, + enable_ssl_verification: true + } + expect(system_hook).to have_attributes(attrs) + end + end + describe "execute" do let(:system_hook) { create(:system_hook) } let(:user) { create(:user) } @@ -105,4 +118,12 @@ describe SystemHook, models: true do ).once end end + + describe '.repository_update_hooks' do + it 'returns hooks for repository update events only' do + hook = create(:system_hook, repository_update_events: true) + create(:system_hook, repository_update_events: false) + expect(SystemHook.repository_update_hooks).to eq([hook]) + end + end end diff --git a/spec/models/issue_collection_spec.rb b/spec/models/issue_collection_spec.rb index d8aed25c041..93c2c538e10 100644 --- a/spec/models/issue_collection_spec.rb +++ b/spec/models/issue_collection_spec.rb @@ -28,7 +28,7 @@ describe IssueCollection do end it 'returns the issues the user is assigned to' do - issue1.assignee = user + issue1.assignees << user expect(collection.updatable_by_user(user)).to eq([issue1]) end diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb index b8584301baa..bb4e70db2e9 100644 --- a/spec/models/issue_spec.rb +++ b/spec/models/issue_spec.rb @@ -3,6 +3,7 @@ require 'spec_helper' describe Issue, models: true do describe "Associations" do it { is_expected.to belong_to(:milestone) } + it { is_expected.to have_many(:assignees) } end describe 'modules' do @@ -37,6 +38,24 @@ describe Issue, models: true do end end + describe '#card_attributes' do + it 'includes the author name' do + allow(subject).to receive(:author).and_return(double(name: 'Robert')) + allow(subject).to receive(:assignees).and_return([]) + + expect(subject.card_attributes). + to eq({ 'Author' => 'Robert', 'Assignee' => '' }) + end + + it 'includes the assignee name' do + allow(subject).to receive(:author).and_return(double(name: 'Robert')) + allow(subject).to receive(:assignees).and_return([double(name: 'Douwe')]) + + expect(subject.card_attributes). 
+ to eq({ 'Author' => 'Robert', 'Assignee' => 'Douwe' }) + end + end + describe '#closed_at' do after do Timecop.return @@ -51,14 +70,6 @@ describe Issue, models: true do expect(issue.closed_at).to eq(now) end - - it 'sets closed_at to nil when issue is reopened' do - issue = create(:issue, state: 'closed') - - issue.reopen - - expect(issue.closed_at).to be_nil - end end describe '#to_reference' do @@ -132,22 +143,24 @@ describe Issue, models: true do end end - describe '#is_being_reassigned?' do - it 'returns true if the issue assignee has changed' do - subject.assignee = create(:user) - expect(subject.is_being_reassigned?).to be_truthy - end - it 'returns false if the issue assignee has not changed' do - expect(subject.is_being_reassigned?).to be_falsey + describe '#assignee_or_author?' do + let(:user) { create(:user) } + let(:issue) { create(:issue) } + + it 'returns true for a user that is assigned to an issue' do + issue.assignees << user + + expect(issue.assignee_or_author?(user)).to be_truthy end - end - describe '#is_being_reassigned?' do - it 'returns issues assigned to user' do - user = create(:user) - create_list(:issue, 2, assignee: user) + it 'returns true for a user that is the author of an issue' do + issue.update(author: user) - expect(Issue.open_for(user).count).to eq 2 + expect(issue.assignee_or_author?(user)).to be_truthy + end + + it 'returns false for a user that is not the assignee or author' do + expect(issue.assignee_or_author?(user)).to be_falsey end end @@ -308,6 +321,27 @@ describe Issue, models: true do end end + describe '#has_related_branch?' do + let(:issue) { create(:issue, title: "Blue Bell Knoll") } + subject { issue.has_related_branch? } + + context 'branch found' do + before do + allow(issue.project.repository).to receive(:branch_names).and_return(["iceblink-luck", issue.to_branch_name]) + end + + it { is_expected.to eq true } + end + + context 'branch not found' do + before do + allow(issue.project.repository).to receive(:branch_names).and_return(["lazy-calm"]) + end + + it { is_expected.to eq false } + end + end + it_behaves_like 'an editable mentionable' do subject { create(:issue, project: create(:project, :repository)) } @@ -378,12 +412,15 @@ describe Issue, models: true do it 'updates when assignees change' do user1 = create(:user) user2 = create(:user) - issue = create(:issue, assignee: user1) + project = create(:empty_project) + issue = create(:issue, assignees: [user1], project: project) + project.add_developer(user1) + project.add_developer(user2) expect(user1.assigned_open_issues_count).to eq(1) expect(user2.assigned_open_issues_count).to eq(0) - issue.assignee = user2 + issue.assignees = [user2] issue.save expect(user1.assigned_open_issues_count).to eq(0) @@ -669,6 +706,11 @@ describe Issue, models: true do expect(attrs_hash).to include(:human_total_time_spent) expect(attrs_hash).to include('time_estimate') end + + it 'includes assignee_ids and deprecated assignee_id' do + expect(attrs_hash).to include(:assignee_id) + expect(attrs_hash).to include(:assignee_ids) + end end describe '#check_for_spam' do diff --git a/spec/models/label_spec.rb b/spec/models/label_spec.rb index a9139f7d4ab..80ca19acdda 100644 --- a/spec/models/label_spec.rb +++ b/spec/models/label_spec.rb @@ -42,11 +42,27 @@ describe Label, models: true do end end + describe '#color' do + it 'strips color' do + label = described_class.new(color: ' #abcdef ') + label.valid? 
+ + expect(label.color).to eq('#abcdef') + end + end + describe '#title' do it 'sanitizes title' do label = described_class.new(title: '<b>foo & bar?</b>') expect(label.title).to eq('foo & bar?') end + + it 'strips title' do + label = described_class.new(title: ' label ') + label.valid? + + expect(label.title).to eq('label') + end end describe 'priorization' do diff --git a/spec/models/legacy_diff_discussion_spec.rb b/spec/models/legacy_diff_discussion_spec.rb new file mode 100644 index 00000000000..6eb4a2aaf39 --- /dev/null +++ b/spec/models/legacy_diff_discussion_spec.rb @@ -0,0 +1,33 @@ +require 'spec_helper' + +describe LegacyDiffDiscussion, models: true do + subject { create(:legacy_diff_note_on_merge_request).to_discussion } + + describe '#reply_attributes' do + it 'includes line_code' do + expect(subject.reply_attributes[:line_code]).to eq(subject.line_code) + end + end + + describe '#merge_request_version_params' do + context 'when the discussion is active' do + before do + allow(subject).to receive(:active?).and_return(true) + end + + it 'returns an empty hash, which will end up showing the latest version' do + expect(subject.merge_request_version_params).to eq({}) + end + end + + context 'when the discussion is outdated' do + before do + allow(subject).to receive(:active?).and_return(false) + end + + it 'returns nil' do + expect(subject.merge_request_version_params).to be_nil + end + end + end +end diff --git a/spec/models/legacy_diff_note_spec.rb b/spec/models/legacy_diff_note_spec.rb deleted file mode 100644 index 81517a18b74..00000000000 --- a/spec/models/legacy_diff_note_spec.rb +++ /dev/null @@ -1,101 +0,0 @@ -require 'spec_helper' - -describe LegacyDiffNote, models: true do - describe "Commit diff line notes" do - let!(:note) { create(:legacy_diff_note_on_commit, note: "+1 from me") } - let!(:commit) { note.noteable } - - it "saves a valid note" do - expect(note.commit_id).to eq(commit.id) - expect(note.noteable.id).to eq(commit.id) - end - - it "is recognized by #legacy_diff_note?" do - expect(note).to be_legacy_diff_note - end - end - - describe '#active?' do - it 'is always true when the note has no associated diff line' do - note = build(:legacy_diff_note_on_merge_request) - - expect(note).to receive(:diff_line).and_return(nil) - - expect(note).to be_active - end - - it 'is never true when the note has no noteable associated' do - note = build(:legacy_diff_note_on_merge_request) - - expect(note).to receive(:diff_line).and_return(double) - expect(note).to receive(:noteable).and_return(nil) - - expect(note).not_to be_active - end - - it 'returns the memoized value if defined' do - note = build(:legacy_diff_note_on_merge_request) - - note.instance_variable_set(:@active, 'foo') - expect(note).not_to receive(:find_noteable_diff) - - expect(note.active?).to eq 'foo' - end - - context 'for a merge request noteable' do - it 'is false when noteable has no matching diff' do - merge = build_stubbed(:merge_request, :simple) - note = build(:legacy_diff_note_on_merge_request, noteable: merge) - - allow(note).to receive(:diff_line).and_return(double) - expect(note).to receive(:find_noteable_diff).and_return(nil) - - expect(note).not_to be_active - end - - it 'is true when noteable has a matching diff' do - merge = create(:merge_request, :simple) - - # Generate a real line_code value so we know it will match. We use a - # random line from a random diff just for funsies. 
- diff = merge.raw_diffs.to_a.sample - line = Gitlab::Diff::Parser.new.parse(diff.diff.each_line).to_a.sample - code = Gitlab::Diff::LineCode.generate(diff.new_path, line.new_pos, line.old_pos) - - # We're persisting in order to trigger the set_diff callback - note = create(:legacy_diff_note_on_merge_request, noteable: merge, - line_code: code, - project: merge.source_project) - - # Make sure we don't get a false positive from a guard clause - expect(note).to receive(:find_noteable_diff).and_call_original - expect(note).to be_active - end - end - end - - describe "#discussion_id" do - let(:note) { create(:note) } - - context "when it is newly created" do - it "has a discussion id" do - expect(note.discussion_id).not_to be_nil - expect(note.discussion_id).to match(/\A\h{40}\z/) - end - end - - context "when it didn't store a discussion id before" do - before do - note.update_column(:discussion_id, nil) - end - - it "has a discussion id" do - # The discussion_id is set in `after_initialize`, so `reload` won't work - reloaded_note = Note.find(note.id) - - expect(reloaded_note.discussion_id).not_to be_nil - expect(reloaded_note.discussion_id).to match(/\A\h{40}\z/) - end - end - end -end diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb index c720cc9f2c2..ccc3deac199 100644 --- a/spec/models/member_spec.rb +++ b/spec/models/member_spec.rb @@ -386,6 +386,33 @@ describe Member, models: true do end end + describe '.add_users' do + %w[project group].each do |source_type| + context "when source is a #{source_type}" do + let!(:source) { create(source_type, :public, :access_requestable) } + let!(:admin) { create(:admin) } + let(:user1) { create(:user) } + let(:user2) { create(:user) } + + it 'returns a <Source>Member objects' do + members = described_class.add_users(source, [user1, user2], :master) + + expect(members).to be_a Array + expect(members.size).to eq(2) + expect(members.first).to be_a "#{source_type.classify}Member".constantize + expect(members.first).to be_persisted + end + + it 'returns an empty array' do + members = described_class.add_users(source, [], :master) + + expect(members).to be_a Array + expect(members).to be_empty + end + end + end + end + describe '#accept_request' do let(:member) { create(:project_member, requested_at: Time.now.utc) } diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb index 370aeb9e0a9..17765b25856 100644 --- a/spec/models/members/group_member_spec.rb +++ b/spec/models/members/group_member_spec.rb @@ -13,12 +13,12 @@ describe GroupMember, models: true do end end - describe '.add_users_to_group' do + describe '.add_users' do it 'adds the given users to the given group' do group = create(:group) users = create_list(:user, 2) - described_class.add_users_to_group( + described_class.add_users( group, [users.first.id, users.second], described_class::MASTER @@ -61,7 +61,7 @@ describe GroupMember, models: true do describe '#after_accept_request' do it 'calls NotificationService.accept_group_access_request' do - member = create(:group_member, user: build_stubbed(:user), requested_at: Time.now) + member = create(:group_member, user: build(:user), requested_at: Time.now) expect_any_instance_of(NotificationService).to receive(:new_group_member) @@ -75,4 +75,19 @@ describe GroupMember, models: true do it { is_expected.to eq 'Group' } end end + + describe '#update_two_factor_requirement' do + let(:user) { build :user } + let(:group_member) { build :group_member, user: user } + + it 'is called after creation 
and deletion' do + expect(user).to receive(:update_two_factor_requirement) + + group_member.save + + expect(user).to receive(:update_two_factor_requirement) + + group_member.destroy + end + end end diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb index 24e7c1b17d9..ce870fcc1d3 100644 --- a/spec/models/merge_request_spec.rb +++ b/spec/models/merge_request_spec.rb @@ -9,6 +9,7 @@ describe MergeRequest, models: true do it { is_expected.to belong_to(:target_project).class_name('Project') } it { is_expected.to belong_to(:source_project).class_name('Project') } it { is_expected.to belong_to(:merge_user).class_name("User") } + it { is_expected.to belong_to(:assignee) } it { is_expected.to have_many(:merge_request_diffs).dependent(:destroy) } end @@ -86,6 +87,44 @@ describe MergeRequest, models: true do end end + describe '#card_attributes' do + it 'includes the author name' do + allow(subject).to receive(:author).and_return(double(name: 'Robert')) + allow(subject).to receive(:assignee).and_return(nil) + + expect(subject.card_attributes). + to eq({ 'Author' => 'Robert', 'Assignee' => nil }) + end + + it 'includes the assignee name' do + allow(subject).to receive(:author).and_return(double(name: 'Robert')) + allow(subject).to receive(:assignee).and_return(double(name: 'Douwe')) + + expect(subject.card_attributes). + to eq({ 'Author' => 'Robert', 'Assignee' => 'Douwe' }) + end + end + + describe '#assignee_or_author?' do + let(:user) { create(:user) } + + it 'returns true for a user that is assigned to a merge request' do + subject.assignee = user + + expect(subject.assignee_or_author?(user)).to eq(true) + end + + it 'returns true for a user that is the author of a merge request' do + subject.author = user + + expect(subject.assignee_or_author?(user)).to eq(true) + end + + it 'returns false for a user that is not the assignee or author' do + expect(subject.assignee_or_author?(user)).to eq(false) + end + end + describe '#cache_merge_request_closes_issues!' 
do before do subject.project.team << [subject.author, :developer] @@ -199,10 +238,10 @@ describe MergeRequest, models: true do end context 'when there are no MR diffs' do - it 'delegates to the compare object' do + it 'delegates to the compare object, setting no_collapse: true' do merge_request.compare = double(:compare) - expect(merge_request.compare).to receive(:diffs).with(options) + expect(merge_request.compare).to receive(:diffs).with(options.merge(no_collapse: true)) merge_request.diffs(options) end @@ -215,15 +254,22 @@ describe MergeRequest, models: true do end context 'when there are MR diffs' do - before do + it 'returns the correct count' do merge_request.save + + expect(merge_request.diff_size).to eq('105') end - it 'returns the correct count' do - expect(merge_request.diff_size).to eq(105) + it 'returns the correct overflow count' do + allow(Commit).to receive(:max_diff_options).and_return(max_files: 2) + merge_request.save + + expect(merge_request.diff_size).to eq('2+') end it 'does not perform highlighting' do + merge_request.save + expect(Gitlab::Diff::Highlight).not_to receive(:new) merge_request.diff_size @@ -231,7 +277,7 @@ describe MergeRequest, models: true do end context 'when there are no MR diffs' do - before do + def set_compare(merge_request) merge_request.compare = CompareService.new( merge_request.source_project, merge_request.source_branch @@ -242,10 +288,21 @@ describe MergeRequest, models: true do end it 'returns the correct count' do - expect(merge_request.diff_size).to eq(105) + set_compare(merge_request) + + expect(merge_request.diff_size).to eq('105') + end + + it 'returns the correct overflow count' do + allow(Commit).to receive(:max_diff_options).and_return(max_files: 2) + set_compare(merge_request) + + expect(merge_request.diff_size).to eq('2+') end it 'does not perform highlighting' do + set_compare(merge_request) + expect(Gitlab::Diff::Highlight).not_to receive(:new) merge_request.diff_size @@ -277,16 +334,6 @@ describe MergeRequest, models: true do end end - describe '#is_being_reassigned?' do - it 'returns true if the merge_request assignee has changed' do - subject.assignee = create(:user) - expect(subject.is_being_reassigned?).to be_truthy - end - it 'returns false if the merge request assignee has not changed' do - expect(subject.is_being_reassigned?).to be_falsey - end - end - describe '#for_fork?' do it 'returns true if the merge request is for a fork' do subject.source_project = build_stubbed(:empty_project, namespace: create(:group)) @@ -441,7 +488,7 @@ describe MergeRequest, models: true do end it "can't be removed when its a protected branch" do - allow(subject.source_project).to receive(:protected_branch?).and_return(true) + allow(ProtectedBranch).to receive(:protected?).and_return(true) expect(subject.can_remove_source_branch?(user)).to be_falsey end @@ -671,13 +718,8 @@ describe MergeRequest, models: true do describe '#head_pipeline' do describe 'when the source project exists' do it 'returns the latest pipeline' do - pipeline = double(:ci_pipeline, ref: 'master') - - allow(subject).to receive(:diff_head_sha).and_return('123abc') - - expect(subject.source_project).to receive(:pipeline_for). - with('master', '123abc'). 
- and_return(pipeline) + pipeline = create(:ci_empty_pipeline, project: subject.source_project, ref: 'master', status: 'running', sha: "123abc") + subject.update(head_pipeline: pipeline) expect(subject.head_pipeline).to eq(pipeline) end @@ -820,15 +862,17 @@ describe MergeRequest, models: true do user1 = create(:user) user2 = create(:user) mr = create(:merge_request, assignee: user1) + mr.project.add_developer(user1) + mr.project.add_developer(user2) - expect(user1.assigned_open_merge_request_count).to eq(1) - expect(user2.assigned_open_merge_request_count).to eq(0) + expect(user1.assigned_open_merge_requests_count).to eq(1) + expect(user2.assigned_open_merge_requests_count).to eq(0) mr.assignee = user2 mr.save - expect(user1.assigned_open_merge_request_count).to eq(0) - expect(user2.assigned_open_merge_request_count).to eq(1) + expect(user1.assigned_open_merge_requests_count).to eq(0) + expect(user2.assigned_open_merge_requests_count).to eq(1) end end @@ -1224,247 +1268,6 @@ describe MergeRequest, models: true do end end - context "discussion status" do - let(:first_discussion) { Discussion.new([create(:diff_note_on_merge_request)]) } - let(:second_discussion) { Discussion.new([create(:diff_note_on_merge_request)]) } - let(:third_discussion) { Discussion.new([create(:diff_note_on_merge_request)]) } - - before do - allow(subject).to receive(:diff_discussions).and_return([first_discussion, second_discussion, third_discussion]) - end - - describe '#resolvable_discussions' do - before do - allow(first_discussion).to receive(:to_be_resolved?).and_return(true) - allow(second_discussion).to receive(:to_be_resolved?).and_return(false) - allow(third_discussion).to receive(:to_be_resolved?).and_return(false) - end - - it 'includes only discussions that need to be resolved' do - expect(subject.resolvable_discussions).to eq([first_discussion]) - end - end - - describe '#discussions_can_be_resolved_by? user' do - let(:user) { build(:user) } - - context 'all discussions can be resolved by the user' do - before do - allow(first_discussion).to receive(:can_resolve?).with(user).and_return(true) - allow(second_discussion).to receive(:can_resolve?).with(user).and_return(true) - allow(third_discussion).to receive(:can_resolve?).with(user).and_return(true) - end - - it 'allows a user to resolve the discussions' do - expect(subject.discussions_can_be_resolved_by?(user)).to be(true) - end - end - - context 'one discussion cannot be resolved by the user' do - before do - allow(first_discussion).to receive(:can_resolve?).with(user).and_return(true) - allow(second_discussion).to receive(:can_resolve?).with(user).and_return(true) - allow(third_discussion).to receive(:can_resolve?).with(user).and_return(false) - end - - it 'allows a user to resolve the discussions' do - expect(subject.discussions_can_be_resolved_by?(user)).to be(false) - end - end - end - - describe "#discussions_resolvable?" 
do - context "when all discussions are unresolvable" do - before do - allow(first_discussion).to receive(:resolvable?).and_return(false) - allow(second_discussion).to receive(:resolvable?).and_return(false) - allow(third_discussion).to receive(:resolvable?).and_return(false) - end - - it "returns false" do - expect(subject.discussions_resolvable?).to be false - end - end - - context "when some discussions are unresolvable and some discussions are resolvable" do - before do - allow(first_discussion).to receive(:resolvable?).and_return(true) - allow(second_discussion).to receive(:resolvable?).and_return(false) - allow(third_discussion).to receive(:resolvable?).and_return(true) - end - - it "returns true" do - expect(subject.discussions_resolvable?).to be true - end - end - - context "when all discussions are resolvable" do - before do - allow(first_discussion).to receive(:resolvable?).and_return(true) - allow(second_discussion).to receive(:resolvable?).and_return(true) - allow(third_discussion).to receive(:resolvable?).and_return(true) - end - - it "returns true" do - expect(subject.discussions_resolvable?).to be true - end - end - end - - describe "#discussions_resolved?" do - context "when discussions are not resolvable" do - before do - allow(subject).to receive(:discussions_resolvable?).and_return(false) - end - - it "returns false" do - expect(subject.discussions_resolved?).to be false - end - end - - context "when discussions are resolvable" do - before do - allow(subject).to receive(:discussions_resolvable?).and_return(true) - - allow(first_discussion).to receive(:resolvable?).and_return(true) - allow(second_discussion).to receive(:resolvable?).and_return(false) - allow(third_discussion).to receive(:resolvable?).and_return(true) - end - - context "when all resolvable discussions are resolved" do - before do - allow(first_discussion).to receive(:resolved?).and_return(true) - allow(third_discussion).to receive(:resolved?).and_return(true) - end - - it "returns true" do - expect(subject.discussions_resolved?).to be true - end - end - - context "when some resolvable discussions are not resolved" do - before do - allow(first_discussion).to receive(:resolved?).and_return(true) - allow(third_discussion).to receive(:resolved?).and_return(false) - end - - it "returns false" do - expect(subject.discussions_resolved?).to be false - end - end - end - end - - describe "#discussions_to_be_resolved?" 
do - context "when discussions are not resolvable" do - before do - allow(subject).to receive(:discussions_resolvable?).and_return(false) - end - - it "returns false" do - expect(subject.discussions_to_be_resolved?).to be false - end - end - - context "when discussions are resolvable" do - before do - allow(subject).to receive(:discussions_resolvable?).and_return(true) - - allow(first_discussion).to receive(:resolvable?).and_return(true) - allow(second_discussion).to receive(:resolvable?).and_return(false) - allow(third_discussion).to receive(:resolvable?).and_return(true) - end - - context "when all resolvable discussions are resolved" do - before do - allow(first_discussion).to receive(:resolved?).and_return(true) - allow(third_discussion).to receive(:resolved?).and_return(true) - end - - it "returns false" do - expect(subject.discussions_to_be_resolved?).to be false - end - end - - context "when some resolvable discussions are not resolved" do - before do - allow(first_discussion).to receive(:resolved?).and_return(true) - allow(third_discussion).to receive(:resolved?).and_return(false) - end - - it "returns true" do - expect(subject.discussions_to_be_resolved?).to be true - end - end - end - end - end - - describe '#conflicts_can_be_resolved_in_ui?' do - def create_merge_request(source_branch) - create(:merge_request, source_branch: source_branch, target_branch: 'conflict-start') do |mr| - mr.mark_as_unmergeable - end - end - - it 'returns a falsey value when the MR can be merged without conflicts' do - merge_request = create_merge_request('master') - merge_request.mark_as_mergeable - - expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey - end - - it 'returns a falsey value when the MR is marked as having conflicts, but has none' do - merge_request = create_merge_request('master') - - expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey - end - - it 'returns a falsey value when the MR has a missing ref after a force push' do - merge_request = create_merge_request('conflict-resolvable') - allow(merge_request.conflicts).to receive(:merge_index).and_raise(Rugged::OdbError) - - expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey - end - - it 'returns a falsey value when the MR does not support new diff notes' do - merge_request = create_merge_request('conflict-resolvable') - merge_request.merge_request_diff.update_attributes(start_commit_sha: nil) - - expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey - end - - it 'returns a falsey value when the conflicts contain a large file' do - merge_request = create_merge_request('conflict-too-large') - - expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey - end - - it 'returns a falsey value when the conflicts contain a binary file' do - merge_request = create_merge_request('conflict-binary-file') - - expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey - end - - it 'returns a falsey value when the conflicts contain a file edited in one branch and deleted in another' do - merge_request = create_merge_request('conflict-missing-side') - - expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey - end - - it 'returns a truthy value when the conflicts are resolvable in the UI' do - merge_request = create_merge_request('conflict-resolvable') - - expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_truthy - end - - it 'returns a truthy value when the conflicts have to be resolved in an editor' do - merge_request = 
create_merge_request('conflict-contains-conflict-markers') - - expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_truthy - end - end - describe "#source_project_missing?" do let(:project) { create(:empty_project) } let(:fork_project) { create(:empty_project, forked_from_project: project) } @@ -1589,11 +1392,15 @@ describe MergeRequest, models: true do describe '#mergeable_with_slash_command?' do def create_pipeline(status) - create(:ci_pipeline_with_one_job, + pipeline = create(:ci_pipeline_with_one_job, project: project, ref: merge_request.source_branch, sha: merge_request.diff_head_sha, status: status) + + merge_request.update(head_pipeline: pipeline) + + pipeline end let(:project) { create(:project, :public, :repository, only_allow_merge_if_pipeline_succeeds: true) } @@ -1710,4 +1517,23 @@ describe MergeRequest, models: true do expect(subject.has_no_commits?).to be_truthy end end + + describe '#merge_request_diff_for' do + subject { create(:merge_request, importing: true) } + let!(:merge_request_diff1) { subject.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') } + let!(:merge_request_diff2) { subject.merge_request_diffs.create(head_commit_sha: nil) } + let!(:merge_request_diff3) { subject.merge_request_diffs.create(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') } + + context 'with diff refs' do + it 'returns the diffs' do + expect(subject.merge_request_diff_for(merge_request_diff1.diff_refs)).to eq(merge_request_diff1) + end + end + + context 'with a commit SHA' do + it 'returns the diffs' do + expect(subject.merge_request_diff_for(merge_request_diff3.head_commit_sha)).to eq(merge_request_diff3) + end + end + end end diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb index f3f48f951a8..e3e8e6d571c 100644 --- a/spec/models/milestone_spec.rb +++ b/spec/models/milestone_spec.rb @@ -109,18 +109,6 @@ describe Milestone, models: true do it { expect(milestone.percent_complete(user)).to eq(75) } end - describe '#is_empty?' do - before do - milestone.issues << create(:issue, project: project) - milestone.issues << create(:closed_issue, project: project) - milestone.merge_requests << create(:merge_request) - end - - it { expect(milestone.closed_items_count(user)).to eq(1) } - it { expect(milestone.total_items_count(user)).to eq(3) } - it { expect(milestone.is_empty?(user)).to be_falsey } - end - describe '#can_be_closed?' do it { expect(milestone.can_be_closed?).to be_truthy } end diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb index ccaf0d7abc7..8624616316c 100644 --- a/spec/models/namespace_spec.rb +++ b/spec/models/namespace_spec.rb @@ -34,6 +34,13 @@ describe Namespace, models: true do let(:group) { build(:group, :nested, path: 'tree') } it { expect(group).not_to be_valid } + + it 'rejects nested paths' do + parent = create(:group, :nested, path: 'environments') + namespace = build(:project, path: 'folders', namespace: parent) + + expect(namespace).not_to be_valid + end end context 'top-level group' do @@ -47,6 +54,7 @@ describe Namespace, models: true do describe "Respond to" do it { is_expected.to respond_to(:human_name) } it { is_expected.to respond_to(:to_param) } + it { is_expected.to respond_to(:has_parent?) 
} end describe '#to_param' do @@ -148,42 +156,62 @@ describe Namespace, models: true do expect(@namespace.move_dir).to be_truthy end - context "when any project has container tags" do + context "when any project has container images" do + let(:container_repository) { create(:container_repository) } + before do stub_container_registry_config(enabled: true) - stub_container_registry_tags('tag') + stub_container_registry_tags(repository: :any, tags: ['tag']) - create(:empty_project, namespace: @namespace) + create(:empty_project, namespace: @namespace, container_repositories: [container_repository]) allow(@namespace).to receive(:path_was).and_return(@namespace.path) allow(@namespace).to receive(:path).and_return('new_path') end - it { expect { @namespace.move_dir }.to raise_error('Namespace cannot be moved, because at least one project has tags in container registry') } + it 'raises an error about not movable project' do + expect { @namespace.move_dir }.to raise_error(/Namespace cannot be moved/) + end end - context 'renaming a sub-group' do + context 'with subgroups' do let(:parent) { create(:group, name: 'parent', path: 'parent') } let(:child) { create(:group, name: 'child', path: 'child', parent: parent) } let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child) } - let(:uploads_dir) { File.join(CarrierWave.root, 'uploads', 'parent') } - let(:pages_dir) { File.join(TestEnv.pages_path, 'parent') } + let(:uploads_dir) { File.join(CarrierWave.root, 'uploads') } + let(:pages_dir) { TestEnv.pages_path } before do - FileUtils.mkdir_p(File.join(uploads_dir, 'child', 'the-project')) - FileUtils.mkdir_p(File.join(pages_dir, 'child', 'the-project')) + FileUtils.mkdir_p(File.join(uploads_dir, 'parent', 'child', 'the-project')) + FileUtils.mkdir_p(File.join(pages_dir, 'parent', 'child', 'the-project')) + end + + context 'renaming child' do + it 'correctly moves the repository, uploads and pages' do + expected_repository_path = File.join(TestEnv.repos_path, 'parent', 'renamed', 'the-project.git') + expected_upload_path = File.join(uploads_dir, 'parent', 'renamed', 'the-project') + expected_pages_path = File.join(pages_dir, 'parent', 'renamed', 'the-project') + + child.update_attributes!(path: 'renamed') + + expect(File.directory?(expected_repository_path)).to be(true) + expect(File.directory?(expected_upload_path)).to be(true) + expect(File.directory?(expected_pages_path)).to be(true) + end end - it 'correctly moves the repository, uploads and pages' do - expected_repository_path = File.join(TestEnv.repos_path, 'parent', 'renamed', 'the-project.git') - expected_upload_path = File.join(uploads_dir, 'renamed', 'the-project') - expected_pages_path = File.join(pages_dir, 'renamed', 'the-project') + context 'renaming parent' do + it 'correctly moves the repository, uploads and pages' do + expected_repository_path = File.join(TestEnv.repos_path, 'renamed', 'child', 'the-project.git') + expected_upload_path = File.join(uploads_dir, 'renamed', 'child', 'the-project') + expected_pages_path = File.join(pages_dir, 'renamed', 'child', 'the-project') - child.update_attributes!(path: 'renamed') + parent.update_attributes!(path: 'renamed') - expect(File.directory?(expected_repository_path)).to be(true) - expect(File.directory?(expected_upload_path)).to be(true) - expect(File.directory?(expected_pages_path)).to be(true) + expect(File.directory?(expected_repository_path)).to be(true) + expect(File.directory?(expected_upload_path)).to be(true) + expect(File.directory?(expected_pages_path)).to 
be(true)
+ end
 end
 end
 end
@@ -295,4 +323,13 @@ describe Namespace, models: true do
 to eq([namespace.owner_id])
 end
 end
+
+ describe '#all_projects' do
+ let(:group) { create(:group) }
+ let(:child) { create(:group, parent: group) }
+ let!(:project1) { create(:project_empty_repo, namespace: group) }
+ let!(:project2) { create(:project_empty_repo, namespace: child) }
+
+ it { expect(group.all_projects.to_a).to eq([project2, project1]) }
+ end
 end
diff --git a/spec/models/network/graph_spec.rb b/spec/models/network/graph_spec.rb
index 492c4e01bd8..0fe8a591a45 100644
--- a/spec/models/network/graph_spec.rb
+++ b/spec/models/network/graph_spec.rb
@@ -9,4 +9,40 @@ describe Network::Graph, models: true do
 expect(graph.notes).to eq( { note_on_commit.commit_id => 1 } )
 end
+
+ describe '#commits' do
+ let(:graph) { described_class.new(project, 'refs/heads/master', project.repository.commit, nil) }
+
+ it 'returns a list of commits' do
+ commits = graph.commits
+
+ expect(commits).not_to be_empty
+ expect(commits).to all( be_kind_of(Network::Commit) )
+ end
+
+ it 'sorts the commits by commit date (descending)' do
+ # Remove duplicate timestamps because they make it harder to
+ # assert that the commits are sorted as expected.
+ commits = graph.commits.uniq(&:date)
+ sorted_commits = commits.sort_by(&:date).reverse
+
+ expect(commits).not_to be_empty
+ expect(commits.map(&:id)).to eq(sorted_commits.map(&:id))
+ end
+
+ it 'sorts children before parents for commits with the same timestamp' do
+ commits_by_time = graph.commits.group_by(&:date)
+
+ commits_by_time.each do |time, commits|
+ commit_ids = commits.map(&:id)
+
+ commits.each_with_index do |commit, index|
+ parent_indexes = commit.parent_ids.map { |parent_id| commit_ids.find_index(parent_id) }.compact
+
+ # All parents of the current commit should appear after it
+ expect(parent_indexes).to all( be > index )
+ end
+ end
+ end
+ end
 end
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 33536487c41..7a01cef9b4b 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -245,22 +245,36 @@ describe Note, models: true do
 end
 end
+ describe '.find_discussion' do
+ let!(:note) { create(:discussion_note_on_merge_request) }
+ let!(:note2) { create(:discussion_note_on_merge_request, in_reply_to: note) }
+ let(:merge_request) { note.noteable }
+
+ it 'returns a discussion with multiple notes' do
+ discussion = merge_request.notes.find_discussion(note.discussion_id)
+
+ expect(discussion).not_to be_nil
+ expect(discussion.notes).to match_array([note, note2])
+ expect(discussion.first_note.discussion_id).to eq(note.discussion_id)
+ end
+ end
+
 describe ".grouped_diff_discussions" do
 let!(:merge_request) { create(:merge_request) }
 let(:project) { merge_request.project }
 let!(:active_diff_note1) { create(:diff_note_on_merge_request, project: project, noteable: merge_request) }
- let!(:active_diff_note2) { create(:diff_note_on_merge_request, project: project, noteable: merge_request) }
+ let!(:active_diff_note2) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, in_reply_to: active_diff_note1) }
 let!(:active_diff_note3) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: active_position2) }
 let!(:outdated_diff_note1) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: outdated_position) }
- let!(:outdated_diff_note2) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: 
outdated_position) } + let!(:outdated_diff_note2) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, in_reply_to: outdated_diff_note1) } let(:active_position2) do Gitlab::Diff::Position.new( old_path: "files/ruby/popen.rb", new_path: "files/ruby/popen.rb", - old_line: 16, - new_line: 22, - diff_refs: merge_request.diff_refs + old_line: nil, + new_line: 13, + diff_refs: project.commit(sample_commit.id).diff_refs ) end @@ -274,50 +288,77 @@ describe Note, models: true do ) end - subject { merge_request.notes.grouped_diff_discussions } + context 'active diff discussions' do + subject { merge_request.notes.grouped_diff_discussions } - it "includes active discussions" do - discussions = subject.values + it "includes active discussions" do + discussions = subject.values.flatten - expect(discussions.count).to eq(2) - expect(discussions.map(&:id)).to eq([active_diff_note1.discussion_id, active_diff_note3.discussion_id]) - expect(discussions.all?(&:active?)).to be true + expect(discussions.count).to eq(2) + expect(discussions.map(&:id)).to eq([active_diff_note1.discussion_id, active_diff_note3.discussion_id]) + expect(discussions.all?(&:active?)).to be true - expect(discussions.first.notes).to eq([active_diff_note1, active_diff_note2]) - expect(discussions.last.notes).to eq([active_diff_note3]) - end + expect(discussions.first.notes).to eq([active_diff_note1, active_diff_note2]) + expect(discussions.last.notes).to eq([active_diff_note3]) + end - it "doesn't include outdated discussions" do - expect(subject.values.map(&:id)).not_to include(outdated_diff_note1.discussion_id) - end + it "doesn't include outdated discussions" do + expect(subject.values.flatten.map(&:id)).not_to include(outdated_diff_note1.discussion_id) + end - it "groups the discussions by line code" do - expect(subject[active_diff_note1.line_code].id).to eq(active_diff_note1.discussion_id) - expect(subject[active_diff_note3.line_code].id).to eq(active_diff_note3.discussion_id) + it "groups the discussions by line code" do + expect(subject[active_diff_note1.line_code].first.id).to eq(active_diff_note1.discussion_id) + expect(subject[active_diff_note3.line_code].first.id).to eq(active_diff_note3.discussion_id) + end end - end - describe "#discussion_id" do - let(:note) { create(:note) } + context 'diff discussions for older diff refs' do + subject { merge_request.notes.grouped_diff_discussions(diff_refs) } - context "when it is newly created" do - it "has a discussion id" do - expect(note.discussion_id).not_to be_nil - expect(note.discussion_id).to match(/\A\h{40}\z/) - end - end + context 'for diff refs a discussion was created at' do + let(:diff_refs) { active_position2.diff_refs } - context "when it didn't store a discussion id before" do - before do - note.update_column(:discussion_id, nil) + it "includes discussions that were created then" do + discussions = subject.values.flatten + + expect(discussions.count).to eq(1) + + discussion = discussions.first + + expect(discussion.id).to eq(active_diff_note3.discussion_id) + expect(discussion.active?).to be true + expect(discussion.active?(diff_refs)).to be false + expect(discussion.created_at_diff?(diff_refs)).to be true + + expect(discussion.notes).to eq([active_diff_note3]) + end + + it "groups the discussions by original line code" do + expect(subject[active_diff_note3.original_line_code].first.id).to eq(active_diff_note3.discussion_id) + end end - it "has a discussion id" do - # The discussion_id is set in `after_initialize`, so `reload` won't work - 
reloaded_note = Note.find(note.id) + context 'for diff refs a discussion was last active at' do + let(:diff_refs) { outdated_position.diff_refs } - expect(reloaded_note.discussion_id).not_to be_nil - expect(reloaded_note.discussion_id).to match(/\A\h{40}\z/) + it "includes discussions that were last active" do + discussions = subject.values.flatten + + expect(discussions.count).to eq(1) + + discussion = discussions.first + + expect(discussion.id).to eq(outdated_diff_note1.discussion_id) + expect(discussion.active?).to be false + expect(discussion.active?(diff_refs)).to be true + expect(discussion.created_at_diff?(diff_refs)).to be true + + expect(discussion.notes).to eq([outdated_diff_note1, outdated_diff_note2]) + end + + it "groups the discussions by line code" do + expect(subject[outdated_diff_note1.line_code].first.id).to eq(outdated_diff_note1.discussion_id) + end end end end @@ -388,15 +429,267 @@ describe Note, models: true do end end + describe '#can_be_discussion_note?' do + context 'for a note on a merge request' do + it 'returns true' do + note = build(:note_on_merge_request) + + expect(note.can_be_discussion_note?).to be_truthy + end + end + + context 'for a note on an issue' do + it 'returns true' do + note = build(:note_on_issue) + + expect(note.can_be_discussion_note?).to be_truthy + end + end + + context 'for a note on a commit' do + it 'returns true' do + note = build(:note_on_commit) + + expect(note.can_be_discussion_note?).to be_truthy + end + end + + context 'for a note on a snippet' do + it 'returns true' do + note = build(:note_on_project_snippet) + + expect(note.can_be_discussion_note?).to be_truthy + end + end + + context 'for a diff note on merge request' do + it 'returns false' do + note = build(:diff_note_on_merge_request) + + expect(note.can_be_discussion_note?).to be_falsey + end + end + + context 'for a diff note on commit' do + it 'returns false' do + note = build(:diff_note_on_commit) + + expect(note.can_be_discussion_note?).to be_falsey + end + end + + context 'for a discussion note' do + it 'returns false' do + note = build(:discussion_note_on_merge_request) + + expect(note.can_be_discussion_note?).to be_falsey + end + end + end + + describe '#discussion_class' do + let(:note) { build(:note_on_commit) } + let(:merge_request) { create(:merge_request) } + + context 'when the note is displayed out of context' do + it 'returns OutOfContextDiscussion' do + expect(note.discussion_class(merge_request)).to be(OutOfContextDiscussion) + end + end + + context 'when the note is displayed in the original context' do + it 'returns IndividualNoteDiscussion' do + expect(note.discussion_class(note.noteable)).to be(IndividualNoteDiscussion) + end + end + end + + describe "#discussion_id" do + let(:note) { create(:note_on_commit) } + + context "when it is newly created" do + it "has a discussion id" do + expect(note.discussion_id).not_to be_nil + expect(note.discussion_id).to match(/\A\h{40}\z/) + end + end + + context "when it didn't store a discussion id before" do + before do + note.update_column(:discussion_id, nil) + end + + it "has a discussion id" do + # The discussion_id is set in `after_initialize`, so `reload` won't work + reloaded_note = Note.find(note.id) + + expect(reloaded_note.discussion_id).not_to be_nil + expect(reloaded_note.discussion_id).to match(/\A\h{40}\z/) + end + end + + context 'when the note is displayed out of context' do + let(:merge_request) { create(:merge_request) } + + it 'overrides the discussion id' do + 
expect(note.discussion_id(merge_request)).not_to eq(note.discussion_id) + end + end + end + + describe '#to_discussion' do + subject { create(:discussion_note_on_merge_request) } + let!(:note2) { create(:discussion_note_on_merge_request, project: subject.project, noteable: subject.noteable, in_reply_to: subject) } + + it "returns a discussion with just this note" do + discussion = subject.to_discussion + + expect(discussion.id).to eq(subject.discussion_id) + expect(discussion.notes).to eq([subject]) + end + end + + describe "#discussion" do + let!(:note1) { create(:discussion_note_on_merge_request) } + let!(:note2) { create(:diff_note_on_merge_request, project: note1.project, noteable: note1.noteable) } + + context 'when the note is part of a discussion' do + subject { create(:discussion_note_on_merge_request, project: note1.project, noteable: note1.noteable, in_reply_to: note1) } + + it "returns the discussion this note is in" do + discussion = subject.discussion + + expect(discussion.id).to eq(subject.discussion_id) + expect(discussion.notes).to eq([note1, subject]) + end + end + + context 'when the note is not part of a discussion' do + subject { create(:note) } + + it "returns a discussion with just this note" do + discussion = subject.discussion + + expect(discussion.id).to eq(subject.discussion_id) + expect(discussion.notes).to eq([subject]) + end + end + end + + describe "#part_of_discussion?" do + context 'for a regular note' do + let(:note) { build(:note) } + + it 'returns false' do + expect(note.part_of_discussion?).to be_falsey + end + end + + context 'for a diff note' do + let(:note) { build(:diff_note_on_commit) } + + it 'returns true' do + expect(note.part_of_discussion?).to be_truthy + end + end + + context 'for a discussion note' do + let(:note) { build(:discussion_note_on_merge_request) } + + it 'returns true' do + expect(note.part_of_discussion?).to be_truthy + end + end + end + + describe '#in_reply_to?' 
do + context 'for a note' do + context 'when part of a discussion' do + subject { create(:discussion_note_on_issue) } + let(:note) { create(:discussion_note_on_issue, in_reply_to: subject) } + + it 'checks if the note is in reply to the other discussion' do + expect(subject).to receive(:in_reply_to?).with(note).and_call_original + expect(subject).to receive(:in_reply_to?).with(note.noteable).and_call_original + expect(subject).to receive(:in_reply_to?).with(note.to_discussion).and_call_original + + subject.in_reply_to?(note) + end + end + + context 'when not part of a discussion' do + subject { create(:note) } + let(:note) { create(:note, in_reply_to: subject) } + + it 'checks if the note is in reply to the other noteable' do + expect(subject).to receive(:in_reply_to?).with(note).and_call_original + expect(subject).to receive(:in_reply_to?).with(note.noteable).and_call_original + + subject.in_reply_to?(note) + end + end + end + + context 'for a discussion' do + context 'when part of the same discussion' do + subject { create(:diff_note_on_merge_request) } + let(:note) { create(:diff_note_on_merge_request, in_reply_to: subject) } + + it 'returns true' do + expect(subject.in_reply_to?(note.to_discussion)).to be_truthy + end + end + + context 'when not part of the same discussion' do + subject { create(:diff_note_on_merge_request) } + let(:note) { create(:diff_note_on_merge_request) } + + it 'returns false' do + expect(subject.in_reply_to?(note.to_discussion)).to be_falsey + end + end + end + + context 'for a noteable' do + context 'when a comment on the same noteable' do + subject { create(:note) } + let(:note) { create(:note, in_reply_to: subject) } + + it 'returns true' do + expect(subject.in_reply_to?(note.noteable)).to be_truthy + end + end + + context 'when not a comment on the same noteable' do + subject { create(:note) } + let(:note) { create(:note) } + + it 'returns false' do + expect(subject.in_reply_to?(note.noteable)).to be_falsey + end + end + end + end + describe 'expiring ETag cache' do let(:note) { build(:note_on_issue) } - it "expires cache for note's issue when note is saved" do + def expect_expiration(note) expect_any_instance_of(Gitlab::EtagCaching::Store) .to receive(:touch) .with("/#{note.project.namespace.to_param}/#{note.project.to_param}/noteable/issue/#{note.noteable.id}/notes") + end + + it "expires cache for note's issue when note is saved" do + expect_expiration(note) note.save! end + + it "expires cache for note's issue when note is destroyed" do + expect_expiration(note) + + note.destroy! 
+ end end end diff --git a/spec/models/project_authorization_spec.rb b/spec/models/project_authorization_spec.rb index 33ef67f97a7..cd0a4a94809 100644 --- a/spec/models/project_authorization_spec.rb +++ b/spec/models/project_authorization_spec.rb @@ -16,7 +16,7 @@ describe ProjectAuthorization do it 'inserts rows in batches' do described_class.insert_authorizations([ [user.id, project1.id, Gitlab::Access::MASTER], - [user.id, project2.id, Gitlab::Access::MASTER], + [user.id, project2.id, Gitlab::Access::MASTER] ], 1) expect(user.project_authorizations.count).to eq(2) diff --git a/spec/models/project_services/asana_service_spec.rb b/spec/models/project_services/asana_service_spec.rb index 48aef3a93f2..95c35162d96 100644 --- a/spec/models/project_services/asana_service_spec.rb +++ b/spec/models/project_services/asana_service_spec.rb @@ -28,7 +28,7 @@ describe AsanaService, models: true do commits: messages.map do |m| { message: m, - url: 'https://gitlab.com/', + url: 'https://gitlab.com/' } end } diff --git a/spec/models/project_services/chat_message/issue_message_spec.rb b/spec/models/project_services/chat_message/issue_message_spec.rb index 190ff4c535d..c159ab00ab1 100644 --- a/spec/models/project_services/chat_message/issue_message_spec.rb +++ b/spec/models/project_services/chat_message/issue_message_spec.rb @@ -7,7 +7,8 @@ describe ChatMessage::IssueMessage, models: true do { user: { name: 'Test User', - username: 'test.user' + username: 'test.user', + avatar_url: 'http://someavatar.com' }, project_name: 'project_name', project_url: 'http://somewhere.com', @@ -25,43 +26,84 @@ describe ChatMessage::IssueMessage, models: true do } end - let(:color) { '#C95823' } + context 'without markdown' do + let(:color) { '#C95823' } - context '#initialize' do - before do - args[:object_attributes][:description] = nil + context '#initialize' do + before do + args[:object_attributes][:description] = nil + end + + it 'returns a non-null description' do + expect(subject.description).to eq('') + end end - it 'returns a non-null description' do - expect(subject.description).to eq('') + context 'open' do + it 'returns a message regarding opening of issues' do + expect(subject.pretext).to eq( + '[<http://somewhere.com|project_name>] Issue opened by test.user') + expect(subject.attachments).to eq([ + { + title: "#100 Issue title", + title_link: "http://url.com", + text: "issue description", + color: color + } + ]) + end end - end - context 'open' do - it 'returns a message regarding opening of issues' do - expect(subject.pretext).to eq( - '[<http://somewhere.com|project_name>] Issue opened by test.user') - expect(subject.attachments).to eq([ - { - title: "#100 Issue title", - title_link: "http://url.com", - text: "issue description", - color: color, - } - ]) + context 'close' do + before do + args[:object_attributes][:action] = 'close' + args[:object_attributes][:state] = 'closed' + end + + it 'returns a message regarding closing of issues' do + expect(subject.pretext). to eq( + '[<http://somewhere.com|project_name>] Issue <http://url.com|#100 Issue title> closed by test.user') + expect(subject.attachments).to be_empty + end end end - context 'close' do + context 'with markdown' do before do - args[:object_attributes][:action] = 'close' - args[:object_attributes][:state] = 'closed' + args[:markdown] = true end - it 'returns a message regarding closing of issues' do - expect(subject.pretext). 
to eq( - '[<http://somewhere.com|project_name>] Issue <http://url.com|#100 Issue title> closed by test.user') - expect(subject.attachments).to be_empty + context 'open' do + it 'returns a message regarding opening of issues' do + expect(subject.pretext).to eq( + '[[project_name](http://somewhere.com)] Issue opened by test.user') + expect(subject.attachments).to eq('issue description') + expect(subject.activity).to eq({ + title: 'Issue opened by test.user', + subtitle: 'in [project_name](http://somewhere.com)', + text: '[#100 Issue title](http://url.com)', + image: 'http://someavatar.com' + }) + end + end + + context 'close' do + before do + args[:object_attributes][:action] = 'close' + args[:object_attributes][:state] = 'closed' + end + + it 'returns a message regarding closing of issues' do + expect(subject.pretext). to eq( + '[[project_name](http://somewhere.com)] Issue [#100 Issue title](http://url.com) closed by test.user') + expect(subject.attachments).to be_empty + expect(subject.activity).to eq({ + title: 'Issue closed by test.user', + subtitle: 'in [project_name](http://somewhere.com)', + text: '[#100 Issue title](http://url.com)', + image: 'http://someavatar.com' + }) + end end end end diff --git a/spec/models/project_services/chat_message/merge_message_spec.rb b/spec/models/project_services/chat_message/merge_message_spec.rb index cc154112e90..61f17031172 100644 --- a/spec/models/project_services/chat_message/merge_message_spec.rb +++ b/spec/models/project_services/chat_message/merge_message_spec.rb @@ -7,45 +7,84 @@ describe ChatMessage::MergeMessage, models: true do { user: { name: 'Test User', - username: 'test.user' + username: 'test.user', + avatar_url: 'http://someavatar.com' }, project_name: 'project_name', project_url: 'http://somewhere.com', object_attributes: { - title: "Issue title\nSecond line", + title: "Merge Request title\nSecond line", id: 10, iid: 100, assignee_id: 1, url: 'http://url.com', state: 'opened', - description: 'issue description', + description: 'merge request description', source_branch: 'source_branch', - target_branch: 'target_branch', + target_branch: 'target_branch' } } end - let(:color) { '#345' } + context 'without markdown' do + let(:color) { '#345' } - context 'open' do - it 'returns a message regarding opening of merge requests' do - expect(subject.pretext).to eq( - 'test.user opened <http://somewhere.com/merge_requests/100|merge request !100> '\ - 'in <http://somewhere.com|project_name>: *Issue title*') - expect(subject.attachments).to be_empty + context 'open' do + it 'returns a message regarding opening of merge requests' do + expect(subject.pretext).to eq( + 'test.user opened <http://somewhere.com/merge_requests/100|!100 *Merge Request title*> in <http://somewhere.com|project_name>: *Merge Request title*') + expect(subject.attachments).to be_empty + end + end + + context 'close' do + before do + args[:object_attributes][:state] = 'closed' + end + it 'returns a message regarding closing of merge requests' do + expect(subject.pretext).to eq( + 'test.user closed <http://somewhere.com/merge_requests/100|!100 *Merge Request title*> in <http://somewhere.com|project_name>: *Merge Request title*') + expect(subject.attachments).to be_empty + end end end - context 'close' do + context 'with markdown' do before do - args[:object_attributes][:state] = 'closed' + args[:markdown] = true + end + + context 'open' do + it 'returns a message regarding opening of merge requests' do + expect(subject.pretext).to eq( + 'test.user opened [!100 *Merge Request 
title*](http://somewhere.com/merge_requests/100) in [project_name](http://somewhere.com): *Merge Request title*') + expect(subject.attachments).to be_empty + expect(subject.activity).to eq({ + title: 'Merge Request opened by test.user', + subtitle: 'in [project_name](http://somewhere.com)', + text: '[!100 *Merge Request title*](http://somewhere.com/merge_requests/100)', + image: 'http://someavatar.com' + }) + end end - it 'returns a message regarding closing of merge requests' do - expect(subject.pretext).to eq( - 'test.user closed <http://somewhere.com/merge_requests/100|merge request !100> '\ - 'in <http://somewhere.com|project_name>: *Issue title*') - expect(subject.attachments).to be_empty + + context 'close' do + before do + args[:object_attributes][:state] = 'closed' + end + + it 'returns a message regarding closing of merge requests' do + expect(subject.pretext).to eq( + 'test.user closed [!100 *Merge Request title*](http://somewhere.com/merge_requests/100) in [project_name](http://somewhere.com): *Merge Request title*') + expect(subject.attachments).to be_empty + expect(subject.activity).to eq({ + title: 'Merge Request closed by test.user', + subtitle: 'in [project_name](http://somewhere.com)', + text: '[!100 *Merge Request title*](http://somewhere.com/merge_requests/100)', + image: 'http://someavatar.com' + }) + end end end end diff --git a/spec/models/project_services/chat_message/note_message_spec.rb b/spec/models/project_services/chat_message/note_message_spec.rb index da700a08e57..7996536218a 100644 --- a/spec/models/project_services/chat_message/note_message_spec.rb +++ b/spec/models/project_services/chat_message/note_message_spec.rb @@ -1,130 +1,190 @@ require 'spec_helper' describe ChatMessage::NoteMessage, models: true do - let(:color) { '#345' } + subject { described_class.new(args) } - before do - @args = { - user: { - name: 'Test User', - username: 'test.user', - avatar_url: 'http://fakeavatar' - }, - project_name: 'project_name', - project_url: 'http://somewhere.com', - repository: { - name: 'project_name', - url: 'http://somewhere.com', - }, - object_attributes: { - id: 10, - note: 'comment on a commit', - url: 'http://url.com', - noteable_type: 'Commit' - } + let(:color) { '#345' } + let(:args) do + { + user: { + name: 'Test User', + username: 'test.user', + avatar_url: 'http://fakeavatar' + }, + project_name: 'project_name', + project_url: 'http://somewhere.com', + repository: { + name: 'project_name', + url: 'http://somewhere.com' + }, + object_attributes: { + id: 10, + note: 'comment on a commit', + url: 'http://url.com', + noteable_type: 'Commit' + } } end context 'commit notes' do before do - @args[:object_attributes][:note] = 'comment on a commit' - @args[:object_attributes][:noteable_type] = 'Commit' - @args[:commit] = { - id: '5f163b2b95e6f53cbd428f5f0b103702a52b9a23', - message: "Added a commit message\ndetails\n123\n" + args[:object_attributes][:note] = 'comment on a commit' + args[:object_attributes][:noteable_type] = 'Commit' + args[:commit] = { + id: '5f163b2b95e6f53cbd428f5f0b103702a52b9a23', + message: "Added a commit message\ndetails\n123\n" } end - it 'returns a message regarding notes on commits' do - message = described_class.new(@args) - expect(message.pretext).to eq("test.user <http://url.com|commented on " \ - "commit 5f163b2b> in <http://somewhere.com|project_name>: " \ - "*Added a commit message*") - expected_attachments = [ - { - text: "comment on a commit", - color: color, - } - ] - expect(message.attachments).to eq(expected_attachments) + 
context 'without markdown' do + it 'returns a message regarding notes on commits' do + expect(subject.pretext).to eq("test.user <http://url.com|commented on " \ + "commit 5f163b2b> in <http://somewhere.com|project_name>: " \ + "*Added a commit message*") + expect(subject.attachments).to eq([{ + text: 'comment on a commit', + color: color + }]) + end + end + + context 'with markdown' do + before do + args[:markdown] = true + end + + it 'returns a message regarding notes on commits' do + expect(subject.pretext).to eq( + 'test.user [commented on commit 5f163b2b](http://url.com) in [project_name](http://somewhere.com): *Added a commit message*' + ) + expect(subject.attachments).to eq('comment on a commit') + expect(subject.activity).to eq({ + title: 'test.user [commented on commit 5f163b2b](http://url.com)', + subtitle: 'in [project_name](http://somewhere.com)', + text: 'Added a commit message', + image: 'http://fakeavatar' + }) + end end end context 'merge request notes' do before do - @args[:object_attributes][:note] = 'comment on a merge request' - @args[:object_attributes][:noteable_type] = 'MergeRequest' - @args[:merge_request] = { - id: 1, - iid: 30, - title: "merge request title\ndetails\n" + args[:object_attributes][:note] = 'comment on a merge request' + args[:object_attributes][:noteable_type] = 'MergeRequest' + args[:merge_request] = { + id: 1, + iid: 30, + title: "merge request title\ndetails\n" } end - it 'returns a message regarding notes on a merge request' do - message = described_class.new(@args) - expect(message.pretext).to eq("test.user <http://url.com|commented on " \ - "merge request !30> in <http://somewhere.com|project_name>: " \ - "*merge request title*") - expected_attachments = [ - { - text: "comment on a merge request", - color: color, - } - ] - expect(message.attachments).to eq(expected_attachments) + context 'without markdown' do + it 'returns a message regarding notes on a merge request' do + expect(subject.pretext).to eq("test.user <http://url.com|commented on " \ + "merge request !30> in <http://somewhere.com|project_name>: " \ + "*merge request title*") + expect(subject.attachments).to eq([{ + text: 'comment on a merge request', + color: color + }]) + end + end + + context 'with markdown' do + before do + args[:markdown] = true + end + + it 'returns a message regarding notes on a merge request' do + expect(subject.pretext).to eq( + 'test.user [commented on merge request !30](http://url.com) in [project_name](http://somewhere.com): *merge request title*') + expect(subject.attachments).to eq('comment on a merge request') + expect(subject.activity).to eq({ + title: 'test.user [commented on merge request !30](http://url.com)', + subtitle: 'in [project_name](http://somewhere.com)', + text: 'merge request title', + image: 'http://fakeavatar' + }) + end end end context 'issue notes' do before do - @args[:object_attributes][:note] = 'comment on an issue' - @args[:object_attributes][:noteable_type] = 'Issue' - @args[:issue] = { - id: 1, - iid: 20, - title: "issue title\ndetails\n" + args[:object_attributes][:note] = 'comment on an issue' + args[:object_attributes][:noteable_type] = 'Issue' + args[:issue] = { + id: 1, + iid: 20, + title: "issue title\ndetails\n" } end - it 'returns a message regarding notes on an issue' do - message = described_class.new(@args) - expect(message.pretext).to eq( - "test.user <http://url.com|commented on " \ - "issue #20> in <http://somewhere.com|project_name>: " \ - "*issue title*") - expected_attachments = [ - { - text: "comment on an 
issue", - color: color, - } - ] - expect(message.attachments).to eq(expected_attachments) + context 'without markdown' do + it 'returns a message regarding notes on an issue' do + expect(subject.pretext).to eq( + "test.user <http://url.com|commented on " \ + "issue #20> in <http://somewhere.com|project_name>: " \ + "*issue title*") + expect(subject.attachments).to eq([{ + text: 'comment on an issue', + color: color + }]) + end + end + + context 'with markdown' do + before do + args[:markdown] = true + end + + it 'returns a message regarding notes on an issue' do + expect(subject.pretext).to eq( + 'test.user [commented on issue #20](http://url.com) in [project_name](http://somewhere.com): *issue title*') + expect(subject.attachments).to eq('comment on an issue') + expect(subject.activity).to eq({ + title: 'test.user [commented on issue #20](http://url.com)', + subtitle: 'in [project_name](http://somewhere.com)', + text: 'issue title', + image: 'http://fakeavatar' + }) + end end end context 'project snippet notes' do before do - @args[:object_attributes][:note] = 'comment on a snippet' - @args[:object_attributes][:noteable_type] = 'Snippet' - @args[:snippet] = { - id: 5, - title: "snippet title\ndetails\n" + args[:object_attributes][:note] = 'comment on a snippet' + args[:object_attributes][:noteable_type] = 'Snippet' + args[:snippet] = { + id: 5, + title: "snippet title\ndetails\n" } end - it 'returns a message regarding notes on a project snippet' do - message = described_class.new(@args) - expect(message.pretext).to eq("test.user <http://url.com|commented on " \ - "snippet #5> in <http://somewhere.com|project_name>: " \ - "*snippet title*") - expected_attachments = [ - { - text: "comment on a snippet", - color: color, - } - ] - expect(message.attachments).to eq(expected_attachments) + context 'without markdown' do + it 'returns a message regarding notes on a project snippet' do + expect(subject.pretext).to eq("test.user <http://url.com|commented on " \ + "snippet $5> in <http://somewhere.com|project_name>: " \ + "*snippet title*") + expect(subject.attachments).to eq([{ + text: 'comment on a snippet', + color: color + }]) + end + end + + context 'with markdown' do + before do + args[:markdown] = true + end + + it 'returns a message regarding notes on a project snippet' do + expect(subject.pretext).to eq( + 'test.user [commented on snippet $5](http://url.com) in [project_name](http://somewhere.com): *snippet title*') + expect(subject.attachments).to eq('comment on a snippet') + end end end end diff --git a/spec/models/project_services/chat_message/pipeline_message_spec.rb b/spec/models/project_services/chat_message/pipeline_message_spec.rb index bf2a9616455..7d2599dc703 100644 --- a/spec/models/project_services/chat_message/pipeline_message_spec.rb +++ b/spec/models/project_services/chat_message/pipeline_message_spec.rb @@ -2,8 +2,9 @@ require 'spec_helper' describe ChatMessage::PipelineMessage do subject { described_class.new(args) } - let(:user) { { name: 'hacker' } } + let(:user) { { name: 'hacker' } } + let(:duration) { 7210 } let(:args) do { object_attributes: { @@ -14,54 +15,118 @@ describe ChatMessage::PipelineMessage do status: status, duration: duration }, - project: { path_with_namespace: 'project_name', - web_url: 'http://example.gitlab.com' }, + project: { + path_with_namespace: 'project_name', + web_url: 'http://example.gitlab.com' + }, user: user } end - let(:message) { build_message } + context 'without markdown' do + context 'pipeline succeeded' do + let(:status) { 'success' 
} + let(:color) { 'good' } + let(:message) { build_message('passed') } + + it 'returns a message with information about succeeded build' do + expect(subject.pretext).to be_empty + expect(subject.fallback).to eq(message) + expect(subject.attachments).to eq([text: message, color: color]) + end + end - context 'pipeline succeeded' do - let(:status) { 'success' } - let(:color) { 'good' } - let(:duration) { 10 } - let(:message) { build_message('passed') } + context 'pipeline failed' do + let(:status) { 'failed' } + let(:color) { 'danger' } + let(:message) { build_message } - it 'returns a message with information about succeeded build' do - verify_message + it 'returns a message with information about failed build' do + expect(subject.pretext).to be_empty + expect(subject.fallback).to eq(message) + expect(subject.attachments).to eq([text: message, color: color]) + end + + context 'when triggered by API therefore lacking user' do + let(:user) { nil } + let(:message) { build_message(status, 'API') } + + it 'returns a message stating it is by API' do + expect(subject.pretext).to be_empty + expect(subject.fallback).to eq(message) + expect(subject.attachments).to eq([text: message, color: color]) + end + end end - end - context 'pipeline failed' do - let(:status) { 'failed' } - let(:color) { 'danger' } - let(:duration) { 10 } + def build_message(status_text = status, name = user[:name]) + "<http://example.gitlab.com|project_name>:" \ + " Pipeline <http://example.gitlab.com/pipelines/123|#123>" \ + " of branch `<http://example.gitlab.com/commits/develop|develop>`" \ + " by #{name} #{status_text} in 02:00:10" + end + end - it 'returns a message with information about failed build' do - verify_message + context 'with markdown' do + before do + args[:markdown] = true end - context 'when triggered by API therefore lacking user' do - let(:user) { nil } - let(:message) { build_message(status, 'API') } + context 'pipeline succeeded' do + let(:status) { 'success' } + let(:color) { 'good' } + let(:message) { build_markdown_message('passed') } - it 'returns a message stating it is by API' do - verify_message + it 'returns a message with information about succeeded build' do + expect(subject.pretext).to be_empty + expect(subject.attachments).to eq(message) + expect(subject.activity).to eq({ + title: 'Pipeline [#123](http://example.gitlab.com/pipelines/123) of branch `[develop](http://example.gitlab.com/commits/develop)` by hacker passed', + subtitle: 'in [project_name](http://example.gitlab.com)', + text: 'in 02:00:10', + image: '' + }) end end - end - def verify_message - expect(subject.pretext).to be_empty - expect(subject.fallback).to eq(message) - expect(subject.attachments).to eq([text: message, color: color]) - end + context 'pipeline failed' do + let(:status) { 'failed' } + let(:color) { 'danger' } + let(:message) { build_markdown_message } + + it 'returns a message with information about failed build' do + expect(subject.pretext).to be_empty + expect(subject.attachments).to eq(message) + expect(subject.activity).to eq({ + title: 'Pipeline [#123](http://example.gitlab.com/pipelines/123) of branch `[develop](http://example.gitlab.com/commits/develop)` by hacker failed', + subtitle: 'in [project_name](http://example.gitlab.com)', + text: 'in 02:00:10', + image: '' + }) + end - def build_message(status_text = status, name = user[:name]) - "<http://example.gitlab.com|project_name>:" \ - " Pipeline <http://example.gitlab.com/pipelines/123|#123>" \ - " of <http://example.gitlab.com/commits/develop|develop> 
branch" \ - " by #{name} #{status_text} in #{duration} #{'second'.pluralize(duration)}" + context 'when triggered by API therefore lacking user' do + let(:user) { nil } + let(:message) { build_markdown_message(status, 'API') } + + it 'returns a message stating it is by API' do + expect(subject.pretext).to be_empty + expect(subject.attachments).to eq(message) + expect(subject.activity).to eq({ + title: 'Pipeline [#123](http://example.gitlab.com/pipelines/123) of branch `[develop](http://example.gitlab.com/commits/develop)` by API failed', + subtitle: 'in [project_name](http://example.gitlab.com)', + text: 'in 02:00:10', + image: '' + }) + end + end + end + + def build_markdown_message(status_text = status, name = user[:name]) + "[project_name](http://example.gitlab.com):" \ + " Pipeline [#123](http://example.gitlab.com/pipelines/123)" \ + " of branch `[develop](http://example.gitlab.com/commits/develop)`" \ + " by #{name} #{status_text} in 02:00:10" + end end end diff --git a/spec/models/project_services/chat_message/push_message_spec.rb b/spec/models/project_services/chat_message/push_message_spec.rb index 24928873bad..e38117b75f6 100644 --- a/spec/models/project_services/chat_message/push_message_spec.rb +++ b/spec/models/project_services/chat_message/push_message_spec.rb @@ -10,6 +10,7 @@ describe ChatMessage::PushMessage, models: true do project_name: 'project_name', ref: 'refs/heads/master', user_name: 'test.user', + user_avatar: 'http://someavatar.com', project_url: 'http://url.com' } end @@ -20,22 +21,40 @@ describe ChatMessage::PushMessage, models: true do before do args[:commits] = [ { message: 'message1', url: 'http://url1.com', id: 'abcdefghijkl', author: { name: 'author1' } }, - { message: 'message2', url: 'http://url2.com', id: '123456789012', author: { name: 'author2' } }, + { message: 'message2', url: 'http://url2.com', id: '123456789012', author: { name: 'author2' } } ] end - it 'returns a message regarding pushes' do - expect(subject.pretext).to eq( - 'test.user pushed to branch <http://url.com/commits/master|master> of '\ - '<http://url.com|project_name> (<http://url.com/compare/before...after|Compare changes>)' - ) - expect(subject.attachments).to eq([ - { - text: "<http://url1.com|abcdefgh>: message1 - author1\n"\ - "<http://url2.com|12345678>: message2 - author2", - color: color, - } - ]) + context 'without markdown' do + it 'returns a message regarding pushes' do + expect(subject.pretext).to eq( + 'test.user pushed to branch `<http://url.com/commits/master|master>` of '\ + '<http://url.com|project_name> (<http://url.com/compare/before...after|Compare changes>)') + expect(subject.attachments).to eq([{ + text: "<http://url1.com|abcdefgh>: message1 - author1\n\n"\ + "<http://url2.com|12345678>: message2 - author2", + color: color + }]) + end + end + + context 'with markdown' do + before do + args[:markdown] = true + end + + it 'returns a message regarding pushes' do + expect(subject.pretext).to eq( + 'test.user pushed to branch `[master](http://url.com/commits/master)` of [project_name](http://url.com) ([Compare changes](http://url.com/compare/before...after))') + expect(subject.attachments).to eq( + "[abcdefgh](http://url1.com): message1 - author1\n\n[12345678](http://url2.com): message2 - author2") + expect(subject.activity).to eq({ + title: 'test.user pushed to branch', + subtitle: 'in [project_name](http://url.com)', + text: '[Compare changes](http://url.com/compare/before...after)', + image: 'http://someavatar.com' + }) + end end end @@ -47,15 +66,36 @@ describe 
ChatMessage::PushMessage, models: true do project_name: 'project_name', ref: 'refs/tags/new_tag', user_name: 'test.user', + user_avatar: 'http://someavatar.com', project_url: 'http://url.com' } end - it 'returns a message regarding pushes' do - expect(subject.pretext).to eq('test.user pushed new tag ' \ - '<http://url.com/commits/new_tag|new_tag> to ' \ - '<http://url.com|project_name>') - expect(subject.attachments).to be_empty + context 'without markdown' do + it 'returns a message regarding pushes' do + expect(subject.pretext).to eq('test.user pushed new tag ' \ + '`<http://url.com/commits/new_tag|new_tag>` to ' \ + '<http://url.com|project_name>') + expect(subject.attachments).to be_empty + end + end + + context 'with markdown' do + before do + args[:markdown] = true + end + + it 'returns a message regarding pushes' do + expect(subject.pretext).to eq( + 'test.user pushed new tag `[new_tag](http://url.com/commits/new_tag)` to [project_name](http://url.com)') + expect(subject.attachments).to be_empty + expect(subject.activity).to eq({ + title: 'test.user created tag', + subtitle: 'in [project_name](http://url.com)', + text: '[Compare changes](http://url.com/compare/0000000000000000000000000000000000000000...after)', + image: 'http://someavatar.com' + }) + end end end @@ -64,12 +104,31 @@ describe ChatMessage::PushMessage, models: true do args[:before] = Gitlab::Git::BLANK_SHA end - it 'returns a message regarding a new branch' do - expect(subject.pretext).to eq( - 'test.user pushed new branch <http://url.com/commits/master|master> to '\ - '<http://url.com|project_name>' - ) - expect(subject.attachments).to be_empty + context 'without markdown' do + it 'returns a message regarding a new branch' do + expect(subject.pretext).to eq( + 'test.user pushed new branch `<http://url.com/commits/master|master>` to '\ + '<http://url.com|project_name>') + expect(subject.attachments).to be_empty + end + end + + context 'with markdown' do + before do + args[:markdown] = true + end + + it 'returns a message regarding a new branch' do + expect(subject.pretext).to eq( + 'test.user pushed new branch `[master](http://url.com/commits/master)` to [project_name](http://url.com)') + expect(subject.attachments).to be_empty + expect(subject.activity).to eq({ + title: 'test.user created branch', + subtitle: 'in [project_name](http://url.com)', + text: '[Compare changes](http://url.com/compare/0000000000000000000000000000000000000000...after)', + image: 'http://someavatar.com' + }) + end end end @@ -78,11 +137,30 @@ describe ChatMessage::PushMessage, models: true do args[:after] = Gitlab::Git::BLANK_SHA end - it 'returns a message regarding a removed branch' do - expect(subject.pretext).to eq( - 'test.user removed branch master from <http://url.com|project_name>' - ) - expect(subject.attachments).to be_empty + context 'without markdown' do + it 'returns a message regarding a removed branch' do + expect(subject.pretext).to eq( + 'test.user removed branch `master` from <http://url.com|project_name>') + expect(subject.attachments).to be_empty + end + end + + context 'with markdown' do + before do + args[:markdown] = true + end + + it 'returns a message regarding a removed branch' do + expect(subject.pretext).to eq( + 'test.user removed branch `master` from [project_name](http://url.com)') + expect(subject.attachments).to be_empty + expect(subject.activity).to eq({ + title: 'test.user removed branch', + subtitle: 'in [project_name](http://url.com)', + text: '[Compare 
changes](http://url.com/compare/before...0000000000000000000000000000000000000000)', + image: 'http://someavatar.com' + }) + end end end end diff --git a/spec/models/project_services/chat_message/wiki_page_message_spec.rb b/spec/models/project_services/chat_message/wiki_page_message_spec.rb index a2ad61e38e7..4ca1b8aa7b7 100644 --- a/spec/models/project_services/chat_message/wiki_page_message_spec.rb +++ b/spec/models/project_services/chat_message/wiki_page_message_spec.rb @@ -7,7 +7,8 @@ describe ChatMessage::WikiPageMessage, models: true do { user: { name: 'Test User', - username: 'test.user' + username: 'test.user', + avatar_url: 'http://someavatar.com' }, project_name: 'project_name', project_url: 'http://somewhere.com', @@ -19,54 +20,128 @@ describe ChatMessage::WikiPageMessage, models: true do } end - describe '#pretext' do - context 'when :action == "create"' do - before { args[:object_attributes][:action] = 'create' } + context 'without markdown' do + describe '#pretext' do + context 'when :action == "create"' do + before { args[:object_attributes][:action] = 'create' } - it 'returns a message that a new wiki page was created' do - expect(subject.pretext).to eq( - 'test.user created <http://url.com|wiki page> in <http://somewhere.com|project_name>: '\ - '*Wiki page title*') + it 'returns a message that a new wiki page was created' do + expect(subject.pretext).to eq( + 'test.user created <http://url.com|wiki page> in <http://somewhere.com|project_name>: '\ + '*Wiki page title*') + end + end + + context 'when :action == "update"' do + before { args[:object_attributes][:action] = 'update' } + + it 'returns a message that a wiki page was updated' do + expect(subject.pretext).to eq( + 'test.user edited <http://url.com|wiki page> in <http://somewhere.com|project_name>: '\ + '*Wiki page title*') + end end end - context 'when :action == "update"' do - before { args[:object_attributes][:action] = 'update' } + describe '#attachments' do + let(:color) { '#345' } - it 'returns a message that a wiki page was updated' do - expect(subject.pretext).to eq( - 'test.user edited <http://url.com|wiki page> in <http://somewhere.com|project_name>: '\ - '*Wiki page title*') + context 'when :action == "create"' do + before { args[:object_attributes][:action] = 'create' } + + it 'returns the attachment for a new wiki page' do + expect(subject.attachments).to eq([ + { + text: "Wiki page description", + color: color + } + ]) + end + end + + context 'when :action == "update"' do + before { args[:object_attributes][:action] = 'update' } + + it 'returns the attachment for an updated wiki page' do + expect(subject.attachments).to eq([ + { + text: "Wiki page description", + color: color + } + ]) + end end end end - describe '#attachments' do - let(:color) { '#345' } + context 'with markdown' do + before do + args[:markdown] = true + end + + describe '#pretext' do + context 'when :action == "create"' do + before { args[:object_attributes][:action] = 'create' } + + it 'returns a message that a new wiki page was created' do + expect(subject.pretext).to eq( + 'test.user created [wiki page](http://url.com) in [project_name](http://somewhere.com): *Wiki page title*') + end + end - context 'when :action == "create"' do - before { args[:object_attributes][:action] = 'create' } + context 'when :action == "update"' do + before { args[:object_attributes][:action] = 'update' } - it 'returns the attachment for a new wiki page' do - expect(subject.attachments).to eq([ - { - text: "Wiki page description", - color: color, - } - 
]) + it 'returns a message that a wiki page was updated' do + expect(subject.pretext).to eq( + 'test.user edited [wiki page](http://url.com) in [project_name](http://somewhere.com): *Wiki page title*') + end end end - context 'when :action == "update"' do - before { args[:object_attributes][:action] = 'update' } + describe '#attachments' do + context 'when :action == "create"' do + before { args[:object_attributes][:action] = 'create' } + + it 'returns the attachment for a new wiki page' do + expect(subject.attachments).to eq('Wiki page description') + end + end + + context 'when :action == "update"' do + before { args[:object_attributes][:action] = 'update' } + + it 'returns the attachment for an updated wiki page' do + expect(subject.attachments).to eq('Wiki page description') + end + end + end + + describe '#activity' do + context 'when :action == "create"' do + before { args[:object_attributes][:action] = 'create' } + + it 'returns the attachment for a new wiki page' do + expect(subject.activity).to eq({ + title: 'test.user created [wiki page](http://url.com)', + subtitle: 'in [project_name](http://somewhere.com)', + text: 'Wiki page title', + image: 'http://someavatar.com' + }) + end + end + + context 'when :action == "update"' do + before { args[:object_attributes][:action] = 'update' } - it 'returns the attachment for an updated wiki page' do - expect(subject.attachments).to eq([ - { - text: "Wiki page description", - color: color, - } - ]) + it 'returns the attachment for an updated wiki page' do + expect(subject.activity).to eq({ + title: 'test.user edited [wiki page](http://url.com)', + subtitle: 'in [project_name](http://somewhere.com)', + text: 'Wiki page title', + image: 'http://someavatar.com' + }) + end end end end diff --git a/spec/models/project_services/chat_notification_service_spec.rb b/spec/models/project_services/chat_notification_service_spec.rb index c98e7ee14fd..8fbe42248ae 100644 --- a/spec/models/project_services/chat_notification_service_spec.rb +++ b/spec/models/project_services/chat_notification_service_spec.rb @@ -1,11 +1,29 @@ require 'spec_helper' describe ChatNotificationService, models: true do - describe "Associations" do + describe 'Associations' do before do allow(subject).to receive(:activated?).and_return(true) end it { is_expected.to validate_presence_of :webhook } end + + describe '#can_test?' 
do + context 'with empty repository' do + it 'returns true' do + subject.project = create(:empty_project, :empty_repo) + + expect(subject.can_test?).to be true + end + end + + context 'with repository' do + it 'returns true' do + subject.project = create(:project) + + expect(subject.can_test?).to be true + end + end + end end diff --git a/spec/models/project_services/issue_tracker_service_spec.rb b/spec/models/project_services/issue_tracker_service_spec.rb index fbe6f344a98..869b25b933b 100644 --- a/spec/models/project_services/issue_tracker_service_spec.rb +++ b/spec/models/project_services/issue_tracker_service_spec.rb @@ -8,7 +8,7 @@ describe IssueTrackerService, models: true do let(:service) { RedmineService.new(project: project, active: true) } before do - create(:service, project: project, active: true, category: 'issue_tracker') + create(:custom_issue_tracker_service, project: project) end context 'when service is changed manually by user' do diff --git a/spec/models/project_services/kubernetes_service_spec.rb b/spec/models/project_services/kubernetes_service_spec.rb index bf7950ef1c9..c1c2f2a7219 100644 --- a/spec/models/project_services/kubernetes_service_spec.rb +++ b/spec/models/project_services/kubernetes_service_spec.rb @@ -4,7 +4,7 @@ describe KubernetesService, models: true, caching: true do include KubernetesHelpers include ReactiveCachingHelpers - let(:project) { create(:kubernetes_project) } + let(:project) { build_stubbed(:kubernetes_project) } let(:service) { project.kubernetes_service } # We use Kubeclient to interactive with the Kubernetes API. It will @@ -32,7 +32,8 @@ describe KubernetesService, models: true, caching: true do describe 'Validations' do context 'when service is active' do before { subject.active = true } - it { is_expected.to validate_presence_of(:namespace) } + + it { is_expected.not_to validate_presence_of(:namespace) } it { is_expected.to validate_presence_of(:api_url) } it { is_expected.to validate_presence_of(:token) } @@ -53,9 +54,9 @@ describe KubernetesService, models: true, caching: true do 'a' * 63 => true, 'a' * 64 => false, 'a.b' => false, - 'a*b' => false, + 'a*b' => false }.each do |namespace, validity| - it "should validate #{namespace} as #{validity ? 'valid' : 'invalid'}" do + it "validates #{namespace} as #{validity ? 
'valid' : 'invalid'}" do subject.namespace = namespace expect(subject.valid?).to eq(validity) @@ -66,24 +67,40 @@ describe KubernetesService, models: true, caching: true do context 'when service is inactive' do before { subject.active = false } - it { is_expected.not_to validate_presence_of(:namespace) } + it { is_expected.not_to validate_presence_of(:api_url) } it { is_expected.not_to validate_presence_of(:token) } end end describe '#initialize_properties' do - context 'with a project' do - let(:namespace_name) { "#{project.path}-#{project.id}" } + context 'without a project' do + it 'leaves the namespace unset' do + expect(described_class.new.namespace).to be_nil + end + end + end + + describe '#fields' do + let(:kube_namespace) do + subject.fields.find { |h| h[:name] == 'namespace' } + end + + context 'as template' do + before { subject.template = true } - it 'defaults to the project name with ID' do - expect(described_class.new(project: project).namespace).to eq(namespace_name) + it 'sets the namespace to the default' do + expect(kube_namespace).not_to be_nil + expect(kube_namespace[:placeholder]).to eq(subject.class::TEMPLATE_PLACEHOLDER) end end - context 'without a project' do - it 'leaves the namespace unset' do - expect(described_class.new.namespace).to be_nil + context 'with associated project' do + before { subject.project = project } + + it 'sets the namespace to the default' do + expect(kube_namespace).not_to be_nil + expect(kube_namespace[:placeholder]).to match(/\A#{Gitlab::Regex::PATH_REGEX_STR}-\d+\z/) end end end @@ -138,38 +155,40 @@ describe KubernetesService, models: true, caching: true do before do subject.api_url = 'https://kube.domain.com' subject.token = 'token' - subject.namespace = 'my-project' subject.ca_pem = 'CA PEM DATA' + subject.project = project end - it 'sets KUBE_URL' do - expect(subject.predefined_variables).to include( - { key: 'KUBE_URL', value: 'https://kube.domain.com', public: true } - ) - end + context 'namespace is provided' do + before { subject.namespace = 'my-project' } - it 'sets KUBE_TOKEN' do - expect(subject.predefined_variables).to include( - { key: 'KUBE_TOKEN', value: 'token', public: false } - ) + it 'sets the variables' do + expect(subject.predefined_variables).to include( + { key: 'KUBE_URL', value: 'https://kube.domain.com', public: true }, + { key: 'KUBE_TOKEN', value: 'token', public: false }, + { key: 'KUBE_NAMESPACE', value: 'my-project', public: true }, + { key: 'KUBE_CA_PEM', value: 'CA PEM DATA', public: true }, + { key: 'KUBE_CA_PEM_FILE', value: 'CA PEM DATA', public: true, file: true } + ) + end end - it 'sets KUBE_NAMESPACE' do - expect(subject.predefined_variables).to include( - { key: 'KUBE_NAMESPACE', value: 'my-project', public: true } - ) - end + context 'no namespace provided' do + it 'sets the variables' do + expect(subject.predefined_variables).to include( + { key: 'KUBE_URL', value: 'https://kube.domain.com', public: true }, + { key: 'KUBE_TOKEN', value: 'token', public: false }, + { key: 'KUBE_CA_PEM', value: 'CA PEM DATA', public: true }, + { key: 'KUBE_CA_PEM_FILE', value: 'CA PEM DATA', public: true, file: true } + ) + end - it 'sets KUBE_CA_PEM' do - expect(subject.predefined_variables).to include( - { key: 'KUBE_CA_PEM', value: 'CA PEM DATA', public: true } - ) - end + it 'sets the KUBE_NAMESPACE' do + kube_namespace = subject.predefined_variables.find { |h| h[:key] == 'KUBE_NAMESPACE' } - it 'sets KUBE_CA_PEM_FILE' do - expect(subject.predefined_variables).to include( - { key: 'KUBE_CA_PEM_FILE', value: 
'CA PEM DATA', public: true, file: true } - ) + expect(kube_namespace).not_to be_nil + expect(kube_namespace[:value]).to match(/\A#{Gitlab::Regex::PATH_REGEX_STR}-\d+\z/) + end end end diff --git a/spec/models/project_services/microsoft_teams_service_spec.rb b/spec/models/project_services/microsoft_teams_service_spec.rb new file mode 100644 index 00000000000..facc034f69c --- /dev/null +++ b/spec/models/project_services/microsoft_teams_service_spec.rb @@ -0,0 +1,277 @@ +require 'spec_helper' + +describe MicrosoftTeamsService, models: true do + let(:chat_service) { described_class.new } + let(:webhook_url) { 'https://example.gitlab.com/' } + + describe "Associations" do + it { is_expected.to belong_to :project } + it { is_expected.to have_one :service_hook } + end + + describe 'Validations' do + context 'when service is active' do + before { subject.active = true } + + it { is_expected.to validate_presence_of(:webhook) } + it_behaves_like 'issue tracker service URL attribute', :webhook + end + + context 'when service is inactive' do + before { subject.active = false } + + it { is_expected.not_to validate_presence_of(:webhook) } + end + end + + describe "#execute" do + let(:user) { create(:user) } + let(:project) { create(:project, :repository) } + + before do + allow(chat_service).to receive_messages( + project: project, + project_id: project.id, + service_hook: true, + webhook: webhook_url + ) + + WebMock.stub_request(:post, webhook_url) + end + + context 'with push events' do + let(:push_sample_data) do + Gitlab::DataBuilder::Push.build_sample(project, user) + end + + it "calls Microsoft Teams API for push events" do + chat_service.execute(push_sample_data) + + expect(WebMock).to have_requested(:post, webhook_url).once + end + + it 'specifies the webhook when it is configured' do + expect(MicrosoftTeams::Notifier).to receive(:new).with(webhook_url).and_return(double(:microsoft_teams_service).as_null_object) + + chat_service.execute(push_sample_data) + end + end + + context 'with issue events' do + let(:opts) { { title: 'Awesome issue', description: 'please fix' } } + let(:issues_sample_data) do + service = Issues::CreateService.new(project, user, opts) + issue = service.execute + service.hook_data(issue, 'open') + end + + it "calls Microsoft Teams API" do + chat_service.execute(issues_sample_data) + + expect(WebMock).to have_requested(:post, webhook_url).once + end + end + + context 'with merge events' do + let(:opts) do + { + title: 'Awesome merge_request', + description: 'please fix', + source_branch: 'feature', + target_branch: 'master' + } + end + + let(:merge_sample_data) do + service = MergeRequests::CreateService.new(project, user, opts) + merge_request = service.execute + service.hook_data(merge_request, 'open') + end + + it "calls Microsoft Teams API" do + chat_service.execute(merge_sample_data) + + expect(WebMock).to have_requested(:post, webhook_url).once + end + end + + context 'with wiki page events' do + let(:opts) do + { + title: "Awesome wiki_page", + content: "Some text describing some thing or another", + format: "md", + message: "user created page: Awesome wiki_page" + } + end + + let(:wiki_page_sample_data) do + service = WikiPages::CreateService.new(project, user, opts) + wiki_page = service.execute + service.hook_data(wiki_page, 'create') + end + + it "calls Microsoft Teams API" do + chat_service.execute(wiki_page_sample_data) + + expect(WebMock).to have_requested(:post, webhook_url).once + end + end + end + + describe "Note events" do + let(:user) { create(:user) } + 
let(:project) { create(:project, :repository, creator: user) } + + before do + allow(chat_service).to receive_messages( + project: project, + project_id: project.id, + service_hook: true, + webhook: webhook_url + ) + + WebMock.stub_request(:post, webhook_url) + end + + context 'when commit comment event executed' do + let(:commit_note) do + create(:note_on_commit, author: user, + project: project, + commit_id: project.repository.commit.id, + note: 'a comment on a commit') + end + + it "calls Microsoft Teams API for commit comment events" do + data = Gitlab::DataBuilder::Note.build(commit_note, user) + + chat_service.execute(data) + + expect(WebMock).to have_requested(:post, webhook_url).once + end + end + + context 'when merge request comment event executed' do + let(:merge_request_note) do + create(:note_on_merge_request, project: project, + note: "merge request note") + end + + it "calls Microsoft Teams API for merge request comment events" do + data = Gitlab::DataBuilder::Note.build(merge_request_note, user) + + chat_service.execute(data) + + expect(WebMock).to have_requested(:post, webhook_url).once + end + end + + context 'when issue comment event executed' do + let(:issue_note) do + create(:note_on_issue, project: project, note: "issue note") + end + + it "calls Microsoft Teams API for issue comment events" do + data = Gitlab::DataBuilder::Note.build(issue_note, user) + + chat_service.execute(data) + + expect(WebMock).to have_requested(:post, webhook_url).once + end + end + + context 'when snippet comment event executed' do + let(:snippet_note) do + create(:note_on_project_snippet, project: project, + note: "snippet note") + end + + it "calls Microsoft Teams API for snippet comment events" do + data = Gitlab::DataBuilder::Note.build(snippet_note, user) + + chat_service.execute(data) + + expect(WebMock).to have_requested(:post, webhook_url).once + end + end + end + + describe 'Pipeline events' do + let(:user) { create(:user) } + let(:project) { create(:project, :repository) } + + let(:pipeline) do + create(:ci_pipeline, + project: project, status: status, + sha: project.commit.sha, ref: project.default_branch) + end + + before do + allow(chat_service).to receive_messages( + project: project, + service_hook: true, + webhook: webhook_url + ) + end + + shared_examples 'call Microsoft Teams API' do + before do + WebMock.stub_request(:post, webhook_url) + end + + it 'calls Microsoft Teams API for pipeline events' do + data = Gitlab::DataBuilder::Pipeline.build(pipeline) + + chat_service.execute(data) + + expect(WebMock).to have_requested(:post, webhook_url).once + end + end + + context 'with failed pipeline' do + let(:status) { 'failed' } + + it_behaves_like 'call Microsoft Teams API' + end + + context 'with succeeded pipeline' do + let(:status) { 'success' } + + context 'with default to notify_only_broken_pipelines' do + it 'does not call Microsoft Teams API for pipeline events' do + data = Gitlab::DataBuilder::Pipeline.build(pipeline) + result = chat_service.execute(data) + + expect(result).to be_falsy + end + end + + context 'with setting notify_only_broken_pipelines to false' do + before do + chat_service.notify_only_broken_pipelines = false + end + + it_behaves_like 'call Microsoft Teams API' + end + end + + context 'only notify for the default branch' do + context 'when enabled' do + let(:pipeline) do + create(:ci_pipeline, project: project, status: 'failed', ref: 'not-the-default-branch') + end + + before do + chat_service.notify_only_default_branch = true + end + + it 'does not 
call the Microsoft Teams API for pipeline events' do + data = Gitlab::DataBuilder::Pipeline.build(pipeline) + result = chat_service.execute(data) + + expect(result).to be_falsy + end + end + end + end +end diff --git a/spec/models/project_services/pipeline_email_service_spec.rb b/spec/models/project_services/pipelines_email_service_spec.rb index 03932895b0e..03932895b0e 100644 --- a/spec/models/project_services/pipeline_email_service_spec.rb +++ b/spec/models/project_services/pipelines_email_service_spec.rb diff --git a/spec/models/project_services/pivotaltracker_service_spec.rb b/spec/models/project_services/pivotaltracker_service_spec.rb index 45b2f1068bf..a76e909d04d 100644 --- a/spec/models/project_services/pivotaltracker_service_spec.rb +++ b/spec/models/project_services/pivotaltracker_service_spec.rb @@ -40,7 +40,7 @@ describe PivotaltrackerService, models: true do name: 'Some User' }, url: 'https://example.com/commit', - message: 'commit message', + message: 'commit message' } ] } diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb index d15079b686b..1f9d3c07b51 100644 --- a/spec/models/project_services/prometheus_service_spec.rb +++ b/spec/models/project_services/prometheus_service_spec.rb @@ -6,6 +6,7 @@ describe PrometheusService, models: true, caching: true do let(:project) { create(:prometheus_project) } let(:service) { project.prometheus_service } + let(:environment_query) { Gitlab::Prometheus::Queries::EnvironmentQuery } describe "Associations" do it { is_expected.to belong_to :project } @@ -45,17 +46,18 @@ describe PrometheusService, models: true, caching: true do end end - describe '#metrics' do + describe '#environment_metrics' do let(:environment) { build_stubbed(:environment, slug: 'env-slug') } - subject { service.metrics(environment) } around do |example| Timecop.freeze { example.run } end context 'with valid data' do + subject { service.environment_metrics(environment) } + before do - stub_reactive_cache(service, prometheus_data, 'env-slug') + stub_reactive_cache(service, prometheus_data, environment_query, environment.id) end it 'returns reactive data' do @@ -64,15 +66,36 @@ describe PrometheusService, models: true, caching: true do end end + describe '#deployment_metrics' do + let(:deployment) { build_stubbed(:deployment)} + let(:deployment_query) { Gitlab::Prometheus::Queries::DeploymentQuery } + + around do |example| + Timecop.freeze { example.run } + end + + context 'with valid data' do + subject { service.deployment_metrics(deployment) } + + before do + stub_reactive_cache(service, prometheus_data, deployment_query, deployment.id) + end + + it 'returns reactive data' do + is_expected.to eq(prometheus_data.merge(deployment_time: deployment.created_at.to_i)) + end + end + end + describe '#calculate_reactive_cache' do - let(:environment) { build_stubbed(:environment, slug: 'env-slug') } + let(:environment) { create(:environment, slug: 'env-slug') } around do |example| Timecop.freeze { example.run } end subject do - service.calculate_reactive_cache(environment.slug) + service.calculate_reactive_cache(environment_query.to_s, environment.id) end context 'when service is inactive' do @@ -94,7 +117,7 @@ describe PrometheusService, models: true, caching: true do [404, 500].each do |status| context "when Prometheus responds with #{status}" do before do - stub_all_prometheus_requests(environment.slug, status: status, body: 'QUERY FAILED!') + stub_all_prometheus_requests(environment.slug, status: 
status, body: "QUERY FAILED!") end it { is_expected.to eq(success: false, result: %(#{status} - "QUERY FAILED!")) } diff --git a/spec/models/project_snippet_spec.rb b/spec/models/project_snippet_spec.rb index d9d7c0b0aaa..5fe4885eeb4 100644 --- a/spec/models/project_snippet_spec.rb +++ b/spec/models/project_snippet_spec.rb @@ -5,9 +5,6 @@ describe ProjectSnippet, models: true do it { is_expected.to belong_to(:project) } end - describe "Mass assignment" do - end - describe "Validation" do it { is_expected.to validate_presence_of(:project) } end diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index 59a2560ca06..f2b4e9070b4 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -22,6 +22,7 @@ describe Project, models: true do it { is_expected.to have_many(:protected_branches).dependent(:destroy) } it { is_expected.to have_one(:forked_project_link).dependent(:destroy) } it { is_expected.to have_one(:slack_service).dependent(:destroy) } + it { is_expected.to have_one(:microsoft_teams_service).dependent(:destroy) } it { is_expected.to have_one(:mattermost_service).dependent(:destroy) } it { is_expected.to have_one(:pushover_service).dependent(:destroy) } it { is_expected.to have_one(:asana_service).dependent(:destroy) } @@ -57,6 +58,7 @@ describe Project, models: true do it { is_expected.to have_many(:builds) } it { is_expected.to have_many(:runner_projects) } it { is_expected.to have_many(:runners) } + it { is_expected.to have_many(:active_runners) } it { is_expected.to have_many(:variables) } it { is_expected.to have_many(:triggers) } it { is_expected.to have_many(:pages_domains) } @@ -71,6 +73,7 @@ describe Project, models: true do it { is_expected.to have_many(:notification_settings).dependent(:destroy) } it { is_expected.to have_many(:forks).through(:forked_project_links) } it { is_expected.to have_many(:uploads).dependent(:destroy) } + it { is_expected.to have_many(:pipeline_schedules).dependent(:destroy) } context 'after initialized' do it "has a project_feature" do @@ -251,6 +254,34 @@ describe Project, models: true do expect(new_project.errors.full_messages.first).to eq('The project is still being deleted. 
Please try again later.') end end + + describe 'path validation' do + it 'allows paths reserved on the root namespace' do + project = build(:project, path: 'api') + + expect(project).to be_valid + end + + it 'rejects paths reserved on another level' do + project = build(:project, path: 'tree') + + expect(project).not_to be_valid + end + + it 'rejects nested paths' do + parent = create(:group, :nested, path: 'environments') + project = build(:project, path: 'folders', namespace: parent) + + expect(project).not_to be_valid + end + + it 'allows a reserved group name' do + parent = create(:group) + project = build(:project, path: 'avatar', namespace: parent) + + expect(project).to be_valid + end + end end describe 'default_scope' do @@ -702,25 +733,6 @@ describe Project, models: true do end end - describe '#open_branches' do - let(:project) { create(:project, :repository) } - - before do - project.protected_branches.create(name: 'master') - end - - it { expect(project.open_branches.map(&:name)).to include('feature') } - it { expect(project.open_branches.map(&:name)).not_to include('master') } - - it "includes branches matching a protected branch wildcard" do - expect(project.open_branches.map(&:name)).to include('feature') - - create(:protected_branch, name: 'feat*', project: project) - - expect(Project.find(project.id).open_branches.map(&:name)).to include('feature') - end - end - describe '#star_count' do it 'counts stars from multiple users' do user1 = create :user @@ -798,17 +810,19 @@ describe Project, models: true do let(:project) { create(:empty_project) } - context 'When avatar file is uploaded' do - before do - project.update_columns(avatar: 'uploads/avatar.png') - allow(project.avatar).to receive(:present?) { true } - end + context 'when avatar file is uploaded' do + let(:project) { create(:empty_project, :with_avatar) } + let(:avatar_path) { "/uploads/project/avatar/#{project.id}/dk.png" } + let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" } - let(:avatar_path) do - "/uploads/project/avatar/#{project.id}/uploads/avatar.png" - end + it 'shows correct url' do + expect(project.avatar_url).to eq(avatar_path) + expect(project.avatar_url(only_path: false)).to eq([gitlab_host, avatar_path].join) - it { should eq "http://#{Gitlab.config.gitlab.host}#{avatar_path}" } + allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host) + + expect(project.avatar_url).to eq([gitlab_host, avatar_path].join) + end end context 'When avatar file in git' do @@ -816,9 +830,7 @@ describe Project, models: true do allow(project).to receive(:avatar_in_git) { true } end - let(:avatar_path) do - "/#{project.full_path}/avatar" - end + let(:avatar_path) { "/#{project.full_path}/avatar" } it { should eq "http://#{Gitlab.config.gitlab.host}#{avatar_path}" } end @@ -961,7 +973,7 @@ describe Project, models: true do before do storages = { 'default' => { 'path' => 'tmp/tests/repositories' }, - 'picked' => { 'path' => 'tmp/tests/repositories' }, + 'picked' => { 'path' => 'tmp/tests/repositories' } } allow(Gitlab.config.repositories).to receive(:storages).and_return(storages) end @@ -1157,11 +1169,12 @@ describe Project, models: true do # Project#gitlab_shell returns a new instance of Gitlab::Shell on every # call. This makes testing a bit easier. 
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell) - allow(project).to receive(:previous_changes).and_return('path' => ['foo']) end it 'renames a repository' do + stub_container_registry_config(enabled: false) + expect(gitlab_shell).to receive(:mv_repository). ordered. with(project.repository_storage_path, "#{project.namespace.full_path}/foo", "#{project.full_path}"). @@ -1185,10 +1198,13 @@ describe Project, models: true do project.rename_repo end - context 'container registry with tags' do + context 'container registry with images' do + let(:container_repository) { create(:container_repository) } + before do stub_container_registry_config(enabled: true) - stub_container_registry_tags('tag') + stub_container_registry_tags(repository: :any, tags: ['tag']) + project.container_repositories << container_repository end subject { project.rename_repo } @@ -1291,62 +1307,6 @@ describe Project, models: true do end end - describe '#protected_branch?' do - context 'existing project' do - let(:project) { create(:project, :repository) } - - it 'returns true when the branch matches a protected branch via direct match' do - create(:protected_branch, project: project, name: "foo") - - expect(project.protected_branch?('foo')).to eq(true) - end - - it 'returns true when the branch matches a protected branch via wildcard match' do - create(:protected_branch, project: project, name: "production/*") - - expect(project.protected_branch?('production/some-branch')).to eq(true) - end - - it 'returns false when the branch does not match a protected branch via direct match' do - expect(project.protected_branch?('foo')).to eq(false) - end - - it 'returns false when the branch does not match a protected branch via wildcard match' do - create(:protected_branch, project: project, name: "production/*") - - expect(project.protected_branch?('staging/some-branch')).to eq(false) - end - end - - context "new project" do - let(:project) { create(:empty_project) } - - it 'returns false when default_protected_branch is unprotected' do - stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_NONE) - - expect(project.protected_branch?('master')).to be false - end - - it 'returns false when default_protected_branch lets developers push' do - stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_PUSH) - - expect(project.protected_branch?('master')).to be false - end - - it 'returns true when default_branch_protection does not let developers push but let developer merge branches' do - stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_MERGE) - - expect(project.protected_branch?('master')).to be true - end - - it 'returns true when default_branch_protection is in full protection' do - stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_FULL) - - expect(project.protected_branch?('master')).to be true - end - end - end - describe '#user_can_push_to_empty_repo?' 
do let(:project) { create(:empty_project) } let(:user) { create(:user) } @@ -1386,38 +1346,17 @@ describe Project, models: true do end end - describe '#container_registry_path_with_namespace' do - let(:project) { create(:empty_project, path: 'PROJECT') } - - subject { project.container_registry_path_with_namespace } - - it { is_expected.not_to eq(project.path_with_namespace) } - it { is_expected.to eq(project.path_with_namespace.downcase) } - end - - describe '#container_registry_repository' do + describe '#container_registry_url' do let(:project) { create(:empty_project) } - before { stub_container_registry_config(enabled: true) } - - subject { project.container_registry_repository } - - it { is_expected.not_to be_nil } - end - - describe '#container_registry_repository_url' do - let(:project) { create(:empty_project) } - - subject { project.container_registry_repository_url } + subject { project.container_registry_url } before { stub_container_registry_config(**registry_settings) } context 'for enabled registry' do let(:registry_settings) do - { - enabled: true, - host_port: 'example.com', - } + { enabled: true, + host_port: 'example.com' } end it { is_expected.not_to be_nil } @@ -1425,9 +1364,7 @@ describe Project, models: true do context 'for disabled registry' do let(:registry_settings) do - { - enabled: false - } + { enabled: false } end it { is_expected.to be_nil } @@ -1437,28 +1374,60 @@ describe Project, models: true do describe '#has_container_registry_tags?' do let(:project) { create(:empty_project) } - subject { project.has_container_registry_tags? } - - context 'for enabled registry' do + context 'when container registry is enabled' do before { stub_container_registry_config(enabled: true) } - context 'with tags' do - before { stub_container_registry_tags('test', 'test2') } + context 'when tags are present for multi-level registries' do + before do + create(:container_repository, project: project, name: 'image') + + stub_container_registry_tags(repository: /image/, + tags: %w[latest rc1]) + end - it { is_expected.to be_truthy } + it 'should have image tags' do + expect(project).to have_container_registry_tags + end end - context 'when no tags' do - before { stub_container_registry_tags } + context 'when tags are present for root repository' do + before do + stub_container_registry_tags(repository: project.full_path, + tags: %w[latest rc1 pre1]) + end - it { is_expected.to be_falsey } + it 'should have image tags' do + expect(project).to have_container_registry_tags + end + end + + context 'when there are no tags at all' do + before do + stub_container_registry_tags(repository: :any, tags: []) + end + + it 'should not have image tags' do + expect(project).not_to have_container_registry_tags + end end end - context 'for disabled registry' do + context 'when container registry is disabled' do before { stub_container_registry_config(enabled: false) } - it { is_expected.to be_falsey } + it 'should not have image tags' do + expect(project).not_to have_container_registry_tags + end + + it 'should not check root repository tags' do + expect(project).not_to receive(:full_path) + expect(project).not_to have_container_registry_tags + end + + it 'should iterate through container repositories' do + expect(project).to receive(:container_repositories) + expect(project).not_to have_container_registry_tags + end end end @@ -1934,11 +1903,38 @@ describe Project, models: true do describe '#pipeline_status' do let(:project) { create(:project) } it 'builds a pipeline status' do - 
expect(project.pipeline_status).to be_a(Ci::PipelineStatus) + expect(project.pipeline_status).to be_a(Gitlab::Cache::Ci::ProjectPipelineStatus) end it 'hase a loaded pipeline status' do expect(project.pipeline_status).to be_loaded end end + + describe '#append_or_update_attribute' do + let(:project) { create(:project) } + + it 'shows full error updating an invalid MR' do + error_message = 'Failed to replace merge_requests because one or more of the new records could not be saved.'\ + ' Validate fork Source project is not a fork of the target project' + + expect { project.append_or_update_attribute(:merge_requests, [create(:merge_request)]) }. + to raise_error(ActiveRecord::RecordNotSaved, error_message) + end + + it 'updates the project succesfully' do + merge_request = create(:merge_request, target_project: project, source_project: project) + + expect { project.append_or_update_attribute(:merge_requests, [merge_request]) }. + not_to raise_error + end + end + + describe '#last_repository_updated_at' do + it 'sets to created_at upon creation' do + project = create(:empty_project, created_at: 2.hours.ago) + + expect(project.last_repository_updated_at.to_i).to eq(project.created_at.to_i) + end + end end diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb index ff29f6f66ba..c5ffbda9821 100644 --- a/spec/models/project_statistics_spec.rb +++ b/spec/models/project_statistics_spec.rb @@ -35,7 +35,7 @@ describe ProjectStatistics, models: true do commit_count: 8.exabytes - 1, repository_size: 2.exabytes, lfs_objects_size: 2.exabytes, - build_artifacts_size: 4.exabytes - 1, + build_artifacts_size: 4.exabytes - 1 ) statistics.reload @@ -149,7 +149,7 @@ describe ProjectStatistics, models: true do it "sums all storage counters" do statistics.update!( repository_size: 2, - lfs_objects_size: 3, + lfs_objects_size: 3 ) statistics.reload diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb index b5b9cd024b0..969e9f7a130 100644 --- a/spec/models/project_wiki_spec.rb +++ b/spec/models/project_wiki_spec.rb @@ -213,9 +213,12 @@ describe ProjectWiki, models: true do end it 'updates project activity' do - expect(subject).to receive(:update_project_activity) - subject.create_page('Test Page', 'This is content') + + project.reload + + expect(project.last_activity_at).to be_within(1.minute).of(Time.now) + expect(project.last_repository_updated_at).to be_within(1.minute).of(Time.now) end end @@ -240,9 +243,12 @@ describe ProjectWiki, models: true do end it 'updates project activity' do - expect(subject).to receive(:update_project_activity) - subject.update_page(@gollum_page, 'Yet more content', :markdown, 'Updated page again') + + project.reload + + expect(project.last_activity_at).to be_within(1.minute).of(Time.now) + expect(project.last_repository_updated_at).to be_within(1.minute).of(Time.now) end end @@ -258,9 +264,12 @@ describe ProjectWiki, models: true do end it 'updates project activity' do - expect(subject).to receive(:update_project_activity) - subject.delete_page(@page) + + project.reload + + expect(project.last_activity_at).to be_within(1.minute).of(Time.now) + expect(project.last_repository_updated_at).to be_within(1.minute).of(Time.now) end end diff --git a/spec/models/protectable_dropdown_spec.rb b/spec/models/protectable_dropdown_spec.rb new file mode 100644 index 00000000000..4c9bade592b --- /dev/null +++ b/spec/models/protectable_dropdown_spec.rb @@ -0,0 +1,25 @@ +require 'spec_helper' + +describe ProtectableDropdown, models: true 
do + let(:project) { create(:project, :repository) } + let(:subject) { described_class.new(project, :branches) } + + describe '#protectable_ref_names' do + before do + project.protected_branches.create(name: 'master') + end + + it { expect(subject.protectable_ref_names).to include('feature') } + it { expect(subject.protectable_ref_names).not_to include('master') } + + it "includes branches matching a protected branch wildcard" do + expect(subject.protectable_ref_names).to include('feature') + + create(:protected_branch, name: 'feat*', project: project) + + subject = described_class.new(project.reload, :branches) + + expect(subject.protectable_ref_names).to include('feature') + end + end +end diff --git a/spec/models/protected_branch/merge_access_level_spec.rb b/spec/models/protected_branch/merge_access_level_spec.rb new file mode 100644 index 00000000000..1e7242e9fa8 --- /dev/null +++ b/spec/models/protected_branch/merge_access_level_spec.rb @@ -0,0 +1,5 @@ +require 'spec_helper' + +describe ProtectedBranch::MergeAccessLevel, :models do + it { is_expected.to validate_inclusion_of(:access_level).in_array([Gitlab::Access::MASTER, Gitlab::Access::DEVELOPER, Gitlab::Access::NO_ACCESS]) } +end diff --git a/spec/models/protected_branch/push_access_level_spec.rb b/spec/models/protected_branch/push_access_level_spec.rb new file mode 100644 index 00000000000..de68351198c --- /dev/null +++ b/spec/models/protected_branch/push_access_level_spec.rb @@ -0,0 +1,5 @@ +require 'spec_helper' + +describe ProtectedBranch::PushAccessLevel, :models do + it { is_expected.to validate_inclusion_of(:access_level).in_array([Gitlab::Access::MASTER, Gitlab::Access::DEVELOPER, Gitlab::Access::NO_ACCESS]) } +end diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb index 8bf0d24a128..ca347cf92c9 100644 --- a/spec/models/protected_branch_spec.rb +++ b/spec/models/protected_branch_spec.rb @@ -7,9 +7,6 @@ describe ProtectedBranch, models: true do it { is_expected.to belong_to(:project) } end - describe "Mass assignment" do - end - describe 'Validation' do it { is_expected.to validate_presence_of(:project) } it { is_expected.to validate_presence_of(:name) } @@ -113,8 +110,8 @@ describe ProtectedBranch, models: true do staging = build(:protected_branch, name: "staging") expect(ProtectedBranch.matching("production")).to be_empty - expect(ProtectedBranch.matching("production", protected_branches: [production, staging])).to include(production) - expect(ProtectedBranch.matching("production", protected_branches: [production, staging])).not_to include(staging) + expect(ProtectedBranch.matching("production", protected_refs: [production, staging])).to include(production) + expect(ProtectedBranch.matching("production", protected_refs: [production, staging])).not_to include(staging) end end @@ -132,8 +129,64 @@ describe ProtectedBranch, models: true do staging = build(:protected_branch, name: "staging/*") expect(ProtectedBranch.matching("production/some-branch")).to be_empty - expect(ProtectedBranch.matching("production/some-branch", protected_branches: [production, staging])).to include(production) - expect(ProtectedBranch.matching("production/some-branch", protected_branches: [production, staging])).not_to include(staging) + expect(ProtectedBranch.matching("production/some-branch", protected_refs: [production, staging])).to include(production) + expect(ProtectedBranch.matching("production/some-branch", protected_refs: [production, staging])).not_to include(staging) + end + end + end + + describe 
'#protected?' do
+ context 'existing project' do
+ let(:project) { create(:project, :repository) }
+
+ it 'returns true when the branch matches a protected branch via direct match' do
+ create(:protected_branch, project: project, name: "foo")
+
+ expect(ProtectedBranch.protected?(project, 'foo')).to eq(true)
+ end
+
+ it 'returns true when the branch matches a protected branch via wildcard match' do
+ create(:protected_branch, project: project, name: "production/*")
+
+ expect(ProtectedBranch.protected?(project, 'production/some-branch')).to eq(true)
+ end
+
+ it 'returns false when the branch does not match a protected branch via direct match' do
+ expect(ProtectedBranch.protected?(project, 'foo')).to eq(false)
+ end
+
+ it 'returns false when the branch does not match a protected branch via wildcard match' do
+ create(:protected_branch, project: project, name: "production/*")
+
+ expect(ProtectedBranch.protected?(project, 'staging/some-branch')).to eq(false)
+ end
+ end
+
+ context "new project" do
+ let(:project) { create(:empty_project) }
+
+ it 'returns false when default_branch_protection is unprotected' do
+ stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_NONE)
+
+ expect(ProtectedBranch.protected?(project, 'master')).to be false
+ end
+
+ it 'returns false when default_branch_protection lets developers push' do
+ stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
+
+ expect(ProtectedBranch.protected?(project, 'master')).to be false
+ end
+
+ it 'returns true when default_branch_protection does not let developers push but lets developers merge branches' do
+ stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
+
+ expect(ProtectedBranch.protected?(project, 'master')).to be true
+ end
+
+ it 'returns true when default_branch_protection is in full protection' do
+ stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_FULL)
+
+ expect(ProtectedBranch.protected?(project, 'master')).to be true
end
end
end
diff --git a/spec/models/protected_tag_spec.rb b/spec/models/protected_tag_spec.rb
new file mode 100644
index 00000000000..51353852a93
--- /dev/null
+++ b/spec/models/protected_tag_spec.rb
@@ -0,0 +1,12 @@
+require 'spec_helper'
+
+describe ProtectedTag, models: true do
+ describe 'Associations' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'Validation' do
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:name) }
+ end
+end
diff --git a/spec/models/redirect_route_spec.rb b/spec/models/redirect_route_spec.rb
new file mode 100644
index 00000000000..71827421dd7
--- /dev/null
+++ b/spec/models/redirect_route_spec.rb
@@ -0,0 +1,27 @@
+require 'rails_helper'
+
+describe RedirectRoute, models: true do
+ let(:group) { create(:group) }
+ let!(:redirect_route) { group.redirect_routes.create(path: 'gitlabb') }
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:source) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:source) }
+ it { is_expected.to validate_presence_of(:path) }
+ it { is_expected.to validate_uniqueness_of(:path) }
+ end
+
+ describe '.matching_path_and_descendants' do
+ let!(:redirect2) { group.redirect_routes.create(path: 'gitlabb/test') }
+ let!(:redirect3) { group.redirect_routes.create(path: 'gitlabb/test/foo') }
+ let!(:redirect4) { group.redirect_routes.create(path: 'gitlabb/test/foo/bar') }
+ let!(:redirect5) {
group.redirect_routes.create(path: 'gitlabb/test/baz') } + + it 'returns correct routes' do + expect(RedirectRoute.matching_path_and_descendants('gitlabb/test')).to match_array([redirect2, redirect3, redirect4, redirect5]) + end + end +end diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb index 585b87b828d..718b7d5e86b 100644 --- a/spec/models/repository_spec.rb +++ b/spec/models/repository_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe Repository, models: true do include RepoHelpers - TestBlob = Struct.new(:name) + TestBlob = Struct.new(:path) let(:project) { create(:project, :repository) } let(:repository) { project.repository } @@ -24,21 +24,8 @@ describe Repository, models: true do repository.commit(merge_commit_id) end - let(:author_email) { FFaker::Internet.email } - - # I have to remove periods from the end of the name - # This happened when the user's name had a suffix (i.e. "Sr.") - # This seems to be what git does under the hood. For example, this commit: - # - # $ git commit --author='Foo Sr. <foo@example.com>' -m 'Where's my trailing period?' - # - # results in this: - # - # $ git show --pretty - # ... - # Author: Foo Sr <foo@example.com> - # ... - let(:author_name) { FFaker::Name.name.chomp("\.") } + let(:author_email) { 'user@example.org' } + let(:author_name) { 'John Doe' } describe '#branch_names_contains' do subject { repository.branch_names_contains(sample_commit.id) } @@ -123,22 +110,11 @@ describe Repository, models: true do end describe '#ref_name_for_sha' do - context 'ref found' do - it 'returns the ref' do - allow_any_instance_of(Gitlab::Popen).to receive(:popen). - and_return(["b8d95eb4969eefacb0a58f6a28f6803f8070e7b9 commit\trefs/environments/production/77\n", 0]) - - expect(repository.ref_name_for_sha('bla', '0' * 40)).to eq 'refs/environments/production/77' - end - end - - context 'ref not found' do - it 'returns nil' do - allow_any_instance_of(Gitlab::Popen).to receive(:popen). - and_return(["", 0]) + it 'returns the ref' do + allow(repository.raw_repository).to receive(:ref_name_for_sha). 
+ and_return('refs/environments/production/77') - expect(repository.ref_name_for_sha('bla', '0' * 40)).to eq nil - end + expect(repository.ref_name_for_sha('bla', '0' * 40)).to eq 'refs/environments/production/77' end end @@ -184,6 +160,27 @@ describe Repository, models: true do end end + describe '#commits' do + it 'sets follow when path is a single path' do + expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: true)).and_call_original.twice + + repository.commits('master', path: 'README.md') + repository.commits('master', path: ['README.md']) + end + + it 'does not set follow when path is multiple paths' do + expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original + + repository.commits('master', path: ['README.md', 'CHANGELOG']) + end + + it 'does not set follow when there are no paths' do + expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original + + repository.commits('master') + end + end + describe '#find_commits_by_message' do it 'returns commits with messages containing a given string' do commit_ids = repository.find_commits_by_message('submodule').map(&:id) @@ -557,31 +554,31 @@ describe Repository, models: true do it 'accepts changelog' do expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('changelog')]) - expect(repository.changelog.name).to eq('changelog') + expect(repository.changelog.path).to eq('changelog') end it 'accepts news instead of changelog' do expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('news')]) - expect(repository.changelog.name).to eq('news') + expect(repository.changelog.path).to eq('news') end it 'accepts history instead of changelog' do expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('history')]) - expect(repository.changelog.name).to eq('history') + expect(repository.changelog.path).to eq('history') end it 'accepts changes instead of changelog' do expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('changes')]) - expect(repository.changelog.name).to eq('changes') + expect(repository.changelog.path).to eq('changes') end it 'is case-insensitive' do expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('CHANGELOG')]) - expect(repository.changelog.name).to eq('CHANGELOG') + expect(repository.changelog.path).to eq('CHANGELOG') end end @@ -616,7 +613,7 @@ describe Repository, models: true do repository.create_file(user, 'LICENSE', 'Copyright!', message: 'Add LICENSE', branch_name: 'master') - expect(repository.license_blob.name).to eq('LICENSE') + expect(repository.license_blob.path).to eq('LICENSE') end %w[LICENSE LICENCE LiCensE LICENSE.md LICENSE.foo COPYING COPYING.md].each do |filename| @@ -646,7 +643,7 @@ describe Repository, models: true do expect(repository.license_key).to be_nil end - it 'detects license file with no recognizable open-source license content' do + it 'returns nil when the content is not recognizable' do repository.create_file(user, 'LICENSE', 'Copyright!', message: 'Add LICENSE', branch_name: 'master') @@ -662,12 +659,45 @@ describe Repository, models: true do end end + describe '#license' do + before do + repository.delete_file(user, 'LICENSE', + message: 'Remove LICENSE', branch_name: 'master') + end + + it 'returns nil when no license is detected' do + expect(repository.license).to be_nil + end + + it 'returns nil when the repository does not exist' do + expect(repository).to receive(:exists?).and_return(false) + + 
expect(repository.license).to be_nil + end + + it 'returns nil when the content is not recognizable' do + repository.create_file(user, 'LICENSE', 'Copyright!', + message: 'Add LICENSE', branch_name: 'master') + + expect(repository.license).to be_nil + end + + it 'returns the license' do + license = Licensee::License.new('mit') + repository.create_file(user, 'LICENSE', + license.content, + message: 'Add LICENSE', branch_name: 'master') + + expect(repository.license).to eq(license) + end + end + describe "#gitlab_ci_yml", caching: true do it 'returns valid file' do files = [TestBlob.new('file'), TestBlob.new('.gitlab-ci.yml'), TestBlob.new('copying')] expect(repository.tree).to receive(:blobs).and_return(files) - expect(repository.gitlab_ci_yml.name).to eq('.gitlab-ci.yml') + expect(repository.gitlab_ci_yml.path).to eq('.gitlab-ci.yml') end it 'returns nil if not exists' do @@ -1090,21 +1120,33 @@ describe Repository, models: true do end describe '#merge' do - it 'merges the code and return the commit id' do + let(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master', source_project: project) } + + let(:commit_options) do + author = repository.user_to_committer(user) + { message: 'Test \r\n\r\n message', committer: author, author: author } + end + + it 'merges the code and returns the commit id' do expect(merge_commit).to be_present expect(repository.blob_at(merge_commit.id, 'files/ruby/feature.rb')).to be_present end it 'sets the `in_progress_merge_commit_sha` flag for the given merge request' do - merge_request = create(:merge_request, source_branch: 'feature', target_branch: 'master', source_project: project) - - merge_commit_id = repository.merge(user, - merge_request.diff_head_sha, - merge_request, - commit_options) + merge_commit_id = merge(repository, user, merge_request, commit_options) expect(merge_request.in_progress_merge_commit_sha).to eq(merge_commit_id) end + + it 'removes carriage returns from commit message' do + merge_commit_id = merge(repository, user, merge_request, commit_options) + + expect(repository.commit(merge_commit_id).message).to eq(commit_options[:message].delete("\r")) + end + + def merge(repository, user, merge_request, options = {}) + repository.merge(user, merge_request.diff_head_sha, merge_request, options) + end end describe '#revert' do @@ -1272,7 +1314,6 @@ describe Repository, models: true do :changelog, :license, :contributing, - :version, :gitignore, :koding, :gitlab_ci, @@ -1293,19 +1334,9 @@ describe Repository, models: true do end end - describe '#before_import' do - it 'flushes the repository caches' do - expect(repository).to receive(:expire_content_cache) - - repository.before_import - end - end - describe '#after_import' do it 'flushes and builds the cache' do expect(repository).to receive(:expire_content_cache) - expect(repository).to receive(:expire_tags_cache) - expect(repository).to receive(:expire_branches_cache) repository.after_import end @@ -1382,12 +1413,22 @@ describe Repository, models: true do describe '#branch_count' do it 'returns the number of branches' do expect(repository.branch_count).to be_an(Integer) + + # NOTE: Until rugged goes away, make sure rugged and gitaly are in sync + rugged_count = repository.raw_repository.rugged.branches.count + + expect(repository.branch_count).to eq(rugged_count) end end describe '#tag_count' do it 'returns the number of tags' do expect(repository.tag_count).to be_an(Integer) + + # NOTE: Until rugged goes away, make sure rugged and gitaly are in sync + 
rugged_count = repository.raw_repository.rugged.tags.count + + expect(repository.tag_count).to eq(rugged_count) end end @@ -1607,15 +1648,25 @@ describe Repository, models: true do describe '#readme', caching: true do context 'with a non-existing repository' do it 'returns nil' do - expect(repository).to receive(:tree).with(:head).and_return(nil) + allow(repository).to receive(:tree).with(:head).and_return(nil) expect(repository.readme).to be_nil end end context 'with an existing repository' do - it 'returns the README' do - expect(repository.readme).to be_an_instance_of(Gitlab::Git::Blob) + context 'when no README exists' do + it 'returns nil' do + allow_any_instance_of(Tree).to receive(:readme).and_return(nil) + + expect(repository.readme).to be_nil + end + end + + context 'when a README exists' do + it 'returns the README' do + expect(repository.readme).to be_an_instance_of(ReadmeBlob) + end end end end @@ -1806,11 +1857,12 @@ describe Repository, models: true do describe '#refresh_method_caches' do it 'refreshes the caches of the given types' do expect(repository).to receive(:expire_method_caches). - with(%i(readme license_blob license_key)) + with(%i(rendered_readme license_blob license_key license)) - expect(repository).to receive(:readme) + expect(repository).to receive(:rendered_readme) expect(repository).to receive(:license_blob) expect(repository).to receive(:license_key) + expect(repository).to receive(:license) repository.refresh_method_caches(%i(readme license)) end @@ -1851,4 +1903,22 @@ describe Repository, models: true do end end end + + describe '#is_ancestor?' do + context 'Gitaly is_ancestor feature enabled' do + let(:commit) { repository.commit } + let(:ancestor) { commit.parents.first } + + before do + allow(Gitlab::GitalyClient).to receive(:enabled?).and_return(true) + allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:is_ancestor).and_return(true) + end + + it "asks Gitaly server if it's an ancestor" do + expect_any_instance_of(Gitlab::GitalyClient::Commit).to receive(:is_ancestor).with(ancestor.id, commit.id) + + repository.is_ancestor?(ancestor.id, commit.id) + end + end + end end diff --git a/spec/models/route_spec.rb b/spec/models/route_spec.rb index 171a51fcc5b..c1fe1b06c52 100644 --- a/spec/models/route_spec.rb +++ b/spec/models/route_spec.rb @@ -1,19 +1,43 @@ require 'spec_helper' describe Route, models: true do - let!(:group) { create(:group, path: 'git_lab', name: 'git_lab') } - let!(:route) { group.route } + let(:group) { create(:group, path: 'git_lab', name: 'git_lab') } + let(:route) { group.route } describe 'relationships' do it { is_expected.to belong_to(:source) } end describe 'validations' do + before { route } it { is_expected.to validate_presence_of(:source) } it { is_expected.to validate_presence_of(:path) } it { is_expected.to validate_uniqueness_of(:path) } end + describe 'callbacks' do + context 'after update' do + it 'calls #create_redirect_for_old_path' do + expect(route).to receive(:create_redirect_for_old_path) + route.update_attributes(path: 'foo') + end + + it 'calls #delete_conflicting_redirects' do + expect(route).to receive(:delete_conflicting_redirects) + route.update_attributes(path: 'foo') + end + end + + context 'after create' do + it 'calls #delete_conflicting_redirects' do + route.destroy + new_route = Route.new(source: group, path: group.path) + expect(new_route).to receive(:delete_conflicting_redirects) + new_route.save! 
+ end + end + end + describe '.inside_path' do let!(:nested_group) { create(:group, path: 'test', name: 'test', parent: group) } let!(:deep_nested_group) { create(:group, path: 'foo', name: 'foo', parent: nested_group) } @@ -37,7 +61,7 @@ describe Route, models: true do context 'when route name is set' do before { route.update_attributes(path: 'bar') } - it "updates children routes with new path" do + it 'updates children routes with new path' do expect(described_class.exists?(path: 'bar')).to be_truthy expect(described_class.exists?(path: 'bar/test')).to be_truthy expect(described_class.exists?(path: 'bar/test/foo')).to be_truthy @@ -56,10 +80,24 @@ describe Route, models: true do expect(route.update_attributes(path: 'bar')).to be_truthy end end + + context 'when conflicting redirects exist' do + let!(:conflicting_redirect1) { route.create_redirect('bar/test') } + let!(:conflicting_redirect2) { route.create_redirect('bar/test/foo') } + let!(:conflicting_redirect3) { route.create_redirect('gitlab-org') } + + it 'deletes the conflicting redirects' do + route.update_attributes(path: 'bar') + + expect(RedirectRoute.exists?(path: 'bar/test')).to be_falsey + expect(RedirectRoute.exists?(path: 'bar/test/foo')).to be_falsey + expect(RedirectRoute.exists?(path: 'gitlab-org')).to be_truthy + end + end end context 'name update' do - it "updates children routes with new path" do + it 'updates children routes with new path' do route.update_attributes(name: 'bar') expect(described_class.exists?(name: 'bar')).to be_truthy @@ -77,4 +115,72 @@ describe Route, models: true do end end end + + describe '#create_redirect_for_old_path' do + context 'if the path changed' do + it 'creates a RedirectRoute for the old path' do + redirect_scope = route.source.redirect_routes.where(path: 'git_lab') + expect(redirect_scope.exists?).to be_falsey + route.path = 'new-path' + route.save! 
+ expect(redirect_scope.exists?).to be_truthy + end + end + end + + describe '#create_redirect' do + it 'creates a RedirectRoute with the same source' do + redirect_route = route.create_redirect('foo') + expect(redirect_route).to be_a(RedirectRoute) + expect(redirect_route).to be_persisted + expect(redirect_route.source).to eq(route.source) + expect(redirect_route.path).to eq('foo') + end + end + + describe '#delete_conflicting_redirects' do + context 'when a redirect route with the same path exists' do + let!(:redirect1) { route.create_redirect(route.path) } + + it 'deletes the redirect' do + route.delete_conflicting_redirects + expect(route.conflicting_redirects).to be_empty + end + + context 'when redirect routes with paths descending from the route path exists' do + let!(:redirect2) { route.create_redirect("#{route.path}/foo") } + let!(:redirect3) { route.create_redirect("#{route.path}/foo/bar") } + let!(:redirect4) { route.create_redirect("#{route.path}/baz/quz") } + let!(:other_redirect) { route.create_redirect("other") } + + it 'deletes all redirects with paths that descend from the route path' do + route.delete_conflicting_redirects + expect(route.conflicting_redirects).to be_empty + end + end + end + end + + describe '#conflicting_redirects' do + context 'when a redirect route with the same path exists' do + let!(:redirect1) { route.create_redirect(route.path) } + + it 'returns the redirect route' do + expect(route.conflicting_redirects).to be_an(ActiveRecord::Relation) + expect(route.conflicting_redirects).to match_array([redirect1]) + end + + context 'when redirect routes with paths descending from the route path exists' do + let!(:redirect2) { route.create_redirect("#{route.path}/foo") } + let!(:redirect3) { route.create_redirect("#{route.path}/foo/bar") } + let!(:redirect4) { route.create_redirect("#{route.path}/baz/quz") } + let!(:other_redirect) { route.create_redirect("other") } + + it 'returns the redirect routes' do + expect(route.conflicting_redirects).to be_an(ActiveRecord::Relation) + expect(route.conflicting_redirects).to match_array([redirect1, redirect2, redirect3, redirect4]) + end + end + end + end end diff --git a/spec/models/sent_notification_spec.rb b/spec/models/sent_notification_spec.rb new file mode 100644 index 00000000000..5710edbc9e0 --- /dev/null +++ b/spec/models/sent_notification_spec.rb @@ -0,0 +1,174 @@ +require 'spec_helper' + +describe SentNotification, model: true do + describe 'validation' do + describe 'note validity' do + context "when the project doesn't match the noteable's project" do + subject { build(:sent_notification, noteable: create(:issue)) } + + it "is invalid" do + expect(subject).not_to be_valid + end + end + + context "when the project doesn't match the discussion project" do + let(:discussion_id) { create(:note).discussion_id } + subject { build(:sent_notification, in_reply_to_discussion_id: discussion_id) } + + it "is invalid" do + expect(subject).not_to be_valid + end + end + + context "when the noteable project and discussion project match" do + let(:project) { create(:project) } + let(:issue) { create(:issue, project: project) } + let(:discussion_id) { create(:note, project: project, noteable: issue).discussion_id } + subject { build(:sent_notification, project: project, noteable: issue, in_reply_to_discussion_id: discussion_id) } + + it "is valid" do + expect(subject).to be_valid + end + end + end + end + + describe '.record' do + let(:user) { create(:user) } + let(:issue) { create(:issue) } + + it 'creates a new 
SentNotification' do + expect { described_class.record(issue, user.id) }.to change { SentNotification.count }.by(1) + end + end + + describe '.record_note' do + let(:user) { create(:user) } + let(:note) { create(:diff_note_on_merge_request) } + + it 'creates a new SentNotification' do + expect { described_class.record_note(note, user.id) }.to change { SentNotification.count }.by(1) + end + end + + describe '#create_reply' do + context 'for issue' do + let(:issue) { create(:issue) } + subject { described_class.record(issue, issue.author.id) } + + it 'creates a comment on the issue' do + note = subject.create_reply('Test') + expect(note.in_reply_to?(issue)).to be_truthy + end + end + + context 'for issue comment' do + let(:note) { create(:note_on_issue) } + subject { described_class.record_note(note, note.author.id) } + + it 'creates a comment on the issue' do + new_note = subject.create_reply('Test') + expect(new_note.in_reply_to?(note)).to be_truthy + expect(new_note.discussion_id).not_to eq(note.discussion_id) + end + end + + context 'for issue discussion' do + let(:note) { create(:discussion_note_on_issue) } + subject { described_class.record_note(note, note.author.id) } + + it 'creates a reply on the discussion' do + new_note = subject.create_reply('Test') + expect(new_note.in_reply_to?(note)).to be_truthy + expect(new_note.discussion_id).to eq(note.discussion_id) + end + end + + context 'for merge request' do + let(:merge_request) { create(:merge_request) } + subject { described_class.record(merge_request, merge_request.author.id) } + + it 'creates a comment on the merge_request' do + note = subject.create_reply('Test') + expect(note.in_reply_to?(merge_request)).to be_truthy + end + end + + context 'for merge request comment' do + let(:note) { create(:note_on_merge_request) } + subject { described_class.record_note(note, note.author.id) } + + it 'creates a comment on the merge request' do + new_note = subject.create_reply('Test') + expect(new_note.in_reply_to?(note)).to be_truthy + expect(new_note.discussion_id).not_to eq(note.discussion_id) + end + end + + context 'for merge request diff discussion' do + let(:note) { create(:diff_note_on_merge_request) } + subject { described_class.record_note(note, note.author.id) } + + it 'creates a reply on the discussion' do + new_note = subject.create_reply('Test') + expect(new_note.in_reply_to?(note)).to be_truthy + expect(new_note.discussion_id).to eq(note.discussion_id) + end + end + + context 'for merge request non-diff discussion' do + let(:note) { create(:discussion_note_on_merge_request) } + subject { described_class.record_note(note, note.author.id) } + + it 'creates a reply on the discussion' do + new_note = subject.create_reply('Test') + expect(new_note.in_reply_to?(note)).to be_truthy + expect(new_note.discussion_id).to eq(note.discussion_id) + end + end + + context 'for commit' do + let(:project) { create(:project) } + let(:commit) { project.commit } + subject { described_class.record(commit, project.creator.id) } + + it 'creates a comment on the commit' do + note = subject.create_reply('Test') + expect(note.in_reply_to?(commit)).to be_truthy + end + end + + context 'for commit comment' do + let(:note) { create(:note_on_commit) } + subject { described_class.record_note(note, note.author.id) } + + it 'creates a comment on the commit' do + new_note = subject.create_reply('Test') + expect(new_note.in_reply_to?(note)).to be_truthy + expect(new_note.discussion_id).not_to eq(note.discussion_id) + end + end + + context 'for commit diff 
discussion' do + let(:note) { create(:diff_note_on_commit) } + subject { described_class.record_note(note, note.author.id) } + + it 'creates a reply on the discussion' do + new_note = subject.create_reply('Test') + expect(new_note.in_reply_to?(note)).to be_truthy + expect(new_note.discussion_id).to eq(note.discussion_id) + end + end + + context 'for commit non-diff discussion' do + let(:note) { create(:discussion_note_on_commit) } + subject { described_class.record_note(note, note.author.id) } + + it 'creates a reply on the discussion' do + new_note = subject.create_reply('Test') + expect(new_note.in_reply_to?(note)).to be_truthy + expect(new_note.discussion_id).to eq(note.discussion_id) + end + end + end +end diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb index 0e2f07e945f..134882648b9 100644 --- a/spec/models/service_spec.rb +++ b/spec/models/service_spec.rb @@ -6,44 +6,53 @@ describe Service, models: true do it { is_expected.to have_one :service_hook } end + describe 'Validations' do + it { is_expected.to validate_presence_of(:type) } + end + describe "Test Button" do - before do - @service = Service.new - end + describe '#can_test?' do + let(:service) { create(:service, project: project) } - describe "Testable" do - let(:project) { create(:project, :repository) } + context 'when repository is not empty' do + let(:project) { create(:project, :repository) } - before do - allow(@service).to receive(:project).and_return(project) - @testable = @service.can_test? + it 'returns true' do + expect(service.can_test?).to be true + end end - describe '#can_test?' do - it { expect(@testable).to eq(true) } + context 'when repository is empty' do + let(:project) { create(:empty_project) } + + it 'returns true' do + expect(service.can_test?).to be true + end end + end + + describe '#test' do + let(:data) { 'test' } + let(:service) { create(:service, project: project) } - describe '#test' do - let(:data) { 'test' } + context 'when repository is not empty' do + let(:project) { create(:project, :repository) } it 'test runs execute' do - expect(@service).to receive(:execute).with(data) + expect(service).to receive(:execute).with(data) - @service.test(data) + service.test(data) end end - end - describe "With commits" do - let(:project) { create(:project, :repository) } + context 'when repository is empty' do + let(:project) { create(:empty_project) } - before do - allow(@service).to receive(:project).and_return(project) - @testable = @service.can_test? - end + it 'test runs execute' do + expect(service).to receive(:execute).with(data) - describe '#can_test?' 
do - it { expect(@testable).to eq(true) } + service.test(data) + end end end end diff --git a/spec/models/snippet_blob_spec.rb b/spec/models/snippet_blob_spec.rb new file mode 100644 index 00000000000..120b390586b --- /dev/null +++ b/spec/models/snippet_blob_spec.rb @@ -0,0 +1,47 @@ +require 'spec_helper' + +describe SnippetBlob, models: true do + let(:snippet) { create(:snippet) } + + subject { described_class.new(snippet) } + + describe '#id' do + it 'returns the snippet ID' do + expect(subject.id).to eq(snippet.id) + end + end + + describe '#name' do + it 'returns the snippet file name' do + expect(subject.name).to eq(snippet.file_name) + end + end + + describe '#size' do + it 'returns the data size' do + expect(subject.size).to eq(subject.data.bytesize) + end + end + + describe '#data' do + it 'returns the snippet content' do + expect(subject.data).to eq(snippet.content) + end + end + + describe '#rendered_markup' do + context 'when the content is GFM' do + let(:snippet) { create(:snippet, file_name: 'file.md') } + + it 'returns the rendered GFM' do + expect(subject.rendered_markup).to eq(snippet.content_html) + end + end + + context 'when the content is not GFM' do + it 'returns nil' do + expect(subject.rendered_markup).to be_nil + end + end + end +end diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb index 8095d01b69e..1e5c96fe593 100644 --- a/spec/models/snippet_spec.rb +++ b/spec/models/snippet_spec.rb @@ -5,7 +5,6 @@ describe Snippet, models: true do subject { described_class } it { is_expected.to include_module(Gitlab::VisibilityLevel) } - it { is_expected.to include_module(Linguist::BlobHelper) } it { is_expected.to include_module(Participable) } it { is_expected.to include_module(Referable) } it { is_expected.to include_module(Sortable) } @@ -132,46 +131,6 @@ describe Snippet, models: true do end end - describe '.accessible_to' do - let(:author) { create(:author) } - let(:project) { create(:empty_project) } - - let!(:public_snippet) { create(:snippet, :public) } - let!(:internal_snippet) { create(:snippet, :internal) } - let!(:private_snippet) { create(:snippet, :private, author: author) } - - let!(:project_public_snippet) { create(:snippet, :public, project: project) } - let!(:project_internal_snippet) { create(:snippet, :internal, project: project) } - let!(:project_private_snippet) { create(:snippet, :private, project: project) } - - it 'returns only public snippets when user is blank' do - expect(described_class.accessible_to(nil)).to match_array [public_snippet, project_public_snippet] - end - - it 'returns only public, and internal snippets for regular users' do - user = create(:user) - - expect(described_class.accessible_to(user)).to match_array [public_snippet, internal_snippet, project_public_snippet, project_internal_snippet] - end - - it 'returns public, internal snippets and project private snippets for project members' do - member = create(:user) - project.team << [member, :developer] - - expect(described_class.accessible_to(member)).to match_array [public_snippet, internal_snippet, project_public_snippet, project_internal_snippet, project_private_snippet] - end - - it 'returns private snippets where the user is the author' do - expect(described_class.accessible_to(author)).to match_array [public_snippet, internal_snippet, private_snippet, project_public_snippet, project_internal_snippet] - end - - it 'returns all snippets when for admins' do - admin = create(:admin) - - expect(described_class.accessible_to(admin)).to match_array 
[public_snippet, internal_snippet, private_snippet, project_public_snippet, project_internal_snippet, project_private_snippet] - end - end - describe '#participants' do let(:project) { create(:empty_project, :public) } let(:snippet) { create(:snippet, content: 'foo', project: project) } @@ -241,4 +200,16 @@ describe Snippet, models: true do end end end + + describe '#blob' do + let(:snippet) { create(:snippet) } + + it 'returns a blob representing the snippet data' do + blob = snippet.blob + + expect(blob).to be_a(Blob) + expect(blob.path).to eq(snippet.file_name) + expect(blob.data).to eq(snippet.content) + end + end end diff --git a/spec/models/spam_log_spec.rb b/spec/models/spam_log_spec.rb index c4ec7625cb0..838fba6c92d 100644 --- a/spec/models/spam_log_spec.rb +++ b/spec/models/spam_log_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' describe SpamLog, models: true do + let(:admin) { create(:admin) } + describe 'associations' do it { is_expected.to belong_to(:user) } end @@ -13,13 +15,18 @@ describe SpamLog, models: true do it 'blocks the user' do spam_log = build(:spam_log) - expect { spam_log.remove_user }.to change { spam_log.user.blocked? }.to(true) + expect { spam_log.remove_user(deleted_by: admin) }.to change { spam_log.user.blocked? }.to(true) end it 'removes the user' do spam_log = build(:spam_log) + user = spam_log.user + + Sidekiq::Testing.inline! do + spam_log.remove_user(deleted_by: admin) + end - expect { spam_log.remove_user }.to change { User.count }.by(-1) + expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound) end end end diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb index 581305ad39f..3f80e1ac534 100644 --- a/spec/models/todo_spec.rb +++ b/spec/models/todo_spec.rb @@ -125,4 +125,50 @@ describe Todo, models: true do expect(subject.target_reference).to eq issue.to_reference(full: true) end end + + describe '#self_added?' do + let(:user_1) { build(:user) } + + before do + subject.user = user_1 + end + + it 'is true when the user is the author' do + subject.author = user_1 + + expect(subject).to be_self_added + end + + it 'is false when the user is not the author' do + subject.author = build(:user) + + expect(subject).not_to be_self_added + end + end + + describe '#self_assigned?' 
do + let(:user_1) { build(:user) } + + before do + subject.user = user_1 + subject.author = user_1 + subject.action = Todo::ASSIGNED + end + + it 'is true when todo is ASSIGNED and self_added' do + expect(subject).to be_self_assigned + end + + it 'is false when the todo is not ASSIGNED' do + subject.action = Todo::MENTIONED + + expect(subject).not_to be_self_assigned + end + + it 'is false when todo is not self_added' do + subject.author = build(:user) + + expect(subject).not_to be_self_assigned + end + end end diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index a9e37be1157..6a15830a15c 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -24,11 +24,8 @@ describe User, models: true do it { is_expected.to have_many(:recent_events).class_name('Event') } it { is_expected.to have_many(:issues).dependent(:restrict_with_exception) } it { is_expected.to have_many(:notes).dependent(:destroy) } - it { is_expected.to have_many(:assigned_issues).dependent(:nullify) } it { is_expected.to have_many(:merge_requests).dependent(:destroy) } - it { is_expected.to have_many(:assigned_merge_requests).dependent(:nullify) } it { is_expected.to have_many(:identities).dependent(:destroy) } - it { is_expected.to have_one(:abuse_report) } it { is_expected.to have_many(:spam_logs).dependent(:destroy) } it { is_expected.to have_many(:todos).dependent(:destroy) } it { is_expected.to have_many(:award_emoji).dependent(:destroy) } @@ -37,6 +34,34 @@ describe User, models: true do it { is_expected.to have_many(:pipelines).dependent(:nullify) } it { is_expected.to have_many(:chat_names).dependent(:destroy) } it { is_expected.to have_many(:uploads).dependent(:destroy) } + it { is_expected.to have_many(:reported_abuse_reports).dependent(:destroy).class_name('AbuseReport') } + + describe "#abuse_report" do + let(:current_user) { create(:user) } + let(:other_user) { create(:user) } + + it { is_expected.to have_one(:abuse_report) } + + it "refers to the abuse report whose user_id is the current user" do + abuse_report = create(:abuse_report, reporter: other_user, user: current_user) + + expect(current_user.abuse_report).to eq(abuse_report) + end + + it "does not refer to the abuse report whose reporter_id is the current user" do + create(:abuse_report, reporter: current_user, user: other_user) + + expect(current_user.abuse_report).to be_nil + end + + it "does not update the user_id of an abuse report when the user is updated" do + abuse_report = create(:abuse_report, reporter: current_user, user: other_user) + + current_user.block + + expect(abuse_report.reload.user).to eq(other_user) + end + end describe '#group_members' do it 'does not include group memberships for which user is a requester' do @@ -72,6 +97,18 @@ describe User, models: true do expect(user.errors.values).to eq [['dashboard is a reserved name']] end + it 'allows child names' do + user = build(:user, username: 'avatar') + + expect(user).to be_valid + end + + it 'allows wildcard names' do + user = build(:user, username: 'blob') + + expect(user).to be_valid + end + it 'validates uniqueness' do expect(subject).to validate_uniqueness_of(:username).case_insensitive end @@ -288,7 +325,7 @@ describe User, models: true do end describe "Respond to" do - it { is_expected.to respond_to(:is_admin?) } + it { is_expected.to respond_to(:admin?) } it { is_expected.to respond_to(:name) } it { is_expected.to respond_to(:private_token) } it { is_expected.to respond_to(:external?) 
} @@ -307,6 +344,35 @@ describe User, models: true do end end + describe '#update_tracked_fields!', :redis do + let(:request) { OpenStruct.new(remote_ip: "127.0.0.1") } + let(:user) { create(:user) } + + it 'writes trackable attributes' do + expect do + user.update_tracked_fields!(request) + end.to change { user.reload.current_sign_in_at } + end + + it 'does not write trackable attributes when called a second time within the hour' do + user.update_tracked_fields!(request) + + expect do + user.update_tracked_fields!(request) + end.not_to change { user.reload.current_sign_in_at } + end + + it 'writes trackable attributes for a different user' do + user2 = create(:user) + + user.update_tracked_fields!(request) + + expect do + user2.update_tracked_fields!(request) + end.to change { user2.reload.current_sign_in_at } + end + end + shared_context 'user keys' do let(:user) { create(:user) } let!(:key) { create(:key, user: user) } @@ -559,7 +625,7 @@ describe User, models: true do describe 'normal user' do let(:user) { create(:user, name: 'John Smith') } - it { expect(user.is_admin?).to be_falsey } + it { expect(user.admin?).to be_falsey } it { expect(user.require_ssh_key?).to be_truthy } it { expect(user.can_create_group?).to be_truthy } it { expect(user.can_create_project?).to be_truthy } @@ -610,7 +676,7 @@ describe User, models: true do protocol_and_expectation = { 'http' => false, 'ssh' => true, - '' => true, + '' => true } protocol_and_expectation.each do |protocol, expected| @@ -812,6 +878,75 @@ describe User, models: true do end end + describe '.find_by_full_path' do + let!(:user) { create(:user) } + + context 'with a route matching the given path' do + let!(:route) { user.namespace.route } + + it 'returns the user' do + expect(User.find_by_full_path(route.path)).to eq(user) + end + + it 'is case-insensitive' do + expect(User.find_by_full_path(route.path.upcase)).to eq(user) + expect(User.find_by_full_path(route.path.downcase)).to eq(user) + end + end + + context 'with a redirect route matching the given path' do + let!(:redirect_route) { user.namespace.redirect_routes.create(path: 'foo') } + + context 'without the follow_redirects option' do + it 'returns nil' do + expect(User.find_by_full_path(redirect_route.path)).to eq(nil) + end + end + + context 'with the follow_redirects option set to true' do + it 'returns the user' do + expect(User.find_by_full_path(redirect_route.path, follow_redirects: true)).to eq(user) + end + + it 'is case-insensitive' do + expect(User.find_by_full_path(redirect_route.path.upcase, follow_redirects: true)).to eq(user) + expect(User.find_by_full_path(redirect_route.path.downcase, follow_redirects: true)).to eq(user) + end + end + end + + context 'without a route or a redirect route matching the given path' do + context 'without the follow_redirects option' do + it 'returns nil' do + expect(User.find_by_full_path('unknown')).to eq(nil) + end + end + context 'with the follow_redirects option set to true' do + it 'returns nil' do + expect(User.find_by_full_path('unknown', follow_redirects: true)).to eq(nil) + end + end + end + + context 'with a group route matching the given path' do + context 'when the group namespace has an owner_id (legacy data)' do + let!(:group) { create(:group, path: 'group_path', owner: user) } + + it 'returns nil' do + expect(User.find_by_full_path('group_path')).to eq(nil) + end + end + + context 'when the group namespace does not have an owner_id' do + let!(:group) { create(:group, path: 'group_path') } + + it 'returns nil' do + 
expect(User.find_by_full_path('group_path')).to eq(nil) + end + end + end + end + describe 'all_ssh_keys' do it { is_expected.to have_many(:keys).dependent(:destroy) } @@ -837,6 +972,24 @@ describe User, models: true do end end + describe '#avatar_url' do + let(:user) { create(:user, :with_avatar) } + + context 'when avatar file is uploaded' do + let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" } + let(:avatar_path) { "/uploads/user/avatar/#{user.id}/dk.png" } + + it 'shows correct avatar url' do + expect(user.avatar_url).to eq(avatar_path) + expect(user.avatar_url(only_path: false)).to eq([gitlab_host, avatar_path].join) + + allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host) + + expect(user.avatar_url).to eq([gitlab_host, avatar_path].join) + end + end + end + describe '#requires_ldap_check?' do let(:user) { User.new } @@ -1407,6 +1560,17 @@ describe User, models: true do it { expect(user.nested_groups).to eq([nested_group]) } end + describe '#all_expanded_groups' do + let!(:user) { create(:user) } + let!(:group) { create(:group) } + let!(:nested_group_1) { create(:group, parent: group) } + let!(:nested_group_2) { create(:group, parent: group) } + + before { nested_group_1.add_owner(user) } + + it { expect(user.all_expanded_groups).to match_array [group, nested_group_1] } + end + describe '#nested_groups_projects' do let!(:user) { create(:user) } let!(:group) { create(:group) } @@ -1520,5 +1684,135 @@ describe User, models: true do expect(ghost.email).to eq('ghost1@example.com') end end + + context 'when a domain whitelist is in place' do + before do + stub_application_setting(domain_whitelist: ['gitlab.com']) + end + + it 'creates a ghost user' do + expect(User.ghost).to be_persisted + end + end + end + + describe '#update_two_factor_requirement' do + let(:user) { create :user } + + context 'with 2FA requirement on groups' do + let(:group1) { create :group, require_two_factor_authentication: true, two_factor_grace_period: 23 } + let(:group2) { create :group, require_two_factor_authentication: true, two_factor_grace_period: 32 } + + before do + group1.add_user(user, GroupMember::OWNER) + group2.add_user(user, GroupMember::OWNER) + + user.update_two_factor_requirement + end + + it 'requires 2FA' do + expect(user.require_two_factor_authentication_from_group).to be true + end + + it 'uses the shortest grace period' do + expect(user.two_factor_grace_period).to be 23 + end + end + + context 'with 2FA requirement on nested parent group' do + let!(:group1) { create :group, require_two_factor_authentication: true } + let!(:group1a) { create :group, require_two_factor_authentication: false, parent: group1 } + + before do + group1a.add_user(user, GroupMember::OWNER) + + user.update_two_factor_requirement + end + + it 'requires 2FA' do + expect(user.require_two_factor_authentication_from_group).to be true + end + end + + context 'with 2FA requirement on nested child group' do + let!(:group1) { create :group, require_two_factor_authentication: false } + let!(:group1a) { create :group, require_two_factor_authentication: true, parent: group1 } + + before do + group1.add_user(user, GroupMember::OWNER) + + user.update_two_factor_requirement + end + + it 'requires 2FA' do + expect(user.require_two_factor_authentication_from_group).to be true + end + end + + context 'without 2FA requirement on groups' do + let(:group) { create :group } + + before do + group.add_user(user, GroupMember::OWNER) + + user.update_two_factor_requirement + end + + it 'does not require 2FA' do + 
expect(user.require_two_factor_authentication_from_group).to be false + end + + it 'falls back to the default grace period' do + expect(user.two_factor_grace_period).to be 48 + end + end + end + + context '.active' do + before do + User.ghost + create(:user, name: 'user', state: 'active') + create(:user, name: 'user', state: 'blocked') + end + + it 'only counts active and non internal users' do + expect(User.active.count).to eq(1) + end + end + + describe 'preferred language' do + it 'is English by default' do + user = create(:user) + + expect(user.preferred_language).to eq('en') + end + end + + context '#invalidate_issue_cache_counts' do + let(:user) { build_stubbed(:user) } + + it 'invalidates cache for issue counter' do + cache_mock = double + + expect(cache_mock).to receive(:delete).with(['users', user.id, 'assigned_open_issues_count']) + + allow(Rails).to receive(:cache).and_return(cache_mock) + + user.invalidate_issue_cache_counts + end + end + + context '#invalidate_merge_request_cache_counts' do + let(:user) { build_stubbed(:user) } + + it 'invalidates cache for Merge Request counter' do + cache_mock = double + + expect(cache_mock).to receive(:delete).with(['users', user.id, 'assigned_open_merge_requests_count']) + + allow(Rails).to receive(:cache).and_return(cache_mock) + + user.invalidate_merge_request_cache_counts + end end end diff --git a/spec/policies/ci/build_policy_spec.rb b/spec/policies/ci/build_policy_spec.rb index 0f280f32eac..3f4ce222b60 100644 --- a/spec/policies/ci/build_policy_spec.rb +++ b/spec/policies/ci/build_policy_spec.rb @@ -89,5 +89,58 @@ describe Ci::BuildPolicy, :models do end end end + + describe 'rules for manual actions' do + let(:project) { create(:project) } + + before do + project.add_developer(user) + end + + context 'when branch build is assigned to is protected' do + before do + create(:protected_branch, :no_one_can_push, + name: 'some-ref', project: project) + end + + context 'when build is a manual action' do + let(:build) do + create(:ci_build, :manual, ref: 'some-ref', pipeline: pipeline) + end + + it 'does not include ability to update build' do + expect(policies).not_to include :update_build + end + end + + context 'when build is not a manual action' do + let(:build) do + create(:ci_build, ref: 'some-ref', pipeline: pipeline) + end + + it 'includes ability to update build' do + expect(policies).to include :update_build + end + end + end + + context 'when branch build is assigned to is not protected' do + context 'when build is a manual action' do + let(:build) { create(:ci_build, :manual, pipeline: pipeline) } + + it 'includes ability to update build' do + expect(policies).to include :update_build + end + end + + context 'when build is not a manual action' do + let(:build) { create(:ci_build, pipeline: pipeline) } + + it 'includes ability to update build' do + expect(policies).to include :update_build + end + end + end + end end end diff --git a/spec/policies/environment_policy_spec.rb b/spec/policies/environment_policy_spec.rb new file mode 100644 index 00000000000..650432520bb --- /dev/null +++ b/spec/policies/environment_policy_spec.rb @@ -0,0 +1,57 @@ +require 'spec_helper' + +describe EnvironmentPolicy do + let(:user) { create(:user) } + let(:project) { create(:project) } + + let(:environment) do + create(:environment, :with_review_app, project: project) + end + + let(:policies) do + described_class.abilities(user, environment).to_set + end + + describe '#rules' do + context 'when user does not have access to the project' do + 
let(:project) { create(:project, :private) }
+
+ it 'does not include ability to stop environment' do
+ expect(policies).not_to include :stop_environment
+ end
+ end
+
+ context 'when anonymous user has access to the project' do
+ let(:project) { create(:project, :public) }
+
+ it 'does not include ability to stop environment' do
+ expect(policies).not_to include :stop_environment
+ end
+ end
+
+ context 'when team member has access to the project' do
+ let(:project) { create(:project, :public) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when team member has ability to stop environment' do
+ it 'includes ability to stop environment' do
+ expect(policies).to include :stop_environment
+ end
+ end
+
+ context 'when team member has no ability to stop environment' do
+ before do
+ create(:protected_branch, :no_one_can_push,
+ name: 'master', project: project)
+ end
+
+ it 'does not include ability to stop environment' do
+ expect(policies).not_to include :stop_environment
+ end
+ end
+ end
+ end
+end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 5c34ff04152..2077c14ff7a 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -22,7 +22,8 @@ describe GroupPolicy, models: true do
:admin_group,
:admin_namespace,
:admin_group_member,
- :change_visibility_level
+ :change_visibility_level,
+ :create_subgroup
]
end
diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb
index 2905d5b26a5..4a07c864428 100644
--- a/spec/policies/issue_policy_spec.rb
+++ b/spec/policies/issue_policy_spec.rb
@@ -1,118 +1,192 @@
require 'spec_helper'
describe IssuePolicy, models: true do
- let(:user) { create(:user) }
-
- describe '#rules' do
- context 'using a regular issue' do
- let(:project) { create(:empty_project, :public) }
- let(:issue) { create(:issue, project: project) }
- let(:policies) { described_class.abilities(user, issue).to_set }
-
- context 'with a regular user' do
- it 'includes the read_issue permission' do
- expect(policies).to include(:read_issue)
- end
-
- it 'does not include the admin_issue permission' do
- expect(policies).not_to include(:admin_issue)
- end
-
- it 'does not include the update_issue permission' do
- expect(policies).not_to include(:update_issue)
- end
- end
+ let(:guest) { create(:user) }
+ let(:author) { create(:user) }
+ let(:assignee) { create(:user) }
+ let(:reporter) { create(:user) }
+ let(:group) { create(:group, :public) }
+ let(:reporter_from_group_link) { create(:user) }
+
+ def permissions(user, issue)
+ described_class.abilities(user, issue).to_set
+ end
+
+ context 'a private project' do
+ let(:non_member) { create(:user) }
+ let(:project) { create(:empty_project, :private) }
+ let(:issue) { create(:issue, project: project, assignees: [assignee], author: author) }
+ let(:issue_no_assignee) { create(:issue, project: project) }
+
+ before do
+ project.team << [guest, :guest]
+ project.team << [author, :guest]
+ project.team << [assignee, :guest]
+ project.team << [reporter, :reporter]
+
+ group.add_reporter(reporter_from_group_link)
+
+ create(:project_group_link, group: group, project: project)
+ end
+
+ it 'does not allow non-members to read issues' do
+ expect(permissions(non_member, issue)).not_to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(non_member, issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue)
+ end
+
+ it 'allows guests to read issues' do
+ expect(permissions(guest,
issue)).to include(:read_issue) + expect(permissions(guest, issue)).not_to include(:update_issue, :admin_issue) + + expect(permissions(guest, issue_no_assignee)).to include(:read_issue) + expect(permissions(guest, issue_no_assignee)).not_to include(:update_issue, :admin_issue) + end + + it 'allows reporters to read, update, and admin issues' do + expect(permissions(reporter, issue)).to include(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter, issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) + end + + it 'allows reporters from group links to read, update, and admin issues' do + expect(permissions(reporter_from_group_link, issue)).to include(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter_from_group_link, issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) + end + + it 'allows issue authors to read and update their issues' do + expect(permissions(author, issue)).to include(:read_issue, :update_issue) + expect(permissions(author, issue)).not_to include(:admin_issue) + + expect(permissions(author, issue_no_assignee)).to include(:read_issue) + expect(permissions(author, issue_no_assignee)).not_to include(:update_issue, :admin_issue) + end + + it 'allows issue assignees to read and update their issues' do + expect(permissions(assignee, issue)).to include(:read_issue, :update_issue) + expect(permissions(assignee, issue)).not_to include(:admin_issue) + + expect(permissions(assignee, issue_no_assignee)).to include(:read_issue) + expect(permissions(assignee, issue_no_assignee)).not_to include(:update_issue, :admin_issue) + end - context 'with a user that is a project reporter' do - before do - project.team << [user, :reporter] - end + context 'with confidential issues' do + let(:confidential_issue) { create(:issue, :confidential, project: project, assignees: [assignee], author: author) } + let(:confidential_issue_no_assignee) { create(:issue, :confidential, project: project) } - it 'includes the read_issue permission' do - expect(policies).to include(:read_issue) - end + it 'does not allow non-members to read confidential issues' do + expect(permissions(non_member, confidential_issue)).not_to include(:read_issue, :update_issue, :admin_issue) + expect(permissions(non_member, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) + end + + it 'does not allow guests to read confidential issues' do + expect(permissions(guest, confidential_issue)).not_to include(:read_issue, :update_issue, :admin_issue) + expect(permissions(guest, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) + end - it 'includes the admin_issue permission' do - expect(policies).to include(:admin_issue) - end + it 'allows reporters to read, update, and admin confidential issues' do + expect(permissions(reporter, confidential_issue)).to include(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter, confidential_issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) + end - it 'includes the update_issue permission' do - expect(policies).to include(:update_issue) - end + it 'allows reporters from group links to read, update, and admin confidential issues' do + expect(permissions(reporter_from_group_link, confidential_issue)).to include(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) end - context 'with a user 
that is a project guest' do - before do - project.team << [user, :guest] - end + it 'allows issue authors to read and update their confidential issues' do + expect(permissions(author, confidential_issue)).to include(:read_issue, :update_issue) + expect(permissions(author, confidential_issue)).not_to include(:admin_issue) - it 'includes the read_issue permission' do - expect(policies).to include(:read_issue) - end + expect(permissions(author, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) + end - it 'does not include the admin_issue permission' do - expect(policies).not_to include(:admin_issue) - end + it 'allows issue assignees to read and update their confidential issues' do + expect(permissions(assignee, confidential_issue)).to include(:read_issue, :update_issue) + expect(permissions(assignee, confidential_issue)).not_to include(:admin_issue) - it 'does not include the update_issue permission' do - expect(policies).not_to include(:update_issue) - end + expect(permissions(assignee, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) end end + end - context 'using a confidential issue' do - let(:issue) { create(:issue, :confidential) } + context 'a public project' do + let(:project) { create(:empty_project, :public) } + let(:issue) { create(:issue, project: project, assignees: [assignee], author: author) } + let(:issue_no_assignee) { create(:issue, project: project) } - context 'with a regular user' do - let(:policies) { described_class.abilities(user, issue).to_set } + before do + project.team << [guest, :guest] + project.team << [reporter, :reporter] - it 'does not include the read_issue permission' do - expect(policies).not_to include(:read_issue) - end + group.add_reporter(reporter_from_group_link) - it 'does not include the admin_issue permission' do - expect(policies).not_to include(:admin_issue) - end + create(:project_group_link, group: group, project: project) + end - it 'does not include the update_issue permission' do - expect(policies).not_to include(:update_issue) - end - end + it 'allows guests to read issues' do + expect(permissions(guest, issue)).to include(:read_issue) + expect(permissions(guest, issue)).not_to include(:update_issue, :admin_issue) + + expect(permissions(guest, issue_no_assignee)).to include(:read_issue) + expect(permissions(guest, issue_no_assignee)).not_to include(:update_issue, :admin_issue) + end + + it 'allows reporters to read, update, and admin issues' do + expect(permissions(reporter, issue)).to include(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter, issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) + end + + it 'allows reporters from group links to read, update, and admin issues' do + expect(permissions(reporter_from_group_link, issue)).to include(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter_from_group_link, issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) + end - context 'with a user that is a project member' do - let(:policies) { described_class.abilities(user, issue).to_set } + it 'allows issue authors to read and update their issues' do + expect(permissions(author, issue)).to include(:read_issue, :update_issue) + expect(permissions(author, issue)).not_to include(:admin_issue) - before do - issue.project.team << [user, :reporter] - end + expect(permissions(author, issue_no_assignee)).to include(:read_issue) + expect(permissions(author, issue_no_assignee)).not_to 
include(:update_issue, :admin_issue) + end + + it 'allows issue assignees to read and update their issues' do + expect(permissions(assignee, issue)).to include(:read_issue, :update_issue) + expect(permissions(assignee, issue)).not_to include(:admin_issue) - it 'includes the read_issue permission' do - expect(policies).to include(:read_issue) - end + expect(permissions(assignee, issue_no_assignee)).to include(:read_issue) + expect(permissions(assignee, issue_no_assignee)).not_to include(:update_issue, :admin_issue) + end - it 'includes the admin_issue permission' do - expect(policies).to include(:admin_issue) - end + context 'with confidential issues' do + let(:confidential_issue) { create(:issue, :confidential, project: project, assignees: [assignee], author: author) } + let(:confidential_issue_no_assignee) { create(:issue, :confidential, project: project) } - it 'includes the update_issue permission' do - expect(policies).to include(:update_issue) - end + it 'does not allow guests to read confidential issues' do + expect(permissions(guest, confidential_issue)).not_to include(:read_issue, :update_issue, :admin_issue) + expect(permissions(guest, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) end - context 'without a user' do - let(:policies) { described_class.abilities(nil, issue).to_set } + it 'allows reporters to read, update, and admin confidential issues' do + expect(permissions(reporter, confidential_issue)).to include(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter, confidential_issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) + end + + it 'allows reporter from group links to read, update, and admin confidential issues' do + expect(permissions(reporter_from_group_link, confidential_issue)).to include(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) + end - it 'does not include the read_issue permission' do - expect(policies).not_to include(:read_issue) - end + it 'allows issue authors to read and update their confidential issues' do + expect(permissions(author, confidential_issue)).to include(:read_issue, :update_issue) + expect(permissions(author, confidential_issue)).not_to include(:admin_issue) + + expect(permissions(author, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) + end - it 'does not include the admin_issue permission' do - expect(policies).not_to include(:admin_issue) - end + it 'allows issue assignees to read and update their confidential issues' do + expect(permissions(assignee, confidential_issue)).to include(:read_issue, :update_issue) + expect(permissions(assignee, confidential_issue)).not_to include(:admin_issue) - it 'does not include the update_issue permission' do - expect(policies).not_to include(:update_issue) - end + expect(permissions(assignee, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) end end end diff --git a/spec/policies/issues_policy_spec.rb b/spec/policies/issues_policy_spec.rb deleted file mode 100644 index 2b7b6cad654..00000000000 --- a/spec/policies/issues_policy_spec.rb +++ /dev/null @@ -1,193 +0,0 @@ -require 'spec_helper' - -describe IssuePolicy, models: true do - let(:guest) { create(:user) } - let(:author) { create(:user) } - let(:assignee) { create(:user) } - let(:reporter) { create(:user) } - let(:group) { create(:group, :public) } - 
let(:reporter_from_group_link) { create(:user) } - - def permissions(user, issue) - IssuePolicy.abilities(user, issue).to_set - end - - context 'a private project' do - let(:non_member) { create(:user) } - let(:project) { create(:empty_project, :private) } - let(:issue) { create(:issue, project: project, assignee: assignee, author: author) } - let(:issue_no_assignee) { create(:issue, project: project) } - - before do - project.team << [guest, :guest] - project.team << [author, :guest] - project.team << [assignee, :guest] - project.team << [reporter, :reporter] - - group.add_reporter(reporter_from_group_link) - - create(:project_group_link, group: group, project: project) - end - - it 'does not allow non-members to read issues' do - expect(permissions(non_member, issue)).not_to include(:read_issue, :update_issue, :admin_issue) - expect(permissions(non_member, issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) - end - - it 'allows guests to read issues' do - expect(permissions(guest, issue)).to include(:read_issue) - expect(permissions(guest, issue)).not_to include(:update_issue, :admin_issue) - - expect(permissions(guest, issue_no_assignee)).to include(:read_issue) - expect(permissions(guest, issue_no_assignee)).not_to include(:update_issue, :admin_issue) - end - - it 'allows reporters to read, update, and admin issues' do - expect(permissions(reporter, issue)).to include(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter, issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) - end - - it 'allows reporters from group links to read, update, and admin issues' do - expect(permissions(reporter_from_group_link, issue)).to include(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter_from_group_link, issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) - end - - it 'allows issue authors to read and update their issues' do - expect(permissions(author, issue)).to include(:read_issue, :update_issue) - expect(permissions(author, issue)).not_to include(:admin_issue) - - expect(permissions(author, issue_no_assignee)).to include(:read_issue) - expect(permissions(author, issue_no_assignee)).not_to include(:update_issue, :admin_issue) - end - - it 'allows issue assignees to read and update their issues' do - expect(permissions(assignee, issue)).to include(:read_issue, :update_issue) - expect(permissions(assignee, issue)).not_to include(:admin_issue) - - expect(permissions(assignee, issue_no_assignee)).to include(:read_issue) - expect(permissions(assignee, issue_no_assignee)).not_to include(:update_issue, :admin_issue) - end - - context 'with confidential issues' do - let(:confidential_issue) { create(:issue, :confidential, project: project, assignee: assignee, author: author) } - let(:confidential_issue_no_assignee) { create(:issue, :confidential, project: project) } - - it 'does not allow non-members to read confidential issues' do - expect(permissions(non_member, confidential_issue)).not_to include(:read_issue, :update_issue, :admin_issue) - expect(permissions(non_member, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) - end - - it 'does not allow guests to read confidential issues' do - expect(permissions(guest, confidential_issue)).not_to include(:read_issue, :update_issue, :admin_issue) - expect(permissions(guest, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) - end - - it 'allows reporters to read, update, and 
admin confidential issues' do - expect(permissions(reporter, confidential_issue)).to include(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter, confidential_issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) - end - - it 'allows reporters from group links to read, update, and admin confidential issues' do - expect(permissions(reporter_from_group_link, confidential_issue)).to include(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) - end - - it 'allows issue authors to read and update their confidential issues' do - expect(permissions(author, confidential_issue)).to include(:read_issue, :update_issue) - expect(permissions(author, confidential_issue)).not_to include(:admin_issue) - - expect(permissions(author, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) - end - - it 'allows issue assignees to read and update their confidential issues' do - expect(permissions(assignee, confidential_issue)).to include(:read_issue, :update_issue) - expect(permissions(assignee, confidential_issue)).not_to include(:admin_issue) - - expect(permissions(assignee, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) - end - end - end - - context 'a public project' do - let(:project) { create(:empty_project, :public) } - let(:issue) { create(:issue, project: project, assignee: assignee, author: author) } - let(:issue_no_assignee) { create(:issue, project: project) } - - before do - project.team << [guest, :guest] - project.team << [reporter, :reporter] - - group.add_reporter(reporter_from_group_link) - - create(:project_group_link, group: group, project: project) - end - - it 'allows guests to read issues' do - expect(permissions(guest, issue)).to include(:read_issue) - expect(permissions(guest, issue)).not_to include(:update_issue, :admin_issue) - - expect(permissions(guest, issue_no_assignee)).to include(:read_issue) - expect(permissions(guest, issue_no_assignee)).not_to include(:update_issue, :admin_issue) - end - - it 'allows reporters to read, update, and admin issues' do - expect(permissions(reporter, issue)).to include(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter, issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) - end - - it 'allows reporters from group links to read, update, and admin issues' do - expect(permissions(reporter_from_group_link, issue)).to include(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter_from_group_link, issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) - end - - it 'allows issue authors to read and update their issues' do - expect(permissions(author, issue)).to include(:read_issue, :update_issue) - expect(permissions(author, issue)).not_to include(:admin_issue) - - expect(permissions(author, issue_no_assignee)).to include(:read_issue) - expect(permissions(author, issue_no_assignee)).not_to include(:update_issue, :admin_issue) - end - - it 'allows issue assignees to read and update their issues' do - expect(permissions(assignee, issue)).to include(:read_issue, :update_issue) - expect(permissions(assignee, issue)).not_to include(:admin_issue) - - expect(permissions(assignee, issue_no_assignee)).to include(:read_issue) - expect(permissions(assignee, issue_no_assignee)).not_to include(:update_issue, :admin_issue) - end - - context 'with confidential 
issues' do - let(:confidential_issue) { create(:issue, :confidential, project: project, assignee: assignee, author: author) } - let(:confidential_issue_no_assignee) { create(:issue, :confidential, project: project) } - - it 'does not allow guests to read confidential issues' do - expect(permissions(guest, confidential_issue)).not_to include(:read_issue, :update_issue, :admin_issue) - expect(permissions(guest, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) - end - - it 'allows reporters to read, update, and admin confidential issues' do - expect(permissions(reporter, confidential_issue)).to include(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter, confidential_issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) - end - - it 'allows reporter from group links to read, update, and admin confidential issues' do - expect(permissions(reporter_from_group_link, confidential_issue)).to include(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue) - end - - it 'allows issue authors to read and update their confidential issues' do - expect(permissions(author, confidential_issue)).to include(:read_issue, :update_issue) - expect(permissions(author, confidential_issue)).not_to include(:admin_issue) - - expect(permissions(author, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) - end - - it 'allows issue assignees to read and update their confidential issues' do - expect(permissions(assignee, confidential_issue)).to include(:read_issue, :update_issue) - expect(permissions(assignee, confidential_issue)).not_to include(:admin_issue) - - expect(permissions(assignee, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue) - end - end - end -end diff --git a/spec/policies/personal_snippet_policy_spec.rb b/spec/policies/personal_snippet_policy_spec.rb new file mode 100644 index 00000000000..58aa1145c9e --- /dev/null +++ b/spec/policies/personal_snippet_policy_spec.rb @@ -0,0 +1,141 @@ +require 'spec_helper' + +describe PersonalSnippetPolicy, models: true do + let(:regular_user) { create(:user) } + let(:external_user) { create(:user, :external) } + let(:admin_user) { create(:user, :admin) } + + let(:author_permissions) do + [ + :update_personal_snippet, + :admin_personal_snippet, + :destroy_personal_snippet + ] + end + + def permissions(user) + described_class.abilities(user, snippet).to_set + end + + context 'public snippet' do + let(:snippet) { create(:personal_snippet, :public) } + + context 'no user' do + subject { permissions(nil) } + + it do + is_expected.to include(:read_personal_snippet) + is_expected.not_to include(:comment_personal_snippet) + is_expected.not_to include(*author_permissions) + end + end + + context 'regular user' do + subject { permissions(regular_user) } + + it do + is_expected.to include(:read_personal_snippet) + is_expected.to include(:comment_personal_snippet) + is_expected.not_to include(*author_permissions) + end + end + + context 'author' do + subject { permissions(snippet.author) } + + it do + is_expected.to include(:read_personal_snippet) + is_expected.to include(:comment_personal_snippet) + is_expected.to include(*author_permissions) + end + end + end + + context 'internal snippet' do + let(:snippet) { create(:personal_snippet, :internal) } + + context 'no user' do + subject { permissions(nil) } + + 
it do + is_expected.not_to include(:read_personal_snippet) + is_expected.not_to include(:comment_personal_snippet) + is_expected.not_to include(*author_permissions) + end + end + + context 'regular user' do + subject { permissions(regular_user) } + + it do + is_expected.to include(:read_personal_snippet) + is_expected.to include(:comment_personal_snippet) + is_expected.not_to include(*author_permissions) + end + end + + context 'external user' do + subject { permissions(external_user) } + + it do + is_expected.not_to include(:read_personal_snippet) + is_expected.not_to include(:comment_personal_snippet) + is_expected.not_to include(*author_permissions) + end + end + + context 'snippet author' do + subject { permissions(snippet.author) } + + it do + is_expected.to include(:read_personal_snippet) + is_expected.to include(:comment_personal_snippet) + is_expected.to include(*author_permissions) + end + end + end + + context 'private snippet' do + let(:snippet) { create(:project_snippet, :private) } + + context 'no user' do + subject { permissions(nil) } + + it do + is_expected.not_to include(:read_personal_snippet) + is_expected.not_to include(:comment_personal_snippet) + is_expected.not_to include(*author_permissions) + end + end + + context 'regular user' do + subject { permissions(regular_user) } + + it do + is_expected.not_to include(:read_personal_snippet) + is_expected.not_to include(:comment_personal_snippet) + is_expected.not_to include(*author_permissions) + end + end + + context 'external user' do + subject { permissions(external_user) } + + it do + is_expected.not_to include(:read_personal_snippet) + is_expected.not_to include(:comment_personal_snippet) + is_expected.not_to include(*author_permissions) + end + end + + context 'snippet author' do + subject { permissions(snippet.author) } + + it do + is_expected.to include(:read_personal_snippet) + is_expected.to include(:comment_personal_snippet) + is_expected.to include(*author_permissions) + end + end + end +end diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb index 064847ee3dc..0d3af1f4499 100644 --- a/spec/policies/project_policy_spec.rb +++ b/spec/policies/project_policy_spec.rb @@ -43,7 +43,7 @@ describe ProjectPolicy, models: true do let(:master_permissions) do %i[ - push_code_to_protected_branches update_project_snippet update_environment + delete_protected_branch update_project_snippet update_environment update_deployment admin_milestone admin_project_snippet admin_project_member admin_note admin_wiki admin_project admin_commit_status admin_build admin_container_image diff --git a/spec/policies/project_snippet_policy_spec.rb b/spec/policies/project_snippet_policy_spec.rb index d0758af57dd..e1771b636b8 100644 --- a/spec/policies/project_snippet_policy_spec.rb +++ b/spec/policies/project_snippet_policy_spec.rb @@ -1,7 +1,9 @@ require 'spec_helper' describe ProjectSnippetPolicy, models: true do - let(:current_user) { create(:user) } + let(:regular_user) { create(:user) } + let(:external_user) { create(:user, :external) } + let(:project) { create(:empty_project) } let(:author_permissions) do [ @@ -10,13 +12,15 @@ describe ProjectSnippetPolicy, models: true do ] end - subject { described_class.abilities(current_user, project_snippet).to_set } + def abilities(user, snippet_visibility) + snippet = create(:project_snippet, snippet_visibility, project: project) - context 'public snippet' do - let(:project_snippet) { create(:project_snippet, :public) } + described_class.abilities(user, 
snippet).to_set + end + context 'public snippet' do context 'no user' do - let(:current_user) { nil } + subject { abilities(nil, :public) } it do is_expected.to include(:read_project_snippet) @@ -25,6 +29,17 @@ describe ProjectSnippetPolicy, models: true do end context 'regular user' do + subject { abilities(regular_user, :public) } + + it do + is_expected.to include(:read_project_snippet) + is_expected.not_to include(*author_permissions) + end + end + + context 'external user' do + subject { abilities(external_user, :public) } + it do is_expected.to include(:read_project_snippet) is_expected.not_to include(*author_permissions) @@ -33,10 +48,8 @@ describe ProjectSnippetPolicy, models: true do end context 'internal snippet' do - let(:project_snippet) { create(:project_snippet, :internal) } - context 'no user' do - let(:current_user) { nil } + subject { abilities(nil, :internal) } it do is_expected.not_to include(:read_project_snippet) @@ -45,6 +58,28 @@ describe ProjectSnippetPolicy, models: true do end context 'regular user' do + subject { abilities(regular_user, :internal) } + + it do + is_expected.to include(:read_project_snippet) + is_expected.not_to include(*author_permissions) + end + end + + context 'external user' do + subject { abilities(external_user, :internal) } + + it do + is_expected.not_to include(:read_project_snippet) + is_expected.not_to include(*author_permissions) + end + end + + context 'project team member external user' do + subject { abilities(external_user, :internal) } + + before { project.team << [external_user, :developer] } + it do is_expected.to include(:read_project_snippet) is_expected.not_to include(*author_permissions) @@ -53,10 +88,8 @@ describe ProjectSnippetPolicy, models: true do end context 'private snippet' do - let(:project_snippet) { create(:project_snippet, :private) } - context 'no user' do - let(:current_user) { nil } + subject { abilities(nil, :private) } it do is_expected.not_to include(:read_project_snippet) @@ -65,6 +98,8 @@ describe ProjectSnippetPolicy, models: true do end context 'regular user' do + subject { abilities(regular_user, :private) } + it do is_expected.not_to include(:read_project_snippet) is_expected.not_to include(*author_permissions) @@ -72,7 +107,9 @@ describe ProjectSnippetPolicy, models: true do end context 'snippet author' do - let(:project_snippet) { create(:project_snippet, :private, author: current_user) } + let(:snippet) { create(:project_snippet, :private, author: regular_user) } + + subject { described_class.abilities(regular_user, snippet).to_set } it do is_expected.to include(:read_project_snippet) @@ -80,8 +117,21 @@ describe ProjectSnippetPolicy, models: true do end end - context 'project team member' do - before { project_snippet.project.team << [current_user, :developer] } + context 'project team member normal user' do + subject { abilities(regular_user, :private) } + + before { project.team << [regular_user, :developer] } + + it do + is_expected.to include(:read_project_snippet) + is_expected.not_to include(*author_permissions) + end + end + + context 'project team member external user' do + subject { abilities(external_user, :private) } + + before { project.team << [external_user, :developer] } it do is_expected.to include(:read_project_snippet) @@ -90,7 +140,7 @@ describe ProjectSnippetPolicy, models: true do end context 'admin user' do - let(:current_user) { create(:admin) } + subject { abilities(create(:admin), :private) } it do is_expected.to include(:read_project_snippet) diff --git 
a/spec/presenters/ci/build_presenter_spec.rb b/spec/presenters/ci/build_presenter_spec.rb index 7a35da38b2b..2190ab0e82e 100644 --- a/spec/presenters/ci/build_presenter_spec.rb +++ b/spec/presenters/ci/build_presenter_spec.rb @@ -57,6 +57,32 @@ describe Ci::BuildPresenter do end end + describe '#status_title' do + context 'when build is auto-canceled' do + before do + expect(build).to receive(:auto_canceled?).and_return(true) + expect(build).to receive(:auto_canceled_by_id).and_return(1) + end + + it 'shows that the build is auto-canceled' do + status_title = presenter.status_title + + expect(status_title).to include('auto-canceled') + expect(status_title).to include('Pipeline #1') + end + end + + context 'when build is not auto-canceled' do + before do + expect(build).to receive(:auto_canceled?).and_return(false) + end + + it 'does not have a status title' do + expect(presenter.status_title).to be_nil + end + end + end + describe 'quack like a Ci::Build permission-wise' do context 'user is not allowed' do let(:project) { build_stubbed(:empty_project, public_builds: false) } diff --git a/spec/presenters/ci/pipeline_presenter_spec.rb b/spec/presenters/ci/pipeline_presenter_spec.rb new file mode 100644 index 00000000000..9134d1cc31c --- /dev/null +++ b/spec/presenters/ci/pipeline_presenter_spec.rb @@ -0,0 +1,54 @@ +require 'spec_helper' + +describe Ci::PipelinePresenter do + let(:project) { create(:empty_project) } + let(:pipeline) { create(:ci_pipeline, project: project) } + + subject(:presenter) do + described_class.new(pipeline) + end + + it 'inherits from Gitlab::View::Presenter::Delegated' do + expect(described_class.superclass).to eq(Gitlab::View::Presenter::Delegated) + end + + describe '#initialize' do + it 'takes a pipeline and optional params' do + expect { presenter }.not_to raise_error + end + + it 'exposes pipeline' do + expect(presenter.pipeline).to eq(pipeline) + end + + it 'forwards missing methods to pipeline' do + expect(presenter.ref).to eq(pipeline.ref) + end + end + + describe '#status_title' do + context 'when pipeline is auto-canceled' do + before do + expect(pipeline).to receive(:auto_canceled?).and_return(true) + expect(pipeline).to receive(:auto_canceled_by_id).and_return(1) + end + + it 'shows that the pipeline is auto-canceled' do + status_title = presenter.status_title + + expect(status_title).to include('auto-canceled') + expect(status_title).to include('Pipeline #1') + end + end + + context 'when pipeline is not auto-canceled' do + before do + expect(pipeline).to receive(:auto_canceled?).and_return(false) + end + + it 'does not have a status title' do + expect(presenter.status_title).to be_nil + end + end + end +end diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb new file mode 100644 index 00000000000..44720fc4448 --- /dev/null +++ b/spec/presenters/merge_request_presenter_spec.rb @@ -0,0 +1,356 @@ +require 'spec_helper' + +describe MergeRequestPresenter do + let(:resource) { create :merge_request, source_project: project } + let(:project) { create :empty_project } + let(:user) { create(:user) } + + describe '#ci_status' do + subject { described_class.new(resource).ci_status } + + context 'when no head pipeline' do + it 'return status using CiService' do + ci_service = double(MockCiService) + ci_status = double + + allow(resource.source_project) + .to receive(:ci_service) + .and_return(ci_service) + + allow(resource).to receive(:head_pipeline).and_return(nil) + + expect(ci_service).to 
receive(:commit_status) + .with(resource.diff_head_sha, resource.source_branch) + .and_return(ci_status) + + is_expected.to eq(ci_status) + end + end + + context 'when head pipeline present' do + let(:pipeline) { build_stubbed(:ci_pipeline) } + + before do + allow(resource).to receive(:head_pipeline).and_return(pipeline) + end + + context 'success with warnings' do + before do + allow(pipeline).to receive(:success?) { true } + allow(pipeline).to receive(:has_warnings?) { true } + end + + it 'returns "success_with_warnings"' do + is_expected.to eq('success_with_warnings') + end + end + + context 'pipeline HAS status AND its not success with warnings' do + before do + allow(pipeline).to receive(:success?) { false } + allow(pipeline).to receive(:has_warnings?) { false } + end + + it 'returns pipeline status' do + is_expected.to eq('pending') + end + end + + context 'pipeline has NO status AND its not success with warnings' do + before do + allow(pipeline).to receive(:status) { nil } + allow(pipeline).to receive(:success?) { false } + allow(pipeline).to receive(:has_warnings?) { false } + end + + it 'returns "preparing"' do + is_expected.to eq('preparing') + end + end + end + end + + describe '#conflict_resolution_path' do + let(:project) { create :empty_project } + let(:user) { create :user } + let(:presenter) { described_class.new(resource, current_user: user) } + let(:path) { presenter.conflict_resolution_path } + + context 'when MR cannot be resolved in UI' do + it 'does not return conflict resolution path' do + allow(presenter).to receive_message_chain(:conflicts, :can_be_resolved_in_ui?) { false } + + expect(path).to be_nil + end + end + + context 'when conflicts cannot be resolved by user' do + it 'does not return conflict resolution path' do + allow(presenter).to receive_message_chain(:conflicts, :can_be_resolved_in_ui?) { true } + allow(presenter).to receive_message_chain(:conflicts, :can_be_resolved_by?).with(user) { false } + + expect(path).to be_nil + end + end + + context 'when able to access conflict resolution UI' do + it 'does return conflict resolution path' do + allow(presenter).to receive_message_chain(:conflicts, :can_be_resolved_in_ui?) 
{ true } + allow(presenter).to receive_message_chain(:conflicts, :can_be_resolved_by?).with(user) { true } + + expect(path) + .to eq("/#{project.full_path}/merge_requests/#{resource.iid}/conflicts") + end + end + end + + context 'issues links' do + let(:project) { create(:project, :private, creator: user, namespace: user.namespace) } + let(:issue_a) { create(:issue, project: project) } + let(:issue_b) { create(:issue, project: project) } + + let(:resource) do + create(:merge_request, + source_project: project, target_project: project, + description: "Fixes #{issue_a.to_reference} Related #{issue_b.to_reference}") + end + + before do + project.team << [user, :developer] + + allow(resource.project).to receive(:default_branch) + .and_return(resource.target_branch) + end + + describe '#closing_issues_links' do + subject { described_class.new(resource, current_user: user).closing_issues_links } + + it 'presents closing issues links' do + is_expected.to match("#{project.full_path}/issues/#{issue_a.iid}") + end + + it 'does not present related issues links' do + is_expected.not_to match("#{project.full_path}/issues/#{issue_b.iid}") + end + end + + describe '#mentioned_issues_links' do + subject do + described_class.new(resource, current_user: user) + .mentioned_issues_links + end + + it 'presents related issues links' do + is_expected.to match("#{project.full_path}/issues/#{issue_b.iid}") + end + + it 'does not present closing issues links' do + is_expected.not_to match("#{project.full_path}/issues/#{issue_a.iid}") + end + end + + describe '#assign_to_closing_issues_link' do + subject do + described_class.new(resource, current_user: user) + .assign_to_closing_issues_link + end + + before do + assign_issues_service = double(MergeRequests::AssignIssuesService, assignable_issues: assignable_issues) + allow(MergeRequests::AssignIssuesService).to receive(:new) + .and_return(assign_issues_service) + end + + context 'single closing issue' do + let(:issue) { create(:issue) } + let(:assignable_issues) { [issue] } + + it 'returns correct link with correct text' do + is_expected + .to match("#{project.full_path}/merge_requests/#{resource.iid}/assign_related_issues") + + is_expected + .to match("Assign yourself to this issue") + end + end + + context 'multiple closing issues' do + let(:issues) { create_list(:issue, 2) } + let(:assignable_issues) { issues } + + it 'returns correct link with correct text' do + is_expected + .to match("#{project.full_path}/merge_requests/#{resource.iid}/assign_related_issues") + + is_expected + .to match("Assign yourself to these issues") + end + end + + context 'no closing issue' do + let(:assignable_issues) { [] } + + it 'returns correct link with correct text' do + is_expected.to be_nil + end + end + end + end + + describe '#cancel_merge_when_pipeline_succeeds_path' do + subject do + described_class.new(resource, current_user: user) + .cancel_merge_when_pipeline_succeeds_path + end + + context 'when can cancel mwps' do + it 'returns path' do + allow(resource).to receive(:can_cancel_merge_when_pipeline_succeeds?) + .with(user) + .and_return(true) + + is_expected.to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}/cancel_merge_when_pipeline_succeeds") + end + end + + context 'when cannot cancel mwps' do + it 'returns nil' do + allow(resource).to receive(:can_cancel_merge_when_pipeline_succeeds?) 
+ .with(user) + .and_return(false) + + is_expected.to be_nil + end + end + end + + describe '#merge_path' do + subject do + described_class.new(resource, current_user: user).merge_path + end + + context 'when can be merged by user' do + it 'returns path' do + allow(resource).to receive(:can_be_merged_by?) + .with(user) + .and_return(true) + + is_expected + .to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}/merge") + end + end + + context 'when cannot be merged by user' do + it 'returns nil' do + allow(resource).to receive(:can_be_merged_by?) + .with(user) + .and_return(false) + + is_expected.to be_nil + end + end + end + + describe '#create_issue_to_resolve_discussions_path' do + subject do + described_class.new(resource, current_user: user) + .create_issue_to_resolve_discussions_path + end + + context 'when can create issue and issues enabled' do + it 'returns path' do + allow(project).to receive(:issues_enabled?) { true } + project.team << [user, :master] + + is_expected + .to eq("/#{resource.project.full_path}/issues/new?merge_request_to_resolve_discussions_of=#{resource.iid}") + end + end + + context 'when cannot create issue' do + it 'returns nil' do + allow(project).to receive(:issues_enabled?) { true } + + is_expected.to be_nil + end + end + + context 'when issues disabled' do + it 'returns nil' do + allow(project).to receive(:issues_enabled?) { false } + project.team << [user, :master] + + is_expected.to be_nil + end + end + end + + describe '#remove_wip_path' do + subject do + described_class.new(resource, current_user: user).remove_wip_path + end + + context 'when merge request enabled and has permission' do + it 'has remove_wip_path' do + allow(project).to receive(:merge_requests_enabled?) { true } + project.team << [user, :master] + + is_expected + .to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}/remove_wip") + end + end + + context 'when has no permission' do + it 'returns nil' do + is_expected.to be_nil + end + end + end + + describe '#target_branch_commits_path' do + subject do + described_class.new(resource, current_user: user) + .target_branch_commits_path + end + + context 'when target branch exists' do + it 'returns path' do + allow(resource).to receive(:target_branch_exists?) { true } + + is_expected + .to eq("/#{resource.target_project.full_path}/commits/#{resource.target_branch}") + end + end + + context 'when target branch does not exist' do + it 'returns nil' do + allow(resource).to receive(:target_branch_exists?) { false } + + is_expected.to be_nil + end + end + end + + describe '#source_branch_path' do + subject do + described_class.new(resource, current_user: user).source_branch_path + end + + context 'when source branch exists' do + it 'returns path' do + allow(resource).to receive(:source_branch_exists?) { true } + + is_expected + .to eq("/#{resource.source_project.full_path}/branches/#{resource.source_branch}") + end + end + + context 'when source branch does not exist' do + it 'returns nil' do + allow(resource).to receive(:source_branch_exists?) 
{ false } + + is_expected.to be_nil + end + end + end +end diff --git a/spec/requests/api/access_requests_spec.rb b/spec/requests/api/access_requests_spec.rb index 46edbd49b28..c8eacb38e6f 100644 --- a/spec/requests/api/access_requests_spec.rb +++ b/spec/requests/api/access_requests_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::AccessRequests, api: true do - include ApiHelpers - +describe API::AccessRequests do let(:master) { create(:user) } let(:developer) { create(:user) } let(:access_requester) { create(:user) } diff --git a/spec/requests/api/api_internal_helpers_spec.rb b/spec/requests/api/api_internal_helpers_spec.rb deleted file mode 100644 index f5265ea60ff..00000000000 --- a/spec/requests/api/api_internal_helpers_spec.rb +++ /dev/null @@ -1,32 +0,0 @@ -require 'spec_helper' - -describe ::API::Helpers::InternalHelpers do - include ::API::Helpers::InternalHelpers - - describe '.clean_project_path' do - project = 'namespace/project' - namespaced = File.join('namespace2', project) - - { - File.join(Dir.pwd, project) => project, - File.join(Dir.pwd, namespaced) => namespaced, - project => project, - namespaced => namespaced, - project + '.git' => project, - namespaced + '.git' => namespaced, - "/" + project => project, - "/" + namespaced => namespaced, - }.each do |project_path, expected| - context project_path do - # Relative and absolute storage paths, with and without trailing / - ['.', './', Dir.pwd, Dir.pwd + '/'].each do |storage_path| - context "storage path is #{storage_path}" do - subject { clean_project_path(project_path, [{ 'path' => storage_path }]) } - - it { is_expected.to eq(expected) } - end - end - end - end - end -end diff --git a/spec/requests/api/award_emoji_spec.rb b/spec/requests/api/award_emoji_spec.rb index f4d4a8a2cc7..bbdef0aeb1b 100644 --- a/spec/requests/api/award_emoji_spec.rb +++ b/spec/requests/api/award_emoji_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -describe API::AwardEmoji, api: true do - include ApiHelpers +describe API::AwardEmoji do let(:user) { create(:user) } let!(:project) { create(:empty_project) } let(:issue) { create(:issue, project: project) } diff --git a/spec/requests/api/boards_spec.rb b/spec/requests/api/boards_spec.rb index 87c36639cd4..c27db716ef8 100644 --- a/spec/requests/api/boards_spec.rb +++ b/spec/requests/api/boards_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Boards, api: true do - include ApiHelpers - +describe API::Boards do let(:user) { create(:user) } let(:user2) { create(:user) } let(:non_member) { create(:user) } diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb index a70f7beaae0..c64499fc8c0 100644 --- a/spec/requests/api/branches_spec.rb +++ b/spec/requests/api/branches_spec.rb @@ -1,9 +1,7 @@ require 'spec_helper' require 'mime/types' -describe API::Branches, api: true do - include ApiHelpers - +describe API::Branches do let(:user) { create(:user) } let!(:project) { create(:project, :repository, creator: user) } let!(:master) { create(:project_member, :master, user: user, project: project) } @@ -408,19 +406,6 @@ describe API::Branches, api: true do delete api("/projects/#{project.id}/repository/branches/foobar", user) expect(response).to have_http_status(404) end - - it "removes protected branch" do - create(:protected_branch, project: project, name: branch_name) - delete api("/projects/#{project.id}/repository/branches/#{branch_name}", user) - expect(response).to have_http_status(405) - expect(json_response['message']).to eq('Protected branch cant be 
removed') - end - - it "does not remove HEAD branch" do - delete api("/projects/#{project.id}/repository/branches/master", user) - expect(response).to have_http_status(405) - expect(json_response['message']).to eq('Cannot remove HEAD branch') - end end describe "DELETE /projects/:id/repository/merged_branches" do diff --git a/spec/requests/api/broadcast_messages_spec.rb b/spec/requests/api/broadcast_messages_spec.rb index 024fa66848c..67989689799 100644 --- a/spec/requests/api/broadcast_messages_spec.rb +++ b/spec/requests/api/broadcast_messages_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::BroadcastMessages, api: true do - include ApiHelpers - +describe API::BroadcastMessages do let(:user) { create(:user) } let(:admin) { create(:admin) } diff --git a/spec/requests/api/commit_statuses_spec.rb b/spec/requests/api/commit_statuses_spec.rb index d8b3cc041a5..1c163cee152 100644 --- a/spec/requests/api/commit_statuses_spec.rb +++ b/spec/requests/api/commit_statuses_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::CommitStatuses, api: true do - include ApiHelpers - +describe API::CommitStatuses do let!(:project) { create(:project, :repository) } let(:commit) { project.repository.commit } let(:guest) { create_user(:guest) } @@ -28,8 +26,8 @@ describe API::CommitStatuses, api: true do create(:commit_status, { pipeline: commit, ref: commit.ref }.merge(opts)) end - let!(:status1) { create_status(master, status: 'running') } - let!(:status2) { create_status(master, name: 'coverage', status: 'pending') } + let!(:status1) { create_status(master, status: 'running', retried: true) } + let!(:status2) { create_status(master, name: 'coverage', status: 'pending', retried: true) } let!(:status3) { create_status(develop, status: 'running', allow_failure: true) } let!(:status4) { create_status(master, name: 'coverage', status: 'success') } let!(:status5) { create_status(develop, name: 'coverage', status: 'success') } diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb index a10d876ffad..0b0e4c2b112 100644 --- a/spec/requests/api/commits_spec.rb +++ b/spec/requests/api/commits_spec.rb @@ -1,8 +1,7 @@ require 'spec_helper' require 'mime/types' -describe API::Commits, api: true do - include ApiHelpers +describe API::Commits do let(:user) { create(:user) } let(:user2) { create(:user) } let!(:project) { create(:project, :repository, creator: user, namespace: user.namespace) } @@ -599,8 +598,7 @@ describe API::Commits, api: true do post api("/projects/#{project.id}/repository/commits/#{master_pickable_commit.id}/cherry_pick", user), branch: 'markdown' expect(response).to have_http_status(400) - expect(json_response['message']).to eq('Sorry, we cannot cherry-pick this commit automatically. 
- A cherry-pick may have already been performed with this commit, or a more recent commit may have updated some of its content.') + expect(json_response['message']).to include('Sorry, we cannot cherry-pick this commit automatically.') end it 'returns 400 if you are not allowed to push to the target branch' do diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb index 4f4b18cf0e0..843e9862b0c 100644 --- a/spec/requests/api/deploy_keys_spec.rb +++ b/spec/requests/api/deploy_keys_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::DeployKeys, api: true do - include ApiHelpers - +describe API::DeployKeys do let(:user) { create(:user) } let(:admin) { create(:admin) } let(:project) { create(:empty_project, creator_id: user.id) } @@ -108,6 +106,15 @@ describe API::DeployKeys, api: true do expect(response).to have_http_status(201) end + + it 'accepts can_push parameter' do + key_attrs = attributes_for :write_access_key + + post api("/projects/#{project.id}/deploy_keys", admin), key_attrs + + expect(response).to have_http_status(201) + expect(json_response['can_push']).to eq(true) + end end describe 'DELETE /projects/:id/deploy_keys/:key_id' do diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb index e55575ffbda..90d78d060ca 100644 --- a/spec/requests/api/deployments_spec.rb +++ b/spec/requests/api/deployments_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Deployments, api: true do - include ApiHelpers - +describe API::Deployments do let(:user) { create(:user) } let(:non_member) { create(:user) } let(:project) { deployment.environment.project } diff --git a/spec/requests/api/doorkeeper_access_spec.rb b/spec/requests/api/doorkeeper_access_spec.rb index f6fd567eca5..868fef65c1c 100644 --- a/spec/requests/api/doorkeeper_access_spec.rb +++ b/spec/requests/api/doorkeeper_access_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::API, api: true do - include ApiHelpers - +describe 'doorkeeper access' do let!(:user) { create(:user) } let!(:application) { Doorkeeper::Application.create!(name: "MyApp", redirect_uri: "https://app.com", owner: user) } let!(:token) { Doorkeeper::AccessToken.create! application_id: application.id, resource_owner_id: user.id, scopes: "api" } diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb index b54ee8e8b85..aae03c84e1f 100644 --- a/spec/requests/api/environments_spec.rb +++ b/spec/requests/api/environments_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Environments, api: true do - include ApiHelpers - +describe API::Environments do let(:user) { create(:user) } let(:non_member) { create(:user) } let(:project) { create(:empty_project, :private, namespace: user.namespace) } diff --git a/spec/requests/api/files_spec.rb b/spec/requests/api/files_spec.rb index a7fad7f0bdb..deb2cac6869 100644 --- a/spec/requests/api/files_spec.rb +++ b/spec/requests/api/files_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -describe API::Files, api: true do - include ApiHelpers +describe API::Files do let(:user) { create(:user) } let!(:project) { create(:project, :repository, namespace: user.namespace ) } let(:guest) { create(:user) { |u| project.add_guest(u) } } @@ -11,21 +10,8 @@ describe API::Files, api: true do ref: 'master' } end - let(:author_email) { FFaker::Internet.email } - - # I have to remove periods from the end of the name - # This happened when the user's name had a suffix (i.e. 
"Sr.") - # This seems to be what git does under the hood. For example, this commit: - # - # $ git commit --author='Foo Sr. <foo@example.com>' -m 'Where's my trailing period?' - # - # results in this: - # - # $ git show --pretty - # ... - # Author: Foo Sr <foo@example.com> - # ... - let(:author_name) { FFaker::Name.name.chomp("\.") } + let(:author_email) { 'user@example.org' } + let(:author_name) { 'John Doe' } before { project.team << [user, :developer] } @@ -218,7 +204,7 @@ describe API::Files, api: true do it "returns a 400 if editor fails to create file" do allow_any_instance_of(Repository).to receive(:create_file). - and_return(false) + and_raise(Repository::CommitError, 'Cannot create file') post api(route("any%2Etxt"), user), valid_params @@ -312,8 +298,8 @@ describe API::Files, api: true do expect(response).to have_http_status(400) end - it "returns a 400 if fails to create file" do - allow_any_instance_of(Repository).to receive(:delete_file).and_return(false) + it "returns a 400 if fails to delete file" do + allow_any_instance_of(Repository).to receive(:delete_file).and_raise(Repository::CommitError, 'Cannot delete file') delete api(route(file_path), user), valid_params @@ -343,7 +329,7 @@ describe API::Files, api: true do end let(:get_params) do { - ref: 'master', + ref: 'master' } end diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb index 2545da7b1db..90b36374ded 100644 --- a/spec/requests/api/groups_spec.rb +++ b/spec/requests/api/groups_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -describe API::Groups, api: true do - include ApiHelpers +describe API::Groups do include UploadHelpers let(:user1) { create(:user, can_create_group: false) } @@ -74,7 +73,7 @@ describe API::Groups, api: true do storage_size: 702, repository_size: 123, lfs_objects_size: 234, - build_artifacts_size: 345, + build_artifacts_size: 345 }.stringify_keys exposed_attributes = attributes.dup exposed_attributes['job_artifacts_size'] = exposed_attributes.delete('build_artifacts_size') @@ -179,7 +178,7 @@ describe API::Groups, api: true do expect(json_response['path']).to eq(group1.path) expect(json_response['description']).to eq(group1.description) expect(json_response['visibility']).to eq(Gitlab::VisibilityLevel.string_level(group1.visibility_level)) - expect(json_response['avatar_url']).to eq(group1.avatar_url) + expect(json_response['avatar_url']).to eq(group1.avatar_url(only_path: false)) expect(json_response['web_url']).to eq(group1.web_url) expect(json_response['request_access_enabled']).to eq(group1.request_access_enabled) expect(json_response['full_name']).to eq(group1.full_name) diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb index 988a57a80ea..ed392acc607 100644 --- a/spec/requests/api/helpers_spec.rb +++ b/spec/requests/api/helpers_spec.rb @@ -1,8 +1,8 @@ require 'spec_helper' -describe API::Helpers, api: true do +describe API::Helpers do include API::APIGuard::HelperMethods - include API::Helpers + include described_class include SentryHelper let(:user) { create(:user) } @@ -427,6 +427,7 @@ describe API::Helpers, api: true do context 'current_user is nil' do before do expect_any_instance_of(self.class).to receive(:current_user).and_return(nil) + allow_any_instance_of(self.class).to receive(:initial_current_user).and_return(nil) end it 'returns a 401 response' do @@ -435,13 +436,38 @@ describe API::Helpers, api: true do end context 'current_user is present' do + let(:user) { build(:user) } + before do - 
expect_any_instance_of(self.class).to receive(:current_user).at_least(:once).and_return(User.new) + expect_any_instance_of(self.class).to receive(:current_user).at_least(:once).and_return(user) + expect_any_instance_of(self.class).to receive(:initial_current_user).and_return(user) end it 'does not raise an error' do expect { authenticate! }.not_to raise_error end end + + context 'current_user is blocked' do + let(:user) { build(:user, :blocked) } + + before do + expect_any_instance_of(self.class).to receive(:current_user).at_least(:once).and_return(user) + end + + it 'raises an error' do + expect_any_instance_of(self.class).to receive(:initial_current_user).and_return(user) + + expect { authenticate! }.to raise_error '401 - {"message"=>"401 Unauthorized"}' + end + + it "doesn't raise an error if an admin user is impersonating a blocked user (via sudo)" do + admin_user = build(:user, :admin) + + expect_any_instance_of(self.class).to receive(:initial_current_user).and_return(admin_user) + + expect { authenticate! }.not_to raise_error + end + end end end diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb index eed45d37444..2ceb4648ece 100644 --- a/spec/requests/api/internal_spec.rb +++ b/spec/requests/api/internal_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -describe API::Internal, api: true do - include ApiHelpers +describe API::Internal do let(:user) { create(:user) } let(:key) { create(:key, user: user) } let(:project) { create(:project, :repository) } @@ -147,10 +146,31 @@ describe API::Internal, api: true do end end - describe "POST /internal/allowed" do + describe "POST /internal/allowed", :redis do context "access granted" do before do project.team << [user, :developer] + Timecop.freeze + end + + after do + Timecop.return + end + + context 'with env passed as a JSON' do + it 'sets env in RequestStore' do + expect(Gitlab::Git::Env).to receive(:set).with({ + 'GIT_OBJECT_DIRECTORY' => 'foo', + 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar' + }) + + push(key, project.wiki, env: { + GIT_OBJECT_DIRECTORY: 'foo', + GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar' + }.to_json) + + expect(response).to have_http_status(200) + end end context "git push with project.wiki" do @@ -160,6 +180,8 @@ describe API::Internal, api: true do expect(response).to have_http_status(200) expect(json_response["status"]).to be_truthy expect(json_response["repository_path"]).to eq(project.wiki.repository.path_to_repo) + expect(json_response["gl_repository"]).to eq("wiki-#{project.id}") + expect(user).not_to have_an_activity_record end end @@ -170,6 +192,8 @@ describe API::Internal, api: true do expect(response).to have_http_status(200) expect(json_response["status"]).to be_truthy expect(json_response["repository_path"]).to eq(project.wiki.repository.path_to_repo) + expect(json_response["gl_repository"]).to eq("wiki-#{project.id}") + expect(user).to have_an_activity_record end end @@ -180,6 +204,8 @@ describe API::Internal, api: true do expect(response).to have_http_status(200) expect(json_response["status"]).to be_truthy expect(json_response["repository_path"]).to eq(project.repository.path_to_repo) + expect(json_response["gl_repository"]).to eq("project-#{project.id}") + expect(user).to have_an_activity_record end end @@ -190,6 +216,8 @@ describe API::Internal, api: true do expect(response).to have_http_status(200) expect(json_response["status"]).to be_truthy expect(json_response["repository_path"]).to eq(project.repository.path_to_repo) + expect(json_response["gl_repository"]).to 
eq("project-#{project.id}") + expect(user).not_to have_an_activity_record end context 'project as /namespace/project' do @@ -199,6 +227,7 @@ describe API::Internal, api: true do expect(response).to have_http_status(200) expect(json_response["status"]).to be_truthy expect(json_response["repository_path"]).to eq(project.repository.path_to_repo) + expect(json_response["gl_repository"]).to eq("project-#{project.id}") end end @@ -209,6 +238,7 @@ describe API::Internal, api: true do expect(response).to have_http_status(200) expect(json_response["status"]).to be_truthy expect(json_response["repository_path"]).to eq(project.repository.path_to_repo) + expect(json_response["gl_repository"]).to eq("project-#{project.id}") end end end @@ -225,6 +255,7 @@ describe API::Internal, api: true do expect(response).to have_http_status(200) expect(json_response["status"]).to be_falsey + expect(user).not_to have_an_activity_record end end @@ -234,6 +265,7 @@ describe API::Internal, api: true do expect(response).to have_http_status(200) expect(json_response["status"]).to be_falsey + expect(user).not_to have_an_activity_record end end end @@ -251,6 +283,7 @@ describe API::Internal, api: true do expect(response).to have_http_status(200) expect(json_response["status"]).to be_falsey + expect(user).not_to have_an_activity_record end end @@ -260,6 +293,7 @@ describe API::Internal, api: true do expect(response).to have_http_status(200) expect(json_response["status"]).to be_falsey + expect(user).not_to have_an_activity_record end end end @@ -416,18 +450,39 @@ describe API::Internal, api: true do expect(json_response).to eq([]) end + + context 'with a gl_repository parameter' do + let(:gl_repository) { "project-#{project.id}" } + + it 'returns link to create new merge request' do + get api("/internal/merge_request_urls?gl_repository=#{gl_repository}&changes=#{changes}"), secret_token: secret_token + + expect(json_response).to match [{ + "branch_name" => "new_branch", + "url" => "http://#{Gitlab.config.gitlab.host}/#{project.namespace.name}/#{project.path}/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch", + "new_merge_request" => true + }] + end + end end describe 'POST /notify_post_receive' do let(:valid_params) do - { repo_path: project.repository.path, secret_token: secret_token } + { project: project.repository.path, secret_token: secret_token } + end + + let(:valid_wiki_params) do + { project: project.wiki.repository.path, secret_token: secret_token } end before do allow(Gitlab.config.gitaly).to receive(:enabled).and_return(true) end - it "calls the Gitaly client if it's enabled" do + it "calls the Gitaly client with the project's repository" do + expect(Gitlab::GitalyClient::Notifications). + to receive(:new).with(gitlab_git_repository_with(path: project.repository.path)). + and_call_original expect_any_instance_of(Gitlab::GitalyClient::Notifications). to receive(:post_receive) @@ -436,6 +491,18 @@ describe API::Internal, api: true do expect(response).to have_http_status(200) end + it "calls the Gitaly client with the wiki's repository if it's a wiki" do + expect(Gitlab::GitalyClient::Notifications). + to receive(:new).with(gitlab_git_repository_with(path: project.wiki.repository.path)). + and_call_original + expect_any_instance_of(Gitlab::GitalyClient::Notifications). 
+ to receive(:post_receive) + + post api("/internal/notify_post_receive"), valid_wiki_params + + expect(response).to have_http_status(200) + end + it "returns 500 if the gitaly call fails" do expect_any_instance_of(Gitlab::GitalyClient::Notifications). to receive(:post_receive).and_raise(GRPC::Unavailable) @@ -444,6 +511,40 @@ describe API::Internal, api: true do expect(response).to have_http_status(500) end + + context 'with a gl_repository parameter' do + let(:valid_params) do + { gl_repository: "project-#{project.id}", secret_token: secret_token } + end + + let(:valid_wiki_params) do + { gl_repository: "wiki-#{project.id}", secret_token: secret_token } + end + + it "calls the Gitaly client with the project's repository" do + expect(Gitlab::GitalyClient::Notifications). + to receive(:new).with(gitlab_git_repository_with(path: project.repository.path)). + and_call_original + expect_any_instance_of(Gitlab::GitalyClient::Notifications). + to receive(:post_receive) + + post api("/internal/notify_post_receive"), valid_params + + expect(response).to have_http_status(200) + end + + it "calls the Gitaly client with the wiki's repository if it's a wiki" do + expect(Gitlab::GitalyClient::Notifications). + to receive(:new).with(gitlab_git_repository_with(path: project.wiki.repository.path)). + and_call_original + expect_any_instance_of(Gitlab::GitalyClient::Notifications). + to receive(:post_receive) + + post api("/internal/notify_post_receive"), valid_wiki_params + + expect(response).to have_http_status(200) + end + end end def project_with_repo_path(path) @@ -463,7 +564,7 @@ describe API::Internal, api: true do ) end - def push(key, project, protocol = 'ssh') + def push(key, project, protocol = 'ssh', env: nil) post( api("/internal/allowed"), changes: 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master', @@ -471,7 +572,8 @@ describe API::Internal, api: true do project: project.repository.path_to_repo, action: 'git-receive-pack', secret_token: secret_token, - protocol: protocol + protocol: protocol, + env: env ) end diff --git a/spec/requests/api/issues_spec.rb b/spec/requests/api/issues_spec.rb index 91d6fb83c0b..79cac721202 100644 --- a/spec/requests/api/issues_spec.rb +++ b/spec/requests/api/issues_spec.rb @@ -1,25 +1,29 @@ require 'spec_helper' -describe API::Issues, api: true do - include ApiHelpers +describe API::Issues do include EmailHelpers - let(:user) { create(:user) } + set(:user) { create(:user) } + set(:project) do + create(:empty_project, :public, creator_id: user.id, namespace: user.namespace) + end + let(:user2) { create(:user) } let(:non_member) { create(:user) } - let(:guest) { create(:user) } - let(:author) { create(:author) } - let(:assignee) { create(:assignee) } + set(:guest) { create(:user) } + set(:author) { create(:author) } + set(:assignee) { create(:assignee) } let(:admin) { create(:user, :admin) } - let!(:project) { create(:empty_project, :public, creator_id: user.id, namespace: user.namespace ) } + let(:issue_title) { 'foo' } + let(:issue_description) { 'closed' } let!(:closed_issue) do create :closed_issue, author: user, - assignee: user, + assignees: [user], project: project, state: :closed, milestone: milestone, - created_at: generate(:issue_created_at), + created_at: generate(:past_time), updated_at: 3.hours.ago end let!(:confidential_issue) do @@ -27,32 +31,34 @@ describe API::Issues, api: true do :confidential, project: project, author: author, - assignee: assignee, - created_at: generate(:issue_created_at), + 
assignees: [assignee], + created_at: generate(:past_time), updated_at: 2.hours.ago end let!(:issue) do create :issue, author: user, - assignee: user, + assignees: [user], project: project, milestone: milestone, - created_at: generate(:issue_created_at), - updated_at: 1.hour.ago + created_at: generate(:past_time), + updated_at: 1.hour.ago, + title: issue_title, + description: issue_description end - let!(:label) do + set(:label) do create(:label, title: 'label', color: '#FFAABB', project: project) end let!(:label_link) { create(:label_link, label: label, target: issue) } - let!(:milestone) { create(:milestone, title: '1.0.0', project: project) } - let!(:empty_milestone) do + set(:milestone) { create(:milestone, title: '1.0.0', project: project) } + set(:empty_milestone) do create(:milestone, title: '2.0.0', project: project) end let!(:note) { create(:note_on_issue, author: user, project: project, noteable: issue) } let(:no_milestone_title) { URI.escape(Milestone::None.title) } - before do + before(:all) do project.team << [user, :reporter] project.team << [guest, :guest] end @@ -61,60 +67,63 @@ describe API::Issues, api: true do context "when unauthenticated" do it "returns authentication error" do get api("/issues") + expect(response).to have_http_status(401) end end context "when authenticated" do + let(:first_issue) { json_response.first } + it "returns an array of issues" do get api("/issues", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array + expect_paginated_array_response(size: 2) expect(json_response.first['title']).to eq(issue.title) expect(json_response.last).to have_key('web_url') end it 'returns an array of closed issues' do - get api('/issues?state=closed', user) + get api('/issues', user), state: :closed - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) - expect(json_response.first['id']).to eq(closed_issue.id) + expect_paginated_array_response(size: 1) + expect(first_issue['id']).to eq(closed_issue.id) end it 'returns an array of opened issues' do - get api('/issues?state=opened', user) + get api('/issues', user), state: :opened - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) - expect(json_response.first['id']).to eq(issue.id) + expect_paginated_array_response(size: 1) + expect(first_issue['id']).to eq(issue.id) end it 'returns an array of all issues' do - get api('/issues?state=all', user) + get api('/issues', user), state: :all - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(2) - expect(json_response.first['id']).to eq(issue.id) + expect_paginated_array_response(size: 2) + expect(first_issue['id']).to eq(issue.id) expect(json_response.second['id']).to eq(closed_issue.id) end + it 'returns issues matching given search string for title' do + get api("/issues", user), search: issue.title + + expect_paginated_array_response(size: 1) + expect(json_response.first['id']).to eq(issue.id) + end + + it 'returns issues matching given search string for description' do + get api("/issues", user), search: issue.description + + expect_paginated_array_response(size: 1) + expect(first_issue['id']).to eq(issue.id) + end + it 'returns an array 
of labeled issues' do - get api("/issues?labels=#{label.title}", user) + get api("/issues", user), labels: label.title - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) - expect(json_response.first['labels']).to eq([label.title]) + expect_paginated_array_response(size: 1) + expect(first_issue['labels']).to eq([label.title]) end it 'returns an array of labeled issues when all labels matches' do @@ -126,29 +135,20 @@ describe API::Issues, api: true do get api("/issues", user), labels: "#{label.title},#{label_b.title},#{label_c.title}" - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['labels']).to eq([label_c.title, label_b.title, label.title]) end it 'returns an empty array if no issue matches labels' do - get api('/issues?labels=foo,bar', user) + get api('/issues', user), labels: 'foo,bar' - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'returns an array of labeled issues matching given state' do - get api("/issues?labels=#{label.title}&state=opened", user) + get api("/issues", user), labels: label.title, state: :opened - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['labels']).to eq([label.title]) expect(json_response.first['state']).to eq('opened') end @@ -156,47 +156,32 @@ describe API::Issues, api: true do it 'returns unlabeled issues for "No Label" label' do get api("/issues", user), labels: 'No Label' - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['labels']).to be_empty end it 'returns an empty array if no issue matches labels and state filters' do get api("/issues?labels=#{label.title}&state=closed", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'returns an empty array if no issue matches milestone' do get api("/issues?milestone=#{empty_milestone.title}", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'returns an empty array if milestone does not exist' do get api("/issues?milestone=foo", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'returns an array of issues in given milestone' do get api("/issues?milestone=#{milestone.title}", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - 
expect(json_response.length).to eq(2) + expect_paginated_array_response(size: 2) expect(json_response.first['id']).to eq(issue.id) expect(json_response.second['id']).to eq(closed_issue.id) end @@ -205,49 +190,36 @@ describe API::Issues, api: true do get api("/issues?milestone=#{milestone.title}"\ '&state=closed', user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['id']).to eq(closed_issue.id) end it 'returns an array of issues with no milestone' do get api("/issues?milestone=#{no_milestone_title}", author) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['id']).to eq(confidential_issue.id) end it 'returns an array of issues found by iids' do get api('/issues', user), iids: [closed_issue.iid] - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['id']).to eq(closed_issue.id) end it 'returns an empty array if iid does not exist' do get api("/issues", user), iids: [99999] - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'sorts by created_at descending by default' do get api('/issues', user) response_dates = json_response.map { |issue| issue['created_at'] } - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array + + expect_paginated_array_response(size: 2) expect(response_dates).to eq(response_dates.sort.reverse) end @@ -255,9 +227,8 @@ describe API::Issues, api: true do get api('/issues?sort=asc', user) response_dates = json_response.map { |issue| issue['created_at'] } - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array + + expect_paginated_array_response(size: 2) expect(response_dates).to eq(response_dates.sort) end @@ -265,9 +236,8 @@ describe API::Issues, api: true do get api('/issues?order_by=updated_at', user) response_dates = json_response.map { |issue| issue['updated_at'] } - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array + + expect_paginated_array_response(size: 2) expect(response_dates).to eq(response_dates.sort.reverse) end @@ -275,9 +245,8 @@ describe API::Issues, api: true do get api('/issues?order_by=updated_at&sort=asc', user) response_dates = json_response.map { |issue| issue['updated_at'] } - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array + + expect_paginated_array_response(size: 2) expect(response_dates).to eq(response_dates.sort) end @@ -296,7 +265,7 @@ describe API::Issues, api: true do let!(:group_closed_issue) do create :closed_issue, author: user, - assignee: user, + assignees: [user], project: group_project, state: :closed, milestone: group_milestone, @@ -307,16 +276,18 @@ describe API::Issues, api: true do 
:confidential, project: group_project, author: author, - assignee: assignee, + assignees: [assignee], updated_at: 2.hours.ago end let!(:group_issue) do create :issue, author: user, - assignee: user, + assignees: [user], project: group_project, milestone: group_milestone, - updated_at: 1.hour.ago + updated_at: 1.hour.ago, + title: issue_title, + description: issue_description end let!(:group_label) do create(:label, title: 'group_lbl', color: '#FFAABB', project: group_project) @@ -336,74 +307,65 @@ describe API::Issues, api: true do it 'returns all group issues (including opened and closed)' do get api(base_url, admin) - expect(response).to have_http_status(200) - expect(json_response).to be_an Array - expect(json_response.length).to eq(3) + expect_paginated_array_response(size: 3) end it 'returns group issues without confidential issues for non project members' do get api("#{base_url}?state=opened", non_member) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['title']).to eq(group_issue.title) end it 'returns group confidential issues for author' do get api("#{base_url}?state=opened", author) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(2) + expect_paginated_array_response(size: 2) end it 'returns group confidential issues for assignee' do get api("#{base_url}?state=opened", assignee) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(2) + expect_paginated_array_response(size: 2) end it 'returns group issues with confidential issues for project members' do get api("#{base_url}?state=opened", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(2) + expect_paginated_array_response(size: 2) end it 'returns group confidential issues for admin' do get api("#{base_url}?state=opened", admin) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(2) + expect_paginated_array_response(size: 2) end it 'returns an array of labeled group issues' do get api("#{base_url}?labels=#{group_label.title}", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['labels']).to eq([group_label.title]) end it 'returns an array of labeled group issues where all labels match' do get api("#{base_url}?labels=#{group_label.title},foo,bar", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) + end + + it 'returns issues matching given search string for title' do + get api("#{base_url}?search=#{group_issue.title}", user) + + expect_paginated_array_response(size: 1) + expect(json_response.first['id']).to eq(group_issue.id) + end + + it 'returns issues matching given search string for description' do + get 
api("#{base_url}?search=#{group_issue.description}", user) + + expect_paginated_array_response(size: 1) + expect(json_response.first['id']).to eq(group_issue.id) end it 'returns an array of labeled issues when all labels matches' do @@ -415,65 +377,45 @@ describe API::Issues, api: true do get api("#{base_url}", user), labels: "#{group_label.title},#{label_b.title},#{label_c.title}" - expect(response).to have_http_status(200) - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['labels']).to eq([label_c.title, label_b.title, group_label.title]) end it 'returns an array of issues found by iids' do get api(base_url, user), iids: [group_issue.iid] - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['id']).to eq(group_issue.id) end it 'returns an empty array if iid does not exist' do get api(base_url, user), iids: [99999] - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'returns an empty array if no group issue matches labels' do get api("#{base_url}?labels=foo,bar", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'returns an empty array if no issue matches milestone' do get api("#{base_url}?milestone=#{group_empty_milestone.title}", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'returns an empty array if milestone does not exist' do get api("#{base_url}?milestone=foo", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'returns an array of issues in given milestone' do get api("#{base_url}?state=opened&milestone=#{group_milestone.title}", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['id']).to eq(group_issue.id) end @@ -481,10 +423,7 @@ describe API::Issues, api: true do get api("#{base_url}?milestone=#{group_milestone.title}"\ '&state=closed', user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['id']).to eq(group_closed_issue.id) end @@ -492,9 +431,8 @@ describe API::Issues, api: true do get api("#{base_url}?milestone=#{no_milestone_title}", user) expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + + expect_paginated_array_response(size: 1) expect(json_response.first['id']).to 
eq(group_confidential_issue.id) end @@ -502,9 +440,8 @@ describe API::Issues, api: true do get api(base_url, user) response_dates = json_response.map { |issue| issue['created_at'] } - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array + + expect_paginated_array_response(size: 3) expect(response_dates).to eq(response_dates.sort.reverse) end @@ -512,9 +449,8 @@ describe API::Issues, api: true do get api("#{base_url}?sort=asc", user) response_dates = json_response.map { |issue| issue['created_at'] } - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array + + expect_paginated_array_response(size: 3) expect(response_dates).to eq(response_dates.sort) end @@ -522,9 +458,8 @@ describe API::Issues, api: true do get api("#{base_url}?order_by=updated_at", user) response_dates = json_response.map { |issue| issue['updated_at'] } - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array + + expect_paginated_array_response(size: 3) expect(response_dates).to eq(response_dates.sort.reverse) end @@ -532,9 +467,8 @@ describe API::Issues, api: true do get api("#{base_url}?order_by=updated_at&sort=asc", user) response_dates = json_response.map { |issue| issue['updated_at'] } - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array + + expect_paginated_array_response(size: 3) expect(response_dates).to eq(response_dates.sort) end end @@ -563,79 +497,55 @@ describe API::Issues, api: true do get api("/projects/#{restricted_project.id}/issues", non_member) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response).to eq([]) + expect_paginated_array_response(size: 0) end it 'returns project issues without confidential issues for non project members' do get api("#{base_url}/issues", non_member) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(2) + expect_paginated_array_response(size: 2) expect(json_response.first['title']).to eq(issue.title) end it 'returns project issues without confidential issues for project members with guest role' do get api("#{base_url}/issues", guest) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(2) + expect_paginated_array_response(size: 2) expect(json_response.first['title']).to eq(issue.title) end it 'returns project confidential issues for author' do get api("#{base_url}/issues", author) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(3) + expect_paginated_array_response(size: 3) expect(json_response.first['title']).to eq(issue.title) end it 'returns project confidential issues for assignee' do get api("#{base_url}/issues", assignee) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(3) + expect_paginated_array_response(size: 3) expect(json_response.first['title']).to eq(issue.title) end it 'returns project issues with 
confidential issues for project members' do get api("#{base_url}/issues", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(3) + expect_paginated_array_response(size: 3) expect(json_response.first['title']).to eq(issue.title) end it 'returns project confidential issues for admin' do get api("#{base_url}/issues", admin) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(3) + expect_paginated_array_response(size: 3) expect(json_response.first['title']).to eq(issue.title) end it 'returns an array of labeled project issues' do get api("#{base_url}/issues?labels=#{label.title}", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['labels']).to eq([label.title]) end @@ -648,74 +558,65 @@ describe API::Issues, api: true do get api("#{base_url}/issues", user), labels: "#{label.title},#{label_b.title},#{label_c.title}" - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['labels']).to eq([label_c.title, label_b.title, label.title]) end + it 'returns issues matching given search string for title' do + get api("#{base_url}/issues?search=#{issue.title}", user) + + expect_paginated_array_response(size: 1) + expect(json_response.first['id']).to eq(issue.id) + end + + it 'returns issues matching given search string for description' do + get api("#{base_url}/issues?search=#{issue.description}", user) + + expect_paginated_array_response(size: 1) + expect(json_response.first['id']).to eq(issue.id) + end + it 'returns an array of issues found by iids' do get api("#{base_url}/issues", user), iids: [issue.iid] - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['id']).to eq(issue.id) end it 'returns an empty array if iid does not exist' do get api("#{base_url}/issues", user), iids: [99999] - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'returns an empty array if not all labels matches' do get api("#{base_url}/issues?labels=#{label.title},foo", user) - expect(response).to have_http_status(200) - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'returns an empty array if no project issue matches labels' do get api("#{base_url}/issues?labels=foo,bar", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'returns an empty array if no issue matches milestone' do get api("#{base_url}/issues?milestone=#{empty_milestone.title}", user) - expect(response).to have_http_status(200) - 
expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'returns an empty array if milestone does not exist' do get api("#{base_url}/issues?milestone=foo", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(0) + expect_paginated_array_response(size: 0) end it 'returns an array of issues in given milestone' do get api("#{base_url}/issues?milestone=#{milestone.title}", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(2) + expect_paginated_array_response(size: 2) expect(json_response.first['id']).to eq(issue.id) expect(json_response.second['id']).to eq(closed_issue.id) end @@ -723,20 +624,14 @@ describe API::Issues, api: true do it 'returns an array of issues matching state in milestone' do get api("#{base_url}/issues?milestone=#{milestone.title}&state=closed", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['id']).to eq(closed_issue.id) end it 'returns an array of issues with no milestone' do get api("#{base_url}/issues?milestone=#{no_milestone_title}", user) - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.length).to eq(1) + expect_paginated_array_response(size: 1) expect(json_response.first['id']).to eq(confidential_issue.id) end @@ -744,9 +639,8 @@ describe API::Issues, api: true do get api("#{base_url}/issues", user) response_dates = json_response.map { |issue| issue['created_at'] } - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array + + expect_paginated_array_response(size: 3) expect(response_dates).to eq(response_dates.sort.reverse) end @@ -754,9 +648,8 @@ describe API::Issues, api: true do get api("#{base_url}/issues?sort=asc", user) response_dates = json_response.map { |issue| issue['created_at'] } - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array + + expect_paginated_array_response(size: 3) expect(response_dates).to eq(response_dates.sort) end @@ -764,9 +657,8 @@ describe API::Issues, api: true do get api("#{base_url}/issues?order_by=updated_at", user) response_dates = json_response.map { |issue| issue['updated_at'] } - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array + + expect_paginated_array_response(size: 3) expect(response_dates).to eq(response_dates.sort.reverse) end @@ -774,9 +666,8 @@ describe API::Issues, api: true do get api("#{base_url}/issues?order_by=updated_at&sort=asc", user) response_dates = json_response.map { |issue| issue['updated_at'] } - expect(response).to have_http_status(200) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array + + expect_paginated_array_response(size: 3) expect(response_dates).to eq(response_dates.sort) end end @@ -796,6 +687,7 @@ describe API::Issues, api: true do expect(json_response['updated_at']).to 
be_present expect(json_response['labels']).to eq(issue.label_names) expect(json_response['milestone']).to be_a Hash + expect(json_response['assignees']).to be_a Array expect(json_response['assignee']).to be_a Hash expect(json_response['author']).to be_a Hash expect(json_response['confidential']).to be_falsy @@ -868,15 +760,41 @@ describe API::Issues, api: true do end describe "POST /projects/:id/issues" do + context 'support for deprecated assignee_id' do + it 'creates a new project issue' do + post api("/projects/#{project.id}/issues", user), + title: 'new issue', assignee_id: user2.id + + expect(response).to have_http_status(201) + expect(json_response['title']).to eq('new issue') + expect(json_response['assignee']['name']).to eq(user2.name) + expect(json_response['assignees'].first['name']).to eq(user2.name) + end + end + + context 'CE restrictions' do + it 'creates a new project issue with no more than one assignee' do + post api("/projects/#{project.id}/issues", user), + title: 'new issue', assignee_ids: [user2.id, guest.id] + + expect(response).to have_http_status(201) + expect(json_response['title']).to eq('new issue') + expect(json_response['assignees'].count).to eq(1) + end + end + it 'creates a new project issue' do post api("/projects/#{project.id}/issues", user), - title: 'new issue', labels: 'label, label2' + title: 'new issue', labels: 'label, label2', weight: 3, + assignee_ids: [user2.id] expect(response).to have_http_status(201) expect(json_response['title']).to eq('new issue') expect(json_response['description']).to be_nil expect(json_response['labels']).to eq(%w(label label2)) expect(json_response['confidential']).to be_falsy + expect(json_response['assignee']['name']).to eq(user2.name) + expect(json_response['assignees'].first['name']).to eq(user2.name) end it 'creates a new confidential project issue' do @@ -953,7 +871,7 @@ describe API::Issues, api: true do end context 'resolving discussions' do - let(:discussion) { Discussion.for_diff_notes([create(:diff_note_on_merge_request)]).first } + let(:discussion) { create(:diff_note_on_merge_request).to_discussion } let(:merge_request) { discussion.noteable } let(:project) { merge_request.source_project } @@ -1166,6 +1084,57 @@ describe API::Issues, api: true do end end + describe 'PUT /projects/:id/issues/:issue_iid to update assignee' do + context 'support for deprecated assignee_id' do + it 'removes assignee' do + put api("/projects/#{project.id}/issues/#{issue.iid}", user), + assignee_id: 0 + + expect(response).to have_http_status(200) + + expect(json_response['assignee']).to be_nil + end + + it 'updates an issue with new assignee' do + put api("/projects/#{project.id}/issues/#{issue.iid}", user), + assignee_id: user2.id + + expect(response).to have_http_status(200) + + expect(json_response['assignee']['name']).to eq(user2.name) + end + end + + it 'removes assignee' do + put api("/projects/#{project.id}/issues/#{issue.iid}", user), + assignee_ids: [0] + + expect(response).to have_http_status(200) + + expect(json_response['assignees']).to be_empty + end + + it 'updates an issue with new assignee' do + put api("/projects/#{project.id}/issues/#{issue.iid}", user), + assignee_ids: [user2.id] + + expect(response).to have_http_status(200) + + expect(json_response['assignees'].first['name']).to eq(user2.name) + end + + context 'CE restrictions' do + it 'updates an issue with several assignees but only one has been applied' do + put api("/projects/#{project.id}/issues/#{issue.iid}", user), + assignee_ids: [user2.id, guest.id] + + 
expect(response).to have_http_status(200) + + expect(json_response['assignees'].size).to eq(1) + end + end + end + describe 'PUT /projects/:id/issues/:issue_iid to update labels' do let!(:label) { create(:label, title: 'dummy', project: project) } let!(:label_link) { create(:label_link, label: label, target: issue) } @@ -1457,4 +1426,46 @@ describe API::Issues, api: true do include_examples 'time tracking endpoints', 'issue' end + + describe 'GET :id/issues/:issue_iid/closed_by' do + let(:merge_request) do + create(:merge_request, + :simple, + author: user, + source_project: project, + target_project: project, + description: "closes #{issue.to_reference}") + end + + before do + create(:merge_requests_closing_issues, issue: issue, merge_request: merge_request) + end + + it 'returns merge requests that will close issue on merge' do + get api("/projects/#{project.id}/issues/#{issue.iid}/closed_by", user) + + expect_paginated_array_response(size: 1) + end + + context 'when no merge requests will close issue' do + it 'returns empty array' do + get api("/projects/#{project.id}/issues/#{closed_issue.iid}/closed_by", user) + + expect_paginated_array_response(size: 0) + end + end + + it "returns 404 when issue doesn't exist" do + get api("/projects/#{project.id}/issues/9999/closed_by", user) + + expect(response).to have_http_status(404) + end + end + + def expect_paginated_array_response(size: nil) + expect(response).to have_http_status(200) + expect(response).to include_pagination_headers + expect(json_response).to be_an Array + expect(json_response.length).to eq(size) if size + end end diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb index 9450701064b..e5e5872dc1f 100644 --- a/spec/requests/api/jobs_spec.rb +++ b/spec/requests/api/jobs_spec.rb @@ -1,16 +1,26 @@ require 'spec_helper' -describe API::Jobs, api: true do - include ApiHelpers +describe API::Jobs, :api do + let!(:project) do + create(:project, :repository, public_builds: false) + end + + let!(:pipeline) do + create(:ci_empty_pipeline, project: project, + sha: project.commit.id, + ref: project.default_branch) + end + + let!(:build) { create(:ci_build, pipeline: pipeline) } let(:user) { create(:user) } let(:api_user) { user } - let!(:project) { create(:project, :repository, creator: user, public_builds: false) } - let!(:developer) { create(:project_member, :developer, user: user, project: project) } - let(:reporter) { create(:project_member, :reporter, project: project) } - let(:guest) { create(:project_member, :guest, project: project) } - let!(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) } - let!(:build) { create(:ci_build, pipeline: pipeline) } + let(:reporter) { create(:project_member, :reporter, project: project).user } + let(:guest) { create(:project_member, :guest, project: project).user } + + before do + project.add_developer(user) + end describe 'GET /projects/:id/jobs' do let(:query) { Hash.new } @@ -213,7 +223,7 @@ describe API::Jobs, api: true do end describe 'GET /projects/:id/artifacts/:ref_name/download?job=name' do - let(:api_user) { reporter.user } + let(:api_user) { reporter } let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) } before do @@ -237,7 +247,7 @@ describe API::Jobs, api: true do end context 'when logging as guest' do - let(:api_user) { guest.user } + let(:api_user) { guest } before do get_for_ref @@ -320,7 +330,7 @@ describe API::Jobs, api: true do context 'authorized user' do it 'returns specific job 
trace' do expect(response).to have_http_status(200) - expect(response.body).to eq(build.trace) + expect(response.body).to eq(build.trace.raw) end end @@ -347,7 +357,7 @@ describe API::Jobs, api: true do end context 'user without :update_build permission' do - let(:api_user) { reporter.user } + let(:api_user) { reporter } it 'does not cancel job' do expect(response).to have_http_status(403) @@ -381,7 +391,7 @@ describe API::Jobs, api: true do end context 'user without :update_build permission' do - let(:api_user) { reporter.user } + let(:api_user) { reporter } it 'does not retry job' do expect(response).to have_http_status(403) @@ -408,7 +418,7 @@ describe API::Jobs, api: true do it 'erases job content' do expect(response).to have_http_status(201) - expect(build.trace).to be_empty + expect(build).not_to have_trace expect(build.artifacts_file.exists?).to be_falsy expect(build.artifacts_metadata.exists?).to be_falsy end @@ -457,16 +467,39 @@ describe API::Jobs, api: true do describe 'POST /projects/:id/jobs/:job_id/play' do before do - post api("/projects/#{project.id}/jobs/#{build.id}/play", user) + post api("/projects/#{project.id}/jobs/#{build.id}/play", api_user) end context 'on a playable job' do let(:build) { create(:ci_build, :manual, project: project, pipeline: pipeline) } - it 'plays the job' do - expect(response).to have_http_status(200) - expect(json_response['user']['id']).to eq(user.id) - expect(json_response['id']).to eq(build.id) + context 'when user is authorized to trigger a manual action' do + it 'plays the job' do + expect(response).to have_http_status(200) + expect(json_response['user']['id']).to eq(user.id) + expect(json_response['id']).to eq(build.id) + expect(build.reload).to be_pending + end + end + + context 'when user is not authorized to trigger a manual action' do + context 'when user does not have access to the project' do + let(:api_user) { create(:user) } + + it 'does not trigger a manual action' do + expect(build.reload).to be_manual + expect(response).to have_http_status(404) + end + end + + context 'when user is not allowed to trigger the manual action' do + let(:api_user) { reporter } + + it 'does not trigger a manual action' do + expect(build.reload).to be_manual + expect(response).to have_http_status(403) + end + end end end diff --git a/spec/requests/api/keys_spec.rb b/spec/requests/api/keys_spec.rb index 4c80987d680..ab957c72984 100644 --- a/spec/requests/api/keys_spec.rb +++ b/spec/requests/api/keys_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Keys, api: true do - include ApiHelpers - +describe API::Keys do let(:user) { create(:user) } let(:admin) { create(:admin) } let(:key) { create(:key, user: user) } @@ -34,6 +32,12 @@ describe API::Keys, api: true do expect(json_response['user']['id']).to eq(user.id) expect(json_response['user']['username']).to eq(user.username) end + + it "does not include the user's `is_admin` flag" do + get api("/keys/#{key.id}", admin) + + expect(json_response['user']['is_admin']).to be_nil + end end end end diff --git a/spec/requests/api/labels_spec.rb b/spec/requests/api/labels_spec.rb index a1adaba7b98..0c6b55c1630 100644 --- a/spec/requests/api/labels_spec.rb +++ b/spec/requests/api/labels_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Labels, api: true do - include ApiHelpers - +describe API::Labels do let(:user) { create(:user) } let(:project) { create(:empty_project, creator_id: user.id, namespace: user.namespace) } let!(:label1) { create(:label, title: 'label1', project: project) } diff 
--git a/spec/requests/api/lint_spec.rb b/spec/requests/api/lint_spec.rb index 391fc13a380..df7c91b5bc1 100644 --- a/spec/requests/api/lint_spec.rb +++ b/spec/requests/api/lint_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Lint, api: true do - include ApiHelpers - +describe API::Lint do describe 'POST /ci/lint' do context 'with valid .gitlab-ci.yaml content' do let(:yaml_content) do diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb index 2d37d026a39..e095053fa03 100644 --- a/spec/requests/api/members_spec.rb +++ b/spec/requests/api/members_spec.rb @@ -1,9 +1,7 @@ require 'spec_helper' -describe API::Members, api: true do - include ApiHelpers - - let(:master) { create(:user) } +describe API::Members do + let(:master) { create(:user, username: 'master_user') } let(:developer) { create(:user) } let(:access_requester) { create(:user) } let(:stranger) { create(:user) } diff --git a/spec/requests/api/merge_request_diffs_spec.rb b/spec/requests/api/merge_request_diffs_spec.rb index 79f3151ba52..d1b22179888 100644 --- a/spec/requests/api/merge_request_diffs_spec.rb +++ b/spec/requests/api/merge_request_diffs_spec.rb @@ -1,8 +1,6 @@ require "spec_helper" -describe API::MergeRequestDiffs, 'MergeRequestDiffs', api: true do - include ApiHelpers - +describe API::MergeRequestDiffs, 'MergeRequestDiffs' do let!(:user) { create(:user) } let!(:merge_request) { create(:merge_request, importing: true) } let!(:project) { merge_request.target_project } diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb index 61d965e8974..16e5efb2f5b 100644 --- a/spec/requests/api/merge_requests_spec.rb +++ b/spec/requests/api/merge_requests_spec.rb @@ -1,16 +1,22 @@ require "spec_helper" -describe API::MergeRequests, api: true do - include ApiHelpers +describe API::MergeRequests do let(:base_time) { Time.now } let(:user) { create(:user) } let(:admin) { create(:user, :admin) } let(:non_member) { create(:user) } - let!(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace) } - let!(:merge_request) { create(:merge_request, :simple, author: user, assignee: user, source_project: project, title: "Test", created_at: base_time) } - let!(:merge_request_closed) { create(:merge_request, state: "closed", author: user, assignee: user, source_project: project, title: "Closed test", created_at: base_time + 1.second) } - let!(:merge_request_merged) { create(:merge_request, state: "merged", author: user, assignee: user, source_project: project, title: "Merged test", created_at: base_time + 2.seconds, merge_commit_sha: '9999999999999999999999999999999999999999') } + let!(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace, only_allow_merge_if_pipeline_succeeds: false) } let(:milestone) { create(:milestone, title: '1.0.0', project: project) } + let(:milestone1) { create(:milestone, title: '0.9', project: project) } + let!(:merge_request) { create(:merge_request, :simple, milestone: milestone1, author: user, assignee: user, source_project: project, target_project: project, title: "Test", created_at: base_time) } + let!(:merge_request_closed) { create(:merge_request, state: "closed", milestone: milestone1, author: user, assignee: user, source_project: project, target_project: project, title: "Closed test", created_at: base_time + 1.second) } + let!(:merge_request_merged) { create(:merge_request, state: "merged", author: user, assignee: user, source_project: project, 
target_project: project, title: "Merged test", created_at: base_time + 2.seconds, merge_commit_sha: '9999999999999999999999999999999999999999') } + let!(:note) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "a comment on a MR") } + let!(:note2) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "another comment on a MR") } + let!(:label) do + create(:label, title: 'label', color: '#FFAABB', project: project) + end + let!(:label_link) { create(:label_link, label: label, target: merge_request) } before do project.team << [user, :reporter] @@ -20,6 +26,7 @@ describe API::MergeRequests, api: true do context "when unauthenticated" do it "returns authentication error" do get api("/projects/#{project.id}/merge_requests") + expect(response).to have_http_status(401) end end @@ -100,6 +107,63 @@ describe API::MergeRequests, api: true do expect(response).to match_response_schema('public_api/v4/merge_requests') end + it 'returns an empty array if no issue matches milestone' do + get api("/projects/#{project.id}/merge_requests", user), milestone: '1.0.0' + + expect(response).to have_http_status(200) + expect(json_response).to be_an Array + expect(json_response.length).to eq(0) + end + + it 'returns an empty array if milestone does not exist' do + get api("/projects/#{project.id}/merge_requests", user), milestone: 'foo' + + expect(response).to have_http_status(200) + expect(json_response).to be_an Array + expect(json_response.length).to eq(0) + end + + it 'returns an array of merge requests in given milestone' do + get api("/projects/#{project.id}/merge_requests", user), milestone: '0.9' + + expect(json_response.first['title']).to eq merge_request_closed.title + expect(json_response.first['id']).to eq merge_request_closed.id + end + + it 'returns an array of merge requests matching state in milestone' do + get api("/projects/#{project.id}/merge_requests", user), milestone: '0.9', state: 'closed' + + expect(response).to have_http_status(200) + expect(json_response).to be_an Array + expect(json_response.length).to eq(1) + expect(json_response.first['id']).to eq(merge_request_closed.id) + end + + it 'returns an array of labeled merge requests' do + get api("/projects/#{project.id}/merge_requests?labels=#{label.title}", user) + + expect(response).to have_http_status(200) + expect(json_response).to be_an Array + expect(json_response.length).to eq(1) + expect(json_response.first['labels']).to eq([label.title]) + end + + it 'returns an array of labeled merge requests where all labels match' do + get api("/projects/#{project.id}/merge_requests?labels=#{label.title},foo,bar", user) + + expect(response).to have_http_status(200) + expect(json_response).to be_an Array + expect(json_response.length).to eq(0) + end + + it 'returns an empty array if no merge request matches labels' do + get api("/projects/#{project.id}/merge_requests?labels=foo,bar", user) + + expect(response).to have_http_status(200) + expect(json_response).to be_an Array + expect(json_response.length).to eq(0) + end + context "with ordering" do before do @mr_later = mr_with_later_created_and_updated_at_time @@ -167,7 +231,7 @@ describe API::MergeRequests, api: true do expect(json_response['created_at']).to be_present expect(json_response['updated_at']).to be_present expect(json_response['labels']).to eq(merge_request.label_names) - expect(json_response['milestone']).to be_nil + expect(json_response['milestone']).to be_a Hash 
expect(json_response['assignee']).to be_a Hash expect(json_response['author']).to be_a Hash expect(json_response['target_branch']).to eq(merge_request.target_branch) @@ -370,6 +434,19 @@ describe API::MergeRequests, api: true do expect(json_response['title']).to eq('Test merge_request') end + it 'returns 422 when target project has disabled merge requests' do + project.project_feature.update(merge_requests_access_level: 0) + + post api("/projects/#{fork_project.id}/merge_requests", user2), + title: 'Test', + target_branch: 'master', + source_branch: 'markdown', + author: user2, + target_project_id: project.id + + expect(response).to have_http_status(422) + end + it "returns 400 when source_branch is missing" do post api("/projects/#{fork_project.id}/merge_requests", user2), title: 'Test merge_request', target_branch: "master", author: user2, target_project_id: project.id @@ -527,6 +604,18 @@ describe API::MergeRequests, api: true do expect(json_response['merge_when_pipeline_succeeds']).to eq(true) end + it "enables merge when pipeline succeeds if the pipeline is active and only_allow_merge_if_pipeline_succeeds is true" do + allow_any_instance_of(MergeRequest).to receive(:head_pipeline).and_return(pipeline) + allow(pipeline).to receive(:active?).and_return(true) + project.update_attribute(:only_allow_merge_if_pipeline_succeeds, true) + + put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user), merge_when_pipeline_succeeds: true + + expect(response).to have_http_status(200) + expect(json_response['title']).to eq('Test') + expect(json_response['merge_when_pipeline_succeeds']).to eq(true) + end + it "returns 404 for an invalid merge request IID" do put api("/projects/#{project.id}/merge_requests/12345/merge", user) diff --git a/spec/requests/api/milestones_spec.rb b/spec/requests/api/milestones_spec.rb index 7fb728fed6f..dd74351a2b1 100644 --- a/spec/requests/api/milestones_spec.rb +++ b/spec/requests/api/milestones_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -describe API::Milestones, api: true do - include ApiHelpers +describe API::Milestones do let(:user) { create(:user) } let!(:project) { create(:empty_project, namespace: user.namespace ) } let!(:closed_milestone) { create(:closed_milestone, project: project, title: 'version1', description: 'closed milestone') } @@ -306,6 +305,8 @@ describe API::Milestones, api: true do end it 'returns project merge_requests for a particular milestone' do + # eager-load another_merge_request + another_merge_request get api("/projects/#{project.id}/milestones/#{milestone.id}/merge_requests", user) expect(response).to have_http_status(200) diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb index da8fa06d0af..3bf16a3ae27 100644 --- a/spec/requests/api/namespaces_spec.rb +++ b/spec/requests/api/namespaces_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -describe API::Namespaces, api: true do - include ApiHelpers +describe API::Namespaces do let(:admin) { create(:admin) } let(:user) { create(:user) } let!(:group1) { create(:group) } diff --git a/spec/requests/api/notes_spec.rb b/spec/requests/api/notes_spec.rb index d8eb8ce921e..6afcd237c3c 100644 --- a/spec/requests/api/notes_spec.rb +++ b/spec/requests/api/notes_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -describe API::Notes, api: true do - include ApiHelpers +describe API::Notes do let(:user) { create(:user) } let!(:project) { create(:empty_project, :public, namespace: user.namespace) } let!(:issue) { create(:issue, project: project, author: 
user) } diff --git a/spec/requests/api/notification_settings_spec.rb b/spec/requests/api/notification_settings_spec.rb index 39d3afcb78f..f619b7e6eaf 100644 --- a/spec/requests/api/notification_settings_spec.rb +++ b/spec/requests/api/notification_settings_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::NotificationSettings, api: true do - include ApiHelpers - +describe API::NotificationSettings do let(:user) { create(:user) } let!(:group) { create(:group) } let!(:project) { create(:empty_project, :public, creator_id: user.id, namespace: group) } diff --git a/spec/requests/api/oauth_tokens_spec.rb b/spec/requests/api/oauth_tokens_spec.rb index 367225df717..0d56e1f732e 100644 --- a/spec/requests/api/oauth_tokens_spec.rb +++ b/spec/requests/api/oauth_tokens_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::API, api: true do - include ApiHelpers - +describe 'OAuth tokens' do context 'Resource Owner Password Credentials' do def request_oauth_token(user) post '/oauth/token', username: user.username, password: user.password, grant_type: 'password' diff --git a/spec/requests/api/pipelines_spec.rb b/spec/requests/api/pipelines_spec.rb index 51af999b455..f9e5316b3de 100644 --- a/spec/requests/api/pipelines_spec.rb +++ b/spec/requests/api/pipelines_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Pipelines, api: true do - include ApiHelpers - +describe API::Pipelines do let(:user) { create(:user) } let(:non_member) { create(:user) } let(:project) { create(:project, :repository, creator: user) } @@ -26,6 +24,245 @@ describe API::Pipelines, api: true do expect(json_response.first['id']).to eq pipeline.id expect(json_response.first.keys).to contain_exactly(*%w[id sha ref status]) end + + context 'when parameter is passed' do + %w[running pending].each do |target| + context "when scope is #{target}" do + before do + create(:ci_pipeline, project: project, status: target) + end + + it 'returns matched pipelines' do + get api("/projects/#{project.id}/pipelines", user), scope: target + + expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response).not_to be_empty + json_response.each { |r| expect(r['status']).to eq(target) } + end + end + end + + context 'when scope is finished' do + before do + create(:ci_pipeline, project: project, status: 'success') + create(:ci_pipeline, project: project, status: 'failed') + create(:ci_pipeline, project: project, status: 'canceled') + end + + it 'returns matched pipelines' do + get api("/projects/#{project.id}/pipelines", user), scope: 'finished' + + expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response).not_to be_empty + json_response.each { |r| expect(r['status']).to be_in(%w[success failed canceled]) } + end + end + + context 'when scope is branches or tags' do + let!(:pipeline_branch) { create(:ci_pipeline, project: project) } + let!(:pipeline_tag) { create(:ci_pipeline, project: project, ref: 'v1.0.0', tag: true) } + + context 'when scope is branches' do + it 'returns matched pipelines' do + get api("/projects/#{project.id}/pipelines", user), scope: 'branches' + + expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response).not_to be_empty + expect(json_response.last['id']).to eq(pipeline_branch.id) + end + end + + context 'when scope is tags' do + it 'returns matched pipelines' do + get api("/projects/#{project.id}/pipelines", user), scope: 'tags' + + 
expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response).not_to be_empty + expect(json_response.last['id']).to eq(pipeline_tag.id) + end + end + end + + context 'when scope is invalid' do + it 'returns bad_request' do + get api("/projects/#{project.id}/pipelines", user), scope: 'invalid-scope' + + expect(response).to have_http_status(:bad_request) + end + end + + HasStatus::AVAILABLE_STATUSES.each do |target| + context "when status is #{target}" do + before do + create(:ci_pipeline, project: project, status: target) + exception_status = HasStatus::AVAILABLE_STATUSES - [target] + create(:ci_pipeline, project: project, status: exception_status.sample) + end + + it 'returns matched pipelines' do + get api("/projects/#{project.id}/pipelines", user), status: target + + expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response).not_to be_empty + json_response.each { |r| expect(r['status']).to eq(target) } + end + end + end + + context 'when status is invalid' do + it 'returns bad_request' do + get api("/projects/#{project.id}/pipelines", user), status: 'invalid-status' + + expect(response).to have_http_status(:bad_request) + end + end + + context 'when ref is specified' do + before do + create(:ci_pipeline, project: project) + end + + context 'when ref exists' do + it 'returns matched pipelines' do + get api("/projects/#{project.id}/pipelines", user), ref: 'master' + + expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response).not_to be_empty + json_response.each { |r| expect(r['ref']).to eq('master') } + end + end + + context 'when ref does not exist' do + it 'returns empty' do + get api("/projects/#{project.id}/pipelines", user), ref: 'invalid-ref' + + expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response).to be_empty + end + end + end + + context 'when name is specified' do + let!(:pipeline) { create(:ci_pipeline, project: project, user: user) } + + context 'when name exists' do + it 'returns matched pipelines' do + get api("/projects/#{project.id}/pipelines", user), name: user.name + + expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response.first['id']).to eq(pipeline.id) + end + end + + context 'when name does not exist' do + it 'returns empty' do + get api("/projects/#{project.id}/pipelines", user), name: 'invalid-name' + + expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response).to be_empty + end + end + end + + context 'when username is specified' do + let!(:pipeline) { create(:ci_pipeline, project: project, user: user) } + + context 'when username exists' do + it 'returns matched pipelines' do + get api("/projects/#{project.id}/pipelines", user), username: user.username + + expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response.first['id']).to eq(pipeline.id) + end + end + + context 'when username does not exist' do + it 'returns empty' do + get api("/projects/#{project.id}/pipelines", user), username: 'invalid-username' + + expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response).to be_empty + end + end + end + + context 'when yaml_errors is specified' do + let!(:pipeline1) { create(:ci_pipeline, project: project, yaml_errors: 
'Syntax error') } + let!(:pipeline2) { create(:ci_pipeline, project: project) } + + context 'when yaml_errors is true' do + it 'returns matched pipelines' do + get api("/projects/#{project.id}/pipelines", user), yaml_errors: true + + expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response.first['id']).to eq(pipeline1.id) + end + end + + context 'when yaml_errors is false' do + it 'returns matched pipelines' do + get api("/projects/#{project.id}/pipelines", user), yaml_errors: false + + expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response.first['id']).to eq(pipeline2.id) + end + end + + context 'when yaml_errors is invalid' do + it 'returns bad_request' do + get api("/projects/#{project.id}/pipelines", user), yaml_errors: 'invalid-yaml_errors' + + expect(response).to have_http_status(:bad_request) + end + end + end + + context 'when order_by and sort are specified' do + context 'when order_by user_id' do + let!(:pipeline) { create_list(:ci_pipeline, 2, project: project, user: create(:user)) } + + it 'sorts as user_id: :asc' do + get api("/projects/#{project.id}/pipelines", user), order_by: 'user_id', sort: 'asc' + + expect(response).to have_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response).not_to be_empty + pipeline.sort_by { |p| p.user.id }.tap do |sorted_pipeline| + json_response.each_with_index { |r, i| expect(r['id']).to eq(sorted_pipeline[i].id) } + end + end + + context 'when sort is invalid' do + it 'returns bad_request' do + get api("/projects/#{project.id}/pipelines", user), order_by: 'user_id', sort: 'invalid_sort' + + expect(response).to have_http_status(:bad_request) + end + end + end + + context 'when order_by is invalid' do + it 'returns bad_request' do + get api("/projects/#{project.id}/pipelines", user), order_by: 'lock_version', sort: 'asc' + + expect(response).to have_http_status(:bad_request) + end + end + end + end end context 'unauthorized user' do diff --git a/spec/requests/api/project_hooks_spec.rb b/spec/requests/api/project_hooks_spec.rb index b1f8c249092..0f9330b062d 100644 --- a/spec/requests/api/project_hooks_spec.rb +++ b/spec/requests/api/project_hooks_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -describe API::ProjectHooks, 'ProjectHooks', api: true do - include ApiHelpers +describe API::ProjectHooks, 'ProjectHooks' do let(:user) { create(:user) } let(:user3) { create(:user) } let!(:project) { create(:empty_project, creator_id: user.id, namespace: user.namespace) } @@ -22,8 +21,8 @@ describe API::ProjectHooks, 'ProjectHooks', api: true do context "authorized user" do it "returns project hooks" do get api("/projects/#{project.id}/hooks", user) - expect(response).to have_http_status(200) + expect(response).to have_http_status(200) expect(json_response).to be_an Array expect(response).to include_pagination_headers expect(json_response.count).to eq(1) @@ -43,6 +42,7 @@ describe API::ProjectHooks, 'ProjectHooks', api: true do context "unauthorized user" do it "does not access project hooks" do get api("/projects/#{project.id}/hooks", user3) + expect(response).to have_http_status(403) end end @@ -52,6 +52,7 @@ describe API::ProjectHooks, 'ProjectHooks', api: true do context "authorized user" do it "returns a project hook" do get api("/projects/#{project.id}/hooks/#{hook.id}", user) + expect(response).to have_http_status(200) expect(json_response['url']).to eq(hook.url) 
expect(json_response['issues_events']).to eq(hook.issues_events) @@ -59,7 +60,7 @@ describe API::ProjectHooks, 'ProjectHooks', api: true do expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events) expect(json_response['tag_push_events']).to eq(hook.tag_push_events) expect(json_response['note_events']).to eq(hook.note_events) - expect(json_response['job_events']).to eq(hook.build_events) + expect(json_response['job_events']).to eq(hook.job_events) expect(json_response['pipeline_events']).to eq(hook.pipeline_events) expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events) expect(json_response['enable_ssl_verification']).to eq(hook.enable_ssl_verification) @@ -67,6 +68,7 @@ describe API::ProjectHooks, 'ProjectHooks', api: true do it "returns a 404 error if hook id is not available" do get api("/projects/#{project.id}/hooks/1234", user) + expect(response).to have_http_status(404) end end @@ -88,7 +90,8 @@ describe API::ProjectHooks, 'ProjectHooks', api: true do it "adds hook to project" do expect do post api("/projects/#{project.id}/hooks", user), - url: "http://example.com", issues_events: true, wiki_page_events: true + url: "http://example.com", issues_events: true, wiki_page_events: true, + job_events: true end.to change {project.hooks.count}.by(1) expect(response).to have_http_status(201) @@ -98,7 +101,7 @@ describe API::ProjectHooks, 'ProjectHooks', api: true do expect(json_response['merge_requests_events']).to eq(false) expect(json_response['tag_push_events']).to eq(false) expect(json_response['note_events']).to eq(false) - expect(json_response['job_events']).to eq(false) + expect(json_response['job_events']).to eq(true) expect(json_response['pipeline_events']).to eq(false) expect(json_response['wiki_page_events']).to eq(true) expect(json_response['enable_ssl_verification']).to eq(true) @@ -136,7 +139,8 @@ describe API::ProjectHooks, 'ProjectHooks', api: true do describe "PUT /projects/:id/hooks/:hook_id" do it "updates an existing project hook" do put api("/projects/#{project.id}/hooks/#{hook.id}", user), - url: 'http://example.org', push_events: false + url: 'http://example.org', push_events: false, job_events: true + expect(response).to have_http_status(200) expect(json_response['url']).to eq('http://example.org') expect(json_response['issues_events']).to eq(hook.issues_events) @@ -144,7 +148,7 @@ describe API::ProjectHooks, 'ProjectHooks', api: true do expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events) expect(json_response['tag_push_events']).to eq(hook.tag_push_events) expect(json_response['note_events']).to eq(hook.note_events) - expect(json_response['job_events']).to eq(hook.build_events) + expect(json_response['job_events']).to eq(hook.job_events) expect(json_response['pipeline_events']).to eq(hook.pipeline_events) expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events) expect(json_response['enable_ssl_verification']).to eq(hook.enable_ssl_verification) diff --git a/spec/requests/api/project_snippets_spec.rb b/spec/requests/api/project_snippets_spec.rb index 9e88c19b0bc..3ab1764f5c3 100644 --- a/spec/requests/api/project_snippets_spec.rb +++ b/spec/requests/api/project_snippets_spec.rb @@ -1,8 +1,6 @@ require 'rails_helper' -describe API::ProjectSnippets, api: true do - include ApiHelpers - +describe API::ProjectSnippets do let(:project) { create(:empty_project, :public) } let(:user) { create(:user) } let(:admin) { create(:admin) } diff --git a/spec/requests/api/projects_spec.rb 
b/spec/requests/api/projects_spec.rb index a3de4702ad0..d5c3b5b34ad 100644 --- a/spec/requests/api/projects_spec.rb +++ b/spec/requests/api/projects_spec.rb @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- require 'spec_helper' -describe API::Projects, :api do +describe API::Projects do include Gitlab::CurrentSettings let(:user) { create(:user) } @@ -24,6 +24,7 @@ describe API::Projects, :api do namespace: user.namespace, merge_requests_enabled: false, issues_enabled: false, wiki_enabled: false, + builds_enabled: false, snippets_enabled: false) end let(:project_member3) do @@ -341,8 +342,8 @@ describe API::Projects, :api do it "assigns attributes to project" do project = attributes_for(:project, { path: 'camelCasePath', - description: FFaker::Lorem.sentence, issues_enabled: false, + jobs_enabled: false, merge_requests_enabled: false, wiki_enabled: false, only_allow_merge_if_pipeline_succeeds: false, @@ -352,6 +353,8 @@ describe API::Projects, :api do post api('/projects', user), project + expect(response).to have_http_status(201) + project.each_pair do |k, v| next if %i[has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled].include?(k) expect(json_response[k.to_s]).to eq(v) @@ -475,7 +478,6 @@ describe API::Projects, :api do it 'assigns attributes to project' do project = attributes_for(:project, { - description: FFaker::Lorem.sentence, issues_enabled: false, merge_requests_enabled: false, wiki_enabled: false, @@ -659,10 +661,24 @@ describe API::Projects, :api do 'name' => user.namespace.name, 'path' => user.namespace.path, 'kind' => user.namespace.kind, - 'full_path' => user.namespace.full_path, + 'full_path' => user.namespace.full_path }) end + it "does not include statistics by default" do + get api("/projects/#{project.id}", user) + + expect(response).to have_http_status(200) + expect(json_response).not_to include 'statistics' + end + + it "includes statistics if requested" do + get api("/projects/#{project.id}", user), statistics: true + + expect(response).to have_http_status(200) + expect(json_response).to include 'statistics' + end + describe 'permissions' do context 'all projects' do before { project.team << [user, :master] } @@ -1078,10 +1094,21 @@ describe API::Projects, :api do before { project_member3 } before { project_member2 } + it 'returns 400 when nothing sent' do + project_param = {} + + put api("/projects/#{project.id}", user), project_param + + expect(response).to have_http_status(400) + expect(json_response['error']).to match('at least one parameter must be provided') + end + context 'when unauthenticated' do it 'returns authentication error' do project_param = { name: 'bar' } + put api("/projects/#{project.id}"), project_param + expect(response).to have_http_status(401) end end @@ -1089,8 +1116,11 @@ describe API::Projects, :api do context 'when authenticated as project owner' do it 'updates name' do project_param = { name: 'bar' } + put api("/projects/#{project.id}", user), project_param + expect(response).to have_http_status(200) + project_param.each_pair do |k, v| expect(json_response[k.to_s]).to eq(v) end @@ -1098,8 +1128,11 @@ describe API::Projects, :api do it 'updates visibility_level' do project_param = { visibility: 'public' } + put api("/projects/#{project3.id}", user), project_param + expect(response).to have_http_status(200) + project_param.each_pair do |k, v| expect(json_response[k.to_s]).to eq(v) end @@ -1108,17 +1141,23 @@ describe API::Projects, :api do it 'updates visibility_level from public to private' do project3.update_attributes({ 
visibility_level: Gitlab::VisibilityLevel::PUBLIC }) project_param = { visibility: 'private' } + put api("/projects/#{project3.id}", user), project_param + expect(response).to have_http_status(200) + project_param.each_pair do |k, v| expect(json_response[k.to_s]).to eq(v) end + expect(json_response['visibility']).to eq('private') end it 'does not update name to existing name' do project_param = { name: project3.name } + put api("/projects/#{project.id}", user), project_param + expect(response).to have_http_status(400) expect(json_response['message']['name']).to eq(['has already been taken']) end @@ -1134,8 +1173,23 @@ describe API::Projects, :api do it 'updates path & name to existing path & name in different namespace' do project_param = { path: project4.path, name: project4.name } + + put api("/projects/#{project3.id}", user), project_param + + expect(response).to have_http_status(200) + + project_param.each_pair do |k, v| + expect(json_response[k.to_s]).to eq(v) + end + end + + it 'updates jobs_enabled' do + project_param = { jobs_enabled: true } + put api("/projects/#{project3.id}", user), project_param + expect(response).to have_http_status(200) + project_param.each_pair do |k, v| expect(json_response[k.to_s]).to eq(v) end diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb index 4783d011d54..1a0695615e3 100644 --- a/spec/requests/api/repositories_spec.rb +++ b/spec/requests/api/repositories_spec.rb @@ -1,8 +1,7 @@ require 'spec_helper' require 'mime/types' -describe API::Repositories, api: true do - include ApiHelpers +describe API::Repositories do include RepoHelpers include WorkhorseHelpers diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb index 044b989e5ba..be83514ed9c 100644 --- a/spec/requests/api/runner_spec.rb +++ b/spec/requests/api/runner_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' describe API::Runner do - include ApiHelpers include StubGitlabCalls let(:registration_token) { 'abcdefg123456' } @@ -461,6 +460,29 @@ describe API::Runner do end end + context 'when dependencies is an empty array' do + let!(:job) { create(:ci_build_tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } + let!(:job2) { create(:ci_build_tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) } + let!(:empty_dependencies_job) do + create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'empty_dependencies_job', + stage: 'deploy', stage_idx: 1, + options: { dependencies: [] }) + end + + before do + job.success + job2.success + end + + it 'returns an empty array' do + request_job + + expect(response).to have_http_status(201) + expect(json_response['id']).to eq(empty_dependencies_job.id) + expect(json_response['dependencies'].count).to eq(0) + end + end + context 'when job has no tags' do before { job.update(tags: []) } @@ -569,7 +591,7 @@ describe API::Runner do update_job(trace: 'BUILD TRACE UPDATED') expect(response).to have_http_status(200) - expect(job.reload.trace).to eq 'BUILD TRACE UPDATED' + expect(job.reload.trace.raw).to eq 'BUILD TRACE UPDATED' end end @@ -577,7 +599,7 @@ describe API::Runner do it 'does not override trace information' do update_job - expect(job.reload.trace).to eq 'BUILD TRACE' + expect(job.reload.trace.raw).to eq 'BUILD TRACE' end end @@ -608,7 +630,7 @@ describe API::Runner do context 'when request is valid' do it 'gets correct response' do expect(response.status).to eq 202 - expect(job.reload.trace).to eq 'BUILD TRACE appended' + 
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended' expect(response.header).to have_key 'Range' expect(response.header).to have_key 'Job-Status' end @@ -619,7 +641,7 @@ describe API::Runner do it "changes the job's trace" do patch_the_trace - expect(job.reload.trace).to eq 'BUILD TRACE appended appended' + expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended' end context 'when Runner makes a force-patch' do @@ -628,7 +650,7 @@ describe API::Runner do it "doesn't change the build.trace" do force_patch_the_trace - expect(job.reload.trace).to eq 'BUILD TRACE appended' + expect(job.reload.trace.raw).to eq 'BUILD TRACE appended' end end end @@ -641,7 +663,7 @@ describe API::Runner do it 'changes the job.trace' do patch_the_trace - expect(job.reload.trace).to eq 'BUILD TRACE appended appended' + expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended' end context 'when Runner makes a force-patch' do @@ -650,7 +672,7 @@ describe API::Runner do it "doesn't change the job.trace" do force_patch_the_trace - expect(job.reload.trace).to eq 'BUILD TRACE appended' + expect(job.reload.trace.raw).to eq 'BUILD TRACE appended' end end end @@ -675,7 +697,7 @@ describe API::Runner do it 'gets correct response' do expect(response.status).to eq 202 - expect(job.reload.trace).to eq 'BUILD TRACE appended' + expect(job.reload.trace.raw).to eq 'BUILD TRACE appended' expect(response.header).to have_key 'Range' expect(response.header).to have_key 'Job-Status' end @@ -715,9 +737,11 @@ describe API::Runner do def patch_the_trace(content = ' appended', request_headers = nil) unless request_headers - offset = job.trace_length - limit = offset + content.length - 1 - request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" }) + job.trace.read do |stream| + offset = stream.size + limit = offset + content.length - 1 + request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" }) + end end Timecop.travel(job.updated_at + update_interval) do diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb index 8a82543a830..645a5389850 100644 --- a/spec/requests/api/runners_spec.rb +++ b/spec/requests/api/runners_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Runners, api: true do - include ApiHelpers - +describe API::Runners do let(:admin) { create(:user, :admin) } let(:user) { create(:user) } let(:user2) { create(:user) } diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb index fd334934ca5..95df3429314 100644 --- a/spec/requests/api/services_spec.rb +++ b/spec/requests/api/services_spec.rb @@ -1,8 +1,6 @@ require "spec_helper" -describe API::Services, api: true do - include ApiHelpers - +describe API::Services do let(:user) { create(:user) } let(:admin) { create(:admin) } let(:user2) { create(:user) } diff --git a/spec/requests/api/session_spec.rb b/spec/requests/api/session_spec.rb index 28fab2011a5..5e77519c867 100644 --- a/spec/requests/api/session_spec.rb +++ b/spec/requests/api/session_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Session, api: true do - include ApiHelpers - +describe API::Session do let(:user) { create(:user) } describe "POST /session" do @@ -13,7 +11,7 @@ describe API::Session, api: true do expect(json_response['email']).to eq(user.email) expect(json_response['private_token']).to eq(user.private_token) - expect(json_response['is_admin']).to eq(user.is_admin?) + expect(json_response['is_admin']).to eq(user.admin?) 
expect(json_response['can_create_project']).to eq(user.can_create_project?) expect(json_response['can_create_group']).to eq(user.can_create_group?) end @@ -37,7 +35,7 @@ describe API::Session, api: true do expect(json_response['email']).to eq user.email expect(json_response['private_token']).to eq user.private_token - expect(json_response['is_admin']).to eq user.is_admin? + expect(json_response['is_admin']).to eq user.admin? expect(json_response['can_create_project']).to eq user.can_create_project? expect(json_response['can_create_group']).to eq user.can_create_group? end @@ -50,7 +48,7 @@ describe API::Session, api: true do expect(json_response['email']).to eq user.email expect(json_response['private_token']).to eq user.private_token - expect(json_response['is_admin']).to eq user.is_admin? + expect(json_response['is_admin']).to eq user.admin? expect(json_response['can_create_project']).to eq user.can_create_project? expect(json_response['can_create_group']).to eq user.can_create_group? end diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb index 11b4b718e2c..2398ae6219c 100644 --- a/spec/requests/api/settings_spec.rb +++ b/spec/requests/api/settings_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Settings, 'Settings', api: true do - include ApiHelpers - +describe API::Settings, 'Settings' do let(:user) { create(:user) } let(:admin) { create(:admin) } diff --git a/spec/requests/api/sidekiq_metrics_spec.rb b/spec/requests/api/sidekiq_metrics_spec.rb index 28067f8ca88..83042d0cb12 100644 --- a/spec/requests/api/sidekiq_metrics_spec.rb +++ b/spec/requests/api/sidekiq_metrics_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::SidekiqMetrics, api: true do - include ApiHelpers - +describe API::SidekiqMetrics do let(:admin) { create(:user, :admin) } describe 'GET sidekiq/*' do diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb index 5d75b47b3cd..e429cddcf6a 100644 --- a/spec/requests/api/snippets_spec.rb +++ b/spec/requests/api/snippets_spec.rb @@ -1,7 +1,6 @@ require 'rails_helper' -describe API::Snippets, api: true do - include ApiHelpers +describe API::Snippets do let!(:user) { create(:user) } describe 'GET /snippets/' do diff --git a/spec/requests/api/system_hooks_spec.rb b/spec/requests/api/system_hooks_spec.rb index d1e10f12657..2eb191d6049 100644 --- a/spec/requests/api/system_hooks_spec.rb +++ b/spec/requests/api/system_hooks_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::SystemHooks, api: true do - include ApiHelpers - +describe API::SystemHooks do let(:user) { create(:user) } let(:admin) { create(:admin) } let!(:hook) { create(:system_hook, url: "http://example.com") } @@ -34,8 +32,9 @@ describe API::SystemHooks, api: true do expect(response).to include_pagination_headers expect(json_response).to be_an Array expect(json_response.first['url']).to eq(hook.url) - expect(json_response.first['push_events']).to be true + expect(json_response.first['push_events']).to be false expect(json_response.first['tag_push_events']).to be false + expect(json_response.first['repository_update_events']).to be true end end end diff --git a/spec/requests/api/tags_spec.rb b/spec/requests/api/tags_spec.rb index b132d033a61..ef7d0c3ee41 100644 --- a/spec/requests/api/tags_spec.rb +++ b/spec/requests/api/tags_spec.rb @@ -1,8 +1,7 @@ require 'spec_helper' require 'mime/types' -describe API::Tags, api: true do - include ApiHelpers +describe API::Tags do include RepoHelpers let(:user) { create(:user) } diff 
--git a/spec/requests/api/templates_spec.rb b/spec/requests/api/templates_spec.rb index 2c83e119065..cb55985e3f5 100644 --- a/spec/requests/api/templates_spec.rb +++ b/spec/requests/api/templates_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Templates, api: true do - include ApiHelpers - +describe API::Templates do context 'the Template Entity' do before { get api('/templates/gitignores/Ruby') } diff --git a/spec/requests/api/todos_spec.rb b/spec/requests/api/todos_spec.rb index b789284fa8d..92533f4dfea 100644 --- a/spec/requests/api/todos_spec.rb +++ b/spec/requests/api/todos_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Todos, api: true do - include ApiHelpers - +describe API::Todos do let(:project_1) { create(:empty_project, :test_repo) } let(:project_2) { create(:empty_project) } let(:author_1) { create(:user) } diff --git a/spec/requests/api/triggers_spec.rb b/spec/requests/api/triggers_spec.rb index d93a734f5b6..16ddade27d9 100644 --- a/spec/requests/api/triggers_spec.rb +++ b/spec/requests/api/triggers_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe API::Triggers do - include ApiHelpers - let(:user) { create(:user) } let(:user2) { create(:user) } let!(:trigger_token) { 'secure_token' } diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb index 04e7837fd7a..4919ad19833 100644 --- a/spec/requests/api/users_spec.rb +++ b/spec/requests/api/users_spec.rb @@ -1,12 +1,10 @@ require 'spec_helper' -describe API::Users, api: true do - include ApiHelpers - +describe API::Users do let(:user) { create(:user) } let(:admin) { create(:admin) } - let(:key) { create(:key, user: user) } - let(:email) { create(:email, user: user) } + let(:key) { create(:key, user: user) } + let(:email) { create(:email, user: user) } let(:omniauth_user) { create(:omniauth_user) } let(:ldap_user) { create(:omniauth_user, provider: 'ldapmain') } let(:ldap_blocked_user) { create(:omniauth_user, provider: 'ldapmain', state: 'ldap_blocked') } @@ -72,6 +70,12 @@ describe API::Users, api: true do expect(json_response).to be_an Array expect(json_response.first['username']).to eq(omniauth_user.username) end + + it "returns a 403 when non-admin user searches by external UID" do + get api("/users?extern_uid=#{omniauth_user.identities.first.extern_uid}&provider=#{omniauth_user.identities.first.provider}", user) + + expect(response).to have_http_status(403) + end end context "when admin" do @@ -100,6 +104,27 @@ describe API::Users, api: true do expect(json_response).to be_an Array expect(json_response).to all(include('external' => true)) end + + it "returns one user by external UID" do + get api("/users?extern_uid=#{omniauth_user.identities.first.extern_uid}&provider=#{omniauth_user.identities.first.provider}", admin) + + expect(response).to have_http_status(200) + expect(json_response).to be_an Array + expect(json_response.size).to eq(1) + expect(json_response.first['username']).to eq(omniauth_user.username) + end + + it "returns 400 error if extern_uid with no provider" do + get api("/users?extern_uid=#{omniauth_user.identities.first.extern_uid}", admin) + + expect(response).to have_http_status(400) + end + + it "returns 400 error if provider with no extern_uid" do + get api("/users?provider=#{omniauth_user.identities.first.provider}", admin) + + expect(response).to have_http_status(400) + end end end @@ -110,6 +135,12 @@ describe API::Users, api: true do expect(json_response['username']).to eq(user.username) end + it "does not return the user's `is_admin` flag" do + 
get api("/users/#{user.id}", user) + + expect(json_response['is_admin']).to be_nil + end + it "returns a 401 if unauthenticated" do get api("/users/9998") expect(response).to have_http_status(401) @@ -129,7 +160,7 @@ describe API::Users, api: true do end describe "POST /users" do - before{ admin } + before { admin } it "creates user" do expect do @@ -214,9 +245,9 @@ describe API::Users, api: true do it "does not create user with invalid email" do post api('/users', admin), - email: 'invalid email', - password: 'password', - name: 'test' + email: 'invalid email', + password: 'password', + name: 'test' expect(response).to have_http_status(400) end @@ -242,12 +273,12 @@ describe API::Users, api: true do it 'returns 400 error if user does not validate' do post api('/users', admin), - password: 'pass', - email: 'test@example.com', - username: 'test!', - name: 'test', - bio: 'g' * 256, - projects_limit: -1 + password: 'pass', + email: 'test@example.com', + username: 'test!', + name: 'test', + bio: 'g' * 256, + projects_limit: -1 expect(response).to have_http_status(400) expect(json_response['message']['password']). to eq(['is too short (minimum is 8 characters)']) @@ -267,19 +298,19 @@ describe API::Users, api: true do context 'with existing user' do before do post api('/users', admin), - email: 'test@example.com', - password: 'password', - username: 'test', - name: 'foo' + email: 'test@example.com', + password: 'password', + username: 'test', + name: 'foo' end it 'returns 409 conflict error if user with same email exists' do expect do post api('/users', admin), - name: 'foo', - email: 'test@example.com', - password: 'password', - username: 'foo' + name: 'foo', + email: 'test@example.com', + password: 'password', + username: 'foo' end.to change { User.count }.by(0) expect(response).to have_http_status(409) expect(json_response['message']).to eq('Email has already been taken') @@ -288,10 +319,10 @@ describe API::Users, api: true do it 'returns 409 conflict error if same username exists' do expect do post api('/users', admin), - name: 'foo', - email: 'foo@example.com', - password: 'password', - username: 'test' + name: 'foo', + email: 'foo@example.com', + password: 'password', + username: 'test' end.to change { User.count }.by(0) expect(response).to have_http_status(409) expect(json_response['message']).to eq('Username has already been taken') @@ -372,7 +403,6 @@ describe API::Users, api: true do it "updates admin status" do put api("/users/#{user.id}", admin), { admin: true } expect(response).to have_http_status(200) - expect(json_response['is_admin']).to eq(true) expect(user.reload.admin).to eq(true) end @@ -386,7 +416,6 @@ describe API::Users, api: true do it "does not update admin status" do put api("/users/#{admin_user.id}", admin), { can_create_group: false } expect(response).to have_http_status(200) - expect(json_response['is_admin']).to eq(true) expect(admin_user.reload.admin).to eq(true) expect(admin_user.can_create_group).to eq(false) end @@ -416,12 +445,12 @@ describe API::Users, api: true do it 'returns 400 error if user does not validate' do put api("/users/#{user.id}", admin), - password: 'pass', - email: 'test@example.com', - username: 'test!', - name: 'test', - bio: 'g' * 256, - projects_limit: -1 + password: 'pass', + email: 'test@example.com', + username: 'test!', + name: 'test', + bio: 'g' * 256, + projects_limit: -1 expect(response).to have_http_status(400) expect(json_response['message']['password']). 
to eq(['is too short (minimum is 8 characters)']) @@ -488,7 +517,7 @@ describe API::Users, api: true do key_attrs = attributes_for :key expect do post api("/users/#{user.id}/keys", admin), key_attrs - end.to change{ user.keys.count }.by(1) + end.to change { user.keys.count }.by(1) end it "returns 400 for invalid ID" do @@ -580,7 +609,7 @@ describe API::Users, api: true do email_attrs = attributes_for :email expect do post api("/users/#{user.id}/emails", admin), email_attrs - end.to change{ user.emails.count }.by(1) + end.to change { user.emails.count }.by(1) end it "returns a 400 for invalid ID" do @@ -676,7 +705,7 @@ describe API::Users, api: true do before { admin } it "deletes user" do - delete api("/users/#{user.id}", admin) + Sidekiq::Testing.inline! { delete api("/users/#{user.id}", admin) } expect(response).to have_http_status(204) expect { User.find(user.id) }.to raise_error ActiveRecord::RecordNotFound @@ -684,23 +713,23 @@ describe API::Users, api: true do end it "does not delete for unauthenticated user" do - delete api("/users/#{user.id}") + Sidekiq::Testing.inline! { delete api("/users/#{user.id}") } expect(response).to have_http_status(401) end it "is not available for non admin users" do - delete api("/users/#{user.id}", user) + Sidekiq::Testing.inline! { delete api("/users/#{user.id}", user) } expect(response).to have_http_status(403) end it "returns 404 for non-existing user" do - delete api("/users/999999", admin) + Sidekiq::Testing.inline! { delete api("/users/999999", admin) } expect(response).to have_http_status(404) expect(json_response['message']).to eq('404 User Not Found') end it "returns a 404 for invalid ID" do - delete api("/users/ASDF", admin) + Sidekiq::Testing.inline! { delete api("/users/ASDF", admin) } expect(response).to have_http_status(404) end @@ -842,7 +871,7 @@ describe API::Users, api: true do key_attrs = attributes_for :key expect do post api("/user/keys", user), key_attrs - end.to change{ user.keys.count }.by(1) + end.to change { user.keys.count }.by(1) expect(response).to have_http_status(201) end @@ -880,7 +909,7 @@ describe API::Users, api: true do delete api("/user/keys/#{key.id}", user) expect(response).to have_http_status(204) - end.to change{user.keys.count}.by(-1) + end.to change { user.keys.count}.by(-1) end it "returns 404 if key ID not found" do @@ -963,7 +992,7 @@ describe API::Users, api: true do email_attrs = attributes_for :email expect do post api("/user/emails", user), email_attrs - end.to change{ user.emails.count }.by(1) + end.to change { user.emails.count }.by(1) expect(response).to have_http_status(201) end @@ -989,7 +1018,7 @@ describe API::Users, api: true do delete api("/user/emails/#{email.id}", user) expect(response).to have_http_status(204) - end.to change{user.emails.count}.by(-1) + end.to change { user.emails.count}.by(-1) end it "returns 404 if email ID not found" do @@ -1158,6 +1187,49 @@ describe API::Users, api: true do end end + context "user activities", :redis do + let!(:old_active_user) { create(:user, last_activity_on: Time.utc(2000, 1, 1)) } + let!(:newly_active_user) { create(:user, last_activity_on: 2.days.ago.midday) } + + context 'last activity as normal user' do + it 'has no permission' do + get api("/user/activities", user) + + expect(response).to have_http_status(403) + end + end + + context 'as admin' do + it 'returns the activities from the last 6 months' do + get api("/user/activities", admin) + + expect(response).to include_pagination_headers + expect(json_response.size).to eq(1) + + activity = 
json_response.last + + expect(activity['username']).to eq(newly_active_user.username) + expect(activity['last_activity_on']).to eq(2.days.ago.to_date.to_s) + expect(activity['last_activity_at']).to eq(2.days.ago.to_date.to_s) + end + + context 'passing a :from parameter' do + it 'returns the activities from the given date' do + get api("/user/activities?from=2000-1-1", admin) + + expect(response).to include_pagination_headers + expect(json_response.size).to eq(2) + + activity = json_response.first + + expect(activity['username']).to eq(old_active_user.username) + expect(activity['last_activity_on']).to eq(Time.utc(2000, 1, 1).to_date.to_s) + expect(activity['last_activity_at']).to eq(Time.utc(2000, 1, 1).to_date.to_s) + end + end + end + end + describe 'GET /users/:user_id/impersonation_tokens' do let!(:active_personal_access_token) { create(:personal_access_token, user: user) } let!(:revoked_personal_access_token) { create(:personal_access_token, :revoked, user: user) } diff --git a/spec/requests/api/v3/award_emoji_spec.rb b/spec/requests/api/v3/award_emoji_spec.rb index eeb4d128c1b..9234710f488 100644 --- a/spec/requests/api/v3/award_emoji_spec.rb +++ b/spec/requests/api/v3/award_emoji_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::AwardEmoji, api: true do - include ApiHelpers - +describe API::V3::AwardEmoji do let(:user) { create(:user) } let!(:project) { create(:empty_project) } let(:issue) { create(:issue, project: project) } diff --git a/spec/requests/api/v3/boards_spec.rb b/spec/requests/api/v3/boards_spec.rb index eb95934f354..4d786331d1b 100644 --- a/spec/requests/api/v3/boards_spec.rb +++ b/spec/requests/api/v3/boards_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::Boards, api: true do - include ApiHelpers - +describe API::V3::Boards do let(:user) { create(:user) } let(:guest) { create(:user) } let(:non_member) { create(:user) } diff --git a/spec/requests/api/v3/branches_spec.rb b/spec/requests/api/v3/branches_spec.rb index 5dcd4f21f4e..c88f7788697 100644 --- a/spec/requests/api/v3/branches_spec.rb +++ b/spec/requests/api/v3/branches_spec.rb @@ -1,9 +1,7 @@ require 'spec_helper' require 'mime/types' -describe API::V3::Branches, api: true do - include ApiHelpers - +describe API::V3::Branches do let(:user) { create(:user) } let(:user2) { create(:user) } let!(:project) { create(:project, :repository, creator: user) } @@ -49,19 +47,6 @@ describe API::V3::Branches, api: true do delete v3_api("/projects/#{project.id}/repository/branches/foobar", user) expect(response).to have_http_status(404) end - - it "removes protected branch" do - create(:protected_branch, project: project, name: branch_name) - delete v3_api("/projects/#{project.id}/repository/branches/#{branch_name}", user) - expect(response).to have_http_status(405) - expect(json_response['message']).to eq('Protected branch cant be removed') - end - - it "does not remove HEAD branch" do - delete v3_api("/projects/#{project.id}/repository/branches/master", user) - expect(response).to have_http_status(405) - expect(json_response['message']).to eq('Cannot remove HEAD branch') - end end describe "DELETE /projects/:id/repository/merged_branches" do diff --git a/spec/requests/api/v3/broadcast_messages_spec.rb b/spec/requests/api/v3/broadcast_messages_spec.rb index 06556401a29..948cd78c177 100644 --- a/spec/requests/api/v3/broadcast_messages_spec.rb +++ b/spec/requests/api/v3/broadcast_messages_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::BroadcastMessages, api: true do - include 
ApiHelpers - +describe API::V3::BroadcastMessages do let(:user) { create(:user) } let(:admin) { create(:admin) } diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb index a50c22a6dd1..dc95599546c 100644 --- a/spec/requests/api/v3/builds_spec.rb +++ b/spec/requests/api/v3/builds_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::Builds, api: true do - include ApiHelpers - +describe API::V3::Builds do let(:user) { create(:user) } let(:api_user) { user } let!(:project) { create(:project, :repository, creator: user, public_builds: false) } @@ -330,7 +328,7 @@ describe API::V3::Builds, api: true do context 'authorized user' do it 'returns specific job trace' do expect(response).to have_http_status(200) - expect(response.body).to eq(build.trace) + expect(response.body).to eq(build.trace.raw) end end @@ -418,7 +416,7 @@ describe API::V3::Builds, api: true do it 'erases job content' do expect(response.status).to eq 201 - expect(build.trace).to be_empty + expect(build).not_to have_trace expect(build.artifacts_file.exists?).to be_falsy expect(build.artifacts_metadata.exists?).to be_falsy end diff --git a/spec/requests/api/v3/commits_spec.rb b/spec/requests/api/v3/commits_spec.rb index adba3a787aa..c2e8c3ae6f7 100644 --- a/spec/requests/api/v3/commits_spec.rb +++ b/spec/requests/api/v3/commits_spec.rb @@ -1,8 +1,7 @@ require 'spec_helper' require 'mime/types' -describe API::V3::Commits, api: true do - include ApiHelpers +describe API::V3::Commits do let(:user) { create(:user) } let(:user2) { create(:user) } let!(:project) { create(:project, :repository, creator: user, namespace: user.namespace) } @@ -485,8 +484,7 @@ describe API::V3::Commits, api: true do post v3_api("/projects/#{project.id}/repository/commits/#{master_pickable_commit.id}/cherry_pick", user), branch: 'markdown' expect(response).to have_http_status(400) - expect(json_response['message']).to eq('Sorry, we cannot cherry-pick this commit automatically. 
- A cherry-pick may have already been performed with this commit, or a more recent commit may have updated some of its content.') + expect(json_response['message']).to include('Sorry, we cannot cherry-pick this commit automatically.') end it 'returns 400 if you are not allowed to push to the target branch' do diff --git a/spec/requests/api/v3/deploy_keys_spec.rb b/spec/requests/api/v3/deploy_keys_spec.rb index f5bdf408c5e..b61b2b618a6 100644 --- a/spec/requests/api/v3/deploy_keys_spec.rb +++ b/spec/requests/api/v3/deploy_keys_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::DeployKeys, api: true do - include ApiHelpers - +describe API::V3::DeployKeys do let(:user) { create(:user) } let(:admin) { create(:admin) } let(:project) { create(:empty_project, creator_id: user.id) } diff --git a/spec/requests/api/v3/deployments_spec.rb b/spec/requests/api/v3/deployments_spec.rb index 3c5ce407b32..0389a264781 100644 --- a/spec/requests/api/v3/deployments_spec.rb +++ b/spec/requests/api/v3/deployments_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Deployments, api: true do - include ApiHelpers - +describe API::V3::Deployments do let(:user) { create(:user) } let(:non_member) { create(:user) } let(:project) { deployment.environment.project } @@ -26,11 +24,11 @@ describe API::Deployments, api: true do describe 'GET /projects/:id/deployments' do context 'as member of the project' do it_behaves_like 'a paginated resources' do - let(:request) { get api("/projects/#{project.id}/deployments", user) } + let(:request) { get v3_api("/projects/#{project.id}/deployments", user) } end it 'returns projects deployments' do - get api("/projects/#{project.id}/deployments", user) + get v3_api("/projects/#{project.id}/deployments", user) expect(response).to have_http_status(200) expect(json_response).to be_an Array @@ -42,7 +40,7 @@ describe API::Deployments, api: true do context 'as non member' do it 'returns a 404 status code' do - get api("/projects/#{project.id}/deployments", non_member) + get v3_api("/projects/#{project.id}/deployments", non_member) expect(response).to have_http_status(404) end @@ -52,7 +50,7 @@ describe API::Deployments, api: true do describe 'GET /projects/:id/deployments/:deployment_id' do context 'as a member of the project' do it 'returns the projects deployment' do - get api("/projects/#{project.id}/deployments/#{deployment.id}", user) + get v3_api("/projects/#{project.id}/deployments/#{deployment.id}", user) expect(response).to have_http_status(200) expect(json_response['sha']).to match /\A\h{40}\z/ @@ -62,7 +60,7 @@ describe API::Deployments, api: true do context 'as non member' do it 'returns a 404 status code' do - get api("/projects/#{project.id}/deployments/#{deployment.id}", non_member) + get v3_api("/projects/#{project.id}/deployments/#{deployment.id}", non_member) expect(response).to have_http_status(404) end diff --git a/spec/requests/api/v3/environments_spec.rb b/spec/requests/api/v3/environments_spec.rb index 216192c9d34..99f35723974 100644 --- a/spec/requests/api/v3/environments_spec.rb +++ b/spec/requests/api/v3/environments_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::Environments, api: true do - include ApiHelpers - +describe API::V3::Environments do let(:user) { create(:user) } let(:non_member) { create(:user) } let(:project) { create(:empty_project, :private, namespace: user.namespace) } diff --git a/spec/requests/api/v3/files_spec.rb b/spec/requests/api/v3/files_spec.rb index 3b61139a2cd..378ca1720ff 100644 --- 
a/spec/requests/api/v3/files_spec.rb +++ b/spec/requests/api/v3/files_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::Files, api: true do - include ApiHelpers - +describe API::V3::Files do # I have to remove periods from the end of the name # This happened when the user's name had a suffix (i.e. "Sr.") # This seems to be what git does under the hood. For example, this commit: @@ -26,8 +24,8 @@ describe API::V3::Files, api: true do ref: 'master' } end - let(:author_email) { FFaker::Internet.email } - let(:author_name) { FFaker::Name.name.chomp("\.") } + let(:author_email) { 'user@example.org' } + let(:author_name) { 'John Doe' } before { project.team << [user, :developer] } @@ -55,7 +53,7 @@ describe API::V3::Files, api: true do let(:params) do { file_path: 'app/models/application.rb', - ref: 'master', + ref: 'master' } end @@ -129,7 +127,7 @@ describe API::V3::Files, api: true do it "returns a 400 if editor fails to create file" do allow_any_instance_of(Repository).to receive(:create_file). - and_return(false) + and_raise(Repository::CommitError, 'Cannot create file') post v3_api("/projects/#{project.id}/repository/files", user), valid_params @@ -229,8 +227,8 @@ describe API::V3::Files, api: true do expect(response).to have_http_status(400) end - it "returns a 400 if fails to create file" do - allow_any_instance_of(Repository).to receive(:delete_file).and_return(false) + it "returns a 400 if fails to delete file" do + allow_any_instance_of(Repository).to receive(:delete_file).and_raise(Repository::CommitError, 'Cannot delete file') delete v3_api("/projects/#{project.id}/repository/files", user), valid_params @@ -265,7 +263,7 @@ describe API::V3::Files, api: true do let(:get_params) do { file_path: file_path, - ref: 'master', + ref: 'master' } end diff --git a/spec/requests/api/v3/groups_spec.rb b/spec/requests/api/v3/groups_spec.rb index a71b7d4b008..bc261b5e07c 100644 --- a/spec/requests/api/v3/groups_spec.rb +++ b/spec/requests/api/v3/groups_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -describe API::V3::Groups, api: true do - include ApiHelpers +describe API::V3::Groups do include UploadHelpers let(:user1) { create(:user, can_create_group: false) } @@ -70,7 +69,7 @@ describe API::V3::Groups, api: true do storage_size: 702, repository_size: 123, lfs_objects_size: 234, - build_artifacts_size: 345, + build_artifacts_size: 345 }.stringify_keys project1.statistics.update!(attributes) @@ -177,7 +176,7 @@ describe API::V3::Groups, api: true do expect(json_response['path']).to eq(group1.path) expect(json_response['description']).to eq(group1.description) expect(json_response['visibility_level']).to eq(group1.visibility_level) - expect(json_response['avatar_url']).to eq(group1.avatar_url) + expect(json_response['avatar_url']).to eq(group1.avatar_url(only_path: false)) expect(json_response['web_url']).to eq(group1.web_url) expect(json_response['request_access_enabled']).to eq(group1.request_access_enabled) expect(json_response['full_name']).to eq(group1.full_name) diff --git a/spec/requests/api/v3/issues_spec.rb b/spec/requests/api/v3/issues_spec.rb index 383871d5c38..cc81922697a 100644 --- a/spec/requests/api/v3/issues_spec.rb +++ b/spec/requests/api/v3/issues_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -describe API::V3::Issues, api: true do - include ApiHelpers +describe API::V3::Issues do include EmailHelpers let(:user) { create(:user) } @@ -15,11 +14,11 @@ describe API::V3::Issues, api: true do let!(:closed_issue) do create :closed_issue, author: user, - assignee: user, + 
assignees: [user], project: project, state: :closed, milestone: milestone, - created_at: generate(:issue_created_at), + created_at: generate(:past_time), updated_at: 3.hours.ago end let!(:confidential_issue) do @@ -27,17 +26,17 @@ describe API::V3::Issues, api: true do :confidential, project: project, author: author, - assignee: assignee, - created_at: generate(:issue_created_at), + assignees: [assignee], + created_at: generate(:past_time), updated_at: 2.hours.ago end let!(:issue) do create :issue, author: user, - assignee: user, + assignees: [user], project: project, milestone: milestone, - created_at: generate(:issue_created_at), + created_at: generate(:past_time), updated_at: 1.hour.ago end let!(:label) do @@ -248,7 +247,7 @@ describe API::V3::Issues, api: true do let!(:group_closed_issue) do create :closed_issue, author: user, - assignee: user, + assignees: [user], project: group_project, state: :closed, milestone: group_milestone, @@ -259,13 +258,13 @@ describe API::V3::Issues, api: true do :confidential, project: group_project, author: author, - assignee: assignee, + assignees: [assignee], updated_at: 2.hours.ago end let!(:group_issue) do create :issue, author: user, - assignee: user, + assignees: [user], project: group_project, milestone: group_milestone, updated_at: 1.hour.ago @@ -738,13 +737,14 @@ describe API::V3::Issues, api: true do describe "POST /projects/:id/issues" do it 'creates a new project issue' do post v3_api("/projects/#{project.id}/issues", user), - title: 'new issue', labels: 'label, label2' + title: 'new issue', labels: 'label, label2', assignee_id: assignee.id expect(response).to have_http_status(201) expect(json_response['title']).to eq('new issue') expect(json_response['description']).to be_nil expect(json_response['labels']).to eq(%w(label label2)) expect(json_response['confidential']).to be_falsy + expect(json_response['assignee']['name']).to eq(assignee.name) end it 'creates a new confidential project issue' do @@ -824,7 +824,7 @@ describe API::V3::Issues, api: true do end context 'resolving issues in a merge request' do - let(:discussion) { Discussion.for_diff_notes([create(:diff_note_on_merge_request)]).first } + let(:discussion) { create(:diff_note_on_merge_request).to_discussion } let(:merge_request) { discussion.noteable } let(:project) { merge_request.source_project } before do @@ -1141,6 +1141,22 @@ describe API::V3::Issues, api: true do end end + describe 'PUT /projects/:id/issues/:issue_id to update assignee' do + it 'updates an issue with no assignee' do + put v3_api("/projects/#{project.id}/issues/#{issue.id}", user), assignee_id: 0 + + expect(response).to have_http_status(200) + expect(json_response['assignee']).to eq(nil) + end + + it 'updates an issue with assignee' do + put v3_api("/projects/#{project.id}/issues/#{issue.id}", user), assignee_id: user2.id + + expect(response).to have_http_status(200) + expect(json_response['assignee']['name']).to eq(user2.name) + end + end + describe "DELETE /projects/:id/issues/:issue_id" do it "rejects a non member from deleting an issue" do delete v3_api("/projects/#{project.id}/issues/#{issue.id}", non_member) diff --git a/spec/requests/api/v3/labels_spec.rb b/spec/requests/api/v3/labels_spec.rb index dfac357d37c..62faa1cb129 100644 --- a/spec/requests/api/v3/labels_spec.rb +++ b/spec/requests/api/v3/labels_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::Labels, api: true do - include ApiHelpers - +describe API::V3::Labels do let(:user) { create(:user) } let(:project) { 
create(:empty_project, creator_id: user.id, namespace: user.namespace) } let!(:label1) { create(:label, title: 'label1', project: project) } diff --git a/spec/requests/api/v3/members_spec.rb b/spec/requests/api/v3/members_spec.rb index 13814ed10c3..623f02902b8 100644 --- a/spec/requests/api/v3/members_spec.rb +++ b/spec/requests/api/v3/members_spec.rb @@ -1,9 +1,7 @@ require 'spec_helper' -describe API::V3::Members, api: true do - include ApiHelpers - - let(:master) { create(:user) } +describe API::V3::Members do + let(:master) { create(:user, username: 'master_user') } let(:developer) { create(:user) } let(:access_requester) { create(:user) } let(:stranger) { create(:user) } diff --git a/spec/requests/api/v3/merge_request_diffs_spec.rb b/spec/requests/api/v3/merge_request_diffs_spec.rb index c53800eef30..8020ddab4c8 100644 --- a/spec/requests/api/v3/merge_request_diffs_spec.rb +++ b/spec/requests/api/v3/merge_request_diffs_spec.rb @@ -1,8 +1,6 @@ require "spec_helper" -describe API::V3::MergeRequestDiffs, 'MergeRequestDiffs', api: true do - include ApiHelpers - +describe API::V3::MergeRequestDiffs, 'MergeRequestDiffs' do let!(:user) { create(:user) } let!(:merge_request) { create(:merge_request, importing: true) } let!(:project) { merge_request.target_project } diff --git a/spec/requests/api/v3/merge_requests_spec.rb b/spec/requests/api/v3/merge_requests_spec.rb index d73e9635c9b..f6ff96be566 100644 --- a/spec/requests/api/v3/merge_requests_spec.rb +++ b/spec/requests/api/v3/merge_requests_spec.rb @@ -1,7 +1,6 @@ require "spec_helper" -describe API::MergeRequests, api: true do - include ApiHelpers +describe API::MergeRequests do let(:base_time) { Time.now } let(:user) { create(:user) } let(:admin) { create(:user, :admin) } @@ -339,6 +338,19 @@ describe API::MergeRequests, api: true do expect(json_response['title']).to eq('Test merge_request') end + it "returns 422 when target project has disabled merge requests" do + project.project_feature.update(merge_requests_access_level: 0) + + post v3_api("/projects/#{fork_project.id}/merge_requests", user2), + title: 'Test', + target_branch: "master", + source_branch: 'markdown', + author: user2, + target_project_id: project.id + + expect(response).to have_http_status(422) + end + it "returns 400 when source_branch is missing" do post v3_api("/projects/#{fork_project.id}/merge_requests", user2), title: 'Test merge_request', target_branch: "master", author: user2, target_project_id: project.id diff --git a/spec/requests/api/v3/milestones_spec.rb b/spec/requests/api/v3/milestones_spec.rb index 127c0eec881..f04efc990a7 100644 --- a/spec/requests/api/v3/milestones_spec.rb +++ b/spec/requests/api/v3/milestones_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -describe API::V3::Milestones, api: true do - include ApiHelpers +describe API::V3::Milestones do let(:user) { create(:user) } let!(:project) { create(:empty_project, namespace: user.namespace ) } let!(:closed_milestone) { create(:closed_milestone, project: project) } diff --git a/spec/requests/api/v3/notes_spec.rb b/spec/requests/api/v3/notes_spec.rb index ddef2d5eb04..2bae4a60931 100644 --- a/spec/requests/api/v3/notes_spec.rb +++ b/spec/requests/api/v3/notes_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::Notes, api: true do - include ApiHelpers - +describe API::V3::Notes do let(:user) { create(:user) } let!(:project) { create(:empty_project, :public, namespace: user.namespace) } let!(:issue) { create(:issue, project: project, author: user) } diff --git 
a/spec/requests/api/v3/pipelines_spec.rb b/spec/requests/api/v3/pipelines_spec.rb index 3786eb06932..e1d036ff365 100644 --- a/spec/requests/api/v3/pipelines_spec.rb +++ b/spec/requests/api/v3/pipelines_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::Pipelines, api: true do - include ApiHelpers - +describe API::V3::Pipelines do let(:user) { create(:user) } let(:non_member) { create(:user) } let(:project) { create(:project, :repository, creator: user) } diff --git a/spec/requests/api/v3/project_hooks_spec.rb b/spec/requests/api/v3/project_hooks_spec.rb index a981119dc5a..1969d1c7f2b 100644 --- a/spec/requests/api/v3/project_hooks_spec.rb +++ b/spec/requests/api/v3/project_hooks_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -describe API::ProjectHooks, 'ProjectHooks', api: true do - include ApiHelpers +describe API::ProjectHooks, 'ProjectHooks' do let(:user) { create(:user) } let(:user3) { create(:user) } let!(:project) { create(:project, creator_id: user.id, namespace: user.namespace) } @@ -59,7 +58,7 @@ describe API::ProjectHooks, 'ProjectHooks', api: true do expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events) expect(json_response['tag_push_events']).to eq(hook.tag_push_events) expect(json_response['note_events']).to eq(hook.note_events) - expect(json_response['build_events']).to eq(hook.build_events) + expect(json_response['build_events']).to eq(hook.job_events) expect(json_response['pipeline_events']).to eq(hook.pipeline_events) expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events) expect(json_response['enable_ssl_verification']).to eq(hook.enable_ssl_verification) @@ -144,7 +143,7 @@ describe API::ProjectHooks, 'ProjectHooks', api: true do expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events) expect(json_response['tag_push_events']).to eq(hook.tag_push_events) expect(json_response['note_events']).to eq(hook.note_events) - expect(json_response['build_events']).to eq(hook.build_events) + expect(json_response['build_events']).to eq(hook.job_events) expect(json_response['pipeline_events']).to eq(hook.pipeline_events) expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events) expect(json_response['enable_ssl_verification']).to eq(hook.enable_ssl_verification) diff --git a/spec/requests/api/v3/project_snippets_spec.rb b/spec/requests/api/v3/project_snippets_spec.rb index 957a3bf97ef..365e7365fda 100644 --- a/spec/requests/api/v3/project_snippets_spec.rb +++ b/spec/requests/api/v3/project_snippets_spec.rb @@ -1,8 +1,6 @@ require 'rails_helper' -describe API::ProjectSnippets, api: true do - include ApiHelpers - +describe API::ProjectSnippets do let(:project) { create(:empty_project, :public) } let(:user) { create(:user) } let(:admin) { create(:admin) } diff --git a/spec/requests/api/v3/projects_spec.rb b/spec/requests/api/v3/projects_spec.rb index b1aa793ec00..dc7c3d125b1 100644 --- a/spec/requests/api/v3/projects_spec.rb +++ b/spec/requests/api/v3/projects_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -describe API::V3::Projects, api: true do - include ApiHelpers +describe API::V3::Projects do include Gitlab::CurrentSettings let(:user) { create(:user) } @@ -228,7 +227,7 @@ describe API::V3::Projects, api: true do storage_size: 702, repository_size: 123, lfs_objects_size: 234, - build_artifacts_size: 345, + build_artifacts_size: 345 } project4.statistics.update!(attributes) @@ -356,7 +355,6 @@ describe API::V3::Projects, api: true do it "assigns attributes to project" do project = 
attributes_for(:project, { path: 'camelCasePath', - description: FFaker::Lorem.sentence, issues_enabled: false, merge_requests_enabled: false, wiki_enabled: false, @@ -501,7 +499,6 @@ describe API::V3::Projects, api: true do it 'assigns attributes to project' do project = attributes_for(:project, { - description: FFaker::Lorem.sentence, issues_enabled: false, merge_requests_enabled: false, wiki_enabled: false, @@ -709,7 +706,7 @@ describe API::V3::Projects, api: true do 'name' => user.namespace.name, 'path' => user.namespace.path, 'kind' => user.namespace.kind, - 'full_path' => user.namespace.full_path, + 'full_path' => user.namespace.full_path }) end diff --git a/spec/requests/api/v3/repositories_spec.rb b/spec/requests/api/v3/repositories_spec.rb index fef6fb641fa..1a55e2a71cd 100644 --- a/spec/requests/api/v3/repositories_spec.rb +++ b/spec/requests/api/v3/repositories_spec.rb @@ -1,8 +1,7 @@ require 'spec_helper' require 'mime/types' -describe API::V3::Repositories, api: true do - include ApiHelpers +describe API::V3::Repositories do include RepoHelpers include WorkhorseHelpers diff --git a/spec/requests/api/v3/runners_spec.rb b/spec/requests/api/v3/runners_spec.rb index ca335ce9cf0..dbda2cf34c3 100644 --- a/spec/requests/api/v3/runners_spec.rb +++ b/spec/requests/api/v3/runners_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::Runners, api: true do - include ApiHelpers - +describe API::V3::Runners do let(:admin) { create(:user, :admin) } let(:user) { create(:user) } let(:user2) { create(:user) } diff --git a/spec/requests/api/v3/services_spec.rb b/spec/requests/api/v3/services_spec.rb index 3a760a8f25c..3ba62de822a 100644 --- a/spec/requests/api/v3/services_spec.rb +++ b/spec/requests/api/v3/services_spec.rb @@ -1,8 +1,6 @@ require "spec_helper" -describe API::V3::Services, api: true do - include ApiHelpers - +describe API::V3::Services do let(:user) { create(:user) } let(:project) { create(:empty_project, creator_id: user.id, namespace: user.namespace) } diff --git a/spec/requests/api/v3/settings_spec.rb b/spec/requests/api/v3/settings_spec.rb index a9fa5adac17..41d039b7da0 100644 --- a/spec/requests/api/v3/settings_spec.rb +++ b/spec/requests/api/v3/settings_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::Settings, 'Settings', api: true do - include ApiHelpers - +describe API::V3::Settings, 'Settings' do let(:user) { create(:user) } let(:admin) { create(:admin) } diff --git a/spec/requests/api/v3/snippets_spec.rb b/spec/requests/api/v3/snippets_spec.rb index 05653bd0d51..4f02b7b1a54 100644 --- a/spec/requests/api/v3/snippets_spec.rb +++ b/spec/requests/api/v3/snippets_spec.rb @@ -1,7 +1,6 @@ require 'rails_helper' -describe API::V3::Snippets, api: true do - include ApiHelpers +describe API::V3::Snippets do let!(:user) { create(:user) } describe 'GET /snippets/' do diff --git a/spec/requests/api/v3/system_hooks_spec.rb b/spec/requests/api/v3/system_hooks_spec.rb index 91038977c82..ae427541abb 100644 --- a/spec/requests/api/v3/system_hooks_spec.rb +++ b/spec/requests/api/v3/system_hooks_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::SystemHooks, api: true do - include ApiHelpers - +describe API::V3::SystemHooks do let(:user) { create(:user) } let(:admin) { create(:admin) } let!(:hook) { create(:system_hook, url: "http://example.com") } @@ -33,8 +31,9 @@ describe API::V3::SystemHooks, api: true do expect(response).to have_http_status(200) expect(json_response).to be_an Array expect(json_response.first['url']).to eq(hook.url) - 
expect(json_response.first['push_events']).to be true + expect(json_response.first['push_events']).to be false expect(json_response.first['tag_push_events']).to be false + expect(json_response.first['repository_update_events']).to be true end end end diff --git a/spec/requests/api/v3/tags_spec.rb b/spec/requests/api/v3/tags_spec.rb index 6870cfd2668..1c4b25c47c3 100644 --- a/spec/requests/api/v3/tags_spec.rb +++ b/spec/requests/api/v3/tags_spec.rb @@ -1,8 +1,7 @@ require 'spec_helper' require 'mime/types' -describe API::V3::Tags, api: true do - include ApiHelpers +describe API::V3::Tags do include RepoHelpers let(:user) { create(:user) } diff --git a/spec/requests/api/v3/templates_spec.rb b/spec/requests/api/v3/templates_spec.rb index f1e554b98cc..00446c7f29c 100644 --- a/spec/requests/api/v3/templates_spec.rb +++ b/spec/requests/api/v3/templates_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::Templates, api: true do - include ApiHelpers - +describe API::V3::Templates do shared_examples_for 'the Template Entity' do |path| before { get v3_api(path) } diff --git a/spec/requests/api/v3/todos_spec.rb b/spec/requests/api/v3/todos_spec.rb index 80fa697e949..9c2c4d64257 100644 --- a/spec/requests/api/v3/todos_spec.rb +++ b/spec/requests/api/v3/todos_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::Todos, api: true do - include ApiHelpers - +describe API::V3::Todos do let(:project_1) { create(:empty_project) } let(:project_2) { create(:empty_project) } let(:author_1) { create(:user) } diff --git a/spec/requests/api/v3/triggers_spec.rb b/spec/requests/api/v3/triggers_spec.rb index 9233e9621bf..d3de6bf13bc 100644 --- a/spec/requests/api/v3/triggers_spec.rb +++ b/spec/requests/api/v3/triggers_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe API::V3::Triggers do - include ApiHelpers - let(:user) { create(:user) } let(:user2) { create(:user) } let!(:trigger_token) { 'secure_token' } diff --git a/spec/requests/api/v3/users_spec.rb b/spec/requests/api/v3/users_spec.rb index b38cbe74b85..e9c57f7c6c3 100644 --- a/spec/requests/api/v3/users_spec.rb +++ b/spec/requests/api/v3/users_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::V3::Users, api: true do - include ApiHelpers - +describe API::V3::Users do let(:user) { create(:user) } let(:admin) { create(:admin) } let(:key) { create(:key, user: user) } @@ -276,5 +274,11 @@ describe API::V3::Users, api: true do expect(new_user).to be_confirmed end + + it 'does not reveal the `is_admin` flag of the user' do + post v3_api('/users', admin), attributes_for(:user) + + expect(json_response['is_admin']).to be_nil + end end end diff --git a/spec/requests/api/variables_spec.rb b/spec/requests/api/variables_spec.rb index 0c1413119e0..63d6d3001ac 100644 --- a/spec/requests/api/variables_spec.rb +++ b/spec/requests/api/variables_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Variables, api: true do - include ApiHelpers - +describe API::Variables do let(:user) { create(:user) } let(:user2) { create(:user) } let!(:project) { create(:empty_project, creator_id: user.id) } diff --git a/spec/requests/api/version_spec.rb b/spec/requests/api/version_spec.rb index da1b2fda70e..8870d48bbc9 100644 --- a/spec/requests/api/version_spec.rb +++ b/spec/requests/api/version_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe API::Version, api: true do - include ApiHelpers - +describe API::Version do describe 'GET /version' do context 'when unauthenticated' do it 'returns authentication error' do diff --git 
a/spec/requests/ci/api/builds_spec.rb b/spec/requests/ci/api/builds_spec.rb index c879f37f50d..286de277ae7 100644 --- a/spec/requests/ci/api/builds_spec.rb +++ b/spec/requests/ci/api/builds_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe Ci::API::Builds do - include ApiHelpers - let(:runner) { FactoryGirl.create(:ci_runner, tag_list: %w(mysql ruby)) } let(:project) { FactoryGirl.create(:empty_project, shared_runners_enabled: false) } let(:last_update) { nil } @@ -187,7 +185,7 @@ describe Ci::API::Builds do { "key" => "CI_PIPELINE_TRIGGERED", "value" => "true", "public" => true }, { "key" => "DB_NAME", "value" => "postgres", "public" => true }, { "key" => "SECRET_KEY", "value" => "secret_value", "public" => false }, - { "key" => "TRIGGER_KEY_1", "value" => "TRIGGER_VALUE_1", "public" => false }, + { "key" => "TRIGGER_KEY_1", "value" => "TRIGGER_VALUE_1", "public" => false } ) end end @@ -285,7 +283,7 @@ describe Ci::API::Builds do end it 'does not override trace information when no trace is given' do - expect(build.reload.trace).to eq 'BUILD TRACE' + expect(build.reload.trace.raw).to eq 'BUILD TRACE' end context 'job has been erased' do @@ -309,9 +307,11 @@ describe Ci::API::Builds do def patch_the_trace(content = ' appended', request_headers = nil) unless request_headers - offset = build.trace_length - limit = offset + content.length - 1 - request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" }) + build.trace.read do |stream| + offset = stream.size + limit = offset + content.length - 1 + request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" }) + end end Timecop.travel(build.updated_at + update_interval) do @@ -335,7 +335,7 @@ describe Ci::API::Builds do context 'when request is valid' do it 'gets correct response' do expect(response.status).to eq 202 - expect(build.reload.trace).to eq 'BUILD TRACE appended' + expect(build.reload.trace.raw).to eq 'BUILD TRACE appended' expect(response.header).to have_key 'Range' expect(response.header).to have_key 'Build-Status' end @@ -346,7 +346,7 @@ describe Ci::API::Builds do it 'changes the build trace' do patch_the_trace - expect(build.reload.trace).to eq 'BUILD TRACE appended appended' + expect(build.reload.trace.raw).to eq 'BUILD TRACE appended appended' end context 'when Runner makes a force-patch' do @@ -355,7 +355,7 @@ describe Ci::API::Builds do it "doesn't change the build.trace" do force_patch_the_trace - expect(build.reload.trace).to eq 'BUILD TRACE appended' + expect(build.reload.trace.raw).to eq 'BUILD TRACE appended' end end end @@ -368,7 +368,7 @@ describe Ci::API::Builds do it 'changes the build.trace' do patch_the_trace - expect(build.reload.trace).to eq 'BUILD TRACE appended appended' + expect(build.reload.trace.raw).to eq 'BUILD TRACE appended appended' end context 'when Runner makes a force-patch' do @@ -377,7 +377,7 @@ describe Ci::API::Builds do it "doesn't change the build.trace" do force_patch_the_trace - expect(build.reload.trace).to eq 'BUILD TRACE appended' + expect(build.reload.trace.raw).to eq 'BUILD TRACE appended' end end end @@ -403,7 +403,7 @@ describe Ci::API::Builds do it 'gets correct response' do expect(response.status).to eq 202 - expect(build.reload.trace).to eq 'BUILD TRACE appended' + expect(build.reload.trace.raw).to eq 'BUILD TRACE appended' expect(response.header).to have_key 'Range' expect(response.header).to have_key 'Build-Status' end diff --git a/spec/requests/ci/api/runners_spec.rb b/spec/requests/ci/api/runners_spec.rb index d50cdfdc2d6..0b9733221d8 100644 
--- a/spec/requests/ci/api/runners_spec.rb +++ b/spec/requests/ci/api/runners_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' describe Ci::API::Runners do - include ApiHelpers include StubGitlabCalls let(:registration_token) { 'abcdefg123456' } diff --git a/spec/requests/ci/api/triggers_spec.rb b/spec/requests/ci/api/triggers_spec.rb index 5321f8b134f..26b03c0f148 100644 --- a/spec/requests/ci/api/triggers_spec.rb +++ b/spec/requests/ci/api/triggers_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe Ci::API::Triggers do - include ApiHelpers - describe 'POST /projects/:project_id/refs/:ref/trigger' do let!(:trigger_token) { 'secure token' } let!(:project) { create(:project, :repository, ci_id: 10) } diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb index 006d6a6af1c..6ca3ef18fe6 100644 --- a/spec/requests/git_http_spec.rb +++ b/spec/requests/git_http_spec.rb @@ -3,6 +3,7 @@ require "spec_helper" describe 'Git HTTP requests', lib: true do include GitHttpHelpers include WorkhorseHelpers + include UserActivitiesHelpers it "gives WWW-Authenticate hints" do clone_get('doesnt/exist.git') @@ -255,6 +256,14 @@ describe 'Git HTTP requests', lib: true do expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) end end + + it 'updates the user last activity', :redis do + expect(user_activity(user)).to be_nil + + download(path, env) do |response| + expect(user_activity(user)).to be_present + end + end end context "when an oauth token is provided" do @@ -270,10 +279,10 @@ describe 'Git HTTP requests', lib: true do expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) end - it "uploads get status 401 (no project existence information leak)" do + it "uploads get status 200" do push_get "#{project.path_with_namespace}.git", user: 'oauth2', password: @token.token - expect(response).to have_http_status(401) + expect(response).to have_http_status(200) end end diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb index 5d495bc9e7d..0c9b4121adf 100644 --- a/spec/requests/lfs_http_spec.rb +++ b/spec/requests/lfs_http_spec.rb @@ -425,7 +425,7 @@ describe 'Git LFS API and storage' do 'size' => sample_size, 'error' => { 'code' => 404, - 'message' => "Object does not exist on the server or you don't have permissions to access it", + 'message' => "Object does not exist on the server or you don't have permissions to access it" } } ] @@ -456,7 +456,7 @@ describe 'Git LFS API and storage' do 'size' => 1575078, 'error' => { 'code' => 404, - 'message' => "Object does not exist on the server or you don't have permissions to access it", + 'message' => "Object does not exist on the server or you don't have permissions to access it" } } ] @@ -493,7 +493,7 @@ describe 'Git LFS API and storage' do 'size' => 1575078, 'error' => { 'code' => 404, - 'message' => "Object does not exist on the server or you don't have permissions to access it", + 'message' => "Object does not exist on the server or you don't have permissions to access it" } }, { diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb index 5206634bca5..05176c3beaa 100644 --- a/spec/requests/openid_connect_spec.rb +++ b/spec/requests/openid_connect_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe 'OpenID Connect requests' do - include ApiHelpers - let(:user) { create :user } let(:access_grant) { create :oauth_access_grant, application: application, resource_owner_id: user.id } let(:access_token) { create 
:oauth_access_token, application: application, resource_owner_id: user.id } @@ -63,7 +61,7 @@ describe 'OpenID Connect requests' do email: private_email.email, public_email: public_email.email, website_url: 'https://example.com', - avatar: fixture_file_upload(Rails.root + "spec/fixtures/dk.png"), + avatar: fixture_file_upload(Rails.root + "spec/fixtures/dk.png") ) end @@ -81,7 +79,7 @@ describe 'OpenID Connect requests' do 'email_verified' => true, 'website' => 'https://example.com', 'profile' => 'http://localhost/alice', - 'picture' => "http://localhost/uploads/user/avatar/#{user.id}/dk.png", + 'picture' => "http://localhost/uploads/user/avatar/#{user.id}/dk.png" }) end end @@ -100,7 +98,7 @@ describe 'OpenID Connect requests' do expect(@payload['sub']).to eq hashed_subject end - it 'includes the time of the last authentication' do + it 'includes the time of the last authentication', :redis do expect(@payload['auth_time']).to eq user.current_sign_in_at.to_i end diff --git a/spec/requests/projects/artifacts_controller_spec.rb b/spec/requests/projects/artifacts_controller_spec.rb deleted file mode 100644 index d20866c0d44..00000000000 --- a/spec/requests/projects/artifacts_controller_spec.rb +++ /dev/null @@ -1,117 +0,0 @@ -require 'spec_helper' - -describe Projects::ArtifactsController do - let(:user) { create(:user) } - let(:project) { create(:project, :repository) } - - let(:pipeline) do - create(:ci_pipeline, - project: project, - sha: project.commit.sha, - ref: project.default_branch, - status: 'success') - end - - let(:build) { create(:ci_build, :success, :artifacts, pipeline: pipeline) } - - describe 'GET /:project/builds/artifacts/:ref_name/browse?job=name' do - before do - project.team << [user, :developer] - - login_as(user) - end - - def path_from_ref( - ref = pipeline.ref, job = build.name, path = 'browse') - latest_succeeded_namespace_project_artifacts_path( - project.namespace, - project, - [ref, path].join('/'), - job: job) - end - - context 'cannot find the build' do - shared_examples 'not found' do - it { expect(response).to have_http_status(:not_found) } - end - - context 'has no such ref' do - before do - get path_from_ref('TAIL', build.name) - end - - it_behaves_like 'not found' - end - - context 'has no such build' do - before do - get path_from_ref(pipeline.ref, 'NOBUILD') - end - - it_behaves_like 'not found' - end - - context 'has no path' do - before do - get path_from_ref(pipeline.sha, build.name, '') - end - - it_behaves_like 'not found' - end - end - - context 'found the build and redirect' do - shared_examples 'redirect to the build' do - it 'redirects' do - path = browse_namespace_project_build_artifacts_path( - project.namespace, - project, - build) - - expect(response).to redirect_to(path) - end - end - - context 'with regular branch' do - before do - pipeline.update(ref: 'master', - sha: project.commit('master').sha) - - get path_from_ref('master') - end - - it_behaves_like 'redirect to the build' - end - - context 'with branch name containing slash' do - before do - pipeline.update(ref: 'improve/awesome', - sha: project.commit('improve/awesome').sha) - - get path_from_ref('improve/awesome') - end - - it_behaves_like 'redirect to the build' - end - - context 'with branch name and path containing slashes' do - before do - pipeline.update(ref: 'improve/awesome', - sha: project.commit('improve/awesome').sha) - - get path_from_ref('improve/awesome', build.name, 'file/README.md') - end - - it 'redirects' do - path = file_namespace_project_build_artifacts_path( - 
project.namespace, - project, - build, - 'README.md') - - expect(response).to redirect_to(path) - end - end - end - end -end diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb index 0edbffbcd3b..d92daa345b3 100644 --- a/spec/requests/projects/cycle_analytics_events_spec.rb +++ b/spec/requests/projects/cycle_analytics_events_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe 'cycle analytics events' do - include ApiHelpers - +describe 'cycle analytics events', api: true do let(:user) { create(:user) } let(:project) { create(:project, :repository, public_builds: false) } let(:issue) { create(:issue, project: project, created_at: 2.days.ago) } @@ -11,8 +9,6 @@ describe 'cycle analytics events' do before do project.team << [user, :developer] - allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue]) - 3.times do |count| Timecop.freeze(Time.now + count.days) do create_cycle @@ -123,9 +119,10 @@ describe 'cycle analytics events' do def create_cycle milestone = create(:milestone, project: project) issue.update(milestone: milestone) - mr = create_merge_request_closing_issue(issue) + mr = create_merge_request_closing_issue(issue, commit_message: "References #{issue.to_reference}") pipeline = create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha) + mr.update(head_pipeline_id: pipeline.id) pipeline.run create(:ci_build, pipeline: pipeline, status: :success, author: user) diff --git a/spec/requests/request_profiler_spec.rb b/spec/requests/request_profiler_spec.rb new file mode 100644 index 00000000000..51fbfecec4b --- /dev/null +++ b/spec/requests/request_profiler_spec.rb @@ -0,0 +1,44 @@ +require 'spec_helper' + +describe 'Request Profiler' do + let(:user) { create(:user) } + + shared_examples 'profiling a request' do + before do + allow(Rails).to receive(:cache).and_return(ActiveSupport::Cache::MemoryStore.new) + allow(RubyProf::Profile).to receive(:profile) do |&blk| + blk.call + RubyProf::Profile.new + end + end + + it 'creates a profile of the request' do + project = create(:project, namespace: user.namespace) + time = Time.now + path = "/#{project.path_with_namespace}" + + Timecop.freeze(time) do + get path, nil, 'X-Profile-Token' => Gitlab::RequestProfiler.profile_token + end + + profile_path = "#{Gitlab.config.shared.path}/tmp/requests_profiles/#{path.tr('/', '|')}_#{time.to_i}.html" + expect(File.exist?(profile_path)).to be true + end + + after do + Gitlab::RequestProfiler.remove_all_profiles + end + end + + context "when user is logged-in" do + before do + login_as(user) + end + + include_examples 'profiling a request' + end + + context "when user is not logged-in" do + include_examples 'profiling a request' + end +end diff --git a/spec/routing/admin_routing_spec.rb b/spec/routing/admin_routing_spec.rb index 99c44bde151..e5fc0b676af 100644 --- a/spec/routing/admin_routing_spec.rb +++ b/spec/routing/admin_routing_spec.rb @@ -71,13 +71,15 @@ describe Admin::ProjectsController, "routing" do end end -# admin_hook_test GET /admin/hooks/:hook_id/test(.:format) admin/hooks#test +# admin_hook_test GET /admin/hooks/:id/test(.:format) admin/hooks#test # admin_hooks GET /admin/hooks(.:format) admin/hooks#index # POST /admin/hooks(.:format) admin/hooks#create # admin_hook DELETE /admin/hooks/:id(.:format) admin/hooks#destroy +# PUT /admin/hooks/:id(.:format) admin/hooks#update +# edit_admin_hook GET /admin/hooks/:id(.:format) 
admin/hooks#edit describe Admin::HooksController, "routing" do it "to #test" do - expect(get("/admin/hooks/1/test")).to route_to('admin/hooks#test', hook_id: '1') + expect(get("/admin/hooks/1/test")).to route_to('admin/hooks#test', id: '1') end it "to #index" do @@ -88,6 +90,14 @@ describe Admin::HooksController, "routing" do expect(post("/admin/hooks")).to route_to('admin/hooks#create') end + it "to #edit" do + expect(get("/admin/hooks/1/edit")).to route_to('admin/hooks#edit', id: '1') + end + + it "to #update" do + expect(put("/admin/hooks/1")).to route_to('admin/hooks#update', id: '1') + end + it "to #destroy" do expect(delete("/admin/hooks/1")).to route_to('admin/hooks#destroy', id: '1') end diff --git a/spec/routing/environments_spec.rb b/spec/routing/environments_spec.rb index ba124de70bb..624f3c43f0a 100644 --- a/spec/routing/environments_spec.rb +++ b/spec/routing/environments_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Projects::EnvironmentsController, :routing do +describe 'environments routing', :routing do let(:project) { create(:empty_project) } let(:environment) do diff --git a/spec/routing/notifications_routing_spec.rb b/spec/routing/notifications_routing_spec.rb index 24592942a96..54ed87b5520 100644 --- a/spec/routing/notifications_routing_spec.rb +++ b/spec/routing/notifications_routing_spec.rb @@ -1,13 +1,11 @@ require "spec_helper" -describe Profiles::NotificationsController do - describe "routing" do - it "routes to #show" do - expect(get("/profile/notifications")).to route_to("profiles/notifications#show") - end +describe "notifications routing" do + it "routes to #show" do + expect(get("/profile/notifications")).to route_to("profiles/notifications#show") + end - it "routes to #update" do - expect(put("/profile/notifications")).to route_to("profiles/notifications#update") - end + it "routes to #update" do + expect(put("/profile/notifications")).to route_to("profiles/notifications#update") end end diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb index 4baccacd448..d5400bbaaf1 100644 --- a/spec/routing/project_routing_spec.rb +++ b/spec/routing/project_routing_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe 'project routing' do before do allow(Project).to receive(:find_by_full_path).and_return(false) - allow(Project).to receive(:find_by_full_path).with('gitlab/gitlabhq').and_return(true) + allow(Project).to receive(:find_by_full_path).with('gitlab/gitlabhq', any_args).and_return(true) end # Shared examples for a resource inside a Project @@ -93,13 +93,13 @@ describe 'project routing' do end context 'name with dot' do - before { allow(Project).to receive(:find_by_full_path).with('gitlab/gitlabhq.keys').and_return(true) } + before { allow(Project).to receive(:find_by_full_path).with('gitlab/gitlabhq.keys', any_args).and_return(true) } it { expect(get('/gitlab/gitlabhq.keys')).to route_to('projects#show', namespace_id: 'gitlab', id: 'gitlabhq.keys') } end context 'with nested group' do - before { allow(Project).to receive(:find_by_full_path).with('gitlab/subgroup/gitlabhq').and_return(true) } + before { allow(Project).to receive(:find_by_full_path).with('gitlab/subgroup/gitlabhq', any_args).and_return(true) } it { expect(get('/gitlab/subgroup/gitlabhq')).to route_to('projects#show', namespace_id: 'gitlab/subgroup', id: 'gitlabhq') } end @@ -243,7 +243,6 @@ describe 'project routing' do # diffs_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/diffs(.:format) 
projects/merge_requests#diffs # commits_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/commits(.:format) projects/merge_requests#commits # merge_namespace_project_merge_request POST /:namespace_id/:project_id/merge_requests/:id/merge(.:format) projects/merge_requests#merge - # merge_check_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/merge_check(.:format) projects/merge_requests#merge_check # ci_status_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/ci_status(.:format) projects/merge_requests#ci_status # toggle_subscription_namespace_project_merge_request POST /:namespace_id/:project_id/merge_requests/:id/toggle_subscription(.:format) projects/merge_requests#toggle_subscription # branch_from_namespace_project_merge_requests GET /:namespace_id/:project_id/merge_requests/branch_from(.:format) projects/merge_requests#branch_from @@ -272,10 +271,6 @@ describe 'project routing' do ) end - it 'to #merge_check' do - expect(get('/gitlab/gitlabhq/merge_requests/1/merge_check')).to route_to('projects/merge_requests#merge_check', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1') - end - it 'to #branch_from' do expect(get('/gitlab/gitlabhq/merge_requests/branch_from')).to route_to('projects/merge_requests#branch_from', namespace_id: 'gitlab', project_id: 'gitlabhq') end @@ -340,14 +335,16 @@ describe 'project routing' do # test_project_hook GET /:project_id/hooks/:id/test(.:format) hooks#test # project_hooks GET /:project_id/hooks(.:format) hooks#index # POST /:project_id/hooks(.:format) hooks#create - # project_hook DELETE /:project_id/hooks/:id(.:format) hooks#destroy + # edit_project_hook GET /:project_id/hooks/:id/edit(.:format) hooks#edit + # project_hook PUT /:project_id/hooks/:id(.:format) hooks#update + # DELETE /:project_id/hooks/:id(.:format) hooks#destroy describe Projects::HooksController, 'routing' do it 'to #test' do expect(get('/gitlab/gitlabhq/hooks/1/test')).to route_to('projects/hooks#test', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1') end it_behaves_like 'RESTful project resources' do - let(:actions) { [:index, :create, :destroy] } + let(:actions) { [:index, :create, :destroy, :edit, :update] } let(:controller) { 'hooks' } end end @@ -484,7 +481,7 @@ describe 'project routing' do end it 'to #list' do - expect(get('/gitlab/gitlabhq/files/master.json')).to route_to('projects/find_file#list', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master', format: 'json') + expect(get('/gitlab/gitlabhq/files/master.json')).to route_to('projects/find_file#list', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master.json') end end diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb index 9f6defe1450..abacc50a371 100644 --- a/spec/routing/routing_spec.rb +++ b/spec/routing/routing_spec.rb @@ -249,17 +249,34 @@ describe RootController, 'routing' do end end -# new_user_session GET /users/sign_in(.:format) devise/sessions#new -# user_session POST /users/sign_in(.:format) devise/sessions#create -# destroy_user_session DELETE /users/sign_out(.:format) devise/sessions#destroy -# user_omniauth_authorize /users/auth/:provider(.:format) omniauth_callbacks#passthru -# user_omniauth_callback /users/auth/:action/callback(.:format) omniauth_callbacks#(?-mix:(?!)) -# user_password POST /users/password(.:format) devise/passwords#create -# new_user_password GET /users/password/new(.:format) devise/passwords#new -# edit_user_password GET 
/users/password/edit(.:format) devise/passwords#edit -# PUT /users/password(.:format) devise/passwords#update describe "Authentication", "routing" do - # pending + it "GET /users/sign_in" do + expect(get("/users/sign_in")).to route_to('sessions#new') + end + + it "POST /users/sign_in" do + expect(post("/users/sign_in")).to route_to('sessions#create') + end + + it "DELETE /users/sign_out" do + expect(delete("/users/sign_out")).to route_to('sessions#destroy') + end + + it "POST /users/password" do + expect(post("/users/password")).to route_to('passwords#create') + end + + it "GET /users/password/new" do + expect(get("/users/password/new")).to route_to('passwords#new') + end + + it "GET /users/password/edit" do + expect(get("/users/password/edit")).to route_to('passwords#edit') + end + + it "PUT /users/password" do + expect(put("/users/password")).to route_to('passwords#update') + end end describe "Groups", "routing" do diff --git a/spec/rubocop/cop/migration/add_column_with_default_to_large_table_spec.rb b/spec/rubocop/cop/migration/add_column_with_default_to_large_table_spec.rb new file mode 100644 index 00000000000..07cb3fc4a2e --- /dev/null +++ b/spec/rubocop/cop/migration/add_column_with_default_to_large_table_spec.rb @@ -0,0 +1,44 @@ +require 'spec_helper' + +require 'rubocop' +require 'rubocop/rspec/support' + +require_relative '../../../../rubocop/cop/migration/add_column_with_default_to_large_table' + +describe RuboCop::Cop::Migration::AddColumnWithDefaultToLargeTable do + include CopHelper + + subject(:cop) { described_class.new } + + context 'in migration' do + before do + allow(cop).to receive(:in_migration?).and_return(true) + end + + described_class::LARGE_TABLES.each do |table| + it "registers an offense for the #{table} table" do + inspect_source(cop, "add_column_with_default :#{table}, :column, default: true") + + aggregate_failures do + expect(cop.offenses.size).to eq(1) + expect(cop.offenses.map(&:line)).to eq([1]) + end + end + end + + it 'registers no offense for non-blacklisted tables' do + inspect_source(cop, "add_column_with_default :table, :column, default: true") + + expect(cop.offenses).to be_empty + end + end + + context 'outside of migration' do + it 'registers no offense' do + table = described_class::LARGE_TABLES.sample + inspect_source(cop, "add_column_with_default :#{table}, :column, default: true") + + expect(cop.offenses).to be_empty + end + end +end diff --git a/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb b/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb new file mode 100644 index 00000000000..a714bf4e5d5 --- /dev/null +++ b/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb @@ -0,0 +1,41 @@ +require 'spec_helper' + +require 'rubocop' +require 'rubocop/rspec/support' + +require_relative '../../../../rubocop/cop/migration/remove_concurrent_index' + +describe RuboCop::Cop::Migration::RemoveConcurrentIndex do + include CopHelper + + subject(:cop) { described_class.new } + + context 'in migration' do + before do + allow(cop).to receive(:in_migration?).and_return(true) + end + + it 'registers an offense when remove_concurrent_index is used inside a change method' do + inspect_source(cop, 'def change; remove_concurrent_index :table, :column; end') + + aggregate_failures do + expect(cop.offenses.size).to eq(1) + expect(cop.offenses.map(&:line)).to eq([1]) + end + end + + it 'registers no offense when remove_concurrent_index is used inside an up method' do + inspect_source(cop, 'def up; remove_concurrent_index :table, :column; 
end') + + expect(cop.offenses.size).to eq(0) + end + end + + context 'outside of migration' do + it 'registers no offense' do + inspect_source(cop, 'def change; remove_concurrent_index :table, :column; end') + + expect(cop.offenses.size).to eq(0) + end + end +end diff --git a/spec/rubocop/cop/migration/remove_index_spec.rb b/spec/rubocop/cop/migration/remove_index_spec.rb new file mode 100644 index 00000000000..31923cb7429 --- /dev/null +++ b/spec/rubocop/cop/migration/remove_index_spec.rb @@ -0,0 +1,35 @@ +require 'spec_helper' + +require 'rubocop' +require 'rubocop/rspec/support' + +require_relative '../../../../rubocop/cop/migration/remove_index' + +describe RuboCop::Cop::Migration::RemoveIndex do + include CopHelper + + subject(:cop) { described_class.new } + + context 'in migration' do + before do + allow(cop).to receive(:in_migration?).and_return(true) + end + + it 'registers an offense when remove_index is used' do + inspect_source(cop, 'def change; remove_index :table, :column; end') + + aggregate_failures do + expect(cop.offenses.size).to eq(1) + expect(cop.offenses.map(&:line)).to eq([1]) + end + end + end + + context 'outside of migration' do + it 'registers no offense' do + inspect_source(cop, 'def change; remove_index :table, :column; end') + + expect(cop.offenses.size).to eq(0) + end + end +end diff --git a/spec/rubocop/cop/migration/add_column_with_default_spec.rb b/spec/rubocop/cop/migration/reversible_add_column_with_default_spec.rb index 6b9b6b19650..3723d635083 100644 --- a/spec/rubocop/cop/migration/add_column_with_default_spec.rb +++ b/spec/rubocop/cop/migration/reversible_add_column_with_default_spec.rb @@ -3,9 +3,9 @@ require 'spec_helper' require 'rubocop' require 'rubocop/rspec/support' -require_relative '../../../../rubocop/cop/migration/add_column_with_default' +require_relative '../../../../rubocop/cop/migration/reversible_add_column_with_default' -describe RuboCop::Cop::Migration::AddColumnWithDefault do +describe RuboCop::Cop::Migration::ReversibleAddColumnWithDefault do include CopHelper subject(:cop) { described_class.new } diff --git a/spec/serializers/analytics_generic_entity_spec.rb b/spec/serializers/analytics_issue_entity_spec.rb index 68086216ba9..75d606d5eb3 100644 --- a/spec/serializers/analytics_generic_entity_spec.rb +++ b/spec/serializers/analytics_issue_entity_spec.rb @@ -9,7 +9,7 @@ describe AnalyticsIssueEntity do iid: "1", id: "1", created_at: "2016-11-12 15:04:02.948604", - author: user, + author: user } end diff --git a/spec/serializers/analytics_issue_serializer_spec.rb b/spec/serializers/analytics_issue_serializer_spec.rb index ba24cf8e481..7c14c198a74 100644 --- a/spec/serializers/analytics_issue_serializer_spec.rb +++ b/spec/serializers/analytics_issue_serializer_spec.rb @@ -16,7 +16,7 @@ describe AnalyticsIssueSerializer do iid: "1", id: "1", created_at: "2016-11-12 15:04:02.948604", - author: user, + author: user } end diff --git a/spec/serializers/build_action_entity_spec.rb b/spec/serializers/build_action_entity_spec.rb index 0f7be8b2c39..059deba5416 100644 --- a/spec/serializers/build_action_entity_spec.rb +++ b/spec/serializers/build_action_entity_spec.rb @@ -2,9 +2,10 @@ require 'spec_helper' describe BuildActionEntity do let(:build) { create(:ci_build, name: 'test_build') } + let(:request) { double('request') } let(:entity) do - described_class.new(build, request: double) + described_class.new(build, request: spy('request')) end describe '#as_json' do @@ -17,5 +18,9 @@ describe BuildActionEntity do it 'contains path to the action 
play' do expect(subject[:path]).to include "builds/#{build.id}/play" end + + it 'contains whether it is playable' do + expect(subject[:playable]).to eq build.playable? + end end end diff --git a/spec/serializers/build_entity_spec.rb b/spec/serializers/build_entity_spec.rb index 7dcdf54fd93..b5eb84ae43b 100644 --- a/spec/serializers/build_entity_spec.rb +++ b/spec/serializers/build_entity_spec.rb @@ -6,7 +6,7 @@ describe BuildEntity do let(:request) { double('request') } before do - allow(request).to receive(:user).and_return(user) + allow(request).to receive(:current_user).and_return(user) end let(:entity) do @@ -24,6 +24,10 @@ describe BuildEntity do expect(subject).not_to include(/variables/) end + it 'contains whether it is playable' do + expect(subject[:playable]).to eq build.playable? + end + it 'contains timestamps' do expect(subject).to include(:created_at, :updated_at) end @@ -37,13 +41,37 @@ describe BuildEntity do it 'does not contain path to play action' do expect(subject).not_to include(:play_path) end + + it 'is not a playable job' do + expect(subject[:playable]).to be false + end end context 'when build is a manual action' do let(:build) { create(:ci_build, :manual) } - it 'contains path to play action' do - expect(subject).to include(:play_path) + context 'when user is allowed to trigger action' do + before do + build.project.add_master(user) + end + + it 'contains path to play action' do + expect(subject).to include(:play_path) + end + + it 'is a playable action' do + expect(subject[:playable]).to be true + end + end + + context 'when user is not allowed to trigger action' do + it 'does not contain path to play action' do + expect(subject).not_to include(:play_path) + end + + it 'is not a playable action' do + expect(subject[:playable]).to be false + end end end end diff --git a/spec/serializers/build_serializer_spec.rb b/spec/serializers/build_serializer_spec.rb index 3cc791bca50..01e2cfed6f8 100644 --- a/spec/serializers/build_serializer_spec.rb +++ b/spec/serializers/build_serializer_spec.rb @@ -4,7 +4,7 @@ describe BuildSerializer do let(:user) { create(:user) } let(:serializer) do - described_class.new(user: user) + described_class.new(current_user: user) end subject { serializer.represent(resource) } @@ -38,7 +38,7 @@ describe BuildSerializer do expect(subject[:text]).to eq(status.text) expect(subject[:label]).to eq(status.label) expect(subject[:icon]).to eq(status.icon) - expect(subject[:favicon]).to eq(status.favicon) + expect(subject[:favicon]).to eq("/assets/ci_favicons/#{status.favicon}.ico") end end end diff --git a/spec/serializers/deploy_key_entity_spec.rb b/spec/serializers/deploy_key_entity_spec.rb new file mode 100644 index 00000000000..e73fbe190ca --- /dev/null +++ b/spec/serializers/deploy_key_entity_spec.rb @@ -0,0 +1,38 @@ +require 'spec_helper' + +describe DeployKeyEntity do + include RequestAwareEntity + + let(:user) { create(:user) } + let(:project) { create(:empty_project, :internal)} + let(:project_private) { create(:empty_project, :private)} + let(:deploy_key) { create(:deploy_key) } + let!(:deploy_key_internal) { create(:deploy_keys_project, project: project, deploy_key: deploy_key) } + let!(:deploy_key_private) { create(:deploy_keys_project, project: project_private, deploy_key: deploy_key) } + + let(:entity) { described_class.new(deploy_key, user: user) } + + it 'returns deploy keys with projects a user can read' do + expected_result = { + id: deploy_key.id, + user_id: deploy_key.user_id, + title: deploy_key.title, + fingerprint: 
deploy_key.fingerprint, + can_push: deploy_key.can_push, + destroyed_when_orphaned: true, + almost_orphaned: false, + created_at: deploy_key.created_at, + updated_at: deploy_key.updated_at, + projects: [ + { + id: project.id, + name: project.name, + full_path: namespace_project_path(project.namespace, project), + full_name: project.full_name + } + ] + } + + expect(entity.as_json).to eq(expected_result) + end +end diff --git a/spec/serializers/deployment_entity_spec.rb b/spec/serializers/deployment_entity_spec.rb index 95eca5463eb..522c92ce295 100644 --- a/spec/serializers/deployment_entity_spec.rb +++ b/spec/serializers/deployment_entity_spec.rb @@ -3,25 +3,23 @@ require 'spec_helper' describe DeploymentEntity do let(:user) { create(:user) } let(:request) { double('request') } + let(:deployment) { create(:deployment) } + let(:entity) { described_class.new(deployment, request: request) } + subject { entity.as_json } before do - allow(request).to receive(:user).and_return(user) + allow(request).to receive(:current_user).and_return(user) end - let(:entity) do - described_class.new(deployment, request: request) - end - - let(:deployment) { create(:deployment) } - - subject { entity.as_json } - it 'exposes internal deployment id' do expect(subject).to include(:iid) end it 'exposes nested information about branch' do expect(subject[:ref][:name]).to eq 'master' - expect(subject[:ref][:ref_path]).not_to be_empty + end + + it 'exposes creation date' do + expect(subject).to include(:created_at) end end diff --git a/spec/serializers/environment_serializer_spec.rb b/spec/serializers/environment_serializer_spec.rb index 1909e6385b5..d2ad6c44702 100644 --- a/spec/serializers/environment_serializer_spec.rb +++ b/spec/serializers/environment_serializer_spec.rb @@ -6,7 +6,7 @@ describe EnvironmentSerializer do let(:json) do described_class - .new(user: user, project: project) + .new(current_user: user, project: project) .represent(resource) end diff --git a/spec/serializers/event_entity_spec.rb b/spec/serializers/event_entity_spec.rb new file mode 100644 index 00000000000..bb54597c967 --- /dev/null +++ b/spec/serializers/event_entity_spec.rb @@ -0,0 +1,13 @@ +require 'spec_helper' + +describe EventEntity do + subject { described_class.represent(create(:event)).as_json } + + it 'exposes author' do + expect(subject).to include(:author) + end + + it 'exposes core elements of event' do + expect(subject).to include(:updated_at) + end +end diff --git a/spec/serializers/label_serializer_spec.rb b/spec/serializers/label_serializer_spec.rb new file mode 100644 index 00000000000..c58c7da1f9e --- /dev/null +++ b/spec/serializers/label_serializer_spec.rb @@ -0,0 +1,46 @@ +require 'spec_helper' + +describe LabelSerializer do + let(:user) { create(:user) } + + let(:serializer) do + described_class.new(user: user) + end + + subject { serializer.represent(resource) } + + describe '#represent' do + context 'when a single object is being serialized' do + let(:resource) { create(:label) } + + it 'serializes the label object' do + expect(subject[:id]).to eq resource.id + end + end + + context 'when multiple objects are being serialized' do + let(:num_labels) { 2 } + let(:resource) { create_list(:label, num_labels) } + + it 'serializes the array of labels' do + expect(subject.size).to eq(num_labels) + end + end + end + + describe '#represent_appearance' do + context 'when represents only appearance' do + let(:resource) { create(:label) } + + subject { serializer.represent_appearance(resource) } + + it 'serializes only 
attributes used for appearance' do + expect(subject.keys).to eq([:id, :title, :color, :text_color]) + expect(subject[:id]).to eq(resource.id) + expect(subject[:title]).to eq(resource.title) + expect(subject[:color]).to eq(resource.color) + expect(subject[:text_color]).to eq(resource.text_color) + end + end + end +end diff --git a/spec/serializers/merge_request_basic_serializer_spec.rb b/spec/serializers/merge_request_basic_serializer_spec.rb new file mode 100644 index 00000000000..4daf5a59d0c --- /dev/null +++ b/spec/serializers/merge_request_basic_serializer_spec.rb @@ -0,0 +1,12 @@ +require 'spec_helper' + +describe MergeRequestBasicSerializer do + let(:resource) { create(:merge_request) } + let(:user) { create(:user) } + + subject { described_class.new.represent(resource) } + + it 'has important MergeRequest attributes' do + expect(subject).to include(:merge_status) + end +end diff --git a/spec/serializers/merge_request_entity_spec.rb b/spec/serializers/merge_request_entity_spec.rb new file mode 100644 index 00000000000..b75c73e78c2 --- /dev/null +++ b/spec/serializers/merge_request_entity_spec.rb @@ -0,0 +1,145 @@ +require 'spec_helper' + +describe MergeRequestEntity do + let(:project) { create :empty_project } + let(:resource) { create(:merge_request, source_project: project, target_project: project) } + let(:user) { create(:user) } + + let(:request) { double('request', current_user: user) } + + subject do + described_class.new(resource, request: request).as_json + end + + it 'includes author' do + req = double('request') + + author_payload = UserEntity + .represent(resource.author, request: req) + .as_json + + expect(subject[:author]).to eq(author_payload) + end + + it 'includes pipeline' do + req = double('request', current_user: user) + pipeline = build_stubbed(:ci_pipeline) + allow(resource).to receive(:head_pipeline).and_return(pipeline) + + pipeline_payload = PipelineEntity + .represent(pipeline, request: req) + .as_json + + expect(subject[:pipeline]).to eq(pipeline_payload) + end + + it 'includes issues_links' do + issues_links = subject[:issues_links] + + expect(issues_links).to include(:closing, :mentioned_but_not_closing, + :assign_to_closing) + end + + it 'has important MergeRequest attributes' do + expect(subject).to include(:diff_head_sha, :merge_commit_message, + :has_conflicts, :has_ci, :merge_path, + :conflict_resolution_path, + :cancel_merge_when_pipeline_succeeds_path, + :create_issue_to_resolve_discussions_path, + :source_branch_path, :target_branch_commits_path, + :commits_count) + end + + it 'has email_patches_path' do + expect(subject[:email_patches_path]) + .to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}.patch") + end + + it 'has plain_diff_path' do + expect(subject[:plain_diff_path]) + .to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}.diff") + end + + it 'has merge_commit_message_with_description' do + expect(subject[:merge_commit_message_with_description]) + .to eq(resource.merge_commit_message(include_description: true)) + end + + describe 'new_blob_path' do + context 'when user can push to project' do + it 'returns path' do + project.add_developer(user) + + expect(subject[:new_blob_path]) + .to eq("/#{resource.project.full_path}/new/#{resource.source_branch}") + end + end + + context 'when user cannot push to project' do + it 'returns nil' do + expect(subject[:new_blob_path]).to be_nil + end + end + end + + describe 'diff_head_sha' do + before do + allow(resource).to receive(:diff_head_sha) { 'sha' } + end + + 
context 'when no diff head commit' do + it 'returns nil' do + allow(resource).to receive(:diff_head_commit) { nil } + + expect(subject[:diff_head_sha]).to be_nil + end + end + + context 'when diff head commit present' do + it 'returns diff head commit short id' do + allow(resource).to receive(:diff_head_commit) { double } + + expect(subject[:diff_head_sha]).to eq('sha') + end + end + end + + it 'includes merge_event' do + create(:event, :merged, author: user, project: resource.project, target: resource) + + expect(subject[:merge_event]).to include(:author, :updated_at) + end + + it 'includes closed_event' do + create(:event, :closed, author: user, project: resource.project, target: resource) + + expect(subject[:closed_event]).to include(:author, :updated_at) + end + + describe 'diverged_commits_count' do + context 'when MR open and its diverging' do + it 'returns diverged commits count' do + allow(resource).to receive_messages(open?: true, diverged_from_target_branch?: true, + diverged_commits_count: 10) + + expect(subject[:diverged_commits_count]).to eq(10) + end + end + + context 'when MR is not open' do + it 'returns 0' do + allow(resource).to receive_messages(open?: false) + + expect(subject[:diverged_commits_count]).to be_zero + end + end + + context 'when MR is not diverging' do + it 'returns 0' do + allow(resource).to receive_messages(open?: true, diverged_from_target_branch?: false) + + expect(subject[:diverged_commits_count]).to be_zero + end + end + end +end diff --git a/spec/serializers/merge_request_serializer_spec.rb b/spec/serializers/merge_request_serializer_spec.rb new file mode 100644 index 00000000000..73fbecc153d --- /dev/null +++ b/spec/serializers/merge_request_serializer_spec.rb @@ -0,0 +1,37 @@ +require 'spec_helper' + +describe MergeRequestSerializer do + let(:user) { build_stubbed(:user) } + let(:merge_request) { build_stubbed(:merge_request) } + + let(:serializer) do + described_class.new(current_user: user) + end + + describe '#represent' do + let(:opts) { { basic: basic } } + subject { serializer.represent(merge_request, basic: basic) } + + context 'when basic param is truthy' do + let(:basic) { true } + + it 'calls super class #represent with correct params' do + expect_any_instance_of(BaseSerializer).to receive(:represent) + .with(merge_request, opts, MergeRequestBasicEntity) + + subject + end + end + + context 'when basic param is falsy' do + let(:basic) { false } + + it 'calls super class #represent with correct params' do + expect_any_instance_of(BaseSerializer).to receive(:represent) + .with(merge_request, opts, MergeRequestEntity) + + subject + end + end + end +end diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb index 93d5a21419d..d2482ac434b 100644 --- a/spec/serializers/pipeline_entity_spec.rb +++ b/spec/serializers/pipeline_entity_spec.rb @@ -5,7 +5,7 @@ describe PipelineEntity do let(:request) { double('request') } before do - allow(request).to receive(:user).and_return(user) + allow(request).to receive(:current_user).and_return(user) end let(:entity) do @@ -19,7 +19,7 @@ describe PipelineEntity do let(:pipeline) { create(:ci_empty_pipeline) } it 'contains required fields' do - expect(subject).to include :id, :user, :path + expect(subject).to include :id, :user, :path, :coverage expect(subject).to include :ref, :commit expect(subject).to include :updated_at, :created_at end diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb index 
8642b803844..f2426db6d81 100644 --- a/spec/serializers/pipeline_serializer_spec.rb +++ b/spec/serializers/pipeline_serializer_spec.rb @@ -4,7 +4,7 @@ describe PipelineSerializer do let(:user) { create(:user) } let(:serializer) do - described_class.new(user: user) + described_class.new(current_user: user) end subject { serializer.represent(resource) } @@ -44,7 +44,7 @@ describe PipelineSerializer do end let(:serializer) do - described_class.new(user: user) + described_class.new(current_user: user) .with_pagination(request, response) end @@ -93,6 +93,44 @@ describe PipelineSerializer do end end end + + context 'number of queries' do + let(:resource) { Ci::Pipeline.all } + let(:project) { create(:empty_project) } + + before do + Ci::Pipeline::AVAILABLE_STATUSES.each do |status| + create_pipeline(status) + end + + RequestStore.begin! + end + + after do + RequestStore.end! + RequestStore.clear! + end + + it "verifies number of queries" do + recorded = ActiveRecord::QueryRecorder.new { subject } + expect(recorded.count).to be_within(1).of(58) + expect(recorded.cached_count).to eq(0) + end + + def create_pipeline(status) + create(:ci_empty_pipeline, project: project, status: status).tap do |pipeline| + Ci::Build::AVAILABLE_STATUSES.each do |status| + create_build(pipeline, status, status) + end + end + end + + def create_build(pipeline, stage, status) + create(:ci_build, :tags, :triggered, :artifacts, + pipeline: pipeline, stage: stage, + name: stage, status: status) + end + end end describe '#represent_status' do @@ -106,7 +144,7 @@ describe PipelineSerializer do expect(subject[:text]).to eq(status.text) expect(subject[:label]).to eq(status.label) expect(subject[:icon]).to eq(status.icon) - expect(subject[:favicon]).to eq(status.favicon) + expect(subject[:favicon]).to eq("/assets/ci_favicons/#{status.favicon}.ico") end end end diff --git a/spec/serializers/stage_entity_spec.rb b/spec/serializers/stage_entity_spec.rb index 4ab40d08432..64b3217b809 100644 --- a/spec/serializers/stage_entity_spec.rb +++ b/spec/serializers/stage_entity_spec.rb @@ -14,7 +14,7 @@ describe StageEntity do end before do - allow(request).to receive(:user).and_return(user) + allow(request).to receive(:current_user).and_return(user) create(:ci_build, :success, pipeline: pipeline) end @@ -47,5 +47,13 @@ describe StageEntity do it 'contains stage title' do expect(subject[:title]).to eq 'test: passed' end + + context 'when the jobs should be grouped' do + let(:entity) { described_class.new(stage, request: request, grouped: true) } + + it 'exposes the group key' do + expect(subject).to include :groups + end + end end end diff --git a/spec/serializers/status_entity_spec.rb b/spec/serializers/status_entity_spec.rb index c94902dbab8..3964b998084 100644 --- a/spec/serializers/status_entity_spec.rb +++ b/spec/serializers/status_entity_spec.rb @@ -18,6 +18,12 @@ describe StatusEntity do it 'contains status details' do expect(subject).to include :text, :icon, :favicon, :label, :group expect(subject).to include :has_details, :details_path + expect(subject[:favicon]).to eq('/assets/ci_favicons/favicon_status_success.ico') + end + + it 'contains a dev namespaced favicon if dev env' do + allow(Rails.env).to receive(:development?) 
{ true } + expect(entity.as_json[:favicon]).to eq('/assets/ci_favicons/dev/favicon_status_success.ico') end end end diff --git a/spec/services/auth/container_registry_authentication_service_spec.rb b/spec/services/auth/container_registry_authentication_service_spec.rb index b91234ddb1e..e273dfe1552 100644 --- a/spec/services/auth/container_registry_authentication_service_spec.rb +++ b/spec/services/auth/container_registry_authentication_service_spec.rb @@ -6,14 +6,15 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do let(:current_params) { {} } let(:rsa_key) { OpenSSL::PKey::RSA.generate(512) } let(:payload) { JWT.decode(subject[:token], rsa_key).first } + let(:authentication_abilities) do - [ - :read_container_image, - :create_container_image - ] + [:read_container_image, :create_container_image] end - subject { described_class.new(current_project, current_user, current_params).execute(authentication_abilities: authentication_abilities) } + subject do + described_class.new(current_project, current_user, current_params) + .execute(authentication_abilities: authentication_abilities) + end before do allow(Gitlab.config.registry).to receive_messages(enabled: true, issuer: 'rspec', key: nil) @@ -40,13 +41,11 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end end - shared_examples 'a accessible' do + shared_examples 'an accessible' do let(:access) do - [{ - 'type' => 'repository', + [{ 'type' => 'repository', 'name' => project.path_with_namespace, - 'actions' => actions, - }] + 'actions' => actions }] end it_behaves_like 'a valid token' @@ -59,19 +58,19 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end shared_examples 'a pullable' do - it_behaves_like 'a accessible' do + it_behaves_like 'an accessible' do let(:actions) { ['pull'] } end end shared_examples 'a pushable' do - it_behaves_like 'a accessible' do + it_behaves_like 'an accessible' do let(:actions) { ['push'] } end end shared_examples 'a pullable and pushable' do - it_behaves_like 'a accessible' do + it_behaves_like 'an accessible' do let(:actions) { %w(pull push) } end end @@ -81,15 +80,30 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do it { is_expected.not_to include(:token) } end + shared_examples 'container repository factory' do + it 'creates a new container repository resource' do + expect { subject } + .to change { project.container_repositories.count }.by(1) + end + end + + shared_examples 'not a container repository factory' do + it 'does not create a new container repository resource' do + expect { subject }.not_to change { ContainerRepository.count } + end + end + describe '#full_access_token' do let(:project) { create(:empty_project) } let(:token) { described_class.full_access_token(project.path_with_namespace) } subject { { token: token } } - it_behaves_like 'a accessible' do + it_behaves_like 'an accessible' do let(:actions) { ['*'] } end + + it_behaves_like 'not a container repository factory' end context 'user authorization' do @@ -110,16 +124,20 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'a pushable' + it_behaves_like 'container repository factory' end context 'allow reporter to pull images' do before { project.team << [current_user, :reporter] } - let(:current_params) do - { scope: "repository:#{project.path_with_namespace}:pull" } - end + context 'when pulling from root level repository' do + let(:current_params) do + { scope: 
"repository:#{project.path_with_namespace}:pull" } + end - it_behaves_like 'a pullable' + it_behaves_like 'a pullable' + it_behaves_like 'not a container repository factory' + end end context 'return a least of privileges' do @@ -130,6 +148,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'a pullable' + it_behaves_like 'not a container repository factory' end context 'disallow guest to pull or push images' do @@ -140,6 +159,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'an inaccessible' + it_behaves_like 'not a container repository factory' end end @@ -152,6 +172,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'a pullable' + it_behaves_like 'not a container repository factory' end context 'disallow anyone to push images' do @@ -160,6 +181,16 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'an inaccessible' + it_behaves_like 'not a container repository factory' + end + + context 'when repository name is invalid' do + let(:current_params) do + { scope: 'repository:invalid:push' } + end + + it_behaves_like 'an inaccessible' + it_behaves_like 'not a container repository factory' end end @@ -173,6 +204,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'a pullable' + it_behaves_like 'not a container repository factory' end context 'disallow anyone to push images' do @@ -181,6 +213,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'an inaccessible' + it_behaves_like 'not a container repository factory' end end @@ -191,6 +224,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'an inaccessible' + it_behaves_like 'not a container repository factory' end end end @@ -198,11 +232,9 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do context 'build authorized as user' do let(:current_project) { create(:empty_project) } let(:current_user) { create(:user) } + let(:authentication_abilities) do - [ - :build_read_container_image, - :build_create_container_image - ] + [:build_read_container_image, :build_create_container_image] end before do @@ -219,6 +251,10 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do it_behaves_like 'a pullable and pushable' do let(:project) { current_project } end + + it_behaves_like 'container repository factory' do + let(:project) { current_project } + end end context 'for other projects' do @@ -231,11 +267,13 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do let(:project) { create(:empty_project, :public) } it_behaves_like 'a pullable' + it_behaves_like 'not a container repository factory' end shared_examples 'pullable for being team member' do context 'when you are not member' do it_behaves_like 'an inaccessible' + it_behaves_like 'not a container repository factory' end context 'when you are member' do @@ -244,12 +282,14 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'a pullable' + it_behaves_like 'not a container repository factory' end context 'when you are owner' do let(:project) { create(:empty_project, namespace: current_user.namespace) } it_behaves_like 'a pullable' + it_behaves_like 'not a container repository factory' end end @@ -263,6 +303,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: 
true do context 'when you are not member' do it_behaves_like 'an inaccessible' + it_behaves_like 'not a container repository factory' end context 'when you are member' do @@ -271,12 +312,14 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'a pullable' + it_behaves_like 'not a container repository factory' end context 'when you are owner' do let(:project) { create(:empty_project, namespace: current_user.namespace) } it_behaves_like 'a pullable' + it_behaves_like 'not a container repository factory' end end end @@ -296,12 +339,14 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'an inaccessible' + it_behaves_like 'not a container repository factory' end context 'when you are owner' do let(:project) { create(:empty_project, :public, namespace: current_user.namespace) } it_behaves_like 'an inaccessible' + it_behaves_like 'not a container repository factory' end end end @@ -318,6 +363,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'an inaccessible' + it_behaves_like 'not a container repository factory' end end end @@ -325,6 +371,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do context 'unauthorized' do context 'disallow to use scope-less authentication' do it_behaves_like 'a forbidden' + it_behaves_like 'not a container repository factory' end context 'for invalid scope' do @@ -333,6 +380,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'a forbidden' + it_behaves_like 'not a container repository factory' end context 'for private project' do @@ -354,6 +402,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'a pullable' + it_behaves_like 'not a container repository factory' end context 'when pushing' do @@ -362,6 +411,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do end it_behaves_like 'a forbidden' + it_behaves_like 'not a container repository factory' end end end diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb index d2f0337c260..b536103ed65 100644 --- a/spec/services/ci/create_pipeline_service_spec.rb +++ b/spec/services/ci/create_pipeline_service_spec.rb @@ -9,72 +9,178 @@ describe Ci::CreatePipelineService, services: true do end describe '#execute' do - def execute(params) + def execute_service(after: project.commit.id, message: 'Message', ref: 'refs/heads/master') + params = { ref: ref, + before: '00000000', + after: after, + commits: [{ message: message }] } + described_class.new(project, user, params).execute end context 'valid params' do - let(:pipeline) do - execute(ref: 'refs/heads/master', - before: '00000000', - after: project.commit.id, - commits: [{ message: "Message" }]) - end - - it { expect(pipeline).to be_kind_of(Ci::Pipeline) } - it { expect(pipeline).to be_valid } - it { expect(pipeline).to be_persisted } - it { expect(pipeline).to eq(project.pipelines.last) } - it { expect(pipeline).to have_attributes(user: user) } - it { expect(pipeline.builds.first).to be_kind_of(Ci::Build) } + let(:pipeline) { execute_service } + + let(:pipeline_on_previous_commit) do + execute_service( + after: previous_commit_sha_from_ref('master') + ) + end + + it 'creates a pipeline' do + expect(pipeline).to be_kind_of(Ci::Pipeline) + expect(pipeline).to be_valid + expect(pipeline).to eq(project.pipelines.last) + expect(pipeline).to 
have_attributes(user: user) + expect(pipeline).to have_attributes(status: 'pending') + expect(pipeline.builds.first).to be_kind_of(Ci::Build) + end + + context '#update_merge_requests_head_pipeline' do + it 'updates head pipeline of each merge request' do + merge_request_1 = create(:merge_request, source_branch: 'master', target_branch: "branch_1", source_project: project) + merge_request_2 = create(:merge_request, source_branch: 'master', target_branch: "branch_2", source_project: project) + + head_pipeline = pipeline + + expect(merge_request_1.reload.head_pipeline).to eq(head_pipeline) + expect(merge_request_2.reload.head_pipeline).to eq(head_pipeline) + end + + context 'when there is no pipeline for source branch' do + it "does not update merge request head pipeline" do + merge_request = create(:merge_request, source_branch: 'other_branch', target_branch: "branch_1", source_project: project) + + head_pipeline = pipeline + + expect(merge_request.reload.head_pipeline).not_to eq(head_pipeline) + end + end + + context 'when merge request target project is different from source project' do + let!(:target_project) { create(:empty_project) } + let!(:forked_project_link) { create(:forked_project_link, forked_to_project: project, forked_from_project: target_project) } + + it 'updates head pipeline for merge request' do + merge_request = + create(:merge_request, source_branch: 'master', target_branch: "branch_1", source_project: project, target_project: target_project) + + head_pipeline = pipeline + + expect(merge_request.reload.head_pipeline).to eq(head_pipeline) + end + end + end + + context 'auto-cancel enabled' do + before do + project.update(auto_cancel_pending_pipelines: 'enabled') + end + + it 'does not cancel HEAD pipeline' do + pipeline + pipeline_on_previous_commit + + expect(pipeline.reload).to have_attributes(status: 'pending', auto_canceled_by_id: nil) + end + + it 'auto cancel pending non-HEAD pipelines' do + pipeline_on_previous_commit + pipeline + + expect(pipeline_on_previous_commit.reload).to have_attributes(status: 'canceled', auto_canceled_by_id: pipeline.id) + end + + it 'does not cancel running outdated pipelines' do + pipeline_on_previous_commit.run + execute_service + + expect(pipeline_on_previous_commit.reload).to have_attributes(status: 'running', auto_canceled_by_id: nil) + end + + it 'cancel created outdated pipelines' do + pipeline_on_previous_commit.update(status: 'created') + pipeline + + expect(pipeline_on_previous_commit.reload).to have_attributes(status: 'canceled', auto_canceled_by_id: pipeline.id) + end + + it 'does not cancel pipelines from the other branches' do + pending_pipeline = execute_service( + ref: 'refs/heads/feature', + after: previous_commit_sha_from_ref('feature') + ) + pipeline + + expect(pending_pipeline.reload).to have_attributes(status: 'pending', auto_canceled_by_id: nil) + end + end + + context 'auto-cancel disabled' do + before do + project.update(auto_cancel_pending_pipelines: 'disabled') + end + + it 'does not auto cancel pending non-HEAD pipelines' do + pipeline_on_previous_commit + pipeline + + expect(pipeline_on_previous_commit.reload) + .to have_attributes(status: 'pending', auto_canceled_by_id: nil) + end + end + + def previous_commit_sha_from_ref(ref) + project.commit(ref).parent.sha + end end context "skip tag if there is no build for it" do it "creates commit if there is appropriate job" do - result = execute(ref: 'refs/heads/master', - before: '00000000', - after: project.commit.id, - commits: [{ message: "Message" }]) - 
expect(result).to be_persisted + expect(execute_service).to be_persisted end it "creates commit if there is no appropriate job but deploy job has right ref setting" do config = YAML.dump({ deploy: { script: "ls", only: ["master"] } }) stub_ci_pipeline_yaml_file(config) - result = execute(ref: 'refs/heads/master', - before: '00000000', - after: project.commit.id, - commits: [{ message: "Message" }]) - expect(result).to be_persisted + expect(execute_service).to be_persisted end end it 'skips creating pipeline for refs without .gitlab-ci.yml' do stub_ci_pipeline_yaml_file(nil) - result = execute(ref: 'refs/heads/master', - before: '00000000', - after: project.commit.id, - commits: [{ message: 'Message' }]) - expect(result).not_to be_persisted + expect(execute_service).not_to be_persisted expect(Ci::Pipeline.count).to eq(0) end - it 'fails commits if yaml is invalid' do - message = 'message' - allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { message } - stub_ci_pipeline_yaml_file('invalid: file: file') - commits = [{ message: message }] - pipeline = execute(ref: 'refs/heads/master', - before: '00000000', - after: project.commit.id, - commits: commits) - - expect(pipeline).to be_persisted - expect(pipeline.builds.any?).to be false - expect(pipeline.status).to eq('failed') - expect(pipeline.yaml_errors).not_to be_nil + shared_examples 'a failed pipeline' do + it 'creates failed pipeline' do + stub_ci_pipeline_yaml_file(ci_yaml) + + pipeline = execute_service(message: message) + + expect(pipeline).to be_persisted + expect(pipeline.builds.any?).to be false + expect(pipeline.status).to eq('failed') + expect(pipeline.yaml_errors).not_to be_nil + end + end + + context 'when yaml is invalid' do + let(:ci_yaml) { 'invalid: file: fiile' } + let(:message) { 'Message' } + + it_behaves_like 'a failed pipeline' + + context 'when receive git commit' do + before do + allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { message } + end + + it_behaves_like 'a failed pipeline' + end end context 'when commit contains a [ci skip] directive' do @@ -97,11 +203,7 @@ describe Ci::CreatePipelineService, services: true do ci_messages.each do |ci_message| it "skips builds creation if the commit message is #{ci_message}" do - commits = [{ message: ci_message }] - pipeline = execute(ref: 'refs/heads/master', - before: '00000000', - after: project.commit.id, - commits: commits) + pipeline = execute_service(message: ci_message) expect(pipeline).to be_persisted expect(pipeline.builds.any?).to be false @@ -109,58 +211,34 @@ describe Ci::CreatePipelineService, services: true do end end - it "does not skips builds creation if there is no [ci skip] or [skip ci] tag in commit message" do - allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { "some message" } + shared_examples 'creating a pipeline' do + it 'does not skip pipeline creation' do + allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { commit_message } - commits = [{ message: "some message" }] - pipeline = execute(ref: 'refs/heads/master', - before: '00000000', - after: project.commit.id, - commits: commits) + pipeline = execute_service(message: commit_message) - expect(pipeline).to be_persisted - expect(pipeline.builds.first.name).to eq("rspec") + expect(pipeline).to be_persisted + expect(pipeline.builds.first.name).to eq("rspec") + end end - it "does not skip builds creation if the commit message is nil" do - allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { nil } + context 
'when commit message does not contain [ci skip] nor [skip ci]' do + let(:commit_message) { 'some message' } - commits = [{ message: nil }] - pipeline = execute(ref: 'refs/heads/master', - before: '00000000', - after: project.commit.id, - commits: commits) - - expect(pipeline).to be_persisted - expect(pipeline.builds.first.name).to eq("rspec") + it_behaves_like 'creating a pipeline' end - it "fails builds creation if there is [ci skip] tag in commit message and yaml is invalid" do - stub_ci_pipeline_yaml_file('invalid: file: fiile') - commits = [{ message: message }] - pipeline = execute(ref: 'refs/heads/master', - before: '00000000', - after: project.commit.id, - commits: commits) + context 'when commit message is nil' do + let(:commit_message) { nil } - expect(pipeline).to be_persisted - expect(pipeline.builds.any?).to be false - expect(pipeline.status).to eq("failed") - expect(pipeline.yaml_errors).not_to be_nil + it_behaves_like 'creating a pipeline' end - end - it "creates commit with failed status if yaml is invalid" do - stub_ci_pipeline_yaml_file('invalid: file') - commits = [{ message: "some message" }] - pipeline = execute(ref: 'refs/heads/master', - before: '00000000', - after: project.commit.id, - commits: commits) - - expect(pipeline).to be_persisted - expect(pipeline.status).to eq("failed") - expect(pipeline.builds.any?).to be false + context 'when there is [ci skip] tag in commit message and yaml is invalid' do + let(:ci_yaml) { 'invalid: file: fiile' } + + it_behaves_like 'a failed pipeline' + end end context 'when there are no jobs for this pipeline' do @@ -170,10 +248,7 @@ describe Ci::CreatePipelineService, services: true do end it 'does not create a new pipeline' do - result = execute(ref: 'refs/heads/master', - before: '00000000', - after: project.commit.id, - commits: [{ message: 'some msg' }]) + result = execute_service expect(result).not_to be_persisted expect(Ci::Build.all).to be_empty @@ -188,10 +263,7 @@ describe Ci::CreatePipelineService, services: true do end it 'does not create a new pipeline' do - result = execute(ref: 'refs/heads/master', - before: '00000000', - after: project.commit.id, - commits: [{ message: 'some msg' }]) + result = execute_service expect(result).to be_persisted expect(result.manual_actions).not_to be_empty @@ -205,10 +277,7 @@ describe Ci::CreatePipelineService, services: true do end it 'creates the environment' do - result = execute(ref: 'refs/heads/master', - before: '00000000', - after: project.commit.id, - commits: [{ message: 'some msg' }]) + result = execute_service expect(result).to be_persisted expect(Environment.find_by(name: "review/master")).not_to be_nil diff --git a/spec/services/ci/play_build_service_spec.rb b/spec/services/ci/play_build_service_spec.rb new file mode 100644 index 00000000000..d6f9fa42045 --- /dev/null +++ b/spec/services/ci/play_build_service_spec.rb @@ -0,0 +1,105 @@ +require 'spec_helper' + +describe Ci::PlayBuildService, '#execute', :services do + let(:user) { create(:user) } + let(:project) { create(:empty_project) } + let(:pipeline) { create(:ci_pipeline, project: project) } + let(:build) { create(:ci_build, :manual, pipeline: pipeline) } + + let(:service) do + described_class.new(project, user) + end + + context 'when project does not have repository yet' do + let(:project) { create(:empty_project) } + + it 'allows user with master role to play build' do + project.add_master(user) + + service.execute(build) + + expect(build.reload).to be_pending + end + + it 'does not allow user with developer role to 
play build' do + project.add_developer(user) + + expect { service.execute(build) } + .to raise_error Gitlab::Access::AccessDeniedError + end + end + + context 'when project has repository' do + let(:project) { create(:project) } + + it 'allows user with developer role to play a build' do + project.add_developer(user) + + service.execute(build) + + expect(build.reload).to be_pending + end + end + + context 'when build is a playable manual action' do + let(:build) { create(:ci_build, :manual, pipeline: pipeline) } + + before do + project.add_master(user) + end + + it 'enqueues the build' do + expect(service.execute(build)).to eq build + expect(build.reload).to be_pending + end + + it 'reassignes build user correctly' do + service.execute(build) + + expect(build.reload.user).to eq user + end + end + + context 'when build is not a playable manual action' do + let(:build) { create(:ci_build, when: :manual, pipeline: pipeline) } + + before do + project.add_master(user) + end + + it 'duplicates the build' do + duplicate = service.execute(build) + + expect(duplicate).not_to eq build + expect(duplicate).to be_pending + end + + it 'assigns users correctly' do + duplicate = service.execute(build) + + expect(build.user).not_to eq user + expect(duplicate.user).to eq user + end + end + + context 'when build is not action' do + let(:build) { create(:ci_build, :success, pipeline: pipeline) } + + it 'raises an error' do + expect { service.execute(build) } + .to raise_error Gitlab::Access::AccessDeniedError + end + end + + context 'when user does not have ability to trigger action' do + before do + create(:protected_branch, :no_one_can_push, + name: build.ref, project: project) + end + + it 'raises an error' do + expect { service.execute(build) } + .to raise_error Gitlab::Access::AccessDeniedError + end + end +end diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb index d93616c4f50..fc5de5d069a 100644 --- a/spec/services/ci/process_pipeline_service_spec.rb +++ b/spec/services/ci/process_pipeline_service_spec.rb @@ -268,6 +268,24 @@ describe Ci::ProcessPipelineService, '#execute', :services do end end + context 'when there are only manual actions in stages' do + before do + create_build('image', stage_idx: 0, when: 'manual', allow_failure: true) + create_build('build', stage_idx: 1, when: 'manual', allow_failure: true) + create_build('deploy', stage_idx: 2, when: 'manual') + create_build('check', stage_idx: 3) + + process_pipeline + end + + it 'processes all jobs until blocking actions encountered' do + expect(all_builds_statuses).to eq(%w[manual manual manual created]) + expect(all_builds_names).to eq(%w[image build deploy check]) + + expect(pipeline.reload).to be_blocked + end + end + context 'when blocking manual actions are defined' do before do create_build('code:test', stage_idx: 0) @@ -314,6 +332,13 @@ describe Ci::ProcessPipelineService, '#execute', :services do end context 'when pipeline is promoted sequentially up to the end' do + before do + # We are using create(:empty_project), and users has to be master in + # order to execute manual action when repository does not exist. 
+ # + project.add_master(user) + end + it 'properly processes entire pipeline' do process_pipeline @@ -418,62 +443,18 @@ describe Ci::ProcessPipelineService, '#execute', :services do end end - context 'when there are builds that are not created yet' do - let(:pipeline) do - create(:ci_pipeline, config: config) - end - - let(:config) do - { rspec: { stage: 'test', script: 'rspec' }, - deploy: { stage: 'deploy', script: 'rsync' } } - end - - before do - create_build('linux', stage: 'build', stage_idx: 0) - create_build('mac', stage: 'build', stage_idx: 0) - end + context 'updates a list of retried builds' do + subject { described_class.retried.order(:id) } - it 'processes the pipeline' do - # Currently we have five builds with state created - # - expect(builds.count).to eq(0) - expect(all_builds.count).to eq(2) + let!(:build_retried) { create_build('build') } + let!(:build) { create_build('build') } + let!(:test) { create_build('test') } - # Process builds service will enqueue builds from the first stage. - # + it 'returns unique statuses' do process_pipeline - expect(builds.count).to eq(2) - expect(all_builds.count).to eq(2) - - # When builds succeed we will enqueue remaining builds. - # - # We will have 2 succeeded, 1 pending (from stage test), total 4 (two - # additional build from `.gitlab-ci.yml`). - # - succeed_pending - process_pipeline - - expect(builds.success.count).to eq(2) - expect(builds.pending.count).to eq(1) - expect(all_builds.count).to eq(4) - - # When pending merge_when_pipeline_succeeds in stage test, we enqueue deploy stage. - # - succeed_pending - process_pipeline - - expect(builds.pending.count).to eq(1) - expect(builds.success.count).to eq(3) - expect(all_builds.count).to eq(4) - - # When the last one succeeds we have 4 successful builds. 
- # - succeed_pending - process_pipeline - - expect(builds.success.count).to eq(4) - expect(all_builds.count).to eq(4) + expect(all_builds.latest).to contain_exactly(build, test) + expect(all_builds.retried).to contain_exactly(build_retried) end end @@ -493,6 +474,10 @@ describe Ci::ProcessPipelineService, '#execute', :services do builds.pluck(:name) end + def all_builds_names + all_builds.pluck(:name) + end + def builds_statuses builds.pluck(:status) end @@ -521,7 +506,9 @@ describe Ci::ProcessPipelineService, '#execute', :services do builds.find_by(name: name).play(user) end - delegate :manual_actions, to: :pipeline + def manual_actions + pipeline.manual_actions(true) + end def create_build(name, **opts) create(:ci_build, :created, pipeline: pipeline, name: name, **opts) diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb index 8567817147b..7254e6b357a 100644 --- a/spec/services/ci/retry_build_service_spec.rb +++ b/spec/services/ci/retry_build_service_spec.rb @@ -16,20 +16,21 @@ describe Ci::RetryBuildService, :services do %i[id status user token coverage trace runner artifacts_expire_at artifacts_file artifacts_metadata artifacts_size created_at updated_at started_at finished_at queued_at erased_by - erased_at].freeze + erased_at auto_canceled_by].freeze IGNORE_ACCESSORS = %i[type lock_version target_url base_tags commit_id deployments erased_by_id last_deployment project_id runner_id tag_taggings taggings tags trigger_request_id - user_id].freeze + user_id auto_canceled_by_id retried].freeze shared_examples 'build duplication' do let(:build) do create(:ci_build, :failed, :artifacts_expired, :erased, :queued, :coverage, :tags, :allowed_to_fail, :on_tag, :teardown_environment, :triggered, :trace, - description: 'some build', pipeline: pipeline) + description: 'some build', pipeline: pipeline, + auto_canceled_by: create(:ci_empty_pipeline)) end describe 'clone accessors' do @@ -114,7 +115,7 @@ describe Ci::RetryBuildService, :services do end describe '#reprocess' do - let(:new_build) { service.reprocess(build) } + let(:new_build) { service.reprocess!(build) } context 'when user has ability to execute build' do before do @@ -130,11 +131,16 @@ describe Ci::RetryBuildService, :services do it 'does not enqueue the new build' do expect(new_build).to be_created end + + it 'does mark old build as retried' do + expect(new_build).to be_latest + expect(build.reload).to be_retried + end end context 'when user does not have ability to execute build' do it 'raises an error' do - expect { service.reprocess(build) } + expect { service.reprocess!(build) } .to raise_error Gitlab::Access::AccessDeniedError end end diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb index f1b2d3a4798..d941d56c0d8 100644 --- a/spec/services/ci/retry_pipeline_service_spec.rb +++ b/spec/services/ci/retry_pipeline_service_spec.rb @@ -7,11 +7,13 @@ describe Ci::RetryPipelineService, '#execute', :services do let(:service) { described_class.new(project, user) } context 'when user has ability to modify pipeline' do - let(:user) { create(:admin) } + before do + project.add_master(user) + end context 'when there are already retried jobs present' do before do - create_build('rspec', :canceled, 0) + create_build('rspec', :canceled, 0, retried: true) create_build('rspec', :failed, 0) end @@ -227,6 +229,46 @@ describe Ci::RetryPipelineService, '#execute', :services do end end + context 'when user is not allowed to trigger 
manual action' do + before do + project.add_developer(user) + end + + context 'when there is a failed manual action present' do + before do + create_build('test', :failed, 0) + create_build('deploy', :failed, 0, when: :manual) + create_build('verify', :canceled, 1) + end + + it 'does not reprocess manual action' do + service.execute(pipeline) + + expect(build('test')).to be_pending + expect(build('deploy')).to be_failed + expect(build('verify')).to be_created + expect(pipeline.reload).to be_running + end + end + + context 'when there is a failed manual action in later stage' do + before do + create_build('test', :failed, 0) + create_build('deploy', :failed, 1, when: :manual) + create_build('verify', :canceled, 2) + end + + it 'does not reprocess manual action' do + service.execute(pipeline) + + expect(build('test')).to be_pending + expect(build('deploy')).to be_failed + expect(build('verify')).to be_created + expect(pipeline.reload).to be_running + end + end + end + def statuses pipeline.reload.statuses end diff --git a/spec/services/ci/stop_environments_service_spec.rb b/spec/services/ci/stop_environments_service_spec.rb index 32c72a9cf5e..98044ad232e 100644 --- a/spec/services/ci/stop_environments_service_spec.rb +++ b/spec/services/ci/stop_environments_service_spec.rb @@ -55,8 +55,22 @@ describe Ci::StopEnvironmentsService, services: true do end context 'when user does not have permission to stop environment' do + context 'when user has no access to manage deployments' do + before do + project.team << [user, :guest] + end + + it 'does not stop environment' do + expect_environment_not_stopped_on('master') + end + end + end + + context 'when branch for stop action is protected' do before do - project.team << [user, :guest] + project.add_developer(user) + create(:protected_branch, :no_one_can_push, + name: 'master', project: project) end it 'does not stop environment' do diff --git a/spec/services/cohorts_service_spec.rb b/spec/services/cohorts_service_spec.rb new file mode 100644 index 00000000000..77595d7ba2d --- /dev/null +++ b/spec/services/cohorts_service_spec.rb @@ -0,0 +1,99 @@ +require 'spec_helper' + +describe CohortsService do + describe '#execute' do + def month_start(months_ago) + months_ago.months.ago.beginning_of_month.to_date + end + + # In the interests of speed and clarity, this example has minimal data. 
+ it 'returns a list of user cohorts' do + 6.times do |months_ago| + months_ago_time = (months_ago * 2).months.ago + + create(:user, created_at: months_ago_time, last_activity_on: Time.now) + create(:user, created_at: months_ago_time, last_activity_on: months_ago_time) + end + + create(:user) # this user is inactive and belongs to the current month + + expected_cohorts = [ + { + registration_month: month_start(11), + activity_months: Array.new(12) { { total: 0, percentage: 0 } }, + total: 0, + inactive: 0 + }, + { + registration_month: month_start(10), + activity_months: [{ total: 2, percentage: 100 }] + Array.new(10) { { total: 1, percentage: 50 } }, + total: 2, + inactive: 0 + }, + { + registration_month: month_start(9), + activity_months: Array.new(10) { { total: 0, percentage: 0 } }, + total: 0, + inactive: 0 + }, + { + registration_month: month_start(8), + activity_months: [{ total: 2, percentage: 100 }] + Array.new(8) { { total: 1, percentage: 50 } }, + total: 2, + inactive: 0 + }, + { + registration_month: month_start(7), + activity_months: Array.new(8) { { total: 0, percentage: 0 } }, + total: 0, + inactive: 0 + }, + { + registration_month: month_start(6), + activity_months: [{ total: 2, percentage: 100 }] + Array.new(6) { { total: 1, percentage: 50 } }, + total: 2, + inactive: 0 + }, + { + registration_month: month_start(5), + activity_months: Array.new(6) { { total: 0, percentage: 0 } }, + total: 0, + inactive: 0 + }, + { + registration_month: month_start(4), + activity_months: [{ total: 2, percentage: 100 }] + Array.new(4) { { total: 1, percentage: 50 } }, + total: 2, + inactive: 0 + }, + { + registration_month: month_start(3), + activity_months: Array.new(4) { { total: 0, percentage: 0 } }, + total: 0, + inactive: 0 + }, + { + registration_month: month_start(2), + activity_months: [{ total: 2, percentage: 100 }] + Array.new(2) { { total: 1, percentage: 50 } }, + total: 2, + inactive: 0 + }, + { + registration_month: month_start(1), + activity_months: Array.new(2) { { total: 0, percentage: 0 } }, + total: 0, + inactive: 0 + }, + { + registration_month: month_start(0), + activity_months: [{ total: 2, percentage: 100 }], + total: 2, + inactive: 1 + } + ] + + expect(described_class.new.execute).to eq(months_included: 12, + cohorts: expected_cohorts) + end + end +end diff --git a/spec/services/create_deployment_service_spec.rb b/spec/services/create_deployment_service_spec.rb index a883705bd45..f35d7a33548 100644 --- a/spec/services/create_deployment_service_spec.rb +++ b/spec/services/create_deployment_service_spec.rb @@ -255,7 +255,7 @@ describe CreateDeploymentService, services: true do environment: 'production', ref: 'master', tag: false, - sha: '97de212e80737a608d939f648d959671fb0a0142b', + sha: '97de212e80737a608d939f648d959671fb0a0142b' } end diff --git a/spec/services/delete_merged_branches_service_spec.rb b/spec/services/delete_merged_branches_service_spec.rb index a41a421fa6e..cae74df9c90 100644 --- a/spec/services/delete_merged_branches_service_spec.rb +++ b/spec/services/delete_merged_branches_service_spec.rb @@ -6,33 +6,22 @@ describe DeleteMergedBranchesService, services: true do let(:project) { create(:project, :repository) } context '#execute' do - context 'unprotected branches' do - before do - service.execute - end + it 'deletes a branch that was merged' do + service.execute - it 'deletes a branch that was merged' do - expect(project.repository.branch_names).not_to include('improve/awesome') - end + expect(project.repository.branch_names).not_to 
include('improve/awesome') + end - it 'keeps branch that is unmerged' do - expect(project.repository.branch_names).to include('feature') - end + it 'keeps branch that is unmerged' do + service.execute - it 'keeps "master"' do - expect(project.repository.branch_names).to include('master') - end + expect(project.repository.branch_names).to include('feature') end - context 'protected branches' do - before do - create(:protected_branch, name: 'improve/awesome', project: project) - service.execute - end + it 'keeps "master"' do + service.execute - it 'keeps protected branch' do - expect(project.repository.branch_names).to include('improve/awesome') - end + expect(project.repository.branch_names).to include('master') end context 'user without rights' do @@ -42,6 +31,19 @@ describe DeleteMergedBranchesService, services: true do expect { described_class.new(project, user).execute }.to raise_error(Gitlab::Access::AccessDeniedError) end end + + context 'open merge requests' do + it 'does not delete branches from open merge requests' do + fork_link = create(:forked_project_link, forked_from_project: project) + create(:merge_request, :reopened, source_project: project, target_project: project, source_branch: 'branch-merged', target_branch: 'master') + create(:merge_request, :opened, source_project: fork_link.forked_to_project, target_project: project, target_branch: 'improve/awesome', source_branch: 'master') + + service.execute + + expect(project.repository.branch_names).to include('branch-merged') + expect(project.repository.branch_names).to include('improve/awesome') + end + end end context '#async_execute' do diff --git a/spec/services/discussions/resolve_service_spec.rb b/spec/services/discussions/resolve_service_spec.rb index 12c3cdf28c6..ab8df7b74cd 100644 --- a/spec/services/discussions/resolve_service_spec.rb +++ b/spec/services/discussions/resolve_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe Discussions::ResolveService do describe '#execute' do - let(:discussion) { Discussion.for_diff_notes([create(:diff_note_on_merge_request)]).first } + let(:discussion) { create(:diff_note_on_merge_request).to_discussion } let(:project) { merge_request.project } let(:merge_request) { discussion.noteable } let(:user) { create(:user) } @@ -41,7 +41,7 @@ describe Discussions::ResolveService do end it 'can resolve multiple discussions at once' do - other_discussion = Discussion.for_diff_notes([create(:diff_note_on_merge_request, noteable: discussion.noteable, project: discussion.noteable.source_project)]).first + other_discussion = create(:diff_note_on_merge_request, noteable: discussion.noteable, project: discussion.noteable.source_project).to_discussion service.execute([discussion, other_discussion]) diff --git a/spec/services/event_create_service_spec.rb b/spec/services/event_create_service_spec.rb index f2c2009bcbf..b06cefe071d 100644 --- a/spec/services/event_create_service_spec.rb +++ b/spec/services/event_create_service_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' describe EventCreateService, services: true do + include UserActivitiesHelpers + let(:service) { EventCreateService.new } describe 'Issues' do @@ -111,6 +113,19 @@ describe EventCreateService, services: true do end end + describe '#push', :redis do + let(:project) { create(:empty_project) } + let(:user) { create(:user) } + + it 'creates a new event' do + expect { service.push(project, user, {}) }.to change { Event.count } + end + + it 'updates user last activity' do + expect { service.push(project, user, {}) }.to change { 
user_activity(user) } + end + end + describe 'Project' do let(:user) { create :user } let(:project) { create(:empty_project) } diff --git a/spec/services/files/update_service_spec.rb b/spec/services/files/update_service_spec.rb index 26aa5b432d4..16bca66766a 100644 --- a/spec/services/files/update_service_spec.rb +++ b/spec/services/files/update_service_spec.rb @@ -7,7 +7,7 @@ describe Files::UpdateService do let(:user) { create(:user) } let(:file_path) { 'files/ruby/popen.rb' } let(:new_contents) { 'New Content' } - let(:target_branch) { project.default_branch } + let(:branch_name) { project.default_branch } let(:last_commit_sha) { nil } let(:commit_params) do @@ -19,7 +19,7 @@ describe Files::UpdateService do last_commit_sha: last_commit_sha, start_project: project, start_branch: project.default_branch, - target_branch: target_branch + branch_name: branch_name } end @@ -73,7 +73,7 @@ describe Files::UpdateService do end context 'when target branch is different than source branch' do - let(:target_branch) { "#{project.default_branch}-new" } + let(:branch_name) { "#{project.default_branch}-new" } it 'fires hooks only once' do expect(GitHooksService).to receive(:new).once.and_call_original diff --git a/spec/services/git_push_service_spec.rb b/spec/services/git_push_service_spec.rb index 0477cac6677..ab06f45dbb9 100644 --- a/spec/services/git_push_service_spec.rb +++ b/spec/services/git_push_service_spec.rb @@ -584,7 +584,7 @@ describe GitPushService, services: true do commit = double(:commit) diff = double(:diff, new_path: 'README.md') - expect(commit).to receive(:raw_diffs).with(deltas_only: true). + expect(commit).to receive(:raw_deltas). and_return([diff]) service.push_commits = [commit] @@ -622,12 +622,21 @@ describe GitPushService, services: true do it 'only schedules a limited number of commits' do allow(service).to receive(:push_commits). - and_return(Array.new(1000, double(:commit, to_hash: {}))) + and_return(Array.new(1000, double(:commit, to_hash: {}, matches_cross_reference_regex?: true))) expect(ProcessCommitWorker).to receive(:perform_async).exactly(100).times service.process_commit_messages end + + it "skips commits which don't include cross-references" do + allow(service).to receive(:push_commits). 
+ and_return([double(:commit, to_hash: {}, matches_cross_reference_regex?: false)]) + + expect(ProcessCommitWorker).not_to receive(:perform_async) + + service.process_commit_messages + end end def execute_service(project, user, oldrev, newrev, ref) diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb index 2ee11fc8b4c..a37257d1bf4 100644 --- a/spec/services/groups/destroy_service_spec.rb +++ b/spec/services/groups/destroy_service_spec.rb @@ -7,6 +7,7 @@ describe Groups::DestroyService, services: true do let!(:group) { create(:group) } let!(:nested_group) { create(:group, parent: group) } let!(:project) { create(:empty_project, namespace: group) } + let!(:notification_setting) { create(:notification_setting, source: group)} let!(:gitlab_shell) { Gitlab::Shell.new } let!(:remove_path) { group.path + "+#{group.id}+deleted" } @@ -23,6 +24,7 @@ describe Groups::DestroyService, services: true do it { expect(Group.unscoped.all).not_to include(group) } it { expect(Group.unscoped.all).not_to include(nested_group) } it { expect(Project.unscoped.all).not_to include(project) } + it { expect(NotificationSetting.unscoped.all).not_to include(notification_setting) } end context 'file system' do diff --git a/spec/services/issuable/bulk_update_service_spec.rb b/spec/services/issuable/bulk_update_service_spec.rb index 7a1ac027310..6437d00e451 100644 --- a/spec/services/issuable/bulk_update_service_spec.rb +++ b/spec/services/issuable/bulk_update_service_spec.rb @@ -4,11 +4,12 @@ describe Issuable::BulkUpdateService, services: true do let(:user) { create(:user) } let(:project) { create(:empty_project, namespace: user.namespace) } - def bulk_update(issues, extra_params = {}) + def bulk_update(issuables, extra_params = {}) bulk_update_params = extra_params - .reverse_merge(issuable_ids: Array(issues).map(&:id).join(',')) + .reverse_merge(issuable_ids: Array(issuables).map(&:id).join(',')) - Issuable::BulkUpdateService.new(project, user, bulk_update_params).execute('issue') + type = Array(issuables).first.model_name.param_key + Issuable::BulkUpdateService.new(project, user, bulk_update_params).execute(type) end describe 'close issues' do @@ -47,40 +48,77 @@ describe Issuable::BulkUpdateService, services: true do end end - describe 'updating assignee' do - let(:issue) { create(:issue, project: project, assignee: user) } + describe 'updating merge request assignee' do + let(:merge_request) { create(:merge_request, target_project: project, source_project: project, assignee: user) } context 'when the new assignee ID is a valid user' do it 'succeeds' do new_assignee = create(:user) project.team << [new_assignee, :developer] - result = bulk_update(issue, assignee_id: new_assignee.id) + result = bulk_update(merge_request, assignee_id: new_assignee.id) expect(result[:success]).to be_truthy expect(result[:count]).to eq(1) end - it 'updates the assignee to the use ID passed' do + it 'updates the assignee to the user ID passed' do assignee = create(:user) project.team << [assignee, :developer] - expect { bulk_update(issue, assignee_id: assignee.id) } - .to change { issue.reload.assignee }.from(user).to(assignee) + expect { bulk_update(merge_request, assignee_id: assignee.id) } + .to change { merge_request.reload.assignee }.from(user).to(assignee) end end context "when the new assignee ID is #{IssuableFinder::NONE}" do it "unassigns the issues" do - expect { bulk_update(issue, assignee_id: IssuableFinder::NONE) } - .to change { issue.reload.assignee }.to(nil) + expect { 
bulk_update(merge_request, assignee_id: IssuableFinder::NONE) } + .to change { merge_request.reload.assignee }.to(nil) end end context 'when the new assignee ID is not present' do it 'does not unassign' do - expect { bulk_update(issue, assignee_id: nil) } - .not_to change { issue.reload.assignee } + expect { bulk_update(merge_request, assignee_id: nil) } + .not_to change { merge_request.reload.assignee } + end + end + end + + describe 'updating issue assignee' do + let(:issue) { create(:issue, project: project, assignees: [user]) } + + context 'when the new assignee ID is a valid user' do + it 'succeeds' do + new_assignee = create(:user) + project.team << [new_assignee, :developer] + + result = bulk_update(issue, assignee_ids: [new_assignee.id]) + + expect(result[:success]).to be_truthy + expect(result[:count]).to eq(1) + end + + it 'updates the assignee to the user ID passed' do + assignee = create(:user) + project.team << [assignee, :developer] + expect { bulk_update(issue, assignee_ids: [assignee.id]) } + .to change { issue.reload.assignees.first }.from(user).to(assignee) + end + end + + context "when the new assignee ID is #{IssuableFinder::NONE}" do + it "unassigns the issues" do + expect { bulk_update(issue, assignee_ids: [IssuableFinder::NONE.to_s]) } + .to change { issue.reload.assignees.count }.from(1).to(0) + end + end + + context 'when the new assignee ID is not present' do + it 'does not unassign' do + expect { bulk_update(issue, assignee_ids: []) } + .not_to change{ issue.reload.assignees } end end end @@ -125,7 +163,7 @@ describe Issuable::BulkUpdateService, services: true do { label_ids: labels.map(&:id), add_label_ids: add_labels.map(&:id), - remove_label_ids: remove_labels.map(&:id), + remove_label_ids: remove_labels.map(&:id) } end diff --git a/spec/services/issues/build_service_spec.rb b/spec/services/issues/build_service_spec.rb index 17990f41b3b..bed25fe7ccf 100644 --- a/spec/services/issues/build_service_spec.rb +++ b/spec/services/issues/build_service_spec.rb @@ -11,7 +11,7 @@ describe Issues::BuildService, services: true do context 'for a single discussion' do describe '#execute' do let(:merge_request) { create(:merge_request, title: "Hello world", source_project: project) } - let(:discussion) { Discussion.new([create(:diff_note_on_merge_request, project: project, noteable: merge_request, note: "Almost done")]) } + let(:discussion) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, note: "Almost done").to_discussion } let(:service) { described_class.new(project, user, merge_request_to_resolve_discussions_of: merge_request.iid, discussion_to_resolve: discussion.id) } it 'references the noteable title in the issue title' do @@ -47,7 +47,7 @@ describe Issues::BuildService, services: true do let(:service) { described_class.new(project, user, merge_request_to_resolve_discussions_of: merge_request.iid) } it 'mentions the author of the note' do - discussion = Discussion.new([create(:diff_note_on_merge_request, author: create(:user, username: 'author'))]) + discussion = create(:diff_note_on_merge_request, author: create(:user, username: 'author')).to_discussion expect(service.item_for_discussion(discussion)).to include('@author') end @@ -60,7 +60,7 @@ describe Issues::BuildService, services: true do note_result = " > This is a string\n"\ " > > with a blockquote\n"\ " > > > That has a quote\n" - discussion = Discussion.new([create(:diff_note_on_merge_request, note: note_text)]) + discussion = create(:diff_note_on_merge_request, note: 
note_text).to_discussion expect(service.item_for_discussion(discussion)).to include(note_result) end end @@ -91,25 +91,23 @@ describe Issues::BuildService, services: true do end describe 'with multiple discussions' do - before do - create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.target_project, line_number: 15) - end + let!(:diff_note) { create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.target_project, line_number: 15) } it 'mentions all the authors in the description' do - authors = merge_request.diff_discussions.map(&:author) + authors = merge_request.resolvable_discussions.map(&:author) expect(issue.description).to include(*authors.map(&:to_reference)) end it 'has a link for each unresolved discussion in the description' do - notes = merge_request.diff_discussions.map(&:first_note) + notes = merge_request.resolvable_discussions.map(&:first_note) links = notes.map { |note| Gitlab::UrlBuilder.build(note) } expect(issue.description).to include(*links) end it 'mentions additional notes' do - create_list(:diff_note_on_merge_request, 2, noteable: merge_request, project: merge_request.target_project, line_number: 15) + create_list(:diff_note_on_merge_request, 2, noteable: merge_request, project: merge_request.target_project, in_reply_to: diff_note) expect(issue.description).to include('(+2 comments)') end @@ -138,7 +136,7 @@ describe Issues::BuildService, services: true do user, title: 'Issue #1', description: 'Issue description', - milestone_id: milestone.id, + milestone_id: milestone.id ).execute expect(issue.title).to eq('Issue #1') diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb index 7a54373963e..0a1f41719f7 100644 --- a/spec/services/issues/close_service_spec.rb +++ b/spec/services/issues/close_service_spec.rb @@ -4,7 +4,7 @@ describe Issues::CloseService, services: true do let(:user) { create(:user) } let(:user2) { create(:user) } let(:guest) { create(:user) } - let(:issue) { create(:issue, assignee: user2) } + let(:issue) { create(:issue, assignees: [user2]) } let(:project) { issue.project } let!(:todo) { create(:todo, :assigned, user: user, project: project, target: issue, author: user2) } @@ -51,8 +51,10 @@ describe Issues::CloseService, services: true do end end - it { expect(issue).to be_valid } - it { expect(issue).to be_closed } + it 'closes the issue' do + expect(issue).to be_valid + expect(issue).to be_closed + end it 'sends email to user2 about assign of new issue' do email = ActionMailer::Base.deliveries.last @@ -96,9 +98,11 @@ describe Issues::CloseService, services: true do described_class.new(project, user).close_issue(issue) end - it { expect(issue).to be_valid } - it { expect(issue).to be_opened } - it { expect(todo.reload).to be_pending } + it 'closes the issue' do + expect(issue).to be_valid + expect(issue).to be_opened + expect(todo.reload).to be_pending + end end end end diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb index 776cbc4296b..dab1a3469f7 100644 --- a/spec/services/issues/create_service_spec.rb +++ b/spec/services/issues/create_service_spec.rb @@ -6,10 +6,10 @@ describe Issues::CreateService, services: true do describe '#execute' do let(:issue) { described_class.new(project, user, opts).execute } + let(:assignee) { create(:user) } + let(:milestone) { create(:milestone, project: project) } context 'when params are valid' do - let(:assignee) { create(:user) } - let(:milestone) { 
create(:milestone, project: project) } let(:labels) { create_pair(:label, project: project) } before do @@ -20,7 +20,7 @@ describe Issues::CreateService, services: true do let(:opts) do { title: 'Awesome issue', description: 'please fix', - assignee_id: assignee.id, + assignee_ids: [assignee.id], label_ids: labels.map(&:id), milestone_id: milestone.id, due_date: Date.tomorrow } @@ -29,7 +29,7 @@ describe Issues::CreateService, services: true do it 'creates the issue with the given params' do expect(issue).to be_persisted expect(issue.title).to eq('Awesome issue') - expect(issue.assignee).to eq assignee + expect(issue.assignees).to eq [assignee] expect(issue.labels).to match_array labels expect(issue.milestone).to eq milestone expect(issue.due_date).to eq Date.tomorrow @@ -37,6 +37,7 @@ describe Issues::CreateService, services: true do context 'when current user cannot admin issues in the project' do let(:guest) { create(:user) } + before do project.team << [guest, :guest] end @@ -47,7 +48,7 @@ describe Issues::CreateService, services: true do expect(issue).to be_persisted expect(issue.title).to eq('Awesome issue') expect(issue.description).to eq('please fix') - expect(issue.assignee).to be_nil + expect(issue.assignees).to be_empty expect(issue.labels).to be_empty expect(issue.milestone).to be_nil expect(issue.due_date).to be_nil @@ -117,6 +118,22 @@ describe Issues::CreateService, services: true do end end + context 'when assignee is set' do + let(:opts) do + { title: 'Title', + description: 'Description', + assignees: [assignee] } + end + + it 'invalidates open issues counter for assignees when issue is assigned' do + project.team << [assignee, :master] + + described_class.new(project, user, opts).execute + + expect(assignee.assigned_open_issues_count).to eq 1 + end + end + it 'executes issue hooks when issue is not confidential' do opts = { title: 'Title', description: 'Description', confidential: false } @@ -136,12 +153,85 @@ describe Issues::CreateService, services: true do end end - it_behaves_like 'issuable create service' + context 'issue create service' do + context 'assignees' do + before { project.team << [user, :master] } + + it 'removes assignee when user id is invalid' do + opts = { title: 'Title', description: 'Description', assignee_ids: [-1] } + + issue = described_class.new(project, user, opts).execute + + expect(issue.assignees).to be_empty + end + + it 'removes assignee when user id is 0' do + opts = { title: 'Title', description: 'Description', assignee_ids: [0] } + + issue = described_class.new(project, user, opts).execute + + expect(issue.assignees).to be_empty + end + + it 'saves assignee when user id is valid' do + project.team << [assignee, :master] + opts = { title: 'Title', description: 'Description', assignee_ids: [assignee.id] } + + issue = described_class.new(project, user, opts).execute + + expect(issue.assignees).to eq([assignee]) + end + + context "when issuable feature is private" do + before do + project.project_feature.update(issues_access_level: ProjectFeature::PRIVATE, + merge_requests_access_level: ProjectFeature::PRIVATE) + end + + levels = [Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PUBLIC] + + levels.each do |level| + it "removes not authorized assignee when project is #{Gitlab::VisibilityLevel.level_name(level)}" do + project.update(visibility_level: level) + opts = { title: 'Title', description: 'Description', assignee_ids: [assignee.id] } + + issue = described_class.new(project, user, opts).execute + + expect(issue.assignees).to 
be_empty + end + end + end + end + end it_behaves_like 'new issuable record that supports slash commands' + context 'Slash commands' do + context 'with assignee and milestone in params and command' do + let(:opts) do + { + assignee_ids: [create(:user).id], + milestone_id: 1, + title: 'Title', + description: %(/assign @#{assignee.username}\n/milestone %"#{milestone.name}") + } + end + + before do + project.team << [user, :master] + project.team << [assignee, :master] + end + + it 'assigns and sets milestone to issuable from command' do + expect(issue).to be_persisted + expect(issue.assignees).to eq([assignee]) + expect(issue.milestone).to eq(milestone) + end + end + end + context 'resolving discussions' do - let(:discussion) { Discussion.for_diff_notes([create(:diff_note_on_merge_request)]).first } + let(:discussion) { create(:diff_note_on_merge_request).to_discussion } let(:merge_request) { discussion.noteable } let(:project) { merge_request.source_project } diff --git a/spec/services/issues/resolve_discussions_spec.rb b/spec/services/issues/resolve_discussions_spec.rb index 3a72f92383c..86f218dec12 100644 --- a/spec/services/issues/resolve_discussions_spec.rb +++ b/spec/services/issues/resolve_discussions_spec.rb @@ -1,15 +1,15 @@ require 'spec_helper.rb' -class DummyService < Issues::BaseService - include ::Issues::ResolveDiscussions +describe Issues::ResolveDiscussions, services: true do + class DummyService < Issues::BaseService + include ::Issues::ResolveDiscussions - def initialize(*args) - super - filter_resolve_discussion_params + def initialize(*args) + super + filter_resolve_discussion_params + end end -end -describe DummyService, services: true do let(:project) { create(:project, :repository) } let(:user) { create(:user) } @@ -18,12 +18,12 @@ describe DummyService, services: true do end describe "for resolving discussions" do - let(:discussion) { Discussion.new([create(:diff_note_on_merge_request, project: project, note: "Almost done")]) } + let(:discussion) { create(:diff_note_on_merge_request, project: project, note: "Almost done").to_discussion } let(:merge_request) { discussion.noteable } let(:other_merge_request) { create(:merge_request, source_project: project, source_branch: "other") } describe "#merge_request_for_resolving_discussion" do - let(:service) { described_class.new(project, user, merge_request_to_resolve_discussions_of: merge_request.iid) } + let(:service) { DummyService.new(project, user, merge_request_to_resolve_discussions_of: merge_request.iid) } it "finds the merge request" do expect(service.merge_request_to_resolve_discussions_of).to eq(merge_request) @@ -43,7 +43,7 @@ describe DummyService, services: true do describe "#discussions_to_resolve" do it "contains a single discussion when matching merge request and discussion are passed" do - service = described_class.new( + service = DummyService.new( project, user, discussion_to_resolve: discussion.id, @@ -61,7 +61,7 @@ describe DummyService, services: true do noteable: merge_request, project: merge_request.target_project, line_number: 15)]) - service = described_class.new( + service = DummyService.new( project, user, merge_request_to_resolve_discussions_of: merge_request.iid @@ -77,9 +77,9 @@ describe DummyService, services: true do _second_discussion = Discussion.new([create(:diff_note_on_merge_request, :resolved, noteable: merge_request, project: merge_request.target_project, - line_number: 15, + line_number: 15 )]) - service = described_class.new( + service = DummyService.new( project, user, 
merge_request_to_resolve_discussions_of: merge_request.iid @@ -92,7 +92,7 @@ describe DummyService, services: true do end it "is empty when a discussion and another merge request are passed" do - service = described_class.new( + service = DummyService.new( project, user, discussion_to_resolve: discussion.id, diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb index 5b324f3c706..5184c1d5f19 100644 --- a/spec/services/issues/update_service_spec.rb +++ b/spec/services/issues/update_service_spec.rb @@ -14,7 +14,7 @@ describe Issues::UpdateService, services: true do let(:issue) do create(:issue, title: 'Old title', description: "for #{user2.to_reference}", - assignee_id: user3.id, + assignee_ids: [user3.id], project: project) end @@ -40,7 +40,7 @@ describe Issues::UpdateService, services: true do { title: 'New title', description: 'Also please fix', - assignee_id: user2.id, + assignee_ids: [user2.id], state_event: 'close', label_ids: [label.id], due_date: Date.tomorrow @@ -53,15 +53,22 @@ describe Issues::UpdateService, services: true do expect(issue).to be_valid expect(issue.title).to eq 'New title' expect(issue.description).to eq 'Also please fix' - expect(issue.assignee).to eq user2 + expect(issue.assignees).to match_array([user2]) expect(issue).to be_closed expect(issue.labels).to match_array [label] expect(issue.due_date).to eq Date.tomorrow end + it 'updates open issue counter for assignees when issue is reassigned' do + update_issue(assignee_ids: [user2.id]) + + expect(user3.assigned_open_issues_count).to eq 0 + expect(user2.assigned_open_issues_count).to eq 1 + end + it 'sorts issues as specified by parameters' do - issue1 = create(:issue, project: project, assignee_id: user3.id) - issue2 = create(:issue, project: project, assignee_id: user3.id) + issue1 = create(:issue, project: project, assignees: [user3]) + issue2 = create(:issue, project: project, assignees: [user3]) [issue, issue1, issue2].each do |issue| issue.move_to_end @@ -87,7 +94,7 @@ describe Issues::UpdateService, services: true do expect(issue).to be_valid expect(issue.title).to eq 'New title' expect(issue.description).to eq 'Also please fix' - expect(issue.assignee).to eq user3 + expect(issue.assignees).to match_array [user3] expect(issue.labels).to be_empty expect(issue.milestone).to be_nil expect(issue.due_date).to be_nil @@ -132,12 +139,23 @@ describe Issues::UpdateService, services: true do end end + context 'when description changed' do + it 'creates system note about description change' do + update_issue(description: 'Changed description') + + note = find_note('changed the description') + + expect(note).not_to be_nil + expect(note.note).to eq('changed the description') + end + end + context 'when issue turns confidential' do let(:opts) do { title: 'New title', description: 'Also please fix', - assignee_id: user2.id, + assignee_ids: [user2], state_event: 'close', label_ids: [label.id], confidential: true @@ -163,12 +181,12 @@ describe Issues::UpdateService, services: true do it 'does not update assignee_id with unauthorized users' do project.update(visibility_level: Gitlab::VisibilityLevel::PUBLIC) update_issue(confidential: true) - non_member = create(:user) - original_assignee = issue.assignee + non_member = create(:user) + original_assignees = issue.assignees - update_issue(assignee_id: non_member.id) + update_issue(assignee_ids: [non_member.id]) - expect(issue.reload.assignee_id).to eq(original_assignee.id) + expect(issue.reload.assignees).to 
eq(original_assignees) end end @@ -205,7 +223,7 @@ describe Issues::UpdateService, services: true do context 'when is reassigned' do before do - update_issue(assignee: user2) + update_issue(assignees: [user2]) end it 'marks previous assignee todos as done' do @@ -408,6 +426,41 @@ describe Issues::UpdateService, services: true do end end + context 'updating asssignee_id' do + it 'does not update assignee when assignee_id is invalid' do + update_issue(assignee_ids: [-1]) + + expect(issue.reload.assignees).to eq([user3]) + end + + it 'unassigns assignee when user id is 0' do + update_issue(assignee_ids: [0]) + + expect(issue.reload.assignees).to be_empty + end + + it 'does not update assignee_id when user cannot read issue' do + update_issue(assignee_ids: [create(:user).id]) + + expect(issue.reload.assignees).to eq([user3]) + end + + context "when issuable feature is private" do + levels = [Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PUBLIC] + + levels.each do |level| + it "does not update with unauthorized assignee when project is #{Gitlab::VisibilityLevel.level_name(level)}" do + assignee = create(:user) + project.update(visibility_level: level) + feature_visibility_attr = :"#{issue.model_name.plural}_access_level" + project.project_feature.update_attribute(feature_visibility_attr, ProjectFeature::PRIVATE) + + expect{ update_issue(assignee_ids: [assignee.id]) }.not_to change{ issue.assignees } + end + end + end + end + context 'updating mentions' do let(:mentionable) { issue } include_examples 'updating mentions', Issues::UpdateService diff --git a/spec/services/members/authorized_destroy_service_spec.rb b/spec/services/members/authorized_destroy_service_spec.rb new file mode 100644 index 00000000000..8a6732faa19 --- /dev/null +++ b/spec/services/members/authorized_destroy_service_spec.rb @@ -0,0 +1,66 @@ +require 'spec_helper' + +describe Members::AuthorizedDestroyService, services: true do + let(:member_user) { create(:user) } + let(:project) { create(:empty_project, :public) } + let(:group) { create(:group, :public) } + let(:group_project) { create(:empty_project, :public, group: group) } + + def number_of_assigned_issuables(user) + Issue.assigned_to(user).count + MergeRequest.assigned_to(user).count + end + + context 'Invited users' do + # Regression spec for issue: https://gitlab.com/gitlab-org/gitlab-ce/issues/32504 + it 'destroys invited project member' do + project.team << [member_user, :developer] + + member = create :project_member, :invited, project: project + + expect { described_class.new(member, member_user).execute } + .to change { Member.count }.from(2).to(1) + end + + it 'destroys invited group member' do + group.add_developer(member_user) + + member = create :group_member, :invited, group: group + + expect { described_class.new(member, member_user).execute } + .to change { Member.count }.from(2).to(1) + end + end + + context 'Group member' do + it "unassigns issues and merge requests" do + group.add_developer(member_user) + + issue = create :issue, project: group_project, assignees: [member_user] + create :issue, assignees: [member_user] + merge_request = create :merge_request, target_project: group_project, source_project: group_project, assignee: member_user + create :merge_request, target_project: project, source_project: project, assignee: member_user + + member = group.members.find_by(user_id: member_user.id) + + expect { described_class.new(member, member_user).execute } + .to change { number_of_assigned_issuables(member_user) }.from(4).to(2) + + 
expect(issue.reload.assignee_id).to be_nil + expect(merge_request.reload.assignee_id).to be_nil + end + end + + context 'Project member' do + it "unassigns issues and merge requests" do + project.team << [member_user, :developer] + + create :issue, project: project, assignees: [member_user] + create :merge_request, target_project: project, source_project: project, assignee: member_user + + member = project.members.find_by(user_id: member_user.id) + + expect { described_class.new(member, member_user).execute } + .to change { number_of_assigned_issuables(member_user) }.from(2).to(0) + end + end +end diff --git a/spec/services/merge_requests/assign_issues_service_spec.rb b/spec/services/merge_requests/assign_issues_service_spec.rb index fe75757dd29..d3556020d4d 100644 --- a/spec/services/merge_requests/assign_issues_service_spec.rb +++ b/spec/services/merge_requests/assign_issues_service_spec.rb @@ -15,14 +15,14 @@ describe MergeRequests::AssignIssuesService, services: true do expect(service.assignable_issues.map(&:id)).to include(issue.id) end - it 'ignores issues already assigned to any user' do - issue.update!(assignee: create(:user)) + it 'ignores issues the user cannot update assignee on' do + project.team.truncate expect(service.assignable_issues).to be_empty end - it 'ignores issues the user cannot update assignee on' do - project.team.truncate + it 'ignores issues already assigned to any user' do + issue.assignees = [create(:user)] expect(service.assignable_issues).to be_empty end @@ -44,7 +44,7 @@ describe MergeRequests::AssignIssuesService, services: true do end it 'assigns these to the merge request owner' do - expect { service.execute }.to change { issue.reload.assignee }.to(user) + expect { service.execute }.to change { issue.assignees.first }.to(user) end it 'ignores external issues' do diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb index c8bd4d4601a..6f9d1208b1d 100644 --- a/spec/services/merge_requests/build_service_spec.rb +++ b/spec/services/merge_requests/build_service_spec.rb @@ -4,6 +4,8 @@ describe MergeRequests::BuildService, services: true do include RepoHelpers let(:project) { create(:project, :repository) } + let(:source_project) { nil } + let(:target_project) { nil } let(:user) { create(:user) } let(:issue_confidential) { false } let(:issue) { create(:issue, project: project, title: 'A bug', confidential: issue_confidential) } @@ -20,7 +22,9 @@ describe MergeRequests::BuildService, services: true do MergeRequests::BuildService.new(project, user, description: description, source_branch: source_branch, - target_branch: target_branch) + target_branch: target_branch, + source_project: source_project, + target_project: target_project) end before do @@ -256,5 +260,51 @@ describe MergeRequests::BuildService, services: true do ) end end + + context 'upstream project has disabled merge requests' do + let(:upstream_project) { create(:empty_project, :merge_requests_disabled) } + let(:project) { create(:empty_project, forked_from_project: upstream_project) } + let(:commits) { Commit.decorate([commit_1], project) } + + it 'sets target project correctly' do + expect(merge_request.target_project).to eq(project) + end + end + + context 'target_project is set and accessible by current_user' do + let(:target_project) { create(:project, :public, :repository)} + let(:commits) { Commit.decorate([commit_1], project) } + + it 'sets target project correctly' do + expect(merge_request.target_project).to eq(target_project) 
+ end + end + + context 'target_project is set but not accessible by current_user' do + let(:target_project) { create(:project, :private, :repository)} + let(:commits) { Commit.decorate([commit_1], project) } + + it 'sets target project correctly' do + expect(merge_request.target_project).to eq(project) + end + end + + context 'source_project is set and accessible by current_user' do + let(:source_project) { create(:project, :public, :repository)} + let(:commits) { Commit.decorate([commit_1], project) } + + it 'sets target project correctly' do + expect(merge_request.source_project).to eq(source_project) + end + end + + context 'source_project is set but not accessible by current_user' do + let(:source_project) { create(:project, :private, :repository)} + let(:commits) { Commit.decorate([commit_1], project) } + + it 'sets target project correctly' do + expect(merge_request.source_project).to eq(project) + end + end end end diff --git a/spec/services/merge_requests/conflicts/list_service_spec.rb b/spec/services/merge_requests/conflicts/list_service_spec.rb new file mode 100644 index 00000000000..23982b9e6e1 --- /dev/null +++ b/spec/services/merge_requests/conflicts/list_service_spec.rb @@ -0,0 +1,80 @@ +require 'spec_helper' + +describe MergeRequests::Conflicts::ListService do + describe '#can_be_resolved_in_ui?' do + def create_merge_request(source_branch) + create(:merge_request, source_branch: source_branch, target_branch: 'conflict-start') do |mr| + mr.mark_as_unmergeable + end + end + + def conflicts_service(merge_request) + described_class.new(merge_request) + end + + it 'returns a falsey value when the MR can be merged without conflicts' do + merge_request = create_merge_request('master') + merge_request.mark_as_mergeable + + expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey + end + + it 'returns a falsey value when the MR is marked as having conflicts, but has none' do + merge_request = create_merge_request('master') + + expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey + end + + it 'returns a falsey value when one of the MR branches is missing' do + merge_request = create_merge_request('conflict-resolvable') + merge_request.project.repository.rm_branch(merge_request.author, 'conflict-resolvable') + + expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey + end + + it 'returns a falsey value when the MR has a missing ref after a force push' do + merge_request = create_merge_request('conflict-resolvable') + service = conflicts_service(merge_request) + allow(service.conflicts).to receive(:merge_index).and_raise(Rugged::OdbError) + + expect(service.can_be_resolved_in_ui?).to be_falsey + end + + it 'returns a falsey value when the MR does not support new diff notes' do + merge_request = create_merge_request('conflict-resolvable') + merge_request.merge_request_diff.update_attributes(start_commit_sha: nil) + + expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey + end + + it 'returns a falsey value when the conflicts contain a large file' do + merge_request = create_merge_request('conflict-too-large') + + expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey + end + + it 'returns a falsey value when the conflicts contain a binary file' do + merge_request = create_merge_request('conflict-binary-file') + + expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey + end + + it 'returns a falsey value when the conflicts contain a file edited in one 
branch and deleted in another' do + merge_request = create_merge_request('conflict-missing-side') + + expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey + end + + it 'returns a truthy value when the conflicts are resolvable in the UI' do + merge_request = create_merge_request('conflict-resolvable') + + expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_truthy + end + + it 'returns a truthy value when the conflicts have to be resolved in an editor' do + merge_request = create_merge_request('conflict-contains-conflict-markers') + + expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_truthy + end + end +end diff --git a/spec/services/merge_requests/resolve_service_spec.rb b/spec/services/merge_requests/conflicts/resolve_service_spec.rb index eaf7785e549..19e8d5cc5f1 100644 --- a/spec/services/merge_requests/resolve_service_spec.rb +++ b/spec/services/merge_requests/conflicts/resolve_service_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe MergeRequests::ResolveService do +describe MergeRequests::Conflicts::ResolveService do let(:user) { create(:user) } let(:project) { create(:project, :repository) } @@ -24,6 +24,8 @@ describe MergeRequests::ResolveService do end describe '#execute' do + let(:service) { described_class.new(merge_request) } + context 'with section params' do let(:params) do { @@ -50,7 +52,7 @@ describe MergeRequests::ResolveService do context 'when the source and target project are the same' do before do - MergeRequests::ResolveService.new(project, user, params).execute(merge_request) + service.execute(user, params) end it 'creates a commit with the message' do @@ -74,15 +76,26 @@ describe MergeRequests::ResolveService do branch_name: 'conflict-start') end - before do - MergeRequests::ResolveService.new(fork_project, user, params).execute(merge_request_from_fork) + def resolve_conflicts + described_class.new(merge_request_from_fork).execute(user, params) + end + + it 'gets conflicts from the source project' do + expect(fork_project.repository.rugged).to receive(:merge_commits).and_call_original + expect(project.repository.rugged).not_to receive(:merge_commits) + + resolve_conflicts end it 'creates a commit with the message' do + resolve_conflicts + expect(merge_request_from_fork.source_branch_head.message).to eq(params[:commit_message]) end it 'creates a commit with the correct parents' do + resolve_conflicts + expect(merge_request_from_fork.source_branch_head.parents.map(&:id)). to eq(['404fa3fc7c2c9b5dacff102f353bdf55b1be2813', target_head]) @@ -115,7 +128,7 @@ describe MergeRequests::ResolveService do end before do - MergeRequests::ResolveService.new(project, user, params).execute(merge_request) + service.execute(user, params) end it 'creates a commit with the message' do @@ -154,15 +167,15 @@ describe MergeRequests::ResolveService do } end - let(:service) { MergeRequests::ResolveService.new(project, user, invalid_params) } - it 'raises a MissingResolution error' do - expect { service.execute(merge_request) }. + expect { service.execute(user, invalid_params) }. 
to raise_error(Gitlab::Conflict::File::MissingResolution) end end context 'when the content of a file is unchanged' do + let(:list_service) { MergeRequests::Conflicts::ListService.new(merge_request) } + let(:invalid_params) do { files: [ @@ -173,17 +186,15 @@ describe MergeRequests::ResolveService do }, { old_path: 'files/ruby/regex.rb', new_path: 'files/ruby/regex.rb', - content: merge_request.conflicts.file_for_path('files/ruby/regex.rb', 'files/ruby/regex.rb').content + content: list_service.conflicts.file_for_path('files/ruby/regex.rb', 'files/ruby/regex.rb').content } ], commit_message: 'This is a commit message!' } end - let(:service) { MergeRequests::ResolveService.new(project, user, invalid_params) } - it 'raises a MissingResolution error' do - expect { service.execute(merge_request) }. + expect { service.execute(user, invalid_params) }. to raise_error(Gitlab::Conflict::File::MissingResolution) end end @@ -202,11 +213,9 @@ describe MergeRequests::ResolveService do } end - let(:service) { MergeRequests::ResolveService.new(project, user, invalid_params) } - it 'raises a MissingFiles error' do - expect { service.execute(merge_request) }. - to raise_error(MergeRequests::ResolveService::MissingFiles) + expect { service.execute(user, invalid_params) }. + to raise_error(described_class::MissingFiles) end end end diff --git a/spec/services/merge_requests/create_from_issue_service_spec.rb b/spec/services/merge_requests/create_from_issue_service_spec.rb new file mode 100644 index 00000000000..1588d30c394 --- /dev/null +++ b/spec/services/merge_requests/create_from_issue_service_spec.rb @@ -0,0 +1,74 @@ +require 'spec_helper' + +describe MergeRequests::CreateFromIssueService, services: true do + let(:project) { create(:project, :repository) } + let(:user) { create(:user) } + let(:issue) { create(:issue, project: project) } + + subject(:service) { described_class.new(project, user, issue_iid: issue.iid) } + + before do + project.add_developer(user) + end + + describe '#execute' do + it 'returns an error with invalid issue iid' do + result = described_class.new(project, user, issue_iid: -1).execute + + expect(result[:status]).to eq :error + expect(result[:message]).to eq 'Invalid issue iid' + end + + it 'delegates issue search to IssuesFinder' do + expect_any_instance_of(IssuesFinder).to receive(:execute).once.and_call_original + + described_class.new(project, user, issue_iid: -1).execute + end + + it 'delegates the branch creation to CreateBranchService' do + expect_any_instance_of(CreateBranchService).to receive(:execute).once.and_call_original + + service.execute + end + + it 'creates a branch based on issue title' do + service.execute + + expect(project.repository.branch_exists?(issue.to_branch_name)).to be_truthy + end + + it 'creates a system note' do + expect(SystemNoteService).to receive(:new_issue_branch).with(issue, project, user, issue.to_branch_name) + + service.execute + end + + it 'creates a merge request' do + expect { service.execute }.to change(project.merge_requests, :count).by(1) + end + + it 'sets the merge request title to: "WIP: Resolves "$issue-title"' do + result = service.execute + + expect(result[:merge_request].title).to eq("WIP: Resolve \"#{issue.title}\"") + end + + it 'sets the merge request author to current user' do + result = service.execute + + expect(result[:merge_request].author).to eq user + end + + it 'sets the merge request source branch to the new issue branch' do + result = service.execute + + expect(result[:merge_request].source_branch).to eq 
issue.to_branch_name + end + + it 'sets the merge request target branch to the project default branch' do + result = service.execute + + expect(result[:merge_request].target_branch).to eq project.default_branch + end + end +end diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb index 0e16c7cc94b..b70e9d534a4 100644 --- a/spec/services/merge_requests/create_service_spec.rb +++ b/spec/services/merge_requests/create_service_spec.rb @@ -27,10 +27,12 @@ describe MergeRequests::CreateService, services: true do @merge_request = service.execute end - it { expect(@merge_request).to be_valid } - it { expect(@merge_request.title).to eq('Awesome merge_request') } - it { expect(@merge_request.assignee).to be_nil } - it { expect(@merge_request.merge_params['force_remove_source_branch']).to eq('1') } + it 'creates an MR' do + expect(@merge_request).to be_valid + expect(@merge_request.title).to eq('Awesome merge_request') + expect(@merge_request.assignee).to be_nil + expect(@merge_request.merge_params['force_remove_source_branch']).to eq('1') + end it 'executes hooks with default action' do expect(service).to have_received(:execute_hooks).with(@merge_request) @@ -84,7 +86,107 @@ describe MergeRequests::CreateService, services: true do end end - it_behaves_like 'issuable create service' + context 'Slash commands' do + context 'with assignee and milestone in params and command' do + let(:merge_request) { described_class.new(project, user, opts).execute } + let(:milestone) { create(:milestone, project: project) } + + let(:opts) do + { + assignee_id: create(:user).id, + milestone_id: 1, + title: 'Title', + description: %(/assign @#{assignee.username}\n/milestone %"#{milestone.name}"), + source_branch: 'feature', + target_branch: 'master' + } + end + + before do + project.team << [user, :master] + project.team << [assignee, :master] + end + + it 'assigns and sets milestone to issuable from command' do + expect(merge_request).to be_persisted + expect(merge_request.assignee).to eq(assignee) + expect(merge_request.milestone).to eq(milestone) + end + end + end + + context 'merge request create service' do + context 'assignee_id' do + let(:assignee) { create(:user) } + + before { project.team << [user, :master] } + + it 'removes assignee_id when user id is invalid' do + opts = { title: 'Title', description: 'Description', assignee_id: -1 } + + merge_request = described_class.new(project, user, opts).execute + + expect(merge_request.assignee_id).to be_nil + end + + it 'removes assignee_id when user id is 0' do + opts = { title: 'Title', description: 'Description', assignee_id: 0 } + + merge_request = described_class.new(project, user, opts).execute + + expect(merge_request.assignee_id).to be_nil + end + + it 'saves assignee when user id is valid' do + project.team << [assignee, :master] + opts = { title: 'Title', description: 'Description', assignee_id: assignee.id } + + merge_request = described_class.new(project, user, opts).execute + + expect(merge_request.assignee).to eq(assignee) + end + + context 'when assignee is set' do + let(:opts) do + { + title: 'Title', + description: 'Description', + assignee_id: assignee.id, + source_branch: 'feature', + target_branch: 'master' + } + end + + it 'invalidates open merge request counter for assignees when merge request is assigned' do + project.team << [assignee, :master] + + described_class.new(project, user, opts).execute + + expect(assignee.assigned_open_merge_requests_count).to eq 1 + end + end + + 
context "when issuable feature is private" do + before do + project.project_feature.update(issues_access_level: ProjectFeature::PRIVATE, + merge_requests_access_level: ProjectFeature::PRIVATE) + end + + levels = [Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PUBLIC] + + levels.each do |level| + it "removes not authorized assignee when project is #{Gitlab::VisibilityLevel.level_name(level)}" do + project.update(visibility_level: level) + opts = { title: 'Title', description: 'Description', assignee_id: assignee.id } + + merge_request = described_class.new(project, user, opts).execute + + expect(merge_request.assignee_id).to be_nil + end + end + end + end + end context 'while saving references to issues that the created merge request closes' do let(:first_issue) { create(:issue, project: project) } diff --git a/spec/services/merge_requests/get_urls_service_spec.rb b/spec/services/merge_requests/get_urls_service_spec.rb index 290e00ea1ba..4a7d8ab4c6c 100644 --- a/spec/services/merge_requests/get_urls_service_spec.rb +++ b/spec/services/merge_requests/get_urls_service_spec.rb @@ -2,7 +2,7 @@ require "spec_helper" describe MergeRequests::GetUrlsService do let(:project) { create(:project, :public, :repository) } - let(:service) { MergeRequests::GetUrlsService.new(project) } + let(:service) { described_class.new(project) } let(:source_branch) { "my_branch" } let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.namespace.name}/#{project.path}/merge_requests/new?merge_request%5Bsource_branch%5D=#{source_branch}" } let(:show_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.namespace.name}/#{project.path}/merge_requests/#{merge_request.iid}" } @@ -89,7 +89,7 @@ describe MergeRequests::GetUrlsService do let!(:merge_request) { create(:merge_request, source_project: forked_project, target_project: project, source_branch: source_branch) } let(:changes) { existing_branch_changes } # Source project is now the forked one - let(:service) { MergeRequests::GetUrlsService.new(forked_project) } + let(:service) { described_class.new(forked_project) } before do allow(forked_project).to receive(:empty_repo?).and_return(false) diff --git a/spec/services/merge_requests/merge_request_diff_cache_service_spec.rb b/spec/services/merge_requests/merge_request_diff_cache_service_spec.rb index 35804d41b46..935f4710851 100644 --- a/spec/services/merge_requests/merge_request_diff_cache_service_spec.rb +++ b/spec/services/merge_requests/merge_request_diff_cache_service_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe MergeRequests::MergeRequestDiffCacheService do - let(:subject) { MergeRequests::MergeRequestDiffCacheService.new } + let(:subject) { described_class.new } describe '#execute' do it 'retrieves the diff files to cache the highlighted result' do diff --git a/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb b/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb index 769b3193275..3ef5135e6a3 100644 --- a/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb +++ b/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb @@ -82,6 +82,10 @@ describe MergeRequests::MergeWhenPipelineSucceedsService do sha: merge_request_head, status: 'success') end + before do + mr_merge_if_green_enabled.update(head_pipeline: triggering_pipeline) + end + it "merges all merge requests with merge when the pipeline succeeds enabled" do expect(MergeWorker).to receive(:perform_async) 
service.trigger(triggering_pipeline) @@ -124,6 +128,8 @@ describe MergeRequests::MergeWhenPipelineSucceedsService do sha: mr_conflict.diff_head_sha, status: 'success') end + before { mr_conflict.update(head_pipeline: conflict_pipeline) } + it 'does not merge the merge request' do expect(MergeWorker).not_to receive(:perform_async) diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb index c22d145ca5d..1f109eab268 100644 --- a/spec/services/merge_requests/refresh_service_spec.rb +++ b/spec/services/merge_requests/refresh_service_spec.rb @@ -49,6 +49,7 @@ describe MergeRequests::RefreshService, services: true do context 'push to origin repo source branch' do let(:refresh_service) { service.new(@project, @user) } + before do allow(refresh_service).to receive(:execute_hooks) refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') @@ -70,6 +71,32 @@ describe MergeRequests::RefreshService, services: true do end end + context 'push to origin repo source branch when an MR was reopened' do + let(:refresh_service) { service.new(@project, @user) } + + before do + @merge_request.update(state: :reopened) + + allow(refresh_service).to receive(:execute_hooks) + refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') + reload_mrs + end + + it 'executes hooks with update action' do + expect(refresh_service).to have_received(:execute_hooks). + with(@merge_request, 'update', @oldrev) + + expect(@merge_request.notes).not_to be_empty + expect(@merge_request).to be_open + expect(@merge_request.merge_when_pipeline_succeeds).to be_falsey + expect(@merge_request.diff_head_sha).to eq(@newrev) + expect(@fork_merge_request).to be_open + expect(@fork_merge_request.notes).to be_empty + expect(@build_failed_todo).to be_done + expect(@fork_build_failed_todo).to be_done + end + end + context 'push to origin repo target branch' do before do service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature') @@ -321,7 +348,7 @@ describe MergeRequests::RefreshService, services: true do title: 'fixup! 
Fix issue', work_in_progress?: true, to_reference: 'ccccccc' - ), + ) ]) refresh_service.execute(@oldrev, @newrev, 'refs/heads/wip') reload_mrs diff --git a/spec/services/merge_requests/resolved_discussion_notification_service.rb b/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb index 7ddd812e513..7ddd812e513 100644 --- a/spec/services/merge_requests/resolved_discussion_notification_service.rb +++ b/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb index f2ca1e6fcbd..860a7798857 100644 --- a/spec/services/merge_requests/update_service_spec.rb +++ b/spec/services/merge_requests/update_service_spec.rb @@ -59,14 +59,16 @@ describe MergeRequests::UpdateService, services: true do end end - it { expect(@merge_request).to be_valid } - it { expect(@merge_request.title).to eq('New title') } - it { expect(@merge_request.assignee).to eq(user2) } - it { expect(@merge_request).to be_closed } - it { expect(@merge_request.labels.count).to eq(1) } - it { expect(@merge_request.labels.first.title).to eq(label.name) } - it { expect(@merge_request.target_branch).to eq('target') } - it { expect(@merge_request.merge_params['force_remove_source_branch']).to eq('1') } + it 'matches base expectations' do + expect(@merge_request).to be_valid + expect(@merge_request.title).to eq('New title') + expect(@merge_request.assignee).to eq(user2) + expect(@merge_request).to be_closed + expect(@merge_request.labels.count).to eq(1) + expect(@merge_request.labels.first.title).to eq(label.name) + expect(@merge_request.target_branch).to eq('target') + expect(@merge_request.merge_params['force_remove_source_branch']).to eq('1') + end it 'executes hooks with update action' do expect(service).to have_received(:execute_hooks). @@ -102,6 +104,13 @@ describe MergeRequests::UpdateService, services: true do expect(note.note).to eq 'changed title from **{-Old-} title** to **{+New+} title**' end + it 'creates system note about description change' do + note = find_note('changed the description') + + expect(note).not_to be_nil + expect(note.note).to eq('changed the description') + end + it 'creates system note about branch change' do note = find_note('changed target') @@ -141,9 +150,11 @@ describe MergeRequests::UpdateService, services: true do end end - it { expect(@merge_request).to be_valid } - it { expect(@merge_request.state).to eq('merged') } - it { expect(@merge_request.merge_error).to be_nil } + it 'merges the MR' do + expect(@merge_request).to be_valid + expect(@merge_request.state).to eq('merged') + expect(@merge_request.merge_error).to be_nil + end end context 'with finished pipeline' do @@ -160,18 +171,22 @@ describe MergeRequests::UpdateService, services: true do end end - it { expect(@merge_request).to be_valid } - it { expect(@merge_request.state).to eq('merged') } + it 'merges the MR' do + expect(@merge_request).to be_valid + expect(@merge_request.state).to eq('merged') + end end context 'with active pipeline' do before do service_mock = double - create(:ci_pipeline_with_one_job, + pipeline = create(:ci_pipeline_with_one_job, project: project, ref: merge_request.source_branch, sha: merge_request.diff_head_sha) + merge_request.update(head_pipeline: pipeline) + expect(MergeRequests::MergeWhenPipelineSucceedsService).to receive(:new).with(project, user). 
and_return(service_mock) expect(service_mock).to receive(:execute).with(merge_request) @@ -193,8 +208,10 @@ describe MergeRequests::UpdateService, services: true do end end - it { expect(@merge_request.state).to eq('opened') } - it { expect(@merge_request.merge_error).not_to be_nil } + it 'does not merge the MR' do + expect(@merge_request.state).to eq('opened') + expect(@merge_request.merge_error).not_to be_nil + end end context 'MR can not be merged when note sha != MR sha' do @@ -290,6 +307,15 @@ describe MergeRequests::UpdateService, services: true do end end + context 'when the assignee changes' do + it 'updates open merge request counter for assignees when merge request is reassigned' do + update_merge_request(assignee_id: user2.id) + + expect(user3.assigned_open_merge_requests_count).to eq 0 + expect(user2.assigned_open_merge_requests_count).to eq 1 + end + end + context 'when the target branch change' do before do update_merge_request({ target_branch: 'target' }) @@ -423,6 +449,54 @@ describe MergeRequests::UpdateService, services: true do end end + context 'updating assignee_id' do + it 'does not update assignee when assignee_id is invalid' do + merge_request.update(assignee_id: user.id) + + update_merge_request(assignee_id: -1) + + expect(merge_request.reload.assignee).to eq(user) + end + + it 'unassigns assignee when user id is 0' do + merge_request.update(assignee_id: user.id) + + update_merge_request(assignee_id: 0) + + expect(merge_request.assignee_id).to be_nil + end + + it 'saves assignee when user id is valid' do + update_merge_request(assignee_id: user.id) + + expect(merge_request.assignee_id).to eq(user.id) + end + + it 'does not update assignee_id when user cannot read issue' do + non_member = create(:user) + original_assignee = merge_request.assignee + + update_merge_request(assignee_id: non_member.id) + + expect(merge_request.assignee_id).to eq(original_assignee.id) + end + + context "when issuable feature is private" do + levels = [Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PUBLIC] + + levels.each do |level| + it "does not update with unauthorized assignee when project is #{Gitlab::VisibilityLevel.level_name(level)}" do + assignee = create(:user) + project.update(visibility_level: level) + feature_visibility_attr = :"#{merge_request.model_name.plural}_access_level" + project.project_feature.update_attribute(feature_visibility_attr, ProjectFeature::PRIVATE) + + expect{ update_merge_request(assignee_id: assignee) }.not_to change{ merge_request.assignee } + end + end + end + end + include_examples 'issuable update service' do + let(:open_issuable) { merge_request } + let(:closed_issuable) { create(:closed_merge_request, source_project: project) } diff --git a/spec/services/notes/build_service_spec.rb b/spec/services/notes/build_service_spec.rb new file mode 100644 index 00000000000..133175769ca --- /dev/null +++ b/spec/services/notes/build_service_spec.rb @@ -0,0 +1,112 @@ +require 'spec_helper' + +describe Notes::BuildService, services: true do + let(:note) { create(:discussion_note_on_issue) } + let(:project) { note.project } + let(:author) { note.author } + + describe '#execute' do + context 'when in_reply_to_discussion_id is specified' do + context 'when a note with that original discussion ID exists' do + it 'sets the note up to be in reply to that note' do + new_note = described_class.new(project, author, note: 'Test', in_reply_to_discussion_id: note.discussion_id).execute + expect(new_note).to be_valid + expect(new_note.in_reply_to?(note)).to 
be_truthy + end + end + + context 'when a note with that discussion ID exists' do + it 'sets the note up to be in reply to that note' do + new_note = described_class.new(project, author, note: 'Test', in_reply_to_discussion_id: note.discussion_id).execute + expect(new_note).to be_valid + expect(new_note.in_reply_to?(note)).to be_truthy + end + end + + context 'when no note with that discussion ID exists' do + it 'sets an error' do + new_note = described_class.new(project, author, note: 'Test', in_reply_to_discussion_id: 'foo').execute + expect(new_note.errors[:base]).to include('Discussion to reply to cannot be found') + end + end + + context 'personal snippet note' do + def reply(note, user = nil) + user ||= create(:user) + + described_class.new(nil, + user, + note: 'Test', + in_reply_to_discussion_id: note.discussion_id).execute + end + + let(:snippet_author) { create(:user) } + + context 'when a snippet is public' do + it 'creates a reply note' do + snippet = create(:personal_snippet, :public) + note = create(:discussion_note_on_personal_snippet, noteable: snippet) + + new_note = reply(note) + + expect(new_note).to be_valid + expect(new_note.in_reply_to?(note)).to be_truthy + end + end + + context 'when a snippet is private' do + let(:snippet) { create(:personal_snippet, :private, author: snippet_author) } + let(:note) { create(:discussion_note_on_personal_snippet, noteable: snippet) } + + it 'creates a reply note when the author replies' do + new_note = reply(note, snippet_author) + + expect(new_note).to be_valid + expect(new_note.in_reply_to?(note)).to be_truthy + end + + it 'sets an error when another user replies' do + new_note = reply(note) + + expect(new_note.errors[:base]).to include('Discussion to reply to cannot be found') + end + end + + context 'when a snippet is internal' do + let(:snippet) { create(:personal_snippet, :internal, author: snippet_author) } + let(:note) { create(:discussion_note_on_personal_snippet, noteable: snippet) } + + it 'creates a reply note when the author replies' do + new_note = reply(note, snippet_author) + + expect(new_note).to be_valid + expect(new_note.in_reply_to?(note)).to be_truthy + end + + it 'creates a reply note when a regular user replies' do + new_note = reply(note) + + expect(new_note).to be_valid + expect(new_note.in_reply_to?(note)).to be_truthy + end + + it 'sets an error when an external user replies' do + new_note = reply(note, create(:user, :external)) + + expect(new_note.errors[:base]).to include('Discussion to reply to cannot be found') + end + end + end + end + + it 'builds a note without saving it' do + new_note = described_class.new(project, + author, + noteable_type: note.noteable_type, + noteable_id: note.noteable_id, + note: 'Test').execute + expect(new_note).to be_valid + expect(new_note).not_to be_persisted + end + end +end diff --git a/spec/services/notes/slash_commands_service_spec.rb b/spec/services/notes/slash_commands_service_spec.rb index 1a64c8bbf00..c9954dc3603 100644 --- a/spec/services/notes/slash_commands_service_spec.rb +++ b/spec/services/notes/slash_commands_service_spec.rb @@ -66,7 +66,7 @@ describe Notes::SlashCommandsService, services: true do expect(content).to eq '' expect(note.noteable).to be_closed expect(note.noteable.labels).to match_array(labels) - expect(note.noteable.assignee).to eq(assignee) + expect(note.noteable.assignees).to eq([assignee]) expect(note.noteable.milestone).to eq(milestone) end end @@ -113,7 +113,7 @@ describe Notes::SlashCommandsService, services: true do expect(content).to eq 
"HELLO\nWORLD" expect(note.noteable).to be_closed expect(note.noteable.labels).to match_array(labels) - expect(note.noteable.assignee).to eq(assignee) + expect(note.noteable.assignees).to eq([assignee]) expect(note.noteable.milestone).to eq(milestone) end end @@ -220,4 +220,31 @@ describe Notes::SlashCommandsService, services: true do let(:note) { build(:note_on_commit, project: project) } end end + + context 'CE restriction for issue assignees' do + describe '/assign' do + let(:project) { create(:empty_project) } + let(:master) { create(:user).tap { |u| project.team << [u, :master] } } + let(:assignee) { create(:user) } + let(:master) { create(:user) } + let(:service) { described_class.new(project, master) } + let(:note) { create(:note_on_issue, note: note_text, project: project) } + + let(:note_text) do + %(/assign @#{assignee.username} @#{master.username}\n") + end + + before do + project.team << [master, :master] + project.team << [assignee, :master] + end + + it 'adds only one assignee from the list' do + _, command_params = service.extract_commands(note) + service.execute(command_params, note) + + expect(note.noteable.assignees.count).to eq(1) + end + end + end end diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb index 5c841843b40..de3bbc6b6a1 100644 --- a/spec/services/notification_service_spec.rb +++ b/spec/services/notification_service_spec.rb @@ -4,6 +4,7 @@ describe NotificationService, services: true do include EmailHelpers let(:notification) { NotificationService.new } + let(:assignee) { create(:user) } around(:each) do |example| perform_enqueued_jobs do @@ -52,7 +53,11 @@ describe NotificationService, services: true do shared_examples 'participating by assignee notification' do it 'emails the participant' do - issuable.update_attribute(:assignee, participant) + if issuable.is_a?(Issue) + issuable.assignees << participant + else + issuable.update_attribute(:assignee, participant) + end notification_trigger @@ -103,17 +108,17 @@ describe NotificationService, services: true do describe 'Notes' do context 'issue note' do let(:project) { create(:empty_project, :private) } - let(:issue) { create(:issue, project: project, assignee: create(:user)) } - let(:mentioned_issue) { create(:issue, assignee: issue.assignee) } + let(:issue) { create(:issue, project: project, assignees: [assignee]) } + let(:mentioned_issue) { create(:issue, assignees: issue.assignees) } let(:note) { create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: '@mention referenced, @outsider also') } before do build_team(note.project) project.add_master(issue.author) - project.add_master(issue.assignee) + project.add_master(assignee) project.add_master(note.author) create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: '@subscribed_participant cc this guy') - update_custom_notification(:new_note, @u_guest_custom, project) + update_custom_notification(:new_note, @u_guest_custom, resource: project) update_custom_notification(:new_note, @u_custom_global) end @@ -130,7 +135,7 @@ describe NotificationService, services: true do should_email(@u_watcher) should_email(note.noteable.author) - should_email(note.noteable.assignee) + should_email(note.noteable.assignees.first) should_email(@u_custom_global) should_email(@u_mentioned) should_email(@subscriber) @@ -196,7 +201,7 @@ describe NotificationService, services: true do notification.new_note(note) should_email(note.noteable.author) - should_email(note.noteable.assignee) + 
should_email(note.noteable.assignees.first) should_email(@u_mentioned) should_email(@u_custom_global) should_not_email(@u_guest_custom) @@ -218,7 +223,7 @@ describe NotificationService, services: true do let(:member) { create(:user) } let(:guest) { create(:user) } let(:admin) { create(:admin) } - let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignee: assignee) } + let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee]) } let(:note) { create(:note_on_issue, noteable: confidential_issue, project: project, note: "#{author.to_reference} #{assignee.to_reference} #{non_member.to_reference} #{member.to_reference} #{admin.to_reference}") } let(:guest_watcher) { create_user_with_notification(:watch, "guest-watcher-confidential") } @@ -244,8 +249,8 @@ describe NotificationService, services: true do context 'issue note mention' do let(:project) { create(:empty_project, :public) } - let(:issue) { create(:issue, project: project, assignee: create(:user)) } - let(:mentioned_issue) { create(:issue, assignee: issue.assignee) } + let(:issue) { create(:issue, project: project, assignees: [assignee]) } + let(:mentioned_issue) { create(:issue, assignees: issue.assignees) } let(:note) { create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: '@all mentioned') } before do @@ -269,7 +274,7 @@ describe NotificationService, services: true do should_email(@u_guest_watcher) should_email(note.noteable.author) - should_email(note.noteable.assignee) + should_email(note.noteable.assignees.first) should_not_email(note.author) should_email(@u_mentioned) should_not_email(@u_disabled) @@ -345,7 +350,7 @@ describe NotificationService, services: true do create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_participant), create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_mentioned), create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_disabled), - create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_note_author), + create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_note_author) ] end @@ -379,7 +384,7 @@ describe NotificationService, services: true do build_team(note.project) reset_delivered_emails! allow_any_instance_of(Commit).to receive(:author).and_return(@u_committer) - update_custom_notification(:new_note, @u_guest_custom, project) + update_custom_notification(:new_note, @u_guest_custom, resource: project) update_custom_notification(:new_note, @u_custom_global) end @@ -439,7 +444,7 @@ describe NotificationService, services: true do notification.new_note(note) - expect(SentNotification.last.position).to eq(note.position) + expect(SentNotification.last.in_reply_to_discussion_id).to eq(note.discussion_id) end end end @@ -449,7 +454,7 @@ describe NotificationService, services: true do let(:group) { create(:group) } let(:project) { create(:empty_project, :public, namespace: group) } let(:another_project) { create(:empty_project, :public, namespace: group) } - let(:issue) { create :issue, project: project, assignee: create(:user), description: 'cc @participant' } + let(:issue) { create :issue, project: project, assignees: [assignee], description: 'cc @participant' } before do build_team(issue.project) @@ -457,7 +462,7 @@ describe NotificationService, services: true do add_users_with_subscription(issue.project, issue) reset_delivered_emails! 
- update_custom_notification(:new_issue, @u_guest_custom, project) + update_custom_notification(:new_issue, @u_guest_custom, resource: project) update_custom_notification(:new_issue, @u_custom_global) end @@ -465,7 +470,7 @@ describe NotificationService, services: true do it do notification.new_issue(issue, @u_disabled) - should_email(issue.assignee) + should_email(assignee) should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) @@ -480,10 +485,10 @@ describe NotificationService, services: true do end it do - create_global_setting_for(issue.assignee, :mention) + create_global_setting_for(issue.assignees.first, :mention) notification.new_issue(issue, @u_disabled) - should_not_email(issue.assignee) + should_not_email(issue.assignees.first) end it "emails the author if they've opted into notifications about their activity" do @@ -528,7 +533,7 @@ describe NotificationService, services: true do let(:member) { create(:user) } let(:guest) { create(:user) } let(:admin) { create(:admin) } - let(:confidential_issue) { create(:issue, :confidential, project: project, title: 'Confidential issue', author: author, assignee: assignee) } + let(:confidential_issue) { create(:issue, :confidential, project: project, title: 'Confidential issue', author: author, assignees: [assignee]) } it "emails subscribers of the issue's labels that can read the issue" do project.add_developer(member) @@ -567,14 +572,14 @@ describe NotificationService, services: true do describe '#reassigned_issue' do before do - update_custom_notification(:reassign_issue, @u_guest_custom, project) + update_custom_notification(:reassign_issue, @u_guest_custom, resource: project) update_custom_notification(:reassign_issue, @u_custom_global) end it 'emails new assignee' do - notification.reassigned_issue(issue, @u_disabled) + notification.reassigned_issue(issue, @u_disabled, [assignee]) - should_email(issue.assignee) + should_email(issue.assignees.first) should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) @@ -588,9 +593,8 @@ describe NotificationService, services: true do end it 'emails previous assignee even if he has the "on mention" notif level' do - issue.update_attribute(:assignee, @u_mentioned) - issue.update_attributes(assignee: @u_watcher) - notification.reassigned_issue(issue, @u_disabled) + issue.assignees = [@u_mentioned] + notification.reassigned_issue(issue, @u_disabled, [@u_watcher]) should_email(@u_mentioned) should_email(@u_watcher) @@ -606,11 +610,11 @@ describe NotificationService, services: true do end it 'emails new assignee even if he has the "on mention" notif level' do - issue.update_attributes(assignee: @u_mentioned) - notification.reassigned_issue(issue, @u_disabled) + issue.assignees = [@u_mentioned] + notification.reassigned_issue(issue, @u_disabled, [@u_mentioned]) - expect(issue.assignee).to be @u_mentioned - should_email(issue.assignee) + expect(issue.assignees.first).to be @u_mentioned + should_email(issue.assignees.first) should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) @@ -624,11 +628,11 @@ describe NotificationService, services: true do end it 'emails new assignee' do - issue.update_attribute(:assignee, @u_mentioned) - notification.reassigned_issue(issue, @u_disabled) + issue.assignees = [@u_mentioned] + notification.reassigned_issue(issue, @u_disabled, [@u_mentioned]) - expect(issue.assignee).to be @u_mentioned - should_email(issue.assignee) + expect(issue.assignees.first).to be @u_mentioned + 
should_email(issue.assignees.first) should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) @@ -642,17 +646,17 @@ describe NotificationService, services: true do end it 'does not email new assignee if they are the current user' do - issue.update_attribute(:assignee, @u_mentioned) - notification.reassigned_issue(issue, @u_mentioned) + issue.assignees = [@u_mentioned] + notification.reassigned_issue(issue, @u_mentioned, [@u_mentioned]) - expect(issue.assignee).to be @u_mentioned + expect(issue.assignees.first).to be @u_mentioned should_email(@u_watcher) should_email(@u_guest_watcher) should_email(@u_guest_custom) should_email(@u_participant_mentioned) should_email(@subscriber) should_email(@u_custom_global) - should_not_email(issue.assignee) + should_not_email(issue.assignees.first) should_not_email(@unsubscriber) should_not_email(@u_participating) should_not_email(@u_disabled) @@ -662,7 +666,7 @@ describe NotificationService, services: true do it_behaves_like 'participating notifications' do let(:participant) { create(:user, username: 'user-participant') } let(:issuable) { issue } - let(:notification_trigger) { notification.reassigned_issue(issue, @u_disabled) } + let(:notification_trigger) { notification.reassigned_issue(issue, @u_disabled, [assignee]) } end end @@ -705,7 +709,7 @@ describe NotificationService, services: true do it "doesn't send email to anyone but subscribers of the given labels" do notification.relabeled_issue(issue, [group_label_2, label_2], @u_disabled) - should_not_email(issue.assignee) + should_not_email(issue.assignees.first) should_not_email(issue.author) should_not_email(@u_watcher) should_not_email(@u_guest_watcher) @@ -729,7 +733,7 @@ describe NotificationService, services: true do let(:member) { create(:user) } let(:guest) { create(:user) } let(:admin) { create(:admin) } - let(:confidential_issue) { create(:issue, :confidential, project: project, title: 'Confidential issue', author: author, assignee: assignee) } + let(:confidential_issue) { create(:issue, :confidential, project: project, title: 'Confidential issue', author: author, assignees: [assignee]) } let!(:label_1) { create(:label, project: project, issues: [confidential_issue]) } let!(:label_2) { create(:label, project: project) } @@ -760,14 +764,14 @@ describe NotificationService, services: true do describe '#close_issue' do before do - update_custom_notification(:close_issue, @u_guest_custom, project) + update_custom_notification(:close_issue, @u_guest_custom, resource: project) update_custom_notification(:close_issue, @u_custom_global) end it 'sends email to issue assignee and issue author' do notification.close_issue(issue, @u_disabled) - should_email(issue.assignee) + should_email(issue.assignees.first) should_email(issue.author) should_email(@u_watcher) should_email(@u_guest_watcher) @@ -791,14 +795,14 @@ describe NotificationService, services: true do describe '#reopen_issue' do before do - update_custom_notification(:reopen_issue, @u_guest_custom, project) + update_custom_notification(:reopen_issue, @u_guest_custom, resource: project) update_custom_notification(:reopen_issue, @u_custom_global) end it 'sends email to issue notification recipients' do notification.reopen_issue(issue, @u_disabled) - should_email(issue.assignee) + should_email(issue.assignees.first) should_email(issue.author) should_email(@u_watcher) should_email(@u_guest_watcher) @@ -826,7 +830,7 @@ describe NotificationService, services: true do it 'sends email to issue notification recipients' do 
notification.issue_moved(issue, new_issue, @u_disabled) - should_email(issue.assignee) + should_email(issue.assignees.first) should_email(issue.author) should_email(@u_watcher) should_email(@u_guest_watcher) @@ -856,14 +860,14 @@ describe NotificationService, services: true do before do build_team(merge_request.target_project) add_users_with_subscription(merge_request.target_project, merge_request) - update_custom_notification(:new_merge_request, @u_guest_custom, project) + update_custom_notification(:new_merge_request, @u_guest_custom, resource: project) update_custom_notification(:new_merge_request, @u_custom_global) reset_delivered_emails! end describe '#new_merge_request' do before do - update_custom_notification(:new_merge_request, @u_guest_custom, project) + update_custom_notification(:new_merge_request, @u_guest_custom, resource: project) update_custom_notification(:new_merge_request, @u_custom_global) end @@ -952,7 +956,7 @@ describe NotificationService, services: true do describe '#reassigned_merge_request' do before do - update_custom_notification(:reassign_merge_request, @u_guest_custom, project) + update_custom_notification(:reassign_merge_request, @u_guest_custom, resource: project) update_custom_notification(:reassign_merge_request, @u_custom_global) end @@ -1026,7 +1030,7 @@ describe NotificationService, services: true do describe '#closed_merge_request' do before do - update_custom_notification(:close_merge_request, @u_guest_custom, project) + update_custom_notification(:close_merge_request, @u_guest_custom, resource: project) update_custom_notification(:close_merge_request, @u_custom_global) end @@ -1056,7 +1060,7 @@ describe NotificationService, services: true do describe '#merged_merge_request' do before do - update_custom_notification(:merge_merge_request, @u_guest_custom, project) + update_custom_notification(:merge_merge_request, @u_guest_custom, resource: project) update_custom_notification(:merge_merge_request, @u_custom_global) end @@ -1108,7 +1112,7 @@ describe NotificationService, services: true do describe '#reopen_merge_request' do before do - update_custom_notification(:reopen_merge_request, @u_guest_custom, project) + update_custom_notification(:reopen_merge_request, @u_guest_custom, resource: project) update_custom_notification(:reopen_merge_request, @u_custom_global) end @@ -1181,6 +1185,22 @@ describe NotificationService, services: true do should_not_email(@u_disabled) end end + + describe '#project_exported' do + it do + notification.project_exported(project, @u_disabled) + + should_only_email(@u_disabled) + end + end + + describe '#project_not_exported' do + it do + notification.project_not_exported(project, @u_disabled, ['error']) + + should_only_email(@u_disabled) + end + end end describe 'GroupMember' do @@ -1281,40 +1301,172 @@ describe NotificationService, services: true do describe 'Pipelines' do describe '#pipeline_finished' do let(:project) { create(:project, :public, :repository) } - let(:current_user) { create(:user) } let(:u_member) { create(:user) } - let(:u_other) { create(:user) } + let(:u_watcher) { create_user_with_notification(:watch, 'watcher') } + + let(:u_custom_notification_unset) do + create_user_with_notification(:custom, 'custom_unset') + end + + let(:u_custom_notification_enabled) do + user = create_user_with_notification(:custom, 'custom_enabled') + update_custom_notification(:success_pipeline, user, resource: project) + update_custom_notification(:failed_pipeline, user, resource: project) + user + end + + 
let(:u_custom_notification_disabled) do + user = create_user_with_notification(:custom, 'custom_disabled') + update_custom_notification(:success_pipeline, user, resource: project, value: false) + update_custom_notification(:failed_pipeline, user, resource: project, value: false) + user + end let(:commit) { project.commit } - let(:pipeline) do - create(:ci_pipeline, :success, + + def create_pipeline(user, status) + create(:ci_pipeline, status, project: project, - user: current_user, + user: user, ref: 'refs/heads/master', sha: commit.id, before_sha: '00000000') end before do - project.add_master(current_user) project.add_master(u_member) + project.add_master(u_watcher) + project.add_master(u_custom_notification_unset) + project.add_master(u_custom_notification_enabled) + project.add_master(u_custom_notification_disabled) + reset_delivered_emails! end - context 'without custom recipients' do - it 'notifies the pipeline user' do - notification.pipeline_finished(pipeline) + context 'with a successful pipeline' do + context 'when the creator has default settings' do + before do + pipeline = create_pipeline(u_member, :success) + notification.pipeline_finished(pipeline) + end + + it 'notifies nobody' do + should_not_email_anyone + end + end + + context 'when the creator has watch set' do + before do + pipeline = create_pipeline(u_watcher, :success) + notification.pipeline_finished(pipeline) + end + + it 'notifies nobody' do + should_not_email_anyone + end + end + + context 'when the creator has custom notifications, but without any set' do + before do + pipeline = create_pipeline(u_custom_notification_unset, :success) + notification.pipeline_finished(pipeline) + end + + it 'notifies nobody' do + should_not_email_anyone + end + end + + context 'when the creator has custom notifications disabled' do + before do + pipeline = create_pipeline(u_custom_notification_disabled, :success) + notification.pipeline_finished(pipeline) + end + + it 'notifies nobody' do + should_not_email_anyone + end + end + + context 'when the creator has custom notifications enabled' do + before do + pipeline = create_pipeline(u_custom_notification_enabled, :success) + notification.pipeline_finished(pipeline) + end - should_only_email(current_user, kind: :bcc) + it 'emails only the creator' do + should_only_email(u_custom_notification_enabled, kind: :bcc) + end end end - context 'with custom recipients' do - it 'notifies the custom recipients' do - users = [u_member, u_other] - notification.pipeline_finished(pipeline, users.map(&:notification_email)) + context 'with a failed pipeline' do + context 'when the creator has no custom notification set' do + before do + pipeline = create_pipeline(u_member, :failed) + notification.pipeline_finished(pipeline) + end + + it 'emails only the creator' do + should_only_email(u_member, kind: :bcc) + end + end + + context 'when the creator has watch set' do + before do + pipeline = create_pipeline(u_watcher, :failed) + notification.pipeline_finished(pipeline) + end + + it 'emails only the creator' do + should_only_email(u_watcher, kind: :bcc) + end + end + + context 'when the creator has custom notifications, but without any set' do + before do + pipeline = create_pipeline(u_custom_notification_unset, :failed) + notification.pipeline_finished(pipeline) + end + + it 'emails only the creator' do + should_only_email(u_custom_notification_unset, kind: :bcc) + end + end + + context 'when the creator has custom notifications disabled' do + before do + pipeline = 
create_pipeline(u_custom_notification_disabled, :failed) + notification.pipeline_finished(pipeline) + end + + it 'notifies nobody' do + should_not_email_anyone + end + end + + context 'when the creator has custom notifications set' do + before do + pipeline = create_pipeline(u_custom_notification_enabled, :failed) + notification.pipeline_finished(pipeline) + end + + it 'emails only the creator' do + should_only_email(u_custom_notification_enabled, kind: :bcc) + end + end + + context 'when the creator has no read_build access' do + before do + pipeline = create_pipeline(u_member, :failed) + project.update(public_builds: false) + project.team.truncate + notification.pipeline_finished(pipeline) + end - should_only_email(*users, kind: :bcc) + it 'does not send emails' do + should_not_email_anyone + end end end end @@ -1385,9 +1537,9 @@ describe NotificationService, services: true do # Create custom notifications # When resource is nil it means global notification - def update_custom_notification(event, user, resource = nil) + def update_custom_notification(event, user, resource: nil, value: true) setting = user.notification_settings_for(resource) - setting.events[event] = true + setting.events[event] = value setting.save end diff --git a/spec/services/preview_markdown_service_spec.rb b/spec/services/preview_markdown_service_spec.rb new file mode 100644 index 00000000000..b2fb5c91313 --- /dev/null +++ b/spec/services/preview_markdown_service_spec.rb @@ -0,0 +1,67 @@ +require 'spec_helper' + +describe PreviewMarkdownService do + let(:user) { create(:user) } + let(:project) { create(:empty_project) } + + before do + project.add_developer(user) + end + + describe 'user references' do + let(:params) { { text: "Take a look #{user.to_reference}" } } + let(:service) { described_class.new(project, user, params) } + + it 'returns users referenced in text' do + result = service.execute + + expect(result[:users]).to eq [user.username] + end + end + + context 'new note with slash commands' do + let(:issue) { create(:issue, project: project) } + let(:params) do + { + text: "Please do it\n/assign #{user.to_reference}", + slash_commands_target_type: 'Issue', + slash_commands_target_id: issue.id + } + end + let(:service) { described_class.new(project, user, params) } + + it 'removes slash commands from text' do + result = service.execute + + expect(result[:text]).to eq 'Please do it' + end + + it 'explains slash commands effect' do + result = service.execute + + expect(result[:commands]).to eq "Assigns #{user.to_reference}." + end + end + + context 'merge request description' do + let(:params) do + { + text: "My work\n/estimate 2y", + slash_commands_target_type: 'MergeRequest' + } + end + let(:service) { described_class.new(project, user, params) } + + it 'removes slash commands from text' do + result = service.execute + + expect(result[:text]).to eq 'My work' + end + + it 'explains slash commands effect' do + result = service.execute + + expect(result[:commands]).to eq 'Sets time estimate to 2y.' 
+ end + end +end diff --git a/spec/services/projects/autocomplete_service_spec.rb b/spec/services/projects/autocomplete_service_spec.rb index 7916c2d957c..c198c3eedfc 100644 --- a/spec/services/projects/autocomplete_service_spec.rb +++ b/spec/services/projects/autocomplete_service_spec.rb @@ -11,7 +11,7 @@ describe Projects::AutocompleteService, services: true do let(:project) { create(:empty_project, :public) } let!(:issue) { create(:issue, project: project, title: 'Issue 1') } let!(:security_issue_1) { create(:issue, :confidential, project: project, title: 'Security issue 1', author: author) } - let!(:security_issue_2) { create(:issue, :confidential, title: 'Security issue 2', project: project, assignee: assignee) } + let!(:security_issue_2) { create(:issue, :confidential, title: 'Security issue 2', project: project, assignees: [assignee]) } it 'does not list project confidential issues for guests' do autocomplete = described_class.new(project, nil) diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb index 62f21049b0b..033e6ecd18c 100644 --- a/spec/services/projects/create_service_spec.rb +++ b/spec/services/projects/create_service_spec.rb @@ -27,6 +27,22 @@ describe Projects::CreateService, '#execute', services: true do end end + context "admin creates project with other user's namespace_id" do + it 'sets the correct permissions' do + admin = create(:admin) + opts = { + name: 'GitLab', + namespace_id: user.namespace.id + } + project = create_project(admin, opts) + + expect(project).to be_persisted + expect(project.owner).to eq(user) + expect(project.team.masters).to include(user, admin) + expect(project.namespace).to eq(user.namespace) + end + end + context 'group namespace' do let(:group) do create(:group).tap do |group| @@ -144,6 +160,20 @@ describe Projects::CreateService, '#execute', services: true do end end + context 'when a bad service template is created' do + before do + create(:service, type: 'DroneCiService', project: nil, template: true, active: true) + end + + it 'reports an error in the imported project' do + opts[:import_url] = 'http://www.gitlab.com/gitlab-org/gitlab-ce' + project = create_project(user, opts) + + expect(project.errors.full_messages_for(:base).first).to match /Unable to save project. 
Error: Unable to save DroneCiService/ + expect(project.services.count).to eq 0 + end + end + def create_project(user, opts) Projects::CreateService.new(user, opts).execute end diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb index b1e10f4562e..4b8589b2736 100644 --- a/spec/services/projects/destroy_service_spec.rb +++ b/spec/services/projects/destroy_service_spec.rb @@ -7,6 +7,11 @@ describe Projects::DestroyService, services: true do let!(:remove_path) { path.sub(/\.git\Z/, "+#{project.id}+deleted.git") } let!(:async) { false } # execute or async_execute + before do + stub_container_registry_config(enabled: true) + stub_container_registry_tags(repository: :any, tags: []) + end + shared_examples 'deleting the project' do it 'deletes the project' do expect(Project.unscoped.all).not_to include(project) @@ -89,30 +94,64 @@ describe Projects::DestroyService, services: true do it_behaves_like 'deleting the project with pipeline and build' end - context 'container registry' do - before do - stub_container_registry_config(enabled: true) - stub_container_registry_tags('tag') - end + describe 'container registry' do + context 'when there are regular container repositories' do + let(:container_repository) { create(:container_repository) } + + before do + stub_container_registry_tags(repository: project.full_path + '/image', + tags: ['tag']) + project.container_repositories << container_repository + end + + context 'when image repository deletion succeeds' do + it 'removes tags' do + expect_any_instance_of(ContainerRepository) + .to receive(:delete_tags!).and_return(true) + + destroy_project(project, user) + end + end - context 'tags deletion succeeds' do - it do - expect_any_instance_of(ContainerRegistry::Tag).to receive(:delete).and_return(true) + context 'when image repository deletion fails' do + it 'raises an exception' do + expect_any_instance_of(ContainerRepository) + .to receive(:delete_tags!).and_return(false) - destroy_project(project, user, {}) + expect{ destroy_project(project, user) } + .to raise_error(ActiveRecord::RecordNotDestroyed) + end end end - context 'tags deletion fails' do - before { expect_any_instance_of(ContainerRegistry::Tag).to receive(:delete).and_return(false) } + context 'when there are tags for legacy root repository' do + before do + stub_container_registry_tags(repository: project.full_path, + tags: ['tag']) + end + + context 'when image repository tags deletion succeeds' do + it 'removes tags' do + expect_any_instance_of(ContainerRepository) + .to receive(:delete_tags!).and_return(true) - subject { destroy_project(project, user, {}) } + destroy_project(project, user) + end + end + + context 'when image repository tags deletion fails' do + it 'raises an exception' do + expect_any_instance_of(ContainerRepository) + .to receive(:delete_tags!).and_return(false) - it { expect{subject}.to raise_error(Projects::DestroyService::DestroyError) } + expect { destroy_project(project, user) } + .to raise_error(Projects::DestroyService::DestroyError) + end + end end end - def destroy_project(project, user, params) + def destroy_project(project, user, params = {}) if async Projects::DestroyService.new(project, user, params).async_execute else diff --git a/spec/services/projects/enable_deploy_key_service_spec.rb b/spec/services/projects/enable_deploy_key_service_spec.rb index a37510cf159..78626fbad4b 100644 --- a/spec/services/projects/enable_deploy_key_service_spec.rb +++ 
b/spec/services/projects/enable_deploy_key_service_spec.rb @@ -21,6 +21,16 @@ describe Projects::EnableDeployKeyService, services: true do end end + context 'add the same key twice' do + before do + project.deploy_keys << deploy_key + end + + it 'returns existing key' do + expect(service.execute).to eq(deploy_key) + end + end + def service Projects::EnableDeployKeyService.new(project, user, params) end diff --git a/spec/services/projects/housekeeping_service_spec.rb b/spec/services/projects/housekeeping_service_spec.rb index eaf63457b32..fff12beed71 100644 --- a/spec/services/projects/housekeeping_service_spec.rb +++ b/spec/services/projects/housekeeping_service_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe Projects::HousekeepingService do - subject { Projects::HousekeepingService.new(project) } + subject { described_class.new(project) } let(:project) { create(:project, :repository) } before do diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb index e5917bb0b7a..852a4ac852f 100644 --- a/spec/services/projects/import_service_spec.rb +++ b/spec/services/projects/import_service_spec.rb @@ -26,30 +26,68 @@ describe Projects::ImportService, services: true do result = subject.execute expect(result[:status]).to eq :error - expect(result[:message]).to eq 'The repository could not be created.' + expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.path_with_namespace} - The repository could not be created." end end context 'with known url' do before do project.import_url = 'https://github.com/vim/vim.git' + project.import_type = 'github' end - it 'succeeds if repository import is successfully' do - expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).with(project.repository_storage_path, project.path_with_namespace, project.import_url).and_return(true) + context 'with a Github repository' do + it 'succeeds if repository import is successfully' do + expect_any_instance_of(Repository).to receive(:fetch_remote).and_return(true) + expect_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_return(true) - result = subject.execute + result = subject.execute - expect(result[:status]).to eq :success + expect(result[:status]).to eq :success + end + + it 'fails if repository import fails' do + expect_any_instance_of(Repository).to receive(:fetch_remote).and_raise(Gitlab::Shell::Error.new('Failed to import the repository')) + + result = subject.execute + + expect(result[:status]).to eq :error + expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.path_with_namespace} - Failed to import the repository" + end + + it 'does not remove the GitHub remote' do + expect_any_instance_of(Repository).to receive(:fetch_remote).and_return(true) + expect_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_return(true) + + subject.execute + + expect(project.repository.raw_repository.remote_names).to include('github') + end end - it 'fails if repository import fails' do - expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).with(project.repository_storage_path, project.path_with_namespace, project.import_url).and_raise(Gitlab::Shell::Error.new('Failed to import the repository')) + context 'with a non Github repository' do + before do + project.import_url = 'https://bitbucket.org/vim/vim.git' + project.import_type = 'bitbucket' + end - result = subject.execute + it 'succeeds if repository import is 
successfully' do + expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_return(true) + expect_any_instance_of(Gitlab::BitbucketImport::Importer).to receive(:execute).and_return(true) - expect(result[:status]).to eq :error - expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.path_with_namespace} - Failed to import the repository" + result = subject.execute + + expect(result[:status]).to eq :success + end + + it 'fails if repository import fails' do + expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_raise(Gitlab::Shell::Error.new('Failed to import the repository')) + + result = subject.execute + + expect(result[:status]).to eq :error + expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.path_with_namespace} - Failed to import the repository" + end end end @@ -64,8 +102,8 @@ describe Projects::ImportService, services: true do end it 'succeeds if importer succeeds' do - expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).with(project.repository_storage_path, project.path_with_namespace, project.import_url).and_return(true) - expect_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_return(true) + allow_any_instance_of(Repository).to receive(:fetch_remote).and_return(true) + allow_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_return(true) result = subject.execute @@ -73,48 +111,42 @@ describe Projects::ImportService, services: true do end it 'flushes various caches' do - expect_any_instance_of(Gitlab::Shell).to receive(:import_repository). - with(project.repository_storage_path, project.path_with_namespace, project.import_url). + allow_any_instance_of(Repository).to receive(:fetch_remote). and_return(true) - expect_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute). + allow_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute). and_return(true) - expect_any_instance_of(Repository).to receive(:expire_emptiness_caches). - and_call_original - - expect_any_instance_of(Repository).to receive(:expire_exists_cache). - and_call_original + expect_any_instance_of(Repository).to receive(:expire_content_cache) subject.execute end it 'fails if importer fails' do - expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).with(project.repository_storage_path, project.path_with_namespace, project.import_url).and_return(true) - expect_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_return(false) + allow_any_instance_of(Repository).to receive(:fetch_remote).and_return(true) + allow_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_return(false) result = subject.execute expect(result[:status]).to eq :error - expect(result[:message]).to eq 'The remote data could not be imported.' + expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.path_with_namespace} - The remote data could not be imported." 
end it 'fails if importer raise an error' do - expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).with(project.repository_storage_path, project.path_with_namespace, project.import_url).and_return(true) - expect_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_raise(Projects::ImportService::Error.new('Github: failed to connect API')) + allow_any_instance_of(Gitlab::Shell).to receive(:fetch_remote).and_return(true) + allow_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_raise(Projects::ImportService::Error.new('Github: failed to connect API')) result = subject.execute expect(result[:status]).to eq :error - expect(result[:message]).to eq 'Github: failed to connect API' + expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.path_with_namespace} - Github: failed to connect API" end - it 'expires existence cache after error' do + it 'expires content cache after error' do allow_any_instance_of(Project).to receive(:repository_exists?).and_return(false, true) - expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).with(project.repository_storage_path, project.path_with_namespace, project.import_url).and_raise(Gitlab::Shell::Error.new('Failed to import the repository')) - expect_any_instance_of(Repository).to receive(:expire_emptiness_caches).and_call_original - expect_any_instance_of(Repository).to receive(:expire_exists_cache).and_call_original + expect_any_instance_of(Gitlab::Shell).to receive(:fetch_remote).and_raise(Gitlab::Shell::Error.new('Failed to import the repository')) + expect_any_instance_of(Repository).to receive(:expire_content_cache) subject.execute end diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb index 063b3bd76eb..0657b7e93fe 100644 --- a/spec/services/projects/participants_service_spec.rb +++ b/spec/services/projects/participants_service_spec.rb @@ -6,7 +6,6 @@ describe Projects::ParticipantsService, services: true do let(:project) { create(:empty_project, :public) } let(:group) { create(:group, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png')) } let(:user) { create(:user) } - let(:base_url) { Settings.send(:build_base_gitlab_url) } let!(:group_member) { create(:group_member, group: group, user: user) } it 'should return an url for the avatar' do @@ -14,7 +13,7 @@ describe Projects::ParticipantsService, services: true do groups = participants.groups expect(groups.size).to eq 1 - expect(groups.first[:avatar_url]).to eq "#{base_url}/uploads/group/avatar/#{group.id}/dk.png" + expect(groups.first[:avatar_url]).to eq("/uploads/group/avatar/#{group.id}/dk.png") end it 'should return an url for the avatar with relative url' do @@ -25,7 +24,7 @@ describe Projects::ParticipantsService, services: true do groups = participants.groups expect(groups.size).to eq 1 - expect(groups.first[:avatar_url]).to eq "#{base_url}/gitlab/uploads/group/avatar/#{group.id}/dk.png" + expect(groups.first[:avatar_url]).to eq("/gitlab/uploads/group/avatar/#{group.id}/dk.png") end end end diff --git a/spec/services/projects/propagate_service_template_spec.rb b/spec/services/projects/propagate_service_template_spec.rb new file mode 100644 index 00000000000..90eff3bbc1e --- /dev/null +++ b/spec/services/projects/propagate_service_template_spec.rb @@ -0,0 +1,103 @@ +require 'spec_helper' + +describe Projects::PropagateServiceTemplate, services: true do + describe '.propagate' do + let!(:service_template) 
do + PushoverService.create( + template: true, + active: true, + properties: { + device: 'MyDevice', + sound: 'mic', + priority: 4, + user_key: 'asdf', + api_key: '123456789' + }) + end + + let!(:project) { create(:empty_project) } + + it 'creates services for projects' do + expect(project.pushover_service).to be_nil + + described_class.propagate(service_template) + + expect(project.reload.pushover_service).to be_present + end + + it 'creates services for a project that has another service' do + BambooService.create( + template: true, + active: true, + project: project, + properties: { + bamboo_url: 'http://gitlab.com', + username: 'mic', + password: "password", + build_key: 'build' + } + ) + + expect(project.pushover_service).to be_nil + + described_class.propagate(service_template) + + expect(project.reload.pushover_service).to be_present + end + + it 'does not create the service if it exists already' do + other_service = BambooService.create( + template: true, + active: true, + properties: { + bamboo_url: 'http://gitlab.com', + username: 'mic', + password: "password", + build_key: 'build' + } + ) + + Service.build_from_template(project.id, service_template).save! + Service.build_from_template(project.id, other_service).save! + + expect { described_class.propagate(service_template) }. + not_to change { Service.count } + end + + it 'creates the service containing the template attributes' do + described_class.propagate(service_template) + + expect(project.pushover_service.properties).to eq(service_template.properties) + end + + describe 'bulk update' do + it 'creates services for all projects' do + project_total = 5 + stub_const 'Projects::PropagateServiceTemplate::BATCH_SIZE', 3 + + project_total.times { create(:empty_project) } + + expect { described_class.propagate(service_template) }. + to change { Service.count }.by(project_total + 1) + end + end + + describe 'external tracker' do + it 'updates the project external tracker' do + service_template.update!(category: 'issue_tracker', default: false) + + expect { described_class.propagate(service_template) }. + to change { project.reload.has_external_issue_tracker }.to(true) + end + end + + describe 'external wiki' do + it 'updates the project external tracker' do + service_template.update!(type: 'ExternalWikiService') + + expect { described_class.propagate(service_template) }. 
+ to change { project.reload.has_external_wiki }.to(true) + end + end + end +end diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb index f8187fefc14..29ccce59c53 100644 --- a/spec/services/projects/transfer_service_spec.rb +++ b/spec/services/projects/transfer_service_spec.rb @@ -29,9 +29,12 @@ describe Projects::TransferService, services: true do end context 'disallow transfering of project with tags' do + let(:container_repository) { create(:container_repository) } + before do stub_container_registry_config(enabled: true) - stub_container_registry_tags('tag') + stub_container_registry_tags(repository: :any, tags: ['tag']) + project.container_repositories << container_repository end subject { transfer_project(project, user, group) } diff --git a/spec/services/protected_branches/update_service_spec.rb b/spec/services/protected_branches/update_service_spec.rb new file mode 100644 index 00000000000..62bdd49a4d7 --- /dev/null +++ b/spec/services/protected_branches/update_service_spec.rb @@ -0,0 +1,26 @@ +require 'spec_helper' + +describe ProtectedBranches::UpdateService, services: true do + let(:protected_branch) { create(:protected_branch) } + let(:project) { protected_branch.project } + let(:user) { project.owner } + let(:params) { { name: 'new protected branch name' } } + + describe '#execute' do + subject(:service) { described_class.new(project, user, params) } + + it 'updates a protected branch' do + result = service.execute(protected_branch) + + expect(result.reload.name).to eq(params[:name]) + end + + context 'without admin_project permissions' do + let(:user) { create(:user) } + + it "raises error" do + expect{ service.execute(protected_branch) }.to raise_error(Gitlab::Access::AccessDeniedError) + end + end + end +end diff --git a/spec/services/protected_tags/create_service_spec.rb b/spec/services/protected_tags/create_service_spec.rb new file mode 100644 index 00000000000..d91a58e8de5 --- /dev/null +++ b/spec/services/protected_tags/create_service_spec.rb @@ -0,0 +1,21 @@ +require 'spec_helper' + +describe ProtectedTags::CreateService, services: true do + let(:project) { create(:empty_project) } + let(:user) { project.owner } + let(:params) do + { + name: 'master', + create_access_levels_attributes: [{ access_level: Gitlab::Access::MASTER }] + } + end + + describe '#execute' do + subject(:service) { described_class.new(project, user, params) } + + it 'creates a new protected tag' do + expect { service.execute }.to change(ProtectedTag, :count).by(1) + expect(project.protected_tags.last.create_access_levels.map(&:access_level)).to eq([Gitlab::Access::MASTER]) + end + end +end diff --git a/spec/services/protected_tags/update_service_spec.rb b/spec/services/protected_tags/update_service_spec.rb new file mode 100644 index 00000000000..e78fde4c48d --- /dev/null +++ b/spec/services/protected_tags/update_service_spec.rb @@ -0,0 +1,26 @@ +require 'spec_helper' + +describe ProtectedTags::UpdateService, services: true do + let(:protected_tag) { create(:protected_tag) } + let(:project) { protected_tag.project } + let(:user) { project.owner } + let(:params) { { name: 'new protected tag name' } } + + describe '#execute' do + subject(:service) { described_class.new(project, user, params) } + + it 'updates a protected tag' do + result = service.execute(protected_tag) + + expect(result.reload.name).to eq(params[:name]) + end + + context 'without admin_project permissions' do + let(:user) { create(:user) } + + it "raises error" do + expect{ 
service.execute(protected_tag) }.to raise_error(Gitlab::Access::AccessDeniedError) + end + end + end +end diff --git a/spec/services/search/global_service_spec.rb b/spec/services/search/global_service_spec.rb new file mode 100644 index 00000000000..cbf4f56213d --- /dev/null +++ b/spec/services/search/global_service_spec.rb @@ -0,0 +1,45 @@ +require 'spec_helper' + +describe Search::GlobalService, services: true do + let(:user) { create(:user) } + let(:internal_user) { create(:user) } + + let!(:found_project) { create(:empty_project, :private, name: 'searchable_project') } + let!(:unfound_project) { create(:empty_project, :private, name: 'unfound_project') } + let!(:internal_project) { create(:empty_project, :internal, name: 'searchable_internal_project') } + let!(:public_project) { create(:empty_project, :public, name: 'searchable_public_project') } + + before do + found_project.add_master(user) + end + + describe '#execute' do + context 'unauthenticated' do + it 'returns public projects only' do + results = Search::GlobalService.new(nil, search: "searchable").execute + + expect(results.objects('projects')).to match_array [public_project] + end + end + + context 'authenticated' do + it 'returns public, internal and private projects' do + results = Search::GlobalService.new(user, search: "searchable").execute + + expect(results.objects('projects')).to match_array [public_project, found_project, internal_project] + end + + it 'returns only public & internal projects' do + results = Search::GlobalService.new(internal_user, search: "searchable").execute + + expect(results.objects('projects')).to match_array [internal_project, public_project] + end + + it 'namespace name is searchable' do + results = Search::GlobalService.new(user, search: found_project.namespace.path).execute + + expect(results.objects('projects')).to match_array [found_project] + end + end + end +end diff --git a/spec/services/search/group_service_spec.rb b/spec/services/search/group_service_spec.rb new file mode 100644 index 00000000000..38f264f6e7b --- /dev/null +++ b/spec/services/search/group_service_spec.rb @@ -0,0 +1,40 @@ +require 'spec_helper' + +describe Search::GroupService, services: true do + shared_examples_for 'group search' do + context 'finding projects by name' do + let(:user) { create(:user) } + let(:term) { "Project Name" } + let(:nested_group) { create(:group, :nested) } + + # These projects shouldn't be found + let!(:outside_project) { create(:empty_project, :public, name: "Outside #{term}") } + let!(:private_project) { create(:empty_project, :private, namespace: nested_group, name: "Private #{term}" )} + let!(:other_project) { create(:empty_project, :public, namespace: nested_group, name: term.reverse) } + + # These projects should be found + let!(:project1) { create(:empty_project, :internal, namespace: nested_group, name: "Inner #{term} 1") } + let!(:project2) { create(:empty_project, :internal, namespace: nested_group, name: "Inner #{term} 2") } + let!(:project3) { create(:empty_project, :internal, namespace: nested_group.parent, name: "Outer #{term}") } + + let(:results) { Search::GroupService.new(user, search_group, search: term).execute } + subject { results.objects('projects') } + + context 'in parent group' do + let(:search_group) { nested_group.parent } + + it { is_expected.to match_array([project1, project2, project3]) } + end + + context 'in subgroup' do + let(:search_group) { nested_group } + + it { is_expected.to match_array([project1, project2]) } + end + end + end + + describe 'basic 
search' do + include_examples 'group search' + end +end diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb index 6ef5fa008aa..2112f1cf9ea 100644 --- a/spec/services/search_service_spec.rb +++ b/spec/services/search_service_spec.rb @@ -1,65 +1,286 @@ require 'spec_helper' -describe 'Search::GlobalService', services: true do +describe SearchService, services: true do let(:user) { create(:user) } - let(:public_user) { create(:user) } - let(:internal_user) { create(:user) } - let!(:found_project) { create(:empty_project, :private, name: 'searchable_project') } - let!(:unfound_project) { create(:empty_project, :private, name: 'unfound_project') } - let!(:internal_project) { create(:empty_project, :internal, name: 'searchable_internal_project') } - let!(:public_project) { create(:empty_project, :public, name: 'searchable_public_project') } + let(:accessible_group) { create(:group, :private) } + let(:inaccessible_group) { create(:group, :private) } + let!(:group_member) { create(:group_member, group: accessible_group, user: user) } + + let!(:accessible_project) { create(:empty_project, :private, name: 'accessible_project') } + let!(:inaccessible_project) { create(:empty_project, :private, name: 'inaccessible_project') } + let(:note) { create(:note_on_issue, project: accessible_project) } + + let(:snippet) { create(:snippet, author: user) } + let(:group_project) { create(:empty_project, group: accessible_group, name: 'group_project') } + let(:public_project) { create(:empty_project, :public, name: 'public_project') } before do - found_project.team << [user, :master] + accessible_project.add_master(user) + end + + describe '#project' do + context 'when the project is accessible' do + it 'returns the project' do + project = SearchService.new(user, project_id: accessible_project.id).project + + expect(project).to eq accessible_project + end + end + + context 'when the project is not accessible' do + it 'returns nil' do + project = SearchService.new(user, project_id: inaccessible_project.id).project + + expect(project).to be_nil + end + end + + context 'when there is no project_id' do + it 'returns nil' do + project = SearchService.new(user).project + + expect(project).to be_nil + end + end end - describe '#execute' do - context 'unauthenticated' do - it 'returns public projects only' do - context = Search::GlobalService.new(nil, search: "searchable") - results = context.execute - expect(results.objects('projects')).to match_array [public_project] + describe '#group' do + context 'when the group is accessible' do + it 'returns the group' do + group = SearchService.new(user, group_id: accessible_group.id).group + + expect(group).to eq accessible_group end end - context 'authenticated' do - it 'returns public, internal and private projects' do - context = Search::GlobalService.new(user, search: "searchable") - results = context.execute - expect(results.objects('projects')).to match_array [public_project, found_project, internal_project] + context 'when the group is not accessible' do + it 'returns nil' do + group = SearchService.new(user, group_id: inaccessible_group.id).group + + expect(group).to be_nil end + end + + context 'when there is no group_id' do + it 'returns nil' do + group = SearchService.new(user).group - it 'returns only public & internal projects' do - context = Search::GlobalService.new(internal_user, search: "searchable") - results = context.execute - expect(results.objects('projects')).to match_array [internal_project, public_project] + 
expect(group).to be_nil end + end + end + + describe '#show_snippets?' do + context 'when :snippets is \'true\'' do + it 'returns true' do + show_snippets = SearchService.new(user, snippets: 'true').show_snippets? - it 'namespace name is searchable' do - context = Search::GlobalService.new(user, search: found_project.namespace.path) - results = context.execute - expect(results.objects('projects')).to match_array [found_project] + expect(show_snippets).to be_truthy end + end - context 'nested group' do - let!(:nested_group) { create(:group, :nested) } - let!(:project) { create(:empty_project, namespace: nested_group) } + context 'when :snippets is not \'true\'' do + it 'returns false' do + show_snippets = SearchService.new(user, snippets: 'tru').show_snippets? + + expect(show_snippets).to be_falsey + end + end - before { project.add_master(user) } + context 'when :snippets is missing' do + it 'returns false' do + show_snippets = SearchService.new(user).show_snippets? - it 'returns result from nested group' do - context = Search::GlobalService.new(user, search: project.path) - results = context.execute - expect(results.objects('projects')).to match_array [project] + expect(show_snippets).to be_falsey + end + end + end + + describe '#scope' do + context 'with accessible project_id' do + context 'and allowed scope' do + it 'returns the specified scope' do + scope = SearchService.new(user, project_id: accessible_project.id, scope: 'notes').scope + + expect(scope).to eq 'notes' end + end + + context 'and disallowed scope' do + it 'returns the default scope' do + scope = SearchService.new(user, project_id: accessible_project.id, scope: 'projects').scope - it 'returns result from descendants when search inside group' do - context = Search::GlobalService.new(user, search: project.path, group_id: nested_group.parent) - results = context.execute - expect(results.objects('projects')).to match_array [project] + expect(scope).to eq 'blobs' end end + + context 'and no scope' do + it 'returns the default scope' do + scope = SearchService.new(user, project_id: accessible_project.id).scope + + expect(scope).to eq 'blobs' + end + end + end + + context 'with \'true\' snippets' do + context 'and allowed scope' do + it 'returns the specified scope' do + scope = SearchService.new(user, snippets: 'true', scope: 'snippet_titles').scope + + expect(scope).to eq 'snippet_titles' + end + end + + context 'and disallowed scope' do + it 'returns the default scope' do + scope = SearchService.new(user, snippets: 'true', scope: 'projects').scope + + expect(scope).to eq 'snippet_blobs' + end + end + + context 'and no scope' do + it 'returns the default scope' do + scope = SearchService.new(user, snippets: 'true').scope + + expect(scope).to eq 'snippet_blobs' + end + end + end + + context 'with no project_id, no snippets' do + context 'and allowed scope' do + it 'returns the specified scope' do + scope = SearchService.new(user, scope: 'issues').scope + + expect(scope).to eq 'issues' + end + end + + context 'and disallowed scope' do + it 'returns the default scope' do + scope = SearchService.new(user, scope: 'blobs').scope + + expect(scope).to eq 'projects' + end + end + + context 'and no scope' do + it 'returns the default scope' do + scope = SearchService.new(user).scope + + expect(scope).to eq 'projects' + end + end + end + end + + describe '#search_results' do + context 'with accessible project_id' do + it 'returns an instance of Gitlab::ProjectSearchResults' do + search_results = SearchService.new( + user, + project_id: 
accessible_project.id, + scope: 'notes', + search: note.note).search_results + + expect(search_results).to be_a Gitlab::ProjectSearchResults + end + end + + context 'with accessible project_id and \'true\' snippets' do + it 'returns an instance of Gitlab::ProjectSearchResults' do + search_results = SearchService.new( + user, + project_id: accessible_project.id, + snippets: 'true', + scope: 'notes', + search: note.note).search_results + + expect(search_results).to be_a Gitlab::ProjectSearchResults + end + end + + context 'with \'true\' snippets' do + it 'returns an instance of Gitlab::SnippetSearchResults' do + search_results = SearchService.new( + user, + snippets: 'true', + search: snippet.content).search_results + + expect(search_results).to be_a Gitlab::SnippetSearchResults + end + end + + context 'with no project_id and no snippets' do + it 'returns an instance of Gitlab::SearchResults' do + search_results = SearchService.new( + user, + search: public_project.name).search_results + + expect(search_results).to be_a Gitlab::SearchResults + end + end + end + + describe '#search_objects' do + context 'with accessible project_id' do + it 'returns objects in the project' do + search_objects = SearchService.new( + user, + project_id: accessible_project.id, + scope: 'notes', + search: note.note).search_objects + + expect(search_objects.first).to eq note + end + end + + context 'with accessible project_id and \'true\' snippets' do + it 'returns objects in the project' do + search_objects = SearchService.new( + user, + project_id: accessible_project.id, + snippets: 'true', + scope: 'notes', + search: note.note).search_objects + + expect(search_objects.first).to eq note + end + end + + context 'with \'true\' snippets' do + it 'returns objects in snippets' do + search_objects = SearchService.new( + user, + snippets: 'true', + search: snippet.content).search_objects + + expect(search_objects.first).to eq snippet + end + end + + context 'with accessible group_id' do + it 'returns objects in the group' do + search_objects = SearchService.new( + user, + group_id: accessible_group.id, + search: group_project.name).search_objects + + expect(search_objects.first).to eq group_project + end + end + + context 'with no project_id, group_id or snippets' do + it 'returns objects in global' do + search_objects = SearchService.new( + user, + search: public_project.name).search_objects + + expect(search_objects.first).to eq public_project + end end end end diff --git a/spec/services/slash_commands/interpret_service_spec.rb b/spec/services/slash_commands/interpret_service_spec.rb index a63281f0eab..e5e400ee281 100644 --- a/spec/services/slash_commands/interpret_service_spec.rb +++ b/spec/services/slash_commands/interpret_service_spec.rb @@ -3,6 +3,7 @@ require 'spec_helper' describe SlashCommands::InterpretService, services: true do let(:project) { create(:empty_project, :public) } let(:developer) { create(:user) } + let(:developer2) { create(:user) } let(:issue) { create(:issue, project: project) } let(:milestone) { create(:milestone, project: project, title: '9.10') } let(:inprogress) { create(:label, project: project, title: 'In Progress') } @@ -42,23 +43,6 @@ describe SlashCommands::InterpretService, services: true do end end - shared_examples 'assign command' do - it 'fetches assignee and populates assignee_id if content contains /assign' do - _, updates = service.execute(content, issuable) - - expect(updates).to eq(assignee_id: developer.id) - end - end - - shared_examples 'unassign command' do - it 
'populates assignee_id: nil if content contains /unassign' do - issuable.update(assignee_id: developer.id) - _, updates = service.execute(content, issuable) - - expect(updates).to eq(assignee_id: nil) - end - end - shared_examples 'milestone command' do it 'fetches milestone and populates milestone_id if content contains /milestone' do milestone # populate the milestone @@ -70,7 +54,7 @@ describe SlashCommands::InterpretService, services: true do shared_examples 'remove_milestone command' do it 'populates milestone_id: nil if content contains /remove_milestone' do - issuable.update(milestone_id: milestone.id) + issuable.update!(milestone_id: milestone.id) _, updates = service.execute(content, issuable) expect(updates).to eq(milestone_id: nil) @@ -108,7 +92,7 @@ describe SlashCommands::InterpretService, services: true do shared_examples 'unlabel command' do it 'fetches label ids and populates remove_label_ids if content contains /unlabel' do - issuable.update(label_ids: [inprogress.id]) # populate the label + issuable.update!(label_ids: [inprogress.id]) # populate the label _, updates = service.execute(content, issuable) expect(updates).to eq(remove_label_ids: [inprogress.id]) @@ -117,7 +101,7 @@ describe SlashCommands::InterpretService, services: true do shared_examples 'multiple unlabel command' do it 'fetches label ids and populates remove_label_ids if content contains mutiple /unlabel' do - issuable.update(label_ids: [inprogress.id, bug.id]) # populate the label + issuable.update!(label_ids: [inprogress.id, bug.id]) # populate the label _, updates = service.execute(content, issuable) expect(updates).to eq(remove_label_ids: [inprogress.id, bug.id]) @@ -126,7 +110,7 @@ describe SlashCommands::InterpretService, services: true do shared_examples 'unlabel command with no argument' do it 'populates label_ids: [] if content contains /unlabel with no arguments' do - issuable.update(label_ids: [inprogress.id]) # populate the label + issuable.update!(label_ids: [inprogress.id]) # populate the label _, updates = service.execute(content, issuable) expect(updates).to eq(label_ids: []) @@ -135,7 +119,7 @@ describe SlashCommands::InterpretService, services: true do shared_examples 'relabel command' do it 'populates label_ids: [] if content contains /relabel' do - issuable.update(label_ids: [bug.id]) # populate the label + issuable.update!(label_ids: [bug.id]) # populate the label inprogress # populate the label _, updates = service.execute(content, issuable) @@ -187,7 +171,7 @@ describe SlashCommands::InterpretService, services: true do shared_examples 'remove_due_date command' do it 'populates due_date: nil if content contains /remove_due_date' do - issuable.update(due_date: Date.today) + issuable.update!(due_date: Date.today) _, updates = service.execute(content, issuable) expect(updates).to eq(due_date: nil) @@ -204,7 +188,7 @@ describe SlashCommands::InterpretService, services: true do shared_examples 'unwip command' do it 'returns wip_event: "unwip" if content contains /wip' do - issuable.update(title: issuable.wip_title) + issuable.update!(title: issuable.wip_title) _, updates = service.execute(content, issuable) expect(updates).to eq(wip_event: 'unwip') @@ -371,14 +355,46 @@ describe SlashCommands::InterpretService, services: true do let(:issuable) { issue } end - it_behaves_like 'assign command' do + context 'assign command' do let(:content) { "/assign @#{developer.username}" } - let(:issuable) { issue } + + context 'Issue' do + it 'fetches assignee and populates assignee_id if content 
contains /assign' do + _, updates = service.execute(content, issue) + + expect(updates).to eq(assignee_ids: [developer.id]) + end + end + + context 'Merge Request' do + it 'fetches assignee and populates assignee_id if content contains /assign' do + _, updates = service.execute(content, merge_request) + + expect(updates).to eq(assignee_id: developer.id) + end + end end - it_behaves_like 'assign command' do - let(:content) { "/assign @#{developer.username}" } - let(:issuable) { merge_request } + context 'assign command with multiple assignees' do + let(:content) { "/assign @#{developer.username} @#{developer2.username}" } + + before{ project.team << [developer2, :developer] } + + context 'Issue' do + it 'fetches assignee and populates assignee_id if content contains /assign' do + _, updates = service.execute(content, issue) + + expect(updates[:assignee_ids]).to match_array([developer.id, developer2.id]) + end + end + + context 'Merge Request' do + it 'fetches assignee and populates assignee_id if content contains /assign' do + _, updates = service.execute(content, merge_request) + + expect(updates).to eq(assignee_id: developer.id) + end + end end it_behaves_like 'empty command' do @@ -391,14 +407,26 @@ describe SlashCommands::InterpretService, services: true do let(:issuable) { issue } end - it_behaves_like 'unassign command' do + context 'unassign command' do let(:content) { '/unassign' } - let(:issuable) { issue } - end - it_behaves_like 'unassign command' do - let(:content) { '/unassign' } - let(:issuable) { merge_request } + context 'Issue' do + it 'populates assignee_ids: [] if content contains /unassign' do + issue.update(assignee_ids: [developer.id]) + _, updates = service.execute(content, issue) + + expect(updates).to eq(assignee_ids: []) + end + end + + context 'Merge Request' do + it 'populates assignee_id: nil if content contains /unassign' do + merge_request.update(assignee_id: developer.id) + _, updates = service.execute(content, merge_request) + + expect(updates).to eq(assignee_id: nil) + end + end end it_behaves_like 'milestone command' do @@ -727,5 +755,282 @@ describe SlashCommands::InterpretService, services: true do end end end + + context '/board_move command' do + let(:todo) { create(:label, project: project, title: 'To Do') } + let(:inreview) { create(:label, project: project, title: 'In Review') } + let(:content) { %{/board_move ~"#{inreview.title}"} } + + let!(:board) { create(:board, project: project) } + let!(:todo_list) { create(:list, board: board, label: todo) } + let!(:inreview_list) { create(:list, board: board, label: inreview) } + let!(:inprogress_list) { create(:list, board: board, label: inprogress) } + + it 'populates remove_label_ids for all current board columns' do + issue.update!(label_ids: [todo.id, inprogress.id]) + + _, updates = service.execute(content, issue) + + expect(updates[:remove_label_ids]).to match_array([todo.id, inprogress.id]) + end + + it 'populates add_label_ids with the id of the given label' do + _, updates = service.execute(content, issue) + + expect(updates[:add_label_ids]).to eq([inreview.id]) + end + + it 'does not include the given label id in remove_label_ids' do + issue.update!(label_ids: [todo.id, inreview.id]) + + _, updates = service.execute(content, issue) + + expect(updates[:remove_label_ids]).to match_array([todo.id]) + end + + it 'does not remove label ids that are not lists on the board' do + issue.update!(label_ids: [todo.id, bug.id]) + + _, updates = service.execute(content, issue) + + 
expect(updates[:remove_label_ids]).to match_array([todo.id]) + end + + context 'if the project has multiple boards' do + let(:issuable) { issue } + before { create(:board, project: project) } + it_behaves_like 'empty command' + end + + context 'if the given label does not exist' do + let(:issuable) { issue } + let(:content) { '/board_move ~"Fake Label"' } + it_behaves_like 'empty command' + end + + context 'if multiple labels are given' do + let(:issuable) { issue } + let(:content) { %{/board_move ~"#{inreview.title}" ~"#{todo.title}"} } + it_behaves_like 'empty command' + end + + context 'if the given label is not a list on the board' do + let(:issuable) { issue } + let(:content) { %{/board_move ~"#{bug.title}"} } + it_behaves_like 'empty command' + end + + context 'if issuable is not an Issue' do + let(:issuable) { merge_request } + it_behaves_like 'empty command' + end + end + end + + describe '#explain' do + let(:service) { described_class.new(project, developer) } + let(:merge_request) { create(:merge_request, source_project: project) } + + describe 'close command' do + let(:content) { '/close' } + + it 'includes issuable name' do + _, explanations = service.explain(content, issue) + + expect(explanations).to eq(['Closes this issue.']) + end + end + + describe 'reopen command' do + let(:content) { '/reopen' } + let(:merge_request) { create(:merge_request, :closed, source_project: project) } + + it 'includes issuable name' do + _, explanations = service.explain(content, merge_request) + + expect(explanations).to eq(['Reopens this merge request.']) + end + end + + describe 'title command' do + let(:content) { '/title This is new title' } + + it 'includes new title' do + _, explanations = service.explain(content, issue) + + expect(explanations).to eq(['Changes the title to "This is new title".']) + end + end + + describe 'assign command' do + let(:content) { "/assign @#{developer.username} do it!" 
} + + it 'includes only the user reference' do + _, explanations = service.explain(content, merge_request) + + expect(explanations).to eq(["Assigns @#{developer.username}."]) + end + end + + describe 'unassign command' do + let(:content) { '/unassign' } + let(:issue) { create(:issue, project: project, assignees: [developer]) } + + it 'includes current assignee reference' do + _, explanations = service.explain(content, issue) + + expect(explanations).to eq(["Removes assignee @#{developer.username}."]) + end + end + + describe 'milestone command' do + let(:content) { '/milestone %wrong-milestone' } + let!(:milestone) { create(:milestone, project: project, title: '9.10') } + + it 'is empty when milestone reference is wrong' do + _, explanations = service.explain(content, issue) + + expect(explanations).to eq([]) + end + end + + describe 'remove milestone command' do + let(:content) { '/remove_milestone' } + let(:merge_request) { create(:merge_request, source_project: project, milestone: milestone) } + + it 'includes current milestone name' do + _, explanations = service.explain(content, merge_request) + + expect(explanations).to eq(['Removes %"9.10" milestone.']) + end + end + + describe 'label command' do + let(:content) { '/label ~missing' } + let!(:label) { create(:label, project: project) } + + it 'is empty when there are no correct labels' do + _, explanations = service.explain(content, issue) + + expect(explanations).to eq([]) + end + end + + describe 'unlabel command' do + let(:content) { '/unlabel' } + + it 'says all labels if no parameter provided' do + merge_request.update!(label_ids: [bug.id]) + _, explanations = service.explain(content, merge_request) + + expect(explanations).to eq(['Removes all labels.']) + end + end + + describe 'relabel command' do + let(:content) { '/relabel Bug' } + let!(:bug) { create(:label, project: project, title: 'Bug') } + let(:feature) { create(:label, project: project, title: 'Feature') } + + it 'includes label name' do + issue.update!(label_ids: [feature.id]) + _, explanations = service.explain(content, issue) + + expect(explanations).to eq(["Replaces all labels with ~#{bug.id} label."]) + end + end + + describe 'subscribe command' do + let(:content) { '/subscribe' } + + it 'includes issuable name' do + _, explanations = service.explain(content, issue) + + expect(explanations).to eq(['Subscribes to this issue.']) + end + end + + describe 'unsubscribe command' do + let(:content) { '/unsubscribe' } + + it 'includes issuable name' do + merge_request.subscribe(developer, project) + _, explanations = service.explain(content, merge_request) + + expect(explanations).to eq(['Unsubscribes from this merge request.']) + end + end + + describe 'due command' do + let(:content) { '/due April 1st 2016' } + + it 'includes the date' do + _, explanations = service.explain(content, issue) + + expect(explanations).to eq(['Sets the due date to Apr 1, 2016.']) + end + end + + describe 'wip command' do + let(:content) { '/wip' } + + it 'includes the new status' do + _, explanations = service.explain(content, merge_request) + + expect(explanations).to eq(['Marks this merge request as Work In Progress.']) + end + end + + describe 'award command' do + let(:content) { '/award :confetti_ball: ' } + + it 'includes the emoji' do + _, explanations = service.explain(content, issue) + + expect(explanations).to eq(['Toggles :confetti_ball: emoji award.']) + end + end + + describe 'estimate command' do + let(:content) { '/estimate 79d' } + + it 'includes the formatted duration' do + 
_, explanations = service.explain(content, merge_request) + + expect(explanations).to eq(['Sets time estimate to 3mo 3w 4d.']) + end + end + + describe 'spend command' do + let(:content) { '/spend -120m' } + + it 'includes the formatted duration and proper verb' do + _, explanations = service.explain(content, issue) + + expect(explanations).to eq(['Substracts 2h spent time.']) + end + end + + describe 'target branch command' do + let(:content) { '/target_branch my-feature ' } + + it 'includes the branch name' do + _, explanations = service.explain(content, merge_request) + + expect(explanations).to eq(['Sets target branch to my-feature.']) + end + end + + describe 'board move command' do + let(:content) { '/board_move ~bug' } + let!(:bug) { create(:label, project: project, title: 'bug') } + let!(:board) { create(:board, project: project) } + + it 'includes the label name' do + _, explanations = service.explain(content, issue) + + expect(explanations).to eq(["Moves issue to ~#{bug.id} column in the board."]) + end + end end end diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb index 90cde705b85..7a9cd7553b1 100644 --- a/spec/services/system_note_service_spec.rb +++ b/spec/services/system_note_service_spec.rb @@ -6,6 +6,7 @@ describe SystemNoteService, services: true do let(:project) { create(:empty_project) } let(:author) { create(:user) } let(:noteable) { create(:issue, project: project) } + let(:issue) { noteable } shared_examples_for 'a system note' do let(:expected_noteable) { noteable } @@ -155,6 +156,52 @@ describe SystemNoteService, services: true do end end + describe '.change_issue_assignees' do + subject { described_class.change_issue_assignees(noteable, project, author, [assignee]) } + + let(:assignee) { create(:user) } + let(:assignee1) { create(:user) } + let(:assignee2) { create(:user) } + let(:assignee3) { create(:user) } + + it_behaves_like 'a system note' do + let(:action) { 'assignee' } + end + + def build_note(old_assignees, new_assignees) + issue.assignees = new_assignees + described_class.change_issue_assignees(issue, project, author, old_assignees).note + end + + it 'builds a correct phrase when an assignee is added to a non-assigned issue' do + expect(build_note([], [assignee1])).to eq "assigned to @#{assignee1.username}" + end + + it 'builds a correct phrase when assignee removed' do + expect(build_note([assignee1], [])).to eq 'removed assignee' + end + + it 'builds a correct phrase when assignees changed' do + expect(build_note([assignee1], [assignee2])).to eq \ + "assigned to @#{assignee2.username} and unassigned @#{assignee1.username}" + end + + it 'builds a correct phrase when three assignees removed and one added' do + expect(build_note([assignee, assignee1, assignee2], [assignee3])).to eq \ + "assigned to @#{assignee3.username} and unassigned @#{assignee.username}, @#{assignee1.username}, and @#{assignee2.username}" + end + + it 'builds a correct phrase when one assignee changed from a set' do + expect(build_note([assignee, assignee1], [assignee, assignee2])).to eq \ + "assigned to @#{assignee2.username} and unassigned @#{assignee1.username}" + end + + it 'builds a correct phrase when one assignee removed from a set' do + expect(build_note([assignee, assignee1, assignee2], [assignee, assignee1])).to eq \ + "unassigned @#{assignee2.username}" + end + end + describe '.change_label' do subject { described_class.change_label(noteable, project, author, added, removed) } @@ -221,26 +268,23 @@ describe 
SystemNoteService, services: true do describe '.change_status' do subject { described_class.change_status(noteable, project, author, status, source) } - let(:status) { 'new_status' } - let(:source) { nil } + context 'with status reopened' do + let(:status) { 'reopened' } + let(:source) { nil } - it_behaves_like 'a system note' do - let(:action) { 'status' } + it_behaves_like 'a system note' do + let(:action) { 'opened' } + end end context 'with a source' do + let(:status) { 'opened' } let(:source) { double('commit', gfm_reference: 'commit 123456') } it 'sets the note text' do expect(subject.note).to eq "#{status} via commit 123456" end end - - context 'without a source' do - it 'sets the note text' do - expect(subject.note).to eq status - end - end end describe '.merge_when_pipeline_succeeds' do @@ -295,12 +339,40 @@ describe SystemNoteService, services: true do end end + describe '.change_description' do + subject { described_class.change_description(noteable, project, author) } + + context 'when noteable responds to `description`' do + it_behaves_like 'a system note' do + let(:action) { 'description' } + end + + it 'sets the note text' do + expect(subject.note).to eq('changed the description') + end + end + end + describe '.change_issue_confidentiality' do subject { described_class.change_issue_confidentiality(noteable, project, author) } - context 'when noteable responds to `confidential`' do + context 'issue has been made confidential' do + before do + noteable.update_attribute(:confidential, true) + end + + it_behaves_like 'a system note' do + let(:action) { 'confidential' } + end + + it 'sets the note text' do + expect(subject.note).to eq 'made the issue confidential' + end + end + + context 'issue has been made visible' do it_behaves_like 'a system note' do - let(:action) { 'confidentiality' } + let(:action) { 'visible' } end it 'sets the note text' do @@ -584,7 +656,7 @@ describe SystemNoteService, services: true do end shared_examples 'cross project mentionable' do - include GitlabMarkdownHelper + include MarkupHelper it 'contains cross reference to new noteable' do expect(subject.note).to include cross_project_reference(new_project, new_noteable) @@ -785,7 +857,7 @@ describe SystemNoteService, services: true do end describe '.discussion_continued_in_issue' do - let(:discussion) { Discussion.for_diff_notes([create(:diff_note_on_merge_request)]).first } + let(:discussion) { create(:diff_note_on_merge_request).to_discussion } let(:merge_request) { discussion.noteable } let(:project) { merge_request.source_project } let(:issue) { create(:issue, project: project) } diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb index 89b3b6aad10..175a42a32d9 100644 --- a/spec/services/todo_service_spec.rb +++ b/spec/services/todo_service_spec.rb @@ -25,11 +25,11 @@ describe TodoService, services: true do end describe 'Issues' do - let(:issue) { create(:issue, project: project, assignee: john_doe, author: author, description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") } - let(:addressed_issue) { create(:issue, project: project, assignee: john_doe, author: author, description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") } - let(:unassigned_issue) { create(:issue, project: project, assignee: nil) } - let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignee: assignee, description: mentions) } - let(:addressed_confident_issue) { create(:issue, :confidential, project: project, author: author, assignee: assignee, 
description: directly_addressed) } + let(:issue) { create(:issue, project: project, assignees: [john_doe], author: author, description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") } + let(:addressed_issue) { create(:issue, project: project, assignees: [john_doe], author: author, description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") } + let(:unassigned_issue) { create(:issue, project: project, assignees: []) } + let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee], description: mentions) } + let(:addressed_confident_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee], description: directly_addressed) } describe '#new_issue' do it 'creates a todo if assigned' do @@ -43,7 +43,7 @@ describe TodoService, services: true do end it 'creates a todo if assignee is the current user' do - unassigned_issue.update_attribute(:assignee, john_doe) + unassigned_issue.assignees = [john_doe] service.new_issue(unassigned_issue, john_doe) should_create_todo(user: john_doe, target: unassigned_issue, author: john_doe, action: Todo::ASSIGNED) @@ -258,20 +258,20 @@ describe TodoService, services: true do describe '#reassigned_issue' do it 'creates a pending todo for new assignee' do - unassigned_issue.update_attribute(:assignee, john_doe) + unassigned_issue.assignees << john_doe service.reassigned_issue(unassigned_issue, author) should_create_todo(user: john_doe, target: unassigned_issue, action: Todo::ASSIGNED) end it 'does not create a todo if unassigned' do - issue.update_attribute(:assignee, nil) + issue.assignees.destroy_all should_not_create_any_todo { service.reassigned_issue(issue, author) } end it 'creates a todo if new assignee is the current user' do - unassigned_issue.update_attribute(:assignee, john_doe) + unassigned_issue.assignees << john_doe service.reassigned_issue(unassigned_issue, john_doe) should_create_todo(user: john_doe, target: unassigned_issue, author: john_doe, action: Todo::ASSIGNED) @@ -361,7 +361,7 @@ describe TodoService, services: true do describe '#new_note' do let!(:first_todo) { create(:todo, :assigned, user: john_doe, project: project, target: issue, author: author) } let!(:second_todo) { create(:todo, :assigned, user: john_doe, project: project, target: issue, author: author) } - let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignee: assignee) } + let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee]) } let(:note) { create(:note, project: project, noteable: issue, author: john_doe, note: mentions) } let(:addressed_note) { create(:note, project: project, noteable: issue, author: john_doe, note: directly_addressed) } let(:note_on_commit) { create(:note_on_commit, project: project, author: john_doe, note: mentions) } @@ -854,7 +854,7 @@ describe TodoService, services: true do end it 'updates cached counts when a todo is created' do - issue = create(:issue, project: project, assignee: john_doe, author: author, description: mentions) + issue = create(:issue, project: project, assignees: [john_doe], author: author, description: mentions) expect(john_doe.todos_pending_count).to eq(0) expect(john_doe).to receive(:update_todos_count_cache).and_call_original @@ -866,8 +866,8 @@ describe TodoService, services: true do end describe '#mark_todos_as_done' do - let(:issue) { create(:issue, project: project, author: author, assignee: john_doe) } - let(:another_issue) { 
create(:issue, project: project, author: author, assignee: john_doe) } + let(:issue) { create(:issue, project: project, author: author, assignees: [john_doe]) } + let(:another_issue) { create(:issue, project: project, author: author, assignees: [john_doe]) } it 'marks a relation of todos as done' do create(:todo, :mentioned, user: john_doe, target: issue, project: project) diff --git a/spec/services/projects/upload_service_spec.rb b/spec/services/upload_service_spec.rb index d2cefa46bfa..95ba28dbecd 100644 --- a/spec/services/projects/upload_service_spec.rb +++ b/spec/services/upload_service_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Projects::UploadService, services: true do +describe UploadService, services: true do describe 'File service' do before do @user = create(:user) @@ -68,6 +68,6 @@ describe Projects::UploadService, services: true do end def upload_file(project, file) - Projects::UploadService.new(project, file).execute + described_class.new(project, file, FileUploader).execute end end diff --git a/spec/services/users/activity_service_spec.rb b/spec/services/users/activity_service_spec.rb new file mode 100644 index 00000000000..8d67ebe3231 --- /dev/null +++ b/spec/services/users/activity_service_spec.rb @@ -0,0 +1,48 @@ +require 'spec_helper' + +describe Users::ActivityService, services: true do + include UserActivitiesHelpers + + let(:user) { create(:user) } + + subject(:service) { described_class.new(user, 'type') } + + describe '#execute', :redis do + context 'when last activity is nil' do + before do + service.execute + end + + it 'sets the last activity timestamp for the user' do + expect(last_hour_user_ids).to eq([user.id]) + end + + it 'updates the same user' do + service.execute + + expect(last_hour_user_ids).to eq([user.id]) + end + + it 'updates the timestamp of an existing user' do + Timecop.freeze(Date.tomorrow) do + expect { service.execute }.to change { user_activity(user) }.to(Time.now.to_i.to_s) + end + end + + describe 'other user' do + it 'updates other user' do + other_user = create(:user) + described_class.new(other_user, 'type').execute + + expect(last_hour_user_ids).to match_array([user.id, other_user.id]) + end + end + end + end + + def last_hour_user_ids + Gitlab::UserActivities.new. + select { |k, v| v >= 1.hour.ago.to_i.to_s }. 
+ map { |k, _| k.to_i } + end +end diff --git a/spec/services/users/build_service_spec.rb b/spec/services/users/build_service_spec.rb new file mode 100644 index 00000000000..2a6bfc1b3a0 --- /dev/null +++ b/spec/services/users/build_service_spec.rb @@ -0,0 +1,55 @@ +require 'spec_helper' + +describe Users::BuildService, services: true do + describe '#execute' do + let(:params) do + { name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: 'mydummypass' } + end + + context 'with an admin user' do + let(:admin_user) { create(:admin) } + let(:service) { described_class.new(admin_user, params) } + + it 'returns a valid user' do + expect(service.execute).to be_valid + end + end + + context 'with non admin user' do + let(:user) { create(:user) } + let(:service) { described_class.new(user, params) } + + it 'raises AccessDeniedError exception' do + expect { service.execute }.to raise_error Gitlab::Access::AccessDeniedError + end + end + + context 'with nil user' do + let(:service) { described_class.new(nil, params) } + + it 'returns a valid user' do + expect(service.execute).to be_valid + end + + context 'when "send_user_confirmation_email" application setting is true' do + before do + stub_application_setting(send_user_confirmation_email: true, signup_enabled?: true) + end + + it 'does not confirm the user' do + expect(service.execute).not_to be_confirmed + end + end + + context 'when "send_user_confirmation_email" application setting is false' do + before do + stub_application_setting(send_user_confirmation_email: false, signup_enabled?: true) + end + + it 'confirms the user' do + expect(service.execute).to be_confirmed + end + end + end + end +end diff --git a/spec/services/users/create_service_spec.rb b/spec/services/users/create_service_spec.rb index 66f68650f81..75746278573 100644 --- a/spec/services/users/create_service_spec.rb +++ b/spec/services/users/create_service_spec.rb @@ -1,38 +1,6 @@ require 'spec_helper' describe Users::CreateService, services: true do - describe '#build' do - let(:params) do - { name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: 'mydummypass' } - end - - context 'with an admin user' do - let(:admin_user) { create(:admin) } - let(:service) { described_class.new(admin_user, params) } - - it 'returns a valid user' do - expect(service.build).to be_valid - end - end - - context 'with non admin user' do - let(:user) { create(:user) } - let(:service) { described_class.new(user, params) } - - it 'raises AccessDeniedError exception' do - expect { service.build }.to raise_error Gitlab::Access::AccessDeniedError - end - end - - context 'with nil user' do - let(:service) { described_class.new(nil, params) } - - it 'returns a valid user' do - expect(service.build).to be_valid - end - end - end - describe '#execute' do let(:admin_user) { create(:admin) } @@ -122,6 +90,32 @@ describe Users::CreateService, services: true do end end + context 'when password_automatically_set parameter is true' do + let(:params) do + { + name: 'John Doe', + username: 'jduser', + email: 'jd@example.com', + password: 'mydummypass', + password_automatically_set: true + } + end + + it 'persists the given attributes' do + user = service.execute + user.reload + + expect(user).to have_attributes( + name: params[:name], + username: params[:username], + email: params[:email], + password: params[:password], + created_by_id: admin_user.id, + password_automatically_set: params[:password_automatically_set] + ) + end + end + context 'when skip_confirmation parameter is 
true' do let(:params) do { name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: 'mydummypass', skip_confirmation: true } @@ -159,40 +153,18 @@ describe Users::CreateService, services: true do end let(:service) { described_class.new(nil, params) } - context 'when "send_user_confirmation_email" application setting is true' do - before do - current_application_settings = double(:current_application_settings, send_user_confirmation_email: true, signup_enabled?: true) - allow(service).to receive(:current_application_settings).and_return(current_application_settings) - end - - it 'does not confirm the user' do - expect(service.execute).not_to be_confirmed - end - end - - context 'when "send_user_confirmation_email" application setting is false' do - before do - current_application_settings = double(:current_application_settings, send_user_confirmation_email: false, signup_enabled?: true) - allow(service).to receive(:current_application_settings).and_return(current_application_settings) - end - - it 'confirms the user' do - expect(service.execute).to be_confirmed - end - - it 'persists the given attributes' do - user = service.execute - user.reload - - expect(user).to have_attributes( - name: params[:name], - username: params[:username], - email: params[:email], - password: params[:password], - created_by_id: nil, - admin: false - ) - end + it 'persists the given attributes' do + user = service.execute + user.reload + + expect(user).to have_attributes( + name: params[:name], + username: params[:username], + email: params[:email], + password: params[:password], + created_by_id: nil, + admin: false + ) end end end diff --git a/spec/services/users/destroy_spec.rb b/spec/services/users/destroy_service_spec.rb index 9a28c03d968..de37a61e388 100644 --- a/spec/services/users/destroy_spec.rb +++ b/spec/services/users/destroy_service_spec.rb @@ -17,13 +17,28 @@ describe Users::DestroyService, services: true do expect { Namespace.with_deleted.find(user.namespace.id) }.to raise_error(ActiveRecord::RecordNotFound) end - it 'will delete the project in the near future' do - expect_any_instance_of(Projects::DestroyService).to receive(:async_execute).once + it 'will delete the project' do + expect_any_instance_of(Projects::DestroyService).to receive(:execute).once service.execute(user) end end + context 'projects in pending_delete' do + before do + project.pending_delete = true + project.save + end + + it 'destroys a project in pending_delete' do + expect_any_instance_of(Projects::DestroyService).to receive(:execute).once + + service.execute(user) + + expect { Project.find(project.id) }.to raise_error(ActiveRecord::RecordNotFound) + end + end + context "a deleted user's issues" do let(:project) { create(:project) } @@ -31,43 +46,47 @@ describe Users::DestroyService, services: true do project.add_developer(user) end - context "for an issue the user has created" do - let!(:issue) { create(:issue, project: project, author: user) } + context "for an issue the user was assigned to" do + let!(:issue) { create(:issue, project: project, assignees: [user]) } before do service.execute(user) end - it 'does not delete the issue' do + it 'does not delete issues the user is assigned to' do expect(Issue.find_by_id(issue.id)).to be_present end - it 'migrates the issue so that the "Ghost User" is the issue owner' do + it 'migrates the issue so that it is "Unassigned"' do migrated_issue = Issue.find_by_id(issue.id) - expect(migrated_issue.author).to eq(User.ghost) + expect(migrated_issue.assignees).to be_empty 
end + end + end - it 'blocks the user before migrating issues to the "Ghost User' do - expect(user).to be_blocked - end + context "a deleted user's merge_requests" do + let(:project) { create(:project) } + + before do + project.add_developer(user) end - context "for an issue the user was assigned to" do - let!(:issue) { create(:issue, project: project, assignee: user) } + context "for an merge request the user was assigned to" do + let!(:merge_request) { create(:merge_request, source_project: project, assignee: user) } before do service.execute(user) end - it 'does not delete issues the user is assigned to' do - expect(Issue.find_by_id(issue.id)).to be_present + it 'does not delete merge requests the user is assigned to' do + expect(MergeRequest.find_by_id(merge_request.id)).to be_present end - it 'migrates the issue so that it is "Unassigned"' do - migrated_issue = Issue.find_by_id(issue.id) + it 'migrates the merge request so that it is "Unassigned"' do + migrated_merge_request = MergeRequest.find_by_id(merge_request.id) - expect(migrated_issue.assignee).to be_nil + expect(migrated_merge_request.assignee).to be_nil end end end @@ -126,5 +145,19 @@ describe Users::DestroyService, services: true do expect(User.exists?(user.id)).to be(false) end end + + context "migrating associated records" do + it 'delegates to the `MigrateToGhostUser` service to move associated records to the ghost user' do + expect_any_instance_of(Users::MigrateToGhostUserService).to receive(:execute).once + + service.execute(user) + end + + it 'does not run `MigrateToGhostUser` if hard_delete option is given' do + expect_any_instance_of(Users::MigrateToGhostUserService).not_to receive(:execute) + + service.execute(user, hard_delete: true) + end + end end end diff --git a/spec/services/users/migrate_to_ghost_user_service_spec.rb b/spec/services/users/migrate_to_ghost_user_service_spec.rb new file mode 100644 index 00000000000..9e1edf1ac30 --- /dev/null +++ b/spec/services/users/migrate_to_ghost_user_service_spec.rb @@ -0,0 +1,82 @@ +require 'spec_helper' + +describe Users::MigrateToGhostUserService, services: true do + let!(:user) { create(:user) } + let!(:project) { create(:project) } + let(:service) { described_class.new(user) } + + context "migrating a user's associated records to the ghost user" do + context 'issues' do + include_examples "migrating a deleted user's associated records to the ghost user", Issue do + let(:created_record) { create(:issue, project: project, author: user) } + let(:assigned_record) { create(:issue, project: project, assignee: user) } + end + end + + context 'merge requests' do + include_examples "migrating a deleted user's associated records to the ghost user", MergeRequest do + let(:created_record) { create(:merge_request, source_project: project, author: user, target_branch: "first") } + let(:assigned_record) { create(:merge_request, source_project: project, assignee: user, target_branch: 'second') } + end + end + + context 'notes' do + include_examples "migrating a deleted user's associated records to the ghost user", Note do + let(:created_record) { create(:note, project: project, author: user) } + end + end + + context 'abuse reports' do + include_examples "migrating a deleted user's associated records to the ghost user", AbuseReport do + let(:created_record) { create(:abuse_report, reporter: user, user: create(:user)) } + end + end + + context 'award emoji' do + include_examples "migrating a deleted user's associated records to the ghost user", AwardEmoji do + let(:created_record) { 
create(:award_emoji, user: user) } + let(:author_alias) { :user } + + context "when the awardable already has an award emoji of the same name assigned to the ghost user" do + let(:awardable) { create(:issue) } + let!(:existing_award_emoji) { create(:award_emoji, user: User.ghost, name: "thumbsup", awardable: awardable) } + let!(:award_emoji) { create(:award_emoji, user: user, name: "thumbsup", awardable: awardable) } + + it "migrates the award emoji regardless" do + service.execute + + migrated_record = AwardEmoji.find_by_id(award_emoji.id) + + expect(migrated_record.user).to eq(User.ghost) + end + + it "does not leave the migrated award emoji in an invalid state" do + service.execute + + migrated_record = AwardEmoji.find_by_id(award_emoji.id) + + expect(migrated_record).to be_valid + end + end + end + end + + context "when record migration fails with a rollback exception" do + before do + expect_any_instance_of(MergeRequest::ActiveRecord_Associations_CollectionProxy) + .to receive(:update_all).and_raise(ActiveRecord::Rollback) + end + + context "for records that were already migrated" do + let!(:issue) { create(:issue, project: project, author: user) } + let!(:merge_request) { create(:merge_request, source_project: project, author: user, target_branch: "first") } + + it "reverses the migration" do + service.execute + + expect(issue.reload.author).to eq(user) + end + end + end + end +end diff --git a/spec/sidekiq/cron/job_gem_dependency_spec.rb b/spec/sidekiq/cron/job_gem_dependency_spec.rb new file mode 100644 index 00000000000..2e30cf025b0 --- /dev/null +++ b/spec/sidekiq/cron/job_gem_dependency_spec.rb @@ -0,0 +1,18 @@ +require 'spec_helper' + +describe Sidekiq::Cron::Job do + describe 'cron jobs' do + context 'when rufus-scheduler depends on ZoTime or EoTime' do + before do + described_class + .create(name: 'TestCronWorker', + cron: Settings.cron_jobs[:pipeline_schedule_worker]['cron'], + class: Settings.cron_jobs[:pipeline_schedule_worker]['job_class']) + end + + it 'does not get "Rufus::Scheduler::ZoTime/EtOrbi::EoTime into an exact number"' do + expect { described_class.all.first.should_enque?(Time.now) }.not_to raise_error + end + end + end +end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 4eb5b150af5..a58f4e664b7 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -9,8 +9,15 @@ require 'rspec/rails' require 'shoulda/matchers' require 'rspec/retry' -if (ENV['RSPEC_PROFILING_POSTGRES_URL'] || ENV['RSPEC_PROFILING']) && - (!ENV.has_key?('CI') || ENV['CI_COMMIT_REF_NAME'] == 'master') +rspec_profiling_is_configured = + ENV['RSPEC_PROFILING_POSTGRES_URL'].present? 
|| + ENV['RSPEC_PROFILING'] +branch_can_be_profiled = + ENV['GITLAB_DATABASE'] == 'postgresql' && + (ENV['CI_COMMIT_REF_NAME'] == 'master' || + ENV['CI_COMMIT_REF_NAME'] =~ /rspec-profile/) + +if rspec_profiling_is_configured && (!ENV.key?('CI') || branch_can_be_profiled) require 'rspec_profiling/rspec' end @@ -59,6 +66,10 @@ RSpec.configure do |config| TestEnv.init end + config.after(:suite) do + TestEnv.cleanup + end + if ENV['CI'] # Retry only on feature specs that use JS config.around :each, :js do |ex| diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb index aa14709bc9c..b8ca8f22a3d 100644 --- a/spec/support/capybara.rb +++ b/spec/support/capybara.rb @@ -1,10 +1,11 @@ +# rubocop:disable Style/GlobalVars require 'capybara/rails' require 'capybara/rspec' require 'capybara/poltergeist' require 'capybara-screenshot/rspec' # Give CI some extra time -timeout = (ENV['CI'] || ENV['CI_SERVER']) ? 30 : 10 +timeout = (ENV['CI'] || ENV['CI_SERVER']) ? 60 : 30 Capybara.javascript_driver = :poltergeist Capybara.register_driver :poltergeist do |app| @@ -26,7 +27,10 @@ Capybara.ignore_hidden_elements = true Capybara::Screenshot.prune_strategy = :keep_last_run RSpec.configure do |config| - config.before(:suite) do - TestEnv.warm_asset_cache + config.before(:context, :js) do + next if $capybara_server_already_started + + TestEnv.eager_load_driver_server + $capybara_server_already_started = true end end diff --git a/spec/support/controllers/githubish_import_controller_shared_examples.rb b/spec/support/controllers/githubish_import_controller_shared_examples.rb index 51f1015f43c..c59b30c772d 100644 --- a/spec/support/controllers/githubish_import_controller_shared_examples.rb +++ b/spec/support/controllers/githubish_import_controller_shared_examples.rb @@ -180,7 +180,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do it "takes the new namespace" do expect(Gitlab::GithubImport::ProjectCreator). to receive(:new).with(provider_repo, provider_repo.name, an_instance_of(Group), user, access_params, type: provider). - and_return(double(execute: true)) + and_return(double(execute: true)) post :create, target_namespace: provider_repo.name, format: :js end @@ -201,7 +201,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do it "takes the current user's namespace" do expect(Gitlab::GithubImport::ProjectCreator). to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider). - and_return(double(execute: true)) + and_return(double(execute: true)) post :create, format: :js end @@ -229,7 +229,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do end end - context 'user has chosen a nested namespace and name for the project' do + context 'user has chosen an existing nested namespace and name for the project' do let(:parent_namespace) { create(:namespace, name: 'foo', owner: user) } let(:nested_namespace) { create(:namespace, name: 'bar', parent: parent_namespace, owner: user) } let(:test_name) { 'test_name' } @@ -242,5 +242,58 @@ shared_examples 'a GitHub-ish import controller: POST create' do post :create, { target_namespace: nested_namespace.full_path, new_name: test_name, format: :js } end end + + context 'user has chosen a non-existent nested namespaces and name for the project' do + let(:test_name) { 'test_name' } + + it 'takes the selected namespace and name' do + expect(Gitlab::GithubImport::ProjectCreator). 
+ to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider). + and_return(double(execute: true)) + + post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } + end + + it 'creates the namespaces' do + allow(Gitlab::GithubImport::ProjectCreator). + to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider). + and_return(double(execute: true)) + + expect { post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } } + .to change { Namespace.count }.by(2) + end + + it 'new namespace has the right parent' do + allow(Gitlab::GithubImport::ProjectCreator). + to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider). + and_return(double(execute: true)) + + post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } + + expect(Namespace.find_by_path_or_name('bar').parent.path).to eq('foo') + end + end + + context 'user has chosen existent and non-existent nested namespaces and name for the project' do + let(:test_name) { 'test_name' } + let!(:parent_namespace) { create(:namespace, name: 'foo', owner: user) } + + it 'takes the selected namespace and name' do + expect(Gitlab::GithubImport::ProjectCreator). + to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider). + and_return(double(execute: true)) + + post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } + end + + it 'creates the namespaces' do + allow(Gitlab::GithubImport::ProjectCreator). + to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider). + and_return(double(execute: true)) + + expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } } + .to change { Namespace.count }.by(2) + end + end end end diff --git a/spec/support/cycle_analytics_helpers.rb b/spec/support/cycle_analytics_helpers.rb index c864a705ca4..66545127a44 100644 --- a/spec/support/cycle_analytics_helpers.rb +++ b/spec/support/cycle_analytics_helpers.rb @@ -1,5 +1,5 @@ module CycleAnalyticsHelpers - def create_commit_referencing_issue(issue, branch_name: random_git_name) + def create_commit_referencing_issue(issue, branch_name: generate(:branch)) project.repository.add_branch(user, branch_name, 'master') create_commit("Commit for ##{issue.iid}", issue.project, user, branch_name) end @@ -7,9 +7,7 @@ module CycleAnalyticsHelpers def create_commit(message, project, user, branch_name, count: 1) oldrev = project.repository.commit(branch_name).sha commit_shas = Array.new(count) do |index| - filename = random_git_name - - commit_sha = project.repository.create_file(user, filename, "content", message: message, branch_name: branch_name) + commit_sha = project.repository.create_file(user, generate(:branch), "content", message: message, branch_name: branch_name) project.repository.commit(commit_sha) commit_sha @@ -22,17 +20,17 @@ module CycleAnalyticsHelpers ref: 'refs/heads/master').execute end - def create_merge_request_closing_issue(issue, message: nil, source_branch: nil) + def create_merge_request_closing_issue(issue, message: nil, source_branch: nil, commit_message: 'commit message') if !source_branch || project.repository.commit(source_branch).blank? 
- source_branch = random_git_name + source_branch = generate(:branch) project.repository.add_branch(user, source_branch, 'master') end sha = project.repository.create_file( user, - random_git_name, + generate(:branch), 'content', - message: 'commit message', + message: commit_message, branch_name: source_branch) project.repository.commit(sha) diff --git a/spec/support/drag_to_helper.rb b/spec/support/drag_to_helper.rb index 0c0659d3ecd..ae149631ed9 100644 --- a/spec/support/drag_to_helper.rb +++ b/spec/support/drag_to_helper.rb @@ -3,11 +3,11 @@ module DragTo evaluate_script("simulateDrag({scrollable: $('#{scrollable}').get(0), from: {el: $('#{selector}').eq(#{list_from_index}).get(0), index: #{from_index}}, to: {el: $('#{selector}').eq(#{list_to_index}).get(0), index: #{to_index}}});") Timeout.timeout(Capybara.default_max_wait_time) do - loop until drag_active? + loop while drag_active? end end def drag_active? - page.evaluate_script('window.SIMULATE_DRAG_ACTIVE').zero? + page.evaluate_script('window.SIMULATE_DRAG_ACTIVE').nonzero? end end diff --git a/spec/support/dropzone_helper.rb b/spec/support/dropzone_helper.rb index 984ec7d2741..02fdeb08afe 100644 --- a/spec/support/dropzone_helper.rb +++ b/spec/support/dropzone_helper.rb @@ -6,32 +6,52 @@ module DropzoneHelper # Dropzone events to perform the actual upload. # # This method waits for the upload to complete before returning. - def dropzone_file(file_path) + # max_file_size is an optional parameter. + # If it's not 0, it is used as the dropzone.maxFilesize parameter. + # wait_for_queuecomplete is an optional parameter. + # If it's 'false', the helper will NOT wait for the backend response, + # which lets you test behavior while AJAX is still processing. + def dropzone_file(files, max_file_size = 0, wait_for_queuecomplete = true) # Generate a fake file input that Capybara can attach to page.execute_script <<-JS.strip_heredoc + $('#fakeFileInput').remove(); var fakeFileInput = window.$('<input/>').attr( - {id: 'fakeFileInput', type: 'file'} + {id: 'fakeFileInput', type: 'file', multiple: true} ).appendTo('body'); window._dropzoneComplete = false; JS - # Attach the file to the fake input selector with Capybara - attach_file('fakeFileInput', file_path) + # Attach files to the fake input selector with Capybara + attach_file('fakeFileInput', files) # Manually trigger a Dropzone "drop" event with the fake input's file list page.execute_script <<-JS.strip_heredoc - var fileList = [$('#fakeFileInput')[0].files[0]]; - var e = jQuery.Event('drop', { dataTransfer : { files : fileList } }); - var dropzone = $('.div-dropzone')[0].dropzone; + dropzone.options.autoProcessQueue = false; + + if (#{max_file_size} > 0) { + dropzone.options.maxFilesize = #{max_file_size}; + } + dropzone.on('queuecomplete', function() { window._dropzoneComplete = true; }); - dropzone.listeners[0].events.drop(e); + + var fileList = [$('#fakeFileInput')[0].files]; + + $.map(fileList, function(file){ + var e = jQuery.Event('drop', { dataTransfer : { files : file } }); + + dropzone.listeners[0].events.drop(e); + }); + + dropzone.processQueue(); JS - # Wait until Dropzone's fired `queuecomplete` - loop until page.evaluate_script('window._dropzoneComplete === true') + if wait_for_queuecomplete + # Wait until Dropzone's fired `queuecomplete` + loop until page.evaluate_script('window._dropzoneComplete === true') + end end end diff --git a/spec/support/fake_migration_classes.rb b/spec/support/fake_migration_classes.rb new file mode 100644 index 00000000000..3de0460c3ca --- /dev/null +++
b/spec/support/fake_migration_classes.rb @@ -0,0 +1,3 @@ +class FakeRenameReservedPathMigrationV1 < ActiveRecord::Migration + include Gitlab::Database::RenameReservedPathsMigration::V1 +end diff --git a/spec/support/features/discussion_comments_shared_example.rb b/spec/support/features/discussion_comments_shared_example.rb new file mode 100644 index 00000000000..bb4542b1683 --- /dev/null +++ b/spec/support/features/discussion_comments_shared_example.rb @@ -0,0 +1,219 @@ +shared_examples 'discussion comments' do |resource_name| + let(:form_selector) { '.js-main-target-form' } + let(:dropdown_selector) { "#{form_selector} .comment-type-dropdown" } + let(:toggle_selector) { "#{dropdown_selector} .dropdown-toggle" } + let(:menu_selector) { "#{dropdown_selector} .dropdown-menu" } + let(:submit_selector) { "#{form_selector} .js-comment-submit-button" } + let(:close_selector) { "#{form_selector} .btn-comment-and-close" } + let(:comments_selector) { '.timeline > .note.timeline-entry' } + + it 'clicking "Comment" will post a comment' do + expect(page).to have_selector toggle_selector + + find("#{form_selector} .note-textarea").send_keys('a') + + find(submit_selector).click + + find(comments_selector, match: :first) + new_comment = all(comments_selector).last + + expect(new_comment).to have_content 'a' + expect(new_comment).not_to have_selector '.discussion' + end + + if resource_name == 'issue' + it "clicking 'Comment & close #{resource_name}' will post a comment and close the #{resource_name}" do + find("#{form_selector} .note-textarea").send_keys('a') + + find(close_selector).click + + find(comments_selector, match: :first) + find("#{comments_selector}.system-note") + entries = all(comments_selector) + close_note = entries.last + new_comment = entries[-2] + + expect(close_note).to have_content 'closed' + expect(new_comment).not_to have_selector '.discussion' + end + end + + describe 'when the toggle is clicked' do + before do + find("#{form_selector} .note-textarea").send_keys('a') + + find(toggle_selector).click + end + + it 'has a "Comment" item (selected by default) and "Start discussion" item' do + expect(page).to have_selector menu_selector + + find("#{menu_selector} li", match: :first) + items = all("#{menu_selector} li") + + expect(items.first).to have_content 'Comment' + expect(items.first).to have_content "Add a general comment to this #{resource_name}." + expect(items.first).to have_selector '.fa-check' + expect(items.first['class']).to match 'droplab-item-selected' + + expect(items.last).to have_content 'Start discussion' + expect(items.last).to have_content "Discuss a specific suggestion or question#{' that needs to be resolved' if resource_name == 'merge request'}." 
+ expect(items.last).not_to have_selector '.fa-check' + expect(items.last['class']).not_to match 'droplab-item-selected' + end + + it 'closes the menu when clicking the toggle or body' do + find(toggle_selector).click + + expect(page).not_to have_selector menu_selector + + find(toggle_selector).click + find('body').click + + expect(page).not_to have_selector menu_selector + end + + it 'clicking the ul padding or divider should not change the text' do + find(menu_selector).trigger 'click' + + expect(page).to have_selector menu_selector + expect(find(dropdown_selector)).to have_content 'Comment' + + find("#{menu_selector} .divider").trigger 'click' + + expect(page).to have_selector menu_selector + expect(find(dropdown_selector)).to have_content 'Comment' + end + + describe 'when selecting "Start discussion"' do + before do + find("#{menu_selector} li", match: :first) + all("#{menu_selector} li").last.click + end + + it 'updates the submit button text, note_type input and closes the dropdown' do + expect(find(dropdown_selector)).to have_content 'Start discussion' + expect(find("#{form_selector} #note_type", visible: false).value).to eq('DiscussionNote') + expect(page).not_to have_selector menu_selector + end + + if resource_name =~ /(issue|merge request)/ + it 'updates the close button text' do + expect(find(close_selector)).to have_content "Start discussion & close #{resource_name}" + end + + it 'typing does not change the close button text' do + find("#{form_selector} .note-textarea").send_keys('b') + + expect(find(close_selector)).to have_content "Start discussion & close #{resource_name}" + end + end + + it 'clicking "Start discussion" will post a discussion' do + find(submit_selector).click + + find(comments_selector, match: :first) + new_comment = all(comments_selector).last + + expect(new_comment).to have_content 'a' + expect(new_comment).to have_selector '.discussion' + end + + if resource_name == 'issue' + it "clicking 'Start discussion & close #{resource_name}' will post a discussion and close the #{resource_name}" do + find(close_selector).click + + find(comments_selector, match: :first) + find("#{comments_selector}.system-note") + entries = all(comments_selector) + close_note = entries.last + new_discussion = entries[-2] + + expect(close_note).to have_content 'closed' + expect(new_discussion).to have_selector '.discussion' + end + end + + describe 'when opening the menu' do + before do + find(toggle_selector).click + end + + it 'should have "Start discussion" selected' do + find("#{menu_selector} li", match: :first) + items = all("#{menu_selector} li") + + expect(items.first).to have_content 'Comment' + expect(items.first).not_to have_selector '.fa-check' + expect(items.first['class']).not_to match 'droplab-item-selected' + + expect(items.last).to have_content 'Start discussion' + expect(items.last).to have_selector '.fa-check' + expect(items.last['class']).to match 'droplab-item-selected' + end + + describe 'when selecting "Comment"' do + before do + find("#{menu_selector} li", match: :first).click + end + + it 'updates the submit button text, clears the note_type input and closes the dropdown' do + expect(find(dropdown_selector)).to have_content 'Comment' + expect(find("#{form_selector} #note_type", visible: false).value).to eq('') + expect(page).not_to have_selector menu_selector + end + + if resource_name =~ /(issue|merge request)/ + it 'updates the close button text' do + expect(find(close_selector)).to have_content "Comment & close #{resource_name}" + end + + it 'typing does 
not change the close button text' do + find("#{form_selector} .note-textarea").send_keys('b') + + expect(find(close_selector)).to have_content "Comment & close #{resource_name}" + end + end + + it 'should have "Comment" selected when opening the menu' do + find(toggle_selector).click + + find("#{menu_selector} li", match: :first) + items = all("#{menu_selector} li") + + expect(items.first).to have_content 'Comment' + expect(items.first).to have_selector '.fa-check' + expect(items.first['class']).to match 'droplab-item-selected' + + expect(items.last).to have_content 'Start discussion' + expect(items.last).not_to have_selector '.fa-check' + expect(items.last['class']).not_to match 'droplab-item-selected' + end + end + end + end + end + + if resource_name =~ /(issue|merge request)/ + describe "on a closed #{resource_name}" do + before do + find("#{form_selector} .js-note-target-close").click + + find("#{form_selector} .note-textarea").send_keys('a') + end + + it "should show a 'Comment & reopen #{resource_name}' button" do + expect(find("#{form_selector} .js-note-target-reopen")).to have_content "Comment & reopen #{resource_name}" + end + + it "should show a 'Start discussion & reopen #{resource_name}' button when 'Start discussion' is selected" do + find(toggle_selector).click + + find("#{menu_selector} li", match: :first) + all("#{menu_selector} li").last.click + + expect(find("#{form_selector} .js-note-target-reopen")).to have_content "Start discussion & reopen #{resource_name}" + end + end + end +end diff --git a/spec/support/features/issuable_slash_commands_shared_examples.rb b/spec/support/features/issuable_slash_commands_shared_examples.rb index a4713e53f63..ad46b163cd6 100644 --- a/spec/support/features/issuable_slash_commands_shared_examples.rb +++ b/spec/support/features/issuable_slash_commands_shared_examples.rb @@ -3,7 +3,6 @@ shared_examples 'issuable record that supports slash commands in its description and notes' do |issuable_type| include SlashCommandsHelpers - include WaitForAjax let(:master) { create(:user) } let(:assignee) { create(:user, username: 'bob') } @@ -26,7 +25,7 @@ shared_examples 'issuable record that supports slash commands in its description wait_for_ajax end - describe "new #{issuable_type}" do + describe "new #{issuable_type}", js: true do context 'with commands in the description' do it "creates the #{issuable_type} and interpret commands accordingly" do visit public_send("new_namespace_project_#{issuable_type}_path", project.namespace, project, new_url_opts) @@ -45,7 +44,7 @@ shared_examples 'issuable record that supports slash commands in its description end end - describe "note on #{issuable_type}" do + describe "note on #{issuable_type}", js: true do before do visit public_send("namespace_project_#{issuable_type}_path", project.namespace, project, issuable) end @@ -59,11 +58,12 @@ shared_examples 'issuable record that supports slash commands in its description expect(page).not_to have_content '/label ~bug' expect(page).not_to have_content '/milestone %"ASAP"' + wait_for_ajax issuable.reload note = issuable.notes.user.first expect(note.note).to eq "Awesome!" 
- expect(issuable.assignee).to eq assignee + expect(issuable.assignees).to eq [assignee] expect(issuable.labels).to eq [label_bug] expect(issuable.milestone).to eq milestone end @@ -81,7 +81,7 @@ shared_examples 'issuable record that supports slash commands in its description issuable.reload expect(issuable.notes.user).to be_empty - expect(issuable.assignee).to eq assignee + expect(issuable.assignees).to eq [assignee] expect(issuable.labels).to eq [label_bug] expect(issuable.milestone).to eq milestone end @@ -258,4 +258,19 @@ shared_examples 'issuable record that supports slash commands in its description end end end + + describe "preview of note on #{issuable_type}" do + it 'removes slash commands from note and explains them' do + visit public_send("namespace_project_#{issuable_type}_path", project.namespace, project, issuable) + + page.within('.js-main-target-form') do + fill_in 'note[note]', with: "Awesome!\n/assign @bob " + click_on 'Preview' + + expect(page).to have_content 'Awesome!' + expect(page).not_to have_content '/assign @bob' + expect(page).to have_content 'Assigns @bob.' + end + end + end end diff --git a/spec/support/filter_spec_helper.rb b/spec/support/filter_spec_helper.rb index a8e454eb09e..b871b7ffc90 100644 --- a/spec/support/filter_spec_helper.rb +++ b/spec/support/filter_spec_helper.rb @@ -63,9 +63,9 @@ module FilterSpecHelper # # Returns a String def invalidate_reference(reference) - if reference =~ /\A(.+)?.\d+\z/ + if reference =~ /\A(.+)?[^\d]\d+\z/ # Integer-based reference with optional project prefix - reference.gsub(/\d+\z/) { |i| i.to_i + 1 } + reference.gsub(/\d+\z/) { |i| i.to_i + 10_000 } elsif reference =~ /\A(.+@)?(\h{7,40}\z)/ # SHA-based reference with optional prefix reference.gsub(/\h{7,40}\z/) { |v| v.reverse } diff --git a/spec/support/filtered_search_helpers.rb b/spec/support/filtered_search_helpers.rb index 6b009b132b6..37cc308e613 100644 --- a/spec/support/filtered_search_helpers.rb +++ b/spec/support/filtered_search_helpers.rb @@ -30,7 +30,7 @@ module FilteredSearchHelpers end def clear_search_field - find('.filtered-search-input-container .clear-search').click + find('.filtered-search-box .clear-search').click end def reset_filters @@ -51,7 +51,7 @@ module FilteredSearchHelpers # Iterates through each visual token inside # .tokens-container to make sure the correct names and values are rendered def expect_tokens(tokens) - page.find '.filtered-search-input-container .tokens-container' do + page.find '.filtered-search-box .tokens-container' do page.all(:css, '.tokens-container li').each_with_index do |el, index| token_name = tokens[index][:name] token_value = tokens[index][:value] @@ -71,4 +71,18 @@ module FilteredSearchHelpers def get_filtered_search_placeholder find('.filtered-search')['placeholder'] end + + def remove_recent_searches + execute_script('window.localStorage.clear();') + end + + def set_recent_searches(key, input) + execute_script("window.localStorage.setItem('#{key}', '#{input}');") + end + + def wait_for_filtered_search(text) + Timeout.timeout(Capybara.default_max_wait_time) do + loop until find('.filtered-search').value.strip == text + end + end end diff --git a/spec/support/fixture_helpers.rb b/spec/support/fixture_helpers.rb index a05c9d18002..5515c355cea 100644 --- a/spec/support/fixture_helpers.rb +++ b/spec/support/fixture_helpers.rb @@ -1,8 +1,11 @@ module FixtureHelpers def fixture_file(filename) return '' if filename.blank? 
- file_path = File.expand_path(Rails.root.join('spec/fixtures/', filename)) - File.read(file_path) + File.read(expand_fixture_path(filename)) + end + + def expand_fixture_path(filename) + File.expand_path(Rails.root.join('spec/fixtures/', filename)) end end diff --git a/spec/support/generate-seed-repo-rb b/spec/support/generate-seed-repo-rb new file mode 100755 index 00000000000..7335f74c0e9 --- /dev/null +++ b/spec/support/generate-seed-repo-rb @@ -0,0 +1,162 @@ +#!/usr/bin/env ruby +# +# # generate-seed-repo-rb +# +# This script generates the seed_repo.rb file used by lib/gitlab/git +# tests. The seed_repo.rb file needs to be updated anytime there is a +# Git push to https://gitlab.com/gitlab-org/gitlab-git-test. +# +# Usage: +# +# ./spec/support/generate-seed-repo-rb > spec/support/seed_repo.rb +# +# + +require 'erb' +require 'tempfile' + +SOURCE = 'https://gitlab.com/gitlab-org/gitlab-git-test.git'.freeze +SCRIPT_NAME = 'generate-seed-repo-rb'.freeze +REPO_NAME = 'gitlab-git-test.git'.freeze + +def main + Dir.mktmpdir do |dir| + unless system(*%W[git clone --bare #{SOURCE} #{REPO_NAME}], chdir: dir) + abort "git clone failed" + end + repo = File.join(dir, REPO_NAME) + erb = ERB.new(DATA.read) + erb.run(binding) + end +end + +def capture!(cmd, dir) + output = IO.popen(cmd, 'r', chdir: dir) { |io| io.read } + raise "command failed with #{$?}: #{cmd.join(' ')}" unless $?.success? + output.chomp +end + +main + +__END__ +# This file is generated by <%= SCRIPT_NAME %>. Do not edit this file manually. +# +# Seed repo: +<%= capture!(%w{git log --format=#\ %H\ %s}, repo) %> + +module SeedRepo + module BigCommit + ID = "913c66a37b4a45b9769037c55c2d238bd0942d2e".freeze + PARENT_ID = "cfe32cf61b73a0d5e9f13e774abde7ff789b1660".freeze + MESSAGE = "Files, encoding and much more".freeze + AUTHOR_FULL_NAME = "Dmitriy Zaporozhets".freeze + FILES_COUNT = 2 + end + + module Commit + ID = "570e7b2abdd848b95f2f578043fc23bd6f6fd24d".freeze + PARENT_ID = "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9".freeze + MESSAGE = "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n".freeze + AUTHOR_FULL_NAME = "Dmitriy Zaporozhets".freeze + FILES = ["files/ruby/popen.rb", "files/ruby/regex.rb"].freeze + FILES_COUNT = 2 + C_FILE_PATH = "files/ruby".freeze + C_FILES = ["popen.rb", "regex.rb", "version_info.rb"].freeze + BLOB_FILE = %{%h3= @key.title\n%hr\n%pre= @key.key\n.actions\n = link_to 'Remove', @key, :confirm => 'Are you sure?', :method => :delete, :class => \"btn danger delete-key\"\n\n\n}.freeze + BLOB_FILE_PATH = "app/views/keys/show.html.haml".freeze + end + + module EmptyCommit + ID = "b0e52af38d7ea43cf41d8a6f2471351ac036d6c9".freeze + PARENT_ID = "40f4a7a617393735a95a0bb67b08385bc1e7c66d".freeze + MESSAGE = "Empty commit".freeze + AUTHOR_FULL_NAME = "Rémy Coutable".freeze + FILES = [].freeze + FILES_COUNT = FILES.count + end + + module EncodingCommit + ID = "40f4a7a617393735a95a0bb67b08385bc1e7c66d".freeze + PARENT_ID = "66028349a123e695b589e09a36634d976edcc5e8".freeze + MESSAGE = "Add ISO-8859-encoded file".freeze + AUTHOR_FULL_NAME = "Stan Hu".freeze + FILES = ["encoding/iso8859.txt"].freeze + FILES_COUNT = FILES.count + end + + module FirstCommit + ID = "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863".freeze + PARENT_ID = nil + MESSAGE = "Initial commit".freeze + AUTHOR_FULL_NAME = "Dmitriy Zaporozhets".freeze + FILES = ["LICENSE", ".gitignore", "README.md"].freeze + FILES_COUNT = 3 + end + + module LastCommit + ID = <%= capture!(%w[git show -s --format=%H HEAD], repo).inspect 
%>.freeze + PARENT_ID = <%= capture!(%w[git show -s --format=%P HEAD], repo).split.last.inspect %>.freeze + MESSAGE = <%= capture!(%w[git show -s --format=%s HEAD], repo).inspect %>.freeze + AUTHOR_FULL_NAME = <%= capture!(%w[git show -s --format=%an HEAD], repo).inspect %>.freeze + FILES = <%= + parents = capture!(%w[git show -s --format=%P HEAD], repo).split + merge_base = parents.size > 1 ? capture!(%w[git merge-base] + parents, repo) : parents.first + capture!( %W[git diff --name-only #{merge_base}..HEAD --], repo).split("\n").inspect + %>.freeze + FILES_COUNT = FILES.count + end + + module Repo + HEAD = "master".freeze + BRANCHES = %w[ +<%= capture!(%W[git for-each-ref --format=#{' ' * 3}%(refname:strip=2) refs/heads/], repo) %> + ].freeze + TAGS = %w[ +<%= capture!(%W[git for-each-ref --format=#{' ' * 3}%(refname:strip=2) refs/tags/], repo) %> + ].freeze + end + + module RubyBlob + ID = "7e3e39ebb9b2bf433b4ad17313770fbe4051649c".freeze + NAME = "popen.rb".freeze + CONTENT = <<-eos.freeze +require 'fileutils' +require 'open3' + +module Popen + extend self + + def popen(cmd, path=nil) + unless cmd.is_a?(Array) + raise RuntimeError, "System commands must be given as an array of strings" + end + + path ||= Dir.pwd + + vars = { + "PWD" => path + } + + options = { + chdir: path + } + + unless File.directory?(path) + FileUtils.mkdir_p(path) + end + + @cmd_output = "" + @cmd_status = 0 + + Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr| + @cmd_output << stdout.read + @cmd_output << stderr.read + @cmd_status = wait_thr.value.exitstatus + end + + return @cmd_output, @cmd_status + end +end + eos + end +end diff --git a/spec/support/git_helpers.rb b/spec/support/git_helpers.rb deleted file mode 100644 index 93422390ef7..00000000000 --- a/spec/support/git_helpers.rb +++ /dev/null @@ -1,9 +0,0 @@ -module GitHelpers - def random_git_name - "#{FFaker::Product.brand}-#{FFaker::Product.brand}-#{rand(1000)}" - end -end - -RSpec.configure do |config| - config.include GitHelpers -end diff --git a/spec/support/gitaly.rb b/spec/support/gitaly.rb new file mode 100644 index 00000000000..7aca902fc61 --- /dev/null +++ b/spec/support/gitaly.rb @@ -0,0 +1,7 @@ +if Gitlab::GitalyClient.enabled? + RSpec.configure do |config| + config.before(:each) do + allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(true) + end + end +end diff --git a/spec/support/helpers/fake_blob_helpers.rb b/spec/support/helpers/fake_blob_helpers.rb new file mode 100644 index 00000000000..bc9686ed9cf --- /dev/null +++ b/spec/support/helpers/fake_blob_helpers.rb @@ -0,0 +1,40 @@ +module FakeBlobHelpers + class FakeBlob + include BlobLike + + attr_reader :path, :size, :data, :lfs_oid, :lfs_size + + def initialize(path: 'file.txt', size: 1.kilobyte, data: 'foo', binary: false, lfs: nil) + @path = path + @size = size + @data = data + @binary = binary + + @lfs_pointer = lfs.present? + if @lfs_pointer + @lfs_oid = SecureRandom.hex(20) + @lfs_size = 1.megabyte + end + end + + alias_method :name, :path + + def id + 0 + end + + def binary? 
+ @binary + end + + def external_storage + :lfs if @lfs_pointer + end + + alias_method :external_size, :lfs_size + end + + def fake_blob(**kwargs) + Blob.decorate(FakeBlob.new(**kwargs), project) + end +end diff --git a/spec/support/import_export/export_file_helper.rb b/spec/support/import_export/export_file_helper.rb index 944ea30656f..57b6abe12b7 100644 --- a/spec/support/import_export/export_file_helper.rb +++ b/spec/support/import_export/export_file_helper.rb @@ -10,7 +10,7 @@ module ExportFileHelper create(:release, project: project) - issue = create(:issue, assignee: user, project: project) + issue = create(:issue, assignees: [user], project: project) snippet = create(:project_snippet, project: project) label = create(:label, project: project) milestone = create(:milestone, project: project) diff --git a/spec/support/import_export/import_export.yml b/spec/support/import_export/import_export.yml index 17136dee000..734d6838f4d 100644 --- a/spec/support/import_export/import_export.yml +++ b/spec/support/import_export/import_export.yml @@ -11,9 +11,6 @@ project_tree: - :user included_attributes: - project: - - :name - - :path merge_requests: - :id user: @@ -21,4 +18,7 @@ included_attributes: excluded_attributes: merge_requests: - - :iid
\ No newline at end of file + - :iid + project: + - :id + - :created_at
\ No newline at end of file diff --git a/spec/support/issuables_list_metadata_shared_examples.rb b/spec/support/issuables_list_metadata_shared_examples.rb index 4c0f556e736..3406e4c3161 100644 --- a/spec/support/issuables_list_metadata_shared_examples.rb +++ b/spec/support/issuables_list_metadata_shared_examples.rb @@ -2,12 +2,12 @@ shared_examples 'issuables list meta-data' do |issuable_type, action = nil| before do @issuable_ids = [] - 2.times do + 2.times do |n| issuable = if issuable_type == :issue create(issuable_type, project: project) else - create(issuable_type, title: FFaker::Lorem.sentence, source_project: project, source_branch: FFaker::Name.name) + create(issuable_type, source_project: project, source_branch: "#{n}-feature") end @issuable_ids << issuable.id @@ -33,4 +33,19 @@ shared_examples 'issuables list meta-data' do |issuable_type, action = nil| expect(meta_data[id].upvotes).to eq(id + 2) end end + + describe "when given empty collection" do + let(:project2) { create(:empty_project, :public) } + + it "doesn't execute any queries with false conditions" do + get_action = + if action + proc { get action } + else + proc { get :index, namespace_id: project2.namespace, project_id: project2 } + end + + expect(&get_action).not_to make_queries_matching(/WHERE (?:1=0|0=1)/) + end + end end diff --git a/spec/support/kubernetes_helpers.rb b/spec/support/kubernetes_helpers.rb index b5ed71ba3be..d2a1ded57ff 100644 --- a/spec/support/kubernetes_helpers.rb +++ b/spec/support/kubernetes_helpers.rb @@ -5,7 +5,7 @@ module KubernetesHelpers { "kind" => "APIResourceList", "resources" => [ - { "name" => "pods", "namespaced" => true, "kind" => "Pod" }, + { "name" => "pods", "namespaced" => true, "kind" => "Pod" } ] } end @@ -22,13 +22,13 @@ module KubernetesHelpers "metadata" => { "name" => "kube-pod", "creationTimestamp" => "2016-11-25T19:55:19Z", - "labels" => { "app" => app }, + "labels" => { "app" => app } }, "spec" => { "containers" => [ { "name" => "container-0" }, - { "name" => "container-1" }, - ], + { "name" => "container-1" } + ] }, "status" => { "phase" => "Running" } } diff --git a/spec/support/login_helpers.rb b/spec/support/login_helpers.rb index 9ffb00be0b8..e6da852e728 100644 --- a/spec/support/login_helpers.rb +++ b/spec/support/login_helpers.rb @@ -84,8 +84,4 @@ module LoginHelpers def logout_direct page.driver.submit :delete, '/users/sign_out', {} end - - def skip_ci_admin_auth - allow_any_instance_of(Ci::Admin::ApplicationController).to receive_messages(authenticate_admin!: true) - end end diff --git a/spec/support/markdown_feature.rb b/spec/support/markdown_feature.rb index dea0015f105..21a054af4e1 100644 --- a/spec/support/markdown_feature.rb +++ b/spec/support/markdown_feature.rb @@ -23,7 +23,7 @@ class MarkdownFeature # Direct references ---------------------------------------------------------- def project - @project ||= create(:project).tap do |project| + @project ||= create(:project, :repository).tap do |project| project.team << [user, :master] end end @@ -80,7 +80,7 @@ class MarkdownFeature def xproject @xproject ||= begin group = create(:group, :nested) - create(:project, namespace: group) do |project| + create(:project, :repository, namespace: group) do |project| project.team << [user, :developer] end end diff --git a/spec/support/matchers/access_matchers.rb b/spec/support/matchers/access_matchers.rb index 7d238850520..3e4ca8b7ab0 100644 --- a/spec/support/matchers/access_matchers.rb +++ b/spec/support/matchers/access_matchers.rb @@ -51,7 +51,7 @@ module 
AccessMatchers emulate_user(user, @membership) visit(url) - status_code != 404 && current_path != new_user_session_path + status_code == 200 && current_path != new_user_session_path end chain :of do |membership| @@ -66,7 +66,7 @@ module AccessMatchers emulate_user(user, @membership) visit(url) - status_code == 404 || current_path == new_user_session_path + [401, 404].include?(status_code) || current_path == new_user_session_path end chain :of do |membership| diff --git a/spec/support/matchers/gitaly_matchers.rb b/spec/support/matchers/gitaly_matchers.rb index d7a53820684..ed14bcec9f2 100644 --- a/spec/support/matchers/gitaly_matchers.rb +++ b/spec/support/matchers/gitaly_matchers.rb @@ -1,3 +1,9 @@ -RSpec::Matchers.define :post_receive_request_with_repo_path do |path| +RSpec::Matchers.define :gitaly_request_with_repo_path do |path| match { |actual| actual.repository.path == path } end + +RSpec::Matchers.define :gitaly_request_with_params do |params| + match do |actual| + params.reduce(true) { |r, (key, val)| r && actual.send(key) == val } + end +end diff --git a/spec/support/matchers/gitlab_git_matchers.rb b/spec/support/matchers/gitlab_git_matchers.rb new file mode 100644 index 00000000000..c840cd4bf2d --- /dev/null +++ b/spec/support/matchers/gitlab_git_matchers.rb @@ -0,0 +1,6 @@ +RSpec::Matchers.define :gitlab_git_repository_with do |values| + match do |actual| + actual.is_a?(Gitlab::Git::Repository) && + values.all? { |k, v| actual.send(k) == v } + end +end diff --git a/spec/support/matchers/query_matcher.rb b/spec/support/matchers/query_matcher.rb new file mode 100644 index 00000000000..ac8c4ab91d9 --- /dev/null +++ b/spec/support/matchers/query_matcher.rb @@ -0,0 +1,33 @@ +RSpec::Matchers.define :make_queries_matching do |matcher, expected_count = nil| + supports_block_expectations + + match do |block| + @counter = query_count(matcher, &block) + if expected_count + @counter.count == expected_count + else + @counter.count > 0 + end + end + + failure_message_when_negated do |_| + if expected_count + "expected #{matcher} not to match #{expected_count} queries, got #{@counter.count} matches:\n\n#{@counter.inspect}" + else + "expected #{matcher} not to match any query, got #{@counter.count} matches:\n\n#{@counter.inspect}" + end + end + + failure_message do |_| + if expected_count + "expected #{matcher} to match #{expected_count} queries, got #{@counter.count} matches:\n\n#{@counter.inspect}" + else + "expected #{matcher} to match at least one query, got #{@counter.count} matches:\n\n#{@counter.inspect}" + end + end + + def query_count(regex, &block) + @recorder = ActiveRecord::QueryRecorder.new(&block).log + @recorder.select{ |q| q.match(regex) } + end +end diff --git a/spec/support/matchers/user_activity_matchers.rb b/spec/support/matchers/user_activity_matchers.rb new file mode 100644 index 00000000000..ce3b683b6d2 --- /dev/null +++ b/spec/support/matchers/user_activity_matchers.rb @@ -0,0 +1,5 @@ +RSpec::Matchers.define :have_an_activity_record do |expected| + match do |user| + expect(Gitlab::UserActivities.new.find { |k, _| k == user.id.to_s }).to be_present + end +end diff --git a/spec/support/milestone_tabs_examples.rb b/spec/support/milestone_tabs_examples.rb new file mode 100644 index 00000000000..4ad8b0a16e1 --- /dev/null +++ b/spec/support/milestone_tabs_examples.rb @@ -0,0 +1,68 @@ +shared_examples 'milestone tabs' do + def go(path, extra_params = {}) + params = if milestone.is_a?(GlobalMilestone) + { group_id: group.to_param, id: milestone.safe_title, title: milestone.title 
} + else + { namespace_id: project.namespace.to_param, project_id: project, id: milestone.iid } + end + + get path, params.merge(extra_params) + end + + describe '#merge_requests' do + context 'as html' do + before { go(:merge_requests, format: 'html') } + + it 'redirects to milestone#show' do + expect(response).to redirect_to(milestone_path) + end + end + + context 'as json' do + before { go(:merge_requests, format: 'json') } + + it 'renders the merge requests tab template to a string' do + expect(response).to render_template('shared/milestones/_merge_requests_tab') + expect(json_response).to have_key('html') + end + end + end + + describe '#participants' do + context 'as html' do + before { go(:participants, format: 'html') } + + it 'redirects to milestone#show' do + expect(response).to redirect_to(milestone_path) + end + end + + context 'as json' do + before { go(:participants, format: 'json') } + + it 'renders the participants tab template to a string' do + expect(response).to render_template('shared/milestones/_participants_tab') + expect(json_response).to have_key('html') + end + end + end + + describe '#labels' do + context 'as html' do + before { go(:labels, format: 'html') } + + it 'redirects to milestone#show' do + expect(response).to redirect_to(milestone_path) + end + end + + context 'as json' do + before { go(:labels, format: 'json') } + + it 'renders the labels tab template to a string' do + expect(response).to render_template('shared/milestones/_labels_tab') + expect(json_response).to have_key('html') + end + end + end +end diff --git a/spec/support/mobile_helpers.rb b/spec/support/mobile_helpers.rb index 20d5849bcab..431f20a2a5c 100644 --- a/spec/support/mobile_helpers.rb +++ b/spec/support/mobile_helpers.rb @@ -1,4 +1,8 @@ module MobileHelpers + def resize_screen_xs + resize_window(767, 768) + end + def resize_screen_sm resize_window(900, 768) end diff --git a/spec/support/prometheus_helpers.rb b/spec/support/prometheus_helpers.rb index cc79b11616a..6b9ebcf2bb3 100644 --- a/spec/support/prometheus_helpers.rb +++ b/spec/support/prometheus_helpers.rb @@ -1,10 +1,16 @@ module PrometheusHelpers def prometheus_memory_query(environment_slug) - %{(sum(container_memory_usage_bytes{container_name!="POD",environment="#{environment_slug}"}) / count(container_memory_usage_bytes{container_name!="POD",environment="#{environment_slug}"})) /1024/1024} + %{avg(container_memory_usage_bytes{container_name!="POD",environment="#{environment_slug}"}) / 2^20} end def prometheus_cpu_query(environment_slug) - %{sum(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="#{environment_slug}"}[2m])) / count(container_cpu_usage_seconds_total{container_name!="POD",environment="#{environment_slug}"}) * 100} + %{avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="#{environment_slug}"}[2m])) * 100} + end + + def prometheus_ping_url(prometheus_query) + query = { query: prometheus_query }.to_query + + "https://prometheus.example.com/api/v1/query?#{query}" end def prometheus_query_url(prometheus_query) @@ -13,11 +19,17 @@ module PrometheusHelpers "https://prometheus.example.com/api/v1/query?#{query}" end - def prometheus_query_range_url(prometheus_query, start: 8.hours.ago) + def prometheus_query_with_time_url(prometheus_query, time) + query = { query: prometheus_query, time: time.to_f }.to_query + + "https://prometheus.example.com/api/v1/query?#{query}" + end + + def prometheus_query_range_url(prometheus_query, start: 8.hours.ago, stop: Time.now.to_f) query = { 
query: prometheus_query, start: start.to_f, - end: Time.now.utc.to_f, + end: stop, step: 1.minute.to_i }.to_query @@ -33,9 +45,18 @@ module PrometheusHelpers }) end + def stub_prometheus_request_with_exception(url, exception_type) + WebMock.stub_request(:get, url).to_raise(exception_type) + end + def stub_all_prometheus_requests(environment_slug, body: nil, status: 200) stub_prometheus_request( - prometheus_query_url(prometheus_memory_query(environment_slug)), + prometheus_query_with_time_url(prometheus_memory_query(environment_slug), Time.now.utc), + status: status, + body: body || prometheus_value_body + ) + stub_prometheus_request( + prometheus_query_with_time_url(prometheus_memory_query(environment_slug), 8.hours.ago), status: status, body: body || prometheus_value_body ) @@ -45,7 +66,12 @@ module PrometheusHelpers body: body || prometheus_values_body ) stub_prometheus_request( - prometheus_query_url(prometheus_cpu_query(environment_slug)), + prometheus_query_with_time_url(prometheus_cpu_query(environment_slug), Time.now.utc), + status: status, + body: body || prometheus_value_body + ) + stub_prometheus_request( + prometheus_query_with_time_url(prometheus_cpu_query(environment_slug), 8.hours.ago), status: status, body: body || prometheus_value_body ) diff --git a/spec/features/protected_branches/access_control_ce_spec.rb b/spec/support/protected_branches/access_control_ce_shared_examples.rb index e4aca25a339..7fda4ade665 100644 --- a/spec/features/protected_branches/access_control_ce_spec.rb +++ b/spec/support/protected_branches/access_control_ce_shared_examples.rb @@ -2,15 +2,18 @@ RSpec.shared_examples "protected branches > access control > CE" do ProtectedBranch::PushAccessLevel.human_access_levels.each do |(access_type_id, access_type_name)| it "allows creating protected branches that #{access_type_name} can push to" do visit namespace_project_protected_branches_path(project.namespace, project) + set_protected_branch_name('master') + within('.new_protected_branch') do allowed_to_push_button = find(".js-allowed-to-push") unless allowed_to_push_button.text == access_type_name - allowed_to_push_button.click + allowed_to_push_button.trigger('click') within(".dropdown.open .dropdown-menu") { click_on access_type_name } end end + click_on "Protect" expect(ProtectedBranch.count).to eq(1) @@ -19,14 +22,16 @@ RSpec.shared_examples "protected branches > access control > CE" do it "allows updating protected branches so that #{access_type_name} can push to them" do visit namespace_project_protected_branches_path(project.namespace, project) + set_protected_branch_name('master') + click_on "Protect" expect(ProtectedBranch.count).to eq(1) within(".protected-branches-list") do find(".js-allowed-to-push").click - + within('.js-allowed-to-push-container') do expect(first("li")).to have_content("Roles") click_on access_type_name @@ -34,6 +39,7 @@ RSpec.shared_examples "protected branches > access control > CE" do end wait_for_ajax + expect(ProtectedBranch.last.push_access_levels.map(&:access_level)).to include(access_type_id) end end @@ -41,7 +47,9 @@ RSpec.shared_examples "protected branches > access control > CE" do ProtectedBranch::MergeAccessLevel.human_access_levels.each do |(access_type_id, access_type_name)| it "allows creating protected branches that #{access_type_name} can merge to" do visit namespace_project_protected_branches_path(project.namespace, project) + set_protected_branch_name('master') + within('.new_protected_branch') do allowed_to_merge_button = find(".js-allowed-to-merge") @@ 
-50,6 +58,7 @@ RSpec.shared_examples "protected branches > access control > CE" do within(".dropdown.open .dropdown-menu") { click_on access_type_name } end end + click_on "Protect" expect(ProtectedBranch.count).to eq(1) @@ -58,7 +67,9 @@ RSpec.shared_examples "protected branches > access control > CE" do it "allows updating protected branches so that #{access_type_name} can merge to them" do visit namespace_project_protected_branches_path(project.namespace, project) + set_protected_branch_name('master') + click_on "Protect" expect(ProtectedBranch.count).to eq(1) @@ -73,6 +84,7 @@ RSpec.shared_examples "protected branches > access control > CE" do end wait_for_ajax + expect(ProtectedBranch.last.merge_access_levels.map(&:access_level)).to include(access_type_id) end end diff --git a/spec/support/protected_tags/access_control_ce_shared_examples.rb b/spec/support/protected_tags/access_control_ce_shared_examples.rb new file mode 100644 index 00000000000..12622cd548a --- /dev/null +++ b/spec/support/protected_tags/access_control_ce_shared_examples.rb @@ -0,0 +1,47 @@ +RSpec.shared_examples "protected tags > access control > CE" do + ProtectedTag::CreateAccessLevel.human_access_levels.each do |(access_type_id, access_type_name)| + it "allows creating protected tags that #{access_type_name} can create" do + visit namespace_project_protected_tags_path(project.namespace, project) + + set_protected_tag_name('master') + + within('.js-new-protected-tag') do + allowed_to_create_button = find(".js-allowed-to-create") + + unless allowed_to_create_button.text == access_type_name + allowed_to_create_button.trigger('click') + find('.create_access_levels-container .dropdown-menu li', match: :first) + within('.create_access_levels-container .dropdown-menu') { click_on access_type_name } + end + end + + click_on "Protect" + + expect(ProtectedTag.count).to eq(1) + expect(ProtectedTag.last.create_access_levels.map(&:access_level)).to eq([access_type_id]) + end + + it "allows updating protected tags so that #{access_type_name} can create them" do + visit namespace_project_protected_tags_path(project.namespace, project) + + set_protected_tag_name('master') + + click_on "Protect" + + expect(ProtectedTag.count).to eq(1) + + within(".protected-tags-list") do + find(".js-allowed-to-create").click + + within('.js-allowed-to-create-container') do + expect(first("li")).to have_content("Roles") + click_on access_type_name + end + end + + wait_for_ajax + + expect(ProtectedTag.last.create_access_levels.map(&:access_level)).to include(access_type_id) + end + end +end diff --git a/spec/support/query_recorder.rb b/spec/support/query_recorder.rb index e40d5ebd9a8..55b531b4cf7 100644 --- a/spec/support/query_recorder.rb +++ b/spec/support/query_recorder.rb @@ -1,21 +1,29 @@ module ActiveRecord class QueryRecorder - attr_reader :log + attr_reader :log, :cached def initialize(&block) @log = [] + @cached = [] ActiveSupport::Notifications.subscribed(method(:callback), 'sql.active_record', &block) end def callback(name, start, finish, message_id, values) - return if %w(CACHE SCHEMA).include?(values[:name]) - @log << values[:sql] + if values[:name]&.include?("CACHE") + @cached << values[:sql] + elsif !values[:name]&.include?("SCHEMA") + @log << values[:sql] + end end def count @log.count end + def cached_count + @cached.count + end + def log_message @log.join("\n\n") end diff --git a/spec/support/repo_helpers.rb b/spec/support/repo_helpers.rb index e9d5c7b12ae..3c6956cf5e0 100644 --- a/spec/support/repo_helpers.rb +++ 
b/spec/support/repo_helpers.rb @@ -92,11 +92,11 @@ eos changes = [ { line_code: 'a5cc2925ca8258af241be7e5b0381edf30266302_20_20', - file_path: '.gitignore', + file_path: '.gitignore' }, { line_code: '7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44_4_6', - file_path: '.gitmodules', + file_path: '.gitmodules' } ] diff --git a/spec/support/seed_helper.rb b/spec/support/seed_helper.rb index f55fee28ff9..47b5f556e66 100644 --- a/spec/support/seed_helper.rb +++ b/spec/support/seed_helper.rb @@ -1,20 +1,22 @@ +require_relative 'test_env' + # This file is specific to specs in spec/lib/gitlab/git/ -SEED_REPOSITORY_PATH = File.expand_path('../../tmp/repositories', __dir__) -TEST_REPO_PATH = File.join(SEED_REPOSITORY_PATH, 'gitlab-git-test.git') -TEST_NORMAL_REPO_PATH = File.join(SEED_REPOSITORY_PATH, "not-bare-repo.git") -TEST_MUTABLE_REPO_PATH = File.join(SEED_REPOSITORY_PATH, "mutable-repo.git") -TEST_BROKEN_REPO_PATH = File.join(SEED_REPOSITORY_PATH, "broken-repo.git") +SEED_STORAGE_PATH = TestEnv.repos_path +TEST_REPO_PATH = 'gitlab-git-test.git'.freeze +TEST_NORMAL_REPO_PATH = 'not-bare-repo.git'.freeze +TEST_MUTABLE_REPO_PATH = 'mutable-repo.git'.freeze +TEST_BROKEN_REPO_PATH = 'broken-repo.git'.freeze module SeedHelper GITLAB_GIT_TEST_REPO_URL = ENV.fetch('GITLAB_GIT_TEST_REPO_URL', 'https://gitlab.com/gitlab-org/gitlab-git-test.git').freeze def ensure_seeds - if File.exist?(SEED_REPOSITORY_PATH) - FileUtils.rm_r(SEED_REPOSITORY_PATH) + if File.exist?(SEED_STORAGE_PATH) + FileUtils.rm_r(SEED_STORAGE_PATH) end - FileUtils.mkdir_p(SEED_REPOSITORY_PATH) + FileUtils.mkdir_p(SEED_STORAGE_PATH) create_bare_seeds create_normal_seeds @@ -26,41 +28,45 @@ module SeedHelper def create_bare_seeds system(git_env, *%W(#{Gitlab.config.git.bin_path} clone --bare #{GITLAB_GIT_TEST_REPO_URL}), - chdir: SEED_REPOSITORY_PATH, + chdir: SEED_STORAGE_PATH, out: '/dev/null', err: '/dev/null') end def create_normal_seeds system(git_env, *%W(#{Gitlab.config.git.bin_path} clone #{TEST_REPO_PATH} #{TEST_NORMAL_REPO_PATH}), + chdir: SEED_STORAGE_PATH, out: '/dev/null', err: '/dev/null') end def create_mutable_seeds system(git_env, *%W(#{Gitlab.config.git.bin_path} clone #{TEST_REPO_PATH} #{TEST_MUTABLE_REPO_PATH}), + chdir: SEED_STORAGE_PATH, out: '/dev/null', err: '/dev/null') - system(git_env, *%w(git branch -t feature origin/feature), - chdir: TEST_MUTABLE_REPO_PATH, out: '/dev/null', err: '/dev/null') + mutable_repo_full_path = File.join(SEED_STORAGE_PATH, TEST_MUTABLE_REPO_PATH) + system(git_env, *%W(#{Gitlab.config.git.bin_path} branch -t feature origin/feature), + chdir: mutable_repo_full_path, out: '/dev/null', err: '/dev/null') system(git_env, *%W(#{Gitlab.config.git.bin_path} remote add expendable #{GITLAB_GIT_TEST_REPO_URL}), - chdir: TEST_MUTABLE_REPO_PATH, out: '/dev/null', err: '/dev/null') + chdir: mutable_repo_full_path, out: '/dev/null', err: '/dev/null') end def create_broken_seeds system(git_env, *%W(#{Gitlab.config.git.bin_path} clone --bare #{TEST_REPO_PATH} #{TEST_BROKEN_REPO_PATH}), + chdir: SEED_STORAGE_PATH, out: '/dev/null', err: '/dev/null') - refs_path = File.join(TEST_BROKEN_REPO_PATH, 'refs') + refs_path = File.join(SEED_STORAGE_PATH, TEST_BROKEN_REPO_PATH, 'refs') FileUtils.rm_r(refs_path) end def create_git_attributes - dir = File.join(SEED_REPOSITORY_PATH, 'with-git-attributes.git', 'info') + dir = File.join(SEED_STORAGE_PATH, 'with-git-attributes.git', 'info') FileUtils.mkdir_p(dir) @@ -85,7 +91,7 @@ bla/bla.txt end def create_invalid_git_attributes - dir = File.join(SEED_REPOSITORY_PATH, 
'with-invalid-git-attributes.git', 'info') + dir = File.join(SEED_STORAGE_PATH, 'with-invalid-git-attributes.git', 'info') FileUtils.mkdir_p(dir) diff --git a/spec/support/seed_repo.rb b/spec/support/seed_repo.rb index 99a500bbbb1..cfe7fc980a8 100644 --- a/spec/support/seed_repo.rb +++ b/spec/support/seed_repo.rb @@ -1,4 +1,8 @@ +# This file is generated by generate-seed-repo-rb. Do not edit this file manually. +# # Seed repo: +# 4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6 Merge branch 'master' into 'master' +# 0e1b353b348f8477bdbec1ef47087171c5032cd9 adds an executable with different permissions # 0e50ec4d3c7ce42ab74dda1d422cb2cbffe1e326 Merge branch 'lfs_pointers' into 'master' # 33bcff41c232a11727ac6d660bd4b0c2ba86d63d Add valid and invalid lfs pointers # 732401c65e924df81435deb12891ef570167d2e2 Update year in license file @@ -94,7 +98,12 @@ module SeedRepo master merge-test ].freeze - TAGS = %w[v1.0.0 v1.1.0 v1.2.0 v1.2.1].freeze + TAGS = %w[ + v1.0.0 + v1.1.0 + v1.2.0 + v1.2.1 + ].freeze end module RubyBlob diff --git a/spec/support/services/issuable_create_service_shared_examples.rb b/spec/support/services/issuable_create_service_shared_examples.rb deleted file mode 100644 index 4f0c745b7ee..00000000000 --- a/spec/support/services/issuable_create_service_shared_examples.rb +++ /dev/null @@ -1,52 +0,0 @@ -shared_examples 'issuable create service' do - context 'asssignee_id' do - let(:assignee) { create(:user) } - - before { project.team << [user, :master] } - - it 'removes assignee_id when user id is invalid' do - opts = { title: 'Title', description: 'Description', assignee_id: -1 } - - issuable = described_class.new(project, user, opts).execute - - expect(issuable.assignee_id).to be_nil - end - - it 'removes assignee_id when user id is 0' do - opts = { title: 'Title', description: 'Description', assignee_id: 0 } - - issuable = described_class.new(project, user, opts).execute - - expect(issuable.assignee_id).to be_nil - end - - it 'saves assignee when user id is valid' do - project.team << [assignee, :master] - opts = { title: 'Title', description: 'Description', assignee_id: assignee.id } - - issuable = described_class.new(project, user, opts).execute - - expect(issuable.assignee_id).to eq(assignee.id) - end - - context "when issuable feature is private" do - before do - project.project_feature.update(issues_access_level: ProjectFeature::PRIVATE, - merge_requests_access_level: ProjectFeature::PRIVATE) - end - - levels = [Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PUBLIC] - - levels.each do |level| - it "removes not authorized assignee when project is #{Gitlab::VisibilityLevel.level_name(level)}" do - project.update(visibility_level: level) - opts = { title: 'Title', description: 'Description', assignee_id: assignee.id } - - issuable = described_class.new(project, user, opts).execute - - expect(issuable.assignee_id).to be_nil - end - end - end - end -end diff --git a/spec/support/services/issuable_create_service_slash_commands_shared_examples.rb b/spec/support/services/issuable_create_service_slash_commands_shared_examples.rb index ee492daee30..1dd3663b944 100644 --- a/spec/support/services/issuable_create_service_slash_commands_shared_examples.rb +++ b/spec/support/services/issuable_create_service_slash_commands_shared_examples.rb @@ -7,7 +7,7 @@ shared_examples 'new issuable record that supports slash commands' do let(:assignee) { create(:user) } let!(:milestone) { create(:milestone, project: project) } let!(:labels) { create_list(:label, 3, project: project) } - 
let(:base_params) { { title: FFaker::Lorem.sentence(3) } } + let(:base_params) { { title: 'My issuable title' } } let(:params) { base_params.merge(defined?(default_params) ? default_params : {}).merge(example_params) } let(:issuable) { described_class.new(project, user, params).execute } @@ -49,23 +49,7 @@ shared_examples 'new issuable record that supports slash commands' do it 'assigns and sets milestone to issuable' do expect(issuable).to be_persisted - expect(issuable.assignee).to eq(assignee) - expect(issuable.milestone).to eq(milestone) - end - end - - context 'with assignee and milestone in params and command' do - let(:example_params) do - { - assignee: create(:user), - milestone_id: 1, - description: %(/assign @#{assignee.username}\n/milestone %"#{milestone.name}") - } - end - - it 'assigns and sets milestone to issuable from command' do - expect(issuable).to be_persisted - expect(issuable.assignee).to eq(assignee) + expect(issuable.assignees).to eq([assignee]) expect(issuable.milestone).to eq(milestone) end end diff --git a/spec/support/services/issuable_update_service_shared_examples.rb b/spec/support/services/issuable_update_service_shared_examples.rb index 49cea1e608c..8947f20562f 100644 --- a/spec/support/services/issuable_update_service_shared_examples.rb +++ b/spec/support/services/issuable_update_service_shared_examples.rb @@ -18,52 +18,4 @@ shared_examples 'issuable update service' do end end end - - context 'asssignee_id' do - it 'does not update assignee when assignee_id is invalid' do - open_issuable.update(assignee_id: user.id) - - update_issuable(assignee_id: -1) - - expect(open_issuable.reload.assignee).to eq(user) - end - - it 'unassigns assignee when user id is 0' do - open_issuable.update(assignee_id: user.id) - - update_issuable(assignee_id: 0) - - expect(open_issuable.assignee_id).to be_nil - end - - it 'saves assignee when user id is valid' do - update_issuable(assignee_id: user.id) - - expect(open_issuable.assignee_id).to eq(user.id) - end - - it 'does not update assignee_id when user cannot read issue' do - non_member = create(:user) - original_assignee = open_issuable.assignee - - update_issuable(assignee_id: non_member.id) - - expect(open_issuable.assignee_id).to eq(original_assignee.id) - end - - context "when issuable feature is private" do - levels = [Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PUBLIC] - - levels.each do |level| - it "does not update with unauthorized assignee when project is #{Gitlab::VisibilityLevel.level_name(level)}" do - assignee = create(:user) - project.update(visibility_level: level) - feature_visibility_attr = :"#{open_issuable.model_name.plural}_access_level" - project.project_feature.update_attribute(feature_visibility_attr, ProjectFeature::PRIVATE) - - expect{ update_issuable(assignee_id: assignee) }.not_to change{ open_issuable.assignee } - end - end - end - end end diff --git a/spec/support/services/migrate_to_ghost_user_service_shared_examples.rb b/spec/support/services/migrate_to_ghost_user_service_shared_examples.rb new file mode 100644 index 00000000000..dcc562c684b --- /dev/null +++ b/spec/support/services/migrate_to_ghost_user_service_shared_examples.rb @@ -0,0 +1,91 @@ +require "spec_helper" + +shared_examples "migrating a deleted user's associated records to the ghost user" do |record_class| + record_class_name = record_class.to_s.titleize.downcase + + let(:project) { create(:project) } + + before do + project.add_developer(user) + end + + context "for a #{record_class_name} the user has created" do 
+ let!(:record) { created_record } + + it "does not delete the #{record_class_name}" do + service.execute + + expect(record_class.find_by_id(record.id)).to be_present + end + + it "migrates the #{record_class_name} so that the 'Ghost User' is the #{record_class_name} owner" do + service.execute + + migrated_record = record_class.find_by_id(record.id) + + if migrated_record.respond_to?(:author) + expect(migrated_record.author).to eq(User.ghost) + else + expect(migrated_record.send(author_alias)).to eq(User.ghost) + end + end + + it "blocks the user before migrating #{record_class_name}s to the 'Ghost User'" do + service.execute + + expect(user).to be_blocked + end + + context "race conditions" do + context "when #{record_class_name} migration fails and is rolled back" do + before do + expect_any_instance_of(record_class::ActiveRecord_Associations_CollectionProxy) + .to receive(:update_all).and_raise(ActiveRecord::Rollback) + end + + it 'rolls back the user block' do + service.execute + + expect(user.reload).not_to be_blocked + end + + it "doesn't unblock a previously-blocked user" do + user.block + + service.execute + + expect(user.reload).to be_blocked + end + end + + context "when #{record_class_name} migration fails with a non-rollback exception" do + before do + expect_any_instance_of(record_class::ActiveRecord_Associations_CollectionProxy) + .to receive(:update_all).and_raise(ArgumentError) + end + + it 'rolls back the user block' do + service.execute rescue nil + + expect(user.reload).not_to be_blocked + end + + it "doesn't unblock a previously-blocked user" do + user.block + + service.execute rescue nil + + expect(user.reload).to be_blocked + end + end + + it "blocks the user before #{record_class_name} migration begins" do + expect(service).to receive("migrate_#{record_class_name.parameterize('_')}s".to_sym) do + expect(user.reload).to be_blocked + end + + service.execute + end + end + end +end diff --git a/spec/support/slack_mattermost_notifications_shared_examples.rb b/spec/support/slack_mattermost_notifications_shared_examples.rb index b902fe90707..7e35ebb6c97 100644 --- a/spec/support/slack_mattermost_notifications_shared_examples.rb +++ b/spec/support/slack_mattermost_notifications_shared_examples.rb @@ -328,7 +328,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do context 'only notify for the default branch' do context 'when enabled' do let(:pipeline) do - create(:ci_pipeline, project: project, status: 'failed', ref: 'not-the-default-branch') + create(:ci_pipeline, :failed, project: project, ref: 'not-the-default-branch') end before do @@ -342,6 +342,18 @@ RSpec.shared_examples 'slack or mattermost notifications' do expect(result).to be_falsy end end + + context 'when disabled' do + let(:pipeline) do + create(:ci_pipeline, :failed, project: project, ref: 'not-the-default-branch') + end + + before do + chat_service.notify_only_default_branch = false + end + + it_behaves_like 'call Slack/Mattermost API' + end end end end diff --git a/spec/support/slash_commands_helpers.rb b/spec/support/slash_commands_helpers.rb index 0d91fe5fd5d..4bfe481115f 100644 --- a/spec/support/slash_commands_helpers.rb +++ b/spec/support/slash_commands_helpers.rb @@ -3,7 +3,7 @@ module SlashCommandsHelpers Sidekiq::Testing.fake!
do page.within('.js-main-target-form') do fill_in 'note[note]', with: text - find('.comment-btn').trigger('click') + find('.js-comment-submit-button').trigger('click') end end end diff --git a/spec/support/stub_gitlab_calls.rb b/spec/support/stub_gitlab_calls.rb index a01ef576234..ded2d593059 100644 --- a/spec/support/stub_gitlab_calls.rb +++ b/spec/support/stub_gitlab_calls.rb @@ -27,23 +27,40 @@ module StubGitlabCalls def stub_container_registry_config(registry_settings) allow(Gitlab.config.registry).to receive_messages(registry_settings) - allow(Auth::ContainerRegistryAuthenticationService).to receive(:full_access_token).and_return('token') + allow(Auth::ContainerRegistryAuthenticationService) + .to receive(:full_access_token).and_return('token') end - def stub_container_registry_tags(*tags) - allow_any_instance_of(ContainerRegistry::Client).to receive(:repository_tags).and_return( - { "tags" => tags } - ) - allow_any_instance_of(ContainerRegistry::Client).to receive(:repository_manifest).and_return( - JSON.parse(File.read(Rails.root + 'spec/fixtures/container_registry/tag_manifest.json')) - ) - allow_any_instance_of(ContainerRegistry::Client).to receive(:blob).and_return( - File.read(Rails.root + 'spec/fixtures/container_registry/config_blob.json') - ) + def stub_container_registry_tags(repository: :any, tags:) + repository = any_args if repository == :any + + allow_any_instance_of(ContainerRegistry::Client) + .to receive(:repository_tags).with(repository) + .and_return({ 'tags' => tags }) + + allow_any_instance_of(ContainerRegistry::Client) + .to receive(:repository_manifest).with(repository) + .and_return(stub_container_registry_tag_manifest) + + allow_any_instance_of(ContainerRegistry::Client) + .to receive(:blob).with(repository) + .and_return(stub_container_registry_blob) end private + def stub_container_registry_tag_manifest + fixture_path = 'spec/fixtures/container_registry/tag_manifest.json' + + JSON.parse(File.read(Rails.root + fixture_path)) + end + + def stub_container_registry_blob + fixture_path = 'spec/fixtures/container_registry/config_blob.json' + + File.read(Rails.root + fixture_path) + end + def gitlab_url Gitlab.config.gitlab.url end diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb index 78be23bd853..b168098edea 100644 --- a/spec/support/test_env.rb +++ b/spec/support/test_env.rb @@ -27,6 +27,7 @@ module TestEnv 'expand-collapse-files' => '025db92', 'expand-collapse-lines' => '238e82d', 'video' => '8879059', + 'add-balsamiq-file' => 'b89b56d', 'crlf-diff' => '5938907', 'conflict-start' => '824be60', 'conflict-resolvable' => '1450cd6', @@ -38,7 +39,9 @@ module TestEnv 'deleted-image-test' => '6c17798', 'wip' => 'b9238ee', 'csv' => '3dd0896', - 'v1.1.0' => 'b83d6e3' + 'v1.1.0' => 'b83d6e3', + 'add-ipython-files' => '6d85bb6', + 'add-pdf-file' => 'e774ebd' }.freeze # gitlab-test-fork is a fork of gitlab-fork, but we don't necessarily @@ -64,6 +67,8 @@ module TestEnv # Setup GitLab shell for test instance setup_gitlab_shell + setup_gitaly if Gitlab::GitalyClient.enabled? + # Create repository for FactoryGirl.create(:project) setup_factory_repo @@ -71,6 +76,10 @@ module TestEnv setup_forked_repo end + def cleanup + stop_gitaly + end + def disable_mailer allow_any_instance_of(NotificationService).to receive(:mailer). 
and_return(double.as_null_object) @@ -92,7 +101,7 @@ module TestEnv tmp_test_path = Rails.root.join('tmp', 'tests', '**') Dir[tmp_test_path].each do |entry| - unless File.basename(entry) =~ /\Agitlab-(shell|test|test_bare|test-fork|test-fork_bare)\z/ + unless File.basename(entry) =~ /\A(gitaly|gitlab-(shell|test|test_bare|test-fork|test-fork_bare))\z/ FileUtils.rm_rf(entry) end end @@ -110,6 +119,30 @@ module TestEnv end end + def setup_gitaly + socket_path = Gitlab::GitalyClient.address('default').sub(/\Aunix:/, '') + gitaly_dir = File.dirname(socket_path) + + unless File.directory?(gitaly_dir) || system('rake', "gitlab:gitaly:install[#{gitaly_dir}]") + raise "Can't clone gitaly" + end + + start_gitaly(gitaly_dir) + end + + def start_gitaly(gitaly_dir) + gitaly_exec = File.join(gitaly_dir, 'gitaly') + gitaly_config = File.join(gitaly_dir, 'config.toml') + log_file = Rails.root.join('log/gitaly-test.log').to_s + @gitaly_pid = spawn(gitaly_exec, gitaly_config, [:out, :err] => log_file) + end + + def stop_gitaly + return unless @gitaly_pid + + Process.kill('KILL', @gitaly_pid) + end + def setup_factory_repo setup_repo(factory_repo_path, factory_repo_path_bare, factory_repo_name, BRANCH_SHA) @@ -122,26 +155,27 @@ module TestEnv FORKED_BRANCH_SHA) end - def setup_repo(repo_path, repo_path_bare, repo_name, branch_sha) + def setup_repo(repo_path, repo_path_bare, repo_name, refs) clone_url = "https://gitlab.com/gitlab-org/#{repo_name}.git" unless File.directory?(repo_path) system(*%W(#{Gitlab.config.git.bin_path} clone -q #{clone_url} #{repo_path})) end - set_repo_refs(repo_path, branch_sha) + set_repo_refs(repo_path, refs) - # We must copy bare repositories because we will push to them. - system(git_env, *%W(#{Gitlab.config.git.bin_path} clone -q --bare #{repo_path} #{repo_path_bare})) + unless File.directory?(repo_path_bare) + # We must copy bare repositories because we will push to them. + system(git_env, *%W(#{Gitlab.config.git.bin_path} clone -q --bare #{repo_path} #{repo_path_bare})) + end end - def copy_repo(project) - base_repo_path = File.expand_path(factory_repo_path_bare) + def copy_repo(project, bare_repo:, refs:) target_repo_path = File.expand_path(project.repository_storage_path + "/#{project.full_path}.git") FileUtils.mkdir_p(target_repo_path) - FileUtils.cp_r("#{base_repo_path}/.", target_repo_path) + FileUtils.cp_r("#{File.expand_path(bare_repo)}/.", target_repo_path) FileUtils.chmod_R 0755, target_repo_path - set_repo_refs(target_repo_path, BRANCH_SHA) + set_repo_refs(target_repo_path, refs) end def repos_path @@ -156,29 +190,23 @@ module TestEnv Gitlab.config.pages.path end - def copy_forked_repo_with_submodules(project) - base_repo_path = File.expand_path(forked_repo_path_bare) - target_repo_path = File.expand_path(project.repository_storage_path + "/#{project.full_path}.git") - FileUtils.mkdir_p(target_repo_path) - FileUtils.cp_r("#{base_repo_path}/.", target_repo_path) - FileUtils.chmod_R 0755, target_repo_path - set_repo_refs(target_repo_path, FORKED_BRANCH_SHA) - end - # When no cached assets exist, manually hit the root path to create them # # Otherwise they'd be created by the first test, often timing out and # causing a transient test failure - def warm_asset_cache - return if warm_asset_cache? + def eager_load_driver_server return unless defined?(Capybara) - Capybara.current_session.driver.visit '/' + puts "Starting the Capybara driver server..." + Capybara.current_session.visit '/' end - def warm_asset_cache? 
- cache = Rails.root.join(*%w(tmp cache assets test)) - Dir.exist?(cache) && Dir.entries(cache).length > 2 + def factory_repo_path_bare + "#{factory_repo_path}_bare" + end + + def forked_repo_path_bare + "#{forked_repo_path}_bare" end private @@ -187,10 +215,6 @@ module TestEnv @factory_repo_path ||= Rails.root.join('tmp', 'tests', factory_repo_name) end - def factory_repo_path_bare - "#{factory_repo_path}_bare" - end - def factory_repo_name 'gitlab-test' end @@ -199,10 +223,6 @@ module TestEnv @forked_repo_path ||= Rails.root.join('tmp', 'tests', forked_repo_name) end - def forked_repo_path_bare - "#{forked_repo_path}_bare" - end - def forked_repo_name 'gitlab-test-fork' end @@ -214,19 +234,22 @@ module TestEnv end def set_repo_refs(repo_path, branch_sha) - instructions = branch_sha.map {|branch, sha| "update refs/heads/#{branch}\x00#{sha}\x00" }.join("\x00") << "\x00" + instructions = branch_sha.map { |branch, sha| "update refs/heads/#{branch}\x00#{sha}\x00" }.join("\x00") << "\x00" update_refs = %W(#{Gitlab.config.git.bin_path} update-ref --stdin -z) reset = proc do - IO.popen(update_refs, "w") {|io| io.write(instructions) } - $?.success? + Dir.chdir(repo_path) do + IO.popen(update_refs, "w") { |io| io.write(instructions) } + $?.success? + end end - Dir.chdir(repo_path) do - # Try to reset without fetching to avoid using the network. - unless reset.call - raise 'Could not fetch test seed repository.' unless system(*%W(#{Gitlab.config.git.bin_path} fetch origin)) - raise 'The fetched test seed does not contain the required revision.' unless reset.call - end + # Try to reset without fetching to avoid using the network. + unless reset.call + raise 'Could not fetch test seed repository.' unless system(*%W(#{Gitlab.config.git.bin_path} -C #{repo_path} fetch origin)) + + # Before we used Git clone's --mirror option, bare repos could end up + # with missing refs, clearing them and retrying should fix the issue. 
+ cleanup && init unless reset.call end end end diff --git a/spec/support/time_tracking_shared_examples.rb b/spec/support/time_tracking_shared_examples.rb index 52f4fabdc47..84ef46ffa27 100644 --- a/spec/support/time_tracking_shared_examples.rb +++ b/spec/support/time_tracking_shared_examples.rb @@ -8,6 +8,7 @@ shared_examples 'issuable time tracker' do it 'updates the sidebar component when estimate is added' do submit_time('/estimate 3w 1d 1h') + wait_for_ajax page.within '.time-tracking-estimate-only-pane' do expect(page).to have_content '3w 1d 1h' end @@ -16,6 +17,7 @@ shared_examples 'issuable time tracker' do it 'updates the sidebar component when spent is added' do submit_time('/spend 3w 1d 1h') + wait_for_ajax page.within '.time-tracking-spend-only-pane' do expect(page).to have_content '3w 1d 1h' end @@ -25,6 +27,7 @@ shared_examples 'issuable time tracker' do submit_time('/estimate 3w 1d 1h') submit_time('/spend 3w 1d 1h') + wait_for_ajax page.within '.time-tracking-comparison-pane' do expect(page).to have_content '3w 1d 1h' end @@ -34,7 +37,7 @@ shared_examples 'issuable time tracker' do submit_time('/estimate 3w 1d 1h') submit_time('/remove_estimate') - page.within '#issuable-time-tracker' do + page.within '.time-tracking-component-wrap' do expect(page).to have_content 'No estimate or time spent' end end @@ -43,13 +46,13 @@ shared_examples 'issuable time tracker' do submit_time('/spend 3w 1d 1h') submit_time('/remove_time_spent') - page.within '#issuable-time-tracker' do + page.within '.time-tracking-component-wrap' do expect(page).to have_content 'No estimate or time spent' end end it 'shows the help state when icon is clicked' do - page.within '#issuable-time-tracker' do + page.within '.time-tracking-component-wrap' do find('.help-button').click expect(page).to have_content 'Track time with slash commands' expect(page).to have_content 'Learn more' @@ -57,7 +60,7 @@ shared_examples 'issuable time tracker' do end it 'hides the help state when close icon is clicked' do - page.within '#issuable-time-tracker' do + page.within '.time-tracking-component-wrap' do find('.help-button').click find('.close-help-button').click @@ -67,7 +70,7 @@ shared_examples 'issuable time tracker' do end it 'displays the correct help url' do - page.within '#issuable-time-tracker' do + page.within '.time-tracking-component-wrap' do find('.help-button').click expect(find_link('Learn more')[:href]).to have_content('/help/workflow/time_tracking.md') @@ -77,6 +80,6 @@ end def submit_time(slash_command) fill_in 'note[note]', with: slash_command - find('.comment-btn').trigger('click') + find('.js-comment-submit-button').trigger('click') wait_for_ajax end diff --git a/spec/support/user_activities_helpers.rb b/spec/support/user_activities_helpers.rb new file mode 100644 index 00000000000..f7ca9a31edd --- /dev/null +++ b/spec/support/user_activities_helpers.rb @@ -0,0 +1,7 @@ +module UserActivitiesHelpers + def user_activity(user) + Gitlab::UserActivities.new. + find { |k, _| k == user.id.to_s }&. + second + end +end diff --git a/spec/support/wait_for_ajax.rb b/spec/support/wait_for_ajax.rb index 0f9dc2dee75..508de2ee8e1 100644 --- a/spec/support/wait_for_ajax.rb +++ b/spec/support/wait_for_ajax.rb @@ -6,10 +6,13 @@ module WaitForAjax end def finished_all_ajax_requests? + return true unless javascript_test? + return true if page.evaluate_script('typeof jQuery === "undefined"') + page.evaluate_script('jQuery.active').zero? end def javascript_test? 
- [:selenium, :webkit, :chrome, :poltergeist].include?(Capybara.current_driver) + Capybara.current_driver == Capybara.javascript_driver end end diff --git a/spec/support/wait_for_requests.rb b/spec/support/wait_for_requests.rb index 0bfa7f72ff8..d41e83ae128 100644 --- a/spec/support/wait_for_requests.rb +++ b/spec/support/wait_for_requests.rb @@ -1,5 +1,10 @@ +require_relative './wait_for_ajax' +require_relative './wait_for_vue_resource' + module WaitForRequests extend self + include WaitForAjax + include WaitForVueResource # This is inspired by http://www.salsify.com/blog/engineering/tearing-capybara-ajax-tests def wait_for_requests_complete diff --git a/spec/support/wait_for_vue_resource.rb b/spec/support/wait_for_vue_resource.rb index 4a4e2e16ee7..3bb3d9c2e51 100644 --- a/spec/support/wait_for_vue_resource.rb +++ b/spec/support/wait_for_vue_resource.rb @@ -1,7 +1,19 @@ module WaitForVueResource def wait_for_vue_resource(spinner: true) Timeout.timeout(Capybara.default_max_wait_time) do - loop until page.evaluate_script('window.activeVueResources').zero? + loop until finished_all_vue_resource_requests? end end + + private + + def finished_all_vue_resource_requests? + return true unless javascript_test? + + page.evaluate_script('window.activeVueResources || 0').zero? + end + + def javascript_test? + Capybara.current_driver == Capybara.javascript_driver + end end diff --git a/spec/support/workhorse_helpers.rb b/spec/support/workhorse_helpers.rb index 47673cd4c3a..ef1f9f68671 100644 --- a/spec/support/workhorse_helpers.rb +++ b/spec/support/workhorse_helpers.rb @@ -9,7 +9,7 @@ module WorkhorseHelpers header = split_header.join(':') [ type, - JSON.parse(Base64.urlsafe_decode64(header)), + JSON.parse(Base64.urlsafe_decode64(header)) ] end end diff --git a/spec/tasks/config_lint_spec.rb b/spec/tasks/config_lint_spec.rb index c32f9a740b7..ed6c5b09663 100644 --- a/spec/tasks/config_lint_spec.rb +++ b/spec/tasks/config_lint_spec.rb @@ -5,11 +5,11 @@ describe ConfigLint do let(:files){ ['lib/support/fake.sh'] } it 'errors out if any bash scripts have errors' do - expect { ConfigLint.run(files){ system('exit 1') } }.to raise_error(SystemExit) + expect { described_class.run(files){ system('exit 1') } }.to raise_error(SystemExit) end it 'passes if all scripts are fine' do - expect { ConfigLint.run(files){ system('exit 0') } }.not_to raise_error + expect { described_class.run(files){ system('exit 0') } }.not_to raise_error end end diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb index daea0c6bb37..0ff1a988a9e 100644 --- a/spec/tasks/gitlab/backup_rake_spec.rb +++ b/spec/tasks/gitlab/backup_rake_spec.rb @@ -230,9 +230,10 @@ describe 'gitlab:app namespace rake task' do before do FileUtils.mkdir('tmp/tests/default_storage') FileUtils.mkdir('tmp/tests/custom_storage') + gitaly_address = Gitlab.config.repositories.storages.default.gitaly_address storages = { - 'default' => { 'path' => Settings.absolute('tmp/tests/default_storage') }, - 'custom' => { 'path' => Settings.absolute('tmp/tests/custom_storage') } + 'default' => { 'path' => Settings.absolute('tmp/tests/default_storage'), 'gitaly_address' => gitaly_address }, + 'custom' => { 'path' => Settings.absolute('tmp/tests/custom_storage'), 'gitaly_address' => gitaly_address } } allow(Gitlab.config.repositories).to receive(:storages).and_return(storages) @@ -350,7 +351,7 @@ describe 'gitlab:app namespace rake task' do end it 'name has human readable time' do - expect(@backup_tar).to 
match(/\d+_\d{4}_\d{2}_\d{2}_gitlab_backup.tar$/) + expect(@backup_tar).to match(/\d+_\d{4}_\d{2}_\d{2}_\d+\.\d+\.\d+.*_gitlab_backup.tar$/) end end end # gitlab:app namespace diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb index d95baddf546..4a636decafd 100644 --- a/spec/tasks/gitlab/gitaly_rake_spec.rb +++ b/spec/tasks/gitlab/gitaly_rake_spec.rb @@ -8,7 +8,7 @@ describe 'gitlab:gitaly namespace rake task' do describe 'install' do let(:repo) { 'https://gitlab.com/gitlab-org/gitaly.git' } let(:clone_path) { Rails.root.join('tmp/tests/gitaly').to_s } - let(:tag) { "v#{File.read(Rails.root.join(Gitlab::GitalyClient::SERVER_VERSION_FILE)).chomp}" } + let(:version) { File.read(Rails.root.join(Gitlab::GitalyClient::SERVER_VERSION_FILE)).chomp } context 'no dir given' do it 'aborts and display a help message' do @@ -21,7 +21,7 @@ describe 'gitlab:gitaly namespace rake task' do context 'when an underlying Git command fail' do it 'aborts and display a help message' do expect_any_instance_of(Object). - to receive(:checkout_or_clone_tag).and_raise 'Git error' + to receive(:checkout_or_clone_version).and_raise 'Git error' expect { run_rake_task('gitlab:gitaly:install', clone_path) }.to raise_error 'Git error' end @@ -32,9 +32,9 @@ describe 'gitlab:gitaly namespace rake task' do expect(Dir).to receive(:chdir).with(clone_path) end - it 'calls checkout_or_clone_tag with the right arguments' do + it 'calls checkout_or_clone_version with the right arguments' do expect_any_instance_of(Object). - to receive(:checkout_or_clone_tag).with(tag: tag, repo: repo, target_dir: clone_path) + to receive(:checkout_or_clone_version).with(version: version, repo: repo, target_dir: clone_path) run_rake_task('gitlab:gitaly:install', clone_path) end @@ -48,7 +48,7 @@ describe 'gitlab:gitaly namespace rake task' do context 'gmake is available' do before do - expect_any_instance_of(Object).to receive(:checkout_or_clone_tag) + expect_any_instance_of(Object).to receive(:checkout_or_clone_version) allow_any_instance_of(Object).to receive(:run_command!).with(['gmake']).and_return(true) end @@ -62,7 +62,7 @@ describe 'gitlab:gitaly namespace rake task' do context 'gmake is not available' do before do - expect_any_instance_of(Object).to receive(:checkout_or_clone_tag) + expect_any_instance_of(Object).to receive(:checkout_or_clone_version) allow_any_instance_of(Object).to receive(:run_command!).with(['make']).and_return(true) end @@ -75,4 +75,36 @@ describe 'gitlab:gitaly namespace rake task' do end end end + + describe 'storage_config' do + it 'prints storage configuration in a TOML format' do + config = { + 'default' => { 'path' => '/path/to/default' }, + 'nfs_01' => { 'path' => '/path/to/nfs_01' } + } + allow(Gitlab.config.repositories).to receive(:storages).and_return(config) + + expected_output = '' + Timecop.freeze do + expected_output = <<~TOML + # Gitaly storage configuration generated from #{Gitlab.config.source} on #{Time.current.to_s(:long)} + # This is in TOML format suitable for use in Gitaly's config.toml file. + [[storage]] + name = "default" + path = "/path/to/default" + [[storage]] + name = "nfs_01" + path = "/path/to/nfs_01" + TOML + end + + expect { run_rake_task('gitlab:gitaly:storage_config')}. 
+ to output(expected_output).to_stdout + + parsed_output = TOML.parse(expected_output) + config.each do |name, params| + expect(parsed_output['storage']).to include({ 'name' => name, 'path' => params['path'] }) + end + end + end end diff --git a/spec/tasks/gitlab/shell_rake_spec.rb b/spec/tasks/gitlab/shell_rake_spec.rb index 226d34fe2c9..ee3614c50f6 100644 --- a/spec/tasks/gitlab/shell_rake_spec.rb +++ b/spec/tasks/gitlab/shell_rake_spec.rb @@ -11,6 +11,10 @@ describe 'gitlab:shell rake tasks' do it 'invokes create_hooks task' do expect(Rake::Task['gitlab:shell:create_hooks']).to receive(:invoke) + storages = Gitlab.config.repositories.storages.values.map { |rs| rs['path'] } + expect(Kernel).to receive(:system).with('bin/install', *storages).and_call_original + expect(Kernel).to receive(:system).with('bin/compile').and_call_original + run_rake_task('gitlab:shell:install') end end diff --git a/spec/tasks/gitlab/task_helpers_spec.rb b/spec/tasks/gitlab/task_helpers_spec.rb index 86e42d845ce..3d9ba7cdc6f 100644 --- a/spec/tasks/gitlab/task_helpers_spec.rb +++ b/spec/tasks/gitlab/task_helpers_spec.rb @@ -10,19 +10,38 @@ describe Gitlab::TaskHelpers do let(:repo) { 'https://gitlab.com/gitlab-org/gitlab-test.git' } let(:clone_path) { Rails.root.join('tmp/tests/task_helpers_tests').to_s } + let(:version) { '1.1.0' } let(:tag) { 'v1.1.0' } - describe '#checkout_or_clone_tag' do + describe '#checkout_or_clone_version' do before do allow(subject).to receive(:run_command!) - expect(subject).to receive(:reset_to_tag).with(tag, clone_path) end - context 'target_dir does not exist' do - it 'clones the repo, retrieve the tag from origin, and checkout the tag' do + it 'checkout the version and reset to it' do + expect(subject).to receive(:checkout_version).with(tag, clone_path) + expect(subject).to receive(:reset_to_version).with(tag, clone_path) + + subject.checkout_or_clone_version(version: version, repo: repo, target_dir: clone_path) + end + + context 'with a branch version' do + let(:version) { '=branch_name' } + let(:branch) { 'branch_name' } + + it 'checkout the version and reset to it with a branch name' do + expect(subject).to receive(:checkout_version).with(branch, clone_path) + expect(subject).to receive(:reset_to_version).with(branch, clone_path) + + subject.checkout_or_clone_version(version: version, repo: repo, target_dir: clone_path) + end + end + + context "target_dir doesn't exist" do + it 'clones the repo' do expect(subject).to receive(:clone_repo).with(repo, clone_path) - subject.checkout_or_clone_tag(tag: tag, repo: repo, target_dir: clone_path) + subject.checkout_or_clone_version(version: version, repo: repo, target_dir: clone_path) end end @@ -31,10 +50,10 @@ describe Gitlab::TaskHelpers do expect(Dir).to receive(:exist?).and_return(true) end - it 'fetch and checkout the tag' do - expect(subject).to receive(:checkout_tag).with(tag, clone_path) + it "doesn't clone the repository" do + expect(subject).not_to receive(:clone_repo) - subject.checkout_or_clone_tag(tag: tag, repo: repo, target_dir: clone_path) + subject.checkout_or_clone_version(version: version, repo: repo, target_dir: clone_path) end end end @@ -48,49 +67,23 @@ describe Gitlab::TaskHelpers do end end - describe '#checkout_tag' do + describe '#checkout_version' do it 'clones the repo in the target dir' do expect(subject). 
- to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} fetch --tags --quiet]) + to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} fetch --quiet]) expect(subject). to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} checkout --quiet #{tag}]) - subject.checkout_tag(tag, clone_path) + subject.checkout_version(tag, clone_path) end end - describe '#reset_to_tag' do - let(:tag) { 'v1.1.0' } - before do + describe '#reset_to_version' do + it 'resets --hard to the given version' do expect(subject). to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} reset --hard #{tag}]) - end - context 'when the tag is not checked out locally' do - before do - expect(subject). - to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} describe -- #{tag}]).and_raise(Gitlab::TaskFailedError) - end - - it 'fetch origin, ensure the tag exists, and resets --hard to the given tag' do - expect(subject). - to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} fetch origin]) - expect(subject). - to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} describe -- origin/#{tag}]).and_return(tag) - - subject.reset_to_tag(tag, clone_path) - end - end - - context 'when the tag is checked out locally' do - before do - expect(subject). - to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} describe -- #{tag}]).and_return(tag) - end - - it 'resets --hard to the given tag' do - subject.reset_to_tag(tag, clone_path) - end + subject.reset_to_version(tag, clone_path) end end end diff --git a/spec/tasks/gitlab/workhorse_rake_spec.rb b/spec/tasks/gitlab/workhorse_rake_spec.rb index 8a66a4aa047..63d1cf2bbe5 100644 --- a/spec/tasks/gitlab/workhorse_rake_spec.rb +++ b/spec/tasks/gitlab/workhorse_rake_spec.rb @@ -8,7 +8,7 @@ describe 'gitlab:workhorse namespace rake task' do describe 'install' do let(:repo) { 'https://gitlab.com/gitlab-org/gitlab-workhorse.git' } let(:clone_path) { Rails.root.join('tmp/tests/gitlab-workhorse').to_s } - let(:tag) { "v#{File.read(Rails.root.join(Gitlab::Workhorse::VERSION_FILE)).chomp}" } + let(:version) { File.read(Rails.root.join(Gitlab::Workhorse::VERSION_FILE)).chomp } context 'no dir given' do it 'aborts and display a help message' do @@ -21,7 +21,7 @@ describe 'gitlab:workhorse namespace rake task' do context 'when an underlying Git command fail' do it 'aborts and display a help message' do expect_any_instance_of(Object). - to receive(:checkout_or_clone_tag).and_raise 'Git error' + to receive(:checkout_or_clone_version).and_raise 'Git error' expect { run_rake_task('gitlab:workhorse:install', clone_path) }.to raise_error 'Git error' end @@ -32,9 +32,9 @@ describe 'gitlab:workhorse namespace rake task' do expect(Dir).to receive(:chdir).with(clone_path) end - it 'calls checkout_or_clone_tag with the right arguments' do + it 'calls checkout_or_clone_version with the right arguments' do expect_any_instance_of(Object). 
- to receive(:checkout_or_clone_tag).with(tag: tag, repo: repo, target_dir: clone_path) + to receive(:checkout_or_clone_version).with(version: version, repo: repo, target_dir: clone_path) run_rake_task('gitlab:workhorse:install', clone_path) end @@ -48,7 +48,7 @@ describe 'gitlab:workhorse namespace rake task' do context 'gmake is available' do before do - expect_any_instance_of(Object).to receive(:checkout_or_clone_tag) + expect_any_instance_of(Object).to receive(:checkout_or_clone_version) allow_any_instance_of(Object).to receive(:run_command!).with(['gmake']).and_return(true) end @@ -62,7 +62,7 @@ describe 'gitlab:workhorse namespace rake task' do context 'gmake is not available' do before do - expect_any_instance_of(Object).to receive(:checkout_or_clone_tag) + expect_any_instance_of(Object).to receive(:checkout_or_clone_version) allow_any_instance_of(Object).to receive(:run_command!).with(['make']).and_return(true) end diff --git a/spec/unicorn/unicorn_spec.rb b/spec/unicorn/unicorn_spec.rb new file mode 100644 index 00000000000..8518c047a47 --- /dev/null +++ b/spec/unicorn/unicorn_spec.rb @@ -0,0 +1,98 @@ +require 'fileutils' + +require 'excon' + +require 'spec_helper' + +describe 'Unicorn' do + before(:all) do + config_lines = File.read('config/unicorn.rb.example').split("\n") + + # Remove these because they make setup harder. + config_lines = config_lines.reject do |line| + %w[ + working_directory + worker_processes + listen + pid + stderr_path + stdout_path + ].any? { |prefix| line.start_with?(prefix) } + end + + config_lines << "working_directory '#{Rails.root}'" + + # We want to have exactly 1 worker process because that makes it + # predictable which process will handle our requests. + config_lines << 'worker_processes 1' + + @socket_path = File.join(Dir.pwd, 'tmp/tests/unicorn.socket') + config_lines << "listen '#{@socket_path}'" + + ready_file = 'tmp/tests/unicorn-worker-ready' + FileUtils.rm_f(ready_file) + after_fork_index = config_lines.index { |l| l.start_with?('after_fork') } + config_lines.insert(after_fork_index + 1, "File.write('#{ready_file}', Process.pid)") + + config_path = 'tmp/tests/unicorn.rb' + File.write(config_path, config_lines.join("\n") + "\n") + + cmd = %W[unicorn -E test -c #{config_path} #{Rails.root.join('config.ru')}] + @unicorn_master_pid = spawn(*cmd) + wait_unicorn_boot!(@unicorn_master_pid, ready_file) + WebMock.allow_net_connect! + end + + %w[SIGQUIT SIGTERM SIGKILL].each do |signal| + it "has a worker that self-terminates on signal #{signal}" do + response = Excon.get('unix:///unicorn_test/pid', socket: @socket_path) + expect(response.status).to eq(200) + + worker_pid = response.body.to_i + expect(worker_pid).to be > 0 + + begin + Excon.post('unix:///unicorn_test/kill', socket: @socket_path, body: "signal=#{signal}") + rescue Excon::Error::Socket + # The connection may be closed abruptly + end + + expect(pid_gone?(worker_pid)).to eq(true) + end + end + + after(:all) do + WebMock.disable_net_connect!(allow_localhost: true) + Process.kill('TERM', @unicorn_master_pid) + end + + def wait_unicorn_boot!(master_pid, ready_file) + # Unicorn should boot in under 60 seconds so 120 seconds seems like a good timeout. + timeout = 120 + timeout.times do + return if File.exist?(ready_file) + pid = Process.waitpid(master_pid, Process::WNOHANG) + raise "unicorn failed to boot: #{$?}" unless pid.nil? + + sleep 1 + end + + raise "unicorn boot timed out after #{timeout} seconds" + end + + def pid_gone?(pid) + # Worker termination should take less than a second. 
That makes 10 + # seconds a generous timeout. + 10.times do + begin + Process.kill(0, pid) + rescue Errno::ESRCH + return true + end + + sleep 1 + end + + false + end +end diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb new file mode 100644 index 00000000000..fb92f2ae3ab --- /dev/null +++ b/spec/uploaders/personal_file_uploader_spec.rb @@ -0,0 +1,31 @@ +require 'spec_helper' + +describe PersonalFileUploader do + let(:uploader) { described_class.new(build_stubbed(:empty_project)) } + let(:snippet) { create(:personal_snippet) } + + describe '.absolute_path' do + it 'returns the correct absolute path by building it dynamically' do + upload = double(model: snippet, path: 'secret/foo.jpg') + + dynamic_segment = "personal_snippet/#{snippet.id}" + + expect(described_class.absolute_path(upload)).to end_with("#{dynamic_segment}/secret/foo.jpg") + end + end + + describe '#to_h' do + it 'returns the hash' do + uploader = described_class.new(snippet, 'secret') + + allow(uploader).to receive(:file).and_return(double(extension: 'txt', filename: 'file_name')) + expected_url = "/uploads/personal_snippet/#{snippet.id}/secret/file_name" + + expect(uploader.to_h).to eq( + alt: 'file_name', + url: expected_url, + markdown: "[file_name](#{expected_url})" + ) + end + end +end diff --git a/spec/validators/dynamic_path_validator_spec.rb b/spec/validators/dynamic_path_validator_spec.rb new file mode 100644 index 00000000000..b114bfc1bca --- /dev/null +++ b/spec/validators/dynamic_path_validator_spec.rb @@ -0,0 +1,266 @@ +require 'spec_helper' + +describe DynamicPathValidator do + let(:validator) { described_class.new(attributes: [:path]) } + + # Pass in a full path to remove the format segment: + # `/ci/lint(.:format)` -> `/ci/lint` + def without_format(path) + path.split('(', 2)[0] + end + + # Pass in a full path and get the last segment before a wildcard + # That's not a parameter + # `/*namespace_id/:project_id/builds/artifacts/*ref_name_and_path` + # -> 'builds/artifacts' + def path_before_wildcard(path) + path = path.gsub(STARTING_WITH_NAMESPACE, "") + path_segments = path.split('/').reject(&:empty?) + wildcard_index = path_segments.index { |segment| parameter?(segment) } + + segments_before_wildcard = path_segments[0..wildcard_index - 1] + + segments_before_wildcard.join('/') + end + + def parameter?(segment) + segment =~ /[*:]/ + end + + # If the path is reserved, then no conflicting paths can be created for any + # route using this reserved word. + # + # Both `builds/artifacts` & `build` are covered by reserving the word + # `build` + def wildcards_include?(path) + described_class::WILDCARD_ROUTES.include?(path) || + described_class::WILDCARD_ROUTES.include?(path.split('/').first) + end + + def failure_message(missing_words, constant_name, migration_helper) + missing_words = Array(missing_words) + <<-MSG + Found new routes that could cause conflicts with existing namespaced routes + for groups or projects. + + Add <#{missing_words.join(', ')}> to `DynamicPathValidator::#{constant_name} + to make sure no projects or namespaces can be created with those paths. + + To rename any existing records with those paths you can use the + `Gitlab::Database::RenameReservedpathsMigration::<VERSION>.#{migration_helper}` + migration helper. + + Make sure to make a note of the renamed records in the release blog post. + + MSG + end + + let(:all_routes) do + Rails.application.routes.routes.routes.
+ map { |r| r.path.spec.to_s } + end + + let(:routes_without_format) { all_routes.map { |path| without_format(path) } } + + # Routes not starting with `/:` or `/*` + # all routes not starting with a param + let(:routes_not_starting_in_wildcard) { routes_without_format.select { |p| p !~ %r{^/[:*]} } } + + let(:top_level_words) do + routes_not_starting_in_wildcard.map do |route| + route.split('/')[1] + end.compact.uniq + end + + # All routes that start with a namespaced path, that have 1 or more + # path-segments before having another wildcard parameter. + # - Starting with paths: + # - `/*namespace_id/:project_id/` + # - `/*namespace_id/:id/` + # - Followed by one or more path-parts not starting with `:` or `*` + # - Followed by a path-part that includes a wildcard parameter `*` + # At the time of writing these routes match: http://rubular.com/r/Rv2pDE5Dvw + STARTING_WITH_NAMESPACE = %r{^/\*namespace_id/:(project_)?id} + NON_PARAM_PARTS = %r{[^:*][a-z\-_/]*} + ANY_OTHER_PATH_PART = %r{[a-z\-_/:]*} + WILDCARD_SEGMENT = %r{\*} + let(:namespaced_wildcard_routes) do + routes_without_format.select do |p| + p =~ %r{#{STARTING_WITH_NAMESPACE}/#{NON_PARAM_PARTS}/#{ANY_OTHER_PATH_PART}#{WILDCARD_SEGMENT}} + end + end + + # This will return all paths that are used in a namespaced route + # before another wildcard path: + # + # /*namespace_id/:project_id/builds/artifacts/*ref_name_and_path + # /*namespace_id/:project_id/info/lfs/objects/*oid + # /*namespace_id/:project_id/commits/*id + # /*namespace_id/:project_id/builds/:build_id/artifacts/file/*path + # -> ['builds/artifacts', 'info/lfs/objects', 'commits', 'artifacts/file'] + let(:all_wildcard_paths) do + namespaced_wildcard_routes.map do |route| + path_before_wildcard(route) + end.uniq + end + + STARTING_WITH_GROUP = %r{^/groups/\*(group_)?id/} + let(:group_routes) do + routes_without_format.select do |path| + path =~ STARTING_WITH_GROUP + end + end + + let(:paths_after_group_id) do + group_routes.map do |route| + route.gsub(STARTING_WITH_GROUP, '').split('/').first + end.uniq + end + + describe 'TOP_LEVEL_ROUTES' do + it 'includes all the top level namespaces' do + failure_block = lambda do + missing_words = top_level_words - described_class::TOP_LEVEL_ROUTES + failure_message(missing_words, 'TOP_LEVEL_ROUTES', 'rename_root_paths') + end + + expect(described_class::TOP_LEVEL_ROUTES) + .to include(*top_level_words), failure_block + end + end + + describe 'GROUP_ROUTES' do + it "don't contain a second wildcard" do + failure_block = lambda do + missing_words = paths_after_group_id - described_class::GROUP_ROUTES + failure_message(missing_words, 'GROUP_ROUTES', 'rename_child_paths') + end + + expect(described_class::GROUP_ROUTES) + .to include(*paths_after_group_id), failure_block + end + end + + describe 'WILDCARD_ROUTES' do + it 'includes all paths that can be used after a namespace/project path' do + aggregate_failures do + all_wildcard_paths.each do |path| + expect(wildcards_include?(path)) + .to be(true), failure_message(path, 'WILDCARD_ROUTES', 'rename_wildcard_paths') + end + end + end + end + + describe '.without_reserved_wildcard_paths_regex' do + subject { described_class.without_reserved_wildcard_paths_regex } + + it 'rejects paths starting with a reserved top level' do + expect(subject).not_to match('dashboard/hello/world') + expect(subject).not_to match('dashboard') + end + + it 'matches valid paths with a toplevel word in a different place' do + expect(subject).to match('parent/dashboard/project-path') + end + + it 'rejects paths 
containing a wildcard reserved word' do + expect(subject).not_to match('hello/edit') + expect(subject).not_to match('hello/edit/in-the-middle') + expect(subject).not_to match('foo/bar1/refs/master/logs_tree') + end + + it 'matches valid paths' do + expect(subject).to match('parent/child/project-path') + end + end + + describe '.regex_excluding_child_paths' do + let(:subject) { described_class.without_reserved_child_paths_regex } + + it 'rejects paths containing a child reserved word' do + expect(subject).not_to match('hello/group_members') + expect(subject).not_to match('hello/activity/in-the-middle') + expect(subject).not_to match('foo/bar1/refs/master/logs_tree') + end + + it 'allows a child path on the top level' do + expect(subject).to match('activity/foo') + expect(subject).to match('avatar') + end + end + + describe ".valid?" do + it 'is not case sensitive' do + expect(described_class.valid?("Users")).to be_falsey + end + + it "isn't valid when the top level is reserved" do + test_path = 'u/should-be-a/reserved-word' + + expect(described_class.valid?(test_path)).to be_falsey + end + + it "isn't valid if any of the path segments is reserved" do + test_path = 'the-wildcard/wikis/is-not-allowed' + + expect(described_class.valid?(test_path)).to be_falsey + end + + it "is valid if the path doesn't contain reserved words" do + test_path = 'there-are/no-wildcards/in-this-path' + + expect(described_class.valid?(test_path)).to be_truthy + end + + it 'allows a child path on the last spot' do + test_path = 'there/can-be-a/project-called/labels' + + expect(described_class.valid?(test_path)).to be_truthy + end + + it 'rejects a child path somewhere else' do + test_path = 'there/can-be-no/labels/group' + + expect(described_class.valid?(test_path)).to be_falsey + end + + it 'rejects paths that are in an incorrect format' do + test_path = 'incorrect/format.git' + + expect(described_class.valid?(test_path)).to be_falsey + end + end + + describe '#path_reserved_for_record?' do + it 'reserves a sub-group named activity' do + group = build(:group, :nested, path: 'activity') + + expect(validator.path_reserved_for_record?(group, 'activity')).to be_truthy + end + + it "doesn't reserve a project called activity" do + project = build(:project, path: 'activity') + + expect(validator.path_reserved_for_record?(project, 'activity')).to be_falsey + end + end + + describe '#validates_each' do + it 'adds a message when the path is not in the correct format' do + group = build(:group) + + validator.validate_each(group, :path, "Path with spaces, and comma's!") + + expect(group.errors[:path]).to include(Gitlab::Regex.namespace_regex_message) + end + + it 'adds a message when the path is a reserved name' do + group = build(:group, path: 'users') + + validator.validate_each(group, :path, 'users') + + expect(group.errors[:path]).to include('users is a reserved name') + end + end +end diff --git a/spec/views/layouts/nav/_project.html.haml_spec.rb b/spec/views/layouts/nav/_project.html.haml_spec.rb new file mode 100644 index 00000000000..fd1637ca91b --- /dev/null +++ b/spec/views/layouts/nav/_project.html.haml_spec.rb @@ -0,0 +1,37 @@ +require 'spec_helper' + +describe 'layouts/nav/_project' do + describe 'container registry tab' do + before do + stub_container_registry_config(enabled: true) + + assign(:project, create(:project)) + allow(view).to receive(:current_ref).and_return('master') + + allow(view).to receive(:can?).and_return(true) + allow(controller).to receive(:controller_name) + .and_return('repositories') + allow(controller).to receive(:controller_path) + .and_return('projects/registry/repositories') + end + + it 'has both Registry and Repository tabs' do + render + + expect(rendered).to have_text 'Repository' + expect(rendered).to have_text 'Registry' + end + + it 'highlights only one tab' do + render + + expect(rendered).to have_css('.active', count: 1) + end + + it 'highlights container registry tab only' do + render + + expect(rendered).to have_css('.active', text: 'Registry') + end + end +end diff --git a/spec/views/notify/pipeline_failed_email.html.haml_spec.rb b/spec/views/notify/pipeline_failed_email.html.haml_spec.rb new file mode 100644 index 00000000000..f627f9165fb --- /dev/null +++ b/spec/views/notify/pipeline_failed_email.html.haml_spec.rb @@ -0,0 +1,54 @@ +require 'spec_helper' + +describe 'notify/pipeline_failed_email.html.haml' do + include Devise::Test::ControllerHelpers + + let(:user) { create(:user) } + let(:project) { create(:project) } + let(:merge_request) { create(:merge_request, :simple, source_project: project) } + + let(:pipeline) do + create(:ci_pipeline, + project: project, + user: user, + ref: project.default_branch, + sha: project.commit.sha, + status: :success) + end + + before do + assign(:project, project) + assign(:pipeline, pipeline) + assign(:merge_request, merge_request) + end + + context 'pipeline with user' do + it 'renders the email correctly' do + render + + expect(rendered).to have_content "Your pipeline has failed" + expect(rendered).to have_content pipeline.project.name + expect(rendered).to have_content pipeline.git_commit_message.truncate(50) + expect(rendered).to have_content pipeline.commit.author_name + expect(rendered).to have_content "##{pipeline.id}" + expect(rendered).to have_content pipeline.user.name + end + end + + context 'pipeline without user' do + before do + pipeline.update_attribute(:user, nil) + end + + it 'renders the email correctly' do + render + + expect(rendered).to have_content "Your
pipeline has failed" + expect(rendered).to have_content pipeline.project.name + expect(rendered).to have_content pipeline.git_commit_message.truncate(50) + expect(rendered).to have_content pipeline.commit.author_name + expect(rendered).to have_content "##{pipeline.id}" + expect(rendered).to have_content "by API" + end + end +end diff --git a/spec/views/notify/pipeline_success_email.html.haml_spec.rb b/spec/views/notify/pipeline_success_email.html.haml_spec.rb new file mode 100644 index 00000000000..ecd096ee579 --- /dev/null +++ b/spec/views/notify/pipeline_success_email.html.haml_spec.rb @@ -0,0 +1,54 @@ +require 'spec_helper' + +describe 'notify/pipeline_success_email.html.haml' do + include Devise::Test::ControllerHelpers + + let(:user) { create(:user) } + let(:project) { create(:project) } + let(:merge_request) { create(:merge_request, :simple, source_project: project) } + + let(:pipeline) do + create(:ci_pipeline, + project: project, + user: user, + ref: project.default_branch, + sha: project.commit.sha, + status: :success) + end + + before do + assign(:project, project) + assign(:pipeline, pipeline) + assign(:merge_request, merge_request) + end + + context 'pipeline with user' do + it 'renders the email correctly' do + render + + expect(rendered).to have_content "Your pipeline has passed" + expect(rendered).to have_content pipeline.project.name + expect(rendered).to have_content pipeline.git_commit_message.truncate(50) + expect(rendered).to have_content pipeline.commit.author_name + expect(rendered).to have_content "##{pipeline.id}" + expect(rendered).to have_content pipeline.user.name + end + end + + context 'pipeline without user' do + before do + pipeline.update_attribute(:user, nil) + end + + it 'renders the email correctly' do + render + + expect(rendered).to have_content "Your pipeline has passed" + expect(rendered).to have_content pipeline.project.name + expect(rendered).to have_content pipeline.git_commit_message.truncate(50) + expect(rendered).to have_content pipeline.commit.author_name + expect(rendered).to have_content "##{pipeline.id}" + expect(rendered).to have_content "by API" + end + end +end diff --git a/spec/views/projects/blob/_viewer.html.haml_spec.rb b/spec/views/projects/blob/_viewer.html.haml_spec.rb new file mode 100644 index 00000000000..c6b0ed8da3c --- /dev/null +++ b/spec/views/projects/blob/_viewer.html.haml_spec.rb @@ -0,0 +1,97 @@ +require 'spec_helper' + +describe 'projects/blob/_viewer.html.haml', :view do + include FakeBlobHelpers + + let(:project) { build(:empty_project) } + + let(:viewer_class) do + Class.new(BlobViewer::Base) do + include BlobViewer::Rich + + self.partial_name = 'text' + self.overridable_max_size = 1.megabyte + self.max_size = 5.megabytes + self.load_async = true + end + end + + let(:viewer) { viewer_class.new(blob) } + let(:blob) { fake_blob } + + before do + assign(:project, project) + assign(:blob, blob) + assign(:id, File.join('master', blob.path)) + + controller.params[:controller] = 'projects/blob' + controller.params[:action] = 'show' + controller.params[:namespace_id] = project.namespace.to_param + controller.params[:project_id] = project.to_param + controller.params[:id] = File.join('master', blob.path) + end + + def render_view + render partial: 'projects/blob/viewer', locals: { viewer: viewer } + end + + context 'when the viewer is loaded asynchronously' do + before do + viewer_class.load_async = true + end + + context 'when there is no render error' do + it 'adds a URL to the blob viewer element' do + render_view + + 
expect(rendered).to have_css('.blob-viewer[data-url]') + end + + it 'renders the loading indicator' do + render_view + + expect(view).to render_template('projects/blob/viewers/_loading') + end + end + + context 'when there is a render error' do + let(:blob) { fake_blob(size: 10.megabytes) } + + it 'renders the error' do + render_view + + expect(view).to render_template('projects/blob/_render_error') + end + end + end + + context 'when the viewer is loaded synchronously' do + before do + viewer_class.load_async = false + end + + context 'when there is no render error' do + it 'prepares the viewer' do + expect(viewer).to receive(:prepare!) + + render_view + end + + it 'renders the viewer' do + render_view + + expect(view).to render_template('projects/blob/viewers/_text') + end + end + + context 'when there is a render error' do + let(:blob) { fake_blob(size: 10.megabytes) } + + it 'renders the error' do + render_view + + expect(view).to render_template('projects/blob/_render_error') + end + end + end +end diff --git a/spec/views/projects/builds/show.html.haml_spec.rb b/spec/views/projects/builds/show.html.haml_spec.rb index 55b64808fb3..0f39df0f250 100644 --- a/spec/views/projects/builds/show.html.haml_spec.rb +++ b/spec/views/projects/builds/show.html.haml_spec.rb @@ -9,7 +9,7 @@ describe 'projects/builds/show', :view do end before do - assign(:build, build) + assign(:build, build.present) assign(:project, project) allow(view).to receive(:can?).and_return(true) diff --git a/spec/views/projects/commit/_commit_box.html.haml_spec.rb b/spec/views/projects/commit/_commit_box.html.haml_spec.rb index cec87dcecc8..ab120929c6c 100644 --- a/spec/views/projects/commit/_commit_box.html.haml_spec.rb +++ b/spec/views/projects/commit/_commit_box.html.haml_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' -describe 'projects/commit/_commit_box.html.haml' do - include Devise::Test::ControllerHelpers - +describe 'projects/commit/_commit_box.html.haml', :view do let(:user) { create(:user) } let(:project) { create(:project, :repository) } @@ -18,14 +16,32 @@ describe 'projects/commit/_commit_box.html.haml' do expect(rendered).to have_text("#{Commit.truncate_sha(project.commit.sha)}") end - it 'shows the last pipeline that ran for the commit' do - create(:ci_pipeline, project: project, sha: project.commit.id, status: 'success') - create(:ci_pipeline, project: project, sha: project.commit.id, status: 'canceled') - third_pipeline = create(:ci_pipeline, project: project, sha: project.commit.id, status: 'failed') + context 'when there is a pipeline present' do + context 'when there are multiple pipelines for a commit' do + it 'shows the last pipeline' do + create(:ci_pipeline, project: project, sha: project.commit.id, status: 'success') + create(:ci_pipeline, project: project, sha: project.commit.id, status: 'canceled') + third_pipeline = create(:ci_pipeline, project: project, sha: project.commit.id, status: 'failed') - render + render + + expect(rendered).to have_text("Pipeline ##{third_pipeline.id} failed") + end + end - expect(rendered).to have_text("Pipeline ##{third_pipeline.id} failed") + context 'when pipeline for the commit is blocked' do + let!(:pipeline) do + create(:ci_pipeline, :blocked, project: project, + sha: project.commit.id) + end + + it 'shows correct pipeline description' do + render + + expect(rendered).to have_text "Pipeline ##{pipeline.id} " \ + 'waiting for manual action' + end + end end context 'viewing a commit' do diff --git a/spec/views/projects/commit/show.html.haml_spec.rb 
b/spec/views/projects/commit/show.html.haml_spec.rb new file mode 100644 index 00000000000..122075cc10e --- /dev/null +++ b/spec/views/projects/commit/show.html.haml_spec.rb @@ -0,0 +1,44 @@ +require 'spec_helper' + +describe 'projects/commit/show.html.haml', :view do + let(:project) { create(:project, :repository) } + + before do + assign(:project, project) + assign(:repository, project.repository) + assign(:commit, project.commit) + assign(:noteable, project.commit) + assign(:notes, []) + assign(:diffs, project.commit.diffs) + + allow(view).to receive(:current_user).and_return(nil) + allow(view).to receive(:can?).and_return(false) + allow(view).to receive(:can_collaborate_with_project?).and_return(false) + allow(view).to receive(:current_ref).and_return(project.repository.root_ref) + allow(view).to receive(:diff_btn).and_return('') + end + + context 'inline diff view' do + before do + allow(view).to receive(:diff_view).and_return(:inline) + + render + end + + it 'keeps container-limited' do + expect(rendered).not_to have_selector('.limit-container-width') + end + end + + context 'parallel diff view' do + before do + allow(view).to receive(:diff_view).and_return(:parallel) + + render + end + + it 'spans full width' do + expect(rendered).to have_selector('.limit-container-width') + end + end +end diff --git a/spec/views/projects/environments/terminal.html.haml_spec.rb b/spec/views/projects/environments/terminal.html.haml_spec.rb new file mode 100644 index 00000000000..d2e47225226 --- /dev/null +++ b/spec/views/projects/environments/terminal.html.haml_spec.rb @@ -0,0 +1,32 @@ +require 'spec_helper' + +describe 'projects/environments/terminal' do + let!(:environment) { create(:environment, :with_review_app) } + + before do + assign(:environment, environment) + assign(:project, environment.project) + + allow(view).to receive(:can?).and_return(true) + end + + context 'when environment has external URL' do + it 'shows external URL button' do + environment.update_attribute(:external_url, 'https://gitlab.com') + + render + + expect(rendered).to have_link(nil, href: 'https://gitlab.com') + end + end + + context 'when environment does not have external URL' do + it 'shows external URL button' do + environment.update_attribute(:external_url, nil) + + render + + expect(rendered).not_to have_link(nil, href: 'https://gitlab.com') + end + end +end diff --git a/spec/views/projects/imports/new.html.haml_spec.rb b/spec/views/projects/imports/new.html.haml_spec.rb new file mode 100644 index 00000000000..9b293065797 --- /dev/null +++ b/spec/views/projects/imports/new.html.haml_spec.rb @@ -0,0 +1,22 @@ +require "spec_helper" + +describe "projects/imports/new.html.haml" do + let(:user) { create(:user) } + + context 'when import fails' do + let(:project) { create(:project_empty_repo, import_status: :failed, import_error: '<a href="http://googl.com">Foo</a>', import_type: :gitlab_project, import_source: '/var/opt/gitlab/gitlab-rails/shared/tmp/project_exports/uploads/t.tar.gz', import_url: nil) } + + before do + sign_in(user) + project.team << [user, :master] + end + + it "escapes HTML in import errors" do + assign(:project, project) + + render + + expect(rendered).not_to have_link('Foo', href: "http://googl.com") + end + end +end diff --git a/spec/views/projects/pipelines/_stage.html.haml_spec.rb b/spec/views/projects/pipelines/_stage.html.haml_spec.rb index 10095ad7694..9c91c4e0fbd 100644 --- a/spec/views/projects/pipelines/_stage.html.haml_spec.rb +++ b/spec/views/projects/pipelines/_stage.html.haml_spec.rb 
@@ -39,9 +39,8 @@ describe 'projects/pipelines/_stage', :view do context 'when there are retried builds present' do before do - create_list(:ci_build, 2, name: 'test:build', - stage: stage.name, - pipeline: pipeline) + create(:ci_build, name: 'test:build', stage: stage.name, pipeline: pipeline, retried: true) + create(:ci_build, name: 'test:build', stage: stage.name, pipeline: pipeline) end it 'shows only latest builds' do diff --git a/spec/views/projects/pipelines/show.html.haml_spec.rb b/spec/views/projects/pipelines/show.html.haml_spec.rb deleted file mode 100644 index dca78dec6df..00000000000 --- a/spec/views/projects/pipelines/show.html.haml_spec.rb +++ /dev/null @@ -1,61 +0,0 @@ -require 'spec_helper' - -describe 'projects/pipelines/show' do - include Devise::Test::ControllerHelpers - - let(:user) { create(:user) } - let(:project) { create(:project, :repository) } - let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, user: user) } - - before do - controller.prepend_view_path('app/views/projects') - - create_build('build', 0, 'build', :success) - create_build('test', 1, 'rspec 0:2', :pending) - create_build('test', 1, 'rspec 1:2', :running) - create_build('test', 1, 'spinach 0:2', :created) - create_build('test', 1, 'spinach 1:2', :created) - create_build('test', 1, 'audit', :created) - create_build('deploy', 2, 'production', :created) - - create(:generic_commit_status, pipeline: pipeline, stage: 'external', name: 'jenkins', stage_idx: 3) - - assign(:project, project) - assign(:pipeline, pipeline) - assign(:commit, project.commit) - - allow(view).to receive(:can?).and_return(true) - end - - it 'shows a graph with grouped stages' do - render - - expect(rendered).to have_css('.js-pipeline-graph') - expect(rendered).to have_css('.js-grouped-pipeline-dropdown') - - # header - expect(rendered).to have_text("##{pipeline.id}") - expect(rendered).to have_css('time', text: pipeline.created_at.strftime("%b %d, %Y")) - expect(rendered).to have_selector(%Q(img[alt$="#{pipeline.user.name}'s avatar"])) - expect(rendered).to have_link(pipeline.user.name, href: user_path(pipeline.user)) - - # stages - expect(rendered).to have_text('Build') - expect(rendered).to have_text('Test') - expect(rendered).to have_text('Deploy') - expect(rendered).to have_text('External') - - # builds - expect(rendered).to have_text('rspec') - expect(rendered).to have_text('spinach') - expect(rendered).to have_text('rspec 0:2') - expect(rendered).to have_text('production') - expect(rendered).to have_text('jenkins') - end - - private - - def create_build(stage, stage_idx, name, status) - create(:ci_build, pipeline: pipeline, stage: stage, stage_idx: stage_idx, name: name, status: status) - end -end diff --git a/spec/views/projects/registry/repositories/index.html.haml_spec.rb b/spec/views/projects/registry/repositories/index.html.haml_spec.rb new file mode 100644 index 00000000000..ceeace3dc8d --- /dev/null +++ b/spec/views/projects/registry/repositories/index.html.haml_spec.rb @@ -0,0 +1,36 @@ +require 'spec_helper' + +describe 'projects/registry/repositories/index', :view do + let(:group) { create(:group, path: 'group') } + let(:project) { create(:empty_project, group: group, path: 'test') } + + let(:repository) do + create(:container_repository, project: project, name: 'image') + end + + before do + stub_container_registry_config(enabled: true, + host_port: 'registry.gitlab', + api_url: 'http://registry.gitlab') + + stub_container_registry_tags(repository: :any, tags: [:latest]) + + 
assign(:project, project) + assign(:images, [repository]) + + allow(view).to receive(:can?).and_return(true) + end + + it 'contains container repository path' do + render + + expect(rendered).to have_content 'group/test/image' + end + + it 'contains attribute for copying tag location into clipboard' do + render + + expect(rendered).to have_css 'button[data-clipboard-text="docker pull ' \ + 'registry.gitlab/group/test/image:latest"]' + end +end diff --git a/spec/views/projects/tags/index.html.haml_spec.rb b/spec/views/projects/tags/index.html.haml_spec.rb new file mode 100644 index 00000000000..33122365e9a --- /dev/null +++ b/spec/views/projects/tags/index.html.haml_spec.rb @@ -0,0 +1,20 @@ +require 'spec_helper' + +describe 'projects/tags/index', :view do + let(:project) { create(:project) } + + before do + assign(:project, project) + assign(:repository, project.repository) + assign(:tags, []) + + allow(view).to receive(:current_ref).and_return('master') + allow(view).to receive(:can?).and_return(false) + end + + it 'defaults sort dropdown toggle to last updated' do + render + + expect(rendered).to have_button('Last updated') + end +end diff --git a/spec/views/projects/tree/show.html.haml_spec.rb b/spec/views/projects/tree/show.html.haml_spec.rb index 900f8d4732f..33eba3e6d3d 100644 --- a/spec/views/projects/tree/show.html.haml_spec.rb +++ b/spec/views/projects/tree/show.html.haml_spec.rb @@ -21,17 +21,17 @@ describe 'projects/tree/show' do let(:tree) { repository.tree(commit.id, path) } before do + assign(:id, File.join(ref, path)) assign(:ref, ref) - assign(:commit, commit) - assign(:id, commit.id) - assign(:tree, tree) assign(:path, path) + assign(:last_commit, commit) + assign(:tree, tree) end it 'displays correctly' do render expect(rendered).to have_css('.js-project-refs-dropdown .dropdown-toggle-text', text: ref) - expect(rendered).to have_css('.readme-holder .file-content', text: ref) + expect(rendered).to have_css('.readme-holder') end end end diff --git a/spec/views/projects/notes/_form.html.haml_spec.rb b/spec/views/shared/notes/_form.html.haml_spec.rb index b61f016967f..d7d0a5bf56a 100644 --- a/spec/views/projects/notes/_form.html.haml_spec.rb +++ b/spec/views/shared/notes/_form.html.haml_spec.rb @@ -1,10 +1,10 @@ require 'spec_helper' -describe 'projects/notes/_form' do +describe 'shared/notes/_form' do include Devise::Test::ControllerHelpers let(:user) { create(:user) } - let(:project) { create(:empty_project) } + let(:project) { create(:project, :repository) } before do project.team << [user, :master] @@ -20,7 +20,7 @@ describe 'projects/notes/_form' do context "with a note on #{noteable}" do let(:note) { build(:"note_on_#{noteable}", project: project) } - it 'says that only markdown is supported, not slash commands' do + it 'says that markdown and slash commands are supported' do expect(rendered).to have_content('Markdown and slash commands are supported') end end diff --git a/spec/workers/delete_user_worker_spec.rb b/spec/workers/delete_user_worker_spec.rb index 0765573408c..5912dd76262 100644 --- a/spec/workers/delete_user_worker_spec.rb +++ b/spec/workers/delete_user_worker_spec.rb @@ -8,13 +8,13 @@ describe DeleteUserWorker do expect_any_instance_of(Users::DestroyService).to receive(:execute). with(user, {}) - DeleteUserWorker.new.perform(current_user.id, user.id) + described_class.new.perform(current_user.id, user.id) end it "uses symbolized keys" do expect_any_instance_of(Users::DestroyService).to receive(:execute). 
with(user, test: "test") - DeleteUserWorker.new.perform(current_user.id, user.id, "test" => "test") + described_class.new.perform(current_user.id, user.id, "test" => "test") end end diff --git a/spec/workers/emails_on_push_worker_spec.rb b/spec/workers/emails_on_push_worker_spec.rb index 8cf2b888f9a..a0ed85cc0b3 100644 --- a/spec/workers/emails_on_push_worker_spec.rb +++ b/spec/workers/emails_on_push_worker_spec.rb @@ -12,7 +12,7 @@ describe EmailsOnPushWorker do let(:perform) { subject.perform(project.id, recipients, data.stringify_keys) } let(:email) { ActionMailer::Base.deliveries.last } - subject { EmailsOnPushWorker.new } + subject { described_class.new } describe "#perform" do context "when push is a new branch" do diff --git a/spec/workers/expire_build_instance_artifacts_worker_spec.rb b/spec/workers/expire_build_instance_artifacts_worker_spec.rb index d202b3de77e..1d8da68883b 100644 --- a/spec/workers/expire_build_instance_artifacts_worker_spec.rb +++ b/spec/workers/expire_build_instance_artifacts_worker_spec.rb @@ -34,12 +34,14 @@ describe ExpireBuildInstanceArtifactsWorker do context 'when associated project was removed' do let(:build) do create(:ci_build, :artifacts, artifacts_expiry) do |build| - build.project.delete + build.project.pending_delete = true end end it 'does not remove artifacts' do - expect(build.reload.artifacts_file.exists?).to be_truthy + expect do + build.reload.artifacts_file + end.not_to raise_error end end end diff --git a/spec/workers/expire_pipeline_cache_worker_spec.rb b/spec/workers/expire_pipeline_cache_worker_spec.rb new file mode 100644 index 00000000000..ceba604dea2 --- /dev/null +++ b/spec/workers/expire_pipeline_cache_worker_spec.rb @@ -0,0 +1,44 @@ +require 'spec_helper' + +describe ExpirePipelineCacheWorker do + let(:user) { create(:user) } + let(:project) { create(:empty_project) } + let(:pipeline) { create(:ci_pipeline, project: project) } + subject { described_class.new } + + describe '#perform' do + it 'invalidates Etag caching for project pipelines path' do + pipelines_path = "/#{project.full_path}/pipelines.json" + new_mr_pipelines_path = "/#{project.full_path}/merge_requests/new.json" + + expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(pipelines_path) + expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(new_mr_pipelines_path) + + subject.perform(pipeline.id) + end + + it 'invalidates Etag caching for merge request pipelines if pipeline runs on any commit of that source branch' do + pipeline = create(:ci_empty_pipeline, status: 'created', project: project, ref: 'master') + merge_request = create(:merge_request, source_project: project, source_branch: pipeline.ref) + merge_request_pipelines_path = "/#{project.full_path}/merge_requests/#{merge_request.iid}/pipelines.json" + + allow_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch) + expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_pipelines_path) + + subject.perform(pipeline.id) + end + + it "doesn't do anything if the pipeline not exist" do + expect_any_instance_of(Gitlab::EtagCaching::Store).not_to receive(:touch) + + subject.perform(617748) + end + + it 'updates the cached status for a project' do + expect(Gitlab::Cache::Ci::ProjectPipelineStatus).to receive(:update_for_pipeline). 
+ with(pipeline) + + subject.perform(pipeline.id) + end + end +end diff --git a/spec/workers/git_garbage_collect_worker_spec.rb b/spec/workers/git_garbage_collect_worker_spec.rb index 029f35512e0..8c5303b61cc 100644 --- a/spec/workers/git_garbage_collect_worker_spec.rb +++ b/spec/workers/git_garbage_collect_worker_spec.rb @@ -6,7 +6,7 @@ describe GitGarbageCollectWorker do let(:project) { create(:project, :repository) } let(:shell) { Gitlab::Shell.new } - subject { GitGarbageCollectWorker.new } + subject { described_class.new } describe "#perform" do it "flushes ref caches when the task is 'gc'" do @@ -105,7 +105,7 @@ describe GitGarbageCollectWorker do author: Gitlab::Git.committer_hash(email: 'foo@bar', name: 'baz'), committer: Gitlab::Git.committer_hash(email: 'foo@bar', name: 'baz'), tree: old_commit.tree, - parents: [old_commit], + parents: [old_commit] ) GitOperationService.new(nil, project.repository).send( :update_ref, diff --git a/spec/workers/gitlab_usage_ping_worker_spec.rb b/spec/workers/gitlab_usage_ping_worker_spec.rb new file mode 100644 index 00000000000..26241044533 --- /dev/null +++ b/spec/workers/gitlab_usage_ping_worker_spec.rb @@ -0,0 +1,23 @@ +require 'spec_helper' + +describe GitlabUsagePingWorker do + subject { described_class.new } + + it "sends POST request" do + stub_application_setting(usage_ping_enabled: true) + + stub_request(:post, "https://version.gitlab.com/usage_data"). + to_return(status: 200, body: '', headers: {}) + expect(Gitlab::UsageData).to receive(:to_json).with({ force_refresh: true }).and_call_original + expect(subject).to receive(:try_obtain_lease).and_return(true) + + expect(subject.perform.response.code.to_i).to eq(200) + end + + it "does not run if usage ping is disabled" do + stub_application_setting(usage_ping_enabled: false) + + expect(subject).not_to receive(:try_obtain_lease) + expect(subject).not_to receive(:perform) + end +end diff --git a/spec/workers/group_destroy_worker_spec.rb b/spec/workers/group_destroy_worker_spec.rb index 1ff5a3b9034..c78efc67076 100644 --- a/spec/workers/group_destroy_worker_spec.rb +++ b/spec/workers/group_destroy_worker_spec.rb @@ -5,7 +5,7 @@ describe GroupDestroyWorker do let(:user) { create(:admin) } let!(:project) { create(:empty_project, namespace: group) } - subject { GroupDestroyWorker.new } + subject { described_class.new } describe "#perform" do it "deletes the project" do diff --git a/spec/workers/merge_worker_spec.rb b/spec/workers/merge_worker_spec.rb index b5e1fdb8ded..303193bab9b 100644 --- a/spec/workers/merge_worker_spec.rb +++ b/spec/workers/merge_worker_spec.rb @@ -15,7 +15,7 @@ describe MergeWorker do it 'clears cache of source repo after removing source branch' do expect(source_project.repository.branch_names).to include('markdown') - MergeWorker.new.perform( + described_class.new.perform( merge_request.id, merge_request.author_id, commit_message: 'wow such merge', should_remove_source_branch: true) diff --git a/spec/workers/namespaceless_project_destroy_worker_spec.rb b/spec/workers/namespaceless_project_destroy_worker_spec.rb new file mode 100644 index 00000000000..8533b7b85e9 --- /dev/null +++ b/spec/workers/namespaceless_project_destroy_worker_spec.rb @@ -0,0 +1,79 @@ +require 'spec_helper' + +describe NamespacelessProjectDestroyWorker do + subject { described_class.new } + + before do + # Stub after_save callbacks that will fail when Project has no namespace + allow_any_instance_of(Project).to receive(:ensure_dir_exist).and_return(nil) + allow_any_instance_of(Project).to 
receive(:update_project_statistics).and_return(nil) + end + + describe '#perform' do + context 'project has namespace' do + it 'does not do anything' do + project = create(:empty_project) + + subject.perform(project.id) + + expect(Project.unscoped.all).to include(project) + end + end + + context 'project has no namespace' do + let!(:project) do + project = build(:empty_project, namespace_id: nil) + project.save(validate: false) + project + end + + context 'project not a fork of another project' do + it "truncates the project's team" do + expect_any_instance_of(ProjectTeam).to receive(:truncate) + + subject.perform(project.id) + end + + it 'deletes the project' do + subject.perform(project.id) + + expect(Project.unscoped.all).not_to include(project) + end + + it 'does not call unlink_fork' do + is_expected.not_to receive(:unlink_fork) + + subject.perform(project.id) + end + + it 'does not do anything in Project#remove_pages method' do + expect(Gitlab::PagesTransfer).not_to receive(:new) + + subject.perform(project.id) + end + end + + context 'project forked from another' do + let!(:parent_project) { create(:empty_project) } + + before do + create(:forked_project_link, forked_to_project: project, forked_from_project: parent_project) + end + + it 'closes open merge requests' do + merge_request = create(:merge_request, source_project: project, target_project: parent_project) + + subject.perform(project.id) + + expect(merge_request.reload).to be_closed + end + + it 'destroys the link' do + subject.perform(project.id) + + expect(parent_project.forked_project_links).to be_empty + end + end + end + end +end diff --git a/spec/workers/pipeline_metrics_worker_spec.rb b/spec/workers/pipeline_metrics_worker_spec.rb index 5dbc0da95c2..ef71125c0b6 100644 --- a/spec/workers/pipeline_metrics_worker_spec.rb +++ b/spec/workers/pipeline_metrics_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe PipelineMetricsWorker do let(:project) { create(:project, :repository) } - let!(:merge_request) { create(:merge_request, source_project: project, source_branch: pipeline.ref) } + let!(:merge_request) { create(:merge_request, source_project: project, source_branch: pipeline.ref, head_pipeline: pipeline) } let(:pipeline) do create(:ci_empty_pipeline, diff --git a/spec/workers/pipeline_notification_worker_spec.rb b/spec/workers/pipeline_notification_worker_spec.rb index 5a7ce2e08c4..139032d77bd 100644 --- a/spec/workers/pipeline_notification_worker_spec.rb +++ b/spec/workers/pipeline_notification_worker_spec.rb @@ -3,131 +3,19 @@ require 'spec_helper' describe PipelineNotificationWorker do include EmailHelpers - let(:pipeline) do - create(:ci_pipeline, - project: project, - sha: project.commit('master').sha, - user: pusher, - status: status) - end - - let(:project) { create(:project, :repository, public_builds: false) } - let(:user) { create(:user) } - let(:pusher) { user } - let(:watcher) { pusher } + let(:pipeline) { create(:ci_pipeline) } describe '#execute' do - before do - reset_delivered_emails! 
- pipeline.project.team << [pusher, Gitlab::Access::DEVELOPER] - end - - context 'when watcher has developer access' do - before do - pipeline.project.team << [watcher, Gitlab::Access::DEVELOPER] - end - - shared_examples 'sending emails' do - it 'sends emails' do - perform_enqueued_jobs do - subject.perform(pipeline.id) - end - - emails = ActionMailer::Base.deliveries - actual = emails.flat_map(&:bcc).sort - expected_receivers = receivers.map(&:email).uniq.sort - - expect(actual).to eq(expected_receivers) - expect(emails.size).to eq(1) - expect(emails.last.subject).to include(email_subject) - end - end - - context 'with success pipeline' do - let(:status) { 'success' } - let(:email_subject) { "Pipeline ##{pipeline.id} has succeeded" } - let(:receivers) { [pusher, watcher] } - - it_behaves_like 'sending emails' - - context 'with pipeline from someone else' do - let(:pusher) { create(:user) } - let(:watcher) { user } - - context 'with success pipeline notification on' do - before do - watcher.global_notification_setting. - update(level: 'custom', success_pipeline: true) - end - - it_behaves_like 'sending emails' - end - - context 'with success pipeline notification off' do - let(:receivers) { [pusher] } + it 'calls NotificationService#pipeline_finished when the pipeline exists' do + expect(NotificationService).to receive_message_chain(:new, :pipeline_finished) - before do - watcher.global_notification_setting. - update(level: 'custom', success_pipeline: false) - end - - it_behaves_like 'sending emails' - end - end - - context 'with failed pipeline' do - let(:status) { 'failed' } - let(:email_subject) { "Pipeline ##{pipeline.id} has failed" } - - it_behaves_like 'sending emails' - - context 'with pipeline from someone else' do - let(:pusher) { create(:user) } - let(:watcher) { user } - - context 'with failed pipeline notification on' do - before do - watcher.global_notification_setting. - update(level: 'custom', failed_pipeline: true) - end - - it_behaves_like 'sending emails' - end - - context 'with failed pipeline notification off' do - let(:receivers) { [pusher] } - - before do - watcher.global_notification_setting. - update(level: 'custom', failed_pipeline: false) - end - - it_behaves_like 'sending emails' - end - end - end - end + subject.perform(pipeline.id) end - context 'when watcher has no read_build access' do - let(:status) { 'failed' } - let(:email_subject) { "Pipeline ##{pipeline.id} has failed" } - let(:watcher) { create(:user) } - - before do - pipeline.project.team << [watcher, Gitlab::Access::GUEST] - - watcher.global_notification_setting. 
- update(level: 'custom', failed_pipeline: true) - - perform_enqueued_jobs do - subject.perform(pipeline.id) - end - end + it 'does nothing when the pipeline does not exist' do + expect(NotificationService).not_to receive(:new) - it 'does not send emails' do - should_only_email(pusher, kind: :bcc) - end + subject.perform(Ci::Pipeline.maximum(:id).to_i.succ) end end end diff --git a/spec/workers/pipeline_proccess_worker_spec.rb b/spec/workers/pipeline_process_worker_spec.rb index 86e9d7f6684..86e9d7f6684 100644 --- a/spec/workers/pipeline_proccess_worker_spec.rb +++ b/spec/workers/pipeline_process_worker_spec.rb diff --git a/spec/workers/pipeline_schedule_worker_spec.rb b/spec/workers/pipeline_schedule_worker_spec.rb new file mode 100644 index 00000000000..9c650354d72 --- /dev/null +++ b/spec/workers/pipeline_schedule_worker_spec.rb @@ -0,0 +1,64 @@ +require 'spec_helper' + +describe PipelineScheduleWorker do + subject { described_class.new.perform } + + set(:project) { create(:project, :repository) } + set(:user) { create(:user) } + + let!(:pipeline_schedule) do + create(:ci_pipeline_schedule, :nightly, project: project, owner: user) + end + + before do + stub_ci_pipeline_to_return_yaml_file + + pipeline_schedule.update_column(:next_run_at, 1.day.ago) + end + + context 'when the schedule is runnable by the user' do + before do + project.add_master(user) + end + + context 'when there is a scheduled pipeline within next_run_at' do + it 'creates a new pipeline' do + expect { subject }.to change { project.pipelines.count }.by(1) + end + + it 'updates the next_run_at field' do + subject + + expect(pipeline_schedule.reload.next_run_at).to be > Time.now + end + + it 'sets the schedule on the pipeline' do + subject + + expect(project.pipelines.last.pipeline_schedule).to eq(pipeline_schedule) + end + end + + context 'inactive schedule' do + before do + pipeline_schedule.deactivate! 
+ end + + it 'does not creates a new pipeline' do + expect { subject }.not_to change { project.pipelines.count } + end + end + end + + context 'when the schedule is not runnable by the user' do + it 'deactivates the schedule' do + subject + + expect(pipeline_schedule.reload.active).to be_falsy + end + + it 'does not schedule a pipeline' do + expect { subject }.not_to change { project.pipelines.count } + end + end +end diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb index a2a559a2369..f4bc63bcc6a 100644 --- a/spec/workers/post_receive_spec.rb +++ b/spec/workers/post_receive_spec.rb @@ -4,13 +4,37 @@ describe PostReceive do let(:changes) { "123456 789012 refs/heads/tést\n654321 210987 refs/tags/tag" } let(:wrongly_encoded_changes) { changes.encode("ISO-8859-1").force_encoding("UTF-8") } let(:base64_changes) { Base64.encode64(wrongly_encoded_changes) } - let(:project) { create(:project, :repository) } + let(:project_identifier) { "project-#{project.id}" } let(:key) { create(:key, user: project.owner) } let(:key_id) { key.shell_id } - context "as a resque worker" do - it "reponds to #perform" do - expect(PostReceive.new).to respond_to(:perform) + let(:project) do + create(:project, :repository, auto_cancel_pending_pipelines: 'disabled') + end + + context "as a sidekiq worker" do + it "responds to #perform" do + expect(described_class.new).to respond_to(:perform) + end + end + + context 'with a non-existing project' do + let(:project_identifier) { "project-123456789" } + let(:error_message) do + "Triggered hook for non-existing project with identifier \"#{project_identifier}\"" + end + + it "returns false and logs an error" do + expect(Gitlab::GitLogger).to receive(:error).with("POST-RECEIVE: #{error_message}") + expect(described_class.new.perform(project_identifier, key_id, base64_changes)).to be(false) + end + end + + context "with an absolute path as the project identifier" do + it "searches the project by full path" do + expect(Project).to receive(:find_by_full_path).with(project.full_path).and_call_original + + described_class.new.perform(pwd(project), key_id, base64_changes) end end @@ -25,7 +49,7 @@ describe PostReceive do it "calls GitTagPushService" do expect_any_instance_of(GitPushService).to receive(:execute).and_return(true) expect_any_instance_of(GitTagPushService).not_to receive(:execute) - PostReceive.new.perform(pwd(project), key_id, base64_changes) + described_class.new.perform(project_identifier, key_id, base64_changes) end end @@ -35,7 +59,7 @@ describe PostReceive do it "calls GitTagPushService" do expect_any_instance_of(GitPushService).not_to receive(:execute) expect_any_instance_of(GitTagPushService).to receive(:execute).and_return(true) - PostReceive.new.perform(pwd(project), key_id, base64_changes) + described_class.new.perform(project_identifier, key_id, base64_changes) end end @@ -45,12 +69,12 @@ describe PostReceive do it "does not call any of the services" do expect_any_instance_of(GitPushService).not_to receive(:execute) expect_any_instance_of(GitTagPushService).not_to receive(:execute) - PostReceive.new.perform(pwd(project), key_id, base64_changes) + described_class.new.perform(project_identifier, key_id, base64_changes) end end context "gitlab-ci.yml" do - subject { PostReceive.new.perform(pwd(project), key_id, base64_changes) } + subject { described_class.new.perform(project_identifier, key_id, base64_changes) } context "creates a Ci::Pipeline for every change" do before do @@ -72,10 +96,31 @@ describe PostReceive do end end + 
describe '#process_repository_update' do + let(:changes) {'123456 789012 refs/heads/tést'} + let(:fake_hook_data) do + { event_name: 'repository_update' } + end + + before do + allow_any_instance_of(Gitlab::GitPostReceive).to receive(:identify).and_return(project.owner) + allow_any_instance_of(Gitlab::DataBuilder::Repository).to receive(:update).and_return(fake_hook_data) + # silence hooks so we can isolate + allow_any_instance_of(Key).to receive(:post_create_hook).and_return(true) + allow(subject).to receive(:process_project_changes).and_return(true) + end + + it 'calls SystemHooksService' do + expect_any_instance_of(SystemHooksService).to receive(:execute_hooks).with(fake_hook_data, :repository_update_hooks).and_return(true) + + subject.perform(pwd(project), key_id, base64_changes) + end + end + context "webhook" do it "fetches the correct project" do - expect(Project).to receive(:find_by_full_path).with(project.path_with_namespace).and_return(project) - PostReceive.new.perform(pwd(project), key_id, base64_changes) + expect(Project).to receive(:find_by).with(id: project.id.to_s) + described_class.new.perform(project_identifier, key_id, base64_changes) end it "does not run if the author is not in the project" do @@ -85,22 +130,22 @@ describe PostReceive do expect(project).not_to receive(:execute_hooks) - expect(PostReceive.new.perform(pwd(project), key_id, base64_changes)).to be_falsey + expect(described_class.new.perform(project_identifier, key_id, base64_changes)).to be_falsey end it "asks the project to trigger all hooks" do - allow(Project).to receive(:find_by_full_path).and_return(project) + allow(Project).to receive(:find_by).and_return(project) expect(project).to receive(:execute_hooks).twice expect(project).to receive(:execute_services).twice - PostReceive.new.perform(pwd(project), key_id, base64_changes) + described_class.new.perform(project_identifier, key_id, base64_changes) end it "enqueues a UpdateMergeRequestsWorker job" do - allow(Project).to receive(:find_by_full_path).and_return(project) + allow(Project).to receive(:find_by).and_return(project) expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(project.id, project.owner.id, any_args) - PostReceive.new.perform(pwd(project), key_id, base64_changes) + described_class.new.perform(project_identifier, key_id, base64_changes) end end diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb index 1c383d0514d..6295856b461 100644 --- a/spec/workers/process_commit_worker_spec.rb +++ b/spec/workers/process_commit_worker_spec.rb @@ -20,6 +20,14 @@ describe ProcessCommitWorker do worker.perform(project.id, -1, commit.to_hash) end + it 'does not process the commit when no issues are referenced' do + allow(worker).to receive(:build_commit).and_return(double(matches_cross_reference_regex?: false)) + + expect(worker).not_to receive(:process_commit_message) + + worker.perform(project.id, user.id, commit.to_hash) + end + it 'processes the commit message' do expect(worker).to receive(:process_commit_message).and_call_original @@ -99,6 +107,13 @@ describe ProcessCommitWorker do expect(metric.first_mentioned_in_commit_at).to eq(commit.committed_date) end + + it "doesn't execute any queries with false conditions" do + allow(commit).to receive(:safe_message). 
+ and_return("Lorem Ipsum") + + expect { worker.update_issue_metrics(commit, user) }.not_to make_queries_matching(/WHERE (?:1=0|0=1)/) + end end describe '#build_commit' do diff --git a/spec/workers/project_cache_worker_spec.rb b/spec/workers/project_cache_worker_spec.rb index c23ffdf99c0..a4ba5f7c943 100644 --- a/spec/workers/project_cache_worker_spec.rb +++ b/spec/workers/project_cache_worker_spec.rb @@ -45,6 +45,18 @@ describe ProjectCacheWorker do worker.perform(project.id, %w(readme)) end + + context 'with plain readme' do + it 'refreshes the method caches' do + allow(MarkupHelper).to receive(:gitlab_markdown?).and_return(false) + allow(MarkupHelper).to receive(:plain?).and_return(true) + + expect_any_instance_of(Repository).to receive(:refresh_method_caches). + with(%i(readme)). + and_call_original + worker.perform(project.id, %w(readme)) + end + end end end diff --git a/spec/workers/project_destroy_worker_spec.rb b/spec/workers/project_destroy_worker_spec.rb index 0ab42f99510..3d135f40c1f 100644 --- a/spec/workers/project_destroy_worker_spec.rb +++ b/spec/workers/project_destroy_worker_spec.rb @@ -4,7 +4,7 @@ describe ProjectDestroyWorker do let(:project) { create(:project, :repository) } let(:path) { project.repository.path_to_repo } - subject { ProjectDestroyWorker.new } + subject { described_class.new } describe "#perform" do it "deletes the project" do diff --git a/spec/workers/propagate_service_template_worker_spec.rb b/spec/workers/propagate_service_template_worker_spec.rb new file mode 100644 index 00000000000..7040d5ef81c --- /dev/null +++ b/spec/workers/propagate_service_template_worker_spec.rb @@ -0,0 +1,29 @@ +require 'spec_helper' + +describe PropagateServiceTemplateWorker do + let!(:service_template) do + PushoverService.create( + template: true, + active: true, + properties: { + device: 'MyDevice', + sound: 'mic', + priority: 4, + user_key: 'asdf', + api_key: '123456789' + }) + end + + before do + allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain). 
+ and_return(true) + end + + describe '#perform' do + it 'calls the propagate service with the template' do + expect(Projects::PropagateServiceTemplate).to receive(:propagate).with(service_template) + + subject.perform(service_template.id) + end + end +end diff --git a/spec/workers/remove_expired_members_worker_spec.rb b/spec/workers/remove_expired_members_worker_spec.rb index 402aa1e714e..058fdf4c009 100644 --- a/spec/workers/remove_expired_members_worker_spec.rb +++ b/spec/workers/remove_expired_members_worker_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe RemoveExpiredMembersWorker do - let(:worker) { RemoveExpiredMembersWorker.new } + let(:worker) { described_class.new } describe '#perform' do context 'project members' do diff --git a/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb b/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb index 6d42946de38..1c183ce54f4 100644 --- a/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb +++ b/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe RemoveUnreferencedLfsObjectsWorker do - let(:worker) { RemoveUnreferencedLfsObjectsWorker.new } + let(:worker) { described_class.new } describe '#perform' do let!(:unreferenced_lfs_object1) { create(:lfs_object, oid: '1') } diff --git a/spec/workers/repository_check/clear_worker_spec.rb b/spec/workers/repository_check/clear_worker_spec.rb index a3b70c74787..3b1a64c5057 100644 --- a/spec/workers/repository_check/clear_worker_spec.rb +++ b/spec/workers/repository_check/clear_worker_spec.rb @@ -5,7 +5,7 @@ describe RepositoryCheck::ClearWorker do project = create(:empty_project) project.update_columns( last_repository_check_failed: true, - last_repository_check_at: Time.now, + last_repository_check_at: Time.now ) described_class.new.perform diff --git a/spec/workers/repository_fork_worker_spec.rb b/spec/workers/repository_fork_worker_spec.rb index 7d6a2db2972..5e1cb74c7fc 100644 --- a/spec/workers/repository_fork_worker_spec.rb +++ b/spec/workers/repository_fork_worker_spec.rb @@ -5,7 +5,7 @@ describe RepositoryForkWorker do let(:fork_project) { create(:project, :repository, forked_from_project: project) } let(:shell) { Gitlab::Shell.new } - subject { RepositoryForkWorker.new } + subject { described_class.new } before do allow(subject).to receive(:gitlab_shell).and_return(shell) diff --git a/spec/workers/repository_import_worker_spec.rb b/spec/workers/repository_import_worker_spec.rb index fbb22439f33..5a2c0671dac 100644 --- a/spec/workers/repository_import_worker_spec.rb +++ b/spec/workers/repository_import_worker_spec.rb @@ -23,10 +23,12 @@ describe RepositoryImportWorker do error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found } expect_any_instance_of(Projects::ImportService).to receive(:execute). 
and_return({ status: :error, message: error }) + allow(subject).to receive(:jid).and_return('123') subject.perform(project.id) expect(project.reload.import_error).to include("https://*****:*****@test.com/root/repoC.git/") + expect(project.reload.import_jid).not_to be_nil end end end diff --git a/spec/workers/schedule_update_user_activity_worker_spec.rb b/spec/workers/schedule_update_user_activity_worker_spec.rb new file mode 100644 index 00000000000..e583c3203aa --- /dev/null +++ b/spec/workers/schedule_update_user_activity_worker_spec.rb @@ -0,0 +1,25 @@ +require 'spec_helper' + +describe ScheduleUpdateUserActivityWorker, :redis do + let(:now) { Time.now } + + before do + Gitlab::UserActivities.record('1', now) + Gitlab::UserActivities.record('2', now) + end + + it 'schedules UpdateUserActivityWorker once' do + expect(UpdateUserActivityWorker).to receive(:perform_async).with({ '1' => now.to_i.to_s, '2' => now.to_i.to_s }) + + subject.perform + end + + context 'when specifying a batch size' do + it 'schedules UpdateUserActivityWorker twice' do + expect(UpdateUserActivityWorker).to receive(:perform_async).with({ '1' => now.to_i.to_s }) + expect(UpdateUserActivityWorker).to receive(:perform_async).with({ '2' => now.to_i.to_s }) + + subject.perform(1) + end + end +end diff --git a/spec/workers/stuck_import_jobs_worker_spec.rb b/spec/workers/stuck_import_jobs_worker_spec.rb new file mode 100644 index 00000000000..466277a5e5e --- /dev/null +++ b/spec/workers/stuck_import_jobs_worker_spec.rb @@ -0,0 +1,36 @@ +require 'spec_helper' + +describe StuckImportJobsWorker do + let(:worker) { described_class.new } + let(:exclusive_lease_uuid) { SecureRandom.uuid } + + before do + allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(exclusive_lease_uuid) + end + + describe 'long running import' do + let(:project) { create(:empty_project, import_jid: '123', import_status: 'started') } + + before do + allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return(['123']) + end + + it 'marks the project as failed' do + expect { worker.perform }.to change { project.reload.import_status }.to('failed') + end + end + + describe 'running import' do + let(:project) { create(:empty_project, import_jid: '123', import_status: 'started') } + + before do + allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return([]) + end + + it 'does not mark the project as failed' do + worker.perform + + expect(project.reload.import_status).to eq('started') + end + end +end diff --git a/spec/workers/update_user_activity_worker_spec.rb b/spec/workers/update_user_activity_worker_spec.rb new file mode 100644 index 00000000000..43e9511f116 --- /dev/null +++ b/spec/workers/update_user_activity_worker_spec.rb @@ -0,0 +1,35 @@ +require 'spec_helper' + +describe UpdateUserActivityWorker, :redis do + let(:user_active_2_days_ago) { create(:user, current_sign_in_at: 10.months.ago) } + let(:user_active_yesterday_1) { create(:user) } + let(:user_active_yesterday_2) { create(:user) } + let(:user_active_today) { create(:user) } + let(:data) do + { + user_active_2_days_ago.id.to_s => 2.days.ago.at_midday.to_i.to_s, + user_active_yesterday_1.id.to_s => 1.day.ago.at_midday.to_i.to_s, + user_active_yesterday_2.id.to_s => 1.day.ago.at_midday.to_i.to_s, + user_active_today.id.to_s => Time.now.to_i.to_s + } + end + + it 'updates users.last_activity_on' do + subject.perform(data) + + aggregate_failures do + expect(user_active_2_days_ago.reload.last_activity_on).to eq(2.days.ago.to_date) + 
expect(user_active_yesterday_1.reload.last_activity_on).to eq(1.day.ago.to_date) + expect(user_active_yesterday_2.reload.last_activity_on).to eq(1.day.ago.to_date) + expect(user_active_today.reload.last_activity_on).to eq(Date.today) + end + end + + it 'deletes the pairs from Redis' do + data.each { |id, time| Gitlab::UserActivities.record(id, time) } + + subject.perform(data) + + expect(Gitlab::UserActivities.new.to_a).to be_empty + end +end