Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/profiles_controller_spec.rb | 7
-rw-r--r--  spec/features/issues/filtered_search/filter_issues_spec.rb | 9
-rw-r--r--  spec/features/labels_hierarchy_spec.rb | 305
-rw-r--r--  spec/features/projects/issues/user_sorts_issues_spec.rb | 3
-rw-r--r--  spec/features/protected_branches_spec.rb | 5
-rw-r--r--  spec/features/protected_tags_spec.rb | 5
-rw-r--r--  spec/finders/labels_finder_spec.rb | 34
-rw-r--r--  spec/javascripts/ide/components/commit_sidebar/list_item_spec.js | 38
-rw-r--r--  spec/javascripts/ide/components/repo_tab_spec.js | 2
-rw-r--r--  spec/javascripts/ide/components/repo_tabs_spec.js | 2
-rw-r--r--  spec/javascripts/ide/lib/common/model_manager_spec.js | 15
-rw-r--r--  spec/javascripts/ide/lib/common/model_spec.js | 8
-rw-r--r--  spec/javascripts/ide/lib/decorations/controller_spec.js | 41
-rw-r--r--  spec/javascripts/ide/lib/diff/controller_spec.js | 4
-rw-r--r--  spec/javascripts/ide/stores/actions/file_spec.js | 131
-rw-r--r--  spec/javascripts/ide/stores/mutations/file_spec.js | 80
-rw-r--r--  spec/lib/banzai/filter/commit_trailers_filter_spec.rb | 171
-rw-r--r--  spec/lib/banzai/filter/merge_request_reference_filter_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/auth/ldap/access_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/auth/ldap/adapter_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/auth/o_auth/user_spec.rb | 66
-rw-r--r--  spec/lib/gitlab/bitbucket_import/importer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/gitlab_projects_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/github_import/importer/repository_importer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/shell_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb | 31
-rw-r--r--  spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb | 101
-rw-r--r--  spec/models/commit_status_spec.rb | 4
-rw-r--r--  spec/models/concerns/issuable_spec.rb | 8
-rw-r--r--  spec/models/project_spec.rb | 7
-rw-r--r--  spec/models/user_spec.rb | 12
-rw-r--r--  spec/requests/api/boards_spec.rb | 31
-rw-r--r--  spec/requests/api/features_spec.rb | 43
-rw-r--r--  spec/requests/api/projects_spec.rb | 41
-rw-r--r--  spec/requests/projects/cycle_analytics_events_spec.rb | 10
-rw-r--r--  spec/spec_helper.rb | 3
-rw-r--r--  spec/support/commit_trailers_spec_helper.rb | 41
-rw-r--r--  spec/support/filtered_search_helpers.rb | 23
-rw-r--r--  spec/support/ldap_helpers.rb | 5
-rw-r--r--  spec/support/test_env.rb | 3
-rw-r--r--  spec/workers/repository_fork_worker_spec.rb | 101
41 files changed, 1333 insertions, 174 deletions
diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb
index 03cbbb21e62..891485406c6 100644
--- a/spec/controllers/profiles_controller_spec.rb
+++ b/spec/controllers/profiles_controller_spec.rb
@@ -84,6 +84,13 @@ describe ProfilesController, :request_store do
expect(user.username).to eq(new_username)
end
+ it 'raises a correct error when the username is missing' do
+ sign_in(user)
+
+ expect { put :update_username, user: { gandalf: 'you shall not pass' } }
+ .to raise_error(ActionController::ParameterMissing)
+ end
+
context 'with legacy storage' do
it 'moves dependent projects to new namespace' do
project = create(:project_empty_repo, :legacy_storage, namespace: namespace)
diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb
index b3c50964810..08ba91a2682 100644
--- a/spec/features/issues/filtered_search/filter_issues_spec.rb
+++ b/spec/features/issues/filtered_search/filter_issues_spec.rb
@@ -22,15 +22,6 @@ describe 'Filter issues', :js do
end
end
- def expect_issues_list_count(open_count, closed_count = 0)
- all_count = open_count + closed_count
-
- expect(page).to have_issuable_counts(open: open_count, closed: closed_count, all: all_count)
- page.within '.issues-list' do
- expect(page).to have_selector('.issue', count: open_count)
- end
- end
-
before do
project.add_master(user)
diff --git a/spec/features/labels_hierarchy_spec.rb b/spec/features/labels_hierarchy_spec.rb
new file mode 100644
index 00000000000..99e1fb30d5b
--- /dev/null
+++ b/spec/features/labels_hierarchy_spec.rb
@@ -0,0 +1,305 @@
+require 'spec_helper'
+
+feature 'Labels Hierarchy', :js, :nested_groups do
+ include FilteredSearchHelpers
+
+ let!(:user) { create(:user) }
+ let!(:grandparent) { create(:group) }
+ let!(:parent) { create(:group, parent: grandparent) }
+ let!(:child) { create(:group, parent: parent) }
+ let!(:project_1) { create(:project, namespace: parent) }
+
+ let!(:grandparent_group_label) { create(:group_label, group: grandparent, title: 'Label_1') }
+ let!(:parent_group_label) { create(:group_label, group: parent, title: 'Label_2') }
+ let!(:child_group_label) { create(:group_label, group: child, title: 'Label_3') }
+ let!(:project_label_1) { create(:label, project: project_1, title: 'Label_4') }
+
+ before do
+ grandparent.add_owner(user)
+
+ sign_in(user)
+ end
+
+ shared_examples 'assigning labels from sidebar' do
+ it 'can assign all ancestors labels' do
+ [grandparent_group_label, parent_group_label, project_label_1].each do |label|
+ page.within('.block.labels') do
+ find('.edit-link').click
+ end
+
+ wait_for_requests
+
+ find('a.label-item', text: label.title).click
+ find('.dropdown-menu-close-icon').click
+
+ wait_for_requests
+
+ expect(page).to have_selector('span.label', text: label.title)
+ end
+ end
+
+ it 'does not find child group labels on dropdown' do
+ page.within('.block.labels') do
+ find('.edit-link').click
+ end
+
+ wait_for_requests
+
+ expect(page).not_to have_selector('span.label', text: child_group_label.title)
+ end
+ end
+
+ shared_examples 'filtering by ancestor labels for projects' do |board = false|
+ it 'filters by ancestor labels' do
+ [grandparent_group_label, parent_group_label, project_label_1].each do |label|
+ select_label_on_dropdown(label.title)
+
+ wait_for_requests
+
+ if board
+ expect(page).to have_selector('.card-title') do |card|
+ expect(card).to have_selector('a', text: labeled_issue.title)
+ end
+ else
+ expect_issues_list_count(1)
+ expect(page).to have_selector('span.issue-title-text', text: labeled_issue.title)
+ end
+ end
+ end
+
+ it 'does not filter by descendant group labels' do
+ filtered_search.set("label:")
+
+ wait_for_requests
+
+ expect(page).not_to have_selector('.btn-link', text: child_group_label.title)
+ end
+ end
+
+ shared_examples 'filtering by ancestor labels for groups' do |board = false|
+ let(:project_2) { create(:project, namespace: parent) }
+ let!(:project_label_2) { create(:label, project: project_2, title: 'Label_4') }
+
+ let(:project_3) { create(:project, namespace: child) }
+ let!(:group_label_3) { create(:group_label, group: child, title: 'Label_5') }
+ let!(:project_label_3) { create(:label, project: project_3, title: 'Label_6') }
+
+ let!(:labeled_issue_2) { create(:labeled_issue, project: project_2, labels: [grandparent_group_label, parent_group_label, project_label_2]) }
+ let!(:labeled_issue_3) { create(:labeled_issue, project: project_3, labels: [grandparent_group_label, parent_group_label, group_label_3]) }
+
+ let!(:issue_2) { create(:issue, project: project_2) }
+
+ it 'filters by ancestors and current group labels' do
+ [grandparent_group_label, parent_group_label].each do |label|
+ select_label_on_dropdown(label.title)
+
+ wait_for_requests
+
+ if board
+ expect(page).to have_selector('.card-title') do |card|
+ expect(card).to have_selector('a', text: labeled_issue.title)
+ end
+
+ expect(page).to have_selector('.card-title') do |card|
+ expect(card).to have_selector('a', text: labeled_issue_2.title)
+ end
+ else
+ expect_issues_list_count(3)
+ expect(page).to have_selector('span.issue-title-text', text: labeled_issue.title)
+ expect(page).to have_selector('span.issue-title-text', text: labeled_issue_2.title)
+ expect(page).to have_selector('span.issue-title-text', text: labeled_issue_3.title)
+ end
+ end
+ end
+
+ it 'filters by descendant group labels' do
+ wait_for_requests
+
+ if board
+ pending("Waiting for https://gitlab.com/gitlab-org/gitlab-ce/issues/44270")
+
+ select_label_on_dropdown(group_label_3.title)
+
+ expect(page).to have_selector('.card-title') do |card|
+ expect(card).to have_selector('a', text: labeled_issue_3.title)
+ end
+ else
+ select_label_on_dropdown(group_label_3.title)
+
+ expect_issues_list_count(1)
+ expect(page).to have_selector('span.issue-title-text', text: labeled_issue_3.title)
+ end
+ end
+
+ it 'does not filter by descendant group project labels' do
+ filtered_search.set("label:")
+
+ wait_for_requests
+
+ expect(page).not_to have_selector('.btn-link', text: project_label_3.title)
+ end
+ end
+
+ context 'when creating new issuable' do
+ before do
+ visit new_project_issue_path(project_1)
+ end
+
+ it 'should be able to assign ancestor group labels' do
+ fill_in 'issue_title', with: 'new created issue'
+ fill_in 'issue_description', with: 'new issue description'
+
+ find(".js-label-select").click
+ wait_for_requests
+
+ find('a.label-item', text: grandparent_group_label.title).click
+ find('a.label-item', text: parent_group_label.title).click
+ find('a.label-item', text: project_label_1.title).click
+
+ find('.btn-create').click
+
+ expect(page.find('.issue-details h2.title')).to have_content('new created issue')
+ expect(page).to have_selector('span.label', text: grandparent_group_label.title)
+ expect(page).to have_selector('span.label', text: parent_group_label.title)
+ expect(page).to have_selector('span.label', text: project_label_1.title)
+ end
+ end
+
+ context 'issuable sidebar' do
+ let!(:issue) { create(:issue, project: project_1) }
+
+ context 'on issue sidebar' do
+ before do
+ visit project_issue_path(project_1, issue)
+ end
+
+ it_behaves_like 'assigning labels from sidebar'
+ end
+
+ context 'on project board issue sidebar' do
+ let(:board) { create(:board, project: project_1) }
+
+ before do
+ visit project_board_path(project_1, board)
+
+ wait_for_requests
+
+ find('.card').click
+ end
+
+ it_behaves_like 'assigning labels from sidebar'
+ end
+
+ context 'on group board issue sidebar' do
+ let(:board) { create(:board, group: parent) }
+
+ before do
+ visit group_board_path(parent, board)
+
+ wait_for_requests
+
+ find('.card').click
+ end
+
+ it_behaves_like 'assigning labels from sidebar'
+ end
+ end
+
+ context 'issuable filtering' do
+ let!(:labeled_issue) { create(:labeled_issue, project: project_1, labels: [grandparent_group_label, parent_group_label, project_label_1]) }
+ let!(:issue) { create(:issue, project: project_1) }
+
+ context 'on project issuable list' do
+ before do
+ visit project_issues_path(project_1)
+ end
+
+ it_behaves_like 'filtering by ancestor labels for projects'
+
+ it 'does not filter by descendant group labels' do
+ filtered_search.set("label:")
+
+ wait_for_requests
+
+ expect(page).not_to have_selector('.btn-link', text: child_group_label.title)
+ end
+ end
+
+ context 'on group issuable list' do
+ before do
+ visit issues_group_path(parent)
+ end
+
+ it_behaves_like 'filtering by ancestor labels for groups'
+ end
+
+ context 'on project boards filter' do
+ let(:board) { create(:board, project: project_1) }
+
+ before do
+ visit project_board_path(project_1, board)
+ end
+
+ it_behaves_like 'filtering by ancestor labels for projects', true
+ end
+
+ context 'on group boards filter' do
+ let(:board) { create(:board, group: parent) }
+
+ before do
+ visit group_board_path(parent, board)
+ end
+
+ it_behaves_like 'filtering by ancestor labels for groups', true
+ end
+ end
+
+ context 'creating boards lists' do
+ context 'on project boards' do
+ let(:board) { create(:board, project: project_1) }
+
+ before do
+ visit project_board_path(project_1, board)
+ find('.js-new-board-list').click
+ wait_for_requests
+ end
+
+ it 'creates lists from all ancestor labels' do
+ [grandparent_group_label, parent_group_label, project_label_1].each do |label|
+ find('a', text: label.title).click
+ end
+
+ wait_for_requests
+
+ expect(page).to have_selector('.board-title-text', text: grandparent_group_label.title)
+ expect(page).to have_selector('.board-title-text', text: parent_group_label.title)
+ expect(page).to have_selector('.board-title-text', text: project_label_1.title)
+ end
+ end
+
+ context 'on group boards' do
+ let(:board) { create(:board, group: parent) }
+
+ before do
+ visit group_board_path(parent, board)
+ find('.js-new-board-list').click
+ wait_for_requests
+ end
+
+ it 'creates lists from all ancestor group labels' do
+ [grandparent_group_label, parent_group_label].each do |label|
+ find('a', text: label.title).click
+ end
+
+ wait_for_requests
+
+ expect(page).to have_selector('.board-title-text', text: grandparent_group_label.title)
+ expect(page).to have_selector('.board-title-text', text: parent_group_label.title)
+ end
+
+ it 'does not create lists from descendant groups' do
+ expect(page).not_to have_selector('a', text: child_group_label.title)
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/issues/user_sorts_issues_spec.rb b/spec/features/projects/issues/user_sorts_issues_spec.rb
index 34148ae0116..c3d63000dac 100644
--- a/spec/features/projects/issues/user_sorts_issues_spec.rb
+++ b/spec/features/projects/issues/user_sorts_issues_spec.rb
@@ -25,17 +25,14 @@ describe "User sorts issues" do
page.within(".issues-list") do
page.within("li.issue:nth-child(1)") do
expect(page).to have_content(issue1.title)
- expect(page).to have_content("2 1")
end
page.within("li.issue:nth-child(2)") do
expect(page).to have_content(issue2.title)
- expect(page).to have_content("1 2")
end
page.within("li.issue:nth-child(3)") do
expect(page).to have_content(issue3.title)
- expect(page).not_to have_content("0 0")
end
end
end
diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb
index a4084818284..43cabd3b9f2 100644
--- a/spec/features/protected_branches_spec.rb
+++ b/spec/features/protected_branches_spec.rb
@@ -142,7 +142,10 @@ feature 'Protected Branches', :js do
set_protected_branch_name('*-stable')
click_on "Protect"
- within(".protected-branches-list") { expect(page).to have_content("2 matching branches") }
+ within(".protected-branches-list") do
+ expect(page).to have_content("Protected branch (2)")
+ expect(page).to have_content("2 matching branches")
+ end
end
it "displays all the branches matching the wildcard" do
diff --git a/spec/features/protected_tags_spec.rb b/spec/features/protected_tags_spec.rb
index 8cc6f17b8d9..efccaeaff6c 100644
--- a/spec/features/protected_tags_spec.rb
+++ b/spec/features/protected_tags_spec.rb
@@ -65,7 +65,10 @@ feature 'Protected Tags', :js do
set_protected_tag_name('*-stable')
click_on "Protect"
- within(".protected-tags-list") { expect(page).to have_content("2 matching tags") }
+ within(".protected-tags-list") do
+ expect(page).to have_content("Protected tag (2)")
+ expect(page).to have_content("2 matching tags")
+ end
end
it "displays all the tags matching the wildcard" do
diff --git a/spec/finders/labels_finder_spec.rb b/spec/finders/labels_finder_spec.rb
index d434c501110..899d0d22819 100644
--- a/spec/finders/labels_finder_spec.rb
+++ b/spec/finders/labels_finder_spec.rb
@@ -71,6 +71,24 @@ describe LabelsFinder do
end
end
+ context 'when group has no projects' do
+ let(:empty_group) { create(:group) }
+ let!(:empty_group_label_1) { create(:group_label, group: empty_group, title: 'Label 1 (empty group)') }
+ let!(:empty_group_label_2) { create(:group_label, group: empty_group, title: 'Label 2 (empty group)') }
+
+ before do
+ empty_group.add_developer(user)
+ end
+
+ context 'when only group labels is false' do
+ it 'returns group labels' do
+ finder = described_class.new(user, group_id: empty_group.id)
+
+ expect(finder.execute).to eq [empty_group_label_1, empty_group_label_2]
+ end
+ end
+ end
+
context 'when including labels from group ancestors', :nested_groups do
it 'returns labels from group and its ancestors' do
private_group_1.add_developer(user)
@@ -110,7 +128,21 @@ describe LabelsFinder do
end
end
- context 'filtering by project_id' do
+ context 'filtering by project_id', :nested_groups do
+ context 'when include_ancestor_groups is true' do
+ let!(:sub_project) { create(:project, namespace: private_subgroup_1 ) }
+ let!(:project_label) { create(:label, project: sub_project, title: 'Label 5') }
+ let(:finder) { described_class.new(user, project_id: sub_project.id, include_ancestor_groups: true) }
+
+ before do
+ private_group_1.add_developer(user)
+ end
+
+ it 'returns all ancestor labels' do
+ expect(finder.execute).to match_array([private_subgroup_label_1, private_group_label_1, project_label])
+ end
+ end
+
it 'returns labels available for the project' do
finder = described_class.new(user, project_id: project_1.id)
diff --git a/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js b/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js
index 15b66952d99..509434e4300 100644
--- a/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js
+++ b/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js
@@ -1,8 +1,9 @@
import Vue from 'vue';
import listItem from '~/ide/components/commit_sidebar/list_item.vue';
import router from '~/ide/ide_router';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import { file } from '../../helpers';
+import store from '~/ide/stores';
+import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
+import { file, resetStore } from '../../helpers';
describe('Multi-file editor commit sidebar list item', () => {
let vm;
@@ -13,19 +14,21 @@ describe('Multi-file editor commit sidebar list item', () => {
f = file('test-file');
- vm = mountComponent(Component, {
+ store.state.entries[f.path] = f;
+
+ vm = createComponentWithStore(Component, store, {
file: f,
- });
+ }).$mount();
});
afterEach(() => {
vm.$destroy();
+
+ resetStore(store);
});
it('renders file path', () => {
- expect(
- vm.$el.querySelector('.multi-file-commit-list-path').textContent.trim(),
- ).toBe(f.path);
+ expect(vm.$el.querySelector('.multi-file-commit-list-path').textContent.trim()).toBe(f.path);
});
it('calls discardFileChanges when clicking discard button', () => {
@@ -36,25 +39,32 @@ describe('Multi-file editor commit sidebar list item', () => {
expect(vm.discardFileChanges).toHaveBeenCalled();
});
- it('opens a closed file in the editor when clicking the file path', () => {
+ it('opens a closed file in the editor when clicking the file path', done => {
spyOn(vm, 'openFileInEditor').and.callThrough();
- spyOn(vm, 'updateViewer');
spyOn(router, 'push');
vm.$el.querySelector('.multi-file-commit-list-path').click();
- expect(vm.openFileInEditor).toHaveBeenCalled();
- expect(router.push).toHaveBeenCalled();
+ setTimeout(() => {
+ expect(vm.openFileInEditor).toHaveBeenCalled();
+ expect(router.push).toHaveBeenCalled();
+
+ done();
+ });
});
- it('calls updateViewer with diff when clicking file', () => {
+ it('calls updateViewer with diff when clicking file', done => {
spyOn(vm, 'openFileInEditor').and.callThrough();
- spyOn(vm, 'updateViewer');
+ spyOn(vm, 'updateViewer').and.callThrough();
spyOn(router, 'push');
vm.$el.querySelector('.multi-file-commit-list-path').click();
- expect(vm.updateViewer).toHaveBeenCalledWith('diff');
+ setTimeout(() => {
+ expect(vm.updateViewer).toHaveBeenCalledWith('diff');
+
+ done();
+ });
});
describe('computed', () => {
diff --git a/spec/javascripts/ide/components/repo_tab_spec.js b/spec/javascripts/ide/components/repo_tab_spec.js
index ddb5204e3a7..8cabc6e8935 100644
--- a/spec/javascripts/ide/components/repo_tab_spec.js
+++ b/spec/javascripts/ide/components/repo_tab_spec.js
@@ -59,7 +59,7 @@ describe('RepoTab', () => {
vm.$el.querySelector('.multi-file-tab-close').click();
- expect(vm.closeFile).toHaveBeenCalledWith(vm.tab.path);
+ expect(vm.closeFile).toHaveBeenCalledWith(vm.tab);
});
it('changes icon on hover', done => {
diff --git a/spec/javascripts/ide/components/repo_tabs_spec.js b/spec/javascripts/ide/components/repo_tabs_spec.js
index 73ea7960485..cb785ba2cd3 100644
--- a/spec/javascripts/ide/components/repo_tabs_spec.js
+++ b/spec/javascripts/ide/components/repo_tabs_spec.js
@@ -17,6 +17,7 @@ describe('RepoTabs', () => {
files: openedFiles,
viewer: 'editor',
hasChanges: false,
+ activeFile: file('activeFile'),
hasMergeRequest: false,
});
openedFiles[0].active = true;
@@ -57,6 +58,7 @@ describe('RepoTabs', () => {
files: [],
viewer: 'editor',
hasChanges: false,
+ activeFile: file('activeFile'),
hasMergeRequest: false,
},
'#test-app',
diff --git a/spec/javascripts/ide/lib/common/model_manager_spec.js b/spec/javascripts/ide/lib/common/model_manager_spec.js
index 4381f6fcfd0..c00d590c580 100644
--- a/spec/javascripts/ide/lib/common/model_manager_spec.js
+++ b/spec/javascripts/ide/lib/common/model_manager_spec.js
@@ -27,9 +27,10 @@ describe('Multi-file editor library model manager', () => {
});
it('caches model by file path', () => {
- instance.addModel(file('path-name'));
+ const f = file('path-name');
+ instance.addModel(f);
- expect(instance.models.keys().next().value).toBe('path-name');
+ expect(instance.models.keys().next().value).toBe(f.key);
});
it('adds model into disposable', () => {
@@ -56,7 +57,7 @@ describe('Multi-file editor library model manager', () => {
instance.addModel(f);
expect(eventHub.$on).toHaveBeenCalledWith(
- `editor.update.model.dispose.${f.path}`,
+ `editor.update.model.dispose.${f.key}`,
jasmine.anything(),
);
});
@@ -68,9 +69,11 @@ describe('Multi-file editor library model manager', () => {
});
it('returns true when model exists', () => {
- instance.addModel(file('path-name'));
+ const f = file('path-name');
+
+ instance.addModel(f);
- expect(instance.hasCachedModel('path-name')).toBeTruthy();
+ expect(instance.hasCachedModel(f.key)).toBeTruthy();
});
});
@@ -103,7 +106,7 @@ describe('Multi-file editor library model manager', () => {
instance.removeCachedModel(f);
expect(eventHub.$off).toHaveBeenCalledWith(
- `editor.update.model.dispose.${f.path}`,
+ `editor.update.model.dispose.${f.key}`,
jasmine.anything(),
);
});
diff --git a/spec/javascripts/ide/lib/common/model_spec.js b/spec/javascripts/ide/lib/common/model_spec.js
index 7cd990adb53..8fc2fccb64c 100644
--- a/spec/javascripts/ide/lib/common/model_spec.js
+++ b/spec/javascripts/ide/lib/common/model_spec.js
@@ -32,14 +32,14 @@ describe('Multi-file editor library model', () => {
it('adds eventHub listener', () => {
expect(eventHub.$on).toHaveBeenCalledWith(
- `editor.update.model.dispose.${model.file.path}`,
+ `editor.update.model.dispose.${model.file.key}`,
jasmine.anything(),
);
});
describe('path', () => {
it('returns file path', () => {
- expect(model.path).toBe('path');
+ expect(model.path).toBe(model.file.key);
});
});
@@ -74,7 +74,7 @@ describe('Multi-file editor library model', () => {
model.onChange(() => {});
expect(model.events.size).toBe(1);
- expect(model.events.keys().next().value).toBe('path');
+ expect(model.events.keys().next().value).toBe(model.file.key);
});
it('calls callback on change', done => {
@@ -115,7 +115,7 @@ describe('Multi-file editor library model', () => {
model.dispose();
expect(eventHub.$off).toHaveBeenCalledWith(
- `editor.update.model.dispose.${model.file.path}`,
+ `editor.update.model.dispose.${model.file.key}`,
jasmine.anything(),
);
});
diff --git a/spec/javascripts/ide/lib/decorations/controller_spec.js b/spec/javascripts/ide/lib/decorations/controller_spec.js
index 092170d086a..aec325e26a9 100644
--- a/spec/javascripts/ide/lib/decorations/controller_spec.js
+++ b/spec/javascripts/ide/lib/decorations/controller_spec.js
@@ -36,9 +36,7 @@ describe('Multi-file editor library decorations controller', () => {
});
it('returns decorations by model URL', () => {
- controller.addDecorations(model, 'key', [
- { decoration: 'decorationValue' },
- ]);
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
const decorations = controller.getAllDecorationsForModel(model);
@@ -48,39 +46,29 @@ describe('Multi-file editor library decorations controller', () => {
describe('addDecorations', () => {
it('caches decorations in a new map', () => {
- controller.addDecorations(model, 'key', [
- { decoration: 'decorationValue' },
- ]);
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
expect(controller.decorations.size).toBe(1);
});
it('does not create new cache model', () => {
- controller.addDecorations(model, 'key', [
- { decoration: 'decorationValue' },
- ]);
- controller.addDecorations(model, 'key', [
- { decoration: 'decorationValue2' },
- ]);
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue2' }]);
expect(controller.decorations.size).toBe(1);
});
it('caches decorations by model URL', () => {
- controller.addDecorations(model, 'key', [
- { decoration: 'decorationValue' },
- ]);
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
expect(controller.decorations.size).toBe(1);
- expect(controller.decorations.keys().next().value).toBe('path');
+ expect(controller.decorations.keys().next().value).toBe('path--path');
});
it('calls decorate method', () => {
spyOn(controller, 'decorate');
- controller.addDecorations(model, 'key', [
- { decoration: 'decorationValue' },
- ]);
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
expect(controller.decorate).toHaveBeenCalled();
});
@@ -92,10 +80,7 @@ describe('Multi-file editor library decorations controller', () => {
controller.decorate(model);
- expect(controller.editor.instance.deltaDecorations).toHaveBeenCalledWith(
- [],
- [],
- );
+ expect(controller.editor.instance.deltaDecorations).toHaveBeenCalledWith([], []);
});
it('caches decorations', () => {
@@ -111,15 +96,13 @@ describe('Multi-file editor library decorations controller', () => {
controller.decorate(model);
- expect(controller.editorDecorations.keys().next().value).toBe('path');
+ expect(controller.editorDecorations.keys().next().value).toBe('path--path');
});
});
describe('dispose', () => {
it('clears cached decorations', () => {
- controller.addDecorations(model, 'key', [
- { decoration: 'decorationValue' },
- ]);
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
controller.dispose();
@@ -127,9 +110,7 @@ describe('Multi-file editor library decorations controller', () => {
});
it('clears cached editorDecorations', () => {
- controller.addDecorations(model, 'key', [
- { decoration: 'decorationValue' },
- ]);
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
controller.dispose();
diff --git a/spec/javascripts/ide/lib/diff/controller_spec.js b/spec/javascripts/ide/lib/diff/controller_spec.js
index c8f3e9f4830..ff73240734e 100644
--- a/spec/javascripts/ide/lib/diff/controller_spec.js
+++ b/spec/javascripts/ide/lib/diff/controller_spec.js
@@ -131,7 +131,7 @@ describe('Multi-file editor library dirty diff controller', () => {
it('adds decorations into decorations controller', () => {
spyOn(controller.decorationsController, 'addDecorations');
- controller.decorate({ data: { changes: [], path: 'path' } });
+ controller.decorate({ data: { changes: [], path: model.path } });
expect(
controller.decorationsController.addDecorations,
@@ -145,7 +145,7 @@ describe('Multi-file editor library dirty diff controller', () => {
);
controller.decorate({
- data: { changes: computeDiff('123', '1234'), path: 'path' },
+ data: { changes: computeDiff('123', '1234'), path: model.path },
});
expect(spy).toHaveBeenCalledWith(
diff --git a/spec/javascripts/ide/stores/actions/file_spec.js b/spec/javascripts/ide/stores/actions/file_spec.js
index 2f4516377cf..479ed7ce49e 100644
--- a/spec/javascripts/ide/stores/actions/file_spec.js
+++ b/spec/javascripts/ide/stores/actions/file_spec.js
@@ -29,7 +29,7 @@ describe('IDE store file actions', () => {
it('closes open files', done => {
store
- .dispatch('closeFile', localFile.path)
+ .dispatch('closeFile', localFile)
.then(() => {
expect(localFile.opened).toBeFalsy();
expect(localFile.active).toBeFalsy();
@@ -44,7 +44,7 @@ describe('IDE store file actions', () => {
store.state.changedFiles.push(localFile);
store
- .dispatch('closeFile', localFile.path)
+ .dispatch('closeFile', localFile)
.then(Vue.nextTick)
.then(() => {
expect(store.state.openFiles.length).toBe(0);
@@ -65,7 +65,7 @@ describe('IDE store file actions', () => {
store.state.entries[f.path] = f;
store
- .dispatch('closeFile', localFile.path)
+ .dispatch('closeFile', localFile)
.then(Vue.nextTick)
.then(() => {
expect(router.push).toHaveBeenCalledWith(`/project${f.url}`);
@@ -74,6 +74,22 @@ describe('IDE store file actions', () => {
})
.catch(done.fail);
});
+
+ it('removes file if it pending', done => {
+ store.state.openFiles.push({
+ ...localFile,
+ pending: true,
+ });
+
+ store
+ .dispatch('closeFile', localFile)
+ .then(() => {
+ expect(store.state.openFiles.length).toBe(0);
+
+ done();
+ })
+ .catch(done.fail);
+ });
});
describe('setFileActive', () => {
@@ -445,4 +461,113 @@ describe('IDE store file actions', () => {
.catch(done.fail);
});
});
+
+ describe('openPendingTab', () => {
+ let f;
+
+ beforeEach(() => {
+ f = {
+ ...file(),
+ projectId: '123',
+ };
+
+ store.state.entries[f.path] = f;
+ });
+
+ it('makes file pending in openFiles', done => {
+ store
+ .dispatch('openPendingTab', f)
+ .then(() => {
+ expect(store.state.openFiles[0].pending).toBe(true);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('returns true when opened', done => {
+ store
+ .dispatch('openPendingTab', f)
+ .then(added => {
+ expect(added).toBe(true);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('pushes router URL when added', done => {
+ store.state.currentBranchId = 'master';
+
+ store
+ .dispatch('openPendingTab', f)
+ .then(() => {
+ expect(router.push).toHaveBeenCalledWith('/project/123/tree/master/');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('calls scrollToTab', done => {
+ const scrollToTabSpy = jasmine.createSpy('scrollToTab');
+ const oldScrollToTab = store._actions.scrollToTab; // eslint-disable-line
+ store._actions.scrollToTab = [scrollToTabSpy]; // eslint-disable-line
+
+ store
+ .dispatch('openPendingTab', f)
+ .then(() => {
+ expect(scrollToTabSpy).toHaveBeenCalled();
+ store._actions.scrollToTab = oldScrollToTab; // eslint-disable-line
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('returns false when passed in file is active & viewer is diff', done => {
+ f.active = true;
+ store.state.openFiles.push(f);
+ store.state.viewer = 'diff';
+
+ store
+ .dispatch('openPendingTab', f)
+ .then(added => {
+ expect(added).toBe(false);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('removePendingTab', () => {
+ let f;
+
+ beforeEach(() => {
+ spyOn(eventHub, '$emit');
+
+ f = {
+ ...file('pendingFile'),
+ pending: true,
+ };
+ });
+
+ it('removes pending file from open files', done => {
+ store.state.openFiles.push(f);
+
+ store
+ .dispatch('removePendingTab', f)
+ .then(() => {
+ expect(store.state.openFiles.length).toBe(0);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('emits event to dispose model', done => {
+ store
+ .dispatch('removePendingTab', f)
+ .then(() => {
+ expect(eventHub.$emit).toHaveBeenCalledWith(`editor.update.model.dispose.${f.key}`);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
});
diff --git a/spec/javascripts/ide/stores/mutations/file_spec.js b/spec/javascripts/ide/stores/mutations/file_spec.js
index 8fec94e882a..88285ee409f 100644
--- a/spec/javascripts/ide/stores/mutations/file_spec.js
+++ b/spec/javascripts/ide/stores/mutations/file_spec.js
@@ -22,6 +22,21 @@ describe('IDE store file mutations', () => {
expect(localFile.active).toBeTruthy();
});
+
+ it('sets pending tab as not active', () => {
+ localState.openFiles.push({
+ ...localFile,
+ pending: true,
+ active: true,
+ });
+
+ mutations.SET_FILE_ACTIVE(localState, {
+ path: localFile.path,
+ active: true,
+ });
+
+ expect(localState.openFiles[0].active).toBe(false);
+ });
});
describe('TOGGLE_FILE_OPEN', () => {
@@ -178,4 +193,69 @@ describe('IDE store file mutations', () => {
expect(localFile.changed).toBeTruthy();
});
});
+
+ describe('ADD_PENDING_TAB', () => {
+ beforeEach(() => {
+ const f = {
+ ...file('openFile'),
+ path: 'openFile',
+ active: true,
+ opened: true,
+ };
+
+ localState.entries[f.path] = f;
+ localState.openFiles.push(f);
+ });
+
+ it('adds file into openFiles as pending', () => {
+ mutations.ADD_PENDING_TAB(localState, { file: localFile });
+
+ expect(localState.openFiles.length).toBe(2);
+ expect(localState.openFiles[1].pending).toBe(true);
+ expect(localState.openFiles[1].key).toBe(`pending-${localFile.key}`);
+ });
+
+ it('updates open file to pending', () => {
+ mutations.ADD_PENDING_TAB(localState, { file: localState.openFiles[0] });
+
+ expect(localState.openFiles.length).toBe(1);
+ });
+
+ it('updates pending open file to active', () => {
+ localState.openFiles.push({
+ ...localFile,
+ pending: true,
+ });
+
+ mutations.ADD_PENDING_TAB(localState, { file: localFile });
+
+ expect(localState.openFiles[1].pending).toBe(true);
+ expect(localState.openFiles[1].active).toBe(true);
+ });
+
+ it('sets all openFiles to not active', () => {
+ mutations.ADD_PENDING_TAB(localState, { file: localFile });
+
+ expect(localState.openFiles.length).toBe(2);
+
+ localState.openFiles.forEach(f => {
+ if (f.pending) {
+ expect(f.active).toBe(true);
+ } else {
+ expect(f.active).toBe(false);
+ }
+ });
+ });
+ });
+
+ describe('REMOVE_PENDING_TAB', () => {
+ it('removes pending tab from openFiles', () => {
+ localFile.key = 'testing';
+ localState.openFiles.push(localFile);
+
+ mutations.REMOVE_PENDING_TAB(localState, localFile);
+
+ expect(localState.openFiles.length).toBe(0);
+ });
+ });
});
diff --git a/spec/lib/banzai/filter/commit_trailers_filter_spec.rb b/spec/lib/banzai/filter/commit_trailers_filter_spec.rb
new file mode 100644
index 00000000000..1fd145116df
--- /dev/null
+++ b/spec/lib/banzai/filter/commit_trailers_filter_spec.rb
@@ -0,0 +1,171 @@
+require 'spec_helper'
+require 'ffaker'
+
+describe Banzai::Filter::CommitTrailersFilter do
+ include FilterSpecHelper
+ include CommitTrailersSpecHelper
+
+ let(:secondary_email) { create(:email, :confirmed) }
+ let(:user) { create(:user) }
+
+ let(:trailer) { "#{FFaker::Lorem.word}-by:"}
+
+ let(:commit_message) { trailer_line(trailer, user.name, user.email) }
+ let(:commit_message_html) { commit_html(commit_message) }
+
+ context 'detects' do
+ let(:email) { FFaker::Internet.email }
+
+ it 'trailers in the form of *-by and replace users with links' do
+ doc = filter(commit_message_html)
+
+ expect_to_have_user_link_with_avatar(doc, user: user, trailer: trailer)
+ end
+
+ it 'trailers prefixed with whitespaces' do
+ message_html = commit_html("\n\r #{commit_message}")
+
+ doc = filter(message_html)
+
+ expect_to_have_user_link_with_avatar(doc, user: user, trailer: trailer)
+ end
+
+ it 'GitLab users via a secondary email' do
+ _, message_html = build_commit_message(
+ trailer: trailer,
+ name: secondary_email.user.name,
+ email: secondary_email.email
+ )
+
+ doc = filter(message_html)
+
+ expect_to_have_user_link_with_avatar(
+ doc,
+ user: secondary_email.user,
+ trailer: trailer,
+ email: secondary_email.email
+ )
+ end
+
+ it 'non GitLab users and replaces them with mailto links' do
+ _, message_html = build_commit_message(
+ trailer: trailer,
+ name: FFaker::Name.name,
+ email: email
+ )
+
+ doc = filter(message_html)
+
+ expect_to_have_mailto_link(doc, email: email, trailer: trailer)
+ end
+
+ it 'multiple trailers in the same message' do
+ different_trailer = "#{FFaker::Lorem.word}-by:"
+ message = commit_html %(
+ #{commit_message}
+ #{trailer_line(different_trailer, FFaker::Name.name, email)}
+ )
+
+ doc = filter(message)
+
+ expect_to_have_user_link_with_avatar(doc, user: user, trailer: trailer)
+ expect_to_have_mailto_link(doc, email: email, trailer: different_trailer)
+ end
+
+ context 'special names' do
+ where(:name) do
+ [
+ 'John S. Doe',
+ 'L33t H@x0r'
+ ]
+ end
+
+ with_them do
+ it do
+ message, message_html = build_commit_message(
+ trailer: trailer,
+ name: name,
+ email: email
+ )
+
+ doc = filter(message_html)
+
+ expect_to_have_mailto_link(doc, email: email, trailer: trailer)
+ expect(doc.text).to match Regexp.escape(message)
+ end
+ end
+ end
+ end
+
+ context "ignores" do
+ it 'commit messages without trailers' do
+ exp = message = commit_html(FFaker::Lorem.sentence)
+ doc = filter(message)
+
+ expect(doc.to_html).to match Regexp.escape(exp)
+ end
+
+ it 'trailers that are inline the commit message body' do
+ message = commit_html %(
+ #{FFaker::Lorem.sentence} #{commit_message} #{FFaker::Lorem.sentence}
+ )
+
+ doc = filter(message)
+
+ expect(doc.css('a').size).to eq 0
+ end
+ end
+
+ context "structure" do
+ it 'preserves the commit trailer structure' do
+ doc = filter(commit_message_html)
+
+ expect_to_have_user_link_with_avatar(doc, user: user, trailer: trailer)
+ expect(doc.text).to match Regexp.escape(commit_message)
+ end
+
+ it 'preserves the original name used in the commit message' do
+ message, message_html = build_commit_message(
+ trailer: trailer,
+ name: FFaker::Name.name,
+ email: user.email
+ )
+
+ doc = filter(message_html)
+
+ expect_to_have_user_link_with_avatar(doc, user: user, trailer: trailer)
+ expect(doc.text).to match Regexp.escape(message)
+ end
+
+ it 'preserves the original email used in the commit message' do
+ message, message_html = build_commit_message(
+ trailer: trailer,
+ name: secondary_email.user.name,
+ email: secondary_email.email
+ )
+
+ doc = filter(message_html)
+
+ expect_to_have_user_link_with_avatar(
+ doc,
+ user: secondary_email.user,
+ trailer: trailer,
+ email: secondary_email.email
+ )
+ expect(doc.text).to match Regexp.escape(message)
+ end
+
+ it 'only replaces trailer lines not the full commit message' do
+ commit_body = FFaker::Lorem.paragraph
+ message = commit_html %(
+ #{commit_body}
+ #{commit_message}
+ )
+
+ doc = filter(message)
+
+ expect_to_have_user_link_with_avatar(doc, user: user, trailer: trailer)
+ expect(doc.text).to include(commit_body)
+ end
+ end
+end
diff --git a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb
index eeb82822f68..a1dd72c498f 100644
--- a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb
@@ -196,6 +196,41 @@ describe Banzai::Filter::MergeRequestReferenceFilter do
end
end
+ context 'URL reference for a commit' do
+ let(:mr) { create(:merge_request, :with_diffs) }
+ let(:reference) do
+ urls.project_merge_request_url(mr.project, mr) + "/diffs?commit_id=#{mr.diff_head_sha}"
+ end
+ let(:commit) { mr.commits.find { |commit| commit.sha == mr.diff_head_sha } }
+
+ it 'links to a valid reference' do
+ doc = reference_filter("See #{reference}")
+
+ expect(doc.css('a').first.attr('href'))
+ .to eq reference
+ end
+
+ it 'has valid text' do
+ doc = reference_filter("See #{reference}")
+
+ expect(doc.text).to eq("See #{mr.to_reference(full: true)} (#{commit.short_id})")
+ end
+
+ it 'has valid title attribute' do
+ doc = reference_filter("See #{reference}")
+
+ expect(doc.css('a').first.attr('title')).to eq(commit.title)
+ end
+
+ it 'ignores invalid commit short_ids on link text' do
+ invalidate_commit_reference =
+ urls.project_merge_request_url(mr.project, mr) + "/diffs?commit_id=12345678"
+ doc = reference_filter("See #{invalidate_commit_reference}")
+
+ expect(doc.text).to eq("See #{mr.to_reference(full: true)} (diffs)")
+ end
+ end
+
context 'cross-project URL reference' do
let(:namespace) { create(:namespace, name: 'cross-reference') }
let(:project2) { create(:project, :public, namespace: namespace) }
diff --git a/spec/lib/gitlab/auth/ldap/access_spec.rb b/spec/lib/gitlab/auth/ldap/access_spec.rb
index 9b3916bf9e3..6b251d824f7 100644
--- a/spec/lib/gitlab/auth/ldap/access_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/access_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Gitlab::Auth::LDAP::Access do
+ include LdapHelpers
+
let(:access) { described_class.new user }
let(:user) { create(:omniauth_user) }
@@ -32,8 +34,10 @@ describe Gitlab::Auth::LDAP::Access do
end
context 'when the user is found' do
+ let(:ldap_user) { Gitlab::Auth::LDAP::Person.new(Net::LDAP::Entry.new, 'ldapmain') }
+
before do
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(:ldap_user)
+ allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(ldap_user)
end
context 'and the user is disabled via active directory' do
@@ -120,6 +124,22 @@ describe Gitlab::Auth::LDAP::Access do
end
end
end
+
+ context 'when the connection fails' do
+ before do
+ raise_ldap_connection_error
+ end
+
+ it 'does not block the user' do
+ access.allowed?
+
+ expect(user.ldap_blocked?).to be_falsey
+ end
+
+ it 'denies access' do
+ expect(access.allowed?).to be_falsey
+ end
+ end
end
describe '#block_user' do
diff --git a/spec/lib/gitlab/auth/ldap/adapter_spec.rb b/spec/lib/gitlab/auth/ldap/adapter_spec.rb
index 10c60d792bd..3eeaf3862f6 100644
--- a/spec/lib/gitlab/auth/ldap/adapter_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/adapter_spec.rb
@@ -124,16 +124,36 @@ describe Gitlab::Auth::LDAP::Adapter do
context "when the search raises an LDAP exception" do
before do
+ allow(adapter).to receive(:renew_connection_adapter).and_return(ldap)
allow(ldap).to receive(:search) { raise Net::LDAP::Error, "some error" }
allow(Rails.logger).to receive(:warn)
end
- it { is_expected.to eq [] }
+ context 'retries the operation' do
+ before do
+ stub_const("#{described_class}::MAX_SEARCH_RETRIES", 3)
+ end
+
+ it 'as many times as MAX_SEARCH_RETRIES' do
+ expect(ldap).to receive(:search).exactly(3).times
+ expect { subject }.to raise_error(Gitlab::Auth::LDAP::LDAPConnectionError)
+ end
+
+ context 'when no more retries' do
+ before do
+ stub_const("#{described_class}::MAX_SEARCH_RETRIES", 1)
+ end
- it 'logs the error' do
- subject
- expect(Rails.logger).to have_received(:warn).with(
- "LDAP search raised exception Net::LDAP::Error: some error")
+ it 'raises the exception' do
+ expect { subject }.to raise_error(Gitlab::Auth::LDAP::LDAPConnectionError)
+ end
+
+ it 'logs the error' do
+ expect { subject }.to raise_error(Gitlab::Auth::LDAP::LDAPConnectionError)
+ expect(Rails.logger).to have_received(:warn).with(
+ "LDAP search raised exception Net::LDAP::Error: some error")
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index 0c71f1d8ca6..64f3d09a25b 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Gitlab::Auth::OAuth::User do
+ include LdapHelpers
+
let(:oauth_user) { described_class.new(auth_hash) }
let(:gl_user) { oauth_user.gl_user }
let(:uid) { 'my-uid' }
@@ -38,10 +40,6 @@ describe Gitlab::Auth::OAuth::User do
end
describe '#save' do
- def stub_ldap_config(messages)
- allow(Gitlab::Auth::LDAP::Config).to receive_messages(messages)
- end
-
let(:provider) { 'twitter' }
describe 'when account exists on server' do
@@ -269,20 +267,47 @@ describe Gitlab::Auth::OAuth::User do
end
context 'when an LDAP person is not found by uid' do
- it 'tries to find an LDAP person by DN and adds the omniauth identity to the user' do
+ it 'tries to find an LDAP person by email and adds the omniauth identity to the user' do
allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(nil)
- allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(ldap_user)
+ allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_email).and_return(ldap_user)
+
+ oauth_user.save
+
+ identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
+ expect(identities_as_hash).to match_array(result_identities(dn, uid))
+ end
+
+ context 'when also not found by email' do
+ it 'tries to find an LDAP person by DN and adds the omniauth identity to the user' do
+ allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(nil)
+ allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_email).and_return(nil)
+ allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(ldap_user)
+
+ oauth_user.save
+
+ identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
+ expect(identities_as_hash).to match_array(result_identities(dn, uid))
+ end
+ end
+ end
+ def result_identities(dn, uid)
+ [
+ { provider: 'ldapmain', extern_uid: dn },
+ { provider: 'twitter', extern_uid: uid }
+ ]
+ end
+
+ context 'when there is an LDAP connection error' do
+ before do
+ raise_ldap_connection_error
+ end
+
+ it 'does not save the identity' do
oauth_user.save
identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
- expect(identities_as_hash)
- .to match_array(
- [
- { provider: 'ldapmain', extern_uid: dn },
- { provider: 'twitter', extern_uid: uid }
- ]
- )
+ expect(identities_as_hash).to match_array([{ provider: 'twitter', extern_uid: uid }])
end
end
end
@@ -739,4 +764,19 @@ describe Gitlab::Auth::OAuth::User do
expect(oauth_user.find_user).to eql gl_user
end
end
+
+ describe '#find_ldap_person' do
+ context 'when LDAP connection fails' do
+ before do
+ raise_ldap_connection_error
+ end
+
+ it 'returns nil' do
+ adapter = Gitlab::Auth::LDAP::Adapter.new('ldapmain')
+ hash = OmniAuth::AuthHash.new(uid: 'whatever', provider: 'ldapmain')
+
+ expect(oauth_user.send(:find_ldap_person, hash, adapter)).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index a6a1d9e619f..c63120b0b29 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -137,7 +137,7 @@ describe Gitlab::BitbucketImport::Importer do
it 'imports to the project disk_path' do
expect(project.wiki).to receive(:repository_exists?) { false }
expect(importer.gitlab_shell).to receive(:import_repository).with(
- project.repository_storage_path,
+ project.repository_storage,
project.wiki.disk_path,
project.import_url + '/wiki'
)
diff --git a/spec/lib/gitlab/git/gitlab_projects_spec.rb b/spec/lib/gitlab/git/gitlab_projects_spec.rb
index dfccc15a4f3..8b715d717c1 100644
--- a/spec/lib/gitlab/git/gitlab_projects_spec.rb
+++ b/spec/lib/gitlab/git/gitlab_projects_spec.rb
@@ -16,7 +16,7 @@ describe Gitlab::Git::GitlabProjects do
let(:tmp_repos_path) { TestEnv.repos_path }
let(:repo_name) { project.disk_path + '.git' }
let(:tmp_repo_path) { File.join(tmp_repos_path, repo_name) }
- let(:gl_projects) { build_gitlab_projects(tmp_repos_path, repo_name) }
+ let(:gl_projects) { build_gitlab_projects(TestEnv::REPOS_STORAGE, repo_name) }
describe '#initialize' do
it { expect(gl_projects.shard_path).to eq(tmp_repos_path) }
@@ -223,11 +223,12 @@ describe Gitlab::Git::GitlabProjects do
end
describe '#fork_repository' do
+ let(:dest_repos) { TestEnv::REPOS_STORAGE }
let(:dest_repos_path) { tmp_repos_path }
let(:dest_repo_name) { File.join('@hashed', 'aa', 'bb', 'xyz.git') }
let(:dest_repo) { File.join(dest_repos_path, dest_repo_name) }
- subject { gl_projects.fork_repository(dest_repos_path, dest_repo_name) }
+ subject { gl_projects.fork_repository(dest_repos, dest_repo_name) }
before do
FileUtils.mkdir_p(dest_repos_path)
@@ -268,7 +269,12 @@ describe Gitlab::Git::GitlabProjects do
# that is not very straight-forward so I'm leaving this test here for now till
# https://gitlab.com/gitlab-org/gitlab-ce/issues/41393 is fixed.
context 'different storages' do
- let(:dest_repos_path) { File.join(File.dirname(tmp_repos_path), 'alternative') }
+ let(:dest_repos) { 'alternative' }
+ let(:dest_repos_path) { File.join(File.dirname(tmp_repos_path), dest_repos) }
+
+ before do
+ stub_storage_settings(dest_repos => { 'path' => dest_repos_path })
+ end
it 'forks the repo' do
is_expected.to be_truthy
diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
index 1f0f1fdd7da..879b1d9fb0f 100644
--- a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
@@ -9,7 +9,7 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do
:project,
import_url: 'foo.git',
import_source: 'foo/bar',
- repository_storage_path: 'foo',
+ repository_storage: 'foo',
disk_path: 'foo',
repository: repository,
create_wiki: true
diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb
index ea5ce58e34b..7ff2c0639ec 100644
--- a/spec/lib/gitlab/shell_spec.rb
+++ b/spec/lib/gitlab/shell_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::Shell do
allow(Project).to receive(:find).and_return(project)
allow(gitlab_shell).to receive(:gitlab_projects)
- .with(project.repository_storage_path, project.disk_path + '.git')
+ .with(project.repository_storage, project.disk_path + '.git')
.and_return(gitlab_projects)
end
@@ -487,21 +487,21 @@ describe Gitlab::Shell do
describe '#fork_repository' do
subject do
gitlab_shell.fork_repository(
- project.repository_storage_path,
+ project.repository_storage,
project.disk_path,
- 'new/storage',
+ 'nfs-file05',
'fork/path'
)
end
it 'returns true when the command succeeds' do
- expect(gitlab_projects).to receive(:fork_repository).with('new/storage', 'fork/path.git') { true }
+ expect(gitlab_projects).to receive(:fork_repository).with('nfs-file05', 'fork/path.git') { true }
is_expected.to be_truthy
end
it 'return false when the command fails' do
- expect(gitlab_projects).to receive(:fork_repository).with('new/storage', 'fork/path.git') { false }
+ expect(gitlab_projects).to receive(:fork_repository).with('nfs-file05', 'fork/path.git') { false }
is_expected.to be_falsy
end
@@ -661,7 +661,7 @@ describe Gitlab::Shell do
it 'returns true when the command succeeds' do
expect(gitlab_projects).to receive(:import_project).with(import_url, timeout) { true }
- result = gitlab_shell.import_repository(project.repository_storage_path, project.disk_path, import_url)
+ result = gitlab_shell.import_repository(project.repository_storage, project.disk_path, import_url)
expect(result).to be_truthy
end
@@ -671,7 +671,7 @@ describe Gitlab::Shell do
expect(gitlab_projects).to receive(:import_project) { false }
expect do
- gitlab_shell.import_repository(project.repository_storage_path, project.disk_path, import_url)
+ gitlab_shell.import_repository(project.repository_storage, project.disk_path, import_url)
end.to raise_error(Gitlab::Shell::Error, "error")
end
end
diff --git a/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb b/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb
new file mode 100644
index 00000000000..fed9aeba30c
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb
@@ -0,0 +1,31 @@
+require 'spec_helper'
+
+describe Gitlab::SidekiqLogging::JSONFormatter do
+ let(:hash_input) { { foo: 1, bar: 'test' } }
+ let(:message) { 'This is a test' }
+ let(:timestamp) { Time.now }
+
+ it 'wraps a Hash' do
+ result = subject.call('INFO', timestamp, 'my program', hash_input)
+
+ data = JSON.parse(result)
+ expected_output = hash_input.stringify_keys
+ expected_output['severity'] = 'INFO'
+ expected_output['time'] = timestamp.utc.iso8601(3)
+
+ expect(data).to eq(expected_output)
+ end
+
+ it 'wraps a String' do
+ result = subject.call('DEBUG', timestamp, 'my string', message)
+
+ data = JSON.parse(result)
+ expected_output = {
+ severity: 'DEBUG',
+ time: timestamp.utc.iso8601(3),
+ message: message
+ }
+
+ expect(data).to eq(expected_output.stringify_keys)
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
new file mode 100644
index 00000000000..2421b1e5a1a
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -0,0 +1,101 @@
+require 'spec_helper'
+
+describe Gitlab::SidekiqLogging::StructuredLogger do
+ describe '#call' do
+ let(:timestamp) { Time.new('2018-01-01 12:00:00').utc }
+ let(:job) do
+ {
+ "class" => "TestWorker",
+ "args" => [1234, 'hello'],
+ "retry" => false,
+ "queue" => "cronjob:test_queue",
+ "queue_namespace" => "cronjob",
+ "jid" => "da883554ee4fe414012f5f42",
+ "created_at" => timestamp.to_f,
+ "enqueued_at" => timestamp.to_f
+ }
+ end
+ let(:logger) { double() }
+ let(:start_payload) do
+ job.merge(
+ 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: start',
+ 'job_status' => 'start',
+ 'pid' => Process.pid,
+ 'created_at' => timestamp.iso8601(3),
+ 'enqueued_at' => timestamp.iso8601(3)
+ )
+ end
+ let(:end_payload) do
+ start_payload.merge(
+ 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec',
+ 'job_status' => 'done',
+ 'duration' => 0.0,
+ "completed_at" => timestamp.iso8601(3)
+ )
+ end
+ let(:exception_payload) do
+ end_payload.merge(
+ 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec',
+ 'job_status' => 'fail',
+ 'error' => ArgumentError,
+ 'error_message' => 'some exception'
+ )
+ end
+
+ before do
+ allow(Sidekiq).to receive(:logger).and_return(logger)
+
+ allow(subject).to receive(:current_time).and_return(timestamp.to_f)
+ end
+
+ subject { described_class.new }
+
+ context 'with SIDEKIQ_LOG_ARGUMENTS enabled' do
+ before do
+ stub_env('SIDEKIQ_LOG_ARGUMENTS', '1')
+ end
+
+ it 'logs start and end of job' do
+ Timecop.freeze(timestamp) do
+ expect(logger).to receive(:info).with(start_payload).ordered
+ expect(logger).to receive(:info).with(end_payload).ordered
+ expect(subject).to receive(:log_job_start).and_call_original
+ expect(subject).to receive(:log_job_done).and_call_original
+
+ subject.call(job, 'test_queue') { }
+ end
+ end
+
+ it 'logs an exception in job' do
+ Timecop.freeze(timestamp) do
+ expect(logger).to receive(:info).with(start_payload)
+ # This excludes the exception_backtrace
+ expect(logger).to receive(:warn).with(hash_including(exception_payload))
+ expect(subject).to receive(:log_job_start).and_call_original
+ expect(subject).to receive(:log_job_done).and_call_original
+
+ expect do
+ subject.call(job, 'test_queue') do
+ raise ArgumentError, 'some exception'
+ end
+ end.to raise_error(ArgumentError)
+ end
+ end
+ end
+
+ context 'with SIDEKIQ_LOG_ARGUMENTS disabled' do
+ it 'logs start and end of job' do
+ Timecop.freeze(timestamp) do
+ start_payload.delete('args')
+
+ expect(logger).to receive(:info).with(start_payload).ordered
+ expect(logger).to receive(:info).with(end_payload).ordered
+ expect(subject).to receive(:log_job_start).and_call_original
+ expect(subject).to receive(:log_job_done).and_call_original
+
+ subject.call(job, 'test_queue') { }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index b7ed8be69fc..c536dab2681 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -368,9 +368,7 @@ describe CommitStatus do
'rspec:windows 0 : / 1' => 'rspec:windows',
'rspec:windows 0 : / 1 name' => 'rspec:windows name',
'0 1 name ruby' => 'name ruby',
- '0 :/ 1 name ruby' => 'name ruby',
- 'golang test 1.8' => 'golang test',
- '1.9 golang test' => 'golang test'
+ '0 :/ 1 name ruby' => 'name ruby'
}
tests.each do |name, group_name|
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index f8874d14e3f..05693f067e1 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -176,7 +176,7 @@ describe Issuable do
end
end
- describe "#sort" do
+ describe "#sort_by_attribute" do
let(:project) { create(:project) }
context "by milestone due date" do
@@ -193,12 +193,12 @@ describe Issuable do
let!(:issue3) { create(:issue, project: project) }
it "sorts desc" do
- issues = project.issues.sort('milestone_due_desc')
+ issues = project.issues.sort_by_attribute('milestone_due_desc')
expect(issues).to match_array([issue2, issue1, issue, issue3])
end
it "sorts asc" do
- issues = project.issues.sort('milestone_due_asc')
+ issues = project.issues.sort_by_attribute('milestone_due_asc')
expect(issues).to match_array([issue1, issue2, issue, issue3])
end
end
@@ -210,7 +210,7 @@ describe Issuable do
it 'has no duplicates across pages' do
sorted_issue_ids = 1.upto(10).map do |i|
- project.issues.sort('milestone_due_desc').page(i).per(1).first.id
+ project.issues.sort_by_attribute('milestone_due_desc').page(i).per(1).first.id
end
expect(sorted_issue_ids).to eq(sorted_issue_ids.uniq)
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index fef868ac0f2..0e560be9eaa 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -1645,7 +1645,7 @@ describe Project do
before do
allow_any_instance_of(Gitlab::Shell).to receive(:import_repository)
- .with(project.repository_storage_path, project.disk_path, project.import_url)
+ .with(project.repository_storage, project.disk_path, project.import_url)
.and_return(true)
expect_any_instance_of(Repository).to receive(:after_import)
@@ -1798,10 +1798,7 @@ describe Project do
let(:project) { forked_project_link.forked_to_project }
it 'schedules a RepositoryForkWorker job' do
- expect(RepositoryForkWorker).to receive(:perform_async).with(
- project.id,
- forked_from_project.repository_storage_path,
- forked_from_project.disk_path).and_return(import_jid)
+ expect(RepositoryForkWorker).to receive(:perform_async).with(project.id).and_return(import_jid)
expect(project.add_import_job).to eq(import_jid)
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 100418da804..4027c420e47 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -1451,7 +1451,7 @@ describe User do
end
end
- describe '#sort' do
+ describe '#sort_by_attribute' do
before do
described_class.delete_all
@user = create :user, created_at: Date.today, current_sign_in_at: Date.today, name: 'Alpha'
@@ -1460,7 +1460,7 @@ describe User do
end
context 'when sort by recent_sign_in' do
- let(:users) { described_class.sort('recent_sign_in') }
+ let(:users) { described_class.sort_by_attribute('recent_sign_in') }
it 'sorts users by recent sign-in time' do
expect(users.first).to eq(@user)
@@ -1473,7 +1473,7 @@ describe User do
end
context 'when sort by oldest_sign_in' do
- let(:users) { described_class.sort('oldest_sign_in') }
+ let(:users) { described_class.sort_by_attribute('oldest_sign_in') }
it 'sorts users by the oldest sign-in time' do
expect(users.first).to eq(@user1)
@@ -1486,15 +1486,15 @@ describe User do
end
it 'sorts users in descending order by their creation time' do
- expect(described_class.sort('created_desc').first).to eq(@user)
+ expect(described_class.sort_by_attribute('created_desc').first).to eq(@user)
end
it 'sorts users in ascending order by their creation time' do
- expect(described_class.sort('created_asc').first).to eq(@user2)
+ expect(described_class.sort_by_attribute('created_asc').first).to eq(@user2)
end
it 'sorts users by id in descending order when nil is passed' do
- expect(described_class.sort(nil).first).to eq(@user2)
+ expect(described_class.sort_by_attribute(nil).first).to eq(@user2)
end
end
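
Note: the sort → sort_by_attribute rename above touches both Issuable and User. A minimal sketch of the renamed scope, using only classes and sort keys that appear in these specs:

    # Hedged example; receivers and keys are taken from the specs above.
    User.sort_by_attribute('recent_sign_in')                 # most recently signed-in users first
    Project.first.issues.sort_by_attribute('milestone_due_desc')
    User.sort_by_attribute(nil)                              # per the spec, falls back to id descending
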
diff --git a/spec/requests/api/boards_spec.rb b/spec/requests/api/boards_spec.rb
index c6c10025f7f..92b614b087e 100644
--- a/spec/requests/api/boards_spec.rb
+++ b/spec/requests/api/boards_spec.rb
@@ -48,5 +48,36 @@ describe API::Boards do
expect(json_response['label']['name']).to eq(group_label.title)
expect(json_response['position']).to eq(3)
end
+
+ it 'creates a new board list for ancestor group labels' do
+ group = create(:group)
+ sub_group = create(:group, parent: group)
+ group_label = create(:group_label, group: group)
+ board_parent.update(group: sub_group)
+ group.add_developer(user)
+ sub_group.add_developer(user)
+
+ post api(url, user), label_id: group_label.id
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['label']['name']).to eq(group_label.title)
+ end
+ end
+
+ describe "POST /groups/:id/boards/lists", :nested_groups do
+ set(:group) { create(:group) }
+ set(:board_parent) { create(:group, parent: group) }
+ let(:url) { "/groups/#{board_parent.id}/boards/#{board.id}/lists" }
+ set(:board) { create(:board, group: board_parent) }
+
+ it 'creates a new board list for ancestor group labels' do
+ group.add_developer(user)
+ group_label = create(:group_label, group: group)
+
+ post api(url, user), label_id: group_label.id
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['label']['name']).to eq(group_label.title)
+ end
end
end
diff --git a/spec/requests/api/features_spec.rb b/spec/requests/api/features_spec.rb
index 267058d98ee..c5354c2d639 100644
--- a/spec/requests/api/features_spec.rb
+++ b/spec/requests/api/features_spec.rb
@@ -1,8 +1,8 @@
require 'spec_helper'
describe API::Features do
- let(:user) { create(:user) }
- let(:admin) { create(:admin) }
+ set(:user) { create(:user) }
+ set(:admin) { create(:admin) }
before do
Flipper.unregister_groups
@@ -249,4 +249,43 @@ describe API::Features do
end
end
end
+
+ describe 'DELETE /feature/:name' do
+ let(:feature_name) { 'my_feature' }
+
+ context 'when the user has no access' do
+ it 'returns a 401 for anonymous users' do
+ delete api("/features/#{feature_name}")
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+
+ it 'returns a 403 for users' do
+ delete api("/features/#{feature_name}", user)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'when the user has access' do
+ it 'returns 204 when the value is not set' do
+ delete api("/features/#{feature_name}", admin)
+
+ expect(response).to have_gitlab_http_status(204)
+ end
+
+ context 'when the gate value was set' do
+ before do
+ Feature.get(feature_name).enable
+ end
+
+ it 'deletes an enabled feature' do
+ delete api("/features/#{feature_name}", admin)
+
+ expect(response).to have_gitlab_http_status(204)
+ expect(Feature.get(feature_name)).not_to be_enabled
+ end
+ end
+ end
+ end
end
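
For context, the new DELETE endpoint covered above clears a feature gate entirely. A rough sketch of the behaviour, with the feature name purely illustrative:

    # Hedged sketch mirroring the specs above ('my_feature' is an example name).
    Feature.get('my_feature').enable
    delete api('/features/my_feature', admin)    # responds with 204
    Feature.get('my_feature').enabled?           # => false, the gate value is gone
    delete api('/features/my_feature', user)     # 403 for non-admins, 401 when anonymous
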
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index d73a42f48ad..2ec29a79e93 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -452,7 +452,8 @@ describe API::Projects do
only_allow_merge_if_pipeline_succeeds: false,
request_access_enabled: true,
only_allow_merge_if_all_discussions_are_resolved: false,
- ci_config_path: 'a/custom/path'
+ ci_config_path: 'a/custom/path',
+ merge_method: 'ff'
})
post api('/projects', user), project
@@ -569,6 +570,22 @@ describe API::Projects do
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_truthy
end
+ it 'sets the merge method of a project to rebase merge' do
+ project = attributes_for(:project, merge_method: 'rebase_merge')
+
+ post api('/projects', user), project
+
+ expect(json_response['merge_method']).to eq('rebase_merge')
+ end
+
+ it 'rejects invalid values for merge_method' do
+ project = attributes_for(:project, merge_method: 'totally_not_valid_method')
+
+ post api('/projects', user), project
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+
it 'ignores import_url when it is nil' do
project = attributes_for(:project, import_url: nil)
@@ -823,6 +840,7 @@ describe API::Projects do
expect(json_response['shared_with_groups'][0]['group_access_level']).to eq(link.group_access)
expect(json_response['only_allow_merge_if_pipeline_succeeds']).to eq(project.only_allow_merge_if_pipeline_succeeds)
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
+ expect(json_response['merge_method']).to eq(project.merge_method.to_s)
end
it 'returns a project by path name' do
@@ -1474,6 +1492,26 @@ describe API::Projects do
expect(json_response[k.to_s]).to eq(v)
end
end
+
+ it 'updates merge_method' do
+ project_param = { merge_method: 'ff' }
+
+ put api("/projects/#{project3.id}", user), project_param
+
+ expect(response).to have_gitlab_http_status(200)
+
+ project_param.each_pair do |k, v|
+ expect(json_response[k.to_s]).to eq(v)
+ end
+ end
+
+ it 'rejects updates when merge_method is invalid' do
+ project_param = { merge_method: 'invalid' }
+
+ put api("/projects/#{project3.id}", user), project_param
+
+ expect(response).to have_gitlab_http_status(400)
+ end
end
context 'when authenticated as project master' do
@@ -1491,6 +1529,7 @@ describe API::Projects do
wiki_enabled: true,
snippets_enabled: true,
merge_requests_enabled: true,
+ merge_method: 'ff',
description: 'new description' }
put api("/projects/#{project3.id}", user4), project_param
diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb
index eef860821e5..bcc3e3a2678 100644
--- a/spec/requests/projects/cycle_analytics_events_spec.rb
+++ b/spec/requests/projects/cycle_analytics_events_spec.rb
@@ -23,7 +23,7 @@ describe 'cycle analytics events' do
it 'lists the issue events' do
get project_cycle_analytics_issue_path(project, format: :json)
- first_issue_iid = project.issues.sort(:created_desc).pluck(:iid).first.to_s
+ first_issue_iid = project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['iid']).to eq(first_issue_iid)
@@ -32,7 +32,7 @@ describe 'cycle analytics events' do
it 'lists the plan events' do
get project_cycle_analytics_plan_path(project, format: :json)
- first_mr_short_sha = project.merge_requests.sort(:created_asc).first.commits.first.short_id
+ first_mr_short_sha = project.merge_requests.sort_by_attribute(:created_asc).first.commits.first.short_id
expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['short_sha']).to eq(first_mr_short_sha)
@@ -43,7 +43,7 @@ describe 'cycle analytics events' do
expect(json_response['events']).not_to be_empty
- first_mr_iid = project.merge_requests.sort(:created_desc).pluck(:iid).first.to_s
+ first_mr_iid = project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
expect(json_response['events'].first['iid']).to eq(first_mr_iid)
end
@@ -58,7 +58,7 @@ describe 'cycle analytics events' do
it 'lists the review events' do
get project_cycle_analytics_review_path(project, format: :json)
- first_mr_iid = project.merge_requests.sort(:created_desc).pluck(:iid).first.to_s
+ first_mr_iid = project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['iid']).to eq(first_mr_iid)
@@ -74,7 +74,7 @@ describe 'cycle analytics events' do
it 'lists the production events' do
get project_cycle_analytics_production_path(project, format: :json)
- first_issue_iid = project.issues.sort(:created_desc).pluck(:iid).first.to_s
+ first_issue_iid = project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['iid']).to eq(first_issue_iid)
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index e8cecf361ff..beabba99cf5 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -108,7 +108,8 @@ RSpec.configure do |config|
allow_any_instance_of(Gitlab::Git::GitlabProjects).to receive(:fork_repository).and_wrap_original do |m, *args|
m.call(*args)
- shard_path, repository_relative_path = args
+ shard_name, repository_relative_path = args
+ shard_path = Gitlab.config.repositories.storages.fetch(shard_name).legacy_disk_path
# We can't leave the hooks in place after a fork, as those would fail in tests
# The "internal" API is not available
FileUtils.rm_rf(File.join(shard_path, repository_relative_path, 'hooks'))
diff --git a/spec/support/commit_trailers_spec_helper.rb b/spec/support/commit_trailers_spec_helper.rb
new file mode 100644
index 00000000000..add359946db
--- /dev/null
+++ b/spec/support/commit_trailers_spec_helper.rb
@@ -0,0 +1,41 @@
+module CommitTrailersSpecHelper
+ extend ActiveSupport::Concern
+
+ def expect_to_have_user_link_with_avatar(doc, user:, trailer:, email: nil)
+ wrapper = find_user_wrapper(doc, trailer)
+
+ expect_to_have_links_with_url_and_avatar(wrapper, urls.user_url(user), email || user.email)
+ expect(wrapper.attribute('data-user').value).to eq user.id.to_s
+ end
+
+ def expect_to_have_mailto_link(doc, email:, trailer:)
+ wrapper = find_user_wrapper(doc, trailer)
+
+ expect_to_have_links_with_url_and_avatar(wrapper, "mailto:#{CGI.escape_html(email)}", email)
+ end
+
+ def expect_to_have_links_with_url_and_avatar(doc, url, email)
+ expect(doc).not_to be_nil
+ expect(doc.xpath("a[position()<3 and @href='#{url}']").size).to eq 2
+ expect(doc.xpath("a[position()=3 and @href='mailto:#{CGI.escape_html(email)}']").size).to eq 1
+ expect(doc.css('img').size).to eq 1
+ end
+
+ def find_user_wrapper(doc, trailer)
+ doc.xpath("descendant-or-self::node()[@data-trailer='#{trailer}']").first
+ end
+
+ def build_commit_message(trailer:, name:, email:)
+ message = trailer_line(trailer, name, email)
+
+ [message, commit_html(message)]
+ end
+
+ def trailer_line(trailer, name, email)
+ "#{trailer} #{name} <#{email}>"
+ end
+
+ def commit_html(message)
+ "<pre>#{CGI.escape_html(message)}</pre>"
+ end
+end
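
A short usage sketch for the new helper (trailer, name and email are illustrative values):

    # Hedged example of build_commit_message from the helper above.
    message, html = build_commit_message(
      trailer: 'Signed-off-by:',
      name: 'Jane Doe',
      email: 'jane@example.com'
    )
    # message => "Signed-off-by: Jane Doe <jane@example.com>"
    # html    => "<pre>Signed-off-by: Jane Doe &lt;jane@example.com&gt;</pre>"
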
diff --git a/spec/support/filtered_search_helpers.rb b/spec/support/filtered_search_helpers.rb
index f3f96bd1f0a..5f42ff77fb2 100644
--- a/spec/support/filtered_search_helpers.rb
+++ b/spec/support/filtered_search_helpers.rb
@@ -21,6 +21,29 @@ module FilteredSearchHelpers
end
end
+ # Select a label by clicking it in the search dropdown instead
+ # of typing label names into the input.
+ def select_label_on_dropdown(label_title)
+ input_filtered_search("label:", submit: false)
+
+ within('#js-dropdown-label') do
+ wait_for_requests
+
+ find('li', text: label_title).click
+ end
+
+ filtered_search.send_keys(:enter)
+ end
+
+ def expect_issues_list_count(open_count, closed_count = 0)
+ all_count = open_count + closed_count
+
+ expect(page).to have_issuable_counts(open: open_count, closed: closed_count, all: all_count)
+ page.within '.issues-list' do
+ expect(page).to have_selector('.issue', count: open_count)
+ end
+ end
+
# Enables input to be added character by character
def input_filtered_search_keys(search_term)
# Add an extra space to engage visual tokens
diff --git a/spec/support/ldap_helpers.rb b/spec/support/ldap_helpers.rb
index 081ce0ad7b7..0e87b3d359d 100644
--- a/spec/support/ldap_helpers.rb
+++ b/spec/support/ldap_helpers.rb
@@ -41,4 +41,9 @@ module LdapHelpers
entry
end
+
+ def raise_ldap_connection_error
+ allow_any_instance_of(Gitlab::Auth::LDAP::Adapter)
+ .to receive(:ldap_search).and_raise(Gitlab::Auth::LDAP::LDAPConnectionError)
+ end
end
diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb
index f14e69b1041..d87f265cdf0 100644
--- a/spec/support/test_env.rb
+++ b/spec/support/test_env.rb
@@ -62,6 +62,7 @@ module TestEnv
}.freeze
TMP_TEST_PATH = Rails.root.join('tmp', 'tests', '**')
+ REPOS_STORAGE = 'default'.freeze
# Test environment
#
@@ -225,7 +226,7 @@ module TestEnv
end
def repos_path
- Gitlab.config.repositories.storages.default.legacy_disk_path
+ Gitlab.config.repositories.storages[REPOS_STORAGE].legacy_disk_path
end
def backup_path
diff --git a/spec/workers/repository_fork_worker_spec.rb b/spec/workers/repository_fork_worker_spec.rb
index 6c66658d8c3..4b3c1736ea0 100644
--- a/spec/workers/repository_fork_worker_spec.rb
+++ b/spec/workers/repository_fork_worker_spec.rb
@@ -9,70 +9,91 @@ describe RepositoryForkWorker do
describe "#perform" do
let(:project) { create(:project, :repository) }
- let(:fork_project) { create(:project, :repository, :import_scheduled, forked_from_project: project) }
let(:shell) { Gitlab::Shell.new }
+ let(:fork_project) { create(:project, :repository, :import_scheduled, forked_from_project: project) }
- before do
- allow(subject).to receive(:gitlab_shell).and_return(shell)
- end
+ shared_examples 'RepositoryForkWorker performing' do
+ before do
+ allow(subject).to receive(:gitlab_shell).and_return(shell)
+ end
- def perform!
- subject.perform(fork_project.id, '/test/path', project.disk_path)
- end
+ def expect_fork_repository
+ expect(shell).to receive(:fork_repository).with(
+ 'default',
+ project.disk_path,
+ fork_project.repository_storage,
+ fork_project.disk_path
+ )
+ end
- def expect_fork_repository
- expect(shell).to receive(:fork_repository).with(
- '/test/path',
- project.disk_path,
- fork_project.repository_storage_path,
- fork_project.disk_path
- )
- end
+ describe 'when a worker was reset without cleanup' do
+ let(:jid) { '12345678' }
- describe 'when a worker was reset without cleanup' do
- let(:jid) { '12345678' }
+ it 'creates a new repository from a fork' do
+ allow(subject).to receive(:jid).and_return(jid)
- it 'creates a new repository from a fork' do
- allow(subject).to receive(:jid).and_return(jid)
+ expect_fork_repository.and_return(true)
+ perform!
+ end
+ end
+
+ it "creates a new repository from a fork" do
expect_fork_repository.and_return(true)
perform!
end
- end
- it "creates a new repository from a fork" do
- expect_fork_repository.and_return(true)
+ it 'protects the default branch' do
+ expect_fork_repository.and_return(true)
- perform!
- end
+ perform!
+
+ expect(fork_project.protected_branches.first.name).to eq(fork_project.default_branch)
+ end
+
+ it 'flushes various caches' do
+ expect_fork_repository.and_return(true)
- it 'protects the default branch' do
- expect_fork_repository.and_return(true)
+ expect_any_instance_of(Repository).to receive(:expire_emptiness_caches)
+ .and_call_original
- perform!
+ expect_any_instance_of(Repository).to receive(:expire_exists_cache)
+ .and_call_original
- expect(fork_project.protected_branches.first.name).to eq(fork_project.default_branch)
- end
+ perform!
+ end
+
+ it "handles bad fork" do
+ error_message = "Unable to fork project #{fork_project.id} for repository #{project.disk_path} -> #{fork_project.disk_path}"
- it 'flushes various caches' do
- expect_fork_repository.and_return(true)
+ expect_fork_repository.and_return(false)
- expect_any_instance_of(Repository).to receive(:expire_emptiness_caches)
- .and_call_original
+ expect { perform! }.to raise_error(StandardError, error_message)
+ end
+ end
- expect_any_instance_of(Repository).to receive(:expire_exists_cache)
- .and_call_original
+ context 'only project ID passed' do
+ def perform!
+ subject.perform(fork_project.id)
+ end
- perform!
+ it_behaves_like 'RepositoryForkWorker performing'
end
- it "handles bad fork" do
- error_message = "Unable to fork project #{fork_project.id} for repository #{project.disk_path} -> #{fork_project.disk_path}"
+ context 'project ID, storage and repo paths passed' do
+ def perform!
+ subject.perform(fork_project.id, TestEnv.repos_path, project.disk_path)
+ end
- expect_fork_repository.and_return(false)
+ it_behaves_like 'RepositoryForkWorker performing'
- expect { perform! }.to raise_error(StandardError, error_message)
+ it 'logs a message about forking with old-style arguments' do
+ allow(Rails.logger).to receive(:info).with(anything) # Allow unrelated log messages through
+ expect(Rails.logger).to receive(:info).with("Project #{fork_project.id} is being forked using old-style arguments.")
+
+ perform!
+ end
end
end
end
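
The reworked worker spec above covers both calling conventions; roughly:

    # Hedged sketch of the two invocation styles exercised by the shared examples above.
    RepositoryForkWorker.perform_async(fork_project.id)                                        # new style: project ID only
    RepositoryForkWorker.perform_async(fork_project.id, TestEnv.repos_path, project.disk_path) # legacy arguments
    # The second form still works but logs that old-style arguments were used.
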