author     Mike Greiling <mike@pixelcog.com>   2018-02-16 16:00:03 -0600
committer  Mike Greiling <mike@pixelcog.com>   2018-02-16 16:00:03 -0600
commit     8e65c13a586031928c681c4926d059df23ad5753 (patch)
tree       df99f6a592a2d3f7f5fabb4c85c6b90f0343ca68 /spec/models
parent     fa260ac8400b16bc19acc5740b47c596c1c903c0 (diff)
parent     b236348388c46c0550ec6844df35ec2689c4060b (diff)
Merge branch 'master' into chart.html.haml-refactor

* master: (484 commits)
  migrate admin:users:* to static bundle
  correct for missing break statement in dispatcher.js
  alias create and update actions to new and edit
  migrate projects:merge_requests:edit to static bundle
  migrate projects:merge_requests:creations:diffs to static bundle
  migrate projects:merge_requests:creations:new to static bundle
  migrate projects:issues:new and projects:issues:edit to static bundle
  migrate projects:branches:index to static bundle
  migrate projects:branches:new to static bundle
  migrate projects:compare:show to static bundle
  migrate projects:environments:metrics to static bundle
  migrate projects:milestones:* and groups:milestones:* to static bundle
  migrate explore:groups:index to static bundle
  migrate explore:projects:* to static bundle
  migrate dashboard:projects:* to static bundle
  migrate admin:jobs:index to static bundle
  migrate dashboard:todos:index to static bundle
  migrate groups:merge_requests to static bundle
  migrate groups:issues to static bundle
  migrate dashboard:merge_requests to static bundle
  ...
Diffstat (limited to 'spec/models')
-rw-r--r--  spec/models/application_setting_spec.rb                  |  34
-rw-r--r--  spec/models/ci/build_spec.rb                             |   9
-rw-r--r--  spec/models/ci/runner_spec.rb                            | 108
-rw-r--r--  spec/models/clusters/applications/prometheus_spec.rb     |  72
-rw-r--r--  spec/models/concerns/redis_cacheable_spec.rb             |  39
-rw-r--r--  spec/models/group_spec.rb                                |   8
-rw-r--r--  spec/models/identity_spec.rb                             |  33
-rw-r--r--  spec/models/key_spec.rb                                  |  51
-rw-r--r--  spec/models/lfs_file_lock_spec.rb                        |  57
-rw-r--r--  spec/models/namespace_spec.rb                            | 206
-rw-r--r--  spec/models/project_auto_devops_spec.rb                  |  43
-rw-r--r--  spec/models/project_services/prometheus_service_spec.rb  | 237
-rw-r--r--  spec/models/project_spec.rb                              |  78
-rw-r--r--  spec/models/repository_spec.rb                           |  34
-rw-r--r--  spec/models/user_spec.rb                                 |  33
-rw-r--r--  spec/models/wiki_page_spec.rb                            | 229
16 files changed, 993 insertions, 278 deletions
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index ef480e7a80a..ae2d34750a7 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -114,6 +114,40 @@ describe ApplicationSetting do
it { expect(setting.repository_storages).to eq(['default']) }
end
+ context 'auto_devops_domain setting' do
+ context 'when auto_devops_enabled? is true' do
+ before do
+ setting.update(auto_devops_enabled: true)
+ end
+
+ it 'can be blank' do
+ setting.update(auto_devops_domain: '')
+
+ expect(setting).to be_valid
+ end
+
+ context 'with a valid value' do
+ before do
+ setting.update(auto_devops_domain: 'domain.com')
+ end
+
+ it 'is valid' do
+ expect(setting).to be_valid
+ end
+ end
+
+ context 'with an invalid value' do
+ before do
+ setting.update(auto_devops_domain: 'definitelynotahostname')
+ end
+
+ it 'is invalid' do
+ expect(setting).to be_invalid
+ end
+ end
+ end
+ end
+
context 'circuitbreaker settings' do
[:circuitbreaker_failure_count_threshold,
:circuitbreaker_check_interval,
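
The examples above imply that ApplicationSetting validates auto_devops_domain as a hostname, but only while Auto DevOps is enabled instance-wide. A minimal sketch of the model side under that assumption (the validator and its options are not shown in this diff):

    # app/models/application_setting.rb -- hypothetical excerpt
    validates :auto_devops_domain,
              allow_blank: true,
              hostname: { allow_numeric_hostname: true },  # assumed validator options
              if: :auto_devops_enabled?
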
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 0b3d5c6a0bd..2b6b6a61182 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -1413,6 +1413,7 @@ describe Ci::Build do
[
{ key: 'CI', value: 'true', public: true },
{ key: 'GITLAB_CI', value: 'true', public: true },
+ { key: 'GITLAB_FEATURES', value: project.namespace.features.join(','), public: true },
{ key: 'CI_SERVER_NAME', value: 'GitLab', public: true },
{ key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true },
{ key: 'CI_SERVER_REVISION', value: Gitlab::REVISION, public: true },
@@ -1589,7 +1590,7 @@ describe Ci::Build do
context 'when the branch is protected' do
before do
- create(:protected_branch, project: build.project, name: build.ref)
+ allow(build.project).to receive(:protected_for?).with(build.ref).and_return(true)
end
it { is_expected.to include(protected_variable) }
@@ -1597,7 +1598,7 @@ describe Ci::Build do
context 'when the tag is protected' do
before do
- create(:protected_tag, project: build.project, name: build.ref)
+ allow(build.project).to receive(:protected_for?).with(build.ref).and_return(true)
end
it { is_expected.to include(protected_variable) }
@@ -1634,7 +1635,7 @@ describe Ci::Build do
context 'when the branch is protected' do
before do
- create(:protected_branch, project: build.project, name: build.ref)
+ allow(build.project).to receive(:protected_for?).with(build.ref).and_return(true)
end
it { is_expected.to include(protected_variable) }
@@ -1642,7 +1643,7 @@ describe Ci::Build do
context 'when the tag is protected' do
before do
- create(:protected_tag, project: build.project, name: build.ref)
+ allow(build.project).to receive(:protected_for?).with(build.ref).and_return(true)
end
it { is_expected.to include(protected_variable) }
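
These examples replace real protected_branch/protected_tag records with a stub of Project#protected_for?. Based on the stubs added to project_spec.rb further down in this diff, the predicate presumably behaves roughly like this (a sketch, not the exact implementation):

    # app/models/project.rb -- hypothetical sketch
    def protected_for?(ref)
      if repository.branch_exists?(ref)
        ProtectedBranch.protected?(self, ref)
      elsif repository.tag_exists?(ref)
        ProtectedTag.protected?(self, ref)
      else
        false
      end
    end
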
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index b2b64e6ff48..ab170e6351c 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -95,28 +95,68 @@ describe Ci::Runner do
subject { runner.online? }
- context 'never contacted' do
+ before do
+ allow_any_instance_of(described_class).to receive(:cached_attribute).and_call_original
+ allow_any_instance_of(described_class).to receive(:cached_attribute)
+ .with(:platform).and_return("darwin")
+ end
+
+ context 'no cache value' do
before do
- runner.contacted_at = nil
+ stub_redis_runner_contacted_at(nil)
end
- it { is_expected.to be_falsey }
- end
+ context 'never contacted' do
+ before do
+ runner.contacted_at = nil
+ end
- context 'contacted long time ago time' do
- before do
- runner.contacted_at = 1.year.ago
+ it { is_expected.to be_falsey }
+ end
+
+ context 'contacted long time ago time' do
+ before do
+ runner.contacted_at = 1.year.ago
+ end
+
+ it { is_expected.to be_falsey }
end
- it { is_expected.to be_falsey }
+ context 'contacted 1s ago' do
+ before do
+ runner.contacted_at = 1.second.ago
+ end
+
+ it { is_expected.to be_truthy }
+ end
end
- context 'contacted 1s ago' do
- before do
- runner.contacted_at = 1.second.ago
+ context 'with cache value' do
+ context 'contacted long time ago time' do
+ before do
+ runner.contacted_at = 1.year.ago
+ stub_redis_runner_contacted_at(1.year.ago.to_s)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'contacted 1s ago' do
+ before do
+ runner.contacted_at = 50.minutes.ago
+ stub_redis_runner_contacted_at(1.second.ago.to_s)
+ end
+
+ it { is_expected.to be_truthy }
end
+ end
- it { is_expected.to be_truthy }
+ def stub_redis_runner_contacted_at(value)
+ Gitlab::Redis::SharedState.with do |redis|
+ cache_key = runner.send(:cache_attribute_key)
+ expect(redis).to receive(:get).with(cache_key)
+ .and_return({ contacted_at: value }.to_json).at_least(:once)
+ end
end
end
@@ -361,6 +401,50 @@ describe Ci::Runner do
end
end
+ describe '#update_cached_info' do
+ let(:runner) { create(:ci_runner) }
+
+ subject { runner.update_cached_info(architecture: '18-bit') }
+
+ context 'when database was updated recently' do
+ before do
+ runner.contacted_at = Time.now
+ end
+
+ it 'updates cache' do
+ expect_redis_update
+
+ subject
+ end
+ end
+
+ context 'when database was not updated recently' do
+ before do
+ runner.contacted_at = 2.hours.ago
+ end
+
+ it 'updates database' do
+ expect_redis_update
+
+ expect { subject }.to change { runner.reload.read_attribute(:contacted_at) }
+ .and change { runner.reload.read_attribute(:architecture) }
+ end
+
+ it 'updates cache' do
+ expect_redis_update
+
+ subject
+ end
+ end
+
+ def expect_redis_update
+ Gitlab::Redis::SharedState.with do |redis|
+ redis_key = runner.send(:cache_attribute_key)
+ expect(redis).to receive(:set).with(redis_key, anything, any_args)
+ end
+ end
+ end
+
describe '#destroy' do
let(:runner) { create(:ci_runner) }
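
The runner examples above assert two things: #online? prefers a fresh contacted_at from Redis over a stale database value, and #update_cached_info only writes to the database when the row has not been touched recently. A minimal sketch consistent with those assertions, assuming the RedisCacheable concern added later in this diff (the constant and the attribute whitelist are assumptions):

    # app/models/ci/runner.rb -- hypothetical sketch
    UPDATE_DB_RUNNER_INFO_EVERY = 40.minutes

    # read contacted_at through the cache first, falling back to the DB column
    def contacted_at
      cached = cached_attribute(:contacted_at)
      cached ? Time.parse(cached) : read_attribute(:contacted_at)
    end

    def update_cached_info(values)
      values = values&.slice(:version, :revision, :platform, :architecture) || {}
      values[:contacted_at] = Time.now

      cache_attributes(values)

      # touch the database only when it has not been updated recently
      if persist_cached_data?
        assign_attributes(values)
        save if changed?
      end
    end

    def persist_cached_data?
      real_contacted_at = read_attribute(:contacted_at)
      real_contacted_at.nil? || (Time.now - real_contacted_at) >= UPDATE_DB_RUNNER_INFO_EVERY
    end
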
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index 696099f7cf7..01037919530 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -6,6 +6,24 @@ describe Clusters::Applications::Prometheus do
include_examples 'cluster application specs', described_class
+ describe 'transition to installed' do
+ let(:project) { create(:project) }
+ let(:cluster) { create(:cluster, projects: [project]) }
+ let(:prometheus_service) { double('prometheus_service') }
+
+ subject { create(:clusters_applications_prometheus, :installing, cluster: cluster) }
+
+ before do
+ allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
+ end
+
+ it 'ensures Prometheus service is activated' do
+ expect(prometheus_service).to receive(:update).with(active: true)
+
+ subject.make_installed
+ end
+ end
+
describe "#chart_values_file" do
subject { create(:clusters_applications_prometheus).chart_values_file }
@@ -13,4 +31,58 @@ describe Clusters::Applications::Prometheus do
expect(subject).to eq("#{Rails.root}/vendor/prometheus/values.yaml")
end
end
+
+ describe '#proxy_client' do
+ context 'cluster is nil' do
+ it 'returns nil' do
+ expect(subject.cluster).to be_nil
+ expect(subject.proxy_client).to be_nil
+ end
+ end
+
+ context "cluster doesn't have kubeclient" do
+ let(:cluster) { create(:cluster) }
+ subject { create(:clusters_applications_prometheus, cluster: cluster) }
+
+ it 'returns nil' do
+ expect(subject.proxy_client).to be_nil
+ end
+ end
+
+ context 'cluster has kubeclient' do
+ let(:kubernetes_url) { 'http://example.com' }
+ let(:k8s_discover_response) do
+ {
+ resources: [
+ {
+ name: 'service',
+ kind: 'Service'
+ }
+ ]
+ }
+ end
+
+ let(:kube_client) { Kubeclient::Client.new(kubernetes_url) }
+
+ let(:cluster) { create(:cluster) }
+ subject { create(:clusters_applications_prometheus, cluster: cluster) }
+
+ before do
+ allow(kube_client.rest_client).to receive(:get).and_return(k8s_discover_response.to_json)
+ allow(subject.cluster).to receive(:kubeclient).and_return(kube_client)
+ end
+
+ it 'creates proxy prometheus rest client' do
+ expect(subject.proxy_client).to be_instance_of(RestClient::Resource)
+ end
+
+ it 'creates proper url' do
+ expect(subject.proxy_client.url).to eq('http://example.com/api/v1/proxy/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80')
+ end
+
+ it 'copies options and headers from kube client to proxy client' do
+ expect(subject.proxy_client.options).to eq(kube_client.rest_client.options.merge(headers: kube_client.headers))
+ end
+ end
+ end
end
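
The #proxy_client examples build a REST client that reaches Prometheus through the Kubernetes API proxy. A rough sketch of what the method presumably does; the service name, port and namespace are taken from the URL expected above, while the method body itself is an assumption:

    # app/models/clusters/applications/prometheus.rb -- hypothetical sketch
    def proxy_client
      return unless kube_client

      proxy_url = kube_client.proxy_url('service',
                                        'prometheus-prometheus-server',
                                        80,
                                        'gitlab-managed-apps')

      RestClient::Resource.new(proxy_url,
                               kube_client.rest_client.options.merge(headers: kube_client.headers))
    end

    def kube_client
      cluster&.kubeclient
    end
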
diff --git a/spec/models/concerns/redis_cacheable_spec.rb b/spec/models/concerns/redis_cacheable_spec.rb
new file mode 100644
index 00000000000..3d7963120b6
--- /dev/null
+++ b/spec/models/concerns/redis_cacheable_spec.rb
@@ -0,0 +1,39 @@
+require 'spec_helper'
+
+describe RedisCacheable do
+ let(:model) { double }
+
+ before do
+ model.extend(described_class)
+ allow(model).to receive(:cache_attribute_key).and_return('key')
+ end
+
+ describe '#cached_attribute' do
+ let(:payload) { { attribute: 'value' } }
+
+ subject { model.cached_attribute(payload.keys.first) }
+
+ it 'gets the cache attribute' do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis).to receive(:get).with('key')
+ .and_return(payload.to_json)
+ end
+
+ expect(subject).to eq(payload.values.first)
+ end
+ end
+
+ describe '#cache_attributes' do
+ let(:values) { { name: 'new_name' } }
+
+ subject { model.cache_attributes(values) }
+
+ it 'sets the cache attributes' do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis).to receive(:set).with('key', values.to_json, anything)
+ end
+
+ subject
+ end
+ end
+end
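
The concern under test stores a model's attributes as a single JSON blob in Redis, keyed per record, using Gitlab::Redis::SharedState as the backing store. A minimal sketch consistent with these examples (the key format and expiry are assumptions):

    # app/models/concerns/redis_cacheable.rb -- hypothetical sketch
    module RedisCacheable
      CACHED_ATTRIBUTES_EXPIRY_TIME = 24.hours

      def cached_attribute(attribute)
        (cached_attributes || {})[attribute]
      end

      def cache_attributes(values)
        Gitlab::Redis::SharedState.with do |redis|
          redis.set(cache_attribute_key, values.to_json, ex: CACHED_ATTRIBUTES_EXPIRY_TIME)
        end
      end

      private

      # e.g. "cache:Ci::Runner:42:attributes" -- exact format is an assumption
      def cache_attribute_key
        "cache:#{self.class.name}:#{id}:attributes"
      end

      def cached_attributes
        Gitlab::Redis::SharedState.with do |redis|
          data = redis.get(cache_attribute_key)
          JSON.parse(data, symbolize_names: true) if data
        end
      end
    end
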
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 338fb314ee9..4f16b73ef38 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -549,7 +549,7 @@ describe Group do
context 'when the ref is a protected branch' do
before do
- create(:protected_branch, name: 'ref', project: project)
+ allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
it_behaves_like 'ref is protected'
@@ -557,7 +557,7 @@ describe Group do
context 'when the ref is a protected tag' do
before do
- create(:protected_tag, name: 'ref', project: project)
+ allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
it_behaves_like 'ref is protected'
@@ -571,6 +571,10 @@ describe Group do
let(:variable_child_2) { create(:ci_group_variable, group: group_child_2) }
let(:variable_child_3) { create(:ci_group_variable, group: group_child_3) }
+ before do
+ allow(project).to receive(:protected_for?).with('ref').and_return(true)
+ end
+
it 'returns all variables belong to the group and parent groups' do
expected_array1 = [protected_variable, secret_variable]
expected_array2 = [variable_child, variable_child_2, variable_child_3]
diff --git a/spec/models/identity_spec.rb b/spec/models/identity_spec.rb
index 7c66c98231b..a5ce245c21d 100644
--- a/spec/models/identity_spec.rb
+++ b/spec/models/identity_spec.rb
@@ -70,5 +70,38 @@ describe Identity do
end
end
end
+
+ context 'after_destroy' do
+ let!(:user) { create(:user) }
+ let(:ldap_identity) { create(:identity, provider: 'ldapmain', extern_uid: 'uid=john smith,ou=people,dc=example,dc=com', user: user) }
+ let(:ldap_user_synced_attributes) { { provider: 'ldapmain', name_synced: true, email_synced: true } }
+ let(:other_provider_user_synced_attributes) { { provider: 'other', name_synced: true, email_synced: true } }
+
+ describe 'if user synced attributes metadata provider' do
+ context 'matches the identity provider' do
+ it 'removes the user synced attributes' do
+ user.create_user_synced_attributes_metadata(ldap_user_synced_attributes)
+
+ expect(user.user_synced_attributes_metadata.provider).to eq 'ldapmain'
+
+ ldap_identity.destroy
+
+ expect(user.reload.user_synced_attributes_metadata).to be_nil
+ end
+ end
+
+ context 'does not match the identity provider' do
+ it 'does not remove the user synced attributes' do
+ user.create_user_synced_attributes_metadata(other_provider_user_synced_attributes)
+
+ expect(user.user_synced_attributes_metadata.provider).to eq 'other'
+
+ ldap_identity.destroy
+
+ expect(user.reload.user_synced_attributes_metadata.provider).to eq 'other'
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index bf5703ac986..7398fd25aa8 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -72,52 +72,15 @@ describe Key, :mailer do
expect(build(:key)).to be_valid
end
- it 'rejects the unfingerprintable key (not a key)' do
- expect(build(:key, key: 'ssh-rsa an-invalid-key==')).not_to be_valid
- end
-
- where(:factory, :chars, :expected_sections) do
- [
- [:key, ["\n", "\r\n"], 3],
- [:key, [' ', ' '], 3],
- [:key_without_comment, [' ', ' '], 2]
- ]
- end
-
- with_them do
- let!(:key) { create(factory) }
- let!(:original_fingerprint) { key.fingerprint }
-
- it 'accepts a key with blank space characters after stripping them' do
- modified_key = key.key.insert(100, chars.first).insert(40, chars.last)
- _, content = modified_key.split
-
- key.update!(key: modified_key)
-
- expect(key).to be_valid
- expect(key.key.split.size).to eq(expected_sections)
-
- expect(content).not_to match(/\s/)
- expect(original_fingerprint).to eq(key.fingerprint)
- end
- end
- end
-
- context 'validate size' do
- where(:key_content, :result) do
- [
- [Spec::Support::Helpers::KeyGeneratorHelper.new(512).generate, false],
- [Spec::Support::Helpers::KeyGeneratorHelper.new(8192).generate, false],
- [Spec::Support::Helpers::KeyGeneratorHelper.new(1024).generate, true]
- ]
+ it 'accepts a key with newline characters after stripping them' do
+ key = build(:key)
+ key.key = key.key.insert(100, "\n")
+ key.key = key.key.insert(40, "\r\n")
+ expect(key).to be_valid
end
- with_them do
- it 'validates the size of the key' do
- key = build(:key, key: key_content)
-
- expect(key.valid?).to eq(result)
- end
+ it 'rejects the unfingerprintable key (not a key)' do
+ expect(build(:key, key: 'ssh-rsa an-invalid-key==')).not_to be_valid
end
end
diff --git a/spec/models/lfs_file_lock_spec.rb b/spec/models/lfs_file_lock_spec.rb
new file mode 100644
index 00000000000..ce87b01b49c
--- /dev/null
+++ b/spec/models/lfs_file_lock_spec.rb
@@ -0,0 +1,57 @@
+require 'rails_helper'
+
+describe LfsFileLock do
+ set(:lfs_file_lock) { create(:lfs_file_lock) }
+ subject { lfs_file_lock }
+
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:user) }
+
+ it { is_expected.to validate_presence_of(:project_id) }
+ it { is_expected.to validate_presence_of(:user_id) }
+ it { is_expected.to validate_presence_of(:path) }
+
+ describe '#can_be_unlocked_by?' do
+ let(:developer) { create(:user) }
+ let(:master) { create(:user) }
+
+ before do
+ project = lfs_file_lock.project
+
+ project.add_developer(developer)
+ project.add_master(master)
+ end
+
+ context "when it's forced" do
+ it 'can be unlocked by the author' do
+ user = lfs_file_lock.user
+
+ expect(lfs_file_lock.can_be_unlocked_by?(user, true)).to eq(true)
+ end
+
+ it 'can be unlocked by a master' do
+ expect(lfs_file_lock.can_be_unlocked_by?(master, true)).to eq(true)
+ end
+
+ it "can't be unlocked by other user" do
+ expect(lfs_file_lock.can_be_unlocked_by?(developer, true)).to eq(false)
+ end
+ end
+
+ context "when it isn't forced" do
+ it 'can be unlocked by the author' do
+ user = lfs_file_lock.user
+
+ expect(lfs_file_lock.can_be_unlocked_by?(user)).to eq(true)
+ end
+
+ it "can't be unlocked by a master" do
+ expect(lfs_file_lock.can_be_unlocked_by?(master)).to eq(false)
+ end
+
+ it "can't be unlocked by other user" do
+ expect(lfs_file_lock.can_be_unlocked_by?(developer)).to eq(false)
+ end
+ end
+ end
+end
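
The lock rules exercised above: the lock's author can always unlock it, and only users allowed to administer the project may force-unlock someone else's lock. A rough sketch of the predicate (the exact permission check is an assumption):

    # app/models/lfs_file_lock.rb -- hypothetical sketch
    def can_be_unlocked_by?(current_user, forced = false)
      return true if current_user == user  # the author can always unlock their own lock

      forced && current_user.can?(:admin_project, project)
    end
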
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 191b60e4383..e626efd054d 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -168,84 +168,105 @@ describe Namespace do
end
describe '#move_dir', :request_store do
- let(:namespace) { create(:namespace) }
- let!(:project) { create(:project_empty_repo, namespace: namespace) }
+ shared_examples "namespace restrictions" do
+ context "when any project has container images" do
+ let(:container_repository) { create(:container_repository) }
- it "raises error when directory exists" do
- expect { namespace.move_dir }.to raise_error("namespace directory cannot be moved")
- end
+ before do
+ stub_container_registry_config(enabled: true)
+ stub_container_registry_tags(repository: :any, tags: ['tag'])
- it "moves dir if path changed" do
- namespace.update_attributes(path: namespace.full_path + '_new')
+ create(:project, namespace: namespace, container_repositories: [container_repository])
- expect(gitlab_shell.exists?(project.repository_storage_path, "#{namespace.path}/#{project.path}.git")).to be_truthy
- end
+ allow(namespace).to receive(:path_was).and_return(namespace.path)
+ allow(namespace).to receive(:path).and_return('new_path')
+ end
- context "when any project has container images" do
- let(:container_repository) { create(:container_repository) }
+ it 'raises an error about not movable project' do
+ expect { namespace.move_dir }.to raise_error(/Namespace cannot be moved/)
+ end
+ end
+ end
- before do
- stub_container_registry_config(enabled: true)
- stub_container_registry_tags(repository: :any, tags: ['tag'])
+ context 'legacy storage' do
+ let(:namespace) { create(:namespace) }
+ let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: namespace) }
- create(:project, namespace: namespace, container_repositories: [container_repository])
+ it_behaves_like 'namespace restrictions'
- allow(namespace).to receive(:path_was).and_return(namespace.path)
- allow(namespace).to receive(:path).and_return('new_path')
+ it "raises error when directory exists" do
+ expect { namespace.move_dir }.to raise_error("namespace directory cannot be moved")
end
- it 'raises an error about not movable project' do
- expect { namespace.move_dir }.to raise_error(/Namespace cannot be moved/)
+ it "moves dir if path changed" do
+ namespace.update_attributes(path: namespace.full_path + '_new')
+
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{namespace.path}/#{project.path}.git")).to be_truthy
end
- end
- context 'with subgroups' do
- let(:parent) { create(:group, name: 'parent', path: 'parent') }
- let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
- let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child, skip_disk_validation: true) }
- let(:uploads_dir) { FileUploader.root }
- let(:pages_dir) { File.join(TestEnv.pages_path) }
+ context 'with subgroups' do
+ let(:parent) { create(:group, name: 'parent', path: 'parent') }
+ let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
+ let!(:project) { create(:project_empty_repo, :legacy_storage, path: 'the-project', namespace: child, skip_disk_validation: true) }
+ let(:uploads_dir) { FileUploader.root }
+ let(:pages_dir) { File.join(TestEnv.pages_path) }
- before do
- FileUtils.mkdir_p(File.join(uploads_dir, 'parent', 'child', 'the-project'))
- FileUtils.mkdir_p(File.join(pages_dir, 'parent', 'child', 'the-project'))
- end
+ before do
+ FileUtils.mkdir_p(File.join(uploads_dir, project.full_path))
+ FileUtils.mkdir_p(File.join(pages_dir, project.full_path))
+ end
+
+ context 'renaming child' do
+ it 'correctly moves the repository, uploads and pages' do
+ expected_repository_path = File.join(TestEnv.repos_path, 'parent', 'renamed', 'the-project.git')
+ expected_upload_path = File.join(uploads_dir, 'parent', 'renamed', 'the-project')
+ expected_pages_path = File.join(pages_dir, 'parent', 'renamed', 'the-project')
- context 'renaming child' do
- it 'correctly moves the repository, uploads and pages' do
- expected_repository_path = File.join(TestEnv.repos_path, 'parent', 'renamed', 'the-project.git')
- expected_upload_path = File.join(uploads_dir, 'parent', 'renamed', 'the-project')
- expected_pages_path = File.join(pages_dir, 'parent', 'renamed', 'the-project')
+ child.update_attributes!(path: 'renamed')
- child.update_attributes!(path: 'renamed')
+ expect(File.directory?(expected_repository_path)).to be(true)
+ expect(File.directory?(expected_upload_path)).to be(true)
+ expect(File.directory?(expected_pages_path)).to be(true)
+ end
+ end
+
+ context 'renaming parent' do
+ it 'correctly moves the repository, uploads and pages' do
+ expected_repository_path = File.join(TestEnv.repos_path, 'renamed', 'child', 'the-project.git')
+ expected_upload_path = File.join(uploads_dir, 'renamed', 'child', 'the-project')
+ expected_pages_path = File.join(pages_dir, 'renamed', 'child', 'the-project')
- expect(File.directory?(expected_repository_path)).to be(true)
- expect(File.directory?(expected_upload_path)).to be(true)
- expect(File.directory?(expected_pages_path)).to be(true)
+ parent.update_attributes!(path: 'renamed')
+
+ expect(File.directory?(expected_repository_path)).to be(true)
+ expect(File.directory?(expected_upload_path)).to be(true)
+ expect(File.directory?(expected_pages_path)).to be(true)
+ end
end
end
+ end
- context 'renaming parent' do
- it 'correctly moves the repository, uploads and pages' do
- expected_repository_path = File.join(TestEnv.repos_path, 'renamed', 'child', 'the-project.git')
- expected_upload_path = File.join(uploads_dir, 'renamed', 'child', 'the-project')
- expected_pages_path = File.join(pages_dir, 'renamed', 'child', 'the-project')
+ context 'hashed storage' do
+ let(:namespace) { create(:namespace) }
+ let!(:project) { create(:project_empty_repo, namespace: namespace) }
- parent.update_attributes!(path: 'renamed')
+ it_behaves_like 'namespace restrictions'
- expect(File.directory?(expected_repository_path)).to be(true)
- expect(File.directory?(expected_upload_path)).to be(true)
- expect(File.directory?(expected_pages_path)).to be(true)
- end
+ it "repository directory remains unchanged if path changed" do
+ before_disk_path = project.disk_path
+ namespace.update_attributes(path: namespace.full_path + '_new')
+
+ expect(before_disk_path).to eq(project.disk_path)
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_truthy
end
end
it 'updates project full path in .git/config for each project inside namespace' do
parent = create(:group, name: 'mygroup', path: 'mygroup')
subgroup = create(:group, name: 'mysubgroup', path: 'mysubgroup', parent: parent)
- project_in_parent_group = create(:project, :repository, namespace: parent, name: 'foo1')
- hashed_project_in_subgroup = create(:project, :repository, :hashed, namespace: subgroup, name: 'foo2')
- legacy_project_in_subgroup = create(:project, :repository, namespace: subgroup, name: 'foo3')
+ project_in_parent_group = create(:project, :legacy_storage, :repository, namespace: parent, name: 'foo1')
+ hashed_project_in_subgroup = create(:project, :repository, namespace: subgroup, name: 'foo2')
+ legacy_project_in_subgroup = create(:project, :legacy_storage, :repository, namespace: subgroup, name: 'foo3')
parent.update(path: 'mygroup_new')
@@ -260,38 +281,18 @@ describe Namespace do
end
describe '#rm_dir', 'callback' do
- let!(:project) { create(:project_empty_repo, namespace: namespace) }
let(:repository_storage_path) { Gitlab.config.repositories.storages.default['path'] }
let(:path_in_dir) { File.join(repository_storage_path, namespace.full_path) }
let(:deleted_path) { namespace.full_path.gsub(namespace.path, "#{namespace.full_path}+#{namespace.id}+deleted") }
let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) }
- it 'renames its dirs when deleted' do
- allow(GitlabShellWorker).to receive(:perform_in)
-
- namespace.destroy
-
- expect(File.exist?(deleted_path_in_dir)).to be(true)
- end
-
- it 'schedules the namespace for deletion' do
- expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path)
-
- namespace.destroy
- end
-
- context 'in sub-groups' do
- let(:parent) { create(:group, path: 'parent') }
- let(:child) { create(:group, parent: parent, path: 'child') }
- let!(:project) { create(:project_empty_repo, namespace: child) }
- let(:path_in_dir) { File.join(repository_storage_path, 'parent', 'child') }
- let(:deleted_path) { File.join('parent', "child+#{child.id}+deleted") }
- let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) }
+ context 'legacy storage' do
+ let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: namespace) }
it 'renames its dirs when deleted' do
allow(GitlabShellWorker).to receive(:perform_in)
- child.destroy
+ namespace.destroy
expect(File.exist?(deleted_path_in_dir)).to be(true)
end
@@ -299,14 +300,57 @@ describe Namespace do
it 'schedules the namespace for deletion' do
expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path)
- child.destroy
+ namespace.destroy
+ end
+
+ context 'in sub-groups' do
+ let(:parent) { create(:group, path: 'parent') }
+ let(:child) { create(:group, parent: parent, path: 'child') }
+ let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: child) }
+ let(:path_in_dir) { File.join(repository_storage_path, 'parent', 'child') }
+ let(:deleted_path) { File.join('parent', "child+#{child.id}+deleted") }
+ let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) }
+
+ it 'renames its dirs when deleted' do
+ allow(GitlabShellWorker).to receive(:perform_in)
+
+ child.destroy
+
+ expect(File.exist?(deleted_path_in_dir)).to be(true)
+ end
+
+ it 'schedules the namespace for deletion' do
+ expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path)
+
+ child.destroy
+ end
+ end
+
+ it 'removes the exports folder' do
+ expect(namespace).to receive(:remove_exports!)
+
+ namespace.destroy
end
end
- it 'removes the exports folder' do
- expect(namespace).to receive(:remove_exports!)
+ context 'hashed storage' do
+ let!(:project) { create(:project_empty_repo, namespace: namespace) }
+
+ it 'has no repositories base directories to remove' do
+ allow(GitlabShellWorker).to receive(:perform_in)
+
+ expect(File.exist?(path_in_dir)).to be(false)
- namespace.destroy
+ namespace.destroy
+
+ expect(File.exist?(deleted_path_in_dir)).to be(false)
+ end
+
+ it 'removes the exports folder' do
+ expect(namespace).to receive(:remove_exports!)
+
+ namespace.destroy
+ end
end
end
@@ -567,8 +611,8 @@ describe Namespace do
end
describe '#remove_exports' do
- let(:legacy_project) { create(:project, :with_export, namespace: namespace) }
- let(:hashed_project) { create(:project, :with_export, :hashed, namespace: namespace) }
+ let(:legacy_project) { create(:project, :with_export, :legacy_storage, namespace: namespace) }
+ let(:hashed_project) { create(:project, :with_export, namespace: namespace) }
let(:export_path) { Dir.mktmpdir('namespace_remove_exports_spec') }
let(:legacy_export) { legacy_project.export_project_path }
let(:hashed_export) { hashed_project.export_project_path }
diff --git a/spec/models/project_auto_devops_spec.rb b/spec/models/project_auto_devops_spec.rb
index 12069575866..296b91a771c 100644
--- a/spec/models/project_auto_devops_spec.rb
+++ b/spec/models/project_auto_devops_spec.rb
@@ -18,7 +18,21 @@ describe ProjectAutoDevops do
context 'when domain is empty' do
let(:auto_devops) { build_stubbed(:project_auto_devops, project: project, domain: '') }
- it { expect(auto_devops).not_to have_domain }
+ context 'when there is an instance domain specified' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return('example.com')
+ end
+
+ it { expect(auto_devops).to have_domain }
+ end
+
+ context 'when there is no instance domain specified' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return(nil)
+ end
+
+ it { expect(auto_devops).not_to have_domain }
+ end
end
end
@@ -29,9 +43,32 @@ describe ProjectAutoDevops do
let(:domain) { 'example.com' }
it 'returns AUTO_DEVOPS_DOMAIN' do
- expect(auto_devops.variables).to include(
- { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true })
+ expect(auto_devops.variables).to include(domain_variable)
end
end
+
+ context 'when domain is not defined' do
+ let(:domain) { nil }
+
+ context 'when there is an instance domain specified' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return('example.com')
+ end
+
+ it { expect(auto_devops.variables).to include(domain_variable) }
+ end
+
+ context 'when there is no instance domain specified' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return(nil)
+ end
+
+ it { expect(auto_devops.variables).not_to include(domain_variable) }
+ end
+ end
+
+ def domain_variable
+ { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true }
+ end
end
end
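
These examples describe a fallback from the project-level Auto DevOps domain to the instance-wide setting. A minimal sketch of the accessors being tested (everything except #variables and the has_domain predicate is an assumption):

    # app/models/project_auto_devops.rb -- hypothetical sketch
    def has_domain?
      domain.present? || instance_domain.present?
    end

    def variables
      vars = []
      if has_domain?
        vars << { key: 'AUTO_DEVOPS_DOMAIN',
                  value: domain.presence || instance_domain,
                  public: true }
      end
      vars
    end

    private

    # falls back to the instance-wide Auto DevOps domain
    def instance_domain
      Gitlab::CurrentSettings.auto_devops_domain
    end
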
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index bf39e8d7a39..ed17e019d42 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -13,17 +13,17 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
describe 'Validations' do
- context 'when service is active' do
+ context 'when manual_configuration is enabled' do
before do
- subject.active = true
+ subject.manual_configuration = true
end
it { is_expected.to validate_presence_of(:api_url) }
end
- context 'when service is inactive' do
+ context 'when manual configuration is disabled' do
before do
- subject.active = false
+ subject.manual_configuration = false
end
it { is_expected.not_to validate_presence_of(:api_url) }
@@ -31,12 +31,17 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
describe '#test' do
+ before do
+ service.manual_configuration = true
+ end
+
let!(:req_stub) { stub_prometheus_request(prometheus_query_url('1'), body: prometheus_value_body('vector')) }
context 'success' do
it 'reads the discovery endpoint' do
+ expect(service.test[:result]).to eq('Checked API endpoint')
expect(service.test[:success]).to be_truthy
- expect(req_stub).to have_been_requested
+ expect(req_stub).to have_been_requested.twice
end
end
@@ -70,6 +75,25 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
end
+ describe '#matched_metrics' do
+ let(:matched_metrics_query) { Gitlab::Prometheus::Queries::MatchedMetricsQuery }
+ let(:client) { double(:client, label_values: nil) }
+
+ context 'with valid data' do
+ subject { service.matched_metrics }
+
+ before do
+ allow(service).to receive(:client).and_return(client)
+ synchronous_reactive_cache(service)
+ end
+
+ it 'returns reactive data' do
+ expect(subject[:success]).to be_truthy
+ expect(subject[:data]).to eq([])
+ end
+ end
+ end
+
describe '#deployment_metrics' do
let(:deployment) { build_stubbed(:deployment) }
let(:deployment_query) { Gitlab::Prometheus::Queries::DeploymentQuery }
@@ -83,7 +107,7 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
let(:fake_deployment_time) { 10 }
before do
- stub_reactive_cache(service, prometheus_data, deployment_query, deployment.id)
+ stub_reactive_cache(service, prometheus_data, deployment_query, deployment.environment.id, deployment.id)
end
it 'returns reactive data' do
@@ -96,13 +120,17 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
describe '#calculate_reactive_cache' do
let(:environment) { create(:environment, slug: 'env-slug') }
-
- around do |example|
- Timecop.freeze { example.run }
+ before do
+ service.manual_configuration = true
+ service.active = true
end
subject do
- service.calculate_reactive_cache(environment_query.to_s, environment.id)
+ service.calculate_reactive_cache(environment_query.name, environment.id)
+ end
+
+ around do |example|
+ Timecop.freeze { example.run }
end
context 'when service is inactive' do
@@ -132,4 +160,193 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
end
end
+
+ describe '#client' do
+ context 'manual configuration is enabled' do
+ let(:api_url) { 'http://some_url' }
+ before do
+ subject.manual_configuration = true
+ subject.api_url = api_url
+ end
+
+ it 'returns simple rest client from api_url' do
+ expect(subject.client).to be_instance_of(Gitlab::PrometheusClient)
+ expect(subject.client.rest_client.url).to eq(api_url)
+ end
+ end
+
+ context 'manual configuration is disabled' do
+ let!(:cluster_for_all) { create(:cluster, environment_scope: '*', projects: [project]) }
+ let!(:cluster_for_dev) { create(:cluster, environment_scope: 'dev', projects: [project]) }
+
+ let!(:prometheus_for_dev) { create(:clusters_applications_prometheus, :installed, cluster: cluster_for_dev) }
+ let(:proxy_client) { double('proxy_client') }
+
+ before do
+ service.manual_configuration = false
+ end
+
+ context 'with cluster for all environments with prometheus installed' do
+ let!(:prometheus_for_all) { create(:clusters_applications_prometheus, :installed, cluster: cluster_for_all) }
+
+ context 'without environment supplied' do
+ it 'returns client handling all environments' do
+ expect(service).to receive(:client_from_cluster).with(cluster_for_all).and_return(proxy_client).twice
+
+ expect(service.client).to be_instance_of(Gitlab::PrometheusClient)
+ expect(service.client.rest_client).to eq(proxy_client)
+ end
+ end
+
+ context 'with dev environment supplied' do
+ let!(:environment) { create(:environment, project: project, name: 'dev') }
+
+ it 'returns dev cluster client' do
+ expect(service).to receive(:client_from_cluster).with(cluster_for_dev).and_return(proxy_client).twice
+
+ expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
+ expect(service.client(environment.id).rest_client).to eq(proxy_client)
+ end
+ end
+
+ context 'with prod environment supplied' do
+ let!(:environment) { create(:environment, project: project, name: 'prod') }
+
+ it 'returns a client for the all-environments cluster' do
+ expect(service).to receive(:client_from_cluster).with(cluster_for_all).and_return(proxy_client).twice
+
+ expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
+ expect(service.client(environment.id).rest_client).to eq(proxy_client)
+ end
+ end
+ end
+
+ context 'with cluster for all environments without prometheus installed' do
+ context 'without environment supplied' do
+ it 'raises PrometheusError because cluster was not found' do
+ expect { service.client }.to raise_error(Gitlab::PrometheusError, /couldn't find cluster with Prometheus installed/)
+ end
+ end
+
+ context 'with dev environment supplied' do
+ let!(:environment) { create(:environment, project: project, name: 'dev') }
+
+ it 'returns dev cluster client' do
+ expect(service).to receive(:client_from_cluster).with(cluster_for_dev).and_return(proxy_client).twice
+
+ expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
+ expect(service.client(environment.id).rest_client).to eq(proxy_client)
+ end
+ end
+
+ context 'with prod environment supplied' do
+ let!(:environment) { create(:environment, project: project, name: 'prod') }
+
+ it 'raises PrometheusError because cluster was not found' do
+ expect { service.client }.to raise_error(Gitlab::PrometheusError, /couldn't find cluster with Prometheus installed/)
+ end
+ end
+ end
+ end
+ end
+
+ describe '#prometheus_installed?' do
+ context 'clusters with installed prometheus' do
+ let!(:cluster) { create(:cluster, projects: [project]) }
+ let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
+
+ it 'returns true' do
+ expect(service.prometheus_installed?).to be(true)
+ end
+ end
+
+ context 'clusters without prometheus installed' do
+ let(:cluster) { create(:cluster, projects: [project]) }
+ let!(:prometheus) { create(:clusters_applications_prometheus, cluster: cluster) }
+
+ it 'returns false' do
+ expect(service.prometheus_installed?).to be(false)
+ end
+ end
+
+ context 'clusters without prometheus' do
+ let(:cluster) { create(:cluster, projects: [project]) }
+
+ it 'returns false' do
+ expect(service.prometheus_installed?).to be(false)
+ end
+ end
+
+ context 'no clusters' do
+ it 'returns false' do
+ expect(service.prometheus_installed?).to be(false)
+ end
+ end
+ end
+
+ describe '#synchronize_service_state! before_save callback' do
+ context 'no clusters with prometheus are installed' do
+ context 'when service is inactive' do
+ before do
+ service.active = false
+ end
+
+ it 'activates service when manual_configuration is enabled' do
+ expect { service.update!(manual_configuration: true) }.to change { service.active }.from(false).to(true)
+ end
+
+ it 'keeps service inactive when manual_configuration is disabled' do
+ expect { service.update!(manual_configuration: false) }.not_to change { service.active }.from(false)
+ end
+ end
+
+ context 'when service is active' do
+ before do
+ service.active = true
+ end
+
+ it 'keeps the service active when manual_configuration is enabled' do
+ expect { service.update!(manual_configuration: true) }.not_to change { service.active }.from(true)
+ end
+
+ it 'inactivates the service when manual_configuration is disabled' do
+ expect { service.update!(manual_configuration: false) }.to change { service.active }.from(true).to(false)
+ end
+ end
+ end
+
+ context 'with prometheus installed in the cluster' do
+ before do
+ allow(service).to receive(:prometheus_installed?).and_return(true)
+ end
+
+ context 'when service is inactive' do
+ before do
+ service.active = false
+ end
+
+ it 'activates service when manual_configuration is enabled' do
+ expect { service.update!(manual_configuration: true) }.to change { service.active }.from(false).to(true)
+ end
+
+ it 'activates service when manual_configuration is disabled' do
+ expect { service.update!(manual_configuration: false) }.to change { service.active }.from(false).to(true)
+ end
+ end
+
+ context 'when service is active' do
+ before do
+ service.active = true
+ end
+
+ it 'keeps service active when manual_configuration is enabled' do
+ expect { service.update!(manual_configuration: true) }.not_to change { service.active }.from(true)
+ end
+
+ it 'keeps service active when manual_configuration is disabled' do
+ expect { service.update!(manual_configuration: false) }.not_to change { service.active }.from(true)
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index a63f5d6d5a1..ee04d74d848 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -80,6 +80,7 @@ describe Project do
it { is_expected.to have_many(:members_and_requesters) }
it { is_expected.to have_many(:clusters) }
it { is_expected.to have_many(:custom_attributes).class_name('ProjectCustomAttribute') }
+ it { is_expected.to have_many(:lfs_file_locks) }
context 'after initialized' do
it "has a project_feature" do
@@ -2070,7 +2071,7 @@ describe Project do
create(:ci_variable, :protected, value: 'protected', project: project)
end
- subject { project.secret_variables_for(ref: 'ref') }
+ subject { project.reload.secret_variables_for(ref: 'ref') }
before do
stub_application_setting(
@@ -2091,7 +2092,7 @@ describe Project do
context 'when the ref is a protected branch' do
before do
- create(:protected_branch, name: 'ref', project: project)
+ allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
it_behaves_like 'ref is protected'
@@ -2099,7 +2100,7 @@ describe Project do
context 'when the ref is a protected tag' do
before do
- create(:protected_tag, name: 'ref', project: project)
+ allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
it_behaves_like 'ref is protected'
@@ -2124,6 +2125,8 @@ describe Project do
context 'when the ref is a protected branch' do
before do
+ allow(project).to receive(:repository).and_call_original
+ allow(project).to receive_message_chain(:repository, :branch_exists?).and_return(true)
create(:protected_branch, name: 'ref', project: project)
end
@@ -2134,6 +2137,8 @@ describe Project do
context 'when the ref is a protected tag' do
before do
+ allow(project).to receive_message_chain(:repository, :branch_exists?).and_return(false)
+ allow(project).to receive_message_chain(:repository, :tag_exists?).and_return(true)
create(:protected_tag, name: 'ref', project: project)
end
@@ -2503,6 +2508,7 @@ describe Project do
end
describe '#remove_exports' do
+ let(:legacy_project) { create(:project, :legacy_storage, :with_export) }
let(:project) { create(:project, :with_export) }
it 'removes the exports directory for the project' do
@@ -2515,15 +2521,29 @@ describe Project do
expect(File.exist?(project.export_path)).to be_falsy
end
- it 'is a no-op when there is no namespace' do
+ it 'is a no-op on legacy projects when there is no namespace' do
+ export_path = legacy_project.export_path
+
+ legacy_project.update_column(:namespace_id, nil)
+
+ expect(FileUtils).not_to receive(:rm_rf).with(export_path)
+
+ legacy_project.remove_exports
+
+ expect(File.exist?(export_path)).to be_truthy
+ end
+
+ it 'runs on hashed storage projects when there is no namespace' do
export_path = project.export_path
+
project.update_column(:namespace_id, nil)
- expect(FileUtils).not_to receive(:rm_rf).with(export_path)
+ allow(FileUtils).to receive(:rm_rf).and_call_original
+ expect(FileUtils).to receive(:rm_rf).with(export_path).and_call_original
project.remove_exports
- expect(File.exist?(export_path)).to be_truthy
+ expect(File.exist?(export_path)).to be_falsy
end
it 'is run when the project is destroyed' do
@@ -2544,7 +2564,7 @@ describe Project do
end
context 'legacy storage' do
- let(:project) { create(:project, :repository) }
+ let(:project) { create(:project, :repository, :legacy_storage) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project_storage) { project.send(:storage) }
@@ -2718,6 +2738,8 @@ describe Project do
let(:project) { create(:project, :repository, skip_disk_validation: true) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) }
+ let(:hashed_prefix) { File.join('@hashed', hash[0..1], hash[2..3]) }
+ let(:hashed_path) { File.join(hashed_prefix, hash) }
before do
stub_application_setting(hashed_storage_enabled: true)
@@ -2743,14 +2765,12 @@ describe Project do
describe '#base_dir' do
it 'returns base_dir based on hash of project id' do
- expect(project.base_dir).to eq("@hashed/#{hash[0..1]}/#{hash[2..3]}")
+ expect(project.base_dir).to eq(hashed_prefix)
end
end
describe '#disk_path' do
it 'returns disk_path based on hash of project id' do
- hashed_path = "@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}"
-
expect(project.disk_path).to eq(hashed_path)
end
end
@@ -2759,7 +2779,7 @@ describe Project do
it 'delegates to gitlab_shell to ensure namespace is created' do
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
- expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, "@hashed/#{hash[0..1]}/#{hash[2..3]}")
+ expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, hashed_prefix)
project.ensure_storage_path_exists
end
@@ -3009,18 +3029,40 @@ describe Project do
subject { project.auto_devops_variables }
- context 'when enabled in settings' do
+ context 'when enabled in instance settings' do
before do
stub_application_setting(auto_devops_enabled: true)
end
context 'when domain is empty' do
before do
+ stub_application_setting(auto_devops_domain: nil)
+ end
+
+ it 'variables does not include AUTO_DEVOPS_DOMAIN' do
+ is_expected.not_to include(domain_variable)
+ end
+ end
+
+ context 'when domain is configured' do
+ before do
+ stub_application_setting(auto_devops_domain: 'example.com')
+ end
+
+ it 'variables includes AUTO_DEVOPS_DOMAIN' do
+ is_expected.to include(domain_variable)
+ end
+ end
+ end
+
+ context 'when explicitly enabled' do
+ context 'when domain is empty' do
+ before do
create(:project_auto_devops, project: project, domain: nil)
end
- it 'variables are empty' do
- is_expected.to be_empty
+ it 'variables does not include AUTO_DEVOPS_DOMAIN' do
+ is_expected.not_to include(domain_variable)
end
end
@@ -3029,11 +3071,15 @@ describe Project do
create(:project_auto_devops, project: project, domain: 'example.com')
end
- it "variables are not empty" do
- is_expected.not_to be_empty
+ it 'variables includes AUTO_DEVOPS_DOMAIN' do
+ is_expected.to include(domain_variable)
end
end
end
+
+ def domain_variable
+ { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true }
+ end
end
describe '#latest_successful_builds_for' do
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 02a5ee54262..0bc07dc7a85 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -262,6 +262,28 @@ describe Repository do
end
end
+ describe '#new_commits' do
+ let(:new_refs) do
+ double(:git_rev_list, new_refs: %w[
+ c1acaa58bbcbc3eafe538cb8274ba387047b69f8
+ 5937ac0a7beb003549fc5fd26fc247adbce4a52e
+ ])
+ end
+
+ it 'delegates to Gitlab::Git::RevList' do
+ expect(Gitlab::Git::RevList).to receive(:new).with(
+ repository.raw,
+ newrev: 'aaaabbbbccccddddeeeeffffgggghhhhiiiijjjj').and_return(new_refs)
+
+ commits = repository.new_commits('aaaabbbbccccddddeeeeffffgggghhhhiiiijjjj')
+
+ expect(commits).to eq([
+ repository.commit('c1acaa58bbcbc3eafe538cb8274ba387047b69f8'),
+ repository.commit('5937ac0a7beb003549fc5fd26fc247adbce4a52e')
+ ])
+ end
+ end
+
describe '#commits_by' do
set(:project) { create(:project, :repository) }
@@ -851,6 +873,18 @@ describe Repository do
expect(repository.license_key).to be_nil
end
+ it 'returns nil when the commit SHA does not exist' do
+ allow(repository.head_commit).to receive(:sha).and_return('1' * 40)
+
+ expect(repository.license_key).to be_nil
+ end
+
+ it 'returns nil when master does not exist' do
+ repository.rm_branch(user, 'master')
+
+ expect(repository.license_key).to be_nil
+ end
+
it 'returns the license key' do
repository.create_file(user, 'LICENSE',
Licensee::License.new('mit').content,
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index cb02d526a98..1815696a8a0 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -893,6 +893,14 @@ describe User do
end
end
+ describe '.find_for_database_authentication' do
+ it 'strips whitespace from login' do
+ user = create(:user)
+
+ expect(described_class.find_for_database_authentication({ login: " #{user.username} " })).to eq user
+ end
+ end
+
describe '.find_by_any_email' do
it 'finds by primary email' do
user = create(:user, email: 'foo@example.com')
@@ -1586,14 +1594,37 @@ describe User do
describe '#authorized_groups' do
let!(:user) { create(:user) }
let!(:private_group) { create(:group) }
+ let!(:child_group) { create(:group, parent: private_group) }
+
+ let!(:project_group) { create(:group) }
+ let!(:project) { create(:project, group: project_group) }
before do
private_group.add_user(user, Gitlab::Access::MASTER)
+ project.add_master(user)
end
subject { user.authorized_groups }
- it { is_expected.to eq([private_group]) }
+ it { is_expected.to contain_exactly private_group, project_group }
+ end
+
+ describe '#membership_groups' do
+ let!(:user) { create(:user) }
+ let!(:parent_group) { create(:group) }
+ let!(:child_group) { create(:group, parent: parent_group) }
+
+ before do
+ parent_group.add_user(user, Gitlab::Access::MASTER)
+ end
+
+ subject { user.membership_groups }
+
+ if Group.supports_nested_groups?
+ it { is_expected.to contain_exactly parent_group, child_group }
+ else
+ it { is_expected.to contain_exactly parent_group }
+ end
end
describe '#authorized_projects', :delete do
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index d53ba497ed1..b2b7721674c 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -188,162 +188,181 @@ describe WikiPage do
end
end
- describe '#create', :skip_gitaly_mock do
- context 'with valid attributes' do
- it 'raises an error if a page with the same path already exists' do
- create_page('New Page', 'content')
- create_page('foo/bar', 'content')
- expect { create_page('New Page', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError
- expect { create_page('foo/bar', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError
-
- destroy_page('New Page')
- destroy_page('bar', 'foo')
- end
+ describe '#create' do
+ shared_examples 'create method' do
+ context 'with valid attributes' do
+ it 'raises an error if a page with the same path already exists' do
+ create_page('New Page', 'content')
+ create_page('foo/bar', 'content')
+ expect { create_page('New Page', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError
+ expect { create_page('foo/bar', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError
+
+ destroy_page('New Page')
+ destroy_page('bar', 'foo')
+ end
- it 'if the title is preceded by a / it is removed' do
- create_page('/New Page', 'content')
+ it 'if the title is preceded by a / it is removed' do
+ create_page('/New Page', 'content')
- expect(wiki.find_page('New Page')).not_to be_nil
+ expect(wiki.find_page('New Page')).not_to be_nil
- destroy_page('New Page')
+ destroy_page('New Page')
+ end
end
end
- end
- # Remove skip_gitaly_mock flag when gitaly_update_page implements moving pages
- describe "#update", :skip_gitaly_mock do
- before do
- create_page("Update", "content")
- @page = wiki.find_page("Update")
+ context 'when Gitaly is enabled' do
+ it_behaves_like 'create method'
end
- after do
- destroy_page(@page.title, @page.directory)
+ context 'when Gitaly is disabled', :skip_gitaly_mock do
+ it_behaves_like 'create method'
end
+ end
- context "with valid attributes" do
- it "updates the content of the page" do
- new_content = "new content"
-
- @page.update(content: new_content)
+ describe "#update" do
+ shared_examples 'update method' do
+ before do
+ create_page("Update", "content")
@page = wiki.find_page("Update")
+ end
- expect(@page.content).to eq("new content")
+ after do
+ destroy_page(@page.title, @page.directory)
end
- it "updates the title of the page" do
- new_title = "Index v.1.2.4"
+ context "with valid attributes" do
+ it "updates the content of the page" do
+ new_content = "new content"
- @page.update(title: new_title)
- @page = wiki.find_page(new_title)
+ @page.update(content: new_content)
+ @page = wiki.find_page("Update")
- expect(@page.title).to eq(new_title)
- end
+ expect(@page.content).to eq("new content")
+ end
- it "returns true" do
- expect(@page.update(content: "more content")).to be_truthy
+ it "updates the title of the page" do
+ new_title = "Index v.1.2.4"
+
+ @page.update(title: new_title)
+ @page = wiki.find_page(new_title)
+
+ expect(@page.title).to eq(new_title)
+ end
+
+ it "returns true" do
+ expect(@page.update(content: "more content")).to be_truthy
+ end
end
- end
- context 'with same last commit sha' do
- it 'returns true' do
- expect(@page.update(content: 'more content', last_commit_sha: @page.last_commit_sha)).to be_truthy
+ context 'with same last commit sha' do
+ it 'returns true' do
+ expect(@page.update(content: 'more content', last_commit_sha: @page.last_commit_sha)).to be_truthy
+ end
end
- end
- context 'with different last commit sha' do
- it 'raises exception' do
- expect { @page.update(content: 'more content', last_commit_sha: 'xxx') }.to raise_error(WikiPage::PageChangedError)
+ context 'with different last commit sha' do
+ it 'raises exception' do
+ expect { @page.update(content: 'more content', last_commit_sha: 'xxx') }.to raise_error(WikiPage::PageChangedError)
+ end
end
- end
- context 'when renaming a page' do
- it 'raises an error if the page already exists' do
- create_page('Existing Page', 'content')
+ context 'when renaming a page' do
+ it 'raises an error if the page already exists' do
+ create_page('Existing Page', 'content')
- expect { @page.update(title: 'Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
- expect(@page.title).to eq 'Update'
- expect(@page.content).to eq 'new_content'
+ expect { @page.update(title: 'Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
+ expect(@page.title).to eq 'Update'
+ expect(@page.content).to eq 'new_content'
- destroy_page('Existing Page')
- end
+ destroy_page('Existing Page')
+ end
- it 'updates the content and rename the file' do
- new_title = 'Renamed Page'
- new_content = 'updated content'
+ it 'updates the content and rename the file' do
+ new_title = 'Renamed Page'
+ new_content = 'updated content'
- expect(@page.update(title: new_title, content: new_content)).to be_truthy
+ expect(@page.update(title: new_title, content: new_content)).to be_truthy
- @page = wiki.find_page(new_title)
+ @page = wiki.find_page(new_title)
- expect(@page).not_to be_nil
- expect(@page.content).to eq new_content
+ expect(@page).not_to be_nil
+ expect(@page.content).to eq new_content
+ end
end
- end
-
- context 'when moving a page' do
- it 'raises an error if the page already exists' do
- create_page('foo/Existing Page', 'content')
- expect { @page.update(title: 'foo/Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
- expect(@page.title).to eq 'Update'
- expect(@page.content).to eq 'new_content'
+ context 'when moving a page' do
+ it 'raises an error if the page already exists' do
+ create_page('foo/Existing Page', 'content')
- destroy_page('Existing Page', 'foo')
- end
+ expect { @page.update(title: 'foo/Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
+ expect(@page.title).to eq 'Update'
+ expect(@page.content).to eq 'new_content'
- it 'updates the content and moves the file' do
- new_title = 'foo/Other Page'
- new_content = 'new_content'
+ destroy_page('Existing Page', 'foo')
+ end
- expect(@page.update(title: new_title, content: new_content)).to be_truthy
+ it 'updates the content and moves the file' do
+ new_title = 'foo/Other Page'
+ new_content = 'new_content'
- page = wiki.find_page(new_title)
+ expect(@page.update(title: new_title, content: new_content)).to be_truthy
- expect(page).not_to be_nil
- expect(page.content).to eq new_content
- end
+ page = wiki.find_page(new_title)
- context 'in subdir' do
- before do
- create_page('foo/Existing Page', 'content')
- @page = wiki.find_page('foo/Existing Page')
+ expect(page).not_to be_nil
+ expect(page.content).to eq new_content
end
- it 'moves the page to the root folder if the title is preceded by /' do
- expect(@page.slug).to eq 'foo/Existing-Page'
- expect(@page.update(title: '/Existing Page', content: 'new_content')).to be_truthy
- expect(@page.slug).to eq 'Existing-Page'
+ context 'in subdir' do
+ before do
+ create_page('foo/Existing Page', 'content')
+ @page = wiki.find_page('foo/Existing Page')
+ end
+
+ it 'moves the page to the root folder if the title is preceded by /', :skip_gitaly_mock do
+ expect(@page.slug).to eq 'foo/Existing-Page'
+ expect(@page.update(title: '/Existing Page', content: 'new_content')).to be_truthy
+ expect(@page.slug).to eq 'Existing-Page'
+ end
+
+ it 'does nothing if it has the same title' do
+ original_path = @page.slug
+
+ expect(@page.update(title: 'Existing Page', content: 'new_content')).to be_truthy
+ expect(@page.slug).to eq original_path
+ end
end
- it 'does nothing if it has the same title' do
- original_path = @page.slug
+ context 'in root dir' do
+ it 'does nothing if the title is preceded by /' do
+ original_path = @page.slug
- expect(@page.update(title: 'Existing Page', content: 'new_content')).to be_truthy
- expect(@page.slug).to eq original_path
+ expect(@page.update(title: '/Update', content: 'new_content')).to be_truthy
+ expect(@page.slug).to eq original_path
+ end
end
end
- context 'in root dir' do
- it 'does nothing if the title is preceded by /' do
- original_path = @page.slug
+ context "with invalid attributes" do
+ it 'aborts update if title blank' do
+ expect(@page.update(title: '', content: 'new_content')).to be_falsey
+ expect(@page.content).to eq 'new_content'
- expect(@page.update(title: '/Update', content: 'new_content')).to be_truthy
- expect(@page.slug).to eq original_path
+ page = wiki.find_page('Update')
+ expect(page.content).to eq 'content'
+
+ @page.title = 'Update'
end
end
end
- context "with invalid attributes" do
- it 'aborts update if title blank' do
- expect(@page.update(title: '', content: 'new_content')).to be_falsey
- expect(@page.content).to eq 'new_content'
-
- page = wiki.find_page('Update')
- expect(page.content).to eq 'content'
+ context 'when Gitaly is enabled' do
+ it_behaves_like 'update method'
+ end
- @page.title = 'Update'
- end
+ context 'when Gitaly is disabled', :skip_gitaly_mock do
+ it_behaves_like 'update method'
end
end