| author | Kamil Trzciński <ayufan@ayufan.eu> | 2018-02-28 20:36:55 +0100 |
|---|---|---|
| committer | Kamil Trzciński <ayufan@ayufan.eu> | 2018-02-28 20:36:55 +0100 |
| commit | 965dc28691e2d70b7040e28d90ccbc3721a9e416 (patch) | |
| tree | 84258f35b72f2e7ce6a7198db66032df4ad5aadb | /spec/models |
| parent | e3fafa7632e038927085cf8c8228c93be44b36bd (diff) | |
| parent | 7fabc892f251740dbd9a4755baede662e6854870 (diff) | |
| download | gitlab-ce-965dc28691e2d70b7040e28d90ccbc3721a9e416.tar.gz | |
Merge commit '7fabc892f251740dbd9a4755baede662e6854870' into object-storage-ee-to-ce-backport
Diffstat (limited to 'spec/models')
40 files changed, 1914 insertions, 562 deletions
diff --git a/spec/models/appearance_spec.rb b/spec/models/appearance_spec.rb index 49f44525b29..56b5d616284 100644 --- a/spec/models/appearance_spec.rb +++ b/spec/models/appearance_spec.rb @@ -5,9 +5,6 @@ describe Appearance do it { is_expected.to be_valid } - it { is_expected.to validate_presence_of(:title) } - it { is_expected.to validate_presence_of(:description) } - it { is_expected.to have_many(:uploads).dependent(:destroy) } describe '.current', :use_clean_rails_memory_store_caching do diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb index 47b7150d36f..ef480e7a80a 100644 --- a/spec/models/application_setting_spec.rb +++ b/spec/models/application_setting_spec.rb @@ -115,9 +115,8 @@ describe ApplicationSetting do end context 'circuitbreaker settings' do - [:circuitbreaker_backoff_threshold, - :circuitbreaker_failure_count_threshold, - :circuitbreaker_failure_wait_time, + [:circuitbreaker_failure_count_threshold, + :circuitbreaker_check_interval, :circuitbreaker_failure_reset_time, :circuitbreaker_storage_timeout].each do |field| it "Validates #{field} as number" do @@ -126,16 +125,6 @@ describe ApplicationSetting do .is_greater_than_or_equal_to(0) end end - - it 'requires the `backoff_threshold` to be lower than the `failure_count_threshold`' do - setting.circuitbreaker_failure_count_threshold = 10 - setting.circuitbreaker_backoff_threshold = 15 - failure_message = "The circuitbreaker backoff threshold should be lower "\ - "than the failure count threshold" - - expect(setting).not_to be_valid - expect(setting.errors[:circuitbreaker_backoff_threshold]).to include(failure_message) - end end context 'repository storages' do @@ -219,6 +208,65 @@ describe ApplicationSetting do expect(subject).to be_valid end end + + context 'gitaly timeouts' do + [:gitaly_timeout_default, :gitaly_timeout_medium, :gitaly_timeout_fast].each do |timeout_name| + it do + is_expected.to validate_presence_of(timeout_name) + is_expected.to validate_numericality_of(timeout_name).only_integer + .is_greater_than_or_equal_to(0) + end + end + + [:gitaly_timeout_medium, :gitaly_timeout_fast].each do |timeout_name| + it "validates that #{timeout_name} is lower than timeout_default" do + subject[:gitaly_timeout_default] = 50 + subject[timeout_name] = 100 + + expect(subject).to be_invalid + end + end + + it 'accepts all timeouts equal' do + subject.gitaly_timeout_default = 0 + subject.gitaly_timeout_medium = 0 + subject.gitaly_timeout_fast = 0 + + expect(subject).to be_valid + end + + it 'accepts timeouts in descending order' do + subject.gitaly_timeout_default = 50 + subject.gitaly_timeout_medium = 30 + subject.gitaly_timeout_fast = 20 + + expect(subject).to be_valid + end + + it 'rejects timeouts in ascending order' do + subject.gitaly_timeout_default = 20 + subject.gitaly_timeout_medium = 30 + subject.gitaly_timeout_fast = 50 + + expect(subject).to be_invalid + end + + it 'rejects medium timeout larger than default' do + subject.gitaly_timeout_default = 30 + subject.gitaly_timeout_medium = 50 + subject.gitaly_timeout_fast = 20 + + expect(subject).to be_invalid + end + + it 'rejects medium timeout smaller than fast' do + subject.gitaly_timeout_default = 30 + subject.gitaly_timeout_medium = 15 + subject.gitaly_timeout_fast = 20 + + expect(subject).to be_invalid + end + end end describe '.current' do @@ -564,4 +612,22 @@ describe ApplicationSetting do expect(setting.key_restriction_for(:foo)).to eq(described_class::FORBIDDEN_KEY_VALUE) end end + + describe '#allow_signup?' 
do + it 'returns true' do + expect(setting.allow_signup?).to be_truthy + end + + it 'returns false if signup is disabled' do + allow(setting).to receive(:signup_enabled?).and_return(false) + + expect(setting.allow_signup?).to be_falsey + end + + it 'returns false if password authentication is disabled for the web interface' do + allow(setting).to receive(:password_authentication_enabled_for_web?).and_return(false) + + expect(setting.allow_signup?).to be_falsey + end + end end diff --git a/spec/models/blob_spec.rb b/spec/models/blob_spec.rb index 47342f98283..81e35e6c931 100644 --- a/spec/models/blob_spec.rb +++ b/spec/models/blob_spec.rb @@ -16,6 +16,23 @@ describe Blob do end end + describe '.lazy' do + let(:project) { create(:project, :repository) } + let(:commit) { project.commit_by(oid: 'e63f41fe459e62e1228fcef60d7189127aeba95a') } + + it 'fetches all blobs when the first is accessed' do + changelog = described_class.lazy(project, commit.id, 'CHANGELOG') + contributing = described_class.lazy(project, commit.id, 'CONTRIBUTING.md') + + expect(Gitlab::Git::Blob).to receive(:batch).once.and_call_original + expect(Gitlab::Git::Blob).not_to receive(:find) + + # Access property so the values are loaded + changelog.id + contributing.id + end + end + describe '#data' do context 'using a binary blob' do it 'returns the data as-is' do diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb index 99a669464e0..1a20c2dda00 100644 --- a/spec/models/ci/build_spec.rb +++ b/spec/models/ci/build_spec.rb @@ -23,6 +23,8 @@ describe Ci::Build do it { is_expected.to respond_to(:has_trace?) } it { is_expected.to respond_to(:trace) } + it { is_expected.to be_a(ArtifactMigratable) } + describe 'callbacks' do context 'when running after_create callback' do it 'triggers asynchronous build hooks worker' do @@ -130,34 +132,55 @@ describe Ci::Build do end describe '#artifacts?' do - subject { build.artifacts? } + context 'when new artifacts are used' do + let(:build) { create(:ci_build, :artifacts) } - context 'artifacts archive does not exist' do - before do - build.update_attributes(artifacts_file: nil) + subject { build.artifacts? } + + context 'artifacts archive does not exist' do + let(:build) { create(:ci_build) } + + it { is_expected.to be_falsy } end - it { is_expected.to be_falsy } - end + context 'artifacts archive exists' do + it { is_expected.to be_truthy } - context 'artifacts archive exists' do - let(:build) { create(:ci_build, :artifacts) } - it { is_expected.to be_truthy } + context 'is expired' do + let!(:build) { create(:ci_build, :artifacts, :expired) } - context 'is expired' do - before do - build.update(artifacts_expire_at: Time.now - 7.days) + it { is_expected.to be_falsy } end + context 'is not expired' do + it { is_expected.to be_truthy } + end + end + end + + context 'when legacy artifacts are used' do + let(:build) { create(:ci_build, :legacy_artifacts) } + + subject { build.artifacts? 
} + + context 'artifacts archive does not exist' do + let(:build) { create(:ci_build) } + it { is_expected.to be_falsy } end - context 'is not expired' do - before do - build.update(artifacts_expire_at: Time.now + 7.days) + context 'artifacts archive exists' do + it { is_expected.to be_truthy } + + context 'is expired' do + let!(:build) { create(:ci_build, :legacy_artifacts, :expired) } + + it { is_expected.to be_falsy } end - it { is_expected.to be_truthy } + context 'is not expired' do + it { is_expected.to be_truthy } + end end end end @@ -314,6 +337,23 @@ describe Ci::Build do end end + describe '#triggered_by?' do + subject { build.triggered_by?(user) } + + context 'when user is owner' do + let(:build) { create(:ci_build, pipeline: pipeline, user: user) } + + it { is_expected.to be_truthy } + end + + context 'when user is not owner' do + let(:another_user) { create(:user) } + let(:build) { create(:ci_build, pipeline: pipeline, user: another_user) } + + it { is_expected.to be_falsy } + end + end + describe '#detailed_status' do it 'returns a detailed status' do expect(build.detailed_status(user)) @@ -639,71 +679,144 @@ describe Ci::Build do describe '#erasable?' do subject { build.erasable? } + it { is_expected.to eq false } end end context 'build is erasable' do - let!(:build) { create(:ci_build, :trace, :success, :artifacts) } + context 'new artifacts' do + let!(:build) { create(:ci_build, :trace, :success, :artifacts) } - describe '#erase' do - before do - build.erase(erased_by: user) - end + describe '#erase' do + before do + build.erase(erased_by: user) + end - context 'erased by user' do - let!(:user) { create(:user, username: 'eraser') } + context 'erased by user' do + let!(:user) { create(:user, username: 'eraser') } - include_examples 'erasable' + include_examples 'erasable' - it 'records user who erased a build' do - expect(build.erased_by).to eq user + it 'records user who erased a build' do + expect(build.erased_by).to eq user + end end - end - context 'erased by system' do - let(:user) { nil } + context 'erased by system' do + let(:user) { nil } - include_examples 'erasable' + include_examples 'erasable' - it 'does not set user who erased a build' do - expect(build.erased_by).to be_nil + it 'does not set user who erased a build' do + expect(build.erased_by).to be_nil + end end end - end - describe '#erasable?' do - subject { build.erasable? } - it { is_expected.to be_truthy } - end + describe '#erasable?' do + subject { build.erasable? } + it { is_expected.to be_truthy } + end - describe '#erased?' do - let!(:build) { create(:ci_build, :trace, :success, :artifacts) } - subject { build.erased? } + describe '#erased?' do + let!(:build) { create(:ci_build, :trace, :success, :artifacts) } + subject { build.erased? } - context 'job has not been erased' do - it { is_expected.to be_falsey } + context 'job has not been erased' do + it { is_expected.to be_falsey } + end + + context 'job has been erased' do + before do + build.erase + end + + it { is_expected.to be_truthy } + end end - context 'job has been erased' do + context 'metadata and build trace are not available' do + let!(:build) { create(:ci_build, :success, :artifacts) } + before do - build.erase + build.remove_artifacts_metadata! 
end - it { is_expected.to be_truthy } + describe '#erase' do + it 'does not raise error' do + expect { build.erase }.not_to raise_error + end + end end end + end - context 'metadata and build trace are not available' do - let!(:build) { create(:ci_build, :success, :artifacts) } + context 'old artifacts' do + context 'build is erasable' do + context 'new artifacts' do + let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) } - before do - build.remove_artifacts_metadata! - end + describe '#erase' do + before do + build.erase(erased_by: user) + end - describe '#erase' do - it 'does not raise error' do - expect { build.erase }.not_to raise_error + context 'erased by user' do + let!(:user) { create(:user, username: 'eraser') } + + include_examples 'erasable' + + it 'records user who erased a build' do + expect(build.erased_by).to eq user + end + end + + context 'erased by system' do + let(:user) { nil } + + include_examples 'erasable' + + it 'does not set user who erased a build' do + expect(build.erased_by).to be_nil + end + end + end + + describe '#erasable?' do + subject { build.erasable? } + it { is_expected.to be_truthy } + end + + describe '#erased?' do + let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) } + subject { build.erased? } + + context 'job has not been erased' do + it { is_expected.to be_falsey } + end + + context 'job has been erased' do + before do + build.erase + end + + it { is_expected.to be_truthy } + end + end + + context 'metadata and build trace are not available' do + let!(:build) { create(:ci_build, :success, :legacy_artifacts) } + + before do + build.remove_artifacts_metadata! + end + + describe '#erase' do + it 'does not raise error' do + expect { build.erase }.not_to raise_error + end + end end end end @@ -939,11 +1052,23 @@ describe Ci::Build do describe '#keep_artifacts!' do let(:build) { create(:ci_build, artifacts_expire_at: Time.now + 7.days) } + subject { build.keep_artifacts! } + it 'to reset expire_at' do - build.keep_artifacts! 
+ subject expect(build.artifacts_expire_at).to be_nil end + + context 'when having artifacts files' do + let!(:artifact) { create(:ci_job_artifact, job: build, expire_in: '7 days') } + + it 'to reset dependent objects' do + subject + + expect(artifact.reload.expire_at).to be_nil + end + end end describe '#merge_request' do @@ -1268,10 +1393,10 @@ describe Ci::Build do context 'when config does not have a questioned job' do let(:config) do YAML.dump({ - test_other: { - script: 'Hello World' - } - }) + test_other: { + script: 'Hello World' + } + }) end it { is_expected.to eq('on_success') } @@ -1280,11 +1405,11 @@ describe Ci::Build do context 'when config has `when`' do let(:config) do YAML.dump({ - test: { - script: 'Hello World', - when: 'always' - } - }) + test: { + script: 'Hello World', + when: 'always' + } + }) end it { is_expected.to eq('always') } @@ -1365,10 +1490,10 @@ describe Ci::Build do let!(:environment) do create(:environment, - project: build.project, - name: 'production', - slug: 'prod-slug', - external_url: '') + project: build.project, + name: 'production', + slug: 'prod-slug', + external_url: '') end before do @@ -1592,8 +1717,8 @@ describe Ci::Build do let!(:pipeline_schedule_variable) do create(:ci_pipeline_schedule_variable, - key: 'SCHEDULE_VARIABLE_KEY', - pipeline_schedule: pipeline_schedule) + key: 'SCHEDULE_VARIABLE_KEY', + pipeline_schedule: pipeline_schedule) end before do @@ -1735,8 +1860,8 @@ describe Ci::Build do allow_any_instance_of(Project) .to receive(:secret_variables_for) .with(ref: 'master', environment: nil) do - [create(:ci_variable, key: 'secret', value: 'value')] - end + [create(:ci_variable, key: 'secret', value: 'value')] + end allow_any_instance_of(Ci::Pipeline) .to receive(:predefined_variables) { [pipeline_pre_var] } @@ -1787,6 +1912,94 @@ describe Ci::Build do end end + describe 'state transition: any => [:running]' do + shared_examples 'validation is active' do + context 'when depended job has not been completed yet' do + let!(:pre_stage_job) { create(:ci_build, :running, pipeline: pipeline, name: 'test', stage_idx: 0) } + + it { expect { job.run! }.to raise_error(Ci::Build::MissingDependenciesError) } + end + + context 'when artifacts of depended job has been expired' do + let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) } + + it { expect { job.run! }.to raise_error(Ci::Build::MissingDependenciesError) } + end + + context 'when artifacts of depended job has been erased' do + let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) } + + before do + pre_stage_job.erase + end + + it { expect { job.run! }.to raise_error(Ci::Build::MissingDependenciesError) } + end + end + + shared_examples 'validation is not active' do + context 'when depended job has not been completed yet' do + let!(:pre_stage_job) { create(:ci_build, :running, pipeline: pipeline, name: 'test', stage_idx: 0) } + + it { expect { job.run! }.not_to raise_error } + end + + context 'when artifacts of depended job has been expired' do + let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) } + + it { expect { job.run! 
}.not_to raise_error } + end + + context 'when artifacts of depended job has been erased' do + let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) } + + before do + pre_stage_job.erase + end + + it { expect { job.run! }.not_to raise_error } + end + end + + let!(:job) { create(:ci_build, :pending, pipeline: pipeline, stage_idx: 1, options: options) } + + context 'when validates for dependencies is enabled' do + before do + stub_feature_flags(ci_disable_validates_dependencies: false) + end + + let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) } + + context 'when "dependencies" keyword is not defined' do + let(:options) { {} } + + it { expect { job.run! }.not_to raise_error } + end + + context 'when "dependencies" keyword is empty' do + let(:options) { { dependencies: [] } } + + it { expect { job.run! }.not_to raise_error } + end + + context 'when "dependencies" keyword is specified' do + let(:options) { { dependencies: ['test'] } } + + it_behaves_like 'validation is active' + end + end + + context 'when validates for dependencies is disabled' do + let(:options) { { dependencies: ['test'] } } + + before do + stub_feature_flags(ci_disable_validates_dependencies: true) + end + + it_behaves_like 'validation is not active' + end + end + describe 'state transition when build fails' do let(:service) { MergeRequests::AddTodoWhenBuildFailsService.new(project, user) } @@ -1840,4 +2053,77 @@ describe Ci::Build do end end end + + describe '.matches_tag_ids' do + set(:build) { create(:ci_build, project: project, user: user) } + let(:tag_ids) { ::ActsAsTaggableOn::Tag.named_any(tag_list).ids } + + subject { described_class.where(id: build).matches_tag_ids(tag_ids) } + + before do + build.update(tag_list: build_tag_list) + end + + context 'when have different tags' do + let(:build_tag_list) { %w(A B) } + let(:tag_list) { %w(C D) } + + it "does not match a build" do + is_expected.not_to contain_exactly(build) + end + end + + context 'when have a subset of tags' do + let(:build_tag_list) { %w(A B) } + let(:tag_list) { %w(A B C D) } + + it "does match a build" do + is_expected.to contain_exactly(build) + end + end + + context 'when build does not have tags' do + let(:build_tag_list) { [] } + let(:tag_list) { %w(C D) } + + it "does match a build" do + is_expected.to contain_exactly(build) + end + end + + context 'when does not have a subset of tags' do + let(:build_tag_list) { %w(A B C) } + let(:tag_list) { %w(C D) } + + it "does not match a build" do + is_expected.not_to contain_exactly(build) + end + end + end + + describe '.matches_tags' do + set(:build) { create(:ci_build, project: project, user: user) } + + subject { described_class.where(id: build).with_any_tags } + + before do + build.update(tag_list: tag_list) + end + + context 'when does have tags' do + let(:tag_list) { %w(A B) } + + it "does match a build" do + is_expected.to contain_exactly(build) + end + end + + context 'when does not have tags' do + let(:tag_list) { [] } + + it "does not match a build" do + is_expected.not_to contain_exactly(build) + end + end + end end diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb new file mode 100644 index 00000000000..0e18a326c68 --- /dev/null +++ b/spec/models/ci/job_artifact_spec.rb @@ -0,0 +1,74 @@ +require 'spec_helper' + +describe Ci::JobArtifact do + set(:artifact) { create(:ci_job_artifact, :archive) } + + describe "Associations" do + it { 
is_expected.to belong_to(:project) } + it { is_expected.to belong_to(:job) } + end + + it { is_expected.to respond_to(:file) } + it { is_expected.to respond_to(:created_at) } + it { is_expected.to respond_to(:updated_at) } + + describe '#set_size' do + it 'sets the size' do + expect(artifact.size).to eq(106365) + end + end + + describe '#file' do + subject { artifact.file } + + context 'the uploader api' do + it { is_expected.to respond_to(:store_dir) } + it { is_expected.to respond_to(:cache_dir) } + it { is_expected.to respond_to(:work_dir) } + end + end + + describe '#expire_in' do + subject { artifact.expire_in } + + it { is_expected.to be_nil } + + context 'when expire_at is specified' do + let(:expire_at) { Time.now + 7.days } + + before do + artifact.expire_at = expire_at + end + + it { is_expected.to be_within(5).of(expire_at - Time.now) } + end + end + + describe '#expire_in=' do + subject { artifact.expire_in } + + it 'when assigning valid duration' do + artifact.expire_in = '7 days' + + is_expected.to be_within(10).of(7.days.to_i) + end + + it 'when assigning invalid duration' do + expect { artifact.expire_in = '7 elephants' }.to raise_error(ChronicDuration::DurationParseError) + + is_expected.to be_nil + end + + it 'when resetting value' do + artifact.expire_in = nil + + is_expected.to be_nil + end + + it 'when setting to 0' do + artifact.expire_in = '0' + + is_expected.to be_nil + end + end +end diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb index 2c9e7013b77..bb89e093890 100644 --- a/spec/models/ci/pipeline_spec.rb +++ b/spec/models/ci/pipeline_spec.rb @@ -557,10 +557,23 @@ describe Ci::Pipeline, :mailer do describe '#has_kubernetes_active?' do context 'when kubernetes is active' do - let(:project) { create(:kubernetes_project) } + shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do + it 'returns true' do + expect(pipeline).to have_kubernetes_active + end + end - it 'returns true' do - expect(pipeline).to have_kubernetes_active + context 'when user configured kubernetes from Integration > Kubernetes' do + let(:project) { create(:kubernetes_project) } + + it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' + end + + context 'when user configured kubernetes from CI/CD > Clusters' do + let!(:cluster) { create(:cluster, :project, :provided_by_gcp) } + let(:project) { cluster.project } + + it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' end end @@ -625,38 +638,29 @@ describe Ci::Pipeline, :mailer do shared_context 'with some outdated pipelines' do before do - create_pipeline(:canceled, 'ref', 'A') - create_pipeline(:success, 'ref', 'A') - create_pipeline(:failed, 'ref', 'B') - create_pipeline(:skipped, 'feature', 'C') + create_pipeline(:canceled, 'ref', 'A', project) + create_pipeline(:success, 'ref', 'A', project) + create_pipeline(:failed, 'ref', 'B', project) + create_pipeline(:skipped, 'feature', 'C', project) end - def create_pipeline(status, ref, sha) - create(:ci_empty_pipeline, status: status, ref: ref, sha: sha) + def create_pipeline(status, ref, sha, project) + create( + :ci_empty_pipeline, + status: status, + ref: ref, + sha: sha, + project: project + ) end end - describe '.latest' do + describe '.newest_first' do include_context 'with some outdated pipelines' - context 'when no ref is specified' do - let(:pipelines) { described_class.latest.all } - - it 'returns the latest pipeline for the same ref and different sha' do - 
expect(pipelines.map(&:sha)).to contain_exactly('A', 'B', 'C') - expect(pipelines.map(&:status)) - .to contain_exactly('success', 'failed', 'skipped') - end - end - - context 'when ref is specified' do - let(:pipelines) { described_class.latest('ref').all } - - it 'returns the latest pipeline for ref and different sha' do - expect(pipelines.map(&:sha)).to contain_exactly('A', 'B') - expect(pipelines.map(&:status)) - .to contain_exactly('success', 'failed') - end + it 'returns the pipelines from new to old' do + expect(described_class.newest_first.pluck(:status)) + .to eq(%w[skipped failed success canceled]) end end @@ -664,20 +668,14 @@ describe Ci::Pipeline, :mailer do include_context 'with some outdated pipelines' context 'when no ref is specified' do - let(:latest_status) { described_class.latest_status } - - it 'returns the latest status for the same ref and different sha' do - expect(latest_status).to eq(described_class.latest.status) - expect(latest_status).to eq('failed') + it 'returns the status of the latest pipeline' do + expect(described_class.latest_status).to eq('skipped') end end context 'when ref is specified' do - let(:latest_status) { described_class.latest_status('ref') } - - it 'returns the latest status for ref and different sha' do - expect(latest_status).to eq(described_class.latest_status('ref')) - expect(latest_status).to eq('failed') + it 'returns the status of the latest pipeline for the given ref' do + expect(described_class.latest_status('ref')).to eq('failed') end end end @@ -686,7 +684,7 @@ describe Ci::Pipeline, :mailer do include_context 'with some outdated pipelines' let!(:latest_successful_pipeline) do - create_pipeline(:success, 'ref', 'D') + create_pipeline(:success, 'ref', 'D', project) end it 'returns the latest successful pipeline' do @@ -698,8 +696,13 @@ describe Ci::Pipeline, :mailer do describe '.latest_successful_for_refs' do include_context 'with some outdated pipelines' - let!(:latest_successful_pipeline1) { create_pipeline(:success, 'ref1', 'D') } - let!(:latest_successful_pipeline2) { create_pipeline(:success, 'ref2', 'D') } + let!(:latest_successful_pipeline1) do + create_pipeline(:success, 'ref1', 'D', project) + end + + let!(:latest_successful_pipeline2) do + create_pipeline(:success, 'ref2', 'D', project) + end it 'returns the latest successful pipeline for both refs' do refs = %w(ref1 ref2 ref3) @@ -708,6 +711,62 @@ describe Ci::Pipeline, :mailer do end end + describe '.latest_status_per_commit' do + let(:project) { create(:project) } + + before do + pairs = [ + %w[success ref1 123], + %w[manual master 123], + %w[failed ref 456] + ] + + pairs.each do |(status, ref, sha)| + create( + :ci_empty_pipeline, + status: status, + ref: ref, + sha: sha, + project: project + ) + end + end + + context 'without a ref' do + it 'returns a Hash containing the latest status per commit for all refs' do + expect(described_class.latest_status_per_commit(%w[123 456])) + .to eq({ '123' => 'manual', '456' => 'failed' }) + end + + it 'only includes the status of the given commit SHAs' do + expect(described_class.latest_status_per_commit(%w[123])) + .to eq({ '123' => 'manual' }) + end + + context 'when there are two pipelines for a ref and SHA' do + it 'returns the status of the latest pipeline' do + create( + :ci_empty_pipeline, + status: 'failed', + ref: 'master', + sha: '123', + project: project + ) + + expect(described_class.latest_status_per_commit(%w[123])) + .to eq({ '123' => 'failed' }) + end + end + end + + context 'with a ref' do + it 'only includes the 
pipelines for the given ref' do + expect(described_class.latest_status_per_commit(%w[123 456], 'master')) + .to eq({ '123' => 'manual' }) + end + end + end + describe '.internal_sources' do subject { described_class.internal_sources } @@ -809,62 +868,59 @@ describe Ci::Pipeline, :mailer do end describe '#set_config_source' do - context 'on object initialisation' do - context 'when pipelines does not contain needed data' do - let(:pipeline) do - Ci::Pipeline.new - end + context 'when pipelines does not contain needed data' do + it 'defines source to be unknown' do + pipeline.set_config_source - it 'defines source to be unknown' do - expect(pipeline).to be_unknown_source - end + expect(pipeline).to be_unknown_source + end + end + + context 'when pipeline contains all needed data' do + let(:pipeline) do + create(:ci_pipeline, project: project, + sha: '1234', + ref: 'master', + source: :push) end - context 'when pipeline contains all needed data' do - let(:pipeline) do - Ci::Pipeline.new( - project: project, - sha: '1234', - ref: 'master', - source: :push) + context 'when the repository has a config file' do + before do + allow(project.repository).to receive(:gitlab_ci_yml_for) + .and_return('config') end - context 'when the repository has a config file' do - before do - allow(project.repository).to receive(:gitlab_ci_yml_for) - .and_return('config') - end + it 'defines source to be from repository' do + pipeline.set_config_source - it 'defines source to be from repository' do - expect(pipeline).to be_repository_source - end + expect(pipeline).to be_repository_source + end - context 'when loading an object' do - let(:new_pipeline) { Ci::Pipeline.find(pipeline.id) } + context 'when loading an object' do + let(:new_pipeline) { Ci::Pipeline.find(pipeline.id) } - it 'does not redefine the source' do - # force to overwrite the source - pipeline.unknown_source! + it 'does not redefine the source' do + # force to overwrite the source + pipeline.unknown_source! 
- expect(new_pipeline).to be_unknown_source - end + expect(new_pipeline).to be_unknown_source end end + end - context 'when the repository does not have a config file' do - let(:implied_yml) { Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps').content } + context 'when the repository does not have a config file' do + let(:implied_yml) { Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps').content } - context 'auto devops enabled' do - before do - stub_application_setting(auto_devops_enabled: true) - allow(project).to receive(:ci_config_path) { 'custom' } - end + context 'auto devops enabled' do + before do + stub_application_setting(auto_devops_enabled: true) + allow(project).to receive(:ci_config_path) { 'custom' } + end - it 'defines source to be auto devops' do - subject + it 'defines source to be auto devops' do + pipeline.set_config_source - expect(pipeline).to be_auto_devops_source - end + expect(pipeline).to be_auto_devops_source end end end @@ -1188,7 +1244,7 @@ describe Ci::Pipeline, :mailer do describe '#execute_hooks' do let!(:build_a) { create_build('a', 0) } - let!(:build_b) { create_build('b', 1) } + let!(:build_b) { create_build('b', 0) } let!(:hook) do create(:project_hook, project: project, pipeline_events: enabled) @@ -1244,6 +1300,8 @@ describe Ci::Pipeline, :mailer do end context 'when stage one failed' do + let!(:build_b) { create_build('b', 1) } + before do build_a.drop end @@ -1456,6 +1514,10 @@ describe Ci::Pipeline, :mailer do create(:ci_build, :success, :artifacts, pipeline: pipeline) end + it 'returns an Array' do + expect(pipeline.latest_builds_with_artifacts).to be_an_instance_of(Array) + end + it 'returns the latest builds' do expect(pipeline.latest_builds_with_artifacts).to eq([build]) end diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb index 584dfe9a5c1..a93e7e233a8 100644 --- a/spec/models/ci/runner_spec.rb +++ b/spec/models/ci/runner_spec.rb @@ -473,7 +473,7 @@ describe Ci::Runner do end describe '.search' do - let(:runner) { create(:ci_runner, token: '123abc') } + let(:runner) { create(:ci_runner, token: '123abc', description: 'test runner') } it 'returns runners with a matching token' do expect(described_class.search(runner.token)).to eq([runner]) diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb index b91a5e7a272..2683d21ddbe 100644 --- a/spec/models/clusters/cluster_spec.rb +++ b/spec/models/clusters/cluster_spec.rb @@ -9,7 +9,6 @@ describe Clusters::Cluster do it { is_expected.to delegate_method(:status_reason).to(:provider) } it { is_expected.to delegate_method(:status_name).to(:provider) } it { is_expected.to delegate_method(:on_creation?).to(:provider) } - it { is_expected.to delegate_method(:update_kubernetes_integration!).to(:platform) } it { is_expected.to respond_to :project } describe '.enabled' do @@ -199,4 +198,26 @@ describe Clusters::Cluster do end end end + + describe '#created?' do + let(:cluster) { create(:cluster, :provided_by_gcp) } + + subject { cluster.created? 
} + + context 'when status_name is :created' do + before do + allow(cluster).to receive_message_chain(:provider, :status_name).and_return(:created) + end + + it { is_expected.to eq(true) } + end + + context 'when status_name is not :created' do + before do + allow(cluster).to receive_message_chain(:provider, :status_name).and_return(:creating) + end + + it { is_expected.to eq(false) } + end + end end diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb index ed76be703a5..53a4e545ff6 100644 --- a/spec/models/clusters/platforms/kubernetes_spec.rb +++ b/spec/models/clusters/platforms/kubernetes_spec.rb @@ -5,6 +5,8 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching include ReactiveCachingHelpers it { is_expected.to belong_to(:cluster) } + it { is_expected.to be_kind_of(Gitlab::Kubernetes) } + it { is_expected.to be_kind_of(ReactiveCaching) } it { is_expected.to respond_to :ca_pem } describe 'before_validation' do @@ -90,99 +92,175 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching end end - describe 'after_save from Clusters::Cluster' do - context 'when platform_kubernetes is being cerated' do - let(:enabled) { true } - let(:project) { create(:project) } - let(:cluster) { build(:cluster, provider_type: :gcp, platform_type: :kubernetes, platform_kubernetes: platform, provider_gcp: provider, enabled: enabled, projects: [project]) } - let(:platform) { build(:cluster_platform_kubernetes, :configured) } - let(:provider) { build(:cluster_provider_gcp) } - let(:kubernetes_service) { project.kubernetes_service } + describe '#actual_namespace' do + subject { kubernetes.actual_namespace } - it 'updates KubernetesService' do - cluster.save! 
+ let!(:cluster) { create(:cluster, :project, platform_kubernetes: kubernetes) } + let(:project) { cluster.project } + let(:kubernetes) { create(:cluster_platform_kubernetes, :configured, namespace: namespace) } - expect(kubernetes_service.active).to eq(enabled) - expect(kubernetes_service.api_url).to eq(platform.api_url) - expect(kubernetes_service.namespace).to eq(platform.namespace) - expect(kubernetes_service.ca_pem).to eq(platform.ca_cert) - end + context 'when namespace is present' do + let(:namespace) { 'namespace-123' } + + it { is_expected.to eq(namespace) } end - context 'when platform_kubernetes has been created' do - let(:enabled) { false } - let!(:project) { create(:project) } - let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) } - let(:platform) { cluster.platform } - let(:kubernetes_service) { project.kubernetes_service } + context 'when namespace is not present' do + let(:namespace) { nil } + + it { is_expected.to eq("#{project.path}-#{project.id}") } + end + end - it 'updates KubernetesService' do - cluster.update(enabled: enabled) + describe '#default_namespace' do + subject { kubernetes.send(:default_namespace) } - expect(kubernetes_service.active).to eq(enabled) + let(:kubernetes) { create(:cluster_platform_kubernetes, :configured) } + + context 'when cluster belongs to a project' do + let!(:cluster) { create(:cluster, :project, platform_kubernetes: kubernetes) } + let(:project) { cluster.project } + + it { is_expected.to eq("#{project.path}-#{project.id}") } + end + + context 'when cluster belongs to nothing' do + let!(:cluster) { create(:cluster, platform_kubernetes: kubernetes) } + + it { is_expected.to be_nil } + end + end + + describe '#predefined_variables' do + let!(:cluster) { create(:cluster, :project, platform_kubernetes: kubernetes) } + let(:kubernetes) { create(:cluster_platform_kubernetes, api_url: api_url, ca_cert: ca_pem, token: token) } + let(:api_url) { 'https://kube.domain.com' } + let(:ca_pem) { 'CA PEM DATA' } + let(:token) { 'token' } + + let(:kubeconfig) do + config_file = expand_fixture_path('config/kubeconfig.yml') + config = YAML.load(File.read(config_file)) + config.dig('users', 0, 'user')['token'] = token + config.dig('contexts', 0, 'context')['namespace'] = namespace + config.dig('clusters', 0, 'cluster')['certificate-authority-data'] = + Base64.strict_encode64(ca_pem) + + YAML.dump(config) + end + + shared_examples 'setting variables' do + it 'sets the variables' do + expect(kubernetes.predefined_variables).to include( + { key: 'KUBE_URL', value: api_url, public: true }, + { key: 'KUBE_TOKEN', value: token, public: false }, + { key: 'KUBE_NAMESPACE', value: namespace, public: true }, + { key: 'KUBECONFIG', value: kubeconfig, public: false, file: true }, + { key: 'KUBE_CA_PEM', value: ca_pem, public: true }, + { key: 'KUBE_CA_PEM_FILE', value: ca_pem, public: true, file: true } + ) end end - context 'when kubernetes_service has been configured without cluster integration' do - let!(:project) { create(:project) } - let(:cluster) { build(:cluster, provider_type: :gcp, platform_type: :kubernetes, platform_kubernetes: platform, provider_gcp: provider, projects: [project]) } - let(:platform) { build(:cluster_platform_kubernetes, :configured, api_url: 'https://111.111.111.111') } - let(:provider) { build(:cluster_provider_gcp) } + context 'namespace is provided' do + let(:namespace) { 'my-project' } before do - create(:kubernetes_service, project: project) + kubernetes.namespace = namespace end - it 'raises an error' do - 
expect { cluster.save! }.to raise_error('Kubernetes service already configured') + it_behaves_like 'setting variables' + end + + context 'no namespace provided' do + let(:namespace) { kubernetes.actual_namespace } + + it_behaves_like 'setting variables' + + it 'sets the KUBE_NAMESPACE' do + kube_namespace = kubernetes.predefined_variables.find { |h| h[:key] == 'KUBE_NAMESPACE' } + + expect(kube_namespace).not_to be_nil + expect(kube_namespace[:value]).to match(/\A#{Gitlab::PathRegex::PATH_REGEX_STR}-\d+\z/) end end end - describe '#actual_namespace' do - subject { kubernetes.actual_namespace } + describe '#terminals' do + subject { service.terminals(environment) } - let!(:cluster) { create(:cluster, :project, platform_kubernetes: kubernetes) } + let!(:cluster) { create(:cluster, :project, platform_kubernetes: service) } let(:project) { cluster.project } - let(:kubernetes) { create(:cluster_platform_kubernetes, :configured, namespace: namespace) } + let(:service) { create(:cluster_platform_kubernetes, :configured) } + let(:environment) { build(:environment, project: project, name: "env", slug: "env-000000") } - context 'when namespace is present' do - let(:namespace) { 'namespace-123' } + context 'with invalid pods' do + it 'returns no terminals' do + stub_reactive_cache(service, pods: [{ "bad" => "pod" }]) - it { is_expected.to eq(namespace) } + is_expected.to be_empty + end end - context 'when namespace is not present' do - let(:namespace) { nil } + context 'with valid pods' do + let(:pod) { kube_pod(app: environment.slug) } + let(:terminals) { kube_terminals(service, pod) } - it { is_expected.to eq("#{project.path}-#{project.id}") } + before do + stub_reactive_cache( + service, + pods: [pod, pod, kube_pod(app: "should-be-filtered-out")] + ) + end + + it 'returns terminals' do + is_expected.to eq(terminals + terminals) + end + + it 'uses max session time from settings' do + stub_application_setting(terminal_max_session_time: 600) + + times = subject.map { |terminal| terminal[:max_session_time] } + expect(times).to eq [600, 600, 600, 600] + end end end - describe '.namespace_for_project' do - subject { described_class.namespace_for_project(project) } + describe '#calculate_reactive_cache' do + subject { service.calculate_reactive_cache } - let(:project) { create(:project) } + let!(:cluster) { create(:cluster, :project, enabled: enabled, platform_kubernetes: service) } + let(:service) { create(:cluster_platform_kubernetes, :configured) } + let(:enabled) { true } - it { is_expected.to eq("#{project.path}-#{project.id}") } - end + context 'when cluster is disabled' do + let(:enabled) { false } - describe '#default_namespace' do - subject { kubernetes.default_namespace } + it { is_expected.to be_nil } + end - let(:kubernetes) { create(:cluster_platform_kubernetes, :configured) } + context 'when kubernetes responds with valid pods' do + before do + stub_kubeclient_pods + end - context 'when cluster belongs to a project' do - let!(:cluster) { create(:cluster, :project, platform_kubernetes: kubernetes) } - let(:project) { cluster.project } + it { is_expected.to eq(pods: [kube_pod]) } + end - it { is_expected.to eq("#{project.path}-#{project.id}") } + context 'when kubernetes responds with 500s' do + before do + stub_kubeclient_pods(status: 500) + end + + it { expect { subject }.to raise_error(KubeException) } end - context 'when cluster belongs to nothing' do - let!(:cluster) { create(:cluster, platform_kubernetes: kubernetes) } + context 'when kubernetes responds with 404s' do + before do + 
stub_kubeclient_pods(status: 404) + end - it { is_expected.to be_nil } + it { is_expected.to eq(pods: []) } end end end diff --git a/spec/models/commit_collection_spec.rb b/spec/models/commit_collection_spec.rb new file mode 100644 index 00000000000..066fe7d154e --- /dev/null +++ b/spec/models/commit_collection_spec.rb @@ -0,0 +1,59 @@ +require 'spec_helper' + +describe CommitCollection do + let(:project) { create(:project, :repository) } + let(:commit) { project.commit } + + describe '#each' do + it 'yields every commit' do + collection = described_class.new(project, [commit]) + + expect { |b| collection.each(&b) }.to yield_with_args(commit) + end + end + + describe '#with_pipeline_status' do + it 'sets the pipeline status for every commit so no additional queries are necessary' do + create( + :ci_empty_pipeline, + ref: 'master', + sha: commit.id, + status: 'success', + project: project + ) + + collection = described_class.new(project, [commit]) + collection.with_pipeline_status + + recorder = ActiveRecord::QueryRecorder.new do + expect(commit.status).to eq('success') + end + + expect(recorder.count).to be_zero + end + end + + describe '#respond_to_missing?' do + it 'returns true when the underlying Array responds to the message' do + collection = described_class.new(project, []) + + expect(collection.respond_to?(:last)).to eq(true) + end + + it 'returns false when the underlying Array does not respond to the message' do + collection = described_class.new(project, []) + + expect(collection.respond_to?(:foo)).to eq(false) + end + end + + describe '#method_missing' do + it 'delegates undefined methods to the underlying Array' do + collection = described_class.new(project, [commit]) + + expect(collection.length).to eq(1) + expect(collection.last).to eq(commit) + expect(collection).not_to be_empty + end + end +end diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb index e3cfa149e3a..d18a5c9dfa6 100644 --- a/spec/models/commit_spec.rb +++ b/spec/models/commit_spec.rb @@ -351,12 +351,19 @@ eos end it 'gives compound status from latest pipelines if ref is nil' do - expect(commit.status(nil)).to eq(Ci::Pipeline.latest_status) - expect(commit.status(nil)).to eq('failed') + expect(commit.status(nil)).to eq(pipeline_from_fix.status) end end end + describe '#set_status_for_ref' do + it 'sets the status for a given reference' do + commit.set_status_for_ref('master', 'failed') + + expect(commit.status('master')).to eq('failed') + end + end + describe '#participants' do let(:user1) { build(:user) } let(:user2) { build(:user) } diff --git a/spec/models/concerns/avatarable_spec.rb b/spec/models/concerns/avatarable_spec.rb new file mode 100644 index 00000000000..cbdc438be0b --- /dev/null +++ b/spec/models/concerns/avatarable_spec.rb @@ -0,0 +1,44 @@ +require 'spec_helper' + +describe Avatarable do + subject { create(:project, avatar: fixture_file_upload(File.join(Rails.root, 'spec/fixtures/dk.png'))) } + + let(:gitlab_host) { "https://gitlab.example.com" } + let(:relative_url_root) { "/gitlab" } + let(:asset_host) { "https://gitlab-assets.example.com" } + + before do + stub_config_setting(base_url: gitlab_host) + stub_config_setting(relative_url_root: relative_url_root) + end + + describe '#avatar_path' do + using RSpec::Parameterized::TableSyntax + + where(:has_asset_host, :visibility_level, :only_path, :avatar_path) do + true | Project::PRIVATE | true | [gitlab_host, relative_url_root, subject.avatar.url] + true | Project::PRIVATE | false | [gitlab_host, relative_url_root, 
subject.avatar.url] + true | Project::INTERNAL | true | [gitlab_host, relative_url_root, subject.avatar.url] + true | Project::INTERNAL | false | [gitlab_host, relative_url_root, subject.avatar.url] + true | Project::PUBLIC | true | [subject.avatar.url] + true | Project::PUBLIC | false | [asset_host, subject.avatar.url] + false | Project::PRIVATE | true | [relative_url_root, subject.avatar.url] + false | Project::PRIVATE | false | [gitlab_host, relative_url_root, subject.avatar.url] + false | Project::INTERNAL | true | [relative_url_root, subject.avatar.url] + false | Project::INTERNAL | false | [gitlab_host, relative_url_root, subject.avatar.url] + false | Project::PUBLIC | true | [relative_url_root, subject.avatar.url] + false | Project::PUBLIC | false | [gitlab_host, relative_url_root, subject.avatar.url] + end + + with_them do + before do + allow(ActionController::Base).to receive(:asset_host).and_return(has_asset_host ? asset_host : nil) + subject.visibility_level = visibility_level + end + + it 'returns the expected avatar path' do + expect(subject.avatar_path(only_path: only_path)).to eq(avatar_path.join) + end + end + end +end diff --git a/spec/models/concerns/has_variable_spec.rb b/spec/models/concerns/has_variable_spec.rb index f4b24e6d1d9..f87869a2fdc 100644 --- a/spec/models/concerns/has_variable_spec.rb +++ b/spec/models/concerns/has_variable_spec.rb @@ -9,6 +9,24 @@ describe HasVariable do it { is_expected.not_to allow_value('foo bar').for(:key) } it { is_expected.not_to allow_value('foo/bar').for(:key) } + describe '#key=' do + context 'when the new key is nil' do + it 'strips leading and trailing whitespaces' do + subject.key = nil + + expect(subject.key).to eq('') + end + end + + context 'when the new key has leadind and trailing whitespaces' do + it 'strips leading and trailing whitespaces' do + subject.key = ' my key ' + + expect(subject.key).to eq('my key') + end + end + end + describe '#value' do before do subject.value = 'secret' diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb index ba57301a3c9..9df26f06a11 100644 --- a/spec/models/concerns/issuable_spec.rb +++ b/spec/models/concerns/issuable_spec.rb @@ -67,6 +67,7 @@ describe Issuable do describe ".search" do let!(:searchable_issue) { create(:issue, title: "Searchable awesome issue") } + let!(:searchable_issue2) { create(:issue, title: 'Aw') } it 'returns issues with a matching title' do expect(issuable_class.search(searchable_issue.title)) @@ -86,8 +87,8 @@ describe Issuable do expect(issuable_class.search('searchable issue')).to eq([searchable_issue]) end - it 'returns all issues with a query shorter than 3 chars' do - expect(issuable_class.search('zz')).to eq(issuable_class.all) + it 'returns issues with a matching title for a query shorter than 3 chars' do + expect(issuable_class.search(searchable_issue2.title.downcase)).to eq([searchable_issue2]) end end @@ -95,6 +96,7 @@ describe Issuable do let!(:searchable_issue) do create(:issue, title: "Searchable awesome issue", description: 'Many cute kittens') end + let!(:searchable_issue2) { create(:issue, title: "Aw", description: "Cu") } it 'returns issues with a matching title' do expect(issuable_class.full_search(searchable_issue.title)) @@ -133,8 +135,8 @@ describe Issuable do expect(issuable_class.full_search('many kittens')).to eq([searchable_issue]) end - it 'returns all issues with a query shorter than 3 chars' do - expect(issuable_class.search('zz')).to eq(issuable_class.all) + it 'returns issues with a matching 
description for a query shorter than 3 chars' do + expect(issuable_class.full_search(searchable_issue2.description.downcase)).to eq([searchable_issue2]) end end @@ -169,7 +171,7 @@ describe Issuable do it "returns false when record has been updated" do allow(issue).to receive(:today?).and_return(true) - issue.touch + issue.update_attribute(:updated_at, 1.hour.ago) expect(issue.new?).to be_falsey end end @@ -265,25 +267,44 @@ describe Issuable do end describe '#to_hook_data' do + let(:builder) { double } + context 'labels are updated' do let(:labels) { create_list(:label, 2) } before do issue.update(labels: [labels[1]]) + expect(Gitlab::HookData::IssuableBuilder) + .to receive(:new).with(issue).and_return(builder) end it 'delegates to Gitlab::HookData::IssuableBuilder#build' do - builder = double + expect(builder).to receive(:build).with( + user: user, + changes: hash_including( + 'labels' => [[labels[0].hook_attrs], [labels[1].hook_attrs]] + )) + + issue.to_hook_data(user, old_associations: { labels: [labels[0]] }) + end + end + context 'total_time_spent is updated' do + before do + issue.spend_time(duration: 2, user: user, spent_at: Time.now) + issue.save expect(Gitlab::HookData::IssuableBuilder) .to receive(:new).with(issue).and_return(builder) + end + + it 'delegates to Gitlab::HookData::IssuableBuilder#build' do expect(builder).to receive(:build).with( user: user, changes: hash_including( - 'labels' => [[labels[0].hook_attrs], [labels[1].hook_attrs]] + 'total_time_spent' => [1, 2] )) - issue.to_hook_data(user, old_labels: [labels[0]]) + issue.to_hook_data(user, old_associations: { total_time_spent: 1 }) end end @@ -292,20 +313,18 @@ describe Issuable do before do issue.assignees << user << user2 + expect(Gitlab::HookData::IssuableBuilder) + .to receive(:new).with(issue).and_return(builder) end it 'delegates to Gitlab::HookData::IssuableBuilder#build' do - builder = double - - expect(Gitlab::HookData::IssuableBuilder) - .to receive(:new).with(issue).and_return(builder) expect(builder).to receive(:build).with( user: user, changes: hash_including( 'assignees' => [[user.hook_attrs], [user.hook_attrs, user2.hook_attrs]] )) - issue.to_hook_data(user, old_assignees: [user]) + issue.to_hook_data(user, old_associations: { assignees: [user] }) end end @@ -316,13 +335,11 @@ describe Issuable do before do merge_request.update(assignee: user) merge_request.update(assignee: user2) + expect(Gitlab::HookData::IssuableBuilder) + .to receive(:new).with(merge_request).and_return(builder) end it 'delegates to Gitlab::HookData::IssuableBuilder#build' do - builder = double - - expect(Gitlab::HookData::IssuableBuilder) - .to receive(:new).with(merge_request).and_return(builder) expect(builder).to receive(:build).with( user: user, changes: hash_including( @@ -330,7 +347,7 @@ describe Issuable do 'assignee' => [user.hook_attrs, user2.hook_attrs] )) - merge_request.to_hook_data(user, old_assignees: [user]) + merge_request.to_hook_data(user, old_associations: { assignees: [user] }) end end end diff --git a/spec/models/concerns/manual_inverse_association_spec.rb b/spec/models/concerns/manual_inverse_association_spec.rb new file mode 100644 index 00000000000..aad40883854 --- /dev/null +++ b/spec/models/concerns/manual_inverse_association_spec.rb @@ -0,0 +1,51 @@ +require 'spec_helper' + +describe ManualInverseAssociation do + let(:model) do + Class.new(MergeRequest) do + belongs_to :manual_association, class_name: 'MergeRequestDiff', foreign_key: :latest_merge_request_diff_id + manual_inverse_association 
:manual_association, :merge_request + end + end + + before do + stub_const("#{described_class}::Model", model) + end + + let(:instance) { create(:merge_request).becomes(model) } + + describe '.manual_inverse_association' do + context 'when the relation exists' do + before do + instance.create_merge_request_diff + instance.reload + end + + it 'loads the relation' do + expect(instance.manual_association).to be_an_instance_of(MergeRequestDiff) + end + + it 'does not perform extra queries after loading' do + instance.manual_association + + expect { instance.manual_association.merge_request } + .not_to exceed_query_limit(0) + end + + it 'passes arguments to the default association method, to allow reloading' do + query_count = ActiveRecord::QueryRecorder.new do + instance.manual_association + instance.manual_association(true) + end.count + + expect(query_count).to eq(2) + end + end + + context 'when the relation does not return a value' do + it 'does not try to set an inverse' do + expect(instance.manual_association).to be_nil + end + end + end +end diff --git a/spec/models/concerns/milestoneish_spec.rb b/spec/models/concerns/milestoneish_spec.rb index 66353935427..9048da0c73d 100644 --- a/spec/models/concerns/milestoneish_spec.rb +++ b/spec/models/concerns/milestoneish_spec.rb @@ -186,4 +186,21 @@ describe Milestone, 'Milestoneish' do expect(milestone.elapsed_days).to eq(2) end end + + describe '#total_issue_time_spent' do + it 'calculates total issue time spent' do + closed_issue_1.spend_time(duration: 300, user: author) + closed_issue_1.save! + closed_issue_2.spend_time(duration: 600, user: assignee) + closed_issue_2.save! + + expect(milestone.total_issue_time_spent).to eq(900) + end + end + + describe '#human_total_issue_time_spent' do + it 'returns nil if no time has been spent' do + expect(milestone.human_total_issue_time_spent).to be_nil + end + end end diff --git a/spec/models/diff_note_spec.rb b/spec/models/diff_note_spec.rb index da972d2d86a..4d0b3245a13 100644 --- a/spec/models/diff_note_spec.rb +++ b/spec/models/diff_note_spec.rb @@ -9,13 +9,14 @@ describe DiffNote do let(:path) { "files/ruby/popen.rb" } + let(:diff_refs) { merge_request.diff_refs } let!(:position) do Gitlab::Diff::Position.new( old_path: path, new_path: path, old_line: nil, new_line: 14, - diff_refs: merge_request.diff_refs + diff_refs: diff_refs ) end @@ -25,7 +26,7 @@ describe DiffNote do new_path: path, old_line: 16, new_line: 22, - diff_refs: merge_request.diff_refs + diff_refs: diff_refs ) end @@ -158,25 +159,21 @@ describe DiffNote do describe "creation" do describe "updating of position" do context "when noteable is a commit" do - let(:diff_note) { create(:diff_note_on_commit, project: project, position: position) } + let(:diff_refs) { commit.diff_refs } - it "doesn't update the position" do - diff_note + subject { create(:diff_note_on_commit, project: project, position: position, commit_id: commit.id) } - expect(diff_note.original_position).to eq(position) - expect(diff_note.position).to eq(position) + it "doesn't update the position" do + is_expected.to have_attributes(original_position: position, + position: position) end end context "when noteable is a merge request" do - let(:diff_note) { create(:diff_note_on_merge_request, project: project, position: position, noteable: merge_request) } - context "when the note is active" do it "doesn't update the position" do - diff_note - - expect(diff_note.original_position).to eq(position) - expect(diff_note.position).to eq(position) + 
expect(subject.original_position).to eq(position) + expect(subject.position).to eq(position) end end @@ -186,10 +183,8 @@ describe DiffNote do end it "updates the position" do - diff_note - - expect(diff_note.original_position).to eq(position) - expect(diff_note.position).not_to eq(position) + expect(subject.original_position).to eq(position) + expect(subject.position).not_to eq(position) end end end @@ -283,6 +278,12 @@ describe DiffNote do expect(diff_line).to be nil expect(subject).to be_valid end + + it "does not update the position" do + expect(subject).not_to receive(:update_position) + + subject.save + end end it "returns true for on_image?" do diff --git a/spec/models/diff_viewer/base_spec.rb b/spec/models/diff_viewer/base_spec.rb index b26de3f3b97..c90b32c5d77 100644 --- a/spec/models/diff_viewer/base_spec.rb +++ b/spec/models/diff_viewer/base_spec.rb @@ -32,10 +32,8 @@ describe DiffViewer::Base do end context 'when the binaryness does not match' do - before do - allow(diff_file.old_blob).to receive(:binary?).and_return(false) - allow(diff_file.new_blob).to receive(:binary?).and_return(false) - end + let(:commit) { project.commit_by(oid: 'ae73cb07c9eeaf35924a10f713b364d32b2dd34f') } + let(:diff_file) { commit.diffs.diff_file_with_new_path('Gemfile.zip') } it 'returns false' do expect(viewer_class.can_render?(diff_file)).to be_falsey @@ -60,8 +58,7 @@ describe DiffViewer::Base do context 'when the binaryness does not match' do before do - allow(diff_file.old_blob).to receive(:binary?).and_return(true) - allow(diff_file.new_blob).to receive(:binary?).and_return(true) + allow_any_instance_of(Blob).to receive(:binary?).and_return(true) end it 'returns false' do @@ -77,12 +74,12 @@ describe DiffViewer::Base do end context 'when the file was renamed and only the old blob is supported' do - let(:commit) { project.commit('2f63565e7aac07bcdadb654e253078b727143ec4') } + let(:commit) { project.commit_by(oid: '2f63565e7aac07bcdadb654e253078b727143ec4') } let(:diff_file) { commit.diffs.diff_file_with_new_path('files/images/6049019_460s.jpg') } before do allow(diff_file).to receive(:renamed_file?).and_return(true) - allow(diff_file.new_blob).to receive(:extension).and_return('jpeg') + viewer_class.extensions = %w(notjpg) end it 'returns false' do @@ -94,8 +91,7 @@ describe DiffViewer::Base do describe '#collapsed?' do context 'when the combined blob size is larger than the collapse limit' do before do - allow(diff_file.old_blob).to receive(:raw_size).and_return(512.kilobytes) - allow(diff_file.new_blob).to receive(:raw_size).and_return(513.kilobytes) + allow(diff_file).to receive(:raw_size).and_return(1025.kilobytes) end it 'returns true' do @@ -113,8 +109,7 @@ describe DiffViewer::Base do describe '#too_large?' 
do context 'when the combined blob size is larger than the size limit' do before do - allow(diff_file.old_blob).to receive(:raw_size).and_return(2.megabytes) - allow(diff_file.new_blob).to receive(:raw_size).and_return(4.megabytes) + allow(diff_file).to receive(:raw_size).and_return(6.megabytes) end it 'returns true' do @@ -132,8 +127,7 @@ describe DiffViewer::Base do describe '#render_error' do context 'when the combined blob size is larger than the size limit' do before do - allow(diff_file.old_blob).to receive(:raw_size).and_return(2.megabytes) - allow(diff_file.new_blob).to receive(:raw_size).and_return(4.megabytes) + allow(diff_file).to receive(:raw_size).and_return(6.megabytes) end it 'returns :too_large' do diff --git a/spec/models/diff_viewer/server_side_spec.rb b/spec/models/diff_viewer/server_side_spec.rb index 92e613f92de..98a8f6d4cc9 100644 --- a/spec/models/diff_viewer/server_side_spec.rb +++ b/spec/models/diff_viewer/server_side_spec.rb @@ -1,9 +1,9 @@ require 'spec_helper' describe DiffViewer::ServerSide do - let(:project) { create(:project, :repository) } - let(:commit) { project.commit('570e7b2abdd848b95f2f578043fc23bd6f6fd24d') } - let(:diff_file) { commit.diffs.diff_file_with_new_path('files/ruby/popen.rb') } + set(:project) { create(:project, :repository) } + let(:commit) { project.commit_by(oid: '570e7b2abdd848b95f2f578043fc23bd6f6fd24d') } + let!(:diff_file) { commit.diffs.diff_file_with_new_path('files/ruby/popen.rb') } let(:viewer_class) do Class.new(DiffViewer::Base) do @@ -15,8 +15,7 @@ describe DiffViewer::ServerSide do describe '#prepare!' do it 'loads all diff file data' do - expect(diff_file.old_blob).to receive(:load_all_data!) - expect(diff_file.new_blob).to receive(:load_all_data!) + expect(Blob).to receive(:lazy).at_least(:twice) subject.prepare! 
end diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb index 1ce1d595c60..6f24a039998 100644 --- a/spec/models/environment_spec.rb +++ b/spec/models/environment_spec.rb @@ -327,15 +327,28 @@ describe Environment do context 'when the enviroment is available' do context 'with a deployment service' do - let(:project) { create(:kubernetes_project) } + shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do + context 'and a deployment' do + let!(:deployment) { create(:deployment, environment: environment) } + it { is_expected.to be_truthy } + end - context 'and a deployment' do - let!(:deployment) { create(:deployment, environment: environment) } - it { is_expected.to be_truthy } + context 'but no deployments' do + it { is_expected.to be_falsy } + end end - context 'but no deployments' do - it { is_expected.to be_falsy } + context 'when user configured kubernetes from Integration > Kubernetes' do + let(:project) { create(:kubernetes_project) } + + it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' + end + + context 'when user configured kubernetes from CI/CD > Clusters' do + let!(:cluster) { create(:cluster, :project, :provided_by_gcp) } + let(:project) { cluster.project } + + it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' end end @@ -356,7 +369,6 @@ describe Environment do end describe '#terminals' do - let(:project) { create(:kubernetes_project) } subject { environment.terminals } context 'when the environment has terminals' do @@ -364,12 +376,27 @@ describe Environment do allow(environment).to receive(:has_terminals?).and_return(true) end - it 'returns the terminals from the deployment service' do - expect(project.deployment_service) - .to receive(:terminals).with(environment) - .and_return(:fake_terminals) + shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do + it 'returns the terminals from the deployment service' do + expect(project.deployment_platform) + .to receive(:terminals).with(environment) + .and_return(:fake_terminals) + + is_expected.to eq(:fake_terminals) + end + end + + context 'when user configured kubernetes from Integration > Kubernetes' do + let(:project) { create(:kubernetes_project) } + + it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' + end + + context 'when user configured kubernetes from CI/CD > Clusters' do + let!(:cluster) { create(:cluster, :project, :provided_by_gcp) } + let(:project) { cluster.project } - is_expected.to eq(:fake_terminals) + it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' end end diff --git a/spec/models/fork_network_member_spec.rb b/spec/models/fork_network_member_spec.rb index 532ca1fca8c..25bf596fddc 100644 --- a/spec/models/fork_network_member_spec.rb +++ b/spec/models/fork_network_member_spec.rb @@ -5,4 +5,22 @@ describe ForkNetworkMember do it { is_expected.to validate_presence_of(:project) } it { is_expected.to validate_presence_of(:fork_network) } end + + describe 'destroying a ForkNetworkMember' do + let(:fork_network_member) { create(:fork_network_member) } + let(:fork_network) { fork_network_member.fork_network } + + it 'removes the fork network if it was the last member' do + fork_network.fork_network_members.destroy_all + + expect(ForkNetwork.count).to eq(0) + end + + it 'does not destroy the fork network if there are members left' do + fork_network_member.destroy! 
+ + # The root of the fork network is left + expect(ForkNetwork.count).to eq(1) + end + end end diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb index d4052a64570..5e82a2988ce 100644 --- a/spec/models/group_spec.rb +++ b/spec/models/group_spec.rb @@ -247,8 +247,6 @@ describe Group do describe '#avatar_url' do let!(:group) { create(:group, :access_requestable, :with_avatar) } let(:user) { create(:user) } - let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" } - let(:avatar_path) { "/uploads/-/system/group/avatar/#{group.id}/dk.png" } context 'when avatar file is uploaded' do before do @@ -256,12 +254,8 @@ describe Group do end it 'shows correct avatar url' do - expect(group.avatar_url).to eq(avatar_path) - expect(group.avatar_url(only_path: false)).to eq([gitlab_host, avatar_path].join) - - allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host) - - expect(group.avatar_url).to eq([gitlab_host, avatar_path].join) + expect(group.avatar_url).to eq(group.avatar.url) + expect(group.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, group.avatar.url].join) end end end diff --git a/spec/models/identity_spec.rb b/spec/models/identity_spec.rb index 3ed048744de..a45a6088831 100644 --- a/spec/models/identity_spec.rb +++ b/spec/models/identity_spec.rb @@ -33,5 +33,15 @@ describe Identity do expect(identity).to eq(ldap_identity) end end + + context 'any other provider' do + let!(:test_entity) { create(:identity, provider: 'test_provider', extern_uid: 'test_uid') } + + it 'the extern_uid lookup is case insensitive' do + identity = described_class.with_extern_uid('test_provider', 'TEST_UID').first + + expect(identity).to eq(test_entity) + end + end end end diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb index bb5033c1628..0ea287d007a 100644 --- a/spec/models/issue_spec.rb +++ b/spec/models/issue_spec.rb @@ -766,21 +766,7 @@ describe Issue do end end - describe '#update_project_counter_caches?' 
do - it 'returns true when the state changes' do - subject.state = 'closed' - - expect(subject.update_project_counter_caches?).to eq(true) - end - - it 'returns true when the confidential flag changes' do - subject.confidential = true - - expect(subject.update_project_counter_caches?).to eq(true) - end - - it 'returns false when the state or confidential flag did not change' do - expect(subject.update_project_counter_caches?).to eq(false) - end + it_behaves_like 'throttled touch' do + subject { create(:issue, updated_at: 1.hour.ago) } end end diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb index 81c2057e175..4cd9e3f4f1d 100644 --- a/spec/models/key_spec.rb +++ b/spec/models/key_spec.rb @@ -166,4 +166,27 @@ describe Key, :mailer do expect(key.public_key.key_text).to eq(valid_key) end end + + describe '#refresh_user_cache', :use_clean_rails_memory_store_caching do + context 'when the key belongs to a user' do + it 'refreshes the keys count cache for the user' do + expect_any_instance_of(Users::KeysCountService) + .to receive(:refresh_cache) + .and_call_original + + key = create(:personal_key) + + expect(Users::KeysCountService.new(key.user).count).to eq(1) + end + end + + context 'when the key does not belong to a user' do + it 'does nothing' do + expect_any_instance_of(Users::KeysCountService) + .not_to receive(:refresh_cache) + + create(:key) + end + end + end end diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb index 0cfaa17676e..d556004eccf 100644 --- a/spec/models/merge_request_diff_spec.rb +++ b/spec/models/merge_request_diff_spec.rb @@ -1,8 +1,10 @@ require 'spec_helper' describe MergeRequestDiff do + let(:diff_with_commits) { create(:merge_request).merge_request_diff } + describe 'create new record' do - subject { create(:merge_request).merge_request_diff } + subject { diff_with_commits } it { expect(subject).to be_valid } it { expect(subject).to be_persisted } @@ -18,62 +20,46 @@ describe MergeRequestDiff do let!(:first_diff) { mr.merge_request_diff } let!(:last_diff) { mr.create_merge_request_diff } - it { expect(last_diff.latest?).to be_truthy } - it { expect(first_diff.latest?).to be_falsey } + it { expect(last_diff.reload).to be_latest } + it { expect(first_diff.reload).not_to be_latest } end describe '#diffs' do - let(:mr) { create(:merge_request, :with_diffs) } - let(:mr_diff) { mr.merge_request_diff } - context 'when the :ignore_whitespace_change option is set' do it 'creates a new compare object instead of loading from the DB' do - expect(mr_diff).not_to receive(:load_diffs) - expect(Gitlab::Git::Compare).to receive(:new).and_call_original + expect(diff_with_commits).not_to receive(:load_diffs) + expect(diff_with_commits.compare).to receive(:diffs).and_call_original - mr_diff.raw_diffs(ignore_whitespace_change: true) + diff_with_commits.raw_diffs(ignore_whitespace_change: true) end end context 'when the raw diffs are empty' do before do - MergeRequestDiffFile.delete_all(merge_request_diff_id: mr_diff.id) - end - - it 'returns an empty DiffCollection' do - expect(mr_diff.raw_diffs).to be_a(Gitlab::Git::DiffCollection) - expect(mr_diff.raw_diffs).to be_empty - end - end - - context 'when the raw diffs have invalid content' do - before do - MergeRequestDiffFile.delete_all(merge_request_diff_id: mr_diff.id) - mr_diff.update_attributes(st_diffs: ["--broken-diff"]) + MergeRequestDiffFile.delete_all(merge_request_diff_id: diff_with_commits.id) end it 'returns an empty DiffCollection' do - expect(mr_diff.raw_diffs.to_a).to 
be_empty - expect(mr_diff.raw_diffs).to be_a(Gitlab::Git::DiffCollection) - expect(mr_diff.raw_diffs).to be_empty + expect(diff_with_commits.raw_diffs).to be_a(Gitlab::Git::DiffCollection) + expect(diff_with_commits.raw_diffs).to be_empty end end context 'when the raw diffs exist' do it 'returns the diffs' do - expect(mr_diff.raw_diffs).to be_a(Gitlab::Git::DiffCollection) - expect(mr_diff.raw_diffs).not_to be_empty + expect(diff_with_commits.raw_diffs).to be_a(Gitlab::Git::DiffCollection) + expect(diff_with_commits.raw_diffs).not_to be_empty end context 'when the :paths option is set' do - let(:diffs) { mr_diff.raw_diffs(paths: ['files/ruby/popen.rb', 'files/ruby/popen.rb']) } + let(:diffs) { diff_with_commits.raw_diffs(paths: ['files/ruby/popen.rb', 'files/ruby/popen.rb']) } it 'only returns diffs that match the (old path, new path) given' do expect(diffs.map(&:new_path)).to contain_exactly('files/ruby/popen.rb') end it 'uses the diffs from the DB' do - expect(mr_diff).to receive(:load_diffs) + expect(diff_with_commits).to receive(:load_diffs) diffs end @@ -117,51 +103,29 @@ describe MergeRequestDiff do end describe '#commit_shas' do - it 'returns all commits SHA using serialized commits' do - subject.st_commits = [ - { id: 'sha1' }, - { id: 'sha2' } - ] - - expect(subject.commit_shas).to eq(%w(sha1 sha2)) + it 'returns all commit SHAs using commits from the DB' do + expect(diff_with_commits.commit_shas).not_to be_empty + expect(diff_with_commits.commit_shas).to all(match(/\h{40}/)) end end describe '#compare_with' do - subject { create(:merge_request, source_branch: 'fix').merge_request_diff } - it 'delegates compare to the service' do expect(CompareService).to receive(:new).and_call_original - subject.compare_with(nil) + diff_with_commits.compare_with(nil) end it 'uses git diff A..B approach by default' do - diffs = subject.compare_with('0b4bc9a49b562e85de7cc9e834518ea6828729b9').diffs + diffs = diff_with_commits.compare_with('0b4bc9a49b562e85de7cc9e834518ea6828729b9').diffs - expect(diffs.size).to eq(3) + expect(diffs.size).to eq(21) end end describe '#commits_count' do it 'returns number of commits using serialized commits' do - subject.st_commits = [ - { id: 'sha1' }, - { id: 'sha2' } - ] - - expect(subject.commits_count).to eq 2 - end - end - - describe '#utf8_st_diffs' do - it 'does not raise error when a hash value is in binary' do - subject.st_diffs = [ - { diff: "\0" }, - { diff: "\x05\x00\x68\x65\x6c\x6c\x6f" } - ] - - expect { subject.utf8_st_diffs }.not_to raise_error + expect(diff_with_commits.commits_count).to eq(29) end end end diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb index d022dae3476..30a5a3bbff7 100644 --- a/spec/models/merge_request_spec.rb +++ b/spec/models/merge_request_spec.rb @@ -79,6 +79,43 @@ describe MergeRequest do end end + describe '.set_latest_merge_request_diff_ids!' 
do + def create_merge_request_with_diffs(source_branch, diffs: 2) + params = { + target_project: project, + target_branch: 'master', + source_project: project, + source_branch: source_branch + } + + create(:merge_request, params).tap do |mr| + diffs.times { mr.merge_request_diffs.create } + end + end + + let(:project) { create(:project) } + + it 'sets IDs for merge requests, whether they are already set or not' do + merge_requests = [ + create_merge_request_with_diffs('feature'), + create_merge_request_with_diffs('feature-conflict'), + create_merge_request_with_diffs('wip', diffs: 0), + create_merge_request_with_diffs('csv') + ] + + merge_requests.take(2).each do |merge_request| + merge_request.update_column(:latest_merge_request_diff_id, nil) + end + + expected = merge_requests.map do |merge_request| + merge_request.merge_request_diffs.maximum(:id) + end + + expect { project.merge_requests.set_latest_merge_request_diff_ids! } + .to change { merge_requests.map { |mr| mr.reload.latest_merge_request_diff_id } }.to(expected) + end + end + describe '#target_branch_sha' do let(:project) { create(:project, :repository) } @@ -222,7 +259,7 @@ describe MergeRequest do end describe '#source_branch_sha' do - let(:last_branch_commit) { subject.source_project.repository.commit(subject.source_branch) } + let(:last_branch_commit) { subject.source_project.repository.commit(Gitlab::Git::BRANCH_REF_PREFIX + subject.source_branch) } context 'with diffs' do subject { create(:merge_request, :with_diffs) } @@ -236,6 +273,21 @@ describe MergeRequest do it 'returns the sha of the source branch last commit' do expect(subject.source_branch_sha).to eq(last_branch_commit.sha) end + + context 'when there is a tag name matching the branch name' do + let(:tag_name) { subject.source_branch } + + it 'returns the sha of the source branch last commit' do + subject.source_project.repository.add_tag(subject.author, + tag_name, + subject.target_branch_sha, + 'Add a tag') + + expect(subject.source_branch_sha).to eq(last_branch_commit.sha) + + subject.source_project.repository.rm_tag(subject.author, tag_name) + end + end end context 'when the merge request is being created' do @@ -775,20 +827,47 @@ describe MergeRequest do end end - describe '#head_pipeline' do - describe 'when the source project exists' do - it 'returns the latest pipeline' do - pipeline = create(:ci_empty_pipeline, project: subject.source_project, ref: 'master', status: 'running', sha: "123abc", head_pipeline_of: subject) + context 'head pipeline' do + before do + allow(subject).to receive(:diff_head_sha).and_return('lastsha') + end + + describe '#head_pipeline' do + it 'returns nil for MR without head_pipeline_id' do + subject.update_attribute(:head_pipeline_id, nil) + + expect(subject.head_pipeline).to be_nil + end + + context 'when the source project does not exist' do + it 'returns nil' do + allow(subject).to receive(:source_project).and_return(nil) - expect(subject.head_pipeline).to eq(pipeline) + expect(subject.head_pipeline).to be_nil + end end end - describe 'when the source project does not exist' do - it 'returns nil' do + describe '#actual_head_pipeline' do + it 'returns nil for MR with old pipeline' do + pipeline = create(:ci_empty_pipeline, sha: 'notlatestsha') + subject.update_attribute(:head_pipeline_id, pipeline.id) + + expect(subject.actual_head_pipeline).to be_nil + end + + it 'returns the pipeline for MR with recent pipeline' do + pipeline = create(:ci_empty_pipeline, sha: 'lastsha') + subject.update_attribute(:head_pipeline_id, pipeline.id) 
+ + expect(subject.actual_head_pipeline).to eq(subject.head_pipeline) + expect(subject.actual_head_pipeline).to eq(pipeline) + end + + it 'returns nil when source project does not exist' do allow(subject).to receive(:source_project).and_return(nil) - expect(subject.head_pipeline).to be_nil + expect(subject.actual_head_pipeline).to be_nil end end end @@ -888,7 +967,7 @@ describe MergeRequest do end shared_examples 'returning all SHA' do - it 'returns all SHA from all merge_request_diffs' do + it 'returns all SHAs from all merge_request_diffs' do expect(subject.merge_request_diffs.size).to eq(2) expect(subject.all_commit_shas).to match_array(all_commit_shas) end @@ -896,7 +975,7 @@ describe MergeRequest do context 'with a completely different branch' do before do - subject.update(target_branch: 'v1.0.0') + subject.update(target_branch: 'csv') end it_behaves_like 'returning all SHA' @@ -904,7 +983,7 @@ describe MergeRequest do context 'with a branch having no difference' do before do - subject.update(target_branch: 'v1.1.0') + subject.update(target_branch: 'branch-merged') subject.reload # make sure commits were not cached end @@ -1127,7 +1206,7 @@ describe MergeRequest do context 'when it is only allowed to merge when build is green' do context 'and a failed pipeline is associated' do before do - pipeline.update(status: 'failed') + pipeline.update(status: 'failed', sha: subject.diff_head_sha) allow(subject).to receive(:head_pipeline) { pipeline } end @@ -1136,7 +1215,7 @@ describe MergeRequest do context 'and a successful pipeline is associated' do before do - pipeline.update(status: 'success') + pipeline.update(status: 'success', sha: subject.diff_head_sha) allow(subject).to receive(:head_pipeline) { pipeline } end @@ -1145,7 +1224,7 @@ describe MergeRequest do context 'and a skipped pipeline is associated' do before do - pipeline.update(status: 'skipped') + pipeline.update(status: 'skipped', sha: subject.diff_head_sha) allow(subject).to receive(:head_pipeline) { pipeline } end @@ -1773,15 +1852,7 @@ describe MergeRequest do end end - describe '#update_project_counter_caches?' do - it 'returns true when the state changes' do - subject.state = 'closed' - - expect(subject.update_project_counter_caches?).to eq(true) - end - - it 'returns false when the state did not change' do - expect(subject.update_project_counter_caches?).to eq(false) - end + it_behaves_like 'throttled touch' do + subject { create(:merge_request, updated_at: 1.hour.ago) } end end diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb index 13e37fffa4e..47f4a792e5c 100644 --- a/spec/models/milestone_spec.rb +++ b/spec/models/milestone_spec.rb @@ -11,7 +11,7 @@ describe Milestone do milestone = build(:milestone, start_date: Date.tomorrow, due_date: Date.yesterday) expect(milestone).not_to be_valid - expect(milestone.errors[:start_date]).to include("Can't be greater than due date") + expect(milestone.errors[:due_date]).to include("must be greater than start date") end end end diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb index 90b768f595e..3817f20bfe7 100644 --- a/spec/models/namespace_spec.rb +++ b/spec/models/namespace_spec.rb @@ -531,7 +531,7 @@ describe Namespace do end end - describe '#has_forks_of?' do + describe '#find_fork_of?' 
do let(:project) { create(:project, :public) } let!(:forked_project) { fork_project(project, namespace.owner, namespace: namespace) } @@ -550,5 +550,13 @@ describe Namespace do expect(other_namespace.find_fork_of(project)).to eq(other_fork) end + + context 'with request store enabled', :request_store do + it 'only queries once' do + expect(project.fork_network).to receive(:find_forks_in).once.and_call_original + + 2.times { namespace.find_fork_of(project) } + end + end end end diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb index 1ecb50586c7..e1a0c55b6a6 100644 --- a/spec/models/note_spec.rb +++ b/spec/models/note_spec.rb @@ -5,7 +5,7 @@ describe Note do describe 'associations' do it { is_expected.to belong_to(:project) } - it { is_expected.to belong_to(:noteable).touch(true) } + it { is_expected.to belong_to(:noteable).touch(false) } it { is_expected.to belong_to(:author).class_name('User') } it { is_expected.to have_many(:todos).dependent(:destroy) } @@ -231,6 +231,37 @@ describe Note do end end + describe '#cross_reference?' do + it 'falsey for user-generated notes' do + note = create(:note, system: false) + + expect(note.cross_reference?).to be_falsy + end + + context 'when the note might contain cross references' do + SystemNoteMetadata::TYPES_WITH_CROSS_REFERENCES.each do |type| + let(:note) { create(:note, :system) } + let!(:metadata) { create(:system_note_metadata, note: note, action: type) } + + it 'delegates to the cross-reference regex' do + expect(note).to receive(:matches_cross_reference_regex?).and_return(false) + + note.cross_reference? + end + end + end + + context 'when the note cannot contain cross references' do + let(:commit_note) { build(:note, note: 'mentioned in 1312312313 something else.', system: true) } + let(:label_note) { build(:note, note: 'added ~2323232323', system: true) } + + it 'scan for a `mentioned in` prefix' do + expect(commit_note.cross_reference?).to be_truthy + expect(label_note.cross_reference?).to be_falsy + end + end + end + describe 'clear_blank_line_code!' 
do it 'clears a blank line code before validation' do note = build(:note, line_code: ' ') diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb index 01440b15674..2bb1c49b740 100644 --- a/spec/models/personal_access_token_spec.rb +++ b/spec/models/personal_access_token_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' describe PersonalAccessToken do + subject { described_class } + describe '.build' do let(:personal_access_token) { build(:personal_access_token) } let(:invalid_personal_access_token) { build(:personal_access_token, :invalid) } @@ -45,6 +47,29 @@ describe PersonalAccessToken do end end + describe 'Redis storage' do + let(:user_id) { 123 } + let(:token) { 'abc000foo' } + + before do + subject.redis_store!(user_id, token) + end + + it 'returns stored data' do + expect(subject.redis_getdel(user_id)).to eq(token) + end + + context 'after deletion' do + before do + expect(subject.redis_getdel(user_id)).to eq(token) + end + + it 'token is removed' do + expect(subject.redis_getdel(user_id)).to be_nil + end + end + end + context "validations" do let(:personal_access_token) { build(:personal_access_token) } diff --git a/spec/models/project_services/flowdock_service_spec.rb b/spec/models/project_services/flowdock_service_spec.rb index 5e8e880985e..fabcb142858 100644 --- a/spec/models/project_services/flowdock_service_spec.rb +++ b/spec/models/project_services/flowdock_service_spec.rb @@ -46,6 +46,7 @@ describe FlowdockService do @sample_data[:commits].each do |commit| # One request to Flowdock per new commit next if commit[:id] == @sample_data[:before] + expect(WebMock).to have_requested(:post, @api_url).with( body: /#{commit[:id]}.*#{project.path}/ ).once diff --git a/spec/models/project_services/kubernetes_service_spec.rb b/spec/models/project_services/kubernetes_service_spec.rb index 1c629155e1e..f037ee77a94 100644 --- a/spec/models/project_services/kubernetes_service_spec.rb +++ b/spec/models/project_services/kubernetes_service_spec.rb @@ -4,8 +4,8 @@ describe KubernetesService, :use_clean_rails_memory_store_caching do include KubernetesHelpers include ReactiveCachingHelpers - let(:project) { build_stubbed(:kubernetes_project) } - let(:service) { project.kubernetes_service } + let(:project) { create(:kubernetes_project) } + let(:service) { project.deployment_platform } describe 'Associations' do it { is_expected.to belong_to :project } diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index 6185f55c1dc..f4699fd243d 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -78,7 +78,7 @@ describe Project do it { is_expected.to have_many(:uploads).dependent(:destroy) } it { is_expected.to have_many(:pipeline_schedules) } it { is_expected.to have_many(:members_and_requesters) } - it { is_expected.to have_one(:cluster) } + it { is_expected.to have_many(:clusters) } it { is_expected.to have_many(:custom_attributes).class_name('ProjectCustomAttribute') } context 'after initialized' do @@ -138,6 +138,7 @@ describe Project do it { is_expected.to validate_length_of(:ci_config_path).is_at_most(255) } it { is_expected.to allow_value('').for(:ci_config_path) } it { is_expected.not_to allow_value('test/../foo').for(:ci_config_path) } + it { is_expected.not_to allow_value('/test/foo').for(:ci_config_path) } it { is_expected.to validate_presence_of(:creator) } @@ -312,9 +313,7 @@ describe Project do it { is_expected.to delegate_method(method).to(:team) } end - it { is_expected.to 
delegate_method(:empty_repo?).to(:repository) } it { is_expected.to delegate_method(:members).to(:team).with_prefix(true) } - it { is_expected.to delegate_method(:count).to(:forks).with_prefix(true) } it { is_expected.to delegate_method(:name).to(:owner).with_prefix(true).with_arguments(allow_nil: true) } end @@ -451,7 +450,7 @@ describe Project do end end - describe "#new_issue_address" do + describe "#new_issuable_address" do let(:project) { create(:project, path: "somewhere") } let(:user) { create(:user) } @@ -463,7 +462,13 @@ describe Project do it 'returns the address to create a new issue' do address = "p+#{project.full_path}+#{user.incoming_email_token}@gl.ab" - expect(project.new_issue_address(user)).to eq(address) + expect(project.new_issuable_address(user, 'issue')).to eq(address) + end + + it 'returns the address to create a new merge request' do + address = "p+#{project.full_path}+merge-request+#{user.incoming_email_token}@gl.ab" + + expect(project.new_issuable_address(user, 'merge_request')).to eq(address) end end @@ -473,7 +478,11 @@ describe Project do end it 'returns nil' do - expect(project.new_issue_address(user)).to be_nil + expect(project.new_issuable_address(user, 'issue')).to be_nil + end + + it 'returns nil' do + expect(project.new_issuable_address(user, 'merge_request')).to be_nil end end end @@ -646,6 +655,24 @@ describe Project do end end + describe '#empty_repo?' do + context 'when the repo does not exist' do + let(:project) { build_stubbed(:project) } + + it 'returns true' do + expect(project.empty_repo?).to be(true) + end + end + + context 'when the repo exists' do + let(:project) { create(:project, :repository) } + let(:empty_project) { create(:project, :empty_repo) } + + it { expect(empty_project.empty_repo?).to be(true) } + it { expect(project.empty_repo?).to be(false) } + end + end + describe '#external_issue_tracker' do let(:project) { create(:project) } let(:ext_project) { create(:redmine_project) } @@ -883,20 +910,14 @@ describe Project do context 'when avatar file is uploaded' do let(:project) { create(:project, :public, :with_avatar) } - let(:avatar_path) { "/uploads/-/system/project/avatar/#{project.id}/dk.png" } - let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" } it 'shows correct url' do - expect(project.avatar_url).to eq(avatar_path) - expect(project.avatar_url(only_path: false)).to eq([gitlab_host, avatar_path].join) - - allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host) - - expect(project.avatar_url).to eq([gitlab_host, avatar_path].join) + expect(project.avatar_url).to eq(project.avatar.url) + expect(project.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, project.avatar.url].join) end end - context 'When avatar file in git' do + context 'when avatar file in git' do before do allow(project).to receive(:avatar_in_git) { true } end @@ -1260,24 +1281,6 @@ describe Project do expect(described_class.search(project.path.upcase)).to eq([project]) end - it 'returns projects with a matching namespace name' do - expect(described_class.search(project.namespace.name)).to eq([project]) - end - - it 'returns projects with a partially matching namespace name' do - expect(described_class.search(project.namespace.name[0..2])).to eq([project]) - end - - it 'returns projects with a matching namespace name regardless of the casing' do - expect(described_class.search(project.namespace.name.upcase)).to eq([project]) - end - - it 'returns projects when eager loading namespaces' do - relation = 
described_class.all.includes(:namespace) - - expect(relation.search(project.namespace.name)).to eq([project]) - end - describe 'with pending_delete project' do let(:pending_delete_project) { create(:project, pending_delete: true) } @@ -1572,8 +1575,8 @@ describe Project do expect(project.ci_config_path).to eq('foo/.gitlab_ci.yml') end - it 'sets a string but removes all leading slashes and null characters' do - project.update!(ci_config_path: "///f\0oo/\0/.gitlab_ci.yml") + it 'sets a string but removes all null characters' do + project.update!(ci_config_path: "f\0oo/\0/.gitlab_ci.yml") expect(project.ci_config_path).to eq('foo//.gitlab_ci.yml') end @@ -1740,8 +1743,7 @@ describe Project do expect(RepositoryForkWorker).to receive(:perform_async).with( project.id, forked_from_project.repository_storage_path, - forked_from_project.disk_path, - project.namespace.full_path).and_return(import_jid) + forked_from_project.disk_path).and_return(import_jid) expect(project.add_import_job).to eq(import_jid) end @@ -1944,6 +1946,24 @@ describe Project do expect(second_fork.fork_source).to eq(project) end end + + describe '#lfs_storage_project' do + it 'returns self for non-forks' do + expect(project.lfs_storage_project).to eq project + end + + it 'returns the fork network root for forks' do + second_fork = fork_project(forked_project) + + expect(second_fork.lfs_storage_project).to eq project + end + + it 'returns self when fork_source is nil' do + expect(forked_project).to receive(:fork_source).and_return(nil) + + expect(forked_project.lfs_storage_project).to eq forked_project + end + end end describe '#pushes_since_gc' do @@ -2008,12 +2028,25 @@ describe Project do end context 'when project has a deployment service' do - let(:project) { create(:kubernetes_project) } + shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do + it 'returns variables from this service' do + expect(project.deployment_variables).to include( + { key: 'KUBE_TOKEN', value: project.deployment_platform.token, public: false } + ) + end + end + + context 'when user configured kubernetes from Integration > Kubernetes' do + let(:project) { create(:kubernetes_project) } - it 'returns variables from this service' do - expect(project.deployment_variables).to include( - { key: 'KUBE_TOKEN', value: project.kubernetes_service.token, public: false } - ) + it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' + end + + context 'when user configured kubernetes from CI/CD > Clusters' do + let!(:cluster) { create(:cluster, :project, :provided_by_gcp) } + let(:project) { cluster.project } + + it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' end end end @@ -2465,7 +2498,7 @@ describe Project do it 'returns the number of forks' do project = build(:project) - allow(project.forks).to receive(:count).and_return(1) + expect_any_instance_of(Projects::ForksCountService).to receive(:count).and_return(1) expect(project.forks_count).to eq(1) end @@ -3016,4 +3049,96 @@ describe Project do end end end + + describe '#after_import' do + let(:project) { build(:project) } + + it 'runs the correct hooks' do + expect(project.repository).to receive(:after_import) + expect(project).to receive(:import_finish) + expect(project).to receive(:update_project_counter_caches) + expect(project).to receive(:remove_import_jid) + + project.after_import + end + end + + describe '#update_project_counter_caches' do + let(:project) { create(:project) } + + it 'updates all project counter 
caches' do + expect_any_instance_of(Projects::OpenIssuesCountService) + .to receive(:refresh_cache) + .and_call_original + + expect_any_instance_of(Projects::OpenMergeRequestsCountService) + .to receive(:refresh_cache) + .and_call_original + + project.update_project_counter_caches + end + end + + describe '#remove_import_jid', :clean_gitlab_redis_cache do + let(:project) { } + + context 'without an import JID' do + it 'does nothing' do + project = create(:project) + + expect(Gitlab::SidekiqStatus) + .not_to receive(:unset) + + project.remove_import_jid + end + end + + context 'with an import JID' do + it 'unsets the import JID' do + project = create(:project, import_jid: '123') + + expect(Gitlab::SidekiqStatus) + .to receive(:unset) + .with('123') + .and_call_original + + project.remove_import_jid + + expect(project.import_jid).to be_nil + end + end + end + + describe '#wiki_repository_exists?' do + it 'returns true when the wiki repository exists' do + project = create(:project, :wiki_repo) + + expect(project.wiki_repository_exists?).to eq(true) + end + + it 'returns false when the wiki repository does not exist' do + project = create(:project) + + expect(project.wiki_repository_exists?).to eq(false) + end + end + + describe '#deployment_platform' do + subject { project.deployment_platform } + + let(:project) { create(:project) } + + context 'when user configured kubernetes from Integration > Kubernetes' do + let!(:kubernetes_service) { create(:kubernetes_service, project: project) } + + it { is_expected.to eq(kubernetes_service) } + end + + context 'when user configured kubernetes from CI/CD > Clusters' do + let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) } + let(:platform_kubernetes) { cluster.platform_kubernetes } + + it { is_expected.to eq(platform_kubernetes) } + end + end end diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb index 59e20e84c2f..e78ed1df821 100644 --- a/spec/models/project_statistics_spec.rb +++ b/spec/models/project_statistics_spec.rb @@ -133,15 +133,29 @@ describe ProjectStatistics do describe '#update_build_artifacts_size' do let!(:pipeline) { create(:ci_pipeline, project: project) } - let!(:build1) { create(:ci_build, pipeline: pipeline, artifacts_size: 45.megabytes) } - let!(:build2) { create(:ci_build, pipeline: pipeline, artifacts_size: 56.megabytes) } - before do - statistics.update_build_artifacts_size + context 'when new job artifacts are calculated' do + let(:ci_build) { create(:ci_build, pipeline: pipeline) } + + before do + create(:ci_job_artifact, :archive, project: pipeline.project, job: ci_build) + end + + it "stores the size of related build artifacts" do + statistics.update_build_artifacts_size + + expect(statistics.build_artifacts_size).to be(106365) + end end - it "stores the size of related build artifacts" do - expect(statistics.build_artifacts_size).to eq 101.megabytes + context 'when legacy artifacts are used' do + let!(:ci_build) { create(:ci_build, pipeline: pipeline, artifacts_size: 10.megabytes) } + + it "stores the size of related build artifacts" do + statistics.update_build_artifacts_size + + expect(statistics.build_artifacts_size).to eq(10.megabytes) + end end end diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb index 3d46434fc27..929086305ba 100644 --- a/spec/models/project_wiki_spec.rb +++ b/spec/models/project_wiki_spec.rb @@ -10,6 +10,10 @@ describe ProjectWiki do subject { project_wiki } + it { is_expected.to 
delegate_method(:empty?).to :pages } + it { is_expected.to delegate_method(:repository_storage_path).to :project } + it { is_expected.to delegate_method(:hashed_storage?).to :project } + describe "#path_with_namespace" do it "returns the project path with namespace with the .wiki extension" do expect(subject.path_with_namespace).to eq(project.full_path + '.wiki') diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb index 8a6aa767ce6..358bc3dfb94 100644 --- a/spec/models/repository_spec.rb +++ b/spec/models/repository_spec.rb @@ -29,7 +29,9 @@ describe Repository do def expect_to_raise_storage_error expect { yield }.to raise_error do |exception| storage_exceptions = [Gitlab::Git::Storage::Inaccessible, Gitlab::Git::CommandError, GRPC::Unavailable] - expect(exception.class).to be_in(storage_exceptions) + known_exception = storage_exceptions.select { |e| exception.is_a?(e) } + + expect(known_exception).not_to be_nil end end @@ -299,24 +301,6 @@ describe Repository do it { is_expected.to be_falsey } end - - context 'when pre-loaded merged branches are provided' do - using RSpec::Parameterized::TableSyntax - - where(:branch, :pre_loaded, :expected) do - 'not-merged-branch' | ['branch-merged'] | false - 'branch-merged' | ['not-merged-branch'] | false - 'branch-merged' | ['branch-merged'] | true - 'not-merged-branch' | ['not-merged-branch'] | false - 'master' | ['master'] | false - end - - with_them do - subject { repository.merged_to_root_ref?(branch, pre_loaded) } - - it { is_expected.to eq(expected) } - end - end end describe '#can_be_merged?' do @@ -601,7 +585,7 @@ describe Repository do end it 'properly handles query when repo is empty' do - repository = create(:project).repository + repository = create(:project, :empty_repo).repository results = repository.search_files_by_content('test', 'master') expect(results).to match_array([]) @@ -637,7 +621,7 @@ describe Repository do end it 'properly handles query when repo is empty' do - repository = create(:project).repository + repository = create(:project, :empty_repo).repository results = repository.search_files_by_name('test', 'master') @@ -652,9 +636,7 @@ describe Repository do end describe '#fetch_ref' do - # Setting the var here, sidesteps the stub that makes gitaly raise an error - # before the actual test call - set(:broken_repository) { create(:project, :broken_storage).repository } + let(:broken_repository) { create(:project, :broken_storage).repository } describe 'when storage is broken', :broken_storage do it 'should raise a storage error' do @@ -1166,6 +1148,31 @@ describe Repository do end end + describe '#branch_exists?' 
do + it 'uses branch_names' do + allow(repository).to receive(:branch_names).and_return(['foobar']) + + expect(repository.branch_exists?('foobar')).to eq(true) + expect(repository.branch_exists?('master')).to eq(false) + end + end + + describe '#branch_names', :use_clean_rails_memory_store_caching do + let(:fake_branch_names) { ['foobar'] } + + it 'gets cached across Repository instances' do + allow(repository.raw_repository).to receive(:branch_names).once.and_return(fake_branch_names) + + expect(repository.branch_names).to eq(fake_branch_names) + + fresh_repository = Project.find(project.id).repository + expect(fresh_repository.object_id).not_to eq(repository.object_id) + + expect(fresh_repository.raw_repository).not_to receive(:branch_names) + expect(fresh_repository.branch_names).to eq(fake_branch_names) + end + end + describe '#update_autocrlf_option' do describe 'when autocrlf is not already set to :input' do before do @@ -1197,17 +1204,15 @@ describe Repository do let(:empty_repository) { create(:project_empty_repo).repository } it 'returns true for an empty repository' do - expect(empty_repository.empty?).to eq(true) + expect(empty_repository).to be_empty end it 'returns false for a non-empty repository' do - expect(repository.empty?).to eq(false) + expect(repository).not_to be_empty end it 'caches the output' do - expect(repository.raw_repository).to receive(:empty?) - .once - .and_return(false) + expect(repository.raw_repository).to receive(:has_visible_content?).once repository.empty? repository.empty? @@ -1365,78 +1370,98 @@ describe Repository do end describe '#revert' do - let(:new_image_commit) { repository.commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9') } - let(:update_image_commit) { repository.commit('2f63565e7aac07bcdadb654e253078b727143ec4') } - let(:message) { 'revert message' } - - context 'when there is a conflict' do - it 'raises an error' do - expect { repository.revert(user, new_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError) + shared_examples 'reverting a commit' do + let(:new_image_commit) { repository.commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9') } + let(:update_image_commit) { repository.commit('2f63565e7aac07bcdadb654e253078b727143ec4') } + let(:message) { 'revert message' } + + context 'when there is a conflict' do + it 'raises an error' do + expect { repository.revert(user, new_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError) + end end - end - context 'when commit was already reverted' do - it 'raises an error' do - repository.revert(user, update_image_commit, 'master', message) + context 'when commit was already reverted' do + it 'raises an error' do + repository.revert(user, update_image_commit, 'master', message) - expect { repository.revert(user, update_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError) + expect { repository.revert(user, update_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError) + end end - end - context 'when commit can be reverted' do - it 'reverts the changes' do - expect(repository.revert(user, update_image_commit, 'master', message)).to be_truthy + context 'when commit can be reverted' do + it 'reverts the changes' do + expect(repository.revert(user, update_image_commit, 'master', message)).to be_truthy + end end - end - context 'reverting a merge commit' do - it 'reverts the changes' do - merge_commit - expect(repository.blob_at_branch('master', 
'files/ruby/feature.rb')).to be_present + context 'reverting a merge commit' do + it 'reverts the changes' do + merge_commit + expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).to be_present - repository.revert(user, merge_commit, 'master', message) - expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).not_to be_present + repository.revert(user, merge_commit, 'master', message) + expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).not_to be_present + end end end + + context 'when Gitaly revert feature is enabled' do + it_behaves_like 'reverting a commit' + end + + context 'when Gitaly revert feature is disabled', :disable_gitaly do + it_behaves_like 'reverting a commit' + end end describe '#cherry_pick' do - let(:conflict_commit) { repository.commit('c642fe9b8b9f28f9225d7ea953fe14e74748d53b') } - let(:pickable_commit) { repository.commit('7d3b0f7cff5f37573aea97cebfd5692ea1689924') } - let(:pickable_merge) { repository.commit('e56497bb5f03a90a51293fc6d516788730953899') } - let(:message) { 'cherry-pick message' } - - context 'when there is a conflict' do - it 'raises an error' do - expect { repository.cherry_pick(user, conflict_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError) + shared_examples 'cherry-picking a commit' do + let(:conflict_commit) { repository.commit('c642fe9b8b9f28f9225d7ea953fe14e74748d53b') } + let(:pickable_commit) { repository.commit('7d3b0f7cff5f37573aea97cebfd5692ea1689924') } + let(:pickable_merge) { repository.commit('e56497bb5f03a90a51293fc6d516788730953899') } + let(:message) { 'cherry-pick message' } + + context 'when there is a conflict' do + it 'raises an error' do + expect { repository.cherry_pick(user, conflict_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError) + end end - end - context 'when commit was already cherry-picked' do - it 'raises an error' do - repository.cherry_pick(user, pickable_commit, 'master', message) + context 'when commit was already cherry-picked' do + it 'raises an error' do + repository.cherry_pick(user, pickable_commit, 'master', message) - expect { repository.cherry_pick(user, pickable_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError) + expect { repository.cherry_pick(user, pickable_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError) + end end - end - context 'when commit can be cherry-picked' do - it 'cherry-picks the changes' do - expect(repository.cherry_pick(user, pickable_commit, 'master', message)).to be_truthy + context 'when commit can be cherry-picked' do + it 'cherry-picks the changes' do + expect(repository.cherry_pick(user, pickable_commit, 'master', message)).to be_truthy + end end - end - context 'cherry-picking a merge commit' do - it 'cherry-picks the changes' do - expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).to be_nil + context 'cherry-picking a merge commit' do + it 'cherry-picks the changes' do + expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).to be_nil - cherry_pick_commit_sha = repository.cherry_pick(user, pickable_merge, 'improve/awesome', message) - cherry_pick_commit_message = project.commit(cherry_pick_commit_sha).message + cherry_pick_commit_sha = repository.cherry_pick(user, pickable_merge, 'improve/awesome', message) + cherry_pick_commit_message = project.commit(cherry_pick_commit_sha).message - expect(repository.blob_at_branch('improve/awesome', 
'foo/bar/.gitkeep')).not_to be_nil - expect(cherry_pick_commit_message).to eq(message) + expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).not_to be_nil + expect(cherry_pick_commit_message).to eq(message) + end end end + + context 'when Gitaly cherry_pick feature is enabled' do + it_behaves_like 'cherry-picking a commit' + end + + context 'when Gitaly cherry_pick feature is disabled', :disable_gitaly do + it_behaves_like 'cherry-picking a commit' + end end describe '#before_delete' do diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb index de3ca300ae3..e09d89d235d 100644 --- a/spec/models/snippet_spec.rb +++ b/spec/models/snippet_spec.rb @@ -88,7 +88,7 @@ describe Snippet do end describe '.search' do - let(:snippet) { create(:snippet) } + let(:snippet) { create(:snippet, title: 'test snippet') } it 'returns snippets with a matching title' do expect(described_class.search(snippet.title)).to eq([snippet]) diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index d2f97009ad9..03c96a8f5aa 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -642,16 +642,40 @@ describe User do end describe 'groups' do + let(:user) { create(:user) } + let(:group) { create(:group) } + before do - @user = create :user - @group = create :group - @group.add_owner(@user) + group.add_owner(user) end - it { expect(@user.several_namespaces?).to be_truthy } - it { expect(@user.authorized_groups).to eq([@group]) } - it { expect(@user.owned_groups).to eq([@group]) } - it { expect(@user.namespaces).to match_array([@user.namespace, @group]) } + it { expect(user.several_namespaces?).to be_truthy } + it { expect(user.authorized_groups).to eq([group]) } + it { expect(user.owned_groups).to eq([group]) } + it { expect(user.namespaces).to contain_exactly(user.namespace, group) } + it { expect(user.manageable_namespaces).to contain_exactly(user.namespace, group) } + + context 'with child groups', :nested_groups do + let!(:subgroup) { create(:group, parent: group) } + + describe '#manageable_namespaces' do + it 'includes all the namespaces the user can manage' do + expect(user.manageable_namespaces).to contain_exactly(user.namespace, group, subgroup) + end + end + + describe '#manageable_groups' do + it 'includes all the namespaces the user can manage' do + expect(user.manageable_groups).to contain_exactly(group, subgroup) + end + + it 'does not include duplicates if a membership was added for the subgroup' do + subgroup.add_owner(user) + + expect(user.manageable_groups).to contain_exactly(group, subgroup) + end + end + end end describe 'group multiple owners' do @@ -804,7 +828,7 @@ describe User do end end - describe '#require_ssh_key?' 
do + describe '#require_ssh_key?', :use_clean_rails_memory_store_caching do protocol_and_expectation = { 'http' => false, 'ssh' => true, @@ -819,6 +843,12 @@ describe User do expect(user.require_ssh_key?).to eq(expected) end end + + it 'returns false when the user has 1 or more SSH keys' do + key = create(:personal_key) + + expect(key.user.require_ssh_key?).to eq(false) + end end end @@ -841,6 +871,19 @@ describe User do end end + describe '.by_any_email' do + it 'returns an ActiveRecord::Relation' do + expect(described_class.by_any_email('foo@example.com')) + .to be_a_kind_of(ActiveRecord::Relation) + end + + it 'returns a relation of users' do + user = create(:user) + + expect(described_class.by_any_email(user.email)).to eq([user]) + end + end + describe '.search' do let!(:user) { create(:user, name: 'user', username: 'usern', email: 'email@gmail.com') } let!(:user2) { create(:user, name: 'user name', username: 'username', email: 'someemail@gmail.com') } @@ -1136,16 +1179,9 @@ describe User do let(:user) { create(:user, :with_avatar) } context 'when avatar file is uploaded' do - let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" } - let(:avatar_path) { "/uploads/-/system/user/avatar/#{user.id}/dk.png" } - it 'shows correct avatar url' do - expect(user.avatar_url).to eq(avatar_path) - expect(user.avatar_url(only_path: false)).to eq([gitlab_host, avatar_path].join) - - allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host) - - expect(user.avatar_url).to eq([gitlab_host, avatar_path].join) + expect(user.avatar_url).to eq(user.avatar.url) + expect(user.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, user.avatar.url].join) end end end @@ -2107,25 +2143,47 @@ describe User do end end - describe '#allow_password_authentication?' do + describe '#allow_password_authentication_for_web?' do + context 'regular user' do + let(:user) { build(:user) } + + it 'returns true when password authentication is enabled for the web interface' do + expect(user.allow_password_authentication_for_web?).to be_truthy + end + + it 'returns false when password authentication is disabled for the web interface' do + stub_application_setting(password_authentication_enabled_for_web: false) + + expect(user.allow_password_authentication_for_web?).to be_falsey + end + end + + it 'returns false for ldap user' do + user = create(:omniauth_user, provider: 'ldapmain') + + expect(user.allow_password_authentication_for_web?).to be_falsey + end + end + + describe '#allow_password_authentication_for_git?' 
do context 'regular user' do let(:user) { build(:user) } - it 'returns true when sign-in is enabled' do - expect(user.allow_password_authentication?).to be_truthy + it 'returns true when password authentication is enabled for Git' do + expect(user.allow_password_authentication_for_git?).to be_truthy end - it 'returns false when sign-in is disabled' do - stub_application_setting(password_authentication_enabled: false) + it 'returns false when password authentication is disabled Git' do + stub_application_setting(password_authentication_enabled_for_git: false) - expect(user.allow_password_authentication?).to be_falsey + expect(user.allow_password_authentication_for_git?).to be_falsey end end it 'returns false for ldap user' do user = create(:omniauth_user, provider: 'ldapmain') - expect(user.allow_password_authentication?).to be_falsey + expect(user.allow_password_authentication_for_git?).to be_falsey end end @@ -2345,7 +2403,8 @@ describe User do let(:expected) { !(password_automatically_set || ldap_user || password_authentication_disabled) } before do - stub_application_setting(password_authentication_enabled: !password_authentication_disabled) + stub_application_setting(password_authentication_enabled_for_web: !password_authentication_disabled) + stub_application_setting(password_authentication_enabled_for_git: !password_authentication_disabled) end it 'returns false unless all inputs are true' do @@ -2374,4 +2433,163 @@ describe User do expect(user).not_to be_blocked end end + + describe '#max_member_access_for_project_ids' do + shared_examples 'max member access for projects' do + let(:user) { create(:user) } + let(:group) { create(:group) } + let(:owner_project) { create(:project, group: group) } + let(:master_project) { create(:project) } + let(:reporter_project) { create(:project) } + let(:developer_project) { create(:project) } + let(:guest_project) { create(:project) } + let(:no_access_project) { create(:project) } + + let(:projects) do + [owner_project, master_project, reporter_project, developer_project, guest_project, no_access_project].map(&:id) + end + + let(:expected) do + { + owner_project.id => Gitlab::Access::OWNER, + master_project.id => Gitlab::Access::MASTER, + reporter_project.id => Gitlab::Access::REPORTER, + developer_project.id => Gitlab::Access::DEVELOPER, + guest_project.id => Gitlab::Access::GUEST, + no_access_project.id => Gitlab::Access::NO_ACCESS + } + end + + before do + create(:group_member, user: user, group: group) + master_project.add_master(user) + reporter_project.add_reporter(user) + developer_project.add_developer(user) + guest_project.add_guest(user) + end + + it 'returns correct roles for different projects' do + expect(user.max_member_access_for_project_ids(projects)).to eq(expected) + end + end + + context 'with RequestStore enabled', :request_store do + include_examples 'max member access for projects' + + def access_levels(projects) + user.max_member_access_for_project_ids(projects) + end + + it 'does not perform extra queries when asked for projects who have already been found' do + access_levels(projects) + + expect { access_levels(projects) }.not_to exceed_query_limit(0) + + expect(access_levels(projects)).to eq(expected) + end + + it 'only requests the extra projects when uncached projects are passed' do + second_master_project = create(:project) + second_developer_project = create(:project) + second_master_project.add_master(user) + second_developer_project.add_developer(user) + + all_projects = projects + [second_master_project.id, 
second_developer_project.id] + + expected_all = expected.merge(second_master_project.id => Gitlab::Access::MASTER, + second_developer_project.id => Gitlab::Access::DEVELOPER) + + access_levels(projects) + + queries = ActiveRecord::QueryRecorder.new { access_levels(all_projects) } + + expect(queries.count).to eq(1) + expect(queries.log_message).to match(/\W(#{second_master_project.id}, #{second_developer_project.id})\W/) + expect(access_levels(all_projects)).to eq(expected_all) + end + end + + context 'with RequestStore disabled' do + include_examples 'max member access for projects' + end + end + + describe '#max_member_access_for_group_ids' do + shared_examples 'max member access for groups' do + let(:user) { create(:user) } + let(:owner_group) { create(:group) } + let(:master_group) { create(:group) } + let(:reporter_group) { create(:group) } + let(:developer_group) { create(:group) } + let(:guest_group) { create(:group) } + let(:no_access_group) { create(:group) } + + let(:groups) do + [owner_group, master_group, reporter_group, developer_group, guest_group, no_access_group].map(&:id) + end + + let(:expected) do + { + owner_group.id => Gitlab::Access::OWNER, + master_group.id => Gitlab::Access::MASTER, + reporter_group.id => Gitlab::Access::REPORTER, + developer_group.id => Gitlab::Access::DEVELOPER, + guest_group.id => Gitlab::Access::GUEST, + no_access_group.id => Gitlab::Access::NO_ACCESS + } + end + + before do + owner_group.add_owner(user) + master_group.add_master(user) + reporter_group.add_reporter(user) + developer_group.add_developer(user) + guest_group.add_guest(user) + end + + it 'returns correct roles for different groups' do + expect(user.max_member_access_for_group_ids(groups)).to eq(expected) + end + end + + context 'with RequestStore enabled', :request_store do + include_examples 'max member access for groups' + + def access_levels(groups) + user.max_member_access_for_group_ids(groups) + end + + it 'does not perform extra queries when asked for groups who have already been found' do + access_levels(groups) + + expect { access_levels(groups) }.not_to exceed_query_limit(0) + + expect(access_levels(groups)).to eq(expected) + end + + it 'only requests the extra groups when uncached groups are passed' do + second_master_group = create(:group) + second_developer_group = create(:group) + second_master_group.add_master(user) + second_developer_group.add_developer(user) + + all_groups = groups + [second_master_group.id, second_developer_group.id] + + expected_all = expected.merge(second_master_group.id => Gitlab::Access::MASTER, + second_developer_group.id => Gitlab::Access::DEVELOPER) + + access_levels(groups) + + queries = ActiveRecord::QueryRecorder.new { access_levels(all_groups) } + + expect(queries.count).to eq(1) + expect(queries.log_message).to match(/\W(#{second_master_group.id}, #{second_developer_group.id})\W/) + expect(access_levels(all_groups)).to eq(expected_all) + end + end + + context 'with RequestStore disabled' do + include_examples 'max member access for groups' + end + end end diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb index a7227b38850..ea75434e399 100644 --- a/spec/models/wiki_page_spec.rb +++ b/spec/models/wiki_page_spec.rb @@ -373,7 +373,7 @@ describe WikiPage do end it 'returns commit sha' do - expect(@page.last_commit_sha).to eq @page.commit.sha + expect(@page.last_commit_sha).to eq @page.last_version.sha end it 'is changed after page updated' do |
