author     Achilleas Pipinellis <axil@gitlab.com>  2019-06-04 12:03:48 +0000
committer  Achilleas Pipinellis <axil@gitlab.com>  2019-06-04 12:03:48 +0000
commit     5fd70f1779fa9ea0bd5eda523833996f71d0e706 (patch)
tree       27aa4cb0a5713aa7bca5eb8a5d1180d00e414089 /spec
parent     72797b30479b8649cd4e4b42bb940093648daa9c (diff)
parent     d2ea615415311e7da1daf6b0a4e5c3a23fb2f1c2 (diff)
download   gitlab-ce-patch-66.tar.gz
Merge branch 'master' into 'patch-66' (branch: patch-66)

# Conflicts:
#   doc/administration/high_availability/gitaly.md
Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/concerns/import_url_params_spec.rb | 12
-rw-r--r--  spec/controllers/projects/serverless/functions_controller_spec.rb | 92
-rw-r--r--  spec/factories/ci/pipeline_schedule.rb | 10
-rw-r--r--  spec/features/projects/pipeline_schedules_spec.rb | 2
-rw-r--r--  spec/features/projects/serverless/functions_spec.rb | 59
-rw-r--r--  spec/finders/clusters/knative_services_finder_spec.rb | 105
-rw-r--r--  spec/finders/projects/serverless/functions_finder_spec.rb | 68
-rw-r--r--  spec/fixtures/security-reports/master/gl-dast-report.json | 74
-rw-r--r--  spec/frontend/helpers/vue_test_utils_helper.js | 4
-rw-r--r--  spec/frontend/operation_settings/components/external_dashboard_spec.js | 8
-rw-r--r--  spec/frontend/serverless/components/environment_row_spec.js | 8
-rw-r--r--  spec/frontend/serverless/components/functions_spec.js | 27
-rw-r--r--  spec/frontend/serverless/mock_data.js | 110
-rw-r--r--  spec/frontend/serverless/store/getters_spec.js | 2
-rw-r--r--  spec/frontend/serverless/store/mutations_spec.js | 4
-rw-r--r--  spec/helpers/emails_helper_spec.rb | 8
-rw-r--r--  spec/javascripts/boards/boards_store_spec.js | 12
-rw-r--r--  spec/javascripts/helpers/vue_test_utils_helper.js | 4
-rw-r--r--  spec/javascripts/matchers.js | 2
-rw-r--r--  spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb | 27
-rw-r--r--  spec/models/ci/pipeline_schedule_spec.rb | 145
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 34
-rw-r--r--  spec/models/clusters/applications/knative_spec.rb | 76
-rw-r--r--  spec/models/clusters/cluster_spec.rb | 5
-rw-r--r--  spec/services/ci/pipeline_schedule_service_spec.rb | 28
-rw-r--r--  spec/services/merge_requests/close_service_spec.rb | 8
-rw-r--r--  spec/support/helpers/kubernetes_helpers.rb | 46
-rw-r--r--  spec/workers/auto_merge_process_worker_spec.rb | 31
-rw-r--r--  spec/workers/pipeline_schedule_worker_spec.rb | 57
-rw-r--r--  spec/workers/pipeline_success_worker_spec.rb | 27
-rw-r--r--  spec/workers/run_pipeline_schedule_worker_spec.rb | 32
31 files changed, 749 insertions, 378 deletions
diff --git a/spec/controllers/concerns/import_url_params_spec.rb b/spec/controllers/concerns/import_url_params_spec.rb
index fc5dfb5263f..adbe6e5d3bf 100644
--- a/spec/controllers/concerns/import_url_params_spec.rb
+++ b/spec/controllers/concerns/import_url_params_spec.rb
@@ -8,6 +8,18 @@ describe ImportUrlParams do
controller.import_url_params
end
+ context 'empty URL' do
+ let(:params) do
+ ActionController::Parameters.new(project: {
+ title: 'Test'
+ })
+ end
+
+ it 'returns empty hash' do
+ expect(import_url_params).to eq({})
+ end
+ end
+
context 'url and password separately provided' do
let(:params) do
ActionController::Parameters.new(project: {
diff --git a/spec/controllers/projects/serverless/functions_controller_spec.rb b/spec/controllers/projects/serverless/functions_controller_spec.rb
index 782f5f272d9..18c594acae0 100644
--- a/spec/controllers/projects/serverless/functions_controller_spec.rb
+++ b/spec/controllers/projects/serverless/functions_controller_spec.rb
@@ -8,9 +8,8 @@ describe Projects::Serverless::FunctionsController do
let(:user) { create(:user) }
let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:knative) { create(:clusters_applications_knative, :installed, cluster: cluster) }
let(:service) { cluster.platform_kubernetes }
- let(:project) { cluster.project}
+ let(:project) { cluster.project }
let(:namespace) do
create(:cluster_kubernetes_namespace,
@@ -30,17 +29,69 @@ describe Projects::Serverless::FunctionsController do
end
describe 'GET #index' do
- context 'empty cache' do
- it 'has no data' do
+ let(:expected_json) { { 'knative_installed' => knative_state, 'functions' => functions } }
+
+ context 'when cache is being read' do
+ let(:knative_state) { 'checking' }
+ let(:functions) { [] }
+
+ before do
get :index, params: params({ format: :json })
+ end
- expect(response).to have_gitlab_http_status(204)
+ it 'returns checking' do
+ expect(json_response).to eq expected_json
end
- it 'renders an html page' do
- get :index, params: params
+ it { expect(response).to have_gitlab_http_status(200) }
+ end
+
+ context 'when cache is ready' do
+ let(:knative_services_finder) { project.clusters.first.knative_services_finder(project) }
+ let(:knative_state) { true }
- expect(response).to have_gitlab_http_status(200)
+ before do
+ allow_any_instance_of(Clusters::Cluster)
+ .to receive(:knative_services_finder)
+ .and_return(knative_services_finder)
+ synchronous_reactive_cache(knative_services_finder)
+ stub_kubeclient_service_pods(
+ kube_response({ "kind" => "PodList", "items" => [] }),
+ namespace: namespace.namespace
+ )
+ end
+
+ context 'when no functions were found' do
+ let(:functions) { [] }
+
+ before do
+ stub_kubeclient_knative_services(
+ namespace: namespace.namespace,
+ response: kube_response({ "kind" => "ServiceList", "items" => [] })
+ )
+ get :index, params: params({ format: :json })
+ end
+
+ it 'returns checking' do
+ expect(json_response).to eq expected_json
+ end
+
+ it { expect(response).to have_gitlab_http_status(200) }
+ end
+
+ context 'when functions were found' do
+ let(:functions) { ["asdf"] }
+
+ before do
+ stub_kubeclient_knative_services(namespace: namespace.namespace)
+ get :index, params: params({ format: :json })
+ end
+
+ it 'returns functions' do
+ expect(json_response["functions"]).not_to be_empty
+ end
+
+ it { expect(response).to have_gitlab_http_status(200) }
end
end
end
@@ -56,11 +107,12 @@ describe Projects::Serverless::FunctionsController do
context 'valid data', :use_clean_rails_memory_store_caching do
before do
stub_kubeclient_service_pods
- stub_reactive_cache(knative,
+ stub_reactive_cache(cluster.knative_services_finder(project),
{
services: kube_knative_services_body(namespace: namespace.namespace, name: cluster.project.name)["items"],
pods: kube_knative_pods_body(cluster.project.name, namespace.namespace)["items"]
- })
+ },
+ *cluster.knative_services_finder(project).cache_args)
end
it 'has a valid function name' do
@@ -88,11 +140,12 @@ describe Projects::Serverless::FunctionsController do
describe 'GET #index with data', :use_clean_rails_memory_store_caching do
before do
stub_kubeclient_service_pods
- stub_reactive_cache(knative,
+ stub_reactive_cache(cluster.knative_services_finder(project),
{
services: kube_knative_services_body(namespace: namespace.namespace, name: cluster.project.name)["items"],
pods: kube_knative_pods_body(cluster.project.name, namespace.namespace)["items"]
- })
+ },
+ *cluster.knative_services_finder(project).cache_args)
end
it 'has data' do
@@ -100,11 +153,16 @@ describe Projects::Serverless::FunctionsController do
expect(response).to have_gitlab_http_status(200)
- expect(json_response).to contain_exactly(
- a_hash_including(
- "name" => project.name,
- "url" => "http://#{project.name}.#{namespace.namespace}.example.com"
- )
+ expect(json_response).to match(
+ {
+ "knative_installed" => "checking",
+ "functions" => [
+ a_hash_including(
+ "name" => project.name,
+ "url" => "http://#{project.name}.#{namespace.namespace}.example.com"
+ )
+ ]
+ }
)
end
diff --git a/spec/factories/ci/pipeline_schedule.rb b/spec/factories/ci/pipeline_schedule.rb
index b2b79807429..4b83ba2ac1b 100644
--- a/spec/factories/ci/pipeline_schedule.rb
+++ b/spec/factories/ci/pipeline_schedule.rb
@@ -7,6 +7,16 @@ FactoryBot.define do
description "pipeline schedule"
project
+ trait :every_minute do
+ cron '*/1 * * * *'
+ cron_timezone Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE
+ end
+
+ trait :hourly do
+ cron '* */1 * * *'
+ cron_timezone Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE
+ end
+
trait :nightly do
cron '0 1 * * *'
cron_timezone Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE
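The two new traits above compose with the factory the same way the existing :nightly trait does. A minimal usage sketch, not part of the commit, assuming a spec context where FactoryBot methods are included:

    # Hypothetical spec snippet illustrating the new traits
    hourly   = create(:ci_pipeline_schedule, :hourly)        # cron '* */1 * * *'
    frequent = create(:ci_pipeline_schedule, :every_minute)  # cron '*/1 * * * *'
    hourly.cron_timezone  # => Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE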
diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb
index b1a705f09ce..24041a51383 100644
--- a/spec/features/projects/pipeline_schedules_spec.rb
+++ b/spec/features/projects/pipeline_schedules_spec.rb
@@ -225,7 +225,7 @@ describe 'Pipeline Schedules', :js do
context 'when active is true and next_run_at is NULL' do
before do
create(:ci_pipeline_schedule, project: project, owner: user).tap do |pipeline_schedule|
- pipeline_schedule.update_attribute(:cron, nil) # Consequently next_run_at will be nil
+ pipeline_schedule.update_attribute(:next_run_at, nil) # Consequently next_run_at will be nil
end
end
diff --git a/spec/features/projects/serverless/functions_spec.rb b/spec/features/projects/serverless/functions_spec.rb
index e14934b1672..9865dbbfb3c 100644
--- a/spec/features/projects/serverless/functions_spec.rb
+++ b/spec/features/projects/serverless/functions_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
describe 'Functions', :js do
include KubernetesHelpers
+ include ReactiveCachingHelpers
let(:project) { create(:project) }
let(:user) { create(:user) }
@@ -13,44 +14,70 @@ describe 'Functions', :js do
gitlab_sign_in(user)
end
- context 'when user does not have a cluster and visits the serverless page' do
+ shared_examples "it's missing knative installation" do
before do
visit project_serverless_functions_path(project)
end
- it 'sees an empty state' do
+ it 'sees an empty state requiring Knative installation' do
expect(page).to have_link('Install Knative')
expect(page).to have_selector('.empty-state')
end
end
+ context 'when user does not have a cluster and visits the serverless page' do
+ it_behaves_like "it's missing knative installation"
+ end
+
context 'when the user does have a cluster and visits the serverless page' do
let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- before do
- visit project_serverless_functions_path(project)
- end
-
- it 'sees an empty state' do
- expect(page).to have_link('Install Knative')
- expect(page).to have_selector('.empty-state')
- end
+ it_behaves_like "it's missing knative installation"
end
context 'when the user has a cluster and knative installed and visits the serverless page' do
let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
let(:service) { cluster.platform_kubernetes }
- let(:knative) { create(:clusters_applications_knative, :installed, cluster: cluster) }
- let(:project) { knative.cluster.project }
+ let(:project) { cluster.project }
+ let(:knative_services_finder) { project.clusters.first.knative_services_finder(project) }
+ let(:namespace) do
+ create(:cluster_kubernetes_namespace,
+ cluster: cluster,
+ cluster_project: cluster.cluster_project,
+ project: cluster.cluster_project.project)
+ end
before do
- stub_kubeclient_knative_services
- stub_kubeclient_service_pods
+ allow_any_instance_of(Clusters::Cluster)
+ .to receive(:knative_services_finder)
+ .and_return(knative_services_finder)
+ synchronous_reactive_cache(knative_services_finder)
+ stub_kubeclient_knative_services(stub_get_services_options)
+ stub_kubeclient_service_pods(nil, namespace: namespace.namespace)
visit project_serverless_functions_path(project)
end
- it 'sees an empty listing of serverless functions' do
- expect(page).to have_selector('.empty-state')
+ context 'when there are no functions' do
+ let(:stub_get_services_options) do
+ {
+ namespace: namespace.namespace,
+ response: kube_response({ "kind" => "ServiceList", "items" => [] })
+ }
+ end
+
+ it 'sees an empty listing of serverless functions' do
+ expect(page).to have_selector('.empty-state')
+ expect(page).not_to have_selector('.content-list')
+ end
+ end
+
+ context 'when there are functions' do
+ let(:stub_get_services_options) { { namespace: namespace.namespace } }
+
+ it 'does not see an empty listing of serverless functions' do
+ expect(page).not_to have_selector('.empty-state')
+ expect(page).to have_selector('.content-list')
+ end
end
end
end
diff --git a/spec/finders/clusters/knative_services_finder_spec.rb b/spec/finders/clusters/knative_services_finder_spec.rb
new file mode 100644
index 00000000000..b731c2bd6bf
--- /dev/null
+++ b/spec/finders/clusters/knative_services_finder_spec.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::KnativeServicesFinder do
+ include KubernetesHelpers
+ include ReactiveCachingHelpers
+
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:service) { cluster.platform_kubernetes }
+ let(:project) { cluster.cluster_project.project }
+ let(:namespace) do
+ create(:cluster_kubernetes_namespace,
+ cluster: cluster,
+ cluster_project: cluster.cluster_project,
+ project: project)
+ end
+
+ before do
+ stub_kubeclient_knative_services(namespace: namespace.namespace)
+ stub_kubeclient_service_pods(
+ kube_response(
+ kube_knative_pods_body(
+ project.name, namespace.namespace
+ )
+ ),
+ namespace: namespace.namespace
+ )
+ end
+
+ shared_examples 'a cached data' do
+ it 'has an uninitialized cache' do
+ is_expected.to be_blank
+ end
+
+ context 'when using synchronous reactive cache' do
+ before do
+ synchronous_reactive_cache(cluster.knative_services_finder(project))
+ end
+
+ context 'when there are functions for cluster namespace' do
+ it { is_expected.not_to be_blank }
+ end
+
+ context 'when there are no functions for cluster namespace' do
+ before do
+ stub_kubeclient_knative_services(
+ namespace: namespace.namespace,
+ response: kube_response({ "kind" => "ServiceList", "items" => [] })
+ )
+ stub_kubeclient_service_pods(
+ kube_response({ "kind" => "PodList", "items" => [] }),
+ namespace: namespace.namespace
+ )
+ end
+
+ it { is_expected.to be_blank }
+ end
+ end
+ end
+
+ describe '#service_pod_details' do
+ subject { cluster.knative_services_finder(project).service_pod_details(project.name) }
+
+ it_behaves_like 'a cached data'
+ end
+
+ describe '#services' do
+ subject { cluster.knative_services_finder(project).services }
+
+ it_behaves_like 'a cached data'
+ end
+
+ describe '#knative_detected' do
+ subject { cluster.knative_services_finder(project).knative_detected }
+ before do
+ synchronous_reactive_cache(cluster.knative_services_finder(project))
+ end
+
+ context 'when knative is installed' do
+ before do
+ stub_kubeclient_discover(service.api_url)
+ end
+
+ it { is_expected.to be_truthy }
+ it "discovers knative installation" do
+ expect { subject }
+ .to change { cluster.kubeclient.knative_client.discovered }
+ .from(false)
+ .to(true)
+ end
+ end
+
+ context 'when knative is not installed' do
+ before do
+ stub_kubeclient_discover_knative_not_found(service.api_url)
+ end
+
+ it { is_expected.to be_falsy }
+ it "does not discover knative installation" do
+ expect { subject }.not_to change { cluster.kubeclient.knative_client.discovered }
+ end
+ end
+ end
+end
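The finder spec above leans on the reactive-caching test helpers. A short sketch of the pattern, assuming a spec that includes KubernetesHelpers and ReactiveCachingHelpers and has the cluster, project, and namespace set up as in this diff:

    finder = cluster.knative_services_finder(project)
    # Seed the reactive cache directly, keyed by the finder's cache_args
    stub_reactive_cache(finder, { services: [], pods: [] }, *finder.cache_args)
    finder.services             # reads the stubbed cache entry
    # Or compute the cache synchronously against the stubbed Kubernetes API
    synchronous_reactive_cache(finder)
    finder.knative_detected     # truthy or falsy depending on the stubbed discovery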
diff --git a/spec/finders/projects/serverless/functions_finder_spec.rb b/spec/finders/projects/serverless/functions_finder_spec.rb
index 3ad38207da4..8aea45b457c 100644
--- a/spec/finders/projects/serverless/functions_finder_spec.rb
+++ b/spec/finders/projects/serverless/functions_finder_spec.rb
@@ -10,7 +10,7 @@ describe Projects::Serverless::FunctionsFinder do
let(:user) { create(:user) }
let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
let(:service) { cluster.platform_kubernetes }
- let(:project) { cluster.project}
+ let(:project) { cluster.project }
let(:namespace) do
create(:cluster_kubernetes_namespace,
@@ -23,9 +23,45 @@ describe Projects::Serverless::FunctionsFinder do
project.add_maintainer(user)
end
+ describe '#installed' do
+ it 'when reactive_caching is still fetching data' do
+ expect(described_class.new(project).knative_installed).to eq 'checking'
+ end
+
+ context 'when reactive_caching has finished' do
+ let(:knative_services_finder) { project.clusters.first.knative_services_finder(project) }
+
+ before do
+ allow_any_instance_of(Clusters::Cluster)
+ .to receive(:knative_services_finder)
+ .and_return(knative_services_finder)
+ synchronous_reactive_cache(knative_services_finder)
+ end
+
+ context 'when knative is not installed' do
+ it 'returns false' do
+ stub_kubeclient_discover_knative_not_found(service.api_url)
+
+ expect(described_class.new(project).knative_installed).to eq false
+ end
+ end
+
+ context 'reactive_caching is finished and knative is installed' do
+ let(:knative_services_finder) { project.clusters.first.knative_services_finder(project) }
+
+ it 'returns true' do
+ stub_kubeclient_knative_services(namespace: namespace.namespace)
+ stub_kubeclient_service_pods(nil, namespace: namespace.namespace)
+
+ expect(described_class.new(project).knative_installed).to be true
+ end
+ end
+ end
+ end
+
describe 'retrieve data from knative' do
- it 'does not have knative installed' do
- expect(described_class.new(project).execute).to be_empty
+ context 'does not have knative installed' do
+ it { expect(described_class.new(project).execute).to be_empty }
end
context 'has knative installed' do
@@ -38,22 +74,24 @@ describe Projects::Serverless::FunctionsFinder do
it 'there are functions', :use_clean_rails_memory_store_caching do
stub_kubeclient_service_pods
- stub_reactive_cache(knative,
+ stub_reactive_cache(cluster.knative_services_finder(project),
{
services: kube_knative_services_body(namespace: namespace.namespace, name: cluster.project.name)["items"],
pods: kube_knative_pods_body(cluster.project.name, namespace.namespace)["items"]
- })
+ },
+ *cluster.knative_services_finder(project).cache_args)
expect(finder.execute).not_to be_empty
end
it 'has a function', :use_clean_rails_memory_store_caching do
stub_kubeclient_service_pods
- stub_reactive_cache(knative,
+ stub_reactive_cache(cluster.knative_services_finder(project),
{
services: kube_knative_services_body(namespace: namespace.namespace, name: cluster.project.name)["items"],
pods: kube_knative_pods_body(cluster.project.name, namespace.namespace)["items"]
- })
+ },
+ *cluster.knative_services_finder(project).cache_args)
result = finder.service(cluster.environment_scope, cluster.project.name)
expect(result).not_to be_empty
@@ -84,20 +122,4 @@ describe Projects::Serverless::FunctionsFinder do
end
end
end
-
- describe 'verify if knative is installed' do
- context 'knative is not installed' do
- it 'does not have knative installed' do
- expect(described_class.new(project).installed?).to be false
- end
- end
-
- context 'knative is installed' do
- let!(:knative) { create(:clusters_applications_knative, :installed, cluster: cluster) }
-
- it 'does have knative installed' do
- expect(described_class.new(project).installed?).to be true
- end
- end
- end
end
diff --git a/spec/fixtures/security-reports/master/gl-dast-report.json b/spec/fixtures/security-reports/master/gl-dast-report.json
index 3a308bf047e..df459d9419d 100644
--- a/spec/fixtures/security-reports/master/gl-dast-report.json
+++ b/spec/fixtures/security-reports/master/gl-dast-report.json
@@ -1,40 +1,42 @@
{
- "site": {
- "alerts": [
- {
- "sourceid": "3",
- "wascid": "15",
- "cweid": "16",
- "reference": "<p>http://msdn.microsoft.com/en-us/library/ie/gg622941%28v=vs.85%29.aspx</p><p>https://www.owasp.org/index.php/List_of_useful_HTTP_headers</p>",
- "otherinfo": "<p>This issue still applies to error type pages (401, 403, 500, etc) as those pages are often still affected by injection issues, in which case there is still concern for browsers sniffing pages away from their actual content type.</p><p>At \"High\" threshold this scanner will not alert on client or server error responses.</p>",
- "solution": "<p>Ensure that the application/web server sets the Content-Type header appropriately, and that it sets the X-Content-Type-Options header to 'nosniff' for all web pages.</p><p>If possible, ensure that the end user uses a standards-compliant and modern web browser that does not perform MIME-sniffing at all, or that can be directed by the web application/web server to not perform MIME-sniffing.</p>",
- "count": "2",
- "pluginid": "10021",
- "alert": "X-Content-Type-Options Header Missing",
- "name": "X-Content-Type-Options Header Missing",
- "riskcode": "1",
- "confidence": "2",
- "riskdesc": "Low (Medium)",
- "desc": "<p>The Anti-MIME-Sniffing header X-Content-Type-Options was not set to 'nosniff'. This allows older versions of Internet Explorer and Chrome to perform MIME-sniffing on the response body, potentially causing the response body to be interpreted and displayed as a content type other than the declared content type. Current (early 2014) and legacy versions of Firefox will use the declared content type (if one is set), rather than performing MIME-sniffing.</p>",
- "instances": [
- {
- "param": "X-Content-Type-Options",
- "method": "GET",
- "uri": "http://bikebilly-spring-auto-devops-review-feature-br-3y2gpb.35.192.176.43.xip.io"
- },
- {
- "param": "X-Content-Type-Options",
- "method": "GET",
- "uri": "http://bikebilly-spring-auto-devops-review-feature-br-3y2gpb.35.192.176.43.xip.io/"
- }
- ]
- }
- ],
- "@ssl": "false",
- "@port": "80",
- "@host": "bikebilly-spring-auto-devops-review-feature-br-3y2gpb.35.192.176.43.xip.io",
- "@name": "http://bikebilly-spring-auto-devops-review-feature-br-3y2gpb.35.192.176.43.xip.io"
- },
+ "site": [
+ {
+ "alerts": [
+ {
+ "sourceid": "3",
+ "wascid": "15",
+ "cweid": "16",
+ "reference": "<p>http://msdn.microsoft.com/en-us/library/ie/gg622941%28v=vs.85%29.aspx</p><p>https://www.owasp.org/index.php/List_of_useful_HTTP_headers</p>",
+ "otherinfo": "<p>This issue still applies to error type pages (401, 403, 500, etc) as those pages are often still affected by injection issues, in which case there is still concern for browsers sniffing pages away from their actual content type.</p><p>At \"High\" threshold this scanner will not alert on client or server error responses.</p>",
+ "solution": "<p>Ensure that the application/web server sets the Content-Type header appropriately, and that it sets the X-Content-Type-Options header to 'nosniff' for all web pages.</p><p>If possible, ensure that the end user uses a standards-compliant and modern web browser that does not perform MIME-sniffing at all, or that can be directed by the web application/web server to not perform MIME-sniffing.</p>",
+ "count": "2",
+ "pluginid": "10021",
+ "alert": "X-Content-Type-Options Header Missing",
+ "name": "X-Content-Type-Options Header Missing",
+ "riskcode": "1",
+ "confidence": "2",
+ "riskdesc": "Low (Medium)",
+ "desc": "<p>The Anti-MIME-Sniffing header X-Content-Type-Options was not set to 'nosniff'. This allows older versions of Internet Explorer and Chrome to perform MIME-sniffing on the response body, potentially causing the response body to be interpreted and displayed as a content type other than the declared content type. Current (early 2014) and legacy versions of Firefox will use the declared content type (if one is set), rather than performing MIME-sniffing.</p>",
+ "instances": [
+ {
+ "param": "X-Content-Type-Options",
+ "method": "GET",
+ "uri": "http://bikebilly-spring-auto-devops-review-feature-br-3y2gpb.35.192.176.43.xip.io"
+ },
+ {
+ "param": "X-Content-Type-Options",
+ "method": "GET",
+ "uri": "http://bikebilly-spring-auto-devops-review-feature-br-3y2gpb.35.192.176.43.xip.io/"
+ }
+ ]
+ }
+ ],
+ "@ssl": "false",
+ "@port": "80",
+ "@host": "bikebilly-spring-auto-devops-review-feature-br-3y2gpb.35.192.176.43.xip.io",
+ "@name": "http://bikebilly-spring-auto-devops-review-feature-br-3y2gpb.35.192.176.43.xip.io"
+ }
+ ],
"@generated": "Fri, 13 Apr 2018 09:22:01",
"@version": "2.7.0"
}
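The fixture change above wraps the single site object in an array, so "site" is now a list. An illustrative sketch of consuming the updated fixture (not code from the commit):

    report = JSON.parse(File.read('spec/fixtures/security-reports/master/gl-dast-report.json'))
    report['site'].each do |site|                     # "site" is an array after this change
      Array(site['alerts']).each { |alert| puts alert['name'] }
    end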
diff --git a/spec/frontend/helpers/vue_test_utils_helper.js b/spec/frontend/helpers/vue_test_utils_helper.js
index 19e27388eeb..121e99c9783 100644
--- a/spec/frontend/helpers/vue_test_utils_helper.js
+++ b/spec/frontend/helpers/vue_test_utils_helper.js
@@ -16,4 +16,6 @@ const vNodeContainsText = (vnode, text) =>
* @param {String} text
*/
export const shallowWrapperContainsSlotText = (shallowWrapper, slotName, text) =>
- !!shallowWrapper.vm.$slots[slotName].filter(vnode => vNodeContainsText(vnode, text)).length;
+ Boolean(
+ shallowWrapper.vm.$slots[slotName].filter(vnode => vNodeContainsText(vnode, text)).length,
+ );
diff --git a/spec/frontend/operation_settings/components/external_dashboard_spec.js b/spec/frontend/operation_settings/components/external_dashboard_spec.js
index de1dd219fe0..23dc3c3db8c 100644
--- a/spec/frontend/operation_settings/components/external_dashboard_spec.js
+++ b/spec/frontend/operation_settings/components/external_dashboard_spec.js
@@ -21,6 +21,14 @@ describe('operation settings external dashboard component', () => {
expect(wrapper.find('.js-section-header').text()).toBe('External Dashboard');
});
+ describe('expand/collapse button', () => {
+ it('renders as an expand button by default', () => {
+ const button = wrapper.find(GlButton);
+
+ expect(button.text()).toBe('Expand');
+ });
+ });
+
describe('sub-header', () => {
let subHeader;
diff --git a/spec/frontend/serverless/components/environment_row_spec.js b/spec/frontend/serverless/components/environment_row_spec.js
index 161a637dd75..0ad85e218dc 100644
--- a/spec/frontend/serverless/components/environment_row_spec.js
+++ b/spec/frontend/serverless/components/environment_row_spec.js
@@ -14,7 +14,7 @@ describe('environment row component', () => {
beforeEach(() => {
localVue = createLocalVue();
- vm = createComponent(localVue, translate(mockServerlessFunctions)['*'], '*');
+ vm = createComponent(localVue, translate(mockServerlessFunctions.functions)['*'], '*');
});
afterEach(() => vm.$destroy());
@@ -48,7 +48,11 @@ describe('environment row component', () => {
beforeEach(() => {
localVue = createLocalVue();
- vm = createComponent(localVue, translate(mockServerlessFunctionsDiffEnv).test, 'test');
+ vm = createComponent(
+ localVue,
+ translate(mockServerlessFunctionsDiffEnv.functions).test,
+ 'test',
+ );
});
afterEach(() => vm.$destroy());
diff --git a/spec/frontend/serverless/components/functions_spec.js b/spec/frontend/serverless/components/functions_spec.js
index 6924fb9e91f..d8a80f8031e 100644
--- a/spec/frontend/serverless/components/functions_spec.js
+++ b/spec/frontend/serverless/components/functions_spec.js
@@ -34,11 +34,11 @@ describe('functionsComponent', () => {
});
it('should render empty state when Knative is not installed', () => {
+ store.dispatch('receiveFunctionsSuccess', { knative_installed: false });
component = shallowMount(functionsComponent, {
localVue,
store,
propsData: {
- installed: false,
clustersPath: '',
helpPath: '',
statusPath: '',
@@ -55,7 +55,6 @@ describe('functionsComponent', () => {
localVue,
store,
propsData: {
- installed: true,
clustersPath: '',
helpPath: '',
statusPath: '',
@@ -67,12 +66,11 @@ describe('functionsComponent', () => {
});
it('should render empty state when there is no function data', () => {
- store.dispatch('receiveFunctionsNoDataSuccess');
+ store.dispatch('receiveFunctionsNoDataSuccess', { knative_installed: true });
component = shallowMount(functionsComponent, {
localVue,
store,
propsData: {
- installed: true,
clustersPath: '',
helpPath: '',
statusPath: '',
@@ -91,12 +89,31 @@ describe('functionsComponent', () => {
);
});
+ it('should render functions and a loader when functions are partially fetched', () => {
+ store.dispatch('receiveFunctionsPartial', {
+ ...mockServerlessFunctions,
+ knative_installed: 'checking',
+ });
+ component = shallowMount(functionsComponent, {
+ localVue,
+ store,
+ propsData: {
+ clustersPath: '',
+ helpPath: '',
+ statusPath: '',
+ },
+ sync: false,
+ });
+
+ expect(component.find('.js-functions-wrapper').exists()).toBe(true);
+ expect(component.find('.js-functions-loader').exists()).toBe(true);
+ });
+
it('should render the functions list', () => {
component = shallowMount(functionsComponent, {
localVue,
store,
propsData: {
- installed: true,
clustersPath: 'clustersPath',
helpPath: 'helpPath',
statusPath,
diff --git a/spec/frontend/serverless/mock_data.js b/spec/frontend/serverless/mock_data.js
index a2c18616324..ef616ceb37f 100644
--- a/spec/frontend/serverless/mock_data.js
+++ b/spec/frontend/serverless/mock_data.js
@@ -1,56 +1,62 @@
-export const mockServerlessFunctions = [
- {
- name: 'testfunc1',
- namespace: 'tm-example',
- environment_scope: '*',
- cluster_id: 46,
- detail_url: '/testuser/testproj/serverless/functions/*/testfunc1',
- podcount: null,
- created_at: '2019-02-05T01:01:23Z',
- url: 'http://testfunc1.tm-example.apps.example.com',
- description: 'A test service',
- image: 'knative-test-container-buildtemplate',
- },
- {
- name: 'testfunc2',
- namespace: 'tm-example',
- environment_scope: '*',
- cluster_id: 46,
- detail_url: '/testuser/testproj/serverless/functions/*/testfunc2',
- podcount: null,
- created_at: '2019-02-05T01:01:23Z',
- url: 'http://testfunc2.tm-example.apps.example.com',
- description: 'A second test service\nThis one with additional descriptions',
- image: 'knative-test-echo-buildtemplate',
- },
-];
+export const mockServerlessFunctions = {
+ knative_installed: true,
+ functions: [
+ {
+ name: 'testfunc1',
+ namespace: 'tm-example',
+ environment_scope: '*',
+ cluster_id: 46,
+ detail_url: '/testuser/testproj/serverless/functions/*/testfunc1',
+ podcount: null,
+ created_at: '2019-02-05T01:01:23Z',
+ url: 'http://testfunc1.tm-example.apps.example.com',
+ description: 'A test service',
+ image: 'knative-test-container-buildtemplate',
+ },
+ {
+ name: 'testfunc2',
+ namespace: 'tm-example',
+ environment_scope: '*',
+ cluster_id: 46,
+ detail_url: '/testuser/testproj/serverless/functions/*/testfunc2',
+ podcount: null,
+ created_at: '2019-02-05T01:01:23Z',
+ url: 'http://testfunc2.tm-example.apps.example.com',
+ description: 'A second test service\nThis one with additional descriptions',
+ image: 'knative-test-echo-buildtemplate',
+ },
+ ],
+};
-export const mockServerlessFunctionsDiffEnv = [
- {
- name: 'testfunc1',
- namespace: 'tm-example',
- environment_scope: '*',
- cluster_id: 46,
- detail_url: '/testuser/testproj/serverless/functions/*/testfunc1',
- podcount: null,
- created_at: '2019-02-05T01:01:23Z',
- url: 'http://testfunc1.tm-example.apps.example.com',
- description: 'A test service',
- image: 'knative-test-container-buildtemplate',
- },
- {
- name: 'testfunc2',
- namespace: 'tm-example',
- environment_scope: 'test',
- cluster_id: 46,
- detail_url: '/testuser/testproj/serverless/functions/*/testfunc2',
- podcount: null,
- created_at: '2019-02-05T01:01:23Z',
- url: 'http://testfunc2.tm-example.apps.example.com',
- description: 'A second test service\nThis one with additional descriptions',
- image: 'knative-test-echo-buildtemplate',
- },
-];
+export const mockServerlessFunctionsDiffEnv = {
+ knative_installed: true,
+ functions: [
+ {
+ name: 'testfunc1',
+ namespace: 'tm-example',
+ environment_scope: '*',
+ cluster_id: 46,
+ detail_url: '/testuser/testproj/serverless/functions/*/testfunc1',
+ podcount: null,
+ created_at: '2019-02-05T01:01:23Z',
+ url: 'http://testfunc1.tm-example.apps.example.com',
+ description: 'A test service',
+ image: 'knative-test-container-buildtemplate',
+ },
+ {
+ name: 'testfunc2',
+ namespace: 'tm-example',
+ environment_scope: 'test',
+ cluster_id: 46,
+ detail_url: '/testuser/testproj/serverless/functions/*/testfunc2',
+ podcount: null,
+ created_at: '2019-02-05T01:01:23Z',
+ url: 'http://testfunc2.tm-example.apps.example.com',
+ description: 'A second test service\nThis one with additional descriptions',
+ image: 'knative-test-echo-buildtemplate',
+ },
+ ],
+};
export const mockServerlessFunction = {
name: 'testfunc1',
diff --git a/spec/frontend/serverless/store/getters_spec.js b/spec/frontend/serverless/store/getters_spec.js
index fb549c8f153..92853fda37c 100644
--- a/spec/frontend/serverless/store/getters_spec.js
+++ b/spec/frontend/serverless/store/getters_spec.js
@@ -32,7 +32,7 @@ describe('Serverless Store Getters', () => {
describe('getFunctions', () => {
it('should translate the raw function array to group the functions per environment scope', () => {
- state.functions = mockServerlessFunctions;
+ state.functions = mockServerlessFunctions.functions;
const funcs = getters.getFunctions(state);
diff --git a/spec/frontend/serverless/store/mutations_spec.js b/spec/frontend/serverless/store/mutations_spec.js
index ca3053e5c38..e2771c7e5fd 100644
--- a/spec/frontend/serverless/store/mutations_spec.js
+++ b/spec/frontend/serverless/store/mutations_spec.js
@@ -19,13 +19,13 @@ describe('ServerlessMutations', () => {
expect(state.isLoading).toEqual(false);
expect(state.hasFunctionData).toEqual(true);
- expect(state.functions).toEqual(mockServerlessFunctions);
+ expect(state.functions).toEqual(mockServerlessFunctions.functions);
});
it('should ensure loading has stopped and hasFunctionData is false when there are no functions available', () => {
const state = {};
- mutations[types.RECEIVE_FUNCTIONS_NODATA_SUCCESS](state);
+ mutations[types.RECEIVE_FUNCTIONS_NODATA_SUCCESS](state, { knative_installed: true });
expect(state.isLoading).toEqual(false);
expect(state.hasFunctionData).toEqual(false);
diff --git a/spec/helpers/emails_helper_spec.rb b/spec/helpers/emails_helper_spec.rb
index 0434af25866..e6aacb5b92b 100644
--- a/spec/helpers/emails_helper_spec.rb
+++ b/spec/helpers/emails_helper_spec.rb
@@ -8,19 +8,19 @@ describe EmailsHelper do
context "and format is text" do
it "returns plain text" do
- expect(closure_reason_text(merge_request, format: :text)).to eq(" via merge request #{merge_request.to_reference} (#{merge_request_presenter.web_url})")
+ expect(closure_reason_text(merge_request, format: :text)).to eq("via merge request #{merge_request.to_reference} (#{merge_request_presenter.web_url})")
end
end
context "and format is HTML" do
it "returns HTML" do
- expect(closure_reason_text(merge_request, format: :html)).to eq(" via merge request #{link_to(merge_request.to_reference, merge_request_presenter.web_url)}")
+ expect(closure_reason_text(merge_request, format: :html)).to eq("via merge request #{link_to(merge_request.to_reference, merge_request_presenter.web_url)}")
end
end
context "and format is unknown" do
it "returns plain text" do
- expect(closure_reason_text(merge_request, format: :text)).to eq(" via merge request #{merge_request.to_reference} (#{merge_request_presenter.web_url})")
+ expect(closure_reason_text(merge_request, format: :text)).to eq("via merge request #{merge_request.to_reference} (#{merge_request_presenter.web_url})")
end
end
end
@@ -29,7 +29,7 @@ describe EmailsHelper do
let(:closed_via) { "5a0eb6fd7e0f133044378c662fcbbc0d0c16dbfa" }
it "returns plain text" do
- expect(closure_reason_text(closed_via)).to eq(" via #{closed_via}")
+ expect(closure_reason_text(closed_via)).to eq("via #{closed_via}")
end
end
diff --git a/spec/javascripts/boards/boards_store_spec.js b/spec/javascripts/boards/boards_store_spec.js
index d3e6eb78e5a..9b125593869 100644
--- a/spec/javascripts/boards/boards_store_spec.js
+++ b/spec/javascripts/boards/boards_store_spec.js
@@ -287,4 +287,16 @@ describe('Store', () => {
expect(boardsStore.detail.issue).toEqual({});
});
});
+
+ describe('startMoving', () => {
+ it('stores list and issue', () => {
+ const dummyIssue = 'some issue';
+ const dummyList = 'some list';
+
+ boardsStore.startMoving(dummyList, dummyIssue);
+
+ expect(boardsStore.moving.issue).toEqual(dummyIssue);
+ expect(boardsStore.moving.list).toEqual(dummyList);
+ });
+ });
});
diff --git a/spec/javascripts/helpers/vue_test_utils_helper.js b/spec/javascripts/helpers/vue_test_utils_helper.js
index 19e27388eeb..121e99c9783 100644
--- a/spec/javascripts/helpers/vue_test_utils_helper.js
+++ b/spec/javascripts/helpers/vue_test_utils_helper.js
@@ -16,4 +16,6 @@ const vNodeContainsText = (vnode, text) =>
* @param {String} text
*/
export const shallowWrapperContainsSlotText = (shallowWrapper, slotName, text) =>
- !!shallowWrapper.vm.$slots[slotName].filter(vnode => vNodeContainsText(vnode, text)).length;
+ Boolean(
+ shallowWrapper.vm.$slots[slotName].filter(vnode => vNodeContainsText(vnode, text)).length,
+ );
diff --git a/spec/javascripts/matchers.js b/spec/javascripts/matchers.js
index 406527b08a3..7d1921cabcf 100644
--- a/spec/javascripts/matchers.js
+++ b/spec/javascripts/matchers.js
@@ -28,7 +28,7 @@ export default {
reference.getAttribute('xlink:href').endsWith(`#${iconName}`),
);
const result = {
- pass: !!matchingIcon,
+ pass: Boolean(matchingIcon),
};
if (result.pass) {
diff --git a/spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb
index c471c30a194..f4a6e1fc7d9 100644
--- a/spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb
@@ -61,6 +61,33 @@ describe Gitlab::Metrics::Samplers::PumaSampler do
end
end
+ context 'with empty worker stats' do
+ let(:puma_stats) do
+ <<~EOS
+ {
+ "workers": 2,
+ "phase": 2,
+ "booted_workers": 2,
+ "old_workers": 0,
+ "worker_status": [{
+ "pid": 32534,
+ "index": 0,
+ "phase": 1,
+ "booted": true,
+ "last_checkin": "2019-05-15T07:57:55Z",
+ "last_status": {}
+ }]
+ }
+ EOS
+ end
+
+ it 'does not log worker stats' do
+ expect(subject).not_to receive(:set_worker_metrics)
+
+ subject.sample
+ end
+ end
+
context 'in single mode' do
let(:puma_stats) do
<<~EOS
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index 42d4769a921..6382be73ea7 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -48,32 +48,116 @@ describe Ci::PipelineSchedule do
end
end
+ describe '.runnable_schedules' do
+ subject { described_class.runnable_schedules }
+
+ let!(:pipeline_schedule) do
+ Timecop.freeze(1.day.ago) do
+ create(:ci_pipeline_schedule, :hourly)
+ end
+ end
+
+ it 'returns the runnable schedule' do
+ is_expected.to eq([pipeline_schedule])
+ end
+
+ context 'when there are no runnable schedules' do
+ let!(:pipeline_schedule) { }
+
+ it 'returns an empty array' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ describe '.preloaded' do
+ subject { described_class.preloaded }
+
+ before do
+ create_list(:ci_pipeline_schedule, 3)
+ end
+
+ it 'preloads the associations' do
+ subject
+
+ query = ActiveRecord::QueryRecorder.new { subject.each(&:project) }
+
+ expect(query.count).to eq(2)
+ end
+ end
+
describe '#set_next_run_at' do
- let!(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly) }
+ let(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly) }
+ let(:ideal_next_run_at) { pipeline_schedule.send(:ideal_next_run_at) }
+
+ let(:expected_next_run_at) do
+ Gitlab::Ci::CronParser.new(Settings.cron_jobs['pipeline_schedule_worker']['cron'], Time.zone.name)
+ .next_time_from(ideal_next_run_at)
+ end
+
+ let(:cron_worker_next_run_at) do
+ Gitlab::Ci::CronParser.new(Settings.cron_jobs['pipeline_schedule_worker']['cron'], Time.zone.name)
+ .next_time_from(Time.now)
+ end
context 'when creates new pipeline schedule' do
- let(:expected_next_run_at) do
- Gitlab::Ci::CronParser.new(pipeline_schedule.cron, pipeline_schedule.cron_timezone)
- .next_time_from(Time.now)
+ it 'updates next_run_at automatically' do
+ expect(pipeline_schedule.next_run_at).to eq(expected_next_run_at)
end
+ end
- it 'updates next_run_at automatically' do
- expect(described_class.last.next_run_at).to eq(expected_next_run_at)
+ context 'when PipelineScheduleWorker runs at a specific interval' do
+ before do
+ allow(Settings).to receive(:cron_jobs) do
+ {
+ 'pipeline_schedule_worker' => {
+ 'cron' => '0 1 2 3 *'
+ }
+ }
+ end
+ end
+
+ it "updates next_run_at to the sidekiq worker's execution time" do
+ expect(pipeline_schedule.next_run_at.min).to eq(0)
+ expect(pipeline_schedule.next_run_at.hour).to eq(1)
+ expect(pipeline_schedule.next_run_at.day).to eq(2)
+ expect(pipeline_schedule.next_run_at.month).to eq(3)
end
end
- context 'when updates cron of exsisted pipeline schedule' do
- let(:new_cron) { '0 0 1 1 *' }
+ context 'when pipeline schedule runs every minute' do
+ let(:pipeline_schedule) { create(:ci_pipeline_schedule, :every_minute) }
- let(:expected_next_run_at) do
- Gitlab::Ci::CronParser.new(new_cron, pipeline_schedule.cron_timezone)
- .next_time_from(Time.now)
+ it "updates next_run_at to the sidekiq worker's execution time" do
+ expect(pipeline_schedule.next_run_at).to eq(cron_worker_next_run_at)
+ end
+ end
+
+ context 'when there are two different pipeline schedules in different time zones' do
+ let(:pipeline_schedule_1) { create(:ci_pipeline_schedule, :weekly, cron_timezone: 'Eastern Time (US & Canada)') }
+ let(:pipeline_schedule_2) { create(:ci_pipeline_schedule, :weekly, cron_timezone: 'UTC') }
+
+ it 'sets different next_run_at' do
+ expect(pipeline_schedule_1.next_run_at).not_to eq(pipeline_schedule_2.next_run_at)
+ end
+ end
+
+ context 'when there are two different pipeline schedules in the same time zones' do
+ let(:pipeline_schedule_1) { create(:ci_pipeline_schedule, :weekly, cron_timezone: 'UTC') }
+ let(:pipeline_schedule_2) { create(:ci_pipeline_schedule, :weekly, cron_timezone: 'UTC') }
+
+ it 'sets the same next_run_at' do
+ expect(pipeline_schedule_1.next_run_at).to eq(pipeline_schedule_2.next_run_at)
end
+ end
+
+ context 'when updating the cron of an existing pipeline schedule' do
+ let(:new_cron) { '0 0 1 1 *' }
it 'updates next_run_at automatically' do
pipeline_schedule.update!(cron: new_cron)
- expect(described_class.last.next_run_at).to eq(expected_next_run_at)
+ expect(pipeline_schedule.next_run_at).to eq(expected_next_run_at)
end
end
end
@@ -83,10 +167,11 @@ describe Ci::PipelineSchedule do
context 'when reschedules after 10 days from now' do
let(:future_time) { 10.days.from_now }
+ let(:ideal_next_run_at) { pipeline_schedule.send(:ideal_next_run_at) }
let(:expected_next_run_at) do
- Gitlab::Ci::CronParser.new(pipeline_schedule.cron, pipeline_schedule.cron_timezone)
- .next_time_from(future_time)
+ Gitlab::Ci::CronParser.new(Settings.cron_jobs['pipeline_schedule_worker']['cron'], Time.zone.name)
+ .next_time_from(ideal_next_run_at)
end
it 'points to proper next_run_at' do
@@ -99,38 +184,6 @@ describe Ci::PipelineSchedule do
end
end
- describe '#real_next_run' do
- subject do
- described_class.last.real_next_run(worker_cron: worker_cron,
- worker_time_zone: worker_time_zone)
- end
-
- context 'when GitLab time_zone is UTC' do
- before do
- allow(Time).to receive(:zone)
- .and_return(ActiveSupport::TimeZone[worker_time_zone])
- end
-
- let(:worker_time_zone) { 'UTC' }
-
- context 'when cron_timezone is Eastern Time (US & Canada)' do
- before do
- create(:ci_pipeline_schedule, :nightly,
- cron_timezone: 'Eastern Time (US & Canada)')
- end
-
- let(:worker_cron) { '0 1 2 3 *' }
-
- it 'returns the next time worker executes' do
- expect(subject.min).to eq(0)
- expect(subject.hour).to eq(1)
- expect(subject.day).to eq(2)
- expect(subject.month).to eq(3)
- end
- end
- end
- end
-
describe '#job_variables' do
let!(:pipeline_schedule) { create(:ci_pipeline_schedule) }
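The new .runnable_schedules examples above freeze time so the created schedule's next_run_at is already due. A compact sketch of that setup, assuming Timecop and the :hourly trait added earlier in this diff:

    schedule = Timecop.freeze(1.day.ago) do
      create(:ci_pipeline_schedule, :hourly)   # next_run_at is computed relative to the frozen time
    end
    Ci::PipelineSchedule.runnable_schedules    # => [schedule], since next_run_at is now in the past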
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index a0319b3eb0a..a8701f0efa4 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -1381,6 +1381,40 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe 'auto merge' do
+ let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
+
+ let(:pipeline) do
+ create(:ci_pipeline, :running, project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha)
+ end
+
+ before do
+ merge_request.update_head_pipeline
+ end
+
+ %w[succeed! drop! cancel! skip!].each do |action|
+ context "when the pipeline recieved #{action} event" do
+ it 'performs AutoMergeProcessWorker' do
+ expect(AutoMergeProcessWorker).to receive(:perform_async).with(merge_request.id)
+
+ pipeline.public_send(action)
+ end
+ end
+ end
+
+ context 'when auto merge is not enabled in the merge request' do
+ let(:merge_request) { create(:merge_request) }
+
+ it 'does not perform AutoMergeProcessWorker' do
+ expect(AutoMergeProcessWorker).not_to receive(:perform_async)
+
+ pipeline.succeed!
+ end
+ end
+ end
+
def create_build(name, *traits, queued_at: current, started_from: 0, **opts)
create(:ci_build, *traits,
name: name,
diff --git a/spec/models/clusters/applications/knative_spec.rb b/spec/models/clusters/applications/knative_spec.rb
index d5974f47190..b38cf96de7e 100644
--- a/spec/models/clusters/applications/knative_spec.rb
+++ b/spec/models/clusters/applications/knative_spec.rb
@@ -3,9 +3,6 @@
require 'rails_helper'
describe Clusters::Applications::Knative do
- include KubernetesHelpers
- include ReactiveCachingHelpers
-
let(:knative) { create(:clusters_applications_knative) }
include_examples 'cluster application core specs', :clusters_applications_knative
@@ -146,77 +143,4 @@ describe Clusters::Applications::Knative do
describe 'validations' do
it { is_expected.to validate_presence_of(:hostname) }
end
-
- describe '#service_pod_details' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:service) { cluster.platform_kubernetes }
- let(:knative) { create(:clusters_applications_knative, cluster: cluster) }
-
- let(:namespace) do
- create(:cluster_kubernetes_namespace,
- cluster: cluster,
- cluster_project: cluster.cluster_project,
- project: cluster.cluster_project.project)
- end
-
- before do
- stub_kubeclient_discover(service.api_url)
- stub_kubeclient_knative_services
- stub_kubeclient_service_pods
- stub_reactive_cache(knative,
- {
- services: kube_response(kube_knative_services_body),
- pods: kube_response(kube_knative_pods_body(cluster.cluster_project.project.name, namespace.namespace))
- })
- synchronous_reactive_cache(knative)
- end
-
- it 'is able k8s core for pod details' do
- expect(knative.service_pod_details(namespace.namespace, cluster.cluster_project.project.name)).not_to be_nil
- end
- end
-
- describe '#services' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:service) { cluster.platform_kubernetes }
- let(:knative) { create(:clusters_applications_knative, cluster: cluster) }
-
- let(:namespace) do
- create(:cluster_kubernetes_namespace,
- cluster: cluster,
- cluster_project: cluster.cluster_project,
- project: cluster.cluster_project.project)
- end
-
- subject { knative.services }
-
- before do
- stub_kubeclient_discover(service.api_url)
- stub_kubeclient_knative_services
- stub_kubeclient_service_pods
- end
-
- it 'has an unintialized cache' do
- is_expected.to be_nil
- end
-
- context 'when using synchronous reactive cache' do
- before do
- stub_reactive_cache(knative,
- {
- services: kube_response(kube_knative_services_body),
- pods: kube_response(kube_knative_pods_body(cluster.cluster_project.project.name, namespace.namespace))
- })
- synchronous_reactive_cache(knative)
- end
-
- it 'has cached services' do
- is_expected.not_to be_nil
- end
-
- it 'matches our namespace' do
- expect(knative.services_for(ns: namespace)).not_to be_nil
- end
- end
- end
end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 4739e62289a..f206bb41f45 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -38,6 +38,11 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
it { is_expected.to respond_to :project }
+ it do
+ expect(subject.knative_services_finder(subject.project))
+ .to be_instance_of(Clusters::KnativeServicesFinder)
+ end
+
describe '.enabled' do
subject { described_class.enabled }
diff --git a/spec/services/ci/pipeline_schedule_service_spec.rb b/spec/services/ci/pipeline_schedule_service_spec.rb
new file mode 100644
index 00000000000..f2ac53cb25a
--- /dev/null
+++ b/spec/services/ci/pipeline_schedule_service_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::PipelineScheduleService do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:service) { described_class.new(project, user) }
+
+ describe '#execute' do
+ subject { service.execute(schedule) }
+
+ let(:schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
+
+ it 'schedules next run' do
+ expect(schedule).to receive(:schedule_next_run!)
+
+ subject
+ end
+
+ it 'runs RunPipelineScheduleWorker' do
+ expect(RunPipelineScheduleWorker)
+ .to receive(:perform_async).with(schedule.id, schedule.owner.id)
+
+ subject
+ end
+ end
+end
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index ffa612cf315..29b7e0f17e2 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -52,6 +52,14 @@ describe MergeRequests::CloseService do
it 'marks todos as done' do
expect(todo.reload).to be_done
end
+
+ context 'when auto merge is enabled' do
+ let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
+
+ it 'cancels the auto merge' do
+ expect(@merge_request).not_to be_auto_merge_enabled
+ end
+ end
end
it 'updates metrics' do
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index 78b7ae9c00c..011c4df0fe5 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -17,17 +17,38 @@ module KubernetesHelpers
kube_response(kube_deployments_body)
end
- def stub_kubeclient_discover(api_url)
+ def stub_kubeclient_discover_base(api_url)
WebMock.stub_request(:get, api_url + '/api/v1').to_return(kube_response(kube_v1_discovery_body))
- WebMock.stub_request(:get, api_url + '/apis/extensions/v1beta1').to_return(kube_response(kube_v1beta1_discovery_body))
- WebMock.stub_request(:get, api_url + '/apis/rbac.authorization.k8s.io/v1').to_return(kube_response(kube_v1_rbac_authorization_discovery_body))
- WebMock.stub_request(:get, api_url + '/apis/serving.knative.dev/v1alpha1').to_return(kube_response(kube_v1alpha1_serving_knative_discovery_body))
+ WebMock
+ .stub_request(:get, api_url + '/apis/extensions/v1beta1')
+ .to_return(kube_response(kube_v1beta1_discovery_body))
+ WebMock
+ .stub_request(:get, api_url + '/apis/rbac.authorization.k8s.io/v1')
+ .to_return(kube_response(kube_v1_rbac_authorization_discovery_body))
+ end
+
+ def stub_kubeclient_discover(api_url)
+ stub_kubeclient_discover_base(api_url)
+
+ WebMock
+ .stub_request(:get, api_url + '/apis/serving.knative.dev/v1alpha1')
+ .to_return(kube_response(kube_v1alpha1_serving_knative_discovery_body))
+ end
+
+ def stub_kubeclient_discover_knative_not_found(api_url)
+ stub_kubeclient_discover_base(api_url)
+
+ WebMock
+ .stub_request(:get, api_url + '/apis/serving.knative.dev/v1alpha1')
+ .to_return(status: [404, "Resource Not Found"])
end
- def stub_kubeclient_service_pods(status: nil)
+ def stub_kubeclient_service_pods(response = nil, options = {})
stub_kubeclient_discover(service.api_url)
- pods_url = service.api_url + "/api/v1/pods"
- response = { status: status } if status
+
+ namespace_path = options[:namespace].present? ? "namespaces/#{options[:namespace]}/" : ""
+
+ pods_url = service.api_url + "/api/v1/#{namespace_path}pods"
WebMock.stub_request(:get, pods_url).to_return(response || kube_pods_response)
end
@@ -56,15 +77,18 @@ module KubernetesHelpers
WebMock.stub_request(:get, deployments_url).to_return(response || kube_deployments_response)
end
- def stub_kubeclient_knative_services(**options)
+ def stub_kubeclient_knative_services(options = {})
+ namespace_path = options[:namespace].present? ? "namespaces/#{options[:namespace]}/" : ""
+
options[:name] ||= "kubetest"
- options[:namespace] ||= "default"
options[:domain] ||= "example.com"
+ options[:response] ||= kube_response(kube_knative_services_body(options))
stub_kubeclient_discover(service.api_url)
- knative_url = service.api_url + "/apis/serving.knative.dev/v1alpha1/services"
- WebMock.stub_request(:get, knative_url).to_return(kube_response(kube_knative_services_body(options)))
+ knative_url = service.api_url + "/apis/serving.knative.dev/v1alpha1/#{namespace_path}services"
+
+ WebMock.stub_request(:get, knative_url).to_return(options[:response])
end
def stub_kubeclient_get_secret(api_url, **options)
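The helper changes above switch stub_kubeclient_service_pods and stub_kubeclient_knative_services to take an optional response plus a namespace option; the calls used elsewhere in this diff look like this (sketch only, not additional commit content):

    # Default pod list, scoped to a namespace
    stub_kubeclient_service_pods(nil, namespace: namespace.namespace)
    # Explicit empty responses
    stub_kubeclient_service_pods(
      kube_response({ "kind" => "PodList", "items" => [] }),
      namespace: namespace.namespace
    )
    stub_kubeclient_knative_services(
      namespace: namespace.namespace,
      response: kube_response({ "kind" => "ServiceList", "items" => [] })
    )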
diff --git a/spec/workers/auto_merge_process_worker_spec.rb b/spec/workers/auto_merge_process_worker_spec.rb
new file mode 100644
index 00000000000..616727ce5ca
--- /dev/null
+++ b/spec/workers/auto_merge_process_worker_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe AutoMergeProcessWorker do
+ describe '#perform' do
+ subject { described_class.new.perform(merge_request&.id) }
+
+ context 'when merge request is found' do
+ let(:merge_request) { create(:merge_request) }
+
+ it 'executes AutoMergeService' do
+ expect_next_instance_of(AutoMergeService) do |auto_merge|
+ expect(auto_merge).to receive(:process)
+ end
+
+ subject
+ end
+ end
+
+ context 'when merge request is not found' do
+ let(:merge_request) { nil }
+
+ it 'does not execute AutoMergeService' do
+ expect(AutoMergeService).not_to receive(:new)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/pipeline_schedule_worker_spec.rb b/spec/workers/pipeline_schedule_worker_spec.rb
index 8c604b13297..9326db34209 100644
--- a/spec/workers/pipeline_schedule_worker_spec.rb
+++ b/spec/workers/pipeline_schedule_worker_spec.rb
@@ -41,16 +41,6 @@ describe PipelineScheduleWorker do
it_behaves_like 'successful scheduling'
- context 'when exclusive lease has already been taken by the other instance' do
- before do
- stub_exclusive_lease_taken(described_class::EXCLUSIVE_LOCK_KEY, timeout: described_class::LOCK_TIMEOUT)
- end
-
- it 'raises an error and does not start creating pipelines' do
- expect { subject }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
- end
- end
-
context 'when the latest commit contains [ci skip]' do
before do
allow_any_instance_of(Ci::Pipeline)
@@ -77,47 +67,19 @@ describe PipelineScheduleWorker do
stub_ci_pipeline_yaml_file(YAML.dump(rspec: { variables: 'rspec' } ))
end
- it 'creates a failed pipeline with the reason' do
- expect { subject }.to change { project.ci_pipelines.count }.by(1)
- expect(Ci::Pipeline.last).to be_config_error
- expect(Ci::Pipeline.last.yaml_errors).not_to be_nil
+ it 'does not create a new pipeline' do
+ expect { subject }.not_to change { project.ci_pipelines.count }
end
end
end
context 'when the schedule is not runnable by the user' do
- before do
- expect(Gitlab::Sentry)
- .to receive(:track_exception)
- .with(Ci::CreatePipelineService::CreateError,
- issue_url: 'https://gitlab.com/gitlab-org/gitlab-ce/issues/41231',
- extra: { schedule_id: pipeline_schedule.id } ).once
- end
-
it 'does not deactivate the schedule' do
subject
expect(pipeline_schedule.reload.active).to be_truthy
end
- it 'increments Prometheus counter' do
- expect(Gitlab::Metrics)
- .to receive(:counter)
- .with(:pipeline_schedule_creation_failed_total, "Counter of failed attempts of pipeline schedule creation")
- .and_call_original
-
- subject
- end
-
- it 'logging a pipeline error' do
- expect(Rails.logger)
- .to receive(:error)
- .with(a_string_matching("Insufficient permissions to create a new pipeline"))
- .and_call_original
-
- subject
- end
-
it 'does not create a pipeline' do
expect { subject }.not_to change { project.ci_pipelines.count }
end
@@ -131,21 +93,6 @@ describe PipelineScheduleWorker do
before do
stub_ci_pipeline_yaml_file(nil)
project.add_maintainer(user)
-
- expect(Gitlab::Sentry)
- .to receive(:track_exception)
- .with(Ci::CreatePipelineService::CreateError,
- issue_url: 'https://gitlab.com/gitlab-org/gitlab-ce/issues/41231',
- extra: { schedule_id: pipeline_schedule.id } ).once
- end
-
- it 'logging a pipeline error' do
- expect(Rails.logger)
- .to receive(:error)
- .with(a_string_matching("Missing .gitlab-ci.yml file"))
- .and_call_original
-
- subject
end
it 'does not create a pipeline' do
diff --git a/spec/workers/pipeline_success_worker_spec.rb b/spec/workers/pipeline_success_worker_spec.rb
deleted file mode 100644
index b511edfa620..00000000000
--- a/spec/workers/pipeline_success_worker_spec.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe PipelineSuccessWorker do
- describe '#perform' do
- context 'when pipeline exists' do
- let(:pipeline) { create(:ci_pipeline, status: 'success', ref: merge_request.source_branch, project: merge_request.source_project) }
- let(:merge_request) { create(:merge_request) }
-
- it 'performs "merge when pipeline succeeds"' do
- expect_next_instance_of(AutoMergeService) do |auto_merge|
- expect(auto_merge).to receive(:process)
- end
-
- described_class.new.perform(pipeline.id)
- end
- end
-
- context 'when pipeline does not exist' do
- it 'does not raise exception' do
- expect { described_class.new.perform(123) }
- .not_to raise_error
- end
- end
- end
-end
diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb
index 690af22f4dc..7414470f8e7 100644
--- a/spec/workers/run_pipeline_schedule_worker_spec.rb
+++ b/spec/workers/run_pipeline_schedule_worker_spec.rb
@@ -32,7 +32,37 @@ describe RunPipelineScheduleWorker do
it 'calls the Service' do
expect(Ci::CreatePipelineService).to receive(:new).with(project, user, ref: pipeline_schedule.ref).and_return(create_pipeline_service)
- expect(create_pipeline_service).to receive(:execute).with(:schedule, ignore_skip_ci: true, save_on_errors: false, schedule: pipeline_schedule)
+ expect(create_pipeline_service).to receive(:execute!).with(:schedule, ignore_skip_ci: true, save_on_errors: false, schedule: pipeline_schedule)
+
+ worker.perform(pipeline_schedule.id, user.id)
+ end
+ end
+
+ context 'when database statement timeout happens' do
+ before do
+ allow(Ci::CreatePipelineService).to receive(:new) { raise ActiveRecord::StatementInvalid }
+
+ expect(Gitlab::Sentry)
+ .to receive(:track_exception)
+ .with(ActiveRecord::StatementInvalid,
+ issue_url: 'https://gitlab.com/gitlab-org/gitlab-ce/issues/41231',
+ extra: { schedule_id: pipeline_schedule.id } ).once
+ end
+
+ it 'increments Prometheus counter' do
+ expect(Gitlab::Metrics)
+ .to receive(:counter)
+ .with(:pipeline_schedule_creation_failed_total, "Counter of failed attempts of pipeline schedule creation")
+ .and_call_original
+
+ worker.perform(pipeline_schedule.id, user.id)
+ end
+
+ it 'logs a pipeline error' do
+ expect(Rails.logger)
+ .to receive(:error)
+ .with(a_string_matching('ActiveRecord::StatementInvalid'))
+ .and_call_original
worker.perform(pipeline_schedule.id, user.id)
end