| author | GitLab Bot <gitlab-bot@gitlab.com> | 2023-01-23 09:08:21 +0000 |
|---|---|---|
| committer | GitLab Bot <gitlab-bot@gitlab.com> | 2023-01-23 09:08:21 +0000 |
| commit | 5b1258ee90fb29779d6c9da3f488ebff61e243a3 (patch) | |
| tree | ff63f9b2f31759d2f20126219997c7230b5b822f | |
| parent | 9cc33a92d0d4e79d7ca4a1e7b4400fbbdda33933 (diff) | |
| download | gitlab-ce-5b1258ee90fb29779d6c9da3f488ebff61e243a3.tar.gz | |
Add latest changes from gitlab-org/gitlab@master
36 files changed, 190 insertions, 54 deletions
@@ -150,7 +150,13 @@ gem 'fog-google', '~> 1.19', require: 'fog/google'
 gem 'fog-local', '~> 0.8'
 gem 'fog-openstack', '~> 1.0'
 gem 'fog-rackspace', '~> 0.1.1'
-gem 'fog-aliyun', '~> 0.3'
+# NOTE:
+# the fog-aliyun gem since v0.4 pulls in aliyun-sdk transitively, which monkey-patches
+# the rest-client gem to drop the Content-Length header field for chunked transfers,
+# which may have knock-on effects on other features using `RestClient`.
+# We may want to update this dependency if this is ever addressed upstream, e.g. via
+# https://github.com/aliyun/aliyun-oss-ruby-sdk/pull/93
+gem 'fog-aliyun', '~> 0.4'
 gem 'gitlab-fog-azure-rm', '~> 1.4.0', require: 'fog/azurerm'

 # for Google storage
diff --git a/Gemfile.checksum b/Gemfile.checksum
index 0b00383ccef..0b949733701 100644
--- a/Gemfile.checksum
+++ b/Gemfile.checksum
@@ -18,6 +18,7 @@
 {"name":"addressable","version":"2.8.1","platform":"ruby","checksum":"bc724a176ef02118c8a3ed6b5c04c39cf59209607ffcce77b91d0261dbadedfa"},
 {"name":"aes_key_wrap","version":"1.1.0","platform":"ruby","checksum":"b935f4756b37375895db45669e79dfcdc0f7901e12d4e08974d5540c8e0776a5"},
 {"name":"akismet","version":"3.0.0","platform":"ruby","checksum":"74991b8e3d3257eeea996b47069abb8da2006c84a144255123e8dffd1c86b230"},
+{"name":"aliyun-sdk","version":"0.8.0","platform":"ruby","checksum":"65915d3f9b528082253d1f9ad0e4d13d6b552933fe49251c68c6915cd4d75b9d"},
 {"name":"android_key_attestation","version":"0.3.0","platform":"ruby","checksum":"467eb01a99d2bb48ef9cf24cc13712669d7056cba5a52d009554ff037560570b"},
 {"name":"apollo_upload_server","version":"2.1.0","platform":"ruby","checksum":"e5f3c9dda0c2ca775d007072742b98d517dfd91a667111fedbcdc94dfabd904e"},
 {"name":"app_store_connect","version":"0.29.0","platform":"ruby","checksum":"01d7a923825a4221892099acb5a72f86f6ee7d8aa95815d3c459ba6816ea430f"},
@@ -178,7 +179,7 @@
 {"name":"flipper","version":"0.25.0","platform":"ruby","checksum":"ccb2776752b8378bc994c9d873ccde290c090341940761b873494695ee697add"},
 {"name":"flipper-active_record","version":"0.25.0","platform":"ruby","checksum":"85a5c99465e2cc6a09e91931a9998b0dbd463cd6c80dd513129377132e3eb67f"},
 {"name":"flipper-active_support_cache_store","version":"0.25.0","platform":"ruby","checksum":"7282bf994b08d1a076b65c6f3b51e3dc04fcb00fa6e7b20089e60db25c7b531b"},
-{"name":"fog-aliyun","version":"0.3.3","platform":"ruby","checksum":"d0aa317f7c1473a1d684fff51699f216bb9cb78b9ee9ce55a81c9bcc93fb85ee"},
+{"name":"fog-aliyun","version":"0.4.0","platform":"ruby","checksum":"8f2334604beb781eafbb9cd5f50141fbb2c7eb77c7f2b01f45c2e04db0e5cc38"},
 {"name":"fog-aws","version":"3.15.0","platform":"ruby","checksum":"09752931ea0c6165b018e1a89253248d86b246645086ccf19bc44fabe3381e8c"},
 {"name":"fog-core","version":"2.1.0","platform":"ruby","checksum":"53e5d793554d7080d015ef13cd44b54027e421d924d9dba4ce3d83f95f37eda9"},
 {"name":"fog-google","version":"1.19.0","platform":"ruby","checksum":"3c909a230837fe84117fffdfd927b523821b88f61d3aeab531e1417a9810f488"},
diff --git a/Gemfile.lock b/Gemfile.lock
index 2298cb8bd83..832102ce337 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -168,6 +168,9 @@ GEM
       public_suffix (>= 2.0.2, < 6.0)
     aes_key_wrap (1.1.0)
     akismet (3.0.0)
+    aliyun-sdk (0.8.0)
+      nokogiri (~> 1.6)
+      rest-client (~> 2.0)
     android_key_attestation (0.3.0)
     apollo_upload_server (2.1.0)
       actionpack (>= 4.2)
@@ -494,7 +497,9 @@ GEM
     flipper-active_support_cache_store (0.25.0)
       activesupport (>= 4.2, < 8)
       flipper (~> 0.25.0)
-    fog-aliyun (0.3.3)
+    fog-aliyun (0.4.0)
+      addressable (~> 2.8.0)
+      aliyun-sdk (~> 0.8.0)
       fog-core
       fog-json
       ipaddress (~> 0.8)
@@ -1652,7 +1657,7 @@ DEPENDENCIES
   flipper (~> 0.25.0)
   flipper-active_record (~> 0.25.0)
   flipper-active_support_cache_store (~> 0.25.0)
-  fog-aliyun (~> 0.3)
+  fog-aliyun (~> 0.4)
   fog-aws (~> 3.15)
   fog-core (= 2.1.0)
   fog-google (~> 1.19)
diff --git a/app/helpers/projects_helper.rb b/app/helpers/projects_helper.rb
index 507e05b9967..7fd14363377 100644
--- a/app/helpers/projects_helper.rb
+++ b/app/helpers/projects_helper.rb
@@ -123,6 +123,25 @@ module ProjectsHelper
     end
   end
 
+  def vue_fork_divergence_data(project, ref)
+    source_project = visible_fork_source(project)
+
+    return {} unless source_project
+
+    source_default_branch = source_project.default_branch
+
+    {
+      source_name: source_project.full_name,
+      source_path: project_path(source_project),
+      ahead_compare_path: project_compare_path(
+        project, from: source_default_branch, to: ref, from_project_id: source_project.id
+      ),
+      behind_compare_path: project_compare_path(
+        source_project, from: ref, to: source_default_branch, from_project_id: project.id
+      )
+    }
+  end
+
   def remove_fork_project_warning_message(project)
     _("You are going to remove the fork relationship from %{project_full_name}. Are you ABSOLUTELY sure?") %
       { project_full_name: project.full_name }
diff --git a/app/services/ci/pipeline_schedule_service.rb b/app/services/ci/pipeline_schedule_service.rb
index d320382d19f..e9fdadedcc3 100644
--- a/app/services/ci/pipeline_schedule_service.rb
+++ b/app/services/ci/pipeline_schedule_service.rb
@@ -8,7 +8,7 @@ module Ci
       # Ensure `next_run_at` is set properly before creating a pipeline.
       # Otherwise, multiple pipelines could be created in a short interval.
       schedule.schedule_next_run!
-      RunPipelineScheduleWorker.perform_async(schedule.id, current_user&.id)
+      RunPipelineScheduleWorker.perform_async(schedule.id, current_user&.id, next_run_scheduled: true)
     end
   end
 end
diff --git a/app/views/admin/applications/index.html.haml b/app/views/admin/applications/index.html.haml
index a92bad5e601..d6a0974d10f 100644
--- a/app/views/admin/applications/index.html.haml
+++ b/app/views/admin/applications/index.html.haml
@@ -43,7 +43,9 @@
         %td= application.redirect_uri
         %td= application.trusted? ? _('Yes'): _('No')
         %td= application.confidential? ? _('Yes'): _('No')
-        %td= link_to 'Edit', edit_admin_application_path(application), class: 'gl-button btn btn-link'
+        %td
+          = render Pajamas::ButtonComponent.new(href: edit_admin_application_path(application), variant: :link) do
+            = _('Edit')
         %td= render 'delete_form', application: application

 = paginate @applications, theme: 'gitlab'
diff --git a/app/views/admin/labels/index.html.haml b/app/views/admin/labels/index.html.haml
index d6f2898a383..8d6df064c3c 100644
--- a/app/views/admin/labels/index.html.haml
+++ b/app/views/admin/labels/index.html.haml
@@ -1,7 +1,9 @@
 - page_title _("Labels")

 %div
-  = link_to new_admin_label_path, class: "float-right btn gl-button btn-confirm" do
+  = render Pajamas::ButtonComponent.new(variant: :confirm,
+    href: new_admin_label_path,
+    button_options: { class: 'float-right' }) do
     = _('New label')
 %h1.page-title.gl-font-size-h-display
   = _('Labels')
diff --git a/app/views/projects/_files.html.haml b/app/views/projects/_files.html.haml
index e4eed63f45a..bd732f7b016 100644
--- a/app/views/projects/_files.html.haml
+++ b/app/views/projects/_files.html.haml
@@ -4,7 +4,6 @@
 - ref = local_assigns.fetch(:ref) { current_ref }
 - project = local_assigns.fetch(:project) { @project }
 - add_page_startup_api_call logs_file_project_ref_path(@project, ref, @path, format: "json", offset: 0)
-- source = visible_fork_source(@project)

 - if readme_path = @project.repository.readme_path
   - add_page_startup_api_call project_blob_path(@project, tree_join(@ref, readme_path), viewer: "rich", format: "json")
@@ -18,7 +17,7 @@
     = render 'projects/tree/tree_header', tree: @tree, is_project_overview: is_project_overview

   - if project.forked? && Feature.enabled?(:fork_divergence_counts, @project.fork_source)
-    #js-fork-info{ data: { source_name: source ? source.full_name : '', source_path: source ? project_path(source) : '' } }
+    #js-fork-info{ data: vue_fork_divergence_data(project, ref) }

   - if is_project_overview
     .project-buttons.gl-mb-5.js-show-on-project-root{ data: { qa_selector: 'project_buttons' } }
diff --git a/app/workers/run_pipeline_schedule_worker.rb b/app/workers/run_pipeline_schedule_worker.rb
index db82cf3af91..0e83c0691b4 100644
--- a/app/workers/run_pipeline_schedule_worker.rb
+++ b/app/workers/run_pipeline_schedule_worker.rb
@@ -19,7 +19,7 @@ class RunPipelineScheduleWorker # rubocop:disable Scalability/IdempotentWorker

     return unless schedule && schedule.project && user

-    if Feature.enabled?(:ci_use_run_pipeline_schedule_worker)
+    if Feature.enabled?(:ci_use_run_pipeline_schedule_worker) && !options[:next_run_scheduled]
       return if schedule.next_run_at > Time.current

       update_next_run_at_for(schedule)
diff --git a/doc/user/analytics/value_stream_analytics.md b/doc/user/analytics/value_stream_analytics.md
index 0906f7d17a7..093266e8aee 100644
--- a/doc/user/analytics/value_stream_analytics.md
+++ b/doc/user/analytics/value_stream_analytics.md
@@ -214,3 +214,33 @@ as every merge request should be tested.
   stream analytics dashboard shows the calculated median elapsed time for these issues.
 - Value stream analytics identifies production environments based on the
   [deployment tier of environments](../../ci/environments/index.md#deployment-tier-of-environments).
+
+## Troubleshooting
+
+### 100% CPU utilization by Sidekiq `cronjob:analytics_cycle_analytics`
+
+It is possible that Value stream analytics background jobs
+strongly impact performance by monopolizing CPU resources.
+
+To recover from this situation:
+
+1. Disable the feature for all projects in [the Rails console](../../administration/operations/rails_console.md),
+   and remove existing jobs:
+
+   ```ruby
+   Project.find_each do |p|
+     p.analytics_access_level='disabled';
+     p.save!
+   end
+
+   Analytics::CycleAnalytics::GroupStage.delete_all
+   Analytics::CycleAnalytics::Aggregation.delete_all
+   ```
+
+1. Configure a [Sidekiq routing](../../administration/sidekiq/processing_specific_job_classes.md)
+   with for example a single `feature_category=value_stream_management`
+   and multiple `feature_category!=value_stream_management` entries.
+   Find other relevant queue metadata in the
+   [Enterprise Edition list](../../administration/sidekiq/processing_specific_job_classes.md#list-of-available-job-classes).
+1. Enable value stream analytics for one project after another.
+   You might need to tweak the Sidekiq routing further according to your performance requirements.
diff --git a/doc/user/group/subgroups/index.md b/doc/user/group/subgroups/index.md
index f8d3456648d..9be1027ae2f 100644
--- a/doc/user/group/subgroups/index.md
+++ b/doc/user/group/subgroups/index.md
@@ -11,7 +11,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
 You can organize GitLab [groups](../index.md) into subgroups. You can use subgroups to:

 - Separate internal and external organizations. Because every subgroup can have its own
-  [visibility level](../../../development/permissions.md#general-permissions), you can host groups for different
+  [visibility level](../../public_access.md), you can host groups for different
   purposes under the same parent group.
 - Organize large projects. You can use subgroups to give different access to parts of the source code.
diff --git a/doc/user/group/value_stream_analytics/index.md b/doc/user/group/value_stream_analytics/index.md
index 8635b4567ef..14f2a1c4057 100644
--- a/doc/user/group/value_stream_analytics/index.md
+++ b/doc/user/group/value_stream_analytics/index.md
@@ -367,3 +367,7 @@ To view tasks by type:
    and select **Issues** or **Merge Requests**.
 1. To add or remove labels, select the **Settings** (**{settings}**) dropdown list
    and select or search for a label. By default the top group-level labels (maximum 10) are selected. You can select a maximum of 15 labels.
+
+## Troubleshooting
+
+See [Value stream analytics for projects](../../analytics/value_stream_analytics.md#troubleshooting).
diff --git a/doc/user/project/integrations/webhooks.md b/doc/user/project/integrations/webhooks.md
index 3d45e947c4c..cc4682f2d9d 100644
--- a/doc/user/project/integrations/webhooks.md
+++ b/doc/user/project/integrations/webhooks.md
@@ -258,15 +258,17 @@ For more information about supported events for Webhooks, go to [Webhook events]

 ## Delivery headers

-> `X-Gitlab-Instance` header [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/31333) in GitLab 15.5.
+> - `X-Gitlab-Event-UUID` header [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/329743) in GitLab 14.8.
+> - `X-Gitlab-Instance` header [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/31333) in GitLab 15.5.

 Webhook requests to your endpoint include the following headers:

 | Header | Description | Example |
 | ------ | ------ | ------ |
 | `User-Agent` | In the format `"Gitlab/<VERSION>"`. | `"GitLab/15.5.0-pre"` |
-| `X-Gitlab-Event` | Name of the webhook type. Corresponds to [event types](webhook_events.md) but in the format `"<EVENT> Hook"`. | `"Push Hook"` |
 | `X-Gitlab-Instance` | Hostname of the GitLab instance that sent the webhook. | `"https://gitlab.com"` |
+| `X-Gitlab-Event` | Name of the webhook type. Corresponds to [event types](webhook_events.md) but in the format `"<EVENT> Hook"`. | `"Push Hook"` |
+| `X-Gitlab-Event-UUID` | Unique ID per webhook that is not recursive. A hook is recursive if triggered by an earlier webhook that hit the GitLab instance. Recursive webhooks have the same value for this header. | `"13792a34-cac6-4fda-95a8-c58e00a3954e"` |

 ## Troubleshoot webhooks
diff --git a/doc/user/project/push_options.md b/doc/user/project/push_options.md
index 9e5413b020e..796280d0f1e 100644
--- a/doc/user/project/push_options.md
+++ b/doc/user/project/push_options.md
@@ -36,7 +36,7 @@ You can use push options to skip a CI/CD pipeline, or pass CI/CD variables.

 | Push option | Description | Introduced in version |
 | ------------------------------ | ------------------------------------------------------------------------------------------- |---------------------- |
 | `ci.skip` | Do not create a CI pipeline for the latest push. Only skips branch pipelines and not [merge request pipelines](../../ci/pipelines/merge_request_pipelines.md). | [11.7](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/15643) |
-| `ci.variable="<name>=<value>"` | Provide [CI/CD variables](../../ci/variables/index.md) to be used in a CI pipeline, if one is created due to the push. | [12.6](https://gitlab.com/gitlab-org/gitlab/-/issues/27983) |
+| `ci.variable="<name>=<value>"` | Provide [CI/CD variables](../../ci/variables/index.md) to be used in a CI pipeline, if one is created due to the push. Only passes variables to branch pipelines and not [merge request pipelines](../../ci/pipelines/merge_request_pipelines.md). | [12.6](https://gitlab.com/gitlab-org/gitlab/-/issues/27983) |

 An example of using `ci.skip`:
diff --git a/lib/gitlab/email/message/in_product_marketing/create.rb b/lib/gitlab/email/message/in_product_marketing/create.rb
index 6b01c83b8e7..68f9a9a21c9 100644
--- a/lib/gitlab/email/message/in_product_marketing/create.rb
+++ b/lib/gitlab/email/message/in_product_marketing/create.rb
@@ -68,7 +68,7 @@ module Gitlab
           private

           def project_link
-            link(s_('InProductMarketing|create a project'), help_page_url('gitlab-basics/create-project'))
+            link(s_('InProductMarketing|create a project'), help_page_url('user/project/index'))
           end

           def repo_link
@@ -76,7 +76,7 @@ module Gitlab
           end

           def github_link
-            link(s_('InProductMarketing|GitHub Enterprise projects to GitLab'), help_page_url('integration/github'))
+            link(s_('InProductMarketing|GitHub Enterprise projects to GitLab'), help_page_url('user/project/import/github'))
           end

           def bitbucket_link
@@ -84,11 +84,11 @@ module Gitlab
           end

           def mirroring_link
-            link(s_('InProductMarketing|repository mirroring'), help_page_url('user/project/repository/repository_mirroring'))
+            link(s_('InProductMarketing|repository mirroring'), help_page_url('user/project/repository/mirror/index'))
           end

           def basics_link
-            link(s_('InProductMarketing|Git basics'), help_page_url('gitlab-basics/index'))
+            link(s_('InProductMarketing|Git basics'), help_page_url('topics/git/index'))
           end

           def import_link
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index 3c7eba2cc97..c0a6bd7255e 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -114,7 +114,8 @@ RSpec.describe "Admin::Projects", feature_category: :projects do
       end
     end

-    it 'transfers project to group web', :js do
+    it 'transfers project to group web', :js,
+      quarantine: 'https://gitlab.com/gitlab-org/quality/engineering-productivity/q/-/issues/668' do
       visit admin_project_path(project)

       click_button 'Search for Namespace'
diff --git a/spec/frontend/clusters_list/store/actions_spec.js b/spec/frontend/clusters_list/store/actions_spec.js
index 1deebf8b75a..8e03f8105c2 100644
--- a/spec/frontend/clusters_list/store/actions_spec.js
+++ b/spec/frontend/clusters_list/store/actions_spec.js
@@ -7,6 +7,7 @@ import * as actions from '~/clusters_list/store/actions';
 import * as types from '~/clusters_list/store/mutation_types';
 import { createAlert } from '~/flash';
 import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
 import Poll from '~/lib/utils/poll';
 import { apiData } from '../mock_data';

@@ -81,7 +82,7 @@ describe('Clusters store actions', () => {
     });

     it('should show flash on API error', async () => {
-      mock.onGet().reply(400, 'Not Found');
+      mock.onGet().reply(HTTP_STATUS_BAD_REQUEST, 'Not Found');

       await testAction(
         actions.fetchClusters,
diff --git a/spec/frontend/contributors/store/actions_spec.js b/spec/frontend/contributors/store/actions_spec.js
index 865f683a91a..a4a78fc12ee 100644
--- a/spec/frontend/contributors/store/actions_spec.js
+++ b/spec/frontend/contributors/store/actions_spec.js
@@ -4,6 +4,7 @@ import * as actions from '~/contributors/stores/actions';
 import * as types from '~/contributors/stores/mutation_types';
 import { createAlert } from '~/flash';
 import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';

 jest.mock('~/flash.js');

@@ -38,7 +39,7 @@ describe('Contributors store actions', () => {
     });

     it('should show flash on API error', async () => {
-      mock.onGet().reply(400, 'Not Found');
+      mock.onGet().reply(HTTP_STATUS_BAD_REQUEST, 'Not Found');

       await testAction(
         actions.fetchChartData,
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index 9cb7fa4fb4b..3e9d5363947 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -16,7 +16,11 @@ import * as treeWorkerUtils from '~/diffs/utils/tree_worker_utils';
 import { createAlert } from '~/flash';
 import axios from '~/lib/utils/axios_utils';
 import * as commonUtils from '~/lib/utils/common_utils';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_NOT_FOUND } from '~/lib/utils/http_status';
+import {
+  HTTP_STATUS_BAD_REQUEST,
+  HTTP_STATUS_INTERNAL_SERVER_ERROR,
+  HTTP_STATUS_NOT_FOUND,
+} from '~/lib/utils/http_status';
 import { mergeUrlParams } from '~/lib/utils/url_utility';
 import eventHub from '~/notes/event_hub';
 import { diffMetadata } from '../mock_data/diff_metadata';
@@ -261,7 +265,7 @@ describe('DiffsStoreActions', () => {
     });

     it('should show flash on API error', async () => {
-      mock.onGet(endpointCoverage).reply(400);
+      mock.onGet(endpointCoverage).reply(HTTP_STATUS_BAD_REQUEST);

       await testAction(diffActions.fetchCoverageFiles, {}, { endpointCoverage }, [], []);
       expect(createAlert).toHaveBeenCalledTimes(1);
diff --git a/spec/frontend/dropzone_input_spec.js b/spec/frontend/dropzone_input_spec.js
index 0f7926ccbf9..fdd157dd09f 100644
--- a/spec/frontend/dropzone_input_spec.js
+++ b/spec/frontend/dropzone_input_spec.js
@@ -7,7 +7,7 @@ import { TEST_HOST } from 'spec/test_constants';
 import PasteMarkdownTable from '~/behaviors/markdown/paste_markdown_table';
 import dropzoneInput from '~/dropzone_input';
 import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';

 const TEST_FILE = new File([], 'somefile.jpg');
 TEST_FILE.upload = {};
@@ -161,7 +161,7 @@ describe('dropzone_input', () => {
       ${'text/plain'} | ${TEST_ERROR_MESSAGE}
     `('when AJAX fails with json', ({ responseType, responseBody }) => {
       mock.post(TEST_UPLOAD_PATH, {
-        status: 400,
+        status: HTTP_STATUS_BAD_REQUEST,
         body: responseBody,
         headers: { 'Content-Type': responseType },
       });
diff --git a/spec/frontend/error_tracking/store/actions_spec.js b/spec/frontend/error_tracking/store/actions_spec.js
index 8f085282f80..6e6fc6f3351 100644
--- a/spec/frontend/error_tracking/store/actions_spec.js
+++ b/spec/frontend/error_tracking/store/actions_spec.js
@@ -4,6 +4,7 @@ import * as actions from '~/error_tracking/store/actions';
 import * as types from '~/error_tracking/store/mutation_types';
 import { createAlert } from '~/flash';
 import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
 import { visitUrl } from '~/lib/utils/url_utility';

 jest.mock('~/flash.js');
@@ -46,7 +47,7 @@ describe('Sentry common store actions', () => {
     });

     it('should handle unsuccessful status update', async () => {
-      mock.onPut().reply(400, {});
+      mock.onPut().reply(HTTP_STATUS_BAD_REQUEST, {});
       await testAction(actions.updateStatus, params, {}, [], []);
       expect(visitUrl).not.toHaveBeenCalled();
       expect(createAlert).toHaveBeenCalledTimes(1);
diff --git a/spec/frontend/error_tracking/store/details/actions_spec.js b/spec/frontend/error_tracking/store/details/actions_spec.js
index 6248bef9dcd..27ab314564f 100644
--- a/spec/frontend/error_tracking/store/details/actions_spec.js
+++ b/spec/frontend/error_tracking/store/details/actions_spec.js
@@ -4,7 +4,7 @@ import * as actions from '~/error_tracking/store/details/actions';
 import * as types from '~/error_tracking/store/details/mutation_types';
 import { createAlert } from '~/flash';
 import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_NO_CONTENT } from '~/lib/utils/http_status';
+import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_NO_CONTENT } from '~/lib/utils/http_status';
 import Poll from '~/lib/utils/poll';

 let mockedAdapter;
@@ -45,7 +45,7 @@ describe('Sentry error details store actions', () => {
     });

     it('should show flash on API error', async () => {
-      mockedAdapter.onGet().reply(400);
+      mockedAdapter.onGet().reply(HTTP_STATUS_BAD_REQUEST);

       await testAction(
         actions.startPollingStacktrace,
diff --git a/spec/frontend/error_tracking_settings/store/actions_spec.js b/spec/frontend/error_tracking_settings/store/actions_spec.js
index cb335bdbaf6..40f82b58fe5 100644
--- a/spec/frontend/error_tracking_settings/store/actions_spec.js
+++ b/spec/frontend/error_tracking_settings/store/actions_spec.js
@@ -6,7 +6,7 @@ import * as types from '~/error_tracking_settings/store/mutation_types';
 import defaultState from '~/error_tracking_settings/store/state';
 import axios from '~/lib/utils/axios_utils';
 import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';
 import { refreshCurrentPage } from '~/lib/utils/url_utility';
 import { projectList } from '../mock';

@@ -126,7 +126,7 @@ describe('error tracking settings actions', () => {
     });

     it('should handle a server error', async () => {
-      mock.onPatch(TEST_HOST).reply(400);
+      mock.onPatch(TEST_HOST).reply(HTTP_STATUS_BAD_REQUEST);

       await testAction(
         actions.updateSettings,
         null,
diff --git a/spec/frontend/groups/components/app_spec.js b/spec/frontend/groups/components/app_spec.js
index 13c9f206033..fea3b547977 100644
--- a/spec/frontend/groups/components/app_spec.js
+++ b/spec/frontend/groups/components/app_spec.js
@@ -11,7 +11,7 @@ import eventHub from '~/groups/event_hub';
 import GroupsService from '~/groups/service/groups_service';
 import GroupsStore from '~/groups/store/groups_store';
 import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_FORBIDDEN } from '~/lib/utils/http_status';
+import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_FORBIDDEN } from '~/lib/utils/http_status';
 import * as urlUtilities from '~/lib/utils/url_utility';
 import setWindowLocation from 'helpers/set_window_location_helper';

@@ -113,7 +113,7 @@ describe('AppComponent', () => {
       });

       it('should show flash error when request fails', () => {
-        mock.onGet('/dashboard/groups.json').reply(400);
+        mock.onGet('/dashboard/groups.json').reply(HTTP_STATUS_BAD_REQUEST);

         jest.spyOn(window, 'scrollTo').mockImplementation(() => {});
         return vm.fetchGroups({}).then(() => {
@@ -253,7 +253,7 @@ describe('AppComponent', () => {
       });

       it('should set `isChildrenLoading` back to `false` if load request fails', () => {
-        mock.onGet('/dashboard/groups.json').reply(400);
+        mock.onGet('/dashboard/groups.json').reply(HTTP_STATUS_BAD_REQUEST);

         vm.toggleChildren(groupItem);
diff --git a/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js
index 09be1e333b3..bf57373dd03 100644
--- a/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js
@@ -11,6 +11,7 @@ import * as messages from '~/ide/stores/modules/terminal/messages';
 import * as mutationTypes from '~/ide/stores/modules/terminal/mutation_types';
 import axios from '~/lib/utils/axios_utils';
 import {
+  HTTP_STATUS_BAD_REQUEST,
   HTTP_STATUS_FORBIDDEN,
   HTTP_STATUS_NOT_FOUND,
   HTTP_STATUS_UNPROCESSABLE_ENTITY,
@@ -144,7 +145,7 @@ describe('IDE store terminal check actions', () => {
     });

     it('when error, dispatches request and receive', () => {
-      mock.onPost(/.*\/ide_terminals\/check_config/).reply(400, {});
+      mock.onPost(/.*\/ide_terminals\/check_config/).reply(HTTP_STATUS_BAD_REQUEST, {});

       return testAction(
         actions.fetchConfigCheck,
diff --git a/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
index 9fd5f1a38d7..3bc51e5840c 100644
--- a/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
@@ -6,7 +6,11 @@ import { STARTING, PENDING, STOPPING, STOPPED } from '~/ide/stores/modules/termi
 import * as messages from '~/ide/stores/modules/terminal/messages';
 import * as mutationTypes from '~/ide/stores/modules/terminal/mutation_types';
 import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_NOT_FOUND, HTTP_STATUS_UNPROCESSABLE_ENTITY } from '~/lib/utils/http_status';
+import {
+  HTTP_STATUS_BAD_REQUEST,
+  HTTP_STATUS_NOT_FOUND,
+  HTTP_STATUS_UNPROCESSABLE_ENTITY,
+} from '~/lib/utils/http_status';

 jest.mock('~/flash');

@@ -126,7 +130,7 @@ describe('IDE store terminal session controls actions', () => {
     });

     it('dispatches request and receive on error', () => {
-      mock.onPost(/.*\/ide_terminals/).reply(400);
+      mock.onPost(/.*\/ide_terminals/).reply(HTTP_STATUS_BAD_REQUEST);

       return testAction(
         actions.startSession,
@@ -191,7 +195,7 @@ describe('IDE store terminal session controls actions', () => {
     });

     it('dispatches request and receive on error', () => {
-      mock.onPost(TEST_SESSION.cancel_path).reply(400);
+      mock.onPost(TEST_SESSION.cancel_path).reply(HTTP_STATUS_BAD_REQUEST);

       const state = {
         session: { cancelPath: TEST_SESSION.cancel_path },
@@ -271,7 +275,7 @@ describe('IDE store terminal session controls actions', () => {
       it('dispatches request and receive on error', () => {
         mock
           .onPost(state.session.retryPath, { branch: rootState.currentBranchId, format: 'json' })
-          .reply(400);
+          .reply(HTTP_STATUS_BAD_REQUEST);

         return testAction(
           actions.restartSession,
diff --git a/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
index fe2328f25c2..3928f209e96 100644
--- a/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
@@ -6,6 +6,7 @@ import { PENDING, RUNNING, STOPPING, STOPPED } from '~/ide/stores/modules/termin
 import * as messages from '~/ide/stores/modules/terminal/messages';
 import * as mutationTypes from '~/ide/stores/modules/terminal/mutation_types';
 import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';

 jest.mock('~/flash');

@@ -157,7 +158,7 @@ describe('IDE store terminal session controls actions', () => {
     });

     it('dispatches error on error', () => {
-      mock.onGet(state.session.showPath).reply(400);
+      mock.onGet(state.session.showPath).reply(HTTP_STATUS_BAD_REQUEST);

       return testAction(
         actions.fetchSessionStatus,
diff --git a/spec/frontend/issues/related_merge_requests/store/actions_spec.js b/spec/frontend/issues/related_merge_requests/store/actions_spec.js
index d3ec6c3bc9d..ecadcb1a13f 100644
--- a/spec/frontend/issues/related_merge_requests/store/actions_spec.js
+++ b/spec/frontend/issues/related_merge_requests/store/actions_spec.js
@@ -2,6 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
 import testAction from 'helpers/vuex_action_helper';
 import { createAlert } from '~/flash';
 import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
 import * as actions from '~/issues/related_merge_requests/store/actions';
 import * as types from '~/issues/related_merge_requests/store/mutation_types';

@@ -86,7 +87,7 @@ describe('RelatedMergeRequest store actions', () => {

     describe('for a failing request', () => {
       it('should dispatch error action', async () => {
-        mock.onGet(`${state.apiEndpoint}?per_page=100`).replyOnce(400);
+        mock.onGet(`${state.apiEndpoint}?per_page=100`).replyOnce(HTTP_STATUS_BAD_REQUEST);

         await testAction(
           actions.fetchMergeRequests,
diff --git a/spec/frontend/lib/utils/axios_startup_calls_spec.js b/spec/frontend/lib/utils/axios_startup_calls_spec.js
index e12bf725560..fc5bd3b811c 100644
--- a/spec/frontend/lib/utils/axios_startup_calls_spec.js
+++ b/spec/frontend/lib/utils/axios_startup_calls_spec.js
@@ -1,6 +1,7 @@
 import axios from 'axios';
 import MockAdapter from 'axios-mock-adapter';
 import setupAxiosStartupCalls from '~/lib/utils/axios_startup_calls';
+import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';

 describe('setupAxiosStartupCalls', () => {
   const AXIOS_RESPONSE = { text: 'AXIOS_RESPONSE' };
@@ -55,7 +56,7 @@ describe('setupAxiosStartupCalls', () => {
         fetchCall: mockFetchCall(200),
       },
       '/startup-failing': {
-        fetchCall: mockFetchCall(400),
+        fetchCall: mockFetchCall(HTTP_STATUS_BAD_REQUEST),
       },
     };
     setupAxiosStartupCalls(axios);
diff --git a/spec/frontend/merge_conflicts/store/actions_spec.js b/spec/frontend/merge_conflicts/store/actions_spec.js
index 50eac982e20..4399436b2c5 100644
--- a/spec/frontend/merge_conflicts/store/actions_spec.js
+++ b/spec/frontend/merge_conflicts/store/actions_spec.js
@@ -1,6 +1,7 @@
 import axios from 'axios';
 import MockAdapter from 'axios-mock-adapter';
 import Cookies from '~/lib/utils/cookies';
+import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
 import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
 import testAction from 'helpers/vuex_action_helper';
 import { createAlert } from '~/flash';
@@ -64,7 +65,7 @@ describe('merge conflicts actions', () => {
     });

     it('when request fails', () => {
-      mock.onGet(conflictsPath).reply(400);
+      mock.onGet(conflictsPath).reply(HTTP_STATUS_BAD_REQUEST);
       return testAction(
         actions.fetchConflictsData,
         conflictsPath,
@@ -114,7 +115,7 @@ describe('merge conflicts actions', () => {
     });

     it('on errors shows flash', async () => {
-      mock.onPost(resolveConflictsPath).reply(400);
+      mock.onPost(resolveConflictsPath).reply(HTTP_STATUS_BAD_REQUEST);
       await testAction(
         actions.submitResolvedConflicts,
         resolveConflictsPath,
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/actions_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/actions_spec.js
index 36417eaf793..7152c237420 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/actions_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/actions_spec.js
@@ -3,6 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
 import testAction from 'helpers/vuex_action_helper';
 import Api from '~/api';
 import { createAlert } from '~/flash';
+import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
 import { MISSING_DELETE_PATH_ERROR } from '~/packages_and_registries/infrastructure_registry/list/constants';
 import * as actions from '~/packages_and_registries/infrastructure_registry/list/stores/actions';
 import * as types from '~/packages_and_registries/infrastructure_registry/list/stores/mutation_types';
@@ -198,7 +199,7 @@ describe('Actions Package list store', () => {
     });

     it('should stop the loading and call create flash on api error', async () => {
-      mock.onDelete(payload._links.delete_api_path).replyOnce(400);
+      mock.onDelete(payload._links.delete_api_path).replyOnce(HTTP_STATUS_BAD_REQUEST);
       await testAction(
         actions.requestDeletePackage,
         payload,
diff --git a/spec/frontend/single_file_diff_spec.js b/spec/frontend/single_file_diff_spec.js
index 6f42ec47458..a6e532922c8 100644
--- a/spec/frontend/single_file_diff_spec.js
+++ b/spec/frontend/single_file_diff_spec.js
@@ -2,6 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
 import $ from 'jquery';
 import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
 import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
 import SingleFileDiff from '~/single_file_diff';

 describe('SingleFileDiff', () => {
@@ -54,7 +55,7 @@ describe('SingleFileDiff', () => {
     expect(diff.isOpen).toBe(false);
     expect(diff.content).not.toBeNull();

-    mock.onGet(blobDiffPath).replyOnce(400, '');
+    mock.onGet(blobDiffPath).replyOnce(HTTP_STATUS_BAD_REQUEST, '');

     // Opening again
     await diff.toggleDiff($(document.querySelector('.js-file-title')));
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 91dd4c46a74..2da9f7b1ebc 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -1352,4 +1352,30 @@ RSpec.describe ProjectsHelper do
       end
     end
   end
+
+  describe '#vue_fork_divergence_data' do
+    it 'returns empty hash when fork source is not available' do
+      expect(helper.vue_fork_divergence_data(project, 'ref')).to eq({})
+    end
+
+    context 'when fork source is available' do
+      it 'returns the data related to fork divergence' do
+        source_project = project_with_repo
+
+        allow(helper).to receive(:visible_fork_source).with(project).and_return(source_project)
+
+        ahead_path =
+          "/#{project.full_path}/-/compare/#{source_project.default_branch}...ref?from_project_id=#{source_project.id}"
+        behind_path =
+          "/#{source_project.full_path}/-/compare/ref...#{source_project.default_branch}?from_project_id=#{project.id}"
+
+        expect(helper.vue_fork_divergence_data(project, 'ref')).to eq({
+          source_name: source_project.full_name,
+          source_path: project_path(source_project),
+          ahead_compare_path: ahead_path,
+          behind_compare_path: behind_path
+        })
+      end
+    end
+  end
 end
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_schedule_play_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_schedule_play_spec.rb
index 0e43fa024f3..3deeffa7bd5 100644
--- a/spec/requests/api/graphql/mutations/ci/pipeline_schedule_play_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_schedule_play_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe 'PipelineSchedulePlay', feature_category: :continuious_integratio
     it do
       expect(RunPipelineScheduleWorker)
         .to receive(:perform_async)
-        .with(pipeline_schedule.id, user.id)
+        .with(pipeline_schedule.id, user.id, next_run_scheduled: true)

       post_graphql_mutation(mutation, current_user: user)

diff --git a/spec/services/ci/pipeline_schedule_service_spec.rb b/spec/services/ci/pipeline_schedule_service_spec.rb
index 2f094583f1a..4f20a4389ca 100644
--- a/spec/services/ci/pipeline_schedule_service_spec.rb
+++ b/spec/services/ci/pipeline_schedule_service_spec.rb
@@ -2,7 +2,7 @@

 require 'spec_helper'

-RSpec.describe Ci::PipelineScheduleService do
+RSpec.describe Ci::PipelineScheduleService, feature_category: :continuous_integration do
   let_it_be(:user) { create(:user) }
   let_it_be(:project) { create(:project) }

@@ -21,7 +21,7 @@ RSpec.describe Ci::PipelineScheduleService do
     it 'runs RunPipelineScheduleWorker' do
       expect(RunPipelineScheduleWorker)
-        .to receive(:perform_async).with(schedule.id, schedule.owner.id)
+        .to receive(:perform_async).with(schedule.id, schedule.owner.id, next_run_scheduled: true)

       subject
     end
@@ -43,7 +43,7 @@ RSpec.describe Ci::PipelineScheduleService do
       it 'does not run RunPipelineScheduleWorker' do
         expect(RunPipelineScheduleWorker)
-          .not_to receive(:perform_async).with(schedule.id, schedule.owner.id)
+          .not_to receive(:perform_async).with(schedule.id, schedule.owner.id, next_run_scheduled: true)

         subject
       end
diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb
index 25158de3341..b1fb3777a66 100644
--- a/spec/workers/run_pipeline_schedule_worker_spec.rb
+++ b/spec/workers/run_pipeline_schedule_worker_spec.rb
@@ -21,11 +21,6 @@ RSpec.describe RunPipelineScheduleWorker, feature_category: :continuous_integrat
     end
   end

-  it 'accepts an option' do
-    expect { worker.perform(pipeline_schedule.id, user.id, {}) }.not_to raise_error
-    expect { worker.perform(pipeline_schedule.id, user.id, {}, {}) }.to raise_error(ArgumentError)
-  end
-
   context 'when a schedule not found' do
     it 'does not call the Service' do
       expect(Ci::CreatePipelineService).not_to receive(:new)
@@ -94,6 +89,32 @@ RSpec.describe RunPipelineScheduleWorker, feature_category: :continuous_integrat
       expect { worker.perform(pipeline_schedule.id, user.id) }.not_to change { pipeline_schedule.reload.next_run_at }
     end
   end
+
+  context 'when next_run_scheduled option is given as true' do
+    it "returns the service response" do
+      expect(worker.perform(pipeline_schedule.id, user.id, next_run_scheduled: true)).to eq(service_response)
+    end
+
+    it "does not log errors" do
+      expect(worker).not_to receive(:log_extra_metadata_on_done)
+
+      expect(worker.perform(pipeline_schedule.id, user.id, next_run_scheduled: true)).to eq(service_response)
+    end
+
+    it "does not change the next_run_at" do
+      expect { worker.perform(pipeline_schedule.id, user.id, next_run_scheduled: true) }.not_to change { pipeline_schedule.reload.next_run_at }
+    end
+
+    context 'when feature flag ci_use_run_pipeline_schedule_worker is disabled' do
+      before do
+        stub_feature_flags(ci_use_run_pipeline_schedule_worker: false)
+      end
+
+      it 'does not change the next_run_at' do
+        expect { worker.perform(pipeline_schedule.id, user.id, next_run_scheduled: true) }.not_to change { pipeline_schedule.reload.next_run_at }
+      end
+    end
+  end
 end

 context "when pipeline was not persisted" do
