125 files changed, 1242 insertions, 237 deletions
diff --git a/.rubocop_todo/rspec/context_wording.yml b/.rubocop_todo/rspec/context_wording.yml index 14608571f1c..0780e011d65 100644 --- a/.rubocop_todo/rspec/context_wording.yml +++ b/.rubocop_todo/rspec/context_wording.yml @@ -443,7 +443,6 @@ RSpec/ContextWording: - 'ee/spec/models/audit_events/external_audit_event_destination_spec.rb' - 'ee/spec/models/board_spec.rb' - 'ee/spec/models/boards/epic_board_position_spec.rb' - - 'ee/spec/models/ci/build_spec.rb' - 'ee/spec/models/ci/minutes/namespace_monthly_usage_spec.rb' - 'ee/spec/models/ci/minutes/project_monthly_usage_spec.rb' - 'ee/spec/models/ci/sources/project_spec.rb' diff --git a/.rubocop_todo/rspec/missing_feature_category.yml b/.rubocop_todo/rspec/missing_feature_category.yml index 21a30917e13..97647e95e3a 100644 --- a/.rubocop_todo/rspec/missing_feature_category.yml +++ b/.rubocop_todo/rspec/missing_feature_category.yml @@ -8753,7 +8753,6 @@ RSpec/MissingFeatureCategory: - 'spec/views/projects/pages_domains/show.html.haml_spec.rb' - 'spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb' - 'spec/views/projects/project_members/index.html.haml_spec.rb' - - 'spec/views/projects/runners/_specific_runners.html.haml_spec.rb' - 'spec/views/projects/settings/ci_cd/_autodevops_form.html.haml_spec.rb' - 'spec/views/projects/settings/integrations/edit.html.haml_spec.rb' - 'spec/views/projects/settings/merge_requests/show.html.haml_spec.rb' diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION index 850103fba65..8ab3273840e 100644 --- a/GITALY_SERVER_VERSION +++ b/GITALY_SERVER_VERSION @@ -1 +1 @@ -d7b9dcec61e29644ffc58dd2a59756435bf58bcb +5b5abda2e69a93c5898609cd9c9aa02954c10556 diff --git a/app/assets/javascripts/related_issues/constants.js b/app/assets/javascripts/related_issues/constants.js index d1b2d41d7ae..7d462711f9e 100644 --- a/app/assets/javascripts/related_issues/constants.js +++ b/app/assets/javascripts/related_issues/constants.js @@ -45,7 +45,7 @@ export const autoCompleteTextMap = { false: { [issuableTypesMap.ISSUE]: '', [issuableTypesMap.EPIC]: '', - [issuableTypesMap.MERGE_REQUEST]: __(' or references (e.g. 
path/to/project!merge_request_id)'), + [issuableTypesMap.MERGE_REQUEST]: __(' or references'), }, }; diff --git a/app/assets/javascripts/repository/components/fork_info.vue b/app/assets/javascripts/repository/components/fork_info.vue index 980fa140eb5..9804837b200 100644 --- a/app/assets/javascripts/repository/components/fork_info.vue +++ b/app/assets/javascripts/repository/components/fork_info.vue @@ -1,5 +1,5 @@ <script> -import { GlIcon, GlLink, GlSkeletonLoader } from '@gitlab/ui'; +import { GlIcon, GlLink, GlSkeletonLoader, GlSprintf } from '@gitlab/ui'; import { s__, sprintf, n__ } from '~/locale'; import { createAlert } from '~/flash'; import forkDetailsQuery from '../queries/fork_details.query.graphql'; @@ -9,9 +9,9 @@ export const i18n = { inaccessibleProject: s__('ForkedFromProjectPath|Forked from an inaccessible project.'), upToDate: s__('ForksDivergence|Up to date with the upstream repository.'), unknown: s__('ForksDivergence|This fork has diverged from the upstream repository.'), - behind: s__('ForksDivergence|%{behind} %{commit_word} behind'), - ahead: s__('ForksDivergence|%{ahead} %{commit_word} ahead of'), - behindAndAhead: s__('ForksDivergence|%{messages} the upstream repository.'), + behind: s__('ForksDivergence|%{behindLinkStart}%{behind} %{commit_word} behind%{behindLinkEnd}'), + ahead: s__('ForksDivergence|%{aheadLinkStart}%{ahead} %{commit_word} ahead%{aheadLinkEnd} of'), + behindAhead: s__('ForksDivergence|%{messages} the upstream repository.'), error: s__('ForksDivergence|Failed to fetch fork details. Try again later.'), }; @@ -20,6 +20,7 @@ export default { components: { GlIcon, GlLink, + GlSprintf, GlSkeletonLoader, }, apollo: { @@ -28,7 +29,7 @@ export default { variables() { return { projectPath: this.projectPath, - ref: this.selectedRef, + ref: this.selectedBranch, }; }, skip() { @@ -48,7 +49,7 @@ export default { type: String, required: true, }, - selectedRef: { + selectedBranch: { type: String, required: true, }, @@ -62,6 +63,16 @@ export default { required: false, default: '', }, + aheadComparePath: { + type: String, + required: false, + default: '', + }, + behindComparePath: { + type: String, + required: false, + default: '', + }, }, data() { return { @@ -116,7 +127,7 @@ export default { return this.$options.i18n.unknown; } if (this.hasBehindAheadMessage) { - return sprintf(this.$options.i18n.behindAndAhead, { + return sprintf(this.$options.i18n.behindAhead, { messages: this.behindAheadMessage, }); } @@ -134,8 +145,15 @@ export default { {{ $options.i18n.forkedFrom }} <gl-link data-qa-selector="forked_from_link" :href="sourcePath">{{ sourceName }}</gl-link> <gl-skeleton-loader v-if="isLoading" :lines="1" /> - <div v-else class="gl-text-secondary"> - {{ forkDivergenceMessage }} + <div v-else class="gl-text-secondary" data-testid="divergence-message"> + <gl-sprintf :message="forkDivergenceMessage"> + <template #aheadLink="{ content }"> + <gl-link :href="aheadComparePath">{{ content }}</gl-link> + </template> + <template #behindLink="{ content }"> + <gl-link :href="behindComparePath">{{ content }}</gl-link> + </template> + </gl-sprintf> </div> </div> <div v-else data-testid="inaccessible-project" class="gl-align-items-center gl-display-flex"> diff --git a/app/assets/javascripts/repository/index.js b/app/assets/javascripts/repository/index.js index e5d22f50d72..23d857de7a0 100644 --- a/app/assets/javascripts/repository/index.js +++ b/app/assets/javascripts/repository/index.js @@ -69,7 +69,7 @@ export default function setupVueRepositoryList() { if (!forkEl) { 
return null; } - const { sourceName, sourcePath } = forkEl.dataset; + const { sourceName, sourcePath, aheadComparePath, behindComparePath } = forkEl.dataset; return new Vue({ el: forkEl, apolloProvider, @@ -77,9 +77,11 @@ export default function setupVueRepositoryList() { return h(ForkInfo, { props: { projectPath, - selectedRef: ref, + selectedBranch: ref, sourceName, sourcePath, + aheadComparePath, + behindComparePath, }, }); }, diff --git a/app/controllers/profiles/keys_controller.rb b/app/controllers/profiles/keys_controller.rb index d0c2dc26fa6..31c758ac763 100644 --- a/app/controllers/profiles/keys_controller.rb +++ b/app/controllers/profiles/keys_controller.rb @@ -34,6 +34,16 @@ class Profiles::KeysController < Profiles::ApplicationController end end + def revoke + @key = current_user.keys.find(params[:id]) + Keys::RevokeService.new(current_user).execute(@key) + + respond_to do |format| + format.html { redirect_to profile_keys_url, status: :found } + format.js { head :ok } + end + end + private def key_params diff --git a/app/controllers/projects/pipelines_controller.rb b/app/controllers/projects/pipelines_controller.rb index 2581bff8477..10f58a9f479 100644 --- a/app/controllers/projects/pipelines_controller.rb +++ b/app/controllers/projects/pipelines_controller.rb @@ -219,6 +219,8 @@ class Projects::PipelinesController < Projects::ApplicationController def config_variables respond_to do |format| format.json do + # Even if the parameter name is `sha`, it is actually a ref name. We always send `ref` to the endpoint. + # See: https://gitlab.com/gitlab-org/gitlab/-/issues/389065 result = Ci::ListConfigVariablesService.new(@project, current_user).execute(params[:sha]) result.nil? ? head(:no_content) : render(json: result) diff --git a/app/graphql/types/project_type.rb b/app/graphql/types/project_type.rb index fe13ee7ef3c..2f8a2d26120 100644 --- a/app/graphql/types/project_type.rb +++ b/app/graphql/types/project_type.rb @@ -612,6 +612,8 @@ module Types project.container_repositories.size end + # Even if the parameter name is `sha`, it is actually a ref name. We always send `ref` to the endpoint. + # See: https://gitlab.com/gitlab-org/gitlab/-/issues/389065 def ci_config_variables(sha:) result = ::Ci::ListConfigVariablesService.new(object, context[:current_user]).execute(sha) diff --git a/app/helpers/ssh_keys_helper.rb b/app/helpers/ssh_keys_helper.rb index 4cd40836335..13d6851f3cd 100644 --- a/app/helpers/ssh_keys_helper.rb +++ b/app/helpers/ssh_keys_helper.rb @@ -23,6 +23,28 @@ module SshKeysHelper } end + def ssh_key_revoke_modal_data(key, path) + title = _('Revoke Key') + + { + path: path, + method: 'delete', + qa_selector: 'revoke_ssh_key_button', + title: title, + aria_label: title, + modal_attributes: { + 'data-qa-selector': 'ssh_key_revoke_modal', + title: _('Are you sure you want to revoke this SSH key?'), + message: _('This action cannot be undone, and will permanently delete the %{key} SSH key. 
All commits signed using this SSH key will be marked as unverified.') % { key: key.title }, + okVariant: 'danger', + okTitle: _('Revoke') + }, + toggle: 'tooltip', + placement: 'top', + container: 'body' + } + end + def ssh_key_allowed_algorithms allowed_algorithms = Gitlab::CurrentSettings.allowed_key_types.flat_map do |ssh_key_type_name| Gitlab::SSHPublicKey.supported_algorithms_for_name(ssh_key_type_name) diff --git a/app/models/concerns/commit_signature.rb b/app/models/concerns/commit_signature.rb index 7f1fbbefd94..5dac3c7833a 100644 --- a/app/models/concerns/commit_signature.rb +++ b/app/models/concerns/commit_signature.rb @@ -4,6 +4,7 @@ module CommitSignature included do include ShaAttribute + include EachBatch sha_attribute :commit_sha @@ -14,7 +15,8 @@ module CommitSignature other_user: 3, unverified_key: 4, unknown_key: 5, - multiple_signatures: 6 + multiple_signatures: 6, + revoked_key: 7 } belongs_to :project, class_name: 'Project', foreign_key: 'project_id', optional: false diff --git a/app/models/key.rb b/app/models/key.rb index 1f2234129ed..596186276bb 100644 --- a/app/models/key.rb +++ b/app/models/key.rb @@ -11,6 +11,8 @@ class Key < ApplicationRecord belongs_to :user + has_many :ssh_signatures, class_name: 'CommitSignatures::SshSignature' + before_validation :generate_fingerprint validates :title, @@ -136,6 +138,10 @@ class Key < ApplicationRecord save if generate_fingerprint end + def signing? + super || auth_and_signing? + end + private def generate_fingerprint diff --git a/app/services/ci/list_config_variables_service.rb b/app/services/ci/list_config_variables_service.rb index df4963d1b33..dbea270b7c6 100644 --- a/app/services/ci/list_config_variables_service.rb +++ b/app/services/ci/list_config_variables_service.rb @@ -17,7 +17,9 @@ module Ci new(project, user) end - def execute(sha) + def execute(ref) + sha = project.commit(ref).try(:sha) + with_reactive_cache(sha) { |result| result } end diff --git a/app/services/ci/runners/stale_machines_cleanup_service.rb b/app/services/ci/runners/stale_machines_cleanup_service.rb new file mode 100644 index 00000000000..3e5706d24a6 --- /dev/null +++ b/app/services/ci/runners/stale_machines_cleanup_service.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +module Ci + module Runners + class StaleMachinesCleanupService + MAX_DELETIONS = 1000 + + def execute + ServiceResponse.success(payload: { + # the `stale` relationship can return duplicates, so we don't try to return a precise count here + deleted_machines: delete_stale_runner_machines > 0 + }) + end + + private + + def delete_stale_runner_machines + total_deleted_count = 0 + loop do + sub_batch_limit = [100, MAX_DELETIONS].min + + # delete_all discards part of the `stale` scope query, so we expliclitly wrap it with a SELECT as a workaround + deleted_count = Ci::RunnerMachine.id_in(Ci::RunnerMachine.stale.limit(sub_batch_limit)).delete_all + total_deleted_count += deleted_count + + break if deleted_count == 0 || total_deleted_count >= MAX_DELETIONS + end + + total_deleted_count + end + end + end +end diff --git a/app/services/keys/revoke_service.rb b/app/services/keys/revoke_service.rb new file mode 100644 index 00000000000..42ea9ab73be --- /dev/null +++ b/app/services/keys/revoke_service.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module Keys + class RevokeService < ::Keys::DestroyService + def execute(key) + key.transaction do + unverify_associated_signatures(key) + + raise ActiveRecord::Rollback unless super(key) + end + end + + private + + def 
unverify_associated_signatures(key) + return unless Feature.enabled?(:revoke_ssh_signatures) + + key.ssh_signatures.each_batch do |batch| + batch.update_all( + verification_status: CommitSignatures::SshSignature.verification_statuses[:revoked_key], + updated_at: Time.zone.now + ) + end + end + end +end + +Keys::DestroyService.prepend_mod diff --git a/app/services/notes/destroy_service.rb b/app/services/notes/destroy_service.rb index eda8bbcbc2e..ccee94a5cea 100644 --- a/app/services/notes/destroy_service.rb +++ b/app/services/notes/destroy_service.rb @@ -10,6 +10,7 @@ module Notes clear_noteable_diffs_cache(note) track_note_removal_usage_for_issues(note) if note.for_issue? track_note_removal_usage_for_merge_requests(note) if note.for_merge_request? + track_note_removal_usage_for_design(note) if note.for_design? end private @@ -22,6 +23,13 @@ module Notes def track_note_removal_usage_for_merge_requests(note) Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter.track_remove_comment_action(note: note) end + + def track_note_removal_usage_for_design(note) + Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_design_comment_removed_action( + author: note.author, + project: project + ) + end end end diff --git a/app/views/profiles/keys/_key.html.haml b/app/views/profiles/keys/_key.html.haml index 219e7c4d2fe..b4b205bc52a 100644 --- a/app/views/profiles/keys/_key.html.haml +++ b/app/views/profiles/keys/_key.html.haml @@ -28,7 +28,11 @@ %span.last-used-at.gl-mr-3 = s_('Profiles|Usage type:') = ssh_key_usage_types.invert[key.usage_type] - %span.key-created-at.gl-display-flex.gl-align-items-center - - if key.can_delete? - .gl-ml-3 - = render 'shared/ssh_keys/key_delete', icon: true, button_data: ssh_key_delete_modal_data(key, path_to_key(key, is_admin)) + .gl-display-flex.gl-float-right + - if key.can_delete? + - if key.signing? && !is_admin && Feature.enabled?(:revoke_ssh_signatures) + = render Pajamas::ButtonComponent.new(size: :small, button_options: { class: 'js-confirm-modal-button', data: ssh_key_revoke_modal_data(key, revoke_profile_key_path(key)) }) do + = _('Revoke') + .gl-pl-3 + = render Pajamas::ButtonComponent.new(size: :small, button_options: { class: 'js-confirm-modal-button', data: ssh_key_delete_modal_data(key, path_to_key(key, is_admin)) }) do + = _('Remove') diff --git a/app/views/projects/_files.html.haml b/app/views/projects/_files.html.haml index bd732f7b016..e2d1a50ae5e 100644 --- a/app/views/projects/_files.html.haml +++ b/app/views/projects/_files.html.haml @@ -17,7 +17,8 @@ = render 'projects/tree/tree_header', tree: @tree, is_project_overview: is_project_overview - if project.forked? 
&& Feature.enabled?(:fork_divergence_counts, @project.fork_source) - #js-fork-info{ data: vue_fork_divergence_data(project, ref) } + + #js-fork-info{ data: vue_fork_divergence_data(project, ref), project_id: @project.id } - if is_project_overview .project-buttons.gl-mb-5.js-show-on-project-root{ data: { qa_selector: 'project_buttons' } } diff --git a/app/views/projects/commit/_revoked_key_signature_badge.html.haml b/app/views/projects/commit/_revoked_key_signature_badge.html.haml new file mode 100644 index 00000000000..2e0ca42561a --- /dev/null +++ b/app/views/projects/commit/_revoked_key_signature_badge.html.haml @@ -0,0 +1,5 @@ +- title = s_('CommitSignature|Unverified signature') +- description = s_('CommitSignature|This commit was signed with a key that was revoked.') +- locals = { signature: signature, title: title, description: description, label: s_('CommitSignature|Unverified'), css_class: 'invalid' } + += render partial: 'projects/commit/signature_badge', locals: locals diff --git a/app/views/shared/ssh_keys/_key_delete.html.haml b/app/views/shared/ssh_keys/_key_delete.html.haml index 4b89b2a0cbf..80cd23989a0 100644 --- a/app/views/shared/ssh_keys/_key_delete.html.haml +++ b/app/views/shared/ssh_keys/_key_delete.html.haml @@ -1,7 +1,5 @@ -- icon = local_assigns[:icon] - category = local_assigns[:category] || :primary .gl-p-2 - = render Pajamas::ButtonComponent.new(variant: :danger, category: category, icon: ('remove' if icon), button_options: { class: 'js-confirm-modal-button', data: button_data }) do - - unless icon - = _('Delete') + = render Pajamas::ButtonComponent.new(variant: :danger, category: category, button_options: { class: 'js-confirm-modal-button', data: button_data }) do + = _('Delete') diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml index 208d878ebc5..d1078c4bf92 100644 --- a/app/workers/all_queues.yml +++ b/app/workers/all_queues.yml @@ -255,6 +255,15 @@ :weight: 1 :idempotent: true :tags: [] +- :name: cronjob:ci_runners_stale_machines_cleanup_cron + :worker_name: Ci::Runners::StaleMachinesCleanupCronWorker + :feature_category: :runner_fleet + :has_external_dependencies: false + :urgency: :low + :resource_boundary: :unknown + :weight: 1 + :idempotent: true + :tags: [] - :name: cronjob:ci_schedule_delete_objects_cron :worker_name: Ci::ScheduleDeleteObjectsCronWorker :feature_category: :continuous_integration diff --git a/app/workers/ci/runners/reconcile_existing_runner_versions_cron_worker.rb b/app/workers/ci/runners/reconcile_existing_runner_versions_cron_worker.rb index 69ab477c80a..722c513a4bb 100644 --- a/app/workers/ci/runners/reconcile_existing_runner_versions_cron_worker.rb +++ b/app/workers/ci/runners/reconcile_existing_runner_versions_cron_worker.rb @@ -30,7 +30,7 @@ module Ci end result = ::Ci::Runners::ReconcileExistingRunnerVersionsService.new.execute - result.payload.each { |key, value| log_extra_metadata_on_done(key, value) } + log_hash_metadata_on_done(result.payload) end end end diff --git a/app/workers/ci/runners/stale_machines_cleanup_cron_worker.rb b/app/workers/ci/runners/stale_machines_cleanup_cron_worker.rb new file mode 100644 index 00000000000..9a11db33fb6 --- /dev/null +++ b/app/workers/ci/runners/stale_machines_cleanup_cron_worker.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module Ci + module Runners + class StaleMachinesCleanupCronWorker + include ApplicationWorker + + # This worker does not schedule other workers that require context. 
+ include CronjobQueue # rubocop:disable Scalability/CronWorkerContext + + data_consistency :sticky + feature_category :runner_fleet + urgency :low + + idempotent! + + def perform + result = ::Ci::Runners::StaleMachinesCleanupService.new.execute + log_extra_metadata_on_done(:status, result.status) + log_hash_metadata_on_done(result.payload) + end + end + end +end diff --git a/app/workers/concerns/application_worker.rb b/app/workers/concerns/application_worker.rb index 222d045b0ba..e2e31b0a5bd 100644 --- a/app/workers/concerns/application_worker.rb +++ b/app/workers/concerns/application_worker.rb @@ -36,6 +36,11 @@ module ApplicationWorker @done_log_extra_metadata[key] = value end + def log_hash_metadata_on_done(hash) + @done_log_extra_metadata ||= {} + hash.each { |key, value| @done_log_extra_metadata[key] = value } + end + def logging_extras return {} unless @done_log_extra_metadata diff --git a/config/feature_flags/development/revoke_ssh_signatures.yml b/config/feature_flags/development/revoke_ssh_signatures.yml new file mode 100644 index 00000000000..6232e699515 --- /dev/null +++ b/config/feature_flags/development/revoke_ssh_signatures.yml @@ -0,0 +1,8 @@ +--- +name: revoke_ssh_signatures +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108344 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/388986 +milestone: '15.9' +type: development +group: group::source code +default_enabled: false diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example index 340864d2dfa..7342fe4821a 100644 --- a/config/gitlab.yml.example +++ b/config/gitlab.yml.example @@ -578,6 +578,10 @@ production: &base ci_runner_versions_reconciliation_worker: cron: "@daily" + # Periodically clean up stale runner machines. + ci_runners_stale_machines_cleanup_worker: + cron: "36 4 * * *" + # GitLab EE only jobs. These jobs are automatically enabled for an EE # installation, and ignored for a CE installation. 
ee_cron_jobs: diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb index 55c21744ad0..8d0d95e01fe 100644 --- a/config/initializers/1_settings.rb +++ b/config/initializers/1_settings.rb @@ -676,6 +676,9 @@ Settings.cron_jobs['ci_runner_versions_reconciliation_worker']['job_class'] = 'C Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker'] ||= Settingslogic.new({}) Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker']['cron'] ||= '*/2 * * * *' Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker']['job_class'] = 'Users::MigrateRecordsToGhostUserInBatchesWorker' +Settings.cron_jobs['ci_runners_stale_machines_cleanup_worker'] ||= Settingslogic.new({}) +Settings.cron_jobs['ci_runners_stale_machines_cleanup_worker']['cron'] ||= '36 4 * * *' +Settings.cron_jobs['ci_runners_stale_machines_cleanup_worker']['job_class'] = 'Ci::Runners::StaleMachinesCleanupCronWorker' Gitlab.ee do Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker'] ||= Settingslogic.new({}) diff --git a/config/metrics/counts_28d/20230105222225_g_project_management_issue_design_comments_removed_monthly.yml b/config/metrics/counts_28d/20230105222225_g_project_management_issue_design_comments_removed_monthly.yml new file mode 100644 index 00000000000..9c02ed98a82 --- /dev/null +++ b/config/metrics/counts_28d/20230105222225_g_project_management_issue_design_comments_removed_monthly.yml @@ -0,0 +1,26 @@ +--- +key_path: redis_hll_counters.issues_edit.g_project_management_issue_design_comments_removed_monthly +description: Count of unique users per month who removed a comment on a design +product_section: dev +product_stage: plan +product_group: product_planning +product_category: design_management +value_type: number +status: active +milestone: "15.9" +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108307 +time_frame: 28d +data_source: redis_hll +data_category: optional +instrumentation_class: RedisHLLMetric +performance_indicator_type: [] +options: + events: + - g_project_management_issue_design_comments_removed +distribution: +- ce +- ee +tier: +- free +- premium +- ultimate diff --git a/config/metrics/counts_7d/20230105222224_g_project_management_issue_design_comments_removed_weekly.yml b/config/metrics/counts_7d/20230105222224_g_project_management_issue_design_comments_removed_weekly.yml new file mode 100644 index 00000000000..138638a0f45 --- /dev/null +++ b/config/metrics/counts_7d/20230105222224_g_project_management_issue_design_comments_removed_weekly.yml @@ -0,0 +1,26 @@ +--- +key_path: redis_hll_counters.issues_edit.g_project_management_issue_design_comments_removed_weekly +description: Count of unique users per week who removed a comment on a design +product_section: dev +product_stage: plan +product_group: product_planning +product_category: design_management +value_type: number +status: active +milestone: "15.9" +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108307 +time_frame: 7d +data_source: redis_hll +data_category: optional +instrumentation_class: RedisHLLMetric +performance_indicator_type: [] +options: + events: + - g_project_management_issue_design_comments_removed +distribution: +- ce +- ee +tier: +- free +- premium +- ultimate diff --git a/config/routes/profile.rb b/config/routes/profile.rb index 91f6eb678e4..46a078ce3b1 100644 --- a/config/routes/profile.rb +++ b/config/routes/profile.rb @@ -38,7 +38,13 @@ resource :profile, only: [:show, :update] 
do end end resource :preferences, only: [:show, :update] - resources :keys, only: [:index, :show, :create, :destroy] + + resources :keys, only: [:index, :show, :create, :destroy] do + member do + delete :revoke + end + end + resources :gpg_keys, only: [:index, :create, :destroy] do member do put :revoke diff --git a/db/post_migrate/20230119085509_add_index_to_ci_running_build.rb b/db/post_migrate/20230119085509_add_index_to_ci_running_build.rb new file mode 100644 index 00000000000..f340b78801f --- /dev/null +++ b/db/post_migrate/20230119085509_add_index_to_ci_running_build.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +class AddIndexToCiRunningBuild < Gitlab::Database::Migration[2.1] + disable_ddl_transaction! + + INDEX_NAME = :index_ci_running_builds_on_partition_id_build_id + TABLE_NAME = :ci_running_builds + COLUMNS = [:partition_id, :build_id] + + def up + add_concurrent_index(TABLE_NAME, COLUMNS, unique: true, name: INDEX_NAME) + end + + def down + remove_concurrent_index_by_name(TABLE_NAME, INDEX_NAME) + end +end diff --git a/db/post_migrate/20230119085552_add_foreign_key_to_ci_running_build.rb b/db/post_migrate/20230119085552_add_foreign_key_to_ci_running_build.rb new file mode 100644 index 00000000000..94dfdc5b967 --- /dev/null +++ b/db/post_migrate/20230119085552_add_foreign_key_to_ci_running_build.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +class AddForeignKeyToCiRunningBuild < Gitlab::Database::Migration[2.1] + disable_ddl_transaction! + + SOURCE_TABLE_NAME = :ci_running_builds + TARGET_TABLE_NAME = :ci_builds + COLUMN = :build_id + TARGET_COLUMN = :id + FK_NAME = :fk_rails_da45cfa165_p + PARTITION_COLUMN = :partition_id + + def up + add_concurrent_foreign_key( + SOURCE_TABLE_NAME, + TARGET_TABLE_NAME, + column: [PARTITION_COLUMN, COLUMN], + target_column: [PARTITION_COLUMN, TARGET_COLUMN], + validate: true, + reverse_lock_order: true, + name: FK_NAME, + on_update: :cascade + ) + end + + def down + with_lock_retries do + remove_foreign_key_if_exists(SOURCE_TABLE_NAME, name: FK_NAME) + end + end +end diff --git a/db/post_migrate/20230119095022_add_fk_index_to_ci_builds_runner_session_on_partition_id_and_build_id.rb b/db/post_migrate/20230119095022_add_fk_index_to_ci_builds_runner_session_on_partition_id_and_build_id.rb new file mode 100644 index 00000000000..9fee62e55f2 --- /dev/null +++ b/db/post_migrate/20230119095022_add_fk_index_to_ci_builds_runner_session_on_partition_id_and_build_id.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +class AddFkIndexToCiBuildsRunnerSessionOnPartitionIdAndBuildId < Gitlab::Database::Migration[2.1] + disable_ddl_transaction! + + INDEX_NAME = :index_ci_builds_runner_session_on_partition_id_build_id + TABLE_NAME = :ci_builds_runner_session + COLUMNS = [:partition_id, :build_id] + + def up + add_concurrent_index(TABLE_NAME, COLUMNS, name: INDEX_NAME, unique: true) + end + + def down + remove_concurrent_index_by_name(TABLE_NAME, INDEX_NAME) + end +end diff --git a/db/post_migrate/20230119095023_add_fk_to_ci_builds_runner_session_on_partition_id_and_build_id.rb b/db/post_migrate/20230119095023_add_fk_to_ci_builds_runner_session_on_partition_id_and_build_id.rb new file mode 100644 index 00000000000..e3aed238e0f --- /dev/null +++ b/db/post_migrate/20230119095023_add_fk_to_ci_builds_runner_session_on_partition_id_and_build_id.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +class AddFkToCiBuildsRunnerSessionOnPartitionIdAndBuildId < Gitlab::Database::Migration[2.1] + disable_ddl_transaction! 
+ + SOURCE_TABLE_NAME = :ci_builds_runner_session + TARGET_TABLE_NAME = :ci_builds + COLUMN = :build_id + TARGET_COLUMN = :id + FK_NAME = :fk_rails_70707857d3_p + PARTITION_COLUMN = :partition_id + + def up + add_concurrent_foreign_key( + SOURCE_TABLE_NAME, + TARGET_TABLE_NAME, + column: [PARTITION_COLUMN, COLUMN], + target_column: [PARTITION_COLUMN, TARGET_COLUMN], + validate: false, + reverse_lock_order: true, + on_update: :cascade, + on_delete: :cascade, + name: FK_NAME + ) + end + + def down + with_lock_retries do + remove_foreign_key_if_exists( + SOURCE_TABLE_NAME, + TARGET_TABLE_NAME, + name: FK_NAME, + reverse_lock_order: true + ) + end + end +end diff --git a/db/post_migrate/20230119095024_validate_fk_on_ci_builds_runner_session_partition_id_and_build_id.rb b/db/post_migrate/20230119095024_validate_fk_on_ci_builds_runner_session_partition_id_and_build_id.rb new file mode 100644 index 00000000000..4c332a1db81 --- /dev/null +++ b/db/post_migrate/20230119095024_validate_fk_on_ci_builds_runner_session_partition_id_and_build_id.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +class ValidateFkOnCiBuildsRunnerSessionPartitionIdAndBuildId < Gitlab::Database::Migration[2.1] + disable_ddl_transaction! + + TABLE_NAME = :ci_builds_runner_session + FK_NAME = :fk_rails_70707857d3_p + COLUMNS = [:partition_id, :build_id] + + def up + validate_foreign_key(TABLE_NAME, COLUMNS, name: FK_NAME) + end + + def down + # no-op + end +end diff --git a/db/post_migrate/20230119095025_remove_fk_to_ci_builds_ci_builds_runner_session_on_build_id.rb b/db/post_migrate/20230119095025_remove_fk_to_ci_builds_ci_builds_runner_session_on_build_id.rb new file mode 100644 index 00000000000..0707114d3a5 --- /dev/null +++ b/db/post_migrate/20230119095025_remove_fk_to_ci_builds_ci_builds_runner_session_on_build_id.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +class RemoveFkToCiBuildsCiBuildsRunnerSessionOnBuildId < Gitlab::Database::Migration[2.1] + disable_ddl_transaction! + + SOURCE_TABLE_NAME = :ci_builds_runner_session + TARGET_TABLE_NAME = :ci_builds + COLUMN = :build_id + TARGET_COLUMN = :id + FK_NAME = :fk_rails_70707857d3 + + def up + with_lock_retries do + remove_foreign_key_if_exists( + SOURCE_TABLE_NAME, + TARGET_TABLE_NAME, + name: FK_NAME, + reverse_lock_order: true + ) + end + end + + def down + add_concurrent_foreign_key( + SOURCE_TABLE_NAME, + TARGET_TABLE_NAME, + column: COLUMN, + target_column: TARGET_COLUMN, + validate: true, + reverse_lock_order: true, + on_delete: :cascade, + name: FK_NAME + ) + end +end diff --git a/db/schema_migrations/20230119085509 b/db/schema_migrations/20230119085509 new file mode 100644 index 00000000000..1eb8297a4b3 --- /dev/null +++ b/db/schema_migrations/20230119085509 @@ -0,0 +1 @@ +6206e50e14c129aeb1d44fbd82add001e73b338bbe80bdade852ff7ec0bc0f86
\ No newline at end of file diff --git a/db/schema_migrations/20230119085552 b/db/schema_migrations/20230119085552 new file mode 100644 index 00000000000..d548c864d2f --- /dev/null +++ b/db/schema_migrations/20230119085552 @@ -0,0 +1 @@ +4bc2f855e1448c3c1b3d6d2b853dc61b049048fa0fee663fe798d86ea88b09a0
\ No newline at end of file diff --git a/db/schema_migrations/20230119095022 b/db/schema_migrations/20230119095022 new file mode 100644 index 00000000000..02c615d27f2 --- /dev/null +++ b/db/schema_migrations/20230119095022 @@ -0,0 +1 @@ +938ad78781f488d0add8b1a85217f93ca41d7093015817d5bd5c8a853d3172b8
\ No newline at end of file diff --git a/db/schema_migrations/20230119095023 b/db/schema_migrations/20230119095023 new file mode 100644 index 00000000000..ae882ff2ca7 --- /dev/null +++ b/db/schema_migrations/20230119095023 @@ -0,0 +1 @@ +f0b069e73170819e044496fb6eb02b43d0ee2697524659b50ab9b66dd3ec9792
\ No newline at end of file diff --git a/db/schema_migrations/20230119095024 b/db/schema_migrations/20230119095024 new file mode 100644 index 00000000000..e7e4255c233 --- /dev/null +++ b/db/schema_migrations/20230119095024 @@ -0,0 +1 @@ +be0b7fd198db3caa10e16606a861de6e708eadd225ad0360604a9f4c512a3d19
\ No newline at end of file diff --git a/db/schema_migrations/20230119095025 b/db/schema_migrations/20230119095025 new file mode 100644 index 00000000000..f7eac79873c --- /dev/null +++ b/db/schema_migrations/20230119095025 @@ -0,0 +1 @@ +66239a6e2fdc476126ca247042078a1b939da15ff91c00cd0392ce664eadc9af
\ No newline at end of file diff --git a/db/structure.sql b/db/structure.sql index adff1278af1..be11c779d29 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -28932,6 +28932,8 @@ CREATE INDEX index_ci_builds_runner_id_running ON ci_builds USING btree (runner_ CREATE UNIQUE INDEX index_ci_builds_runner_session_on_build_id ON ci_builds_runner_session USING btree (build_id); +CREATE UNIQUE INDEX index_ci_builds_runner_session_on_partition_id_build_id ON ci_builds_runner_session USING btree (partition_id, build_id); + CREATE INDEX index_ci_daily_build_group_report_results_on_group_id ON ci_daily_build_group_report_results USING btree (group_id); CREATE INDEX index_ci_daily_build_group_report_results_on_last_pipeline_id ON ci_daily_build_group_report_results USING btree (last_pipeline_id); @@ -29142,6 +29144,8 @@ CREATE INDEX index_ci_runners_on_version ON ci_runners USING btree (version); CREATE UNIQUE INDEX index_ci_running_builds_on_build_id ON ci_running_builds USING btree (build_id); +CREATE UNIQUE INDEX index_ci_running_builds_on_partition_id_build_id ON ci_running_builds USING btree (partition_id, build_id); + CREATE INDEX index_ci_running_builds_on_project_id ON ci_running_builds USING btree (project_id); CREATE INDEX index_ci_running_builds_on_runner_id ON ci_running_builds USING btree (runner_id); @@ -35017,7 +35021,7 @@ ALTER TABLE ONLY analytics_dashboards_pointers ADD CONSTRAINT fk_rails_7027b7eaa9 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE; ALTER TABLE ONLY ci_builds_runner_session - ADD CONSTRAINT fk_rails_70707857d3 FOREIGN KEY (build_id) REFERENCES ci_builds(id) ON DELETE CASCADE; + ADD CONSTRAINT fk_rails_70707857d3_p FOREIGN KEY (partition_id, build_id) REFERENCES ci_builds(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE; ALTER TABLE ONLY list_user_preferences ADD CONSTRAINT fk_rails_70b2ef5ce2 FOREIGN KEY (list_id) REFERENCES lists(id) ON DELETE CASCADE; @@ -35694,6 +35698,9 @@ ALTER TABLE ONLY merge_request_reviewers ALTER TABLE ONLY ci_running_builds ADD CONSTRAINT fk_rails_da45cfa165 FOREIGN KEY (build_id) REFERENCES ci_builds(id) ON DELETE CASCADE; +ALTER TABLE ONLY ci_running_builds + ADD CONSTRAINT fk_rails_da45cfa165_p FOREIGN KEY (partition_id, build_id) REFERENCES ci_builds(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE; + ALTER TABLE ONLY jira_imports ADD CONSTRAINT fk_rails_da617096ce FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL; diff --git a/doc/administration/pages/index.md b/doc/administration/pages/index.md index c01ee081aee..ed08b10fe97 100644 --- a/doc/administration/pages/index.md +++ b/doc/administration/pages/index.md @@ -272,7 +272,7 @@ control over how the Pages daemon runs and serves content in your environment. | `log_verbose` | Verbose logging, true/false. | | `propagate_correlation_id` | Set to true (false by default) to re-use existing Correlation ID from the incoming request header `X-Request-ID` if present. If a reverse proxy sets this header, the value is propagated in the request chain. | | `max_connections` | Limit on the number of concurrent connections to the HTTP, HTTPS or proxy listeners. | -| `max_uri_length` | The maximum length of URIs accepted by GitLab Pages. Set to 0 for unlimited length. [Introduced](https://gitlab.com/gitlab-org/gitlab-pages/-/issues/659) in GitLab 14.5. +| `max_uri_length` | The maximum length of URIs accepted by GitLab Pages. Set to 0 for unlimited length. 
[Introduced](https://gitlab.com/gitlab-org/omnibus-gitlab/-/merge_requests/5729) in GitLab 14.5. | `metrics_address` | The address to listen on for metrics requests. | | `redirect_http` | Redirect pages from HTTP to HTTPS, true/false. | | `redirects_max_config_size` | The maximum size of the `_redirects` file, in bytes (default: 65536). | diff --git a/doc/administration/raketasks/maintenance.md b/doc/administration/raketasks/maintenance.md index 1cc3de868be..5c258d73fdb 100644 --- a/doc/administration/raketasks/maintenance.md +++ b/doc/administration/raketasks/maintenance.md @@ -234,8 +234,11 @@ sudo -u git -H bundle exec rake cache:clear RAILS_ENV=production Sometimes during version upgrades you might end up with some wrong CSS or missing some icons. In that case, try to precompile the assets again. -This only applies to source installations and does not apply to -Omnibus packages. +This Rake task only applies to source installations. [Read more](../../update/package/index.md#missing-asset-files) +about troubleshooting this problem when running the Omnibus GitLab package. +The guidance for Omnibus GitLab might be applicable for Kubernetes and Docker Omnibus +deployments of GitLab, though in general, container-based installations +don't have issues with missing assets. **Source Installation** diff --git a/doc/administration/sidekiq/extra_sidekiq_processes.md b/doc/administration/sidekiq/extra_sidekiq_processes.md index 2a29b6bda1b..7959d1a5ce7 100644 --- a/doc/administration/sidekiq/extra_sidekiq_processes.md +++ b/doc/administration/sidekiq/extra_sidekiq_processes.md @@ -70,8 +70,9 @@ higher for mixed low-priority work. A reasonable starting range is `15` to `25` for a non-specialized deployment. We only recommend setting explicit concurrency by setting `min_concurrency` and -`max_concurrency` to the same value. The two values are kept for backwards -compatibility reasons, but for more predictable results, use the same value. +`max_concurrency` to the same value. The two distinct settings are kept for +backwards compatibility reasons, but for more predictable results use the same +values – otherwise you might run into issues with Sidekiq jobs piling up. For example, to set the concurrency to `20`: @@ -89,7 +90,8 @@ For example, to set the concurrency to `20`: ``` `min_concurrency` and `max_concurrency` are independent; one can be set without -the other. Setting `min_concurrency` to `0` disables the limit. +the other. Setting `min_concurrency` to `0` disables the limit. Not explicitly +setting `min_concurrency` is the same as setting it to `0`. For each queue group, let `N` be one more than the number of queues. The concurrency is set to: diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md index d2d6d2f4fc5..9e4391b8abe 100644 --- a/doc/api/graphql/reference/index.md +++ b/doc/api/graphql/reference/index.md @@ -23354,6 +23354,7 @@ Verification status of a GPG or X.509 signature for a commit. | ----- | ----------- | | <a id="verificationstatusmultiple_signatures"></a>`MULTIPLE_SIGNATURES` | multiple_signatures verification status. | | <a id="verificationstatusother_user"></a>`OTHER_USER` | other_user verification status. | +| <a id="verificationstatusrevoked_key"></a>`REVOKED_KEY` | revoked_key verification status. | | <a id="verificationstatussame_user_different_email"></a>`SAME_USER_DIFFERENT_EMAIL` | same_user_different_email verification status. | | <a id="verificationstatusunknown_key"></a>`UNKNOWN_KEY` | unknown_key verification status. 
| | <a id="verificationstatusunverified"></a>`UNVERIFIED` | unverified verification status. | diff --git a/doc/ci/pipelines/cicd_minutes.md b/doc/ci/pipelines/cicd_minutes.md index 1a94c253176..1e0b3a97714 100644 --- a/doc/ci/pipelines/cicd_minutes.md +++ b/doc/ci/pipelines/cicd_minutes.md @@ -288,7 +288,7 @@ processing new jobs. The grace period for running jobs is `1,000` CI/CD minutes. -Jobs on specific runners are not affected by the quota of CI/CD minutes. +Jobs on project runners are not affected by the quota of CI/CD minutes. ### GitLab SaaS usage notifications diff --git a/doc/ci/runners/configure_runners.md b/doc/ci/runners/configure_runners.md index dc57e8f570f..4916dc12322 100644 --- a/doc/ci/runners/configure_runners.md +++ b/doc/ci/runners/configure_runners.md @@ -355,7 +355,7 @@ try to preserve worktrees and try to re-use them by default. This has limitations when using the [Docker Machine executor](https://docs.gitlab.com/runner/executors/docker_machine.html). A Git strategy of `none` also re-uses the local working copy, but skips all Git -operations normally done by GitLab. GitLab Runner pre-clone scripts are also skipped, +operations usually done by GitLab. GitLab Runner pre-clone scripts are also skipped, if present. This strategy could mean you need to add `fetch` and `checkout` commands to [your `.gitlab-ci.yml` script](../yaml/index.md#script). @@ -535,14 +535,14 @@ You can set it globally or per-job in the [`variables`](../yaml/index.md#variabl `GIT_SUBMODULE_UPDATE_FLAGS` accepts all options of the [`git submodule update`](https://git-scm.com/docs/git-submodule#Documentation/git-submodule.txt-update--init--remote-N--no-fetch--no-recommend-shallow-f--force--checkout--rebase--merge--referenceltrepositorygt--depthltdepthgt--recursive--jobsltngt--no-single-branch--ltpathgt82308203) -subcommand. However, note that `GIT_SUBMODULE_UPDATE_FLAGS` flags are appended after a few default flags: +subcommand. However, `GIT_SUBMODULE_UPDATE_FLAGS` flags are appended after a few default flags: - `--init`, if [`GIT_SUBMODULE_STRATEGY`](#git-submodule-strategy) was set to `normal` or `recursive`. - `--recursive`, if [`GIT_SUBMODULE_STRATEGY`](#git-submodule-strategy) was set to `recursive`. - [`GIT_DEPTH`](#shallow-cloning). See the default value below. Git honors the last occurrence of a flag in the list of arguments, so manually -providing them in `GIT_SUBMODULE_UPDATE_FLAGS` will also override these default flags. +providing them in `GIT_SUBMODULE_UPDATE_FLAGS` overrides these default flags. You can use this variable to fetch the latest remote `HEAD` instead of the commit tracked, in the repository, or to speed up the checkout by fetching submodules in multiple parallel jobs: @@ -779,7 +779,7 @@ variables: NOTE: Zip archives are the only supported artifact type. Follow [the issue for details](https://gitlab.com/gitlab-org/gitlab/-/issues/367203). -GitLab Runner can generate and produce attestation metadata for all build artifacts. To enable this feature, you must set the `RUNNER_GENERATE_ARTIFACTS_METADATA` environment variable to `true`. This variable can either be set globally or it can be set for individual jobs. The metadata is in rendered in a plain text `.json` file that's stored with the artifact. The file name is as follows: `{ARTIFACT_NAME}-metadata.json` where `ARTIFACT_NAME` is what was defined as the [name for the artifact](../pipelines/job_artifacts.md#use-cicd-variables-to-define-the-artifacts-name) in the CI file. 
The file name, however, defaults to `artifacts-metadata.json` if no name was given to the build artifacts. +GitLab Runner can generate and produce attestation metadata for all build artifacts. To enable this feature, you must set the `RUNNER_GENERATE_ARTIFACTS_METADATA` environment variable to `true`. This variable can either be set globally or it can be set for individual jobs. The metadata is in rendered in a plain text `.json` file that's stored with the artifact. The filename is as follows: `{ARTIFACT_NAME}-metadata.json` where `ARTIFACT_NAME` is what was defined as the [name for the artifact](../pipelines/job_artifacts.md#use-cicd-variables-to-define-the-artifacts-name) in the CI file. The filename, however, defaults to `artifacts-metadata.json` if no name was given to the build artifacts. ### Attestation format @@ -801,7 +801,7 @@ The attestation metadata is generated in the [in-toto attestation format](https: | `predicate.invocation.environment.architecture` | The architecture on which the CI job is run. | | `predicate.invocation.parameters` | The names of any CI/CD or environment variables that were present when the build command was run. The value is always represented as an empty string to avoid leaking any secrets. | | `metadata.buildStartedOn` | The time when the build was started. `RFC3339` formatted. | -| `metadata.buildEndedOn` | The time when the build ended. Since metadata generation happens during the build this moment in time will be slightly earlier than the one reported in GitLab. `RFC3339` formatted. | +| `metadata.buildEndedOn` | The time when the build ended. Since metadata generation happens during the build this moment in time is slightly earlier than the one reported in GitLab. `RFC3339` formatted. | | `metadata.reproducible` | Whether the build is reproducible by gathering all the generated metadata. Always `false`. | | `metadata.completeness.parameters` | Whether the parameters are supplied. Always `true`. | | `metadata.completeness.environment` | Whether the builder's environment is reported. Always `true`. | @@ -893,7 +893,7 @@ sequentially. To avoid writing to disk and reading the contents back for smaller files, a small buffer per concurrency is used. This setting can be controlled with `FASTZIP_ARCHIVER_BUFFER_SIZE`. The default size for this buffer is 2 MiB, therefore, a -concurrency of 16 will allocate 32 MiB. Data that exceeds the buffer size will be written to and read back from disk. +concurrency of 16 allocates 32 MiB. Data that exceeds the buffer size is written to and read back from disk. Therefore, using no buffer, `FASTZIP_ARCHIVER_BUFFER_SIZE: 0`, and only scratch space is a valid option. `FASTZIP_ARCHIVER_CONCURRENCY` controls how many files are compressed concurrency. As mentioned above, this setting diff --git a/doc/integration/datadog.md b/doc/integration/datadog.md index 1f20bccf083..ffb2c981be7 100644 --- a/doc/integration/datadog.md +++ b/doc/integration/datadog.md @@ -9,7 +9,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w > [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/270123) in GitLab 14.1 This integration enables you to send CI/CD pipeline and job information to -[Datadog](https://www.datadoghq.com/). Datadog's [CI Visibility](https://app.datadoghq.com/ci) +[Datadog](https://www.datadoghq.com/). The [Datadog CI Visibility](https://app.datadoghq.com/ci) product helps you monitor for job failures and performance issues, then troubleshoot them. 
It's based on [Webhooks](../user/project/integrations/webhooks.md), and only requires configuration on GitLab. diff --git a/doc/integration/sourcegraph.md b/doc/integration/sourcegraph.md index 39efccb7c50..09f0d0aca08 100644 --- a/doc/integration/sourcegraph.md +++ b/doc/integration/sourcegraph.md @@ -99,11 +99,11 @@ for updates. ### Sourcegraph isn't working -If you enabled Sourcegraph for your project but it isn't working, Sourcegraph may not have indexed the project yet. You can check for Sourcegraph's availability of your project by visiting `https://sourcegraph.com/gitlab.com/<project-path>`replacing `<project-path>` with the path to your GitLab project. +If you enabled Sourcegraph for your project but it isn't working, Sourcegraph may not have indexed the project yet. You can check if Sourcegraph is available for your project by visiting `https://sourcegraph.com/gitlab.com/<project-path>`replacing `<project-path>` with the path to your GitLab project. ## Sourcegraph and Privacy -From Sourcegraph's [extension documentation](https://docs.sourcegraph.com/integration/browser_extension#privacy) which is the +From the Sourcegraph [extension documentation](https://docs.sourcegraph.com/integration/browser_extension#privacy) which is the engine behind the native GitLab integration: > Sourcegraph integrations never send any logs, pings, usage statistics, or telemetry to Sourcegraph.com. diff --git a/doc/operations/metrics/dashboards/yaml_number_format.md b/doc/operations/metrics/dashboards/yaml_number_format.md index cd597ea8783..99e6be96a3c 100644 --- a/doc/operations/metrics/dashboards/yaml_number_format.md +++ b/doc/operations/metrics/dashboards/yaml_number_format.md @@ -113,12 +113,12 @@ Formats supported: `milliseconds`, `seconds` | Format | Data | Displayed | | -------------- | ------ | --------- | -| `milliseconds` | `10` | 10ms | -| `milliseconds` | `500` | 100ms | -| `milliseconds` | `1000` | 1000ms | -| `seconds` | `10` | 10s | -| `seconds` | `500` | 500s | -| `seconds` | `1000` | 1000s | +| `milliseconds` | `10` | 10 ms | +| `milliseconds` | `500` | 100 ms | +| `milliseconds` | `1000` | 1000 ms | +| `seconds` | `10` | 10 s | +| `seconds` | `500` | 500 s | +| `seconds` | `1000` | 1000 s | ## Digital (Metric) @@ -138,15 +138,15 @@ Formats supported: | Format | Data | Displayed | | -------------- | --------- | --------- | -| `decimalBytes` | `1` | 1B | -| `decimalBytes` | `1000` | 1kB | -| `decimalBytes` | `1000000` | 1MB | -| `kilobytes` | `1` | 1kB | -| `kilobytes` | `1000` | 1MB | -| `kilobytes` | `1000000` | 1GB | -| `megabytes` | `1` | 1MB | -| `megabytes` | `1000` | 1GB | -| `megabytes` | `1000000` | 1TB | +| `decimalBytes` | `1` | 1 B | +| `decimalBytes` | `1000` | 1 kB | +| `decimalBytes` | `1000000` | 1 MB | +| `kilobytes` | `1` | 1 kB | +| `kilobytes` | `1000` | 1 MB | +| `kilobytes` | `1000000` | 1 GB | +| `megabytes` | `1` | 1 MB | +| `megabytes` | `1000` | 1 GB | +| `megabytes` | `1000000` | 1 TB | ## Digital (IEC) @@ -166,12 +166,12 @@ Formats supported: | Format | Data | Displayed | | ----------- | ------------- | --------- | -| `bytes` | `1` | 1B | -| `bytes` | `1024` | 1KiB | -| `bytes` | `1024 * 1024` | 1MiB | -| `kibibytes` | `1` | 1KiB | -| `kibibytes` | `1024` | 1MiB | -| `kibibytes` | `1024 * 1024` | 1GiB | -| `mebibytes` | `1` | 1MiB | -| `mebibytes` | `1024` | 1GiB | -| `mebibytes` | `1024 * 1024` | 1TiB | +| `bytes` | `1` | 1 B | +| `bytes` | `1024` | 1 KiB | +| `bytes` | `1024 * 1024` | 1 MiB | +| `kibibytes` | `1` | 1 KiB | +| `kibibytes` | `1024` | 1 MiB | 
+| `kibibytes` | `1024 * 1024` | 1 GiB | +| `mebibytes` | `1` | 1 MiB | +| `mebibytes` | `1024` | 1 GiB | +| `mebibytes` | `1024 * 1024` | 1 TiB | diff --git a/doc/topics/autodevops/stages.md b/doc/topics/autodevops/stages.md index 9219d4bcd22..d07cb29dbcc 100644 --- a/doc/topics/autodevops/stages.md +++ b/doc/topics/autodevops/stages.md @@ -160,7 +160,7 @@ Cloud Native Buildpacks, and only buildpacks that implement the ### Currently supported languages -Note that not all buildpacks support Auto Test yet, as it's a relatively new +Not all buildpacks support Auto Test yet, as it's a relatively new enhancement. All of Heroku's [officially supported languages](https://devcenter.heroku.com/articles/heroku-ci#supported-languages) support Auto Test. The languages supported by Heroku's Herokuish buildpacks all @@ -502,9 +502,9 @@ as a Helm post-install hook. As some applications can't run without a successful database initialization step, GitLab deploys the first release without the application deployment, and only the database initialization step. After the database initialization completes, GitLab deploys a second release with the application -deployment as normal. +deployment as standard. -Note that a post-install hook means that if any deploy succeeds, +A post-install hook means that if any deploy succeeds, `DB_INITIALIZE` isn't processed thereafter. If present, `DB_MIGRATE` is run as a shell command within an application pod as diff --git a/doc/update/package/index.md b/doc/update/package/index.md index 575194793c2..34c7c096a8d 100644 --- a/doc/update/package/index.md +++ b/doc/update/package/index.md @@ -340,3 +340,147 @@ To fix this error: sudo gitlab-ctl hup puma sudo gitlab-ctl restart sidekiq ``` + +### Missing asset files + +Following an upgrade, GitLab might not be correctly serving up assets such as images, JavaScript, and style sheets. +It might be generating 500 errors, or the web UI may be failing to render properly. + +In a scaled out GitLab environment, if one web server behind the load balancer is demonstrating +this issue, the problem occurs intermittently. + +The [Rake task to recompile](../../administration/raketasks/maintenance.md#precompile-the-assets) the +assets doesn't apply to an Omnibus installation which serves +pre-compiled assets from `/opt/gitlab/embedded/service/gitlab-rails/public/assets`. + +Potential causes and fixes: + +- [Ensure no old processes are running](#old-processes). +- [Remove duplicate sprockets files](#duplicate-sprockets-files) +- [The installation is incomplete](#incomplete-installation) +- [NGINX Gzip support is disabled](#nginx-gzip-support) + +#### Old processes + +The most likely cause is that an old Puma process is running, instructing clients +to request asset files from a previous release of GitLab. As the files no longer exist, +HTTP 404 errors are returned. + +A reboot is the best way to ensure these old Puma processes are no longer running. + +Alternatively: + +1. Stop Puma: + + ```shell + gitlab-ctl stop puma + ``` + +1. Check for any remaining Puma processes, and kill them: + + ```shell + ps -ef | egrep 'puma[: ]' + kill <processid> + ``` + +1. Verify with `ps` that the Puma processes have stopped running. + +1. Start Puma + + ```shell + gitlab-ctl start puma + ``` + +#### Duplicate sprockets files + +The compiled asset files have unique file names in each release. The sprockets files +provide a mapping from the filenames in the application code to the unique filenames. 
+ +```plaintext +/opt/gitlab/embedded/service/gitlab-rails/public/assets/.sprockets-manifest*.json +``` + +Make sure there's only one sprockets file. [Rails uses the first one](https://github.com/rails/sprockets-rails/blob/118ce60b1ffeb7a85640661b014cd2ee3c4e3e56/lib/sprockets/railtie.rb#L201). + +A check for duplicate sprockets files runs during Omnibus GitLab upgrades: + +```plaintext +GitLab discovered stale file(s) from the previous install that need to be cleaned up. +The following files need to be removed: + +/opt/gitlab/embedded/service/gitlab-rails/public/assets/.sprockets-manifest-e16fdb7dd73cfdd64ed9c2cc0e35718a.json +``` + +Options for resolving this include: + +- If you have the output from the package upgrade, remove the specified files. Then restart Puma: + + ```shell + gitlab-ctl restart puma + ``` + +- If you don't have the message, perform a reinstall + (see [incomplete installation](#incomplete-installation) below for more details) + to generate it again. + +- Remove all the sprockets files and then follow the instructions for an [incomplete installation](#incomplete-installation). + +#### Incomplete installation + +An incomplete installation could be the cause of this issue. + +Verify the package to determine if this is the problem: + +- For Debian distributions: + + ```shell + apt-get install debsums + debsums -c gitlab-ee + ``` + +- For Red Hat/SUSE (RPM) distributions: + + ```shell + rpm -V gitlab-ee + ``` + +To reinstall the package to fix an incomplete installation: + +1. Check the installed version + + - For Debian distributions: + + ```shell + apt --installed list gitlab-ee + ``` + + - For Red Hat/SUSE (RPM) distributions: + + ```shell + rpm -qa gitlab-ee + ``` + +1. Reinstall the package, specifying the installed version. For example 14.4.0 Enterprise Edition: + + - For Debian distributions: + + ```shell + apt-get install --reinstall gitlab-ee=14.4.0-ee.0 + ``` + + - For Red Hat/SUSE (RPM) distributions: + + ```shell + yum reinstall gitlab-ee-14.4.0 + ``` + +#### NGINX Gzip support + +Check whether `nginx['gzip_enabled']` has been disabled: + +```shell +grep gzip /etc/gitlab/gitlab.rb +``` + +This might prevent some assets from being served. +[Read more](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/6087#note_558194395) in one of the related issues. diff --git a/doc/user/project/integrations/prometheus_library/kubernetes.md b/doc/user/project/integrations/prometheus_library/kubernetes.md index 0795c110deb..34a6823f007 100644 --- a/doc/user/project/integrations/prometheus_library/kubernetes.md +++ b/doc/user/project/integrations/prometheus_library/kubernetes.md @@ -42,7 +42,7 @@ Prometheus needs to be deployed into the cluster and configured properly to gath ## Specifying the Environment -In order to isolate and only display relevant CPU and Memory metrics for a given environment, GitLab needs a method to detect which containers it is running. Because these metrics are tracked at the container level, traditional Kubernetes labels are not available. +To isolate and only display relevant CPU and Memory metrics for a given environment, GitLab needs a method to detect which containers it is running. Because these metrics are tracked at the container level, traditional Kubernetes labels are not available. 
Instead, the [Deployment](https://kubernetes.io/docs/concepts/workloads/controllers/deployment/) or [DaemonSet](https://kubernetes.io/docs/concepts/workloads/controllers/daemonset/) name should begin with [CI_ENVIRONMENT_SLUG](../../../../ci/variables/index.md#predefined-cicd-variables). It can be followed by a `-` and additional content if desired. For example, a deployment name of `review-homepage-5620p5` would match the `review/homepage` environment. diff --git a/doc/user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md b/doc/user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md index eec624f4624..95ac2e50f29 100644 --- a/doc/user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md +++ b/doc/user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md @@ -62,7 +62,7 @@ associated Pages domain. GitLab also renews it automatically. > - Issuing the certificate and updating Pages configuration > **can take up to an hour**. > - If you already have an SSL certificate in domain settings it -> continues to work until replaced by the Let's Encrypt's certificate. +> continues to work until replaced by the Let's Encrypt certificate. ## Troubleshooting diff --git a/doc/user/project/pages/redirects.md b/doc/user/project/pages/redirects.md index cf0c0dbff82..f5447fd67ca 100644 --- a/doc/user/project/pages/redirects.md +++ b/doc/user/project/pages/redirects.md @@ -41,13 +41,11 @@ this test suite! To create redirects, create a configuration file named `_redirects` in the `public/` directory of your GitLab Pages site. -Note that: - - All paths must start with a forward slash `/`. - A default status code of `301` is applied if no [status code](#http-status-codes) is provided. - The `_redirects` file has a file size limit and a maximum number of rules per project, configured at the instance level. Only the first matching rules within the configured maximum are processed. - The default file size limit is 64KB, and the default maximum number of rules is 1,000. + The default file size limit is 64 KB, and the default maximum number of rules is 1,000. - If your GitLab Pages site uses the default domain name (such as `namespace.gitlab.io/projectname`) you must prefix every rule with the project name: @@ -74,7 +72,7 @@ is ignored because `hello.html` exists: /projectname/hello.html /projectname/world.html 302 ``` -GitLab doesn't support Netlify's +GitLab does not support Netlify [force option](https://docs.netlify.com/routing/redirects/rewrites-proxies/#shadowing) to change this behavior. @@ -231,7 +229,7 @@ rule 10: valid rule 11: valid ``` -## Differences from Netlify's implementation +## Differences from Netlify implementation Most supported `_redirects` rules behave the same in both GitLab and Netlify. However, there are some minor differences: diff --git a/doc/user/project/repository/ssh_signed_commits/index.md b/doc/user/project/repository/ssh_signed_commits/index.md index 85d2ce1d480..4a6a6ebcdba 100644 --- a/doc/user/project/repository/ssh_signed_commits/index.md +++ b/doc/user/project/repository/ssh_signed_commits/index.md @@ -160,8 +160,19 @@ for Git to associate SSH public keys with users: ## Revoke an SSH key for signing commits -You can't revoke an SSH key used for signing commits. To learn more, read -[Add revocation for SSH keys](https://gitlab.com/gitlab-org/gitlab/-/issues/382984). +> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108344) in GitLab 15.9. 
+ +If an SSH key becomes compromised, revoke it. Revoking a key changes both future and past commits: + +- Past commits signed by this key are marked as unverified. +- Future commits signed by this key are marked as unverified. + +To revoke an SSH key: + +1. In the top-right corner, select your avatar. +1. Select **Edit profile**. +1. On the left sidebar, select (**{key}**) **SSH Keys**. +1. Select **Revoke** next to the SSH key you want to delete. ## Related topics diff --git a/lib/gitlab/search/found_blob.rb b/lib/gitlab/search/found_blob.rb index 60d3e360984..79d6cfc84a3 100644 --- a/lib/gitlab/search/found_blob.rb +++ b/lib/gitlab/search/found_blob.rb @@ -9,7 +9,7 @@ module Gitlab include Gitlab::Utils::StrongMemoize include BlobActiveModel - attr_reader :project, :content_match, :blob_path, :highlight_line + attr_reader :project, :content_match, :blob_path, :highlight_line, :matched_lines_count PATH_REGEXP = /\A(?<ref>[^:]*):(?<path>[^\x00]*)\x00/.freeze CONTENT_REGEXP = /^(?<ref>[^:]*):(?<path>[^\x00]*)\x00(?<startline>\d+)\x00/.freeze @@ -25,6 +25,7 @@ module Gitlab @binary_path = opts.fetch(:path, nil) @binary_basename = opts.fetch(:basename, nil) @ref = opts.fetch(:ref, nil) + @matched_lines_count = opts.fetch(:matched_lines_count, nil) @startline = opts.fetch(:startline, nil) @highlight_line = opts.fetch(:highlight_line, nil) @binary_data = opts.fetch(:data, nil) diff --git a/lib/gitlab/usage_data_counters/issue_activity_unique_counter.rb b/lib/gitlab/usage_data_counters/issue_activity_unique_counter.rb index 477fa288874..a59ea36961d 100644 --- a/lib/gitlab/usage_data_counters/issue_activity_unique_counter.rb +++ b/lib/gitlab/usage_data_counters/issue_activity_unique_counter.rb @@ -34,6 +34,7 @@ module Gitlab ISSUE_COMMENT_ADDED = 'g_project_management_issue_comment_added' ISSUE_COMMENT_EDITED = 'g_project_management_issue_comment_edited' ISSUE_COMMENT_REMOVED = 'g_project_management_issue_comment_removed' + ISSUE_DESIGN_COMMENT_REMOVED = 'g_project_management_issue_design_comments_removed' class << self def track_issue_created_action(author:, project:) @@ -171,6 +172,11 @@ module Gitlab track_unique_action(ISSUE_CLONED, author) end + def track_issue_design_comment_removed_action(author:, project:) + track_snowplow_action(ISSUE_DESIGN_COMMENT_REMOVED, author, project) + track_unique_action(ISSUE_DESIGN_COMMENT_REMOVED, author) + end + private def track_snowplow_action(event_name, author, project) diff --git a/lib/gitlab/usage_data_counters/known_events/common.yml b/lib/gitlab/usage_data_counters/known_events/common.yml index a64b7c4032b..5cd94722512 100644 --- a/lib/gitlab/usage_data_counters/known_events/common.yml +++ b/lib/gitlab/usage_data_counters/known_events/common.yml @@ -216,6 +216,10 @@ category: issues_edit redis_slot: project_management aggregation: daily +- name: g_project_management_issue_design_comments_removed + category: issues_edit + redis_slot: project_management + aggregation: daily - name: g_project_management_issue_time_estimate_changed category: issues_edit redis_slot: project_management diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 4f89fd54904..856ac295614 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -60,7 +60,7 @@ msgstr "" msgid " or %{emphasisStart}&epic id%{emphasisEnd}" msgstr "" -msgid " or references (e.g. path/to/project!merge_request_id)" +msgid " or references" msgstr "" msgid " reacted with :%{name}:" @@ -5337,6 +5337,9 @@ msgstr "" msgid "Are you sure you want to revoke this %{accessTokenType}? 
This action cannot be undone." msgstr "" +msgid "Are you sure you want to revoke this SSH key?" +msgstr "" + msgid "Are you sure you want to revoke this group access token? This action cannot be undone." msgstr "" @@ -10103,6 +10106,15 @@ msgstr "" msgid "CommitMessage|Add %{file_name}" msgstr "" +msgid "CommitSignature|This commit was signed with a key that was revoked." +msgstr "" + +msgid "CommitSignature|Unverified" +msgstr "" + +msgid "CommitSignature|Unverified signature" +msgstr "" + msgid "CommitWidget|authored" msgstr "" @@ -17815,10 +17827,10 @@ msgstr "" msgid "Forks" msgstr "" -msgid "ForksDivergence|%{ahead} %{commit_word} ahead of" +msgid "ForksDivergence|%{aheadLinkStart}%{ahead} %{commit_word} ahead%{aheadLinkEnd} of" msgstr "" -msgid "ForksDivergence|%{behind} %{commit_word} behind" +msgid "ForksDivergence|%{behindLinkStart}%{behind} %{commit_word} behind%{behindLinkEnd}" msgstr "" msgid "ForksDivergence|%{messages} the upstream repository." @@ -20891,6 +20903,9 @@ msgstr "" msgid "IdentityVerification|Before you sign in, we need to verify your identity. Enter the following code on the sign-in page." msgstr "" +msgid "IdentityVerification|Complete verification to sign in." +msgstr "" + msgid "IdentityVerification|Confirm your email address" msgstr "" @@ -29116,6 +29131,9 @@ msgstr "" msgid "OnDemandScans|No %{profileType} profiles found for DAST" msgstr "" +msgid "OnDemandScans|No matching results" +msgstr "" + msgid "OnDemandScans|On-demand Scans" msgstr "" @@ -29134,6 +29152,9 @@ msgstr "" msgid "OnDemandScans|Run scan" msgstr "" +msgid "OnDemandScans|Runner tags (optional)" +msgstr "" + msgid "OnDemandScans|Save and run scan" msgstr "" @@ -29158,6 +29179,9 @@ msgstr "" msgid "OnDemandScans|Scan type" msgstr "" +msgid "OnDemandScans|Select runner tags" +msgstr "" + msgid "OnDemandScans|Start by creating a new profile. Profiles make it easy to save and reuse configuration details for GitLab’s security tools." msgstr "" @@ -29188,6 +29212,12 @@ msgstr "" msgid "OnDemandScans|Timezone" msgstr "" +msgid "OnDemandScans|Unable to fetch runner tags. Try reloading the page." +msgstr "" + +msgid "OnDemandScans|Use runner tags to select specific runners for this security scan. %{linkStart}What are runner tags?%{linkEnd}" +msgstr "" + msgid "OnDemandScans|Verify configuration" msgstr "" @@ -34802,6 +34832,9 @@ msgstr "" msgid "References" msgstr "" +msgid "References should be in the form of path/to/project!merge_request_id" +msgstr "" + msgid "Refine your search criteria (select a %{strong_open}group%{strong_close} and %{strong_open}project%{strong_close} when possible)" msgstr "" @@ -36140,6 +36173,9 @@ msgstr "" msgid "Revoke" msgstr "" +msgid "Revoke Key" +msgstr "" + msgid "Revoked" msgstr "" @@ -42937,6 +42973,9 @@ msgstr "" msgid "This action cannot be undone, and will permanently delete the %{key} SSH key" msgstr "" +msgid "This action cannot be undone, and will permanently delete the %{key} SSH key. All commits signed using this SSH key will be marked as unverified." +msgstr "" + msgid "This action deletes %{codeOpen}%{project_path_with_namespace}%{codeClose} and everything this project contains. 
%{strongOpen}There is no going back.%{strongClose}" msgstr "" diff --git a/qa/qa/service/cluster_provider/gcloud.rb b/qa/qa/service/cluster_provider/gcloud.rb index f00d802007e..749ebca8897 100644 --- a/qa/qa/service/cluster_provider/gcloud.rb +++ b/qa/qa/service/cluster_provider/gcloud.rb @@ -55,7 +55,7 @@ module QA shell <<~CMD.tr("\n", ' ') curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 && chmod 700 get_helm.sh && - DESIRED_VERSION=3.7.0 ./get_helm.sh + DESIRED_VERSION=v3.7.0 ./get_helm.sh CMD end diff --git a/rubocop/migration_helpers.rb b/rubocop/migration_helpers.rb index 50d7b198931..d14b1bdd6bb 100644 --- a/rubocop/migration_helpers.rb +++ b/rubocop/migration_helpers.rb @@ -61,11 +61,16 @@ module RuboCop File.basename(node.location.expression.source_buffer.name).split('_').first.to_i end - # Returns true if a column definition is for an array + # Returns true if a column definition is for an array, like { array: true } + # + # @example + # add_column :table, :ids, :integer, array: true, default: [] + # # rubocop:disable Lint/BooleanSymbol def array_column?(node) node.each_descendant(:pair).any? do |pair_node| - pair_node.child_nodes[0].value == :array && # Searching for a (pair (sym :array) (true)) node + pair_node.child_nodes[0].sym_type? && # Searching for a RuboCop::AST::SymbolNode + pair_node.child_nodes[0].value == :array && # Searching for a (pair (sym :array) (true)) node pair_node.child_nodes[1].type == :true # RuboCop::AST::Node uses symbols for types, even when that is a :true end end diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb index 3d1d28945f7..0c97f11090d 100644 --- a/spec/controllers/projects/pipelines_controller_spec.rb +++ b/spec/controllers/projects/pipelines_controller_spec.rb @@ -1355,8 +1355,8 @@ RSpec.describe Projects::PipelinesController do .and_return(service) end - context 'when sending a valid sha' do - let(:sha) { 'master' } + context 'when sending a valid ref' do + let(:ref) { 'master' } let(:ci_config) do { variables: { @@ -1381,8 +1381,8 @@ RSpec.describe Projects::PipelinesController do end end - context 'when sending an invalid sha' do - let(:sha) { 'invalid-sha' } + context 'when sending an invalid ref' do + let(:ref) { 'invalid-ref' } before do synchronous_reactive_cache(service) @@ -1397,7 +1397,7 @@ RSpec.describe Projects::PipelinesController do end context 'when sending an invalid config' do - let(:sha) { 'master' } + let(:ref) { 'master' } let(:ci_config) do { variables: { @@ -1423,7 +1423,7 @@ RSpec.describe Projects::PipelinesController do end context 'when the cache is empty' do - let(:sha) { 'master' } + let(:ref) { 'master' } let(:ci_config) do { variables: { @@ -1446,7 +1446,7 @@ RSpec.describe Projects::PipelinesController do context 'when project uses external project ci config' do let(:other_project) { create(:project, :custom_repo, files: other_project_files) } let(:other_project_files) { { '.gitlab-ci.yml' => YAML.dump(other_project_ci_config) } } - let(:sha) { 'master' } + let(:ref) { 'master' } let(:other_project_ci_config) do { @@ -1479,7 +1479,7 @@ RSpec.describe Projects::PipelinesController do def get_config_variables get :config_variables, params: { namespace_id: project.namespace, project_id: project, - sha: sha }, + sha: ref }, format: :json end end diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb index 4af901e4cf6..9b839873fbb 100644 --- a/spec/db/schema_spec.rb +++ 
b/spec/db/schema_spec.rb @@ -38,7 +38,7 @@ RSpec.describe 'Database schema', feature_category: :database do ci_build_trace_chunks: %w[partition_id], ci_build_trace_metadata: %w[partition_id], ci_builds: %w[erased_by_id trigger_request_id partition_id], - ci_builds_runner_session: %w[partition_id], + ci_builds_runner_session: %w[partition_id build_id], p_ci_builds_metadata: %w[partition_id], ci_job_artifacts: %w[partition_id], ci_job_variables: %w[partition_id], diff --git a/spec/factories/ci/runner_machines.rb b/spec/factories/ci/runner_machines.rb index 09bf5d0844e..c382ebdcb26 100644 --- a/spec/factories/ci/runner_machines.rb +++ b/spec/factories/ci/runner_machines.rb @@ -4,5 +4,10 @@ FactoryBot.define do factory :ci_runner_machine, class: 'Ci::RunnerMachine' do runner factory: :ci_runner machine_xid { "r_#{SecureRandom.hex.slice(0, 10)}" } + + trait :stale do + created_at { 1.year.ago } + contacted_at { Ci::RunnerMachine::STALE_TIMEOUT.ago } + end end end diff --git a/spec/factories/users.rb b/spec/factories/users.rb index 2b53a469841..e641f925758 100644 --- a/spec/factories/users.rb +++ b/spec/factories/users.rb @@ -180,6 +180,10 @@ FactoryBot.define do provider { 'ldapmain' } end end + + trait :unconfirmed do + confirmed_at { nil } + end end factory :atlassian_user do diff --git a/spec/features/profiles/keys_spec.rb b/spec/features/profiles/keys_spec.rb index 5acc59b190f..8d4666dcb50 100644 --- a/spec/features/profiles/keys_spec.rb +++ b/spec/features/profiles/keys_spec.rb @@ -76,35 +76,74 @@ RSpec.describe 'Profile > SSH Keys', feature_category: :user_profile do expect(page).to have_content(key.title) end + def destroy_key(path, action, confirmation_button) + visit path + + page.click_button(action) + + page.within('.modal') do + page.click_button(confirmation_button) + end + + expect(page).to have_content('Your SSH keys (0)') + end + describe 'User removes a key', :js do - shared_examples 'removes key' do - it 'removes key' do - visit path - find('[data-testid=remove-icon]').click + let!(:key) { create(:key, user: user) } - page.within('.modal') do - page.click_button('Delete') - end + context 'via the key index' do + it 'removes key' do + destroy_key(profile_keys_path, 'Remove', 'Delete') + end + end - expect(page).to have_content('Your SSH keys (0)') + context 'via its details page' do + it 'removes key' do + destroy_key(profile_keys_path(key), 'Remove', 'Delete') end end + end + + describe 'User revokes a key', :js do + context 'when a commit is signed using SSH key' do + let!(:project) { create(:project, :repository) } + let!(:key) { create(:key, user: user) } + let!(:commit) { project.commit('ssh-signed-commit') } + + let!(:signature) do + create(:ssh_signature, + project: project, + key: key, + key_fingerprint_sha256: key.fingerprint_sha256, + commit_sha: commit.sha) + end - context 'via the key index' do before do - create(:key, user: user) + project.add_developer(user) end - let(:path) { profile_keys_path } + it 'revoking the SSH key marks commits as unverified' do + visit project_commit_path(project, commit) - it_behaves_like 'removes key' - end + find('a.gpg-status-box', text: 'Verified').click - context 'via its details page' do - let(:key) { create(:key, user: user) } - let(:path) { profile_keys_path(key) } + within('.popover') do + expect(page).to have_content("Verified commit") + expect(page).to have_content("SSH key fingerprint: #{key.fingerprint_sha256}") + end + + destroy_key(profile_keys_path, 'Revoke', 'Revoke') + + visit project_commit_path(project, commit) - 
it_behaves_like 'removes key' + find('a.gpg-status-box', text: 'Unverified').click + + within('.popover') do + expect(page).to have_content("Unverified signature") + expect(page).to have_content('This commit was signed with a key that was revoked.') + expect(page).to have_content("SSH key fingerprint: #{signature.key_fingerprint_sha256}") + end + end end end end diff --git a/spec/frontend/admin/broadcast_messages/components/base_spec.js b/spec/frontend/admin/broadcast_messages/components/base_spec.js index 79bde54286e..d69bf4a22bf 100644 --- a/spec/frontend/admin/broadcast_messages/components/base_spec.js +++ b/spec/frontend/admin/broadcast_messages/components/base_spec.js @@ -6,7 +6,7 @@ import waitForPromises from 'helpers/wait_for_promises'; import { useMockLocationHelper } from 'helpers/mock_window_location_helper'; import { createAlert } from '~/flash'; import axios from '~/lib/utils/axios_utils'; -import { HTTP_STATUS_OK } from '~/lib/utils/http_status'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status'; import { redirectTo } from '~/lib/utils/url_utility'; import BroadcastMessagesBase from '~/admin/broadcast_messages/components/base.vue'; import MessagesTable from '~/admin/broadcast_messages/components/messages_table.vue'; @@ -71,7 +71,7 @@ describe('BroadcastMessagesBase', () => { it('does not remove a deleted message if the request fails', async () => { createComponent(); const { id, delete_path } = MOCK_MESSAGES[0]; - axiosMock.onDelete(delete_path).replyOnce(500); + axiosMock.onDelete(delete_path).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); findTable().vm.$emit('delete-message', id); await waitForPromises(); diff --git a/spec/frontend/admin/statistics_panel/store/actions_spec.js b/spec/frontend/admin/statistics_panel/store/actions_spec.js index e7cdb5feb6a..bf97699b1fc 100644 --- a/spec/frontend/admin/statistics_panel/store/actions_spec.js +++ b/spec/frontend/admin/statistics_panel/store/actions_spec.js @@ -5,6 +5,7 @@ import * as actions from '~/admin/statistics_panel/store/actions'; import * as types from '~/admin/statistics_panel/store/mutation_types'; import getInitialState from '~/admin/statistics_panel/store/state'; import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import mockStatistics from '../mock_data'; describe('Admin statistics panel actions', () => { @@ -43,7 +44,9 @@ describe('Admin statistics panel actions', () => { describe('error', () => { beforeEach(() => { - mock.onGet(/api\/(.*)\/application\/statistics/).replyOnce(500); + mock + .onGet(/api\/(.*)\/application\/statistics/) + .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); }); it('dispatches error', () => { @@ -99,12 +102,12 @@ describe('Admin statistics panel actions', () => { it('should commit error', () => { return testAction( actions.receiveStatisticsError, - 500, + HTTP_STATUS_INTERNAL_SERVER_ERROR, state, [ { type: types.RECEIVE_STATISTICS_ERROR, - payload: 500, + payload: HTTP_STATUS_INTERNAL_SERVER_ERROR, }, ], [], diff --git a/spec/frontend/admin/statistics_panel/store/mutations_spec.js b/spec/frontend/admin/statistics_panel/store/mutations_spec.js index 0a3dad09c9a..70c1e723f08 100644 --- a/spec/frontend/admin/statistics_panel/store/mutations_spec.js +++ b/spec/frontend/admin/statistics_panel/store/mutations_spec.js @@ -1,6 +1,7 @@ import * as types from '~/admin/statistics_panel/store/mutation_types'; import mutations from 
'~/admin/statistics_panel/store/mutations'; import getInitialState from '~/admin/statistics_panel/store/state'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import mockStatistics from '../mock_data'; describe('Admin statistics panel mutations', () => { @@ -30,11 +31,10 @@ describe('Admin statistics panel mutations', () => { describe(`${types.RECEIVE_STATISTICS_ERROR}`, () => { it('sets error and clears data', () => { - const error = 500; - mutations[types.RECEIVE_STATISTICS_ERROR](state, error); + mutations[types.RECEIVE_STATISTICS_ERROR](state, HTTP_STATUS_INTERNAL_SERVER_ERROR); expect(state.isLoading).toBe(false); - expect(state.error).toBe(error); + expect(state.error).toBe(HTTP_STATUS_INTERNAL_SERVER_ERROR); expect(state.statistics).toEqual(null); }); }); diff --git a/spec/frontend/badges/store/actions_spec.js b/spec/frontend/badges/store/actions_spec.js index b799273ff63..4e8d7aaaca9 100644 --- a/spec/frontend/badges/store/actions_spec.js +++ b/spec/frontend/badges/store/actions_spec.js @@ -5,6 +5,7 @@ import actions, { transformBackendBadge } from '~/badges/store/actions'; import mutationTypes from '~/badges/store/mutation_types'; import createState from '~/badges/store/state'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { createDummyBadge, createDummyBadgeResponse } from '../dummy_badge'; describe('Badges store actions', () => { @@ -119,7 +120,7 @@ describe('Badges store actions', () => { expect(dispatch.mock.calls).toEqual([['requestNewBadge']]); dispatch.mockClear(); - return [500, '']; + return [HTTP_STATUS_INTERNAL_SERVER_ERROR, '']; }); await expect(actions.addBadge({ state, dispatch })).rejects.toThrow(); @@ -187,7 +188,7 @@ describe('Badges store actions', () => { endpointMock.replyOnce(() => { expect(dispatch.mock.calls).toEqual([['requestDeleteBadge', badgeId]]); dispatch.mockClear(); - return [500, '']; + return [HTTP_STATUS_INTERNAL_SERVER_ERROR, '']; }); await expect(actions.deleteBadge({ state, dispatch }, { id: badgeId })).rejects.toThrow(); @@ -279,7 +280,7 @@ describe('Badges store actions', () => { endpointMock.replyOnce(() => { expect(dispatch.mock.calls).toEqual([['requestLoadBadges', dummyData]]); dispatch.mockClear(); - return [500, '']; + return [HTTP_STATUS_INTERNAL_SERVER_ERROR, '']; }); await expect(actions.loadBadges({ state, dispatch }, dummyData)).rejects.toThrow(); @@ -393,7 +394,7 @@ describe('Badges store actions', () => { endpointMock.replyOnce(() => { expect(dispatch.mock.calls).toEqual([['requestRenderedBadge']]); dispatch.mockClear(); - return [500, '']; + return [HTTP_STATUS_INTERNAL_SERVER_ERROR, '']; }); await expect(actions.renderBadge({ state, dispatch })).rejects.toThrow(); @@ -487,7 +488,7 @@ describe('Badges store actions', () => { expect(dispatch.mock.calls).toEqual([['requestUpdatedBadge']]); dispatch.mockClear(); - return [500, '']; + return [HTTP_STATUS_INTERNAL_SERVER_ERROR, '']; }); await expect(actions.saveBadge({ state, dispatch })).rejects.toThrow(); diff --git a/spec/frontend/blob/notebook/notebook_viever_spec.js b/spec/frontend/blob/notebook/notebook_viever_spec.js index ea4badc03fb..e6480c8d874 100644 --- a/spec/frontend/blob/notebook/notebook_viever_spec.js +++ b/spec/frontend/blob/notebook/notebook_viever_spec.js @@ -4,6 +4,7 @@ import MockAdapter from 'axios-mock-adapter'; import waitForPromises from 'helpers/wait_for_promises'; import component from '~/blob/notebook/notebook_viewer.vue'; import axios from 
'~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import NotebookLab from '~/notebook/index.vue'; describe('iPython notebook renderer', () => { @@ -90,7 +91,7 @@ describe('iPython notebook renderer', () => { describe('error getting file', () => { beforeEach(() => { - mock.onGet(endpoint).reply(500, ''); + mock.onGet(endpoint).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR, ''); mountComponent(); return waitForPromises(); diff --git a/spec/frontend/ci/pipeline_editor/pipeline_editor_app_spec.js b/spec/frontend/ci/pipeline_editor/pipeline_editor_app_spec.js index 2246d0bbf7e..a103acb33bc 100644 --- a/spec/frontend/ci/pipeline_editor/pipeline_editor_app_spec.js +++ b/spec/frontend/ci/pipeline_editor/pipeline_editor_app_spec.js @@ -5,6 +5,7 @@ import createMockApollo from 'helpers/mock_apollo_helper'; import setWindowLocation from 'helpers/set_window_location_helper'; import waitForPromises from 'helpers/wait_for_promises'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { objectToQuery, redirectTo } from '~/lib/utils/url_utility'; import { resolvers } from '~/ci/pipeline_editor/graphql/resolvers'; import PipelineEditorTabs from '~/ci/pipeline_editor/components/pipeline_editor_tabs.vue'; @@ -343,7 +344,7 @@ describe('Pipeline editor app component', () => { describe('when the lint query returns a 500 error', () => { beforeEach(async () => { - mockCiConfigData.mockRejectedValueOnce(new Error(500)); + mockCiConfigData.mockRejectedValueOnce(new Error(HTTP_STATUS_INTERNAL_SERVER_ERROR)); await createComponentWithApollo({ stubs: { PipelineEditorHome, PipelineEditorHeader, ValidationSegment }, }); diff --git a/spec/frontend/code_navigation/store/actions_spec.js b/spec/frontend/code_navigation/store/actions_spec.js index 8eee61d1342..3cedb2fe232 100644 --- a/spec/frontend/code_navigation/store/actions_spec.js +++ b/spec/frontend/code_navigation/store/actions_spec.js @@ -4,6 +4,7 @@ import testAction from 'helpers/vuex_action_helper'; import actions from '~/code_navigation/store/actions'; import { setCurrentHoverElement, addInteractionClass } from '~/code_navigation/utils'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; jest.mock('~/code_navigation/utils'); @@ -124,7 +125,7 @@ describe('Code navigation actions', () => { describe('error', () => { beforeEach(() => { - mock.onGet(codeNavigationPath).replyOnce(500); + mock.onGet(codeNavigationPath).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); }); it('dispatches requestDataError', () => { diff --git a/spec/frontend/commit/pipelines/pipelines_table_spec.js b/spec/frontend/commit/pipelines/pipelines_table_spec.js index 6865b721441..6ad1c308b01 100644 --- a/spec/frontend/commit/pipelines/pipelines_table_spec.js +++ b/spec/frontend/commit/pipelines/pipelines_table_spec.js @@ -337,7 +337,7 @@ describe('Pipelines table in Commits and Merge requests', () => { describe('unsuccessfull request', () => { beforeEach(async () => { - mock.onGet('endpoint.json').reply(500, []); + mock.onGet('endpoint.json').reply(HTTP_STATUS_INTERNAL_SERVER_ERROR, []); createComponent(); diff --git a/spec/frontend/deploy_tokens/components/new_deploy_token_spec.js b/spec/frontend/deploy_tokens/components/new_deploy_token_spec.js index 0bf69acd251..0b82cb32dc4 100644 --- a/spec/frontend/deploy_tokens/components/new_deploy_token_spec.js +++ b/spec/frontend/deploy_tokens/components/new_deploy_token_spec.js @@ -3,6 +3,7 @@ import { 
nextTick } from 'vue'; import { GlButton, GlFormCheckbox, GlFormInput, GlFormInputGroup, GlDatepicker } from '@gitlab/ui'; import MockAdapter from 'axios-mock-adapter'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { TEST_HOST } from 'helpers/test_constants'; import NewDeployToken from '~/deploy_tokens/components/new_deploy_token.vue'; import waitForPromises from 'helpers/wait_for_promises'; @@ -131,7 +132,7 @@ describe('New Deploy Token', () => { write_package_registry: true, }, }) - .replyOnce(500, { message: expectedErrorMessage }); + .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR, { message: expectedErrorMessage }); wrapper.findAllComponents(GlButton).at(0).vm.$emit('click'); diff --git a/spec/frontend/environments/folder/environments_folder_view_spec.js b/spec/frontend/environments/folder/environments_folder_view_spec.js index f8b8465cf6f..9c1f463ec3f 100644 --- a/spec/frontend/environments/folder/environments_folder_view_spec.js +++ b/spec/frontend/environments/folder/environments_folder_view_spec.js @@ -5,6 +5,7 @@ import { removeBreakLine, removeWhitespace } from 'helpers/text_helper'; import EnvironmentTable from '~/environments/components/environments_table.vue'; import EnvironmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { environmentsList } from '../mock_data'; describe('Environments Folder View', () => { @@ -120,7 +121,7 @@ describe('Environments Folder View', () => { describe('unsuccessfull request', () => { beforeEach(() => { - mock.onGet(mockData.endpoint).reply(500, { environments: [] }); + mock.onGet(mockData.endpoint).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR, { environments: [] }); createWrapper(); return axios.waitForAll(); }); diff --git a/spec/frontend/feature_flags/components/feature_flags_spec.js b/spec/frontend/feature_flags/components/feature_flags_spec.js index d27b23c5cd1..4ed17234971 100644 --- a/spec/frontend/feature_flags/components/feature_flags_spec.js +++ b/spec/frontend/feature_flags/components/feature_flags_spec.js @@ -11,6 +11,7 @@ import FeatureFlagsComponent from '~/feature_flags/components/feature_flags.vue' import FeatureFlagsTable from '~/feature_flags/components/feature_flags_table.vue'; import createStore from '~/feature_flags/store/index'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue'; import { getRequestData } from '../mock_data'; @@ -271,7 +272,9 @@ describe('Feature flags', () => { describe('unsuccessful request', () => { beforeEach(() => { - mock.onGet(mockState.endpoint, { params: { page: '1' } }).replyOnce(500, {}); + mock + .onGet(mockState.endpoint, { params: { page: '1' } }) + .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR, {}); factory(); return waitForPromises(); diff --git a/spec/frontend/feature_flags/store/edit/actions_spec.js b/spec/frontend/feature_flags/store/edit/actions_spec.js index bcacfa9fda9..8b9b42f4eb1 100644 --- a/spec/frontend/feature_flags/store/edit/actions_spec.js +++ b/spec/frontend/feature_flags/store/edit/actions_spec.js @@ -17,7 +17,7 @@ import * as types from '~/feature_flags/store/edit/mutation_types'; import state from '~/feature_flags/store/edit/state'; import { mapStrategiesToRails } from 
'~/feature_flags/store/helpers'; import axios from '~/lib/utils/axios_utils'; -import { HTTP_STATUS_OK } from '~/lib/utils/http_status'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status'; jest.mock('~/lib/utils/url_utility'); @@ -79,7 +79,9 @@ describe('Feature flags Edit Module actions', () => { describe('error', () => { it('dispatches requestUpdateFeatureFlag and receiveUpdateFeatureFlagError', () => { - mock.onPut(`${TEST_HOST}/endpoint.json`).replyOnce(500, { message: [] }); + mock + .onPut(`${TEST_HOST}/endpoint.json`) + .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR, { message: [] }); return testAction( updateFeatureFlag, @@ -180,7 +182,9 @@ describe('Feature flags Edit Module actions', () => { describe('error', () => { it('dispatches requestFeatureFlag and receiveUpdateFeatureFlagError', () => { - mock.onGet(`${TEST_HOST}/endpoint.json`, {}).replyOnce(500, {}); + mock + .onGet(`${TEST_HOST}/endpoint.json`, {}) + .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR, {}); return testAction( fetchFeatureFlag, diff --git a/spec/frontend/feature_flags/store/index/actions_spec.js b/spec/frontend/feature_flags/store/index/actions_spec.js index 96a7d868316..a7a8793c82c 100644 --- a/spec/frontend/feature_flags/store/index/actions_spec.js +++ b/spec/frontend/feature_flags/store/index/actions_spec.js @@ -20,6 +20,7 @@ import { import * as types from '~/feature_flags/store/index/mutation_types'; import state from '~/feature_flags/store/index/state'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { getRequestData, rotateData, featureFlag } from '../../mock_data'; jest.mock('~/api.js'); @@ -79,7 +80,9 @@ describe('Feature flags actions', () => { describe('error', () => { it('dispatches requestFeatureFlags and receiveFeatureFlagsError', () => { - mock.onGet(`${TEST_HOST}/endpoint.json`, {}).replyOnce(500, {}); + mock + .onGet(`${TEST_HOST}/endpoint.json`, {}) + .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR, {}); return testAction( fetchFeatureFlags, @@ -176,7 +179,9 @@ describe('Feature flags actions', () => { describe('error', () => { it('dispatches requestRotateInstanceId and receiveRotateInstanceIdError', () => { - mock.onGet(`${TEST_HOST}/endpoint.json`, {}).replyOnce(500, {}); + mock + .onGet(`${TEST_HOST}/endpoint.json`, {}) + .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR, {}); return testAction( rotateInstanceId, @@ -275,7 +280,7 @@ describe('Feature flags actions', () => { describe('error', () => { it('dispatches updateFeatureFlag and receiveUpdateFeatureFlagSuccess', () => { - mock.onPut(featureFlag.update_path).replyOnce(500); + mock.onPut(featureFlag.update_path).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); return testAction( toggleFeatureFlag, diff --git a/spec/frontend/feature_flags/store/new/actions_spec.js b/spec/frontend/feature_flags/store/new/actions_spec.js index c8bf05e4dbd..01b6ab4d5ed 100644 --- a/spec/frontend/feature_flags/store/new/actions_spec.js +++ b/spec/frontend/feature_flags/store/new/actions_spec.js @@ -11,7 +11,7 @@ import { import * as types from '~/feature_flags/store/new/mutation_types'; import state from '~/feature_flags/store/new/state'; import axios from '~/lib/utils/axios_utils'; -import { HTTP_STATUS_OK } from '~/lib/utils/http_status'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status'; jest.mock('~/lib/utils/url_utility'); @@ -88,7 +88,7 @@ describe('Feature flags New Module Actions', () => { }; mock 
.onPost(mockedState.endpoint, mapStrategiesToRails(actionParams)) - .replyOnce(500, { message: [] }); + .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR, { message: [] }); return testAction( createFeatureFlag, diff --git a/spec/frontend/frequent_items/store/actions_spec.js b/spec/frontend/frequent_items/store/actions_spec.js index 4f998cc26da..15905d71a21 100644 --- a/spec/frontend/frequent_items/store/actions_spec.js +++ b/spec/frontend/frequent_items/store/actions_spec.js @@ -5,6 +5,7 @@ import * as types from '~/frequent_items/store/mutation_types'; import state from '~/frequent_items/store/state'; import AccessorUtilities from '~/lib/utils/accessor'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { useLocalStorageSpy } from 'helpers/local_storage_helper'; import { mockNamespace, @@ -192,7 +193,7 @@ describe('Frequent Items Dropdown Store Actions', () => { it('should dispatch `receiveSearchedItemsError`', () => { gon.api_version = 'v4'; - mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(500); + mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); return testAction( actions.fetchSearchedItems, diff --git a/spec/frontend/groups/components/app_spec.js b/spec/frontend/groups/components/app_spec.js index fea3b547977..eca56325366 100644 --- a/spec/frontend/groups/components/app_spec.js +++ b/spec/frontend/groups/components/app_spec.js @@ -11,7 +11,11 @@ import eventHub from '~/groups/event_hub'; import GroupsService from '~/groups/service/groups_service'; import GroupsStore from '~/groups/store/groups_store'; import axios from '~/lib/utils/axios_utils'; -import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_FORBIDDEN } from '~/lib/utils/http_status'; +import { + HTTP_STATUS_BAD_REQUEST, + HTTP_STATUS_FORBIDDEN, + HTTP_STATUS_INTERNAL_SERVER_ERROR, +} from '~/lib/utils/http_status'; import * as urlUtilities from '~/lib/utils/url_utility'; import setWindowLocation from 'helpers/set_window_location_helper'; @@ -322,7 +326,9 @@ describe('AppComponent', () => { it('should show error flash message if request failed to leave group', () => { const message = 'An error occurred. 
Please try again.'; - jest.spyOn(vm.service, 'leaveGroup').mockRejectedValue({ status: 500 }); + jest + .spyOn(vm.service, 'leaveGroup') + .mockRejectedValue({ status: HTTP_STATUS_INTERNAL_SERVER_ERROR }); jest.spyOn(vm.store, 'removeGroup'); vm.leaveGroup(); diff --git a/spec/frontend/ide/lib/mirror_spec.js b/spec/frontend/ide/lib/mirror_spec.js index 8f417ea54dc..33dd0fefc6c 100644 --- a/spec/frontend/ide/lib/mirror_spec.js +++ b/spec/frontend/ide/lib/mirror_spec.js @@ -7,6 +7,7 @@ import { MSG_CONNECTION_ERROR, SERVICE_DELAY, } from '~/ide/lib/mirror'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { getWebSocketUrl } from '~/lib/utils/url_utility'; jest.mock('~/ide/lib/create_diff', () => jest.fn()); @@ -26,7 +27,7 @@ const TEST_ERROR_RESPONSE = { const TEST_ERROR_PAYLOAD_RESPONSE = { data: JSON.stringify({ error: { code: 0 }, - payload: { status_code: 500, error_message: TEST_ERROR }, + payload: { status_code: HTTP_STATUS_INTERNAL_SERVER_ERROR, error_message: TEST_ERROR }, }), }; diff --git a/spec/frontend/ide/stores/actions/tree_spec.js b/spec/frontend/ide/stores/actions/tree_spec.js index 6e8a03b47ad..4551a7a21f1 100644 --- a/spec/frontend/ide/stores/actions/tree_spec.js +++ b/spec/frontend/ide/stores/actions/tree_spec.js @@ -8,6 +8,7 @@ import { createStore } from '~/ide/stores'; import { showTreeEntry, getFiles, setDirectoryData } from '~/ide/stores/actions/tree'; import * as types from '~/ide/stores/mutation_types'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { file, createEntriesFromPaths } from '../../helpers'; describe('Multi-file store tree actions', () => { @@ -98,7 +99,7 @@ describe('Multi-file store tree actions', () => { findBranch: () => store.state.projects['abc/def'].branches['main-testing'], }; - mock.onGet(/(.*)/).replyOnce(500); + mock.onGet(/(.*)/).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); await expect( getFiles( diff --git a/spec/frontend/ide/stores/modules/branches/actions_spec.js b/spec/frontend/ide/stores/modules/branches/actions_spec.js index 306330e3ba2..38a2e1ac12f 100644 --- a/spec/frontend/ide/stores/modules/branches/actions_spec.js +++ b/spec/frontend/ide/stores/modules/branches/actions_spec.js @@ -10,6 +10,7 @@ import { import * as types from '~/ide/stores/modules/branches/mutation_types'; import state from '~/ide/stores/modules/branches/state'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { branches, projectData } from '../../../mock_data'; describe('IDE branches actions', () => { @@ -124,7 +125,9 @@ describe('IDE branches actions', () => { describe('error', () => { beforeEach(() => { - mock.onGet(/\/api\/v4\/projects\/\d+\/repository\/branches(.*)$/).replyOnce(500); + mock + .onGet(/\/api\/v4\/projects\/\d+\/repository\/branches(.*)$/) + .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); }); it('dispatches error', () => { diff --git a/spec/frontend/ide/stores/modules/file_templates/actions_spec.js b/spec/frontend/ide/stores/modules/file_templates/actions_spec.js index 1080a30d2d8..0a99496a147 100644 --- a/spec/frontend/ide/stores/modules/file_templates/actions_spec.js +++ b/spec/frontend/ide/stores/modules/file_templates/actions_spec.js @@ -4,6 +4,7 @@ import * as actions from '~/ide/stores/modules/file_templates/actions'; import * as types from '~/ide/stores/modules/file_templates/mutation_types'; import createState from 
'~/ide/stores/modules/file_templates/state'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; describe('IDE file templates actions', () => { let state; @@ -108,7 +109,7 @@ describe('IDE file templates actions', () => { describe('error', () => { beforeEach(() => { - mock.onGet(/api\/(.*)\/templates\/licenses/).replyOnce(500); + mock.onGet(/api\/(.*)\/templates\/licenses/).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); }); it('dispatches actions', () => { @@ -248,7 +249,9 @@ describe('IDE file templates actions', () => { describe('error', () => { beforeEach(() => { - mock.onGet(/api\/(.*)\/templates\/licenses\/mit/).replyOnce(500); + mock + .onGet(/api\/(.*)\/templates\/licenses\/mit/) + .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); }); it('dispatches error', () => { diff --git a/spec/frontend/ide/stores/modules/merge_requests/actions_spec.js b/spec/frontend/ide/stores/modules/merge_requests/actions_spec.js index 344fe3a41c3..5d3bf063ac8 100644 --- a/spec/frontend/ide/stores/modules/merge_requests/actions_spec.js +++ b/spec/frontend/ide/stores/modules/merge_requests/actions_spec.js @@ -10,6 +10,7 @@ import { import * as types from '~/ide/stores/modules/merge_requests/mutation_types'; import state from '~/ide/stores/modules/merge_requests/state'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { mergeRequests } from '../../../mock_data'; describe('IDE merge requests actions', () => { @@ -169,7 +170,7 @@ describe('IDE merge requests actions', () => { describe('error', () => { beforeEach(() => { - mock.onGet(/\/api\/v4\/merge_requests(.*)$/).replyOnce(500); + mock.onGet(/\/api\/v4\/merge_requests(.*)$/).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); }); it('dispatches error', () => { diff --git a/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js index bf57373dd03..f541e579810 100644 --- a/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js +++ b/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js @@ -13,6 +13,7 @@ import axios from '~/lib/utils/axios_utils'; import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_FORBIDDEN, + HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_NOT_FOUND, HTTP_STATUS_UNPROCESSABLE_ENTITY, } from '~/lib/utils/http_status'; @@ -277,7 +278,9 @@ describe('IDE store terminal check actions', () => { }); it('dispatches request and receive, when error', () => { - mock.onGet(/api\/.*\/projects\/.*\/runners/, { params: { scope: 'active' } }).reply(500, []); + mock + .onGet(/api\/.*\/projects\/.*\/runners/, { params: { scope: 'active' } }) + .reply(HTTP_STATUS_INTERNAL_SERVER_ERROR, []); return testAction( actions.fetchRunnersCheck, diff --git a/spec/frontend/labels/components/promote_label_modal_spec.js b/spec/frontend/labels/components/promote_label_modal_spec.js index 8953e3cbcd8..9dd6f1ca64c 100644 --- a/spec/frontend/labels/components/promote_label_modal_spec.js +++ b/spec/frontend/labels/components/promote_label_modal_spec.js @@ -6,6 +6,7 @@ import { TEST_HOST } from 'helpers/test_constants'; import { stubComponent } from 'helpers/stub_component'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import PromoteLabelModal from '~/labels/components/promote_label_modal.vue'; import eventHub from '~/labels/event_hub'; @@ -85,8 +86,10 @@ describe('Promote 
label modal', () => { it('displays an error if promoting a label failed', async () => { const dummyError = new Error('promoting label failed'); - dummyError.response = { status: 500 }; - axiosMock.onPost(labelMockData.url).reply(500, { error: dummyError }); + dummyError.response = { status: HTTP_STATUS_INTERNAL_SERVER_ERROR }; + axiosMock + .onPost(labelMockData.url) + .reply(HTTP_STATUS_INTERNAL_SERVER_ERROR, { error: dummyError }); wrapper.findComponent(GlModal).vm.$emit('primary'); diff --git a/spec/frontend/lib/utils/favicon_ci_spec.js b/spec/frontend/lib/utils/favicon_ci_spec.js index e35b008b862..6385af952e7 100644 --- a/spec/frontend/lib/utils/favicon_ci_spec.js +++ b/spec/frontend/lib/utils/favicon_ci_spec.js @@ -2,6 +2,7 @@ import MockAdapter from 'axios-mock-adapter'; import axios from '~/lib/utils/axios_utils'; import { setFaviconOverlay, resetFavicon } from '~/lib/utils/favicon'; import { setCiStatusFavicon } from '~/lib/utils/favicon_ci'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; jest.mock('~/lib/utils/favicon'); @@ -41,7 +42,7 @@ describe('~/lib/utils/favicon_ci', () => { ); it('with error', async () => { - mock.onGet(TEST_URL).replyOnce(500); + mock.onGet(TEST_URL).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); await expect(setCiStatusFavicon(TEST_URL)).rejects.toEqual(expect.any(Error)); expect(resetFavicon).toHaveBeenCalled(); diff --git a/spec/frontend/lib/utils/poll_spec.js b/spec/frontend/lib/utils/poll_spec.js index 94a5f5385b7..0e6c08bf033 100644 --- a/spec/frontend/lib/utils/poll_spec.js +++ b/spec/frontend/lib/utils/poll_spec.js @@ -1,5 +1,5 @@ import waitForPromises from 'helpers/wait_for_promises'; -import { successCodes } from '~/lib/utils/http_status'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR, successCodes } from '~/lib/utils/http_status'; import Poll from '~/lib/utils/poll'; describe('Poll', () => { @@ -61,7 +61,7 @@ describe('Poll', () => { }); it('calls the error callback when the http request returns an error', () => { - mockServiceCall({ status: 500 }, true); + mockServiceCall({ status: HTTP_STATUS_INTERNAL_SERVER_ERROR }, true); setup(); return waitForAllCallsToFinish(1, () => { diff --git a/spec/frontend/milestones/components/milestone_combobox_spec.js b/spec/frontend/milestones/components/milestone_combobox_spec.js index c20c51db75e..27485f3d51d 100644 --- a/spec/frontend/milestones/components/milestone_combobox_spec.js +++ b/spec/frontend/milestones/components/milestone_combobox_spec.js @@ -4,6 +4,7 @@ import axios from 'axios'; import MockAdapter from 'axios-mock-adapter'; import Vue, { nextTick } from 'vue'; import Vuex from 'vuex'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { ENTER_KEY } from '~/lib/utils/keys'; import MilestoneCombobox from '~/milestones/components/milestone_combobox.vue'; import createStore from '~/milestones/stores/'; @@ -314,8 +315,10 @@ describe('Milestone combobox component', () => { describe('when the project milestones search returns an error', () => { beforeEach(() => { - projectMilestonesApiCallSpy = jest.fn().mockReturnValue([500]); - searchApiCallSpy = jest.fn().mockReturnValue([500]); + projectMilestonesApiCallSpy = jest + .fn() + .mockReturnValue([HTTP_STATUS_INTERNAL_SERVER_ERROR]); + searchApiCallSpy = jest.fn().mockReturnValue([HTTP_STATUS_INTERNAL_SERVER_ERROR]); createComponent({ value: [] }); @@ -441,8 +444,10 @@ describe('Milestone combobox component', () => { describe('when the group milestones search returns an error', () => { 
beforeEach(() => { - groupMilestonesApiCallSpy = jest.fn().mockReturnValue([500]); - searchApiCallSpy = jest.fn().mockReturnValue([500]); + groupMilestonesApiCallSpy = jest + .fn() + .mockReturnValue([HTTP_STATUS_INTERNAL_SERVER_ERROR]); + searchApiCallSpy = jest.fn().mockReturnValue([HTTP_STATUS_INTERNAL_SERVER_ERROR]); createComponent({ value: [] }); diff --git a/spec/frontend/milestones/components/promote_milestone_modal_spec.js b/spec/frontend/milestones/components/promote_milestone_modal_spec.js index 60657fbc9b8..d7ad3d29d0a 100644 --- a/spec/frontend/milestones/components/promote_milestone_modal_spec.js +++ b/spec/frontend/milestones/components/promote_milestone_modal_spec.js @@ -5,6 +5,7 @@ import { TEST_HOST } from 'helpers/test_constants'; import waitForPromises from 'helpers/wait_for_promises'; import { createAlert } from '~/flash'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import * as urlUtils from '~/lib/utils/url_utility'; import PromoteMilestoneModal from '~/milestones/components/promote_milestone_modal.vue'; @@ -94,7 +95,7 @@ describe('Promote milestone modal', () => { it('displays an error if promoting a milestone failed', async () => { const dummyError = new Error('promoting milestone failed'); - dummyError.response = { status: 500 }; + dummyError.response = { status: HTTP_STATUS_INTERNAL_SERVER_ERROR }; jest.spyOn(axios, 'post').mockImplementation((url) => { expect(url).toBe(milestoneMockData.url); return Promise.reject(dummyError); diff --git a/spec/frontend/pipelines/pipeline_multi_actions_spec.js b/spec/frontend/pipelines/pipeline_multi_actions_spec.js index f0dae8ebcbe..544a33b10db 100644 --- a/spec/frontend/pipelines/pipeline_multi_actions_spec.js +++ b/spec/frontend/pipelines/pipeline_multi_actions_spec.js @@ -5,6 +5,7 @@ import { mockTracking, unmockTracking } from 'helpers/tracking_helper'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; import waitForPromises from 'helpers/wait_for_promises'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import PipelineMultiActions, { i18n, } from '~/pipelines/components/pipelines_list/pipeline_multi_actions.vue'; @@ -140,7 +141,7 @@ describe('Pipeline Multi Actions Dropdown', () => { describe('with a failing request', () => { it('should render an error message', async () => { const endpoint = artifactsEndpoint.replace(artifactsEndpointPlaceholder, pipelineId); - mockAxios.onGet(endpoint).replyOnce(500); + mockAxios.onGet(endpoint).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); createComponent(); findDropdown().vm.$emit('show'); await waitForPromises(); diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js index 351572fc83a..17ff0969b30 100644 --- a/spec/frontend/pipelines/pipelines_spec.js +++ b/spec/frontend/pipelines/pipelines_spec.js @@ -13,6 +13,7 @@ import waitForPromises from 'helpers/wait_for_promises'; import Api from '~/api'; import { createAlert, VARIANT_WARNING } from '~/flash'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import NavigationControls from '~/pipelines/components/pipelines_list/nav_controls.vue'; import PipelinesComponent from '~/pipelines/components/pipelines_list/pipelines.vue'; import PipelinesCiTemplates from '~/pipelines/components/pipelines_list/empty_state/pipelines_ci_templates.vue'; @@ -702,7 +703,7 @@ 
describe('Pipelines', () => { describe('when pipelines cannot be loaded', () => { beforeEach(async () => { - mock.onGet(mockPipelinesEndpoint).reply(500, {}); + mock.onGet(mockPipelinesEndpoint).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR, {}); }); describe('when user has no permissions', () => { diff --git a/spec/frontend/projects/commit_box/info/load_branches_spec.js b/spec/frontend/projects/commit_box/info/load_branches_spec.js index 9456e6ef5f5..50e305dea7f 100644 --- a/spec/frontend/projects/commit_box/info/load_branches_spec.js +++ b/spec/frontend/projects/commit_box/info/load_branches_spec.js @@ -2,6 +2,7 @@ import axios from 'axios'; import MockAdapter from 'axios-mock-adapter'; import { setHTMLFixture } from 'helpers/fixtures'; import waitForPromises from 'helpers/wait_for_promises'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { loadBranches } from '~/projects/commit_box/info/load_branches'; const mockCommitPath = '/commit/abcd/branches'; @@ -60,7 +61,7 @@ describe('~/projects/commit_box/info/load_branches', () => { describe('when branches request fails', () => { beforeEach(() => { - mock.onGet(mockCommitPath).reply(500, 'Error!'); + mock.onGet(mockCommitPath).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR, 'Error!'); }); it('attempts to load and renders an error', async () => { diff --git a/spec/frontend/projects/commits/store/actions_spec.js b/spec/frontend/projects/commits/store/actions_spec.js index 930b801af71..64eba056c6f 100644 --- a/spec/frontend/projects/commits/store/actions_spec.js +++ b/spec/frontend/projects/commits/store/actions_spec.js @@ -2,6 +2,7 @@ import axios from 'axios'; import MockAdapter from 'axios-mock-adapter'; import testAction from 'helpers/vuex_action_helper'; import { createAlert } from '~/flash'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import actions from '~/projects/commits/store/actions'; import * as types from '~/projects/commits/store/mutation_types'; import createState from '~/projects/commits/store/state'; @@ -63,7 +64,7 @@ describe('Project commits actions', () => { it('dispatches request/receive on error', () => { const path = '/-/autocomplete/users.json'; - mock.onGet(path).replyOnce(500); + mock.onGet(path).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); testAction(actions.fetchAuthors, null, state, [], [{ type: 'receiveAuthorsError' }]); }); diff --git a/spec/frontend/protected_branches/protected_branch_edit_spec.js b/spec/frontend/protected_branches/protected_branch_edit_spec.js index 0aec4fbc037..8e1a09500c7 100644 --- a/spec/frontend/protected_branches/protected_branch_edit_spec.js +++ b/spec/frontend/protected_branches/protected_branch_edit_spec.js @@ -4,6 +4,7 @@ import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import { TEST_HOST } from 'helpers/test_constants'; import { createAlert } from '~/flash'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import ProtectedBranchEdit from '~/protected_branches/protected_branch_edit'; jest.mock('~/flash'); @@ -142,7 +143,7 @@ describe('ProtectedBranchEdit', () => { describe('when clicked and BE error', () => { beforeEach(() => { - mock.onPatch(TEST_URL).replyOnce(500); + mock.onPatch(TEST_URL).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); toggle.click(); }); diff --git a/spec/frontend/ref/components/ref_selector_spec.js b/spec/frontend/ref/components/ref_selector_spec.js index ac588248f15..e12659a0eff 100644 --- 
a/spec/frontend/ref/components/ref_selector_spec.js +++ b/spec/frontend/ref/components/ref_selector_spec.js @@ -9,7 +9,7 @@ import commit from 'test_fixtures/api/commits/commit.json'; import branches from 'test_fixtures/api/branches/branches.json'; import tags from 'test_fixtures/api/tags/tags.json'; import { trimText } from 'helpers/text_helper'; -import { HTTP_STATUS_NOT_FOUND } from '~/lib/utils/http_status'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_NOT_FOUND } from '~/lib/utils/http_status'; import { ENTER_KEY } from '~/lib/utils/keys'; import { sprintf } from '~/locale'; import RefSelector from '~/ref/components/ref_selector.vue'; @@ -400,7 +400,7 @@ describe('Ref selector component', () => { describe('when the branches search returns an error', () => { beforeEach(() => { - branchesApiCallSpy = jest.fn().mockReturnValue([500]); + branchesApiCallSpy = jest.fn().mockReturnValue([HTTP_STATUS_INTERNAL_SERVER_ERROR]); createComponent(); @@ -465,7 +465,7 @@ describe('Ref selector component', () => { describe('when the tags search returns an error', () => { beforeEach(() => { - tagsApiCallSpy = jest.fn().mockReturnValue([500]); + tagsApiCallSpy = jest.fn().mockReturnValue([HTTP_STATUS_INTERNAL_SERVER_ERROR]); createComponent(); @@ -531,7 +531,7 @@ describe('Ref selector component', () => { describe('when the commit search returns an error (other than a 404)', () => { beforeEach(() => { - commitApiCallSpy = jest.fn().mockReturnValue([500]); + commitApiCallSpy = jest.fn().mockReturnValue([HTTP_STATUS_INTERNAL_SERVER_ERROR]); createComponent(); diff --git a/spec/frontend/repository/components/fork_info_spec.js b/spec/frontend/repository/components/fork_info_spec.js index c23d5ae5823..f327a8cfae7 100644 --- a/spec/frontend/repository/components/fork_info_spec.js +++ b/spec/frontend/repository/components/fork_info_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import VueApollo from 'vue-apollo'; -import { GlSkeletonLoader, GlIcon, GlLink } from '@gitlab/ui'; +import { GlSkeletonLoader, GlIcon, GlLink, GlSprintf } from '@gitlab/ui'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; @@ -16,13 +16,14 @@ describe('ForkInfo component', () => { let wrapper; let mockResolver; const forkInfoError = new Error('Something went wrong'); + const projectId = 'gid://gitlab/Project/1'; Vue.use(VueApollo); const createCommitData = ({ ahead = 3, behind = 7 }) => { return { data: { - project: { id: '1', forkDetails: { ahead, behind, __typename: 'ForkDetails' } }, + project: { id: projectId, forkDetails: { ahead, behind, __typename: 'ForkDetails' } }, }, }; }; @@ -35,6 +36,7 @@ describe('ForkInfo component', () => { wrapper = shallowMountExtended(ForkInfo, { apolloProvider: createMockApollo([[forkDetailsQuery, mockResolver]]), propsData: { ...propsForkInfo, ...props }, + stubs: { GlSprintf }, }); return waitForPromises(); }; @@ -42,8 +44,10 @@ describe('ForkInfo component', () => { const findLink = () => wrapper.findComponent(GlLink); const findSkeleton = () => wrapper.findComponent(GlSkeletonLoader); const findIcon = () => wrapper.findComponent(GlIcon); - const findDivergenceMessage = () => wrapper.find('.gl-text-secondary'); + const findDivergenceMessage = () => wrapper.findByTestId('divergence-message'); const findInaccessibleMessage = () => wrapper.findByTestId('inaccessible-project'); + const findCompareLinks = () => 
findDivergenceMessage().findAllComponents(GlLink); + it('displays a skeleton while loading data', async () => { createComponent(); expect(findSkeleton().exists()).toBe(true); @@ -88,28 +92,54 @@ describe('ForkInfo component', () => { expect(findDivergenceMessage().text()).toBe(i18n.unknown); }); - it('shows correct divergence message when data is present', async () => { - await createComponent(); - expect(findDivergenceMessage().text()).toMatchInterpolatedText( - '7 commits behind, 3 commits ahead of the upstream repository.', - ); - }); - it('renders up to date message when divergence is unknown', async () => { await createComponent({}, { ahead: 0, behind: 0 }); expect(findDivergenceMessage().text()).toBe(i18n.upToDate); }); - it('renders commits ahead message', async () => { - await createComponent({}, { behind: 0 }); - expect(findDivergenceMessage().text()).toBe('3 commits ahead of the upstream repository.'); - }); - - it('renders commits behind message', async () => { - await createComponent({}, { ahead: 0 }); - - expect(findDivergenceMessage().text()).toBe('7 commits behind the upstream repository.'); - }); + describe.each([ + { + ahead: 7, + behind: 3, + message: '3 commits behind, 7 commits ahead of the upstream repository.', + firstLink: propsForkInfo.behindComparePath, + secondLink: propsForkInfo.aheadComparePath, + }, + { + ahead: 7, + behind: 0, + message: '7 commits ahead of the upstream repository.', + firstLink: propsForkInfo.aheadComparePath, + secondLink: '', + }, + { + ahead: 0, + behind: 3, + message: '3 commits behind the upstream repository.', + firstLink: propsForkInfo.behindComparePath, + secondLink: '', + }, + ])( + 'renders correct divergence message for ahead: $ahead, behind: $behind divergence commits', + ({ ahead, behind, message, firstLink, secondLink }) => { + beforeEach(async () => { + await createComponent({}, { ahead, behind }); + }); + + it('displays correct text', () => { + expect(findDivergenceMessage().text()).toBe(message); + }); + + it('adds correct links', () => { + const links = findCompareLinks(); + expect(links.at(0).attributes('href')).toBe(firstLink); + + if (secondLink) { + expect(links.at(1).attributes('href')).toBe(secondLink); + } + }); + }, + ); it('renders alert with error message when request fails', async () => { await createComponent({}, {}, true); diff --git a/spec/frontend/repository/mock_data.js b/spec/frontend/repository/mock_data.js index d85434a9148..04ffe52bc3f 100644 --- a/spec/frontend/repository/mock_data.js +++ b/spec/frontend/repository/mock_data.js @@ -120,7 +120,9 @@ export const graphQLErrors = [ export const propsForkInfo = { projectPath: 'nataliia/myGitLab', - selectedRef: 'main', + selectedBranch: 'main', sourceName: 'gitLab', sourcePath: 'gitlab-org/gitlab', + aheadComparePath: '/nataliia/myGitLab/-/compare/main...ref?from_project_id=1', + behindComparePath: 'gitlab-org/gitlab/-/compare/ref...main?from_project_id=2', }; diff --git a/spec/frontend/sidebar/components/labels/labels_select_vue/store/actions_spec.js b/spec/frontend/sidebar/components/labels/labels_select_vue/store/actions_spec.js index 0e0024aa6c2..56d516bf589 100644 --- a/spec/frontend/sidebar/components/labels/labels_select_vue/store/actions_spec.js +++ b/spec/frontend/sidebar/components/labels/labels_select_vue/store/actions_spec.js @@ -3,6 +3,7 @@ import MockAdapter from 'axios-mock-adapter'; import testAction from 'helpers/vuex_action_helper'; import { createAlert } from '~/flash'; import axios from '~/lib/utils/axios_utils'; +import { 
HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import * as actions from '~/sidebar/components/labels/labels_select_vue/store/actions'; import * as types from '~/sidebar/components/labels/labels_select_vue/store/mutation_types'; import defaultState from '~/sidebar/components/labels/labels_select_vue/store/state'; @@ -135,7 +136,7 @@ describe('LabelsSelect Actions', () => { describe('on failure', () => { it('dispatches `requestLabels` & `receiveLabelsFailure` actions', () => { - mock.onGet(/labels.json/).replyOnce(500, {}); + mock.onGet(/labels.json/).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR, {}); return testAction( actions.fetchLabels, @@ -224,7 +225,7 @@ describe('LabelsSelect Actions', () => { describe('on failure', () => { it('dispatches `requestCreateLabel` & `receiveCreateLabelFailure` actions', () => { - mock.onPost(/labels.json/).replyOnce(500, {}); + mock.onPost(/labels.json/).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR, {}); return testAction( actions.createLabel, diff --git a/spec/frontend/snippets/components/snippet_blob_edit_spec.js b/spec/frontend/snippets/components/snippet_blob_edit_spec.js index 33b8e2be969..ad5fca760f4 100644 --- a/spec/frontend/snippets/components/snippet_blob_edit_spec.js +++ b/spec/frontend/snippets/components/snippet_blob_edit_spec.js @@ -6,6 +6,7 @@ import waitForPromises from 'helpers/wait_for_promises'; import BlobHeaderEdit from '~/blob/components/blob_edit_header.vue'; import { createAlert } from '~/flash'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { joinPaths } from '~/lib/utils/url_utility'; import SnippetBlobEdit from '~/snippets/components/snippet_blob_edit.vue'; import SourceEditor from '~/vue_shared/components/source_editor.vue'; @@ -118,7 +119,7 @@ describe('Snippet Blob Edit component', () => { describe('with error', () => { beforeEach(() => { axiosMock.reset(); - axiosMock.onGet(TEST_FULL_PATH).replyOnce(500); + axiosMock.onGet(TEST_FULL_PATH).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR); createComponent(); }); diff --git a/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js b/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js index 73fa4b7b08f..f993b82026d 100644 --- a/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js +++ b/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js @@ -5,6 +5,7 @@ import Vue, { nextTick } from 'vue'; import Vuex from 'vuex'; import { TEST_HOST as FAKE_ENDPOINT } from 'helpers/test_constants'; import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import ArtifactsListApp from '~/vue_merge_request_widget/components/artifacts_list_app.vue'; import { getStoreConfig } from '~/vue_merge_request_widget/stores/artifacts_list'; import { artifacts } from '../mock_data'; @@ -109,7 +110,7 @@ describe('Merge Requests Artifacts list app', () => { describe('with error', () => { beforeEach(() => { createComponent(); - mock.onGet(FAKE_ENDPOINT).reply(500, {}, {}); + mock.onGet(FAKE_ENDPOINT).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR, {}, {}); store.dispatch('receiveArtifactsError'); return nextTick(); }); diff --git a/spec/graphql/types/commit_signatures/verification_status_enum_spec.rb b/spec/graphql/types/commit_signatures/verification_status_enum_spec.rb index cb7ce19c9fc..a0d99f5f0c1 100644 --- 
a/spec/graphql/types/commit_signatures/verification_status_enum_spec.rb +++ b/spec/graphql/types/commit_signatures/verification_status_enum_spec.rb @@ -10,7 +10,7 @@ RSpec.describe GitlabSchema.types['VerificationStatus'] do .to match_array(%w[ UNVERIFIED UNVERIFIED_KEY VERIFIED SAME_USER_DIFFERENT_EMAIL OTHER_USER UNKNOWN_KEY - MULTIPLE_SIGNATURES + MULTIPLE_SIGNATURES REVOKED_KEY ]) end end diff --git a/spec/lib/gitlab/search/found_blob_spec.rb b/spec/lib/gitlab/search/found_blob_spec.rb index c41a051bc42..8efbe053155 100644 --- a/spec/lib/gitlab/search/found_blob_spec.rb +++ b/spec/lib/gitlab/search/found_blob_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Search::FoundBlob do +RSpec.describe Gitlab::Search::FoundBlob, feature_category: :global_search do let(:project) { create(:project, :public, :repository) } describe 'parsing content results' do @@ -17,6 +17,7 @@ RSpec.describe Gitlab::Search::FoundBlob do expect(subject.path).to eq('CHANGELOG') expect(subject.basename).to eq('CHANGELOG') expect(subject.ref).to eq('master') + expect(subject.matched_lines_count).to be_nil expect(subject.startline).to eq(188) expect(subject.data.lines[2]).to eq(" - Feature: Replace teams with group membership\n") end diff --git a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb index 032a5e78385..33e0d446fca 100644 --- a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb @@ -284,6 +284,16 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git end end + context 'for Issue design comment removed actions' do + it_behaves_like 'daily tracked issuable snowplow and service ping events with project' do + let(:action) { described_class::ISSUE_DESIGN_COMMENT_REMOVED } + + def track_action(params) + described_class.track_issue_design_comment_removed_action(**params) + end + end + end + it 'can return the count of actions per user deduplicated', :aggregate_failures do described_class.track_issue_title_changed_action(author: user1, project: project) described_class.track_issue_description_changed_action(author: user1, project: project) diff --git a/spec/models/ci/runner_machine_spec.rb b/spec/models/ci/runner_machine_spec.rb index 9fc35006233..c658058c131 100644 --- a/spec/models/ci/runner_machine_spec.rb +++ b/spec/models/ci/runner_machine_spec.rb @@ -38,10 +38,11 @@ RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model describe '.stale', :freeze_time do subject { described_class.stale.ids } - let!(:runner_machine1) { create(:ci_runner_machine, created_at: 8.days.ago, contacted_at: 7.days.ago) } - let!(:runner_machine2) { create(:ci_runner_machine, created_at: 7.days.ago, contacted_at: nil) } - let!(:runner_machine3) { create(:ci_runner_machine, created_at: 5.days.ago, contacted_at: nil) } - let!(:runner_machine4) do + let!(:runner_machine1) { create(:ci_runner_machine, :stale) } + let!(:runner_machine2) { create(:ci_runner_machine, :stale, contacted_at: nil) } + let!(:runner_machine3) { create(:ci_runner_machine, created_at: 6.months.ago, contacted_at: Time.current) } + let!(:runner_machine4) { create(:ci_runner_machine, created_at: 5.days.ago) } + let!(:runner_machine5) do create(:ci_runner_machine, created_at: (7.days - 1.second).ago, contacted_at: (7.days - 1.second).ago) end diff --git a/spec/models/ci/runner_spec.rb 
b/spec/models/ci/runner_spec.rb index 0fde9e5f187..7b79b4c8a6d 100644 --- a/spec/models/ci/runner_spec.rb +++ b/spec/models/ci/runner_spec.rb @@ -261,14 +261,14 @@ RSpec.describe Ci::Runner, feature_category: :runner do describe '.belonging_to_project' do it 'returns the project runner' do # own - specific_project = create(:project) - specific_runner = create(:ci_runner, :project, projects: [specific_project]) + own_project = create(:project) + own_runner = create(:ci_runner, :project, projects: [own_project]) # other other_project = create(:project) create(:ci_runner, :project, projects: [other_project]) - expect(described_class.belonging_to_project(specific_project.id)).to eq [specific_runner] + expect(described_class.belonging_to_project(own_project.id)).to eq [own_runner] end end diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb index 92f4d6d8531..f1bc7b41cee 100644 --- a/spec/models/key_spec.rb +++ b/spec/models/key_spec.rb @@ -489,4 +489,12 @@ RSpec.describe Key, :mailer do end end end + + describe '#signing?' do + it 'returns whether a key can be used for signing' do + expect(build(:key, usage_type: :signing)).to be_signing + expect(build(:key, usage_type: :auth_and_signing)).to be_signing + expect(build(:key, usage_type: :auth)).not_to be_signing + end + end end diff --git a/spec/requests/api/graphql/ci/config_variables_spec.rb b/spec/requests/api/graphql/ci/config_variables_spec.rb index e6d73701b8f..f76bb8ff837 100644 --- a/spec/requests/api/graphql/ci/config_variables_spec.rb +++ b/spec/requests/api/graphql/ci/config_variables_spec.rb @@ -14,13 +14,13 @@ RSpec.describe 'Query.project(fullPath).ciConfigVariables(sha)', feature_categor let_it_be(:user) { create(:user) } let(:service) { Ci::ListConfigVariablesService.new(project, user) } - let(:sha) { project.repository.commit.sha } + let(:ref) { project.default_branch } let(:query) do %( query { project(fullPath: "#{project.full_path}") { - ciConfigVariables(sha: "#{sha}") { + ciConfigVariables(sha: "#{ref}") { key value valueOptions @@ -47,7 +47,7 @@ RSpec.describe 'Query.project(fullPath).ciConfigVariables(sha)', feature_categor it 'returns the CI variables for the config' do expect(service) .to receive(:execute) - .with(sha) + .with(ref) .and_call_original post_graphql(query, current_user: user) diff --git a/spec/requests/profiles/keys_controller_spec.rb b/spec/requests/profiles/keys_controller_spec.rb new file mode 100644 index 00000000000..48c382e6230 --- /dev/null +++ b/spec/requests/profiles/keys_controller_spec.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Profiles::KeysController, feature_category: :source_code_management do + let_it_be(:user) { create(:user) } + + before do + login_as(user) + end + + describe 'DELETE /-/profile/keys/:id/revoke' do + it 'returns 404 if a key not found' do + delete revoke_profile_key_path(non_existing_record_id) + + expect(response).to have_gitlab_http_status(:not_found) + end + + it 'revokes ssh commit signatures' do + key = create(:key, user: user) + signature = create(:ssh_signature, key: key) + + expect do + delete revoke_profile_key_path(signature.key) + end.to change { signature.reload.key }.from(signature.key).to(nil) + .and change { signature.verification_status }.from('verified').to('revoked_key') + + expect(response).to have_gitlab_http_status(:found) + end + end +end diff --git a/spec/rubocop/migration_helpers_spec.rb b/spec/rubocop/migration_helpers_spec.rb index 6e6c3a7a0b9..56b7b6aa76b 100644 --- 
a/spec/rubocop/migration_helpers_spec.rb +++ b/spec/rubocop/migration_helpers_spec.rb @@ -2,6 +2,7 @@ require 'fast_spec_helper' require 'rspec-parameterized' +require 'rubocop/ast' require_relative '../../rubocop/migration_helpers' @@ -69,4 +70,27 @@ RSpec.describe RuboCop::MigrationHelpers do it { expect(fake_cop.time_enforced?(node)).to eq(expected) } end end + + describe '#array_column?' do + let(:name) { nil } + let(:node) { double(:node, each_descendant: [pair_node]) } + let(:pair_node) { double(child_nodes: child_nodes) } + + context 'when it matches array: true' do + let(:child_nodes) do + [ + RuboCop::AST::SymbolNode.new(:sym, [:array]), + RuboCop::AST::Node.new(:true) # rubocop:disable Lint/BooleanSymbol + ] + end + + it { expect(fake_cop.array_column?(node)).to eq(true) } + end + + context 'when it matches a variable => 100' do + let(:child_nodes) { [RuboCop::AST::Node.new(:lvar, [:variable]), RuboCop::AST::IntNode.new(:int, [100])] } + + it { expect(fake_cop.array_column?(node)).to eq(false) } + end + end end diff --git a/spec/services/ci/list_config_variables_service_spec.rb b/spec/services/ci/list_config_variables_service_spec.rb index 5b865914d1b..e2bbdefef7f 100644 --- a/spec/services/ci/list_config_variables_service_spec.rb +++ b/spec/services/ci/list_config_variables_service_spec.rb @@ -2,19 +2,21 @@ require 'spec_helper' -RSpec.describe Ci::ListConfigVariablesService, :use_clean_rails_memory_store_caching do +RSpec.describe Ci::ListConfigVariablesService, +:use_clean_rails_memory_store_caching, feature_category: :pipeline_authoring do include ReactiveCachingHelpers let(:ci_config) { {} } let(:files) { { '.gitlab-ci.yml' => YAML.dump(ci_config) } } let(:project) { create(:project, :custom_repo, :auto_devops_disabled, files: files) } let(:user) { project.creator } - let(:sha) { project.default_branch } + let(:ref) { project.default_branch } + let(:sha) { project.commit(ref).sha } let(:service) { described_class.new(project, user) } - subject(:result) { service.execute(sha) } + subject(:result) { service.execute(ref) } - context 'when sending a valid sha' do + context 'when sending a valid ref' do let(:ci_config) do { variables: { @@ -109,8 +111,8 @@ RSpec.describe Ci::ListConfigVariablesService, :use_clean_rails_memory_store_cac end end - context 'when sending an invalid sha' do - let(:sha) { 'invalid-sha' } + context 'when sending an invalid ref' do + let(:ref) { 'invalid-ref' } let(:ci_config) { nil } before do diff --git a/spec/services/ci/runners/stale_machines_cleanup_service_spec.rb b/spec/services/ci/runners/stale_machines_cleanup_service_spec.rb new file mode 100644 index 00000000000..456dbcebb84 --- /dev/null +++ b/spec/services/ci/runners/stale_machines_cleanup_service_spec.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::Runners::StaleMachinesCleanupService, feature_category: :runner_fleet do + let(:service) { described_class.new } + let!(:runner_machine3) { create(:ci_runner_machine, created_at: 6.months.ago, contacted_at: Time.current) } + + subject(:response) { service.execute } + + context 'with no stale runner machines' do + it 'does not clean any runner machines and returns :success status' do + expect do + expect(response).to be_success + expect(response.payload).to match({ deleted_machines: false }) + end.not_to change { Ci::RunnerMachine.count }.from(1) + end + end + + context 'with some stale runner machines' do + before do + create(:ci_runner_machine, :stale) + create(:ci_runner_machine, :stale, 
contacted_at: nil) + end + + it 'only leaves non-stale runners' do + expect(response).to be_success + expect(response.payload).to match({ deleted_machines: true }) + expect(Ci::RunnerMachine.all).to contain_exactly(runner_machine3) + end + + context 'with more stale runners than MAX_DELETIONS' do + before do + stub_const("#{described_class}::MAX_DELETIONS", 1) + end + + it 'only leaves non-stale runners' do + expect do + expect(response).to be_success + expect(response.payload).to match({ deleted_machines: true }) + end.to change { Ci::RunnerMachine.count }.by(-Ci::Runners::StaleMachinesCleanupService::MAX_DELETIONS) + end + end + end +end diff --git a/spec/services/keys/revoke_service_spec.rb b/spec/services/keys/revoke_service_spec.rb new file mode 100644 index 00000000000..ec07701b4b7 --- /dev/null +++ b/spec/services/keys/revoke_service_spec.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Keys::RevokeService, feature_category: :source_code_management do + let(:user) { create(:user) } + + subject(:service) { described_class.new(user) } + + it 'destroys a key' do + key = create(:key) + + expect { service.execute(key) }.to change { key.persisted? }.from(true).to(false) + end + + it 'unverifies associated signatures' do + key = create(:key) + signature = create(:ssh_signature, key: key) + + expect do + service.execute(key) + end.to change { signature.reload.key }.from(key).to(nil) + .and change { signature.reload.verification_status }.from('verified').to('revoked_key') + end + + it 'does not unverify signatures if destroy fails' do + key = create(:key) + signature = create(:ssh_signature, key: key) + + expect(key).to receive(:destroy).and_return(false) + + expect { service.execute(key) }.not_to change { signature.reload.verification_status } + expect(key).to be_persisted + end + + context 'when revoke_ssh_signatures disabled' do + before do + stub_feature_flags(revoke_ssh_signatures: false) + end + + it 'does not unverify signatures' do + key = create(:key) + signature = create(:ssh_signature, key: key) + + expect { service.execute(key) }.not_to change { signature.reload.verification_status } + end + end +end diff --git a/spec/services/notes/destroy_service_spec.rb b/spec/services/notes/destroy_service_spec.rb index 82caec52aee..744808525f5 100644 --- a/spec/services/notes/destroy_service_spec.rb +++ b/spec/services/notes/destroy_service_spec.rb @@ -91,5 +91,13 @@ RSpec.describe Notes::DestroyService do end end end + + it 'tracks design comment removal' do + note = create(:note_on_design, project: project) + expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_design_comment_removed_action).with(author: note.author, + project: project) + + described_class.new(project, user).execute(note) + end end end diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml index 6cba9332347..e50b08d8fbe 100644 --- a/spec/support/rspec_order_todo.yml +++ b/spec/support/rspec_order_todo.yml @@ -1674,7 +1674,6 @@ - './ee/spec/models/broadcast_message_spec.rb' - './ee/spec/models/burndown_spec.rb' - './ee/spec/models/ci/bridge_spec.rb' -- './ee/spec/models/ci/build_spec.rb' - './ee/spec/models/ci/daily_build_group_report_result_spec.rb' - './ee/spec/models/ci/minutes/additional_pack_spec.rb' - './ee/spec/models/ci/minutes/context_spec.rb' diff --git a/spec/views/profiles/keys/_key.html.haml_spec.rb b/spec/views/profiles/keys/_key.html.haml_spec.rb index d2e27bd2ee0..09053a29fe0 100644 ---
a/spec/views/profiles/keys/_key.html.haml_spec.rb +++ b/spec/views/profiles/keys/_key.html.haml_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'profiles/keys/_key.html.haml' do +RSpec.describe 'profiles/keys/_key.html.haml', feature_category: :authentication_and_authorization do let_it_be(:user) { create(:user) } before do @@ -27,15 +27,18 @@ RSpec.describe 'profiles/keys/_key.html.haml' do expect(rendered).to have_text(l(key.last_used_at, format: "%b %d, %Y")) expect(rendered).to have_text(l(key.created_at, format: "%b %d, %Y")) expect(rendered).to have_text(key.expires_at.to_date) - expect(response).to render_template(partial: 'shared/ssh_keys/_key_delete') + expect(rendered).to have_button('Remove') end context 'displays the usage type' do - where(:usage_type, :usage_type_text) do + where(:usage_type, :usage_type_text, :displayed_buttons, :hidden_buttons, :revoke_ssh_signatures_ff) do [ - [:auth, 'Authentication'], - [:auth_and_signing, 'Authentication & Signing'], - [:signing, 'Signing'] + [:auth, 'Authentication', ['Remove'], ['Revoke'], true], + [:auth_and_signing, 'Authentication & Signing', %w[Remove Revoke], [], true], + [:signing, 'Signing', %w[Remove Revoke], [], true], + [:auth, 'Authentication', ['Remove'], ['Revoke'], false], + [:auth_and_signing, 'Authentication & Signing', %w[Remove], ['Revoke'], false], + [:signing, 'Signing', %w[Remove], ['Revoke'], false] ] end @@ -47,6 +50,20 @@ RSpec.describe 'profiles/keys/_key.html.haml' do expect(rendered).to have_text(usage_type_text) end + + it 'renders remove/revoke buttons', :aggregate_failures do + stub_feature_flags(revoke_ssh_signatures: revoke_ssh_signatures_ff) + + render + + displayed_buttons.each do |button| + expect(rendered).to have_text(button) + end + + hidden_buttons.each do |button| + expect(rendered).not_to have_text(button) + end + end end end @@ -98,7 +115,8 @@ RSpec.describe 'profiles/keys/_key.html.haml' do it 'does not render the partial' do render - expect(response).not_to render_template(partial: 'shared/ssh_keys/_key_delete') + expect(response).not_to have_text('Remove') + expect(response).not_to have_text('Revoke') end end diff --git a/spec/views/shared/ssh_keys/_key_delete.html.haml_spec.rb b/spec/views/shared/ssh_keys/_key_delete.html.haml_spec.rb index c9bdcabb4b6..5cef3a949d3 100644 --- a/spec/views/shared/ssh_keys/_key_delete.html.haml_spec.rb +++ b/spec/views/shared/ssh_keys/_key_delete.html.haml_spec.rb @@ -2,21 +2,9 @@ require 'spec_helper' RSpec.describe 'shared/ssh_keys/_key_delete.html.haml' do - context 'when the icon parameter is used' do - it 'has text' do - render partial: 'shared/ssh_keys/key_delete', formats: :html, locals: { icon: true, button_data: '' } + it 'has text' do + render partial: 'shared/ssh_keys/key_delete', formats: :html, locals: { button_data: '' } - expect(rendered).not_to have_button('Delete') - expect(rendered).to have_selector('[data-testid=remove-icon]') - end - end - - context 'when the icon parameter is not used' do - it 'does not have text' do - render partial: 'shared/ssh_keys/key_delete', formats: :html, locals: { button_data: '' } - - expect(rendered).to have_button('Delete') - expect(rendered).not_to have_selector('[data-testid=remove-icon]') - end + expect(rendered).to have_button('Delete') end end diff --git a/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb b/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb new file mode 100644 index 00000000000..d8f620bc024 --- /dev/null +++ 
b/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::Runners::StaleMachinesCleanupCronWorker, feature_category: :runner_fleet do + let(:worker) { described_class.new } + + describe '#perform', :freeze_time do + subject(:perform) { worker.perform } + + let!(:runner_machine1) do + create(:ci_runner_machine, created_at: 7.days.ago, contacted_at: 7.days.ago) + end + + let!(:runner_machine2) { create(:ci_runner_machine) } + let!(:runner_machine3) { create(:ci_runner_machine, created_at: 6.days.ago) } + + it_behaves_like 'an idempotent worker' do + it 'delegates to Ci::Runners::StaleMachinesCleanupService' do + expect_next_instance_of(Ci::Runners::StaleMachinesCleanupService) do |service| + expect(service) + .to receive(:execute).and_call_original + end + + perform + + expect(worker.logging_extras).to eq({ + "extra.ci_runners_stale_machines_cleanup_cron_worker.status" => :success, + "extra.ci_runners_stale_machines_cleanup_cron_worker.deleted_machines" => true + }) + end + + it 'cleans up stale runner machines', :aggregate_failures do + expect(Ci::RunnerMachine.stale.count).to eq 1 + + expect { perform }.to change { Ci::RunnerMachine.count }.from(3).to(2) + + expect(Ci::RunnerMachine.all).to match_array [runner_machine2, runner_machine3] + end + end + end +end diff --git a/spec/workers/concerns/application_worker_spec.rb b/spec/workers/concerns/application_worker_spec.rb index 5fde54b98f0..0abb029f146 100644 --- a/spec/workers/concerns/application_worker_spec.rb +++ b/spec/workers/concerns/application_worker_spec.rb @@ -103,6 +103,15 @@ RSpec.describe ApplicationWorker do expect(instance.logging_extras).to eq({ 'extra.gitlab_foo_bar_dummy_worker.key1' => "value1", 'extra.gitlab_foo_bar_dummy_worker.key2' => "value2" }) end + it 'returns extra data to be logged that was set from #log_hash_metadata_on_done' do + instance.log_hash_metadata_on_done({ key1: 'value0', key2: 'value1' }) + + expect(instance.logging_extras).to match_array({ + 'extra.gitlab_foo_bar_dummy_worker.key1' => 'value0', + 'extra.gitlab_foo_bar_dummy_worker.key2' => 'value1' + }) + end + context 'when nothing is set' do it 'returns {}' do expect(instance.logging_extras).to eq({})