| author | GitLab Bot <gitlab-bot@gitlab.com> | 2022-01-25 18:11:55 +0000 |
| --- | --- | --- |
| committer | GitLab Bot <gitlab-bot@gitlab.com> | 2022-01-25 18:11:55 +0000 |
| commit | 2b2d833ab3e78f8c9f626af950a16d43fc38c9f8 (patch) | |
| tree | 13c101679f3cda5808affea46709207a24f4a3c9 | |
| parent | 7d8d5a3dab415672a41ab29c3bfa9581f275dc50 (diff) | |
| download | gitlab-ce-2b2d833ab3e78f8c9f626af950a16d43fc38c9f8.tar.gz | |
Add latest changes from gitlab-org/gitlab@master
95 files changed, 2048 insertions, 481 deletions
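To inspect this change locally rather than reading the raw patch below, a minimal sketch using the commit and parent hashes from the table above (this assumes you already have a local clone of the gitlab-ce repository that contains these commits; the commands themselves are plain git):

```shell
# Show the commit message and the full patch for this commit:
git show 2b2d833ab3e78f8c9f626af950a16d43fc38c9f8

# Or compare it against its parent explicitly, with a per-file summary:
git diff --stat 7d8d5a3dab415672a41ab29c3bfa9581f275dc50 2b2d833ab3e78f8c9f626af950a16d43fc38c9f8
```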
diff --git a/.gitignore b/.gitignore index 03f77ed89e5..bdd3ac98876 100644 --- a/.gitignore +++ b/.gitignore @@ -79,6 +79,7 @@ eslint-report.html /deprecations/ /knapsack/ /rspec_flaky/ +/rspec/ /locale/**/LC_MESSAGES /locale/**/*.time_stamp /.rspec diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 314f99c5f41..ababac4571f 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -69,10 +69,14 @@ variables: GET_SOURCES_ATTEMPTS: "3" KNAPSACK_RSPEC_SUITE_REPORT_PATH: knapsack/report-master.json - FLAKY_RSPEC_SUITE_REPORT_PATH: rspec_flaky/report-suite.json + FLAKY_RSPEC_SUITE_REPORT_PATH: rspec/flaky/report-suite.json RSPEC_TESTS_MAPPING_PATH: crystalball/mapping.json RSPEC_PACKED_TESTS_MAPPING_PATH: crystalball/packed-mapping.json + RSPEC_PROFILING_FOLDER_PATH: rspec/profiling FRONTEND_FIXTURES_MAPPING_PATH: crystalball/frontend_fixtures_mapping.json + RSPEC_LAST_RUN_RESULTS_FILE: rspec/rspec_last_run_results.txt + JUNIT_RESULT_FILE: rspec/junit_rspec.xml + JUNIT_RETRY_FILE: rspec/junit_rspec-retry.xml ES_JAVA_OPTS: "-Xms256m -Xmx256m" ELASTIC_URL: "http://elastic:changeme@elasticsearch:9200" diff --git a/.gitlab/ci/memory.gitlab-ci.yml b/.gitlab/ci/memory.gitlab-ci.yml index 9234b116ff8..520f77cd8ca 100644 --- a/.gitlab/ci/memory.gitlab-ci.yml +++ b/.gitlab/ci/memory.gitlab-ci.yml @@ -11,7 +11,9 @@ metrics: "${METRICS_FILE}" expire_in: 31d -memory-static: +# Disabled since it causes intermittent SIGSEGV in CI: +# https://gitlab.com/gitlab-org/gitlab/-/issues/350296 +.memory-static: extends: .only-code-memory-job-base stage: test needs: ["setup-test-env"] diff --git a/.gitlab/ci/rails.gitlab-ci.yml b/.gitlab/ci/rails.gitlab-ci.yml index 4b9c91e4e94..2647e38a793 100644 --- a/.gitlab/ci/rails.gitlab-ci.yml +++ b/.gitlab/ci/rails.gitlab-ci.yml @@ -29,7 +29,9 @@ GITLAB_USE_MODEL_LOAD_BALANCING: "true" .rspec-base: - extends: .rails-job-base + extends: + - .rails-job-base + - .base-artifacts stage: test variables: RUBY_GC_MALLOC_LIMIT: 67108864 @@ -40,6 +42,8 @@ script: - !reference [.base-script, script] - rspec_paralellized_job "--tag ~quarantine --tag ~geo --tag ~level:migration" + +.base-artifacts: artifacts: expire_in: 31d when: always @@ -48,16 +52,17 @@ - crystalball/ - deprecations/ - knapsack/ - - rspec_flaky/ - - rspec_profiling/ + - rspec/ - tmp/capybara/ - tmp/memory_test/ - log/*.log reports: - junit: junit_rspec.xml + junit: ${JUNIT_RESULT_FILE} .rspec-base-migration: - extends: .rails:rules:ee-and-foss-migration + extends: + - .base-artifacts + - .rails:rules:ee-and-foss-migration script: - !reference [.base-script, script] - rspec_paralellized_job "--tag ~quarantine --tag ~geo --tag level:migration" @@ -550,7 +555,6 @@ rspec:coverage: - rspec-ee integration pg12 geo minimal - rspec-ee system pg12 geo minimal # Memory jobs - - memory-static - memory-on-boot # As-if-FOSS jobs - rspec migration pg12-as-if-foss @@ -617,54 +621,26 @@ rspec:feature-flags: run_timed_command "bundle exec scripts/used-feature-flags"; fi -rspec:skipped-flaky-tests-report: +rspec:flaky-tests-report: extends: - .default-retry - - .rails:rules:skipped-flaky-tests-report - image: ruby:2.7-alpine + - .rails:rules:flaky-tests-report stage: post-test # We cannot use needs since it would mean needing 84 jobs (since most are parallelized) # so we use `dependencies` here. 
- dependencies: - # FOSS/EE jobs - - rspec migration pg12 - - rspec unit pg12 - - rspec integration pg12 - - rspec system pg12 - # FOSS/EE minimal jobs - - rspec migration pg12 minimal - - rspec unit pg12 minimal - - rspec integration pg12 minimal - - rspec system pg12 minimal - # EE jobs - - rspec-ee migration pg12 - - rspec-ee unit pg12 - - rspec-ee integration pg12 - - rspec-ee system pg12 - # EE minimal jobs - - rspec-ee migration pg12 minimal - - rspec-ee unit pg12 minimal - - rspec-ee integration pg12 minimal - - rspec-ee system pg12 minimal - # Geo jobs - - rspec-ee unit pg12 geo - - rspec-ee integration pg12 geo - - rspec-ee system pg12 geo - # Geo minimal jobs - - rspec-ee unit pg12 geo minimal - - rspec-ee integration pg12 geo minimal - - rspec-ee system pg12 geo minimal + dependencies: !reference ["rspec:coverage", "dependencies"] variables: - SKIPPED_FLAKY_TESTS_REPORT: skipped_flaky_tests_report.txt + SKIPPED_FLAKY_TESTS_REPORT_PATH: rspec/flaky/skipped_flaky_tests_report.txt + RETRIED_TESTS_REPORT_PATH: rspec/flaky/retried_tests_report.txt before_script: - - 'echo "SKIP_FLAKY_TESTS_AUTOMATICALLY: $SKIP_FLAKY_TESTS_AUTOMATICALLY"' - - mkdir -p rspec_flaky + - source scripts/utils.sh + - source scripts/rspec_helpers.sh script: - - find rspec_flaky/ -type f -name 'skipped_flaky_tests_*_report.txt' -exec cat {} + >> "${SKIPPED_FLAKY_TESTS_REPORT}" + - generate_flaky_tests_reports artifacts: expire_in: 31d paths: - - ${SKIPPED_FLAKY_TESTS_REPORT} + - rspec/ # EE/FOSS: default refs (MRs, default branch, schedules) jobs # ####################################################### diff --git a/.gitlab/ci/rules.gitlab-ci.yml b/.gitlab/ci/rules.gitlab-ci.yml index 58f5e716bc5..5e183177a63 100644 --- a/.gitlab/ci/rules.gitlab-ci.yml +++ b/.gitlab/ci/rules.gitlab-ci.yml @@ -121,9 +121,6 @@ .if-security-pipeline-merge-result: &if-security-pipeline-merge-result if: '$CI_PIPELINE_SOURCE == "merge_request_event" && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH && $CI_PROJECT_NAMESPACE == "gitlab-org/security" && $GITLAB_USER_LOGIN == "gitlab-release-tools-bot"' -.if-skip-flaky-tests-automatically: &if-skip-flaky-tests-automatically - if: '$SKIP_FLAKY_TESTS_AUTOMATICALLY == "true"' - #################### # Changes patterns # #################### @@ -1436,13 +1433,16 @@ when: never - changes: *code-backstage-patterns -.rails:rules:skipped-flaky-tests-report: +.rails:rules:flaky-tests-report: rules: - <<: *if-not-ee when: never - - <<: *if-skip-flaky-tests-automatically + - if: '$SKIP_FLAKY_TESTS_AUTOMATICALLY == "true" || $RETRY_FAILED_TESTS_IN_NEW_PROCESS == "true"' changes: *code-backstage-patterns - - changes: *ci-patterns + when: always + - if: '$SKIP_FLAKY_TESTS_AUTOMATICALLY == "true" || $RETRY_FAILED_TESTS_IN_NEW_PROCESS == "true"' + changes: *ci-patterns + when: always ######################### # Static analysis rules # diff --git a/.gitlab/ci/test-metadata.gitlab-ci.yml b/.gitlab/ci/test-metadata.gitlab-ci.yml index d0d45cb9294..8f4aca730e7 100644 --- a/.gitlab/ci/test-metadata.gitlab-ci.yml +++ b/.gitlab/ci/test-metadata.gitlab-ci.yml @@ -6,8 +6,7 @@ expire_in: 31d paths: - knapsack/ - - rspec_flaky/ - - rspec_profiling/ + - rspec/ - crystalball/ retrieve-tests-metadata: @@ -44,6 +43,6 @@ update-tests-metadata: script: - run_timed_command "retry gem install fog-aws mime-types activesupport rspec_profiling postgres-copy --no-document" - source ./scripts/rspec_helpers.sh - - test -f rspec_flaky/report-suite.json || echo -e "\e[31m" 'Consider add ~"pipeline:run-all-rspec" to 
run full rspec jobs' "\e[0m" + - test -f "${FLAKY_RSPEC_SUITE_REPORT_PATH}" || echo -e "\e[31m" 'Consider add ~"pipeline:run-all-rspec" to run full rspec jobs' "\e[0m" - update_tests_metadata - update_tests_mapping diff --git a/GITLAB_SHELL_VERSION b/GITLAB_SHELL_VERSION index 35d41ddb27d..9e9323fc86a 100644 --- a/GITLAB_SHELL_VERSION +++ b/GITLAB_SHELL_VERSION @@ -1 +1 @@ -13.22.2 +13.23.0 @@ -466,16 +466,9 @@ gem 'sys-filesystem', '~> 1.4.3' # NTP client gem 'net-ntp' -# SSH host key support -gem 'net-ssh', '~> 6.0' +# SSH keys support gem 'ssh_data', '~> 1.2' -# Required for ED25519 SSH host key support -group :ed25519 do - gem 'ed25519', '~> 1.2' - gem 'bcrypt_pbkdf', '~> 1.1' -end - # Spamcheck GRPC protocol definitions gem 'spamcheck', '~> 0.1.0' diff --git a/Gemfile.lock b/Gemfile.lock index 1c47f52dff1..2dee6391d82 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -137,7 +137,6 @@ GEM base32 (0.3.2) batch-loader (2.0.1) bcrypt (3.1.16) - bcrypt_pbkdf (1.1.0) benchmark (0.1.1) benchmark-ips (2.3.0) benchmark-memory (0.1.2) @@ -308,7 +307,6 @@ GEM e2mmap (0.1.0) ecma-re-validator (0.3.0) regexp_parser (~> 2.0) - ed25519 (1.2.4) elasticsearch (6.8.2) elasticsearch-api (= 6.8.2) elasticsearch-transport (= 6.8.2) @@ -1408,7 +1406,6 @@ DEPENDENCIES base32 (~> 0.3.0) batch-loader (~> 2.0.1) bcrypt (~> 3.1, >= 3.1.14) - bcrypt_pbkdf (~> 1.1) benchmark-ips (~> 2.3.0) benchmark-memory (~> 0.1) better_errors (~> 2.9.0) @@ -1441,7 +1438,6 @@ DEPENDENCIES discordrb-webhooks (~> 3.4) doorkeeper (~> 5.5.0.rc2) doorkeeper-openid_connect (~> 1.7.5) - ed25519 (~> 1.2) elasticsearch-api (~> 6.8.2) elasticsearch-model (~> 6.1) elasticsearch-rails (~> 6.1) @@ -1542,7 +1538,6 @@ DEPENDENCIES multi_json (~> 1.14.1) net-ldap (~> 0.16.3) net-ntp - net-ssh (~> 6.0) nokogiri (~> 1.12) oauth2 (~> 1.4) octokit (~> 4.15) diff --git a/app/assets/javascripts/batch_comments/components/draft_note.vue b/app/assets/javascripts/batch_comments/components/draft_note.vue index a218624f2d4..ae728d834e2 100644 --- a/app/assets/javascripts/batch_comments/components/draft_note.vue +++ b/app/assets/javascripts/batch_comments/components/draft_note.vue @@ -115,10 +115,15 @@ export default { ></div> <p class="draft-note-actions d-flex"> - <publish-button :show-count="true" :should-publish="false" category="secondary" /> + <publish-button + :show-count="true" + :should-publish="false" + category="secondary" + :disabled="isPublishingDraft(draft.id)" + /> <gl-button - ref="publishNowButton" - :loading="isPublishingDraft(draft.id) || isPublishing" + :disabled="isPublishing" + :loading="isPublishingDraft(draft.id)" class="gl-ml-3" @click="publishNow" > diff --git a/app/assets/javascripts/boards/components/board_content_sidebar.vue b/app/assets/javascripts/boards/components/board_content_sidebar.vue index 156029b62b0..0320b4d925e 100644 --- a/app/assets/javascripts/boards/components/board_content_sidebar.vue +++ b/app/assets/javascripts/boards/components/board_content_sidebar.vue @@ -184,29 +184,15 @@ export default { :issuable-type="issuableType" data-testid="sidebar-milestones" /> - <template v-if="!glFeatures.iterationCadences"> - <sidebar-dropdown-widget - v-if="iterationFeatureAvailable && !isIncidentSidebar" - :iid="activeBoardItem.iid" - issuable-attribute="iteration" - :workspace-path="projectPathForActiveIssue" - :attr-workspace-path="groupPathForActiveIssue" - :issuable-type="issuableType" - class="gl-mt-5" - data-testid="iteration-edit" - /> - </template> - <template v-else> - <iteration-sidebar-dropdown-widget - 
v-if="iterationFeatureAvailable && !isIncidentSidebar" - :iid="activeBoardItem.iid" - :workspace-path="projectPathForActiveIssue" - :attr-workspace-path="groupPathForActiveIssue" - :issuable-type="issuableType" - class="gl-mt-5" - data-testid="iteration-edit" - /> - </template> + <iteration-sidebar-dropdown-widget + v-if="iterationFeatureAvailable && !isIncidentSidebar" + :iid="activeBoardItem.iid" + :workspace-path="projectPathForActiveIssue" + :attr-workspace-path="groupPathForActiveIssue" + :issuable-type="issuableType" + class="gl-mt-5" + data-testid="iteration-edit" + /> </div> <board-sidebar-time-tracker /> <sidebar-date-widget diff --git a/app/assets/javascripts/ide/components/activity_bar.vue b/app/assets/javascripts/ide/components/activity_bar.vue index 846b4d92724..92dacf8c94a 100644 --- a/app/assets/javascripts/ide/components/activity_bar.vue +++ b/app/assets/javascripts/ide/components/activity_bar.vue @@ -1,5 +1,5 @@ <script> -import { GlIcon, GlTooltipDirective } from '@gitlab/ui'; +import { GlIcon, GlTooltipDirective, GlBadge } from '@gitlab/ui'; import { mapActions, mapState } from 'vuex'; import { BV_HIDE_TOOLTIP } from '~/lib/utils/constants'; import { leftSidebarViews } from '../constants'; @@ -7,6 +7,7 @@ import { leftSidebarViews } from '../constants'; export default { components: { GlIcon, + GlBadge, }, directives: { GlTooltip: GlTooltipDirective, @@ -82,9 +83,13 @@ export default { @click.prevent="changedActivityView($event, $options.leftSidebarViews.commit.name)" > <gl-icon name="commit" /> - <div v-if="stagedFiles.length > 0" class="ide-commit-badge badge badge-pill"> + <gl-badge + v-if="stagedFiles.length" + class="gl-absolute gl-px-2 gl-top-3 gl-right-3 gl-font-weight-bold gl-bg-gray-900! gl-text-white!" + size="sm" + > {{ stagedFiles.length }} - </div> + </gl-badge> </button> </li> </ul> diff --git a/app/assets/javascripts/main.js b/app/assets/javascripts/main.js index 376134afef0..f78b4da181e 100644 --- a/app/assets/javascripts/main.js +++ b/app/assets/javascripts/main.js @@ -15,11 +15,11 @@ import { initRails } from '~/lib/utils/rails_ujs'; import * as popovers from '~/popovers'; import * as tooltips from '~/tooltips'; import { initPrefetchLinks } from '~/lib/utils/navigation_utility'; +import { logHelloDeferred } from 'jh_else_ce/lib/logger/hello_deferred'; import initAlertHandler from './alert_handler'; import { addDismissFlashClickListener } from './flash'; import initTodoToggle from './header'; import initLayoutNav from './layout_nav'; -import { logHelloDeferred } from './lib/logger/hello_deferred'; import { handleLocationHash, addSelectOnFocusBehaviour } from './lib/utils/common_utils'; import { localTimeAgo } from './lib/utils/datetime/timeago_utility'; import { getLocationHash, visitUrl } from './lib/utils/url_utility'; diff --git a/app/assets/stylesheets/page_bundles/ide.scss b/app/assets/stylesheets/page_bundles/ide.scss index d37171bc75e..6c270852e53 100644 --- a/app/assets/stylesheets/page_bundles/ide.scss +++ b/app/assets/stylesheets/page_bundles/ide.scss @@ -630,7 +630,6 @@ $ide-commit-header-height: 48px; width: 1px; background: var(--ide-highlight-background, $white); } - } &.is-right { @@ -642,17 +641,6 @@ $ide-commit-header-height: 48px; left: -1px; } } - - .ide-commit-badge { - background-color: var(--ide-highlight-accent, $almost-black) !important; - color: var(--ide-highlight-background, $white) !important; - position: absolute; - left: 38px; - top: $gl-padding-8; - font-size: $gl-font-size-12; - padding: 2px $gl-padding-4; - font-weight: 
$gl-font-weight-bold !important; - } } .ide-activity-bar { diff --git a/app/graphql/resolvers/ci/runners_resolver.rb b/app/graphql/resolvers/ci/runners_resolver.rb index 9848b5a503f..e221dfea4d0 100644 --- a/app/graphql/resolvers/ci/runners_resolver.rb +++ b/app/graphql/resolvers/ci/runners_resolver.rb @@ -9,7 +9,12 @@ module Resolvers argument :active, ::GraphQL::Types::Boolean, required: false, - description: 'Filter runners by active (true) or paused (false) status.' + description: 'Filter runners by `active` (true) or `paused` (false) status.', + deprecated: { reason: :renamed, replacement: 'paused', milestone: '14.8' } + + argument :paused, ::GraphQL::Types::Boolean, + required: false, + description: 'Filter runners by `paused` (true) or `active` (false) status.' argument :status, ::Types::Ci::RunnerStatusEnum, required: false, @@ -41,8 +46,11 @@ module Resolvers protected def runners_finder_params(params) + # Give preference to paused argument over the deprecated 'active' argument + paused = params.fetch(:paused, params[:active] ? !params[:active] : nil) + { - active: params[:active], + active: paused.nil? ? nil : !paused, status_status: params[:status]&.to_s, type_type: params[:type], tag_name: params[:tag_list], diff --git a/app/models/concerns/issuable.rb b/app/models/concerns/issuable.rb index dcd80201d3f..b32ca064954 100644 --- a/app/models/concerns/issuable.rb +++ b/app/models/concerns/issuable.rb @@ -363,9 +363,10 @@ module Issuable end # Includes table keys in group by clause when sorting - # preventing errors in postgres + # preventing errors in Postgres + # + # Returns an array of Arel columns # - # Returns an array of arel columns def grouping_columns(sort) sort = sort.to_s grouping_columns = [arel_table[:id]] @@ -384,9 +385,10 @@ module Issuable end # Includes all table keys in group by clause when sorting - # preventing errors in postgres when using CTE search optimisation + # preventing errors in Postgres when using CTE search optimization + # + # Returns an array of Arel columns # - # Returns an array of arel columns def issue_grouping_columns(use_cte: false) if use_cte attribute_names.map { |attr| arel_table[attr.to_sym] } diff --git a/app/models/container_repository.rb b/app/models/container_repository.rb index 4b83eee84fb..e14555f2907 100644 --- a/app/models/container_repository.rb +++ b/app/models/container_repository.rb @@ -8,12 +8,16 @@ class ContainerRepository < ApplicationRecord WAITING_CLEANUP_STATUSES = %i[cleanup_scheduled cleanup_unfinished].freeze REQUIRING_CLEANUP_STATUSES = %i[cleanup_unscheduled cleanup_scheduled].freeze + IDLE_MIGRATION_STATES = %w[default pre_import_done import_done import_aborted import_skipped].freeze + ACTIVE_MIGRATION_STATES = %w[pre_importing importing].freeze + MIGRATION_STATES = (IDLE_MIGRATION_STATES + ACTIVE_MIGRATION_STATES).freeze belongs_to :project validates :name, length: { minimum: 0, allow_nil: false } validates :name, uniqueness: { scope: :project_id } - validates :migration_state, presence: true + validates :migration_state, presence: true, inclusion: { in: MIGRATION_STATES } + validates :migration_aborted_in_state, inclusion: { in: ACTIVE_MIGRATION_STATES }, allow_nil: true validates :migration_retries_count, presence: true, numericality: { greater_than_or_equal_to: 0 }, @@ -23,7 +27,7 @@ class ContainerRepository < ApplicationRecord enum expiration_policy_cleanup_status: { cleanup_unscheduled: 0, cleanup_scheduled: 1, cleanup_unfinished: 2, cleanup_ongoing: 3 } enum migration_skipped_reason: { not_in_plan: 0, 
too_many_retries: 1, too_many_tags: 2, root_namespace_in_deny_list: 3 } - delegate :client, to: :registry + delegate :client, :gitlab_api_client, to: :registry scope :ordered, -> { order(:name) } scope :with_api_entity_associations, -> { preload(project: [:route, { namespace: :route }]) } @@ -41,6 +45,119 @@ class ContainerRepository < ApplicationRecord scope :expiration_policy_started_at_nil_or_before, ->(timestamp) { where('expiration_policy_started_at < ? OR expiration_policy_started_at IS NULL', timestamp) } scope :with_stale_ongoing_cleanup, ->(threshold) { cleanup_ongoing.where('expiration_policy_started_at < ?', threshold) } + state_machine :migration_state, initial: :default do + state :pre_importing do + validates :migration_pre_import_started_at, presence: true + validates :migration_pre_import_done_at, presence: false + end + + state :pre_import_done do + validates :migration_pre_import_started_at, + :migration_pre_import_done_at, + presence: true + end + + state :importing do + validates :migration_import_started_at, presence: true + validates :migration_import_done_at, presence: false + end + + state :import_done + + state :import_skipped do + validates :migration_skipped_reason, + :migration_skipped_at, + presence: true + end + + state :import_aborted do + validates :migration_aborted_at, presence: true + validates :migration_retries_count, presence: true, numericality: { greater_than_or_equal_to: 1 } + end + + event :start_pre_import do + transition default: :pre_importing + end + + event :finish_pre_import do + transition pre_importing: :pre_import_done + end + + event :start_import do + transition pre_import_done: :importing + end + + event :finish_import do + transition importing: :import_done + end + + event :already_migrated do + transition default: :import_done + end + + event :abort_import do + transition %i[pre_importing importing] => :import_aborted + end + + event :skip_import do + transition %i[default pre_importing importing] => :import_skipped + end + + event :retry_pre_import do + transition import_aborted: :pre_importing + end + + event :retry_import do + transition import_aborted: :importing + end + + before_transition any => :pre_importing do |container_repository| + container_repository.migration_pre_import_started_at = Time.zone.now + container_repository.migration_pre_import_done_at = nil + end + + after_transition any => :pre_importing do |container_repository| + container_repository.abort_import unless container_repository.migration_pre_import == :ok + end + + before_transition pre_importing: :pre_import_done do |container_repository| + container_repository.migration_pre_import_done_at = Time.zone.now + end + + before_transition any => :importing do |container_repository| + container_repository.migration_import_started_at = Time.zone.now + container_repository.migration_import_done_at = nil + end + + after_transition any => :importing do |container_repository| + container_repository.abort_import unless container_repository.migration_import == :ok + end + + before_transition importing: :import_done do |container_repository| + container_repository.migration_import_done_at = Time.zone.now + end + + before_transition any => :import_aborted do |container_repository| + container_repository.migration_aborted_in_state = container_repository.migration_state + container_repository.migration_aborted_at = Time.zone.now + container_repository.migration_retries_count += 1 + end + + before_transition import_aborted: any do |container_repository| + 
container_repository.migration_aborted_at = nil + container_repository.migration_aborted_in_state = nil + end + + before_transition any => :import_skipped do |container_repository| + container_repository.migration_skipped_at = Time.zone.now + end + + before_transition any => %i[import_done import_aborted] do + # EnqueuerJob.enqueue perform_async or perform_in depending on the speed FF + # To be implemented in https://gitlab.com/gitlab-org/gitlab/-/issues/349744 + end + end + def self.exists_by_path?(path) where( project: path.repository_project, @@ -64,6 +181,30 @@ class ContainerRepository < ApplicationRecord with_enabled_policy.cleanup_unfinished end + def skip_import(reason:) + self.migration_skipped_reason = reason + + super + end + + def start_pre_import + return false unless ContainerRegistry::Migration.enabled? + + super + end + + def retry_pre_import + return false unless ContainerRegistry::Migration.enabled? + + super + end + + def retry_import + return false unless ContainerRegistry::Migration.enabled? + + super + end + # rubocop: disable CodeReuse/ServiceClass def registry @registry ||= begin @@ -150,6 +291,18 @@ class ContainerRepository < ApplicationRecord migration_state == 'importing' end + def migration_pre_import + return :error unless gitlab_api_client.supports_gitlab_api? + + gitlab_api_client.pre_import_repository(self.path) + end + + def migration_import + return :error unless gitlab_api_client.supports_gitlab_api? + + gitlab_api_client.import_repository(self.path) + end + def self.build_from_path(path) self.new(project: path.repository_project, name: path.repository_name) diff --git a/app/services/service_ping/submit_service.rb b/app/services/service_ping/submit_service.rb index d3d9dcecb2b..3e8c1848f88 100644 --- a/app/services/service_ping/submit_service.rb +++ b/app/services/service_ping/submit_service.rb @@ -5,6 +5,7 @@ module ServicePing PRODUCTION_BASE_URL = 'https://version.gitlab.com' STAGING_BASE_URL = 'https://gitlab-services-version-gitlab-com-staging.gs-staging.gitlab.org' USAGE_DATA_PATH = 'usage_data' + ERROR_PATH = 'usage_ping_errors' SubmissionError = Class.new(StandardError) @@ -15,12 +16,23 @@ module ServicePing def execute return unless ServicePing::ServicePingSettings.product_intelligence_enabled? + start = Time.current begin usage_data = BuildPayloadService.new.execute response = submit_usage_data_payload(usage_data) - rescue StandardError + rescue StandardError => e return unless Gitlab::CurrentSettings.usage_ping_enabled? 
+ error_payload = { + time: Time.current, + uuid: Gitlab::UsageData.add_metric('UuidMetric'), + hostname: Gitlab::UsageData.add_metric('HostnameMetric'), + version: Gitlab::UsageData.alt_usage_data { Gitlab::VERSION }, + message: e.message, + elapsed: (Time.current - start).round(1) + } + submit_payload({ error: error_payload }, url: error_url) + usage_data = Gitlab::UsageData.data(force_refresh: true) response = submit_usage_data_payload(usage_data) end @@ -42,12 +54,16 @@ module ServicePing URI.join(base_url, USAGE_DATA_PATH) end + def error_url + URI.join(base_url, ERROR_PATH) + end + private - def submit_payload(usage_data) + def submit_payload(payload, url: self.url) Gitlab::HTTP.post( url, - body: usage_data.to_json, + body: payload.to_json, allow_local_requests: true, headers: { 'Content-type' => 'application/json' } ) diff --git a/app/services/update_container_registry_info_service.rb b/app/services/update_container_registry_info_service.rb index 531335839a9..7d79b257687 100644 --- a/app/services/update_container_registry_info_service.rb +++ b/app/services/update_container_registry_info_service.rb @@ -15,6 +15,12 @@ class UpdateContainerRegistryInfoService client = ContainerRegistry::Client.new(registry_config.api_url, token: token) info = client.registry_info + gitlab_api_client = ContainerRegistry::GitlabApiClient.new(registry_config.api_url, token: token) + if gitlab_api_client.supports_gitlab_api? + info[:features] ||= [] + info[:features] << ContainerRegistry::GitlabApiClient::REGISTRY_GITLAB_V1_API_FEATURE + end + Gitlab::CurrentSettings.update!( container_registry_vendor: info[:vendor] || '', container_registry_version: info[:version] || '', diff --git a/app/validators/x509_certificate_credentials_validator.rb b/app/validators/x509_certificate_credentials_validator.rb index d2f18e956c3..11b53d59c7d 100644 --- a/app/validators/x509_certificate_credentials_validator.rb +++ b/app/validators/x509_certificate_credentials_validator.rb @@ -41,7 +41,7 @@ class X509CertificateCredentialsValidator < ActiveModel::Validator return if private_key.nil? || certificate.nil? 
- unless certificate.public_key.fingerprint == private_key.public_key.fingerprint + unless certificate.check_private_key(private_key) record.errors.add(options[:pkey], _('private key does not match certificate.')) end end diff --git a/config/feature_flags/development/container_registry_migration_phase2_capacity_1.yml b/config/feature_flags/development/container_registry_migration_phase2_capacity_1.yml new file mode 100644 index 00000000000..846bc8b690b --- /dev/null +++ b/config/feature_flags/development/container_registry_migration_phase2_capacity_1.yml @@ -0,0 +1,8 @@ +--- +name: container_registry_migration_phase2_capacity_1 +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/79061 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/350543 +milestone: '14.8' +type: development +group: group::package +default_enabled: false diff --git a/config/feature_flags/development/container_registry_migration_phase2_capacity_10.yml b/config/feature_flags/development/container_registry_migration_phase2_capacity_10.yml new file mode 100644 index 00000000000..fcbcc5bfb48 --- /dev/null +++ b/config/feature_flags/development/container_registry_migration_phase2_capacity_10.yml @@ -0,0 +1,8 @@ +--- +name: container_registry_migration_phase2_capacity_10 +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/79061 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/350543 +milestone: '14.8' +type: development +group: group::package +default_enabled: false diff --git a/config/feature_flags/development/container_registry_migration_phase2_capacity_25.yml b/config/feature_flags/development/container_registry_migration_phase2_capacity_25.yml new file mode 100644 index 00000000000..b52693e0aba --- /dev/null +++ b/config/feature_flags/development/container_registry_migration_phase2_capacity_25.yml @@ -0,0 +1,8 @@ +--- +name: container_registry_migration_phase2_capacity_25 +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/79061 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/350543 +milestone: '14.8' +type: development +group: group::package +default_enabled: false diff --git a/config/feature_flags/development/container_registry_migration_phase2_enabled.yml b/config/feature_flags/development/container_registry_migration_phase2_enabled.yml new file mode 100644 index 00000000000..c48cbdb435e --- /dev/null +++ b/config/feature_flags/development/container_registry_migration_phase2_enabled.yml @@ -0,0 +1,8 @@ +--- +name: container_registry_migration_phase2_enabled +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/79061 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/350543 +milestone: '14.8' +type: development +group: group::package +default_enabled: false diff --git a/config/feature_flags/development/container_registry_migration_phase2_enqueue_speed_fast.yml b/config/feature_flags/development/container_registry_migration_phase2_enqueue_speed_fast.yml new file mode 100644 index 00000000000..9a312161824 --- /dev/null +++ b/config/feature_flags/development/container_registry_migration_phase2_enqueue_speed_fast.yml @@ -0,0 +1,8 @@ +--- +name: container_registry_migration_phase2_enqueue_speed_fast +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/79061 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/350543 +milestone: '14.8' +type: development +group: group::package +default_enabled: false diff --git 
a/config/feature_flags/development/container_registry_migration_phase2_enqueue_speed_slow.yml b/config/feature_flags/development/container_registry_migration_phase2_enqueue_speed_slow.yml new file mode 100644 index 00000000000..f02259be928 --- /dev/null +++ b/config/feature_flags/development/container_registry_migration_phase2_enqueue_speed_slow.yml @@ -0,0 +1,8 @@ +--- +name: container_registry_migration_phase2_enqueue_speed_slow +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/79061 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/350543 +milestone: '14.8' +type: development +group: group::package +default_enabled: false diff --git a/config/initializers/rspec_profiling.rb b/config/initializers/rspec_profiling.rb index 5edea6489ed..2ab7bdade31 100644 --- a/config/initializers/rspec_profiling.rb +++ b/config/initializers/rspec_profiling.rb @@ -62,7 +62,7 @@ RspecProfiling.configure do |config| config.collector = RspecProfilingExt::Collectors::CSVWithTimestamps config.csv_path = -> do prefix = "#{ENV['CI_JOB_NAME']}-".gsub(%r{[ /]}, '-') if ENV['CI_JOB_NAME'] - "rspec_profiling/#{prefix}#{Time.now.to_i}-#{SecureRandom.hex(8)}-rspec-data.csv" + "#{ENV['RSPEC_PROFILING_FOLDER_PATH']}/#{prefix}#{Time.now.to_i}-#{SecureRandom.hex(8)}-rspec-data.csv" end end end diff --git a/db/post_migrate/20220124145019_remove_projects_external_pull_requests_project_id_fk.rb b/db/post_migrate/20220124145019_remove_projects_external_pull_requests_project_id_fk.rb new file mode 100644 index 00000000000..b16234fc812 --- /dev/null +++ b/db/post_migrate/20220124145019_remove_projects_external_pull_requests_project_id_fk.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class RemoveProjectsExternalPullRequestsProjectIdFk < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! + + def up + return unless foreign_key_exists?(:external_pull_requests, :projects, name: "fk_rails_bcae9b5c7b") + + with_lock_retries do + execute('LOCK projects, external_pull_requests IN ACCESS EXCLUSIVE MODE') if transaction_open? + + remove_foreign_key_if_exists(:external_pull_requests, :projects, name: "fk_rails_bcae9b5c7b") + end + end + + def down + add_concurrent_foreign_key(:external_pull_requests, :projects, name: "fk_rails_bcae9b5c7b", column: :project_id, target_column: :id, on_delete: :cascade) + end +end diff --git a/db/post_migrate/20220124152824_remove_projects_ci_subscriptions_projects_downstream_project_id_fk.rb b/db/post_migrate/20220124152824_remove_projects_ci_subscriptions_projects_downstream_project_id_fk.rb new file mode 100644 index 00000000000..8596b1f14ba --- /dev/null +++ b/db/post_migrate/20220124152824_remove_projects_ci_subscriptions_projects_downstream_project_id_fk.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class RemoveProjectsCiSubscriptionsProjectsDownstreamProjectIdFk < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! + + def up + return unless foreign_key_exists?(:ci_subscriptions_projects, :projects, name: "fk_rails_0818751483") + + with_lock_retries do + execute('LOCK projects, ci_subscriptions_projects IN ACCESS EXCLUSIVE MODE') if transaction_open? 
+ + remove_foreign_key_if_exists(:ci_subscriptions_projects, :projects, name: "fk_rails_0818751483") + end + end + + def down + add_concurrent_foreign_key(:ci_subscriptions_projects, :projects, name: "fk_rails_0818751483", column: :downstream_project_id, target_column: :id, on_delete: :cascade) + end +end diff --git a/db/post_migrate/20220124215857_remove_projects_ci_job_token_project_scope_links_source_project_id_fk.rb b/db/post_migrate/20220124215857_remove_projects_ci_job_token_project_scope_links_source_project_id_fk.rb new file mode 100644 index 00000000000..8bfe9586a50 --- /dev/null +++ b/db/post_migrate/20220124215857_remove_projects_ci_job_token_project_scope_links_source_project_id_fk.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class RemoveProjectsCiJobTokenProjectScopeLinksSourceProjectIdFk < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! + + def up + return unless foreign_key_exists?(:ci_job_token_project_scope_links, :projects, name: "fk_rails_4b2ee3290b") + + with_lock_retries do + execute('LOCK projects, ci_job_token_project_scope_links IN ACCESS EXCLUSIVE MODE') if transaction_open? + + remove_foreign_key_if_exists(:ci_job_token_project_scope_links, :projects, name: "fk_rails_4b2ee3290b") + end + end + + def down + add_concurrent_foreign_key(:ci_job_token_project_scope_links, :projects, name: "fk_rails_4b2ee3290b", column: :source_project_id, target_column: :id, on_delete: :cascade) + end +end diff --git a/db/schema_migrations/20220124145019 b/db/schema_migrations/20220124145019 new file mode 100644 index 00000000000..8de01024798 --- /dev/null +++ b/db/schema_migrations/20220124145019 @@ -0,0 +1 @@ +3b1f7a7b6481a960ac25523e5e1b5abc6c1436332d64d4319d9e4112b0fa765e
\ No newline at end of file diff --git a/db/schema_migrations/20220124152824 b/db/schema_migrations/20220124152824 new file mode 100644 index 00000000000..5a0db5859f0 --- /dev/null +++ b/db/schema_migrations/20220124152824 @@ -0,0 +1 @@ +acf680cbf06d6eb2c0da44d416ff947e0460593cad74499acda91eb4bec63894
\ No newline at end of file diff --git a/db/schema_migrations/20220124215857 b/db/schema_migrations/20220124215857 new file mode 100644 index 00000000000..495e428d2c4 --- /dev/null +++ b/db/schema_migrations/20220124215857 @@ -0,0 +1 @@ +785c2404175faef7d3d6f81ae8652ca0ced0f4b01def8d6364a4450f0db49ecf
\ No newline at end of file diff --git a/db/structure.sql b/db/structure.sql index 4e6592174cc..6f85319d656 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -30041,9 +30041,6 @@ ALTER TABLE ONLY ip_restrictions ALTER TABLE ONLY terraform_state_versions ADD CONSTRAINT fk_rails_04f176e239 FOREIGN KEY (terraform_state_id) REFERENCES terraform_states(id) ON DELETE CASCADE; -ALTER TABLE ONLY ci_subscriptions_projects - ADD CONSTRAINT fk_rails_0818751483 FOREIGN KEY (downstream_project_id) REFERENCES projects(id) ON DELETE CASCADE; - ALTER TABLE ONLY trending_projects ADD CONSTRAINT fk_rails_09feecd872 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE; @@ -30455,9 +30452,6 @@ ALTER TABLE ONLY user_custom_attributes ALTER TABLE ONLY upcoming_reconciliations ADD CONSTRAINT fk_rails_497b4938ac FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE; -ALTER TABLE ONLY ci_job_token_project_scope_links - ADD CONSTRAINT fk_rails_4b2ee3290b FOREIGN KEY (source_project_id) REFERENCES projects(id) ON DELETE CASCADE; - ALTER TABLE ONLY group_deletion_schedules ADD CONSTRAINT fk_rails_4b8c694a6c FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE; @@ -31151,9 +31145,6 @@ ALTER TABLE ONLY projects_sync_events ALTER TABLE ONLY approval_merge_request_rules_users ADD CONSTRAINT fk_rails_bc8972fa55 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; -ALTER TABLE ONLY external_pull_requests - ADD CONSTRAINT fk_rails_bcae9b5c7b FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE; - ALTER TABLE ONLY elasticsearch_indexed_projects ADD CONSTRAINT fk_rails_bd13bbdc3d FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE; diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md index f2d713e9c97..458a9a2b98a 100644 --- a/doc/api/graphql/reference/index.md +++ b/doc/api/graphql/reference/index.md @@ -376,7 +376,8 @@ four standard [pagination arguments](#connection-pagination-arguments): | Name | Type | Description | | ---- | ---- | ----------- | -| <a id="queryrunnersactive"></a>`active` | [`Boolean`](#boolean) | Filter runners by active (true) or paused (false) status. | +| <a id="queryrunnersactive"></a>`active` **{warning-solid}** | [`Boolean`](#boolean) | **Deprecated** in 14.8. This was renamed. Use: `paused`. | +| <a id="queryrunnerspaused"></a>`paused` | [`Boolean`](#boolean) | Filter runners by `paused` (true) or `active` (false) status. | | <a id="queryrunnerssearch"></a>`search` | [`String`](#string) | Filter by full token or partial text in description field. | | <a id="queryrunnerssort"></a>`sort` | [`CiRunnerSort`](#cirunnersort) | Sort order of results. | | <a id="queryrunnersstatus"></a>`status` | [`CiRunnerStatus`](#cirunnerstatus) | Filter runners by status. | @@ -11213,8 +11214,9 @@ four standard [pagination arguments](#connection-pagination-arguments): | Name | Type | Description | | ---- | ---- | ----------- | -| <a id="grouprunnersactive"></a>`active` | [`Boolean`](#boolean) | Filter runners by active (true) or paused (false) status. | +| <a id="grouprunnersactive"></a>`active` **{warning-solid}** | [`Boolean`](#boolean) | **Deprecated** in 14.8. This was renamed. Use: `paused`. | | <a id="grouprunnersmembership"></a>`membership` | [`RunnerMembershipFilter`](#runnermembershipfilter) | Control which runners to include in the results. | +| <a id="grouprunnerspaused"></a>`paused` | [`Boolean`](#boolean) | Filter runners by `paused` (true) or `active` (false) status. 
| | <a id="grouprunnerssearch"></a>`search` | [`String`](#string) | Filter by full token or partial text in description field. | | <a id="grouprunnerssort"></a>`sort` | [`CiRunnerSort`](#cirunnersort) | Sort order of results. | | <a id="grouprunnersstatus"></a>`status` | [`CiRunnerStatus`](#cirunnerstatus) | Filter runners by status. | diff --git a/doc/ci/git_submodules.md b/doc/ci/git_submodules.md index cdc75fd2bec..23055514839 100644 --- a/doc/ci/git_submodules.md +++ b/doc/ci/git_submodules.md @@ -60,6 +60,15 @@ To make submodules work correctly in CI/CD jobs: GIT_SUBMODULE_STRATEGY: recursive ``` +1. You can provide additional flags to control advanced checkout behavior using + [`GIT_SUBMODULE_UPDATE_FLAGS`](runners/configure_runners.md#git-submodule-update-flags). + + ```yaml + variables: + GIT_SUBMODULE_STRATEGY: recursive + GIT_SUBMODULE_UPDATE_FLAGS: --jobs 4 + ``` + If you use the [`CI_JOB_TOKEN`](jobs/ci_job_token.md) to clone a submodule in a pipeline job, the user executing the job must be assigned to a role that has [permission](../user/permissions.md#gitlab-cicd-permissions) to trigger a pipeline diff --git a/doc/ci/runners/configure_runners.md b/doc/ci/runners/configure_runners.md index d826b28acce..8860c43cb11 100644 --- a/doc/ci/runners/configure_runners.md +++ b/doc/ci/runners/configure_runners.md @@ -291,6 +291,7 @@ globally or for individual jobs: - [`GIT_CHECKOUT`](#git-checkout) - [`GIT_CLEAN_FLAGS`](#git-clean-flags) - [`GIT_FETCH_EXTRA_FLAGS`](#git-fetch-extra-flags) +- [`GIT_SUBMODULE_UPDATE_FLAGS`](#git-submodule-update-flags) - [`GIT_DEPTH`](#shallow-cloning) (shallow cloning) - [`GIT_CLONE_PATH`](#custom-build-directories) (custom build directories) - [`TRANSFER_METER_FREQUENCY`](#artifact-and-cache-settings) (artifact/cache meter update frequency) @@ -382,6 +383,8 @@ For this feature to work correctly, the submodules must be configured - a relative path to another repository on the same GitLab server. See the [Git submodules](../git_submodules.md) documentation. +You can provide additional flags to control advanced behavior using [`GIT_SUBMODULE_UPDATE_FLAGS`](#git-submodule-update-flags). + ### Git checkout > Introduced in GitLab Runner 9.3. @@ -442,14 +445,14 @@ script: > [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4142) in GitLab Runner 13.1. -The `GIT_FETCH_EXTRA_FLAGS` variable is used to control the behavior of +Use the `GIT_FETCH_EXTRA_FLAGS` variable to control the behavior of `git fetch`. You can set it globally or per-job in the [`variables`](../yaml/index.md#variables) section. `GIT_FETCH_EXTRA_FLAGS` accepts all options of the [`git fetch`](https://git-scm.com/docs/git-fetch) command. However, `GIT_FETCH_EXTRA_FLAGS` flags are appended after the default flags that can't be modified. The default flags are: -- [GIT_DEPTH](#shallow-cloning). +- [`GIT_DEPTH`](#shallow-cloning). - The list of [refspecs](https://git-scm.com/book/en/v2/Git-Internals-The-Refspec). - A remote called `origin`. @@ -475,6 +478,47 @@ git fetch origin $REFSPECS --depth 50 --prune Where `$REFSPECS` is a value provided to the runner internally by GitLab. +### Git submodule update flags + +> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/3192) in GitLab Runner 14.8. + +Use the `GIT_SUBMODULE_UPDATE_FLAGS` variable to control the behavior of `git submodule update` +when [`GIT_SUBMODULE_STRATEGY`](#git-submodule-strategy) is set to either `normal` or `recursive`. 
+You can set it globally or per-job in the [`variables`](../yaml/index.md#variables) section. + +`GIT_SUBMODULE_UPDATE_FLAGS` accepts all options of the +[`git submodule update`](https://git-scm.com/docs/git-submodule#Documentation/git-submodule.txt-update--init--remote-N--no-fetch--no-recommend-shallow-f--force--checkout--rebase--merge--referenceltrepositorygt--depthltdepthgt--recursive--jobsltngt--no-single-branch--ltpathgt82308203) +subcommand. However, note that `GIT_SUBMODULE_UPDATE_FLAGS` flags are appended after a few default flags: + +- `--init`, if [`GIT_SUBMODULE_STRATEGY`](#git-submodule-strategy) was set to `normal` or `recursive`. +- `--recursive`, if [`GIT_SUBMODULE_STRATEGY`](#git-submodule-strategy) was set to `recursive`. +- [`GIT_DEPTH`](#shallow-cloning). See the default value below. + +Git honors the last occurrence of a flag in the list of arguments, so manually +providing them in `GIT_SUBMODULE_UPDATE_FLAGS` will also override these default flags. + +You can use this variable to fetch the latest remote `HEAD` instead of the commit tracked, +in the repository, or to speed up the checkout by fetching submodules in multiple parallel jobs: + +```yaml +variables: + GIT_SUBMODULE_STRATEGY: recursive + GIT_SUBMODULE_UPDATE_FLAGS: --remote --jobs 4 +script: + - ls -al .git/modules/ +``` + +The configuration above results in `git submodule update` being called this way: + +```shell +git submodule update --init --depth 50 --recursive --remote --jobs 4 +``` + +WARNING: +You should be aware of the implications for the security, stability, and reproducibility of +your builds when using the `--remote` flag. In most cases, it is better to explicitly track +submodule commits as designed, and update them using an auto-remediation/dependency bot. + ### Shallow cloning > Introduced in GitLab 8.9 as an experimental feature. diff --git a/doc/development/pipelines.md b/doc/development/pipelines.md index f478bad6d74..2ab7e46b096 100644 --- a/doc/development/pipelines.md +++ b/doc/development/pipelines.md @@ -170,10 +170,19 @@ After that, the next pipeline uses the up-to-date `knapsack/report-master.json` ### Flaky tests +#### Automatic skipping of flaky tests + Tests that are [known to be flaky](testing_guide/flaky_tests.md#automatic-retries-and-flaky-tests-detection) are skipped unless the `$SKIP_FLAKY_TESTS_AUTOMATICALLY` variable is set to `false` or if the `~"pipeline:run-flaky-tests"` label is set on the MR. +#### Automatic retry of failing tests in a separate process + +When the `$RETRY_FAILED_TESTS_IN_NEW_PROCESS` variable is set to `true`, RSpec tests that failed are automatically retried once in a separate +RSpec process. The goal is to get rid of most side-effects from previous tests that may lead to a subsequent test failure. + +We keep track of retried tests in the `$RETRIED_TESTS_REPORT_FILE` file saved as artifact by the `rspec:flaky-tests-report` job. + ### Monitoring The GitLab test suite is [monitored](performance.md#rspec-profiling) for the `main` branch, and any branch diff --git a/doc/development/service_ping/index.md b/doc/development/service_ping/index.md index 428a34461d5..c9fd51addd0 100644 --- a/doc/development/service_ping/index.md +++ b/doc/development/service_ping/index.md @@ -205,6 +205,25 @@ sequenceDiagram If a firewall exception is needed, the required URL depends on several things. If the hostname is `version.gitlab.com`, the protocol is `TCP`, and the port number is `443`, the required URL is <https://version.gitlab.com/>. +1. 
In case of an error, it will be reported to the Version application along with following pieces of information: + +- `uuid` - GitLab instance unique identifier +- `hostname` - GitLab instance hostname +- `version` - GitLab instance current versions +- `elapsed` - Amount of time which passed since Service Ping report process started and moment of error occurrence +- `message` - Error message + +<pre> +<code> +{ + "uuid"=>"02333324-1cd7-4c3b-a45b-a4993f05fb1d", + "hostname"=>"127.0.0.1", + "version"=>"14.7.0-pre", + "elapsed"=>0.006946, + "message"=>'PG::UndefinedColumn: ERROR: column \"non_existent_attribute\" does not exist\nLINE 1: SELECT COUNT(non_existent_attribute) FROM \"issues\" /*applica...' +} +</code> +</pre> ### On a Geo secondary site @@ -510,7 +529,7 @@ To generate Service Ping, use [Teleport](https://goteleport.com/docs/) or a deta ### Verification (After approx 30 hours) -#### Verify with a detached screen session +#### Verify with Teleport 1. Follow [the steps](https://gitlab.com/gitlab-com/runbooks/-/blob/master/docs/Teleport/Connect_to_Rails_Console_via_Teleport.md#how-to-use-teleport-to-connect-to-rails-console) to request a new access to the required environment and connect to the Rails console 1. Check the last payload in `raw_usage_data` table: `RawUsageData.last.payload` diff --git a/lib/container_registry/base_client.rb b/lib/container_registry/base_client.rb new file mode 100644 index 00000000000..5438d7a6a8c --- /dev/null +++ b/lib/container_registry/base_client.rb @@ -0,0 +1,136 @@ +# frozen_string_literal: true + +require 'faraday' +require 'faraday_middleware' +require 'digest' + +module ContainerRegistry + class BaseClient + DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE = 'application/vnd.docker.distribution.manifest.v2+json' + DOCKER_DISTRIBUTION_MANIFEST_LIST_V2_TYPE = 'application/vnd.docker.distribution.manifest.list.v2+json' + OCI_MANIFEST_V1_TYPE = 'application/vnd.oci.image.manifest.v1+json' + CONTAINER_IMAGE_V1_TYPE = 'application/vnd.docker.container.image.v1+json' + + ACCEPTED_TYPES = [DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE, OCI_MANIFEST_V1_TYPE].freeze + ACCEPTED_TYPES_RAW = [DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE, OCI_MANIFEST_V1_TYPE, DOCKER_DISTRIBUTION_MANIFEST_LIST_V2_TYPE].freeze + + RETRY_EXCEPTIONS = [Faraday::Request::Retry::DEFAULT_EXCEPTIONS, Faraday::ConnectionFailed].flatten.freeze + RETRY_OPTIONS = { + max: 1, + interval: 5, + exceptions: RETRY_EXCEPTIONS + }.freeze + + ERROR_CALLBACK_OPTIONS = { + callback: -> (env, exception) do + Gitlab::ErrorTracking.log_exception( + exception, + class: name, + url: env[:url] + ) + end + }.freeze + + # Taken from: FaradayMiddleware::FollowRedirects + REDIRECT_CODES = Set.new [301, 302, 303, 307] + + class << self + private + + def with_dummy_client(return_value_if_disabled: nil) + registry_config = Gitlab.config.registry + unless registry_config.enabled && registry_config.api_url.present? + return return_value_if_disabled + end + + token = Auth::ContainerRegistryAuthenticationService.access_token([], []) + yield new(registry_config.api_url, token: token) + end + end + + def initialize(base_uri, options = {}) + @base_uri = base_uri + @options = options + end + + private + + def faraday(timeout_enabled: true) + @faraday ||= faraday_base(timeout_enabled: timeout_enabled) do |conn| + initialize_connection(conn, @options, &method(:accept_manifest)) + end + end + + def faraday_base(timeout_enabled: true, &block) + request_options = timeout_enabled ? 
Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS : nil + + Faraday.new( + @base_uri, + headers: { user_agent: "GitLab/#{Gitlab::VERSION}" }, + request: request_options, + &block + ) + end + + def initialize_connection(conn, options) + conn.request :json + + if options[:user] && options[:password] + conn.request(:basic_auth, options[:user].to_s, options[:password].to_s) + elsif options[:token] + conn.request(:authorization, :bearer, options[:token].to_s) + end + + yield(conn) if block_given? + + conn.request(:retry, RETRY_OPTIONS) + conn.request(:gitlab_error_callback, ERROR_CALLBACK_OPTIONS) + conn.adapter :net_http + end + + def response_body(response, allow_redirect: false) + if allow_redirect && REDIRECT_CODES.include?(response.status) + response = redirect_response(response.headers['location']) + end + + response.body if response && response.success? + end + + def redirect_response(location) + return unless location + + uri = URI(@base_uri).merge(location) + raise ArgumentError, "Invalid scheme for #{location}" unless %w[http https].include?(uri.scheme) + + faraday_redirect.get(uri) + end + + def accept_manifest(conn) + conn.headers['Accept'] = ACCEPTED_TYPES + + conn.response :json, content_type: 'application/json' + conn.response :json, content_type: 'application/vnd.docker.distribution.manifest.v1+prettyjws' + conn.response :json, content_type: 'application/vnd.docker.distribution.manifest.v1+json' + conn.response :json, content_type: DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE + conn.response :json, content_type: OCI_MANIFEST_V1_TYPE + end + + # Create a new request to make sure the Authorization header is not inserted + # via the Faraday middleware + def faraday_redirect + @faraday_redirect ||= faraday_base do |conn| + conn.request :json + + conn.request(:retry, RETRY_OPTIONS) + conn.request(:gitlab_error_callback, ERROR_CALLBACK_OPTIONS) + conn.adapter :net_http + end + end + + def delete_if_exists(path) + result = faraday.delete(path) + + result.success? 
|| result.status == 404 + end + end +end diff --git a/lib/container_registry/client.rb b/lib/container_registry/client.rb index c2ad9e6ae89..add238350dd 100644 --- a/lib/container_registry/client.rb +++ b/lib/container_registry/client.rb @@ -1,68 +1,25 @@ # frozen_string_literal: true -require 'faraday' -require 'faraday_middleware' -require 'digest' - module ContainerRegistry - class Client + class Client < BaseClient include Gitlab::Utils::StrongMemoize attr_accessor :uri - DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE = 'application/vnd.docker.distribution.manifest.v2+json' - DOCKER_DISTRIBUTION_MANIFEST_LIST_V2_TYPE = 'application/vnd.docker.distribution.manifest.list.v2+json' - OCI_MANIFEST_V1_TYPE = 'application/vnd.oci.image.manifest.v1+json' - CONTAINER_IMAGE_V1_TYPE = 'application/vnd.docker.container.image.v1+json' REGISTRY_VERSION_HEADER = 'gitlab-container-registry-version' REGISTRY_FEATURES_HEADER = 'gitlab-container-registry-features' REGISTRY_TAG_DELETE_FEATURE = 'tag_delete' - ACCEPTED_TYPES = [DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE, OCI_MANIFEST_V1_TYPE].freeze - - ACCEPTED_TYPES_RAW = [DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE, OCI_MANIFEST_V1_TYPE, DOCKER_DISTRIBUTION_MANIFEST_LIST_V2_TYPE].freeze - - # Taken from: FaradayMiddleware::FollowRedirects - REDIRECT_CODES = Set.new [301, 302, 303, 307] - - RETRY_EXCEPTIONS = [Faraday::Request::Retry::DEFAULT_EXCEPTIONS, Faraday::ConnectionFailed].flatten.freeze - RETRY_OPTIONS = { - max: 1, - interval: 5, - exceptions: RETRY_EXCEPTIONS - }.freeze - - ERROR_CALLBACK_OPTIONS = { - callback: -> (env, exception) do - Gitlab::ErrorTracking.log_exception( - exception, - class: name, - url: env[:url] - ) - end - }.freeze - def self.supports_tag_delete? - registry_config = Gitlab.config.registry - return false unless registry_config.enabled && registry_config.api_url.present? - - token = Auth::ContainerRegistryAuthenticationService.access_token([], []) - client = new(registry_config.api_url, token: token) - client.supports_tag_delete? + with_dummy_client(return_value_if_disabled: false) do |client| + client.supports_tag_delete? + end end def self.registry_info - registry_config = Gitlab.config.registry - return unless registry_config.enabled && registry_config.api_url.present? - - token = Auth::ContainerRegistryAuthenticationService.access_token([], []) - client = new(registry_config.api_url, token: token) - client.registry_info - end - - def initialize(base_uri, options = {}) - @base_uri = base_uri - @options = options + with_dummy_client do |client| + client.registry_info + end end def registry_info @@ -176,89 +133,11 @@ module ContainerRegistry private - def initialize_connection(conn, options) - conn.request :json - - if options[:user] && options[:password] - conn.request(:basic_auth, options[:user].to_s, options[:password].to_s) - elsif options[:token] - conn.request(:authorization, :bearer, options[:token].to_s) - end - - yield(conn) if block_given? 
- - conn.request(:retry, RETRY_OPTIONS) - conn.request(:gitlab_error_callback, ERROR_CALLBACK_OPTIONS) - conn.adapter :net_http - end - - def accept_manifest(conn) - conn.headers['Accept'] = ACCEPTED_TYPES - - conn.response :json, content_type: 'application/json' - conn.response :json, content_type: 'application/vnd.docker.distribution.manifest.v1+prettyjws' - conn.response :json, content_type: 'application/vnd.docker.distribution.manifest.v1+json' - conn.response :json, content_type: DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE - conn.response :json, content_type: OCI_MANIFEST_V1_TYPE - end - - def response_body(response, allow_redirect: false) - if allow_redirect && REDIRECT_CODES.include?(response.status) - response = redirect_response(response.headers['location']) - end - - response.body if response && response.success? - end - - def redirect_response(location) - return unless location - - uri = URI(@base_uri).merge(location) - raise ArgumentError, "Invalid scheme for #{location}" unless %w[http https].include?(uri.scheme) - - faraday_redirect.get(uri) - end - - def faraday(timeout_enabled: true) - @faraday ||= faraday_base(timeout_enabled: timeout_enabled) do |conn| - initialize_connection(conn, @options, &method(:accept_manifest)) - end - end - def faraday_blob @faraday_blob ||= faraday_base do |conn| initialize_connection(conn, @options) end end - - # Create a new request to make sure the Authorization header is not inserted - # via the Faraday middleware - def faraday_redirect - @faraday_redirect ||= faraday_base do |conn| - conn.request :json - - conn.request(:retry, RETRY_OPTIONS) - conn.request(:gitlab_error_callback, ERROR_CALLBACK_OPTIONS) - conn.adapter :net_http - end - end - - def faraday_base(timeout_enabled: true, &block) - request_options = timeout_enabled ? Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS : nil - - Faraday.new( - @base_uri, - headers: { user_agent: "GitLab/#{Gitlab::VERSION}" }, - request: request_options, - &block - ) - end - - def delete_if_exists(path) - result = faraday.delete(path) - - result.success? || result.status == 404 - end end end diff --git a/lib/container_registry/gitlab_api_client.rb b/lib/container_registry/gitlab_api_client.rb new file mode 100644 index 00000000000..38929e5e904 --- /dev/null +++ b/lib/container_registry/gitlab_api_client.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true + +module ContainerRegistry + class GitlabApiClient < BaseClient + include Gitlab::Utils::StrongMemoize + + IMPORT_RESPONSES = { + 200 => :already_imported, + 202 => :ok, + 401 => :unauthorized, + 404 => :not_found, + 409 => :already_being_imported, + 424 => :pre_import_failed, + 425 => :already_being_imported, + 429 => :too_many_imports + }.freeze + + REGISTRY_GITLAB_V1_API_FEATURE = 'gitlab_v1_api' + + def self.supports_gitlab_api? + with_dummy_client(return_value_if_disabled: false) do |client| + client.supports_gitlab_api? + end + end + + def supports_gitlab_api? + strong_memoize(:supports_gitlab_api) do + registry_features = Gitlab::CurrentSettings.container_registry_features || [] + next true if ::Gitlab.com? && registry_features.include?(REGISTRY_GITLAB_V1_API_FEATURE) + + response = faraday.get('/gitlab/v1/') + response.success? 
|| response.status == 401 + end + end + + def pre_import_repository(path) + response = start_import_for(path, pre: true) + IMPORT_RESPONSES.fetch(response.status, :error) + end + + def import_repository(path) + response = start_import_for(path, pre: false) + IMPORT_RESPONSES.fetch(response.status, :error) + end + + private + + def start_import_for(path, pre:) + faraday.put("/gitlab/v1/import/#{path}") do |req| + req.params['pre'] = pre.to_s + end + end + end +end diff --git a/lib/container_registry/migration.rb b/lib/container_registry/migration.rb new file mode 100644 index 00000000000..5983c3b8b93 --- /dev/null +++ b/lib/container_registry/migration.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +module ContainerRegistry + class Migration + class << self + delegate :container_registry_import_max_tags_count, to: ::Gitlab::CurrentSettings + delegate :container_registry_import_max_retries, to: ::Gitlab::CurrentSettings + delegate :container_registry_import_start_max_retries, to: ::Gitlab::CurrentSettings + delegate :container_registry_import_max_step_duration, to: ::Gitlab::CurrentSettings + delegate :container_registry_import_target_plan, to: ::Gitlab::CurrentSettings + delegate :container_registry_import_created_before, to: ::Gitlab::CurrentSettings + + alias_method :max_tags_count, :container_registry_import_max_tags_count + alias_method :max_retries, :container_registry_import_max_retries + alias_method :start_max_retries, :container_registry_import_start_max_retries + alias_method :max_step_duration, :container_registry_import_max_step_duration + alias_method :target_plan_name, :container_registry_import_target_plan + alias_method :created_before, :container_registry_import_created_before + end + + def self.enabled? + Feature.enabled?(:container_registry_migration_phase2_enabled) + end + + def self.enqueue_waiting_time + return 0 if Feature.enabled?(:container_registry_migration_phase2_enqueue_speed_fast) + return 6.hours if Feature.enabled?(:container_registry_migration_phase2_enqueue_speed_slow) + + 1.hour + end + + def self.capacity + return 25 if Feature.enabled?(:container_registry_migration_phase2_capacity_25) + return 10 if Feature.enabled?(:container_registry_migration_phase2_capacity_10) + return 1 if Feature.enabled?(:container_registry_migration_phase2_capacity_1) + + 0 + end + end +end diff --git a/lib/container_registry/registry.rb b/lib/container_registry/registry.rb index 523364ac7c7..b69db564b25 100644 --- a/lib/container_registry/registry.rb +++ b/lib/container_registry/registry.rb @@ -2,12 +2,21 @@ module ContainerRegistry class Registry + include Gitlab::Utils::StrongMemoize + attr_reader :uri, :client, :path def initialize(uri, options = {}) @uri = uri - @path = options[:path] || default_path - @client = ContainerRegistry::Client.new(uri, options) + @options = options + @path = @options[:path] || default_path + @client = ContainerRegistry::Client.new(@uri, @options) + end + + def gitlab_api_client + strong_memoize(:gitlab_api_client) do + ContainerRegistry::GitlabApiClient.new(@uri, @options) + end end private diff --git a/lib/gitlab/database/gitlab_loose_foreign_keys.yml b/lib/gitlab/database/gitlab_loose_foreign_keys.yml index 235b49a1878..e3793c14ee2 100644 --- a/lib/gitlab/database/gitlab_loose_foreign_keys.yml +++ b/lib/gitlab/database/gitlab_loose_foreign_keys.yml @@ -35,6 +35,9 @@ ci_job_token_project_scope_links: - table: users column: added_by_id on_delete: async_nullify + - table: projects + column: source_project_id + on_delete: async_delete 
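For context on the new ContainerRegistry::GitlabApiClient introduced above: IMPORT_RESPONSES maps the registry import API's HTTP status codes to symbols, so callers can branch on a small, stable vocabulary rather than raw status codes. The sketch below is illustrative only and not part of this commit; `begin_pre_import` and the `repository` object (assumed to expose #path) are hypothetical.

# Illustrative sketch only (not part of this commit): drive the pre-import phase
# from the symbols returned by ContainerRegistry::GitlabApiClient#pre_import_repository.
def begin_pre_import(repository, client)
  case client.pre_import_repository(repository.path)
  when :ok                                        # 202: the registry accepted the pre-import
    :pre_importing
  when :already_imported                          # 200: nothing left to do
    :import_done
  when :already_being_imported, :too_many_imports # 409, 425, 429: back off and retry later
    :retry_later
  else                                            # :unauthorized, :not_found, :pre_import_failed, :error
    :abort
  end
end

The same pattern would apply to #import_repository for the import phase, since both methods translate their responses through IMPORT_RESPONSES.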
ci_daily_build_group_report_results: - table: namespaces column: group_id @@ -42,6 +45,10 @@ ci_daily_build_group_report_results: - table: projects column: project_id on_delete: async_delete +external_pull_requests: + - table: projects + column: project_id + on_delete: async_delete ci_freeze_periods: - table: projects column: project_id @@ -161,6 +168,10 @@ requirements_management_test_reports: - table: ci_builds column: build_id on_delete: async_nullify +ci_subscriptions_projects: + - table: projects + column: downstream_project_id + on_delete: async_delete security_scans: - table: ci_builds column: build_id diff --git a/lib/gitlab/import_export/saver.rb b/lib/gitlab/import_export/saver.rb index a20e545f6aa..07f4b333378 100644 --- a/lib/gitlab/import_export/saver.rb +++ b/lib/gitlab/import_export/saver.rb @@ -16,19 +16,20 @@ module Gitlab def save if compress_and_save - Gitlab::Export::Logger.info( - message: 'Export archive saved', - exportable_class: @exportable.class.to_s, - archive_file: archive_file - ) + log_export_results('Export archive saved') save_upload + + log_export_results('Export archive uploaded') else @shared.error(Gitlab::ImportExport::Error.new(error_message)) + false end rescue StandardError => e @shared.error(e) + log_export_results('Export archive saver failed') + false ensure remove_archive_tmp_dir @@ -36,8 +37,16 @@ module Gitlab private + attr_accessor :compress_duration_s, :assign_duration_s, :upload_duration_s, :upload_bytes + def compress_and_save - tar_czf(archive: archive_file, dir: @shared.export_path) + result = nil + + @compress_duration_s = Benchmark.realtime do + result = tar_czf(archive: archive_file, dir: @shared.export_path) + end + + result end def remove_archive_tmp_dir @@ -51,9 +60,12 @@ module Gitlab def save_upload upload = initialize_upload - File.open(archive_file) { |file| upload.export_file = file } + @upload_bytes = File.size(archive_file) + @assign_duration_s = Benchmark.realtime do + File.open(archive_file) { |file| upload.export_file = file } + end - upload.save! + @upload_duration_s = Benchmark.realtime { upload.save! } true end @@ -67,6 +79,23 @@ module Gitlab ImportExportUpload.find_or_initialize_by(Hash[exportable_kind, @exportable]) end + + def log_export_results(message) + Gitlab::Export::Logger.info(message: message, **log_data) + end + + def log_data + ApplicationContext.current.merge( + { + exportable_class: @exportable.class.to_s, + archive_file: archive_file, + compress_duration_s: compress_duration_s&.round(6), + assign_duration_s: assign_duration_s&.round(6), + upload_duration_s: upload_duration_s&.round(6), + upload_bytes: upload_bytes + } + ).compact + end end end end diff --git a/locale/gitlab.pot b/locale/gitlab.pot index fa5fe2dea99..6a42992d50c 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -4494,22 +4494,28 @@ msgstr "" msgid "ApprovalSettings|Merge request approval settings have been updated." msgstr "" +msgid "ApprovalSettings|Prevent approval by author" +msgstr "" + msgid "ApprovalSettings|Prevent approval by author." msgstr "" +msgid "ApprovalSettings|Prevent approvals by users who add commits" +msgstr "" + msgid "ApprovalSettings|Prevent approvals by users who add commits." msgstr "" -msgid "ApprovalSettings|Prevent editing approval rules in merge requests." +msgid "ApprovalSettings|Prevent editing approval rules in merge requests" msgstr "" msgid "ApprovalSettings|Prevent editing approval rules in projects and merge requests." 
msgstr "" -msgid "ApprovalSettings|Remove all approvals when commits are added to the source branch." +msgid "ApprovalSettings|Remove all approvals when commits are added to the source branch" msgstr "" -msgid "ApprovalSettings|Require user password to approve." +msgid "ApprovalSettings|Require user password to approve" msgstr "" msgid "ApprovalSettings|There was an error loading merge request approval settings." @@ -31543,7 +31549,7 @@ msgstr "" msgid "SecurityApprovals|License-Check" msgstr "" -msgid "SecurityApprovals|Requires approval for Denied licenses. %{linkStart}More information%{linkEnd}" +msgid "SecurityApprovals|Requires approval for Denied licenses. %{linkStart}Learn more.%{linkEnd}" msgstr "" msgid "SecurityApprovals|Requires approval for decreases in test coverage. %{linkStart}Learn more.%{linkEnd}" diff --git a/rubocop/cop/file_decompression.rb b/rubocop/cop/file_decompression.rb new file mode 100644 index 00000000000..44813244028 --- /dev/null +++ b/rubocop/cop/file_decompression.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +module RuboCop + module Cop + # Check for symlinks when extracting files to avoid arbitrary file reading. + class FileDecompression < RuboCop::Cop::Cop + MSG = <<~EOF + While extracting files check for symlink to avoid arbitrary file reading. + https://gitlab.com/gitlab-com/gl-infra/production/-/issues/6132 + EOF + + def_node_matcher :system?, <<~PATTERN + (send {nil? | const} {:system | :exec | :spawn | :popen} + (str $_)) + PATTERN + + def_node_matcher :subshell?, <<~PATTERN + (xstr + (str $_)) + PATTERN + + FORBIDDEN_COMMANDS = %w[gunzip gzip zip tar].freeze + + def on_xstr(node) + subshell?(node) do |match| + add_offense(node, message: MSG) if forbidden_command?(match) + end + end + + def on_send(node) + system?(node) do |match| + add_offense(node, location: :expression, message: MSG) if forbidden_command?(match) + end + end + + private + + def forbidden_command?(cmd) + FORBIDDEN_COMMANDS.any? do |forbidden| + cmd.match?(forbidden) + end + end + end + end +end diff --git a/scripts/insert-rspec-profiling-data b/scripts/insert-rspec-profiling-data index be25972644c..996ad78ba5f 100755 --- a/scripts/insert-rspec-profiling-data +++ b/scripts/insert-rspec-profiling-data @@ -43,4 +43,4 @@ def insert_data(path) end end -insert_data('rspec_profiling') if ENV['RSPEC_PROFILING_POSTGRES_URL'].present? +insert_data(ENV['RSPEC_PROFILING_FOLDER_PATH']) if ENV['RSPEC_PROFILING_POSTGRES_URL'].present? diff --git a/scripts/rspec_helpers.sh b/scripts/rspec_helpers.sh index 2a6eb91a1f3..a6d9899bf2a 100644 --- a/scripts/rspec_helpers.sh +++ b/scripts/rspec_helpers.sh @@ -1,15 +1,18 @@ #!/usr/bin/env bash function retrieve_tests_metadata() { - mkdir -p $(dirname "$KNAPSACK_RSPEC_SUITE_REPORT_PATH") $(dirname "$FLAKY_RSPEC_SUITE_REPORT_PATH") rspec_profiling/ + mkdir -p $(dirname "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}") $(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}") "${RSPEC_PROFILING_FOLDER_PATH}" if [[ -n "${RETRIEVE_TESTS_METADATA_FROM_PAGES}" ]]; then if [[ ! -f "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ]]; then - curl --location -o "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" + curl --location -o "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" || + echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" fi if [[ ! 
-f "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ]]; then - curl --location -o "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${FLAKY_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}" + curl --location -o "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${FLAKY_RSPEC_SUITE_REPORT_PATH}" || + curl --location -o "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/rspec_flaky/report-suite.json" || # temporary back-compat + echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}" fi else # ${CI_DEFAULT_BRANCH} might not be master in other forks but we want to @@ -31,7 +34,14 @@ function retrieve_tests_metadata() { fi if [[ ! -f "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ]]; then - scripts/api/download_job_artifact.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" --job-id "${test_metadata_job_id}" --artifact-path "${FLAKY_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}" + scripts/api/download_job_artifact.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" --job-id "${test_metadata_job_id}" --artifact-path "${FLAKY_RSPEC_SUITE_REPORT_PATH}" || + scripts/api/download_job_artifact.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" --job-id "${test_metadata_job_id}" --artifact-path "rspec_flaky/report-suite.json" || # temporary back-compat + echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}" + + # temporary back-compat + if [[ -f "rspec_flaky/report-suite.json" ]]; then + mv "rspec_flaky/report-suite.json" "${FLAKY_RSPEC_SUITE_REPORT_PATH}" + fi fi else echo "test_metadata_job_id couldn't be found!" @@ -42,21 +52,24 @@ function retrieve_tests_metadata() { } function update_tests_metadata() { + local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}")/" + local knapsack_folder_path="$(dirname "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}")/" + echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" - scripts/merge-reports "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" knapsack/rspec*.json - rm -f knapsack/rspec*.json + scripts/merge-reports "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ${knapsack_folder_path}rspec*.json export FLAKY_RSPEC_GENERATE_REPORT="true" - scripts/merge-reports "${FLAKY_RSPEC_SUITE_REPORT_PATH}" rspec_flaky/all_*.json + scripts/merge-reports "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ${rspec_flaky_folder_path}all_*.json scripts/flaky_examples/prune-old-flaky-examples "${FLAKY_RSPEC_SUITE_REPORT_PATH}" - rm -f rspec_flaky/all_*.json rspec_flaky/new_*.json if [[ "$CI_PIPELINE_SOURCE" == "schedule" ]]; then scripts/insert-rspec-profiling-data else echo "Not inserting profiling data as the pipeline is not a scheduled one." 
fi + + cleanup_individual_job_reports } function retrieve_tests_mapping() { @@ -158,12 +171,20 @@ function retrieve_previous_failed_tests() { scripts/failed_tests.rb --previous-tests-report-path "${pipeline_report_path}" --output-directory "${directory_for_output_reports}" --rspec-pg-regex "${rspec_pg_regex}" --rspec-ee-pg-regex "${rspec_ee_pg_regex}" } -function rspec_simple_job() { +function rspec_args() { local rspec_opts="${1}" + local junit_report_file="${2:-${JUNIT_RESULT_FILE}}" + + echo "-Ispec -rspec_helper --color --format documentation --format RspecJunitFormatter --out ${junit_report_file} ${rspec_opts}" +} +function rspec_simple_job() { export NO_KNAPSACK="1" - eval "bin/rspec -Ispec -rspec_helper --color --format documentation --format RspecJunitFormatter --out junit_rspec.xml ${rspec_opts}" + local rspec_cmd="bin/rspec $(rspec_args "${1}" "${2}")" + echoinfo "Running RSpec command: ${rspec_cmd}" + + eval "${rspec_cmd}" } function rspec_db_library_code() { @@ -172,6 +193,24 @@ function rspec_db_library_code() { rspec_simple_job "-- ${db_files}" } +function debug_rspec_variables() { + echoinfo "SKIP_FLAKY_TESTS_AUTOMATICALLY: ${SKIP_FLAKY_TESTS_AUTOMATICALLY}" + echoinfo "RETRY_FAILED_TESTS_IN_NEW_PROCESS: ${RETRY_FAILED_TESTS_IN_NEW_PROCESS}" + + echoinfo "KNAPSACK_GENERATE_REPORT: ${KNAPSACK_GENERATE_REPORT}" + echoinfo "FLAKY_RSPEC_GENERATE_REPORT: ${FLAKY_RSPEC_GENERATE_REPORT}" + + echoinfo "KNAPSACK_TEST_FILE_PATTERN: ${KNAPSACK_TEST_FILE_PATTERN}" + echoinfo "KNAPSACK_LOG_LEVEL: ${KNAPSACK_LOG_LEVEL}" + echoinfo "KNAPSACK_REPORT_PATH: ${KNAPSACK_REPORT_PATH}" + + echoinfo "FLAKY_RSPEC_SUITE_REPORT_PATH: ${FLAKY_RSPEC_SUITE_REPORT_PATH}" + echoinfo "FLAKY_RSPEC_REPORT_PATH: ${FLAKY_RSPEC_REPORT_PATH}" + echoinfo "NEW_FLAKY_RSPEC_REPORT_PATH: ${NEW_FLAKY_RSPEC_REPORT_PATH}" + echoinfo "SKIPPED_FLAKY_TESTS_REPORT_PATH: ${SKIPPED_FLAKY_TESTS_REPORT_PATH}" + echoinfo "RETRIED_TESTS_REPORT_PATH: ${RETRIED_TESTS_REPORT_PATH}" +} + function rspec_paralellized_job() { read -ra job_name <<< "${CI_JOB_NAME}" local test_tool="${job_name[0]}" @@ -179,6 +218,9 @@ function rspec_paralellized_job() { local report_name=$(echo "${CI_JOB_NAME}" | sed -E 's|[/ ]|_|g') # e.g. 'rspec unit pg12 1/24' would become 'rspec_unit_pg12_1_24' local rspec_opts="${1}" local spec_folder_prefixes="" + local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}")/" + local knapsack_folder_path="$(dirname "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}")/" + local rspec_run_status=0 if [[ "${test_tool}" =~ "-ee" ]]; then spec_folder_prefixes="'ee/'" @@ -193,7 +235,7 @@ function rspec_paralellized_job() { fi export KNAPSACK_LOG_LEVEL="debug" - export KNAPSACK_REPORT_PATH="knapsack/${report_name}_report.json" + export KNAPSACK_REPORT_PATH="${knapsack_folder_path}${report_name}_report.json" # There's a bug where artifacts are sometimes not downloaded. Since specs can run without the Knapsack report, we can # handle the missing artifact gracefully here. See https://gitlab.com/gitlab-org/gitlab/-/issues/212349. 
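For the scripts/rspec_helpers.sh changes above: rspec_args now centralizes the RSpec CLI flags and the JUnit output path, and rspec_simple_job builds its command from it. The following is a rough sketch, illustrative only and assuming JUNIT_RESULT_FILE points at rspec/junit_rspec.xml as in the CI configuration; print_rspec_command is a hypothetical helper, not part of this commit.

# Illustrative only (not part of this commit): print the command rspec_simple_job
# would run for a given set of options, without executing it.
function print_rspec_command() {
  local rspec_opts="${1}"

  echo "bin/rspec $(rspec_args "${rspec_opts}")"
}

# print_rspec_command "--tag ~quarantine spec/lib/gitlab"
# => bin/rspec -Ispec -rspec_helper --color --format documentation \
#    --format RspecJunitFormatter --out rspec/junit_rspec.xml --tag ~quarantine spec/lib/gitlab

Passing a second argument (for example "${JUNIT_RETRY_FILE}") redirects the JUnit output to a separate file, which is how the retry pass below keeps its report apart until junit_merge folds it back into the first-try report.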
@@ -208,16 +250,14 @@ function rspec_paralellized_job() { export KNAPSACK_TEST_FILE_PATTERN="${pattern}" fi - echo "KNAPSACK_TEST_FILE_PATTERN: ${KNAPSACK_TEST_FILE_PATTERN}" - echo "SKIP_FLAKY_TESTS_AUTOMATICALLY: ${SKIP_FLAKY_TESTS_AUTOMATICALLY}" + export FLAKY_RSPEC_REPORT_PATH="${rspec_flaky_folder_path}all_${report_name}_report.json" + export NEW_FLAKY_RSPEC_REPORT_PATH="${rspec_flaky_folder_path}new_${report_name}_report.json" + export SKIPPED_FLAKY_TESTS_REPORT_PATH="${rspec_flaky_folder_path}skipped_flaky_tests_${report_name}_report.txt" + export RETRIED_TESTS_REPORT_PATH="${rspec_flaky_folder_path}retried_tests_${report_name}_report.txt" if [[ -d "ee/" ]]; then export KNAPSACK_GENERATE_REPORT="true" export FLAKY_RSPEC_GENERATE_REPORT="true" - export SUITE_FLAKY_RSPEC_REPORT_PATH="${FLAKY_RSPEC_SUITE_REPORT_PATH}" - export FLAKY_RSPEC_REPORT_PATH="rspec_flaky/all_${report_name}_report.json" - export NEW_FLAKY_RSPEC_REPORT_PATH="rspec_flaky/new_${report_name}_report.json" - export SKIPPED_FLAKY_TESTS_REPORT_PATH="rspec_flaky/skipped_flaky_tests_${report_name}_report.txt" if [[ ! -f $FLAKY_RSPEC_REPORT_PATH ]]; then echo "{}" > "${FLAKY_RSPEC_REPORT_PATH}" @@ -228,19 +268,44 @@ function rspec_paralellized_job() { fi fi + debug_rspec_variables + mkdir -p tmp/memory_test export MEMORY_TEST_PATH="tmp/memory_test/${report_name}_memory.csv" - local rspec_args="-Ispec -rspec_helper --color --format documentation --format RspecJunitFormatter --out junit_rspec.xml ${rspec_opts}" - if [[ -n $RSPEC_TESTS_MAPPING_ENABLED ]]; then - tooling/bin/parallel_rspec --rspec_args "${rspec_args}" --filter "tmp/matching_tests.txt" + tooling/bin/parallel_rspec --rspec_args "$(rspec_args)" --filter "tmp/matching_tests.txt" || rspec_run_status=$? else - tooling/bin/parallel_rspec --rspec_args "${rspec_args}" + tooling/bin/parallel_rspec --rspec_args "$(rspec_args "${rspec_opts}")" || rspec_run_status=$? fi - date + echoinfo "RSpec exited with ${rspec_run_status}." + + # Experiment to retry failed examples in a new RSpec process: https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1148 + if [[ $rspec_run_status -ne 0 ]]; then + if [[ "${RETRY_FAILED_TESTS_IN_NEW_PROCESS}" == "true" ]]; then + # Keep track of the tests that are retried, later consolidated in a single file by the `rspec:flaky-tests-report` job + local failed_examples=$(grep " failed" ${RSPEC_LAST_RUN_RESULTS_FILE}) + echo "${CI_JOB_URL}" > "${RETRIED_TESTS_REPORT_PATH}" + echo $failed_examples >> "${RETRIED_TESTS_REPORT_PATH}" + + echoinfo "Retrying the failing examples in a new RSpec proces..." + + install_junit_merge_gem + + # Retry only the tests that failed on first try + rspec_simple_job "--only-failures" "${JUNIT_RETRY_FILE}" + rspec_run_status=$? + + # Merge the JUnit report from retry into the first-try report + junit_merge "${JUNIT_RETRY_FILE}" "${JUNIT_RESULT_FILE}" + fi + else + echosuccess "No examples to retry, congrats!" 
+ fi + + exit $rspec_run_status } function rspec_rerun_previous_failed_tests() { @@ -330,3 +395,30 @@ function generate_frontend_fixtures_mapping() { rspec_simple_job "--pattern \"${pattern}\"" } + +function cleanup_individual_job_reports() { + local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}")/" + local knapsack_folder_path="$(dirname "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}")/" + + rm -rf ${knapsack_folder_path}rspec*.json \ + ${rspec_flaky_folder_path}all_*.json \ + ${rspec_flaky_folder_path}new_*.json \ + ${rspec_flaky_folder_path}skipped_flaky_tests_*_report.txt \ + ${rspec_flaky_folder_path}retried_tests_*_report.txt \ + ${RSPEC_LAST_RUN_RESULTS_FILE} \ + ${RSPEC_PROFILING_FOLDER_PATH}/**/* + rmdir ${RSPEC_PROFILING_FOLDER_PATH} || true +} + +function generate_flaky_tests_reports() { + local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}")/" + + debug_rspec_variables + + mkdir -p ${rspec_flaky_folder_path} + + find ${rspec_flaky_folder_path} -type f -name 'skipped_flaky_tests_*_report.txt' -exec cat {} + >> "${SKIPPED_FLAKY_TESTS_REPORT_PATH}" + find ${rspec_flaky_folder_path} -type f -name 'retried_tests_*_report.txt' -exec cat {} + >> "${RETRIED_TESTS_REPORT_PATH}" + + cleanup_individual_job_reports +} diff --git a/scripts/utils.sh b/scripts/utils.sh index 15047d35fc3..31afc225019 100644 --- a/scripts/utils.sh +++ b/scripts/utils.sh @@ -64,16 +64,20 @@ function setup_db() { } function install_api_client_dependencies_with_apk() { - apk add --update openssl curl jq + run_timed_command "apk add --update openssl curl jq" } function install_gitlab_gem() { - gem install httparty --no-document --version 0.18.1 - gem install gitlab --no-document --version 4.17.0 + run_timed_command "gem install httparty --no-document --version 0.18.1" + run_timed_command "gem install gitlab --no-document --version 4.17.0" } function install_tff_gem() { - gem install test_file_finder --version 0.1.1 + run_timed_command "gem install test_file_finder --no-document --version 0.1.1" +} + +function install_junit_merge_gem() { + run_timed_command "gem install junit_merge --no-document --version 0.1.2" } function run_timed_command() { diff --git a/spec/factories/container_repositories.rb b/spec/factories/container_repositories.rb index 86bb129067f..ce83e9e8006 100644 --- a/spec/factories/container_repositories.rb +++ b/spec/factories/container_repositories.rb @@ -33,6 +33,52 @@ FactoryBot.define do expiration_policy_cleanup_status { :cleanup_ongoing } end + trait :default do + migration_state { 'default' } + end + + trait :pre_importing do + migration_state { 'pre_importing' } + migration_pre_import_started_at { Time.zone.now } + end + + trait :pre_import_done do + migration_state { 'pre_import_done' } + migration_pre_import_started_at { Time.zone.now } + migration_pre_import_done_at { Time.zone.now } + end + + trait :importing do + migration_state { 'importing' } + migration_pre_import_started_at { Time.zone.now } + migration_pre_import_done_at { Time.zone.now } + migration_import_started_at { Time.zone.now } + end + + trait :import_done do + migration_state { 'import_done' } + migration_pre_import_started_at { Time.zone.now } + migration_pre_import_done_at { Time.zone.now } + migration_import_started_at { Time.zone.now } + migration_import_done_at { Time.zone.now } + end + + trait :import_aborted do + migration_state { 'import_aborted' } + migration_pre_import_started_at { Time.zone.now } + migration_pre_import_done_at { Time.zone.now } + migration_import_started_at { 
Time.zone.now } + migration_aborted_at { Time.zone.now } + migration_aborted_in_state { 'importing' } + migration_retries_count { 1 } + end + + trait :import_skipped do + migration_state { 'import_skipped' } + migration_skipped_at { Time.zone.now } + migration_skipped_reason { :too_many_tags } + end + after(:build) do |repository, evaluator| next if evaluator.tags.to_a.none? diff --git a/spec/frontend/batch_comments/components/draft_note_spec.js b/spec/frontend/batch_comments/components/draft_note_spec.js index f93daf91c2f..145af3a2d5b 100644 --- a/spec/frontend/batch_comments/components/draft_note_spec.js +++ b/spec/frontend/batch_comments/components/draft_note_spec.js @@ -1,8 +1,10 @@ +import { nextTick } from 'vue'; +import { GlButton } from '@gitlab/ui'; import { getByRole } from '@testing-library/dom'; import { shallowMount } from '@vue/test-utils'; -import { nextTick } from 'vue'; import { stubComponent } from 'helpers/stub_component'; import DraftNote from '~/batch_comments/components/draft_note.vue'; +import PublishButton from '~/batch_comments/components/publish_button.vue'; import { createStore } from '~/batch_comments/stores'; import NoteableNote from '~/notes/components/noteable_note.vue'; import '~/behaviors/markdown/render_gfm'; @@ -28,6 +30,8 @@ describe('Batch comments draft note component', () => { }; const getList = () => getByRole(wrapper.element, 'list'); + const findSubmitReviewButton = () => wrapper.findComponent(PublishButton); + const findAddCommentButton = () => wrapper.findComponent(GlButton); const createComponent = (propsData = { draft }) => { wrapper = shallowMount(DraftNote, { @@ -63,7 +67,7 @@ describe('Batch comments draft note component', () => { describe('add comment now', () => { it('dispatches publishSingleDraft when clicking', () => { createComponent(); - const publishNowButton = wrapper.find({ ref: 'publishNowButton' }); + const publishNowButton = findAddCommentButton(); publishNowButton.vm.$emit('click'); expect(wrapper.vm.$store.dispatch).toHaveBeenCalledWith( @@ -77,10 +81,33 @@ describe('Batch comments draft note component', () => { wrapper.vm.$store.state.batchComments.currentlyPublishingDrafts.push(1); await nextTick(); - const publishNowButton = wrapper.find({ ref: 'publishNowButton' }); + const publishNowButton = findAddCommentButton(); expect(publishNowButton.props().loading).toBe(true); }); + + it('sets as disabled when review is publishing', async () => { + createComponent(); + wrapper.vm.$store.state.batchComments.isPublishing = true; + + await nextTick(); + const publishNowButton = findAddCommentButton(); + + expect(publishNowButton.props().disabled).toBe(true); + expect(publishNowButton.props().loading).toBe(false); + }); + }); + + describe('submit review', () => { + it('sets as disabled when draft is publishing', async () => { + createComponent(); + wrapper.vm.$store.state.batchComments.currentlyPublishingDrafts.push(1); + + await nextTick(); + const publishNowButton = findSubmitReviewButton(); + + expect(publishNowButton.attributes().disabled).toBeTruthy(); + }); }); describe('update', () => { diff --git a/spec/graphql/resolvers/ci/runners_resolver_spec.rb b/spec/graphql/resolvers/ci/runners_resolver_spec.rb index df6490df915..9251fbf24d9 100644 --- a/spec/graphql/resolvers/ci/runners_resolver_spec.rb +++ b/spec/graphql/resolvers/ci/runners_resolver_spec.rb @@ -43,34 +43,99 @@ RSpec.describe Resolvers::Ci::RunnersResolver do # Only thing we can do is to verify that args from the resolver is correctly transformed to params of the Finder 
and we return the Finder's result back. describe 'Allowed query arguments' do let(:finder) { instance_double(::Ci::RunnersFinder) } - let(:args) do - { - active: true, - status: 'active', - type: :instance_type, - tag_list: ['active_runner'], - search: 'abc', - sort: :contacted_asc - } + + context 'with active filter' do + let(:args) do + { + active: true, + status: 'active', + type: :instance_type, + tag_list: ['active_runner'], + search: 'abc', + sort: :contacted_asc + } + end + + let(:expected_params) do + { + active: true, + status_status: 'active', + type_type: :instance_type, + tag_name: ['active_runner'], + preload: { tag_name: nil }, + search: 'abc', + sort: 'contacted_asc' + } + end + + it 'calls RunnersFinder with expected arguments' do + expect(::Ci::RunnersFinder).to receive(:new).with(current_user: user, params: expected_params).once.and_return(finder) + allow(finder).to receive(:execute).once.and_return([:execute_return_value]) + + expect(subject.items.to_a).to eq([:execute_return_value]) + end + end + + context 'with both active and paused filter' do + let(:args) do + { + active: true, + paused: true + } + end + + let(:expected_params) do + { + active: false, + preload: { tag_name: nil } + } + end + + it 'calls RunnersFinder with expected arguments' do + expect(::Ci::RunnersFinder).to receive(:new).with(current_user: user, params: expected_params).once.and_return(finder) + allow(finder).to receive(:execute).once.and_return([:execute_return_value]) + + expect(subject.items.to_a).to eq([:execute_return_value]) + end end - let(:expected_params) do - { - active: true, - status_status: 'active', - type_type: :instance_type, - tag_name: ['active_runner'], - preload: { tag_name: nil }, - search: 'abc', - sort: 'contacted_asc' - } + context 'with paused filter' do + let(:args) do + { paused: true } + end + + let(:expected_params) do + { + active: false, + preload: { tag_name: nil } + } + end + + it 'calls RunnersFinder with expected arguments' do + expect(::Ci::RunnersFinder).to receive(:new).with(current_user: user, params: expected_params).once.and_return(finder) + allow(finder).to receive(:execute).once.and_return([:execute_return_value]) + + expect(subject.items.to_a).to eq([:execute_return_value]) + end end - it 'calls RunnersFinder with expected arguments' do - allow(::Ci::RunnersFinder).to receive(:new).with(current_user: user, params: expected_params).once.and_return(finder) - allow(finder).to receive(:execute).once.and_return([:execute_return_value]) + context 'with neither paused or active filters' do + let(:args) do + {} + end + + let(:expected_params) do + { + preload: { tag_name: nil } + } + end + + it 'calls RunnersFinder with expected arguments' do + expect(::Ci::RunnersFinder).to receive(:new).with(current_user: user, params: expected_params).once.and_return(finder) + allow(finder).to receive(:execute).once.and_return([:execute_return_value]) - expect(subject.items.to_a).to eq([:execute_return_value]) + expect(subject.items.to_a).to eq([:execute_return_value]) + end end end end diff --git a/spec/lib/container_registry/client_spec.rb b/spec/lib/container_registry/client_spec.rb index 259d7d5ad13..974c3478ddc 100644 --- a/spec/lib/container_registry/client_spec.rb +++ b/spec/lib/container_registry/client_spec.rb @@ -5,29 +5,7 @@ require 'spec_helper' RSpec.describe ContainerRegistry::Client do using RSpec::Parameterized::TableSyntax - let(:token) { '12345' } - let(:options) { { token: token } } - let(:registry_api_url) { 'http://container-registry' } - let(:client) { 
described_class.new(registry_api_url, options) } - let(:push_blob_headers) do - { - 'Accept' => 'application/vnd.docker.distribution.manifest.v2+json, application/vnd.oci.image.manifest.v1+json', - 'Authorization' => "bearer #{token}", - 'Content-Type' => 'application/octet-stream', - 'User-Agent' => "GitLab/#{Gitlab::VERSION}" - } - end - - let(:headers_with_accept_types) do - { - 'Accept' => 'application/vnd.docker.distribution.manifest.v2+json, application/vnd.oci.image.manifest.v1+json', - 'Authorization' => "bearer #{token}", - 'User-Agent' => "GitLab/#{Gitlab::VERSION}" - } - end - - let(:expected_faraday_headers) { { user_agent: "GitLab/#{Gitlab::VERSION}" } } - let(:expected_faraday_request_options) { Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS } + include_context 'container registry client' shared_examples 'handling timeouts' do let(:retry_options) do @@ -48,14 +26,14 @@ RSpec.describe ContainerRegistry::Client do retry_block: -> (_, _, _, _) { actual_retries += 1 } ) - stub_const('ContainerRegistry::Client::RETRY_OPTIONS', retry_options_with_block) + stub_const('ContainerRegistry::BaseClient::RETRY_OPTIONS', retry_options_with_block) expect { subject }.to raise_error(Faraday::ConnectionFailed) expect(actual_retries).to eq(retry_options_with_block[:max]) end it 'logs the error' do - stub_const('ContainerRegistry::Client::RETRY_OPTIONS', retry_options) + stub_const('ContainerRegistry::BaseClient::RETRY_OPTIONS', retry_options) expect(Gitlab::ErrorTracking) .to receive(:log_exception) @@ -63,7 +41,7 @@ RSpec.describe ContainerRegistry::Client do .times .with( an_instance_of(Faraday::ConnectionFailed), - class: described_class.name, + class: ::ContainerRegistry::BaseClient.name, url: URI(url) ) @@ -325,14 +303,14 @@ RSpec.describe ContainerRegistry::Client do subject { client.supports_tag_delete? } where(:registry_tags_support_enabled, :is_on_dot_com, :container_registry_features, :expect_registry_to_be_pinged, :expected_result) do - true | true | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | true - true | false | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | true | true - true | true | [] | true | true - true | false | [] | true | true - false | true | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | true - false | false | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | true | false - false | true | [] | true | false - false | false | [] | true | false + true | true | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | true + true | false | [described_class::REGISTRY_TAG_DELETE_FEATURE] | true | true + true | true | [] | true | true + true | false | [] | true | true + false | true | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | true + false | false | [described_class::REGISTRY_TAG_DELETE_FEATURE] | true | false + false | true | [] | true | false + false | false | [] | true | false end with_them do @@ -366,38 +344,38 @@ RSpec.describe ContainerRegistry::Client do subject { described_class.supports_tag_delete? 
} where(:registry_api_url, :registry_enabled, :registry_tags_support_enabled, :is_on_dot_com, :container_registry_features, :expect_registry_to_be_pinged, :expected_result) do - 'http://sandbox.local' | true | true | true | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | true - 'http://sandbox.local' | true | true | false | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | true | true - 'http://sandbox.local' | true | false | true | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | true - 'http://sandbox.local' | true | false | false | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | true | false - 'http://sandbox.local' | false | true | true | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | false - 'http://sandbox.local' | false | true | false | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | false - 'http://sandbox.local' | false | false | true | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | false - 'http://sandbox.local' | false | false | false | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | false - 'http://sandbox.local' | true | true | true | [] | true | true - 'http://sandbox.local' | true | true | false | [] | true | true - 'http://sandbox.local' | true | false | true | [] | true | false - 'http://sandbox.local' | true | false | false | [] | true | false - 'http://sandbox.local' | false | true | true | [] | false | false - 'http://sandbox.local' | false | true | false | [] | false | false - 'http://sandbox.local' | false | false | true | [] | false | false - 'http://sandbox.local' | false | false | false | [] | false | false - '' | true | true | true | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | false - '' | true | true | false | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | false - '' | true | false | true | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | false - '' | true | false | false | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | false - '' | false | true | true | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | false - '' | false | true | false | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | false - '' | false | false | true | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | false - '' | false | false | false | [ContainerRegistry::Client::REGISTRY_TAG_DELETE_FEATURE] | false | false - '' | true | true | true | [] | false | false - '' | true | true | false | [] | false | false - '' | true | false | true | [] | false | false - '' | true | false | false | [] | false | false - '' | false | true | true | [] | false | false - '' | false | true | false | [] | false | false - '' | false | false | true | [] | false | false - '' | false | false | false | [] | false | false + 'http://sandbox.local' | true | true | true | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | true + 'http://sandbox.local' | true | true | false | [described_class::REGISTRY_TAG_DELETE_FEATURE] | true | true + 'http://sandbox.local' | true | false | true | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | true + 'http://sandbox.local' | true | false | false | [described_class::REGISTRY_TAG_DELETE_FEATURE] | true | false + 'http://sandbox.local' | false | true | true | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | false + 'http://sandbox.local' | false | true | false | [described_class::REGISTRY_TAG_DELETE_FEATURE] | 
false | false + 'http://sandbox.local' | false | false | true | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | false + 'http://sandbox.local' | false | false | false | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | false + 'http://sandbox.local' | true | true | true | [] | true | true + 'http://sandbox.local' | true | true | false | [] | true | true + 'http://sandbox.local' | true | false | true | [] | true | false + 'http://sandbox.local' | true | false | false | [] | true | false + 'http://sandbox.local' | false | true | true | [] | false | false + 'http://sandbox.local' | false | true | false | [] | false | false + 'http://sandbox.local' | false | false | true | [] | false | false + 'http://sandbox.local' | false | false | false | [] | false | false + '' | true | true | true | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | false + '' | true | true | false | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | false + '' | true | false | true | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | false + '' | true | false | false | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | false + '' | false | true | true | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | false + '' | false | true | false | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | false + '' | false | false | true | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | false + '' | false | false | false | [described_class::REGISTRY_TAG_DELETE_FEATURE] | false | false + '' | true | true | true | [] | false | false + '' | true | true | false | [] | false | false + '' | true | false | true | [] | false | false + '' | true | false | false | [] | false | false + '' | false | true | true | [] | false | false + '' | false | true | false | [] | false | false + '' | false | false | true | [] | false | false + '' | false | false | false | [] | false | false end with_them do diff --git a/spec/lib/container_registry/gitlab_api_client_spec.rb b/spec/lib/container_registry/gitlab_api_client_spec.rb new file mode 100644 index 00000000000..251e15390b1 --- /dev/null +++ b/spec/lib/container_registry/gitlab_api_client_spec.rb @@ -0,0 +1,178 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ContainerRegistry::GitlabApiClient do + using RSpec::Parameterized::TableSyntax + + include_context 'container registry client' + + describe '#supports_gitlab_api?' do + subject { client.supports_gitlab_api? 
} + + where(:registry_gitlab_api_enabled, :is_on_dot_com, :container_registry_features, :expect_registry_to_be_pinged, :expected_result) do + false | true | [described_class::REGISTRY_GITLAB_V1_API_FEATURE] | false | true + true | false | [described_class::REGISTRY_GITLAB_V1_API_FEATURE] | true | true + true | true | [] | true | true + true | false | [] | true | true + false | true | [described_class::REGISTRY_GITLAB_V1_API_FEATURE] | false | true + false | false | [described_class::REGISTRY_GITLAB_V1_API_FEATURE] | true | false + false | true | [] | true | false + false | false | [] | true | false + end + + with_them do + before do + allow(::Gitlab).to receive(:com?).and_return(is_on_dot_com) + stub_registry_gitlab_api_support(registry_gitlab_api_enabled) + stub_application_setting(container_registry_features: container_registry_features) + end + + it 'returns the expected result' do + if expect_registry_to_be_pinged + expect_next_instance_of(Faraday::Connection) do |connection| + expect(connection).to receive(:run_request).and_call_original + end + else + expect(Faraday::Connection).not_to receive(:new) + end + + expect(subject).to be expected_result + end + end + + context 'with 401 response' do + before do + allow(::Gitlab).to receive(:com?).and_return(false) + stub_application_setting(container_registry_features: []) + stub_request(:get, "#{registry_api_url}/gitlab/v1/") + .to_return(status: 401, body: '') + end + + it { is_expected.to be_truthy } + end + end + + describe '#pre_import_repository' do + let(:path) { 'namespace/path/to/repository' } + + subject { client.pre_import_repository('namespace/path/to/repository') } + + where(:status_code, :expected_result) do + 200 | :already_imported + 202 | :ok + 401 | :unauthorized + 404 | :not_found + 409 | :already_being_imported + 418 | :error + 424 | :pre_import_failed + 425 | :already_being_imported + 429 | :too_many_imports + end + + with_them do + before do + stub_pre_import(path, status_code, pre: true) + end + + it { is_expected.to eq(expected_result) } + end + end + + describe '#pre_import_repository' do + let(:path) { 'namespace/path/to/repository' } + + subject { client.import_repository('namespace/path/to/repository') } + + where(:status_code, :expected_result) do + 200 | :already_imported + 202 | :ok + 401 | :unauthorized + 404 | :not_found + 409 | :already_being_imported + 418 | :error + 424 | :pre_import_failed + 425 | :already_being_imported + 429 | :too_many_imports + end + + with_them do + before do + stub_pre_import(path, status_code, pre: false) + end + + it { is_expected.to eq(expected_result) } + end + end + + describe '.supports_gitlab_api?' do + subject { described_class.supports_gitlab_api? 
} + + where(:registry_gitlab_api_enabled, :is_on_dot_com, :container_registry_features, :expect_registry_to_be_pinged, :expected_result) do + true | true | [described_class::REGISTRY_GITLAB_V1_API_FEATURE] | false | true + true | false | [described_class::REGISTRY_GITLAB_V1_API_FEATURE] | true | true + false | true | [described_class::REGISTRY_GITLAB_V1_API_FEATURE] | false | true + false | false | [described_class::REGISTRY_GITLAB_V1_API_FEATURE] | true | false + true | true | [] | true | true + true | false | [] | true | true + false | true | [] | true | false + false | false | [] | true | false + end + + with_them do + before do + allow(::Gitlab).to receive(:com?).and_return(is_on_dot_com) + stub_container_registry_config(enabled: true, api_url: registry_api_url, key: 'spec/fixtures/x509_certificate_pk.key') + stub_registry_gitlab_api_support(registry_gitlab_api_enabled) + stub_application_setting(container_registry_features: container_registry_features) + end + + it 'returns the expected result' do + if expect_registry_to_be_pinged + expect_next_instance_of(Faraday::Connection) do |connection| + expect(connection).to receive(:run_request).and_call_original + end + else + expect(Faraday::Connection).not_to receive(:new) + end + + expect(subject).to be expected_result + end + end + + context 'with the registry disabled' do + before do + stub_container_registry_config(enabled: false, api_url: 'http://sandbox.local', key: 'spec/fixtures/x509_certificate_pk.key') + end + + it 'returns false' do + expect(Faraday::Connection).not_to receive(:new) + + expect(subject).to be_falsey + end + end + + context 'with a blank registry url' do + before do + stub_container_registry_config(enabled: true, api_url: '', key: 'spec/fixtures/x509_certificate_pk.key') + end + + it 'returns false' do + expect(Faraday::Connection).not_to receive(:new) + + expect(subject).to be_falsey + end + end + end + + def stub_pre_import(path, status_code, pre:) + stub_request(:put, "#{registry_api_url}/gitlab/v1/import/#{path}?pre=#{pre}") + .to_return(status: status_code, body: '') + end + + def stub_registry_gitlab_api_support(supported = true) + status_code = supported ? 200 : 404 + stub_request(:get, "#{registry_api_url}/gitlab/v1/") + .to_return(status: status_code, body: '') + end +end diff --git a/spec/lib/container_registry/migration_spec.rb b/spec/lib/container_registry/migration_spec.rb new file mode 100644 index 00000000000..cc20fc24d33 --- /dev/null +++ b/spec/lib/container_registry/migration_spec.rb @@ -0,0 +1,142 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ContainerRegistry::Migration do + using RSpec::Parameterized::TableSyntax + + describe '.enabled?' do + subject { described_class.enabled? 
} + + it { is_expected.to eq(true) } + + context 'feature flag disabled' do + before do + stub_feature_flags(container_registry_migration_phase2_enabled: false) + end + + it { is_expected.to eq(false) } + end + end + + describe '.enqueue_waiting_time' do + subject { described_class.enqueue_waiting_time } + + where(:slow_enabled, :fast_enabled, :expected_result) do + false | false | 1.hour + true | false | 6.hours + false | true | 0 + true | true | 0 + end + + with_them do + before do + stub_feature_flags( + container_registry_migration_phase2_enqueue_speed_slow: slow_enabled, + container_registry_migration_phase2_enqueue_speed_fast: fast_enabled + ) + end + + it { is_expected.to eq(expected_result) } + end + end + + describe '.capacity' do + subject { described_class.capacity } + + where(:ff_1_enabled, :ff_10_enabled, :ff_25_enabled, :expected_result) do + false | false | false | 0 + true | false | false | 1 + true | true | false | 10 + true | true | true | 25 + false | true | false | 10 + false | true | true | 25 + false | false | true | 25 + true | false | true | 25 + end + + with_them do + before do + stub_feature_flags( + container_registry_migration_phase2_capacity_1: ff_1_enabled, + container_registry_migration_phase2_capacity_10: ff_10_enabled, + container_registry_migration_phase2_capacity_25: ff_25_enabled + ) + end + + it { is_expected.to eq(expected_result) } + end + end + + describe '.max_tags_count' do + let(:value) { 1 } + + before do + stub_application_setting(container_registry_import_max_tags_count: value) + end + + it 'returns the matching application_setting' do + expect(described_class.max_tags_count).to eq(value) + end + end + + describe '.max_retries' do + let(:value) { 1 } + + before do + stub_application_setting(container_registry_import_max_retries: value) + end + + it 'returns the matching application_setting' do + expect(described_class.max_retries).to eq(value) + end + end + + describe '.start_max_retries' do + let(:value) { 1 } + + before do + stub_application_setting(container_registry_import_start_max_retries: value) + end + + it 'returns the matching application_setting' do + expect(described_class.start_max_retries).to eq(value) + end + end + + describe '.max_step_duration' do + let(:value) { 5.minutes } + + before do + stub_application_setting(container_registry_import_max_step_duration: value) + end + + it 'returns the matching application_setting' do + expect(described_class.max_step_duration).to eq(value) + end + end + + describe '.target_plan_name' do + let(:value) { 'free' } + + before do + stub_application_setting(container_registry_import_target_plan: value) + end + + it 'returns the matching application_setting' do + expect(described_class.target_plan_name).to eq(value) + end + end + + describe '.created_before' do + let(:value) { 1.day.ago } + + before do + stub_application_setting(container_registry_import_created_before: value) + end + + it 'returns the matching application_setting' do + expect(described_class.created_before).to eq(value) + end + end +end diff --git a/spec/lib/container_registry/registry_spec.rb b/spec/lib/container_registry/registry_spec.rb index d6e2b17f53b..405ef1170ae 100644 --- a/spec/lib/container_registry/registry_spec.rb +++ b/spec/lib/container_registry/registry_spec.rb @@ -27,4 +27,10 @@ RSpec.describe ContainerRegistry::Registry do it { is_expected.to eq(path) } end end + + describe '#gitlab_api_client' do + subject { registry.gitlab_api_client } + + it { is_expected.to 
be_instance_of(ContainerRegistry::GitlabApiClient) } + end end diff --git a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb index 112ff2c7380..3a8c14fc2f6 100644 --- a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb +++ b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb @@ -19,7 +19,6 @@ RSpec.describe 'cross-database foreign keys' do ci_freeze_periods.project_id ci_job_artifacts.project_id ci_job_token_project_scope_links.added_by_id - ci_job_token_project_scope_links.source_project_id ci_job_token_project_scope_links.target_project_id ci_pending_builds.namespace_id ci_pending_builds.project_id @@ -33,11 +32,9 @@ RSpec.describe 'cross-database foreign keys' do ci_running_builds.project_id ci_sources_projects.source_project_id ci_stages.project_id - ci_subscriptions_projects.downstream_project_id ci_subscriptions_projects.upstream_project_id ci_unit_tests.project_id dast_site_profiles_pipelines.ci_pipeline_id - external_pull_requests.project_id vulnerability_feedback.pipeline_id ).freeze end diff --git a/spec/lib/gitlab/import_export/saver_spec.rb b/spec/lib/gitlab/import_export/saver_spec.rb index 877474dd862..90accfaf3a9 100644 --- a/spec/lib/gitlab/import_export/saver_spec.rb +++ b/spec/lib/gitlab/import_export/saver_spec.rb @@ -36,6 +36,32 @@ RSpec.describe Gitlab::ImportExport::Saver do .to match(%r[\/uploads\/-\/system\/import_export_upload\/export_file.*]) end + it 'logs metrics after saving' do + stub_uploads_object_storage(ImportExportUploader) + expect(Gitlab::Export::Logger).to receive(:info).with( + hash_including( + message: 'Export archive saved', + exportable_class: 'Project', + 'correlation_id' => anything, + archive_file: anything, + compress_duration_s: anything + )).and_call_original + + expect(Gitlab::Export::Logger).to receive(:info).with( + hash_including( + message: 'Export archive uploaded', + exportable_class: 'Project', + 'correlation_id' => anything, + archive_file: anything, + compress_duration_s: anything, + assign_duration_s: anything, + upload_duration_s: anything, + upload_bytes: anything + )).and_call_original + + subject.save + end + it 'removes archive path and keeps base path untouched' do allow(shared).to receive(:archive_path).and_return(archive_path) @@ -45,4 +71,22 @@ RSpec.describe Gitlab::ImportExport::Saver do expect(FileUtils).to have_received(:rm_rf).with(archive_path) expect(Dir.exist?(archive_path)).to eq(false) end + + context 'when save throws an exception' do + before do + expect(subject).to receive(:save_upload).and_raise(SocketError.new) + end + + it 'logs a saver error' do + allow(Gitlab::Export::Logger).to receive(:info).with(anything).and_call_original + expect(Gitlab::Export::Logger).to receive(:info).with( + hash_including( + message: 'Export archive saver failed', + exportable_class: 'Project', + 'correlation_id' => anything + )).and_call_original + + subject.save + end + end end diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb index 0fbdc09a206..978118ed1b1 100644 --- a/spec/mailers/notify_spec.rb +++ b/spec/mailers/notify_spec.rb @@ -714,7 +714,7 @@ RSpec.describe Notify do describe 'project access requested' do let(:project) do create(:project, :public) do |project| - project.add_maintainer(project.owner) + project.add_maintainer(project.first_owner) end end diff --git a/spec/models/ability_spec.rb b/spec/models/ability_spec.rb index bb8d476f257..5bd69ad9fad 100644 --- a/spec/models/ability_spec.rb +++ 
b/spec/models/ability_spec.rb @@ -394,7 +394,7 @@ RSpec.describe Ability do describe '.project_disabled_features_rules' do let(:project) { create(:project, :wiki_disabled) } - subject { described_class.policy_for(project.owner, project) } + subject { described_class.policy_for(project.first_owner, project) } context 'wiki named abilities' do it 'disables wiki abilities if the project has no wiki' do diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index 31c7c7a44bc..e08fe196d65 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -851,7 +851,7 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state, :clean_git context 'when project is destroyed' do let(:subject) do - Projects::DestroyService.new(project, project.owner).execute + Projects::DestroyService.new(project, project.first_owner).execute end it_behaves_like 'deletes all build_trace_chunk and data in redis' diff --git a/spec/models/ci/job_token/project_scope_link_spec.rb b/spec/models/ci/job_token/project_scope_link_spec.rb index 8d7bb44bd16..8494446476f 100644 --- a/spec/models/ci/job_token/project_scope_link_spec.rb +++ b/spec/models/ci/job_token/project_scope_link_spec.rb @@ -88,4 +88,11 @@ RSpec.describe Ci::JobToken::ProjectScopeLink do it { is_expected.to be_nil } end end + + context 'loose foreign key on ci_job_token_project_scope_links.source_project_id' do + it_behaves_like 'cleanup by a loose foreign key' do + let!(:parent) { create(:project) } + let!(:model) { create(:ci_job_token_project_scope_link, source_project: parent) } + end + end end diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb index 8c0f843fdea..9ca4092bba7 100644 --- a/spec/models/ci/pipeline_spec.rb +++ b/spec/models/ci/pipeline_spec.rb @@ -3427,7 +3427,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do create(:ci_pipeline, project: project, sha: project.commit('master').sha, - user: project.owner) + user: project.first_owner) end before do @@ -4502,7 +4502,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do describe '#reset_source_bridge!' 
do let(:pipeline) { create(:ci_pipeline, :created, project: project) } - subject(:reset_bridge) { pipeline.reset_source_bridge!(project.owner) } + subject(:reset_bridge) { pipeline.reset_source_bridge!(project.first_owner) } context 'when the pipeline is a child pipeline and the bridge is depended' do let!(:parent_pipeline) { create(:ci_pipeline) } diff --git a/spec/models/ci/trigger_spec.rb b/spec/models/ci/trigger_spec.rb index c9ae8519c63..4ac8720780c 100644 --- a/spec/models/ci/trigger_spec.rb +++ b/spec/models/ci/trigger_spec.rb @@ -59,7 +59,7 @@ RSpec.describe Ci::Trigger do end it_behaves_like 'includes Limitable concern' do - subject { build(:ci_trigger, owner: project.owner, project: project) } + subject { build(:ci_trigger, owner: project.first_owner, project: project) } end context 'loose foreign key on ci_triggers.owner_id' do diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb index 2176eea75bc..4d3a2fac0fc 100644 --- a/spec/models/commit_spec.rb +++ b/spec/models/commit_spec.rb @@ -486,7 +486,7 @@ eos it 'uses the CachedMarkdownField cache instead of the Mentionable cache', :use_clean_rails_redis_caching do expect(commit.title_html).not_to be_present - commit.all_references(project.owner).all + commit.all_references(project.first_owner).all expect(commit.title_html).to be_present expect(Rails.cache.read("banzai/commit:#{commit.id}/safe_message/single_line")).to be_nil diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb index 6cfd7def013..27281060935 100644 --- a/spec/models/container_repository_spec.rb +++ b/spec/models/container_repository_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe ContainerRepository do +RSpec.describe ContainerRepository, :aggregate_failures do using RSpec::Parameterized::TableSyntax let(:group) { create(:group, name: 'group') } @@ -36,7 +36,282 @@ RSpec.describe ContainerRepository do describe 'validations' do it { is_expected.to validate_presence_of(:migration_retries_count) } it { is_expected.to validate_numericality_of(:migration_retries_count).is_greater_than_or_equal_to(0) } - it { is_expected.to validate_presence_of(:migration_state) } + + it { is_expected.to validate_inclusion_of(:migration_aborted_in_state).in_array(ContainerRepository::ACTIVE_MIGRATION_STATES) } + it { is_expected.to allow_value(nil).for(:migration_aborted_in_state) } + + context 'migration_state' do + it { is_expected.to validate_presence_of(:migration_state) } + it { is_expected.to validate_inclusion_of(:migration_state).in_array(ContainerRepository::MIGRATION_STATES) } + + describe 'pre_importing' do + it 'validates expected attributes' do + expect(build(:container_repository, migration_state: 'pre_importing')).to be_invalid + expect(build(:container_repository, :pre_importing)).to be_valid + end + end + + describe 'pre_import_done' do + it 'validates expected attributes' do + expect(build(:container_repository, migration_state: 'pre_import_done')).to be_invalid + expect(build(:container_repository, :pre_import_done)).to be_valid + end + end + + describe 'importing' do + it 'validates expected attributes' do + expect(build(:container_repository, migration_state: 'importing')).to be_invalid + expect(build(:container_repository, :importing)).to be_valid + end + end + + describe 'import_skipped' do + it 'validates expected attributes' do + expect(build(:container_repository, migration_state: 'import_skipped')).to be_invalid + expect(build(:container_repository, :import_skipped)).to be_valid + end + 
end + + describe 'import_aborted' do + it 'validates expected attributes' do + expect(build(:container_repository, migration_state: 'import_aborted')).to be_invalid + expect(build(:container_repository, :import_aborted)).to be_valid + end + end + end + end + + context ':migration_state state_machine' do + shared_examples 'no action when feature flag is disabled' do + context 'feature flag disabled' do + before do + stub_feature_flags(container_registry_migration_phase2_enabled: false) + end + + it { is_expected.to eq(false) } + end + end + + shared_examples 'transitioning to pre_importing', skip_pre_import_success: true do + before do + repository.update_column(:migration_pre_import_done_at, Time.zone.now) + end + + it_behaves_like 'no action when feature flag is disabled' + + context 'successful pre_import request' do + it 'sets migration_pre_import_started_at and resets migration_pre_import_done_at' do + expect(repository).to receive(:migration_pre_import).and_return(:ok) + + expect { subject }.to change { repository.reload.migration_pre_import_started_at } + .and change { repository.migration_pre_import_done_at }.to(nil) + + expect(repository).to be_pre_importing + end + end + + context 'failed pre_import request' do + it 'sets migration_pre_import_started_at and resets migration_pre_import_done_at' do + expect(repository).to receive(:migration_pre_import).and_return(:error) + + expect { subject }.to change { repository.reload.migration_pre_import_started_at } + .and change { repository.migration_aborted_at } + .and change { repository.migration_pre_import_done_at }.to(nil) + + expect(repository.migration_aborted_in_state).to eq('pre_importing') + expect(repository).to be_import_aborted + end + end + end + + shared_examples 'transitioning to importing', skip_import_success: true do + before do + repository.update_columns(migration_import_done_at: Time.zone.now) + end + + context 'successful import request' do + it 'sets migration_import_started_at and resets migration_import_done_at' do + expect(repository).to receive(:migration_import).and_return(:ok) + + expect { subject }.to change { repository.reload.migration_import_started_at } + .and change { repository.migration_import_done_at }.to(nil) + + expect(repository).to be_importing + end + end + + context 'failed import request' do + it 'sets migration_import_started_at and resets migration_import_done_at' do + expect(repository).to receive(:migration_import).and_return(:error) + + expect { subject }.to change { repository.reload.migration_import_started_at } + .and change { repository.migration_aborted_at } + + expect(repository.migration_aborted_in_state).to eq('importing') + expect(repository).to be_import_aborted + end + end + end + + shared_examples 'transitioning out of import_aborted' do + it 'resets migration_aborted_at and migration_aborted_in_state' do + expect { subject }.to change { repository.reload.migration_aborted_in_state }.to(nil) + .and change { repository.migration_aborted_at }.to(nil) + end + end + + shared_examples 'transitioning from allowed states' do |allowed_states| + ContainerRepository::MIGRATION_STATES.each do |state| + result = allowed_states.include?(state) + + context "when transitioning from #{state}" do + let(:repository) { create(:container_repository, state.to_sym) } + + it "returns #{result}" do + expect(subject).to eq(result) + end + end + end + end + + describe '#start_pre_import' do + let_it_be_with_reload(:repository) { create(:container_repository) } + + subject { repository.start_pre_import } 
+
+ before do |example|
+ unless example.metadata[:skip_pre_import_success]
+ allow(repository).to receive(:migration_pre_import).and_return(:ok)
+ end
+ end
+
+ it_behaves_like 'transitioning from allowed states', %w[default]
+ it_behaves_like 'transitioning to pre_importing'
+ end
+
+ describe '#retry_pre_import' do
+ let_it_be_with_reload(:repository) { create(:container_repository, :import_aborted) }
+
+ subject { repository.retry_pre_import }
+
+ before do |example|
+ unless example.metadata[:skip_pre_import_success]
+ allow(repository).to receive(:migration_pre_import).and_return(:ok)
+ end
+ end
+
+ it_behaves_like 'transitioning from allowed states', %w[import_aborted]
+ it_behaves_like 'transitioning to pre_importing'
+ it_behaves_like 'transitioning out of import_aborted'
+ end
+
+ describe '#finish_pre_import' do
+ let_it_be_with_reload(:repository) { create(:container_repository, :pre_importing) }
+
+ subject { repository.finish_pre_import }
+
+ it_behaves_like 'transitioning from allowed states', %w[pre_importing]
+
+ it 'sets migration_pre_import_done_at' do
+ expect { subject }.to change { repository.reload.migration_pre_import_done_at }
+
+ expect(repository).to be_pre_import_done
+ end
+ end
+
+ describe '#start_import' do
+ let_it_be_with_reload(:repository) { create(:container_repository, :pre_import_done) }
+
+ subject { repository.start_import }
+
+ before do |example|
+ unless example.metadata[:skip_import_success]
+ allow(repository).to receive(:migration_import).and_return(:ok)
+ end
+ end
+
+ it_behaves_like 'transitioning from allowed states', %w[pre_import_done]
+ it_behaves_like 'transitioning to importing'
+ end
+
+ describe '#retry_import' do
+ let_it_be_with_reload(:repository) { create(:container_repository, :import_aborted) }
+
+ subject { repository.retry_import }
+
+ before do |example|
+ unless example.metadata[:skip_import_success]
+ allow(repository).to receive(:migration_import).and_return(:ok)
+ end
+ end
+
+ it_behaves_like 'transitioning from allowed states', %w[import_aborted]
+ it_behaves_like 'transitioning to importing'
+ it_behaves_like 'no action when feature flag is disabled'
+ end
+
+ describe '#finish_import' do
+ let_it_be_with_reload(:repository) { create(:container_repository, :importing) }
+
+ subject { repository.finish_import }
+
+ it_behaves_like 'transitioning from allowed states', %w[importing]
+
+ it 'sets migration_import_done_at' do
+ expect { subject }.to change { repository.reload.migration_import_done_at }
+
+ expect(repository).to be_import_done
+ end
+ end
+
+ describe '#already_migrated' do
+ let_it_be_with_reload(:repository) { create(:container_repository) }
+
+ subject { repository.already_migrated }
+
+ it_behaves_like 'transitioning from allowed states', %w[default]
+
+ it 'sets migration_import_done_at' do
+ subject
+
+ expect(repository).to be_import_done
+ end
+ end
+
+ describe '#abort_import' do
+ let_it_be_with_reload(:repository) { create(:container_repository, :importing) }
+
+ subject { repository.abort_import }
+
+ it_behaves_like 'transitioning from allowed states', %w[pre_importing importing]
+
+ it 'sets migration_aborted_at and migration_aborted_in_state and increments the retry count' do
+ expect { subject }.to change { repository.migration_aborted_at }
+ .and change { repository.reload.migration_retries_count }.by(1)
+
+ expect(repository.migration_aborted_in_state).to eq('importing')
+ expect(repository).to be_import_aborted
+ end
+ end
+
+ describe '#skip_import' do
+ let_it_be_with_reload(:repository) {
create(:container_repository) }
+
+ subject { repository.skip_import(reason: :too_many_retries) }
+
+ it_behaves_like 'transitioning from allowed states', %w[default pre_importing importing]
+
+ it 'sets migration_skipped_at and migration_skipped_reason' do
+ expect { subject }.to change { repository.reload.migration_skipped_at }
+
+ expect(repository.migration_skipped_reason).to eq('too_many_retries')
+ expect(repository).to be_import_skipped
+ end
+
+ it 'raises an error if a reason is not given' do
+ expect { repository.skip_import }.to raise_error(ArgumentError)
+ end
+ end
 end

 describe '#tag' do
@@ -209,6 +484,46 @@ RSpec.describe ContainerRepository do
 end
 end

+ context 'registry migration' do
+ shared_examples 'handling the migration step' do |step|
+ let(:client_response) { :foobar }
+
+ before do
+ allow(repository.gitlab_api_client).to receive(:supports_gitlab_api?).and_return(true)
+ end
+
+ it 'returns the same response as the client' do
+ expect(repository.gitlab_api_client)
+ .to receive(step).with(repository.path).and_return(client_response)
+ expect(subject).to eq(client_response)
+ end
+
+ context 'when the gitlab_api feature is not supported' do
+ before do
+ allow(repository.gitlab_api_client).to receive(:supports_gitlab_api?).and_return(false)
+ end
+
+ it 'returns :error' do
+ expect(repository.gitlab_api_client).not_to receive(step)
+
+ expect(subject).to eq(:error)
+ end
+ end
+ end
+
+ describe '#migration_pre_import' do
+ subject { repository.migration_pre_import }
+
+ it_behaves_like 'handling the migration step', :pre_import_repository
+ end
+
+ describe '#migration_import' do
+ subject { repository.migration_import }
+
+ it_behaves_like 'handling the migration step', :import_repository
+ end
+ end
+
 describe '.build_from_path' do
 let(:registry_path) do
 ContainerRegistry::Path.new(project.full_path + '/some/image')
@@ -505,20 +820,14 @@ RSpec.describe ContainerRepository do
 end

 describe '#migration_importing?' do
- let_it_be_with_reload(:container_repository) { create(:container_repository, migration_state: 'importing')}
- subject { container_repository.migration_importing?
} - it { is_expected.to eq(true) } + ContainerRepository::MIGRATION_STATES.each do |state| + context "when in #{state} migration_state" do + let(:container_repository) { create(:container_repository, state.to_sym)} - context 'when not importing' do - before do - # Technical debt: when the state machine is added, we should iterate through all possible states - # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/78499 - container_repository.update_column(:migration_state, 'default') + it { is_expected.to eq(state == 'importing') } end - - it { is_expected.to eq(false) } end end diff --git a/spec/models/environment_status_spec.rb b/spec/models/environment_status_spec.rb index 1b9b38a0932..1db1171401c 100644 --- a/spec/models/environment_status_spec.rb +++ b/spec/models/environment_status_spec.rb @@ -161,7 +161,7 @@ RSpec.describe EnvironmentStatus do let!(:build) { create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline) } let(:environment) { build.deployment.environment } - let(:user) { project.owner } + let(:user) { project.first_owner } context 'when environment is created on a forked project', :sidekiq_inline do let(:project) { create(:project, :repository) } diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb index 9119ef83034..f099015e63e 100644 --- a/spec/models/event_spec.rb +++ b/spec/models/event_spec.rb @@ -25,7 +25,7 @@ RSpec.describe Event do expect(instance).to receive(:reset_project_activity) end - create_push_event(project, project.owner) + create_push_event(project, project.first_owner) end end @@ -34,7 +34,7 @@ RSpec.describe Event do it 'updates the project last_repository_updated_at and updated_at' do project.touch(:last_repository_updated_at, time: 1.year.ago) # rubocop: disable Rails/SkipsModelValidations - event = create_push_event(project, project.owner) + event = create_push_event(project, project.first_owner) project.reload @@ -47,7 +47,7 @@ RSpec.describe Event do it 'does not update the project last_repository_updated_at' do project.update!(last_repository_updated_at: 1.year.ago) - create(:closed_issue_event, project: project, author: project.owner) + create(:closed_issue_event, project: project, author: project.first_owner) project.reload @@ -63,14 +63,14 @@ RSpec.describe Event do project.reload # a reload removes fractions of seconds expect do - create_push_event(project, project.owner) + create_push_event(project, project.first_owner) project.reload end.not_to change { project.last_repository_updated_at } end end describe 'after_create UserInteractedProject.track' do - let(:event) { build(:push_event, project: project, author: project.owner) } + let(:event) { build(:push_event, project: project, author: project.first_owner) } it 'passes event to UserInteractedProject.track' do expect(UserInteractedProject).to receive(:track).with(event) @@ -157,7 +157,7 @@ RSpec.describe Event do describe "Push event" do let(:project) { create(:project, :private) } - let(:user) { project.owner } + let(:user) { project.first_owner } let(:event) { create_push_event(project, user) } it do @@ -173,7 +173,7 @@ RSpec.describe Event do describe '#target_title' do let_it_be(:project) { create(:project) } - let(:author) { project.owner } + let(:author) { project.first_owner } let(:target) { nil } let(:event) do @@ -746,7 +746,7 @@ RSpec.describe Event do target = kind == :project ? 
nil : build(kind, **extra_data) - [kind, build(:event, :created, author: project.owner, project: project, target: target)] + [kind, build(:event, :created, author: project.first_owner, project: project, target: target)] end end @@ -830,7 +830,7 @@ RSpec.describe Event do expect(project).not_to receive(:update_column) .with(:last_activity_at, a_kind_of(Time)) - create_push_event(project, project.owner) + create_push_event(project, project.first_owner) end end @@ -838,7 +838,7 @@ RSpec.describe Event do it 'updates the project' do project.touch(:last_activity_at, time: 1.year.ago) # rubocop: disable Rails/SkipsModelValidations - event = create_push_event(project, project.owner) + event = create_push_event(project, project.first_owner) project.reload diff --git a/spec/models/external_pull_request_spec.rb b/spec/models/external_pull_request_spec.rb index b141600c4fd..82da7cdf34b 100644 --- a/spec/models/external_pull_request_spec.rb +++ b/spec/models/external_pull_request_spec.rb @@ -236,4 +236,11 @@ RSpec.describe ExternalPullRequest do it_behaves_like 'it has loose foreign keys' do let(:factory_name) { :external_pull_request } end + + context 'loose foreign key on external_pull_requests.project_id' do + it_behaves_like 'cleanup by a loose foreign key' do + let!(:parent) { create(:project) } + let!(:model) { create(:external_pull_request, project: parent) } + end + end end diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb index 05ee2166245..4bc4df02c24 100644 --- a/spec/models/group_spec.rb +++ b/spec/models/group_spec.rb @@ -1249,7 +1249,7 @@ RSpec.describe Group do let(:common_id) { [Project.maximum(:id).to_i, Namespace.maximum(:id).to_i].max + 999 } let!(:group) { create(:group, id: common_id) } let!(:unrelated_project) { create(:project, id: common_id) } - let(:user) { unrelated_project.owner } + let(:user) { unrelated_project.first_owner } it 'returns correct access level' do expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS) diff --git a/spec/models/issue_collection_spec.rb b/spec/models/issue_collection_spec.rb index d67bd8debce..183082bab26 100644 --- a/spec/models/issue_collection_spec.rb +++ b/spec/models/issue_collection_spec.rb @@ -50,7 +50,9 @@ RSpec.describe IssueCollection do end end - context 'using a user that is the owner of a project' do + # TODO update when we have multiple owners of a project + # https://gitlab.com/gitlab-org/gitlab/-/issues/350605 + context 'using a user that is an owner of a project' do it 'returns the issues of the project' do expect(collection.updatable_by_user(project.namespace.owner)) .to eq([issue1, issue2]) diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb index b91b299467d..5af42cc67ea 100644 --- a/spec/models/issue_spec.rb +++ b/spec/models/issue_spec.rb @@ -887,6 +887,8 @@ RSpec.describe Issue do end end + # TODO update when we have multiple owners of a project + # https://gitlab.com/gitlab-org/gitlab/-/issues/350605 context 'with an owner' do before do project.add_maintainer(user) diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb index 9d9cca0678a..f7d32dc1a7b 100644 --- a/spec/models/note_spec.rb +++ b/spec/models/note_spec.rb @@ -617,7 +617,7 @@ RSpec.describe Note do let(:note) do create :note, noteable: ext_issue, project: ext_proj, - note: "mentioned in #{ext_proj.owner.to_reference}", + note: "mentioned in #{ext_proj.first_owner.to_reference}", system: true end diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index 
0d1ab22c520..d78d0a56f10 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -871,6 +871,8 @@ RSpec.describe Project, factory_default: :keep do end describe 'reference methods' do + # TODO update when we have multiple owners of a project + # https://gitlab.com/gitlab-org/gitlab/-/issues/350605 let_it_be(:owner) { create(:user, name: 'Gitlab') } let_it_be(:namespace) { create(:namespace, name: 'Sample namespace', path: 'sample-namespace', owner: owner) } let_it_be(:project) { create(:project, name: 'Sample project', path: 'sample-project', namespace: namespace) } @@ -2874,7 +2876,7 @@ RSpec.describe Project, factory_default: :keep do end before do - project.repository.rm_branch(project.owner, branch.name) + project.repository.rm_branch(project.first_owner, branch.name) end subject { project.latest_pipeline(branch.name) } diff --git a/spec/models/project_team_spec.rb b/spec/models/project_team_spec.rb index d5af2d80906..bfdebbc33df 100644 --- a/spec/models/project_team_spec.rb +++ b/spec/models/project_team_spec.rb @@ -80,7 +80,7 @@ RSpec.describe ProjectTeam do describe 'owner methods' do context 'personal project' do let(:project) { create(:project) } - let(:owner) { project.owner } + let(:owner) { project.first_owner } specify { expect(project.team.owners).to contain_exactly(owner) } specify { expect(project.team.owner?(owner)).to be_truthy } @@ -108,10 +108,12 @@ RSpec.describe ProjectTeam do let(:project) { create(:project) } it 'returns project members' do + # TODO this can be updated when we have multiple project owners + # See https://gitlab.com/gitlab-org/gitlab/-/issues/350605 user = create(:user) project.add_guest(user) - expect(project.team.members).to contain_exactly(user, project.owner) + expect(project.team.members).to contain_exactly(user, project.first_owner) end it 'returns project members of a specified level' do @@ -129,7 +131,7 @@ RSpec.describe ProjectTeam do group_access: Gitlab::Access::GUEST) expect(project.team.members) - .to contain_exactly(group_member.user, project.owner) + .to contain_exactly(group_member.user, project.first_owner) end it 'returns invited members of a group of a specified level' do diff --git a/spec/policies/ci/pipeline_policy_spec.rb b/spec/policies/ci/pipeline_policy_spec.rb index 9a65823c950..b68bb966820 100644 --- a/spec/policies/ci/pipeline_policy_spec.rb +++ b/spec/policies/ci/pipeline_policy_spec.rb @@ -89,7 +89,7 @@ RSpec.describe Ci::PipelinePolicy, :models do let(:project) { create(:project, :public) } context 'when user has owner access' do - let(:user) { project.owner } + let(:user) { project.first_owner } it 'is enabled' do expect(policy).to be_allowed :destroy_pipeline @@ -107,7 +107,7 @@ RSpec.describe Ci::PipelinePolicy, :models do let(:project) { create(:project, :public) } context 'when user has owner access' do - let(:user) { project.owner } + let(:user) { project.first_owner } it 'is enabled' do expect(policy).to be_allowed :read_pipeline_variable @@ -129,7 +129,7 @@ RSpec.describe Ci::PipelinePolicy, :models do end context 'when user is developer and it is not the creator of the pipeline' do - let(:pipeline) { create(:ci_empty_pipeline, project: project, user: project.owner) } + let(:pipeline) { create(:ci_empty_pipeline, project: project, user: project.first_owner) } before do project.add_developer(user) diff --git a/spec/policies/namespaces/project_namespace_policy_spec.rb b/spec/policies/namespaces/project_namespace_policy_spec.rb index f6fe4ae552a..f1022747fab 100644 --- 
a/spec/policies/namespaces/project_namespace_policy_spec.rb +++ b/spec/policies/namespaces/project_namespace_policy_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Namespaces::ProjectNamespacePolicy do
 end

 context 'parent owner' do
- let_it_be(:current_user) { parent.owner }
+ let_it_be(:current_user) { parent.first_owner }

 it { is_expected.to be_disallowed(*permissions) }
 end
diff --git a/spec/policies/project_member_policy_spec.rb b/spec/policies/project_member_policy_spec.rb
index aebbe685bb3..12b3e60fdb2 100644
--- a/spec/policies/project_member_policy_spec.rb
+++ b/spec/policies/project_member_policy_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe ProjectMemberPolicy do
 end

 context 'when user is project owner' do
- let(:member_user) { project.owner }
+ let(:member_user) { project.first_owner }
 let(:member) { project.members.find_by!(user: member_user) }

 it { is_expected.to be_allowed(:read_project) }
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 38e4e18c894..793b1fffd5f 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -117,7 +117,7 @@ RSpec.describe ProjectPolicy do
 end

 describe 'for unconfirmed user' do
- let(:current_user) { project.owner.tap { |u| u.update!(confirmed_at: nil) } }
+ let(:current_user) { project.first_owner.tap { |u| u.update!(confirmed_at: nil) } }

 it 'disallows to modify pipelines' do
 expect_disallowed(:create_pipeline)
@@ -144,7 +144,7 @@ RSpec.describe ProjectPolicy do
 end

 describe 'for project owner' do
- let(:current_user) { project.owner }
+ let(:current_user) { project.first_owner }

 it 'allows :destroy_pipeline' do
 expect(current_user.can?(:destroy_pipeline, pipeline)).to be_truthy
diff --git a/spec/rubocop/cop/file_decompression_spec.rb b/spec/rubocop/cop/file_decompression_spec.rb
new file mode 100644
index 00000000000..7be1a784001
--- /dev/null
+++ b/spec/rubocop/cop/file_decompression_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../rubocop/cop/file_decompression'
+
+RSpec.describe RuboCop::Cop::FileDecompression do
+ subject(:cop) { described_class.new }
+
+ it 'does not flag when using a system command not related to file decompression' do
+ expect_no_offenses('system("ls")')
+ end
+
+ described_class::FORBIDDEN_COMMANDS.map { [_1, '^' * _1.length] }.each do |cmd, len|
+ it "flags when using '#{cmd}' system command" do
+ expect_offense(<<~SOURCE)
+ system('#{cmd}')
+ ^^^^^^^^#{len}^^ While extracting files check for symlink to avoid arbitrary file reading[...]
+ SOURCE
+
+ expect_offense(<<~SOURCE)
+ exec('#{cmd}')
+ ^^^^^^#{len}^^ While extracting files check for symlink to avoid arbitrary file reading[...]
+ SOURCE
+
+ expect_offense(<<~SOURCE)
+ Kernel.spawn('#{cmd}')
+ ^^^^^^^^^^^^^^#{len}^^ While extracting files check for symlink to avoid arbitrary file reading[...]
+ SOURCE
+
+ expect_offense(<<~SOURCE)
+ IO.popen('#{cmd}')
+ ^^^^^^^^^^#{len}^^ While extracting files check for symlink to avoid arbitrary file reading[...]
+ SOURCE
+ end
+
+ it "flags when using '#{cmd}' subshell command" do
+ expect_offense(<<~SOURCE)
+ `#{cmd}`
+ ^#{len}^ While extracting files check for symlink to avoid arbitrary file reading[...]
+ SOURCE
+
+ expect_offense(<<~SOURCE)
+ %x(#{cmd})
+ ^^^#{len}^ While extracting files check for symlink to avoid arbitrary file reading[...]
+ SOURCE + end + end +end diff --git a/spec/services/service_ping/submit_service_ping_service_spec.rb b/spec/services/service_ping/submit_service_ping_service_spec.rb index 2971c9a9309..b5c28e0346a 100644 --- a/spec/services/service_ping/submit_service_ping_service_spec.rb +++ b/spec/services/service_ping/submit_service_ping_service_spec.rb @@ -63,7 +63,7 @@ RSpec.describe ServicePing::SubmitService do shared_examples 'does not send a blank usage ping payload' do it do - expect(Gitlab::HTTP).not_to receive(:post) + expect(Gitlab::HTTP).not_to receive(:post).with(subject.url, any_args) expect { subject.execute }.to raise_error(described_class::SubmissionError) do |error| expect(error.message).to include('Usage data is blank') @@ -129,6 +129,7 @@ RSpec.describe ServicePing::SubmitService do stub_usage_data_connections stub_database_flavor_check stub_application_setting(usage_ping_enabled: true) + stub_response(body: nil, url: subject.error_url, status: 201) end context 'and user requires usage stats consent' do @@ -277,7 +278,8 @@ RSpec.describe ServicePing::SubmitService do context 'if payload service fails' do before do stub_response(body: with_dev_ops_score_params) - allow(ServicePing::BuildPayloadService).to receive(:execute).and_raise(described_class::SubmissionError, 'SubmissionError') + allow(ServicePing::BuildPayloadService).to receive_message_chain(:new, :execute) + .and_raise(described_class::SubmissionError, 'SubmissionError') end it 'calls UsageData .data method' do @@ -287,6 +289,15 @@ RSpec.describe ServicePing::SubmitService do subject.execute end + + it 'submits error' do + expect(Gitlab::HTTP).to receive(:post).with(subject.url, any_args) + .and_call_original + expect(Gitlab::HTTP).to receive(:post).with(subject.error_url, any_args) + .and_call_original + + subject.execute + end end context 'calls BuildPayloadService first' do diff --git a/spec/services/update_container_registry_info_service_spec.rb b/spec/services/update_container_registry_info_service_spec.rb index 740e53b0472..64071e79508 100644 --- a/spec/services/update_container_registry_info_service_spec.rb +++ b/spec/services/update_container_registry_info_service_spec.rb @@ -48,6 +48,7 @@ RSpec.describe UpdateContainerRegistryInfoService do before do stub_registry_info({}) + stub_supports_gitlab_api(false) end it 'uses a token with no access permissions' do @@ -63,6 +64,7 @@ RSpec.describe UpdateContainerRegistryInfoService do context 'when unabled to detect the container registry type' do it 'sets the application settings to their defaults' do stub_registry_info({}) + stub_supports_gitlab_api(false) subject @@ -76,20 +78,23 @@ RSpec.describe UpdateContainerRegistryInfoService do context 'when able to detect the container registry type' do context 'when using the GitLab container registry' do it 'updates application settings accordingly' do - stub_registry_info(vendor: 'gitlab', version: '2.9.1-gitlab', features: %w[a,b,c]) + stub_registry_info(vendor: 'gitlab', version: '2.9.1-gitlab', features: %w[a b c]) + stub_supports_gitlab_api(true) subject application_settings.reload expect(application_settings.container_registry_vendor).to eq('gitlab') expect(application_settings.container_registry_version).to eq('2.9.1-gitlab') - expect(application_settings.container_registry_features).to eq(%w[a,b,c]) + expect(application_settings.container_registry_features) + .to match_array(%W[a b c #{ContainerRegistry::GitlabApiClient::REGISTRY_GITLAB_V1_API_FEATURE}]) end end context 'when using a third-party container registry' do 
it 'updates application settings accordingly' do stub_registry_info(vendor: 'other', version: nil, features: nil) + stub_supports_gitlab_api(false) subject @@ -112,4 +117,10 @@ RSpec.describe UpdateContainerRegistryInfoService do allow(client).to receive(:registry_info).and_return(output) end end + + def stub_supports_gitlab_api(output) + allow_next_instance_of(ContainerRegistry::GitlabApiClient) do |client| + allow(client).to receive(:supports_gitlab_api?).and_return(output) + end + end end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 17f091278e5..2b3b7b114c8 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -112,11 +112,11 @@ RSpec.configure do |config| # falling back to all tests when there is no `:focus` example. config.filter_run focus: true config.run_all_when_everything_filtered = true - - # Re-run failures locally with `--only-failures` - config.example_status_persistence_file_path = './spec/examples.txt' end + # Re-run failures locally with `--only-failures` + config.example_status_persistence_file_path = ENV.fetch('RSPEC_LAST_RUN_RESULTS_FILE', './spec/examples.txt') + config.define_derived_metadata(file_path: %r{(ee)?/spec/.+_spec\.rb\z}) do |metadata| location = metadata[:location] @@ -208,7 +208,9 @@ RSpec.configure do |config| config.exceptions_to_hard_fail = [DeprecationToolkitEnv::DeprecationBehaviors::SelectiveRaise::RaiseDisallowedDeprecation] end - if ENV['FLAKY_RSPEC_GENERATE_REPORT'] + require_relative '../tooling/rspec_flaky/config' + + if RspecFlaky::Config.generate_report? require_relative '../tooling/rspec_flaky/listener' config.reporter.register_listener( diff --git a/spec/support/flaky_tests.rb b/spec/support/flaky_tests.rb index 5ce55c47aab..4df0d23bfc3 100644 --- a/spec/support/flaky_tests.rb +++ b/spec/support/flaky_tests.rb @@ -4,14 +4,14 @@ return unless ENV['CI'] return if ENV['SKIP_FLAKY_TESTS_AUTOMATICALLY'] == "false" return if ENV['CI_MERGE_REQUEST_LABELS'].to_s.include?('pipeline:run-flaky-tests') +require_relative '../../tooling/rspec_flaky/config' require_relative '../../tooling/rspec_flaky/report' RSpec.configure do |config| $flaky_test_example_ids = begin # rubocop:disable Style/GlobalVars - raise "$SUITE_FLAKY_RSPEC_REPORT_PATH is empty." if ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'].to_s.empty? - raise "#{ENV['SUITE_FLAKY_RSPEC_REPORT_PATH']} doesn't exist" unless File.exist?(ENV['SUITE_FLAKY_RSPEC_REPORT_PATH']) + raise "#{RspecFlaky::Config.suite_flaky_examples_report_path} doesn't exist" unless File.exist?(RspecFlaky::Config.suite_flaky_examples_report_path) - RspecFlaky::Report.load(ENV['SUITE_FLAKY_RSPEC_REPORT_PATH']).map { |_, flaky_test_data| flaky_test_data.to_h[:example_id] } + RspecFlaky::Report.load(RspecFlaky::Config.suite_flaky_examples_report_path).map { |_, flaky_test_data| flaky_test_data.to_h[:example_id] } rescue => e # rubocop:disable Style/RescueStandardError puts e [] @@ -29,8 +29,9 @@ RSpec.configure do |config| end config.after(:suite) do - next unless ENV['SKIPPED_FLAKY_TESTS_REPORT_PATH'] + next unless RspecFlaky::Config.skipped_flaky_tests_report_path + next if $skipped_flaky_tests_report.empty? 
# rubocop:disable Style/GlobalVars - File.write(ENV['SKIPPED_FLAKY_TESTS_REPORT_PATH'], "#{$skipped_flaky_tests_report.join("\n")}\n") # rubocop:disable Style/GlobalVars + File.write(RspecFlaky::Config.skipped_flaky_tests_report_path, "#{ENV['CI_JOB_URL']}\n#{$skipped_flaky_tests_report.join("\n")}\n\n") # rubocop:disable Style/GlobalVars end end diff --git a/spec/support/helpers/features/iteration_helpers.rb b/spec/support/helpers/features/iteration_helpers.rb new file mode 100644 index 00000000000..8e1d252f55f --- /dev/null +++ b/spec/support/helpers/features/iteration_helpers.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true +module IterationHelpers + def iteration_period(iteration) + "#{iteration.start_date.to_s(:medium)} - #{iteration.due_date.to_s(:medium)}" + end +end diff --git a/spec/support/shared_contexts/lib/container_registry/client_shared_context.rb b/spec/support/shared_contexts/lib/container_registry/client_shared_context.rb new file mode 100644 index 00000000000..a87a3247b95 --- /dev/null +++ b/spec/support/shared_contexts/lib/container_registry/client_shared_context.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +RSpec.shared_context 'container registry client' do + let(:token) { '12345' } + let(:options) { { token: token } } + let(:registry_api_url) { 'http://container-registry' } + let(:client) { described_class.new(registry_api_url, options) } + let(:push_blob_headers) do + { + 'Accept' => 'application/vnd.docker.distribution.manifest.v2+json, application/vnd.oci.image.manifest.v1+json', + 'Authorization' => "bearer #{token}", + 'Content-Type' => 'application/octet-stream', + 'User-Agent' => "GitLab/#{Gitlab::VERSION}" + } + end + + let(:headers_with_accept_types) do + { + 'Accept' => 'application/vnd.docker.distribution.manifest.v2+json, application/vnd.oci.image.manifest.v1+json', + 'Authorization' => "bearer #{token}", + 'User-Agent' => "GitLab/#{Gitlab::VERSION}" + } + end + + let(:expected_faraday_headers) { { user_agent: "GitLab/#{Gitlab::VERSION}" } } + let(:expected_faraday_request_options) { Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS } +end diff --git a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb index 4713e2fc789..ac5029edc82 100644 --- a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb +++ b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb @@ -1142,7 +1142,7 @@ RSpec.shared_examples 'a container registry auth service' do end context 'when importing' do - let_it_be(:container_repository) { create(:container_repository, :root, migration_state: 'importing') } + let_it_be(:container_repository) { create(:container_repository, :root, :importing) } let_it_be(:current_project) { container_repository.project } let_it_be(:current_user) { create(:user) } diff --git a/spec/tooling/lib/tooling/parallel_rspec_runner_spec.rb b/spec/tooling/lib/tooling/parallel_rspec_runner_spec.rb index 4b44d991d89..34542668730 100644 --- a/spec/tooling/lib/tooling/parallel_rspec_runner_spec.rb +++ b/spec/tooling/lib/tooling/parallel_rspec_runner_spec.rb @@ -18,7 +18,9 @@ RSpec.describe Tooling::ParallelRSpecRunner do # rubocop:disable RSpec/FilePath allow(File).to receive(:exist?).with(filter_tests_file).and_return(true) allow(File).to receive(:read).and_call_original allow(File).to receive(:read).with(filter_tests_file).and_return(filter_tests) - allow(subject).to 
receive(:exec) + allow(Process).to receive(:spawn) + allow(Process).to receive(:wait) + allow(Process).to receive(:last_status).and_return(double(exitstatus: 0)) end subject { described_class.new(allocator: allocator, filter_tests_file: filter_tests_file, rspec_args: rspec_args) } @@ -86,7 +88,7 @@ RSpec.describe Tooling::ParallelRSpecRunner do # rubocop:disable RSpec/FilePath end def expect_command(cmd) - expect(subject).to receive(:exec).with(*cmd) + expect(Process).to receive(:spawn).with(*cmd) end end end diff --git a/spec/tooling/rspec_flaky/config_spec.rb b/spec/tooling/rspec_flaky/config_spec.rb index 12b5ed74cb2..c95e5475d66 100644 --- a/spec/tooling/rspec_flaky/config_spec.rb +++ b/spec/tooling/rspec_flaky/config_spec.rb @@ -11,9 +11,10 @@ RSpec.describe RspecFlaky::Config, :aggregate_failures do before do # Stub these env variables otherwise specs don't behave the same on the CI stub_env('FLAKY_RSPEC_GENERATE_REPORT', nil) - stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', nil) + stub_env('FLAKY_RSPEC_SUITE_REPORT_PATH', nil) stub_env('FLAKY_RSPEC_REPORT_PATH', nil) stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', nil) + stub_env('SKIPPED_FLAKY_TESTS_REPORT_PATH', nil) # Ensure the behavior is the same locally and on CI (where Rails is defined since we run this test as part of the whole suite), i.e. Rails isn't defined allow(described_class).to receive(:rails_path).and_wrap_original do |method, path| path @@ -51,15 +52,15 @@ RSpec.describe RspecFlaky::Config, :aggregate_failures do end describe '.suite_flaky_examples_report_path' do - context "when ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] is not set" do + context "when ENV['FLAKY_RSPEC_SUITE_REPORT_PATH'] is not set" do it 'returns the default path' do - expect(described_class.suite_flaky_examples_report_path).to eq('rspec_flaky/suite-report.json') + expect(described_class.suite_flaky_examples_report_path).to eq('rspec/flaky/suite-report.json') end end - context "when ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] is set" do + context "when ENV['FLAKY_RSPEC_SUITE_REPORT_PATH'] is set" do before do - stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', 'foo/suite-report.json') + stub_env('FLAKY_RSPEC_SUITE_REPORT_PATH', 'foo/suite-report.json') end it 'returns the value of the env variable' do @@ -71,7 +72,7 @@ RSpec.describe RspecFlaky::Config, :aggregate_failures do describe '.flaky_examples_report_path' do context "when ENV['FLAKY_RSPEC_REPORT_PATH'] is not set" do it 'returns the default path' do - expect(described_class.flaky_examples_report_path).to eq('rspec_flaky/report.json') + expect(described_class.flaky_examples_report_path).to eq('rspec/flaky/report.json') end end @@ -89,7 +90,7 @@ RSpec.describe RspecFlaky::Config, :aggregate_failures do describe '.new_flaky_examples_report_path' do context "when ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] is not set" do it 'returns the default path' do - expect(described_class.new_flaky_examples_report_path).to eq('rspec_flaky/new-report.json') + expect(described_class.new_flaky_examples_report_path).to eq('rspec/flaky/new-report.json') end end @@ -103,4 +104,22 @@ RSpec.describe RspecFlaky::Config, :aggregate_failures do end end end + + describe '.skipped_flaky_tests_report_path' do + context "when ENV['SKIPPED_FLAKY_TESTS_REPORT_PATH'] is not set" do + it 'returns the default path' do + expect(described_class.skipped_flaky_tests_report_path).to eq('rspec/flaky/skipped_flaky_tests_report.txt') + end + end + + context "when ENV['SKIPPED_FLAKY_TESTS_REPORT_PATH'] is set" do + before do + stub_env('SKIPPED_FLAKY_TESTS_REPORT_PATH', 
'foo/skipped_flaky_tests_report.txt') + end + + it 'returns the value of the env variable' do + expect(described_class.skipped_flaky_tests_report_path).to eq('foo/skipped_flaky_tests_report.txt') + end + end + end end diff --git a/spec/tooling/rspec_flaky/listener_spec.rb b/spec/tooling/rspec_flaky/listener_spec.rb index 51a815dafbf..62bbe53cac1 100644 --- a/spec/tooling/rspec_flaky/listener_spec.rb +++ b/spec/tooling/rspec_flaky/listener_spec.rb @@ -54,7 +54,7 @@ RSpec.describe RspecFlaky::Listener, :aggregate_failures do before do # Stub these env variables otherwise specs don't behave the same on the CI stub_env('CI_JOB_URL', nil) - stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', nil) + stub_env('FLAKY_RSPEC_SUITE_REPORT_PATH', nil) end describe '#initialize' do @@ -73,11 +73,11 @@ RSpec.describe RspecFlaky::Listener, :aggregate_failures do it_behaves_like 'a valid Listener instance' end - context 'when SUITE_FLAKY_RSPEC_REPORT_PATH is set' do + context 'when FLAKY_RSPEC_SUITE_REPORT_PATH is set' do let(:report_file_path) { 'foo/report.json' } before do - stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', report_file_path) + stub_env('FLAKY_RSPEC_SUITE_REPORT_PATH', report_file_path) end context 'and report file exists' do diff --git a/tooling/bin/parallel_rspec b/tooling/bin/parallel_rspec index 06e5603a6b8..08bfa4997f2 100755 --- a/tooling/bin/parallel_rspec +++ b/tooling/bin/parallel_rspec @@ -16,4 +16,4 @@ OptionParser.new do |opts| end end.parse! -Tooling::ParallelRSpecRunner.run(**options) +exit Tooling::ParallelRSpecRunner.run(**options) diff --git a/tooling/lib/tooling/parallel_rspec_runner.rb b/tooling/lib/tooling/parallel_rspec_runner.rb index b482160d3c0..d3fa5af01e4 100644 --- a/tooling/lib/tooling/parallel_rspec_runner.rb +++ b/tooling/lib/tooling/parallel_rspec_runner.rb @@ -38,12 +38,8 @@ module Tooling Knapsack.logger.info tests_to_run Knapsack.logger.info - if tests_to_run.empty? - Knapsack.logger.info 'No tests to run on this node, exiting.' - return - end - - exec(*rspec_command) + Process.wait Process.spawn(*rspec_command) + Process.last_status.exitstatus end private diff --git a/tooling/rspec_flaky/config.rb b/tooling/rspec_flaky/config.rb index ea18a601c11..36e35671587 100644 --- a/tooling/rspec_flaky/config.rb +++ b/tooling/rspec_flaky/config.rb @@ -7,15 +7,19 @@ module RspecFlaky end def self.suite_flaky_examples_report_path - ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] || rails_path("rspec_flaky/suite-report.json") + ENV['FLAKY_RSPEC_SUITE_REPORT_PATH'] || rails_path("rspec/flaky/suite-report.json") end def self.flaky_examples_report_path - ENV['FLAKY_RSPEC_REPORT_PATH'] || rails_path("rspec_flaky/report.json") + ENV['FLAKY_RSPEC_REPORT_PATH'] || rails_path("rspec/flaky/report.json") end def self.new_flaky_examples_report_path - ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] || rails_path("rspec_flaky/new-report.json") + ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] || rails_path("rspec/flaky/new-report.json") + end + + def self.skipped_flaky_tests_report_path + ENV['SKIPPED_FLAKY_TESTS_REPORT_PATH'] || rails_path("rspec/flaky/skipped_flaky_tests_report.txt") end def self.rails_path(path) |